/**
 * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef PANDA_RUNTIME_MEM_REGION_ALLOCATOR_INL_H
#define PANDA_RUNTIME_MEM_REGION_ALLOCATOR_INL_H

#include "libpandabase/mem/mem.h"
#include "libpandabase/utils/logger.h"
#include "runtime/include/runtime.h"
#include "runtime/include/thread.h"
#include "runtime/include/gc_task.h"
#include "runtime/mem/region_allocator.h"
#include "runtime/mem/region_space-inl.h"
#include "runtime/mem/runslots_allocator-inl.h"
#include "runtime/mem/freelist_allocator-inl.h"
#include "runtime/mem/alloc_config.h"
#include "runtime/arch/memory_helpers.h"

namespace ark::mem {

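// Region allocator base that owns its own region pool. If initSpaceSize > 0, an initial
// shared pool of that size is allocated up front and registered as the pool's region block.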
template <typename LockConfigT>
RegionAllocatorBase<LockConfigT>::RegionAllocatorBase(MemStatsType *memStats, GenerationalSpaces *spaces,
                                                      SpaceType spaceType, AllocatorType allocatorType,
                                                      size_t initSpaceSize, bool extend, size_t regionSize,
                                                      size_t emptyTenuredRegionsMaxCount)
    : memStats_(memStats),
      spaceType_(spaceType),
      spaces_(spaces),
      regionPool_(regionSize, extend, spaces,
                  InternalAllocatorPtr(InternalAllocator<>::GetInternalAllocatorFromRuntime())),
      regionSpace_(spaceType, allocatorType, &regionPool_, emptyTenuredRegionsMaxCount),
      initBlock_(0, nullptr)
{
    ASSERT(spaceType_ == SpaceType::SPACE_TYPE_OBJECT || spaceType_ == SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT ||
           spaceType_ == SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT);
    ASSERT(regionSize != 0);
    initBlock_ = NULLPOOL;
    if (initSpaceSize > 0) {
        ASSERT(initSpaceSize % regionSize == 0);
        initBlock_ = spaces_->AllocSharedPool(initSpaceSize, spaceType, AllocatorType::REGION_ALLOCATOR, this);
        ASSERT(initBlock_.GetMem() != nullptr);
        ASSERT(initBlock_.GetSize() >= initSpaceSize);
        if (initBlock_.GetMem() != nullptr) {
            regionPool_.InitRegionBlock(ToUintPtr(initBlock_.GetMem()), ToUintPtr(initBlock_.GetMem()) + initSpaceSize);
            ASAN_POISON_MEMORY_REGION(initBlock_.GetMem(), initBlock_.GetSize());
        }
    }
}

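// Variant that works on top of an externally owned, shared region pool; the internal
// regionPool_ and initBlock_ members are initialized but left unused.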
template <typename LockConfigT>
RegionAllocatorBase<LockConfigT>::RegionAllocatorBase(MemStatsType *memStats, GenerationalSpaces *spaces,
                                                      SpaceType spaceType, AllocatorType allocatorType,
                                                      RegionPool *sharedRegionPool, size_t emptyTenuredRegionsMaxCount)
    : memStats_(memStats),
      spaces_(spaces),
      spaceType_(spaceType),
      regionPool_(0, false, spaces, nullptr),  // unused
      regionSpace_(spaceType, allocatorType, sharedRegionPool, emptyTenuredRegionsMaxCount),
      initBlock_(0, nullptr)  // unused
{
    ASSERT(spaceType_ == SpaceType::SPACE_TYPE_OBJECT || spaceType_ == SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT);
}

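// Allocates a new region of the given size, type and properties, and publishes it so that
// concurrent GC threads can safely observe its header (see the barrier and TSAN annotation below).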
template <typename LockConfigT>
template <typename AllocConfigT, OSPagesAllocPolicy OS_ALLOC_POLICY>
Region *RegionAllocatorBase<LockConfigT>::CreateAndSetUpNewRegion(size_t regionSize, RegionFlag regionType,
                                                                  RegionFlag properties)
{
    Region *region = AllocRegion<OS_ALLOC_POLICY>(regionSize, regionType, properties);
    if (LIKELY(region != nullptr)) {
        if (regionType == RegionFlag::IS_EDEN) {
            AllocConfigT::OnInitYoungRegion({region->Begin(), region->End()});
        }
        // Issue a memory barrier here to make sure all threads see the references to the bitmaps.
        // The situation:
        // A mutator thread allocates a new object. During object allocation the mutator
        // allocates a new region, sets up the region header, allocates the object in the region
        // and publishes the reference to the object.
        // The GC thread does concurrent marking: it sees the reference to the new object and
        // gets the region by the object address.
        // Since the GC thread doesn't lock region_lock_, we need a memory barrier here to make
        // sure the GC thread sees all bitmaps from the region header.
        arch::FullMemoryBarrier();
        // Getting a region by object address is a bitwise operation, so TSAN doesn't
        // see the relation between region creation and region access.
        // This annotation tells TSAN that this code always executes before
        // the region is accessed.
        // See the corresponding annotation in ObjectToRegion.
        TSAN_ANNOTATE_HAPPENS_BEFORE(region);
    }
    return region;
}

template <typename LockConfigT>
PandaVector<Region *> RegionAllocatorBase<LockConfigT>::GetAllRegions()
{
    PandaVector<Region *> vector;
    os::memory::LockHolder lock(this->regionLock_);
    GetSpace()->IterateRegions([&](Region *region) { vector.push_back(region); });
    return vector;
}

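// Returns the ratio of garbage bytes to the total size of all regions in this space
// (0 if the space has no regions).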
template <typename LockConfigT>
double RegionAllocatorBase<LockConfigT>::CalculateDeadObjectsRatio()
{
    os::memory::LockHolder lock(this->regionLock_);
    size_t totalSize = 0;
    size_t deadObjectSize = 0;
    size_t count = 0;
    GetSpace()->IterateRegions([&count, &totalSize, &deadObjectSize](Region *region) {
        count++;
        totalSize += region->Size();
        deadObjectSize += region->GetGarbageBytes();
    });
    if (totalSize == 0) {
        return 0;
    }
    return static_cast<double>(deadObjectSize) / totalSize;
}

template <typename AllocConfigT, typename LockConfigT>
RegionAllocator<AllocConfigT, LockConfigT>::RegionAllocator(MemStatsType *memStats, GenerationalSpaces *spaces,
                                                            SpaceType spaceType, size_t initSpaceSize, bool extend,
                                                            size_t emptyTenuredRegionsMaxCount)
    : RegionAllocatorBase<LockConfigT>(memStats, spaces, spaceType, AllocatorType::REGION_ALLOCATOR, initSpaceSize,
                                       extend, REGION_SIZE, emptyTenuredRegionsMaxCount),
      fullRegion_(nullptr, 0, 0),
      edenCurrentRegion_(&fullRegion_)
{
}

template <typename AllocConfigT, typename LockConfigT>
RegionAllocator<AllocConfigT, LockConfigT>::RegionAllocator(MemStatsType *memStats, GenerationalSpaces *spaces,
                                                            SpaceType spaceType, RegionPool *sharedRegionPool,
                                                            size_t emptyTenuredRegionsMaxCount)
    : RegionAllocatorBase<LockConfigT>(memStats, spaces, spaceType, AllocatorType::REGION_ALLOCATOR, sharedRegionPool,
                                       emptyTenuredRegionsMaxCount),
      fullRegion_(nullptr, 0, 0),
      edenCurrentRegion_(&fullRegion_)
{
}

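// Regular-size allocation. Eden allocations bump-allocate from the current region and fall
// back, under regionLock_, to creating a new region. Old/pinned allocations first try a
// region popped from the corresponding region queue and create a new region only on failure.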
template <typename AllocConfigT, typename LockConfigT>
template <RegionFlag REGION_TYPE>
void *RegionAllocator<AllocConfigT, LockConfigT>::AllocRegular(size_t alignSize)
{
    static constexpr bool IS_ATOMIC = std::is_same_v<LockConfigT, RegionAllocatorLockConfig::CommonLock>;
    static_assert((REGION_TYPE == RegionFlag::IS_EDEN) || (REGION_TYPE == RegionFlag::IS_OLD) ||
                  (REGION_TYPE == RegionFlag::IS_PINNED));
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (REGION_TYPE == RegionFlag::IS_EDEN) {
        void *mem = GetCurrentRegion<IS_ATOMIC, REGION_TYPE>()->template Alloc<IS_ATOMIC>(alignSize);
        if (mem != nullptr) {
            return mem;
        }

        os::memory::LockHolder lock(this->regionLock_);
        mem = GetCurrentRegion<IS_ATOMIC, REGION_TYPE>()->template Alloc<IS_ATOMIC>(alignSize);
        if (mem != nullptr) {
            return mem;
        }

        Region *region = this->template CreateAndSetUpNewRegion<AllocConfigT>(REGION_SIZE, REGION_TYPE);
        if (LIKELY(region != nullptr)) {
            // We need a memory barrier here to make the allocation visible
            // to all threads before SetCurrentRegion
            mem = region->template Alloc<IS_ATOMIC>(alignSize);
            SetCurrentRegion<IS_ATOMIC, REGION_TYPE>(region);
        }

        return mem;
    }
    void *mem = nullptr;
    Region *regionTo = PopFromRegionQueue<IS_ATOMIC, REGION_TYPE>();
    if (regionTo != nullptr) {
        // We need a memory barrier here to make the allocation visible
        // to all threads before the region is published back to the queue
        mem = regionTo->template Alloc<IS_ATOMIC>(alignSize);
        if (mem != nullptr) {
            if constexpr (REGION_TYPE == RegionFlag::IS_PINNED) {
                regionTo->PinObject();
            }
            PushToRegionQueue<IS_ATOMIC, REGION_TYPE>(regionTo);
            return mem;
        }
    }

    os::memory::LockHolder lock(this->regionLock_);
    regionTo = this->template CreateAndSetUpNewRegion<AllocConfigT>(REGION_SIZE, RegionFlag::IS_OLD);
    if (LIKELY(regionTo != nullptr)) {
        // We need a memory barrier here to make the allocation visible
        // to all threads before the region is published back to the queue
        mem = regionTo->template Alloc<IS_ATOMIC>(alignSize);
        if constexpr (REGION_TYPE == RegionFlag::IS_PINNED) {
            regionTo->PinObject();
        }
        PushToRegionQueue<IS_ATOMIC, REGION_TYPE>(regionTo);
    }
    return mem;
}

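// Main allocation entry point: movable objects of regular size go through AllocRegular,
// while non-movable or oversized objects get a dedicated large-object region.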
template <typename AllocConfigT, typename LockConfigT>
template <RegionFlag REGION_TYPE, bool UPDATE_MEMSTATS>
void *RegionAllocator<AllocConfigT, LockConfigT>::Alloc(size_t size, Alignment align, bool pinned)
{
    ASSERT(GetAlignmentInBytes(align) % GetAlignmentInBytes(DEFAULT_ALIGNMENT) == 0);
    size_t alignSize = AlignUp(size, GetAlignmentInBytes(align));
    void *mem = nullptr;
    // For a movable, regular-size object, allocate it from a region;
    // for a non-movable or large object, allocate a separate large region for it
    if (this->GetSpaceType() != SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT &&
        LIKELY(alignSize <= GetMaxRegularObjectSize())) {
        mem = pinned ? AllocRegular<IS_PINNED>(alignSize) : AllocRegular<REGION_TYPE>(alignSize);
    } else {
        os::memory::LockHolder lock(this->regionLock_);
        Region *region = this->template CreateAndSetUpNewRegion<AllocConfigT>(
            Region::RegionSize(alignSize, REGION_SIZE), REGION_TYPE, IS_LARGE_OBJECT);
        if (LIKELY(region != nullptr)) {
            mem = region->Alloc<false>(alignSize);
        }
    }
    if (mem != nullptr) {
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (UPDATE_MEMSTATS) {
            AllocConfigT::OnAlloc(alignSize, this->spaceType_, this->memStats_);
            AllocConfigT::MemoryInit(mem);
        }
        // Do it after memory init because we can reach this memory after setting the live bitmap
        if ((REGION_TYPE == RegionFlag::IS_OLD) || pinned) {
            auto liveBitmap = this->GetRegion(reinterpret_cast<ObjectHeader *>(mem))->GetLiveBitmap();
            ASSERT(liveBitmap != nullptr);
            liveBitmap->AtomicTestAndSet(mem);
        }
    }
    return mem;
}

template <typename AllocConfigT, typename LockConfigT>
void RegionAllocator<AllocConfigT, LockConfigT>::PinObject(ObjectHeader *object)
{
    auto *region = ObjectToRegion(object);
    ASSERT(region != nullptr);
    region->PinObject();
}

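// Unpins an object; when its region no longer holds any pinned objects, the region is also
// removed from the pinned-region queue.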
template <typename AllocConfigT, typename LockConfigT>
void RegionAllocator<AllocConfigT, LockConfigT>::UnpinObject(ObjectHeader *object)
{
    auto *region = ObjectToRegion(object);
    ASSERT(region != nullptr);
    region->UnpinObject();
    if (!region->HasPinnedObjects()) {
        static constexpr bool IS_ATOMIC = std::is_same_v<LockConfigT, RegionAllocatorLockConfig::CommonLock>;
        PandaVector<Region *> *regionQueue = GetRegionQueuePointer<RegionFlag::IS_PINNED>();
        os::memory::LockHolder<LockConfigT, IS_ATOMIC> lock(*GetQueueLock<RegionFlag::IS_PINNED>());
        auto itRegion = std::find(regionQueue->begin(), regionQueue->end(), region);
        if (itRegion != regionQueue->end()) {
            regionQueue->erase(itRegion);
        }
    }
}

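// Creates a TLAB of the requested size, preferring a retained eden region that still has
// enough space for TLABs and falling back to a freshly created eden region.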
template <typename AllocConfigT, typename LockConfigT>
TLAB *RegionAllocator<AllocConfigT, LockConfigT>::CreateTLAB(size_t size)
{
    ASSERT(size <= GetMaxRegularObjectSize());
    ASSERT(AlignUp(size, GetAlignmentInBytes(DEFAULT_ALIGNMENT)) == size);
    TLAB *tlab = nullptr;

    {
        os::memory::LockHolder lock(this->regionLock_);
        Region *region = nullptr;
        // First search the map of partially used TLAB regions
        auto largestTlab = retainedTlabs_.begin();
        if (largestTlab != retainedTlabs_.end() && largestTlab->first >= size) {
            region = largestTlab->second;
            LOG(DEBUG, ALLOC) << "Use retained tlabs region " << region;
            retainedTlabs_.erase(largestTlab);
            ASSERT(region->HasFlag(RegionFlag::IS_EDEN));
        }

        // Allocate a free region if no partially used TLAB region has enough space
        if (region == nullptr) {
            region = this->template CreateAndSetUpNewRegion<AllocConfigT>(REGION_SIZE, RegionFlag::IS_EDEN);
            if (LIKELY(region != nullptr)) {
                region->CreateTLABSupport();
            }
        }
        if (region != nullptr) {
            tlab = CreateTLABInRegion(region, size);
            auto remainingSize = region->GetRemainingSizeForTLABs();
            if (remainingSize >= size) {
                LOG(DEBUG, ALLOC) << "Add a region " << region << " with remaining size " << remainingSize
                                  << " to retained_tlabs";
                retainedTlabs_.insert(std::make_pair(remainingSize, region));
            }
        }
    }

    return tlab;
}

template <typename AllocConfigT, typename LockConfigT>
TLAB *RegionAllocator<AllocConfigT, LockConfigT>::CreateRegionSizeTLAB()
{
    TLAB *tlab = nullptr;

    os::memory::LockHolder lock(this->regionLock_);
    Region *region = this->template CreateAndSetUpNewRegion<AllocConfigT>(REGION_SIZE, RegionFlag::IS_EDEN);
    if (LIKELY(region != nullptr)) {
        region->CreateTLABSupport();
        size_t size = region->GetRemainingSizeForTLABs();
        tlab = CreateTLABInRegion(region, size);
    }

    return tlab;
}

template <typename AllocConfigT, typename LockConfigT>
TLAB *RegionAllocator<AllocConfigT, LockConfigT>::CreateTLABInRegion(Region *region, size_t size)
{
    // The same chunk of a region is never reused for different TLABs:
    // CreateTLAB carves a new chunk out of the region and updates the remaining size
    TLAB *tlab = region->CreateTLAB(size);
    ASSERT(tlab != nullptr);
    LOG(DEBUG, ALLOC) << "Found a region " << region << " and created tlab " << tlab << " with memory starting at "
                      << tlab->GetStartAddr() << " and with size " << tlab->GetSize();
    return tlab;
}

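// Returns all non-eden, non-reserved regions whose garbage ratio is at least
// garbageThreshold, sorted by garbage bytes in ascending order; completely empty regions
// are forced to the end of the list.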
template <typename AllocConfigT, typename LockConfigT>
template <bool INCLUDE_CURRENT_REGION>
PandaVector<std::pair<uint32_t, Region *>> RegionAllocator<AllocConfigT, LockConfigT>::GetTopGarbageRegions(
    double garbageThreshold)
{
    PandaVector<std::pair<uint32_t, Region *>> topGarbageRegions;
    this->GetSpace()->IterateRegions([&](Region *region) {
        if (region->HasFlag(IS_EDEN) || region->HasFlag(RegionFlag::IS_RESERVED)) {
            return;
        }
        if constexpr (!INCLUDE_CURRENT_REGION) {
            if (IsInCurrentRegion<true, RegionFlag::IS_OLD>(region)) {
                return;
            }
        }
        auto garbageBytes = region->GetGarbageBytes();
        if (region->GetLiveBytes() == 0U) {
            // Assign a huge garbage-bytes value to an empty region to place it at the end of the array
            garbageBytes = DEFAULT_REGION_SIZE;
        }
        if (static_cast<double>(garbageBytes) / region->GetAllocatedBytes() >= garbageThreshold) {
            topGarbageRegions.emplace_back(garbageBytes, region);
        }
    });
    std::sort(topGarbageRegions.begin(), topGarbageRegions.end());
    return topGarbageRegions;
}

template <typename AllocConfigT, typename LockConfigT>
template <RegionFlag REGIONS_TYPE>
PandaVector<Region *> RegionAllocator<AllocConfigT, LockConfigT>::GetAllSpecificRegions()
{
    PandaVector<Region *> vector;
    this->GetSpace()->IterateRegions([&](Region *region) {
        if (region->HasFlag(REGIONS_TYPE)) {
            vector.push_back(region);
        }
    });
    return vector;
}

template <typename AllocConfigT, typename LockConfigT>
double RegionAllocator<AllocConfigT, LockConfigT>::CalculateInternalOldFragmentation()
{
    constexpr RegionFlag REGIONS_TYPE = RegionFlag::IS_OLD;
    size_t totalFreeSize = 0;
    size_t totalAllocatedSize = 0;
    this->GetSpace()->IterateRegions([this, &totalFreeSize, &totalAllocatedSize](Region *region) {
        if (!region->HasFlag(REGIONS_TYPE)) {
            return;
        }
        if (IsInCurrentRegion<false, REGIONS_TYPE>(region)) {
            return;
        }
        size_t allocatedBytes = region->GetAllocatedBytes();
        size_t regionSize = region->Size();
        ASSERT(regionSize >= allocatedBytes);
        totalFreeSize += regionSize - allocatedBytes;
        totalAllocatedSize += allocatedBytes;
    });
    if (totalAllocatedSize == 0) {
        return 0;
    }
    return static_cast<double>(totalFreeSize) / totalAllocatedSize;
}

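// Evacuates every region of REGIONS_TYPE_FROM into regions of REGIONS_TYPE_TO; compacting a
// region type into itself is not supported (see the assert below).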
template <typename AllocConfigT, typename LockConfigT>
template <RegionFlag REGIONS_TYPE_FROM, RegionFlag REGIONS_TYPE_TO, bool USE_MARKED_BITMAP>
void RegionAllocator<AllocConfigT, LockConfigT>::CompactAllSpecificRegions(const GCObjectVisitor &deathChecker,
                                                                           const ObjectVisitorEx &moveHandler)
{
    // NOLINTNEXTLINE(readability-braces-around-statements)
    if constexpr (REGIONS_TYPE_FROM == REGIONS_TYPE_TO) {  // NOLINT(bugprone-suspicious-semicolon)
        // NOTE(aemelenko): Implement this if we ever need to call this method with the same region type.
        // There is an issue with IterateRegions while creating a new region.
        ASSERT(REGIONS_TYPE_FROM != REGIONS_TYPE_TO);
        ResetCurrentRegion<false, REGIONS_TYPE_TO>();
    }
    this->GetSpace()->IterateRegions([this, &deathChecker, &moveHandler](Region *region) {
        if (!region->HasFlag(REGIONS_TYPE_FROM)) {
            return;
        }
        CompactSpecificRegion<REGIONS_TYPE_FROM, REGIONS_TYPE_TO, USE_MARKED_BITMAP>(region, deathChecker, moveHandler);
    });
}

template <typename AllocConfigT, typename LockConfigT>
template <RegionFlag REGIONS_TYPE_FROM, RegionFlag REGIONS_TYPE_TO, bool USE_MARKED_BITMAP>
void RegionAllocator<AllocConfigT, LockConfigT>::CompactSeveralSpecificRegions(const PandaVector<Region *> &regions,
                                                                               const GCObjectVisitor &deathChecker,
                                                                               const ObjectVisitorEx &moveHandler)
{
    for (auto i : regions) {
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (REGIONS_TYPE_FROM == REGIONS_TYPE_TO) {
            [[maybe_unused]] bool foundRegion = IsInCurrentRegion<false, REGIONS_TYPE_TO>(i);
            ASSERT(!foundRegion);
        }
        CompactSpecificRegion<REGIONS_TYPE_FROM, REGIONS_TYPE_TO, USE_MARKED_BITMAP>(i, deathChecker, moveHandler);
    }
}

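// Evacuates the live objects of a single region: each survivor is copied into the current
// destination region (creating a new one when it fills up), marked in the destination's
// live bitmap, and reported to moveHandler with its old and new addresses.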
template <typename AllocConfigT, typename LockConfigT>
template <RegionFlag REGIONS_TYPE_FROM, RegionFlag REGIONS_TYPE_TO, bool USE_MARKED_BITMAP>
void RegionAllocator<AllocConfigT, LockConfigT>::CompactSpecificRegion(Region *region,
                                                                       const GCObjectVisitor &deathChecker,
                                                                       const ObjectVisitorEx &moveHandler)
{
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (REGIONS_TYPE_FROM == REGIONS_TYPE_TO) {
        // It is bad if we compact one region into itself.
        [[maybe_unused]] bool isCurrentRegion = IsInCurrentRegion<true, REGIONS_TYPE_TO>(region);
        ASSERT(!isCurrentRegion);
    }
    auto createNewRegion = [&]() {
        os::memory::LockHolder lock(this->regionLock_);
        Region *regionTo = this->template CreateAndSetUpNewRegion<AllocConfigT>(REGION_SIZE, REGIONS_TYPE_TO);
        ASSERT(regionTo != nullptr);
        return regionTo;
    };

    Region *regionTo = PopFromRegionQueue<true, REGIONS_TYPE_TO>();
    if (regionTo == nullptr) {
        regionTo = createNewRegion();
    }
    size_t liveBytes = 0;
    // Don't use atomic operations in this method because we work with a region that is not shared
    auto visitor = [&](ObjectHeader *object) {
        // If we use the mark bitmap, we iterate only over live objects, so the death checker is not needed
        if constexpr (!USE_MARKED_BITMAP) {
            if (deathChecker(object) != ObjectStatus::ALIVE_OBJECT) {
                return;
            }
        }
        size_t objectSize = GetObjectSize(object);
        size_t alignedSize = AlignUp(objectSize, DEFAULT_ALIGNMENT_IN_BYTES);
        void *dst = regionTo->template Alloc<false>(alignedSize);
        if (dst == nullptr) {
            regionTo->SetLiveBytes(regionTo->GetLiveBytes() + liveBytes);
            liveBytes = 0;
            regionTo = createNewRegion();
            dst = regionTo->template Alloc<false>(alignedSize);
        }
        // Don't initialize memory for the object here because we will use memcpy anyway
        ASSERT(dst != nullptr);
        memcpy_s(dst, objectSize, object, objectSize);
        // The moved object needs to be marked as alive
        ASSERT(regionTo->GetLiveBitmap() != nullptr);
        regionTo->IncreaseAllocatedObjects();
        regionTo->GetLiveBitmap()->Set(dst);
        liveBytes += alignedSize;
        moveHandler(object, static_cast<ObjectHeader *>(dst));
    };

    ASSERT(region->HasFlag(REGIONS_TYPE_FROM));

    const std::function<void(ObjectHeader *)> visitorFunctor(visitor);
    // NOLINTNEXTLINE(readability-braces-around-statements)
    if constexpr (USE_MARKED_BITMAP) {
        region->GetMarkBitmap()->IterateOverMarkedChunks(
            [&visitorFunctor](void *objectAddr) { visitorFunctor(static_cast<ObjectHeader *>(objectAddr)); });
    } else {  // NOLINT(readability-misleading-indentation)
        region->IterateOverObjects(visitorFunctor);
    }
    regionTo->SetLiveBytes(regionTo->GetLiveBytes() + liveBytes);

    PushToRegionQueue<true, REGIONS_TYPE_TO>(regionTo);
}

template <typename AllocConfigT, typename LockConfigT>
void RegionAllocator<AllocConfigT, LockConfigT>::ReserveRegionIfNeeded()
{
    if (reservedRegion_ != nullptr) {
        return;
    }
    reservedRegion_ = this->GetSpace()->NewRegion(REGION_SIZE, RegionFlag::IS_OLD, RegionFlag::IS_RESERVED);
    ASSERT(reservedRegion_ != nullptr);
    reservedRegion_->RmvFlag(RegionFlag::IS_OLD);
}

template <typename AllocConfigT, typename LockConfigT>
void RegionAllocator<AllocConfigT, LockConfigT>::ReleaseReservedRegion()
{
    ASSERT(reservedRegion_ != nullptr);
    this->GetSpace()->template FreeRegion<RegionSpace::ReleaseRegionsPolicy::NoRelease, OSPagesPolicy::NO_RETURN>(
        reservedRegion_);
    reservedRegion_ = nullptr;
}

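// Promotes a young region in place instead of evacuating it: builds the live bitmap from
// the mark bitmap, updates the allocated-objects accounting, and hands the region over to
// the tenured space. The returned count is non-zero only on the marked-bitmap, non-full-GC path.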
template <typename AllocConfigT, typename LockConfigT>
template <bool USE_MARKED_BITMAP, bool FULL_GC>
size_t RegionAllocator<AllocConfigT, LockConfigT>::PromoteYoungRegion(Region *region,
                                                                      const GCObjectVisitor &deathChecker,
                                                                      const ObjectVisitor &aliveObjectsHandler)
{
    ASSERT(region->HasFlag(RegionFlag::IS_EDEN));
    size_t aliveMoveCount = 0;
    // Create the live bitmap here and copy the live objects from the mark bitmap into it
    region->CreateLiveBitmap();
    region->CloneMarkBitmapToLiveBitmap();
    [[maybe_unused]] auto visitor = [&aliveObjectsHandler, &region](ObjectHeader *object) {
        aliveObjectsHandler(object);
        region->IncreaseAllocatedObjects();
    };
    // NOLINTNEXTLINE(readability-braces-around-statements)
    if constexpr (USE_MARKED_BITMAP) {
        if constexpr (FULL_GC) {
            region->GetMarkBitmap()->IterateOverMarkedChunks(
                [&visitor](void *objectAddr) { visitor(static_cast<ObjectHeader *>(objectAddr)); });
        } else {
            aliveMoveCount = region->UpdateAllocatedObjects();
        }
    } else {  // NOLINT(readability-misleading-indentation)
        auto liveCheckVisitor = [&visitor, &deathChecker](ObjectHeader *object) {
            if (deathChecker(object) == ObjectStatus::ALIVE_OBJECT) {
                visitor(object);
            }
        };
        region->IterateOverObjects(liveCheckVisitor);
    }
    // The live-bytes value set here is not accurate; it will be updated later
    region->SetLiveBytes(region->GetAllocatedBytes());
    this->GetSpace()->PromoteYoungRegion(region);
    return aliveMoveCount;
}

template <typename AllocConfigT, typename LockConfigT>
template <RegionFlag REGIONS_TYPE>
void RegionAllocator<AllocConfigT, LockConfigT>::ResetAllSpecificRegions()
{
    ResetCurrentRegion<false, REGIONS_TYPE>();
    this->GetSpace()->IterateRegions([&](Region *region) {
        if (!region->HasFlag(REGIONS_TYPE)) {
            return;
        }
        this->GetSpace()->template FreeRegion<RegionSpace::ReleaseRegionsPolicy::NoRelease>(region);
    });
    if constexpr (REGIONS_TYPE == RegionFlag::IS_EDEN) {
        retainedTlabs_.clear();
    }
}

template <typename AllocConfigT, typename LockConfigT>
template <RegionFlag REGIONS_TYPE, RegionSpace::ReleaseRegionsPolicy REGIONS_RELEASE_POLICY,
          OSPagesPolicy OS_PAGES_POLICY, bool NEED_LOCK, typename Container>
void RegionAllocator<AllocConfigT, LockConfigT>::ResetSeveralSpecificRegions(const Container &regions)
{
    os::memory::LockHolder<LockConfigT, NEED_LOCK> lock(this->regionLock_);
    ASSERT(REGIONS_TYPE != RegionFlag::IS_EDEN);
    ASSERT((REGIONS_TYPE != RegionFlag::IS_EDEN) || (retainedTlabs_.empty()));
    for (Region *region : regions) {
        ASSERT(!(IsInCurrentRegion<false, REGIONS_TYPE>(region)));
        ASSERT(region->HasFlag(REGIONS_TYPE));
        this->GetSpace()->template FreeRegion<REGIONS_RELEASE_POLICY, OS_PAGES_POLICY>(region);
    }
}

template <typename AllocConfigT, typename LockConfigT, typename ObjectAllocator>
RegionNonmovableAllocator<AllocConfigT, LockConfigT, ObjectAllocator>::RegionNonmovableAllocator(
    MemStatsType *memStats, GenerationalSpaces *spaces, SpaceType spaceType, size_t initSpaceSize, bool extend)
    : RegionAllocatorBase<LockConfigT>(memStats, spaces, spaceType, ObjectAllocator::GetAllocatorType(), initSpaceSize,
                                       extend, REGION_SIZE, 0),
      objectAllocator_(memStats, spaceType)
{
}

template <typename AllocConfigT, typename LockConfigT, typename ObjectAllocator>
RegionNonmovableAllocator<AllocConfigT, LockConfigT, ObjectAllocator>::RegionNonmovableAllocator(
    MemStatsType *memStats, GenerationalSpaces *spaces, SpaceType spaceType, RegionPool *sharedRegionPool)
    : RegionAllocatorBase<LockConfigT>(memStats, spaces, spaceType, ObjectAllocator::GetAllocatorType(),
                                       sharedRegionPool, 0),
      objectAllocator_(memStats, spaceType)
{
}

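// Allocates a non-movable object via the underlying object allocator, adding a new region
// as a backing memory pool on demand, and marks the result in the region's live bitmap.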
template <typename AllocConfigT, typename LockConfigT, typename ObjectAllocator>
void *RegionNonmovableAllocator<AllocConfigT, LockConfigT, ObjectAllocator>::Alloc(size_t size, Alignment align)
{
    ASSERT(GetAlignmentInBytes(align) % GetAlignmentInBytes(DEFAULT_ALIGNMENT) == 0);
    size_t alignSize = AlignUp(size, GetAlignmentInBytes(align));
    ASSERT(alignSize <= ObjectAllocator::GetMaxSize());

    void *mem = objectAllocator_.Alloc(alignSize);
    if (UNLIKELY(mem == nullptr)) {
        mem = NewRegionAndRetryAlloc(size, align);
        if (UNLIKELY(mem == nullptr)) {
            return nullptr;
        }
    }
    auto liveBitmap = this->GetRegion(reinterpret_cast<ObjectHeader *>(mem))->GetLiveBitmap();
    ASSERT(liveBitmap != nullptr);
    liveBitmap->AtomicTestAndSet(mem);
    return mem;
}

template <typename AllocConfigT, typename LockConfigT, typename ObjectAllocator>
void RegionNonmovableAllocator<AllocConfigT, LockConfigT, ObjectAllocator>::Free(void *mem)
{
    this->GetRegion(reinterpret_cast<ObjectHeader *>(mem))->GetLiveBitmap()->AtomicTestAndClear(mem);

    objectAllocator_.Free(mem);
}

template <typename AllocConfigT, typename LockConfigT, typename ObjectAllocator>
void RegionNonmovableAllocator<AllocConfigT, LockConfigT, ObjectAllocator>::Collect(const GCObjectVisitor &deathChecker)
{
    os::memory::LockHolder lock(this->regionLock_);
    objectAllocator_.Collect(deathChecker);
}

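// Collects the regions whose memory pools the object allocator reports as free, passes the
// whole batch to regionVisitor, and then returns each of them to the region space.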
template <typename AllocConfigT, typename LockConfigT, typename ObjectAllocator>
void RegionNonmovableAllocator<AllocConfigT, LockConfigT, ObjectAllocator>::VisitAndRemoveFreeRegions(
    const RegionsVisitor &regionVisitor)
{
    os::memory::LockHolder lock(this->regionLock_);
    // Collect the free regions into a vector to avoid doing extra work
    // with regionVisitor inside objectAllocator_.
    PandaVector<Region *> freeRegions;

    objectAllocator_.VisitAndRemoveFreePools([&freeRegions](void *mem, [[maybe_unused]] size_t size) {
        auto *region = AddrToRegion(mem);
        ASSERT(ToUintPtr(mem) + size == region->End());
        // We don't remove this region here because we don't want to do extra work
        // with the visitor inside this callback.
        freeRegions.push_back(region);
    });

    if (!freeRegions.empty()) {
        regionVisitor(freeRegions);

        for (auto i : freeRegions) {
            this->GetSpace()->FreeRegion(i);
        }
    }
}

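// Slow path: keeps creating non-movable regions and feeding them to the object allocator as
// memory pools until the allocation succeeds or region creation fails.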
template <typename AllocConfigT, typename LockConfigT, typename ObjectAllocator>
void *RegionNonmovableAllocator<AllocConfigT, LockConfigT, ObjectAllocator>::NewRegionAndRetryAlloc(size_t objectSize,
                                                                                                    Alignment align)
{
    os::memory::LockHolder lock(this->regionLock_);
    size_t poolHeadSize = AlignUp(Region::HeadSize(), ObjectAllocator::PoolAlign());
    ASSERT(AlignUp(poolHeadSize + objectSize, REGION_SIZE) == REGION_SIZE);
    while (true) {
        Region *region = this->template CreateAndSetUpNewRegion<AllocConfigT>(REGION_SIZE, RegionFlag::IS_NONMOVABLE);
        if (UNLIKELY(region == nullptr)) {
            return nullptr;
        }
        ASSERT(region->GetLiveBitmap() != nullptr);
        uintptr_t alignedPool = ToUintPtr(region) + poolHeadSize;
        bool addedMemoryPool = objectAllocator_.AddMemoryPool(ToVoidPtr(alignedPool), REGION_SIZE - poolHeadSize);
        ASSERT(addedMemoryPool);
        if (UNLIKELY(!addedMemoryPool)) {
            LOG(FATAL, ALLOC) << "ObjectAllocator: couldn't add memory pool to allocator";
        }
        void *mem = objectAllocator_.Alloc(objectSize, align);
        if (LIKELY(mem != nullptr)) {
            return mem;
        }
    }
    return nullptr;
}

template <typename AllocConfigT, typename LockConfigT, typename ObjectAllocator>
double RegionNonmovableAllocator<AllocConfigT, LockConfigT, ObjectAllocator>::CalculateDeadObjectsRatio()
{
#ifdef PANDA_MEASURE_FRAGMENTATION
    os::memory::LockHolder lock(this->regionLock_);
    size_t totalSize = 0;
    size_t liveObjectSize = 0;
    size_t count = 0;
    this->GetSpace()->IterateRegions([&count, &totalSize, &liveObjectSize](Region *region) {
        count++;
        totalSize += region->Size();
        liveObjectSize += region->GetLiveBytes();
    });
    if (totalSize == 0) {
        return 0;
    }
    auto allocatedBytes = objectAllocator_.GetAllocatedBytes();
    ASSERT(allocatedBytes >= liveObjectSize);
    auto deadObjectSize = allocatedBytes - liveObjectSize;
    return static_cast<double>(deadObjectSize) / totalSize;
#else
    LOG(FATAL, ALLOC) << "Not implemented. Requires a build with PANDA_MEASURE_FRAGMENTATION defined";
    UNREACHABLE();
#endif  // PANDA_MEASURE_FRAGMENTATION
}

template <typename AllocConfigT, typename LockConfigT>
RegionHumongousAllocator<AllocConfigT, LockConfigT>::RegionHumongousAllocator(MemStatsType *memStats,
                                                                              GenerationalSpaces *spaces,
                                                                              SpaceType spaceType)
    : RegionAllocatorBase<LockConfigT>(memStats, spaces, spaceType, AllocatorType::REGION_ALLOCATOR, 0, true,
                                       REGION_SIZE, 0)
{
}

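// Allocates a humongous object in a dedicated large-object region; the region's memory is
// requested already zeroed, so no explicit memory initialization is performed here.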
template <typename AllocConfigT, typename LockConfigT>
template <bool UPDATE_MEMSTATS>
void *RegionHumongousAllocator<AllocConfigT, LockConfigT>::Alloc(size_t size, Alignment align)
{
    ASSERT(GetAlignmentInBytes(align) % GetAlignmentInBytes(DEFAULT_ALIGNMENT) == 0);
    size_t alignSize = AlignUp(size, GetAlignmentInBytes(align));
    Region *region = nullptr;
    void *mem = nullptr;
    // Allocate a separate large region for the object
    {
        os::memory::LockHolder lock(this->regionLock_);
        region = this->template CreateAndSetUpNewRegion<AllocConfigT, OSPagesAllocPolicy::ZEROED_MEMORY>(
            Region::RegionSize(alignSize, REGION_SIZE), IS_OLD, IS_LARGE_OBJECT);
        if (LIKELY(region != nullptr)) {
            mem = region->Alloc<false>(alignSize);
            ASSERT(mem != nullptr);
            ASSERT(region->GetLiveBitmap() != nullptr);
            // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
            if constexpr (UPDATE_MEMSTATS) {
                AllocConfigT::OnAlloc(region->Size(), this->spaceType_, this->memStats_);
                // We don't initialize the memory here because it is
                // requested already zeroed
            }
            // Do it after memory init because we can reach this memory after setting the live bitmap
            region->GetLiveBitmap()->AtomicTestAndSet(mem);
        }
    }
    return mem;
}

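// Collects every humongous region (marking it free when its single object is dead), lets
// regionVisitor process the freed batch, and then resets and returns those regions.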
template <typename AllocConfigT, typename LockConfigT>
void RegionHumongousAllocator<AllocConfigT, LockConfigT>::CollectAndRemoveFreeRegions(
    const RegionsVisitor &regionVisitor, const GCObjectVisitor &deathChecker)
{
    // Collect the free regions into a vector to avoid doing extra work with regionVisitor during region iteration
    PandaVector<Region *> freeRegions;

    {
        os::memory::LockHolder lock(this->regionLock_);
        this->GetSpace()->IterateRegions([this, &deathChecker, &freeRegions](Region *region) {
            this->Collect(region, deathChecker);
            if (region->HasFlag(IS_FREE)) {
                freeRegions.push_back(region);
            }
        });
    }

    if (!freeRegions.empty()) {
        regionVisitor(freeRegions);

        for (auto i : freeRegions) {
            os::memory::LockHolder lock(this->regionLock_);
            ResetRegion(i);
        }
    }
}

template <typename AllocConfigT, typename LockConfigT>
void RegionHumongousAllocator<AllocConfigT, LockConfigT>::Collect(Region *region, const GCObjectVisitor &deathChecker)
{
    ASSERT(region->HasFlag(RegionFlag::IS_LARGE_OBJECT));
    ObjectHeader *objectToProceed = region->GetLargeObject();
    if (deathChecker(objectToProceed) == ObjectStatus::DEAD_OBJECT) {
        region->AddFlag(RegionFlag::IS_FREE);
    }
}

template <typename AllocConfigT, typename LockConfigT>
void RegionHumongousAllocator<AllocConfigT, LockConfigT>::ResetRegion(Region *region)
{
    ASSERT(region->HasFlag(RegionFlag::IS_FREE));
    region->RmvFlag(RegionFlag::IS_FREE);
    this->GetSpace()->FreeRegion(region);
}

template <typename AllocConfigT, typename LockConfigT>
double RegionHumongousAllocator<AllocConfigT, LockConfigT>::CalculateInternalFragmentation()
{
    size_t totalFreeSize = 0;
    size_t totalAllocatedSize = 0;
    this->GetSpace()->IterateRegions([&totalFreeSize, &totalAllocatedSize](Region *region) {
        auto allocatedBytes = region->GetAllocatedBytes();
        auto regionSize = region->Size();
        ASSERT(regionSize >= allocatedBytes);
        auto freeSize = regionSize - allocatedBytes;
        totalFreeSize += freeSize;
        totalAllocatedSize += allocatedBytes;
    });
    if (totalAllocatedSize == 0) {
        return 0;
    }
    return static_cast<double>(totalFreeSize) / totalAllocatedSize;
}

template <typename AllocConfigT, typename LockConfigT>
using RegionRunslotsAllocator = RegionNonmovableAllocator<AllocConfigT, LockConfigT, RunSlotsAllocator<AllocConfigT>>;

template <typename AllocConfigT, typename LockConfigT>
using RegionFreeListAllocator = RegionNonmovableAllocator<AllocConfigT, LockConfigT, FreeListAllocator<AllocConfigT>>;

}  // namespace ark::mem

#endif  // PANDA_RUNTIME_MEM_REGION_ALLOCATOR_INL_H