/**
 * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef PANDA_RUNTIME_MEM_REGION_ALLOCATOR_H
#define PANDA_RUNTIME_MEM_REGION_ALLOCATOR_H

#include <atomic>
#include <cstdint>
#include <functional>

#include "runtime/mem/region_space.h"

namespace ark {
class ManagedThread;
struct GCTask;
}  // namespace ark

namespace ark::mem {

class RegionAllocatorLockConfig {
public:
    using CommonLock = os::memory::Mutex;
    using DummyLock = os::memory::DummyLock;
};

using RegionsVisitor = std::function<void(PandaVector<Region *> &vector)>;

/// Return the region which corresponds to the start of the object.
static inline Region *ObjectToRegion(const ObjectHeader *object)
{
    auto *region = reinterpret_cast<Region *>(ToUintPtr(object) & ~DEFAULT_REGION_MASK);
    ASSERT(ToUintPtr(PoolManager::GetMmapMemPool()->GetStartAddrPoolForAddr(object)) == ToUintPtr(region));
    // Getting a region from an object is a bit operation, so TSAN does not
    // see the relationship between region creation and region access.
    // This annotation tells TSAN that this code always executes after
    // the region has been created.
    // See the corresponding annotation in RegionAllocatorBase::CreateAndSetUpNewRegion
    TSAN_ANNOTATE_HAPPENS_AFTER(region);
    return region;
}
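
// Illustrative sketch (not part of the original header): regions are aligned
// to the region size, so clearing the low address bits of any object pointer
// yields the owning Region header in O(1), with no lookup table:
//
//     Region *r = ObjectToRegion(obj);                    // obj & ~DEFAULT_REGION_MASK
//     ASSERT((ToUintPtr(r) & DEFAULT_REGION_MASK) == 0);  // region start is mask-aligned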

/// Check whether two addresses reside in the same region of size 2^regionSizeBits.
static inline bool IsSameRegion(const void *o1, const void *o2, size_t regionSizeBits)
{
    return ((ToUintPtr(o1) ^ ToUintPtr(o2)) >> regionSizeBits) == 0;
}
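
// Why the XOR/shift works (illustrative reasoning, not in the original): two
// addresses lie in the same region exactly when they agree in every bit above
// the region-size boundary; XOR cancels the common high bits and the shift
// discards the in-region offset. For a hypothetical regionSizeBits of 18
// (256 KB regions), with p at a region start:
//
//     IsSameRegion(p, ToVoidPtr(ToUintPtr(p) + 1), 18);           // true: same window
//     IsSameRegion(p, ToVoidPtr(ToUintPtr(p) + (1U << 18)), 18);  // false: next window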

/// Return the region which corresponds to the address.
static inline Region *AddrToRegion(const void *addr)
{
    auto regionAddr = PoolManager::GetMmapMemPool()->GetStartAddrPoolForAddr(addr);
    return static_cast<Region *>(regionAddr);
}

template <typename LockConfigT>
class RegionAllocatorBase {
public:
    NO_MOVE_SEMANTIC(RegionAllocatorBase);
    NO_COPY_SEMANTIC(RegionAllocatorBase);

    explicit RegionAllocatorBase(MemStatsType *memStats, GenerationalSpaces *spaces, SpaceType spaceType,
                                 AllocatorType allocatorType, size_t initSpaceSize, bool extend, size_t regionSize,
                                 size_t emptyTenuredRegionsMaxCount);
    explicit RegionAllocatorBase(MemStatsType *memStats, GenerationalSpaces *spaces, SpaceType spaceType,
                                 AllocatorType allocatorType, RegionPool *sharedRegionPool,
                                 size_t emptyTenuredRegionsMaxCount);

    virtual ~RegionAllocatorBase()
    {
        ClearRegionsPool();
    }

    Region *GetRegion(const ObjectHeader *object) const
    {
        return regionSpace_.GetRegion(object);
    }

    RegionSpace *GetSpace()
    {
        return &regionSpace_;
    }

    const RegionSpace *GetSpace() const
    {
        return &regionSpace_;
    }

    PandaVector<Region *> GetAllRegions();

    template <RegionFlag REGION_TYPE, OSPagesPolicy OS_PAGES_POLICY>
    void ReleaseEmptyRegions()
    {
        this->GetSpace()->template ReleaseEmptyRegions<REGION_TYPE, OS_PAGES_POLICY>();
    }

    virtual double CalculateDeadObjectsRatio();

protected:
    void ClearRegionsPool()
    {
        regionSpace_.FreeAllRegions();

        if (initBlock_.GetMem() != nullptr) {
            spaces_->FreeSharedPool(initBlock_.GetMem(), initBlock_.GetSize());
            initBlock_ = NULLPOOL;
        }
    }

    template <OSPagesAllocPolicy OS_ALLOC_POLICY>
    Region *AllocRegion(size_t regionSize, RegionFlag edenOrOldOrNonmovable, RegionFlag properties)
    {
        return regionSpace_.NewRegion(regionSize, edenOrOldOrNonmovable, properties, OS_ALLOC_POLICY);
    }

    SpaceType GetSpaceType() const
    {
        return spaceType_;
    }

    template <typename AllocConfigT, OSPagesAllocPolicy OS_ALLOC_POLICY = OSPagesAllocPolicy::NO_POLICY>
    Region *CreateAndSetUpNewRegion(size_t regionSize, RegionFlag regionType, RegionFlag properties = IS_UNUSED)
        REQUIRES(regionLock_);

    // NOLINTNEXTLINE(misc-non-private-member-variables-in-classes)
    LockConfigT regionLock_;
    // NOLINTNEXTLINE(misc-non-private-member-variables-in-classes)
    MemStatsType *memStats_;
    // NOLINTNEXTLINE(misc-non-private-member-variables-in-classes)
    SpaceType spaceType_;
    // NOLINTNEXTLINE(misc-non-private-member-variables-in-classes)
    GenerationalSpaces *spaces_;
    // NOLINTNEXTLINE(misc-non-private-member-variables-in-classes)
    RegionPool regionPool_;  // self-created pool, used only by this allocator
    // NOLINTNEXTLINE(misc-non-private-member-variables-in-classes)
    RegionSpace regionSpace_;  // the target region space used by this allocator
    // NOLINTNEXTLINE(misc-non-private-member-variables-in-classes)
    Pool initBlock_;  // the initial memory block for region allocation
};

/// @brief A region-based bump-pointer allocator.
template <typename AllocConfigT, typename LockConfigT = RegionAllocatorLockConfig::CommonLock>
class RegionAllocator final : public RegionAllocatorBase<LockConfigT> {
public:
    static constexpr bool USE_PARTIAL_TLAB = true;
    static constexpr size_t REGION_SIZE = DEFAULT_REGION_SIZE;

    NO_MOVE_SEMANTIC(RegionAllocator);
    NO_COPY_SEMANTIC(RegionAllocator);

    /**
     * @brief Create a new region allocator.
     * @param memStats - memory statistics
     * @param spaces - generational spaces used by this allocator
     * @param spaceType - space type
     * @param initSpaceSize - initial continuous space size; 0 means no initial space is needed
     * @param extend - if true, more regions are allocated from the mmap pool when the initial space is not enough
     */
    explicit RegionAllocator(MemStatsType *memStats, GenerationalSpaces *spaces,
                             SpaceType spaceType = SpaceType::SPACE_TYPE_OBJECT, size_t initSpaceSize = 0,
                             bool extend = true, size_t emptyTenuredRegionsMaxCount = 0);

    /**
     * @brief Create a new region allocator with the specified shared region pool.
     * @param memStats - memory statistics
     * @param spaceType - space type
     * @param sharedRegionPool - a shared region pool that can be reused by multiple spaces
     */
    explicit RegionAllocator(MemStatsType *memStats, GenerationalSpaces *spaces, SpaceType spaceType,
                             RegionPool *sharedRegionPool, size_t emptyTenuredRegionsMaxCount = 0);

    ~RegionAllocator() override = default;

    template <RegionFlag REGION_TYPE = RegionFlag::IS_EDEN, bool UPDATE_MEMSTATS = true>
    void *Alloc(size_t size, Alignment align = DEFAULT_ALIGNMENT, bool pinned = false);

    template <typename T>
    T *AllocArray(size_t arrLength)
    {
        return static_cast<T *>(Alloc(sizeof(T) * arrLength));
    }

    void Free([[maybe_unused]] void *mem) {}
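
    // Usage sketch (illustrative; the ObjectAllocConfig instantiation is a
    // hypothetical AllocConfigT, not defined in this header):
    //
    //     RegionAllocator<ObjectAllocConfig> allocator(memStats, spaces);
    //     void *mem = allocator.Alloc(64);                  // bump-pointer, eden by default
    //     auto *buf = allocator.AllocArray<uint64_t>(16);   // 16 * sizeof(uint64_t) bytes
    //     allocator.Free(mem);  // intentionally a no-op: a bump-pointer allocator
    //                           // reclaims memory at region granularity, not per object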

    void PinObject(ObjectHeader *object);

    void UnpinObject(ObjectHeader *object);

    /**
     * @brief Create a TLAB of the specified size.
     * @param size - required size of the TLAB
     * @return newly allocated TLAB; the TLAB is set to empty if allocation failed
     */
    TLAB *CreateTLAB(size_t size);

    /**
     * @brief Create a TLAB in a new region. The TLAB will occupy the whole region.
     * @return newly allocated TLAB; the TLAB is set to empty if allocation failed
     */
    TLAB *CreateRegionSizeTLAB();
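
    // TLAB flow sketch (illustrative): a mutator thread requests a buffer once
    // and then bump-allocates from it without touching the shared region lock.
    // The size and the TLAB allocation call below are assumptions for the
    // sketch, not this header's contract:
    //
    //     TLAB *tlab = allocator.CreateTLAB(4_KB);
    //     void *obj = tlab->Alloc(objSize);  // lock-free fast path; fails once exhausted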

    /**
     * @brief Iterates over all objects allocated by this allocator.
     * @param visitor - function pointer or functor
     */
    template <typename ObjectVisitor>
    void IterateOverObjects(const ObjectVisitor &visitor)
    {
        this->GetSpace()->IterateRegions([&](Region *region) { region->IterateOverObjects(visitor); });
    }

    template <typename ObjectVisitor>
    void IterateOverObjectsInRange(const ObjectVisitor &visitor, void *begin, void *end)
    {
        this->GetSpace()->IterateRegions([&](Region *region) {
            if (!region->Intersect(ToUintPtr(begin), ToUintPtr(end))) {
                return;
            }
            region->IterateOverObjects([&visitor, begin, end](ObjectHeader *obj) {
                if (ToUintPtr(begin) <= ToUintPtr(obj) && ToUintPtr(obj) < ToUintPtr(end)) {
                    visitor(obj);
                }
            });
        });
    }
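
    // Example (sketch): scanning one card's address window; cardBegin/cardEnd
    // are hypothetical bounds supplied by the caller.
    //
    //     allocator.IterateOverObjectsInRange(
    //         [](ObjectHeader *obj) { /* process obj */ }, cardBegin, cardEnd);
    //
    // Regions that do not intersect [begin, end) are skipped wholesale; the
    // inner bounds check then filters out objects of an intersecting region
    // that lie outside the window.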

    template <bool INCLUDE_CURRENT_REGION>
    PandaVector<std::pair<uint32_t, Region *>> GetTopGarbageRegions(double garbageThreshold);

    /**
     * Return a vector of all regions with the specific type.
     * @tparam REGIONS_TYPE - type of the regions to collect.
     * @return vector of all regions of the REGIONS_TYPE type
     */
    template <RegionFlag REGIONS_TYPE>
    PandaVector<Region *> GetAllSpecificRegions();

    double CalculateInternalOldFragmentation();

    /**
     * Iterate over all regions with type REGIONS_TYPE_FROM
     * and move all alive objects to regions with type REGIONS_TYPE_TO.
     * NOTE: REGIONS_TYPE_FROM and REGIONS_TYPE_TO can't be equal.
     * @tparam REGIONS_TYPE_FROM - type of the regions to process.
     * @tparam REGIONS_TYPE_TO - type of the regions to which alive objects are moved.
     * @tparam USE_MARKED_BITMAP - whether to use the marked bitmap of the regions.
     * @param deathChecker - checker that returns the status of each iterated object.
     * @param moveHandler - called for every moved object;
     *  can be used as a simple visitor if USE_MARKED_BITMAP is enabled
     */
    template <RegionFlag REGIONS_TYPE_FROM, RegionFlag REGIONS_TYPE_TO, bool USE_MARKED_BITMAP = false>
    void CompactAllSpecificRegions(const GCObjectVisitor &deathChecker, const ObjectVisitorEx &moveHandler);
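
    // Compaction sketch (illustrative): evacuating all eden regions into old
    // regions; both visitors are hypothetical stand-ins, and IsMarked is an
    // assumed helper.
    //
    //     allocator.CompactAllSpecificRegions<RegionFlag::IS_EDEN, RegionFlag::IS_OLD>(
    //         [](ObjectHeader *obj) {  // deathChecker
    //             return IsMarked(obj) ? ObjectStatus::ALIVE_OBJECT : ObjectStatus::DEAD_OBJECT;
    //         },
    //         [](ObjectHeader *from, ObjectHeader *to) { /* moveHandler: record from -> to */ });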

    template <RegionFlag REGION_TYPE>
    void ClearCurrentRegion()
    {
        ResetCurrentRegion<false, REGION_TYPE>();
    }

    /**
     * Iterate over specific regions from a vector
     * and move all alive objects to regions with type REGIONS_TYPE_TO.
     * @tparam REGIONS_TYPE_FROM - type of the regions to process.
     * @tparam REGIONS_TYPE_TO - type of the regions to which alive objects are moved.
     * @tparam USE_MARKED_BITMAP - whether to use the marked bitmap of the regions.
     * @param regions - vector of the regions to process.
     * @param deathChecker - checker that returns the status of each iterated object.
     * @param moveHandler - called for every moved object;
     *  can be used as a simple visitor if USE_MARKED_BITMAP is enabled
     */
    template <RegionFlag REGIONS_TYPE_FROM, RegionFlag REGIONS_TYPE_TO, bool USE_MARKED_BITMAP = false>
    void CompactSeveralSpecificRegions(const PandaVector<Region *> &regions, const GCObjectVisitor &deathChecker,
                                       const ObjectVisitorEx &moveHandler);

    /**
     * Iterate over a specific region
     * and move all alive objects to regions with type REGIONS_TYPE_TO.
     * @tparam REGIONS_TYPE_FROM - type of the region to process.
     * @tparam REGIONS_TYPE_TO - type of the regions to which alive objects are moved.
     * @tparam USE_MARKED_BITMAP - whether to use the marked bitmap of the region.
     * @param region - the region to process.
     * @param deathChecker - checker that returns the status of each iterated object.
     * @param moveHandler - called for every moved object;
     *  can be used as a simple visitor if USE_MARKED_BITMAP is enabled
     */
    template <RegionFlag REGIONS_TYPE_FROM, RegionFlag REGIONS_TYPE_TO, bool USE_MARKED_BITMAP = false>
    void CompactSpecificRegion(Region *region, const GCObjectVisitor &deathChecker,
                               const ObjectVisitorEx &moveHandler);
    /**
     * Promote a region and return the number of alive objects moved during a MixedGC if USE_MARKED_BITMAP == true,
     * or 0 otherwise.
     * @tparam USE_MARKED_BITMAP - whether to use the marked bitmap of the region.
     * @tparam FULL_GC - whether this is a FullGC or a MixedGC.
     * @param region - the region to promote.
     * @param deathChecker - checker that returns the status of each iterated object.
     * @param aliveObjectsHandler - called for every alive object.
     */
    template <bool USE_MARKED_BITMAP = false, bool FULL_GC = false>
    size_t PromoteYoungRegion(Region *region, const GCObjectVisitor &deathChecker,
                              const ObjectVisitor &aliveObjectsHandler);
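
    // Promotion sketch (illustrative): rather than copying survivors out of a
    // mostly-alive young region, the region itself is retagged as old; the
    // handler below is a hypothetical stand-in.
    //
    //     size_t alive = allocator.PromoteYoungRegion<true>(region, deathChecker,
    //         [](ObjectHeader *obj) { /* e.g. update remembered sets for obj */ });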

    /**
     * Reset all regions with type REGIONS_TYPE.
     * @tparam REGIONS_TYPE - type of the regions to reset.
     */
    template <RegionFlag REGIONS_TYPE>
    void ResetAllSpecificRegions();

    /**
     * Reset regions from a container.
     * @tparam REGIONS_TYPE - type of the regions to reset.
     * @tparam REGIONS_RELEASE_POLICY - whether a region is placed in the free queue or returned to the mempool.
     * @tparam OS_PAGES_POLICY - whether region pages should be returned to the OS.
     * @tparam NEED_LOCK - whether the region lock must be taken. Use it if regions are allocated in parallel.
     * @tparam Container - the regions' container type
     * @param regions - container of the regions to reset.
     */
    template <RegionFlag REGIONS_TYPE, RegionSpace::ReleaseRegionsPolicy REGIONS_RELEASE_POLICY,
              OSPagesPolicy OS_PAGES_POLICY, bool NEED_LOCK, typename Container>
    void ResetSeveralSpecificRegions(const Container &regions);

    /// Reserve one region if there is no reserved region yet.
    void ReserveRegionIfNeeded();

    /// Release the reserved region back to free space.
    void ReleaseReservedRegion();

    void VisitAndRemoveAllPools([[maybe_unused]] const MemVisitor &memVisitor)
    {
        this->ClearRegionsPool();
    }

    constexpr static size_t GetMaxRegularObjectSize()
    {
        return REGION_SIZE - AlignUp(sizeof(Region), DEFAULT_ALIGNMENT_IN_BYTES);
    }
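
    // Sizing note (illustrative): a regular object must fit into one region
    // after the space taken by the aligned Region header; requests above this
    // limit are expected to be served by the humongous allocator declared
    // later in this file rather than by this bump-pointer allocator.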

    bool ContainObject(const ObjectHeader *object) const
    {
        return this->GetSpace()->ContainObject(object);
    }

    bool IsLive(const ObjectHeader *object) const
    {
        return this->GetSpace()->IsLive(object);
    }

    static constexpr AllocatorType GetAllocatorType()
    {
        return AllocatorType::REGION_ALLOCATOR;
    }

    void SetDesiredEdenLength(size_t edenLength)
    {
        this->GetSpace()->SetDesiredEdenLength(edenLength);
    }

    void AddPromotedRegionToQueueIfPinned(Region *region)
    {
        if (region->HasPinnedObjects()) {
            ASSERT(region->HasFlag(RegionFlag::IS_PROMOTED));
            PushToRegionQueue<false, RegionFlag::IS_PINNED>(region);
        }
    }

private:
    template <bool USE_ATOMIC = true, RegionFlag REGION_TYPE>
    Region *GetCurrentRegion()
    {
        Region **curRegion = GetCurrentRegionPointerUnsafe<REGION_TYPE>();
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (USE_ATOMIC) {
            // Atomic with relaxed order reason: data race with curRegion with no synchronization or ordering
            // constraints imposed on other reads or writes
            return reinterpret_cast<std::atomic<Region *> *>(curRegion)->load(std::memory_order_relaxed);
            // NOLINTNEXTLINE(readability-misleading-indentation)
        }
        return *curRegion;
    }

    template <bool USE_ATOMIC = true, RegionFlag REGION_TYPE>
    void SetCurrentRegion(Region *region)
    {
        Region **curRegion = GetCurrentRegionPointerUnsafe<REGION_TYPE>();
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (USE_ATOMIC) {
            // Atomic with relaxed order reason: data race with curRegion with no synchronization or ordering
            // constraints imposed on other reads or writes
            reinterpret_cast<std::atomic<Region *> *>(curRegion)->store(region, std::memory_order_relaxed);
            // NOLINTNEXTLINE(readability-misleading-indentation)
        } else {
            *curRegion = region;
        }
    }

    template <RegionFlag REGION_TYPE>
    Region **GetCurrentRegionPointerUnsafe()
    {
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (REGION_TYPE == RegionFlag::IS_EDEN) {
            return &edenCurrentRegion_;
        }
        UNREACHABLE();
        return nullptr;
    }

    template <bool USE_ATOMIC = true, RegionFlag REGION_TYPE>
    void ResetCurrentRegion()
    {
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (REGION_TYPE == RegionFlag::IS_EDEN) {
            SetCurrentRegion<USE_ATOMIC, REGION_TYPE>(&fullRegion_);
            return;
        }
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (REGION_TYPE == RegionFlag::IS_OLD) {
            os::memory::LockHolder<os::memory::Mutex, USE_ATOMIC> lock(*GetQueueLock<REGION_TYPE>());
            GetRegionQueuePointer<REGION_TYPE>()->clear();
            return;
        }
        UNREACHABLE();
    }

    template <bool USE_ATOMIC = true, RegionFlag REGION_TYPE>
    bool IsInCurrentRegion(Region *region)
    {
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (REGION_TYPE == RegionFlag::IS_EDEN) {
            return GetCurrentRegion<USE_ATOMIC, REGION_TYPE>() == region;
        }
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (REGION_TYPE != RegionFlag::IS_OLD) {
            LOG(FATAL, ALLOC) << "Region type is neither eden nor old";
        }
        os::memory::LockHolder<os::memory::Mutex, USE_ATOMIC> lock(*GetQueueLock<REGION_TYPE>());
        for (auto i : *GetRegionQueuePointer<REGION_TYPE>()) {
            if (i == region) {
                return true;
            }
        }
        return false;
    }

public:
    template <bool USE_ATOMIC = true, RegionFlag REGION_TYPE>
    Region *PopFromRegionQueue()
    {
        PandaVector<Region *> *regionQueue = GetRegionQueuePointer<REGION_TYPE>();
        os::memory::LockHolder<os::memory::Mutex, USE_ATOMIC> lock(*GetQueueLock<REGION_TYPE>());
        if (regionQueue->empty()) {
            return nullptr;
        }
        auto *region = regionQueue->back();
        regionQueue->pop_back();
        return region;
    }

    // NOLINTNEXTLINE(readability-identifier-naming)
    template <bool USE_ATOMIC = true, RegionFlag REGION_TYPE>
    void PushToRegionQueue(Region *region)
    {
        PandaVector<Region *> *regionQueue = GetRegionQueuePointer<REGION_TYPE>();
        os::memory::LockHolder<os::memory::Mutex, USE_ATOMIC> lock(*GetQueueLock<REGION_TYPE>());
        regionQueue->push_back(region);
    }

    template <bool USE_ATOMIC = true, RegionFlag REGION_TYPE>
    Region *CreateAndSetUpNewRegionWithLock()
    {
        os::memory::LockHolder<LockConfigT, USE_ATOMIC> lock(this->regionLock_);
        Region *regionTo = this->template CreateAndSetUpNewRegion<AllocConfigT>(DEFAULT_REGION_SIZE, REGION_TYPE);
        ASSERT(regionTo != nullptr);
        return regionTo;
    }

private:
    template <RegionFlag REGION_TYPE>
    os::memory::Mutex *GetQueueLock()
    {
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (REGION_TYPE == RegionFlag::IS_OLD || REGION_TYPE == RegionFlag::IS_PINNED) {
            return &oldQueueLock_;
        }
        UNREACHABLE();
        return nullptr;
    }

    template <RegionFlag REGION_TYPE>
    PandaVector<Region *> *GetRegionQueuePointer()
    {
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (REGION_TYPE == RegionFlag::IS_OLD) {
            return &oldRegionQueue_;
        } else if constexpr (REGION_TYPE == RegionFlag::IS_PINNED) {
            return &pinnedRegionQueue_;
        }
        UNREACHABLE();
        return nullptr;
    }

    template <RegionFlag REGION_TYPE>
    void *AllocRegular(size_t alignSize);
    TLAB *CreateTLABInRegion(Region *region, size_t size);

    Region fullRegion_;
    Region *edenCurrentRegion_;
    Region *reservedRegion_ = nullptr;
    os::memory::Mutex oldQueueLock_;
    PandaVector<Region *> oldRegionQueue_;
    PandaVector<Region *> pinnedRegionQueue_;
    // To store partially used Regions that can be reused later.
    ark::PandaMultiMap<size_t, Region *, std::greater<size_t>> retainedTlabs_;
    friend class test::RegionAllocatorTest;
};

/// @brief A region-based allocator for non-movable objects.
template <typename AllocConfigT, typename LockConfigT, typename ObjectAllocator>
class RegionNonmovableAllocator final : public RegionAllocatorBase<LockConfigT> {
public:
    static constexpr size_t REGION_SIZE = DEFAULT_REGION_SIZE;

    NO_MOVE_SEMANTIC(RegionNonmovableAllocator);
    NO_COPY_SEMANTIC(RegionNonmovableAllocator);

    explicit RegionNonmovableAllocator(MemStatsType *memStats, GenerationalSpaces *spaces, SpaceType spaceType,
                                       size_t initSpaceSize = 0, bool extend = true);
    explicit RegionNonmovableAllocator(MemStatsType *memStats, GenerationalSpaces *spaces, SpaceType spaceType,
                                       RegionPool *sharedRegionPool);

    ~RegionNonmovableAllocator() override = default;

    void *Alloc(size_t size, Alignment align = DEFAULT_ALIGNMENT);

    void Free(void *mem);

    void Collect(const GCObjectVisitor &deathChecker);

    template <typename ObjectVisitor>
    void IterateOverObjects(const ObjectVisitor &objVisitor)
    {
        objectAllocator_.IterateOverObjects(objVisitor);
    }

    template <typename MemVisitor>
    void IterateOverObjectsInRange(const MemVisitor &memVisitor, void *begin, void *end)
    {
        objectAllocator_.IterateOverObjectsInRange(memVisitor, begin, end);
    }

    void VisitAndRemoveAllPools([[maybe_unused]] const MemVisitor &memVisitor)
    {
        objectAllocator_.VisitAndRemoveAllPools([this](void *mem, [[maybe_unused]] size_t size) {
            auto *region = AddrToRegion(mem);
            ASSERT(ToUintPtr(mem) + size == region->End());
            this->GetSpace()->FreeRegion(region);
        });
    }

    void VisitAndRemoveFreeRegions(const RegionsVisitor &regionVisitor);

    constexpr static size_t GetMaxSize()
    {
        // NOTE(yxr) : get accurate max payload size in a freelist pool
        return std::min(ObjectAllocator::GetMaxSize(), static_cast<size_t>(REGION_SIZE - 1_KB));
    }

    bool ContainObject(const ObjectHeader *object) const
    {
        return objectAllocator_.ContainObject(object);
    }

    bool IsLive(const ObjectHeader *object) const
    {
        ASSERT(this->GetRegion(object)->GetLiveBitmap() != nullptr);
        return this->GetRegion(object)->GetLiveBitmap()->AtomicTest(const_cast<ObjectHeader *>(object));
    }

    double CalculateExternalFragmentation()
    {
        return objectAllocator_.CalculateExternalFragmentation();
    }

    double CalculateDeadObjectsRatio() override;

private:
    void *NewRegionAndRetryAlloc(size_t objectSize, Alignment align);

    mutable ObjectAllocator objectAllocator_;
};
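
// Layering note (illustrative): RegionNonmovableAllocator composes an
// object-level allocator over region-backed memory, so individual objects can
// be freed while whole regions are returned to the space once the underlying
// pool drains. A hypothetical instantiation, assuming the runtime's free-list
// allocator as the ObjectAllocator:
//
//     using NonmovableAllocator = RegionNonmovableAllocator<
//         ObjectAllocConfig, RegionAllocatorLockConfig::CommonLock,
//         FreeListAllocator<ObjectAllocConfig>>;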

/// @brief A region-based humongous allocator.
template <typename AllocConfigT, typename LockConfigT = RegionAllocatorLockConfig::CommonLock>
class RegionHumongousAllocator final : public RegionAllocatorBase<LockConfigT> {
public:
    static constexpr size_t REGION_SIZE = DEFAULT_REGION_SIZE;

    NO_MOVE_SEMANTIC(RegionHumongousAllocator);
    NO_COPY_SEMANTIC(RegionHumongousAllocator);

    /**
     * @brief Create a new humongous region allocator.
     * @param memStats - memory statistics
     * @param spaceType - space type
     */
    explicit RegionHumongousAllocator(MemStatsType *memStats, GenerationalSpaces *spaces, SpaceType spaceType);

    ~RegionHumongousAllocator() override = default;

    template <bool UPDATE_MEMSTATS = true>
    void *Alloc(size_t size, Alignment align = DEFAULT_ALIGNMENT);

    template <typename T>
    T *AllocArray(size_t arrLength)
    {
        return static_cast<T *>(Alloc(sizeof(T) * arrLength));
    }

    void Free([[maybe_unused]] void *mem) {}

    void CollectAndRemoveFreeRegions(const RegionsVisitor &regionVisitor, const GCObjectVisitor &deathChecker);

    /**
     * @brief Iterates over all objects allocated by this allocator.
     * @param visitor - function pointer or functor
     */
    template <typename ObjectVisitor>
    void IterateOverObjects(const ObjectVisitor &visitor)
    {
        this->GetSpace()->IterateRegions([&](Region *region) { region->IterateOverObjects(visitor); });
    }

    template <typename ObjectVisitor>
    void IterateOverObjectsInRange(const ObjectVisitor &visitor, void *begin, void *end)
    {
        this->GetSpace()->IterateRegions([&](Region *region) {
            if (!region->Intersect(ToUintPtr(begin), ToUintPtr(end))) {
                return;
            }
            region->IterateOverObjects([&visitor, begin, end](ObjectHeader *obj) {
                if (ToUintPtr(begin) <= ToUintPtr(obj) && ToUintPtr(obj) < ToUintPtr(end)) {
                    visitor(obj);
                }
            });
        });
    }

    void VisitAndRemoveAllPools([[maybe_unused]] const MemVisitor &memVisitor)
    {
        this->ClearRegionsPool();
    }

    bool ContainObject(const ObjectHeader *object) const
    {
        return this->GetSpace()->template ContainObject<true>(object);
    }

    bool IsLive(const ObjectHeader *object) const
    {
        return this->GetSpace()->template IsLive<true>(object);
    }

    double CalculateInternalFragmentation();

private:
    void ResetRegion(Region *region);
    void Collect(Region *region, const GCObjectVisitor &deathChecker);

    // If we change this constant, we will increase fragmentation dramatically
    static_assert(REGION_SIZE / PANDA_POOL_ALIGNMENT_IN_BYTES == 1);
    friend class test::RegionAllocatorTest;
};
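
// Sizing sketch (one plausible reading, not stated in this header): a
// humongous object is backed by a pool of one or more whole regions, so its
// footprint is effectively rounded up to a multiple of REGION_SIZE; keeping
// REGION_SIZE equal to PANDA_POOL_ALIGNMENT_IN_BYTES (the static_assert
// above) avoids additional alignment padding on top of that rounding.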

}  // namespace ark::mem

#endif  // PANDA_RUNTIME_MEM_REGION_ALLOCATOR_H