• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /**
2  * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  * http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 #ifndef PANDA_RUNTIME_MEM_REGION_ALLOCATOR_H
16 #define PANDA_RUNTIME_MEM_REGION_ALLOCATOR_H
17 
18 #include <atomic>
19 #include <cstdint>
20 
21 #include "runtime/mem/region_space.h"
22 
23 namespace panda {
24 class ManagedThread;
25 struct GCTask;
26 }  // namespace panda
27 
28 namespace panda::mem {
29 
/// Lock type aliases used to parameterize region allocators by locking policy.
class RegionAllocatorLockConfig {
public:
    using CommonLock = os::memory::Mutex;     // real mutex: thread-safe allocation
    using DummyLock = os::memory::DummyLock;  // no-op lock: single-threaded use
};
35 
/// Visitor invoked with a batch of regions (e.g. free regions to be processed or released).
using RegionsVisitor = std::function<void(PandaVector<Region *> &vector)>;
37 
/// Return the region which corresponds to the start of the object.
static inline Region *ObjectToRegion(const ObjectHeader *object)
{
    // Regions are DEFAULT_REGION_SIZE-aligned, so masking off the low bits of the
    // object address yields the address of the enclosing region header.
    auto *region = reinterpret_cast<Region *>(((ToUintPtr(object)) & ~DEFAULT_REGION_MASK));
    ASSERT(ToUintPtr(PoolManager::GetMmapMemPool()->GetStartAddrPoolForAddr(object)) == ToUintPtr(region));
    // Getting region by object is a bit operation and TSAN doesn't
    // see the relation between region creation and region access.
    // This annotation suggests to TSAN that this code always executes after
    // the region gets created.
    // See the corresponding annotation in RegionAllocatorBase::CreateAndSetUpNewRegion
    TSAN_ANNOTATE_HAPPENS_AFTER(region);
    return region;
}
51 
IsSameRegion(const void * o1,const void * o2,size_t regionSizeBits)52 static inline bool IsSameRegion(const void *o1, const void *o2, size_t regionSizeBits)
53 {
54     return (((ToUintPtr(o1) ^ ToUintPtr(o2)) >> regionSizeBits) == 0);
55 }
56 
57 /// Return the region which corresponds to the address.
AddrToRegion(const void * addr)58 static inline Region *AddrToRegion(const void *addr)
59 {
60     auto regionAddr = PoolManager::GetMmapMemPool()->GetStartAddrPoolForAddr(addr);
61     return static_cast<Region *>(regionAddr);
62 }
63 
/**
 * @brief Base class for region-based allocators: owns the region space, the region
 * pool and the optional initial memory block, and provides the shared region
 * management helpers used by the concrete allocators below.
 * @tparam LockConfigT - lock type guarding region creation (see RegionAllocatorLockConfig)
 */
template <typename LockConfigT>
class RegionAllocatorBase {
public:
    NO_MOVE_SEMANTIC(RegionAllocatorBase);
    NO_COPY_SEMANTIC(RegionAllocatorBase);

    /// Construct an allocator with a self-created region pool of initSpaceSize bytes;
    /// extend == true presumably allows growing from the mmap pool when the
    /// initial space is exhausted (see RegionAllocator ctor docs) — confirm in the .cpp.
    explicit RegionAllocatorBase(MemStatsType *memStats, GenerationalSpaces *spaces, SpaceType spaceType,
                                 AllocatorType allocatorType, size_t initSpaceSize, bool extend, size_t regionSize,
                                 size_t emptyTenuredRegionsMaxCount);
    /// Construct an allocator on top of an externally owned, shared region pool.
    explicit RegionAllocatorBase(MemStatsType *memStats, GenerationalSpaces *spaces, SpaceType spaceType,
                                 AllocatorType allocatorType, RegionPool *sharedRegionPool,
                                 size_t emptyTenuredRegionsMaxCount);

    virtual ~RegionAllocatorBase()
    {
        ClearRegionsPool();
    }

    /// Return the region containing the given object.
    Region *GetRegion(const ObjectHeader *object) const
    {
        return regionSpace_.GetRegion(object);
    }

    RegionSpace *GetSpace()
    {
        return &regionSpace_;
    }

    const RegionSpace *GetSpace() const
    {
        return &regionSpace_;
    }

    /// Collect every region currently owned by the underlying region space.
    PandaVector<Region *> GetAllRegions();

    /// Release all empty regions of REGION_TYPE; OS_PAGES_POLICY controls whether
    /// their pages are returned to the OS.
    template <RegionFlag REGION_TYPE, OSPagesPolicy OS_PAGES_POLICY>
    void ReleaseEmptyRegions()
    {
        this->GetSpace()->template ReleaseEmptyRegions<REGION_TYPE, OS_PAGES_POLICY>();
    }

protected:
    /// Free all regions, then return the initial memory block (if any) to the shared pool.
    void ClearRegionsPool()
    {
        regionSpace_.FreeAllRegions();

        if (initBlock_.GetMem() != nullptr) {
            spaces_->FreeSharedPool(initBlock_.GetMem(), initBlock_.GetSize());
            initBlock_ = NULLPOOL;
        }
    }

    /// Allocate a raw region of the given size/type from the region space.
    template <OSPagesAllocPolicy OS_ALLOC_POLICY>
    Region *AllocRegion(size_t regionSize, RegionFlag edenOrOldOrNonmovable, RegionFlag properties)
    {
        return regionSpace_.NewRegion(regionSize, edenOrOldOrNonmovable, properties, OS_ALLOC_POLICY);
    }

    SpaceType GetSpaceType() const
    {
        return spaceType_;
    }

    /// Allocate and set up a new region; must be called with regionLock_ held.
    /// Carries the TSAN happens-before annotation paired with ObjectToRegion.
    template <typename AllocConfigT, OSPagesAllocPolicy OS_ALLOC_POLICY = OSPagesAllocPolicy::NO_POLICY>
    Region *CreateAndSetUpNewRegion(size_t regionSize, RegionFlag regionType, RegionFlag properties = IS_UNUSED)
        REQUIRES(regionLock_);

    // NOLINTNEXTLINE(misc-non-private-member-variables-in-classes)
    LockConfigT regionLock_;  // guards region creation (see REQUIRES above)
    // NOLINTNEXTLINE(misc-non-private-member-variables-in-classes)
    MemStatsType *memStats_;  // memory statistics sink
    // NOLINTNEXTLINE(misc-non-private-member-variables-in-classes)
    SpaceType spaceType_;  // space type this allocator serves
    // NOLINTNEXTLINE(misc-non-private-member-variables-in-classes)
    GenerationalSpaces *spaces_;  // provider of the shared pool that backs initBlock_
    // NOLINTNEXTLINE(misc-non-private-member-variables-in-classes)
    RegionPool regionPool_;  // self created pool, only used by this allocator
    // NOLINTNEXTLINE(misc-non-private-member-variables-in-classes)
    RegionSpace regionSpace_;  // the target region space used by this allocator
    // NOLINTNEXTLINE(misc-non-private-member-variables-in-classes)
    Pool initBlock_;  // the initial memory block for region allocation
};
146 
/// @brief A region-based bump-pointer allocator for movable objects (eden/old regions, TLABs).
template <typename AllocConfigT, typename LockConfigT = RegionAllocatorLockConfig::CommonLock>
class RegionAllocator final : public RegionAllocatorBase<LockConfigT> {
public:
    static constexpr bool USE_PARTIAL_TLAB = true;
    static constexpr size_t REGION_SIZE = DEFAULT_REGION_SIZE;

    NO_MOVE_SEMANTIC(RegionAllocator);
    NO_COPY_SEMANTIC(RegionAllocator);

    /**
     * @brief Create new region allocator
     * @param memStats - memory statistics
     * @param spaceType - space type
     * @param initSpaceSize - initial continuous space size, 0 means no need for initial space
     * @param extend - true means that will allocate more regions from mmap pool if initial space is not enough
     */
    explicit RegionAllocator(MemStatsType *memStats, GenerationalSpaces *spaces,
                             SpaceType spaceType = SpaceType::SPACE_TYPE_OBJECT, size_t initSpaceSize = 0,
                             bool extend = true, size_t emptyTenuredRegionsMaxCount = 0);

    /**
     * @brief Create new region allocator with shared region pool specified
     * @param memStats - memory statistics
     * @param spaceType - space type
     * @param sharedRegionPool - a shared region pool that can be reused by multi-spaces
     */
    explicit RegionAllocator(MemStatsType *memStats, GenerationalSpaces *spaces, SpaceType spaceType,
                             RegionPool *sharedRegionPool, size_t emptyTenuredRegionsMaxCount = 0);

    ~RegionAllocator() override = default;

    /// Allocate size bytes with the given alignment in a region of REGION_TYPE.
    template <RegionFlag REGION_TYPE = RegionFlag::IS_EDEN, bool UPDATE_MEMSTATS = true>
    void *Alloc(size_t size, Alignment align = DEFAULT_ALIGNMENT);

    /// Allocate uninitialized storage for arrLength objects of type T.
    template <typename T>
    T *AllocArray(size_t arrLength)
    {
        return static_cast<T *>(Alloc(sizeof(T) * arrLength));
    }

    // Individual free is not supported by a bump-pointer allocator; memory is
    // reclaimed per-region (see ResetAllSpecificRegions and friends).
    void Free([[maybe_unused]] void *mem) {}

    void PinObject(ObjectHeader *object);

    void UnpinObject(ObjectHeader *object);

    /**
     * @brief Create a TLAB of the specified size
     * @param size - required size of tlab
     * @return newly allocated TLAB; TLAB is set to Empty if allocation failed.
     */
    TLAB *CreateTLAB(size_t size);

    /**
     * @brief Create a TLAB in a new region. TLAB will occupy the whole region.
     * @return newly allocated TLAB; TLAB is set to Empty if allocation failed.
     */
    TLAB *CreateRegionSizeTLAB();

    /**
     * @brief Iterates over all objects allocated by this allocator.
     * @param visitor - function pointer or functor
     */
    template <typename ObjectVisitor>
    void IterateOverObjects(const ObjectVisitor &visitor)
    {
        this->GetSpace()->IterateRegions([&](Region *region) { region->IterateOverObjects(visitor); });
    }

    /// Iterate over objects allocated by this allocator within [begin, end),
    /// visiting only regions that intersect the range.
    template <typename ObjectVisitor>
    void IterateOverObjectsInRange(const ObjectVisitor &visitor, void *begin, void *end)
    {
        auto objVisitor = [&visitor, begin, end](ObjectHeader *obj) {
            if (ToUintPtr(begin) <= ToUintPtr(obj) && ToUintPtr(obj) < ToUintPtr(end)) {
                visitor(obj);
            }
        };
        this->GetSpace()->IterateRegions([&](Region *region) {
            if (region->Intersect(ToUintPtr(begin), ToUintPtr(end))) {
                region->IterateOverObjects(objVisitor);
            }
        });
    }

    /// Return regions ordered as a priority queue keyed by garbage amount;
    /// INCLUDE_CURRENT_REGION controls whether the currently used region is considered.
    template <bool INCLUDE_CURRENT_REGION>
    PandaPriorityQueue<std::pair<uint32_t, Region *>> GetTopGarbageRegions();

    /**
     * Return a vector of all regions with the specific type.
     * @tparam REGIONS_TYPE - type of regions needed to proceed.
     * @return vector of all regions with the REGIONS_TYPE type
     */
    template <RegionFlag REGIONS_TYPE>
    PandaVector<Region *> GetAllSpecificRegions();

    /**
     * Iterate over all regions with type REGIONS_TYPE_FROM
     * and move all alive objects to the regions with type REGIONS_TYPE_TO.
     * NOTE: REGIONS_TYPE_FROM and REGIONS_TYPE_TO can't be equal.
     * @tparam REGIONS_TYPE_FROM - type of regions needed to proceed.
     * @tparam REGIONS_TYPE_TO - type of regions to which we want to move all alive objects.
     * @tparam USE_MARKED_BITMAP - if we need to use marked_bitmap from the regions or not.
     * @param deathChecker - checker which will return object status for each iterated object.
     * @param moveHandler - called for every moved object;
     *  can be used as a simple visitor if we enable USE_MARKED_BITMAP
     */
    template <RegionFlag REGIONS_TYPE_FROM, RegionFlag REGIONS_TYPE_TO, bool USE_MARKED_BITMAP = false>
    void CompactAllSpecificRegions(const GCObjectVisitor &deathChecker, const ObjectVisitorEx &moveHandler);

    /// Reset the non-atomic "current region" slot for REGION_TYPE to the sentinel full region.
    template <RegionFlag REGION_TYPE>
    void ClearCurrentRegion()
    {
        ResetCurrentRegion<false, REGION_TYPE>();
    }

    /**
     * Iterate over specific regions from vector
     * and move all alive objects to the regions with type REGIONS_TYPE_TO.
     * @tparam REGIONS_TYPE_FROM - type of regions needed to proceed.
     * @tparam REGIONS_TYPE_TO - type of regions to which we want to move all alive objects.
     * @tparam USE_MARKED_BITMAP - if we need to use marked_bitmap from the regions or not.
     * @param regions - vector of regions needed to proceed.
     * @param deathChecker - checker which will return object status for each iterated object.
     * @param moveHandler - called for every moved object;
     *  can be used as a simple visitor if we enable USE_MARKED_BITMAP
     */
    template <RegionFlag REGIONS_TYPE_FROM, RegionFlag REGIONS_TYPE_TO, bool USE_MARKED_BITMAP = false>
    void CompactSeveralSpecificRegions(const PandaVector<Region *> &regions, const GCObjectVisitor &deathChecker,
                                       const ObjectVisitorEx &moveHandler);

    /**
     * Iterate over a specific region
     * and move all alive objects to the regions with type REGIONS_TYPE_TO.
     * @tparam REGIONS_TYPE_FROM - type of regions needed to proceed.
     * @tparam REGIONS_TYPE_TO - type of regions to which we want to move all alive objects.
     * @tparam USE_MARKED_BITMAP - if we need to use marked_bitmap from the regions or not.
     * @param regions - region needed to proceed.
     * @param deathChecker - checker which will return object status for each iterated object.
     * @param moveHandler - called for every moved object;
     *  can be used as a simple visitor if we enable USE_MARKED_BITMAP
     */
    template <RegionFlag REGIONS_TYPE_FROM, RegionFlag REGIONS_TYPE_TO, bool USE_MARKED_BITMAP = false>
    void CompactSpecificRegion(Region *regions, const GCObjectVisitor &deathChecker,
                               const ObjectVisitorEx &moveHandler);

    /// Promote a young region in place; aliveObjectsHandler is called for each
    /// object deathChecker reports alive.
    template <bool USE_MARKED_BITMAP = false>
    void PromoteYoungRegion(Region *region, const GCObjectVisitor &deathChecker,
                            const ObjectVisitor &aliveObjectsHandler);

    /**
     * Reset all regions with type REGIONS_TYPE.
     * @tparam REGIONS_TYPE - type of regions needed to proceed.
     */
    template <RegionFlag REGIONS_TYPE>
    void ResetAllSpecificRegions();

    /**
     * Reset regions from vector.
     * @tparam REGIONS_TYPE - type of regions needed to proceed.
     * @tparam REGIONS_RELEASE_POLICY - region need to be placed in the free queue or returned to mempool.
     * @tparam OS_PAGES_POLICY - if we need to return region pages to OS or not.
     * @tparam NEED_LOCK - if we need to take region lock or not. Use it if we allocate regions in parallel
     * @tparam Container - region's container type
     * @param regions - vector of regions needed to proceed.
     */
    template <RegionFlag REGIONS_TYPE, RegionSpace::ReleaseRegionsPolicy REGIONS_RELEASE_POLICY,
              OSPagesPolicy OS_PAGES_POLICY, bool NEED_LOCK, typename Container>
    void ResetSeveralSpecificRegions(const Container &regions);

    /// Reserve one region if no reserved region
    void ReserveRegionIfNeeded();

    /// Release reserved region to free space
    void ReleaseReservedRegion();

    // The visitor is unused: all regions are returned directly to the pool.
    void VisitAndRemoveAllPools([[maybe_unused]] const MemVisitor &memVisitor)
    {
        this->ClearRegionsPool();
    }

    /// Largest object that fits into a regular region (region size minus aligned header).
    constexpr static size_t GetMaxRegularObjectSize()
    {
        return REGION_SIZE - AlignUp(sizeof(Region), DEFAULT_ALIGNMENT_IN_BYTES);
    }

    bool ContainObject(const ObjectHeader *object) const
    {
        return this->GetSpace()->ContainObject(object);
    }

    bool IsLive(const ObjectHeader *object) const
    {
        return this->GetSpace()->IsLive(object);
    }

    static constexpr AllocatorType GetAllocatorType()
    {
        return AllocatorType::REGION_ALLOCATOR;
    }

    void SetDesiredEdenLength(size_t edenLength)
    {
        this->GetSpace()->SetDesiredEdenLength(edenLength);
    }

private:
    /// Read the current region slot for REGION_TYPE, optionally with an atomic load.
    // NOLINTNEXTLINE(readability-identifier-naming)
    template <bool atomic = true, RegionFlag REGION_TYPE>
    Region *GetCurrentRegion()
    {
        Region **curRegion = GetCurrentRegionPointerUnsafe<REGION_TYPE>();
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (atomic) {
            // Atomic with relaxed order reason: data race with cur_region with no synchronization or ordering
            // constraints imposed on other reads or writes
            return reinterpret_cast<std::atomic<Region *> *>(curRegion)->load(std::memory_order_relaxed);
            // NOLINTNEXTLINE(readability-misleading-indentation)
        }
        return *curRegion;
    }

    /// Write the current region slot for REGION_TYPE, optionally with an atomic store.
    // NOLINTNEXTLINE(readability-identifier-naming)
    template <bool atomic = true, RegionFlag REGION_TYPE>
    void SetCurrentRegion(Region *region)
    {
        Region **curRegion = GetCurrentRegionPointerUnsafe<REGION_TYPE>();
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (atomic) {
            // Atomic with relaxed order reason: data race with cur_region with no synchronization or ordering
            // constraints imposed on other reads or writes
            reinterpret_cast<std::atomic<Region *> *>(curRegion)->store(region, std::memory_order_relaxed);
            // NOLINTNEXTLINE(readability-misleading-indentation)
        } else {
            *curRegion = region;
        }
    }

    /// Return the raw slot address for REGION_TYPE; only IS_EDEN has a slot here.
    template <RegionFlag REGION_TYPE>
    Region **GetCurrentRegionPointerUnsafe()
    {
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (REGION_TYPE == RegionFlag::IS_EDEN) {
            return &edenCurrentRegion_;
        }
        UNREACHABLE();
        return nullptr;
    }

    /// Reset state for REGION_TYPE: eden points back at the sentinel full region,
    /// old clears its queue (under the queue lock when atomic).
    // NOLINTNEXTLINE(readability-identifier-naming)
    template <bool atomic = true, RegionFlag REGION_TYPE>
    void ResetCurrentRegion()
    {
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (REGION_TYPE == RegionFlag::IS_EDEN) {
            SetCurrentRegion<atomic, REGION_TYPE>(&fullRegion_);
            return;
        }
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (REGION_TYPE == RegionFlag::IS_OLD) {
            // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
            if constexpr (atomic) {
                os::memory::LockHolder lock(*GetQueueLock<REGION_TYPE>());
                GetRegionQueuePointer<REGION_TYPE>()->clear();
                return;
            }
            GetRegionQueuePointer<REGION_TYPE>()->clear();
            return;
        }
        UNREACHABLE();
    }

    /// Check whether region is the current eden region, or present in the old queue.
    // NOLINTNEXTLINE(readability-identifier-naming)
    template <bool atomic = true, RegionFlag REGION_TYPE>
    bool IsInCurrentRegion(Region *region)
    {
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (REGION_TYPE == RegionFlag::IS_EDEN) {
            return GetCurrentRegion<atomic, REGION_TYPE>() == region;
        }
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (REGION_TYPE == RegionFlag::IS_OLD) {
            // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
            if constexpr (atomic) {
                os::memory::LockHolder lock(*GetQueueLock<REGION_TYPE>());
                for (auto i : *GetRegionQueuePointer<REGION_TYPE>()) {
                    if (i == region) {
                        return true;
                    }
                }
                return false;
            }
            for (auto i : *GetRegionQueuePointer<REGION_TYPE>()) {
                if (i == region) {
                    return true;
                }
            }
            return false;
        }
        UNREACHABLE();
        return false;
    }

    /// Pop the most recently pushed region off the REGION_TYPE queue (nullptr if empty),
    /// taking the queue lock when atomic.
    // NOLINTNEXTLINE(readability-identifier-naming)
    template <bool atomic = true, RegionFlag REGION_TYPE>
    Region *PopFromRegionQueue()
    {
        PandaVector<Region *> *regionQueue = GetRegionQueuePointer<REGION_TYPE>();
        Region *region = nullptr;
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (atomic) {
            os::memory::LockHolder lock(*GetQueueLock<REGION_TYPE>());
            if (!regionQueue->empty()) {
                region = regionQueue->back();
                regionQueue->pop_back();
            }
            return region;
            // NOLINTNEXTLINE(readability-misleading-indentation)
        }
        if (!regionQueue->empty()) {
            region = regionQueue->back();
            regionQueue->pop_back();
        }
        return region;
    }

    /// Push a region onto the REGION_TYPE queue, taking the queue lock when atomic.
    // NOLINTNEXTLINE(readability-identifier-naming)
    template <bool atomic = true, RegionFlag REGION_TYPE>
    void PushToRegionQueue(Region *region)
    {
        PandaVector<Region *> *regionQueue = GetRegionQueuePointer<REGION_TYPE>();
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (atomic) {
            os::memory::LockHolder lock(*GetQueueLock<REGION_TYPE>());
            regionQueue->push_back(region);
            return;
            // NOLINTNEXTLINE(readability-misleading-indentation)
        }
        regionQueue->push_back(region);
    }

    /// Lock guarding the region queue for REGION_TYPE; only IS_OLD has a queue here.
    template <RegionFlag REGION_TYPE>
    os::memory::Mutex *GetQueueLock()
    {
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (REGION_TYPE == RegionFlag::IS_OLD) {
            return &oldQueueLock_;
        }
        UNREACHABLE();
        return nullptr;
    }

    /// Region queue for REGION_TYPE; only IS_OLD has a queue here.
    template <RegionFlag REGION_TYPE>
    PandaVector<Region *> *GetRegionQueuePointer()
    {
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (REGION_TYPE == RegionFlag::IS_OLD) {
            return &oldRegionQueue_;
        }
        UNREACHABLE();
        return nullptr;
    }

    template <RegionFlag REGION_TYPE>
    void *AllocRegular(size_t alignSize);
    TLAB *CreateTLABInRegion(Region *region, size_t size);

    Region fullRegion_;             // sentinel "always full" region used as the reset value for eden
    Region *edenCurrentRegion_;     // current eden allocation region
    Region *reservedRegion_ = nullptr;  // region kept aside by ReserveRegionIfNeeded
    os::memory::Mutex oldQueueLock_;    // guards oldRegionQueue_
    PandaVector<Region *> oldRegionQueue_;
    // To store partially used Regions that can be reused later.
    panda::PandaMultiMap<size_t, Region *, std::greater<size_t>> retainedTlabs_;
    friend class test::RegionAllocatorTest;
};
523 
/// @brief A region-backed allocator for non-movable objects: regions provide the
/// backing memory while the nested ObjectAllocator manages objects inside them.
template <typename AllocConfigT, typename LockConfigT, typename ObjectAllocator>
class RegionNonmovableAllocator final : public RegionAllocatorBase<LockConfigT> {
public:
    static constexpr size_t REGION_SIZE = DEFAULT_REGION_SIZE;

    NO_MOVE_SEMANTIC(RegionNonmovableAllocator);
    NO_COPY_SEMANTIC(RegionNonmovableAllocator);

    /// Create an allocator with its own region pool (see RegionAllocator ctor docs
    /// for the initSpaceSize/extend semantics).
    explicit RegionNonmovableAllocator(MemStatsType *memStats, GenerationalSpaces *spaces, SpaceType spaceType,
                                       size_t initSpaceSize = 0, bool extend = true);
    /// Create an allocator on top of a shared region pool.
    explicit RegionNonmovableAllocator(MemStatsType *memStats, GenerationalSpaces *spaces, SpaceType spaceType,
                                       RegionPool *sharedRegionPool);

    ~RegionNonmovableAllocator() override = default;

    void *Alloc(size_t size, Alignment align = DEFAULT_ALIGNMENT);

    void Free(void *mem);

    /// Sweep objects reported dead by deathChecker.
    void Collect(const GCObjectVisitor &deathChecker);

    /// Iterate over all objects managed by the inner object allocator.
    template <typename ObjectVisitor>
    void IterateOverObjects(const ObjectVisitor &objVisitor)
    {
        objectAllocator_.IterateOverObjects(objVisitor);
    }

    template <typename MemVisitor>
    void IterateOverObjectsInRange(const MemVisitor &memVisitor, void *begin, void *end)
    {
        objectAllocator_.IterateOverObjectsInRange(memVisitor, begin, end);
    }

    // The visitor is unused: each pool released by the inner allocator is mapped
    // back to its region, which is freed into this allocator's region space.
    void VisitAndRemoveAllPools([[maybe_unused]] const MemVisitor &memVisitor)
    {
        objectAllocator_.VisitAndRemoveAllPools([this](void *mem, [[maybe_unused]] size_t size) {
            auto *region = AddrToRegion(mem);
            ASSERT(ToUintPtr(mem) + size == region->End());
            this->GetSpace()->FreeRegion(region);
        });
    }

    /// Visit batches of free regions so the caller can release them.
    void VisitAndRemoveFreeRegions(const RegionsVisitor &regionVisitor);

    /// Maximum object size: bounded by the inner allocator and by region capacity.
    constexpr static size_t GetMaxSize()
    {
        // NOTE(yxr) : get accurate max payload size in a freelist pool
        return std::min(ObjectAllocator::GetMaxSize(), static_cast<size_t>(REGION_SIZE - 1_KB));
    }

    bool ContainObject(const ObjectHeader *object) const
    {
        return objectAllocator_.ContainObject(object);
    }

    /// Liveness is tracked via the owning region's live bitmap.
    bool IsLive(const ObjectHeader *object) const
    {
        ASSERT(this->GetRegion(object)->GetLiveBitmap() != nullptr);
        return this->GetRegion(object)->GetLiveBitmap()->AtomicTest(const_cast<ObjectHeader *>(object));
    }

private:
    /// Allocate a fresh region for the inner allocator, then retry the allocation.
    void *NewRegionAndRetryAlloc(size_t objectSize, Alignment align);

    // mutable: const members above (ContainObject) query the inner allocator
    mutable ObjectAllocator objectAllocator_;
};
590 
/// @brief A region-based humongous allocator: each object occupies its own
/// (possibly multi-page) region.
template <typename AllocConfigT, typename LockConfigT = RegionAllocatorLockConfig::CommonLock>
class RegionHumongousAllocator final : public RegionAllocatorBase<LockConfigT> {
public:
    static constexpr size_t REGION_SIZE = DEFAULT_REGION_SIZE;

    NO_MOVE_SEMANTIC(RegionHumongousAllocator);
    NO_COPY_SEMANTIC(RegionHumongousAllocator);

    /**
     * @brief Create new humongous region allocator
     * @param memStats - memory statistics
     * @param spaceType - space type
     */
    explicit RegionHumongousAllocator(MemStatsType *memStats, GenerationalSpaces *spaces, SpaceType spaceType);

    ~RegionHumongousAllocator() override = default;

    template <bool UPDATE_MEMSTATS = true>
    void *Alloc(size_t size, Alignment align = DEFAULT_ALIGNMENT);

    /// Allocate uninitialized storage for arrLength objects of type T.
    template <typename T>
    T *AllocArray(size_t arrLength)
    {
        return static_cast<T *>(Alloc(sizeof(T) * arrLength));
    }

    // Individual free is not supported; memory is reclaimed per-region during collection.
    void Free([[maybe_unused]] void *mem) {}

    /// Collect dead humongous objects and hand batches of freed regions to regionVisitor.
    void CollectAndRemoveFreeRegions(const RegionsVisitor &regionVisitor, const GCObjectVisitor &deathChecker);

    /**
     * @brief Iterates over all objects allocated by this allocator.
     * @param visitor - function pointer or functor
     */
    template <typename ObjectVisitor>
    void IterateOverObjects(const ObjectVisitor &visitor)
    {
        this->GetSpace()->IterateRegions([&](Region *region) { region->IterateOverObjects(visitor); });
    }

    /// Iterate over objects within [begin, end), visiting only intersecting regions.
    template <typename ObjectVisitor>
    void IterateOverObjectsInRange(const ObjectVisitor &visitor, void *begin, void *end)
    {
        auto objVisitor = [&visitor, begin, end](ObjectHeader *obj) {
            if (ToUintPtr(begin) <= ToUintPtr(obj) && ToUintPtr(obj) < ToUintPtr(end)) {
                visitor(obj);
            }
        };
        this->GetSpace()->IterateRegions([&](Region *region) {
            if (region->Intersect(ToUintPtr(begin), ToUintPtr(end))) {
                region->IterateOverObjects(objVisitor);
            }
        });
    }

    // The visitor is unused: all regions are returned directly to the pool.
    void VisitAndRemoveAllPools([[maybe_unused]] const MemVisitor &memVisitor)
    {
        this->ClearRegionsPool();
    }

    bool ContainObject(const ObjectHeader *object) const
    {
        return this->GetSpace()->template ContainObject<true>(object);
    }

    bool IsLive(const ObjectHeader *object) const
    {
        return this->GetSpace()->template IsLive<true>(object);
    }

private:
    /// Return a collected region to its initial (empty) state.
    void ResetRegion(Region *region);
    /// Collect a single region's object using deathChecker.
    void Collect(Region *region, const GCObjectVisitor &deathChecker);

    // If we change this constant, we will increase fragmentation dramatically
    static_assert(REGION_SIZE / PANDA_POOL_ALIGNMENT_IN_BYTES == 1);
    friend class test::RegionAllocatorTest;
};
670 
671 }  // namespace panda::mem
672 
673 #endif  // PANDA_RUNTIME_MEM_REGION_ALLOCATOR_H
674