/**
 * Copyright (c) 2024-2025 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef RUNTIME_MEM_ALLOCATOR_H
#define RUNTIME_MEM_ALLOCATOR_H

#include <functional>

#include "libpandabase/mem/code_allocator.h"
#include "libpandabase/mem/mem.h"
#include "libpandabase/mem/pool_map.h"
#include "libpandabase/utils/logger.h"
#include "libpandabase/macros.h"
#include "runtime/mem/bump-allocator.h"
#include "runtime/mem/freelist_allocator.h"
#include "runtime/mem/gc/bitmap.h"
#include "runtime/mem/gc/gc_types.h"
#include "runtime/mem/humongous_obj_allocator.h"
#include "runtime/mem/internal_allocator.h"
#include "runtime/mem/runslots_allocator.h"
#include "runtime/mem/pygote_space_allocator.h"
#include "runtime/mem/heap_space.h"
namespace ark {
class ObjectHeader;
class ManagedThread;
class BaseClass;
}  // namespace ark

namespace ark::mem {

class ObjectAllocConfigWithCrossingMap;
class ObjectAllocConfig;
class TLAB;

/// AllocatorPurpose and GCCollectMode indicate whether the GC should collect from a given allocator
enum class AllocatorPurpose {
    ALLOCATOR_PURPOSE_OBJECT,    // Allocator for objects
    ALLOCATOR_PURPOSE_INTERNAL,  // Space for runtime internal needs
};

template <AllocatorType>
class AllocatorTraits {
};

template <>
class AllocatorTraits<AllocatorType::RUNSLOTS_ALLOCATOR> {
    using AllocType = RunSlotsAllocator<ObjectAllocConfig>;
    static constexpr bool HAS_FREE {true};  // indicates allocator can free
};
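
// Each concrete allocator is expected to provide its own AllocatorTraits specialization.
// An illustrative sketch of one more specialization (the enumerator name
// FREELIST_ALLOCATOR is assumed here, not confirmed by this header):
//
//   template <>
//   class AllocatorTraits<AllocatorType::FREELIST_ALLOCATOR> {
//       using AllocType = FreeListAllocator<ObjectAllocConfig>;
//       static constexpr bool HAS_FREE {true};  // free-list allocators can free individual blocks
//   };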

template <typename T, AllocScope ALLOC_SCOPE_T>
class AllocatorAdapter;

class Allocator {
public:
    template <typename T, AllocScope ALLOC_SCOPE_T = AllocScope::GLOBAL>
    using AdapterType = AllocatorAdapter<T, ALLOC_SCOPE_T>;

    NO_COPY_SEMANTIC(Allocator);
    NO_MOVE_SEMANTIC(Allocator);
    explicit Allocator(MemStatsType *memStats, AllocatorPurpose purpose, GCCollectMode gcCollectMode)
        : memStats_(memStats), allocatorPurpose_(purpose), gcCollectMode_(gcCollectMode)
    {
    }
    virtual ~Allocator() = 0;

    ALWAYS_INLINE AllocatorPurpose GetPurpose() const
    {
        return allocatorPurpose_;
    }

    ALWAYS_INLINE GCCollectMode GetCollectMode() const
    {
        return gcCollectMode_;
    }

    ALWAYS_INLINE MemStatsType *GetMemStats() const
    {
        return memStats_;
    }

    [[nodiscard]] void *Alloc(size_t size)
    {
        return Allocate(size, CalculateAllocatorAlignment(alignof(uintptr_t)), nullptr);
    }

    [[nodiscard]] void *Alloc(size_t size, Alignment align)
    {
        return Allocate(size, align, nullptr);
    }

    [[nodiscard]] void *AllocLocal(size_t size)
    {
        return AllocateLocal(size, CalculateAllocatorAlignment(alignof(uintptr_t)), nullptr);
    }

    [[nodiscard]] void *AllocLocal(size_t size, Alignment align)
    {
        return AllocateLocal(size, align, nullptr);
    }

    [[nodiscard]] virtual void *Allocate(size_t size, Alignment align, [[maybe_unused]] ark::ManagedThread *thread) = 0;

    [[nodiscard]] virtual void *AllocateLocal(size_t size, Alignment align,
                                              [[maybe_unused]] ark::ManagedThread *thread) = 0;

    template <class T>
    [[nodiscard]] T *AllocArray(size_t size)
    {
        return static_cast<T *>(this->Allocate(sizeof(T) * size, CalculateAllocatorAlignment(alignof(T)), nullptr));
    }

    template <class T>
    [[nodiscard]] T *AllocArrayLocal(size_t size)
    {
        return static_cast<T *>(
            this->AllocateLocal(sizeof(T) * size, CalculateAllocatorAlignment(alignof(T)), nullptr));
    }

    template <class T>
    void Delete(T *ptr)
    {
        if (ptr == nullptr) {
            return;
        }
        // NOLINTNEXTLINE(readability-braces-around-statements,bugprone-suspicious-semicolon)
        if constexpr (std::is_class_v<T>) {
            ptr->~T();
        }
        Free(ptr);
    }

    template <typename T>
    void DeleteArray(T *data)
    {
        if (data == nullptr) {
            return;
        }
        static constexpr size_t SIZE_BEFORE_DATA_OFFSET =
            AlignUp(sizeof(size_t), GetAlignmentInBytes(GetAlignment<T>()));
        void *p = ToVoidPtr(ToUintPtr(data) - SIZE_BEFORE_DATA_OFFSET);
        size_t size = *static_cast<size_t *>(p);
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (std::is_class_v<T>) {
            // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
            for (size_t i = 0; i < size; ++i, ++data) {
                data->~T();
            }
        }
        Free(p);
    }

    virtual void Free(void *mem) = 0;

    virtual void VisitAndRemoveAllPools(const MemVisitor &memVisitor) = 0;

    virtual void VisitAndRemoveFreePools(const MemVisitor &memVisitor) = 0;

    virtual void IterateOverYoungObjects([[maybe_unused]] const ObjectVisitor &objectVisitor)
    {
        LOG(FATAL, ALLOC) << "Allocator::IterateOverYoungObjects" << std::endl;
    }

    virtual void IterateOverTenuredObjects([[maybe_unused]] const ObjectVisitor &objectVisitor)
    {
        LOG(FATAL, ALLOC) << "Allocator::IterateOverTenuredObjects" << std::endl;
    }

    /// @brief Iterates over all objects in the object allocator
    virtual void IterateRegularSizeObjects([[maybe_unused]] const ObjectVisitor &objectVisitor)
    {
        LOG(FATAL, ALLOC) << "Allocator::IterateRegularSizeObjects";
    }

    /// @brief Iterates over objects in all allocators except the object allocator
    virtual void IterateNonRegularSizeObjects([[maybe_unused]] const ObjectVisitor &objectVisitor)
    {
        LOG(FATAL, ALLOC) << "Allocator::IterateNonRegularSizeObjects";
    }

    virtual void FreeObjectsMovedToPygoteSpace()
    {
        LOG(FATAL, ALLOC) << "Allocator::FreeObjectsMovedToPygoteSpace";
    }

    virtual void IterateOverObjectsInRange(MemRange memRange, const ObjectVisitor &objectVisitor) = 0;

    virtual void IterateOverObjects(const ObjectVisitor &objectVisitor) = 0;

    template <AllocScope ALLOC_SCOPE_T = AllocScope::GLOBAL>
    AllocatorAdapter<void, ALLOC_SCOPE_T> Adapter();
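
    // Adapter() returns an STL-compatible allocator facade over this Allocator. A minimal
    // usage sketch, assuming AllocatorAdapter models the standard Allocator requirements
    // and is constructible from its void specialization:
    //
    //   std::vector<int, Allocator::AdapterType<int>> v(allocator->Adapter());
    //   v.push_back(42);  // element storage is drawn from this Allocator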

    template <typename T, typename... Args>
    std::enable_if_t<!std::is_array_v<T>, T *> New(Args &&...args)
    {
        void *p = Alloc(sizeof(T), CalculateAllocatorAlignment(alignof(T)));
        if (UNLIKELY(p == nullptr)) {
            return nullptr;
        }
        new (p) T(std::forward<Args>(args)...);  // NOLINT(bugprone-throw-keyword-missing)
        return reinterpret_cast<T *>(p);
    }

    template <typename T>
    std::enable_if_t<is_unbounded_array_v<T>, std::remove_extent_t<T> *> New(size_t size)
    {
        static constexpr size_t SIZE_BEFORE_DATA_OFFSET =
            AlignUp(sizeof(size_t), GetAlignmentInBytes(GetAlignment<T>()));
        using ElementType = std::remove_extent_t<T>;
        void *p = Alloc(SIZE_BEFORE_DATA_OFFSET + sizeof(ElementType) * size, CalculateAllocatorAlignment(alignof(T)));
        if (UNLIKELY(p == nullptr)) {
            return nullptr;
        }
        *static_cast<size_t *>(p) = size;
        auto *data = ToNativePtr<ElementType>(ToUintPtr(p) + SIZE_BEFORE_DATA_OFFSET);
        ElementType *currentElement = data;
        // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
        for (size_t i = 0; i < size; ++i, ++currentElement) {
            new (currentElement) ElementType();
        }
        return data;
    }
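
    // New<T[]>(n) stores the element count in a size_t header SIZE_BEFORE_DATA_OFFSET bytes
    // before the returned pointer; DeleteArray() reads it back to run the destructors:
    //
    //   | size_t size | padding up to alignof(T) | T[0] ... T[size-1] |
    //                                            ^-- returned pointer
    //
    // Illustrative pairing (allocator is any concrete Allocator *):
    //
    //   int *buf = allocator->New<int[]>(16);  // header = 16, then 16 value-initialized ints
    //   allocator->DeleteArray(buf);           // destroys all 16, frees the whole block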

    template <typename T, typename... Args>
    std::enable_if_t<!std::is_array_v<T>, T *> NewLocal(Args &&...args)
    {
        void *p = AllocLocal(sizeof(T), CalculateAllocatorAlignment(alignof(T)));
        if (UNLIKELY(p == nullptr)) {
            return nullptr;
        }
        new (p) T(std::forward<Args>(args)...);  // NOLINT(bugprone-throw-keyword-missing)
        return reinterpret_cast<T *>(p);
    }

    template <typename T>
    std::enable_if_t<is_unbounded_array_v<T>, std::remove_extent_t<T> *> NewLocal(size_t size)
    {
        static constexpr size_t SIZE_BEFORE_DATA_OFFSET =
            AlignUp(sizeof(size_t), GetAlignmentInBytes(GetAlignment<T>()));
        using ElementType = std::remove_extent_t<T>;
        void *p =
            AllocLocal(SIZE_BEFORE_DATA_OFFSET + sizeof(ElementType) * size, CalculateAllocatorAlignment(alignof(T)));
        if (UNLIKELY(p == nullptr)) {
            return nullptr;
        }
        *static_cast<size_t *>(p) = size;
        auto *data = ToNativePtr<ElementType>(ToUintPtr(p) + SIZE_BEFORE_DATA_OFFSET);
        ElementType *currentElement = data;
        // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
        for (size_t i = 0; i < size; ++i, ++currentElement) {
            new (currentElement) ElementType();
        }
        return data;
    }

    virtual void *AllocateInLargeAllocator([[maybe_unused]] size_t size, [[maybe_unused]] Alignment align,
                                           [[maybe_unused]] BaseClass *cls)
    {
        return nullptr;
    }

#if defined(TRACK_INTERNAL_ALLOCATIONS)
    virtual void Dump() {}
#endif

protected:
    // NOLINTNEXTLINE(misc-non-private-member-variables-in-classes)
    MemStatsType *memStats_;

private:
    virtual Alignment CalculateAllocatorAlignment(size_t align) = 0;

    AllocatorPurpose allocatorPurpose_;
    GCCollectMode gcCollectMode_;
};

class ObjectAllocatorBase : public Allocator {
protected:
    using PygoteAllocator = PygoteSpaceAllocator<ObjectAllocConfig>;  // Allocator for pygote space

    /// @brief Add new memory pools to the object allocator and allocate memory in them
    template <typename AllocT, bool NEED_LOCK = true>
    inline void *AddPoolsAndAlloc(size_t size, Alignment align, AllocT *objectAllocator, size_t poolSize,
                                  SpaceType spaceType, HeapSpace *heapSpace);

    /**
     * Try to allocate memory for the object; if that fails, add new memory pools and allocate again
     * @param size - size of the object in bytes
     * @param align - alignment
     * @param objectAllocator - allocator for the object
     * @param poolSize - size of a memory pool for the specified allocator
     * @param spaceType - SpaceType of the object
     * @param heapSpace - heap space to expand when new pools are needed
     * @return pointer to allocated memory or nullptr if allocation failed
     */
    template <typename AllocT, bool NEED_LOCK = true>
    inline void *AllocateSafe(size_t size, Alignment align, AllocT *objectAllocator, size_t poolSize,
                              SpaceType spaceType, HeapSpace *heapSpace);
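
    // Expected control flow of AllocateSafe (a sketch only; the actual definition lives in the
    // corresponding .cpp/.inl file):
    //
    //   void *mem = objectAllocator->template Alloc<NEED_LOCK>(size, align);
    //   if (mem == nullptr) {
    //       return AddPoolsAndAlloc<AllocT, NEED_LOCK>(size, align, objectAllocator, poolSize,
    //                                                  spaceType, heapSpace);
    //   }
    //   return mem;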

    /**
     * @brief Initialize an object's memory allocated by an allocator.
     *        NOTE: the object header must be zero
     * @param mem - pointer to the allocated object
     * @param size - size of the object in bytes
     */
    void ObjectMemoryInit(void *mem, size_t size) const;

    /**
     * @brief Initialize memory which will be used for objects.
     * @param mem - pointer to the allocated memory
     * @param size - size of the memory in bytes
     */
    void MemoryInitialize(void *mem, size_t size) const;

public:
    enum class ObjMemInitPolicy : bool { NO_INIT, REQUIRE_INIT };
    ObjectAllocatorBase() = delete;
    NO_COPY_SEMANTIC(ObjectAllocatorBase);
    NO_MOVE_SEMANTIC(ObjectAllocatorBase);

    explicit ObjectAllocatorBase(MemStatsType *memStats, GCCollectMode gcCollectMode, bool createPygoteSpaceAllocator);

    ~ObjectAllocatorBase() override;

    void *Allocate([[maybe_unused]] size_t size, [[maybe_unused]] Alignment align,
                   [[maybe_unused]] ark::ManagedThread *thread) final
    {
        LOG(FATAL, ALLOC)
            << "Don't use the common Allocate method for object allocation without an object initialization argument";
        return nullptr;
    }

    [[nodiscard]] virtual void *Allocate(size_t size, Alignment align, ark::ManagedThread *thread,
                                         ObjMemInitPolicy objInit, bool pinned) = 0;
    [[nodiscard]] virtual void *AllocateNonMovable(size_t size, Alignment align, ark::ManagedThread *thread,
                                                   ObjMemInitPolicy objInit) = 0;
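
    // A typical call from the runtime might look like the following sketch (DEFAULT_ALIGNMENT
    // and the policy/pinned arguments are illustrative choices, not mandated by this header):
    //
    //   void *mem = objectAllocator->Allocate(size, DEFAULT_ALIGNMENT, thread,
    //                                         ObjMemInitPolicy::REQUIRE_INIT, /* pinned */ false);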

    /**
     * Iterate over all objects and reclaim memory for objects reported as collectable by gcObjectVisitor
     * @param gcObjectVisitor - function which returns true for an ObjectHeader if the memory occupied
     * by the object can be reclaimed
     */
    virtual void Collect(const GCObjectVisitor &gcObjectVisitor, GCCollectMode collectMode) = 0;

    /**
     * Return max size for regular size objects
     * @return max size in bytes for regular size objects
     */
    virtual size_t GetRegularObjectMaxSize() = 0;

    /**
     * Return max size for large objects
     * @return max size in bytes for large objects
     */
    virtual size_t GetLargeObjectMaxSize() = 0;

    /**
     * Checks if the object is in the young space
     * @param obj - object address
     * @return true if the object is in the young space
     */
    virtual bool IsObjectInYoungSpace(const ObjectHeader *obj) = 0;

    /**
     * Checks if memRange intersects the young space
     * @param memRange - memory range to check
     * @return true if memRange intersects the young space
     */
    virtual bool IsIntersectedWithYoung(const MemRange &memRange) = 0;

    /**
     * Checks if the object is in the non-movable space
     * @param obj - object address
     * @return true if the object is in the non-movable space
     */
    virtual bool IsObjectInNonMovableSpace(const ObjectHeader *obj) = 0;

    /// @return true if the allocator has a young space
    virtual bool HasYoungSpace() = 0;

    /**
     * Pin an object's address in heap space. Such an object cannot be moved to another space
     * @param object - object to pin
     */
    virtual void PinObject(ObjectHeader *object) = 0;

    /**
     * Unpin a pinned object in heap space. Such an object can be moved after this operation
     * @param object - object to unpin
     */
    virtual void UnpinObject(ObjectHeader *object) = 0;

    /**
     * Get young space memory ranges
     * @note PandaVector can't be used here
     * @return young space memory ranges
     */
    virtual const std::vector<MemRange> &GetYoungSpaceMemRanges() = 0;

    virtual std::vector<MarkBitmap *> &GetYoungSpaceBitmaps() = 0;

    virtual void ResetYoungAllocator() = 0;

    virtual TLAB *CreateNewTLAB(size_t tlabSize) = 0;

    virtual size_t GetTLABMaxAllocSize() = 0;

    virtual bool IsTLABSupported() = 0;

    /// @brief Check if the object allocator contains the object starting at address obj
    virtual bool ContainObject([[maybe_unused]] const ObjectHeader *obj) const = 0;

    /**
     * @brief Check if the object obj is live: obj is already allocated and
     * not yet collected.
     */
    virtual bool IsLive([[maybe_unused]] const ObjectHeader *obj) = 0;

    /// @brief Check if the current allocators' allocation state is valid.
    virtual size_t VerifyAllocatorStatus() = 0;

    virtual HeapSpace *GetHeapSpace() = 0;

    PygoteAllocator *GetPygoteSpaceAllocator()
    {
        return pygoteSpaceAllocator_;
    }

    const PygoteAllocator *GetPygoteSpaceAllocator() const
    {
        return pygoteSpaceAllocator_;
    }

    void DisablePygoteAlloc()
    {
        pygoteAllocEnabled_ = false;
    }

    bool IsPygoteAllocEnabled() const
    {
        ASSERT(!pygoteAllocEnabled_ || pygoteSpaceAllocator_ != nullptr);
        return pygoteAllocEnabled_;
    }

    static size_t GetObjectSpaceFreeBytes()
    {
        return PoolManager::GetMmapMemPool()->GetObjectSpaceFreeBytes();
    }

    bool HaveEnoughPoolsInObjectSpace(size_t poolsNum) const;

protected:
    // NOLINTNEXTLINE(misc-non-private-member-variables-in-classes)
    PygoteAllocator *pygoteSpaceAllocator_ = nullptr;
    // NOLINTNEXTLINE(misc-non-private-member-variables-in-classes)
    bool pygoteAllocEnabled_ = false;

private:
    void Free([[maybe_unused]] void *mem) final
    {
        LOG(FATAL, ALLOC) << "ObjectAllocatorBase shouldn't have Free";
    }
};

/**
 * Template wrapper for a single underlying allocator
 * @tparam AllocT - type of the underlying allocator
 */
template <typename AllocT, AllocatorPurpose ALLOCATOR_PURPOSE>
class AllocatorSingleT final : public Allocator {
public:
    // NOLINTNEXTLINE(readability-magic-numbers)
    explicit AllocatorSingleT(MemStatsType *memStats)
        : Allocator(memStats, ALLOCATOR_PURPOSE, GCCollectMode::GC_NONE), allocator_(memStats)
    {
    }
    ~AllocatorSingleT() final = default;
    NO_COPY_SEMANTIC(AllocatorSingleT);
    DEFAULT_NOEXCEPT_MOVE_SEMANTIC(AllocatorSingleT);

    [[nodiscard]] void *Allocate(size_t size, Alignment align, [[maybe_unused]] ark::ManagedThread *thread) final
    {
        return allocator_.Alloc(size, align);
    }

    [[nodiscard]] void *AllocateLocal(size_t size, Alignment align, [[maybe_unused]] ark::ManagedThread *thread) final
    {
        return allocator_.AllocLocal(size, align);
    }

    void Free(void *mem) final
    {
        allocator_.Free(mem);
    }

    void VisitAndRemoveAllPools(const MemVisitor &memVisitor) final
    {
        allocator_.VisitAndRemoveAllPools(memVisitor);
    }

    void VisitAndRemoveFreePools(const MemVisitor &memVisitor) final
    {
        allocator_.VisitAndRemoveFreePools(memVisitor);
    }

    void IterateOverObjectsInRange([[maybe_unused]] MemRange memRange,
                                   [[maybe_unused]] const ObjectVisitor &objectVisitor) final
    {
        LOG(FATAL, ALLOC) << "IterateOverObjectsInRange not implemented for AllocatorSingleT";
    }

    void IterateOverObjects([[maybe_unused]] const ObjectVisitor &objectVisitor) final
    {
        LOG(FATAL, ALLOC) << "IterateOverObjects not implemented for AllocatorSingleT";
    }

#if defined(TRACK_INTERNAL_ALLOCATIONS)
    void Dump() override
    {
        allocator_.Dump();
    }
#endif

private:
    Alignment CalculateAllocatorAlignment(size_t align) final
    {
        if constexpr (ALLOCATOR_PURPOSE == AllocatorPurpose::ALLOCATOR_PURPOSE_OBJECT) {
            return GetAlignment(align);
        }
        return GetInternalAlignment(align);
    }

    AllocT allocator_;
};

/**
 * Pointer wrapper. It checks that the type of the allocator matches the expected purpose.
 * @tparam ALLOCATOR_PURPOSE - expected purpose of the wrapped allocator
 */
template <AllocatorPurpose ALLOCATOR_PURPOSE>
class AllocatorPtr {
public:
    AllocatorPtr() = default;
    // NOLINTNEXTLINE(google-explicit-constructor)
    AllocatorPtr(std::nullptr_t aNullptr) noexcept : allocatorPtr_(aNullptr) {}

    explicit AllocatorPtr(Allocator *allocator) : allocatorPtr_(allocator) {}

    Allocator *operator->()
    {
        ASSERT((allocatorPtr_ == nullptr) || (allocatorPtr_->GetPurpose() == ALLOCATOR_PURPOSE));
        return allocatorPtr_;
    }

    AllocatorPtr &operator=(std::nullptr_t aNullptr) noexcept
    {
        allocatorPtr_ = aNullptr;
        return *this;
    }

    AllocatorPtr &operator=(Allocator *allocator)
    {
        allocatorPtr_ = allocator;
        return *this;
    }

    explicit operator Allocator *()
    {
        return allocatorPtr_;
    }

    explicit operator ObjectAllocatorBase *()
    {
        ASSERT(allocatorPtr_->GetPurpose() == AllocatorPurpose::ALLOCATOR_PURPOSE_OBJECT);
        return static_cast<ObjectAllocatorBase *>(allocatorPtr_);
    }

    ALWAYS_INLINE bool operator==(const AllocatorPtr &other)
    {
        return allocatorPtr_ == static_cast<Allocator *>(other);
    }

    ALWAYS_INLINE bool operator==(std::nullptr_t) noexcept
    {
        return allocatorPtr_ == nullptr;
    }

    ALWAYS_INLINE bool operator!=(std::nullptr_t) noexcept
    {
        return allocatorPtr_ != nullptr;
    }

    ObjectAllocatorBase *AsObjectAllocator()
    {
        ASSERT(ALLOCATOR_PURPOSE == AllocatorPurpose::ALLOCATOR_PURPOSE_OBJECT);
        return this->operator ark::mem::ObjectAllocatorBase *();
    }

    ~AllocatorPtr() = default;

    DEFAULT_COPY_SEMANTIC(AllocatorPtr);
    DEFAULT_NOEXCEPT_MOVE_SEMANTIC(AllocatorPtr);

protected:
    // NOLINTNEXTLINE(misc-non-private-member-variables-in-classes)
    Allocator *allocatorPtr_ = nullptr;
};

using InternalAllocatorPtr = AllocatorPtr<AllocatorPurpose::ALLOCATOR_PURPOSE_INTERNAL>;
using ObjectAllocatorPtr = AllocatorPtr<AllocatorPurpose::ALLOCATOR_PURPOSE_OBJECT>;

template <InternalAllocatorConfig CONFIG>
using InternalAllocatorT = AllocatorSingleT<InternalAllocator<CONFIG>, AllocatorPurpose::ALLOCATOR_PURPOSE_INTERNAL>;
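
// A minimal wiring sketch for the pieces above (the config enumerator name PANDA_ALLOCATORS
// is illustrative, not confirmed by this header):
//
//   InternalAllocatorT<InternalAllocatorConfig::PANDA_ALLOCATORS> internal(memStats);
//   InternalAllocatorPtr ptr(&internal);        // purpose-checked pointer wrapper
//   auto *buf = ptr->AllocArray<uint8_t>(64);   // operator-> ASSERTs purpose == INTERNAL
//   ptr->Free(buf);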

template <MTModeT MT_MODE = MT_MODE_MULTI>
class ObjectAllocatorNoGen : public ObjectAllocatorBase {
    using ObjectAllocator = RunSlotsAllocator<ObjectAllocConfig>;       // Allocator used for middle size allocations
    using LargeObjectAllocator = FreeListAllocator<ObjectAllocConfig>;  // Allocator used for large objects
    using HumongousObjectAllocator = HumongousObjAllocator<ObjectAllocConfig>;  // Allocator used for humongous objects
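
    // Presumed size-class routing, based on the underlying allocators (exact thresholds are
    // reported by GetRegularObjectMaxSize()/GetLargeObjectMaxSize()):
    //   size <= GetRegularObjectMaxSize()  ->  RunSlotsAllocator
    //   size <= GetLargeObjectMaxSize()    ->  FreeListAllocator
    //   larger                             ->  HumongousObjAllocator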

public:
    NO_MOVE_SEMANTIC(ObjectAllocatorNoGen);
    NO_COPY_SEMANTIC(ObjectAllocatorNoGen);

    explicit ObjectAllocatorNoGen(MemStatsType *memStats, bool createPygoteSpaceAllocator);

    ~ObjectAllocatorNoGen() override;

    [[nodiscard]] void *Allocate(size_t size, Alignment align, [[maybe_unused]] ark::ManagedThread *thread,
                                 ObjMemInitPolicy objInit, bool pinned) override;

    [[nodiscard]] void *AllocateNonMovable(size_t size, Alignment align, ark::ManagedThread *thread,
                                           ObjMemInitPolicy objInit) override;

    void VisitAndRemoveAllPools(const MemVisitor &memVisitor) final;

    void VisitAndRemoveFreePools(const MemVisitor &memVisitor) final;

    void IterateOverObjects(const ObjectVisitor &objectVisitor) final;

    /// @brief Iterates over all objects in the object allocator
    void IterateRegularSizeObjects(const ObjectVisitor &objectVisitor) final;

    /// @brief Iterates over objects in all allocators except the object allocator
    void IterateNonRegularSizeObjects(const ObjectVisitor &objectVisitor) final;

    void FreeObjectsMovedToPygoteSpace() final;

    void Collect(const GCObjectVisitor &gcObjectVisitor, GCCollectMode collectMode) final;

    size_t GetRegularObjectMaxSize() final;

    size_t GetLargeObjectMaxSize() final;

    bool IsObjectInYoungSpace([[maybe_unused]] const ObjectHeader *obj) final
    {
        return false;
    }

    bool IsIntersectedWithYoung([[maybe_unused]] const MemRange &memRange) final
    {
        LOG(FATAL, ALLOC) << "ObjectAllocatorNoGen: IsIntersectedWithYoung not applicable";
        return false;
    }

    bool IsObjectInNonMovableSpace([[maybe_unused]] const ObjectHeader *obj) final
    {
        return true;
    }

    bool HasYoungSpace() final
    {
        return false;
    }

    void PinObject([[maybe_unused]] ObjectHeader *object) final {}

    void UnpinObject([[maybe_unused]] ObjectHeader *object) final {}

    const std::vector<MemRange> &GetYoungSpaceMemRanges() final
    {
        LOG(FATAL, ALLOC) << "ObjectAllocatorNoGen: GetYoungSpaceMemRanges not applicable";
        UNREACHABLE();
    }

    std::vector<MarkBitmap *> &GetYoungSpaceBitmaps() final
    {
        LOG(FATAL, ALLOC) << "ObjectAllocatorNoGen: GetYoungSpaceBitmaps not applicable";
        UNREACHABLE();
    }

    void ResetYoungAllocator() final
    {
        LOG(FATAL, ALLOC) << "ObjectAllocatorNoGen: ResetYoungAllocator not applicable";
    }

    TLAB *CreateNewTLAB(size_t tlabSize) final;

    size_t GetTLABMaxAllocSize() final;

    bool IsTLABSupported() final
    {
        return false;
    }

    void IterateOverObjectsInRange([[maybe_unused]] MemRange memRange,
                                   [[maybe_unused]] const ObjectVisitor &objectVisitor) final
    {
        LOG(FATAL, ALLOC) << "ObjectAllocatorNoGen: IterateOverObjectsInRange not implemented";
    }

    bool ContainObject(const ObjectHeader *obj) const final;

    bool IsLive(const ObjectHeader *obj) final;

    size_t VerifyAllocatorStatus() final
    {
        size_t failCount = 0;
        failCount += objectAllocator_->VerifyAllocator();
        // NOTE(yyang): add verify for large/humongous allocator
        return failCount;
    }

    [[nodiscard]] void *AllocateLocal(size_t /* size */, Alignment /* align */, ark::ManagedThread * /* thread */) final
    {
        LOG(FATAL, ALLOC) << "ObjectAllocatorNoGen: AllocateLocal not supported";
        return nullptr;
    }

    HeapSpace *GetHeapSpace() override
    {
        return &heapSpace_;
    }

private:
    Alignment CalculateAllocatorAlignment(size_t align) final;

    ObjectAllocator *objectAllocator_ = nullptr;
    LargeObjectAllocator *largeObjectAllocator_ = nullptr;
    HumongousObjectAllocator *humongousObjectAllocator_ = nullptr;
    HeapSpace heapSpace_;
};

// Base class for the object allocators of all generational GCs
class ObjectAllocatorGenBase : public ObjectAllocatorBase {
public:
    explicit ObjectAllocatorGenBase(MemStatsType *memStats, GCCollectMode gcCollectMode,
                                    bool createPygoteSpaceAllocator);

    GenerationalSpaces *GetHeapSpace() override
    {
        return &heapSpaces_;
    }

    ~ObjectAllocatorGenBase() override = default;

    virtual void *AllocateTenured(size_t size) = 0;
    virtual void *AllocateTenuredWithoutLocks(size_t size) = 0;

    NO_COPY_SEMANTIC(ObjectAllocatorGenBase);
    NO_MOVE_SEMANTIC(ObjectAllocatorGenBase);

    /// Updates young space mem ranges, bitmaps, etc.
    virtual void UpdateSpaceData() = 0;

    /// Invalidates space mem ranges, bitmaps, etc.
    virtual void InvalidateSpaceData() final;

protected:
    ALWAYS_INLINE std::vector<MemRange> &GetYoungRanges()
    {
        return ranges_;
    }

    ALWAYS_INLINE std::vector<MarkBitmap *> &GetYoungBitmaps()
    {
        return youngBitmaps_;
    }

    GenerationalSpaces heapSpaces_;  // NOLINT(misc-non-private-member-variables-in-classes)

private:
    std::vector<MemRange> ranges_;            // Ranges for young space
    std::vector<MarkBitmap *> youngBitmaps_;  // Bitmaps for young regions
};

template <MTModeT MT_MODE = MT_MODE_MULTI>
class ObjectAllocatorGen final : public ObjectAllocatorGenBase {
    using YoungGenAllocator = BumpPointerAllocator<ObjectAllocConfigWithCrossingMap,
                                                   BumpPointerAllocatorLockConfig::ParameterizedLock<MT_MODE>, true>;
    using ObjectAllocator =
        RunSlotsAllocator<ObjectAllocConfigWithCrossingMap>;  // Allocator used for middle size allocations
    using LargeObjectAllocator =
        FreeListAllocator<ObjectAllocConfigWithCrossingMap>;  // Allocator used for large objects
    using HumongousObjectAllocator =
        HumongousObjAllocator<ObjectAllocConfigWithCrossingMap>;  // Allocator used for humongous objects
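
    // Presumed generational layout, based on the member allocators below: young objects come
    // from the bump-pointer allocator (optionally via TLABs); tenured objects are routed by
    // size to the RunSlots/FreeList/Humongous allocators; non-movable objects go to the
    // dedicated nonMovableObjectAllocator_/largeNonMovableObjectAllocator_ instances.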

public:
    NO_MOVE_SEMANTIC(ObjectAllocatorGen);
    NO_COPY_SEMANTIC(ObjectAllocatorGen);

    explicit ObjectAllocatorGen(MemStatsType *memStats, bool createPygoteSpaceAllocator);

    ~ObjectAllocatorGen() final;

    [[nodiscard]] void *Allocate(size_t size, Alignment align, [[maybe_unused]] ark::ManagedThread *thread,
                                 ObjMemInitPolicy objInit, bool pinned) final;

    [[nodiscard]] void *AllocateNonMovable(size_t size, Alignment align, [[maybe_unused]] ark::ManagedThread *thread,
                                           ObjMemInitPolicy objInit) final;

    void *AllocateTenured(size_t size) final
    {
        return AllocateTenuredImpl<true>(size);
    }

    void *AllocateTenuredWithoutLocks(size_t size) final
    {
        return AllocateTenuredImpl<false>(size);
    }

    void VisitAndRemoveAllPools(const MemVisitor &memVisitor) final;

    void VisitAndRemoveFreePools(const MemVisitor &memVisitor) final;

    void IterateOverYoungObjects(const ObjectVisitor &objectVisitor) final;

    void IterateOverTenuredObjects(const ObjectVisitor &objectVisitor) final;

    void IterateOverObjects(const ObjectVisitor &objectVisitor) final;

    /// @brief Iterates over all objects in the object allocator
    void IterateRegularSizeObjects(const ObjectVisitor &objectVisitor) final;

    /// @brief Iterates over objects in all allocators except the object allocator
    void IterateNonRegularSizeObjects(const ObjectVisitor &objectVisitor) final;

    void FreeObjectsMovedToPygoteSpace() final;

    void Collect(const GCObjectVisitor &gcObjectVisitor, GCCollectMode collectMode) final;

    size_t GetRegularObjectMaxSize() final;

    size_t GetLargeObjectMaxSize() final;

    bool IsObjectInYoungSpace(const ObjectHeader *obj) final;

    void PinObject([[maybe_unused]] ObjectHeader *object) final
    {
        ASSERT(!IsObjectInYoungSpace(object));
    }

    void UnpinObject([[maybe_unused]] ObjectHeader *object) final
    {
        ASSERT(!IsObjectInYoungSpace(object));
    }

    bool IsIntersectedWithYoung(const MemRange &memRange) final;

    bool IsObjectInNonMovableSpace(const ObjectHeader *obj) final;

    bool HasYoungSpace() final;

    const std::vector<MemRange> &GetYoungSpaceMemRanges() final;

    std::vector<MarkBitmap *> &GetYoungSpaceBitmaps() final;

    void ResetYoungAllocator() final;

    TLAB *CreateNewTLAB(size_t tlabSize) final;

    /**
     * @brief This method should be used carefully: for an adaptive TLAB
     * it only shows the maximum possible size (grow limit) of a TLAB
     */
    size_t GetTLABMaxAllocSize() final;

    bool IsTLABSupported() final
    {
        return true;
    }

    void IterateOverObjectsInRange(MemRange memRange, const ObjectVisitor &objectVisitor) final;

    bool ContainObject(const ObjectHeader *obj) const final;

    bool IsLive(const ObjectHeader *obj) final;

    size_t VerifyAllocatorStatus() final
    {
        size_t failCount = 0;
        failCount += objectAllocator_->VerifyAllocator();
        // NOTE(yyang): add verify for large/humongous allocator
        return failCount;
    }

    [[nodiscard]] void *AllocateLocal(size_t /* size */, Alignment /* align */, ark::ManagedThread * /* thread */) final
    {
        LOG(FATAL, ALLOC) << "ObjectAllocatorGen: AllocateLocal not supported";
        return nullptr;
    }

    static size_t GetYoungAllocMaxSize();

    void UpdateSpaceData() final;

private:
    Alignment CalculateAllocatorAlignment(size_t align) final;

    YoungGenAllocator *youngGenAllocator_ = nullptr;
    ObjectAllocator *objectAllocator_ = nullptr;
    LargeObjectAllocator *largeObjectAllocator_ = nullptr;
    HumongousObjectAllocator *humongousObjectAllocator_ = nullptr;
    MemStatsType *memStats_ = nullptr;
    ObjectAllocator *nonMovableObjectAllocator_ = nullptr;
    LargeObjectAllocator *largeNonMovableObjectAllocator_ = nullptr;

    template <bool NEED_LOCK = true>
    void *AllocateTenuredImpl(size_t size);
};

template <GCType GC_TYPE, MTModeT MT_MODE = MT_MODE_MULTI>
class AllocConfig {
};
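
// The empty primary template above is an extension point: specializations defined elsewhere in
// the runtime bind a concrete GC type to its allocator configuration (e.g., a non-generational
// GC to ObjectAllocatorNoGen, a generational GC to ObjectAllocatorGen).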

}  // namespace ark::mem

#endif  // RUNTIME_MEM_ALLOCATOR_H