/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef PANDA_OBJECT_ALLOCATOR_HYBRID_H
#define PANDA_OBJECT_ALLOCATOR_HYBRID_H

#include "runtime/include/mem/allocator.h"
#include "runtime/mem/region_allocator.h"

namespace panda::mem {

// Allocator that combines a region-based allocator for regular objects, a free-list
// allocator for large objects and a separate allocator for humongous objects.
class HybridObjectAllocator final : public ObjectAllocatorBase {
public:
    using ObjectAllocator = RegionAllocator<ObjectAllocConfig>;
    using LargeObjectAllocator = FreeListAllocator<ObjectAllocConfig>;          // Allocator used for large objects
    using HumongousObjectAllocator = HumongousObjAllocator<ObjectAllocConfig>;  // Allocator used for humongous objects

    NO_MOVE_SEMANTIC(HybridObjectAllocator);
    NO_COPY_SEMANTIC(HybridObjectAllocator);

    explicit HybridObjectAllocator(mem::MemStatsType *mem_stats, bool create_pygote_space_allocator);

    ~HybridObjectAllocator() final;

    [[nodiscard]] void *Allocate(size_t size, Alignment align, [[maybe_unused]] panda::ManagedThread *thread) final;

    [[nodiscard]] void *AllocateInLargeAllocator(size_t size, Alignment align, BaseClass *cls) final;

    [[nodiscard]] void *AllocateNonMovable([[maybe_unused]] size_t size, [[maybe_unused]] Alignment align,
                                           [[maybe_unused]] panda::ManagedThread *thread) final
    {
        return nullptr;
    }

    void IterateOverObjects([[maybe_unused]] const ObjectVisitor &object_visitor) final {}

    void VisitAndRemoveAllPools([[maybe_unused]] const MemVisitor &mem_visitor) final {}

    void VisitAndRemoveFreePools([[maybe_unused]] const MemVisitor &mem_visitor) final {}

    void Collect([[maybe_unused]] const GCObjectVisitor &gc_object_visitor,
                 [[maybe_unused]] GCCollectMode collect_mode) final
    {
    }

    void IterateOverObjectsInRange([[maybe_unused]] MemRange mem_range,
                                   [[maybe_unused]] const ObjectVisitor &object_visitor) final
    {
    }

    size_t GetRegularObjectMaxSize() final
    {
        return 0;
    }

    size_t GetLargeObjectMaxSize() final
    {
        return 0;
    }

    bool IsAddressInYoungSpace([[maybe_unused]] uintptr_t address) final
    {
        return false;
    }

    bool IsIntersectedWithYoung([[maybe_unused]] const MemRange &mem_range) final
    {
        return false;
    }

    bool IsObjectInNonMovableSpace([[maybe_unused]] const ObjectHeader *obj) final
    {
        return false;
    }

    bool HasYoungSpace() final
    {
        return false;
    }
    const std::vector<MemRange> &GetYoungSpaceMemRanges() final
    {
        UNREACHABLE();
    }

    std::vector<MarkBitmap *> &GetYoungSpaceBitmaps() final
    {
        UNREACHABLE();
    }

    void ResetYoungAllocator() final {}

    TLAB *CreateNewTLAB(ManagedThread *thread) final;

    size_t GetTLABMaxAllocSize() final;

    bool IsTLABSupported() final
    {
        return true;
    }

    bool ContainObject(const ObjectHeader *obj) const final;

    bool IsLive(const ObjectHeader *obj) final;

    size_t VerifyAllocatorStatus() final;

    HeapSpace *GetHeapSpace() override
    {
        return &heap_space_;
    }

    ObjectAllocator *GetRegularObjectAllocator()
    {
        return object_allocator_;
    }

    LargeObjectAllocator *GetLargeObjectAllocator()
    {
        return large_object_allocator_;
    }

    HumongousObjectAllocator *GetHumongousObjectAllocator()
    {
        return humongous_object_allocator_;
    }

    static constexpr size_t GetLargeThreshold()
    {
        return LARGE_OBJECT_THRESHHOLD;
    }

    [[nodiscard]] void *AllocateLocal(size_t /* size */, Alignment /* align */,
                                      panda::ManagedThread * /* thread */) final
    {
        LOG(FATAL, ALLOC) << "HybridObjectAllocator: AllocateLocal not supported";
        return nullptr;
    }

private:
    ObjectAllocator *object_allocator_ = nullptr;
    LargeObjectAllocator *large_object_allocator_ = nullptr;
    HumongousObjectAllocator *humongous_object_allocator_ = nullptr;
    // Size threshold separating regular objects from large objects
    static constexpr size_t LARGE_OBJECT_THRESHHOLD = 12_KB;
    // RegionAllocator uses generations
    GenerationalSpaces heap_space_;
};

}  // namespace panda::mem
#endif  // PANDA_OBJECT_ALLOCATOR_HYBRID_H
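
// Illustrative usage sketch (not part of the runtime sources): assuming an already
// constructed HybridObjectAllocator reachable as `allocator`, and caller-supplied
// `size`, `align`, `thread` and `cls`, an allocation could be routed by comparing the
// requested size against the large-object threshold exposed above. The routing the
// runtime actually performs may differ; this only shows how the public entry points
// and GetLargeThreshold() fit together.
//
//     void *mem = nullptr;
//     if (size < panda::mem::HybridObjectAllocator::GetLargeThreshold()) {
//         // Regular objects go through the region-based allocator.
//         mem = allocator->Allocate(size, align, thread);
//     } else {
//         // Objects at or above the threshold go through the large/humongous path.
//         mem = allocator->AllocateInLargeAllocator(size, align, cls);
//     }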