/**
 * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef RUNTIME_MEM_GC_G1_G1_ALLOCATOR_H
#define RUNTIME_MEM_GC_G1_G1_ALLOCATOR_H

#include "runtime/include/mem/allocator.h"
#include "runtime/mem/region_allocator.h"
#include "runtime/mem/region_allocator-inl.h"
#include "runtime/mem/gc/g1/g1-allocator_constants.h"

namespace ark::mem {
class ObjectAllocConfigWithCrossingMap;
class ObjectAllocConfig;
class TLAB;

template <MTModeT MT_MODE = MT_MODE_MULTI>
class ObjectAllocatorG1 final : public ObjectAllocatorGenBase {
    // Allocator used for regular (movable) objects
    using ObjectAllocator = RegionAllocator<ObjectAllocConfig>;
    // Allocator used for non-movable objects
    using NonMovableAllocator = RegionNonmovableAllocator<ObjectAllocConfig, RegionAllocatorLockConfig::CommonLock,
                                                          FreeListAllocator<ObjectAllocConfig>>;
    // Allocator used for humongous objects
    using HumongousObjectAllocator = RegionHumongousAllocator<ObjectAllocConfig>;

    // REGION_SIZE must not be changed here.
    // If this value needs to change, change G1_REGION_SIZE instead.
    static constexpr size_t REGION_SIZE = mem::G1_REGION_SIZE;
    static_assert(REGION_SIZE == mem::G1_REGION_SIZE);

public:
    NO_MOVE_SEMANTIC(ObjectAllocatorG1);
    NO_COPY_SEMANTIC(ObjectAllocatorG1);

    explicit ObjectAllocatorG1(MemStatsType *memStats, bool createPygoteSpaceAllocator);

    ~ObjectAllocatorG1() final = default;

    void *Allocate(size_t size, Alignment align, [[maybe_unused]] ark::ManagedThread *thread, ObjMemInitPolicy objInit,
                   bool pinned) final;

    void *AllocateNonMovable(size_t size, Alignment align, [[maybe_unused]] ark::ManagedThread *thread,
                             ObjMemInitPolicy objInit) final;

    void PinObject(ObjectHeader *object) final;

    void UnpinObject(ObjectHeader *object) final;

    void VisitAndRemoveAllPools(const MemVisitor &memVisitor) final;

    void VisitAndRemoveFreePools(const MemVisitor &memVisitor) final;

    void IterateOverYoungObjects(const ObjectVisitor &objectVisitor) final;

    size_t GetMaxYoungRegionsCount();

    PandaVector<Region *> GetYoungRegions();

    PandaVector<Region *> GetMovableRegions();

    PandaVector<Region *> GetAllRegions();

    /// Returns a vector which contains non-movable and humongous regions
    PandaVector<Region *> GetNonRegularRegions();

    void IterateOverTenuredObjects(const ObjectVisitor &objectVisitor) final;

    void IterateOverHumongousObjects(const ObjectVisitor &objectVisitor);

    void IterateOverObjects(const ObjectVisitor &objectVisitor) final;

    /// @brief Iterates over all objects in the object allocator
    void IterateRegularSizeObjects(const ObjectVisitor &objectVisitor) final;

    /// @brief Iterates over objects in all allocators except the object allocator
    void IterateNonRegularSizeObjects(const ObjectVisitor &objectVisitor) final;

    void FreeObjectsMovedToPygoteSpace() final;

    // This entry point is not used by the G1 allocator and must never be called.
    void Collect(const GCObjectVisitor &gcObjectVisitor, GCCollectMode collectMode) final
    {
        (void)gcObjectVisitor;
        (void)collectMode;
        UNREACHABLE();
    }

    /**
     * Collects non-regular regions, i.e. removes dead objects from humongous and non-movable regions
     * and removes regions that have become empty.
     */
    void CollectNonRegularRegions(const RegionsVisitor &regionVisitor, const GCObjectVisitor &gcObjectVisitor);

    size_t GetRegularObjectMaxSize() final;

    size_t GetLargeObjectMaxSize() final;

    bool IsObjectInYoungSpace(const ObjectHeader *obj) final;

    bool IsIntersectedWithYoung(const MemRange &memRange) final;

    bool HasYoungSpace() final;

    const std::vector<MemRange> &GetYoungSpaceMemRanges() final;

    template <bool INCLUDE_CURRENT_REGION>
    PandaPriorityQueue<std::pair<uint32_t, Region *>> GetTopGarbageRegions()
    {
        return objectAllocator_->template GetTopGarbageRegions<INCLUDE_CURRENT_REGION>();
    }
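
    // Illustrative sketch of how this queue might be consumed (an assumption about typical use,
    // not something defined by this header; `allocator` is a hypothetical instance):
    //
    //   auto regions = allocator.GetTopGarbageRegions<false>();
    //   while (!regions.empty()) {
    //       auto [garbageBytes, region] = regions.top();
    //       regions.pop();
    //       // decide whether `region` is worth adding to the collection set
    //   }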

    std::vector<MarkBitmap *> &GetYoungSpaceBitmaps() final;

    void ReserveRegionIfNeeded()
    {
        objectAllocator_->ReserveRegionIfNeeded();
    }

    void ReleaseReservedRegion()
    {
        objectAllocator_->ReleaseReservedRegion();
    }

    void ResetYoungAllocator() final;

    template <RegionFlag REGIONS_TYPE, RegionSpace::ReleaseRegionsPolicy REGIONS_RELEASE_POLICY,
              OSPagesPolicy OS_PAGES_POLICY, bool NEED_LOCK, typename Container>
    void ResetRegions(const Container &regions)
    {
        objectAllocator_->ResetSeveralSpecificRegions<REGIONS_TYPE, REGIONS_RELEASE_POLICY, OS_PAGES_POLICY, NEED_LOCK>(
            regions);
    }

    TLAB *CreateNewTLAB(size_t tlabSize) final;

    /**
     * @brief Use this method carefully: for an adaptive TLAB it only reports
     * the maximum possible size (grow limit) of a TLAB
     */
    size_t GetTLABMaxAllocSize() final;

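    // A minimal caller-side sketch (illustrative only, not part of this interface; `allocator` and
    // `requestedSize` are hypothetical names). Because GetTLABMaxAllocSize() only reports the grow
    // limit, a caller would bound its request by it and still check the result of CreateNewTLAB:
    //
    //   if (requestedSize <= allocator.GetTLABMaxAllocSize()) {
    //       TLAB *tlab = allocator.CreateNewTLAB(requestedSize);
    //       // a null result would mean the request could not be served (assumption)
    //   }
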
    bool IsTLABSupported() final
    {
        return true;
    }

    void IterateOverObjectsInRange(MemRange memRange, const ObjectVisitor &objectVisitor) final;

    bool ContainObject(const ObjectHeader *obj) const final;

    bool IsLive(const ObjectHeader *obj) final;

    size_t VerifyAllocatorStatus() final
    {
        LOG(FATAL, ALLOC) << "Not implemented";
        return 0;
    }

    [[nodiscard]] void *AllocateLocal([[maybe_unused]] size_t size, [[maybe_unused]] Alignment align,
                                      [[maybe_unused]] ark::ManagedThread *thread) final
    {
        LOG(FATAL, ALLOC) << "ObjectAllocatorG1: AllocateLocal not supported";
        return nullptr;
    }

    bool IsObjectInNonMovableSpace(const ObjectHeader *obj) final;

    void UpdateSpaceData() final;

    void CompactYoungRegions(const GCObjectVisitor &deathChecker, const ObjectVisitorEx &moveChecker);

    void AddPromotedRegionToQueueIfPinned(Region *region)
    {
        objectAllocator_->AddPromotedRegionToQueueIfPinned(region);
    }

    template <RegionFlag REGION_TYPE, bool USE_MARKBITMAP = false>
    void CompactRegion(Region *region, const GCObjectVisitor &deathChecker, const ObjectVisitorEx &moveChecker)
    {
        objectAllocator_->template CompactSpecificRegion<REGION_TYPE, RegionFlag::IS_OLD, USE_MARKBITMAP>(
            region, deathChecker, moveChecker);
    }

    template <bool USE_MARKBITMAP>
    void PromoteYoungRegion(Region *region, const GCObjectVisitor &deathChecker, const ObjectVisitor &promotionChecker)
    {
        ASSERT(region->HasFlag(RegionFlag::IS_EDEN));
        objectAllocator_->template PromoteYoungRegion<USE_MARKBITMAP>(region, deathChecker, promotionChecker);
    }

    void CompactTenuredRegions(const PandaVector<Region *> &regions, const GCObjectVisitor &deathChecker,
                               const ObjectVisitorEx &moveChecker);

    template <bool USE_ATOMIC = true>
    Region *PopFromOldRegionQueue()
    {
        return objectAllocator_->template PopFromRegionQueue<USE_ATOMIC, RegionFlag::IS_OLD>();
    }

    template <bool USE_ATOMIC = true>
    void PushToOldRegionQueue(Region *region)
    {
        objectAllocator_->template PushToRegionQueue<USE_ATOMIC, RegionFlag::IS_OLD>(region);
    }

    template <bool USE_ATOMIC = true>
    Region *CreateAndSetUpNewOldRegion()
    {
        return objectAllocator_->template CreateAndSetUpNewRegionWithLock<USE_ATOMIC, RegionFlag::IS_OLD>();
    }

    void ClearCurrentTenuredRegion()
    {
        objectAllocator_->template ClearCurrentRegion<IS_OLD>();
    }

    static constexpr size_t GetRegionSize()
    {
        return REGION_SIZE;
    }

    bool HaveTenuredSize(size_t numRegions) const
    {
        return objectAllocator_->GetSpace()->GetPool()->HaveTenuredSize(numRegions * ObjectAllocator::REGION_SIZE);
    }

    bool HaveFreeRegions(size_t numRegions) const
    {
        return objectAllocator_->GetSpace()->GetPool()->HaveFreeRegions(numRegions, ObjectAllocator::REGION_SIZE);
    }

    static constexpr size_t GetYoungAllocMaxSize()
    {
        // NOTE(dtrubenkov): FIX to a more meaningful value
        return ObjectAllocator::GetMaxRegularObjectSize();
    }

    template <RegionFlag REGION_TYPE, OSPagesPolicy OS_PAGES_POLICY>
    void ReleaseEmptyRegions()
    {
        objectAllocator_->ReleaseEmptyRegions<REGION_TYPE, OS_PAGES_POLICY>();
    }

    void SetDesiredEdenLength(size_t edenLength)
    {
        objectAllocator_->SetDesiredEdenLength(edenLength);
    }

private:
    Alignment CalculateAllocatorAlignment(size_t align) final;

    PandaUniquePtr<ObjectAllocator> objectAllocator_ {nullptr};
    PandaUniquePtr<NonMovableAllocator> nonmovableAllocator_ {nullptr};
    PandaUniquePtr<HumongousObjectAllocator> humongousObjectAllocator_ {nullptr};
    MemStatsType *memStats_ {nullptr};

    void *AllocateTenured(size_t size) final;

    void *AllocateTenuredWithoutLocks(size_t size) final;

    friend class AllocTypeConfigG1;
};
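
// A minimal usage sketch of the allocator interface above (illustrative only; memStats, thread,
// objInit, size and align are assumed to be provided by the caller and are not defined in this
// header):
//
//   ObjectAllocatorG1<> allocator(memStats, /* createPygoteSpaceAllocator */ false);
//   void *movable = allocator.Allocate(size, align, thread, objInit, /* pinned */ false);
//   void *nonMovable = allocator.AllocateNonMovable(size, align, thread, objInit);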

}  // namespace ark::mem

#endif  // RUNTIME_MEM_GC_G1_G1_ALLOCATOR_H