• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
/**
 * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
15 
16 #ifndef RUNTIME_MEM_GC_G1_G1_ALLOCATOR_H
17 #define RUNTIME_MEM_GC_G1_G1_ALLOCATOR_H
18 
19 #include "runtime/include/mem/allocator.h"
20 #include "runtime/mem/region_allocator.h"
21 #include "runtime/mem/region_allocator-inl.h"
22 #include "runtime/mem/gc/g1/g1-allocator_constants.h"
23 
24 namespace ark::mem {
25 class ObjectAllocConfigWithCrossingMap;
26 class ObjectAllocConfig;
27 class TLAB;
28 
29 template <MTModeT MT_MODE = MT_MODE_MULTI>
30 class ObjectAllocatorG1 final : public ObjectAllocatorGenBase {
31     using ObjectAllocator = RegionAllocator<ObjectAllocConfig>;
32     using NonMovableAllocator = RegionNonmovableAllocator<ObjectAllocConfig, RegionAllocatorLockConfig::CommonLock,
33                                                           FreeListAllocator<ObjectAllocConfig>>;
34     using HumongousObjectAllocator =
35         RegionHumongousAllocator<ObjectAllocConfig>;  // Allocator used for humongous objects
36 
37     // REGION_SIZE should not change here.
38     // If it is necessary to change this value, it must be done through changes to G1_REGION_SIZE
39     static constexpr size_t REGION_SIZE = mem::G1_REGION_SIZE;
40     static_assert(REGION_SIZE == mem::G1_REGION_SIZE);
41 
42 public:
43     NO_MOVE_SEMANTIC(ObjectAllocatorG1);
44     NO_COPY_SEMANTIC(ObjectAllocatorG1);
45 
46     explicit ObjectAllocatorG1(MemStatsType *memStats, bool createPygoteSpaceAllocator);
47 
48     ~ObjectAllocatorG1() final = default;
49 
50     void *Allocate(size_t size, Alignment align, [[maybe_unused]] ark::ManagedThread *thread, ObjMemInitPolicy objInit,
51                    bool pinned) final;
52 
53     void *AllocateNonMovable(size_t size, Alignment align, [[maybe_unused]] ark::ManagedThread *thread,
54                              ObjMemInitPolicy objInit) final;
55 
56     void PinObject(ObjectHeader *object) final;
57 
58     void UnpinObject(ObjectHeader *object) final;
59 
60     void VisitAndRemoveAllPools(const MemVisitor &memVisitor) final;
61 
62     void VisitAndRemoveFreePools(const MemVisitor &memVisitor) final;
63 
64     void IterateOverYoungObjects(const ObjectVisitor &objectVisitor) final;
65 
66     size_t GetMaxYoungRegionsCount();
67 
68     PandaVector<Region *> GetYoungRegions();
69 
70     PandaVector<Region *> GetMovableRegions();
71 
72     PandaVector<Region *> GetAllRegions();
73 
74     /// Returns a vector which contains non-movable and humongous regions
75     PandaVector<Region *> GetNonRegularRegions();
76 
77     void IterateOverTenuredObjects(const ObjectVisitor &objectVisitor) final;
78 
79     void IterateOverHumongousObjects(const ObjectVisitor &objectVisitor);
80 
81     void IterateOverObjects(const ObjectVisitor &objectVisitor) final;
82 
83     /// @brief iterates all objects in object allocator
84     void IterateRegularSizeObjects(const ObjectVisitor &objectVisitor) final;
85 
86     /// @brief iterates objects in all allocators except object allocator
87     void IterateNonRegularSizeObjects(const ObjectVisitor &objectVisitor) final;
88 
89     void FreeObjectsMovedToPygoteSpace() final;
90 
Collect(const GCObjectVisitor & gcObjectVisitor,GCCollectMode collectMode)91     void Collect(const GCObjectVisitor &gcObjectVisitor, GCCollectMode collectMode) final
92     {
93         (void)gcObjectVisitor;
94         (void)collectMode;
95         UNREACHABLE();
96     }
97 
98     /**
99      * Collect non regular regions (i.e. remove dead objects from Humongous and NonMovable regions
100      * and remove empty regions).
101      */
102     void CollectNonRegularRegions(const RegionsVisitor &regionVisitor, const GCObjectVisitor &gcObjectVisitor);
103 
104     size_t GetRegularObjectMaxSize() final;
105 
106     size_t GetLargeObjectMaxSize() final;
107 
108     bool IsObjectInYoungSpace(const ObjectHeader *obj) final;
109 
110     bool IsIntersectedWithYoung(const MemRange &memRange) final;
111 
112     bool HasYoungSpace() final;
113 
114     const std::vector<MemRange> &GetYoungSpaceMemRanges() final;
115 
116     template <bool INCLUDE_CURRENT_REGION>
117     PandaVector<std::pair<uint32_t, Region *>> GetTopGarbageRegions(double garbageThreshold = 0.0)
118     {
119         return objectAllocator_->template GetTopGarbageRegions<INCLUDE_CURRENT_REGION>(garbageThreshold);
120     }
121 
122     std::vector<MarkBitmap *> &GetYoungSpaceBitmaps() final;
123 
ReserveRegionIfNeeded()124     void ReserveRegionIfNeeded()
125     {
126         objectAllocator_->ReserveRegionIfNeeded();
127     }
128 
ReleaseReservedRegion()129     void ReleaseReservedRegion()
130     {
131         objectAllocator_->ReleaseReservedRegion();
132     }
133 
134     void ResetYoungAllocator() final;
135 
136     template <RegionFlag REGIONS_TYPE, RegionSpace::ReleaseRegionsPolicy REGIONS_RELEASE_POLICY,
137               OSPagesPolicy OS_PAGES_POLICY, bool NEED_LOCK, typename Container>
ResetRegions(const Container & regions)138     void ResetRegions(const Container &regions)
139     {
140         objectAllocator_->ResetSeveralSpecificRegions<REGIONS_TYPE, REGIONS_RELEASE_POLICY, OS_PAGES_POLICY, NEED_LOCK>(
141             regions);
142     }
143 
144     TLAB *CreateNewTLAB(size_t tlabSize) final;
145 
146     /**
147      * @brief This method should be used carefully, since in case of adaptive TLAB
148      * it only shows max possible size (grow limit) of a TLAB
149      */
150     size_t GetTLABMaxAllocSize() final;
151 
IsTLABSupported()152     bool IsTLABSupported() final
153     {
154         return true;
155     }
156 
157     void IterateOverObjectsInRange(MemRange memRange, const ObjectVisitor &objectVisitor) final;
158 
159     bool ContainObject(const ObjectHeader *obj) const final;
160 
161     bool IsLive(const ObjectHeader *obj) final;
162 
VerifyAllocatorStatus()163     size_t VerifyAllocatorStatus() final
164     {
165         LOG(FATAL, ALLOC) << "Not implemented";
166         return 0;
167     }
168 
AllocateLocal(size_t size,Alignment align,ark::ManagedThread * thread)169     [[nodiscard]] void *AllocateLocal([[maybe_unused]] size_t size, [[maybe_unused]] Alignment align,
170                                       [[maybe_unused]] ark::ManagedThread *thread) final
171     {
172         LOG(FATAL, ALLOC) << "ObjectAllocatorGen: AllocateLocal not supported";
173         return nullptr;
174     }
175 
176     bool IsObjectInNonMovableSpace(const ObjectHeader *obj) final;
177 
178     void UpdateSpaceData() final;
179 
180     void CompactYoungRegions(const GCObjectVisitor &deathChecker, const ObjectVisitorEx &moveChecker);
181 
AddPromotedRegionToQueueIfPinned(Region * region)182     void AddPromotedRegionToQueueIfPinned(Region *region)
183     {
184         objectAllocator_->AddPromotedRegionToQueueIfPinned(region);
185     }
186     template <RegionFlag REGION_TYPE, bool USE_MARKBITMAP = false>
CompactRegion(Region * region,const GCObjectVisitor & deathChecker,const ObjectVisitorEx & moveChecker)187     void CompactRegion(Region *region, const GCObjectVisitor &deathChecker, const ObjectVisitorEx &moveChecker)
188     {
189         objectAllocator_->template CompactSpecificRegion<REGION_TYPE, RegionFlag::IS_OLD, USE_MARKBITMAP>(
190             region, deathChecker, moveChecker);
191     }
192 
193     template <bool USE_MARKBITMAP, bool FULL_GC>
PromoteYoungRegion(Region * region,const GCObjectVisitor & deathChecker,const ObjectVisitor & promotionChecker)194     size_t PromoteYoungRegion(Region *region, const GCObjectVisitor &deathChecker,
195                               const ObjectVisitor &promotionChecker)
196     {
197         ASSERT(region->HasFlag(RegionFlag::IS_EDEN));
198         return objectAllocator_->template PromoteYoungRegion<USE_MARKBITMAP, FULL_GC>(region, deathChecker,
199                                                                                       promotionChecker);
200     }
201 
202     void CompactTenuredRegions(const PandaVector<Region *> &regions, const GCObjectVisitor &deathChecker,
203                                const ObjectVisitorEx &moveChecker);
204 
205     template <bool USE_ATOMIC = true>
PopFromOldRegionQueue()206     Region *PopFromOldRegionQueue()
207     {
208         return objectAllocator_->template PopFromRegionQueue<USE_ATOMIC, RegionFlag::IS_OLD>();
209     }
210 
211     template <bool USE_ATOMIC = true>
PushToOldRegionQueue(Region * region)212     void PushToOldRegionQueue(Region *region)
213     {
214         objectAllocator_->template PushToRegionQueue<USE_ATOMIC, RegionFlag::IS_OLD>(region);
215     }
216 
217     template <bool USE_ATOMIC = true>
CreateAndSetUpNewOldRegion()218     Region *CreateAndSetUpNewOldRegion()
219     {
220         return objectAllocator_->template CreateAndSetUpNewRegionWithLock<USE_ATOMIC, RegionFlag::IS_OLD>();
221     }
222 
ClearCurrentTenuredRegion()223     void ClearCurrentTenuredRegion()
224     {
225         objectAllocator_->template ClearCurrentRegion<IS_OLD>();
226     }
227 
GetRegionSize()228     static constexpr size_t GetRegionSize()
229     {
230         return REGION_SIZE;
231     }
232 
HaveTenuredSize(size_t numRegions)233     bool HaveTenuredSize(size_t numRegions) const
234     {
235         return objectAllocator_->GetSpace()->GetPool()->HaveTenuredSize(numRegions * ObjectAllocator::REGION_SIZE);
236     }
237 
HaveFreeRegions(size_t numRegions)238     bool HaveFreeRegions(size_t numRegions) const
239     {
240         return objectAllocator_->GetSpace()->GetPool()->HaveFreeRegions(numRegions, ObjectAllocator::REGION_SIZE);
241     }
242 
GetYoungAllocMaxSize()243     static constexpr size_t GetYoungAllocMaxSize()
244     {
245         // NOTE(dtrubenkov): FIX to more meaningful value
246         return ObjectAllocator::GetMaxRegularObjectSize();
247     }
248 
249     template <RegionFlag REGION_TYPE, OSPagesPolicy OS_PAGES_POLICY>
ReleaseEmptyRegions()250     void ReleaseEmptyRegions()
251     {
252         objectAllocator_->ReleaseEmptyRegions<REGION_TYPE, OS_PAGES_POLICY>();
253     }
254 
SetDesiredEdenLength(size_t edenLength)255     void SetDesiredEdenLength(size_t edenLength)
256     {
257         objectAllocator_->SetDesiredEdenLength(edenLength);
258     }
259 
CalculateNonMovableExternalFragmentation()260     double CalculateNonMovableExternalFragmentation()
261     {
262         return nonmovableAllocator_->CalculateExternalFragmentation();
263     }
264 
CalculateInternalOldFragmentation()265     double CalculateInternalOldFragmentation()
266     {
267         return objectAllocator_->CalculateInternalOldFragmentation();
268     }
269 
CalculateInternalHumongousFragmentation()270     double CalculateInternalHumongousFragmentation()
271     {
272         return humongousObjectAllocator_->CalculateInternalFragmentation();
273     }
274 
CalculateOldDeadObjectsRatio()275     double CalculateOldDeadObjectsRatio()
276     {
277         return objectAllocator_->CalculateDeadObjectsRatio();
278     }
279 
CalculateNonMovableDeadObjectsRatio()280     double CalculateNonMovableDeadObjectsRatio()
281     {
282         return nonmovableAllocator_->CalculateDeadObjectsRatio();
283     }
284 
285 private:
286     Alignment CalculateAllocatorAlignment(size_t align) final;
287 
288     PandaUniquePtr<ObjectAllocator> objectAllocator_ {nullptr};
289     PandaUniquePtr<NonMovableAllocator> nonmovableAllocator_ {nullptr};
290     PandaUniquePtr<HumongousObjectAllocator> humongousObjectAllocator_ {nullptr};
291     MemStatsType *memStats_ {nullptr};
292 
293     void *AllocateTenured(size_t size) final;
294 
295     void *AllocateTenuredWithoutLocks(size_t size) final;
296 
297     friend class AllocTypeConfigG1;
298 };
299 
300 }  // namespace ark::mem
301 
302 #endif  // RUNTIME_MEM_GC_G1_G1_ALLOCATOR_H
303