/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "runtime/include/mem/allocator-inl.h"
#include "runtime/mem/gc/g1/g1-allocator.h"
#include "runtime/mem/freelist_allocator-inl.h"
#include "runtime/mem/humongous_obj_allocator-inl.h"
#include "runtime/mem/pygote_space_allocator-inl.h"
#include "runtime/mem/rem_set-inl.h"
#include "runtime/include/panda_vm.h"

namespace panda::mem {

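// Sets up the three region-based sub-allocators used by G1: a movable object allocator for
// regular-size objects, a non-movable allocator, and a humongous object allocator.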
template <MTModeT MTMode>
ObjectAllocatorG1<MTMode>::ObjectAllocatorG1(MemStatsType *mem_stats,
                                             [[maybe_unused]] bool create_pygote_space_allocator)
    : ObjectAllocatorGenBase(mem_stats, GCCollectMode::GC_ALL, false)
{
    object_allocator_ = MakePandaUnique<ObjectAllocator>(mem_stats, &heap_spaces_);
    nonmovable_allocator_ =
        MakePandaUnique<NonMovableAllocator>(mem_stats, &heap_spaces_, SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT);
    humongous_object_allocator_ =
        MakePandaUnique<HumongousObjectAllocator>(mem_stats, &heap_spaces_, SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT);
    mem_stats_ = mem_stats;
}

template <MTModeT MTMode>
size_t ObjectAllocatorG1<MTMode>::GetRegularObjectMaxSize()
{
    return ObjectAllocator::GetMaxRegularObjectSize();
}

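// Note: the "large object" limit matches the regular-object limit here; allocations above the
// young allocation limit are served by the humongous object allocator instead (see Allocate()).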
template <MTModeT MTMode>
size_t ObjectAllocatorG1<MTMode>::GetLargeObjectMaxSize()
{
    return ObjectAllocator::GetMaxRegularObjectSize();
}

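// Returns true if the address falls into any of the cached young-space memory ranges
// (filled in by UpdateSpaceData()).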
template <MTModeT MTMode>
bool ObjectAllocatorG1<MTMode>::IsAddressInYoungSpace(uintptr_t address)
{
    auto &young_mem_ranges = GetYoungSpaceMemRanges();
    for (const auto &mem_range : young_mem_ranges) {
        if (mem_range.IsAddressInRange(address)) {
            return true;
        }
    }
    return false;
}

template <MTModeT MTMode>
bool ObjectAllocatorG1<MTMode>::IsIntersectedWithYoung(const MemRange &mem_range)
{
    auto young_mem_ranges = GetYoungSpaceMemRanges();
    for (const auto &young_mem_range : young_mem_ranges) {
        if (young_mem_range.IsIntersect(mem_range)) {
            return true;
        }
    }
    return false;
}

template <MTModeT MTMode>
bool ObjectAllocatorG1<MTMode>::HasYoungSpace()
{
    return true;
}

template <MTModeT MTMode>
const std::vector<MemRange> &ObjectAllocatorG1<MTMode>::GetYoungSpaceMemRanges()
{
    return GetYoungRanges();
}

template <MTModeT MTMode>
std::vector<MarkBitmap *> &ObjectAllocatorG1<MTMode>::GetYoungSpaceBitmaps()
{
    return GetYoungBitmaps();
}

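// Creates a new thread-local allocation buffer of the default TLAB_SIZE in the movable
// (young) object allocator.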
template <MTModeT MTMode>
TLAB *ObjectAllocatorG1<MTMode>::CreateNewTLAB(panda::ManagedThread *thread)
{
    // TODO(dtrubenkov): fix this
    return object_allocator_->CreateNewTLAB(thread, TLAB_SIZE);
}

template <MTModeT MTMode>
size_t ObjectAllocatorG1<MTMode>::GetTLABMaxAllocSize()
{
    return PANDA_TLAB_MAX_ALLOC_SIZE;
}

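// Dispatches the range iteration to the allocator that owns the range, based on the space type
// of its start address. The range comes from a card, so it is expected to lie within a single
// allocator's space.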
template <MTModeT MTMode>
void ObjectAllocatorG1<MTMode>::IterateOverObjectsInRange(MemRange mem_range, const ObjectVisitor &object_visitor)
{
    // We need to ensure that the memory range related to a card is located within a single allocator
    auto space_type = PoolManager::GetMmapMemPool()->GetSpaceTypeForAddr(ToVoidPtr(mem_range.GetStartAddress()));
    switch (space_type) {
        case SpaceType::SPACE_TYPE_OBJECT:
            object_allocator_->IterateOverObjectsInRange(object_visitor, ToVoidPtr(mem_range.GetStartAddress()),
                                                         ToVoidPtr(mem_range.GetEndAddress()));
            break;
        case SpaceType::SPACE_TYPE_NON_MOVABLE_OBJECT: {
            if (pygote_space_allocator_ != nullptr) {
                pygote_space_allocator_->IterateOverObjectsInRange(
                    object_visitor, ToVoidPtr(mem_range.GetStartAddress()), ToVoidPtr(mem_range.GetEndAddress()));
            }
            auto region = AddrToRegion(ToVoidPtr(mem_range.GetStartAddress()));
            region->GetLiveBitmap()->IterateOverMarkedChunkInRange(
                ToVoidPtr(mem_range.GetStartAddress()), ToVoidPtr(mem_range.GetEndAddress()),
                [&object_visitor](void *mem) { object_visitor(reinterpret_cast<ObjectHeader *>(mem)); });
            break;
        }
        case SpaceType::SPACE_TYPE_HUMONGOUS_OBJECT:
            humongous_object_allocator_->IterateOverObjectsInRange(
                object_visitor, ToVoidPtr(mem_range.GetStartAddress()), ToVoidPtr(mem_range.GetEndAddress()));
            break;
        default:
            // If we reach this line, we may have an issue with multi-VM CardTable iteration
            UNREACHABLE();
            break;
    }
}

// Maybe ObjectAllocatorGen and ObjectAllocatorNoGen should have an inheritance relationship
template <MTModeT MTMode>
bool ObjectAllocatorG1<MTMode>::ContainObject(const ObjectHeader *obj) const
{
    if (pygote_space_allocator_ != nullptr && pygote_space_allocator_->ContainObject(obj)) {
        return true;
    }
    if (object_allocator_->ContainObject(obj)) {
        return true;
    }
    if (nonmovable_allocator_->ContainObject(obj)) {
        return true;
    }
    if (humongous_object_allocator_->ContainObject(obj)) {
        return true;
    }

    return false;
}

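// An object is considered live if the sub-allocator that owns it reports it as live; objects
// not owned by any sub-allocator are treated as dead.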
template <MTModeT MTMode>
bool ObjectAllocatorG1<MTMode>::IsLive(const ObjectHeader *obj)
{
    if (pygote_space_allocator_ != nullptr && pygote_space_allocator_->ContainObject(obj)) {
        return pygote_space_allocator_->IsLive(obj);
    }
    if (object_allocator_->ContainObject(obj)) {
        return object_allocator_->IsLive(obj);
    }
    if (nonmovable_allocator_->ContainObject(obj)) {
        return nonmovable_allocator_->IsLive(obj);
    }
    if (humongous_object_allocator_->ContainObject(obj)) {
        return humongous_object_allocator_->IsLive(obj);
    }
    return false;
}

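// Regular-size allocations go to the movable (young) allocator; anything above the young
// allocation limit is allocated as a humongous object with the default alignment.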
template <MTModeT MTMode>
void *ObjectAllocatorG1<MTMode>::Allocate(size_t size, Alignment align, [[maybe_unused]] panda::ManagedThread *thread)
{
    void *mem = nullptr;
    size_t aligned_size = AlignUp(size, GetAlignmentInBytes(align));
    if (LIKELY(aligned_size <= GetYoungAllocMaxSize())) {
        mem = object_allocator_->Alloc(size, align);
    } else {
        mem = humongous_object_allocator_->Alloc(size, DEFAULT_ALIGNMENT);
    }
    return mem;
}

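// Small non-movable objects can be served from the pygote space before the pygote fork;
// otherwise they go to the non-movable allocator, and oversized requests to the humongous one.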
template <MTModeT MTMode>
void *ObjectAllocatorG1<MTMode>::AllocateNonMovable(size_t size, Alignment align,
                                                    [[maybe_unused]] panda::ManagedThread *thread)
{
    // Before the pygote fork, allocate small non-movable objects in the pygote space
    if (UNLIKELY(IsPygoteAllocEnabled() && pygote_space_allocator_->CanAllocNonMovable(size, align))) {
        return pygote_space_allocator_->Alloc(size, align);
    }
    void *mem = nullptr;
    size_t aligned_size = AlignUp(size, GetAlignmentInBytes(align));
    if (aligned_size <= ObjectAllocator::GetMaxRegularObjectSize()) {
        // TODO(dtrubenkov): check if we don't need to handle OOM
        mem = nonmovable_allocator_->Alloc(aligned_size, align);
    } else {
        // Humongous objects are non-movable
        mem = humongous_object_allocator_->Alloc(aligned_size, align);
    }
    return mem;
}

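// Direct tenured allocation is not supported by this allocator: tenured regions are populated
// by compacting or promoting young regions, so these entry points are never expected to be called.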
template <MTModeT MTMode>
void *ObjectAllocatorG1<MTMode>::AllocateTenured([[maybe_unused]] size_t size)
{
    UNREACHABLE();
    return nullptr;
}

template <MTModeT MTMode>
void *ObjectAllocatorG1<MTMode>::AllocateTenuredWithoutLocks([[maybe_unused]] size_t size)
{
    UNREACHABLE();
    return nullptr;
}

template <MTModeT MTMode>
void ObjectAllocatorG1<MTMode>::VisitAndRemoveAllPools(const MemVisitor &mem_visitor)
{
    if (pygote_space_allocator_ != nullptr) {
        pygote_space_allocator_->VisitAndRemoveAllPools(mem_visitor);
    }
    object_allocator_->VisitAndRemoveAllPools(mem_visitor);
    nonmovable_allocator_->VisitAndRemoveAllPools(mem_visitor);
    humongous_object_allocator_->VisitAndRemoveAllPools(mem_visitor);
}

template <MTModeT MTMode>
void ObjectAllocatorG1<MTMode>::VisitAndRemoveFreePools(const MemVisitor &mem_visitor)
{
    if (pygote_space_allocator_ != nullptr) {
        pygote_space_allocator_->VisitAndRemoveFreePools(mem_visitor);
    }
}

template <MTModeT MTMode>
void ObjectAllocatorG1<MTMode>::IterateOverYoungObjects(const ObjectVisitor &object_visitor)
{
    auto young_regions = object_allocator_->template GetAllSpecificRegions<RegionFlag::IS_EDEN>();
    for (auto r : young_regions) {
        r->template IterateOverObjects(object_visitor);
    }
}

template <MTModeT MTMode>
PandaVector<Region *> ObjectAllocatorG1<MTMode>::GetYoungRegions()
{
    return object_allocator_->template GetAllSpecificRegions<RegionFlag::IS_EDEN>();
}

template <MTModeT MTMode>
PandaVector<Region *> ObjectAllocatorG1<MTMode>::GetMovableRegions()
{
    return object_allocator_->GetAllRegions();
}

template <MTModeT MTMode>
PandaVector<Region *> ObjectAllocatorG1<MTMode>::GetAllRegions()
{
    PandaVector<Region *> regions = object_allocator_->GetAllRegions();
    PandaVector<Region *> non_movable_regions = nonmovable_allocator_->GetAllRegions();
    PandaVector<Region *> humongous_regions = humongous_object_allocator_->GetAllRegions();
    regions.insert(regions.end(), non_movable_regions.begin(), non_movable_regions.end());
    regions.insert(regions.end(), humongous_regions.begin(), humongous_regions.end());
    return regions;
}

template <MTModeT MTMode>
PandaVector<Region *> ObjectAllocatorG1<MTMode>::GetNonRegularRegions()
{
    PandaVector<Region *> regions = nonmovable_allocator_->GetAllRegions();
    PandaVector<Region *> humongous_regions = humongous_object_allocator_->GetAllRegions();
    regions.insert(regions.end(), humongous_regions.begin(), humongous_regions.end());
    return regions;
}

template <MTModeT MTMode>
void ObjectAllocatorG1<MTMode>::CollectNonRegularRegions(const RegionsVisitor &region_visitor,
                                                         const GCObjectVisitor &gc_object_visitor)
{
    nonmovable_allocator_->Collect(gc_object_visitor);
    nonmovable_allocator_->VisitAndRemoveFreeRegions(region_visitor);
    humongous_object_allocator_->CollectAndRemoveFreeRegions(region_visitor, gc_object_visitor);
}

template <MTModeT MTMode>
void ObjectAllocatorG1<MTMode>::IterateOverTenuredObjects(const ObjectVisitor &object_visitor)
{
    if (pygote_space_allocator_ != nullptr) {
        pygote_space_allocator_->IterateOverObjects(object_visitor);
    }
    object_allocator_->IterateOverObjects(object_visitor);
    nonmovable_allocator_->IterateOverObjects(object_visitor);
    IterateOverHumongousObjects(object_visitor);
}

template <MTModeT MTMode>
void ObjectAllocatorG1<MTMode>::IterateOverHumongousObjects(const ObjectVisitor &object_visitor)
{
    humongous_object_allocator_->IterateOverObjects(object_visitor);
}

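// Visits all objects in a region: walks the live bitmap when the region has one, otherwise
// falls back to a full object walk over the region.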
static inline void IterateOverObjectsInRegion(Region *region, const ObjectVisitor &object_visitor)
{
    if (region->GetLiveBitmap() != nullptr) {
        region->GetLiveBitmap()->IterateOverMarkedChunks(
            [&object_visitor](void *mem) { object_visitor(static_cast<ObjectHeader *>(mem)); });
    } else {
        region->IterateOverObjects(object_visitor);
    }
}

template <MTModeT MTMode>
void ObjectAllocatorG1<MTMode>::IterateOverObjects(const ObjectVisitor &object_visitor)
{
    if (pygote_space_allocator_ != nullptr) {
        pygote_space_allocator_->IterateOverObjects(object_visitor);
    }
    for (Region *region : object_allocator_->GetAllRegions()) {
        IterateOverObjectsInRegion(region, object_visitor);
    }
    for (Region *region : nonmovable_allocator_->GetAllRegions()) {
        IterateOverObjectsInRegion(region, object_visitor);
    }
    for (Region *region : humongous_object_allocator_->GetAllRegions()) {
        IterateOverObjectsInRegion(region, object_visitor);
    }
}

template <MTModeT MTMode>
void ObjectAllocatorG1<MTMode>::IterateRegularSizeObjects(const ObjectVisitor &object_visitor)
{
    object_allocator_->IterateOverObjects(object_visitor);
    nonmovable_allocator_->IterateOverObjects(object_visitor);
}

template <MTModeT MTMode>
void ObjectAllocatorG1<MTMode>::IterateNonRegularSizeObjects(const ObjectVisitor &object_visitor)
{
    if (pygote_space_allocator_ != nullptr) {
        pygote_space_allocator_->IterateOverObjects(object_visitor);
    }
    humongous_object_allocator_->IterateOverObjects(object_visitor);
}

template <MTModeT MTMode>
void ObjectAllocatorG1<MTMode>::FreeObjectsMovedToPygoteSpace()
{
    // Clear because we have moved all objects in it to the pygote space
    // TODO(dtrubenkov): FIX clean object_allocator_
    object_allocator_.reset(new (std::nothrow) ObjectAllocator(mem_stats_, &heap_spaces_));
}

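// Records pending TLAB allocations into the memory stats (unless TLAB allocations are already
// tracked), clears every thread's TLAB, and then resets all eden regions.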
template <MTModeT MTMode>
void ObjectAllocatorG1<MTMode>::ResetYoungAllocator()
{
    MemStatsType *mem_stats = mem_stats_;
    auto callback = [&mem_stats](ManagedThread *thread) {
        if (!PANDA_TRACK_TLAB_ALLOCATIONS && (thread->GetTLAB()->GetOccupiedSize() != 0)) {
            mem_stats->RecordAllocateObject(thread->GetTLAB()->GetOccupiedSize(), SpaceType::SPACE_TYPE_OBJECT);
        }
        thread->ClearTLAB();
        return true;
    };
    // NOLINTNEXTLINE(readability-braces-around-statements)
    if constexpr (MTMode == MT_MODE_MULTI) {
        Thread::GetCurrent()->GetVM()->GetThreadManager()->EnumerateThreads(callback);
    } else if (MTMode == MT_MODE_SINGLE) {  // NOLINT(readability-misleading-indentation)
        callback(Thread::GetCurrent()->GetVM()->GetAssociatedThread());
    } else {
        UNREACHABLE();
    }
    object_allocator_->ResetAllSpecificRegions<RegionFlag::IS_EDEN>();
}

template <MTModeT MTMode>
bool ObjectAllocatorG1<MTMode>::IsObjectInNonMovableSpace(const ObjectHeader *obj)
{
    return nonmovable_allocator_->ContainObject(obj);
}

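// Refreshes the cached young-space data: collects the memory range and mark bitmap of every
// eden region. The caches are expected to be empty before the update.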
template <MTModeT MTMode>
void ObjectAllocatorG1<MTMode>::UpdateSpaceData()
{
    ASSERT(GetYoungRanges().empty());
    ASSERT(GetYoungBitmaps().empty());
    for (auto r : object_allocator_->template GetAllSpecificRegions<RegionFlag::IS_EDEN>()) {
        GetYoungRanges().emplace_back(r->Begin(), r->End());
        GetYoungBitmaps().push_back(r->GetMarkBitmap());
    }
}

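// Evacuates live objects from all eden regions into tenured (old) regions, using the death
// checker to decide liveness and notifying the move checker for each relocated object.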
template <MTModeT MTMode>
void ObjectAllocatorG1<MTMode>::CompactYoungRegions(const GCObjectVisitor &death_checker,
                                                    const ObjectVisitorEx &move_checker)
{
    object_allocator_->template CompactAllSpecificRegions<RegionFlag::IS_EDEN, RegionFlag::IS_OLD>(death_checker,
                                                                                                   move_checker);
}

template <MTModeT MTMode>
void ObjectAllocatorG1<MTMode>::PromoteYoungRegion(Region *region, const GCObjectVisitor &death_checker,
                                                   const ObjectVisitor &promotion_checker)
{
    ASSERT(region->HasFlag(RegionFlag::IS_EDEN));
    object_allocator_->template PromoteYoungRegion(region, death_checker, promotion_checker);
}

template <MTModeT MTMode>
void ObjectAllocatorG1<MTMode>::CompactTenuredRegions(const PandaVector<Region *> &regions,
                                                      const GCObjectVisitor &death_checker,
                                                      const ObjectVisitorEx &move_checker)
{
    object_allocator_->template CompactSeveralSpecificRegions<RegionFlag::IS_OLD, RegionFlag::IS_OLD>(
        regions, death_checker, move_checker);
}

template <MTModeT MTMode>
void ObjectAllocatorG1<MTMode>::ClearCurrentRegion()
{
    object_allocator_->template ClearCurrentRegion<RegionFlag::IS_OLD>();
}

template class ObjectAllocatorG1<MT_MODE_SINGLE>;
template class ObjectAllocatorG1<MT_MODE_MULTI>;
template class ObjectAllocatorG1<MT_MODE_TASK>;

}  // namespace panda::mem