• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /**
2  * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  * http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
#include "runtime/mem/gc/gen-gc/gen-gc.h"
#include "runtime/mem/gc/gc_root-inl.h"
#include "runtime/mem/object_helpers-inl.h"
#include "runtime/include/panda_vm.h"
#include "runtime/mem/gc/card_table-inl.h"
#include "runtime/timing.h"
#include "runtime/mem/pygote_space_allocator-inl.h"
#include "runtime/mem/gc/static/gc_marker_static-inl.h"
#include "runtime/mem/gc/dynamic/gc_marker_dynamic-inl.h"
#include "runtime/mem/gc/generational-gc-base-inl.h"

#include <algorithm>
26 
27 namespace panda::mem {
28 
/// Constructs the generational collector over the given allocator.
/// Registers itself as GEN_GC and enables TLAB support; caches the
/// explicit-concurrent-GC setting from the supplied GCSettings.
template <class LanguageConfig>
GenGC<LanguageConfig>::GenGC(ObjectAllocatorBase *objectAllocator, const GCSettings &settings)
    : GenerationalGC<LanguageConfig>(objectAllocator, settings),
      marker_(this),
      isExplicitConcurrentGcEnabled_(settings.IsExplicitConcurrentGcEnabled())
{
    this->SetType(GCType::GEN_GC);
    // Young-generation allocations may be served from thread-local buffers.
    this->SetTLABsSupported();
}
38 
/// One-time initialization: creates the card table covering the whole object
/// address space and installs the generational write-barrier set that dirties
/// cards on tenured -> young stores.
template <class LanguageConfig>
void GenGC<LanguageConfig>::InitializeImpl()
{
    // GC saved the PandaVM instance, so we get allocator from the PandaVM.
    InternalAllocatorPtr allocator = this->GetInternalAllocator();
    // The card table must span [MinObjectAddress, MinObjectAddress + TotalObjectSize).
    this->CreateCardTable(allocator, PoolManager::GetMmapMemPool()->GetMinObjectAddress(),
                          PoolManager::GetMmapMemPool()->GetTotalObjectSize());
    // The barrier set references the card table, so it is created afterwards.
    auto barrierSet = allocator->New<GCGenBarrierSet>(allocator, this->GetCardTable(), CardTable::GetCardBits(),
                                                      CardTable::GetCardDirtyValue());
    ASSERT(barrierSet != nullptr);
    this->SetGCBarrierSet(barrierSet);
    LOG_DEBUG_GC << "GenGC initialized";
}
52 
53 template <class LanguageConfig>
ShouldRunTenuredGC(const GCTask & task)54 bool GenGC<LanguageConfig>::ShouldRunTenuredGC(const GCTask &task)
55 {
56     return task.reason == GCTaskCause::HEAP_USAGE_THRESHOLD_CAUSE;
57 }
58 
59 template <class LanguageConfig>
ShouldRunFullGC(const GCTask & task,bool haveEnoughSpaceForYoung) const60 bool GenGC<LanguageConfig>::ShouldRunFullGC(const GCTask &task, bool haveEnoughSpaceForYoung) const
61 {
62     return !haveEnoughSpaceForYoung || task.reason == GCTaskCause::OOM_CAUSE || this->IsExplicitFull(task) ||
63            this->IsOnPygoteFork() || task.reason == GCTaskCause::STARTUP_COMPLETE_CAUSE;
64 }
65 
/// Top-level collection driver. Chooses between a full collection and a young
/// collection (optionally followed by a tenured one), and records heap
/// footprint and timing statistics around the chosen phases.
template <class LanguageConfig>
void GenGC<LanguageConfig>::RunPhasesImpl(GCTask &task)
{
    LOG(DEBUG, GC) << "GenGC start";
    uint64_t footprintBefore = this->GetPandaVm()->GetMemStats()->GetFootprintHeap();
    LOG_DEBUG_GC << "Footprint before GC: " << footprintBefore;
    if (this->IsLogDetailedGcInfoEnabled()) {
        this->footprintList_.clear();
        this->footprintList_.push_back({"Footprint before GC", footprintBefore});
    }
    uint64_t youngTotalTime = 0;
    {
        ScopedTiming t("Generational GC", *this->GetTiming());
        this->memStats_.Reset();
        // We trigger a full gc at first pygote fork
        if (ShouldRunFullGC(task, HaveEnoughSpaceToMove())) {
            GCScopedPauseStats scopedPauseStats(this->GetPandaVm()->GetGCStats());
            marker_.BindBitmaps(true);  // clear pygote live bitmaps, we will rebuild it
            // Allocator space data must be refreshed after rebinding bitmaps.
            this->GetObjectGenAllocator()->InvalidateSpaceData();
            this->GetObjectGenAllocator()->UpdateSpaceData();
            RunFullGC(task);
        } else {
            {
                // Young GC runs entirely inside this pause scope.
                GCScopedPauseStats scopedPauseStats(this->GetPandaVm()->GetGCStats());
                time::Timer timer(&youngTotalTime, true);
                marker_.BindBitmaps(false);
                this->GetObjectGenAllocator()->InvalidateSpaceData();
                this->GetObjectGenAllocator()->UpdateSpaceData();
                LOG_DEBUG_GC << "Young range: " << this->GetObjectAllocator()->GetYoungSpaceMemRanges().at(0);
                RunYoungGC(task);
                if (youngTotalTime > 0) {
                    this->GetStats()->AddTimeValue(youngTotalTime, TimeTypeStats::YOUNG_TOTAL_TIME);
                }
                uint64_t footprintYoung = this->GetPandaVm()->GetMemStats()->GetFootprintHeap();
                LOG_DEBUG_GC << "Footprint after young: " << footprintYoung;
                if (this->IsLogDetailedGcInfoEnabled()) {
                    this->footprintList_.push_back({"Footprint after young", footprintYoung});
                }
            }
            // Tenured GC (with a concurrent mark phase) runs outside the young
            // pause scope above.
            if (ShouldRunTenuredGC(task)) {
                marker_.BindBitmaps(true);  // clear pygote live bitmaps, we will rebuild it
                this->GetObjectGenAllocator()->InvalidateSpaceData();
                this->GetObjectGenAllocator()->UpdateSpaceData();
                RunTenuredGC(task);
            }
        }
    }
    uint64_t footprintAfter = this->GetPandaVm()->GetMemStats()->GetFootprintHeap();
    LOG_DEBUG_GC << "Footprint after GC: " << footprintAfter;
    if (this->IsLogDetailedGcInfoEnabled()) {
        this->footprintList_.push_back({"Footprint after GC", footprintAfter});
    }
}
119 
/// Pre-startup hook: disables tenured collections until the VM finishes
/// starting up (re-enabled elsewhere; see the base class).
template <class LanguageConfig>
void GenGC<LanguageConfig>::PreStartupImp()
{
    GenerationalGC<LanguageConfig>::DisableTenuredGC();
}
125 
/// Initializes the GC mark bit of a freshly allocated object.
/// While a sweep is in progress, a new non-young object is pre-marked so the
/// concurrent sweeper does not reclaim it; otherwise it starts unmarked.
template <class LanguageConfig>
void GenGC<LanguageConfig>::InitGCBits(panda::ObjectHeader *objHeader)
{
    if (UNLIKELY(this->GetGCPhase() == GCPhase::GC_PHASE_SWEEP) &&
        (!this->GetObjectAllocator()->IsObjectInYoungSpace(objHeader))) {
        objHeader->SetMarkedForGC();
        // The phase may change concurrently between the check above and the mark:
        // do unmark if out of sweep phase otherwise we may miss it in sweep
        if (UNLIKELY(this->GetGCPhase() != GCPhase::GC_PHASE_SWEEP)) {
            objHeader->SetUnMarkedForGC();
        }
    } else {
        objHeader->SetUnMarkedForGC();
    }
    LOG_DEBUG_GC << "Init gc bits for object: " << std::hex << objHeader << " bit: " << objHeader->IsMarkedForGC()
                 << ", is marked = " << IsMarked(objHeader);
}
142 
/// Initializes GC bits for an object allocated from a TLAB.
template <class LanguageConfig>
void GenGC<LanguageConfig>::InitGCBitsForAllocationInTLAB(panda::ObjectHeader *objHeader)
{
    // Compiler will allocate objects in TLABs only in young space
    // Therefore, set unmarked for GC here.
    objHeader->SetUnMarkedForGC();
}
150 
/// Young-generation collection: marks young-reachable objects, evacuates
/// survivors to the tenured space, and clears the card table. Records the
/// paused time and tags the task as a YOUNG collection.
template <class LanguageConfig>
void GenGC<LanguageConfig>::RunYoungGC(GCTask &task)
{
    GCScope<TRACE_TIMING> scopedTrace(__FUNCTION__, this);
    LOG_DEBUG_GC << "GenGC RunYoungGC start";
    uint64_t youngPauseTime;
    {
        // NOTE(review): marking is non-atomic here — this relies on mutators
        // being paused during young GC (see the pause scope in RunPhasesImpl).
        NoAtomicGCMarkerScope scope(&this->marker_);
        time::Timer timer(&youngPauseTime, true);
        // NOLINTNEXTLINE(performance-unnecessary-value-param)
        MarkYoung(task);
        CollectYoungAndMove();
        // Tenured -> young references were just processed and young space was
        // evacuated, so all cards can be dropped.
        this->GetCardTable()->ClearAll();
    }
    if (youngPauseTime > 0) {
        this->GetStats()->AddTimeValue(youngPauseTime, TimeTypeStats::YOUNG_PAUSED_TIME);
    }
    LOG_DEBUG_GC << "GenGC RunYoungGC end";
    task.collectionType = GCCollectionType::YOUNG;
}
171 
/// Marks everything reachable from the young space: first from regular VM
/// roots (non-young roots are skipped), then from tenured -> young references
/// found via dirty cards, and finally lets the VM process discovered
/// soft/weak references.
template <class LanguageConfig>
void GenGC<LanguageConfig>::MarkYoung(const GCTask &task)
{
    GCScope<TRACE_TIMING_PHASE> scope(__FUNCTION__, this, GCPhase::GC_PHASE_MARK_YOUNG);

    // Iterate over roots and add other roots
    GCMarkingStackType objectsStack(this);
    ASSERT(this->GetObjectAllocator()->GetYoungSpaceMemRanges().size() == 1);
    auto youngMr = this->GetObjectAllocator()->GetYoungSpaceMemRanges().at(0);
    GCRootVisitor gcMarkYoung = [&objectsStack, &youngMr, this](const GCRoot &gcRoot) {
        // Skip non-young roots
        auto rootObjectPtr = gcRoot.GetObjectHeader();
        ASSERT(rootObjectPtr != nullptr);
        if (!youngMr.IsAddressInRange(ToUintPtr(rootObjectPtr))) {
            LOG_DEBUG_GC << "Skip root for young mark: " << std::hex << rootObjectPtr;
            return;
        }
        LOG(DEBUG, GC) << "root " << GetDebugInfoAboutObject(rootObjectPtr);
        if (this->MarkObjectIfNotMarked(rootObjectPtr)) {
            objectsStack.PushToStack(gcRoot.GetType(), rootObjectPtr);
            // Drain the stack transitively right away so it stays small.
            this->MarkYoungStack(&objectsStack);
        }
    };
    {
        GCScope<TRACE_TIMING> markingYoungRootsTrace("Marking roots young", this);
        this->VisitRoots(gcMarkYoung,
                         VisitGCRootFlags::ACCESS_ROOT_NONE | VisitGCRootFlags::ACCESS_ROOT_AOT_STRINGS_ONLY_YOUNG);
    }
    {
        ScopedTiming visitCardTableRootsTiming("VisitCardTableRoots", *this->GetTiming());
        LOG_DEBUG_GC << "START Marking tenured -> young roots";
        // Only visit card ranges outside the young space...
        MemRangeChecker tenuredRangeChecker = [&youngMr](MemRange &memRange) -> bool {
            return !youngMr.IsIntersect(memRange);
        };
        // ...and from those cards only references that point into young space.
        ObjectChecker tenuredRangeYoungObjectChecker = [&youngMr](const ObjectHeader *objectHeader) -> bool {
            return youngMr.IsAddressInRange(ToUintPtr(objectHeader));
        };

        // Any holder object found in a dirty card is an acceptable source.
        ObjectChecker fromObjectChecker = []([[maybe_unused]] const ObjectHeader *objectHeader) -> bool {
            return true;
        };

        this->VisitCardTableRoots(this->GetCardTable(), gcMarkYoung, tenuredRangeChecker,
                                  tenuredRangeYoungObjectChecker, fromObjectChecker,
                                  CardTableProcessedFlag::VISIT_MARKED | CardTableProcessedFlag::VISIT_PROCESSED);
    }
    // reference-processor in VisitCardTableRoots can add new objects to stack
    this->MarkYoungStack(&objectsStack);
    LOG_DEBUG_GC << "END Marking tenured -> young roots";
    // Clear references whose referents fall inside the current GC sweep range.
    auto refClearPred = [this]([[maybe_unused]] const ObjectHeader *obj) { return this->InGCSweepRange(obj); };
    this->GetPandaVm()->HandleReferences(task, refClearPred);
}
224 
225 template <class LanguageConfig>
MarkYoungStack(GCMarkingStackType * stack)226 void GenGC<LanguageConfig>::MarkYoungStack(GCMarkingStackType *stack)
227 {
228     trace::ScopedTrace scopedTrace(__FUNCTION__);
229     ASSERT(stack != nullptr);
230     auto allocator = this->GetObjectAllocator();
231     auto &youngRanges = allocator->GetYoungSpaceMemRanges();
232     auto refPred = [this](const ObjectHeader *obj) { return this->InGCSweepRange(obj); };
233     while (!stack->Empty()) {
234         auto *object = this->PopObjectFromStack(stack);
235         ValidateObject(nullptr, object);
236         auto *cls = object->template ClassAddr<BaseClass>();
237         LOG_DEBUG_GC << "current object " << GetDebugInfoAboutObject(object);
238 
239         bool inRange = false;
240         for (const auto &r : youngRanges) {
241             if (r.IsAddressInRange(ToUintPtr(object))) {
242                 inRange = true;
243                 break;
244             }
245         }
246         if (inRange) {
247             marker_.MarkInstance(stack, object, cls, refPred);
248         }
249     }
250 }
251 
252 template <class LanguageConfig>
CollectVerificationInfo(const MemRange & youngMemRange)253 HeapVerifierIntoGC<LanguageConfig> GenGC<LanguageConfig>::CollectVerificationInfo(const MemRange &youngMemRange)
254 {
255     HeapVerifierIntoGC<LanguageConfig> youngVerifier(this->GetPandaVm()->GetHeapManager());
256     if (this->GetSettings()->IntoGCHeapVerification() && !this->IsFullGC()) {
257         ScopedTiming collectVerificationTiming("CollectVerificationInfo", *this->GetTiming());
258         youngVerifier.CollectVerificationInfo(PandaVector<MemRange>(1U, youngMemRange));
259     }
260     return youngVerifier;
261 }
262 
263 template <class LanguageConfig>
VerifyCollectAndMove(HeapVerifierIntoGC<LanguageConfig> && youngVerifier)264 void GenGC<LanguageConfig>::VerifyCollectAndMove(HeapVerifierIntoGC<LanguageConfig> &&youngVerifier)
265 {
266     if (this->GetSettings()->IntoGCHeapVerification() && !this->IsFullGC()) {
267         ScopedTiming verificationTiming("Verification", *this->GetTiming());
268         size_t failsCount = youngVerifier.VerifyAll();
269         if (this->GetSettings()->FailOnHeapVerification() && failsCount > 0) {
270             LOG(FATAL, GC) << "Heap was corrupted during GC, HeapVerifier found " << failsCount << " corruptions";
271         }
272     }
273 }
274 
/// Evacuates the young space: every marked (live) young object is copied into
/// the tenured space and a forwarding address is installed; unmarked objects
/// are reclaimed when the young allocator is reset. References to the moved
/// objects are then fixed up and per-cycle memory statistics recorded.
// NOLINTNEXTLINE(readability-function-size)
template <class LanguageConfig>
void GenGC<LanguageConfig>::CollectYoungAndMove()
{
    GCScope<TRACE_TIMING_PHASE> scope(__FUNCTION__, this, GCPhase::GC_PHASE_COLLECT_YOUNG_AND_MOVE);
    LOG_DEBUG_GC << "== GenGC CollectYoungAndMove start ==";
    // NOTE(dtrubenkov): add assert that we in STW
    PandaVector<ObjectHeader *> movedObjects;
    size_t prevMovedSize = this->GetPandaVm()->GetMemStats()->GetLastYoungObjectsMovedBytes();
    constexpr size_t MINIMAL_PREALLOC_MOVE_OBJ = 32U;
    // Adaptive preallocate buffer for moved_objects to avoid useless reallocations
    movedObjects.reserve(std::max(MINIMAL_PREALLOC_MOVE_OBJ, prevMovedSize / GetMinimalObjectSize()));
    size_t youngMoveSize = 0;
    size_t youngMoveCount = 0;
    size_t youngDeleteSize = 0;
    size_t youngDeleteCount = 0;
    size_t bytesInHeapBeforeMove = this->GetPandaVm()->GetMemStats()->GetFootprintHeap();

    auto *objectAllocator = this->GetObjectGenAllocator();
    ASSERT(this->GetObjectAllocator()->GetYoungSpaceMemRanges().size() == 1);
    auto youngMemRange = this->GetObjectAllocator()->GetYoungSpaceMemRanges().at(0);
    HeapVerifierIntoGC<LanguageConfig> youngVerifier = CollectVerificationInfo(youngMemRange);

    // Visitor: copy live (marked) objects to tenured space, count dead ones.
    std::function<void(ObjectHeader * objectHeader)> moveVisitor(
        [this, &objectAllocator, &movedObjects, &youngMoveSize, &youngMoveCount, &youngDeleteSize,
         &youngDeleteCount](ObjectHeader *objectHeader) -> void {
            size_t size = GetObjectSize(objectHeader);
            ASSERT(size <= ObjectAllocatorGen<>::GetYoungAllocMaxSize());
            // Use aligned size here, because we need to proceed MemStats correctly.
            size_t alignedSize = GetAlignedObjectSize(size);
            if (objectHeader->IsMarkedForGC<false>()) {
                auto dst = reinterpret_cast<ObjectHeader *>(objectAllocator->AllocateTenuredWithoutLocks(size));
                ASSERT(dst != nullptr);
                memcpy_s(dst, size, objectHeader, size);
                youngMoveSize += alignedSize;
                youngMoveCount++;
                LOG_DEBUG_OBJECT_EVENTS << "MOVE object " << objectHeader << " -> " << dst << ", size = " << size;
                movedObjects.push_back(dst);
                // set unmarked dst
                UnMarkObject(dst);
                // Old location now forwards to dst so references can be fixed up.
                this->SetForwardAddress(objectHeader, dst);
            } else {
                LOG_DEBUG_OBJECT_EVENTS << "DELETE OBJECT young: " << objectHeader;
                ++youngDeleteCount;
                youngDeleteSize += alignedSize;
            }
            // We will record all object in MemStats as SPACE_TYPE_OBJECT, so check it
            ASSERT(PoolManager::GetMmapMemPool()->GetSpaceTypeForAddr(objectHeader) == SpaceType::SPACE_TYPE_OBJECT);
        });
    {
        ScopedTiming moveTiming("MoveAndSweep", *this->GetTiming());
        objectAllocator->IterateOverYoungObjects(moveVisitor);
    }
    this->memStats_.RecordSizeMovedYoung(youngMoveSize);
    this->memStats_.RecordCountMovedYoung(youngMoveCount);
    this->memStats_.RecordSizeFreedYoung(youngDeleteSize);
    this->memStats_.RecordCountFreedYoung(youngDeleteCount);
    UpdateRefsToMovedObjects(&movedObjects);
    this->VerifyCollectAndMove(std::move(youngVerifier));
    SweepYoungVmRefs();
    // Remove young
    objectAllocator->ResetYoungAllocator();

    this->UpdateMemStats(bytesInHeapBeforeMove, false);

    LOG_DEBUG_GC << "== GenGC CollectYoungAndMove end ==";
}
342 
/// Fixes up references after young-space evacuation: first inside the moved
/// objects themselves (ex-young -> young), then inside tenured objects found
/// via marked cards (tenured -> young), and finally the common root/VM
/// reference fix-up.
template <class LanguageConfig>
void GenGC<LanguageConfig>::UpdateRefsToMovedObjects(PandaVector<ObjectHeader *> *movedObjects)
{
    GCScope<TRACE_TIMING> scope("UpdateRefsToMovedObjects", this);

    auto objAllocator = this->GetObjectAllocator();
    // Update references exyoung -> young
    LOG_DEBUG_GC << "process moved objects cnt = " << std::dec << movedObjects->size();
    LOG_DEBUG_GC << "=== Update exyoung -> young references. START. ===";
    for (auto obj : *movedObjects) {
        ObjectHelpers<LanguageConfig::LANG_TYPE>::UpdateRefsToMovedObjects(obj);
    }

    LOG_DEBUG_GC << "=== Update exyoung -> young references. END. ===";
    // update references tenured -> young
    LOG_DEBUG_GC << "=== Update tenured -> young references. START. ===";
    auto youngSpace = objAllocator->GetYoungSpaceMemRanges().at(0);
    auto updateRefsInObject(
        [](ObjectHeader *obj) { ObjectHelpers<LanguageConfig::LANG_TYPE>::UpdateRefsToMovedObjects(obj); });
    // Only non-young ranges need the card-table walk; the young copies were
    // already processed in the loop above.
    this->GetCardTable()->VisitMarked(
        [&updateRefsInObject, &objAllocator, &youngSpace](const MemRange &memRange) {
            if (!youngSpace.Contains(memRange)) {
                objAllocator->IterateOverObjectsInRange(memRange, updateRefsInObject);
            }
        },
        CardTableProcessedFlag::VISIT_MARKED | CardTableProcessedFlag::VISIT_PROCESSED);
    LOG_DEBUG_GC << "=== Update tenured -> young references. END. ===";
    this->CommonUpdateRefsToMovedObjects();
}
372 
/// Mostly-concurrent tenured collection: unmark everything, perform the
/// initial root mark under pause, mark concurrently, re-mark under pause,
/// then sweep (concurrent-capable). Tags the task as a TENURED collection.
template <class LanguageConfig>
void GenGC<LanguageConfig>::RunTenuredGC(GCTask &task)
{
    GCScope<TRACE_TIMING> scope(__FUNCTION__, this);
    LOG_DEBUG_GC << "GC tenured start";
    GCMarkingStackType objectsStack(this);
    {
        GCScopedPauseStats scopedPauseStats(this->GetPandaVm()->GetGCStats(), nullptr, PauseTypeStats::COMMON_PAUSE);
        {
            ScopedTiming unMarkTiming("UnMark", *this->GetTiming());
            // Unmark all because no filter out tenured when mark young
            // NOTE(dtrubenk): remove this
            this->GetObjectAllocator()->IterateOverObjects([this](ObjectHeader *obj) { this->marker_.UnMark(obj); });
        }
        InitialMark(&objectsStack);
    }
    // Runs with mutators resumed; ReMark below catches up under pause.
    this->ConcurrentMark(&objectsStack);
    // NOLINTNEXTLINE(performance-unnecessary-value-param)
    ReMark(&objectsStack, task);

    ASSERT(objectsStack.Empty());
    {
        ScopedTiming unMarkYoungTiming("UnMarkYoung", *this->GetTiming());
        // Young objects are not collected by tenured sweep; drop their marks.
        this->GetObjectAllocator()->IterateOverYoungObjects([this](ObjectHeader *obj) { this->marker_.UnMark(obj); });
    }
    Sweep<true>();
    LOG_DEBUG_GC << "GC tenured end";
    task.collectionType = GCCollectionType::TENURED;
}
402 
// Full GC runs entirely on pause.
/// Unmarks everything, marks all live objects from all roots (FullMark),
/// sweeps the tenured space non-concurrently, and — if the survivors fit —
/// evacuates the young space as well. Tags the task as a FULL collection.
template <class LanguageConfig>
void GenGC<LanguageConfig>::RunFullGC(GCTask &task)
{
    GCScope<TRACE_TIMING> fullGcScope(__FUNCTION__, this);
    LOG_DEBUG_GC << "Full GC start";
    this->SetFullGC(true);
    {
        ScopedTiming unMarkTiming("UnMark", *this->GetTiming());
        this->GetObjectAllocator()->IterateOverObjects([this](ObjectHeader *obj) { this->marker_.UnMark(obj); });
    }
    FullMark(task);
    Sweep<false>();
    // Young GC
    if (LIKELY(HaveEnoughSpaceToMove())) {
        // We already marked objects above so just collect and move
        CollectYoungAndMove();
        this->GetCardTable()->ClearAll();
    }
    this->SetFullGC(false);
    LOG_DEBUG_GC << "Full GC end";
    task.collectionType = GCCollectionType::FULL;
}
426 
/// Marks all objects reachable from VM roots and, when requested, from the
/// card table. Reference-typed roots matching refPred are routed through the
/// reference processor; everything else is marked and pushed for tracing.
template <class LanguageConfig>
void GenGC<LanguageConfig>::MarkRoots(GCMarkingStackType *objectsStack, CardTableVisitFlag visitCardTableRoots,
                                      const ReferenceCheckPredicateT &refPred, VisitGCRootFlags flags)
{
    trace::ScopedTrace scopedTrace(__FUNCTION__);
    GCRootVisitor gcMarkRoots = [this, &objectsStack, &refPred](const GCRoot &gcRoot) {
        ObjectHeader *rootObject = gcRoot.GetObjectHeader();
        ObjectHeader *fromObject = gcRoot.GetFromObjectHeader();
        LOG_DEBUG_GC << "Handle root " << GetDebugInfoAboutObject(rootObject);
        if (UNLIKELY(fromObject != nullptr) &&
            this->IsReference(fromObject->NotAtomicClassAddr<BaseClass>(), fromObject, refPred)) {
            LOG_DEBUG_GC << "Add reference: " << GetDebugInfoAboutObject(fromObject) << " to stack";
            marker_.Mark(fromObject);
            this->ProcessReference(objectsStack, fromObject->NotAtomicClassAddr<BaseClass>(), fromObject,
                                   GC::EmptyReferenceProcessPredicate);
        } else {
            // we should always add this object to the stack, because we could mark this object in InitialMark, but
            // write to some fields in ConcurrentMark - need to iterate over all fields again, MarkObjectIfNotMarked
            // can't be used here
            marker_.Mark(rootObject);
            objectsStack->PushToStack(gcRoot.GetType(), rootObject);
        }
    };
    this->VisitRoots(gcMarkRoots, flags);
    if (visitCardTableRoots == CardTableVisitFlag::VISIT_ENABLED) {
        auto allocator = this->GetObjectAllocator();
        ASSERT(allocator->GetYoungSpaceMemRanges().size() == 1);
        MemRange youngMr = allocator->GetYoungSpaceMemRanges().at(0);
        // All card ranges are considered...
        MemRangeChecker youngRangeChecker = []([[maybe_unused]] MemRange &memRange) -> bool { return true; };
        // ...but only tenured (non-young) target objects are visited here.
        ObjectChecker youngRangeTenuredObjectChecker = [&youngMr](const ObjectHeader *objectHeader) -> bool {
            return !youngMr.IsAddressInRange(ToUintPtr(objectHeader));
        };
        ObjectChecker fromObjectChecker = [&youngMr, this](const ObjectHeader *objectHeader) -> bool {
            // Don't visit objects which are in tenured and not marked.
            return youngMr.IsAddressInRange(ToUintPtr(objectHeader)) || IsMarked(objectHeader);
        };
        this->VisitCardTableRoots(this->GetCardTable(), gcMarkRoots, youngRangeChecker, youngRangeTenuredObjectChecker,
                                  fromObjectChecker, CardTableProcessedFlag::VISIT_MARKED);
    }
}
467 
/// Initial (paused) marking phase of a tenured collection: marks from VM
/// roots and starts recording roots created during the concurrent phase
/// (START_RECORDING_NEW_ROOT), to be caught up in ReMark.
template <class LanguageConfig>
void GenGC<LanguageConfig>::InitialMark(GCMarkingStackType *objectsStack)
{
    GCScope<TRACE_TIMING_PHASE> gcScope(__FUNCTION__, this, GCPhase::GC_PHASE_INITIAL_MARK);
    {
        // Non-atomic marking: this phase runs under the pause taken by the caller.
        NoAtomicGCMarkerScope scope(&this->marker_);
        auto refPred = [this](const ObjectHeader *obj) { return this->InGCSweepRange(obj); };
        MarkRoots(objectsStack, CardTableVisitFlag::VISIT_DISABLED, refPred,
                  VisitGCRootFlags::ACCESS_ROOT_NONE | VisitGCRootFlags::START_RECORDING_NEW_ROOT);
    }
}
479 
/// Concurrent marking phase: traces the heap while mutators run (inside
/// ConcurrentScope), using card-table information for non-young ranges.
template <class LanguageConfig>
NO_THREAD_SAFETY_ANALYSIS void GenGC<LanguageConfig>::ConcurrentMark(GCMarkingStackType *objectsStack)
{
    GCScope<TRACE_TIMING_PHASE> scopedFunc(__FUNCTION__, this, GCPhase::GC_PHASE_MARK);
    ConcurrentScope concurrentScope(this);
    auto *objectAllocator = this->GetObjectAllocator();
    this->MarkImpl(
        &marker_, objectsStack, CardTableVisitFlag::VISIT_ENABLED,
        // Process 'weak' references as regular object on concurrent phase to avoid
        // concurrent access to referent
        []([[maybe_unused]] const ObjectHeader *obj) { return false; },
        // non-young mem range checker
        [objectAllocator](MemRange &memRange) { return !objectAllocator->IsIntersectedWithYoung(memRange); });
}
494 
/// Final (paused) re-mark: catches up with roots created during the
/// concurrent phase (ACCESS_ROOT_ONLY_NEW / END_RECORDING_NEW_ROOT), marks
/// newly interned strings, and processes discovered references.
template <class LanguageConfig>
void GenGC<LanguageConfig>::ReMark(GCMarkingStackType *objectsStack, const GCTask &task)
{
    GCScope<TRACE_TIMING_PHASE> gcScope(__FUNCTION__, this, GCPhase::GC_PHASE_REMARK);
    GCScopedPauseStats scopedPauseStats(this->GetPandaVm()->GetGCStats(), nullptr, PauseTypeStats::REMARK_PAUSE);

    // NOTE(dtrubenkov): consider iterational concurrent marking of card table
    {
        // Non-atomic marking is safe: the remark pause is taken above.
        NoAtomicGCMarkerScope scope(&this->marker_);
        auto refPred = [this](const ObjectHeader *obj) { return this->InGCSweepRange(obj); };
        MarkRoots(objectsStack, CardTableVisitFlag::VISIT_ENABLED, refPred,
                  VisitGCRootFlags::ACCESS_ROOT_ONLY_NEW | VisitGCRootFlags::END_RECORDING_NEW_ROOT);
        this->MarkStack(&marker_, objectsStack, GC::EmptyMarkPreprocess, refPred);
        {
            ScopedTiming t1("VisitInternalStringTable", *this->GetTiming());
            this->GetPandaVm()->VisitStringTable(
                [this, &objectsStack](ObjectHeader *str) {
                    if (this->MarkObjectIfNotMarked(str)) {
                        ASSERT(str != nullptr);
                        objectsStack->PushToStack(RootType::STRING_TABLE, str);
                    }
                },
                VisitGCRootFlags::ACCESS_ROOT_ONLY_NEW | VisitGCRootFlags::END_RECORDING_NEW_ROOT);
            this->MarkStack(&marker_, objectsStack, GC::EmptyMarkPreprocess, refPred);
        }
        // NOLINTNEXTLINE(performance-unnecessary-value-param)
        this->GetPandaVm()->HandleReferences(task, GC::EmptyReferenceProcessPredicate);
    }
}
524 
/// Marking for full GC: runs on pause, visits all roots and the string table,
/// traces everything reachable, then clears all discovered references
/// unconditionally (refClearPred returns true for every object).
template <class LanguageConfig>
void GenGC<LanguageConfig>::FullMark(const GCTask &task)
{
    GCScope<TRACE_TIMING_PHASE> fullMarkScope(__FUNCTION__, this, GCPhase::GC_PHASE_MARK);
    NoAtomicGCMarkerScope markerScope(&this->marker_);

    GCMarkingStackType objectsStack(this);
    VisitGCRootFlags flags = VisitGCRootFlags::ACCESS_ROOT_ALL;
    auto refPred = GC::EmptyReferenceProcessPredicate;
    // Mark all reachable objects
    MarkRoots(&objectsStack, CardTableVisitFlag::VISIT_DISABLED, refPred, flags);
    this->GetPandaVm()->VisitStringTable(
        [this, &objectsStack](ObjectHeader *str) {
            if (this->MarkObjectIfNotMarked(str)) {
                ASSERT(str != nullptr);
                objectsStack.PushToStack(RootType::STRING_TABLE, str);
            }
        },
        flags);
    this->MarkStack(&marker_, &objectsStack, GC::EmptyMarkPreprocess, refPred);
    auto refClearPred = []([[maybe_unused]] const ObjectHeader *obj) { return true; };
    // NOLINTNEXTLINE(performance-unnecessary-value-param)
    this->GetPandaVm()->HandleReferences(task, refClearPred);
}
549 
550 template <class LanguageConfig>
MarkReferences(GCMarkingStackType * references,GCPhase gcPhase)551 void GenGC<LanguageConfig>::MarkReferences(GCMarkingStackType *references, GCPhase gcPhase)
552 {
553     trace::ScopedTrace scopedTrace(__FUNCTION__);
554     LOG_DEBUG_GC << "Start marking " << references->Size() << " references";
555     auto refPred = [this](const ObjectHeader *obj) { return this->InGCSweepRange(obj); };
556     if (gcPhase == GCPhase::GC_PHASE_MARK_YOUNG) {
557         this->MarkYoungStack(references);
558     } else if (gcPhase == GCPhase::GC_PHASE_INITIAL_MARK || gcPhase == GCPhase::GC_PHASE_MARK ||
559                gcPhase == GCPhase::GC_PHASE_REMARK) {
560         this->MarkStack(&marker_, references, GC::EmptyMarkPreprocess, refPred);
561     } else {
562         UNREACHABLE();
563     }
564 }
565 
/// Marks a single object via this collector's marker.
template <class LanguageConfig>
void GenGC<LanguageConfig>::MarkObject(ObjectHeader *object)
{
    marker_.Mark(object);
}
571 
/// Clears the mark bit of a single object via this collector's marker.
template <class LanguageConfig>
void GenGC<LanguageConfig>::UnMarkObject(ObjectHeader *objectHeader)
{
    LOG_DEBUG_GC << "Set unmark for GC " << GetDebugInfoAboutObject(objectHeader);
    this->marker_.UnMark(objectHeader);
}
578 
/// Returns whether the object is currently marked, as seen by this
/// collector's marker.
template <class LanguageConfig>
bool GenGC<LanguageConfig>::IsMarked(const ObjectHeader *object) const
{
    return this->marker_.IsMarked(object);
}
584 
// NO_THREAD_SAFETY_ANALYSIS: clang thread-safety analysis cannot model the
// conditional lock handover performed through ConcurrentScope below.
/// Sweeps the tenured space, reclaiming unmarked objects and freeing empty
/// pools. When CONCURRENT is true, the sweep (after VM-ref processing) runs
/// with mutators resumed; new objects allocated meanwhile are pre-marked by
/// InitGCBits and unmarked again once the sweep phase is over.
template <class LanguageConfig>
template <bool CONCURRENT>
NO_THREAD_SAFETY_ANALYSIS void GenGC<LanguageConfig>::Sweep()
{
    GCScope<TRACE_TIMING> gcScope(__FUNCTION__, this);
    ConcurrentScope concurrentScope(this, false);
    size_t freedObjectSize = 0U;
    size_t freedObjectCount = 0U;

    // NB! can't move block out of brace, we need to make sure GC_PHASE_SWEEP cleared
    {
        GCScopedPhase scopedPhase(this, GCPhase::GC_PHASE_SWEEP);
        // NOTE(dtrubenkov): make concurrent
        ASSERT(this->GetObjectAllocator()->GetYoungSpaceMemRanges().size() == 1);
        // new strings may be created in young space during tenured gc, we shouldn't collect them
        auto youngMemRange = this->GetObjectAllocator()->GetYoungSpaceMemRanges().at(0);
        this->GetPandaVm()->SweepVmRefs([this, &youngMemRange](ObjectHeader *object) {
            if (youngMemRange.IsAddressInRange(ToUintPtr(object))) {
                return ObjectStatus::ALIVE_OBJECT;
            }
            return this->marker_.MarkChecker(object);
        });
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (CONCURRENT) {
            concurrentScope.Start();  // enable concurrent after GC_PHASE_SWEEP has been set
        }

        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (CONCURRENT && LanguageConfig::MT_MODE == MT_MODE_MULTI) {
            // Run monitor deflation again, to avoid object was reclaimed before monitor deflate.
            auto youngMr = this->GetObjectAllocator()->GetYoungSpaceMemRanges().at(0);
            this->GetPandaVm()->GetMonitorPool()->DeflateMonitorsWithCallBack([&youngMr, this](Monitor *monitor) {
                ObjectHeader *objectHeader = monitor->GetObject();
                return (!IsMarked(objectHeader)) && (!youngMr.IsAddressInRange(ToUintPtr(objectHeader)));
            });
        }

        // Reclaim dead tenured objects, accounting freed bytes/counts as we go.
        this->GetObjectAllocator()->Collect(
            [this, &freedObjectSize, &freedObjectCount](ObjectHeader *object) {
                auto status = this->marker_.MarkChecker(object);
                if (status == ObjectStatus::DEAD_OBJECT) {
                    LOG_DEBUG_OBJECT_EVENTS << "DELETE OBJECT tenured: " << object;
                    freedObjectSize += GetAlignedObjectSize(GetObjectSize(object));
                    freedObjectCount++;
                }
                return status;
            },
            GCCollectMode::GC_ALL);
        // Return fully-freed pools to the mmap pool and clear their cards.
        this->GetObjectAllocator()->VisitAndRemoveFreePools([this](void *mem, size_t size) {
            this->GetCardTable()->ClearCardRange(ToUintPtr(mem), ToUintPtr(mem) + size);
            PoolManager::GetMmapMemPool()->FreePool(mem, size);
        });
    }

    this->memStats_.RecordSizeFreedTenured(freedObjectSize);
    this->memStats_.RecordCountFreedTenured(freedObjectCount);

    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (CONCURRENT) {
        // In concurrent sweep phase, the new created objects may being marked in InitGCBits,
        // so we need wait for that done, then we can safely unmark objects concurrent with mutator.
        ASSERT(this->GetGCPhase() != GCPhase::GC_PHASE_SWEEP);  // Make sure we are out of sweep scope
        this->GetObjectAllocator()->IterateOverTenuredObjects([this](ObjectHeader *obj) { this->marker_.UnMark(obj); });
    }
}
651 
652 template <class LanguageConfig>
SweepYoungVmRefs()653 void GenGC<LanguageConfig>::SweepYoungVmRefs()
654 {
655     GCScope<TRACE_TIMING_PHASE> scope(__FUNCTION__, this, GCPhase::GC_PHASE_SWEEP);
656     // new strings may be created in young space during tenured gc, we shouldn't collect them
657     // Sweep string table here to avoid dangling references
658     ASSERT(this->GetObjectAllocator()->GetYoungSpaceMemRanges().size() == 1);
659     // new strings may be created in young space during tenured gc, we shouldn't collect them
660     auto youngMemRange = this->GetObjectAllocator()->GetYoungSpaceMemRanges().at(0);
661     this->GetPandaVm()->SweepVmRefs([&youngMemRange](ObjectHeader *objectHeader) {
662         if (youngMemRange.IsAddressInRange(ToUintPtr(objectHeader))) {
663             return ObjectStatus::DEAD_OBJECT;
664         }
665         return ObjectStatus::ALIVE_OBJECT;
666     });
667 }
668 
669 template <class LanguageConfig>
InGCSweepRange(const ObjectHeader * obj) const670 bool GenGC<LanguageConfig>::InGCSweepRange(const ObjectHeader *obj) const
671 {
672     bool inYoungSpace = this->GetObjectAllocator()->IsObjectInYoungSpace(obj);
673     auto phase = this->GetGCPhase();
674     // Do young GC and the object is in the young space
675     if (phase == GCPhase::GC_PHASE_MARK_YOUNG && inYoungSpace) {
676         return true;
677     }
678 
679     // Do tenured GC and the object is in the tenured space
680     if (phase != GCPhase::GC_PHASE_MARK_YOUNG && !inYoungSpace) {
681         return true;
682     }
683 
684     return this->IsFullGC();
685 }
686 
/**
 * @return false — generational GC cannot postpone collections, because a young
 * collection must be able to move young-space objects at any time.
 */
template <class LanguageConfig>
bool GenGC<LanguageConfig>::IsPostponeGCSupported() const
{
    // Gen GC doesn't support GC postponing because
    // we have to move young space objects
    return false;
}
694 
695 template <class LanguageConfig>
HaveEnoughSpaceToMove() const696 bool GenGC<LanguageConfig>::HaveEnoughSpaceToMove() const
697 {
698     // hack for pools because we have 2 type of pools in tenures space, in bad cases objects can be moved to different
699     // spaces. And move 4M objects in bump-allocator to other allocator, may need more than 4M space in other allocator
700     // - so we need 3 empty pools.
701     // NOTE(xucheng) : remove the checker when we can do part young collection.
702     // The min num that can guarantee that we move all objects in young space.
703     constexpr size_t POOLS_NUM = 3;
704     return this->GetObjectAllocator()->HaveEnoughPoolsInObjectSpace(POOLS_NUM);
705 }
706 
707 TEMPLATE_CLASS_LANGUAGE_CONFIG(GenGC);
708 
709 }  // namespace panda::mem
710