• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /**
2  * Copyright (c) 2024 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  * http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #include "runtime/mem/gc/gen-gc/gen-gc.h"
17 #include "runtime/mem/gc/gc_root-inl.h"
18 #include "runtime/mem/object_helpers-inl.h"
19 #include "runtime/include/panda_vm.h"
20 #include "runtime/mem/gc/card_table-inl.h"
21 #include "runtime/timing.h"
22 #include "runtime/mem/pygote_space_allocator-inl.h"
23 #include "runtime/mem/gc/static/gc_marker_static-inl.h"
24 #include "runtime/mem/gc/dynamic/gc_marker_dynamic-inl.h"
25 #include "runtime/mem/gc/generational-gc-base-inl.h"
26 #include "runtime/mem/gc/gc_adaptive_stack_inl.h"
27 
28 namespace ark::mem {
29 
/// Generational GC constructor: young space is collected on pause and moved to
/// tenured; tenured space is collected with a concurrent mark phase (see
/// RunTenuredGC). TLAB allocation is supported for young objects.
/// @param objectAllocator allocator shared with the generational base class
/// @param settings        GC settings snapshot used for configuration
template <class LanguageConfig>
GenGC<LanguageConfig>::GenGC(ObjectAllocatorBase *objectAllocator, const GCSettings &settings)
    : GenerationalGC<LanguageConfig>(objectAllocator, settings),
      marker_(this),
      // Cache whether an explicitly requested GC is allowed to run concurrently.
      isExplicitConcurrentGcEnabled_(settings.IsExplicitConcurrentGcEnabled())
{
    this->SetType(GCType::GEN_GC);
    this->SetTLABsSupported();
}
39 
/// One-time initialization: creates the card table covering the whole object
/// address range and installs the generational write-barrier set that dirties
/// cards on tenured -> young stores.
template <class LanguageConfig>
void GenGC<LanguageConfig>::InitializeImpl()
{
    // GC saved the PandaVM instance, so we get allocator from the PandaVM.
    InternalAllocatorPtr allocator = this->GetInternalAllocator();
    // Card table must span every address an object can live at.
    this->CreateCardTable(allocator, PoolManager::GetMmapMemPool()->GetMinObjectAddress(),
                          PoolManager::GetMmapMemPool()->GetTotalObjectSize());
    auto barrierSet = allocator->New<GCGenBarrierSet>(allocator, this->GetCardTable(), CardTable::GetCardBits(),
                                                      CardTable::GetCardDirtyValue());
    ASSERT(barrierSet != nullptr);
    this->SetGCBarrierSet(barrierSet);
    LOG_DEBUG_GC << "GenGC initialized";
}
53 
54 template <class LanguageConfig>
ShouldRunTenuredGC(const GCTask & task)55 bool GenGC<LanguageConfig>::ShouldRunTenuredGC(const GCTask &task)
56 {
57     return task.reason == GCTaskCause::HEAP_USAGE_THRESHOLD_CAUSE;
58 }
59 
60 template <class LanguageConfig>
ShouldRunFullGC(const GCTask & task,bool haveEnoughSpaceForYoung) const61 bool GenGC<LanguageConfig>::ShouldRunFullGC(const GCTask &task, bool haveEnoughSpaceForYoung) const
62 {
63     return !haveEnoughSpaceForYoung || task.reason == GCTaskCause::OOM_CAUSE || this->IsExplicitFull(task) ||
64            this->IsOnPygoteFork() || task.reason == GCTaskCause::STARTUP_COMPLETE_CAUSE;
65 }
66 
/// Top-level driver for one GC cycle: chooses between a full collection and a
/// young collection (optionally followed by a tenured collection), and records
/// heap-footprint checkpoints around each stage when detailed logging is on.
template <class LanguageConfig>
void GenGC<LanguageConfig>::RunPhasesImpl(GCTask &task)
{
    LOG(DEBUG, GC) << "GenGC start";
    uint64_t footprintBefore = this->GetPandaVm()->GetMemStats()->GetFootprintHeap();
    LOG_DEBUG_GC << "Footprint before GC: " << footprintBefore;
    if (this->IsLogDetailedGcInfoEnabled()) {
        this->footprintList_.clear();
        this->footprintList_.push_back({"Footprint before GC", footprintBefore});
    }
    uint64_t youngTotalTime = 0;
    {
        ScopedTiming t("Generational GC", *this->GetTiming());
        this->memStats_.Reset();
        // We trigger a full gc at first pygote fork
        if (ShouldRunFullGC(task, HaveEnoughSpaceToMove())) {
            GCScopedPauseStats scopedPauseStats(this->GetPandaVm()->GetGCStats());
            marker_.BindBitmaps(true);  // clear pygote live bitmaps, we will rebuild it
            // Space data must be refreshed after bitmaps are rebound.
            this->GetObjectGenAllocator()->InvalidateSpaceData();
            this->GetObjectGenAllocator()->UpdateSpaceData();
            RunFullGC(task);
        } else {
            {
                GCScopedPauseStats scopedPauseStats(this->GetPandaVm()->GetGCStats());
                time::Timer timer(&youngTotalTime, true);
                marker_.BindBitmaps(false);
                this->GetObjectGenAllocator()->InvalidateSpaceData();
                this->GetObjectGenAllocator()->UpdateSpaceData();
                LOG_DEBUG_GC << "Young range: " << this->GetObjectAllocator()->GetYoungSpaceMemRanges().at(0);
                RunYoungGC(task);
                // NOTE(review): youngTotalTime is inspected while `timer` is still
                // alive — confirm time::Timer updates the counter before destruction,
                // otherwise this always reads 0 here.
                if (youngTotalTime > 0) {
                    this->GetStats()->AddTimeValue(youngTotalTime, TimeTypeStats::YOUNG_TOTAL_TIME);
                }
                uint64_t footprintYoung = this->GetPandaVm()->GetMemStats()->GetFootprintHeap();
                LOG_DEBUG_GC << "Footprint after young: " << footprintYoung;
                if (this->IsLogDetailedGcInfoEnabled()) {
                    this->footprintList_.push_back({"Footprint after young", footprintYoung});
                }
            }
            // A tenured pass may follow the young pass within the same task.
            if (ShouldRunTenuredGC(task)) {
                marker_.BindBitmaps(true);  // clear pygote live bitmaps, we will rebuild it
                this->GetObjectGenAllocator()->InvalidateSpaceData();
                this->GetObjectGenAllocator()->UpdateSpaceData();
                RunTenuredGC(task);
            }
        }
    }
    uint64_t footprintAfter = this->GetPandaVm()->GetMemStats()->GetFootprintHeap();
    LOG_DEBUG_GC << "Footprint after GC: " << footprintAfter;
    if (this->IsLogDetailedGcInfoEnabled()) {
        this->footprintList_.push_back({"Footprint after GC", footprintAfter});
    }
}
120 
/// Startup hook: delegates to the generational base to disable tenured
/// collections until startup completes (see ShouldRunFullGC's
/// STARTUP_COMPLETE_CAUSE handling).
template <class LanguageConfig>
void GenGC<LanguageConfig>::PreStartupImp()
{
    GenerationalGC<LanguageConfig>::DisableTenuredGC();
}
126 
/// Initializes the mark bit of a freshly allocated object. A tenured object
/// born during the sweep phase must start "marked" so the concurrent sweeper
/// does not reclaim it; every other object starts unmarked.
template <class LanguageConfig>
void GenGC<LanguageConfig>::InitGCBits(ark::ObjectHeader *objHeader)
{
    if (UNLIKELY(this->GetGCPhase() == GCPhase::GC_PHASE_SWEEP) &&
        (!this->GetObjectAllocator()->IsObjectInYoungSpace(objHeader))) {
        objHeader->SetMarkedForGC();
        // do unmark if out of sweep phase otherwise we may miss it in sweep
        // (the phase may have changed concurrently between the check above and
        // SetMarkedForGC, so it is re-read here on purpose)
        if (UNLIKELY(this->GetGCPhase() != GCPhase::GC_PHASE_SWEEP)) {
            objHeader->SetUnMarkedForGC();
        }
    } else {
        objHeader->SetUnMarkedForGC();
    }
    LOG_DEBUG_GC << "Init gc bits for object: " << std::hex << objHeader << " bit: " << objHeader->IsMarkedForGC()
                 << ", is marked = " << IsMarked(objHeader);
}
143 
/// Initializes the mark bit for an object allocated from a TLAB.
template <class LanguageConfig>
void GenGC<LanguageConfig>::InitGCBitsForAllocationInTLAB(ark::ObjectHeader *objHeader)
{
    // Compiler will allocate objects in TLABs only in young space
    // Therefore, set unmarked for GC here.
    objHeader->SetUnMarkedForGC();
}
151 
152 template <class LanguageConfig>
RunYoungGC(GCTask & task)153 void GenGC<LanguageConfig>::RunYoungGC(GCTask &task)
154 {
155     GCScope<TRACE_TIMING> scopedTrace(__FUNCTION__, this);
156     LOG_DEBUG_GC << "GenGC RunYoungGC start";
157     uint64_t youngPauseTime;
158     {
159         NoAtomicGCMarkerScope scope(&this->marker_);
160         time::Timer timer(&youngPauseTime, true);
161         // NOLINTNEXTLINE(performance-unnecessary-value-param)
162         MarkYoung(task);
163         CollectYoungAndMove();
164         this->GetCardTable()->ClearAll();
165     }
166     if (youngPauseTime > 0) {
167         this->GetStats()->AddTimeValue(youngPauseTime, TimeTypeStats::YOUNG_PAUSED_TIME);
168     }
169     LOG_DEBUG_GC << "GenGC RunYoungGC end";
170     task.collectionType = GCCollectionType::YOUNG;
171 }
172 
/// Marks every live object in the young space. Roots are visited twice:
/// regular VM roots restricted to the young range, then tenured->young
/// references found via dirty cards in the card table. Finally pending
/// references are handed to the reference processor.
template <class LanguageConfig>
void GenGC<LanguageConfig>::MarkYoung(const GCTask &task)
{
    GCScope<TRACE_TIMING_PHASE> scope(__FUNCTION__, this, GCPhase::GC_PHASE_MARK_YOUNG);

    // Iterate over roots and add other roots
    GCMarkingStackType objectsStack(this);
    ASSERT(this->GetObjectAllocator()->GetYoungSpaceMemRanges().size() == 1);
    auto youngMr = this->GetObjectAllocator()->GetYoungSpaceMemRanges().at(0);
    // Visitor shared by both traversals: marks a young root and immediately
    // drains the stack transitively.
    GCRootVisitor gcMarkYoung = [&objectsStack, &youngMr, this](const GCRoot &gcRoot) {
        // Skip non-young roots
        auto rootObjectPtr = gcRoot.GetObjectHeader();
        ASSERT(rootObjectPtr != nullptr);
        if (!youngMr.IsAddressInRange(ToUintPtr(rootObjectPtr))) {
            LOG_DEBUG_GC << "Skip root for young mark: " << std::hex << rootObjectPtr;
            return;
        }
        LOG(DEBUG, GC) << "root " << GetDebugInfoAboutObject(rootObjectPtr);
        if (this->MarkObjectIfNotMarked(rootObjectPtr)) {
            objectsStack.PushToStack(gcRoot.GetType(), rootObjectPtr);
            this->MarkYoungStack(&objectsStack);
        }
    };
    {
        GCScope<TRACE_TIMING> markingYoungRootsTrace("Marking roots young", this);
        this->VisitRoots(gcMarkYoung,
                         VisitGCRootFlags::ACCESS_ROOT_NONE | VisitGCRootFlags::ACCESS_ROOT_AOT_STRINGS_ONLY_YOUNG);
    }
    {
        ScopedTiming visitCardTableRootsTiming("VisitCardTableRoots", *this->GetTiming());
        LOG_DEBUG_GC << "START Marking tenured -> young roots";
        // Only visit card ranges outside the young space (tenured cards).
        MemRangeChecker tenuredRangeChecker = [&youngMr](MemRange &memRange) -> bool {
            return !youngMr.IsIntersect(memRange);
        };
        // Of the referenced objects, only those inside young are interesting.
        ObjectChecker tenuredRangeYoungObjectChecker = [&youngMr](const ObjectHeader *objectHeader) -> bool {
            return youngMr.IsAddressInRange(ToUintPtr(objectHeader));
        };

        // Every source object in a dirty card is inspected.
        ObjectChecker fromObjectChecker = []([[maybe_unused]] const ObjectHeader *objectHeader) -> bool {
            return true;
        };

        this->VisitCardTableRoots(this->GetCardTable(), gcMarkYoung, tenuredRangeChecker,
                                  tenuredRangeYoungObjectChecker, fromObjectChecker,
                                  CardTableProcessedFlag::VISIT_MARKED | CardTableProcessedFlag::VISIT_PROCESSED);
    }
    // reference-processor in VisitCardTableRoots can add new objects to stack
    this->MarkYoungStack(&objectsStack);
    LOG_DEBUG_GC << "END Marking tenured -> young roots";
    auto refClearPred = [this]([[maybe_unused]] const ObjectHeader *obj) { return this->InGCSweepRange(obj); };
    this->GetPandaVm()->HandleReferences(task, refClearPred);
}
225 
226 template <class LanguageConfig>
MarkYoungStack(GCMarkingStackType * stack)227 void GenGC<LanguageConfig>::MarkYoungStack(GCMarkingStackType *stack)
228 {
229     trace::ScopedTrace scopedTrace(__FUNCTION__);
230     ASSERT(stack != nullptr);
231     auto allocator = this->GetObjectAllocator();
232     auto &youngRanges = allocator->GetYoungSpaceMemRanges();
233     auto refPred = [this](const ObjectHeader *obj) { return this->InGCSweepRange(obj); };
234     while (!stack->Empty()) {
235         auto *object = this->PopObjectFromStack(stack);
236         ValidateObject(nullptr, object);
237         auto *cls = object->template ClassAddr<BaseClass>();
238         LOG_DEBUG_GC << "current object " << GetDebugInfoAboutObject(object);
239 
240         bool inRange = false;
241         for (const auto &r : youngRanges) {
242             if (r.IsAddressInRange(ToUintPtr(object))) {
243                 inRange = true;
244                 break;
245             }
246         }
247         if (inRange) {
248             marker_.MarkInstance(stack, object, cls, refPred);
249         }
250     }
251 }
252 
253 template <class LanguageConfig>
CollectVerificationInfo(const MemRange & youngMemRange)254 HeapVerifierIntoGC<LanguageConfig> GenGC<LanguageConfig>::CollectVerificationInfo(const MemRange &youngMemRange)
255 {
256     HeapVerifierIntoGC<LanguageConfig> youngVerifier(this->GetPandaVm()->GetHeapManager());
257     if (this->GetSettings()->IntoGCHeapVerification() && !this->IsFullGC()) {
258         ScopedTiming collectVerificationTiming("CollectVerificationInfo", *this->GetTiming());
259         youngVerifier.CollectVerificationInfo(PandaVector<MemRange>(1U, youngMemRange));
260     }
261     return youngVerifier;
262 }
263 
264 template <class LanguageConfig>
VerifyCollectAndMove(HeapVerifierIntoGC<LanguageConfig> && youngVerifier)265 void GenGC<LanguageConfig>::VerifyCollectAndMove(HeapVerifierIntoGC<LanguageConfig> &&youngVerifier)
266 {
267     if (this->GetSettings()->IntoGCHeapVerification() && !this->IsFullGC()) {
268         ScopedTiming verificationTiming("Verification", *this->GetTiming());
269         size_t failsCount = youngVerifier.VerifyAll();
270         if (this->GetSettings()->FailOnHeapVerification() && failsCount > 0) {
271             LOG(FATAL, GC) << "Heap was corrupted during GC, HeapVerifier found " << failsCount << " corruptions";
272         }
273     }
274 }
275 
/// Evacuates the young space: marked objects are copied into tenured space and
/// a forwarding address is installed; unmarked objects are reclaimed. Afterwards
/// all references to moved objects are updated and the young allocator is reset.
/// Must run while the world is stopped (see the NOTE below).
// NOLINTNEXTLINE(readability-function-size)
template <class LanguageConfig>
void GenGC<LanguageConfig>::CollectYoungAndMove()
{
    GCScope<TRACE_TIMING_PHASE> scope(__FUNCTION__, this, GCPhase::GC_PHASE_COLLECT_YOUNG_AND_MOVE);
    LOG_DEBUG_GC << "== GenGC CollectYoungAndMove start ==";
    // NOTE(dtrubenkov): add assert that we in STW
    PandaVector<ObjectHeader *> movedObjects;
    size_t prevMovedSize = this->GetPandaVm()->GetMemStats()->GetLastYoungObjectsMovedBytes();
    constexpr size_t MINIMAL_PREALLOC_MOVE_OBJ = 32U;
    // Adaptive preallocate buffer for moved_objects to avoid useless reallocations
    movedObjects.reserve(std::max(MINIMAL_PREALLOC_MOVE_OBJ, prevMovedSize / GetMinimalObjectSize()));
    size_t youngMoveSize = 0;
    size_t youngMoveCount = 0;
    size_t youngDeleteSize = 0;
    size_t youngDeleteCount = 0;

    auto *objectAllocator = this->GetObjectGenAllocator();
    ASSERT(this->GetObjectAllocator()->GetYoungSpaceMemRanges().size() == 1);
    auto youngMemRange = this->GetObjectAllocator()->GetYoungSpaceMemRanges().at(0);
    HeapVerifierIntoGC<LanguageConfig> youngVerifier = CollectVerificationInfo(youngMemRange);

    // Per-object visitor: copy marked (live) objects to tenured and record a
    // forwarding address; dead objects are only accounted for.
    std::function<void(ObjectHeader * objectHeader)> moveVisitor(
        [this, &objectAllocator, &movedObjects, &youngMoveSize, &youngMoveCount, &youngDeleteSize,
         &youngDeleteCount](ObjectHeader *objectHeader) -> void {
            size_t size = GetObjectSize(objectHeader);
            ASSERT(size <= Runtime::GetOptions().GetMaxTlabSize());
            // Use aligned size here, because we need to proceed MemStats correctly.
            size_t alignedSize = GetAlignedObjectSize(size);
            if (objectHeader->IsMarkedForGC<false>()) {
                auto dst = reinterpret_cast<ObjectHeader *>(objectAllocator->AllocateTenuredWithoutLocks(size));
                ASSERT(dst != nullptr);
                memcpy_s(dst, size, objectHeader, size);
                youngMoveSize += alignedSize;
                youngMoveCount++;
                LOG_DEBUG_OBJECT_EVENTS << "MOVE object " << objectHeader << " -> " << dst << ", size = " << size;
                movedObjects.push_back(dst);
                // set unmarked dst
                UnMarkObject(dst);
                this->SetForwardAddress(objectHeader, dst);
            } else {
                LOG_DEBUG_OBJECT_EVENTS << "DELETE OBJECT young: " << objectHeader;
                ++youngDeleteCount;
                youngDeleteSize += alignedSize;
            }
            // We will record all object in MemStats as SPACE_TYPE_OBJECT, so check it
            ASSERT(PoolManager::GetMmapMemPool()->GetSpaceTypeForAddr(objectHeader) == SpaceType::SPACE_TYPE_OBJECT);
        });
    {
        ScopedTiming moveTiming("MoveAndSweep", *this->GetTiming());
        objectAllocator->IterateOverYoungObjects(moveVisitor);
    }
    this->memStats_.RecordYoungStats(youngMoveSize, youngMoveCount, youngDeleteSize, youngDeleteCount);
    // Fix every pointer into young space: it now refers to the tenured copy.
    UpdateRefsToMovedObjects(&movedObjects);
    this->VerifyCollectAndMove(std::move(youngVerifier));
    SweepYoungVmRefs();
    // Remove young
    objectAllocator->ResetYoungAllocator();

    this->UpdateMemStats(this->GetPandaVm()->GetMemStats()->GetFootprintHeap(), false);

    LOG_DEBUG_GC << "== GenGC CollectYoungAndMove end ==";
}
339 
/// Rewrites references that point at evacuated young objects so they target
/// the tenured copies: first inside the moved objects themselves (ex-young ->
/// young refs), then in tenured objects found via marked cards, then in the
/// common root set handled by the base class.
template <class LanguageConfig>
void GenGC<LanguageConfig>::UpdateRefsToMovedObjects(PandaVector<ObjectHeader *> *movedObjects)
{
    GCScope<TRACE_TIMING> scope("UpdateRefsToMovedObjects", this);

    auto objAllocator = this->GetObjectAllocator();
    // Update references exyoung -> young
    LOG_DEBUG_GC << "process moved objects cnt = " << std::dec << movedObjects->size();
    LOG_DEBUG_GC << "=== Update exyoung -> young references. START. ===";
    for (auto obj : *movedObjects) {
        ObjectHelpers<LanguageConfig::LANG_TYPE>::UpdateRefsToMovedObjects(obj);
    }

    LOG_DEBUG_GC << "=== Update exyoung -> young references. END. ===";
    // update references tenured -> young
    LOG_DEBUG_GC << "=== Update tenured -> young references. START. ===";
    auto youngSpace = objAllocator->GetYoungSpaceMemRanges().at(0);
    auto updateRefsInObject(
        [](ObjectHeader *obj) { ObjectHelpers<LanguageConfig::LANG_TYPE>::UpdateRefsToMovedObjects(obj); });
    // Marked cards identify tenured ranges that may reference young objects.
    this->GetCardTable()->VisitMarked(
        [&updateRefsInObject, &objAllocator, &youngSpace](const MemRange &memRange) {
            if (!youngSpace.Contains(memRange)) {
                objAllocator->IterateOverObjectsInRange(memRange, updateRefsInObject);
            }
        },
        CardTableProcessedFlag::VISIT_MARKED | CardTableProcessedFlag::VISIT_PROCESSED);
    LOG_DEBUG_GC << "=== Update tenured -> young references. END. ===";
    this->CommonUpdateRefsToMovedObjects();
}
369 
/// Runs a tenured-space collection: unmark everything, initial (paused) root
/// mark, concurrent mark, paused re-mark, then concurrent sweep. Young objects
/// are unmarked at the end because the tenured pass marks them too.
template <class LanguageConfig>
void GenGC<LanguageConfig>::RunTenuredGC(GCTask &task)
{
    GCScope<TRACE_TIMING> scope(__FUNCTION__, this);
    LOG_DEBUG_GC << "GC tenured start";
    GCMarkingStackType objectsStack(this);
    {
        GCScopedPauseStats scopedPauseStats(this->GetPandaVm()->GetGCStats(), nullptr, PauseTypeStats::COMMON_PAUSE);
        {
            ScopedTiming unMarkTiming("UnMark", *this->GetTiming());
            // Unmark all because no filter out tenured when mark young
            // NOTE(dtrubenk): remove this
            this->GetObjectAllocator()->IterateOverObjects([this](ObjectHeader *obj) { this->marker_.UnMark(obj); });
        }
        InitialMark(&objectsStack);
    }
    // Runs outside the pause; new roots recorded since InitialMark are
    // picked up by ReMark below.
    this->ConcurrentMark(&objectsStack);
    // NOLINTNEXTLINE(performance-unnecessary-value-param)
    ReMark(&objectsStack, task);

    ASSERT(objectsStack.Empty());
    {
        ScopedTiming unMarkYoungTiming("UnMarkYoung", *this->GetTiming());
        this->GetObjectAllocator()->IterateOverYoungObjects([this](ObjectHeader *obj) { this->marker_.UnMark(obj); });
    }
    Sweep<true>();  // concurrent sweep
    LOG_DEBUG_GC << "GC tenured end";
    task.collectionType = GCCollectionType::TENURED;
}
399 
// Full GC is ran on pause
/// Marks the whole heap from all roots (FullMark), sweeps tenured
/// non-concurrently, and then — if there is room to evacuate — collects and
/// moves the already-marked young space.
template <class LanguageConfig>
void GenGC<LanguageConfig>::RunFullGC(GCTask &task)
{
    GCScope<TRACE_TIMING> fullGcScope(__FUNCTION__, this);
    LOG_DEBUG_GC << "Full GC start";
    this->SetFullGC(true);
    {
        ScopedTiming unMarkTiming("UnMark", *this->GetTiming());
        this->GetObjectAllocator()->IterateOverObjects([this](ObjectHeader *obj) { this->marker_.UnMark(obj); });
    }
    FullMark(task);
    Sweep<false>();  // non-concurrent sweep: full GC runs entirely on pause
    // Young GC
    if (LIKELY(HaveEnoughSpaceToMove())) {
        // We already marked objects above so just collect and move
        CollectYoungAndMove();
        this->GetCardTable()->ClearAll();
    }
    this->SetFullGC(false);
    LOG_DEBUG_GC << "Full GC end";
    task.collectionType = GCCollectionType::FULL;
}
423 
/// Marks objects reachable from VM roots (and, optionally, from card-table
/// roots). Reference objects whose referent passes refPred go to the reference
/// processor; everything else is marked and pushed for tracing.
/// @param objectsStack       output marking stack
/// @param visitCardTableRoots whether to also scan dirty cards
/// @param refPred            predicate selecting references to process
/// @param flags              which categories of VM roots to visit
template <class LanguageConfig>
void GenGC<LanguageConfig>::MarkRoots(GCMarkingStackType *objectsStack, CardTableVisitFlag visitCardTableRoots,
                                      const ReferenceCheckPredicateT &refPred, VisitGCRootFlags flags)
{
    trace::ScopedTrace scopedTrace(__FUNCTION__);
    GCRootVisitor gcMarkRoots = [this, &objectsStack, &refPred](const GCRoot &gcRoot) {
        ObjectHeader *rootObject = gcRoot.GetObjectHeader();
        ObjectHeader *fromObject = gcRoot.GetFromObjectHeader();
        LOG_DEBUG_GC << "Handle root " << GetDebugInfoAboutObject(rootObject);
        if (UNLIKELY(fromObject != nullptr) &&
            this->IsReference(fromObject->NotAtomicClassAddr<BaseClass>(), fromObject, refPred)) {
            LOG_DEBUG_GC << "Add reference: " << GetDebugInfoAboutObject(fromObject) << " to stack";
            marker_.Mark(fromObject);
            this->ProcessReference(objectsStack, fromObject->NotAtomicClassAddr<BaseClass>(), fromObject,
                                   GC::EmptyReferenceProcessPredicate);
        } else {
            // we should always add this object to the stack, because we could mark this object in InitialMark, but
            // write to some fields in ConcurrentMark - need to iterate over all fields again, MarkObjectIfNotMarked
            // can't be used here
            marker_.Mark(rootObject);
            objectsStack->PushToStack(gcRoot.GetType(), rootObject);
        }
    };
    this->VisitRoots(gcMarkRoots, flags);
    if (visitCardTableRoots == CardTableVisitFlag::VISIT_ENABLED) {
        auto allocator = this->GetObjectAllocator();
        ASSERT(allocator->GetYoungSpaceMemRanges().size() == 1);
        MemRange youngMr = allocator->GetYoungSpaceMemRanges().at(0);
        // Visit every card range ...
        MemRangeChecker youngRangeChecker = []([[maybe_unused]] MemRange &memRange) -> bool { return true; };
        // ... but only follow referenced objects that are NOT in young space.
        ObjectChecker youngRangeTenuredObjectChecker = [&youngMr](const ObjectHeader *objectHeader) -> bool {
            return !youngMr.IsAddressInRange(ToUintPtr(objectHeader));
        };
        ObjectChecker fromObjectChecker = [&youngMr, this](const ObjectHeader *objectHeader) -> bool {
            // Don't visit objects which are in tenured and not marked.
            return youngMr.IsAddressInRange(ToUintPtr(objectHeader)) || IsMarked(objectHeader);
        };
        this->VisitCardTableRoots(this->GetCardTable(), gcMarkRoots, youngRangeChecker, youngRangeTenuredObjectChecker,
                                  fromObjectChecker, CardTableProcessedFlag::VISIT_MARKED);
    }
}
464 
/// Paused phase of tenured marking: marks objects directly reachable from VM
/// roots and starts recording roots created while the concurrent phase runs
/// (START_RECORDING_NEW_ROOT), so ReMark can catch up later.
template <class LanguageConfig>
void GenGC<LanguageConfig>::InitialMark(GCMarkingStackType *objectsStack)
{
    GCScope<TRACE_TIMING_PHASE> gcScope(__FUNCTION__, this, GCPhase::GC_PHASE_INITIAL_MARK);
    {
        NoAtomicGCMarkerScope scope(&this->marker_);
        auto refPred = [this](const ObjectHeader *obj) { return this->InGCSweepRange(obj); };
        MarkRoots(objectsStack, CardTableVisitFlag::VISIT_DISABLED, refPred,
                  VisitGCRootFlags::ACCESS_ROOT_NONE | VisitGCRootFlags::START_RECORDING_NEW_ROOT);
    }
}
476 
/// Concurrent phase of tenured marking: traces the heap (including card-table
/// roots) while mutators run. Weak references are treated as strong here to
/// avoid concurrent access to referents; they are handled later in ReMark.
template <class LanguageConfig>
NO_THREAD_SAFETY_ANALYSIS void GenGC<LanguageConfig>::ConcurrentMark(GCMarkingStackType *objectsStack)
{
    GCScope<TRACE_TIMING_PHASE> scopedFunc(__FUNCTION__, this, GCPhase::GC_PHASE_MARK);
    ConcurrentScope concurrentScope(this);
    auto *objectAllocator = this->GetObjectAllocator();
    this->MarkImpl(
        &marker_, objectsStack, CardTableVisitFlag::VISIT_ENABLED,
        // Process 'weak' references as regular object on concurrent phase to avoid
        // concurrent access to referent
        []([[maybe_unused]] const ObjectHeader *obj) { return false; },
        // non-young mem range checker
        [objectAllocator](MemRange &memRange) { return !objectAllocator->IsIntersectedWithYoung(memRange); });
}
491 
/// Paused re-mark after the concurrent phase: marks roots created during
/// concurrent marking (ACCESS_ROOT_ONLY_NEW, stops the new-root recording),
/// drains the stack, re-scans the string table, and processes references that
/// live in tenured space.
template <class LanguageConfig>
void GenGC<LanguageConfig>::ReMark(GCMarkingStackType *objectsStack, const GCTask &task)
{
    GCScope<TRACE_TIMING_PHASE> gcScope(__FUNCTION__, this, GCPhase::GC_PHASE_REMARK);
    GCScopedPauseStats scopedPauseStats(this->GetPandaVm()->GetGCStats(), nullptr, PauseTypeStats::REMARK_PAUSE);

    // NOTE(dtrubenkov): consider iterational concurrent marking of card table
    {
        NoAtomicGCMarkerScope scope(&this->marker_);
        auto refPred = [this](const ObjectHeader *obj) { return this->InGCSweepRange(obj); };
        MarkRoots(objectsStack, CardTableVisitFlag::VISIT_ENABLED, refPred,
                  VisitGCRootFlags::ACCESS_ROOT_ONLY_NEW | VisitGCRootFlags::END_RECORDING_NEW_ROOT);
        this->MarkStack(&marker_, objectsStack, GC::EmptyMarkPreprocess, refPred);
        {
            ScopedTiming t1("VisitInternalStringTable", *this->GetTiming());
            this->GetPandaVm()->VisitStringTable(
                [this, &objectsStack](ObjectHeader *str) {
                    if (this->MarkObjectIfNotMarked(str)) {
                        ASSERT(str != nullptr);
                        objectsStack->PushToStack(RootType::STRING_TABLE, str);
                    }
                },
                VisitGCRootFlags::ACCESS_ROOT_ONLY_NEW | VisitGCRootFlags::END_RECORDING_NEW_ROOT);
            this->MarkStack(&marker_, objectsStack, GC::EmptyMarkPreprocess, refPred);
        }
        // ConcurrentMark doesn't visit young objects - so we can't clear references which are in young-space because we
        // don't know which objects are marked. We will process them on young GC separately later, here we process
        // only refs in tenured-space
        this->GetPandaVm()->HandleReferences(task, refPred);
    }
}
523 
/// Stop-the-world marking for a full collection: visits ALL roots and the
/// string table, traces everything reachable, then clears every unreachable
/// reference (the clear predicate accepts all objects).
template <class LanguageConfig>
void GenGC<LanguageConfig>::FullMark(const GCTask &task)
{
    GCScope<TRACE_TIMING_PHASE> fullMarkScope(__FUNCTION__, this, GCPhase::GC_PHASE_MARK);
    NoAtomicGCMarkerScope markerScope(&this->marker_);

    GCMarkingStackType objectsStack(this);
    VisitGCRootFlags flags = VisitGCRootFlags::ACCESS_ROOT_ALL;
    auto refPred = GC::EmptyReferenceProcessPredicate;
    // Mark all reachable objects
    MarkRoots(&objectsStack, CardTableVisitFlag::VISIT_DISABLED, refPred, flags);
    this->GetPandaVm()->VisitStringTable(
        [this, &objectsStack](ObjectHeader *str) {
            if (this->MarkObjectIfNotMarked(str)) {
                ASSERT(str != nullptr);
                objectsStack.PushToStack(RootType::STRING_TABLE, str);
            }
        },
        flags);
    this->MarkStack(&marker_, &objectsStack, GC::EmptyMarkPreprocess, refPred);
    // Full GC clears every unreachable reference, so the predicate is "true".
    auto refClearPred = []([[maybe_unused]] const ObjectHeader *obj) { return true; };
    // NOLINTNEXTLINE(performance-unnecessary-value-param)
    this->GetPandaVm()->HandleReferences(task, refClearPred);
}
548 
549 template <class LanguageConfig>
MarkReferences(GCMarkingStackType * references,GCPhase gcPhase)550 void GenGC<LanguageConfig>::MarkReferences(GCMarkingStackType *references, GCPhase gcPhase)
551 {
552     trace::ScopedTrace scopedTrace(__FUNCTION__);
553     LOG_DEBUG_GC << "Start marking " << references->Size() << " references";
554     auto refPred = [this](const ObjectHeader *obj) { return this->InGCSweepRange(obj); };
555     if (gcPhase == GCPhase::GC_PHASE_MARK_YOUNG) {
556         this->MarkYoungStack(references);
557     } else if (gcPhase == GCPhase::GC_PHASE_INITIAL_MARK || gcPhase == GCPhase::GC_PHASE_MARK ||
558                gcPhase == GCPhase::GC_PHASE_REMARK) {
559         this->MarkStack(&marker_, references, GC::EmptyMarkPreprocess, refPred);
560     } else {
561         UNREACHABLE();
562     }
563 }
564 
/// Sets the mark bit on a single object via the GC marker.
template <class LanguageConfig>
void GenGC<LanguageConfig>::MarkObject(ObjectHeader *object)
{
    marker_.Mark(object);
}
570 
/// Clears the mark bit on a single object via the GC marker.
template <class LanguageConfig>
void GenGC<LanguageConfig>::UnMarkObject(ObjectHeader *objectHeader)
{
    LOG_DEBUG_GC << "Set unmark for GC " << GetDebugInfoAboutObject(objectHeader);
    this->marker_.UnMark(objectHeader);
}
577 
/// Returns whether the object currently has its mark bit set.
template <class LanguageConfig>
bool GenGC<LanguageConfig>::IsMarked(const ObjectHeader *object) const
{
    return this->marker_.IsMarked(object);
}
583 
// NO_THREAD_SAFETY_ANALYSIS because clang thread safety analysis
/// Sweeps tenured space, reclaiming unmarked objects and returning empty pools.
/// @tparam CONCURRENT when true, mutators are resumed after GC_PHASE_SWEEP is
///         set (new tenured objects then start "marked" via InitGCBits) and a
///         final unmark pass runs after the phase is cleared.
/// Young objects are never reclaimed here: strings created in young space
/// during tenured GC must survive.
template <class LanguageConfig>
template <bool CONCURRENT>
NO_THREAD_SAFETY_ANALYSIS void GenGC<LanguageConfig>::Sweep()
{
    GCScope<TRACE_TIMING> gcScope(__FUNCTION__, this);
    ConcurrentScope concurrentScope(this, false);
    size_t freedObjectSize = 0U;
    size_t freedObjectCount = 0U;

    // NB! can't move block out of brace, we need to make sure GC_PHASE_SWEEP cleared
    {
        GCScopedPhase scopedPhase(this, GCPhase::GC_PHASE_SWEEP);
        // NOTE(dtrubenkov): make concurrent
        ASSERT(this->GetObjectAllocator()->GetYoungSpaceMemRanges().size() == 1);
        // new strings may be created in young space during tenured gc, we shouldn't collect them
        auto youngMemRange = this->GetObjectAllocator()->GetYoungSpaceMemRanges().at(0);
        this->GetPandaVm()->SweepVmRefs([this, &youngMemRange](ObjectHeader *object) {
            if (youngMemRange.IsAddressInRange(ToUintPtr(object))) {
                return ObjectStatus::ALIVE_OBJECT;
            }
            return this->marker_.MarkChecker(object);
        });
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (CONCURRENT) {
            concurrentScope.Start();  // enable concurrent after GC_PHASE_SWEEP has been set
        }

        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (CONCURRENT && LanguageConfig::MT_MODE != MT_MODE_SINGLE) {
            // Run monitor deflation again, to avoid object was reclaimed before monitor deflate.
            auto youngMr = this->GetObjectAllocator()->GetYoungSpaceMemRanges().at(0);
            this->GetPandaVm()->GetMonitorPool()->DeflateMonitorsWithCallBack([&youngMr, this](Monitor *monitor) {
                ObjectHeader *objectHeader = monitor->GetObject();
                return (!IsMarked(objectHeader)) && (!youngMr.IsAddressInRange(ToUintPtr(objectHeader)));
            });
        }

        // Reclaim dead tenured objects and account freed bytes/counts.
        this->GetObjectAllocator()->Collect(
            [this, &freedObjectSize, &freedObjectCount](ObjectHeader *object) {
                auto status = this->marker_.MarkChecker(object);
                if (status == ObjectStatus::DEAD_OBJECT) {
                    LOG_DEBUG_OBJECT_EVENTS << "DELETE OBJECT tenured: " << object;
                    freedObjectSize += GetAlignedObjectSize(GetObjectSize(object));
                    freedObjectCount++;
                }
                return status;
            },
            GCCollectMode::GC_ALL);
        // Return fully-freed pools to the OS and clear their card ranges.
        this->GetObjectAllocator()->VisitAndRemoveFreePools([this](void *mem, size_t size) {
            this->GetCardTable()->ClearCardRange(ToUintPtr(mem), ToUintPtr(mem) + size);
            PoolManager::GetMmapMemPool()->FreePool(mem, size);
        });
    }

    this->memStats_.RecordSizeFreedTenured(freedObjectSize);
    this->memStats_.RecordCountFreedTenured(freedObjectCount);

    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
    if constexpr (CONCURRENT) {
        // In concurrent sweep phase, the new created objects may being marked in InitGCBits,
        // so we need wait for that done, then we can safely unmark objects concurrent with mutator.
        ASSERT(this->GetGCPhase() != GCPhase::GC_PHASE_SWEEP);  // Make sure we are out of sweep scope
        this->GetObjectAllocator()->IterateOverTenuredObjects([this](ObjectHeader *obj) { this->marker_.UnMark(obj); });
    }
}
650 
651 template <class LanguageConfig>
SweepYoungVmRefs()652 void GenGC<LanguageConfig>::SweepYoungVmRefs()
653 {
654     GCScope<TRACE_TIMING_PHASE> scope(__FUNCTION__, this, GCPhase::GC_PHASE_SWEEP);
655     // new strings may be created in young space during tenured gc, we shouldn't collect them
656     // Sweep string table here to avoid dangling references
657     ASSERT(this->GetObjectAllocator()->GetYoungSpaceMemRanges().size() == 1);
658     // new strings may be created in young space during tenured gc, we shouldn't collect them
659     auto youngMemRange = this->GetObjectAllocator()->GetYoungSpaceMemRanges().at(0);
660     this->GetPandaVm()->SweepVmRefs([&youngMemRange](ObjectHeader *objectHeader) {
661         if (youngMemRange.IsAddressInRange(ToUintPtr(objectHeader))) {
662             return ObjectStatus::DEAD_OBJECT;
663         }
664         return ObjectStatus::ALIVE_OBJECT;
665     });
666 }
667 
668 template <class LanguageConfig>
InGCSweepRange(const ObjectHeader * obj) const669 bool GenGC<LanguageConfig>::InGCSweepRange(const ObjectHeader *obj) const
670 {
671     bool inYoungSpace = this->GetObjectAllocator()->IsObjectInYoungSpace(obj);
672     auto phase = this->GetGCPhase();
673     // Do young GC and the object is in the young space
674     if (phase == GCPhase::GC_PHASE_MARK_YOUNG && inYoungSpace) {
675         return true;
676     }
677 
678     // Do tenured GC and the object is in the tenured space
679     if (phase != GCPhase::GC_PHASE_MARK_YOUNG && !inYoungSpace) {
680         return true;
681     }
682 
683     return this->IsFullGC();
684 }
685 
686 template <class LanguageConfig>
IsPostponeGCSupported() const687 bool GenGC<LanguageConfig>::IsPostponeGCSupported() const
688 {
689     // Gen GC doesn't support GC postponing because
690     // we have to move young space objects
691     return false;
692 }
693 
694 template <class LanguageConfig>
HaveEnoughSpaceToMove() const695 bool GenGC<LanguageConfig>::HaveEnoughSpaceToMove() const
696 {
697     // hack for pools because we have 2 type of pools in tenures space, in bad cases objects can be moved to different
698     // spaces. And move 4M objects in bump-allocator to other allocator, may need more than 4M space in other allocator
699     // - so we need 3 empty pools.
700     // NOTE(xucheng) : remove the checker when we can do part young collection.
701     // The min num that can guarantee that we move all objects in young space.
702     constexpr size_t POOLS_NUM = 3;
703     return this->GetObjectAllocator()->HaveEnoughPoolsInObjectSpace(POOLS_NUM);
704 }
705 
// Explicitly instantiate GenGC for every supported language configuration.
TEMPLATE_CLASS_LANGUAGE_CONFIG(GenGC);

}  // namespace ark::mem
709