Searched refs:mark_compact_ (Results 1 – 3 of 3) sorted by relevance
4278:   mark_compact_(mark_compact), self_(self) {}    in ThreadRootsVisitor()
4290:   if (mark_compact_->MarkObjectNonNullNoPush</*kParallel*/true>(obj)) {    in VisitRoots()
4302:   if (mark_compact_->MarkObjectNonNullNoPush</*kParallel*/true>(obj)) {    in VisitRoots()
4314:   MutexLock mu(self_, mark_compact_->lock_);    in Flush()
4318:   while (!mark_compact_->mark_stack_->BumpBack(idx_, &start, &end)) {    in Flush()
4319:   mark_compact_->ExpandMarkStack();    in Flush()
4338:   MarkCompact* const mark_compact_;    member in art::gc::collector::MarkCompact::ThreadRootsVisitor
4344:   explicit CheckpointMarkThreadRoots(MarkCompact* mark_compact) : mark_compact_(mark_compact) {}    in CheckpointMarkThreadRoots()
4356:   ThreadRootsVisitor</*kBufferSize*/ 20> visitor(mark_compact_, self);    in Run()
4364:   mark_compact_->GetBarrier().Pass(self);    in Run()
[all …]
842:    DCHECK(!gUseUserfaultfd || mark_compact_ != nullptr);    in MarkCompactCollector()
843:    return mark_compact_;    in MarkCompactCollector()
846:    bool IsPerformingUffdCompaction() { return gUseUserfaultfd && mark_compact_->IsCompacting(); }    in IsPerformingUffdCompaction()
1641:   collector::MarkCompact* mark_compact_;    variable
799:    mark_compact_ = new collector::MarkCompact(this);    in Heap()
800:    garbage_collectors_.push_back(mark_compact_);    in Heap()
2843:   collector = mark_compact_;    in CollectGarbageInternal()
3896:   DCHECK_NE(mark_compact_, nullptr);    in ClampGrowthLimit()
3898:   mark_compact_->ClampGrowthLimit(capacity_);    in ClampGrowthLimit()
4745:   DCHECK_NE(mark_compact_, nullptr);    in PostForkChildAction()
4746:   mark_compact_->CreateUserfaultfd(/*post_fork*/true);    in PostForkChildAction()