/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "mark_compact.h"

#include "base/logging.h"
#include "base/mutex-inl.h"
#include "base/timing_logger.h"
#include "gc/accounting/heap_bitmap-inl.h"
#include "gc/accounting/mod_union_table.h"
#include "gc/accounting/space_bitmap-inl.h"
#include "gc/heap.h"
#include "gc/reference_processor.h"
#include "gc/space/bump_pointer_space-inl.h"
#include "gc/space/large_object_space.h"
#include "gc/space/space-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "runtime.h"
#include "stack.h"
#include "thread-inl.h"
#include "thread_list.h"

namespace art {
namespace gc {
namespace collector {

void MarkCompact::BindBitmaps() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
  // Mark all of the spaces we never collect as immune.
  for (const auto& space : GetHeap()->GetContinuousSpaces()) {
    if (space->GetGcRetentionPolicy() == space::kGcRetentionPolicyNeverCollect ||
        space->GetGcRetentionPolicy() == space::kGcRetentionPolicyFullCollect) {
      immune_spaces_.AddSpace(space);
    }
  }
}

MarkCompact::MarkCompact(Heap* heap, const std::string& name_prefix)
    : GarbageCollector(heap, name_prefix + (name_prefix.empty() ? "" : " ") + "mark compact"),
      space_(nullptr), collector_name_(name_), updating_references_(false) {
}

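// Runs all phases of the collection. Marking and reclamation happen inside a scoped
// pause (this is a stop-the-world collector); verification and cleanup run after the
// pause is released.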
void MarkCompact::RunPhases() {
  Thread* self = Thread::Current();
  InitializePhase();
  CHECK(!Locks::mutator_lock_->IsExclusiveHeld(self));
  {
    ScopedPause pause(this);
    GetHeap()->PreGcVerificationPaused(this);
    GetHeap()->PrePauseRosAllocVerification(this);
    MarkingPhase();
    ReclaimPhase();
  }
  GetHeap()->PostGcVerification(this);
  FinishPhase();
}

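// Assign obj its post-compaction address (the current bump pointer) by installing a
// forwarding address in its lock word. A non-default lock word is saved first so it
// can be restored after the object has been moved.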
void MarkCompact::ForwardObject(mirror::Object* obj) {
  const size_t alloc_size = RoundUp(obj->SizeOf(), space::BumpPointerSpace::kAlignment);
  LockWord lock_word = obj->GetLockWord(false);
  // If we have a non-empty lock word, store it and restore it later.
  if (!LockWord::IsDefault(lock_word)) {
    // Set the bit in the bitmap so that we know to restore it later.
    objects_with_lockword_->Set(obj);
    lock_words_to_restore_.push_back(lock_word);
  }
  obj->SetLockWord(LockWord::FromForwardingAddress(reinterpret_cast<size_t>(bump_pointer_)),
                   false);
  bump_pointer_ += alloc_size;
  ++live_objects_in_space_;
}

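// Visitor which forwards each marked object it visits. The mark bitmap is walked in
// address order, so forwarding addresses are assigned in address order as well.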
class CalculateObjectForwardingAddressVisitor {
 public:
  explicit CalculateObjectForwardingAddressVisitor(MarkCompact* collector)
      : collector_(collector) {}
  void operator()(mirror::Object* obj) const REQUIRES(Locks::mutator_lock_,
                                                      Locks::heap_bitmap_lock_) {
    DCHECK_ALIGNED(obj, space::BumpPointerSpace::kAlignment);
    DCHECK(collector_->IsMarked(obj) != nullptr);
    collector_->ForwardObject(obj);
  }

 private:
  MarkCompact* const collector_;
};

void MarkCompact::CalculateObjectForwardingAddresses() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  // The bump pointer in the space where the next forwarding address will be.
  bump_pointer_ = reinterpret_cast<uint8_t*>(space_->Begin());
  // Visit all the marked objects in the bitmap.
  CalculateObjectForwardingAddressVisitor visitor(this);
  objects_before_forwarding_->VisitMarkedRange(reinterpret_cast<uintptr_t>(space_->Begin()),
                                               reinterpret_cast<uintptr_t>(space_->End()),
                                               visitor);
}

void MarkCompact::InitializePhase() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  mark_stack_ = heap_->GetMarkStack();
  DCHECK(mark_stack_ != nullptr);
  immune_spaces_.Reset();
  CHECK(space_->CanMoveObjects()) << "Attempting to compact non-movable space from " << *space_;
  // TODO: I don't think we should need the heap bitmap lock to get the mark bitmap.
  ReaderMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
  mark_bitmap_ = heap_->GetMarkBitmap();
  live_objects_in_space_ = 0;
}

void MarkCompact::ProcessReferences(Thread* self) {
  WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
  heap_->GetReferenceProcessor()->ProcessReferences(
      false, GetTimings(), GetCurrentIteration()->GetClearSoftReferences(), this);
}

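// Slow-path visitor invoked when marking falls through to the large object path.
// Large objects are always page aligned, so a misaligned address indicates a bad
// pointer; dump the spaces and abort.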
class BitmapSetSlowPathVisitor {
 public:
  void operator()(const mirror::Object* obj) const SHARED_REQUIRES(Locks::mutator_lock_) {
    // Marking a large object, make sure it's aligned as a sanity check.
    if (!IsAligned<kPageSize>(obj)) {
      Runtime::Current()->GetHeap()->DumpSpaces(LOG(ERROR));
      LOG(FATAL) << obj;
    }
  }
};

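// Mark obj if it is not already marked. Objects in the space being compacted are
// tracked in objects_before_forwarding_; everything else uses the regular mark
// bitmap. Newly marked objects are pushed on the mark stack for later scanning.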
inline mirror::Object* MarkCompact::MarkObject(mirror::Object* obj) {
  if (obj == nullptr) {
    return nullptr;
  }
  if (kUseBakerOrBrooksReadBarrier) {
    // Verify all the objects have the correct forward pointer installed.
    obj->AssertReadBarrierPointer();
  }
  if (!immune_spaces_.IsInImmuneRegion(obj)) {
    if (objects_before_forwarding_->HasAddress(obj)) {
      if (!objects_before_forwarding_->Set(obj)) {
        MarkStackPush(obj);  // This object was not previously marked.
      }
    } else {
      DCHECK(!space_->HasAddress(obj));
      BitmapSetSlowPathVisitor visitor;
      if (!mark_bitmap_->Set(obj, visitor)) {
        // This object was not previously marked.
        MarkStackPush(obj);
      }
    }
  }
  return obj;
}

void MarkCompact::MarkingPhase() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  Thread* self = Thread::Current();
  // Bitmap which describes which objects we have to move.
  objects_before_forwarding_.reset(accounting::ContinuousSpaceBitmap::Create(
      "objects before forwarding", space_->Begin(), space_->Size()));
  // Bitmap which describes which lock words we need to restore.
  objects_with_lockword_.reset(accounting::ContinuousSpaceBitmap::Create(
      "objects with lock words", space_->Begin(), space_->Size()));
  CHECK(Locks::mutator_lock_->IsExclusiveHeld(self));
  // Assume the cleared space is already empty.
  BindBitmaps();
  t.NewTiming("ProcessCards");
  // Process dirty cards and add dirty cards to mod-union tables.
  heap_->ProcessCards(GetTimings(), false, false, true);
  // Clear the whole card table since we cannot get any additional dirty cards during the
  // paused GC. This saves memory but only works for pause-the-world collectors.
  t.NewTiming("ClearCardTable");
  heap_->GetCardTable()->ClearCardTable();
  // Need to do this before the checkpoint since we don't want any threads to add references to
  // the live stack during the recursive mark.
  if (kUseThreadLocalAllocationStack) {
    t.NewTiming("RevokeAllThreadLocalAllocationStacks");
    heap_->RevokeAllThreadLocalAllocationStacks(self);
  }
  t.NewTiming("SwapStacks");
  heap_->SwapStacks();
  {
    WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
    MarkRoots();
    // Mark roots of immune spaces.
    UpdateAndMarkModUnion();
    // Recursively mark remaining objects.
    MarkReachableObjects();
  }
  ProcessReferences(self);
  {
    ReaderMutexLock mu(self, *Locks::heap_bitmap_lock_);
    SweepSystemWeaks();
  }
  Runtime::Current()->GetClassLinker()->CleanupClassLoaders();
  // Revoke buffers before measuring how many objects were moved since the TLABs need to be
  // revoked before they are properly counted.
  RevokeAllThreadLocalBuffers();
  // Disabled due to an issue where we have objects in the bump pointer space which reference
  // dead objects.
  // heap_->PreSweepingGcVerification(this);
}

void MarkCompact::UpdateAndMarkModUnion() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  for (auto& space : heap_->GetContinuousSpaces()) {
    // If the space is immune then we need to mark the references to other spaces.
    if (immune_spaces_.ContainsSpace(space)) {
      accounting::ModUnionTable* table = heap_->FindModUnionTableFromSpace(space);
      if (table != nullptr) {
        // TODO: Improve naming.
        TimingLogger::ScopedTiming t2(
            space->IsZygoteSpace() ? "UpdateAndMarkZygoteModUnionTable" :
                                     "UpdateAndMarkImageModUnionTable", GetTimings());
        table->UpdateAndMarkReferences(this);
      }
    }
  }
}

void MarkCompact::MarkReachableObjects() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  accounting::ObjectStack* live_stack = heap_->GetLiveStack();
  {
    TimingLogger::ScopedTiming t2("MarkAllocStackAsLive", GetTimings());
    heap_->MarkAllocStackAsLive(live_stack);
  }
  live_stack->Reset();
  // Recursively process the mark stack.
  ProcessMarkStack();
}

void MarkCompact::ReclaimPhase() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
  // Reclaim unmarked objects.
  Sweep(false);
  // Swap the live and mark bitmaps for each space which we modified. This is an optimization
  // that enables us to not clear live bits inside of the sweep. Only swaps unbound bitmaps.
  SwapBitmaps();
  GetHeap()->UnBindBitmaps();  // Unbind the live and mark bitmaps.
  Compact();
}

void MarkCompact::ResizeMarkStack(size_t new_size) {
  std::vector<StackReference<mirror::Object>> temp(mark_stack_->Begin(), mark_stack_->End());
  CHECK_LE(mark_stack_->Size(), new_size);
  mark_stack_->Resize(new_size);
  for (auto& obj : temp) {
    mark_stack_->PushBack(obj.AsMirrorPtr());
  }
}

inline void MarkCompact::MarkStackPush(mirror::Object* obj) {
  if (UNLIKELY(mark_stack_->Size() >= mark_stack_->Capacity())) {
    ResizeMarkStack(mark_stack_->Capacity() * 2);
  }
  // The object must be pushed onto the mark stack.
  mark_stack_->PushBack(obj);
}

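// During the reference-updating pass this rewrites the reference to the referent's
// forwarding address; during marking it marks the referent instead.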
void MarkCompact::MarkHeapReference(mirror::HeapReference<mirror::Object>* obj_ptr) {
  if (updating_references_) {
    UpdateHeapReference(obj_ptr);
  } else {
    MarkObject(obj_ptr->AsMirrorPtr());
  }
}

void MarkCompact::VisitRoots(
    mirror::Object*** roots, size_t count, const RootInfo& info ATTRIBUTE_UNUSED) {
  for (size_t i = 0; i < count; ++i) {
    MarkObject(*roots[i]);
  }
}

void MarkCompact::VisitRoots(
    mirror::CompressedReference<mirror::Object>** roots, size_t count,
    const RootInfo& info ATTRIBUTE_UNUSED) {
  for (size_t i = 0; i < count; ++i) {
    MarkObject(roots[i]->AsMirrorPtr());
  }
}

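// Root visitor which rewrites every visited root to its forwarding address.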
class UpdateRootVisitor : public RootVisitor {
 public:
  explicit UpdateRootVisitor(MarkCompact* collector) : collector_(collector) {
  }

  void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info ATTRIBUTE_UNUSED)
      OVERRIDE REQUIRES(Locks::mutator_lock_)
      SHARED_REQUIRES(Locks::heap_bitmap_lock_) {
    for (size_t i = 0; i < count; ++i) {
      mirror::Object* obj = *roots[i];
      mirror::Object* new_obj = collector_->GetMarkedForwardAddress(obj);
      if (obj != new_obj) {
        *roots[i] = new_obj;
        DCHECK(new_obj != nullptr);
      }
    }
  }

  void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
                  const RootInfo& info ATTRIBUTE_UNUSED)
      OVERRIDE REQUIRES(Locks::mutator_lock_)
      SHARED_REQUIRES(Locks::heap_bitmap_lock_) {
    for (size_t i = 0; i < count; ++i) {
      mirror::Object* obj = roots[i]->AsMirrorPtr();
      mirror::Object* new_obj = collector_->GetMarkedForwardAddress(obj);
      if (obj != new_obj) {
        roots[i]->Assign(new_obj);
        DCHECK(new_obj != nullptr);
      }
    }
  }

 private:
  MarkCompact* const collector_;
};

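// Visitor which updates all of the reference fields of a single object.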
class UpdateObjectReferencesVisitor {
 public:
  explicit UpdateObjectReferencesVisitor(MarkCompact* collector) : collector_(collector) {
  }
  void operator()(mirror::Object* obj) const SHARED_REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(Locks::mutator_lock_) ALWAYS_INLINE {
    collector_->UpdateObjectReferences(obj);
  }

 private:
  MarkCompact* const collector_;
};

void MarkCompact::UpdateReferences() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  updating_references_ = true;
  Runtime* runtime = Runtime::Current();
  // Update roots.
  UpdateRootVisitor update_root_visitor(this);
  runtime->VisitRoots(&update_root_visitor);
  // Update object references in mod union tables and spaces.
  for (const auto& space : heap_->GetContinuousSpaces()) {
    // If the space is immune then we need to mark the references to other spaces.
    accounting::ModUnionTable* table = heap_->FindModUnionTableFromSpace(space);
    if (table != nullptr) {
      // TODO: Improve naming.
      TimingLogger::ScopedTiming t2(
          space->IsZygoteSpace() ? "UpdateZygoteModUnionTableReferences" :
                                   "UpdateImageModUnionTableReferences",
                                   GetTimings());
      table->UpdateAndMarkReferences(this);
    } else {
      // No mod union table, so we need to scan the space using a bitmap visit.
      accounting::ContinuousSpaceBitmap* bitmap = space->GetLiveBitmap();
      if (bitmap != nullptr) {
        UpdateObjectReferencesVisitor visitor(this);
        bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(space->Begin()),
                                 reinterpret_cast<uintptr_t>(space->End()),
                                 visitor);
      }
    }
  }
  CHECK(!kMovingClasses)
      << "Didn't update large object classes since they are assumed to not move.";
  // Update the system weaks; these should already have been swept.
  runtime->SweepSystemWeaks(this);
  // Update the objects in the bump pointer space last; they are not covered by a live bitmap
  // in the loop above.
  UpdateObjectReferencesVisitor visitor(this);
  objects_before_forwarding_->VisitMarkedRange(reinterpret_cast<uintptr_t>(space_->Begin()),
                                               reinterpret_cast<uintptr_t>(space_->End()),
                                               visitor);
  // Update the reference processor cleared list.
  heap_->GetReferenceProcessor()->UpdateRoots(this);
  updating_references_ = false;
}

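// Compaction proper: assign forwarding addresses, rewrite every reference to use
// them, slide the objects down, then give the freed tail back to the space.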
void MarkCompact::Compact() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  CalculateObjectForwardingAddresses();
  UpdateReferences();
  MoveObjects();
  // Account for what the compaction freed in the space.
  int64_t objects_freed = space_->GetObjectsAllocated() - live_objects_in_space_;
  int64_t bytes_freed = reinterpret_cast<int64_t>(space_->End()) -
      reinterpret_cast<int64_t>(bump_pointer_);
  t.NewTiming("RecordFree");
  space_->RecordFree(objects_freed, bytes_freed);
  RecordFree(ObjectBytePair(objects_freed, bytes_freed));
  space_->SetEnd(bump_pointer_);
  // Need to zero out the memory we freed. TODO: Use madvise for pages.
  memset(bump_pointer_, 0, bytes_freed);
}

// Marks all objects in the root set.
void MarkCompact::MarkRoots() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  Runtime::Current()->VisitRoots(this);
}

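// Rewrite a heap reference to the referent's forwarding address if the referent moved.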
inline void MarkCompact::UpdateHeapReference(mirror::HeapReference<mirror::Object>* reference) {
  mirror::Object* obj = reference->AsMirrorPtr();
  if (obj != nullptr) {
    mirror::Object* new_obj = GetMarkedForwardAddress(obj);
    if (obj != new_obj) {
      DCHECK(new_obj != nullptr);
      reference->Assign(new_obj);
    }
  }
}

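// Field visitor used by UpdateObjectReferences: rewrites instance fields, Reference
// referents, and GC roots embedded in the object to their forwarding addresses.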
class UpdateReferenceVisitor {
 public:
  explicit UpdateReferenceVisitor(MarkCompact* collector) : collector_(collector) {
  }

  void operator()(mirror::Object* obj, MemberOffset offset, bool /*is_static*/) const
      ALWAYS_INLINE REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    collector_->UpdateHeapReference(obj->GetFieldObjectReferenceAddr<kVerifyNone>(offset));
  }

  void operator()(mirror::Class* /*klass*/, mirror::Reference* ref) const
      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    collector_->UpdateHeapReference(
        ref->GetFieldObjectReferenceAddr<kVerifyNone>(mirror::Reference::ReferentOffset()));
  }

  // TODO: Remove NO_THREAD_SAFETY_ANALYSIS when clang better understands visitors.
  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
      NO_THREAD_SAFETY_ANALYSIS {
    if (!root->IsNull()) {
      VisitRoot(root);
    }
  }

  void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
      NO_THREAD_SAFETY_ANALYSIS {
    root->Assign(collector_->GetMarkedForwardAddress(root->AsMirrorPtr()));
  }

 private:
  MarkCompact* const collector_;
};

void MarkCompact::UpdateObjectReferences(mirror::Object* obj) {
  UpdateReferenceVisitor visitor(this);
  obj->VisitReferences(visitor, visitor);
}

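// Returns the post-compaction address of obj. Objects in the space being compacted
// read their forwarding address out of the lock word; objects elsewhere do not move.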
inline mirror::Object* MarkCompact::GetMarkedForwardAddress(mirror::Object* obj) {
  DCHECK(obj != nullptr);
  if (objects_before_forwarding_->HasAddress(obj)) {
    DCHECK(objects_before_forwarding_->Test(obj));
    mirror::Object* ret =
        reinterpret_cast<mirror::Object*>(obj->GetLockWord(false).ForwardingAddress());
    DCHECK(ret != nullptr);
    return ret;
  }
  DCHECK(!space_->HasAddress(obj));
  return obj;
}

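// Returns the object if it is marked (its forwarding address while references are
// being updated), or null if it is not marked.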
mirror::Object* MarkCompact::IsMarked(mirror::Object* object) {
  if (immune_spaces_.IsInImmuneRegion(object)) {
    return object;
  }
  if (updating_references_) {
    return GetMarkedForwardAddress(object);
  }
  if (objects_before_forwarding_->HasAddress(object)) {
    return objects_before_forwarding_->Test(object) ? object : nullptr;
  }
  return mark_bitmap_->Test(object) ? object : nullptr;
}

bool MarkCompact::IsMarkedHeapReference(mirror::HeapReference<mirror::Object>* ref_ptr) {
  // Side effect free since we call this before ever moving objects.
  return IsMarked(ref_ptr->AsMirrorPtr()) != nullptr;
}

void MarkCompact::SweepSystemWeaks() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  Runtime::Current()->SweepSystemWeaks(this);
}

bool MarkCompact::ShouldSweepSpace(space::ContinuousSpace* space) const {
  return space != space_ && !immune_spaces_.ContainsSpace(space);
}

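// Visitor which copies each marked object to its forwarding address.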
class MoveObjectVisitor {
 public:
  explicit MoveObjectVisitor(MarkCompact* collector) : collector_(collector) {
  }
  void operator()(mirror::Object* obj) const SHARED_REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(Locks::mutator_lock_) ALWAYS_INLINE {
    collector_->MoveObject(obj, obj->SizeOf());
  }

 private:
  MarkCompact* const collector_;
};

void MarkCompact::MoveObject(mirror::Object* obj, size_t len) {
  // Look at the forwarding address stored in the lock word to know where to copy.
  DCHECK(space_->HasAddress(obj)) << obj;
  uintptr_t dest_addr = obj->GetLockWord(false).ForwardingAddress();
  mirror::Object* dest_obj = reinterpret_cast<mirror::Object*>(dest_addr);
  DCHECK(space_->HasAddress(dest_obj)) << dest_obj;
  // Use memmove since there may be overlap.
  memmove(reinterpret_cast<void*>(dest_addr), reinterpret_cast<const void*>(obj), len);
  // Restore the saved lock word if needed.
  LockWord lock_word = LockWord::Default();
  if (UNLIKELY(objects_with_lockword_->Test(obj))) {
    lock_word = lock_words_to_restore_.front();
    lock_words_to_restore_.pop_front();
  }
  dest_obj->SetLockWord(lock_word, false);
}

void MarkCompact::MoveObjects() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  // Move the objects in the before forwarding bitmap.
  MoveObjectVisitor visitor(this);
  objects_before_forwarding_->VisitMarkedRange(reinterpret_cast<uintptr_t>(space_->Begin()),
                                               reinterpret_cast<uintptr_t>(space_->End()),
                                               visitor);
  CHECK(lock_words_to_restore_.empty());
}

void MarkCompact::Sweep(bool swap_bitmaps) {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  DCHECK(mark_stack_->IsEmpty());
  for (const auto& space : GetHeap()->GetContinuousSpaces()) {
    if (space->IsContinuousMemMapAllocSpace()) {
      space::ContinuousMemMapAllocSpace* alloc_space = space->AsContinuousMemMapAllocSpace();
      if (!ShouldSweepSpace(alloc_space)) {
        continue;
      }
      TimingLogger::ScopedTiming t2(
          alloc_space->IsZygoteSpace() ? "SweepZygoteSpace" : "SweepAllocSpace", GetTimings());
      RecordFree(alloc_space->Sweep(swap_bitmaps));
    }
  }
  SweepLargeObjects(swap_bitmaps);
}

void MarkCompact::SweepLargeObjects(bool swap_bitmaps) {
  space::LargeObjectSpace* los = heap_->GetLargeObjectsSpace();
  if (los != nullptr) {
    TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
    RecordFreeLOS(los->Sweep(swap_bitmaps));
  }
}

// Process the "referent" field in a java.lang.ref.Reference. If the referent has not yet been
// marked, put it on the appropriate list in the heap for later processing.
void MarkCompact::DelayReferenceReferent(mirror::Class* klass, mirror::Reference* reference) {
  heap_->GetReferenceProcessor()->DelayReferenceReferent(klass, reference, this);
}

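// Object visitor used during the recursive mark: marks every referent and queues
// java.lang.ref.Reference instances for delayed processing.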
class MarkCompactMarkObjectVisitor {
 public:
  explicit MarkCompactMarkObjectVisitor(MarkCompact* collector) : collector_(collector) {
  }

  void operator()(mirror::Object* obj, MemberOffset offset, bool /*is_static*/) const ALWAYS_INLINE
      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    // Object was already verified when we scanned it.
    collector_->MarkObject(obj->GetFieldObject<mirror::Object, kVerifyNone>(offset));
  }

  void operator()(mirror::Class* klass, mirror::Reference* ref) const
      SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(Locks::heap_bitmap_lock_) {
    collector_->DelayReferenceReferent(klass, ref);
  }

  // TODO: Remove NO_THREAD_SAFETY_ANALYSIS when clang better understands visitors.
  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
      NO_THREAD_SAFETY_ANALYSIS {
    if (!root->IsNull()) {
      VisitRoot(root);
    }
  }

  void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
      NO_THREAD_SAFETY_ANALYSIS {
    collector_->MarkObject(root->AsMirrorPtr());
  }

 private:
  MarkCompact* const collector_;
};

// Visit all of the references of an object and mark them.
void MarkCompact::ScanObject(mirror::Object* obj) {
  MarkCompactMarkObjectVisitor visitor(this);
  obj->VisitReferences(visitor, visitor);
}

// Scan anything that's on the mark stack.
void MarkCompact::ProcessMarkStack() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  while (!mark_stack_->IsEmpty()) {
    mirror::Object* obj = mark_stack_->PopBack();
    DCHECK(obj != nullptr);
    ScanObject(obj);
  }
}

void MarkCompact::SetSpace(space::BumpPointerSpace* space) {
  DCHECK(space != nullptr);
  space_ = space;
}

void MarkCompact::FinishPhase() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  space_ = nullptr;
  CHECK(mark_stack_->IsEmpty());
  mark_stack_->Reset();
  // Clear all of the spaces' mark bitmaps.
  WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
  heap_->ClearMarkedObjects();
  // Release our bitmaps.
  objects_before_forwarding_.reset(nullptr);
  objects_with_lockword_.reset(nullptr);
}

void MarkCompact::RevokeAllThreadLocalBuffers() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  GetHeap()->RevokeAllThreadLocalBuffers();
}

}  // namespace collector
}  // namespace gc
}  // namespace art