/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "mark_compact.h"

#include "base/logging.h"
#include "base/mutex-inl.h"
#include "base/timing_logger.h"
#include "gc/accounting/heap_bitmap-inl.h"
#include "gc/accounting/mod_union_table.h"
#include "gc/accounting/remembered_set.h"
#include "gc/accounting/space_bitmap-inl.h"
#include "gc/heap.h"
#include "gc/reference_processor.h"
#include "gc/space/bump_pointer_space.h"
#include "gc/space/bump_pointer_space-inl.h"
#include "gc/space/image_space.h"
#include "gc/space/large_object_space.h"
#include "gc/space/space-inl.h"
#include "indirect_reference_table.h"
#include "intern_table.h"
#include "jni_internal.h"
#include "mark_sweep-inl.h"
#include "monitor.h"
#include "mirror/art_field.h"
#include "mirror/art_field-inl.h"
#include "mirror/class-inl.h"
#include "mirror/class_loader.h"
#include "mirror/dex_cache.h"
#include "mirror/reference-inl.h"
#include "mirror/object-inl.h"
#include "mirror/object_array.h"
#include "mirror/object_array-inl.h"
#include "runtime.h"
#include "stack.h"
#include "thread-inl.h"
#include "thread_list.h"

using ::art::mirror::Object;

namespace art {
namespace gc {
namespace collector {

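// Add the spaces this collector never collects to the immune region so their objects are
// treated as live without being moved or swept.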
void MarkCompact::BindBitmaps() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
  // Mark all of the spaces we never collect as immune.
  for (const auto& space : GetHeap()->GetContinuousSpaces()) {
    if (space->GetGcRetentionPolicy() == space::kGcRetentionPolicyNeverCollect ||
        space->GetGcRetentionPolicy() == space::kGcRetentionPolicyFullCollect) {
      CHECK(immune_region_.AddContinuousSpace(space)) << "Failed to add space " << *space;
    }
  }
}

MarkCompact::MarkCompact(Heap* heap, const std::string& name_prefix)
    : GarbageCollector(heap, name_prefix + (name_prefix.empty() ? "" : " ") + "mark compact"),
      space_(nullptr), collector_name_(name_) {
}

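// Top-level driver: marking and reclamation both run inside a single pause (the ScopedPause
// below), while initialization and finishing run with mutators still live.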
void MarkCompact::RunPhases() {
  Thread* self = Thread::Current();
  InitializePhase();
  CHECK(!Locks::mutator_lock_->IsExclusiveHeld(self));
  {
    ScopedPause pause(this);
    GetHeap()->PreGcVerificationPaused(this);
    GetHeap()->PrePauseRosAllocVerification(this);
    MarkingPhase();
    ReclaimPhase();
  }
  GetHeap()->PostGcVerification(this);
  FinishPhase();
}

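// Assign |obj| the next forwarding address in the compacted space by encoding it in the
// object's lock word. A non-empty lock word (e.g. a thin lock or an installed hash code) is
// saved so that MoveObject() can restore it after the copy.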
void MarkCompact::ForwardObject(mirror::Object* obj) {
  const size_t alloc_size = RoundUp(obj->SizeOf(), space::BumpPointerSpace::kAlignment);
  LockWord lock_word = obj->GetLockWord(false);
  // If we have a non-empty lock word, store it so we can restore it later.
  if (lock_word.GetValue() != LockWord().GetValue()) {
    // Set the bit in the bitmap so that we know to restore it later.
    objects_with_lockword_->Set(obj);
    lock_words_to_restore_.push_back(lock_word);
  }
  obj->SetLockWord(LockWord::FromForwardingAddress(reinterpret_cast<size_t>(bump_pointer_)),
                   false);
  bump_pointer_ += alloc_size;
  ++live_objects_in_space_;
}

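// Visits the marked objects in increasing address order and forwards each one, which packs
// the live objects toward the start of the space.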
class CalculateObjectForwardingAddressVisitor {
 public:
  explicit CalculateObjectForwardingAddressVisitor(MarkCompact* collector)
      : collector_(collector) {}
  void operator()(mirror::Object* obj) const EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_,
                                                                      Locks::heap_bitmap_lock_) {
    DCHECK_ALIGNED(obj, space::BumpPointerSpace::kAlignment);
    DCHECK(collector_->IsMarked(obj));
    collector_->ForwardObject(obj);
  }

 private:
  MarkCompact* const collector_;
};

void MarkCompact::CalculateObjectForwardingAddresses() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  // The bump pointer tracks where in the space the next forwarding address will be assigned.
  bump_pointer_ = reinterpret_cast<byte*>(space_->Begin());
  // Visit all the marked objects in the bitmap.
  CalculateObjectForwardingAddressVisitor visitor(this);
  objects_before_forwarding_->VisitMarkedRange(reinterpret_cast<uintptr_t>(space_->Begin()),
                                               reinterpret_cast<uintptr_t>(space_->End()),
                                               visitor);
}

void MarkCompact::InitializePhase() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  mark_stack_ = heap_->GetMarkStack();
  DCHECK(mark_stack_ != nullptr);
  immune_region_.Reset();
  CHECK(space_->CanMoveObjects()) << "Attempting to compact non-movable space " << *space_;
  // TODO: I don't think we should need the heap bitmap lock to get the mark bitmap.
  ReaderMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
  mark_bitmap_ = heap_->GetMarkBitmap();
  live_objects_in_space_ = 0;
}

void MarkCompact::ProcessReferences(Thread* self) {
  WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
  heap_->GetReferenceProcessor()->ProcessReferences(
      false, GetTimings(), GetCurrentIteration()->GetClearSoftReferences(),
      &HeapReferenceMarkedCallback, &MarkObjectCallback, &ProcessMarkStackCallback, this);
}

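// Slow path taken when marking an object that is not covered by a continuous-space bitmap and
// is therefore expected to be a large object: large objects must be page aligned, so a
// misaligned pointer indicates a corrupted reference and we dump the spaces and abort.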
class BitmapSetSlowPathVisitor {
 public:
  void operator()(const mirror::Object* obj) const {
    // Marking a large object, make sure it's aligned as a sanity check.
    if (!IsAligned<kPageSize>(obj)) {
      Runtime::Current()->GetHeap()->DumpSpaces(LOG(ERROR));
      LOG(FATAL) << obj;
    }
  }
};

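// Mark |obj| and push it on the mark stack the first time it is marked. Objects in the moving
// space are tracked in objects_before_forwarding_; everything else uses the regular mark
// bitmap.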
inline void MarkCompact::MarkObject(mirror::Object* obj) {
  if (obj == nullptr) {
    return;
  }
  if (kUseBakerOrBrooksReadBarrier) {
    // Verify all the objects have the correct forward pointer installed.
    obj->AssertReadBarrierPointer();
  }
  if (immune_region_.ContainsObject(obj)) {
    return;
  }
  if (objects_before_forwarding_->HasAddress(obj)) {
    if (!objects_before_forwarding_->Set(obj)) {
      MarkStackPush(obj);  // This object was not previously marked.
    }
  } else {
    DCHECK(!space_->HasAddress(obj));
    BitmapSetSlowPathVisitor visitor;
    if (!mark_bitmap_->Set(obj, visitor)) {
      // This object was not previously marked.
      MarkStackPush(obj);
    }
  }
}

void MarkCompact::MarkingPhase() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  Thread* self = Thread::Current();
  // Bitmap which describes which objects we have to move.
  objects_before_forwarding_.reset(accounting::ContinuousSpaceBitmap::Create(
      "objects before forwarding", space_->Begin(), space_->Size()));
  // Bitmap which describes which lock words we need to restore.
  objects_with_lockword_.reset(accounting::ContinuousSpaceBitmap::Create(
      "objects with lock words", space_->Begin(), space_->Size()));
  CHECK(Locks::mutator_lock_->IsExclusiveHeld(self));
  // Assume the cleared space is already empty.
  BindBitmaps();
  t.NewTiming("ProcessCards");
  // Process dirty cards and add dirty cards to mod-union tables.
  heap_->ProcessCards(GetTimings(), false);
  // Clear the whole card table since we cannot get any additional dirty cards during the
  // paused GC. This saves memory but only works for pause-the-world collectors.
  t.NewTiming("ClearCardTable");
  heap_->GetCardTable()->ClearCardTable();
  // Need to do this before the checkpoint since we don't want any threads to add references to
  // the live stack during the recursive mark.
  if (kUseThreadLocalAllocationStack) {
    t.NewTiming("RevokeAllThreadLocalAllocationStacks");
    heap_->RevokeAllThreadLocalAllocationStacks(self);
  }
  t.NewTiming("SwapStacks");
  heap_->SwapStacks(self);
  {
    WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
    MarkRoots();
    // Mark roots of immune spaces.
    UpdateAndMarkModUnion();
    // Recursively mark remaining objects.
    MarkReachableObjects();
  }
  ProcessReferences(self);
  {
    ReaderMutexLock mu(self, *Locks::heap_bitmap_lock_);
    SweepSystemWeaks();
  }
  // Revoke buffers before measuring how many objects were moved since the TLABs need to be
  // revoked before they are properly counted.
  RevokeAllThreadLocalBuffers();
  // Disabled due to an issue where we have objects in the bump pointer space which reference
  // dead objects.
  // heap_->PreSweepingGcVerification(this);
}

void MarkCompact::UpdateAndMarkModUnion() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  for (auto& space : heap_->GetContinuousSpaces()) {
    // If the space is immune then we need to mark the references to other spaces.
    if (immune_region_.ContainsSpace(space)) {
      accounting::ModUnionTable* table = heap_->FindModUnionTableFromSpace(space);
      if (table != nullptr) {
        // TODO: Improve naming.
        TimingLogger::ScopedTiming t(
            space->IsZygoteSpace() ? "UpdateAndMarkZygoteModUnionTable" :
                                     "UpdateAndMarkImageModUnionTable",
            GetTimings());
        table->UpdateAndMarkReferences(MarkHeapReferenceCallback, this);
      }
    }
  }
}

void MarkCompact::MarkReachableObjects() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  accounting::ObjectStack* live_stack = heap_->GetLiveStack();
  {
    TimingLogger::ScopedTiming t2("MarkAllocStackAsLive", GetTimings());
    heap_->MarkAllocStackAsLive(live_stack);
  }
  live_stack->Reset();
  // Recursively process the mark stack.
  ProcessMarkStack();
}

void MarkCompact::ReclaimPhase() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
  // Reclaim unmarked objects.
  Sweep(false);
  // Swap the live and mark bitmaps for each space that we modified. This is an optimization
  // that enables us to not clear live bits inside of the sweep. Only swaps unbound bitmaps.
  SwapBitmaps();
  GetHeap()->UnBindBitmaps();  // Unbind the live and mark bitmaps.
  Compact();
}

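// Grow the mark stack, preserving its contents. Resize() may reallocate the backing store,
// so the entries are copied out first and pushed back afterwards.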
void MarkCompact::ResizeMarkStack(size_t new_size) {
  std::vector<Object*> temp(mark_stack_->Begin(), mark_stack_->End());
  CHECK_LE(mark_stack_->Size(), new_size);
  mark_stack_->Resize(new_size);
  for (const auto& obj : temp) {
    mark_stack_->PushBack(obj);
  }
}

inline void MarkCompact::MarkStackPush(Object* obj) {
  if (UNLIKELY(mark_stack_->Size() >= mark_stack_->Capacity())) {
    ResizeMarkStack(mark_stack_->Capacity() * 2);
  }
  // The object must be pushed onto the mark stack.
  mark_stack_->PushBack(obj);
}

void MarkCompact::ProcessMarkStackCallback(void* arg) {
  reinterpret_cast<MarkCompact*>(arg)->ProcessMarkStack();
}

mirror::Object* MarkCompact::MarkObjectCallback(mirror::Object* root, void* arg) {
  reinterpret_cast<MarkCompact*>(arg)->MarkObject(root);
  return root;
}

void MarkCompact::MarkHeapReferenceCallback(mirror::HeapReference<mirror::Object>* obj_ptr,
                                            void* arg) {
  reinterpret_cast<MarkCompact*>(arg)->MarkObject(obj_ptr->AsMirrorPtr());
}

void MarkCompact::DelayReferenceReferentCallback(mirror::Class* klass, mirror::Reference* ref,
                                                 void* arg) {
  reinterpret_cast<MarkCompact*>(arg)->DelayReferenceReferent(klass, ref);
}

void MarkCompact::MarkRootCallback(Object** root, void* arg, uint32_t /*thread_id*/,
                                   RootType /*root_type*/) {
  reinterpret_cast<MarkCompact*>(arg)->MarkObject(*root);
}

void MarkCompact::UpdateRootCallback(Object** root, void* arg, uint32_t /*thread_id*/,
                                     RootType /*root_type*/) {
  mirror::Object* obj = *root;
  mirror::Object* new_obj = reinterpret_cast<MarkCompact*>(arg)->GetMarkedForwardAddress(obj);
  if (obj != new_obj) {
    *root = new_obj;
    DCHECK(new_obj != nullptr);
  }
}

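// Rewrites every reference held by a visited object to the referent's forwarding address.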
class UpdateObjectReferencesVisitor {
 public:
  explicit UpdateObjectReferencesVisitor(MarkCompact* collector) : collector_(collector) {
  }
  void operator()(mirror::Object* obj) const SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE {
    collector_->UpdateObjectReferences(obj);
  }

 private:
  MarkCompact* const collector_;
};

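// Second pass of the compaction: with forwarding addresses installed, rewrite every root and
// every heap reference to point at its new location before any object actually moves.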
void MarkCompact::UpdateReferences() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  Runtime* runtime = Runtime::Current();
  // Update roots.
  runtime->VisitRoots(UpdateRootCallback, this);
  // Update object references in mod union tables and spaces.
  for (const auto& space : heap_->GetContinuousSpaces()) {
    // If the space is immune then we need to mark the references to other spaces.
    accounting::ModUnionTable* table = heap_->FindModUnionTableFromSpace(space);
    if (table != nullptr) {
      // TODO: Improve naming.
      TimingLogger::ScopedTiming t(
          space->IsZygoteSpace() ? "UpdateZygoteModUnionTableReferences" :
                                   "UpdateImageModUnionTableReferences",
          GetTimings());
      table->UpdateAndMarkReferences(&UpdateHeapReferenceCallback, this);
    } else {
      // No mod union table, so we need to scan the space using a bitmap visit.
      accounting::ContinuousSpaceBitmap* bitmap = space->GetLiveBitmap();
      if (bitmap != nullptr) {
        UpdateObjectReferencesVisitor visitor(this);
        bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(space->Begin()),
                                 reinterpret_cast<uintptr_t>(space->End()),
                                 visitor);
      }
    }
  }
  CHECK(!kMovingClasses)
      << "Didn't update large object classes since they are assumed to not move.";
  // Update the system weaks; these should already have been swept.
  runtime->SweepSystemWeaks(&MarkedForwardingAddressCallback, this);
  // Update the objects in the bump pointer space last; these objects don't have a bitmap.
  UpdateObjectReferencesVisitor visitor(this);
  objects_before_forwarding_->VisitMarkedRange(reinterpret_cast<uintptr_t>(space_->Begin()),
                                               reinterpret_cast<uintptr_t>(space_->End()),
                                               visitor);
  // Update the reference processor cleared list.
  heap_->GetReferenceProcessor()->UpdateRoots(&MarkedForwardingAddressCallback, this);
}

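// Compaction proper: compute forwarding addresses, rewrite all references to use them, slide
// the objects down, then shrink the space and zero the freed tail.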
void MarkCompact::Compact() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  CalculateObjectForwardingAddresses();
  UpdateReferences();
  MoveObjects();
  // Record how much of the space the compaction freed.
  int64_t objects_freed = space_->GetObjectsAllocated() - live_objects_in_space_;
  int64_t bytes_freed = reinterpret_cast<int64_t>(space_->End()) -
      reinterpret_cast<int64_t>(bump_pointer_);
  t.NewTiming("RecordFree");
  space_->RecordFree(objects_freed, bytes_freed);
  RecordFree(ObjectBytePair(objects_freed, bytes_freed));
  space_->SetEnd(bump_pointer_);
  // Need to zero out the memory we freed. TODO: Use madvise for pages.
  memset(bump_pointer_, 0, bytes_freed);
}

// Marks all objects in the root set.
void MarkCompact::MarkRoots() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  Runtime::Current()->VisitRoots(MarkRootCallback, this);
}

mirror::Object* MarkCompact::MarkedForwardingAddressCallback(mirror::Object* obj, void* arg) {
  return reinterpret_cast<MarkCompact*>(arg)->GetMarkedForwardAddress(obj);
}

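// Rewrite a single heap reference in place if its target has moved.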
inline void MarkCompact::UpdateHeapReference(mirror::HeapReference<mirror::Object>* reference) {
  mirror::Object* obj = reference->AsMirrorPtr();
  if (obj != nullptr) {
    mirror::Object* new_obj = GetMarkedForwardAddress(obj);
    if (obj != new_obj) {
      DCHECK(new_obj != nullptr);
      reference->Assign(new_obj);
    }
  }
}

void MarkCompact::UpdateHeapReferenceCallback(mirror::HeapReference<mirror::Object>* reference,
                                              void* arg) {
  reinterpret_cast<MarkCompact*>(arg)->UpdateHeapReference(reference);
}

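// Visitor for the two kinds of references VisitReferences reports: ordinary instance and
// static fields, and the referent field of a java.lang.ref.Reference.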
class UpdateReferenceVisitor {
 public:
  explicit UpdateReferenceVisitor(MarkCompact* collector) : collector_(collector) {
  }

  void operator()(Object* obj, MemberOffset offset, bool /*is_static*/) const
      ALWAYS_INLINE EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    collector_->UpdateHeapReference(obj->GetFieldObjectReferenceAddr<kVerifyNone>(offset));
  }

  void operator()(mirror::Class* /*klass*/, mirror::Reference* ref) const
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    collector_->UpdateHeapReference(
        ref->GetFieldObjectReferenceAddr<kVerifyNone>(mirror::Reference::ReferentOffset()));
  }

 private:
  MarkCompact* const collector_;
};

void MarkCompact::UpdateObjectReferences(mirror::Object* obj) {
  UpdateReferenceVisitor visitor(this);
  obj->VisitReferences<kMovingClasses>(visitor, visitor);
}

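// Returns the post-compaction address of |obj|: its forwarding address if it is in the moving
// space, otherwise the object itself, since only the bump pointer space is compacted.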
inline mirror::Object* MarkCompact::GetMarkedForwardAddress(mirror::Object* obj) const {
  DCHECK(obj != nullptr);
  if (objects_before_forwarding_->HasAddress(obj)) {
    DCHECK(objects_before_forwarding_->Test(obj));
    mirror::Object* ret =
        reinterpret_cast<mirror::Object*>(obj->GetLockWord(false).ForwardingAddress());
    DCHECK(ret != nullptr);
    return ret;
  }
  DCHECK(!space_->HasAddress(obj));
  DCHECK(IsMarked(obj));
  return obj;
}

inline bool MarkCompact::IsMarked(const Object* object) const {
  if (immune_region_.ContainsObject(object)) {
    return true;
  }
  if (objects_before_forwarding_->HasAddress(object)) {
    return objects_before_forwarding_->Test(object);
  }
  return mark_bitmap_->Test(object);
}

mirror::Object* MarkCompact::IsMarkedCallback(mirror::Object* object, void* arg) {
  return reinterpret_cast<MarkCompact*>(arg)->IsMarked(object) ? object : nullptr;
}

bool MarkCompact::HeapReferenceMarkedCallback(mirror::HeapReference<mirror::Object>* ref_ptr,
                                              void* arg) {
  // Side effect free since we call this before ever moving objects.
  return reinterpret_cast<MarkCompact*>(arg)->IsMarked(ref_ptr->AsMirrorPtr());
}

void MarkCompact::SweepSystemWeaks() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  Runtime::Current()->SweepSystemWeaks(IsMarkedCallback, this);
}

bool MarkCompact::ShouldSweepSpace(space::ContinuousSpace* space) const {
  return space != space_ && !immune_region_.ContainsSpace(space);
}

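// Copies each visited object to its forwarding address.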
class MoveObjectVisitor {
 public:
  explicit MoveObjectVisitor(MarkCompact* collector) : collector_(collector) {
  }
  void operator()(mirror::Object* obj) const SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE {
    collector_->MoveObject(obj, obj->SizeOf());
  }

 private:
  MarkCompact* const collector_;
};

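// Copy an object to its forwarding address and restore its original lock word. Objects are
// visited and forwarded in the same (increasing address) order, so the saved lock words can
// be restored FIFO from lock_words_to_restore_.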
void MarkCompact::MoveObject(mirror::Object* obj, size_t len) {
  // Look at the forwarding address stored in the lock word to know where to copy.
  DCHECK(space_->HasAddress(obj)) << obj;
  uintptr_t dest_addr = obj->GetLockWord(false).ForwardingAddress();
  mirror::Object* dest_obj = reinterpret_cast<mirror::Object*>(dest_addr);
  DCHECK(space_->HasAddress(dest_obj)) << dest_obj;
  // Use memmove since there may be overlap.
  memmove(reinterpret_cast<void*>(dest_addr), reinterpret_cast<const void*>(obj), len);
  // Restore the saved lock word if needed.
  LockWord lock_word;
  if (UNLIKELY(objects_with_lockword_->Test(obj))) {
    lock_word = lock_words_to_restore_.front();
    lock_words_to_restore_.pop_front();
  }
  dest_obj->SetLockWord(lock_word, false);
}

void MarkCompact::MoveObjects() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  // Move the objects in the before forwarding bitmap.
  MoveObjectVisitor visitor(this);
  objects_before_forwarding_->VisitMarkedRange(reinterpret_cast<uintptr_t>(space_->Begin()),
                                               reinterpret_cast<uintptr_t>(space_->End()),
                                               visitor);
  CHECK(lock_words_to_restore_.empty());
}

void MarkCompact::Sweep(bool swap_bitmaps) {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  DCHECK(mark_stack_->IsEmpty());
  for (const auto& space : GetHeap()->GetContinuousSpaces()) {
    if (space->IsContinuousMemMapAllocSpace()) {
      space::ContinuousMemMapAllocSpace* alloc_space = space->AsContinuousMemMapAllocSpace();
      if (!ShouldSweepSpace(alloc_space)) {
        continue;
      }
      TimingLogger::ScopedTiming t(
          alloc_space->IsZygoteSpace() ? "SweepZygoteSpace" : "SweepAllocSpace", GetTimings());
      RecordFree(alloc_space->Sweep(swap_bitmaps));
    }
  }
  SweepLargeObjects(swap_bitmaps);
}

void MarkCompact::SweepLargeObjects(bool swap_bitmaps) {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  RecordFreeLOS(heap_->GetLargeObjectsSpace()->Sweep(swap_bitmaps));
}

// Process the "referent" field in a java.lang.ref.Reference. If the referent has not yet been
// marked, put it on the appropriate list in the heap for later processing.
void MarkCompact::DelayReferenceReferent(mirror::Class* klass, mirror::Reference* reference) {
  heap_->GetReferenceProcessor()->DelayReferenceReferent(klass, reference,
                                                         &HeapReferenceMarkedCallback, this);
}

class MarkCompactMarkObjectVisitor {
 public:
  explicit MarkCompactMarkObjectVisitor(MarkCompact* collector) : collector_(collector) {
  }

  void operator()(Object* obj, MemberOffset offset, bool /*is_static*/) const ALWAYS_INLINE
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    // Object was already verified when we scanned it.
    collector_->MarkObject(obj->GetFieldObject<mirror::Object, kVerifyNone>(offset));
  }

  void operator()(mirror::Class* klass, mirror::Reference* ref) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) {
    collector_->DelayReferenceReferent(klass, ref);
  }

 private:
  MarkCompact* const collector_;
};

// Visit all of the references of an object and update.
void MarkCompact::ScanObject(Object* obj) {
  MarkCompactMarkObjectVisitor visitor(this);
  obj->VisitReferences<kMovingClasses>(visitor, visitor);
}

// Scan anything that's on the mark stack.
void MarkCompact::ProcessMarkStack() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  while (!mark_stack_->IsEmpty()) {
    Object* obj = mark_stack_->PopBack();
    DCHECK(obj != nullptr);
    ScanObject(obj);
  }
}

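// Set the bump pointer space to compact. Must be called before RunPhases(), since
// InitializePhase() dereferences space_.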
void MarkCompact::SetSpace(space::BumpPointerSpace* space) {
  DCHECK(space != nullptr);
  space_ = space;
}

void MarkCompact::FinishPhase() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  space_ = nullptr;
  CHECK(mark_stack_->IsEmpty());
  mark_stack_->Reset();
  // Clear all of the spaces' mark bitmaps.
  WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
  heap_->ClearMarkedObjects();
  // Release our bitmaps.
  objects_before_forwarding_.reset(nullptr);
  objects_with_lockword_.reset(nullptr);
}

void MarkCompact::RevokeAllThreadLocalBuffers() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  GetHeap()->RevokeAllThreadLocalBuffers();
}

}  // namespace collector
}  // namespace gc
}  // namespace art