/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define ATRACE_TAG ATRACE_TAG_DALVIK

#include <stdio.h>
#include <cutils/trace.h>

#include "garbage_collector.h"

#include "base/logging.h"
#include "base/mutex-inl.h"
#include "gc/accounting/heap_bitmap.h"
#include "gc/space/large_object_space.h"
#include "gc/space/space-inl.h"
#include "thread.h"
#include "thread_list.h"

namespace art {
namespace gc {
namespace collector {

GarbageCollector::GarbageCollector(Heap* heap, const std::string& name)
    : heap_(heap),
      name_(name),
      verbose_(VLOG_IS_ON(heap)),
      duration_ns_(0),
      timings_(name_.c_str(), true, verbose_),
      cumulative_timings_(name) {
  ResetCumulativeStatistics();
}

bool GarbageCollector::HandleDirtyObjectsPhase() {
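  // Called by Run() while all mutator threads are suspended during a concurrent collection; the
  // return value tells Run() whether dirty-object handling is complete. This base version has no
  // extra work to do and finishes in a single pause.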
  DCHECK(IsConcurrent());
  return true;
}

void GarbageCollector::RegisterPause(uint64_t nano_length) {
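  // Record the duration of an individual stop-the-world pause for this collection.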
  pause_times_.push_back(nano_length);
}

void GarbageCollector::ResetCumulativeStatistics() {
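  // Clear the timing and freed-memory totals accumulated across previous runs of this collector.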
  cumulative_timings_.Reset();
  total_time_ns_ = 0;
  total_paused_time_ns_ = 0;
  total_freed_objects_ = 0;
  total_freed_bytes_ = 0;
}

void GarbageCollector::Run() {
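  // Perform one collection cycle: initialize, mark, reclaim, and finish, recording every pause
  // and the total duration along the way.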
  ThreadList* thread_list = Runtime::Current()->GetThreadList();
  uint64_t start_time = NanoTime();
  pause_times_.clear();
  duration_ns_ = 0;

  InitializePhase();

  if (!IsConcurrent()) {
    // Pause is the entire length of the GC.
    uint64_t pause_start = NanoTime();
    ATRACE_BEGIN("Application threads suspended");
    thread_list->SuspendAll();
    MarkingPhase();
    ReclaimPhase();
    thread_list->ResumeAll();
    ATRACE_END();
    uint64_t pause_end = NanoTime();
    pause_times_.push_back(pause_end - pause_start);
  } else {
    Thread* self = Thread::Current();
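    // Marking runs concurrently with the mutators; the mutator lock is held only as a reader, so
    // application threads keep running during this phase.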
    {
      ReaderMutexLock mu(self, *Locks::mutator_lock_);
      MarkingPhase();
    }
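    // Suspend all threads in short pauses until HandleDirtyObjectsPhase() reports that the
    // objects dirtied during concurrent marking have all been handled.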
    bool done = false;
    while (!done) {
      uint64_t pause_start = NanoTime();
      ATRACE_BEGIN("Suspending mutator threads");
      thread_list->SuspendAll();
      ATRACE_END();
      ATRACE_BEGIN("All mutator threads suspended");
      done = HandleDirtyObjectsPhase();
      ATRACE_END();
      uint64_t pause_end = NanoTime();
      ATRACE_BEGIN("Resuming mutator threads");
      thread_list->ResumeAll();
      ATRACE_END();
      pause_times_.push_back(pause_end - pause_start);
    }
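    // Reclamation also proceeds concurrently, again with the mutator lock held as a reader.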
    {
      ReaderMutexLock mu(self, *Locks::mutator_lock_);
      ReclaimPhase();
    }
  }

  uint64_t end_time = NanoTime();
  duration_ns_ = end_time - start_time;

  FinishPhase();
}

void GarbageCollector::SwapBitmaps() {
  // Swap the live and mark bitmaps for each alloc space. This is needed since sweep re-swaps
  // these bitmaps. The bitmap swapping is an optimization so that we do not need to clear the live
  // bits of dead objects in the live bitmap.
  const GcType gc_type = GetGcType();
  for (const auto& space : GetHeap()->GetContinuousSpaces()) {
    // We never allocate into zygote spaces.
    if (space->GetGcRetentionPolicy() == space::kGcRetentionPolicyAlwaysCollect ||
        (gc_type == kGcTypeFull &&
         space->GetGcRetentionPolicy() == space::kGcRetentionPolicyFullCollect)) {
      accounting::SpaceBitmap* live_bitmap = space->GetLiveBitmap();
      accounting::SpaceBitmap* mark_bitmap = space->GetMarkBitmap();
      if (live_bitmap != mark_bitmap) {
        heap_->GetLiveBitmap()->ReplaceBitmap(live_bitmap, mark_bitmap);
        heap_->GetMarkBitmap()->ReplaceBitmap(mark_bitmap, live_bitmap);
        space->AsDlMallocSpace()->SwapBitmaps();
      }
    }
  }
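  // Large object spaces are discontinuous and track liveness with object sets rather than
  // bitmaps, so their live and mark sets are swapped in the same way.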
  for (const auto& disc_space : GetHeap()->GetDiscontinuousSpaces()) {
    space::LargeObjectSpace* space = down_cast<space::LargeObjectSpace*>(disc_space);
    accounting::SpaceSetMap* live_set = space->GetLiveObjects();
    accounting::SpaceSetMap* mark_set = space->GetMarkObjects();
    heap_->GetLiveBitmap()->ReplaceObjectSet(live_set, mark_set);
    heap_->GetMarkBitmap()->ReplaceObjectSet(mark_set, live_set);
    space->SwapBitmaps();
  }
}

}  // namespace collector
}  // namespace gc
}  // namespace art