/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_ACCOUNTING_SPACE_BITMAP_INL_H_
#define ART_RUNTIME_GC_ACCOUNTING_SPACE_BITMAP_INL_H_

#include "space_bitmap.h"

#include <memory>

#include <android-base/logging.h>

#include "base/atomic.h"
#include "base/bit_utils.h"

namespace art {
namespace gc {
namespace accounting {

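// Atomically set the bit corresponding to `obj` using a relaxed CAS loop.
// Returns true if the bit was already set, false if this call set it.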
template<size_t kAlignment>
inline bool SpaceBitmap<kAlignment>::AtomicTestAndSet(const mirror::Object* obj) {
  uintptr_t addr = reinterpret_cast<uintptr_t>(obj);
  DCHECK_GE(addr, heap_begin_);
  const uintptr_t offset = addr - heap_begin_;
  const size_t index = OffsetToIndex(offset);
  const uintptr_t mask = OffsetToMask(offset);
  Atomic<uintptr_t>* atomic_entry = &bitmap_begin_[index];
  DCHECK_LT(index, bitmap_size_ / sizeof(intptr_t)) << " bitmap_size_ = " << bitmap_size_;
  uintptr_t old_word;
  do {
    old_word = atomic_entry->load(std::memory_order_relaxed);
    // Fast path: The bit is already set.
    if ((old_word & mask) != 0) {
      DCHECK(Test(obj));
      return true;
    }
  } while (!atomic_entry->CompareAndSetWeakRelaxed(old_word, old_word | mask));
  DCHECK(Test(obj));
  return false;
}

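// Return whether the bit corresponding to `obj` is set, using a relaxed load.
// The object must lie within the bitmap's covered heap range.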
template<size_t kAlignment>
inline bool SpaceBitmap<kAlignment>::Test(const mirror::Object* obj) const {
  uintptr_t addr = reinterpret_cast<uintptr_t>(obj);
  DCHECK(HasAddress(obj)) << obj;
  DCHECK(bitmap_begin_ != nullptr);
  DCHECK_GE(addr, heap_begin_);
  const uintptr_t offset = addr - heap_begin_;
  size_t index = OffsetToIndex(offset);
  return (bitmap_begin_[index].load(std::memory_order_relaxed) & OffsetToMask(offset)) != 0;
}

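// Visit each marked object whose address lies in [visit_begin, visit_end); the
// range must fall within [heap_begin_, HeapLimit()). Rather than testing every
// aligned address (the disabled reference loop below), this walks whole bitmap
// words and decodes set bits with CTZ, handling the partial words at the two
// edges separately. Typical use is with a lambda visitor, e.g.
// VisitMarkedRange(begin, end, [](mirror::Object* obj) { /* ... */ });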
template<size_t kAlignment>
template<typename Visitor>
inline void SpaceBitmap<kAlignment>::VisitMarkedRange(uintptr_t visit_begin,
                                                      uintptr_t visit_end,
                                                      Visitor&& visitor) const {
  DCHECK_LE(visit_begin, visit_end);
#if 0
  for (uintptr_t i = visit_begin; i < visit_end; i += kAlignment) {
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(i);
    if (Test(obj)) {
      visitor(obj);
    }
  }
#else
  DCHECK_LE(heap_begin_, visit_begin);
  DCHECK_LE(visit_end, HeapLimit());

  const uintptr_t offset_start = visit_begin - heap_begin_;
  const uintptr_t offset_end = visit_end - heap_begin_;

  const uintptr_t index_start = OffsetToIndex(offset_start);
  const uintptr_t index_end = OffsetToIndex(offset_end);

  const size_t bit_start = (offset_start / kAlignment) % kBitsPerIntPtrT;
  const size_t bit_end = (offset_end / kAlignment) % kBitsPerIntPtrT;

  // Index(begin)  ...  Index(end)
  // [xxxxx???][........][????yyyy]
  //      ^                   ^
  //      |                   #---- Bit of visit_end
  //      #---- Bit of visit_begin
  //

  // Left edge.
  uintptr_t left_edge = bitmap_begin_[index_start];
  // Mask off the lower bits that are not in range.
  left_edge &= ~((static_cast<uintptr_t>(1) << bit_start) - 1);

  // Right edge. Either unique, or left_edge.
  uintptr_t right_edge;

  if (index_start < index_end) {
    // Left edge != right edge.

    // Traverse left edge.
    if (left_edge != 0) {
      const uintptr_t ptr_base = IndexToOffset(index_start) + heap_begin_;
      do {
        const size_t shift = CTZ(left_edge);
        mirror::Object* obj = reinterpret_cast<mirror::Object*>(ptr_base + shift * kAlignment);
        visitor(obj);
        left_edge ^= (static_cast<uintptr_t>(1)) << shift;
      } while (left_edge != 0);
    }

    // Traverse the middle, full part.
    for (size_t i = index_start + 1; i < index_end; ++i) {
      uintptr_t w = bitmap_begin_[i].load(std::memory_order_relaxed);
      if (w != 0) {
        const uintptr_t ptr_base = IndexToOffset(i) + heap_begin_;
        // Iterate on the bits set in word `w`, from the least to the most significant bit.
        do {
          const size_t shift = CTZ(w);
          mirror::Object* obj = reinterpret_cast<mirror::Object*>(ptr_base + shift * kAlignment);
          visitor(obj);
          w ^= (static_cast<uintptr_t>(1)) << shift;
        } while (w != 0);
      }
    }

    // Right edge is unique.
    // But maybe we don't have anything to do: visit_end starts in a new word...
    if (bit_end == 0) {
      // Do not read memory, as it could be after the end of the bitmap.
      right_edge = 0;
    } else {
      right_edge = bitmap_begin_[index_end];
    }
  } else {
    // Right edge = left edge.
    right_edge = left_edge;
  }

  // Right edge handling.
  right_edge &= ((static_cast<uintptr_t>(1) << bit_end) - 1);
  if (right_edge != 0) {
    const uintptr_t ptr_base = IndexToOffset(index_end) + heap_begin_;
    // Iterate on the bits set in word `right_edge`, from the least to the most significant bit.
    do {
      const size_t shift = CTZ(right_edge);
      mirror::Object* obj = reinterpret_cast<mirror::Object*>(ptr_base + shift * kAlignment);
      visitor(obj);
      right_edge ^= (static_cast<uintptr_t>(1)) << shift;
    } while (right_edge != 0);
  }
#endif
}

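// Visit every marked object in the bitmap, from the lowest address to the
// highest, scanning each word up to the one that covers HeapLimit() - 1 and
// decoding set bits with CTZ.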
template<size_t kAlignment>
template<typename Visitor>
void SpaceBitmap<kAlignment>::Walk(Visitor&& visitor) {
  CHECK(bitmap_begin_ != nullptr);

  uintptr_t end = OffsetToIndex(HeapLimit() - heap_begin_ - 1);
  Atomic<uintptr_t>* bitmap_begin = bitmap_begin_;
  for (uintptr_t i = 0; i <= end; ++i) {
    uintptr_t w = bitmap_begin[i].load(std::memory_order_relaxed);
    if (w != 0) {
      uintptr_t ptr_base = IndexToOffset(i) + heap_begin_;
      do {
        const size_t shift = CTZ(w);
        mirror::Object* obj = reinterpret_cast<mirror::Object*>(ptr_base + shift * kAlignment);
        visitor(obj);
        w ^= (static_cast<uintptr_t>(1)) << shift;
      } while (w != 0);
    }
  }
}

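// Set (kSetBit) or clear the bit corresponding to `obj` and return whether it
// was previously set. The load/store pair is not an atomic read-modify-write;
// AtomicTestAndSet is the concurrent-safe variant.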
template<size_t kAlignment>
template<bool kSetBit>
inline bool SpaceBitmap<kAlignment>::Modify(const mirror::Object* obj) {
  uintptr_t addr = reinterpret_cast<uintptr_t>(obj);
  DCHECK_GE(addr, heap_begin_);
  DCHECK(HasAddress(obj)) << obj;
  const uintptr_t offset = addr - heap_begin_;
  const size_t index = OffsetToIndex(offset);
  const uintptr_t mask = OffsetToMask(offset);
  DCHECK_LT(index, bitmap_size_ / sizeof(intptr_t)) << " bitmap_size_ = " << bitmap_size_;
  Atomic<uintptr_t>* atomic_entry = &bitmap_begin_[index];
  uintptr_t old_word = atomic_entry->load(std::memory_order_relaxed);
  if (kSetBit) {
    // Check the bit before setting the word in case we are trying to mark a read-only bitmap,
    // such as an image space bitmap. Such a bitmap is mapped read-only and will fault if we
    // attempt to change any of its words. Since all of its objects are already marked, the
    // fault never occurs if we check before setting the bit. Checking first also avoids
    // dirtying pages that a read-write bitmap would accumulate if we stored unconditionally.
    if ((old_word & mask) == 0) {
      atomic_entry->store(old_word | mask, std::memory_order_relaxed);
    }
  } else {
    atomic_entry->store(old_word & ~mask, std::memory_order_relaxed);
  }
  DCHECK_EQ(Test(obj), kSetBit);
  return (old_word & mask) != 0;
}

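// Print the bitmap's name and the heap address range it covers.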
template<size_t kAlignment>
inline std::ostream& operator << (std::ostream& stream, const SpaceBitmap<kAlignment>& bitmap) {
  return stream
    << bitmap.GetName() << "["
    << "begin=" << reinterpret_cast<const void*>(bitmap.HeapBegin())
    << ",end=" << reinterpret_cast<const void*>(bitmap.HeapLimit())
    << "]";
}

}  // namespace accounting
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_ACCOUNTING_SPACE_BITMAP_INL_H_