/*
 * Copyright 2021 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_INL_H_
#define ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_INL_H_

#include "gc/space/bump_pointer_space.h"
#include "mark_compact.h"
#include "mirror/object-inl.h"
#include "thread-inl.h"

namespace art HIDDEN {
namespace gc {
namespace collector {

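// Records, in class_after_obj_map_, classes that reside in the moving space
// at a higher address than one of their instances. For each such class only
// the lowest-addressed instance is kept; compaction consults this map so that
// a class is not overwritten before instances that still need it (for
// reference-field iteration) have been relocated.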
inline void MarkCompact::UpdateClassAfterObjectMap(mirror::Object* obj) {
  mirror::Class* klass = obj->GetClass<kVerifyNone, kWithoutReadBarrier>();
  if (UNLIKELY(std::less<mirror::Object*>{}(obj, klass) && HasAddress(klass))) {
    auto [iter, success] = class_after_obj_map_.try_emplace(ObjReference::FromMirrorPtr(klass),
                                                            ObjReference::FromMirrorPtr(obj));
    if (!success && std::less<mirror::Object*>{}(obj, iter->second.AsMirrorPtr())) {
      iter->second = ObjReference::FromMirrorPtr(obj);
    }
  }
}

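// Sets the live-words bitmap bits for the range [begin, begin + size), one
// bit per kAlignment bytes, and returns the bit index corresponding to
// 'begin'. For instance, with kAlignment = 8 and 64-bit bitmap words, a
// 48-byte object at the start of the space sets bits [0, 6) of the first word.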
template <size_t kAlignment>
inline uintptr_t MarkCompact::LiveWordsBitmap<kAlignment>::SetLiveWords(uintptr_t begin,
                                                                        size_t size) {
  const uintptr_t begin_bit_idx = MemRangeBitmap::BitIndexFromAddr(begin);
  DCHECK(!Bitmap::TestBit(begin_bit_idx))
      << "begin:" << begin << " size:" << size << " begin_bit_idx:" << begin_bit_idx;
  // Range of bits to set: [begin, end]
  uintptr_t end = begin + size - kAlignment;
  const uintptr_t end_bit_idx = MemRangeBitmap::BitIndexFromAddr(end);
  uintptr_t* begin_bm_address = Bitmap::Begin() + Bitmap::BitIndexToWordIndex(begin_bit_idx);
  uintptr_t* end_bm_address = Bitmap::Begin() + Bitmap::BitIndexToWordIndex(end_bit_idx);
  ptrdiff_t diff = end_bm_address - begin_bm_address;
  uintptr_t mask = Bitmap::BitIndexToMask(begin_bit_idx);
  // Bits that need to be set in the first word, if it's not also the last word.
  mask = ~(mask - 1);
  if (diff > 0) {
    *begin_bm_address |= mask;
    mask = ~0;
    // memset can handle the (diff == 1) case as well, but we avoid the
    // overhead of a function call for this highly likely case (most objects
    // are small).
    if (diff > 1) {
      // Set all intermediate bits to 1.
      std::memset(static_cast<void*>(begin_bm_address + 1), 0xff, (diff - 1) * sizeof(uintptr_t));
    }
  }
  uintptr_t end_mask = Bitmap::BitIndexToMask(end_bit_idx);
  *end_bm_address |= mask & (end_mask | (end_mask - 1));
  return begin_bit_idx;
}

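// Visits contiguous runs ("strides") of set bits, starting at begin_bit_idx
// (which must itself be set), until 'bytes' worth of live heap words have
// been covered or the bit corresponding to 'end' is crossed. For every stride
// the visitor is invoked as visitor(stride_begin_bit_idx, stride_size, is_last),
// with stride_size in heap words.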
template <size_t kAlignment> template <typename Visitor>
inline void MarkCompact::LiveWordsBitmap<kAlignment>::VisitLiveStrides(uintptr_t begin_bit_idx,
                                                                       uint8_t* end,
                                                                       const size_t bytes,
                                                                       Visitor&& visitor) const {
  // Range to visit: [begin_bit_idx, end_bit_idx]
  DCHECK(IsAligned<kAlignment>(end));
  end -= kAlignment;
  const uintptr_t end_bit_idx = MemRangeBitmap::BitIndexFromAddr(reinterpret_cast<uintptr_t>(end));
  DCHECK_LE(begin_bit_idx, end_bit_idx);
  uintptr_t begin_word_idx = Bitmap::BitIndexToWordIndex(begin_bit_idx);
  const uintptr_t end_word_idx = Bitmap::BitIndexToWordIndex(end_bit_idx);
  DCHECK(Bitmap::TestBit(begin_bit_idx));
  size_t stride_size = 0;
  size_t idx_in_word = 0;
  size_t num_heap_words = bytes / kAlignment;
  uintptr_t live_stride_start_idx;
  uintptr_t word = Bitmap::Begin()[begin_word_idx];

  // Set up the first word: clear the bits below begin_bit_idx.
  word &= ~(Bitmap::BitIndexToMask(begin_bit_idx) - 1);
  begin_bit_idx = RoundDown(begin_bit_idx, Bitmap::kBitsPerBitmapWord);

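  // Walk the bitmap one word at a time. Runs of consecutive 1s accumulate
  // into the current stride; a 0 bit terminates the stride and reports it to
  // the visitor. Fully-set words extend the stride without per-bit scanning.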
  do {
    if (UNLIKELY(begin_word_idx == end_word_idx)) {
      uintptr_t mask = Bitmap::BitIndexToMask(end_bit_idx);
      word &= mask | (mask - 1);
    }
    if (~word == 0) {
      // All bits in the word are marked.
      if (stride_size == 0) {
        live_stride_start_idx = begin_bit_idx;
      }
      stride_size += Bitmap::kBitsPerBitmapWord;
      if (num_heap_words <= stride_size) {
        break;
      }
    } else {
      while (word != 0) {
        // Discard 0s.
        size_t shift = CTZ(word);
        idx_in_word += shift;
        word >>= shift;
        if (stride_size > 0) {
          if (shift > 0) {
            if (num_heap_words <= stride_size) {
              break;
            }
            visitor(live_stride_start_idx, stride_size, /*is_last*/ false);
            num_heap_words -= stride_size;
            live_stride_start_idx = begin_bit_idx + idx_in_word;
            stride_size = 0;
          }
        } else {
          live_stride_start_idx = begin_bit_idx + idx_in_word;
        }
        // Consume 1s.
        shift = CTZ(~word);
        DCHECK_NE(shift, 0u);
        word >>= shift;
        idx_in_word += shift;
        stride_size += shift;
      }
      // If the whole word was 0, or its higher bits were all 0s, then the
      // above loop exits without completely consuming the word, so call the
      // visitor for the pending stride, if any.
      if (idx_in_word < Bitmap::kBitsPerBitmapWord && stride_size > 0) {
        if (num_heap_words <= stride_size) {
          break;
        }
        visitor(live_stride_start_idx, stride_size, /*is_last*/ false);
        num_heap_words -= stride_size;
        stride_size = 0;
      }
      idx_in_word = 0;
    }
    begin_bit_idx += Bitmap::kBitsPerBitmapWord;
    begin_word_idx++;
    if (UNLIKELY(begin_word_idx > end_word_idx)) {
      num_heap_words = std::min(stride_size, num_heap_words);
      break;
    }
    word = Bitmap::Begin()[begin_word_idx];
  } while (true);

  if (stride_size > 0) {
    visitor(live_stride_start_idx, num_heap_words, /*is_last*/ true);
  }
}

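// Returns the bit offset, within the vector-word chunk 'chunk_idx', of the
// n-th (0-indexed) set bit. E.g., for a chunk whose first bitmap word is
// 0b10110, FindNthLiveWordOffset(chunk_idx, 2) returns 4: the set bits sit at
// offsets 1, 2 and 4.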
template <size_t kAlignment>
inline uint32_t MarkCompact::LiveWordsBitmap<kAlignment>::FindNthLiveWordOffset(size_t chunk_idx,
                                                                                uint32_t n) const {
  DCHECK_LT(n, kBitsPerVectorWord);
  const size_t index = chunk_idx * kBitmapWordsPerVectorWord;
  for (uint32_t i = 0; i < kBitmapWordsPerVectorWord; i++) {
    uintptr_t word = Bitmap::Begin()[index + i];
    if (~word == 0) {
      if (n < Bitmap::kBitsPerBitmapWord) {
        return i * Bitmap::kBitsPerBitmapWord + n;
      }
      n -= Bitmap::kBitsPerBitmapWord;
    } else {
      uint32_t j = 0;
      while (word != 0) {
        // Count contiguous 0s.
        uint32_t shift = CTZ(word);
        word >>= shift;
        j += shift;
        // Count contiguous 1s.
        shift = CTZ(~word);
        DCHECK_NE(shift, 0u);
        if (shift > n) {
          return i * Bitmap::kBitsPerBitmapWord + j + n;
        }
        n -= shift;
        word >>= shift;
        j += shift;
      }
    }
  }
  LOG(FATAL) << "Unreachable";
  UNREACHABLE();
}

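// Returns true if 'ref' is currently on the heap's allocation stack, which
// lets the collector treat such recently allocated objects as live.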
inline bool MarkCompact::IsOnAllocStack(mirror::Object* ref) {
  // Pairs with release fence after allocation-stack push in
  // Heap::AllocObjectWithAllocator().
  std::atomic_thread_fence(std::memory_order_acquire);
  accounting::ObjectStack* stack = heap_->GetAllocationStack();
  return stack->Contains(ref);
}

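// Reads the reference stored in 'obj' at 'offset' and, if it points into the
// moving-space range [begin, end), overwrites it (without a write barrier)
// with the post-compaction address. Returns the possibly-updated reference.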
inline mirror::Object* MarkCompact::UpdateRef(mirror::Object* obj,
                                              MemberOffset offset,
                                              uint8_t* begin,
                                              uint8_t* end) {
  mirror::Object* old_ref = obj->GetFieldObject<
      mirror::Object, kVerifyNone, kWithoutReadBarrier, /*kIsVolatile*/ false>(offset);
  if (kIsDebugBuild) {
    if (HasAddress(old_ref) &&
        reinterpret_cast<uint8_t*>(old_ref) < black_allocations_begin_ &&
        !moving_space_bitmap_->Test(old_ref)) {
      mirror::Object* from_ref = GetFromSpaceAddr(old_ref);
      std::ostringstream oss;
      heap_->DumpSpaces(oss);
      MemMap::DumpMaps(oss, /* terse= */ true);
      LOG(FATAL) << "Not marked in the bitmap ref=" << old_ref
                 << " from_ref=" << from_ref
                 << " offset=" << offset
                 << " obj=" << obj
                 << " obj-validity=" << IsValidObject(obj)
                 << " from-space=" << static_cast<void*>(from_space_begin_)
                 << " bitmap= " << moving_space_bitmap_->DumpMemAround(old_ref)
                 << " from_ref "
                 << heap_->GetVerification()->DumpRAMAroundAddress(
                        reinterpret_cast<uintptr_t>(from_ref), 128)
                 << " obj "
                 << heap_->GetVerification()->DumpRAMAroundAddress(
                        reinterpret_cast<uintptr_t>(obj), 128)
                 << " old_ref "
                 << heap_->GetVerification()->DumpRAMAroundAddress(
                        reinterpret_cast<uintptr_t>(old_ref), 128)
                 << " maps\n" << oss.str();
    }
  }
  mirror::Object* new_ref = PostCompactAddress(old_ref, begin, end);
  if (new_ref != old_ref) {
    obj->SetFieldObjectWithoutWriteBarrier<
        /*kTransactionActive*/ false, /*kCheckTransaction*/ false, kVerifyNone,
        /*kIsVolatile*/ false>(offset, new_ref);
  }
  return new_ref;
}

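// Returns true if the caller should go on to update this root. In debug
// builds (and only when no address sanitizer is in use) this also verifies
// that a non-stack root is not updated twice, and that 'old_ref' is either a
// black allocation or marked in the moving-space bitmap; roots pointing
// outside the moving space are reported as not needing an update.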
inline bool MarkCompact::VerifyRootSingleUpdate(void* root,
                                                mirror::Object* old_ref,
                                                const RootInfo& info) {
  // ASAN promotes stack frames to the heap in order to detect
  // stack-use-after-return issues, and HWASAN tags pointers, which makes it
  // difficult to recognize stack addresses and exclude them from the check.
  // So skip double-root-update detection when a sanitizer is enabled.
  if (kIsDebugBuild && !kMemoryToolIsAvailable && !kHwAsanEnabled) {
    void* stack_low_addr = stack_low_addr_;
    void* stack_high_addr = stack_high_addr_;
    if (!HasAddress(old_ref)) {
      return false;
    }
    Thread* self = Thread::Current();
    if (UNLIKELY(stack_low_addr == nullptr)) {
      // TODO(Simulator): Test that this should not operate on the simulated stack when the
      // simulator supports mark compact.
      stack_low_addr = self->GetStackEnd<kNativeStackType>();
      stack_high_addr = reinterpret_cast<char*>(stack_low_addr)
                        + self->GetUsableStackSize<kNativeStackType>();
    }
    if (std::less<void*>{}(root, stack_low_addr) || std::greater<void*>{}(root, stack_high_addr)) {
      bool inserted;
      {
        MutexLock mu(self, lock_);
        inserted = updated_roots_->insert(root).second;
      }
      if (!inserted) {
        std::ostringstream oss;
        heap_->DumpSpaces(oss);
        MemMap::DumpMaps(oss, /* terse= */ true);
        CHECK(inserted) << "root=" << root << " old_ref=" << old_ref
                        << " stack_low_addr=" << stack_low_addr
                        << " stack_high_addr=" << stack_high_addr << " maps\n"
                        << oss.str();
      }
    }
    DCHECK(reinterpret_cast<uint8_t*>(old_ref) >= black_allocations_begin_ ||
           moving_space_bitmap_->Test(old_ref))
        << "ref=" << old_ref << " <" << mirror::Object::PrettyTypeOf(old_ref) << "> RootInfo ["
        << info << "]";
  }
  return true;
}

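// Updates a compressed-reference root to its post-compaction address.
// Returns the (possibly unchanged) referent, or nullptr when verification
// determines the root needs no update.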
inline mirror::Object* MarkCompact::UpdateRoot(mirror::CompressedReference<mirror::Object>* root,
                                               uint8_t* begin,
                                               uint8_t* end,
                                               const RootInfo& info) {
  DCHECK(!root->IsNull());
  mirror::Object* old_ref = root->AsMirrorPtr();
  if (VerifyRootSingleUpdate(root, old_ref, info)) {
    mirror::Object* new_ref = PostCompactAddress(old_ref, begin, end);
    if (old_ref != new_ref) {
      root->Assign(new_ref);
    }
    return new_ref;
  }
  return nullptr;
}

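// Same as above, but for roots stored as raw object pointers.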
inline mirror::Object* MarkCompact::UpdateRoot(mirror::Object** root,
                                               uint8_t* begin,
                                               uint8_t* end,
                                               const RootInfo& info) {
  mirror::Object* old_ref = *root;
  if (VerifyRootSingleUpdate(root, old_ref, info)) {
    mirror::Object* new_ref = PostCompactAddress(old_ref, begin, end);
    if (old_ref != new_ref) {
      *root = new_ref;
    }
    return new_ref;
  }
  return nullptr;
}

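// Returns the number of live words (set bits) preceding 'bit_idx' within its
// vector-word-sized chunk. The bit at 'bit_idx' itself must be set.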
template <size_t kAlignment>
inline size_t MarkCompact::LiveWordsBitmap<kAlignment>::CountLiveWordsUpto(size_t bit_idx) const {
  const size_t word_offset = Bitmap::BitIndexToWordIndex(bit_idx);
  uintptr_t word;
  size_t ret = 0;
  // This is needed only if we decide to make chunks 128-bit while still using
  // 64-bit words for the bitmap. Ideally we should use 128-bit SIMD
  // instructions to compute popcount.
  if (kBitmapWordsPerVectorWord > 1) {
    for (size_t i = RoundDown(word_offset, kBitmapWordsPerVectorWord); i < word_offset; i++) {
      word = Bitmap::Begin()[i];
      ret += POPCOUNT(word);
    }
  }
  word = Bitmap::Begin()[word_offset];
  const uintptr_t mask = Bitmap::BitIndexToMask(bit_idx);
  DCHECK_NE(word & mask, 0u)
      << " word_offset:" << word_offset
      << " bit_idx:" << bit_idx
      << " bit_idx_in_word:" << (bit_idx % Bitmap::kBitsPerBitmapWord)
      << std::hex << " word: 0x" << word
      << " mask: 0x" << mask << std::dec;
  ret += POPCOUNT(word & (mask - 1));
  return ret;
}

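// Computes the post-compaction address of a black object (one allocated while
// the GC was running): black allocations slide down uniformly by
// black_objs_slide_diff_ bytes.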
inline mirror::Object* MarkCompact::PostCompactBlackObjAddr(mirror::Object* old_ref) const {
  return reinterpret_cast<mirror::Object*>(reinterpret_cast<uint8_t*>(old_ref)
                                           - black_objs_slide_diff_);
}

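// Computes the post-compaction address of an object marked before the
// black-allocations boundary:
//   new_addr = space_begin
//              + chunk_info_vec_[chunk]  // post-compact offset of the chunk's first live byte
//              + live bytes preceding the object within its chunk.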
inline mirror::Object* MarkCompact::PostCompactOldObjAddr(mirror::Object* old_ref) const {
  const uintptr_t begin = live_words_bitmap_->Begin();
  const uintptr_t addr_offset = reinterpret_cast<uintptr_t>(old_ref) - begin;
  const size_t vec_idx = addr_offset / kOffsetChunkSize;
  const size_t live_bytes_in_bitmap_word =
      live_words_bitmap_->CountLiveWordsUpto(addr_offset / kAlignment) * kAlignment;
  return reinterpret_cast<mirror::Object*>(begin
                                           + chunk_info_vec_[vec_idx]
                                           + live_bytes_in_bitmap_word);
}

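// Computes the post-compaction address for a reference known to lie in the
// moving space, dispatching on whether it is a black allocation or a marked
// old object. In debug builds, verifies that a pre-boundary object is indeed
// marked.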
inline mirror::Object* MarkCompact::PostCompactAddressUnchecked(mirror::Object* old_ref) const {
  if (reinterpret_cast<uint8_t*>(old_ref) >= black_allocations_begin_) {
    return PostCompactBlackObjAddr(old_ref);
  }
  if (kIsDebugBuild) {
    mirror::Object* from_ref = GetFromSpaceAddr(old_ref);
    if (!moving_space_bitmap_->Test(old_ref)) {
      std::ostringstream oss;
      Runtime::Current()->GetHeap()->DumpSpaces(oss);
      MemMap::DumpMaps(oss, /* terse= */ true);
      LOG(FATAL) << "ref=" << old_ref
                 << " from_ref=" << from_ref
                 << " from-space=" << static_cast<void*>(from_space_begin_)
                 << " bitmap= " << moving_space_bitmap_->DumpMemAround(old_ref)
                 << heap_->GetVerification()->DumpRAMAroundAddress(
                        reinterpret_cast<uintptr_t>(from_ref), 128)
                 << " maps\n" << oss.str();
    }
  }
  return PostCompactOldObjAddr(old_ref);
}

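// Returns the post-compaction address of 'old_ref' if it lies in the moving
// space [begin, end); otherwise returns 'old_ref' unchanged.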
inline mirror::Object* MarkCompact::PostCompactAddress(mirror::Object* old_ref,
                                                       uint8_t* begin,
                                                       uint8_t* end) const {
  if (LIKELY(HasAddress(old_ref, begin, end))) {
    return PostCompactAddressUnchecked(old_ref);
  }
  return old_ref;
}

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_INL_H_