// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_INVALIDATED_SLOTS_INL_H_
#define V8_HEAP_INVALIDATED_SLOTS_INL_H_

#include "src/heap/invalidated-slots.h"
#include "src/heap/spaces.h"
#include "src/objects/objects-inl.h"
#include "src/utils/allocation.h"

namespace v8 {
namespace internal {

bool InvalidatedSlotsFilter::IsValid(Address slot) {
#ifdef DEBUG
  DCHECK_LT(slot, sentinel_);
  // Slots must come in non-decreasing order.
  DCHECK_LE(last_slot_, slot);
  last_slot_ = slot;
#endif
  // Slots before the current invalidated object are valid.
  if (slot < invalidated_start_) {
    return true;
  }

  // Advance until the slot lies before the next invalidated object.
  while (slot >= next_invalidated_start_) {
    NextInvalidatedObject();
  }

  HeapObject invalidated_object = HeapObject::FromAddress(invalidated_start_);

  // Compute the size of the invalidated object lazily, on first use.
  if (invalidated_size_ == 0) {
    DCHECK(MarkCompactCollector::IsMapOrForwardedMap(invalidated_object.map()));
    invalidated_size_ = invalidated_object.Size();
  }

  int offset = static_cast<int>(slot - invalidated_start_);

  // OLD_TO_OLD can have slots in the map word, unlike other remembered sets.
  DCHECK_GE(offset, 0);
  DCHECK_IMPLIES(remembered_set_type_ != OLD_TO_OLD, offset > 0);

  if (offset < invalidated_size_)
    return offset == 0 ||
           invalidated_object.IsValidSlot(invalidated_object.map(), offset);

  // The slot is past the end of the invalidated object and therefore valid.
  NextInvalidatedObject();
  return true;
}

void InvalidatedSlotsFilter::NextInvalidatedObject() {
  invalidated_start_ = next_invalidated_start_;
  invalidated_size_ = 0;

  if (iterator_ == iterator_end_) {
    next_invalidated_start_ = sentinel_;
  } else {
    next_invalidated_start_ = iterator_->address();
    iterator_++;
  }
}

void InvalidatedSlotsCleanup::Free(Address free_start, Address free_end) {
#ifdef DEBUG
  DCHECK_LT(free_start, free_end);
  // Free regions must come in increasing order and must not overlap.
  DCHECK_LE(last_free_, free_start);
  last_free_ = free_start;
#endif

  if (iterator_ == iterator_end_) return;

  // Skip invalidated objects that start before the free region.
  while (invalidated_start_ < free_start) {
    ++iterator_;
    NextInvalidatedObject();
  }

  // Remove all invalidated objects that start within the free region.
  while (invalidated_start_ < free_end) {
    iterator_ = invalidated_slots_->erase(iterator_);
    NextInvalidatedObject();
  }
}

void InvalidatedSlotsCleanup::NextInvalidatedObject() {
  if (iterator_ != iterator_end_) {
    invalidated_start_ = iterator_->address();
  } else {
    invalidated_start_ = sentinel_;
  }
}

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_INVALIDATED_SLOTS_INL_H_