// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/cppgc/remembered-set.h"

#include <algorithm>

#include "include/cppgc/visitor.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/heap-page.h"
#include "src/heap/cppgc/marking-state.h"

namespace cppgc {
namespace internal {

namespace {

19 // Visit remembered set that was recorded in the generational barrier.
VisitRememberedSlots(const std::set<void * > & slots,const HeapBase & heap,MutatorMarkingState & mutator_marking_state)20 void VisitRememberedSlots(const std::set<void*>& slots, const HeapBase& heap,
21 MutatorMarkingState& mutator_marking_state) {
22 for (void* slot : slots) {
23 // Slot must always point to a valid, not freed object.
24 auto& slot_header = BasePage::FromInnerAddress(&heap, slot)
25 ->ObjectHeaderFromInnerAddress(slot);
26 // The age checking in the generational barrier is imprecise, since a card
27 // may have mixed young/old objects. Check here precisely if the object is
28 // old.
29 if (slot_header.IsYoung()) continue;
30 // The design of young generation requires collections to be executed at the
31 // top level (with the guarantee that no objects are currently being in
32 // construction). This can be ensured by running young GCs from safe points
33 // or by reintroducing nested allocation scopes that avoid finalization.
34 DCHECK(!slot_header.template IsInConstruction<AccessMode::kNonAtomic>());
35
36 void* value = *reinterpret_cast<void**>(slot);
37 // Slot could be updated to nullptr or kSentinelPointer by the mutator.
38 if (value == kSentinelPointer || value == nullptr) continue;
39
40 #if DEBUG
41 // Check that the slot can not point to a freed object.
42 HeapObjectHeader& header =
43 BasePage::FromPayload(value)->ObjectHeaderFromInnerAddress(value);
44 DCHECK(!header.IsFree());
45 #endif
46
47 mutator_marking_state.DynamicallyMarkAddress(static_cast<Address>(value));
48 }
49 }
50
51 // Visits source objects that were recorded in the generational barrier for
52 // slots.
VisitRememberedSourceObjects(const std::set<HeapObjectHeader * > & remembered_source_objects,Visitor & visitor)53 void VisitRememberedSourceObjects(
54 const std::set<HeapObjectHeader*>& remembered_source_objects,
55 Visitor& visitor) {
56 for (HeapObjectHeader* source_hoh : remembered_source_objects) {
57 DCHECK(source_hoh);
58 // The age checking in the generational barrier is imprecise, since a card
59 // may have mixed young/old objects. Check here precisely if the object is
60 // old.
61 if (source_hoh->IsYoung()) continue;
62 // The design of young generation requires collections to be executed at the
63 // top level (with the guarantee that no objects are currently being in
64 // construction). This can be ensured by running young GCs from safe points
65 // or by reintroducing nested allocation scopes that avoid finalization.
66 DCHECK(!source_hoh->template IsInConstruction<AccessMode::kNonAtomic>());
67
68 const TraceCallback trace_callback =
69 GlobalGCInfoTable::GCInfoFromIndex(source_hoh->GetGCInfoIndex()).trace;
70
71 // Process eagerly to avoid reaccounting.
72 trace_callback(&visitor, source_hoh->ObjectStart());
73 }
74 }
75
}  // namespace

AddSlot(void * slot)78 void OldToNewRememberedSet::AddSlot(void* slot) {
79 remembered_slots_.insert(slot);
80 }
81
AddSourceObject(HeapObjectHeader & hoh)82 void OldToNewRememberedSet::AddSourceObject(HeapObjectHeader& hoh) {
83 remembered_source_objects_.insert(&hoh);
84 }
85
AddWeakCallback(WeakCallbackItem item)86 void OldToNewRememberedSet::AddWeakCallback(WeakCallbackItem item) {
87 // TODO(1029379): WeakCallbacks are also executed for weak collections.
88 // Consider splitting weak-callbacks in custom weak callbacks and ones for
89 // collections.
90 remembered_weak_callbacks_.insert(item);
91 }
92
InvalidateRememberedSlotsInRange(void * begin,void * end)93 void OldToNewRememberedSet::InvalidateRememberedSlotsInRange(void* begin,
94 void* end) {
95 // TODO(1029379): The 2 binary walks can be optimized with a custom algorithm.
96 auto from = remembered_slots_.lower_bound(begin),
97 to = remembered_slots_.lower_bound(end);
98 remembered_slots_.erase(from, to);
99 #if defined(ENABLE_SLOW_DCHECKS)
100 // Check that no remembered slots are referring to the freed area.
101 DCHECK(std::none_of(remembered_slots_.begin(), remembered_slots_.end(),
102 [begin, end](void* slot) {
103 void* value = *reinterpret_cast<void**>(slot);
104 return begin <= value && value < end;
105 }));
106 #endif // defined(ENABLE_SLOW_DCHECKS)
107 }
108
InvalidateRememberedSourceObject(HeapObjectHeader & header)109 void OldToNewRememberedSet::InvalidateRememberedSourceObject(
110 HeapObjectHeader& header) {
111 remembered_source_objects_.erase(&header);
112 }
113
Visit(Visitor & visitor,MutatorMarkingState & marking_state)114 void OldToNewRememberedSet::Visit(Visitor& visitor,
115 MutatorMarkingState& marking_state) {
116 VisitRememberedSlots(remembered_slots_, heap_, marking_state);
117 VisitRememberedSourceObjects(remembered_source_objects_, visitor);
118 }
119
ExecuteCustomCallbacks(LivenessBroker broker)120 void OldToNewRememberedSet::ExecuteCustomCallbacks(LivenessBroker broker) {
121 for (const auto& callback : remembered_weak_callbacks_) {
122 callback.callback(broker, callback.parameter);
123 }
124 }
125
ReleaseCustomCallbacks()126 void OldToNewRememberedSet::ReleaseCustomCallbacks() {
127 remembered_weak_callbacks_.clear();
128 }
129
Reset()130 void OldToNewRememberedSet::Reset() {
131 remembered_slots_.clear();
132 remembered_source_objects_.clear();
133 }
134
}  // namespace internal
}  // namespace cppgc