// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/marking-barrier.h"

#include "src/heap/heap-inl.h"
#include "src/heap/heap.h"
#include "src/heap/incremental-marking-inl.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/mark-compact-inl.h"
#include "src/heap/mark-compact.h"
#include "src/heap/marking-barrier-inl.h"
#include "src/heap/marking-worklist-inl.h"
#include "src/heap/marking-worklist.h"
#include "src/heap/safepoint.h"
#include "src/objects/js-array-buffer.h"

namespace v8 {
namespace internal {

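// Barrier owned by the main-thread Heap: it shares the collector's marking
// worklist and has direct access to the incremental marker.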
MarkingBarrier::MarkingBarrier(Heap* heap)
    : heap_(heap),
      collector_(heap_->mark_compact_collector()),
      incremental_marking_(heap_->incremental_marking()),
      worklist_(collector_->marking_worklists()->shared()),
      is_main_thread_barrier_(true) {}

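// Barrier owned by a background-thread LocalHeap: it pushes into the same
// shared worklist but has no incremental marker of its own.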
MarkingBarrier::MarkingBarrier(LocalHeap* local_heap)
    : heap_(local_heap->heap()),
      collector_(heap_->mark_compact_collector()),
      incremental_marking_(nullptr),
      worklist_(collector_->marking_worklists()->shared()),
      is_main_thread_barrier_(false) {}

MarkingBarrier::~MarkingBarrier() { DCHECK(worklist_.IsLocalEmpty()); }

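// Slot write barrier: marks |value| if necessary and, while compacting,
// records the slot so it can be updated after evacuation.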
void MarkingBarrier::Write(HeapObject host, HeapObjectSlot slot,
                           HeapObject value) {
  if (MarkValue(host, value)) {
    if (is_compacting_ && slot.address()) {
      collector_->RecordSlot(host, slot, value);
    }
  }
}

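// Write barrier for object references embedded in code, reached via
// RelocInfo rather than an ordinary slot; recording therefore goes through
// a typed slot.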
void MarkingBarrier::Write(Code host, RelocInfo* reloc_info, HeapObject value) {
  if (MarkValue(host, value)) {
    if (is_compacting_) {
      if (is_main_thread_barrier_) {
        // An optimization to avoid allocating additional typed slots for the
        // main thread.
        collector_->RecordRelocSlot(host, reloc_info, value);
      } else {
        RecordRelocSlot(host, reloc_info, value);
      }
    }
  }
}

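// Write barrier for the external ArrayBufferExtension of a JSArrayBuffer.
// Without concurrent marking, marking the extension is deferred until the
// host itself has been marked black.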
void MarkingBarrier::Write(JSArrayBuffer host,
                           ArrayBufferExtension* extension) {
  if (!V8_CONCURRENT_MARKING_BOOL && !marking_state_.IsBlack(host)) {
    // The extension will be marked when the marker visits the host object.
    return;
  }
  extension->Mark();
}

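// Write barrier for descriptor arrays: the marked-descriptor count is
// versioned by the collector's epoch, so marking is only triggered when the
// array has more own descriptors than were marked in the current cycle.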
void MarkingBarrier::Write(DescriptorArray descriptor_array,
                           int number_of_own_descriptors) {
  DCHECK(is_main_thread_barrier_);
  int16_t raw_marked = descriptor_array.raw_number_of_marked_descriptors();
  if (NumberOfMarkedDescriptors::decode(collector_->epoch(), raw_marked) <
      number_of_own_descriptors) {
    collector_->MarkDescriptorArrayFromWriteBarrier(descriptor_array,
                                                    number_of_own_descriptors);
  }
}

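// Background threads buffer typed slots per memory chunk instead of writing
// to the remembered set directly; the buffers are merged in Publish().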
void MarkingBarrier::RecordRelocSlot(Code host, RelocInfo* rinfo,
                                     HeapObject target) {
  MarkCompactCollector::RecordRelocSlotInfo info =
      MarkCompactCollector::PrepareRecordRelocSlot(host, rinfo, target);
  if (info.should_record) {
    auto& typed_slots = typed_slots_map_[info.memory_chunk];
    if (!typed_slots) {
      typed_slots.reset(new TypedSlots());
    }
    typed_slots->Insert(info.slot_type, info.offset);
  }
}

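// The *All variants below apply the operation to the main-thread barrier
// and, when local heaps are enabled, to every background thread's barrier.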
// static
void MarkingBarrier::ActivateAll(Heap* heap, bool is_compacting) {
  heap->marking_barrier()->Activate(is_compacting);
  if (FLAG_local_heaps) {
    heap->safepoint()->IterateLocalHeaps(
        [is_compacting](LocalHeap* local_heap) {
          local_heap->marking_barrier()->Activate(is_compacting);
        });
  }
}

// static
void MarkingBarrier::DeactivateAll(Heap* heap) {
  heap->marking_barrier()->Deactivate();
  if (FLAG_local_heaps) {
    heap->safepoint()->IterateLocalHeaps([](LocalHeap* local_heap) {
      local_heap->marking_barrier()->Deactivate();
    });
  }
}

// static
void MarkingBarrier::PublishAll(Heap* heap) {
  heap->marking_barrier()->Publish();
  if (FLAG_local_heaps) {
    heap->safepoint()->IterateLocalHeaps([](LocalHeap* local_heap) {
      local_heap->marking_barrier()->Publish();
    });
  }
}

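// Hands the local marking worklist over to the shared worklist and merges
// any buffered typed slots into the OLD_TO_OLD remembered set.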
void MarkingBarrier::Publish() {
  DCHECK_IMPLIES(!is_main_thread_barrier_, FLAG_local_heaps);
  if (is_activated_) {
    worklist_.Publish();
    for (auto& it : typed_slots_map_) {
      MemoryChunk* memory_chunk = it.first;
      std::unique_ptr<TypedSlots>& typed_slots = it.second;
      RememberedSet<OLD_TO_OLD>::MergeTyped(memory_chunk,
                                            std::move(typed_slots));
    }
    typed_slots_map_.clear();
  }
}

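// The write barrier is gated on per-page flags; clearing them below disables
// the barrier for every page in the space.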
void MarkingBarrier::DeactivateSpace(PagedSpace* space) {
  DCHECK(is_main_thread_barrier_);
  for (Page* p : *space) {
    p->SetOldGenerationPageFlags(false);
  }
}

void MarkingBarrier::DeactivateSpace(NewSpace* space) {
  DCHECK(is_main_thread_barrier_);
  for (Page* p : *space) {
    p->SetYoungGenerationPageFlags(false);
  }
}

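// Turns the barrier off. Only the main-thread barrier resets page flags,
// including those of large-object pages; background barriers just clear
// their local state.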
void MarkingBarrier::Deactivate() {
  is_activated_ = false;
  is_compacting_ = false;
  DCHECK_IMPLIES(!is_main_thread_barrier_, FLAG_local_heaps);
  if (is_main_thread_barrier_) {
    DeactivateSpace(heap_->old_space());
    DeactivateSpace(heap_->map_space());
    DeactivateSpace(heap_->code_space());
    DeactivateSpace(heap_->new_space());
    for (LargePage* p : *heap_->new_lo_space()) {
      p->SetYoungGenerationPageFlags(false);
      DCHECK(p->IsLargePage());
    }
    for (LargePage* p : *heap_->lo_space()) {
      p->SetOldGenerationPageFlags(false);
    }
    for (LargePage* p : *heap_->code_lo_space()) {
      p->SetOldGenerationPageFlags(false);
    }
  }
  DCHECK(typed_slots_map_.empty());
  DCHECK(worklist_.IsLocalEmpty());
}

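// Setting the page flags re-arms the write barrier for every page in the
// space.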
void MarkingBarrier::ActivateSpace(PagedSpace* space) {
  DCHECK(is_main_thread_barrier_);
  for (Page* p : *space) {
    p->SetOldGenerationPageFlags(true);
  }
}

void MarkingBarrier::ActivateSpace(NewSpace* space) {
  DCHECK(is_main_thread_barrier_);
  for (Page* p : *space) {
    p->SetYoungGenerationPageFlags(true);
  }
}

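// Turns the barrier on. As in Deactivate(), only the main-thread barrier
// touches page flags, including those of large-object pages.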
void MarkingBarrier::Activate(bool is_compacting) {
  DCHECK(!is_activated_);
  DCHECK(worklist_.IsLocalEmpty());
  DCHECK_IMPLIES(!is_main_thread_barrier_, FLAG_local_heaps);
  is_compacting_ = is_compacting;
  is_activated_ = true;
  if (is_main_thread_barrier_) {
    ActivateSpace(heap_->old_space());
    ActivateSpace(heap_->map_space());
    ActivateSpace(heap_->code_space());
    ActivateSpace(heap_->new_space());

    for (LargePage* p : *heap_->new_lo_space()) {
      p->SetYoungGenerationPageFlags(true);
      DCHECK(p->IsLargePage());
    }

    for (LargePage* p : *heap_->lo_space()) {
      p->SetOldGenerationPageFlags(true);
    }

    for (LargePage* p : *heap_->code_lo_space()) {
      p->SetOldGenerationPageFlags(true);
    }
  }
}

}  // namespace internal
}  // namespace v8