// Copyright 2019 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
4
5 #ifndef V8_HEAP_MARKING_VISITOR_INL_H_
6 #define V8_HEAP_MARKING_VISITOR_INL_H_
7
8 #include "src/heap/marking-visitor.h"
9 #include "src/heap/objects-visiting-inl.h"
10 #include "src/heap/objects-visiting.h"
11 #include "src/heap/spaces.h"
12 #include "src/objects/objects.h"
13 #include "src/snapshot/deserializer.h"
14
15 namespace v8 {
16 namespace internal {
17
// ===========================================================================
// Visiting strong and weak pointers =========================================
// ===========================================================================
21
// Marks |object| for processing: if it is still white, turns it grey and
// pushes it onto the local marking worklist. Optionally records the
// host->object edge for retaining-path tracing.
template <typename ConcreteVisitor, typename MarkingState>
void MarkingVisitorBase<ConcreteVisitor, MarkingState>::MarkObject(
    HeapObject host, HeapObject object) {
  concrete_visitor()->SynchronizePageAccess(object);
  // Only the visitor that performs the white->grey transition pushes the
  // object, so it appears on a worklist at most once.
  if (concrete_visitor()->marking_state()->WhiteToGrey(object)) {
    local_marking_worklists_->Push(object);
    if (V8_UNLIKELY(concrete_visitor()->retaining_path_mode() ==
                    TraceRetainingPathMode::kEnabled)) {
      heap_->AddRetainer(host, object);
    }
  }
}
34
// Handles a strong reference from |host| at |slot|: marks the target and
// records the slot via the concrete visitor.
// class template arguments
template <typename ConcreteVisitor, typename MarkingState>
// method template arguments
template <typename THeapObjectSlot>
void MarkingVisitorBase<ConcreteVisitor, MarkingState>::ProcessStrongHeapObject(
    HeapObject host, THeapObjectSlot slot, HeapObject heap_object) {
  MarkObject(host, heap_object);
  concrete_visitor()->RecordSlot(host, slot, heap_object);
}
44
// Handles a weak reference from |host| at |slot|. Targets already known to be
// live are treated like strong slots; otherwise the reference is deferred to
// the weak_references worklist for end-of-marking processing.
// class template arguments
template <typename ConcreteVisitor, typename MarkingState>
// method template arguments
template <typename THeapObjectSlot>
void MarkingVisitorBase<ConcreteVisitor, MarkingState>::ProcessWeakHeapObject(
    HeapObject host, THeapObjectSlot slot, HeapObject heap_object) {
  concrete_visitor()->SynchronizePageAccess(heap_object);
  if (concrete_visitor()->marking_state()->IsBlackOrGrey(heap_object)) {
    // Weak references with live values are directly processed here to
    // reduce the processing time of weak cells during the main GC
    // pause.
    concrete_visitor()->RecordSlot(host, slot, heap_object);
  } else {
    // If we do not know about liveness of the value, we have to process
    // the reference when we know the liveness of the whole transitive
    // closure.
    weak_objects_->weak_references.Push(task_id_, std::make_pair(host, slot));
  }
}
64
// Iterates the slot range [start, end) of |host|, dispatching each slot to the
// strong or weak handler. Smis and cleared weak references are skipped.
// class template arguments
template <typename ConcreteVisitor, typename MarkingState>
// method template arguments
template <typename TSlot>
V8_INLINE void
MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitPointersImpl(
    HeapObject host, TSlot start, TSlot end) {
  using THeapObjectSlot = typename TSlot::THeapObjectSlot;
  for (TSlot slot = start; slot < end; ++slot) {
    // Relaxed load: the slot may be updated concurrently (see the comment on
    // strong-to-weak transitions below).
    typename TSlot::TObject object = slot.Relaxed_Load();
    HeapObject heap_object;
    if (object.GetHeapObjectIfStrong(&heap_object)) {
      // If the reference changes concurrently from strong to weak, the write
      // barrier will treat the weak reference as strong, so we won't miss the
      // weak reference.
      ProcessStrongHeapObject(host, THeapObjectSlot(slot), heap_object);
    } else if (TSlot::kCanBeWeak && object.GetHeapObjectIfWeak(&heap_object)) {
      ProcessWeakHeapObject(host, THeapObjectSlot(slot), heap_object);
    }
  }
}
86
// Visits an object embedded in |host|'s code. Objects the code holds weakly
// are not marked here; they are deferred to the weak_objects_in_code worklist
// instead.
template <typename ConcreteVisitor, typename MarkingState>
void MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitEmbeddedPointer(
    Code host, RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsEmbeddedObjectMode(rinfo->rmode()));
  HeapObject object = rinfo->target_object();
  if (!concrete_visitor()->marking_state()->IsBlackOrGrey(object)) {
    if (host.IsWeakObject(object)) {
      weak_objects_->weak_objects_in_code.Push(task_id_,
                                               std::make_pair(object, host));
    } else {
      MarkObject(host, object);
    }
  }
  // The reloc slot is recorded regardless of whether the target was marked.
  concrete_visitor()->RecordRelocSlot(host, rinfo, object);
}
102
103 template <typename ConcreteVisitor, typename MarkingState>
VisitCodeTarget(Code host,RelocInfo * rinfo)104 void MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitCodeTarget(
105 Code host, RelocInfo* rinfo) {
106 DCHECK(RelocInfo::IsCodeTargetMode(rinfo->rmode()));
107 Code target = Code::GetCodeFromTargetAddress(rinfo->target_address());
108 MarkObject(host, target);
109 concrete_visitor()->RecordRelocSlot(host, rinfo, target);
110 }
111
// ===========================================================================
// Object participating in bytecode flushing =================================
// ===========================================================================
115
// Visits a BytecodeArray body and, unless this is a forced GC, ages the
// bytecode (MakeOlder) so it can eventually become a flushing candidate.
// Returns the visited size, or 0 if the object should not be visited.
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitBytecodeArray(
    Map map, BytecodeArray object) {
  if (!concrete_visitor()->ShouldVisit(object)) return 0;
  int size = BytecodeArray::BodyDescriptor::SizeOf(map, object);
  this->VisitMapPointer(object);
  BytecodeArray::BodyDescriptor::IterateBody(map, object, size, this);
  if (!is_forced_gc_) {
    object.MakeOlder();
  }
  return size;
}
128
// Visits a JSFunction as a regular JSObject subclass, additionally queueing
// it for reset if its bytecode has been flushed.
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSFunction(
    Map map, JSFunction object) {
  int size = concrete_visitor()->VisitJSObjectSubclass(map, object);
  // Check if the JSFunction needs reset due to bytecode being flushed.
  if (bytecode_flush_mode_ != BytecodeFlushMode::kDoNotFlushBytecode &&
      object.NeedsResetDueToFlushedBytecode()) {
    weak_objects_->flushed_js_functions.Push(task_id_, object);
  }
  return size;
}
140
// Visits a SharedFunctionInfo. Depending on the bytecode-flushing decision,
// the function-data field is either treated weakly (by registering the SFI as
// a flushing candidate) or visited strongly.
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitSharedFunctionInfo(
    Map map, SharedFunctionInfo shared_info) {
  if (!concrete_visitor()->ShouldVisit(shared_info)) return 0;

  int size = SharedFunctionInfo::BodyDescriptor::SizeOf(map, shared_info);
  this->VisitMapPointer(shared_info);
  SharedFunctionInfo::BodyDescriptor::IterateBody(map, shared_info, size, this);

  // If the SharedFunctionInfo has old bytecode, mark it as flushable,
  // otherwise visit the function data field strongly.
  if (shared_info.ShouldFlushBytecode(bytecode_flush_mode_)) {
    weak_objects_->bytecode_flushing_candidates.Push(task_id_, shared_info);
  } else {
    VisitPointer(shared_info,
                 shared_info.RawField(SharedFunctionInfo::kFunctionDataOffset));
  }
  return size;
}
160
// ===========================================================================
// Fixed arrays that need incremental processing and can be left-trimmed =====
// ===========================================================================
164
// Incrementally scans a large FixedArray: at most kProgressBarScanningChunk
// bytes per visit, with the resume offset kept in the chunk's progress bar.
// Returns the number of bytes scanned in this step.
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::
    VisitFixedArrayWithProgressBar(Map map, FixedArray object,
                                   MemoryChunk* chunk) {
  const int kProgressBarScanningChunk = kMaxRegularHeapObjectSize;
  STATIC_ASSERT(kMaxRegularHeapObjectSize % kTaggedSize == 0);
  DCHECK(concrete_visitor()->marking_state()->IsBlackOrGrey(object));
  concrete_visitor()->marking_state()->GreyToBlack(object);
  int size = FixedArray::BodyDescriptor::SizeOf(map, object);
  size_t current_progress_bar = chunk->ProgressBar();
  int start = static_cast<int>(current_progress_bar);
  if (start == 0) {
    // First step: visit the map pointer once, then begin scanning at the
    // body's start offset.
    this->VisitMapPointer(object);
    start = FixedArray::BodyDescriptor::kStartOffset;
  }
  int end = Min(size, start + kProgressBarScanningChunk);
  if (start < end) {
    VisitPointers(object, object.RawField(start), object.RawField(end));
    bool success = chunk->TrySetProgressBar(current_progress_bar, end);
    CHECK(success);
    if (end < size) {
      // The object can be pushed back onto the marking worklist only after
      // progress bar was updated.
      local_marking_worklists_->Push(object);
    }
  }
  return end - start;
}
193
194 template <typename ConcreteVisitor, typename MarkingState>
VisitFixedArray(Map map,FixedArray object)195 int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitFixedArray(
196 Map map, FixedArray object) {
197 // Arrays with the progress bar are not left-trimmable because they reside
198 // in the large object space.
199 MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
200 return chunk->IsFlagSet<AccessMode::ATOMIC>(MemoryChunk::HAS_PROGRESS_BAR)
201 ? VisitFixedArrayWithProgressBar(map, object, chunk)
202 : concrete_visitor()->VisitLeftTrimmableArray(map, object);
203 }
204
205 template <typename ConcreteVisitor, typename MarkingState>
VisitFixedDoubleArray(Map map,FixedDoubleArray object)206 int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitFixedDoubleArray(
207 Map map, FixedDoubleArray object) {
208 return concrete_visitor()->VisitLeftTrimmableArray(map, object);
209 }
210
// ===========================================================================
// Objects participating in embedder tracing =================================
// ===========================================================================
214
215 template <typename ConcreteVisitor, typename MarkingState>
216 template <typename T>
217 int MarkingVisitorBase<ConcreteVisitor,
VisitEmbedderTracingSubclass(Map map,T object)218 MarkingState>::VisitEmbedderTracingSubclass(Map map,
219 T object) {
220 DCHECK(object.IsApiWrapper());
221 int size = concrete_visitor()->VisitJSObjectSubclass(map, object);
222 if (size && is_embedder_tracing_enabled_) {
223 // Success: The object needs to be processed for embedder references on
224 // the main thread.
225 local_marking_worklists_->PushEmbedder(object);
226 }
227 return size;
228 }
229
230 template <typename ConcreteVisitor, typename MarkingState>
VisitJSApiObject(Map map,JSObject object)231 int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSApiObject(
232 Map map, JSObject object) {
233 return VisitEmbedderTracingSubclass(map, object);
234 }
235
236 template <typename ConcreteVisitor, typename MarkingState>
VisitJSArrayBuffer(Map map,JSArrayBuffer object)237 int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSArrayBuffer(
238 Map map, JSArrayBuffer object) {
239 object.MarkExtension();
240 return VisitEmbedderTracingSubclass(map, object);
241 }
242
243 template <typename ConcreteVisitor, typename MarkingState>
VisitJSDataView(Map map,JSDataView object)244 int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSDataView(
245 Map map, JSDataView object) {
246 return VisitEmbedderTracingSubclass(map, object);
247 }
248
249 template <typename ConcreteVisitor, typename MarkingState>
VisitJSTypedArray(Map map,JSTypedArray object)250 int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSTypedArray(
251 Map map, JSTypedArray object) {
252 return VisitEmbedderTracingSubclass(map, object);
253 }
254
// ===========================================================================
// Weak JavaScript objects ===================================================
// ===========================================================================
258
// Visits an EphemeronHashTable. Keys are treated weakly (slot recorded, but
// not marked); a value is visited strongly only if its key is already known
// to be live. The table itself is queued for end-of-marking processing.
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitEphemeronHashTable(
    Map map, EphemeronHashTable table) {
  if (!concrete_visitor()->ShouldVisit(table)) return 0;
  weak_objects_->ephemeron_hash_tables.Push(task_id_, table);

  for (InternalIndex i : table.IterateEntries()) {
    ObjectSlot key_slot =
        table.RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(i));
    HeapObject key = HeapObject::cast(table.KeyAt(i));

    concrete_visitor()->SynchronizePageAccess(key);
    concrete_visitor()->RecordSlot(table, key_slot, key);

    ObjectSlot value_slot =
        table.RawFieldOfElementAt(EphemeronHashTable::EntryToValueIndex(i));

    if (concrete_visitor()->marking_state()->IsBlackOrGrey(key)) {
      // Live key: the value is reachable, visit it strongly.
      VisitPointer(table, value_slot);
    } else {
      Object value_obj = table.ValueAt(i);

      if (value_obj.IsHeapObject()) {
        HeapObject value = HeapObject::cast(value_obj);
        concrete_visitor()->SynchronizePageAccess(value);
        concrete_visitor()->RecordSlot(table, value_slot, value);

        // Revisit ephemerons with both key and value unreachable at end
        // of concurrent marking cycle.
        if (concrete_visitor()->marking_state()->IsWhite(value)) {
          weak_objects_->discovered_ephemerons.Push(task_id_,
                                                    Ephemeron{key, value});
        }
      }
    }
  }
  return table.SizeFromMap(map);
}
297
// Visits a JSWeakRef. If the target is already marked, the target slot is
// recorded like a strong slot; otherwise the JSWeakRef is queued so its
// target can be processed once overall liveness is known.
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSWeakRef(
    Map map, JSWeakRef weak_ref) {
  int size = concrete_visitor()->VisitJSObjectSubclass(map, weak_ref);
  if (size == 0) return 0;
  if (weak_ref.target().IsHeapObject()) {
    HeapObject target = HeapObject::cast(weak_ref.target());
    concrete_visitor()->SynchronizePageAccess(target);
    if (concrete_visitor()->marking_state()->IsBlackOrGrey(target)) {
      // Record the slot inside the JSWeakRef, since the
      // VisitJSObjectSubclass above didn't visit it.
      ObjectSlot slot = weak_ref.RawField(JSWeakRef::kTargetOffset);
      concrete_visitor()->RecordSlot(weak_ref, slot, target);
    } else {
      // JSWeakRef points to a potentially dead object. We have to process
      // them when we know the liveness of the whole transitive closure.
      weak_objects_->js_weak_refs.Push(task_id_, weak_ref);
    }
  }
  return size;
}
319
// Visits a WeakCell. Both the target and the unregister token are held
// weakly: their slots are recorded only if both are already marked;
// otherwise the WeakCell is queued for end-of-marking processing.
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitWeakCell(
    Map map, WeakCell weak_cell) {
  if (!concrete_visitor()->ShouldVisit(weak_cell)) return 0;

  int size = WeakCell::BodyDescriptor::SizeOf(map, weak_cell);
  this->VisitMapPointer(weak_cell);
  WeakCell::BodyDescriptor::IterateBody(map, weak_cell, size, this);
  HeapObject target = weak_cell.relaxed_target();
  HeapObject unregister_token = HeapObject::cast(weak_cell.unregister_token());
  concrete_visitor()->SynchronizePageAccess(target);
  concrete_visitor()->SynchronizePageAccess(unregister_token);
  if (concrete_visitor()->marking_state()->IsBlackOrGrey(target) &&
      concrete_visitor()->marking_state()->IsBlackOrGrey(unregister_token)) {
    // Record the slots inside the WeakCell, since the IterateBody above
    // didn't visit it.
    ObjectSlot slot = weak_cell.RawField(WeakCell::kTargetOffset);
    concrete_visitor()->RecordSlot(weak_cell, slot, target);
    slot = weak_cell.RawField(WeakCell::kUnregisterTokenOffset);
    concrete_visitor()->RecordSlot(weak_cell, slot, unregister_token);
  } else {
    // WeakCell points to a potentially dead object or a dead unregister
    // token. We have to process them when we know the liveness of the whole
    // transitive closure.
    weak_objects_->weak_cells.Push(task_id_, weak_cell);
  }
  return size;
}
348
// ===========================================================================
// Custom weakness in descriptor arrays and transition arrays ================
// ===========================================================================
352
// Turns |descriptors| black and visits its header (map plus the pointer
// fields before the first descriptor slot). The WhiteToGrey call covers the
// white case so the subsequent GreyToBlack succeeds exactly once per array.
// Returns the array's size if this call performed the grey->black transition,
// otherwise 0.
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::MarkDescriptorArrayBlack(
    DescriptorArray descriptors) {
  concrete_visitor()->marking_state()->WhiteToGrey(descriptors);
  if (concrete_visitor()->marking_state()->GreyToBlack(descriptors)) {
    VisitPointer(descriptors, descriptors.map_slot());
    VisitPointers(descriptors, descriptors.GetFirstPointerSlot(),
                  descriptors.GetDescriptorSlot(0));
    return DescriptorArray::BodyDescriptor::SizeOf(descriptors.map(),
                                                   descriptors);
  }
  return 0;
}
366
// Visits descriptors [old_marked, number_of_own_descriptors). The per-epoch
// marked-descriptor counter ensures each descriptor range is visited at most
// once per mark-compact cycle, even when the array is shared between maps.
template <typename ConcreteVisitor, typename MarkingState>
void MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitDescriptors(
    DescriptorArray descriptor_array, int number_of_own_descriptors) {
  int16_t new_marked = static_cast<int16_t>(number_of_own_descriptors);
  int16_t old_marked = descriptor_array.UpdateNumberOfMarkedDescriptors(
      mark_compact_epoch_, new_marked);
  if (old_marked < new_marked) {
    VisitPointers(
        descriptor_array,
        MaybeObjectSlot(descriptor_array.GetDescriptorSlot(old_marked)),
        MaybeObjectSlot(descriptor_array.GetDescriptorSlot(new_marked)));
  }
}
380
// Visits a DescriptorArray reached directly (not through a Map): header
// pointers first, then all of its descriptors.
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitDescriptorArray(
    Map map, DescriptorArray array) {
  if (!concrete_visitor()->ShouldVisit(array)) return 0;
  this->VisitMapPointer(array);
  int size = DescriptorArray::BodyDescriptor::SizeOf(map, array);
  VisitPointers(array, array.GetFirstPointerSlot(), array.GetDescriptorSlot(0));
  VisitDescriptors(array, array.number_of_descriptors());
  return size;
}
391
// Visits the descriptor array owned by |map|, marking only the descriptors
// that belong to this map. Returns the number of bytes newly visited for the
// descriptor array (0 if nothing needed visiting).
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitDescriptorsForMap(
    Map map) {
  if (!map.CanTransition()) return 0;

  // Maps that can transition share their descriptor arrays and require
  // special visiting logic to avoid memory leaks.
  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a
  // non-empty descriptor array is marked, its header is also visited. The
  // slot holding the descriptor array will be implicitly recorded when the
  // pointer fields of this map are visited.

  // Acquire-load pairs with the release-store performed when the field is
  // initialized.
  Object maybe_descriptors =
      TaggedField<Object, Map::kInstanceDescriptorsOffset>::Acquire_Load(
          heap_->isolate(), map);

  // If the descriptors are a Smi, then this Map is in the process of being
  // deserialized, and doesn't yet have an initialized descriptor field.
  if (maybe_descriptors.IsSmi()) {
    DCHECK_EQ(maybe_descriptors, Deserializer::uninitialized_field_value());
    return 0;
  }

  DescriptorArray descriptors = DescriptorArray::cast(maybe_descriptors);

  // Don't do any special processing of strong descriptor arrays, let them get
  // marked through the normal visitor mechanism.
  if (descriptors.IsStrongDescriptorArray()) {
    return 0;
  }

  int size = MarkDescriptorArrayBlack(descriptors);
  int number_of_own_descriptors = map.NumberOfOwnDescriptors();
  if (number_of_own_descriptors) {
    // It is possible that the concurrent marker observes the
    // number_of_own_descriptors out of sync with the descriptors. In that
    // case the marking write barrier for the descriptor array will ensure
    // that all required descriptors are marked. The concurrent marker
    // just should avoid crashing in that case. That's why we need the
    // std::min<int>() below.
    VisitDescriptors(descriptors,
                     std::min<int>(number_of_own_descriptors,
                                   descriptors.number_of_descriptors()));
  }

  return size;
}
440
// Visits a Map: first its (shared) descriptor array via the special path,
// then the Map's own pointer fields. The bytes visited for the descriptor
// array are included in the returned size.
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitMap(Map meta_map,
                                                                Map map) {
  if (!concrete_visitor()->ShouldVisit(map)) return 0;
  int size = Map::BodyDescriptor::SizeOf(meta_map, map);
  size += VisitDescriptorsForMap(map);

  // Mark the pointer fields of the Map. If there is a transitions array, it has
  // been marked already, so it is fine that one of these fields contains a
  // pointer to it.
  Map::BodyDescriptor::IterateBody(meta_map, map, size, this);
  return size;
}
454
// Visits a TransitionArray body and queues the array for custom-weakness
// processing at the end of marking.
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitTransitionArray(
    Map map, TransitionArray array) {
  if (!concrete_visitor()->ShouldVisit(array)) return 0;
  this->VisitMapPointer(array);
  int size = TransitionArray::BodyDescriptor::SizeOf(map, array);
  TransitionArray::BodyDescriptor::IterateBody(map, array, size, this);
  weak_objects_->transition_arrays.Push(task_id_, array);
  return size;
}
465
466 } // namespace internal
467 } // namespace v8
468
469 #endif // V8_HEAP_MARKING_VISITOR_INL_H_
470