// Copyright 2019 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_MARKING_VISITOR_INL_H_
#define V8_HEAP_MARKING_VISITOR_INL_H_

#include "src/heap/marking-visitor.h"
#include "src/heap/marking-worklist.h"
#include "src/heap/objects-visiting-inl.h"
#include "src/heap/objects-visiting.h"
#include "src/heap/progress-bar.h"
#include "src/heap/spaces.h"
#include "src/objects/objects.h"
#include "src/objects/smi.h"

namespace v8 {
namespace internal {

// ===========================================================================
// Visiting strong and weak pointers =========================================
// ===========================================================================

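// Marks `object` and, on its white-to-grey transition, pushes it onto the
// local marking worklist so that its body is visited later. Since
// WhiteToGrey succeeds only once per object, each object is pushed at most
// once per marking cycle.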
template <typename ConcreteVisitor, typename MarkingState>
void MarkingVisitorBase<ConcreteVisitor, MarkingState>::MarkObject(
    HeapObject host, HeapObject object) {
  DCHECK(ReadOnlyHeap::Contains(object) || heap_->Contains(object));
  concrete_visitor()->SynchronizePageAccess(object);
  AddStrongReferenceForReferenceSummarizer(host, object);
  if (concrete_visitor()->marking_state()->WhiteToGrey(object)) {
    local_marking_worklists_->Push(object);
    if (V8_UNLIKELY(concrete_visitor()->retaining_path_mode() ==
                    TraceRetainingPathMode::kEnabled)) {
      heap_->AddRetainer(host, object);
    }
  }
}

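// Handles a strong reference found in `slot`: marks the target and records
// the slot so it can be updated if the target is later moved by the
// compactor. References into the shared heap are skipped unless this
// visitor itself belongs to the shared-heap GC.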
// class template arguments
template <typename ConcreteVisitor, typename MarkingState>
// method template arguments
template <typename THeapObjectSlot>
void MarkingVisitorBase<ConcreteVisitor, MarkingState>::ProcessStrongHeapObject(
    HeapObject host, THeapObjectSlot slot, HeapObject heap_object) {
  concrete_visitor()->SynchronizePageAccess(heap_object);
  if (!is_shared_heap_ && heap_object.InSharedHeap()) return;
  MarkObject(host, heap_object);
  concrete_visitor()->RecordSlot(host, slot, heap_object);
}

// class template arguments
template <typename ConcreteVisitor, typename MarkingState>
// method template arguments
template <typename THeapObjectSlot>
void MarkingVisitorBase<ConcreteVisitor, MarkingState>::ProcessWeakHeapObject(
    HeapObject host, THeapObjectSlot slot, HeapObject heap_object) {
  concrete_visitor()->SynchronizePageAccess(heap_object);
  if (!is_shared_heap_ && heap_object.InSharedHeap()) return;
  if (concrete_visitor()->marking_state()->IsBlackOrGrey(heap_object)) {
    // Weak references with live values are directly processed here to
    // reduce the processing time of weak cells during the main GC
    // pause.
    concrete_visitor()->RecordSlot(host, slot, heap_object);
  } else {
    // If we do not know the liveness of the value, we have to process
    // the reference when we know the liveness of the whole transitive
    // closure.
    local_weak_objects_->weak_references_local.Push(std::make_pair(host, slot));
    AddWeakReferenceForReferenceSummarizer(host, heap_object);
  }
}

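// Common slot-iteration path for both strong and weak slot kinds. Each slot
// is relaxed-loaded, since the mutator may update slots concurrently, and
// then dispatched to the strong or weak handler above.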
// class template arguments
template <typename ConcreteVisitor, typename MarkingState>
// method template arguments
template <typename TSlot>
V8_INLINE void
MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitPointersImpl(
    HeapObject host, TSlot start, TSlot end) {
  using THeapObjectSlot = typename TSlot::THeapObjectSlot;
  for (TSlot slot = start; slot < end; ++slot) {
    typename TSlot::TObject object =
        slot.Relaxed_Load(ObjectVisitorWithCageBases::cage_base());
    HeapObject heap_object;
    if (object.GetHeapObjectIfStrong(&heap_object)) {
      // If the reference changes concurrently from strong to weak, the write
      // barrier will treat the weak reference as strong, so we won't miss the
      // weak reference.
      ProcessStrongHeapObject(host, THeapObjectSlot(slot), heap_object);
    } else if (TSlot::kCanBeWeak && object.GetHeapObjectIfWeak(&heap_object)) {
      ProcessWeakHeapObject(host, THeapObjectSlot(slot), heap_object);
    }
  }
}

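// Code-pointer variant of VisitPointersImpl. Only reachable when the
// external code space is enabled (see the CHECK below), in which case the
// slot is decompressed against the code cage base rather than the main
// cage base.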
template <typename ConcreteVisitor, typename MarkingState>
V8_INLINE void
MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitCodePointerImpl(
    HeapObject host, CodeObjectSlot slot) {
  CHECK(V8_EXTERNAL_CODE_SPACE_BOOL);
  Object object =
      slot.Relaxed_Load(ObjectVisitorWithCageBases::code_cage_base());
  HeapObject heap_object;
  if (object.GetHeapObjectIfStrong(&heap_object)) {
    // If the reference changes concurrently from strong to weak, the write
    // barrier will treat the weak reference as strong, so we won't miss the
    // weak reference.
    ProcessStrongHeapObject(host, HeapObjectSlot(slot), heap_object);
  }
}

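// Visits a heap object embedded in the instruction stream of `host`. Such
// references can be weak (e.g. optimized code referencing maps), in which
// case the pair is deferred to the weak-objects-in-code worklist instead of
// being marked here.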
template <typename ConcreteVisitor, typename MarkingState>
void MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitEmbeddedPointer(
    Code host, RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsEmbeddedObjectMode(rinfo->rmode()));
  HeapObject object =
      rinfo->target_object(ObjectVisitorWithCageBases::cage_base());
  if (!is_shared_heap_ && object.InSharedHeap()) return;

  if (!concrete_visitor()->marking_state()->IsBlackOrGrey(object)) {
    if (host.IsWeakObject(object)) {
      local_weak_objects_->weak_objects_in_code_local.Push(
          std::make_pair(object, host));
      AddWeakReferenceForReferenceSummarizer(host, object);
    } else {
      MarkObject(host, object);
    }
  }
  concrete_visitor()->RecordRelocSlot(host, rinfo, object);
}

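// Visits the Code object targeted by a call or jump site in `host` and
// records the relocation slot for pointer updating.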
template <typename ConcreteVisitor, typename MarkingState>
void MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitCodeTarget(
    Code host, RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeTargetMode(rinfo->rmode()));
  Code target = Code::GetCodeFromTargetAddress(rinfo->target_address());

  if (!is_shared_heap_ && target.InSharedHeap()) return;
  MarkObject(host, target);
  concrete_visitor()->RecordRelocSlot(host, rinfo, target);
}

// ===========================================================================
// Objects participating in bytecode flushing ================================
// ===========================================================================

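// Visits a bytecode array and, unless the caller asked for ages to stay
// unchanged, calls MakeOlder(): advancing the bytecode age is what
// eventually qualifies unused bytecode for flushing.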
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitBytecodeArray(
    Map map, BytecodeArray object) {
  if (!concrete_visitor()->ShouldVisit(object)) return 0;
  int size = BytecodeArray::BodyDescriptor::SizeOf(map, object);
  this->VisitMapPointer(object);
  BytecodeArray::BodyDescriptor::IterateBody(map, object, size, this);
  if (!should_keep_ages_unchanged_) {
    object.MakeOlder();
  }
  return size;
}

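// For functions with flushable baseline code, the code field is deliberately
// not visited; the function is instead queued as a flushing candidate and
// its code field is reconsidered once marking has finished.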
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSFunction(
    Map map, JSFunction js_function) {
  int size = concrete_visitor()->VisitJSObjectSubclass(map, js_function);
  if (js_function.ShouldFlushBaselineCode(code_flush_mode_)) {
    DCHECK(IsBaselineCodeFlushingEnabled(code_flush_mode_));
    local_weak_objects_->baseline_flushing_candidates_local.Push(js_function);
  } else {
    VisitPointer(js_function, js_function.RawField(JSFunction::kCodeOffset));
    // TODO(mythria): Consider updating the check for ShouldFlushBaselineCode
    // to also include cases where there is old bytecode even when there is no
    // baseline code, and remove this check here.
    if (IsByteCodeFlushingEnabled(code_flush_mode_) &&
        js_function.NeedsResetDueToFlushedBytecode()) {
      local_weak_objects_->flushed_js_functions_local.Push(js_function);
    }
  }
  return size;
}

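// Visits a SharedFunctionInfo, treating its function data field as strong,
// weak, or partially weak depending on which of bytecode flushing and
// baseline code flushing are enabled; see the three branches below.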
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitSharedFunctionInfo(
    Map map, SharedFunctionInfo shared_info) {
  if (!concrete_visitor()->ShouldVisit(shared_info)) return 0;

  int size = SharedFunctionInfo::BodyDescriptor::SizeOf(map, shared_info);
  this->VisitMapPointer(shared_info);
  SharedFunctionInfo::BodyDescriptor::IterateBody(map, shared_info, size, this);

  if (!shared_info.ShouldFlushCode(code_flush_mode_)) {
    // If the SharedFunctionInfo doesn't have old bytecode, visit the function
    // data strongly.
    VisitPointer(shared_info,
                 shared_info.RawField(SharedFunctionInfo::kFunctionDataOffset));
  } else if (!IsByteCodeFlushingEnabled(code_flush_mode_)) {
    // If bytecode flushing is disabled but baseline code flushing is enabled,
    // then we have to visit the bytecode but not the baseline code.
    DCHECK(IsBaselineCodeFlushingEnabled(code_flush_mode_));
    CodeT baseline_codet = CodeT::cast(shared_info.function_data(kAcquireLoad));
    // Safe to do a relaxed load here since the CodeT was acquire-loaded.
    Code baseline_code = FromCodeT(baseline_codet, kRelaxedLoad);
    // Visit the bytecode hanging off baseline code.
    VisitPointer(baseline_code,
                 baseline_code.RawField(
                     Code::kDeoptimizationDataOrInterpreterDataOffset));
    local_weak_objects_->code_flushing_candidates_local.Push(shared_info);
  } else {
    // In other cases, record as a flushing candidate since we have old
    // bytecode.
    local_weak_objects_->code_flushing_candidates_local.Push(shared_info);
  }
  return size;
}

// ===========================================================================
// Fixed arrays that need incremental processing and can be left-trimmed =====
// ===========================================================================

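// Scans a large FixedArray incrementally in chunks of
// kMaxRegularHeapObjectSize bytes. The progress bar stores the offset
// scanned so far; if the array is not yet finished, it is re-pushed onto the
// worklist and the next pop continues where this chunk ended. The return
// value is the number of bytes scanned in this step, not the object size.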
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::
    VisitFixedArrayWithProgressBar(Map map, FixedArray object,
                                   ProgressBar& progress_bar) {
  const int kProgressBarScanningChunk = kMaxRegularHeapObjectSize;
  STATIC_ASSERT(kMaxRegularHeapObjectSize % kTaggedSize == 0);
  DCHECK(concrete_visitor()->marking_state()->IsBlackOrGrey(object));
  concrete_visitor()->marking_state()->GreyToBlack(object);
  int size = FixedArray::BodyDescriptor::SizeOf(map, object);
  size_t current_progress_bar = progress_bar.Value();
  int start = static_cast<int>(current_progress_bar);
  if (start == 0) {
    this->VisitMapPointer(object);
    start = FixedArray::BodyDescriptor::kStartOffset;
  }
  int end = std::min(size, start + kProgressBarScanningChunk);
  if (start < end) {
    VisitPointers(object, object.RawField(start), object.RawField(end));
    bool success = progress_bar.TrySetNewValue(current_progress_bar, end);
    CHECK(success);
    if (end < size) {
      // The object can be pushed back onto the marking worklist only after
      // the progress bar has been updated.
      local_marking_worklists_->Push(object);
    }
  }
  return end - start;
}

template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitFixedArray(
    Map map, FixedArray object) {
  // Arrays with the progress bar are not left-trimmable because they reside
  // in the large object space.
  ProgressBar& progress_bar =
      MemoryChunk::FromHeapObject(object)->ProgressBar();
  return CanUpdateValuesInHeap() && progress_bar.IsEnabled()
             ? VisitFixedArrayWithProgressBar(map, object, progress_bar)
             : concrete_visitor()->VisitLeftTrimmableArray(map, object);
}

template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitFixedDoubleArray(
    Map map, FixedDoubleArray object) {
  return concrete_visitor()->VisitLeftTrimmableArray(map, object);
}

// ===========================================================================
// Objects participating in embedder tracing =================================
// ===========================================================================

template <typename ConcreteVisitor, typename MarkingState>
template <typename T>
inline int MarkingVisitorBase<ConcreteVisitor, MarkingState>::
    VisitEmbedderTracingSubClassNoEmbedderTracing(Map map, T object) {
  return concrete_visitor()->VisitJSObjectSubclass(map, object);
}

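// When the worklist supports wrapper extraction, the embedder fields are
// snapshotted while the object is visited so that the embedder can process
// them concurrently; otherwise the wrapper object itself is pushed and
// handled later on the main thread.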
template <typename ConcreteVisitor, typename MarkingState>
template <typename T>
inline int MarkingVisitorBase<ConcreteVisitor, MarkingState>::
    VisitEmbedderTracingSubClassWithEmbedderTracing(Map map, T object) {
  const bool requires_snapshot =
      local_marking_worklists_->SupportsExtractWrapper();
  MarkingWorklists::Local::WrapperSnapshot wrapper_snapshot;
  const bool valid_snapshot =
      requires_snapshot &&
      local_marking_worklists_->ExtractWrapper(map, object, wrapper_snapshot);
  const int size = concrete_visitor()->VisitJSObjectSubclass(map, object);
  if (size) {
    if (valid_snapshot) {
      // Success: The object needs to be processed for embedder references.
      local_marking_worklists_->PushExtractedWrapper(wrapper_snapshot);
    } else if (!requires_snapshot) {
      // Snapshots are not supported. Fall back to pushing the wrapper itself,
      // which will then be processed on the main thread.
      local_marking_worklists_->PushWrapper(object);
    }
  }
  return size;
}

template <typename ConcreteVisitor, typename MarkingState>
template <typename T>
int MarkingVisitorBase<ConcreteVisitor,
                       MarkingState>::VisitEmbedderTracingSubclass(Map map,
                                                                   T object) {
  DCHECK(object.MayHaveEmbedderFields());
  if (V8_LIKELY(is_embedder_tracing_enabled_)) {
    return VisitEmbedderTracingSubClassWithEmbedderTracing(map, object);
  }
  return VisitEmbedderTracingSubClassNoEmbedderTracing(map, object);
}

template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSApiObject(
    Map map, JSObject object) {
  return VisitEmbedderTracingSubclass(map, object);
}

template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSArrayBuffer(
    Map map, JSArrayBuffer object) {
  object.MarkExtension();
  return VisitEmbedderTracingSubclass(map, object);
}

template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSDataView(
    Map map, JSDataView object) {
  return VisitEmbedderTracingSubclass(map, object);
}

template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSTypedArray(
    Map map, JSTypedArray object) {
  return VisitEmbedderTracingSubclass(map, object);
}

// ===========================================================================
// Weak JavaScript objects ===================================================
// ===========================================================================

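// Ephemeron semantics: a value is kept alive only if its key is. Values
// whose keys are already marked are visited directly; otherwise the
// key/value pair is deferred and revisited once the liveness of the key is
// known.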
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitEphemeronHashTable(
    Map map, EphemeronHashTable table) {
  if (!concrete_visitor()->ShouldVisit(table)) return 0;
  local_weak_objects_->ephemeron_hash_tables_local.Push(table);

  for (InternalIndex i : table.IterateEntries()) {
    ObjectSlot key_slot =
        table.RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(i));
    HeapObject key = HeapObject::cast(table.KeyAt(i));

    concrete_visitor()->SynchronizePageAccess(key);
    concrete_visitor()->RecordSlot(table, key_slot, key);
    AddWeakReferenceForReferenceSummarizer(table, key);

    ObjectSlot value_slot =
        table.RawFieldOfElementAt(EphemeronHashTable::EntryToValueIndex(i));

    if ((!is_shared_heap_ && key.InSharedHeap()) ||
        concrete_visitor()->marking_state()->IsBlackOrGrey(key)) {
      VisitPointer(table, value_slot);
    } else {
      Object value_obj = table.ValueAt(i);

      if (value_obj.IsHeapObject()) {
        HeapObject value = HeapObject::cast(value_obj);
        concrete_visitor()->SynchronizePageAccess(value);
        concrete_visitor()->RecordSlot(table, value_slot, value);
        AddWeakReferenceForReferenceSummarizer(table, value);

        if (!is_shared_heap_ && value.InSharedHeap()) continue;

        // Revisit ephemerons with both key and value unreachable at the end
        // of the concurrent marking cycle.
        if (concrete_visitor()->marking_state()->IsWhite(value)) {
          local_weak_objects_->discovered_ephemerons_local.Push(
              Ephemeron{key, value});
        }
      }
    }
  }
  return table.SizeFromMap(map);
}

template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitJSWeakRef(
    Map map, JSWeakRef weak_ref) {
  int size = concrete_visitor()->VisitJSObjectSubclass(map, weak_ref);
  if (size == 0) return 0;
  if (weak_ref.target().IsHeapObject()) {
    HeapObject target = HeapObject::cast(weak_ref.target());
    concrete_visitor()->SynchronizePageAccess(target);
    if (concrete_visitor()->marking_state()->IsBlackOrGrey(target)) {
      // Record the slot inside the JSWeakRef, since the
      // VisitJSObjectSubclass above didn't visit it.
      ObjectSlot slot = weak_ref.RawField(JSWeakRef::kTargetOffset);
      concrete_visitor()->RecordSlot(weak_ref, slot, target);
    } else {
      // The JSWeakRef points to a potentially dead object. We have to process
      // it when we know the liveness of the whole transitive closure.
      local_weak_objects_->js_weak_refs_local.Push(weak_ref);
      AddWeakReferenceForReferenceSummarizer(weak_ref, target);
    }
  }
  return size;
}

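// WeakCells back FinalizationRegistry entries: both the target and the
// unregister token are weak, so the cell is deferred unless both are
// already known to be live.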
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitWeakCell(
    Map map, WeakCell weak_cell) {
  if (!concrete_visitor()->ShouldVisit(weak_cell)) return 0;

  int size = WeakCell::BodyDescriptor::SizeOf(map, weak_cell);
  this->VisitMapPointer(weak_cell);
  WeakCell::BodyDescriptor::IterateBody(map, weak_cell, size, this);
  HeapObject target = weak_cell.relaxed_target();
  HeapObject unregister_token = weak_cell.relaxed_unregister_token();
  concrete_visitor()->SynchronizePageAccess(target);
  concrete_visitor()->SynchronizePageAccess(unregister_token);
  if (concrete_visitor()->marking_state()->IsBlackOrGrey(target) &&
      concrete_visitor()->marking_state()->IsBlackOrGrey(unregister_token)) {
    // Record the slots inside the WeakCell, since the IterateBody above
    // didn't visit them.
    ObjectSlot slot = weak_cell.RawField(WeakCell::kTargetOffset);
    concrete_visitor()->RecordSlot(weak_cell, slot, target);
    slot = weak_cell.RawField(WeakCell::kUnregisterTokenOffset);
    concrete_visitor()->RecordSlot(weak_cell, slot, unregister_token);
  } else {
    // The WeakCell points to a potentially dead object or a dead unregister
    // token. We have to process them when we know the liveness of the whole
    // transitive closure.
    local_weak_objects_->weak_cells_local.Push(weak_cell);
    AddWeakReferenceForReferenceSummarizer(weak_cell, target);
    AddWeakReferenceForReferenceSummarizer(weak_cell, unregister_token);
  }
  return size;
}

// ===========================================================================
// Custom weakness in descriptor arrays and transition arrays ================
// ===========================================================================

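// Marks a descriptor array black without pushing it onto the worklist, and
// visits only its header region (up to the first descriptor). The
// descriptors themselves are marked separately, per map, via
// VisitDescriptors() below.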
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::MarkDescriptorArrayBlack(
    DescriptorArray descriptors) {
  concrete_visitor()->marking_state()->WhiteToGrey(descriptors);
  if (concrete_visitor()->marking_state()->GreyToBlack(descriptors)) {
    VisitMapPointer(descriptors);
    VisitPointers(descriptors, descriptors.GetFirstPointerSlot(),
                  descriptors.GetDescriptorSlot(0));
    return DescriptorArray::BodyDescriptor::SizeOf(descriptors.map(),
                                                   descriptors);
  }
  return 0;
}

template <typename ConcreteVisitor, typename MarkingState>
void MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitDescriptors(
    DescriptorArray descriptor_array, int number_of_own_descriptors) {
  int16_t new_marked = static_cast<int16_t>(number_of_own_descriptors);
  int16_t old_marked = 0;
  if (CanUpdateValuesInHeap()) {
    old_marked = descriptor_array.UpdateNumberOfMarkedDescriptors(
        mark_compact_epoch_, new_marked);
  }
  if (old_marked < new_marked) {
    VisitPointers(
        descriptor_array,
        MaybeObjectSlot(descriptor_array.GetDescriptorSlot(old_marked)),
        MaybeObjectSlot(descriptor_array.GetDescriptorSlot(new_marked)));
  }
}

template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitDescriptorArray(
    Map map, DescriptorArray array) {
  if (!concrete_visitor()->ShouldVisit(array)) return 0;
  this->VisitMapPointer(array);
  int size = DescriptorArray::BodyDescriptor::SizeOf(map, array);
  VisitPointers(array, array.GetFirstPointerSlot(), array.GetDescriptorSlot(0));
  VisitDescriptors(array, array.number_of_descriptors());
  return size;
}

template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitDescriptorsForMap(
    Map map) {
  if (!map.CanTransition()) return 0;

  // Maps that can transition share their descriptor arrays and require
  // special visiting logic to avoid memory leaks.
  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a
  // non-empty descriptor array is marked, its header is also visited. The
  // slot holding the descriptor array will be implicitly recorded when the
  // pointer fields of this map are visited.

  Object maybe_descriptors =
      TaggedField<Object, Map::kInstanceDescriptorsOffset>::Acquire_Load(
          heap_->isolate(), map);

  // If the descriptors are a Smi, then this Map is in the process of being
  // deserialized, and doesn't yet have an initialized descriptor field.
  if (maybe_descriptors.IsSmi()) {
    DCHECK_EQ(maybe_descriptors, Smi::uninitialized_deserialization_value());
    return 0;
  }

  DescriptorArray descriptors = DescriptorArray::cast(maybe_descriptors);

  // Don't do any special processing of strong descriptor arrays; let them be
  // marked through the normal visitor mechanism.
  if (descriptors.IsStrongDescriptorArray()) {
    return 0;
  }
  concrete_visitor()->SynchronizePageAccess(descriptors);
  int size = MarkDescriptorArrayBlack(descriptors);
  int number_of_own_descriptors = map.NumberOfOwnDescriptors();
  if (number_of_own_descriptors) {
    // It is possible that the concurrent marker observes the
    // number_of_own_descriptors out of sync with the descriptors. In that
    // case the marking write barrier for the descriptor array will ensure
    // that all required descriptors are marked. The concurrent marker
    // should just avoid crashing in that case. That's why we need the
    // std::min<int>() below.
    VisitDescriptors(descriptors,
                     std::min<int>(number_of_own_descriptors,
                                   descriptors.number_of_descriptors()));
  }

  return size;
}

template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitMap(Map meta_map,
                                                                Map map) {
  if (!concrete_visitor()->ShouldVisit(map)) return 0;
  int size = Map::BodyDescriptor::SizeOf(meta_map, map);
  size += VisitDescriptorsForMap(map);

  // Mark the pointer fields of the Map. If there is a transitions array, it
  // has been marked already, so it is fine that one of these fields contains
  // a pointer to it.
  Map::BodyDescriptor::IterateBody(meta_map, map, size, this);
  return size;
}

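// Transition arrays are visited like regular arrays but are also pushed onto
// a dedicated worklist, so that stale transitions can be pruned once the
// liveness of their target maps is known.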
template <typename ConcreteVisitor, typename MarkingState>
int MarkingVisitorBase<ConcreteVisitor, MarkingState>::VisitTransitionArray(
    Map map, TransitionArray array) {
  if (!concrete_visitor()->ShouldVisit(array)) return 0;
  this->VisitMapPointer(array);
  int size = TransitionArray::BodyDescriptor::SizeOf(map, array);
  TransitionArray::BodyDescriptor::IterateBody(map, array, size, this);
  local_weak_objects_->transition_arrays_local.Push(array);
  return size;
}

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_MARKING_VISITOR_INL_H_