// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_CPPGC_MARKING_STATE_H_
#define V8_HEAP_CPPGC_MARKING_STATE_H_

#include <algorithm>
#include <memory>
#include <utility>
#include <vector>

#include "include/cppgc/trace-trait.h"
#include "include/cppgc/visitor.h"
#include "src/base/logging.h"
#include "src/heap/cppgc/compaction-worklists.h"
#include "src/heap/cppgc/globals.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/heap-page.h"
#include "src/heap/cppgc/liveness-broker.h"
#include "src/heap/cppgc/marking-worklists.h"

namespace cppgc {
namespace internal {

// C++ marking implementation. MarkingStateBase provides the core primitives
// for marking objects and pushing them onto the marking worklist.
class MarkingStateBase {
 public:
  inline MarkingStateBase(HeapBase&, MarkingWorklists&);

  MarkingStateBase(const MarkingStateBase&) = delete;
  MarkingStateBase& operator=(const MarkingStateBase&) = delete;

  inline void MarkAndPush(const void*, TraceDescriptor);
  inline void MarkAndPush(HeapObjectHeader&);

  inline void PushMarked(HeapObjectHeader&, TraceDescriptor desc);

  void Publish() { marking_worklist_.Publish(); }

  MarkingWorklists::MarkingWorklist::Local& marking_worklist() {
    return marking_worklist_;
  }
  MarkingWorklists::NotFullyConstructedWorklist&
  not_fully_constructed_worklist() {
    return not_fully_constructed_worklist_;
  }

 protected:
  inline void MarkAndPush(HeapObjectHeader&, TraceDescriptor);

  inline bool MarkNoPush(HeapObjectHeader&);

  HeapBase& heap_;

  MarkingWorklists::MarkingWorklist::Local marking_worklist_;
  MarkingWorklists::NotFullyConstructedWorklist&
      not_fully_constructed_worklist_;
};

MarkingStateBase::MarkingStateBase(HeapBase& heap,
                                   MarkingWorklists& marking_worklists)
    : heap_(heap),
      marking_worklist_(marking_worklists.marking_worklist()),
      not_fully_constructed_worklist_(
          *marking_worklists.not_fully_constructed_worklist()) {}

void MarkingStateBase::MarkAndPush(const void* object, TraceDescriptor desc) {
  DCHECK_NOT_NULL(object);
  MarkAndPush(
      HeapObjectHeader::FromObject(const_cast<void*>(desc.base_object_payload)),
      desc);
}

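// Marks |header| and pushes it onto the marking worklist. Objects that are
// still in construction cannot be traced yet and are instead recorded on the
// not-fully-constructed worklist for later processing.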
void MarkingStateBase::MarkAndPush(HeapObjectHeader& header,
                                   TraceDescriptor desc) {
  DCHECK_NOT_NULL(desc.callback);

  if (header.IsInConstruction<AccessMode::kAtomic>()) {
    not_fully_constructed_worklist_.Push<AccessMode::kAtomic>(&header);
  } else if (MarkNoPush(header)) {
    PushMarked(header, desc);
  }
}

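// Attempts to set the mark bit atomically. Returns false if the object was
// already marked, in which case it must not be pushed again.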
bool MarkingStateBase::MarkNoPush(HeapObjectHeader& header) {
  // A GC should only mark the objects that belong in its heap.
  DCHECK_EQ(&heap_, &BasePage::FromPayload(&header)->heap());
  // Never mark free space objects. This would e.g. hint at marking a promptly
  // freed backing store.
  DCHECK(!header.IsFree<AccessMode::kAtomic>());
  return header.TryMarkAtomic();
}

void MarkingStateBase::MarkAndPush(HeapObjectHeader& header) {
  MarkAndPush(
      header,
      {header.ObjectStart(),
       GlobalGCInfoTable::GCInfoFromIndex(header.GetGCInfoIndex()).trace});
}

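// Pushes an already-marked, fully-constructed object onto the marking
// worklist so that its trace callback is eventually invoked.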
void MarkingStateBase::PushMarked(HeapObjectHeader& header,
                                  TraceDescriptor desc) {
  DCHECK(header.IsMarked<AccessMode::kAtomic>());
  DCHECK(!header.IsInConstruction<AccessMode::kAtomic>());
  DCHECK_NOT_NULL(desc.callback);

  marking_worklist_.Push(desc);
}

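// Shared marking state for mutator and concurrent markers. Adds weak
// reference, ephemeron, compaction, and marked-bytes bookkeeping on top of
// MarkingStateBase.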
class BasicMarkingState : public MarkingStateBase {
 public:
  inline BasicMarkingState(HeapBase& heap, MarkingWorklists&,
                           CompactionWorklists*);

  BasicMarkingState(const BasicMarkingState&) = delete;
  BasicMarkingState& operator=(const BasicMarkingState&) = delete;

  inline void RegisterWeakReferenceIfNeeded(const void*, TraceDescriptor,
                                            WeakCallback, const void*);
  inline void RegisterWeakCallback(WeakCallback, const void*);

  void RegisterMovableReference(const void** slot) {
    if (!movable_slots_worklist_) return;
    movable_slots_worklist_->Push(slot);
  }

  // Weak containers are special in that they may require re-tracing if
  // reachable through the stack, even if the container was already traced
  // before. ProcessWeakContainer records which weak containers were already
  // marked so that conservative stack scanning knows to retrace them.
  inline void ProcessWeakContainer(const void*, TraceDescriptor, WeakCallback,
                                   const void*);

  inline void ProcessEphemeron(const void*, const void*, TraceDescriptor,
                               Visitor&);

  inline void AccountMarkedBytes(const HeapObjectHeader&);
  inline void AccountMarkedBytes(size_t);
  size_t marked_bytes() const { return marked_bytes_; }

  void Publish() {
    MarkingStateBase::Publish();
    previously_not_fully_constructed_worklist_.Publish();
    weak_callback_worklist_.Publish();
    write_barrier_worklist_.Publish();
    concurrent_marking_bailout_worklist_.Publish();
    discovered_ephemeron_pairs_worklist_.Publish();
    ephemeron_pairs_for_processing_worklist_.Publish();
    if (IsCompactionEnabled()) movable_slots_worklist_->Publish();
  }

  MarkingWorklists::PreviouslyNotFullyConstructedWorklist::Local&
  previously_not_fully_constructed_worklist() {
    return previously_not_fully_constructed_worklist_;
  }
  MarkingWorklists::WeakCallbackWorklist::Local& weak_callback_worklist() {
    return weak_callback_worklist_;
  }
  MarkingWorklists::WriteBarrierWorklist::Local& write_barrier_worklist() {
    return write_barrier_worklist_;
  }
  MarkingWorklists::ConcurrentMarkingBailoutWorklist::Local&
  concurrent_marking_bailout_worklist() {
    return concurrent_marking_bailout_worklist_;
  }
  MarkingWorklists::EphemeronPairsWorklist::Local&
  discovered_ephemeron_pairs_worklist() {
    return discovered_ephemeron_pairs_worklist_;
  }
  MarkingWorklists::EphemeronPairsWorklist::Local&
  ephemeron_pairs_for_processing_worklist() {
    return ephemeron_pairs_for_processing_worklist_;
  }
  MarkingWorklists::WeakContainersWorklist& weak_containers_worklist() {
    return weak_containers_worklist_;
  }
  MarkingWorklists::RetraceMarkedObjectsWorklist::Local&
  retrace_marked_objects_worklist() {
    return retrace_marked_objects_worklist_;
  }

  CompactionWorklists::MovableReferencesWorklist::Local*
  movable_slots_worklist() {
    return movable_slots_worklist_.get();
  }

  bool DidDiscoverNewEphemeronPairs() const {
    return discovered_new_ephemeron_pairs_;
  }

  void ResetDidDiscoverNewEphemeronPairs() {
    discovered_new_ephemeron_pairs_ = false;
  }

  void set_in_atomic_pause() { in_atomic_pause_ = true; }

 protected:
  inline void RegisterWeakContainer(HeapObjectHeader&);

  inline bool IsCompactionEnabled() const {
    return movable_slots_worklist_.get();
  }

  MarkingWorklists::PreviouslyNotFullyConstructedWorklist::Local
      previously_not_fully_constructed_worklist_;
  MarkingWorklists::WeakCallbackWorklist::Local weak_callback_worklist_;
  MarkingWorklists::WriteBarrierWorklist::Local write_barrier_worklist_;
  MarkingWorklists::ConcurrentMarkingBailoutWorklist::Local
      concurrent_marking_bailout_worklist_;
  MarkingWorklists::EphemeronPairsWorklist::Local
      discovered_ephemeron_pairs_worklist_;
  MarkingWorklists::EphemeronPairsWorklist::Local
      ephemeron_pairs_for_processing_worklist_;
  MarkingWorklists::WeakContainersWorklist& weak_containers_worklist_;
  MarkingWorklists::RetraceMarkedObjectsWorklist::Local
      retrace_marked_objects_worklist_;
  // Existence of the worklist (|movable_slots_worklist_| != nullptr) denotes
  // that compaction is currently enabled and slots must be recorded.
  std::unique_ptr<CompactionWorklists::MovableReferencesWorklist::Local>
      movable_slots_worklist_;

  size_t marked_bytes_ = 0;
  bool in_ephemeron_processing_ = false;
  bool discovered_new_ephemeron_pairs_ = false;
  bool in_atomic_pause_ = false;
};

BasicMarkingState::BasicMarkingState(HeapBase& heap,
                                     MarkingWorklists& marking_worklists,
                                     CompactionWorklists* compaction_worklists)
    : MarkingStateBase(heap, marking_worklists),
      previously_not_fully_constructed_worklist_(
          marking_worklists.previously_not_fully_constructed_worklist()),
      weak_callback_worklist_(marking_worklists.weak_callback_worklist()),
      write_barrier_worklist_(marking_worklists.write_barrier_worklist()),
      concurrent_marking_bailout_worklist_(
          marking_worklists.concurrent_marking_bailout_worklist()),
      discovered_ephemeron_pairs_worklist_(
          marking_worklists.discovered_ephemeron_pairs_worklist()),
      ephemeron_pairs_for_processing_worklist_(
          marking_worklists.ephemeron_pairs_for_processing_worklist()),
      weak_containers_worklist_(*marking_worklists.weak_containers_worklist()),
      retrace_marked_objects_worklist_(
          marking_worklists.retrace_marked_objects_worklist()) {
  if (compaction_worklists) {
    movable_slots_worklist_ =
        std::make_unique<CompactionWorklists::MovableReferencesWorklist::Local>(
            compaction_worklists->movable_slots_worklist());
  }
}

void BasicMarkingState::RegisterWeakReferenceIfNeeded(
    const void* object, TraceDescriptor desc, WeakCallback weak_callback,
    const void* parameter) {
  // Filter out already marked values. The write barrier for WeakMember
  // ensures that any newly set value after this point is kept alive and does
  // not require the callback.
  const HeapObjectHeader& header =
      HeapObjectHeader::FromObject(desc.base_object_payload);
  if (!header.IsInConstruction<AccessMode::kAtomic>() &&
      header.IsMarked<AccessMode::kAtomic>())
    return;
  RegisterWeakCallback(weak_callback, parameter);
}

void BasicMarkingState::RegisterWeakCallback(WeakCallback callback,
                                             const void* object) {
  DCHECK_NOT_NULL(callback);
  weak_callback_worklist_.Push({callback, object});
}

void BasicMarkingState::RegisterWeakContainer(HeapObjectHeader& header) {
  weak_containers_worklist_.Push<AccessMode::kAtomic>(&header);
}

void BasicMarkingState::ProcessWeakContainer(const void* object,
                                             TraceDescriptor desc,
                                             WeakCallback callback,
                                             const void* data) {
  DCHECK_NOT_NULL(object);

  HeapObjectHeader& header =
      HeapObjectHeader::FromObject(const_cast<void*>(object));

  if (header.IsInConstruction<AccessMode::kAtomic>()) {
    not_fully_constructed_worklist_.Push<AccessMode::kAtomic>(&header);
    return;
  }

  // Only mark the container initially. Its buckets will be processed after
  // marking.
  if (!MarkNoPush(header)) return;

  RegisterWeakContainer(header);

  // Register final weak processing of the backing store.
  RegisterWeakCallback(callback, data);

  // Weak containers might not require tracing. In such cases the callback in
  // the TraceDescriptor will be nullptr. For ephemerons the callback will be
  // non-nullptr so that the container is traced and the ephemeron pairs are
  // processed.
  if (desc.callback) {
    PushMarked(header, desc);
  } else {
    // For weak containers, there's no trace callback and no processing loop
    // that would update the marked bytes, so account for them here.
    AccountMarkedBytes(header);
  }
}

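// Processes a single ephemeron pair: if the key is considered live, the value
// is marked (or traced directly for non-GCed values); otherwise the pair is
// queued on the discovered-ephemeron-pairs worklist and revisited later.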
void BasicMarkingState::ProcessEphemeron(const void* key, const void* value,
                                         TraceDescriptor value_desc,
                                         Visitor& visitor) {
  // ProcessEphemeron is not expected to find new ephemerons recursively, which
  // would break the main marking loop.
  DCHECK(!in_ephemeron_processing_);
  in_ephemeron_processing_ = true;
  // Keys are considered live even in incremental/concurrent marking settings
  // because the write barrier for WeakMember ensures that any newly set value
  // after this point is kept alive and does not require the callback.
  const bool key_in_construction =
      HeapObjectHeader::FromObject(key).IsInConstruction<AccessMode::kAtomic>();
  const bool key_considered_as_live =
      key_in_construction
          ? in_atomic_pause_
          : HeapObjectHeader::FromObject(key).IsMarked<AccessMode::kAtomic>();
  DCHECK_IMPLIES(
      key_in_construction && in_atomic_pause_,
      HeapObjectHeader::FromObject(key).IsMarked<AccessMode::kAtomic>());
  if (key_considered_as_live) {
    if (value_desc.base_object_payload) {
      MarkAndPush(value_desc.base_object_payload, value_desc);
    } else {
      // If value_desc.base_object_payload is nullptr, the value is not GCed
      // and should be immediately traced.
      value_desc.callback(&visitor, value);
    }
  } else {
    discovered_ephemeron_pairs_worklist_.Push({key, value, value_desc});
    discovered_new_ephemeron_pairs_ = true;
  }
  in_ephemeron_processing_ = false;
}

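// Adds the object's size to the marked-bytes counter. Large objects are
// accounted with their page's payload size, normal objects with their
// allocated size.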
void BasicMarkingState::AccountMarkedBytes(const HeapObjectHeader& header) {
  AccountMarkedBytes(
      header.IsLargeObject<AccessMode::kAtomic>()
          ? reinterpret_cast<const LargePage*>(BasePage::FromPayload(&header))
                ->PayloadSize()
          : header.AllocatedSize<AccessMode::kAtomic>());
}

void BasicMarkingState::AccountMarkedBytes(size_t marked_bytes) {
  marked_bytes_ += marked_bytes;
}

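// Marking state used by the mutator-side marker. Adds support for dynamically
// marking addresses, flushing worklists for the atomic pause, and retracing
// weak containers found during conservative stack scanning.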
class MutatorMarkingState : public BasicMarkingState {
 public:
  MutatorMarkingState(HeapBase& heap, MarkingWorklists& marking_worklists,
                      CompactionWorklists* compaction_worklists)
      : BasicMarkingState(heap, marking_worklists, compaction_worklists) {}

  inline bool MarkNoPush(HeapObjectHeader& header) {
    return MutatorMarkingState::BasicMarkingState::MarkNoPush(header);
  }

  inline void ReTraceMarkedWeakContainer(cppgc::Visitor&, HeapObjectHeader&);

  inline void DynamicallyMarkAddress(ConstAddress);

  // Moves objects in not_fully_constructed_worklist_ to
  // previously_not_fully_constructed_worklist_.
  void FlushNotFullyConstructedObjects();

  // Moves ephemeron pairs in discovered_ephemeron_pairs_worklist_ to
  // ephemeron_pairs_for_processing_worklist_.
  void FlushDiscoveredEphemeronPairs();

  inline void InvokeWeakRootsCallbackIfNeeded(const void*, TraceDescriptor,
                                              WeakCallback, const void*);

  inline bool IsMarkedWeakContainer(HeapObjectHeader&);

 private:
  // Weak containers are strongly retraced during conservative stack scanning.
  // Stack scanning happens once per GC at the start of the atomic pause.
  // Because the visitor is not retained between GCs, there is no need to clear
  // the set at the end of GC.
  class RecentlyRetracedWeakContainers {
    static constexpr size_t kMaxCacheSize = 8;

   public:
    inline bool Contains(const HeapObjectHeader*);
    inline void Insert(const HeapObjectHeader*);

   private:
    std::vector<const HeapObjectHeader*> recently_retraced_cache_;
    size_t last_used_index_ = -1;
  } recently_retraced_weak_containers_;
};

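// Queues an already-marked weak container for retracing and remembers it so
// that it is not retraced more than once per stack scan.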
void MutatorMarkingState::ReTraceMarkedWeakContainer(cppgc::Visitor& visitor,
                                                     HeapObjectHeader& header) {
  DCHECK(weak_containers_worklist_.Contains(&header));
  recently_retraced_weak_containers_.Insert(&header);
  retrace_marked_objects_worklist().Push(&header);
}

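// Marks the object containing |address|, resolving its header via the page.
// If the object was not already marked, it is pushed onto the marking
// worklist with its trace callback from the GCInfo table.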
void MutatorMarkingState::DynamicallyMarkAddress(ConstAddress address) {
  HeapObjectHeader& header =
      BasePage::FromPayload(address)->ObjectHeaderFromInnerAddress(
          const_cast<Address>(address));
  DCHECK(!header.IsInConstruction());
  if (MarkNoPush(header)) {
    marking_worklist_.Push(
        {reinterpret_cast<void*>(header.ObjectStart()),
         GlobalGCInfoTable::GCInfoFromIndex(header.GetGCInfoIndex()).trace});
  }
}

void MutatorMarkingState::InvokeWeakRootsCallbackIfNeeded(
    const void* object, TraceDescriptor desc, WeakCallback weak_callback,
    const void* parameter) {
  // Since weak roots are only traced at the end of marking, we can execute
  // the callback instead of registering it.
#if DEBUG
  const HeapObjectHeader& header =
      HeapObjectHeader::FromObject(desc.base_object_payload);
  DCHECK_IMPLIES(header.IsInConstruction(),
                 header.IsMarked<AccessMode::kAtomic>());
#endif  // DEBUG
  weak_callback(LivenessBrokerFactory::Create(), parameter);
}

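// Returns true if |header| is a marked weak container that has not yet been
// retraced during the current conservative stack scan.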
bool MutatorMarkingState::IsMarkedWeakContainer(HeapObjectHeader& header) {
  const bool result =
      weak_containers_worklist_.Contains<AccessMode::kAtomic>(&header) &&
      !recently_retraced_weak_containers_.Contains(&header);
  DCHECK_IMPLIES(result, header.IsMarked<AccessMode::kAtomic>());
  DCHECK_IMPLIES(result, !header.IsInConstruction());
  return result;
}

bool MutatorMarkingState::RecentlyRetracedWeakContainers::Contains(
    const HeapObjectHeader* header) {
  return std::find(recently_retraced_cache_.begin(),
                   recently_retraced_cache_.end(),
                   header) != recently_retraced_cache_.end();
}

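// Inserts into a fixed-size ring buffer of kMaxCacheSize entries, overwriting
// the oldest entry once the cache is full.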
void MutatorMarkingState::RecentlyRetracedWeakContainers::Insert(
    const HeapObjectHeader* header) {
  last_used_index_ = (last_used_index_ + 1) % kMaxCacheSize;
  if (recently_retraced_cache_.size() <= last_used_index_)
    recently_retraced_cache_.push_back(header);
  else
    recently_retraced_cache_[last_used_index_] = header;
}

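// Marking state used by concurrent marking tasks. On top of the basic
// bookkeeping it reports marked bytes incrementally via RecentlyMarkedBytes()
// and supports deferring marked bytes via AccountDeferredMarkedBytes().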
class ConcurrentMarkingState : public BasicMarkingState {
 public:
  ConcurrentMarkingState(HeapBase& heap, MarkingWorklists& marking_worklists,
                         CompactionWorklists* compaction_worklists)
      : BasicMarkingState(heap, marking_worklists, compaction_worklists) {}

  ~ConcurrentMarkingState() { DCHECK_EQ(last_marked_bytes_, marked_bytes_); }

  size_t RecentlyMarkedBytes() {
    return marked_bytes_ - std::exchange(last_marked_bytes_, marked_bytes_);
  }

  inline void AccountDeferredMarkedBytes(size_t deferred_bytes) {
    // AccountDeferredMarkedBytes is called from Trace methods, which are
    // always called after AccountMarkedBytes, so there should be no underflow
    // here.
    DCHECK_LE(deferred_bytes, marked_bytes_);
    marked_bytes_ -= deferred_bytes;
  }

 private:
  size_t last_marked_bytes_ = 0;
};

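// Drains |worklist_local|, invoking |callback| for each popped item. The
// |should_yield| predicate is checked up front and then after every
// |deadline_check_interval| items; the function returns false if it yielded
// before the worklist was empty and true otherwise.
//
// A minimal usage sketch (the interval constant, the deadline check, and the
// surrounding objects are illustrative, not part of this header):
//
//   bool finished = DrainWorklistWithPredicate<64>(
//       [&]() { return deadline_reached(); },  // hypothetical yield check
//       marking_state.write_barrier_worklist(),
//       [&visitor](HeapObjectHeader* header) {
//         DynamicallyTraceMarkedObject<AccessMode::kNonAtomic>(visitor,
//                                                              *header);
//       });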
template <size_t deadline_check_interval, typename WorklistLocal,
          typename Callback, typename Predicate>
bool DrainWorklistWithPredicate(Predicate should_yield,
                                WorklistLocal& worklist_local,
                                Callback callback) {
  if (worklist_local.IsLocalAndGlobalEmpty()) return true;
  // For concurrent markers, should_yield also reports marked bytes.
  if (should_yield()) return false;
  size_t processed_callback_count = deadline_check_interval;
  typename WorklistLocal::ItemType item;
  while (worklist_local.Pop(&item)) {
    callback(item);
    if (--processed_callback_count == 0) {
      if (should_yield()) {
        return false;
      }
      processed_callback_count = deadline_check_interval;
    }
  }
  return true;
}

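// Traces an already-marked, fully-constructed object through its
// HeapObjectHeader, i.e., without requiring static type information at the
// call site.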
template <AccessMode mode>
void DynamicallyTraceMarkedObject(Visitor& visitor,
                                  const HeapObjectHeader& header) {
  DCHECK(!header.IsInConstruction<mode>());
  DCHECK(header.IsMarked<AccessMode::kAtomic>());
  header.Trace<mode>(&visitor);
}

}  // namespace internal
}  // namespace cppgc

#endif  // V8_HEAP_CPPGC_MARKING_STATE_H_