// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_CPPGC_MARKING_STATE_H_
#define V8_HEAP_CPPGC_MARKING_STATE_H_

#include <memory>
#include <utility>

#include "include/cppgc/trace-trait.h"
#include "src/heap/cppgc/compaction-worklists.h"
#include "src/heap/cppgc/globals.h"
#include "src/heap/cppgc/heap-object-header.h"
#include "src/heap/cppgc/heap-page.h"
#include "src/heap/cppgc/liveness-broker.h"
#include "src/heap/cppgc/marking-worklists.h"

namespace cppgc {
namespace internal {

// C++ marking implementation. MarkingStateBase is the common base of the
// mutator and concurrent marking states defined below: it owns local views
// of the marking worklists and accounts the bytes marked so far.
class MarkingStateBase {
 public:
  inline MarkingStateBase(HeapBase& heap, MarkingWorklists&,
                          CompactionWorklists*);

  MarkingStateBase(const MarkingStateBase&) = delete;
  MarkingStateBase& operator=(const MarkingStateBase&) = delete;

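  // Marks the object and pushes it for tracing. Objects that are still in
  // construction are deferred to the not-fully-constructed worklist instead.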
  inline void MarkAndPush(const void*, TraceDescriptor);
  inline void MarkAndPush(HeapObjectHeader&);

  inline void PushMarked(HeapObjectHeader&, TraceDescriptor desc);

  inline void RegisterWeakReferenceIfNeeded(const void*, TraceDescriptor,
                                            WeakCallback, const void*);
  inline void RegisterWeakCallback(WeakCallback, const void*);

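  // Records a slot that the compactor may need to fix up. A no-op when
  // compaction is disabled, i.e. when no movable-slots worklist exists.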
  void RegisterMovableReference(const void** slot) {
    if (!movable_slots_worklist_) return;
    movable_slots_worklist_->Push(slot);
  }

  // Weak containers are special in that they may require re-tracing if
  // reachable through the stack, even if the container itself was already
  // traced. ProcessWeakContainer records which weak containers were already
  // marked so that conservative stack scanning knows to retrace them.
  inline void ProcessWeakContainer(const void*, TraceDescriptor, WeakCallback,
                                   const void*);

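  // Processes an ephemeron pair: if the key is already marked, the value is
  // marked and pushed immediately; otherwise the pair is deferred until the
  // key's liveness is known.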
  inline void ProcessEphemeron(const void*, TraceDescriptor);

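  // Adds an object's size to the marked-bytes count; large objects account
  // their page's payload size.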
  inline void AccountMarkedBytes(const HeapObjectHeader&);
  inline void AccountMarkedBytes(size_t);
  size_t marked_bytes() const { return marked_bytes_; }

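  // Publishes all local worklist segments to the corresponding global
  // worklists so that other markers can pick up the work.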
  void Publish() {
    marking_worklist_.Publish();
    previously_not_fully_constructed_worklist_.Publish();
    weak_callback_worklist_.Publish();
    write_barrier_worklist_.Publish();
    concurrent_marking_bailout_worklist_.Publish();
    discovered_ephemeron_pairs_worklist_.Publish();
    ephemeron_pairs_for_processing_worklist_.Publish();
    if (IsCompactionEnabled()) movable_slots_worklist_->Publish();
  }

  MarkingWorklists::MarkingWorklist::Local& marking_worklist() {
    return marking_worklist_;
  }
  MarkingWorklists::NotFullyConstructedWorklist&
  not_fully_constructed_worklist() {
    return not_fully_constructed_worklist_;
  }
  MarkingWorklists::PreviouslyNotFullyConstructedWorklist::Local&
  previously_not_fully_constructed_worklist() {
    return previously_not_fully_constructed_worklist_;
  }
  MarkingWorklists::WeakCallbackWorklist::Local& weak_callback_worklist() {
    return weak_callback_worklist_;
  }
  MarkingWorklists::WriteBarrierWorklist::Local& write_barrier_worklist() {
    return write_barrier_worklist_;
  }
  MarkingWorklists::ConcurrentMarkingBailoutWorklist::Local&
  concurrent_marking_bailout_worklist() {
    return concurrent_marking_bailout_worklist_;
  }
  MarkingWorklists::EphemeronPairsWorklist::Local&
  discovered_ephemeron_pairs_worklist() {
    return discovered_ephemeron_pairs_worklist_;
  }
  MarkingWorklists::EphemeronPairsWorklist::Local&
  ephemeron_pairs_for_processing_worklist() {
    return ephemeron_pairs_for_processing_worklist_;
  }
  MarkingWorklists::WeakContainersWorklist& weak_containers_worklist() {
    return weak_containers_worklist_;
  }

  CompactionWorklists::MovableReferencesWorklist::Local*
  movable_slots_worklist() {
    return movable_slots_worklist_.get();
  }

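  // Drops the movable-slots worklist when compaction is cancelled; after
  // this, IsCompactionEnabled() is false and slots are no longer recorded.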
  void NotifyCompactionCancelled() {
    DCHECK(IsCompactionEnabled());
    movable_slots_worklist_->Clear();
    movable_slots_worklist_.reset();
  }

 protected:
  inline void MarkAndPush(HeapObjectHeader&, TraceDescriptor);

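  // Atomically tries to set the mark bit without pushing the object onto any
  // worklist. Returns true if this call transitioned the object to marked.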
  inline bool MarkNoPush(HeapObjectHeader&);

  inline void RegisterWeakContainer(HeapObjectHeader&);

  inline bool IsCompactionEnabled() const {
    return movable_slots_worklist_.get();
  }

#ifdef DEBUG
  HeapBase& heap_;
#endif  // DEBUG

  MarkingWorklists::MarkingWorklist::Local marking_worklist_;
  MarkingWorklists::NotFullyConstructedWorklist&
      not_fully_constructed_worklist_;
  MarkingWorklists::PreviouslyNotFullyConstructedWorklist::Local
      previously_not_fully_constructed_worklist_;
  MarkingWorklists::WeakCallbackWorklist::Local weak_callback_worklist_;
  MarkingWorklists::WriteBarrierWorklist::Local write_barrier_worklist_;
  MarkingWorklists::ConcurrentMarkingBailoutWorklist::Local
      concurrent_marking_bailout_worklist_;
  MarkingWorklists::EphemeronPairsWorklist::Local
      discovered_ephemeron_pairs_worklist_;
  MarkingWorklists::EphemeronPairsWorklist::Local
      ephemeron_pairs_for_processing_worklist_;
  MarkingWorklists::WeakContainersWorklist& weak_containers_worklist_;
  // Existence of the worklist (|movable_slots_worklist_| != nullptr) denotes
  // that compaction is currently enabled and slots must be recorded.
  std::unique_ptr<CompactionWorklists::MovableReferencesWorklist::Local>
      movable_slots_worklist_;

  size_t marked_bytes_ = 0;
};

MarkingStateBase::MarkingStateBase(HeapBase& heap,
                                   MarkingWorklists& marking_worklists,
                                   CompactionWorklists* compaction_worklists)
    :
#ifdef DEBUG
      heap_(heap),
#endif  // DEBUG
      marking_worklist_(marking_worklists.marking_worklist()),
      not_fully_constructed_worklist_(
          *marking_worklists.not_fully_constructed_worklist()),
      previously_not_fully_constructed_worklist_(
          marking_worklists.previously_not_fully_constructed_worklist()),
      weak_callback_worklist_(marking_worklists.weak_callback_worklist()),
      write_barrier_worklist_(marking_worklists.write_barrier_worklist()),
      concurrent_marking_bailout_worklist_(
          marking_worklists.concurrent_marking_bailout_worklist()),
      discovered_ephemeron_pairs_worklist_(
          marking_worklists.discovered_ephemeron_pairs_worklist()),
      ephemeron_pairs_for_processing_worklist_(
          marking_worklists.ephemeron_pairs_for_processing_worklist()),
      weak_containers_worklist_(*marking_worklists.weak_containers_worklist()) {
  if (compaction_worklists) {
    movable_slots_worklist_ =
        std::make_unique<CompactionWorklists::MovableReferencesWorklist::Local>(
            compaction_worklists->movable_slots_worklist());
  }
}

void MarkingStateBase::MarkAndPush(const void* object, TraceDescriptor desc) {
  DCHECK_NOT_NULL(object);
  MarkAndPush(HeapObjectHeader::FromPayload(
                  const_cast<void*>(desc.base_object_payload)),
              desc);
}

void MarkingStateBase::MarkAndPush(HeapObjectHeader& header,
                                   TraceDescriptor desc) {
  DCHECK_NOT_NULL(desc.callback);

  if (header.IsInConstruction<AccessMode::kAtomic>()) {
    not_fully_constructed_worklist_.Push<AccessMode::kAtomic>(&header);
  } else if (MarkNoPush(header)) {
    PushMarked(header, desc);
  }
}

bool MarkingStateBase::MarkNoPush(HeapObjectHeader& header) {
  // A GC should only mark the objects that belong in its heap.
  DCHECK_EQ(&heap_, BasePage::FromPayload(&header)->heap());
  // Never mark free space objects. Doing so could, e.g., amount to marking a
  // promptly freed backing store.
  DCHECK(!header.IsFree<AccessMode::kAtomic>());
  return header.TryMarkAtomic();
}

void MarkingStateBase::MarkAndPush(HeapObjectHeader& header) {
  MarkAndPush(
      header,
      {header.Payload(),
       GlobalGCInfoTable::GCInfoFromIndex(header.GetGCInfoIndex()).trace});
}

void MarkingStateBase::PushMarked(HeapObjectHeader& header,
                                  TraceDescriptor desc) {
  DCHECK(header.IsMarked<AccessMode::kAtomic>());
  DCHECK(!header.IsInConstruction<AccessMode::kAtomic>());
  DCHECK_NOT_NULL(desc.callback);

  marking_worklist_.Push(desc);
}

void MarkingStateBase::RegisterWeakReferenceIfNeeded(const void* object,
                                                     TraceDescriptor desc,
                                                     WeakCallback weak_callback,
                                                     const void* parameter) {
  // Filter out already marked values. The write barrier for WeakMember
  // ensures that any newly set value after this point is kept alive and does
  // not require the callback.
  if (HeapObjectHeader::FromPayload(desc.base_object_payload)
          .IsMarked<AccessMode::kAtomic>())
    return;
  RegisterWeakCallback(weak_callback, parameter);
}

void MarkingStateBase::RegisterWeakCallback(WeakCallback callback,
                                            const void* object) {
  DCHECK_NOT_NULL(callback);
  weak_callback_worklist_.Push({callback, object});
}

void MarkingStateBase::RegisterWeakContainer(HeapObjectHeader& header) {
  weak_containers_worklist_.Push<AccessMode::kAtomic>(&header);
}

void MarkingStateBase::ProcessWeakContainer(const void* object,
                                            TraceDescriptor desc,
                                            WeakCallback callback,
                                            const void* data) {
  DCHECK_NOT_NULL(object);

  HeapObjectHeader& header =
      HeapObjectHeader::FromPayload(const_cast<void*>(object));

  if (header.IsInConstruction<AccessMode::kAtomic>()) {
    not_fully_constructed_worklist_.Push<AccessMode::kAtomic>(&header);
    return;
  }

  // Only mark the container initially. Its buckets will be processed after
  // marking.
  if (!MarkNoPush(header)) return;
  RegisterWeakContainer(header);

  // Register final weak processing of the backing store.
  RegisterWeakCallback(callback, data);

  // Weak containers might not require tracing. In such cases the callback in
  // the TraceDescriptor will be nullptr. For ephemerons the callback will be
  // non-nullptr so that the container is traced and the ephemeron pairs are
  // processed.
  if (desc.callback) PushMarked(header, desc);
}

void MarkingStateBase::ProcessEphemeron(const void* key,
                                        TraceDescriptor value_desc) {
  // Filter out already marked keys. The write barrier for WeakMember
  // ensures that any newly set value after this point is kept alive and does
  // not require the callback.
  if (HeapObjectHeader::FromPayload(key).IsMarked<AccessMode::kAtomic>()) {
    MarkAndPush(value_desc.base_object_payload, value_desc);
    return;
  }
  discovered_ephemeron_pairs_worklist_.Push({key, value_desc});
}

void MarkingStateBase::AccountMarkedBytes(const HeapObjectHeader& header) {
  AccountMarkedBytes(
      header.IsLargeObject<AccessMode::kAtomic>()
          ? reinterpret_cast<const LargePage*>(BasePage::FromPayload(&header))
                ->PayloadSize()
          : header.GetSize<AccessMode::kAtomic>());
}

void MarkingStateBase::AccountMarkedBytes(size_t marked_bytes) {
  marked_bytes_ += marked_bytes;
}

class MutatorMarkingState : public MarkingStateBase {
 public:
  MutatorMarkingState(HeapBase& heap, MarkingWorklists& marking_worklists,
                      CompactionWorklists* compaction_worklists)
      : MarkingStateBase(heap, marking_worklists, compaction_worklists) {}

  inline bool MarkNoPush(HeapObjectHeader& header) {
    return MutatorMarkingState::MarkingStateBase::MarkNoPush(header);
  }

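  // Re-pushes a marked weak container so that it is traced like a regular
  // object, removing it from the weak containers worklist.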
  inline void PushMarkedWeakContainer(HeapObjectHeader&);

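  // Resolves an inner address to its enclosing object, then marks and pushes
  // that object.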
  inline void DynamicallyMarkAddress(ConstAddress);

  // Moves objects in not_fully_constructed_worklist_ to
  // previously_not_fully_constructed_worklist_.
  void FlushNotFullyConstructedObjects();

  // Moves ephemeron pairs in discovered_ephemeron_pairs_worklist_ to
  // ephemeron_pairs_for_processing_worklist_.
  void FlushDiscoveredEphemeronPairs();

  inline void InvokeWeakRootsCallbackIfNeeded(const void*, TraceDescriptor,
                                              WeakCallback, const void*);

  inline bool IsMarkedWeakContainer(HeapObjectHeader&);
};

void MutatorMarkingState::PushMarkedWeakContainer(HeapObjectHeader& header) {
  DCHECK(weak_containers_worklist_.Contains(&header));
  weak_containers_worklist_.Erase(&header);
  PushMarked(
      header,
      {header.Payload(),
       GlobalGCInfoTable::GCInfoFromIndex(header.GetGCInfoIndex()).trace});
}

void MutatorMarkingState::DynamicallyMarkAddress(ConstAddress address) {
  HeapObjectHeader& header =
      BasePage::FromPayload(address)->ObjectHeaderFromInnerAddress(
          const_cast<Address>(address));
  DCHECK(!header.IsInConstruction());
  if (MarkNoPush(header)) {
    marking_worklist_.Push(
        {reinterpret_cast<void*>(header.Payload()),
         GlobalGCInfoTable::GCInfoFromIndex(header.GetGCInfoIndex()).trace});
  }
}

void MutatorMarkingState::InvokeWeakRootsCallbackIfNeeded(
    const void* object, TraceDescriptor desc, WeakCallback weak_callback,
    const void* parameter) {
  // Since weak roots are only traced at the end of marking, we can execute
  // the callback instead of registering it.
#ifdef DEBUG
  const HeapObjectHeader& header =
      HeapObjectHeader::FromPayload(desc.base_object_payload);
  DCHECK_IMPLIES(header.IsInConstruction(), header.IsMarked());
#endif  // DEBUG
  weak_callback(LivenessBrokerFactory::Create(), parameter);
}

bool MutatorMarkingState::IsMarkedWeakContainer(HeapObjectHeader& header) {
  const bool result = weak_containers_worklist_.Contains(&header);
  DCHECK_IMPLIES(result, header.IsMarked());
  return result;
}

class ConcurrentMarkingState : public MarkingStateBase {
 public:
  ConcurrentMarkingState(HeapBase& heap, MarkingWorklists& marking_worklists,
                         CompactionWorklists* compaction_worklists)
      : MarkingStateBase(heap, marking_worklists, compaction_worklists) {}

  ~ConcurrentMarkingState() { DCHECK_EQ(last_marked_bytes_, marked_bytes_); }

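  // Returns the number of bytes marked since the last call and remembers the
  // current total for the next call.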
  size_t RecentlyMarkedBytes() {
    return marked_bytes_ - std::exchange(last_marked_bytes_, marked_bytes_);
  }

  inline void AccountDeferredMarkedBytes(size_t deferred_bytes) {
    // AccountDeferredMarkedBytes is called from Trace methods, which are
    // always called after AccountMarkedBytes, so there should be no underflow
    // here.
    DCHECK_LE(deferred_bytes, marked_bytes_);
    marked_bytes_ -= deferred_bytes;
  }

 private:
  size_t last_marked_bytes_ = 0;
};

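// Drains |worklist_local| by invoking |callback| on each item, checking
// |should_yield| every |deadline_check_interval| items. Returns false if the
// predicate requested a yield before the worklist was drained, and true once
// both the local and global parts are empty. A sketch of a possible call
// site (names are illustrative, not part of this header):
//
//   bool fully_drained = DrainWorklistWithPredicate<64>(
//       [&schedule]() { return schedule.ShouldYield(); },
//       state.marking_worklist(),
//       [&visitor](const TraceDescriptor& desc) {
//         desc.callback(&visitor, desc.base_object_payload);
//       });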
template <size_t deadline_check_interval, typename WorklistLocal,
          typename Callback, typename Predicate>
bool DrainWorklistWithPredicate(Predicate should_yield,
                                WorklistLocal& worklist_local,
                                Callback callback) {
  if (worklist_local.IsLocalAndGlobalEmpty()) return true;
  // For concurrent markers, should_yield also reports marked bytes.
  if (should_yield()) return false;
  size_t processed_callback_count = deadline_check_interval;
  typename WorklistLocal::ItemType item;
  while (worklist_local.Pop(&item)) {
    callback(item);
    if (--processed_callback_count == 0) {
      if (should_yield()) {
        return false;
      }
      processed_callback_count = deadline_check_interval;
    }
  }
  return true;
}

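// Retraces an already marked object by looking up its Trace callback in the
// global GCInfo table. Used for objects whose type is only known dynamically,
// e.g. when re-tracing marked weak containers found through the stack.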
template <AccessMode mode>
void DynamicallyTraceMarkedObject(Visitor& visitor,
                                  const HeapObjectHeader& header) {
  DCHECK(!header.IsInConstruction<mode>());
  DCHECK(header.IsMarked<mode>());
  const GCInfo& gcinfo =
      GlobalGCInfoTable::GCInfoFromIndex(header.GetGCInfoIndex<mode>());
  gcinfo.trace(&visitor, header.Payload());
}

}  // namespace internal
}  // namespace cppgc

#endif  // V8_HEAP_CPPGC_MARKING_STATE_H_