• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2020 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifndef V8_HEAP_CPPGC_MARKING_WORKLISTS_H_
6 #define V8_HEAP_CPPGC_MARKING_WORKLISTS_H_
7 
8 #include <unordered_set>
9 
10 #include "include/cppgc/visitor.h"
11 #include "src/base/platform/mutex.h"
12 #include "src/heap/base/worklist.h"
13 #include "src/heap/cppgc/heap-object-header.h"
14 
15 namespace cppgc {
16 namespace internal {
17 
// Owns every worklist used by the cppgc marker. All worklists share the
// lifetime of this object; the marker and concurrent markers obtain raw
// pointers through the accessors below.
class MarkingWorklists {
 private:
  // A set-backed worklist for objects that must live outside the regular
  // heap::base::Worklist machinery (it supports Contains() and bulk
  // Extract(), which segmented worklists do not). Thread safety is opt-in
  // per call site through the AccessMode template parameter: kAtomic calls
  // serialize on |lock_|, kNonAtomic calls skip locking entirely.
  class V8_EXPORT_PRIVATE ExternalMarkingWorklist {
   public:
    // Inserts |object| into the set.
    template <AccessMode = AccessMode::kNonAtomic>
    void Push(HeapObjectHeader*);
    // Returns true if |object| was previously pushed and not yet removed.
    template <AccessMode = AccessMode::kNonAtomic>
    bool Contains(HeapObjectHeader*);
    // Removes and returns all currently held objects, leaving the set empty.
    template <AccessMode = AccessMode::kNonAtomic>
    std::unordered_set<HeapObjectHeader*> Extract();
    // Drops all held objects.
    template <AccessMode = AccessMode::kNonAtomic>
    void Clear();
    // Returns true if no objects are currently held.
    template <AccessMode = AccessMode::kNonAtomic>
    bool IsEmpty();

    ~ExternalMarkingWorklist();

   private:
    // RAII guard that takes |lock_| only for AccessMode::kAtomic; the
    // kNonAtomic specialization is a no-op. Specializations follow the
    // class definition below.
    template <AccessMode>
    struct ConditionalMutexGuard;

    // Heap allocation is disallowed; instances are embedded in
    // MarkingWorklists.
    void* operator new(size_t) = delete;
    void* operator new[](size_t) = delete;
    void operator delete(void*) = delete;
    void operator delete[](void*) = delete;

    v8::base::Mutex lock_;
    std::unordered_set<HeapObjectHeader*> objects_;
  };

 public:
  // Task id reserved for the mutator thread; concurrent markers use ids > 0.
  static constexpr int kMutatorThreadId = 0;

  using MarkingItem = cppgc::TraceDescriptor;

  // A weak callback together with the parameter it will be invoked with
  // during weakness processing.
  struct WeakCallbackItem {
    cppgc::WeakCallback callback;
    const void* parameter;
  };

  // An object whose tracing was bailed out of during concurrent marking and
  // must be re-traced on the mutator thread; |bailedout_size| is accounted
  // back to the concurrently marked bytes.
  struct ConcurrentMarkingBailoutItem {
    const void* parameter;
    TraceCallback callback;
    size_t bailedout_size;
  };

  // A discovered ephemeron: |value| (with its trace descriptor) is only
  // marked if |key| is reachable.
  struct EphemeronPairItem {
    const void* key;
    const void* value;
    TraceDescriptor value_desc;
  };

  // Segment size of 512 entries necessary to avoid throughput regressions.
  // Since the work list is currently a temporary object this is not a problem.
  using MarkingWorklist =
      heap::base::Worklist<MarkingItem, 512 /* local entries */>;
  using NotFullyConstructedWorklist = ExternalMarkingWorklist;
  using PreviouslyNotFullyConstructedWorklist =
      heap::base::Worklist<HeapObjectHeader*, 16 /* local entries */>;
  using WeakCallbackWorklist =
      heap::base::Worklist<WeakCallbackItem, 64 /* local entries */>;
  using WriteBarrierWorklist =
      heap::base::Worklist<HeapObjectHeader*, 64 /* local entries */>;
  using ConcurrentMarkingBailoutWorklist =
      heap::base::Worklist<ConcurrentMarkingBailoutItem,
                           64 /* local entries */>;
  using EphemeronPairsWorklist =
      heap::base::Worklist<EphemeronPairItem, 64 /* local entries */>;
  using WeakContainersWorklist = ExternalMarkingWorklist;
  using RetraceMarkedObjectsWorklist =
      heap::base::Worklist<HeapObjectHeader*, 16 /* local entries */>;

  // Accessors hand out raw pointers; the worklists are owned by this object
  // and remain valid for its lifetime.
  MarkingWorklist* marking_worklist() { return &marking_worklist_; }
  NotFullyConstructedWorklist* not_fully_constructed_worklist() {
    return &not_fully_constructed_worklist_;
  }
  PreviouslyNotFullyConstructedWorklist*
  previously_not_fully_constructed_worklist() {
    return &previously_not_fully_constructed_worklist_;
  }
  WriteBarrierWorklist* write_barrier_worklist() {
    return &write_barrier_worklist_;
  }
  WeakCallbackWorklist* weak_callback_worklist() {
    return &weak_callback_worklist_;
  }
  ConcurrentMarkingBailoutWorklist* concurrent_marking_bailout_worklist() {
    return &concurrent_marking_bailout_worklist_;
  }
  EphemeronPairsWorklist* discovered_ephemeron_pairs_worklist() {
    return &discovered_ephemeron_pairs_worklist_;
  }
  EphemeronPairsWorklist* ephemeron_pairs_for_processing_worklist() {
    return &ephemeron_pairs_for_processing_worklist_;
  }
  WeakContainersWorklist* weak_containers_worklist() {
    return &weak_containers_worklist_;
  }
  RetraceMarkedObjectsWorklist* retrace_marked_objects_worklist() {
    return &retrace_marked_objects_worklist_;
  }

  // Empties all worklists; intended for tests only.
  void ClearForTesting();

 private:
  MarkingWorklist marking_worklist_;
  NotFullyConstructedWorklist not_fully_constructed_worklist_;
  PreviouslyNotFullyConstructedWorklist
      previously_not_fully_constructed_worklist_;
  WriteBarrierWorklist write_barrier_worklist_;
  WeakCallbackWorklist weak_callback_worklist_;
  ConcurrentMarkingBailoutWorklist concurrent_marking_bailout_worklist_;
  EphemeronPairsWorklist discovered_ephemeron_pairs_worklist_;
  EphemeronPairsWorklist ephemeron_pairs_for_processing_worklist_;
  WeakContainersWorklist weak_containers_worklist_;
  RetraceMarkedObjectsWorklist retrace_marked_objects_worklist_;
};
135 
// Non-atomic specialization: no synchronization is required, so the guard
// accepts (and ignores) the mutex and does nothing.
template <>
struct MarkingWorklists::ExternalMarkingWorklist::ConditionalMutexGuard<
    AccessMode::kNonAtomic> {
  explicit ConditionalMutexGuard(v8::base::Mutex*) {}
};
141 
// Atomic specialization: holds the mutex for the guard's full scope via
// v8::base::MutexGuard (RAII lock/unlock).
template <>
struct MarkingWorklists::ExternalMarkingWorklist::ConditionalMutexGuard<
    AccessMode::kAtomic> {
  explicit ConditionalMutexGuard(v8::base::Mutex* lock) : guard_(lock) {}

 private:
  v8::base::MutexGuard guard_;
};
150 
151 template <AccessMode mode>
152 void MarkingWorklists::ExternalMarkingWorklist::Push(HeapObjectHeader* object) {
153   DCHECK_NOT_NULL(object);
154   ConditionalMutexGuard<mode> guard(&lock_);
155   objects_.insert(object);
156 }
157 
158 template <AccessMode mode>
159 bool MarkingWorklists::ExternalMarkingWorklist::Contains(
160     HeapObjectHeader* object) {
161   ConditionalMutexGuard<mode> guard(&lock_);
162   return objects_.find(object) != objects_.end();
163 }
164 
165 template <AccessMode mode>
166 std::unordered_set<HeapObjectHeader*>
167 MarkingWorklists::ExternalMarkingWorklist::Extract() {
168   ConditionalMutexGuard<mode> guard(&lock_);
169   std::unordered_set<HeapObjectHeader*> extracted;
170   std::swap(extracted, objects_);
171   DCHECK(objects_.empty());
172   return extracted;
173 }
174 
175 template <AccessMode mode>
176 void MarkingWorklists::ExternalMarkingWorklist::Clear() {
177   ConditionalMutexGuard<mode> guard(&lock_);
178   objects_.clear();
179 }
180 
181 template <AccessMode mode>
182 bool MarkingWorklists::ExternalMarkingWorklist::IsEmpty() {
183   ConditionalMutexGuard<mode> guard(&lock_);
184   return objects_.empty();
185 }
186 
187 }  // namespace internal
188 }  // namespace cppgc
189 
190 #endif  // V8_HEAP_CPPGC_MARKING_WORKLISTS_H_
191