// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_CPPGC_MARKING_WORKLISTS_H_
#define V8_HEAP_CPPGC_MARKING_WORKLISTS_H_

#include <unordered_set>
#include <utility>

#include "include/cppgc/visitor.h"
#include "src/base/platform/mutex.h"
#include "src/heap/base/worklist.h"
#include "src/heap/cppgc/heap-object-header.h"

namespace cppgc {
namespace internal {

18 class MarkingWorklists {
19  private:
20   class V8_EXPORT_PRIVATE ExternalMarkingWorklist {
21    public:
22     template <AccessMode = AccessMode::kNonAtomic>
23     void Push(HeapObjectHeader*);
24     template <AccessMode = AccessMode::kNonAtomic>
25     void Erase(HeapObjectHeader*);
26     template <AccessMode = AccessMode::kNonAtomic>
27     bool Contains(HeapObjectHeader*);
28     template <AccessMode = AccessMode::kNonAtomic>
29     std::unordered_set<HeapObjectHeader*> Extract();
30     template <AccessMode = AccessMode::kNonAtomic>
31     void Clear();
32     template <AccessMode = AccessMode::kNonAtomic>
33     bool IsEmpty();
34 
35     ~ExternalMarkingWorklist();
36 
37    private:
38     template <AccessMode>
39     struct ConditionalMutexGuard;
40 
41     void* operator new(size_t) = delete;
42     void* operator new[](size_t) = delete;
43     void operator delete(void*) = delete;
44     void operator delete[](void*) = delete;
45 
46     v8::base::Mutex lock_;
47     std::unordered_set<HeapObjectHeader*> objects_;
48   };
49 
50  public:
51   static constexpr int kMutatorThreadId = 0;
52 
53   using MarkingItem = cppgc::TraceDescriptor;
54 
55   struct WeakCallbackItem {
56     cppgc::WeakCallback callback;
57     const void* parameter;
58   };
59 
60   struct ConcurrentMarkingBailoutItem {
61     const void* parameter;
62     TraceCallback callback;
63     size_t bailedout_size;
64   };
65 
66   struct EphemeronPairItem {
67     const void* key;
68     TraceDescriptor value_desc;
69   };
70 
71   // Segment size of 512 entries necessary to avoid throughput regressions.
72   // Since the work list is currently a temporary object this is not a problem.
73   using MarkingWorklist =
74       heap::base::Worklist<MarkingItem, 512 /* local entries */>;
75   using NotFullyConstructedWorklist = ExternalMarkingWorklist;
76   using PreviouslyNotFullyConstructedWorklist =
77       heap::base::Worklist<HeapObjectHeader*, 16 /* local entries */>;
78   using WeakCallbackWorklist =
79       heap::base::Worklist<WeakCallbackItem, 64 /* local entries */>;
80   using WriteBarrierWorklist =
81       heap::base::Worklist<HeapObjectHeader*, 64 /*local entries */>;
82   using ConcurrentMarkingBailoutWorklist =
83       heap::base::Worklist<ConcurrentMarkingBailoutItem,
84                            64 /* local entries */>;
85   using EphemeronPairsWorklist =
86       heap::base::Worklist<EphemeronPairItem, 64 /* local entries */>;
87   using WeakContainersWorklist = ExternalMarkingWorklist;
88 
marking_worklist()89   MarkingWorklist* marking_worklist() { return &marking_worklist_; }
not_fully_constructed_worklist()90   NotFullyConstructedWorklist* not_fully_constructed_worklist() {
91     return &not_fully_constructed_worklist_;
92   }
93   PreviouslyNotFullyConstructedWorklist*
previously_not_fully_constructed_worklist()94   previously_not_fully_constructed_worklist() {
95     return &previously_not_fully_constructed_worklist_;
96   }
write_barrier_worklist()97   WriteBarrierWorklist* write_barrier_worklist() {
98     return &write_barrier_worklist_;
99   }
weak_callback_worklist()100   WeakCallbackWorklist* weak_callback_worklist() {
101     return &weak_callback_worklist_;
102   }
concurrent_marking_bailout_worklist()103   ConcurrentMarkingBailoutWorklist* concurrent_marking_bailout_worklist() {
104     return &concurrent_marking_bailout_worklist_;
105   }
discovered_ephemeron_pairs_worklist()106   EphemeronPairsWorklist* discovered_ephemeron_pairs_worklist() {
107     return &discovered_ephemeron_pairs_worklist_;
108   }
ephemeron_pairs_for_processing_worklist()109   EphemeronPairsWorklist* ephemeron_pairs_for_processing_worklist() {
110     return &ephemeron_pairs_for_processing_worklist_;
111   }
weak_containers_worklist()112   WeakContainersWorklist* weak_containers_worklist() {
113     return &weak_containers_worklist_;
114   }
115 
116   void ClearForTesting();
117 
118  private:
119   MarkingWorklist marking_worklist_;
120   NotFullyConstructedWorklist not_fully_constructed_worklist_;
121   PreviouslyNotFullyConstructedWorklist
122       previously_not_fully_constructed_worklist_;
123   WriteBarrierWorklist write_barrier_worklist_;
124   WeakCallbackWorklist weak_callback_worklist_;
125   ConcurrentMarkingBailoutWorklist concurrent_marking_bailout_worklist_;
126   EphemeronPairsWorklist discovered_ephemeron_pairs_worklist_;
127   EphemeronPairsWorklist ephemeron_pairs_for_processing_worklist_;
128   WeakContainersWorklist weak_containers_worklist_;
129 };
130 
131 template <>
132 struct MarkingWorklists::ExternalMarkingWorklist::ConditionalMutexGuard<
133     AccessMode::kNonAtomic> {
134   explicit ConditionalMutexGuard(v8::base::Mutex*) {}
135 };
136 
137 template <>
138 struct MarkingWorklists::ExternalMarkingWorklist::ConditionalMutexGuard<
139     AccessMode::kAtomic> {
140   explicit ConditionalMutexGuard(v8::base::Mutex* lock) : guard_(lock) {}
141 
142  private:
143   v8::base::MutexGuard guard_;
144 };
145 
146 template <AccessMode mode>
147 void MarkingWorklists::ExternalMarkingWorklist::Push(HeapObjectHeader* object) {
148   DCHECK_NOT_NULL(object);
149   ConditionalMutexGuard<mode> guard(&lock_);
150   objects_.insert(object);
151 }
152 
153 template <AccessMode mode>
154 void MarkingWorklists::ExternalMarkingWorklist::Erase(
155     HeapObjectHeader* object) {
156   DCHECK_NOT_NULL(object);
157   ConditionalMutexGuard<mode> guard(&lock_);
158   objects_.erase(object);
159 }
160 
161 template <AccessMode mode>
162 bool MarkingWorklists::ExternalMarkingWorklist::Contains(
163     HeapObjectHeader* object) {
164   ConditionalMutexGuard<mode> guard(&lock_);
165   return objects_.find(object) != objects_.end();
166 }
167 
168 template <AccessMode mode>
169 std::unordered_set<HeapObjectHeader*>
170 MarkingWorklists::ExternalMarkingWorklist::Extract() {
171   ConditionalMutexGuard<mode> guard(&lock_);
172   std::unordered_set<HeapObjectHeader*> extracted;
173   std::swap(extracted, objects_);
174   DCHECK(objects_.empty());
175   return extracted;
176 }
177 
178 template <AccessMode mode>
179 void MarkingWorklists::ExternalMarkingWorklist::Clear() {
180   ConditionalMutexGuard<mode> guard(&lock_);
181   objects_.clear();
182 }
183 
184 template <AccessMode mode>
185 bool MarkingWorklists::ExternalMarkingWorklist::IsEmpty() {
186   ConditionalMutexGuard<mode> guard(&lock_);
187   return objects_.empty();
188 }
189 
}  // namespace internal
}  // namespace cppgc

#endif  // V8_HEAP_CPPGC_MARKING_WORKLISTS_H_