// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_LOCAL_HEAP_H_
#define V8_HEAP_LOCAL_HEAP_H_

#include <atomic>
#include <memory>

#include "src/base/platform/condition-variable.h"
#include "src/base/platform/mutex.h"
#include "src/common/assert-scope.h"
#include "src/execution/isolate.h"
#include "src/handles/persistent-handles.h"
#include "src/heap/concurrent-allocator.h"

namespace v8 {
namespace internal {

class Heap;
class Safepoint;
class LocalHandles;

// LocalHeap is used by the GC to track all threads with heap access in order
// to stop them before performing a collection. LocalHeaps can be either Parked
// or Running and are in Parked mode when initialized.
//   Running: Thread is allowed to access the heap but needs to give the GC the
//            chance to run regularly by manually invoking Safepoint(). The
//            thread can be parked using ParkedScope.
//   Parked:  Heap access is not allowed, so the GC will not stop this thread
//            for a collection. Useful when threads do not need heap access for
//            some time or for blocking operations like locking a mutex.
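//
// Illustrative sketch of typical use (not part of this header; the helper
// functions below are hypothetical): a thread in the Running state calls
// Safepoint() regularly and parks itself around blocking operations.
//
//   void RunBackgroundWork(LocalHeap* local_heap) {
//     UnparkedScope unparked(local_heap);  // Allow heap access on this thread.
//     while (HasMoreWork()) {
//       local_heap->Safepoint();  // Give a requested GC the chance to run.
//       DoSomeWork();
//     }
//     ParkedScope parked(local_heap);  // No heap access while waiting.
//     WaitForMoreWork();
//   }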
class V8_EXPORT_PRIVATE LocalHeap {
 public:
  explicit LocalHeap(
      Heap* heap, ThreadKind kind,
      std::unique_ptr<PersistentHandles> persistent_handles = nullptr);
  ~LocalHeap();

  // Invoked by the main thread to signal this thread that it needs to halt in
  // a safepoint.
  void RequestSafepoint();

  // Frequently invoked by the local thread to check whether a safepoint was
  // requested by the main thread.
  void Safepoint() {
    // In case garbage collection is disabled, the thread isn't even allowed to
    // invoke Safepoint(). Otherwise a GC might happen here.
    DCHECK(AllowGarbageCollection::IsAllowed());

    if (IsSafepointRequested()) {
      ClearSafepointRequested();
      EnterSafepoint();
    }
  }

  LocalHandles* handles() { return handles_.get(); }

  template <typename T>
  Handle<T> NewPersistentHandle(T object) {
    if (!persistent_handles_) {
      EnsurePersistentHandles();
    }
    return persistent_handles_->NewHandle(object);
  }

  template <typename T>
  Handle<T> NewPersistentHandle(Handle<T> object) {
    return NewPersistentHandle(*object);
  }

  template <typename T>
  MaybeHandle<T> NewPersistentMaybeHandle(MaybeHandle<T> maybe_handle) {
    Handle<T> handle;
    if (maybe_handle.ToHandle(&handle)) {
      return NewPersistentHandle(handle);
    }
    return kNullMaybeHandle;
  }
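
  // Illustrative sketch (assumed usage, not mandated by this header): a
  // background thread can promote a short-lived handle into a persistent one
  // that stays valid across handle scopes, e.g.
  //
  //   Handle<Map> keep_alive = local_heap->NewPersistentHandle(map_handle);
  //
  // where map_handle is a hypothetical Handle<Map> obtained earlier on the
  // same thread.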

  void AttachPersistentHandles(
      std::unique_ptr<PersistentHandles> persistent_handles);
  std::unique_ptr<PersistentHandles> DetachPersistentHandles();
#ifdef DEBUG
  bool ContainsPersistentHandle(Address* location);
  bool ContainsLocalHandle(Address* location);
  bool IsHandleDereferenceAllowed();
#endif

  bool IsParked();

  Heap* heap() { return heap_; }

  MarkingBarrier* marking_barrier() { return marking_barrier_.get(); }
  ConcurrentAllocator* old_space_allocator() { return &old_space_allocator_; }

  // Marks/unmarks linear allocation areas black. Used for black allocation.
  void MarkLinearAllocationAreaBlack();
  void UnmarkLinearAllocationArea();

  // Gives up linear allocation areas. Used for mark-compact GC.
  void FreeLinearAllocationArea();

  // Creates filler objects in the linear allocation areas. Verification
  // requires an iterable heap.
  void MakeLinearAllocationAreaIterable();

  // Fetches a pointer to the local heap from the thread local storage.
  // It is intended to be used in handle and write barrier code where it is
  // difficult to get a pointer to the current instance of LocalHeap otherwise.
  // The result may be a nullptr if there is no local heap instance associated
  // with the current thread.
  static LocalHeap* Current();

  // Allocates an uninitialized object.
  V8_WARN_UNUSED_RESULT inline AllocationResult AllocateRaw(
      int size_in_bytes, AllocationType allocation,
      AllocationOrigin origin = AllocationOrigin::kRuntime,
      AllocationAlignment alignment = kWordAligned);

  // Allocates an uninitialized object and crashes when the object cannot be
  // allocated.
  V8_WARN_UNUSED_RESULT inline Address AllocateRawOrFail(
      int size_in_bytes, AllocationType allocation,
      AllocationOrigin origin = AllocationOrigin::kRuntime,
      AllocationAlignment alignment = kWordAligned);
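
  // Illustrative sketch (assumed usage, not mandated by this header): raw
  // allocation from a background thread, falling back to the crashing variant
  // when the allocation must not fail. size_in_bytes is a hypothetical,
  // already computed object size.
  //
  //   AllocationResult result =
  //       local_heap->AllocateRaw(size_in_bytes, AllocationType::kOld);
  //   HeapObject object;
  //   if (!result.To(&object)) {
  //     Address address =
  //         local_heap->AllocateRawOrFail(size_in_bytes, AllocationType::kOld);
  //     object = HeapObject::FromAddress(address);
  //   }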

  bool is_main_thread() const { return is_main_thread_; }

  // Requests a GC and blocks until the collection finishes.
  void PerformCollection();

 private:
  enum class ThreadState {
    // Threads in this state need to be stopped in a safepoint.
    Running,
    // Thread was parked, which means that the thread is not allowed to access
    // or manipulate the heap in any way.
    Parked,
    // Thread was stopped in a safepoint.
    Safepoint
  };

  // Slow path of allocation that performs a GC and then retries the allocation
  // in a loop.
  Address PerformCollectionAndAllocateAgain(int object_size,
                                            AllocationType type,
                                            AllocationOrigin origin,
                                            AllocationAlignment alignment);

  void Park();
  void Unpark();
  void EnsureParkedBeforeDestruction();

  void EnsurePersistentHandles();

  V8_INLINE bool IsSafepointRequested() {
    return safepoint_requested_.load(std::memory_order_relaxed);
  }
  void ClearSafepointRequested();

  void EnterSafepoint();

  Heap* heap_;
  bool is_main_thread_;

  base::Mutex state_mutex_;
  base::ConditionVariable state_change_;
  ThreadState state_;

  std::atomic<bool> safepoint_requested_;

  bool allocation_failed_;

  LocalHeap* prev_;
  LocalHeap* next_;

  std::unique_ptr<LocalHandles> handles_;
  std::unique_ptr<PersistentHandles> persistent_handles_;
  std::unique_ptr<MarkingBarrier> marking_barrier_;

  ConcurrentAllocator old_space_allocator_;

  friend class Heap;
  friend class GlobalSafepoint;
  friend class ParkedScope;
  friend class UnparkedScope;
  friend class ConcurrentAllocator;
};

// Scope that explicitly parks a LocalHeap, prohibiting access to the heap and
// the creation of Handles.
class ParkedScope {
 public:
  explicit ParkedScope(LocalHeap* local_heap) : local_heap_(local_heap) {
    local_heap_->Park();
  }

  ~ParkedScope() { local_heap_->Unpark(); }

 private:
  LocalHeap* const local_heap_;
};

// Scope that explicitly unparks a LocalHeap, allowing access to the heap and
// the creation of Handles.
class UnparkedScope {
 public:
  explicit UnparkedScope(LocalHeap* local_heap) : local_heap_(local_heap) {
    local_heap_->Unpark();
  }

  ~UnparkedScope() { local_heap_->Park(); }

 private:
  LocalHeap* const local_heap_;
};

// Parks the LocalHeap while blocking on the given mutex, so a thread waiting
// for a contended lock does not stall a requested GC. The mutex is held until
// the guard is destroyed.
class ParkedMutexGuard {
  base::Mutex* guard_;

 public:
  explicit ParkedMutexGuard(LocalHeap* local_heap, base::Mutex* guard)
      : guard_(guard) {
    ParkedScope scope(local_heap);
    guard_->Lock();
  }

  ~ParkedMutexGuard() { guard_->Unlock(); }
};
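
// Illustrative sketch (assumed usage): taking a possibly contended lock from a
// background thread without stalling the GC.
//
//   void WithSharedState(LocalHeap* local_heap, base::Mutex* mutex) {
//     ParkedMutexGuard guard(local_heap, mutex);
//     // The thread was parked only while waiting for the lock; it is Running
//     // again here and holds the mutex until the end of the scope.
//   }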

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_LOCAL_HEAP_H_