// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_HEAP_WRITE_BARRIER_INL_H_
#define V8_HEAP_HEAP_WRITE_BARRIER_INL_H_

// Clients of this interface shouldn't depend on lots of heap internals.
// Do not include anything from src/heap here!

#include "src/heap/heap-write-barrier.h"

#include "src/common/globals.h"
#include "src/objects/code.h"
#include "src/objects/compressed-slots-inl.h"
#include "src/objects/fixed-array.h"
#include "src/objects/heap-object.h"
#include "src/objects/maybe-object-inl.h"
#include "src/objects/slots-inl.h"

namespace v8 {
namespace internal {

// Defined in heap.cc.
V8_EXPORT_PRIVATE bool Heap_PageFlagsAreConsistent(HeapObject object);
V8_EXPORT_PRIVATE bool Heap_ValueMightRequireGenerationalWriteBarrier(
    HeapObject value);
V8_EXPORT_PRIVATE void Heap_GenerationalBarrierSlow(HeapObject object,
                                                    Address slot,
                                                    HeapObject value);
V8_EXPORT_PRIVATE void Heap_WriteBarrierForCodeSlow(Code host);

V8_EXPORT_PRIVATE void Heap_GenerationalBarrierForCodeSlow(Code host,
                                                           RelocInfo* rinfo,
                                                           HeapObject object);

V8_EXPORT_PRIVATE void Heap_GenerationalEphemeronKeyBarrierSlow(
    Heap* heap, EphemeronHashTable table, Address slot);

// Do not use these internal details anywhere outside of this file. These
// internals are only intended to shortcut write barrier checks.
namespace heap_internals {

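// Mirrors the page header that lives at the start of every heap page: the
// flag word and the owning Heap* sit at fixed offsets, so the barrier fast
// paths below can check page state without pulling in heap internals.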
struct MemoryChunk {
  static constexpr uintptr_t kFlagsOffset = kSizetSize;
  static constexpr uintptr_t kHeapOffset = kSizetSize + kUIntptrSize;
  static constexpr uintptr_t kIsExecutableBit = uintptr_t{1} << 0;
  static constexpr uintptr_t kMarkingBit = uintptr_t{1} << 17;
  static constexpr uintptr_t kFromPageBit = uintptr_t{1} << 3;
  static constexpr uintptr_t kToPageBit = uintptr_t{1} << 4;
  static constexpr uintptr_t kReadOnlySpaceBit = uintptr_t{1} << 20;

  V8_INLINE static heap_internals::MemoryChunk* FromHeapObject(
      HeapObject object) {
    DCHECK(!V8_ENABLE_THIRD_PARTY_HEAP_BOOL);
    return reinterpret_cast<MemoryChunk*>(object.ptr() & ~kPageAlignmentMask);
  }

  V8_INLINE bool IsMarking() const { return GetFlags() & kMarkingBit; }

  V8_INLINE bool InYoungGeneration() const {
    if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return false;
    constexpr uintptr_t kYoungGenerationMask = kFromPageBit | kToPageBit;
    return GetFlags() & kYoungGenerationMask;
  }

  V8_INLINE uintptr_t GetFlags() const {
    return *reinterpret_cast<const uintptr_t*>(reinterpret_cast<Address>(this) +
                                               kFlagsOffset);
  }

  V8_INLINE Heap* GetHeap() {
    Heap* heap = *reinterpret_cast<Heap**>(reinterpret_cast<Address>(this) +
                                           kHeapOffset);
    DCHECK_NOT_NULL(heap);
    return heap;
  }

  V8_INLINE bool InReadOnlySpace() const {
    return GetFlags() & kReadOnlySpaceBit;
  }

  V8_INLINE bool InCodeSpace() const { return GetFlags() & kIsExecutableBit; }
};

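// Generational (old-to-new) barrier fast path: the slow path only has to
// record the slot when the stored value is in the young generation and the
// host object is not.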
inline void GenerationalBarrierInternal(HeapObject object, Address slot,
                                        HeapObject value) {
  DCHECK(Heap_PageFlagsAreConsistent(object));
  heap_internals::MemoryChunk* value_chunk =
      heap_internals::MemoryChunk::FromHeapObject(value);
  heap_internals::MemoryChunk* object_chunk =
      heap_internals::MemoryChunk::FromHeapObject(object);

  if (!value_chunk->InYoungGeneration() || object_chunk->InYoungGeneration()) {
    return;
  }

  Heap_GenerationalBarrierSlow(object, slot, value);
}

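// Same young-generation check as above, but for a key slot of an
// EphemeronHashTable; the slow path needs the owning Heap, which is read from
// the table's page.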
inline void GenerationalEphemeronKeyBarrierInternal(EphemeronHashTable table,
                                                    Address slot,
                                                    HeapObject value) {
  DCHECK(Heap_PageFlagsAreConsistent(table));
  heap_internals::MemoryChunk* value_chunk =
      heap_internals::MemoryChunk::FromHeapObject(value);
  heap_internals::MemoryChunk* table_chunk =
      heap_internals::MemoryChunk::FromHeapObject(table);

  if (!value_chunk->InYoungGeneration() || table_chunk->InYoungGeneration()) {
    return;
  }

  Heap_GenerationalEphemeronKeyBarrierSlow(table_chunk->GetHeap(), table, slot);
}

}  // namespace heap_internals

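// Write barrier for a pointer embedded in a Code object via RelocInfo:
// dispatches to both the generational barrier and the marking barrier.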
inline void WriteBarrierForCode(Code host, RelocInfo* rinfo, Object value) {
  DCHECK(!HasWeakHeapObjectTag(value));
  if (!value.IsHeapObject()) return;
  WriteBarrierForCode(host, rinfo, HeapObject::cast(value));
}

inline void WriteBarrierForCode(Code host, RelocInfo* rinfo, HeapObject value) {
  GenerationalBarrierForCode(host, rinfo, value);
  WriteBarrier::Marking(host, rinfo, value);
}

inline void WriteBarrierForCode(Code host) {
  Heap_WriteBarrierForCodeSlow(host);
}

inline void GenerationalBarrier(HeapObject object, ObjectSlot slot,
                                Object value) {
  if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
  DCHECK(!HasWeakHeapObjectTag(value));
  if (!value.IsHeapObject()) return;
  GenerationalBarrier(object, slot, HeapObject::cast(value));
}

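// Storing a Code value needs no generational barrier; the DCHECK below only
// asserts that such a value could never require one.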
inline void GenerationalBarrier(HeapObject object, ObjectSlot slot,
                                Code value) {
  if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
  DCHECK(!Heap_ValueMightRequireGenerationalWriteBarrier(value));
}

inline void GenerationalBarrier(HeapObject object, ObjectSlot slot,
                                HeapObject value) {
  if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
  DCHECK(!HasWeakHeapObjectTag(*slot));
  heap_internals::GenerationalBarrierInternal(object, slot.address(), value);
}

inline void GenerationalEphemeronKeyBarrier(EphemeronHashTable table,
                                            ObjectSlot slot, Object value) {
  if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
  DCHECK(!HasWeakHeapObjectTag(*slot));
  DCHECK(!HasWeakHeapObjectTag(value));
  DCHECK(value.IsHeapObject());
  heap_internals::GenerationalEphemeronKeyBarrierInternal(
      table, slot.address(), HeapObject::cast(value));
}

inline void GenerationalBarrier(HeapObject object, MaybeObjectSlot slot,
                                MaybeObject value) {
  if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
  HeapObject value_heap_object;
  if (!value->GetHeapObject(&value_heap_object)) return;
  heap_internals::GenerationalBarrierInternal(object, slot.address(),
                                              value_heap_object);
}

inline void GenerationalBarrierForCode(Code host, RelocInfo* rinfo,
                                       HeapObject object) {
  if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
  heap_internals::MemoryChunk* object_chunk =
      heap_internals::MemoryChunk::FromHeapObject(object);
  if (!object_chunk->InYoungGeneration()) return;
  Heap_GenerationalBarrierForCodeSlow(host, rinfo, object);
}

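// The DisallowGarbageCollection argument documents that the caller prevents
// GC while the returned mode is in effect; skipping the barrier is only safe
// for young-generation objects while incremental marking is inactive.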
inline WriteBarrierMode GetWriteBarrierModeForObject(
    HeapObject object, const DisallowGarbageCollection* promise) {
  if (FLAG_disable_write_barriers) return SKIP_WRITE_BARRIER;
  DCHECK(Heap_PageFlagsAreConsistent(object));
  heap_internals::MemoryChunk* chunk =
      heap_internals::MemoryChunk::FromHeapObject(object);
  if (chunk->IsMarking()) return UPDATE_WRITE_BARRIER;
  if (chunk->InYoungGeneration()) return SKIP_WRITE_BARRIER;
  return UPDATE_WRITE_BARRIER;
}

inline bool ObjectInYoungGeneration(Object object) {
  // TODO(rong): Fix caller of this function when we deploy
  // v8_use_third_party_heap.
  if (FLAG_single_generation) return false;
  if (object.IsSmi()) return false;
  return heap_internals::MemoryChunk::FromHeapObject(HeapObject::cast(object))
      ->InYoungGeneration();
}

inline bool IsReadOnlyHeapObject(HeapObject object) {
  if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return ReadOnlyHeap::Contains(object);
  heap_internals::MemoryChunk* chunk =
      heap_internals::MemoryChunk::FromHeapObject(object);
  return chunk->InReadOnlySpace();
}

inline bool IsCodeSpaceObject(HeapObject object) {
  heap_internals::MemoryChunk* chunk =
      heap_internals::MemoryChunk::FromHeapObject(object);
  return chunk->InCodeSpace();
}

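// Returns the owning Heap only while incremental marking is active on the
// object's page; the Marking() overloads below use it to bail out cheaply.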
base::Optional<Heap*> WriteBarrier::GetHeapIfMarking(HeapObject object) {
  if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return {};
  heap_internals::MemoryChunk* chunk =
      heap_internals::MemoryChunk::FromHeapObject(object);
  if (!chunk->IsMarking()) return {};
  return chunk->GetHeap();
}

void WriteBarrier::Marking(HeapObject host, ObjectSlot slot, Object value) {
  DCHECK(!HasWeakHeapObjectTag(value));
  if (!value.IsHeapObject()) return;
  Marking(host, HeapObjectSlot(slot), HeapObject::cast(value));
}

void WriteBarrier::Marking(HeapObject host, MaybeObjectSlot slot,
                           MaybeObject value) {
  HeapObject value_heap_object;
  if (!value->GetHeapObject(&value_heap_object)) return;
  Marking(host, HeapObjectSlot(slot), value_heap_object);
}

void WriteBarrier::Marking(HeapObject host, HeapObjectSlot slot,
                           HeapObject value) {
  auto heap = GetHeapIfMarking(host);
  if (!heap) return;
  MarkingSlow(*heap, host, slot, value);
}

void WriteBarrier::Marking(Code host, RelocInfo* reloc_info, HeapObject value) {
  auto heap = GetHeapIfMarking(host);
  if (!heap) return;
  MarkingSlow(*heap, host, reloc_info, value);
}

void WriteBarrier::Marking(JSArrayBuffer host,
                           ArrayBufferExtension* extension) {
  if (!extension) return;
  auto heap = GetHeapIfMarking(host);
  if (!heap) return;
  MarkingSlow(*heap, host, extension);
}

void WriteBarrier::Marking(DescriptorArray descriptor_array,
                           int number_of_own_descriptors) {
  auto heap = GetHeapIfMarking(descriptor_array);
  if (!heap) return;
  MarkingSlow(*heap, descriptor_array, number_of_own_descriptors);
}

// static
void WriteBarrier::MarkingFromGlobalHandle(Object value) {
  if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
  if (!value.IsHeapObject()) return;

  HeapObject heap_value = HeapObject::cast(value);
  // Value may be in read only space but the chunk should never be marked
  // as marking which would result in a bail out.
  auto heap = GetHeapIfMarking(heap_value);
  if (!heap) return;
  MarkingSlowFromGlobalHandle(*heap, heap_value);
}

// static
void WriteBarrier::MarkingFromInternalFields(JSObject host) {
  if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
  auto heap = GetHeapIfMarking(host);
  if (!heap) return;
  MarkingSlowFromInternalFields(*heap, host);
}

#ifdef ENABLE_SLOW_DCHECKS
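// Slow-DCHECK helper: returns false when a store of |value| into |host| can
// never require a barrier (young-generation host, Smi or cleared value,
// read-only or immortal immovable target).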
// static
template <typename T>
bool WriteBarrier::IsRequired(HeapObject host, T value) {
  if (BasicMemoryChunk::FromHeapObject(host)->InYoungGeneration()) return false;
  if (value.IsSmi()) return false;
  if (value.IsCleared()) return false;
  HeapObject target = value.GetHeapObject();
  if (ReadOnlyHeap::Contains(target)) return false;
  return !IsImmortalImmovableHeapObject(target);
}
#endif

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_HEAP_WRITE_BARRIER_INL_H_