1 // Copyright 2018 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #ifndef V8_HEAP_HEAP_WRITE_BARRIER_INL_H_
6 #define V8_HEAP_HEAP_WRITE_BARRIER_INL_H_
7
8 // Clients of this interface shouldn't depend on lots of heap internals.
9 // Do not include anything from src/heap here!
10
11 #include "src/heap/heap-write-barrier.h"
12
13 #include "src/common/globals.h"
14 #include "src/objects/code.h"
15 #include "src/objects/compressed-slots-inl.h"
16 #include "src/objects/fixed-array.h"
17 #include "src/objects/heap-object.h"
18 #include "src/objects/maybe-object-inl.h"
19 #include "src/objects/slots-inl.h"
20
21 namespace v8 {
22 namespace internal {
23
24 // Defined in heap.cc.
25 V8_EXPORT_PRIVATE bool Heap_PageFlagsAreConsistent(HeapObject object);
26 V8_EXPORT_PRIVATE void Heap_GenerationalBarrierSlow(HeapObject object,
27 Address slot,
28 HeapObject value);
29 V8_EXPORT_PRIVATE void Heap_WriteBarrierForCodeSlow(Code host);
30
31 V8_EXPORT_PRIVATE void Heap_GenerationalBarrierForCodeSlow(Code host,
32 RelocInfo* rinfo,
33 HeapObject object);
34
35 V8_EXPORT_PRIVATE void Heap_GenerationalEphemeronKeyBarrierSlow(
36 Heap* heap, EphemeronHashTable table, Address slot);
37
38 // Do not use these internal details anywhere outside of this file. These
39 // internals are only intended to shortcut write barrier checks.
40 namespace heap_internals {
41
// A minimal mirror of the real MemoryChunk used only to shortcut write
// barrier checks without pulling in heap internals. The offsets and flag
// bit positions below must stay in sync with the canonical chunk layout.
// NOTE(review): offsets/bits are assumed to match src/heap — verify against
// the real MemoryChunk when updating either side.
struct MemoryChunk {
  // Byte offset of the flags word within the chunk header.
  static constexpr uintptr_t kFlagsOffset = kSizetSize;
  // Byte offset of the owning Heap* within the chunk header.
  static constexpr uintptr_t kHeapOffset = kSizetSize + kUIntptrSize;
  // Flag bits tested by the fast-path barriers.
  static constexpr uintptr_t kMarkingBit = uintptr_t{1} << 18;
  static constexpr uintptr_t kFromPageBit = uintptr_t{1} << 3;
  static constexpr uintptr_t kToPageBit = uintptr_t{1} << 4;
  static constexpr uintptr_t kReadOnlySpaceBit = uintptr_t{1} << 21;

  // Maps a heap object to its containing chunk by masking off the
  // page-offset bits of its address.
  V8_INLINE static heap_internals::MemoryChunk* FromHeapObject(
      HeapObject object) {
    DCHECK(!V8_ENABLE_THIRD_PARTY_HEAP_BOOL);
    return reinterpret_cast<MemoryChunk*>(object.ptr() & ~kPageAlignmentMask);
  }

  // True while incremental marking is active for this chunk.
  V8_INLINE bool IsMarking() const { return GetFlags() & kMarkingBit; }

  // True if this chunk is a young-generation (from- or to-) page. Always
  // false when a third-party heap is in use.
  V8_INLINE bool InYoungGeneration() const {
    if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return false;
    constexpr uintptr_t kYoungGenerationMask = kFromPageBit | kToPageBit;
    return GetFlags() & kYoungGenerationMask;
  }

  // Reads the flags word straight out of the chunk header.
  V8_INLINE uintptr_t GetFlags() const {
    return *reinterpret_cast<const uintptr_t*>(reinterpret_cast<Address>(this) +
                                               kFlagsOffset);
  }

  // Reads the owning Heap* straight out of the chunk header.
  V8_INLINE Heap* GetHeap() {
    Heap* heap = *reinterpret_cast<Heap**>(reinterpret_cast<Address>(this) +
                                           kHeapOffset);
    DCHECK_NOT_NULL(heap);
    return heap;
  }

  // True if this chunk belongs to the read-only space.
  V8_INLINE bool InReadOnlySpace() const {
    return GetFlags() & kReadOnlySpaceBit;
  }
};
80
GenerationalBarrierInternal(HeapObject object,Address slot,HeapObject value)81 inline void GenerationalBarrierInternal(HeapObject object, Address slot,
82 HeapObject value) {
83 DCHECK(Heap_PageFlagsAreConsistent(object));
84 heap_internals::MemoryChunk* value_chunk =
85 heap_internals::MemoryChunk::FromHeapObject(value);
86 heap_internals::MemoryChunk* object_chunk =
87 heap_internals::MemoryChunk::FromHeapObject(object);
88
89 if (!value_chunk->InYoungGeneration() || object_chunk->InYoungGeneration()) {
90 return;
91 }
92
93 Heap_GenerationalBarrierSlow(object, slot, value);
94 }
95
GenerationalEphemeronKeyBarrierInternal(EphemeronHashTable table,Address slot,HeapObject value)96 inline void GenerationalEphemeronKeyBarrierInternal(EphemeronHashTable table,
97 Address slot,
98 HeapObject value) {
99 DCHECK(Heap::PageFlagsAreConsistent(table));
100 heap_internals::MemoryChunk* value_chunk =
101 heap_internals::MemoryChunk::FromHeapObject(value);
102 heap_internals::MemoryChunk* table_chunk =
103 heap_internals::MemoryChunk::FromHeapObject(table);
104
105 if (!value_chunk->InYoungGeneration() || table_chunk->InYoungGeneration()) {
106 return;
107 }
108
109 Heap_GenerationalEphemeronKeyBarrierSlow(table_chunk->GetHeap(), table, slot);
110 }
111
112 } // namespace heap_internals
113
WriteBarrierForCode(Code host,RelocInfo * rinfo,Object value)114 inline void WriteBarrierForCode(Code host, RelocInfo* rinfo, Object value) {
115 DCHECK(!HasWeakHeapObjectTag(value));
116 if (!value.IsHeapObject()) return;
117 WriteBarrierForCode(host, rinfo, HeapObject::cast(value));
118 }
119
WriteBarrierForCode(Code host,RelocInfo * rinfo,HeapObject value)120 inline void WriteBarrierForCode(Code host, RelocInfo* rinfo, HeapObject value) {
121 GenerationalBarrierForCode(host, rinfo, value);
122 WriteBarrier::Marking(host, rinfo, value);
123 }
124
WriteBarrierForCode(Code host)125 inline void WriteBarrierForCode(Code host) {
126 Heap_WriteBarrierForCodeSlow(host);
127 }
128
GenerationalBarrier(HeapObject object,ObjectSlot slot,Object value)129 inline void GenerationalBarrier(HeapObject object, ObjectSlot slot,
130 Object value) {
131 if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
132 DCHECK(!HasWeakHeapObjectTag(value));
133 if (!value.IsHeapObject()) return;
134 GenerationalBarrier(object, slot, HeapObject::cast(value));
135 }
136
GenerationalBarrier(HeapObject object,ObjectSlot slot,HeapObject value)137 inline void GenerationalBarrier(HeapObject object, ObjectSlot slot,
138 HeapObject value) {
139 if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
140 DCHECK(!HasWeakHeapObjectTag(*slot));
141 heap_internals::GenerationalBarrierInternal(object, slot.address(),
142 HeapObject::cast(value));
143 }
144
GenerationalEphemeronKeyBarrier(EphemeronHashTable table,ObjectSlot slot,Object value)145 inline void GenerationalEphemeronKeyBarrier(EphemeronHashTable table,
146 ObjectSlot slot, Object value) {
147 if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
148 DCHECK(!HasWeakHeapObjectTag(*slot));
149 DCHECK(!HasWeakHeapObjectTag(value));
150 DCHECK(value.IsHeapObject());
151 heap_internals::GenerationalEphemeronKeyBarrierInternal(
152 table, slot.address(), HeapObject::cast(value));
153 }
154
GenerationalBarrier(HeapObject object,MaybeObjectSlot slot,MaybeObject value)155 inline void GenerationalBarrier(HeapObject object, MaybeObjectSlot slot,
156 MaybeObject value) {
157 if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
158 HeapObject value_heap_object;
159 if (!value->GetHeapObject(&value_heap_object)) return;
160 heap_internals::GenerationalBarrierInternal(object, slot.address(),
161 value_heap_object);
162 }
163
GenerationalBarrierForCode(Code host,RelocInfo * rinfo,HeapObject object)164 inline void GenerationalBarrierForCode(Code host, RelocInfo* rinfo,
165 HeapObject object) {
166 if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
167 heap_internals::MemoryChunk* object_chunk =
168 heap_internals::MemoryChunk::FromHeapObject(object);
169 if (!object_chunk->InYoungGeneration()) return;
170 Heap_GenerationalBarrierForCodeSlow(host, rinfo, object);
171 }
172
GetWriteBarrierModeForObject(HeapObject object,const DisallowHeapAllocation * promise)173 inline WriteBarrierMode GetWriteBarrierModeForObject(
174 HeapObject object, const DisallowHeapAllocation* promise) {
175 if (FLAG_disable_write_barriers) return SKIP_WRITE_BARRIER;
176 DCHECK(Heap_PageFlagsAreConsistent(object));
177 heap_internals::MemoryChunk* chunk =
178 heap_internals::MemoryChunk::FromHeapObject(object);
179 if (chunk->IsMarking()) return UPDATE_WRITE_BARRIER;
180 if (chunk->InYoungGeneration()) return SKIP_WRITE_BARRIER;
181 return UPDATE_WRITE_BARRIER;
182 }
183
ObjectInYoungGeneration(Object object)184 inline bool ObjectInYoungGeneration(Object object) {
185 // TODO(rong): Fix caller of this function when we deploy
186 // v8_use_third_party_heap.
187 if (FLAG_single_generation) return false;
188 if (object.IsSmi()) return false;
189 return heap_internals::MemoryChunk::FromHeapObject(HeapObject::cast(object))
190 ->InYoungGeneration();
191 }
192
IsReadOnlyHeapObject(HeapObject object)193 inline bool IsReadOnlyHeapObject(HeapObject object) {
194 if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return ReadOnlyHeap::Contains(object);
195 heap_internals::MemoryChunk* chunk =
196 heap_internals::MemoryChunk::FromHeapObject(object);
197 return chunk->InReadOnlySpace();
198 }
199
GetHeapIfMarking(HeapObject object)200 base::Optional<Heap*> WriteBarrier::GetHeapIfMarking(HeapObject object) {
201 if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return {};
202 heap_internals::MemoryChunk* chunk =
203 heap_internals::MemoryChunk::FromHeapObject(object);
204 if (!chunk->IsMarking()) return {};
205 return chunk->GetHeap();
206 }
207
Marking(HeapObject host,ObjectSlot slot,Object value)208 void WriteBarrier::Marking(HeapObject host, ObjectSlot slot, Object value) {
209 DCHECK(!HasWeakHeapObjectTag(value));
210 if (!value.IsHeapObject()) return;
211 Marking(host, HeapObjectSlot(slot), HeapObject::cast(value));
212 }
213
Marking(HeapObject host,MaybeObjectSlot slot,MaybeObject value)214 void WriteBarrier::Marking(HeapObject host, MaybeObjectSlot slot,
215 MaybeObject value) {
216 HeapObject value_heap_object;
217 if (!value->GetHeapObject(&value_heap_object)) return;
218 Marking(host, HeapObjectSlot(slot), value_heap_object);
219 }
220
Marking(HeapObject host,HeapObjectSlot slot,HeapObject value)221 void WriteBarrier::Marking(HeapObject host, HeapObjectSlot slot,
222 HeapObject value) {
223 auto heap = GetHeapIfMarking(host);
224 if (!heap) return;
225 MarkingSlow(*heap, host, slot, value);
226 }
227
Marking(Code host,RelocInfo * reloc_info,HeapObject value)228 void WriteBarrier::Marking(Code host, RelocInfo* reloc_info, HeapObject value) {
229 auto heap = GetHeapIfMarking(host);
230 if (!heap) return;
231 MarkingSlow(*heap, host, reloc_info, value);
232 }
233
Marking(JSArrayBuffer host,ArrayBufferExtension * extension)234 void WriteBarrier::Marking(JSArrayBuffer host,
235 ArrayBufferExtension* extension) {
236 if (!extension) return;
237 auto heap = GetHeapIfMarking(host);
238 if (!heap) return;
239 MarkingSlow(*heap, host, extension);
240 }
241
Marking(DescriptorArray descriptor_array,int number_of_own_descriptors)242 void WriteBarrier::Marking(DescriptorArray descriptor_array,
243 int number_of_own_descriptors) {
244 auto heap = GetHeapIfMarking(descriptor_array);
245 if (!heap) return;
246 MarkingSlow(*heap, descriptor_array, number_of_own_descriptors);
247 }
248
249 } // namespace internal
250 } // namespace v8
251
252 #endif // V8_HEAP_HEAP_WRITE_BARRIER_INL_H_
253