// NOTE(review): removed HTML code-browser navigation artifacts ("Home",
// "Line#", "Scopes#", "Navigate", "Raw", "Download") left over from the
// page this file was scraped from; they are not part of the source.
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_EMBEDDER_DATA_SLOT_INL_H_
#define V8_OBJECTS_EMBEDDER_DATA_SLOT_INL_H_

#include "src/base/memory.h"
#include "src/common/globals.h"
#include "src/heap/heap-write-barrier-inl.h"
#include "src/objects/embedder-data-array.h"
#include "src/objects/embedder-data-slot.h"
#include "src/objects/js-objects-inl.h"
#include "src/objects/objects-inl.h"
#include "src/sandbox/external-pointer-inl.h"

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
19 
namespace v8 {
namespace internal {
22 
EmbedderDataSlot(EmbedderDataArray array,int entry_index)23 EmbedderDataSlot::EmbedderDataSlot(EmbedderDataArray array, int entry_index)
24     : SlotBase(FIELD_ADDR(array,
25                           EmbedderDataArray::OffsetOfElementAt(entry_index))) {}
26 
EmbedderDataSlot(JSObject object,int embedder_field_index)27 EmbedderDataSlot::EmbedderDataSlot(JSObject object, int embedder_field_index)
28     : SlotBase(FIELD_ADDR(
29           object, object.GetEmbedderFieldOffset(embedder_field_index))) {}
30 
EmbedderDataSlot(const EmbedderDataSlotSnapshot & snapshot)31 EmbedderDataSlot::EmbedderDataSlot(const EmbedderDataSlotSnapshot& snapshot)
32     : SlotBase(reinterpret_cast<Address>(&snapshot)) {}
33 
Initialize(Object initial_value)34 void EmbedderDataSlot::Initialize(Object initial_value) {
35   // TODO(v8) initialize the slot with Smi::zero() instead. This'll also
36   // guarantee that we don't need a write barrier.
37   DCHECK(initial_value.IsSmi() ||
38          ReadOnlyHeap::Contains(HeapObject::cast(initial_value)));
39   ObjectSlot(address() + kTaggedPayloadOffset).Relaxed_Store(initial_value);
40 #ifdef V8_COMPRESS_POINTERS
41   ObjectSlot(address() + kRawPayloadOffset).Relaxed_Store(Smi::zero());
42 #endif
43 }
44 
load_tagged()45 Object EmbedderDataSlot::load_tagged() const {
46   return ObjectSlot(address() + kTaggedPayloadOffset).Relaxed_Load();
47 }
48 
store_smi(Smi value)49 void EmbedderDataSlot::store_smi(Smi value) {
50   ObjectSlot(address() + kTaggedPayloadOffset).Relaxed_Store(value);
51 #ifdef V8_COMPRESS_POINTERS
52   // See gc_safe_store() for the reasons behind two stores.
53   ObjectSlot(address() + kRawPayloadOffset).Relaxed_Store(Smi::zero());
54 #endif
55 }
56 
57 // static
store_tagged(EmbedderDataArray array,int entry_index,Object value)58 void EmbedderDataSlot::store_tagged(EmbedderDataArray array, int entry_index,
59                                     Object value) {
60   int slot_offset = EmbedderDataArray::OffsetOfElementAt(entry_index);
61   ObjectSlot(FIELD_ADDR(array, slot_offset + kTaggedPayloadOffset))
62       .Relaxed_Store(value);
63   WRITE_BARRIER(array, slot_offset + kTaggedPayloadOffset, value);
64 #ifdef V8_COMPRESS_POINTERS
65   // See gc_safe_store() for the reasons behind two stores.
66   ObjectSlot(FIELD_ADDR(array, slot_offset + kRawPayloadOffset))
67       .Relaxed_Store(Smi::zero());
68 #endif
69 }
70 
71 // static
store_tagged(JSObject object,int embedder_field_index,Object value)72 void EmbedderDataSlot::store_tagged(JSObject object, int embedder_field_index,
73                                     Object value) {
74   int slot_offset = object.GetEmbedderFieldOffset(embedder_field_index);
75   ObjectSlot(FIELD_ADDR(object, slot_offset + kTaggedPayloadOffset))
76       .Relaxed_Store(value);
77   WRITE_BARRIER(object, slot_offset + kTaggedPayloadOffset, value);
78 #ifdef V8_COMPRESS_POINTERS
79   // See gc_safe_store() for the reasons behind two stores.
80   ObjectSlot(FIELD_ADDR(object, slot_offset + kRawPayloadOffset))
81       .Relaxed_Store(Smi::zero());
82 #endif
83 }
84 
ToAlignedPointer(Isolate * isolate,void ** out_pointer)85 bool EmbedderDataSlot::ToAlignedPointer(Isolate* isolate,
86                                         void** out_pointer) const {
87   // We don't care about atomicity of access here because embedder slots
88   // are accessed this way only from the main thread via API during "mutator"
89   // phase which is propely synched with GC (concurrent marker may still look
90   // at the tagged part of the embedder slot but read-only access is ok).
91 #ifdef V8_SANDBOXED_EXTERNAL_POINTERS
92   // The raw part must always contain a valid external pointer table index.
93   *out_pointer = reinterpret_cast<void*>(
94       ReadExternalPointerField(address() + kExternalPointerOffset, isolate,
95                                kEmbedderDataSlotPayloadTag));
96   return true;
97 #else
98   Address raw_value;
99   if (COMPRESS_POINTERS_BOOL) {
100     // TODO(ishell, v8:8875): When pointer compression is enabled 8-byte size
101     // fields (external pointers, doubles and BigInt data) are only kTaggedSize
102     // aligned so we have to use unaligned pointer friendly way of accessing
103     // them in order to avoid undefined behavior in C++ code.
104     raw_value = base::ReadUnalignedValue<Address>(address());
105   } else {
106     raw_value = *location();
107   }
108   *out_pointer = reinterpret_cast<void*>(raw_value);
109   return HAS_SMI_TAG(raw_value);
110 #endif  // V8_SANDBOXED_EXTERNAL_POINTERS
111 }
112 
store_aligned_pointer(Isolate * isolate,void * ptr)113 bool EmbedderDataSlot::store_aligned_pointer(Isolate* isolate, void* ptr) {
114   Address value = reinterpret_cast<Address>(ptr);
115   if (!HAS_SMI_TAG(value)) return false;
116 #ifdef V8_SANDBOXED_EXTERNAL_POINTERS
117   DCHECK_EQ(0, value & kExternalPointerTagMask);
118   // This also mark the entry as alive until the next GC.
119   InitExternalPointerField(address() + kExternalPointerOffset, isolate, value,
120                            kEmbedderDataSlotPayloadTag);
121   ObjectSlot(address() + kTaggedPayloadOffset).Relaxed_Store(Smi::zero());
122   return true;
123 #else
124   gc_safe_store(isolate, value);
125   return true;
126 #endif  // V8_SANDBOXED_EXTERNAL_POINTERS
127 }
128 
load_raw(Isolate * isolate,const DisallowGarbageCollection & no_gc)129 EmbedderDataSlot::RawData EmbedderDataSlot::load_raw(
130     Isolate* isolate, const DisallowGarbageCollection& no_gc) const {
131   // We don't care about atomicity of access here because embedder slots
132   // are accessed this way only by serializer from the main thread when
133   // GC is not active (concurrent marker may still look at the tagged part
134   // of the embedder slot but read-only access is ok).
135 #ifdef V8_COMPRESS_POINTERS
136   // TODO(ishell, v8:8875): When pointer compression is enabled 8-byte size
137   // fields (external pointers, doubles and BigInt data) are only kTaggedSize
138   // aligned so we have to use unaligned pointer friendly way of accessing them
139   // in order to avoid undefined behavior in C++ code.
140   return base::ReadUnalignedValue<EmbedderDataSlot::RawData>(address());
141 #else
142   return *location();
143 #endif
144 }
145 
store_raw(Isolate * isolate,EmbedderDataSlot::RawData data,const DisallowGarbageCollection & no_gc)146 void EmbedderDataSlot::store_raw(Isolate* isolate,
147                                  EmbedderDataSlot::RawData data,
148                                  const DisallowGarbageCollection& no_gc) {
149   gc_safe_store(isolate, data);
150 }
151 
gc_safe_store(Isolate * isolate,Address value)152 void EmbedderDataSlot::gc_safe_store(Isolate* isolate, Address value) {
153 #ifdef V8_COMPRESS_POINTERS
154   STATIC_ASSERT(kSmiShiftSize == 0);
155   STATIC_ASSERT(SmiValuesAre31Bits());
156   STATIC_ASSERT(kTaggedSize == kInt32Size);
157 
158   // We have to do two 32-bit stores here because
159   // 1) tagged part modifications must be atomic to be properly synchronized
160   //    with the concurrent marker.
161   // 2) atomicity of full pointer store is not guaranteed for embedder slots
162   //    since the address of the slot may not be kSystemPointerSize aligned
163   //    (only kTaggedSize alignment is guaranteed).
164   // TODO(ishell, v8:8875): revisit this once the allocation alignment
165   // inconsistency is fixed.
166   Address lo = static_cast<intptr_t>(static_cast<int32_t>(value));
167   ObjectSlot(address() + kTaggedPayloadOffset).Relaxed_Store(Smi(lo));
168   Address hi = value >> 32;
169   ObjectSlot(address() + kRawPayloadOffset).Relaxed_Store(Object(hi));
170 #else
171   ObjectSlot(address() + kTaggedPayloadOffset).Relaxed_Store(Smi(value));
172 #endif
173 }
174 
175 // static
PopulateEmbedderDataSnapshot(Map map,JSObject js_object,int entry_index,EmbedderDataSlotSnapshot & snapshot)176 void EmbedderDataSlot::PopulateEmbedderDataSnapshot(
177     Map map, JSObject js_object, int entry_index,
178     EmbedderDataSlotSnapshot& snapshot) {
179 #ifdef V8_COMPRESS_POINTERS
180   STATIC_ASSERT(sizeof(EmbedderDataSlotSnapshot) == sizeof(AtomicTagged_t) * 2);
181 #else   // !V8_COMPRESS_POINTERS
182   STATIC_ASSERT(sizeof(EmbedderDataSlotSnapshot) == sizeof(AtomicTagged_t));
183 #endif  // !V8_COMPRESS_POINTERS
184   STATIC_ASSERT(sizeof(EmbedderDataSlotSnapshot) == kEmbedderDataSlotSize);
185 
186   const Address field_base =
187       FIELD_ADDR(js_object, js_object.GetEmbedderFieldOffset(entry_index));
188 
189 #if defined(V8_TARGET_BIG_ENDIAN) && defined(V8_COMPRESS_POINTERS)
190   const int index = 1;
191 #else
192   const int index = 0;
193 #endif
194 
195   reinterpret_cast<AtomicTagged_t*>(&snapshot)[index] =
196       AsAtomicTagged::Relaxed_Load(
197           reinterpret_cast<AtomicTagged_t*>(field_base + kTaggedPayloadOffset));
198 #ifdef V8_COMPRESS_POINTERS
199   reinterpret_cast<AtomicTagged_t*>(&snapshot)[1 - index] =
200       AsAtomicTagged::Relaxed_Load(
201           reinterpret_cast<AtomicTagged_t*>(field_base + kRawPayloadOffset));
202 #endif  // V8_COMPRESS_POINTERS
203 }
204 
}  // namespace internal
}  // namespace v8

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_EMBEDDER_DATA_SLOT_INL_H_