• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2018 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifndef V8_OBJECTS_JS_ARRAY_BUFFER_INL_H_
6 #define V8_OBJECTS_JS_ARRAY_BUFFER_INL_H_
7 
8 #include "src/heap/heap-write-barrier-inl.h"
9 #include "src/objects/js-array-buffer.h"
10 #include "src/objects/js-objects-inl.h"
11 #include "src/objects/objects-inl.h"
12 
13 // Has to be the last include (doesn't have include guards):
14 #include "src/objects/object-macros.h"
15 
16 namespace v8 {
17 namespace internal {
18 
19 #include "torque-generated/src/objects/js-array-buffer-tq-inl.inc"
20 
// Constructor/cast boilerplate generated from the Torque class definitions
// (see the torque-generated -tq-inl.inc include above).
TQ_OBJECT_CONSTRUCTORS_IMPL(JSArrayBuffer)
TQ_OBJECT_CONSTRUCTORS_IMPL(JSArrayBufferView)
TQ_OBJECT_CONSTRUCTORS_IMPL(JSTypedArray)
TQ_OBJECT_CONSTRUCTORS_IMPL(JSDataView)
25 
// Plain and release/acquire accessors for JSTypedArray::base_pointer. The
// field holds Smi::zero() for off-heap data (see is_on_heap()) or an on-heap
// object otherwise; DataPtr() adds it to external_pointer.
ACCESSORS(JSTypedArray, base_pointer, Object, kBasePointerOffset)
RELEASE_ACQUIRE_ACCESSORS(JSTypedArray, base_pointer, Object,
                          kBasePointerOffset)
29 
// Returns the byte length stored in the object. For shared resizable
// buffers the stored value is 0 and GetByteLength() must be used instead.
size_t JSArrayBuffer::byte_length() const {
  return ReadField<size_t>(kByteLengthOffset);
}
33 
// Writes the buffer's byte length directly into the object field.
void JSArrayBuffer::set_byte_length(size_t value) {
  WriteField<size_t>(kByteLengthOffset, value);
}
37 
// Returns the raw backing-store pointer, decoded from its sandboxed
// (cage-base-relative) field representation.
DEF_GETTER(JSArrayBuffer, backing_store, void*) {
  Address value = ReadSandboxedPointerField(kBackingStoreOffset, cage_base);
  return reinterpret_cast<void*>(value);
}
42 
set_backing_store(Isolate * isolate,void * value)43 void JSArrayBuffer::set_backing_store(Isolate* isolate, void* value) {
44   Address addr = reinterpret_cast<Address>(value);
45   WriteSandboxedPointerField(kBackingStoreOffset, isolate, addr);
46 }
47 
// Returns the BackingStore held by this buffer's extension, or nullptr when
// no extension is attached.
std::shared_ptr<BackingStore> JSArrayBuffer::GetBackingStore() const {
  if (!extension()) return nullptr;
  return extension()->backing_store();
}
52 
// Returns the current byte length. For growable shared array buffers
// (shared && resizable) the length lives in the BackingStore and is read
// with seq_cst ordering; for all other buffers the stored field is
// authoritative.
size_t JSArrayBuffer::GetByteLength() const {
  if V8_UNLIKELY (is_shared() && is_resizable()) {
    // Invariant: byte_length for GSAB is 0 (it needs to be read from the
    // BackingStore).
    DCHECK_EQ(0, byte_length());

    return GetBackingStore()->byte_length(std::memory_order_seq_cst);
  }
  return byte_length();
}
63 
// During deserialization the backing-store field temporarily holds a 32-bit
// reference instead of a real pointer; this reads that reference back.
uint32_t JSArrayBuffer::GetBackingStoreRefForDeserialization() const {
  return static_cast<uint32_t>(ReadField<Address>(kBackingStoreOffset));
}
67 
// During serialization the backing-store field is overwritten with a 32-bit
// reference in place of the real pointer (see the matching getter above).
void JSArrayBuffer::SetBackingStoreRefForSerialization(uint32_t ref) {
  WriteField<Address>(kBackingStoreOffset, static_cast<Address>(ref));
}
71 
// Returns the ArrayBufferExtension attached to this buffer, or nullptr when
// none is attached or a torn concurrent update is observed (see the
// verify_lo re-read below).
ArrayBufferExtension* JSArrayBuffer::extension() const {
#if V8_COMPRESS_POINTERS
    // With pointer compression the extension-field might not be
    // pointer-aligned. However on ARM64 this field needs to be aligned to
    // perform atomic operations on it. Therefore we split the pointer into two
    // 32-bit words that we update atomically. We don't have an ABA problem here
    // since there can never be an Attach() after Detach() (transitions only
    // from NULL --> some ptr --> NULL).

    // Synchronize with publishing release store of non-null extension
    uint32_t lo = base::AsAtomic32::Acquire_Load(extension_lo());
    // The lo word carries a tag bit while the field is uninitialized/cleared.
    if (lo & kUninitializedTagMask) return nullptr;

    // Synchronize with release store of null extension
    uint32_t hi = base::AsAtomic32::Acquire_Load(extension_hi());
    // Re-read lo to detect a concurrent update between the two word loads;
    // treat a torn read as "no extension".
    uint32_t verify_lo = base::AsAtomic32::Relaxed_Load(extension_lo());
    if (lo != verify_lo) return nullptr;

    // Reassemble the 64-bit pointer from its two 32-bit halves.
    uintptr_t address = static_cast<uintptr_t>(lo);
    address |= static_cast<uintptr_t>(hi) << 32;
    return reinterpret_cast<ArrayBufferExtension*>(address);
#else
    // Without pointer compression the field is pointer-aligned and can be
    // loaded atomically as a whole.
    return base::AsAtomicPointer::Acquire_Load(extension_location());
#endif
}
97 
// Attaches (or, with nullptr, detaches) the ArrayBufferExtension. The final
// word is published with a release store so that extension() readers observe
// a consistent value; the marking write barrier is notified afterwards.
void JSArrayBuffer::set_extension(ArrayBufferExtension* extension) {
#if V8_COMPRESS_POINTERS
    if (extension != nullptr) {
      // Store hi first (relaxed), then publish via release store of lo,
      // matching the acquire load order in extension().
      uintptr_t address = reinterpret_cast<uintptr_t>(extension);
      base::AsAtomic32::Relaxed_Store(extension_hi(),
                                      static_cast<uint32_t>(address >> 32));
      base::AsAtomic32::Release_Store(extension_lo(),
                                      static_cast<uint32_t>(address));
    } else {
      // Tag lo as uninitialized first, then release-store hi.
      base::AsAtomic32::Relaxed_Store(extension_lo(),
                                      0 | kUninitializedTagMask);
      base::AsAtomic32::Release_Store(extension_hi(), 0);
    }
#else
    base::AsAtomicPointer::Release_Store(extension_location(), extension);
#endif
    WriteBarrier::Marking(*this, extension);
}
116 
extension_location()117 ArrayBufferExtension** JSArrayBuffer::extension_location() const {
118   Address location = field_address(kExtensionOffset);
119   return reinterpret_cast<ArrayBufferExtension**>(location);
120 }
121 
#if V8_COMPRESS_POINTERS
// Address of the low 32 bits of the extension field (the word that carries
// the uninitialized tag and is used for publication in set_extension()).
uint32_t* JSArrayBuffer::extension_lo() const {
  Address location = field_address(kExtensionOffset);
  return reinterpret_cast<uint32_t*>(location);
}

// Address of the high 32 bits of the extension field.
uint32_t* JSArrayBuffer::extension_hi() const {
  Address location = field_address(kExtensionOffset) + sizeof(uint32_t);
  return reinterpret_cast<uint32_t*>(location);
}
#endif
133 
// Zeroes the optional padding field (present only on layouts where it has a
// non-zero, 4-byte size) so the object contains no uninitialized bytes.
void JSArrayBuffer::clear_padding() {
  if (FIELD_SIZE(kOptionalPaddingOffset) != 0) {
    DCHECK_EQ(4, FIELD_SIZE(kOptionalPaddingOffset));
    memset(reinterpret_cast<void*>(address() + kOptionalPaddingOffset), 0,
           FIELD_SIZE(kOptionalPaddingOffset));
  }
}
141 
// Relaxed atomic store of the raw bit field that packs the boolean flags
// declared below.
void JSArrayBuffer::set_bit_field(uint32_t bits) {
  RELAXED_WRITE_UINT32_FIELD(*this, kBitFieldOffset, bits);
}
145 
// Relaxed atomic load of the raw bit field that packs the boolean flags
// declared below.
uint32_t JSArrayBuffer::bit_field() const {
  return RELAXED_READ_UINT32_FIELD(*this, kBitFieldOffset);
}
149 
// |bit_field| fields.
// Generates boolean getter/setter pairs (e.g. is_external() /
// set_is_external()) packed into the bit_field word accessed above.
BIT_FIELD_ACCESSORS(JSArrayBuffer, bit_field, is_external,
                    JSArrayBuffer::IsExternalBit)
BIT_FIELD_ACCESSORS(JSArrayBuffer, bit_field, is_detachable,
                    JSArrayBuffer::IsDetachableBit)
BIT_FIELD_ACCESSORS(JSArrayBuffer, bit_field, was_detached,
                    JSArrayBuffer::WasDetachedBit)
BIT_FIELD_ACCESSORS(JSArrayBuffer, bit_field, is_asmjs_memory,
                    JSArrayBuffer::IsAsmJsMemoryBit)
BIT_FIELD_ACCESSORS(JSArrayBuffer, bit_field, is_shared,
                    JSArrayBuffer::IsSharedBit)
BIT_FIELD_ACCESSORS(JSArrayBuffer, bit_field, is_resizable,
                    JSArrayBuffer::IsResizableBit)
163 
164 bool JSArrayBuffer::IsEmpty() const {
165   auto backing_store = GetBackingStore();
166   bool is_empty = !backing_store || backing_store->IsEmpty();
167   DCHECK_IMPLIES(is_empty, byte_length() == 0);
168   return is_empty;
169 }
170 
// Byte offset of this view into its underlying buffer.
size_t JSArrayBufferView::byte_offset() const {
  return ReadField<size_t>(kByteOffsetOffset);
}
174 
// Writes the view's byte offset into the object field.
void JSArrayBufferView::set_byte_offset(size_t value) {
  WriteField<size_t>(kByteOffsetOffset, value);
}
178 
// Byte length of this view as stored in the object.
size_t JSArrayBufferView::byte_length() const {
  return ReadField<size_t>(kByteLengthOffset);
}
182 
// Writes the view's byte length into the object field.
void JSArrayBufferView::set_byte_length(size_t value) {
  WriteField<size_t>(kByteLengthOffset, value);
}
186 
// True if the JSArrayBuffer underlying this view has been detached.
bool JSArrayBufferView::WasDetached() const {
  return JSArrayBuffer::cast(buffer()).was_detached();
}
190 
// View flag accessors packed into JSArrayBufferView's bit_field:
// length-tracking views follow their buffer's size; RAB-backed views sit on
// a resizable ArrayBuffer.
BIT_FIELD_ACCESSORS(JSArrayBufferView, bit_field, is_length_tracking,
                    JSArrayBufferView::IsLengthTrackingBit)
BIT_FIELD_ACCESSORS(JSArrayBufferView, bit_field, is_backed_by_rab,
                    JSArrayBufferView::IsBackedByRabBit)
195 
// True if this view's length can change at runtime: it is length-tracking
// or backed by a resizable ArrayBuffer.
bool JSArrayBufferView::IsVariableLength() const {
  return is_length_tracking() || is_backed_by_rab();
}
199 
GetLengthOrOutOfBounds(bool & out_of_bounds)200 size_t JSTypedArray::GetLengthOrOutOfBounds(bool& out_of_bounds) const {
201   DCHECK(!out_of_bounds);
202   if (WasDetached()) return 0;
203   if (IsVariableLength()) {
204     return GetVariableLengthOrOutOfBounds(out_of_bounds);
205   }
206   return LengthUnchecked();
207 }
208 
// Returns the element count; the out-of-bounds status computed along the
// way is discarded.
size_t JSTypedArray::GetLength() const {
  bool out_of_bounds = false;
  return GetLengthOrOutOfBounds(out_of_bounds);
}
213 
// Current size in bytes: element count times per-element size.
size_t JSTypedArray::GetByteLength() const {
  return GetLength() * element_size();
}
217 
// True if a variable-length typed array no longer fits within its buffer.
// Detached arrays report false here (GetLengthOrOutOfBounds returns 0
// without setting the flag); use IsDetachedOrOutOfBounds for both checks.
bool JSTypedArray::IsOutOfBounds() const {
  bool out_of_bounds = false;
  GetLengthOrOutOfBounds(out_of_bounds);
  return out_of_bounds;
}
223 
// True if the underlying buffer was detached, or the array is out of bounds
// with respect to it.
bool JSTypedArray::IsDetachedOrOutOfBounds() const {
  if (WasDetached()) {
    return true;
  }
  bool out_of_bounds = false;
  GetLengthOrOutOfBounds(out_of_bounds);
  return out_of_bounds;
}
232 
// Returns the stored element count. Only valid for fixed-length arrays
// (neither length-tracking nor RAB-backed), as the DCHECKs enforce.
size_t JSTypedArray::length() const {
  DCHECK(!is_length_tracking());
  DCHECK(!is_backed_by_rab());
  return ReadField<size_t>(kLengthOffset);
}
238 
// Reads the stored element count without the fixed-length DCHECKs of
// length(); callers must know the stored value is meaningful.
size_t JSTypedArray::LengthUnchecked() const {
  return ReadField<size_t>(kLengthOffset);
}
242 
// Writes the element count into the object field.
void JSTypedArray::set_length(size_t value) {
  WriteField<size_t>(kLengthOffset, value);
}
246 
// Returns the (possibly compensated, see DataPtr()) data pointer, decoded
// from its sandboxed field representation.
DEF_GETTER(JSTypedArray, external_pointer, Address) {
  return ReadSandboxedPointerField(kExternalPointerOffset, cage_base);
}
250 
// Stores the data pointer, encoding it as a sandboxed pointer field.
void JSTypedArray::set_external_pointer(Isolate* isolate, Address value) {
  WriteSandboxedPointerField(kExternalPointerOffset, isolate, value);
}
254 
// Compensation added to |external_pointer| for on-heap typed arrays: with
// pointer compression it is the cage base (so adding a compressed tagged
// base_pointer yields a full address, see DataPtr()); otherwise zero.
Address JSTypedArray::ExternalPointerCompensationForOnHeapArray(
    PtrComprCageBase cage_base) {
#ifdef V8_COMPRESS_POINTERS
  return cage_base.address();
#else
  return 0;
#endif
}
263 
// During deserialization the external-pointer field of an off-heap array
// temporarily holds a 32-bit reference; this reads that reference back.
uint32_t JSTypedArray::GetExternalBackingStoreRefForDeserialization() const {
  DCHECK(!is_on_heap());
  return static_cast<uint32_t>(ReadField<Address>(kExternalPointerOffset));
}
268 
// During serialization the external-pointer field of an off-heap array is
// overwritten with a 32-bit reference in place of the real pointer.
void JSTypedArray::SetExternalBackingStoreRefForSerialization(uint32_t ref) {
  DCHECK(!is_on_heap());
  WriteField<Address>(kExternalPointerOffset, static_cast<Address>(ref));
}
273 
// For on-heap arrays, strips the cage-base compensation from the external
// pointer before serialization, leaving only the raw offset. Note the write
// bypasses sandboxed-pointer encoding (plain WriteField), matching how the
// serializer reads it back.
void JSTypedArray::RemoveExternalPointerCompensationForSerialization(
    Isolate* isolate) {
  DCHECK(is_on_heap());
  Address offset =
      external_pointer() - ExternalPointerCompensationForOnHeapArray(isolate);
  WriteField<Address>(kExternalPointerOffset, offset);
}
281 
// Inverse of RemoveExternalPointerCompensationForSerialization: re-adds the
// cage-base compensation to the raw offset read from the snapshot and stores
// it back in sandboxed-pointer form.
void JSTypedArray::AddExternalPointerCompensationForDeserialization(
    Isolate* isolate) {
  DCHECK(is_on_heap());
  Address pointer = ReadField<Address>(kExternalPointerOffset) +
                    ExternalPointerCompensationForOnHeapArray(isolate);
  set_external_pointer(isolate, pointer);
}
289 
// Returns the address of the element data, valid for both on-heap
// (base_pointer != 0) and off-heap (base_pointer == 0) representations.
void* JSTypedArray::DataPtr() {
  // Zero-extend Tagged_t to Address according to current compression scheme
  // so that the addition with |external_pointer| (which already contains
  // compensated offset value) will decompress the tagged value.
  // See JSTypedArray::ExternalPointerCompensationForOnHeapArray() for details.
  STATIC_ASSERT(kOffHeapDataPtrEqualsExternalPointer);
  return reinterpret_cast<void*>(external_pointer() +
                                 static_cast<Tagged_t>(base_pointer().ptr()));
}
299 
// Points this typed array at off-heap data located at |base| + |offset| and
// clears base_pointer to mark the array off-heap.
void JSTypedArray::SetOffHeapDataPtr(Isolate* isolate, void* base,
                                     Address offset) {
  Address address = reinterpret_cast<Address>(base) + offset;
  set_external_pointer(isolate, address);
  // This is the only spot in which the `base_pointer` field can be mutated
  // after object initialization. Note this can happen at most once, when
  // `JSTypedArray::GetBuffer` transitions from an on- to off-heap
  // representation.
  // To play well with Turbofan concurrency requirements, `base_pointer` is set
  // with a release store, after external_pointer has been set.
  set_base_pointer(Smi::zero(), kReleaseStore, SKIP_WRITE_BARRIER);
  DCHECK_EQ(address, reinterpret_cast<Address>(DataPtr()));
}
313 
// True if the element data lives on the V8 heap, signalled by a non-zero
// base_pointer.
bool JSTypedArray::is_on_heap() const {
  // Keep synced with `is_on_heap(AcquireLoadTag)`.
  DisallowGarbageCollection no_gc;
  return base_pointer() != Smi::zero();
}
319 
// Acquire-load variant of is_on_heap(), pairing with the release store in
// SetOffHeapDataPtr().
bool JSTypedArray::is_on_heap(AcquireLoadTag tag) const {
  // Keep synced with `is_on_heap()`.
  // Note: For Turbofan concurrency requirements, it's important that this
  // function reads only `base_pointer`.
  DisallowGarbageCollection no_gc;
  return base_pointer(tag) != Smi::zero();
}
327 
328 // static
Validate(Isolate * isolate,Handle<Object> receiver,const char * method_name)329 MaybeHandle<JSTypedArray> JSTypedArray::Validate(Isolate* isolate,
330                                                  Handle<Object> receiver,
331                                                  const char* method_name) {
332   if (V8_UNLIKELY(!receiver->IsJSTypedArray())) {
333     const MessageTemplate message = MessageTemplate::kNotTypedArray;
334     THROW_NEW_ERROR(isolate, NewTypeError(message), JSTypedArray);
335   }
336 
337   Handle<JSTypedArray> array = Handle<JSTypedArray>::cast(receiver);
338   if (V8_UNLIKELY(array->WasDetached())) {
339     const MessageTemplate message = MessageTemplate::kDetachedOperation;
340     Handle<String> operation =
341         isolate->factory()->NewStringFromAsciiChecked(method_name);
342     THROW_NEW_ERROR(isolate, NewTypeError(message, operation), JSTypedArray);
343   }
344 
345   if (V8_UNLIKELY(array->IsVariableLength() && array->IsOutOfBounds())) {
346     const MessageTemplate message = MessageTemplate::kDetachedOperation;
347     Handle<String> operation =
348         isolate->factory()->NewStringFromAsciiChecked(method_name);
349     THROW_NEW_ERROR(isolate, NewTypeError(message, operation), JSTypedArray);
350   }
351 
352   // spec describes to return `buffer`, but it may disrupt current
353   // implementations, and it's much useful to return array for now.
354   return array;
355 }
356 
// Returns the DataView's raw data pointer, decoded from its sandboxed
// (cage-base-relative) field representation.
DEF_GETTER(JSDataView, data_pointer, void*) {
  Address value = ReadSandboxedPointerField(kDataPointerOffset, cage_base);
  return reinterpret_cast<void*>(value);
}
361 
set_data_pointer(Isolate * isolate,void * ptr)362 void JSDataView::set_data_pointer(Isolate* isolate, void* ptr) {
363   Address value = reinterpret_cast<Address>(ptr);
364   WriteSandboxedPointerField(kDataPointerOffset, isolate, value);
365 }
366 
367 }  // namespace internal
368 }  // namespace v8
369 
370 #include "src/objects/object-macros-undef.h"
371 
372 #endif  // V8_OBJECTS_JS_ARRAY_BUFFER_INL_H_
373