1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/objects/elements.h"
6
7 #include "src/common/message-template.h"
8 #include "src/execution/arguments.h"
9 #include "src/execution/frames.h"
10 #include "src/execution/isolate-inl.h"
11 #include "src/execution/protectors-inl.h"
12 #include "src/heap/factory.h"
13 #include "src/heap/heap-inl.h" // For MaxNumberToStringCacheSize.
14 #include "src/heap/heap-write-barrier-inl.h"
15 #include "src/numbers/conversions.h"
16 #include "src/objects/arguments-inl.h"
17 #include "src/objects/hash-table-inl.h"
18 #include "src/objects/js-array-buffer-inl.h"
19 #include "src/objects/js-array-inl.h"
20 #include "src/objects/keys.h"
21 #include "src/objects/objects-inl.h"
22 #include "src/objects/slots-atomic-inl.h"
23 #include "src/objects/slots.h"
24 #include "src/utils/utils.h"
25
26 // Each concrete ElementsAccessor can handle exactly one ElementsKind;
27 // several abstract ElementsAccessor classes are used to allow sharing
28 // common code.
29 //
30 // Inheritance hierarchy:
31 // - ElementsAccessorBase (abstract)
32 // - FastElementsAccessor (abstract)
33 // - FastSmiOrObjectElementsAccessor
34 // - FastPackedSmiElementsAccessor
35 // - FastHoleySmiElementsAccessor
36 // - FastPackedObjectElementsAccessor
37 // - FastNonextensibleObjectElementsAccessor: template
38 // - FastPackedNonextensibleObjectElementsAccessor
39 // - FastHoleyNonextensibleObjectElementsAccessor
40 // - FastSealedObjectElementsAccessor: template
41 // - FastPackedSealedObjectElementsAccessor
42 // - FastHoleySealedObjectElementsAccessor
43 // - FastFrozenObjectElementsAccessor: template
44 // - FastPackedFrozenObjectElementsAccessor
45 // - FastHoleyFrozenObjectElementsAccessor
46 // - FastHoleyObjectElementsAccessor
47 // - FastDoubleElementsAccessor
48 // - FastPackedDoubleElementsAccessor
49 // - FastHoleyDoubleElementsAccessor
50 // - TypedElementsAccessor: template, with instantiations:
51 // - Uint8ElementsAccessor
52 // - Int8ElementsAccessor
53 // - Uint16ElementsAccessor
54 // - Int16ElementsAccessor
55 // - Uint32ElementsAccessor
56 // - Int32ElementsAccessor
57 // - Float32ElementsAccessor
58 // - Float64ElementsAccessor
59 // - Uint8ClampedElementsAccessor
60 // - BigUint64ElementsAccessor
61 // - BigInt64ElementsAccessor
62 // - DictionaryElementsAccessor
63 // - SloppyArgumentsElementsAccessor
64 // - FastSloppyArgumentsElementsAccessor
65 // - SlowSloppyArgumentsElementsAccessor
66 // - StringWrapperElementsAccessor
67 // - FastStringWrapperElementsAccessor
68 // - SlowStringWrapperElementsAccessor
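//
// For example, a JSArray created from the literal [1, 2, 3] typically starts
// out with PACKED_SMI_ELEMENTS and is therefore handled by
// FastPackedSmiElementsAccessor; writing past the current length (e.g.
// a[10] = 4) usually transitions it to HOLEY_SMI_ELEMENTS, which is handled
// by FastHoleySmiElementsAccessor.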
69
70 namespace v8 {
71 namespace internal {
72
73 namespace {
74
75 #define RETURN_NOTHING_IF_NOT_SUCCESSFUL(call) \
76 do { \
77 if (!(call)) return Nothing<bool>(); \
78 } while (false)
79
80 #define RETURN_FAILURE_IF_NOT_SUCCESSFUL(call) \
81 do { \
82 ExceptionStatus status_enum_result = (call); \
83 if (!status_enum_result) return status_enum_result; \
84 } while (false)
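// Both macros are used further down in this file, e.g. in
// CollectValuesOrEntriesImpl():
//   RETURN_NOTHING_IF_NOT_SUCCESSFUL(Subclass::CollectElementIndicesImpl(
//       object, handle(object->elements(), isolate), &accumulator));
// and in CollectElementIndicesImpl():
//   RETURN_FAILURE_IF_NOT_SUCCESSFUL(
//       keys->AddKey(factory->NewNumberFromSize(i)));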
85
86 static const int kPackedSizeNotKnown = -1;
87
88 enum Where { AT_START, AT_END };
89
90 // The first argument in the list is the accessor class, the second is the
91 // accessor ElementsKind, and the third is the backing store class. Use the
92 // fast element handler for smi-only arrays. The implementation is currently
93 // identical. Note that the order must match that of the ElementsKind enum for
94 // the |accessor_array[]| below to work.
95 #define ELEMENTS_LIST(V) \
96 V(FastPackedSmiElementsAccessor, PACKED_SMI_ELEMENTS, FixedArray) \
97 V(FastHoleySmiElementsAccessor, HOLEY_SMI_ELEMENTS, FixedArray) \
98 V(FastPackedObjectElementsAccessor, PACKED_ELEMENTS, FixedArray) \
99 V(FastHoleyObjectElementsAccessor, HOLEY_ELEMENTS, FixedArray) \
100 V(FastPackedDoubleElementsAccessor, PACKED_DOUBLE_ELEMENTS, \
101 FixedDoubleArray) \
102 V(FastHoleyDoubleElementsAccessor, HOLEY_DOUBLE_ELEMENTS, FixedDoubleArray) \
103 V(FastPackedNonextensibleObjectElementsAccessor, \
104 PACKED_NONEXTENSIBLE_ELEMENTS, FixedArray) \
105 V(FastHoleyNonextensibleObjectElementsAccessor, \
106 HOLEY_NONEXTENSIBLE_ELEMENTS, FixedArray) \
107 V(FastPackedSealedObjectElementsAccessor, PACKED_SEALED_ELEMENTS, \
108 FixedArray) \
109 V(FastHoleySealedObjectElementsAccessor, HOLEY_SEALED_ELEMENTS, FixedArray) \
110 V(FastPackedFrozenObjectElementsAccessor, PACKED_FROZEN_ELEMENTS, \
111 FixedArray) \
112 V(FastHoleyFrozenObjectElementsAccessor, HOLEY_FROZEN_ELEMENTS, FixedArray) \
113 V(DictionaryElementsAccessor, DICTIONARY_ELEMENTS, NumberDictionary) \
114 V(FastSloppyArgumentsElementsAccessor, FAST_SLOPPY_ARGUMENTS_ELEMENTS, \
115 FixedArray) \
116 V(SlowSloppyArgumentsElementsAccessor, SLOW_SLOPPY_ARGUMENTS_ELEMENTS, \
117 FixedArray) \
118 V(FastStringWrapperElementsAccessor, FAST_STRING_WRAPPER_ELEMENTS, \
119 FixedArray) \
120 V(SlowStringWrapperElementsAccessor, SLOW_STRING_WRAPPER_ELEMENTS, \
121 FixedArray) \
122 V(Uint8ElementsAccessor, UINT8_ELEMENTS, ByteArray) \
123 V(Int8ElementsAccessor, INT8_ELEMENTS, ByteArray) \
124 V(Uint16ElementsAccessor, UINT16_ELEMENTS, ByteArray) \
125 V(Int16ElementsAccessor, INT16_ELEMENTS, ByteArray) \
126 V(Uint32ElementsAccessor, UINT32_ELEMENTS, ByteArray) \
127 V(Int32ElementsAccessor, INT32_ELEMENTS, ByteArray) \
128 V(Float32ElementsAccessor, FLOAT32_ELEMENTS, ByteArray) \
129 V(Float64ElementsAccessor, FLOAT64_ELEMENTS, ByteArray) \
130 V(Uint8ClampedElementsAccessor, UINT8_CLAMPED_ELEMENTS, ByteArray) \
131 V(BigUint64ElementsAccessor, BIGUINT64_ELEMENTS, ByteArray) \
132 V(BigInt64ElementsAccessor, BIGINT64_ELEMENTS, ByteArray)
133
134 template <ElementsKind Kind>
135 class ElementsKindTraits {
136 public:
137 using BackingStore = FixedArrayBase;
138 };
139
140 #define ELEMENTS_TRAITS(Class, KindParam, Store) \
141 template <> \
142 class ElementsKindTraits<KindParam> { \
143 public: /* NOLINT */ \
144 static constexpr ElementsKind Kind = KindParam; \
145 using BackingStore = Store; \
146 }; \
147 constexpr ElementsKind ElementsKindTraits<KindParam>::Kind;
148 ELEMENTS_LIST(ELEMENTS_TRAITS)
149 #undef ELEMENTS_TRAITS
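// As a rough sketch, the specialization generated above for
// PACKED_SMI_ELEMENTS looks like:
//
//   template <>
//   class ElementsKindTraits<PACKED_SMI_ELEMENTS> {
//    public:
//     static constexpr ElementsKind Kind = PACKED_SMI_ELEMENTS;
//     using BackingStore = FixedArray;
//   };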
150
151 V8_WARN_UNUSED_RESULT
152 MaybeHandle<Object> ThrowArrayLengthRangeError(Isolate* isolate) {
153 THROW_NEW_ERROR(isolate, NewRangeError(MessageTemplate::kInvalidArrayLength),
154 Object);
155 }
156
157 WriteBarrierMode GetWriteBarrierMode(ElementsKind kind) {
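// Smi and double backing stores never contain heap object pointers, so
// stores into them do not need a write barrier.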
158 if (IsSmiElementsKind(kind)) return SKIP_WRITE_BARRIER;
159 if (IsDoubleElementsKind(kind)) return SKIP_WRITE_BARRIER;
160 return UPDATE_WRITE_BARRIER;
161 }
162
163 // If kCopyToEndAndInitializeToHole is specified as the copy_size to
164 // CopyElements, it copies all elements from the source starting at
165 // source_start to the destination array, padding any remaining
166 // uninitialized elements in the destination array with the hole.
167 constexpr int kCopyToEndAndInitializeToHole = -1;
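//
// For example, a caller in this file that wants "copy everything and
// hole-fill the rest" passes it as the last argument:
//   Subclass::CopyElementsImpl(isolate, *old_elements, src_index, *new_elements,
//                              from_kind, dst_index, packed_size,
//                              kCopyToEndAndInitializeToHole);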
168
169 void CopyObjectToObjectElements(Isolate* isolate, FixedArrayBase from_base,
170 ElementsKind from_kind, uint32_t from_start,
171 FixedArrayBase to_base, ElementsKind to_kind,
172 uint32_t to_start, int raw_copy_size) {
173 ReadOnlyRoots roots(isolate);
174 DCHECK(to_base.map() != roots.fixed_cow_array_map());
175 DisallowHeapAllocation no_allocation;
176 int copy_size = raw_copy_size;
177 if (raw_copy_size < 0) {
178 DCHECK_EQ(kCopyToEndAndInitializeToHole, raw_copy_size);
179 copy_size =
180 std::min(from_base.length() - from_start, to_base.length() - to_start);
181 int start = to_start + copy_size;
182 int length = to_base.length() - start;
183 if (length > 0) {
184 MemsetTagged(FixedArray::cast(to_base).RawFieldOfElementAt(start),
185 roots.the_hole_value(), length);
186 }
187 }
188 DCHECK((copy_size + static_cast<int>(to_start)) <= to_base.length() &&
189 (copy_size + static_cast<int>(from_start)) <= from_base.length());
190 if (copy_size == 0) return;
191 FixedArray from = FixedArray::cast(from_base);
192 FixedArray to = FixedArray::cast(to_base);
193 DCHECK(IsSmiOrObjectElementsKind(from_kind));
194 DCHECK(IsSmiOrObjectElementsKind(to_kind));
195
196 WriteBarrierMode write_barrier_mode =
197 (IsObjectElementsKind(from_kind) && IsObjectElementsKind(to_kind))
198 ? UPDATE_WRITE_BARRIER
199 : SKIP_WRITE_BARRIER;
200 to.CopyElements(isolate, to_start, from, from_start, copy_size,
201 write_barrier_mode);
202 }
203
204 void CopyDictionaryToObjectElements(Isolate* isolate, FixedArrayBase from_base,
205 uint32_t from_start, FixedArrayBase to_base,
206 ElementsKind to_kind, uint32_t to_start,
207 int raw_copy_size) {
208 DisallowHeapAllocation no_allocation;
209 NumberDictionary from = NumberDictionary::cast(from_base);
210 int copy_size = raw_copy_size;
211 if (raw_copy_size < 0) {
212 DCHECK_EQ(kCopyToEndAndInitializeToHole, raw_copy_size);
213 copy_size = from.max_number_key() + 1 - from_start;
214 int start = to_start + copy_size;
215 int length = to_base.length() - start;
216 if (length > 0) {
217 MemsetTagged(FixedArray::cast(to_base).RawFieldOfElementAt(start),
218 ReadOnlyRoots(isolate).the_hole_value(), length);
219 }
220 }
221 DCHECK(to_base != from_base);
222 DCHECK(IsSmiOrObjectElementsKind(to_kind));
223 if (copy_size == 0) return;
224 FixedArray to = FixedArray::cast(to_base);
225 uint32_t to_length = to.length();
226 if (to_start + copy_size > to_length) {
227 copy_size = to_length - to_start;
228 }
229 WriteBarrierMode write_barrier_mode = GetWriteBarrierMode(to_kind);
230 for (int i = 0; i < copy_size; i++) {
231 InternalIndex entry = from.FindEntry(isolate, i + from_start);
232 if (entry.is_found()) {
233 Object value = from.ValueAt(entry);
234 DCHECK(!value.IsTheHole(isolate));
235 to.set(i + to_start, value, write_barrier_mode);
236 } else {
237 to.set_the_hole(isolate, i + to_start);
238 }
239 }
240 }
241
242 // NOTE: this method violates the handlified function signature convention:
243 // raw pointer parameters in the function that allocates.
244 // See ElementsAccessorBase::CopyElements() for details.
245 void CopyDoubleToObjectElements(Isolate* isolate, FixedArrayBase from_base,
246 uint32_t from_start, FixedArrayBase to_base,
247 uint32_t to_start, int raw_copy_size) {
248 int copy_size = raw_copy_size;
249 if (raw_copy_size < 0) {
250 DisallowHeapAllocation no_allocation;
251 DCHECK_EQ(kCopyToEndAndInitializeToHole, raw_copy_size);
252 copy_size =
253 std::min(from_base.length() - from_start, to_base.length() - to_start);
254 // Also initialize the area that will be copied over since HeapNumber
255 // allocation below can cause an incremental marking step, requiring all
256 // existing heap objects to be properly initialized.
257 int start = to_start;
258 int length = to_base.length() - start;
259 if (length > 0) {
260 MemsetTagged(FixedArray::cast(to_base).RawFieldOfElementAt(start),
261 ReadOnlyRoots(isolate).the_hole_value(), length);
262 }
263 }
264
265 DCHECK((copy_size + static_cast<int>(to_start)) <= to_base.length() &&
266 (copy_size + static_cast<int>(from_start)) <= from_base.length());
267 if (copy_size == 0) return;
268
269 // From here on, the code below could actually allocate. Therefore the raw
270 // values are wrapped into handles.
271 Handle<FixedDoubleArray> from(FixedDoubleArray::cast(from_base), isolate);
272 Handle<FixedArray> to(FixedArray::cast(to_base), isolate);
273
274 // Use an outer loop to avoid wasting too much time on creating HandleScopes.
275 // On the other hand, we might overflow a single handle scope depending on
276 // the copy_size.
277 int offset = 0;
278 while (offset < copy_size) {
279 HandleScope scope(isolate);
280 offset += 100;
281 for (int i = offset - 100; i < offset && i < copy_size; ++i) {
282 Handle<Object> value =
283 FixedDoubleArray::get(*from, i + from_start, isolate);
284 to->set(i + to_start, *value, UPDATE_WRITE_BARRIER);
285 }
286 }
287 }
288
289 void CopyDoubleToDoubleElements(FixedArrayBase from_base, uint32_t from_start,
290 FixedArrayBase to_base, uint32_t to_start,
291 int raw_copy_size) {
292 DisallowHeapAllocation no_allocation;
293 int copy_size = raw_copy_size;
294 if (raw_copy_size < 0) {
295 DCHECK_EQ(kCopyToEndAndInitializeToHole, raw_copy_size);
296 copy_size =
297 std::min(from_base.length() - from_start, to_base.length() - to_start);
298 for (int i = to_start + copy_size; i < to_base.length(); ++i) {
299 FixedDoubleArray::cast(to_base).set_the_hole(i);
300 }
301 }
302 DCHECK((copy_size + static_cast<int>(to_start)) <= to_base.length() &&
303 (copy_size + static_cast<int>(from_start)) <= from_base.length());
304 if (copy_size == 0) return;
305 FixedDoubleArray from = FixedDoubleArray::cast(from_base);
306 FixedDoubleArray to = FixedDoubleArray::cast(to_base);
307 Address to_address = to.address() + FixedDoubleArray::kHeaderSize;
308 Address from_address = from.address() + FixedDoubleArray::kHeaderSize;
309 to_address += kDoubleSize * to_start;
310 from_address += kDoubleSize * from_start;
311 #ifdef V8_COMPRESS_POINTERS
312 // TODO(ishell, v8:8875): we use CopyTagged() in order to avoid unaligned
313 // access to double values in the arrays. This will no longer be necessary
314 // once the allocation alignment issue is fixed.
315 int words_per_double = (kDoubleSize / kTaggedSize);
316 CopyTagged(to_address, from_address,
317 static_cast<size_t>(words_per_double * copy_size));
318 #else
319 int words_per_double = (kDoubleSize / kSystemPointerSize);
320 CopyWords(to_address, from_address,
321 static_cast<size_t>(words_per_double * copy_size));
322 #endif
323 }
324
325 void CopySmiToDoubleElements(FixedArrayBase from_base, uint32_t from_start,
326 FixedArrayBase to_base, uint32_t to_start,
327 int raw_copy_size) {
328 DisallowHeapAllocation no_allocation;
329 int copy_size = raw_copy_size;
330 if (raw_copy_size < 0) {
331 DCHECK_EQ(kCopyToEndAndInitializeToHole, raw_copy_size);
332 copy_size = from_base.length() - from_start;
333 for (int i = to_start + copy_size; i < to_base.length(); ++i) {
334 FixedDoubleArray::cast(to_base).set_the_hole(i);
335 }
336 }
337 DCHECK((copy_size + static_cast<int>(to_start)) <= to_base.length() &&
338 (copy_size + static_cast<int>(from_start)) <= from_base.length());
339 if (copy_size == 0) return;
340 FixedArray from = FixedArray::cast(from_base);
341 FixedDoubleArray to = FixedDoubleArray::cast(to_base);
342 Object the_hole = from.GetReadOnlyRoots().the_hole_value();
343 for (uint32_t from_end = from_start + static_cast<uint32_t>(copy_size);
344 from_start < from_end; from_start++, to_start++) {
345 Object hole_or_smi = from.get(from_start);
346 if (hole_or_smi == the_hole) {
347 to.set_the_hole(to_start);
348 } else {
349 to.set(to_start, Smi::ToInt(hole_or_smi));
350 }
351 }
352 }
353
354 void CopyPackedSmiToDoubleElements(FixedArrayBase from_base,
355 uint32_t from_start, FixedArrayBase to_base,
356 uint32_t to_start, int packed_size,
357 int raw_copy_size) {
358 DisallowHeapAllocation no_allocation;
359 int copy_size = raw_copy_size;
360 uint32_t to_end;
361 if (raw_copy_size < 0) {
362 DCHECK_EQ(kCopyToEndAndInitializeToHole, raw_copy_size);
363 copy_size = packed_size - from_start;
364 to_end = to_base.length();
365 for (uint32_t i = to_start + copy_size; i < to_end; ++i) {
366 FixedDoubleArray::cast(to_base).set_the_hole(i);
367 }
368 } else {
369 to_end = to_start + static_cast<uint32_t>(copy_size);
370 }
371 DCHECK(static_cast<int>(to_end) <= to_base.length());
372 DCHECK(packed_size >= 0 && packed_size <= copy_size);
373 DCHECK((copy_size + static_cast<int>(to_start)) <= to_base.length() &&
374 (copy_size + static_cast<int>(from_start)) <= from_base.length());
375 if (copy_size == 0) return;
376 FixedArray from = FixedArray::cast(from_base);
377 FixedDoubleArray to = FixedDoubleArray::cast(to_base);
378 for (uint32_t from_end = from_start + static_cast<uint32_t>(packed_size);
379 from_start < from_end; from_start++, to_start++) {
380 Object smi = from.get(from_start);
381 DCHECK(!smi.IsTheHole());
382 to.set(to_start, Smi::ToInt(smi));
383 }
384 }
385
386 void CopyObjectToDoubleElements(FixedArrayBase from_base, uint32_t from_start,
387 FixedArrayBase to_base, uint32_t to_start,
388 int raw_copy_size) {
389 DisallowHeapAllocation no_allocation;
390 int copy_size = raw_copy_size;
391 if (raw_copy_size < 0) {
392 DCHECK_EQ(kCopyToEndAndInitializeToHole, raw_copy_size);
393 copy_size = from_base.length() - from_start;
394 for (int i = to_start + copy_size; i < to_base.length(); ++i) {
395 FixedDoubleArray::cast(to_base).set_the_hole(i);
396 }
397 }
398 DCHECK((copy_size + static_cast<int>(to_start)) <= to_base.length() &&
399 (copy_size + static_cast<int>(from_start)) <= from_base.length());
400 if (copy_size == 0) return;
401 FixedArray from = FixedArray::cast(from_base);
402 FixedDoubleArray to = FixedDoubleArray::cast(to_base);
403 Object the_hole = from.GetReadOnlyRoots().the_hole_value();
404 for (uint32_t from_end = from_start + copy_size; from_start < from_end;
405 from_start++, to_start++) {
406 Object hole_or_object = from.get(from_start);
407 if (hole_or_object == the_hole) {
408 to.set_the_hole(to_start);
409 } else {
410 to.set(to_start, hole_or_object.Number());
411 }
412 }
413 }
414
415 void CopyDictionaryToDoubleElements(Isolate* isolate, FixedArrayBase from_base,
416 uint32_t from_start, FixedArrayBase to_base,
417 uint32_t to_start, int raw_copy_size) {
418 DisallowHeapAllocation no_allocation;
419 NumberDictionary from = NumberDictionary::cast(from_base);
420 int copy_size = raw_copy_size;
421 if (copy_size < 0) {
422 DCHECK_EQ(kCopyToEndAndInitializeToHole, copy_size);
423 copy_size = from.max_number_key() + 1 - from_start;
424 for (int i = to_start + copy_size; i < to_base.length(); ++i) {
425 FixedDoubleArray::cast(to_base).set_the_hole(i);
426 }
427 }
428 if (copy_size == 0) return;
429 FixedDoubleArray to = FixedDoubleArray::cast(to_base);
430 uint32_t to_length = to.length();
431 if (to_start + copy_size > to_length) {
432 copy_size = to_length - to_start;
433 }
434 for (int i = 0; i < copy_size; i++) {
435 InternalIndex entry = from.FindEntry(isolate, i + from_start);
436 if (entry.is_found()) {
437 to.set(i + to_start, from.ValueAt(entry).Number());
438 } else {
439 to.set_the_hole(i + to_start);
440 }
441 }
442 }
443
444 void SortIndices(Isolate* isolate, Handle<FixedArray> indices,
445 uint32_t sort_size) {
446 if (sort_size == 0) return;
447
448 // Use AtomicSlot wrapper to ensure that std::sort uses atomic load and
449 // store operations that are safe for concurrent marking.
450 AtomicSlot start(indices->GetFirstElementAddress());
451 AtomicSlot end(start + sort_size);
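// The comparator below sorts numeric values in ascending order and moves
// undefined values to the end, e.g. the keys {3, undefined, 1} end up as
// {1, 3, undefined}.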
452 std::sort(start, end, [isolate](Tagged_t elementA, Tagged_t elementB) {
453 #ifdef V8_COMPRESS_POINTERS
454 Object a(DecompressTaggedAny(isolate, elementA));
455 Object b(DecompressTaggedAny(isolate, elementB));
456 #else
457 Object a(elementA);
458 Object b(elementB);
459 #endif
460 if (a.IsSmi() || !a.IsUndefined(isolate)) {
461 if (!b.IsSmi() && b.IsUndefined(isolate)) {
462 return true;
463 }
464 return a.Number() < b.Number();
465 }
466 return !b.IsSmi() && b.IsUndefined(isolate);
467 });
468 isolate->heap()->WriteBarrierForRange(*indices, ObjectSlot(start),
469 ObjectSlot(end));
470 }
471
472 Maybe<bool> IncludesValueSlowPath(Isolate* isolate, Handle<JSObject> receiver,
473 Handle<Object> value, size_t start_from,
474 size_t length) {
475 bool search_for_hole = value->IsUndefined(isolate);
476 for (size_t k = start_from; k < length; ++k) {
477 LookupIterator it(isolate, receiver, k);
478 if (!it.IsFound()) {
479 if (search_for_hole) return Just(true);
480 continue;
481 }
482 Handle<Object> element_k;
483 ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, element_k,
484 Object::GetProperty(&it), Nothing<bool>());
485
486 if (value->SameValueZero(*element_k)) return Just(true);
487 }
488
489 return Just(false);
490 }
491
492 Maybe<int64_t> IndexOfValueSlowPath(Isolate* isolate, Handle<JSObject> receiver,
493 Handle<Object> value, size_t start_from,
494 size_t length) {
495 for (size_t k = start_from; k < length; ++k) {
496 LookupIterator it(isolate, receiver, k);
497 if (!it.IsFound()) {
498 continue;
499 }
500 Handle<Object> element_k;
501 ASSIGN_RETURN_ON_EXCEPTION_VALUE(
502 isolate, element_k, Object::GetProperty(&it), Nothing<int64_t>());
503
504 if (value->StrictEquals(*element_k)) return Just<int64_t>(k);
505 }
506
507 return Just<int64_t>(-1);
508 }
509
510 // The InternalElementsAccessor is a helper class to expose otherwise protected
511 // methods to its subclasses. Namely, we don't want to publicly expose methods
512 // that take an entry (instead of an index) as an argument.
513 class InternalElementsAccessor : public ElementsAccessor {
514 public:
515 InternalIndex GetEntryForIndex(Isolate* isolate, JSObject holder,
516 FixedArrayBase backing_store,
517 size_t index) override = 0;
518
519 PropertyDetails GetDetails(JSObject holder, InternalIndex entry) override = 0;
520 };
521
522 // Base class for element handler implementations. Contains the
523 // common logic for objects with different ElementsKinds.
524 // Subclasses must specialize methods for which the element
525 // implementation differs from the base class implementation.
526 //
527 // This class is intended to be used in the following way:
528 //
529 // class SomeElementsAccessor :
530 // public ElementsAccessorBase<SomeElementsAccessor,
531 // ElementsKindTraits<SOME_ELEMENTS_KIND>> {
532 // ...
533 // }
534 //
535 // This is an example of the Curiously Recurring Template Pattern (see
536 // http://en.wikipedia.org/wiki/Curiously_recurring_template_pattern). We use
537 // CRTP to guarantee aggressive compile time optimizations (i.e. inlining and
538 // specialization of SomeElementsAccessor methods).
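//
// A concrete instantiation from this file is, for example,
//   class DictionaryElementsAccessor
//       : public ElementsAccessorBase<DictionaryElementsAccessor,
//                                     ElementsKindTraits<DICTIONARY_ELEMENTS>>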
539 template <typename Subclass, typename ElementsTraitsParam>
540 class ElementsAccessorBase : public InternalElementsAccessor {
541 public:
542 ElementsAccessorBase() = default;
543 ElementsAccessorBase(const ElementsAccessorBase&) = delete;
544 ElementsAccessorBase& operator=(const ElementsAccessorBase&) = delete;
545
546 using ElementsTraits = ElementsTraitsParam;
547 using BackingStore = typename ElementsTraitsParam::BackingStore;
548
549 static ElementsKind kind() { return ElementsTraits::Kind; }
550
551 static void ValidateContents(JSObject holder, size_t length) {}
552
553 static void ValidateImpl(JSObject holder) {
554 FixedArrayBase fixed_array_base = holder.elements();
555 if (!fixed_array_base.IsHeapObject()) return;
556 // Arrays that have been shifted in place can't be verified.
557 if (fixed_array_base.IsFreeSpaceOrFiller()) return;
558 size_t length = 0;
559 if (holder.IsJSArray()) {
560 Object length_obj = JSArray::cast(holder).length();
561 if (length_obj.IsSmi()) {
562 length = Smi::ToInt(length_obj);
563 }
564 } else if (holder.IsJSTypedArray()) {
565 length = JSTypedArray::cast(holder).length();
566 } else {
567 length = fixed_array_base.length();
568 }
569 Subclass::ValidateContents(holder, length);
570 }
571
572 void Validate(JSObject holder) final {
573 DisallowHeapAllocation no_gc;
574 Subclass::ValidateImpl(holder);
575 }
576
577 bool HasElement(JSObject holder, uint32_t index, FixedArrayBase backing_store,
578 PropertyFilter filter) final {
579 return Subclass::HasElementImpl(holder.GetIsolate(), holder, index,
580 backing_store, filter);
581 }
582
583 static bool HasElementImpl(Isolate* isolate, JSObject holder, size_t index,
584 FixedArrayBase backing_store,
585 PropertyFilter filter = ALL_PROPERTIES) {
586 return Subclass::GetEntryForIndexImpl(isolate, holder, backing_store, index,
587 filter)
588 .is_found();
589 }
590
591 bool HasEntry(JSObject holder, InternalIndex entry) final {
592 return Subclass::HasEntryImpl(holder.GetIsolate(), holder.elements(),
593 entry);
594 }
595
596 static bool HasEntryImpl(Isolate* isolate, FixedArrayBase backing_store,
597 InternalIndex entry) {
598 UNIMPLEMENTED();
599 }
600
601 bool HasAccessors(JSObject holder) final {
602 return Subclass::HasAccessorsImpl(holder, holder.elements());
603 }
604
605 static bool HasAccessorsImpl(JSObject holder, FixedArrayBase backing_store) {
606 return false;
607 }
608
609 Handle<Object> Get(Handle<JSObject> holder, InternalIndex entry) final {
610 return Subclass::GetInternalImpl(holder, entry);
611 }
612
613 static Handle<Object> GetInternalImpl(Handle<JSObject> holder,
614 InternalIndex entry) {
615 return Subclass::GetImpl(holder->GetIsolate(), holder->elements(), entry);
616 }
617
618 static Handle<Object> GetImpl(Isolate* isolate, FixedArrayBase backing_store,
619 InternalIndex entry) {
620 return handle(BackingStore::cast(backing_store).get(entry.as_int()),
621 isolate);
622 }
623
624 void Set(Handle<JSObject> holder, InternalIndex entry, Object value) final {
625 Subclass::SetImpl(holder, entry, value);
626 }
627
628 void Reconfigure(Handle<JSObject> object, Handle<FixedArrayBase> store,
629 InternalIndex entry, Handle<Object> value,
630 PropertyAttributes attributes) final {
631 Subclass::ReconfigureImpl(object, store, entry, value, attributes);
632 }
633
634 static void ReconfigureImpl(Handle<JSObject> object,
635 Handle<FixedArrayBase> store, InternalIndex entry,
636 Handle<Object> value,
637 PropertyAttributes attributes) {
638 UNREACHABLE();
639 }
640
641 void Add(Handle<JSObject> object, uint32_t index, Handle<Object> value,
642 PropertyAttributes attributes, uint32_t new_capacity) final {
643 Subclass::AddImpl(object, index, value, attributes, new_capacity);
644 }
645
646 static void AddImpl(Handle<JSObject> object, uint32_t index,
647 Handle<Object> value, PropertyAttributes attributes,
648 uint32_t new_capacity) {
649 UNREACHABLE();
650 }
651
652 uint32_t Push(Handle<JSArray> receiver, BuiltinArguments* args,
653 uint32_t push_size) final {
654 return Subclass::PushImpl(receiver, args, push_size);
655 }
656
657 static uint32_t PushImpl(Handle<JSArray> receiver, BuiltinArguments* args,
658 uint32_t push_sized) {
659 UNREACHABLE();
660 }
661
662 uint32_t Unshift(Handle<JSArray> receiver, BuiltinArguments* args,
663 uint32_t unshift_size) final {
664 return Subclass::UnshiftImpl(receiver, args, unshift_size);
665 }
666
667 static uint32_t UnshiftImpl(Handle<JSArray> receiver, BuiltinArguments* args,
668 uint32_t unshift_size) {
669 UNREACHABLE();
670 }
671
672 Handle<Object> Pop(Handle<JSArray> receiver) final {
673 return Subclass::PopImpl(receiver);
674 }
675
676 static Handle<Object> PopImpl(Handle<JSArray> receiver) { UNREACHABLE(); }
677
678 Handle<Object> Shift(Handle<JSArray> receiver) final {
679 return Subclass::ShiftImpl(receiver);
680 }
681
682 static Handle<Object> ShiftImpl(Handle<JSArray> receiver) { UNREACHABLE(); }
683
684 void SetLength(Handle<JSArray> array, uint32_t length) final {
685 Subclass::SetLengthImpl(array->GetIsolate(), array, length,
686 handle(array->elements(), array->GetIsolate()));
687 }
688
689 static void SetLengthImpl(Isolate* isolate, Handle<JSArray> array,
690 uint32_t length,
691 Handle<FixedArrayBase> backing_store) {
692 DCHECK(!array->SetLengthWouldNormalize(length));
693 DCHECK(IsFastElementsKind(array->GetElementsKind()));
694 uint32_t old_length = 0;
695 CHECK(array->length().ToArrayIndex(&old_length));
696
697 if (old_length < length) {
698 ElementsKind kind = array->GetElementsKind();
699 if (!IsHoleyElementsKind(kind)) {
700 kind = GetHoleyElementsKind(kind);
701 JSObject::TransitionElementsKind(array, kind);
702 }
703 }
704
705 // Check whether the backing store should be shrunk.
706 uint32_t capacity = backing_store->length();
707 old_length = std::min(old_length, capacity);
708 if (length == 0) {
709 array->initialize_elements();
710 } else if (length <= capacity) {
711 if (IsSmiOrObjectElementsKind(kind())) {
712 JSObject::EnsureWritableFastElements(array);
713 if (array->elements() != *backing_store) {
714 backing_store = handle(array->elements(), isolate);
715 }
716 }
717 if (2 * length + JSObject::kMinAddedElementsCapacity <= capacity) {
718 // If more than half the elements won't be used, trim the array.
719 // Do not trim from short arrays to prevent frequent trimming on
720 // repeated pop operations.
721 // Leave some space to allow for subsequent push operations.
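// E.g. with capacity 300, a pop() that lowers the length from 101 to 100
// trims (300 - 100) / 2 = 100 slots, whereas an explicit length change from
// 300 down to 100 trims all 200 unused slots.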
722 int elements_to_trim = length + 1 == old_length
723 ? (capacity - length) / 2
724 : capacity - length;
725 isolate->heap()->RightTrimFixedArray(*backing_store, elements_to_trim);
726 // Fill the non-trimmed elements with holes.
727 BackingStore::cast(*backing_store)
728 .FillWithHoles(length,
729 std::min(old_length, capacity - elements_to_trim));
730 } else {
731 // Otherwise, fill the unused tail with holes.
732 BackingStore::cast(*backing_store).FillWithHoles(length, old_length);
733 }
734 } else {
735 // Check whether the backing store should be expanded.
736 capacity = std::max(length, JSObject::NewElementsCapacity(capacity));
737 Subclass::GrowCapacityAndConvertImpl(array, capacity);
738 }
739
740 array->set_length(Smi::FromInt(length));
741 JSObject::ValidateElements(*array);
742 }
743
744 size_t NumberOfElements(JSObject receiver) final {
745 return Subclass::NumberOfElementsImpl(receiver, receiver.elements());
746 }
747
748 static uint32_t NumberOfElementsImpl(JSObject receiver,
749 FixedArrayBase backing_store) {
750 UNREACHABLE();
751 }
752
753 static size_t GetMaxIndex(JSObject receiver, FixedArrayBase elements) {
754 if (receiver.IsJSArray()) {
755 DCHECK(JSArray::cast(receiver).length().IsSmi());
756 return static_cast<uint32_t>(
757 Smi::ToInt(JSArray::cast(receiver).length()));
758 }
759 return Subclass::GetCapacityImpl(receiver, elements);
760 }
761
762 static size_t GetMaxNumberOfEntries(JSObject receiver,
763 FixedArrayBase elements) {
764 return Subclass::GetMaxIndex(receiver, elements);
765 }
766
767 static Handle<FixedArrayBase> ConvertElementsWithCapacity(
768 Handle<JSObject> object, Handle<FixedArrayBase> old_elements,
769 ElementsKind from_kind, uint32_t capacity) {
770 return ConvertElementsWithCapacity(object, old_elements, from_kind,
771 capacity, 0, 0);
772 }
773
774 static Handle<FixedArrayBase> ConvertElementsWithCapacity(
775 Handle<JSObject> object, Handle<FixedArrayBase> old_elements,
776 ElementsKind from_kind, uint32_t capacity, uint32_t src_index,
777 uint32_t dst_index) {
778 Isolate* isolate = object->GetIsolate();
779 Handle<FixedArrayBase> new_elements;
780 if (IsDoubleElementsKind(kind())) {
781 new_elements = isolate->factory()->NewFixedDoubleArray(capacity);
782 } else {
783 new_elements = isolate->factory()->NewUninitializedFixedArray(capacity);
784 }
785
786 int packed_size = kPackedSizeNotKnown;
787 if (IsFastPackedElementsKind(from_kind) && object->IsJSArray()) {
788 packed_size = Smi::ToInt(JSArray::cast(*object).length());
789 }
790
791 Subclass::CopyElementsImpl(isolate, *old_elements, src_index, *new_elements,
792 from_kind, dst_index, packed_size,
793 kCopyToEndAndInitializeToHole);
794
795 return new_elements;
796 }
797
798 static void TransitionElementsKindImpl(Handle<JSObject> object,
799 Handle<Map> to_map) {
800 Isolate* isolate = object->GetIsolate();
801 Handle<Map> from_map = handle(object->map(), isolate);
802 ElementsKind from_kind = from_map->elements_kind();
803 ElementsKind to_kind = to_map->elements_kind();
804 if (IsHoleyElementsKind(from_kind)) {
805 to_kind = GetHoleyElementsKind(to_kind);
806 }
807 if (from_kind != to_kind) {
808 // This method should never be called for any other case.
809 DCHECK(IsFastElementsKind(from_kind));
810 DCHECK(IsFastElementsKind(to_kind));
811 DCHECK_NE(TERMINAL_FAST_ELEMENTS_KIND, from_kind);
812
813 Handle<FixedArrayBase> from_elements(object->elements(), isolate);
814 if (object->elements() == ReadOnlyRoots(isolate).empty_fixed_array() ||
815 IsDoubleElementsKind(from_kind) == IsDoubleElementsKind(to_kind)) {
816 // No change is needed to the elements() buffer, the transition
817 // only requires a map change.
818 JSObject::MigrateToMap(isolate, object, to_map);
819 } else {
820 DCHECK(
821 (IsSmiElementsKind(from_kind) && IsDoubleElementsKind(to_kind)) ||
822 (IsDoubleElementsKind(from_kind) && IsObjectElementsKind(to_kind)));
823 uint32_t capacity = static_cast<uint32_t>(object->elements().length());
824 Handle<FixedArrayBase> elements = ConvertElementsWithCapacity(
825 object, from_elements, from_kind, capacity);
826 JSObject::SetMapAndElements(object, to_map, elements);
827 }
828 if (FLAG_trace_elements_transitions) {
829 JSObject::PrintElementsTransition(stdout, object, from_kind,
830 from_elements, to_kind,
831 handle(object->elements(), isolate));
832 }
833 }
834 }
835
836 static void GrowCapacityAndConvertImpl(Handle<JSObject> object,
837 uint32_t capacity) {
838 ElementsKind from_kind = object->GetElementsKind();
839 if (IsSmiOrObjectElementsKind(from_kind)) {
840 // Array optimizations rely on the prototype lookups of Array objects
841 // always returning undefined. If there is a store to the initial
842 // prototype object, make sure all of these optimizations are invalidated.
843 object->GetIsolate()->UpdateNoElementsProtectorOnSetLength(object);
844 }
845 Handle<FixedArrayBase> old_elements(object->elements(),
846 object->GetIsolate());
847 // This method should only be called if there's a reason to update the
848 // elements.
849 DCHECK(IsDoubleElementsKind(from_kind) != IsDoubleElementsKind(kind()) ||
850 IsDictionaryElementsKind(from_kind) ||
851 static_cast<uint32_t>(old_elements->length()) < capacity);
852 Subclass::BasicGrowCapacityAndConvertImpl(object, old_elements, from_kind,
853 kind(), capacity);
854 }
855
856 static void BasicGrowCapacityAndConvertImpl(
857 Handle<JSObject> object, Handle<FixedArrayBase> old_elements,
858 ElementsKind from_kind, ElementsKind to_kind, uint32_t capacity) {
859 Handle<FixedArrayBase> elements =
860 ConvertElementsWithCapacity(object, old_elements, from_kind, capacity);
861
862 if (IsHoleyElementsKind(from_kind)) {
863 to_kind = GetHoleyElementsKind(to_kind);
864 }
865 Handle<Map> new_map = JSObject::GetElementsTransitionMap(object, to_kind);
866 JSObject::SetMapAndElements(object, new_map, elements);
867
868 // Transition through the allocation site as well if present.
869 JSObject::UpdateAllocationSite(object, to_kind);
870
871 if (FLAG_trace_elements_transitions) {
872 JSObject::PrintElementsTransition(stdout, object, from_kind, old_elements,
873 to_kind, elements);
874 }
875 }
876
877 void TransitionElementsKind(Handle<JSObject> object, Handle<Map> map) final {
878 Subclass::TransitionElementsKindImpl(object, map);
879 }
880
881 void GrowCapacityAndConvert(Handle<JSObject> object,
882 uint32_t capacity) final {
883 Subclass::GrowCapacityAndConvertImpl(object, capacity);
884 }
885
886 bool GrowCapacity(Handle<JSObject> object, uint32_t index) final {
887 // This function is intended to be called from optimized code. We don't
888 // want to trigger lazy deopts there, so refuse to handle cases that would.
889 if (object->map().is_prototype_map() ||
890 object->WouldConvertToSlowElements(index)) {
891 return false;
892 }
893 Handle<FixedArrayBase> old_elements(object->elements(),
894 object->GetIsolate());
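// NewElementsCapacity() picks a capacity comfortably above index + 1
// (roughly 1.5x plus a small constant) so that subsequent out-of-bounds
// stores do not immediately have to grow the backing store again.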
895 uint32_t new_capacity = JSObject::NewElementsCapacity(index + 1);
896 DCHECK(static_cast<uint32_t>(old_elements->length()) < new_capacity);
897 Handle<FixedArrayBase> elements =
898 ConvertElementsWithCapacity(object, old_elements, kind(), new_capacity);
899
900 DCHECK_EQ(object->GetElementsKind(), kind());
901 // Transition through the allocation site as well if present.
902 if (JSObject::UpdateAllocationSite<AllocationSiteUpdateMode::kCheckOnly>(
903 object, kind())) {
904 return false;
905 }
906
907 object->set_elements(*elements);
908 return true;
909 }
910
911 void Delete(Handle<JSObject> obj, InternalIndex entry) final {
912 Subclass::DeleteImpl(obj, entry);
913 }
914
915 static void CopyElementsImpl(Isolate* isolate, FixedArrayBase from,
916 uint32_t from_start, FixedArrayBase to,
917 ElementsKind from_kind, uint32_t to_start,
918 int packed_size, int copy_size) {
919 UNREACHABLE();
920 }
921
922 void CopyElements(JSObject from_holder, uint32_t from_start,
923 ElementsKind from_kind, Handle<FixedArrayBase> to,
924 uint32_t to_start, int copy_size) final {
925 int packed_size = kPackedSizeNotKnown;
926 bool is_packed =
927 IsFastPackedElementsKind(from_kind) && from_holder.IsJSArray();
928 if (is_packed) {
929 packed_size = Smi::ToInt(JSArray::cast(from_holder).length());
930 if (copy_size >= 0 && packed_size > copy_size) {
931 packed_size = copy_size;
932 }
933 }
934 FixedArrayBase from = from_holder.elements();
935 // NOTE: the Subclass::CopyElementsImpl() methods
936 // violate the handlified function signature convention:
937 // raw pointer parameters in the function that allocates. This is done
938 // intentionally to avoid ArrayConcat() builtin performance degradation.
939 //
940 // Details: The idea is that allocations actually happen only in case of
941 // copying from object with fast double elements to object with object
942 // elements. In all the other cases there are no allocations performed and
943 // handle creation causes noticeable performance degradation of the builtin.
944 Subclass::CopyElementsImpl(from_holder.GetIsolate(), from, from_start, *to,
945 from_kind, to_start, packed_size, copy_size);
946 }
947
948 void CopyElements(Isolate* isolate, Handle<FixedArrayBase> source,
949 ElementsKind source_kind,
950 Handle<FixedArrayBase> destination, int size) override {
951 Subclass::CopyElementsImpl(isolate, *source, 0, *destination, source_kind,
952 0, kPackedSizeNotKnown, size);
953 }
954
955 void CopyTypedArrayElementsSlice(JSTypedArray source,
956 JSTypedArray destination, size_t start,
957 size_t end) override {
958 Subclass::CopyTypedArrayElementsSliceImpl(source, destination, start, end);
959 }
960
961 static void CopyTypedArrayElementsSliceImpl(JSTypedArray source,
962 JSTypedArray destination,
963 size_t start, size_t end) {
964 UNREACHABLE();
965 }
966
967 Object CopyElements(Handle<Object> source, Handle<JSObject> destination,
968 size_t length, size_t offset) final {
969 return Subclass::CopyElementsHandleImpl(source, destination, length,
970 offset);
971 }
972
973 static Object CopyElementsHandleImpl(Handle<Object> source,
974 Handle<JSObject> destination,
975 size_t length, size_t offset) {
976 UNREACHABLE();
977 }
978
979 Handle<NumberDictionary> Normalize(Handle<JSObject> object) final {
980 return Subclass::NormalizeImpl(
981 object, handle(object->elements(), object->GetIsolate()));
982 }
983
984 static Handle<NumberDictionary> NormalizeImpl(
985 Handle<JSObject> object, Handle<FixedArrayBase> elements) {
986 UNREACHABLE();
987 }
988
989 Maybe<bool> CollectValuesOrEntries(Isolate* isolate, Handle<JSObject> object,
990 Handle<FixedArray> values_or_entries,
991 bool get_entries, int* nof_items,
992 PropertyFilter filter) override {
993 return Subclass::CollectValuesOrEntriesImpl(
994 isolate, object, values_or_entries, get_entries, nof_items, filter);
995 }
996
997 static Maybe<bool> CollectValuesOrEntriesImpl(
998 Isolate* isolate, Handle<JSObject> object,
999 Handle<FixedArray> values_or_entries, bool get_entries, int* nof_items,
1000 PropertyFilter filter) {
1001 DCHECK_EQ(*nof_items, 0);
1002 KeyAccumulator accumulator(isolate, KeyCollectionMode::kOwnOnly,
1003 ALL_PROPERTIES);
1004 RETURN_NOTHING_IF_NOT_SUCCESSFUL(Subclass::CollectElementIndicesImpl(
1005 object, handle(object->elements(), isolate), &accumulator));
1006 Handle<FixedArray> keys = accumulator.GetKeys();
1007
1008 int count = 0;
1009 int i = 0;
1010 ElementsKind original_elements_kind = object->GetElementsKind();
1011
1012 for (; i < keys->length(); ++i) {
1013 Handle<Object> key(keys->get(i), isolate);
1014 uint32_t index;
1015 if (!key->ToUint32(&index)) continue;
1016
1017 DCHECK_EQ(object->GetElementsKind(), original_elements_kind);
1018 InternalIndex entry = Subclass::GetEntryForIndexImpl(
1019 isolate, *object, object->elements(), index, filter);
1020 if (entry.is_not_found()) continue;
1021 PropertyDetails details = Subclass::GetDetailsImpl(*object, entry);
1022
1023 Handle<Object> value;
1024 if (details.kind() == kData) {
1025 value = Subclass::GetInternalImpl(object, entry);
1026 } else {
1027 // This might modify the elements and/or change the elements kind.
1028 LookupIterator it(isolate, object, index, LookupIterator::OWN);
1029 ASSIGN_RETURN_ON_EXCEPTION_VALUE(
1030 isolate, value, Object::GetProperty(&it), Nothing<bool>());
1031 }
1032 if (get_entries) value = MakeEntryPair(isolate, index, value);
1033 values_or_entries->set(count++, *value);
1034 if (object->GetElementsKind() != original_elements_kind) break;
1035 }
1036
1037 // Slow path caused by changes in elements kind during iteration.
1038 for (; i < keys->length(); i++) {
1039 Handle<Object> key(keys->get(i), isolate);
1040 uint32_t index;
1041 if (!key->ToUint32(&index)) continue;
1042
1043 if (filter & ONLY_ENUMERABLE) {
1044 InternalElementsAccessor* accessor =
1045 reinterpret_cast<InternalElementsAccessor*>(
1046 object->GetElementsAccessor());
1047 InternalIndex entry = accessor->GetEntryForIndex(
1048 isolate, *object, object->elements(), index);
1049 if (entry.is_not_found()) continue;
1050 PropertyDetails details = accessor->GetDetails(*object, entry);
1051 if (!details.IsEnumerable()) continue;
1052 }
1053
1054 Handle<Object> value;
1055 LookupIterator it(isolate, object, index, LookupIterator::OWN);
1056 ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, value, Object::GetProperty(&it),
1057 Nothing<bool>());
1058
1059 if (get_entries) value = MakeEntryPair(isolate, index, value);
1060 values_or_entries->set(count++, *value);
1061 }
1062
1063 *nof_items = count;
1064 return Just(true);
1065 }
1066
1067 V8_WARN_UNUSED_RESULT ExceptionStatus CollectElementIndices(
1068 Handle<JSObject> object, Handle<FixedArrayBase> backing_store,
1069 KeyAccumulator* keys) final {
1070 if (keys->filter() & ONLY_ALL_CAN_READ) return ExceptionStatus::kSuccess;
1071 return Subclass::CollectElementIndicesImpl(object, backing_store, keys);
1072 }
1073
1074 V8_WARN_UNUSED_RESULT static ExceptionStatus CollectElementIndicesImpl(
1075 Handle<JSObject> object, Handle<FixedArrayBase> backing_store,
1076 KeyAccumulator* keys) {
1077 DCHECK_NE(DICTIONARY_ELEMENTS, kind());
1078 // Non-dictionary elements can't have all-can-read accessors.
1079 size_t length = Subclass::GetMaxIndex(*object, *backing_store);
1080 PropertyFilter filter = keys->filter();
1081 Isolate* isolate = keys->isolate();
1082 Factory* factory = isolate->factory();
1083 for (size_t i = 0; i < length; i++) {
1084 if (Subclass::HasElementImpl(isolate, *object, i, *backing_store,
1085 filter)) {
1086 RETURN_FAILURE_IF_NOT_SUCCESSFUL(
1087 keys->AddKey(factory->NewNumberFromSize(i)));
1088 }
1089 }
1090 return ExceptionStatus::kSuccess;
1091 }
1092
1093 static Handle<FixedArray> DirectCollectElementIndicesImpl(
1094 Isolate* isolate, Handle<JSObject> object,
1095 Handle<FixedArrayBase> backing_store, GetKeysConversion convert,
1096 PropertyFilter filter, Handle<FixedArray> list, uint32_t* nof_indices,
1097 uint32_t insertion_index = 0) {
1098 size_t length = Subclass::GetMaxIndex(*object, *backing_store);
1099 uint32_t const kMaxStringTableEntries =
1100 isolate->heap()->MaxNumberToStringCacheSize();
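// Only small indices (below the number-to-string cache size) are converted
// through the cache; larger indices skip it to avoid churning the cache with
// one-off conversions.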
1101 for (size_t i = 0; i < length; i++) {
1102 if (Subclass::HasElementImpl(isolate, *object, i, *backing_store,
1103 filter)) {
1104 if (convert == GetKeysConversion::kConvertToString) {
1105 bool use_cache = i < kMaxStringTableEntries;
1106 Handle<String> index_string =
1107 isolate->factory()->SizeToString(i, use_cache);
1108 list->set(insertion_index, *index_string);
1109 } else {
1110 Handle<Object> number = isolate->factory()->NewNumberFromSize(i);
1111 list->set(insertion_index, *number);
1112 }
1113 insertion_index++;
1114 }
1115 }
1116 *nof_indices = insertion_index;
1117 return list;
1118 }
1119
1120 MaybeHandle<FixedArray> PrependElementIndices(
1121 Handle<JSObject> object, Handle<FixedArrayBase> backing_store,
1122 Handle<FixedArray> keys, GetKeysConversion convert,
1123 PropertyFilter filter) final {
1124 return Subclass::PrependElementIndicesImpl(object, backing_store, keys,
1125 convert, filter);
1126 }
1127
1128 static MaybeHandle<FixedArray> PrependElementIndicesImpl(
1129 Handle<JSObject> object, Handle<FixedArrayBase> backing_store,
1130 Handle<FixedArray> keys, GetKeysConversion convert,
1131 PropertyFilter filter) {
1132 Isolate* isolate = object->GetIsolate();
1133 uint32_t nof_property_keys = keys->length();
1134 size_t initial_list_length =
1135 Subclass::GetMaxNumberOfEntries(*object, *backing_store);
1136
1137 if (initial_list_length > FixedArray::kMaxLength - nof_property_keys) {
1138 return isolate->Throw<FixedArray>(isolate->factory()->NewRangeError(
1139 MessageTemplate::kInvalidArrayLength));
1140 }
1141 initial_list_length += nof_property_keys;
1142
1143 // Collect the element indices into a new list.
1144 DCHECK_LE(initial_list_length, std::numeric_limits<int>::max());
1145 MaybeHandle<FixedArray> raw_array = isolate->factory()->TryNewFixedArray(
1146 static_cast<int>(initial_list_length));
1147 Handle<FixedArray> combined_keys;
1148
1149 // If we have a holey backing store, try to precisely estimate the backing
1150 // store size as a last emergency measure if we cannot allocate the big
1151 // array.
1152 if (!raw_array.ToHandle(&combined_keys)) {
1153 if (IsHoleyOrDictionaryElementsKind(kind())) {
1154 // If we overestimate the result list size we might end up in the
1155 // large-object space which doesn't free memory on shrinking the list.
1156 // Hence we try to estimate the final size for holey backing stores more
1157 // precisely here.
1158 initial_list_length =
1159 Subclass::NumberOfElementsImpl(*object, *backing_store);
1160 initial_list_length += nof_property_keys;
1161 }
1162 DCHECK_LE(initial_list_length, std::numeric_limits<int>::max());
1163 combined_keys = isolate->factory()->NewFixedArray(
1164 static_cast<int>(initial_list_length));
1165 }
1166
1167 uint32_t nof_indices = 0;
1168 bool needs_sorting = IsDictionaryElementsKind(kind()) ||
1169 IsSloppyArgumentsElementsKind(kind());
1170 combined_keys = Subclass::DirectCollectElementIndicesImpl(
1171 isolate, object, backing_store,
1172 needs_sorting ? GetKeysConversion::kKeepNumbers : convert, filter,
1173 combined_keys, &nof_indices);
1174
1175 if (needs_sorting) {
1176 SortIndices(isolate, combined_keys, nof_indices);
1177 // Indices from dictionary elements should only be converted after
1178 // sorting.
1179 if (convert == GetKeysConversion::kConvertToString) {
1180 for (uint32_t i = 0; i < nof_indices; i++) {
1181 Handle<Object> index_string = isolate->factory()->Uint32ToString(
1182 combined_keys->get(i).Number());
1183 combined_keys->set(i, *index_string);
1184 }
1185 }
1186 }
1187
1188 // Copy over the passed-in property keys.
1189 CopyObjectToObjectElements(isolate, *keys, PACKED_ELEMENTS, 0,
1190 *combined_keys, PACKED_ELEMENTS, nof_indices,
1191 nof_property_keys);
1192
1193 // For holey elements and arguments we might have to shrink the collected
1194 // keys since the estimates might be off.
1195 if (IsHoleyOrDictionaryElementsKind(kind()) ||
1196 IsSloppyArgumentsElementsKind(kind())) {
1197 // Shrink combined_keys to the final size.
1198 int final_size = nof_indices + nof_property_keys;
1199 DCHECK_LE(final_size, combined_keys->length());
1200 return FixedArray::ShrinkOrEmpty(isolate, combined_keys, final_size);
1201 }
1202
1203 return combined_keys;
1204 }
1205
1206 V8_WARN_UNUSED_RESULT ExceptionStatus AddElementsToKeyAccumulator(
1207 Handle<JSObject> receiver, KeyAccumulator* accumulator,
1208 AddKeyConversion convert) final {
1209 return Subclass::AddElementsToKeyAccumulatorImpl(receiver, accumulator,
1210 convert);
1211 }
1212
1213 static uint32_t GetCapacityImpl(JSObject holder,
1214 FixedArrayBase backing_store) {
1215 return backing_store.length();
1216 }
1217
1218 size_t GetCapacity(JSObject holder, FixedArrayBase backing_store) final {
1219 return Subclass::GetCapacityImpl(holder, backing_store);
1220 }
1221
1222 static Object FillImpl(Handle<JSObject> receiver, Handle<Object> obj_value,
1223 size_t start, size_t end) {
1224 UNREACHABLE();
1225 }
1226
1227 Object Fill(Handle<JSObject> receiver, Handle<Object> obj_value, size_t start,
1228 size_t end) override {
1229 return Subclass::FillImpl(receiver, obj_value, start, end);
1230 }
1231
1232 static Maybe<bool> IncludesValueImpl(Isolate* isolate,
1233 Handle<JSObject> receiver,
1234 Handle<Object> value, size_t start_from,
1235 size_t length) {
1236 return IncludesValueSlowPath(isolate, receiver, value, start_from, length);
1237 }
1238
1239 Maybe<bool> IncludesValue(Isolate* isolate, Handle<JSObject> receiver,
1240 Handle<Object> value, size_t start_from,
1241 size_t length) final {
1242 return Subclass::IncludesValueImpl(isolate, receiver, value, start_from,
1243 length);
1244 }
1245
1246 static Maybe<int64_t> IndexOfValueImpl(Isolate* isolate,
1247 Handle<JSObject> receiver,
1248 Handle<Object> value,
1249 size_t start_from, size_t length) {
1250 return IndexOfValueSlowPath(isolate, receiver, value, start_from, length);
1251 }
1252
1253 Maybe<int64_t> IndexOfValue(Isolate* isolate, Handle<JSObject> receiver,
1254 Handle<Object> value, size_t start_from,
1255 size_t length) final {
1256 return Subclass::IndexOfValueImpl(isolate, receiver, value, start_from,
1257 length);
1258 }
1259
1260 static Maybe<int64_t> LastIndexOfValueImpl(Handle<JSObject> receiver,
1261 Handle<Object> value,
1262 size_t start_from) {
1263 UNREACHABLE();
1264 }
1265
1266   Maybe<int64_t> LastIndexOfValue(Handle<JSObject> receiver,
1267 Handle<Object> value,
1268 size_t start_from) final {
1269 return Subclass::LastIndexOfValueImpl(receiver, value, start_from);
1270 }
1271
1272   static void ReverseImpl(JSObject receiver) { UNREACHABLE(); }
1273
1274   void Reverse(JSObject receiver) final { Subclass::ReverseImpl(receiver); }
1275
1276   static InternalIndex GetEntryForIndexImpl(Isolate* isolate, JSObject holder,
1277 FixedArrayBase backing_store,
1278 size_t index,
1279 PropertyFilter filter) {
1280 DCHECK(IsFastElementsKind(kind()) ||
1281 IsAnyNonextensibleElementsKind(kind()));
1282 size_t length = Subclass::GetMaxIndex(holder, backing_store);
1283 if (IsHoleyElementsKindForRead(kind())) {
1284 DCHECK_IMPLIES(
1285 index < length,
1286 index <= static_cast<size_t>(std::numeric_limits<int>::max()));
1287 return index < length &&
1288 !BackingStore::cast(backing_store)
1289 .is_the_hole(isolate, static_cast<int>(index))
1290 ? InternalIndex(index)
1291 : InternalIndex::NotFound();
1292 } else {
1293 return index < length ? InternalIndex(index) : InternalIndex::NotFound();
1294 }
1295 }
1296
1297   InternalIndex GetEntryForIndex(Isolate* isolate, JSObject holder,
1298 FixedArrayBase backing_store,
1299 size_t index) final {
1300 return Subclass::GetEntryForIndexImpl(isolate, holder, backing_store, index,
1301 ALL_PROPERTIES);
1302 }
1303
1304   static PropertyDetails GetDetailsImpl(FixedArrayBase backing_store,
1305 InternalIndex entry) {
1306 return PropertyDetails(kData, NONE, PropertyCellType::kNoCell);
1307 }
1308
1309   static PropertyDetails GetDetailsImpl(JSObject holder, InternalIndex entry) {
1310 return PropertyDetails(kData, NONE, PropertyCellType::kNoCell);
1311 }
1312
1313   PropertyDetails GetDetails(JSObject holder, InternalIndex entry) final {
1314 return Subclass::GetDetailsImpl(holder, entry);
1315 }
1316
1317   Handle<FixedArray> CreateListFromArrayLike(Isolate* isolate,
1318 Handle<JSObject> object,
1319 uint32_t length) final {
1320 return Subclass::CreateListFromArrayLikeImpl(isolate, object, length);
1321 }
1322
1323   static Handle<FixedArray> CreateListFromArrayLikeImpl(Isolate* isolate,
1324 Handle<JSObject> object,
1325 uint32_t length) {
1326 UNREACHABLE();
1327 }
1328 };
1329
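// Handles DICTIONARY_ELEMENTS, where the backing store is a NumberDictionary
// keyed by element index rather than a flat array.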
1330 class DictionaryElementsAccessor
1331 : public ElementsAccessorBase<DictionaryElementsAccessor,
1332 ElementsKindTraits<DICTIONARY_ELEMENTS>> {
1333 public:
1334   static uint32_t GetMaxIndex(JSObject receiver, FixedArrayBase elements) {
1335 // We cannot properly estimate this for dictionaries.
1336 UNREACHABLE();
1337 }
1338
1339   static uint32_t GetMaxNumberOfEntries(JSObject receiver,
1340 FixedArrayBase backing_store) {
1341 return NumberOfElementsImpl(receiver, backing_store);
1342 }
1343
1344   static uint32_t NumberOfElementsImpl(JSObject receiver,
1345 FixedArrayBase backing_store) {
1346 NumberDictionary dict = NumberDictionary::cast(backing_store);
1347 return dict.NumberOfElements();
1348 }
1349
1350   static void SetLengthImpl(Isolate* isolate, Handle<JSArray> array,
1351 uint32_t length,
1352 Handle<FixedArrayBase> backing_store) {
1353 Handle<NumberDictionary> dict =
1354 Handle<NumberDictionary>::cast(backing_store);
1355 uint32_t old_length = 0;
1356 CHECK(array->length().ToArrayLength(&old_length));
1357 {
1358 DisallowHeapAllocation no_gc;
1359 ReadOnlyRoots roots(isolate);
1360 if (length < old_length) {
1361 if (dict->requires_slow_elements()) {
1362 // Find last non-deletable element in range of elements to be
1363 // deleted and adjust range accordingly.
1364 for (InternalIndex entry : dict->IterateEntries()) {
1365 Object index = dict->KeyAt(isolate, entry);
1366 if (dict->IsKey(roots, index)) {
1367 uint32_t number = static_cast<uint32_t>(index.Number());
1368 if (length <= number && number < old_length) {
1369 PropertyDetails details = dict->DetailsAt(entry);
1370 if (!details.IsConfigurable()) length = number + 1;
1371 }
1372 }
1373 }
1374 }
1375
1376 if (length == 0) {
1377 // Flush the backing store.
1378 array->initialize_elements();
1379 } else {
1380 // Remove elements that should be deleted.
1381 int removed_entries = 0;
1382 for (InternalIndex entry : dict->IterateEntries()) {
1383 Object index = dict->KeyAt(isolate, entry);
1384 if (dict->IsKey(roots, index)) {
1385 uint32_t number = static_cast<uint32_t>(index.Number());
1386 if (length <= number && number < old_length) {
1387 dict->ClearEntry(entry);
1388 removed_entries++;
1389 }
1390 }
1391 }
1392
1393 if (removed_entries > 0) {
1394 // Update the number of elements.
1395 dict->ElementsRemoved(removed_entries);
1396 }
1397 }
1398 }
1399 }
1400
1401 Handle<Object> length_obj = isolate->factory()->NewNumberFromUint(length);
1402 array->set_length(*length_obj);
1403 }
1404
1405   static void CopyElementsImpl(Isolate* isolate, FixedArrayBase from,
1406 uint32_t from_start, FixedArrayBase to,
1407 ElementsKind from_kind, uint32_t to_start,
1408 int packed_size, int copy_size) {
1409 UNREACHABLE();
1410 }
1411
1412   static void DeleteImpl(Handle<JSObject> obj, InternalIndex entry) {
1413 Handle<NumberDictionary> dict(NumberDictionary::cast(obj->elements()),
1414 obj->GetIsolate());
1415 dict = NumberDictionary::DeleteEntry(obj->GetIsolate(), dict, entry);
1416 obj->set_elements(*dict);
1417 }
1418
1419   static bool HasAccessorsImpl(JSObject holder, FixedArrayBase backing_store) {
1420 DisallowHeapAllocation no_gc;
1421 NumberDictionary dict = NumberDictionary::cast(backing_store);
1422 if (!dict.requires_slow_elements()) return false;
1423 IsolateRoot isolate = GetIsolateForPtrCompr(holder);
1424 ReadOnlyRoots roots = holder.GetReadOnlyRoots(isolate);
1425 for (InternalIndex i : dict.IterateEntries()) {
1426 Object key = dict.KeyAt(isolate, i);
1427 if (!dict.IsKey(roots, key)) continue;
1428 PropertyDetails details = dict.DetailsAt(i);
1429 if (details.kind() == kAccessor) return true;
1430 }
1431 return false;
1432 }
1433
1434   static Object GetRaw(FixedArrayBase store, InternalIndex entry) {
1435 NumberDictionary backing_store = NumberDictionary::cast(store);
1436 return backing_store.ValueAt(entry);
1437 }
1438
1439   static Handle<Object> GetImpl(Isolate* isolate, FixedArrayBase backing_store,
1440 InternalIndex entry) {
1441 return handle(GetRaw(backing_store, entry), isolate);
1442 }
1443
1444   static inline void SetImpl(Handle<JSObject> holder, InternalIndex entry,
1445 Object value) {
1446 SetImpl(holder->elements(), entry, value);
1447 }
1448
1449   static inline void SetImpl(FixedArrayBase backing_store, InternalIndex entry,
1450 Object value) {
1451 NumberDictionary::cast(backing_store).ValueAtPut(entry, value);
1452 }
1453
1454   static void ReconfigureImpl(Handle<JSObject> object,
1455 Handle<FixedArrayBase> store, InternalIndex entry,
1456 Handle<Object> value,
1457 PropertyAttributes attributes) {
1458 NumberDictionary dictionary = NumberDictionary::cast(*store);
1459 if (attributes != NONE) object->RequireSlowElements(dictionary);
1460 dictionary.ValueAtPut(entry, *value);
1461 PropertyDetails details = dictionary.DetailsAt(entry);
1462 details = PropertyDetails(kData, attributes, PropertyCellType::kNoCell,
1463 details.dictionary_index());
1464
1465 dictionary.DetailsAtPut(entry, details);
1466 }
1467
1468   static void AddImpl(Handle<JSObject> object, uint32_t index,
1469 Handle<Object> value, PropertyAttributes attributes,
1470 uint32_t new_capacity) {
1471 PropertyDetails details(kData, attributes, PropertyCellType::kNoCell);
1472 Handle<NumberDictionary> dictionary =
1473 object->HasFastElements() || object->HasFastStringWrapperElements()
1474 ? JSObject::NormalizeElements(object)
1475 : handle(NumberDictionary::cast(object->elements()),
1476 object->GetIsolate());
1477 Handle<NumberDictionary> new_dictionary = NumberDictionary::Add(
1478 object->GetIsolate(), dictionary, index, value, details);
1479 new_dictionary->UpdateMaxNumberKey(index, object);
1480 if (attributes != NONE) object->RequireSlowElements(*new_dictionary);
1481 if (dictionary.is_identical_to(new_dictionary)) return;
1482 object->set_elements(*new_dictionary);
1483 }
1484
1485   static bool HasEntryImpl(Isolate* isolate, FixedArrayBase store,
1486 InternalIndex entry) {
1487 DisallowHeapAllocation no_gc;
1488 NumberDictionary dict = NumberDictionary::cast(store);
1489 Object index = dict.KeyAt(isolate, entry);
1490 return !index.IsTheHole(isolate);
1491 }
1492
1493   static InternalIndex GetEntryForIndexImpl(Isolate* isolate, JSObject holder,
1494 FixedArrayBase store, size_t index,
1495 PropertyFilter filter) {
1496 DisallowHeapAllocation no_gc;
1497 NumberDictionary dictionary = NumberDictionary::cast(store);
1498 DCHECK_LE(index, std::numeric_limits<uint32_t>::max());
1499 InternalIndex entry =
1500 dictionary.FindEntry(isolate, static_cast<uint32_t>(index));
1501 if (entry.is_not_found()) return entry;
1502
1503 if (filter != ALL_PROPERTIES) {
1504 PropertyDetails details = dictionary.DetailsAt(entry);
1505 PropertyAttributes attr = details.attributes();
1506 if ((attr & filter) != 0) return InternalIndex::NotFound();
1507 }
1508 return entry;
1509 }
1510
1511   static PropertyDetails GetDetailsImpl(JSObject holder, InternalIndex entry) {
1512 return GetDetailsImpl(holder.elements(), entry);
1513 }
1514
1515   static PropertyDetails GetDetailsImpl(FixedArrayBase backing_store,
1516 InternalIndex entry) {
1517 return NumberDictionary::cast(backing_store).DetailsAt(entry);
1518 }
1519
1520   static uint32_t FilterKey(Handle<NumberDictionary> dictionary,
1521 InternalIndex entry, Object raw_key,
1522 PropertyFilter filter) {
1523 DCHECK(raw_key.IsNumber());
1524 DCHECK_LE(raw_key.Number(), kMaxUInt32);
1525 PropertyDetails details = dictionary->DetailsAt(entry);
1526 PropertyAttributes attr = details.attributes();
1527 if ((attr & filter) != 0) return kMaxUInt32;
1528 return static_cast<uint32_t>(raw_key.Number());
1529 }
1530
1531   static uint32_t GetKeyForEntryImpl(Isolate* isolate,
1532 Handle<NumberDictionary> dictionary,
1533 InternalIndex entry,
1534 PropertyFilter filter) {
1535 DisallowHeapAllocation no_gc;
1536 Object raw_key = dictionary->KeyAt(isolate, entry);
1537 if (!dictionary->IsKey(ReadOnlyRoots(isolate), raw_key)) return kMaxUInt32;
1538 return FilterKey(dictionary, entry, raw_key, filter);
1539 }
1540
1541   V8_WARN_UNUSED_RESULT static ExceptionStatus CollectElementIndicesImpl(
1542 Handle<JSObject> object, Handle<FixedArrayBase> backing_store,
1543 KeyAccumulator* keys) {
1544 if (keys->filter() & SKIP_STRINGS) return ExceptionStatus::kSuccess;
1545 Isolate* isolate = keys->isolate();
1546 Handle<NumberDictionary> dictionary =
1547 Handle<NumberDictionary>::cast(backing_store);
1548 Handle<FixedArray> elements = isolate->factory()->NewFixedArray(
1549 GetMaxNumberOfEntries(*object, *backing_store));
1550 int insertion_index = 0;
1551 PropertyFilter filter = keys->filter();
1552 ReadOnlyRoots roots(isolate);
1553 for (InternalIndex i : dictionary->IterateEntries()) {
1554 AllowHeapAllocation allow_gc;
1555 Object raw_key = dictionary->KeyAt(isolate, i);
1556 if (!dictionary->IsKey(roots, raw_key)) continue;
1557 uint32_t key = FilterKey(dictionary, i, raw_key, filter);
1558 if (key == kMaxUInt32) {
1559 // This might allocate, but {raw_key} is not used afterwards.
1560 keys->AddShadowingKey(raw_key, &allow_gc);
1561 continue;
1562 }
1563 elements->set(insertion_index, raw_key);
1564 insertion_index++;
1565 }
1566 SortIndices(isolate, elements, insertion_index);
1567 for (int i = 0; i < insertion_index; i++) {
1568 RETURN_FAILURE_IF_NOT_SUCCESSFUL(keys->AddKey(elements->get(i)));
1569 }
1570 return ExceptionStatus::kSuccess;
1571 }
1572
1573   static Handle<FixedArray> DirectCollectElementIndicesImpl(
1574 Isolate* isolate, Handle<JSObject> object,
1575 Handle<FixedArrayBase> backing_store, GetKeysConversion convert,
1576 PropertyFilter filter, Handle<FixedArray> list, uint32_t* nof_indices,
1577 uint32_t insertion_index = 0) {
1578 if (filter & SKIP_STRINGS) return list;
1579 if (filter & ONLY_ALL_CAN_READ) return list;
1580
1581 Handle<NumberDictionary> dictionary =
1582 Handle<NumberDictionary>::cast(backing_store);
1583 for (InternalIndex i : dictionary->IterateEntries()) {
1584 uint32_t key = GetKeyForEntryImpl(isolate, dictionary, i, filter);
1585 if (key == kMaxUInt32) continue;
1586 Handle<Object> index = isolate->factory()->NewNumberFromUint(key);
1587 list->set(insertion_index, *index);
1588 insertion_index++;
1589 }
1590 *nof_indices = insertion_index;
1591 return list;
1592 }
1593
1594   V8_WARN_UNUSED_RESULT static ExceptionStatus AddElementsToKeyAccumulatorImpl(
1595 Handle<JSObject> receiver, KeyAccumulator* accumulator,
1596 AddKeyConversion convert) {
1597 Isolate* isolate = accumulator->isolate();
1598 Handle<NumberDictionary> dictionary(
1599 NumberDictionary::cast(receiver->elements()), isolate);
1600 ReadOnlyRoots roots(isolate);
1601 for (InternalIndex i : dictionary->IterateEntries()) {
1602 Object k = dictionary->KeyAt(isolate, i);
1603 if (!dictionary->IsKey(roots, k)) continue;
1604 Object value = dictionary->ValueAt(isolate, i);
1605 DCHECK(!value.IsTheHole(isolate));
1606 DCHECK(!value.IsAccessorPair());
1607 DCHECK(!value.IsAccessorInfo());
1608 RETURN_FAILURE_IF_NOT_SUCCESSFUL(accumulator->AddKey(value, convert));
1609 }
1610 return ExceptionStatus::kSuccess;
1611 }
1612
1613   static bool IncludesValueFastPath(Isolate* isolate, Handle<JSObject> receiver,
1614 Handle<Object> value, size_t start_from,
1615 size_t length, Maybe<bool>* result) {
1616 DisallowHeapAllocation no_gc;
1617 NumberDictionary dictionary = NumberDictionary::cast(receiver->elements());
1618 Object the_hole = ReadOnlyRoots(isolate).the_hole_value();
1619 Object undefined = ReadOnlyRoots(isolate).undefined_value();
1620
1621 // Scan for accessor properties. If accessors are present, then elements
1622 // must be accessed in order via the slow path.
1623 bool found = false;
1624 for (InternalIndex i : dictionary.IterateEntries()) {
1625 Object k = dictionary.KeyAt(isolate, i);
1626 if (k == the_hole) continue;
1627 if (k == undefined) continue;
1628
1629 uint32_t index;
1630 if (!k.ToArrayIndex(&index) || index < start_from || index >= length) {
1631 continue;
1632 }
1633
1634 if (dictionary.DetailsAt(i).kind() == kAccessor) {
1635 // Restart from beginning in slow path, otherwise we may observably
1636 // access getters out of order
1637 return false;
1638 } else if (!found) {
1639 Object element_k = dictionary.ValueAt(isolate, i);
1640 if (value->SameValueZero(element_k)) found = true;
1641 }
1642 }
1643
1644 *result = Just(found);
1645 return true;
1646 }
1647
1648   static Maybe<bool> IncludesValueImpl(Isolate* isolate,
1649 Handle<JSObject> receiver,
1650 Handle<Object> value, size_t start_from,
1651 size_t length) {
1652 DCHECK(JSObject::PrototypeHasNoElements(isolate, *receiver));
1653 bool search_for_hole = value->IsUndefined(isolate);
1654
1655 if (!search_for_hole) {
1656 Maybe<bool> result = Nothing<bool>();
1657 if (DictionaryElementsAccessor::IncludesValueFastPath(
1658 isolate, receiver, value, start_from, length, &result)) {
1659 return result;
1660 }
1661 }
1662 ElementsKind original_elements_kind = receiver->GetElementsKind();
1663 USE(original_elements_kind);
1664 Handle<NumberDictionary> dictionary(
1665 NumberDictionary::cast(receiver->elements()), isolate);
1666 // Iterate through the entire range, as accessing elements out of order is
1667 // observable.
1668 for (size_t k = start_from; k < length; ++k) {
1669 DCHECK_EQ(receiver->GetElementsKind(), original_elements_kind);
1670 InternalIndex entry =
1671 dictionary->FindEntry(isolate, static_cast<uint32_t>(k));
1672 if (entry.is_not_found()) {
1673 if (search_for_hole) return Just(true);
1674 continue;
1675 }
1676
1677 PropertyDetails details = GetDetailsImpl(*dictionary, entry);
1678 switch (details.kind()) {
1679 case kData: {
1680 Object element_k = dictionary->ValueAt(entry);
1681 if (value->SameValueZero(element_k)) return Just(true);
1682 break;
1683 }
1684 case kAccessor: {
1685 LookupIterator it(isolate, receiver, k,
1686 LookupIterator::OWN_SKIP_INTERCEPTOR);
1687 DCHECK(it.IsFound());
1688 DCHECK_EQ(it.state(), LookupIterator::ACCESSOR);
1689 Handle<Object> element_k;
1690
1691 ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, element_k,
1692 Object::GetPropertyWithAccessor(&it),
1693 Nothing<bool>());
1694
1695 if (value->SameValueZero(*element_k)) return Just(true);
1696
1697 // Bailout to slow path if elements on prototype changed
1698 if (!JSObject::PrototypeHasNoElements(isolate, *receiver)) {
1699 return IncludesValueSlowPath(isolate, receiver, value, k + 1,
1700 length);
1701 }
1702
1703 // Continue if elements unchanged
1704 if (*dictionary == receiver->elements()) continue;
1705
1706 // Otherwise, bailout or update elements
1707
1708 // If switched to initial elements, return true if searching for
1709 // undefined, and false otherwise.
1710 if (receiver->map().GetInitialElements() == receiver->elements()) {
1711 return Just(search_for_hole);
1712 }
1713
1714 // If switched to fast elements, continue with the correct accessor.
1715 if (receiver->GetElementsKind() != DICTIONARY_ELEMENTS) {
1716 ElementsAccessor* accessor = receiver->GetElementsAccessor();
1717 return accessor->IncludesValue(isolate, receiver, value, k + 1,
1718 length);
1719 }
1720 dictionary =
1721 handle(NumberDictionary::cast(receiver->elements()), isolate);
1722 break;
1723 }
1724 }
1725 }
1726 return Just(false);
1727 }
1728
1729   static Maybe<int64_t> IndexOfValueImpl(Isolate* isolate,
1730 Handle<JSObject> receiver,
1731 Handle<Object> value,
1732 size_t start_from, size_t length) {
1733 DCHECK(JSObject::PrototypeHasNoElements(isolate, *receiver));
1734
1735 ElementsKind original_elements_kind = receiver->GetElementsKind();
1736 USE(original_elements_kind);
1737 Handle<NumberDictionary> dictionary(
1738 NumberDictionary::cast(receiver->elements()), isolate);
1739 // Iterate through entire range, as accessing elements out of order is
1740 // observable.
1741 for (size_t k = start_from; k < length; ++k) {
1742 DCHECK_EQ(receiver->GetElementsKind(), original_elements_kind);
1743 DCHECK_LE(k, std::numeric_limits<uint32_t>::max());
1744 InternalIndex entry =
1745 dictionary->FindEntry(isolate, static_cast<uint32_t>(k));
1746 if (entry.is_not_found()) continue;
1747
1748 PropertyDetails details =
1749 GetDetailsImpl(*dictionary, InternalIndex(entry));
1750 switch (details.kind()) {
1751 case kData: {
1752 Object element_k = dictionary->ValueAt(entry);
1753 if (value->StrictEquals(element_k)) {
1754 return Just<int64_t>(k);
1755 }
1756 break;
1757 }
1758 case kAccessor: {
1759 LookupIterator it(isolate, receiver, k,
1760 LookupIterator::OWN_SKIP_INTERCEPTOR);
1761 DCHECK(it.IsFound());
1762 DCHECK_EQ(it.state(), LookupIterator::ACCESSOR);
1763 Handle<Object> element_k;
1764
1765 ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, element_k,
1766 Object::GetPropertyWithAccessor(&it),
1767 Nothing<int64_t>());
1768
1769 if (value->StrictEquals(*element_k)) return Just<int64_t>(k);
1770
1771 // Bailout to slow path if elements on prototype changed.
1772 if (!JSObject::PrototypeHasNoElements(isolate, *receiver)) {
1773 return IndexOfValueSlowPath(isolate, receiver, value, k + 1,
1774 length);
1775 }
1776
1777 // Continue if elements unchanged.
1778 if (*dictionary == receiver->elements()) continue;
1779
1780 // Otherwise, bailout or update elements.
1781 if (receiver->GetElementsKind() != DICTIONARY_ELEMENTS) {
1782 // Otherwise, switch to slow path.
1783 return IndexOfValueSlowPath(isolate, receiver, value, k + 1,
1784 length);
1785 }
1786 dictionary =
1787 handle(NumberDictionary::cast(receiver->elements()), isolate);
1788 break;
1789 }
1790 }
1791 }
1792 return Just<int64_t>(-1);
1793 }
1794
1795   static void ValidateContents(JSObject holder, size_t length) {
1796 DisallowHeapAllocation no_gc;
1797 #if DEBUG
1798 DCHECK_EQ(holder.map().elements_kind(), DICTIONARY_ELEMENTS);
1799 if (!FLAG_enable_slow_asserts) return;
1800 ReadOnlyRoots roots = holder.GetReadOnlyRoots();
1801 NumberDictionary dictionary = NumberDictionary::cast(holder.elements());
1802 // Validate the requires_slow_elements and max_number_key values.
1803 bool requires_slow_elements = false;
1804 int max_key = 0;
1805 for (InternalIndex i : dictionary.IterateEntries()) {
1806 Object k;
1807 if (!dictionary.ToKey(roots, i, &k)) continue;
1808 DCHECK_LE(0.0, k.Number());
1809 if (k.Number() > NumberDictionary::kRequiresSlowElementsLimit) {
1810 requires_slow_elements = true;
1811 } else {
1812 max_key = std::max(max_key, Smi::ToInt(k));
1813 }
1814 }
1815 if (requires_slow_elements) {
1816 DCHECK(dictionary.requires_slow_elements());
1817 } else if (!dictionary.requires_slow_elements()) {
1818 DCHECK_LE(max_key, dictionary.max_number_key());
1819 }
1820 #endif
1821 }
1822 };
1823
1824 // Super class for all fast element arrays.
1825 template <typename Subclass, typename KindTraits>
1826 class FastElementsAccessor : public ElementsAccessorBase<Subclass, KindTraits> {
1827 public:
1828 using BackingStore = typename KindTraits::BackingStore;
1829
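  // Converts the fast backing store into a NumberDictionary, skipping holes
  // and recording the largest index added so the dictionary's max-number-key
  // bookkeeping can be updated afterwards.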
1830   static Handle<NumberDictionary> NormalizeImpl(Handle<JSObject> object,
1831 Handle<FixedArrayBase> store) {
1832 Isolate* isolate = object->GetIsolate();
1833 ElementsKind kind = Subclass::kind();
1834
1835 // Ensure that notifications fire if the array or object prototypes are
1836 // normalizing.
1837 if (IsSmiOrObjectElementsKind(kind) ||
1838 kind == FAST_STRING_WRAPPER_ELEMENTS) {
1839 isolate->UpdateNoElementsProtectorOnNormalizeElements(object);
1840 }
1841
1842 int capacity = object->GetFastElementsUsage();
1843 Handle<NumberDictionary> dictionary =
1844 NumberDictionary::New(isolate, capacity);
1845
1846 PropertyDetails details = PropertyDetails::Empty();
1847 int j = 0;
1848 int max_number_key = -1;
1849 for (int i = 0; j < capacity; i++) {
1850 if (IsHoleyElementsKindForRead(kind)) {
1851 if (BackingStore::cast(*store).is_the_hole(isolate, i)) continue;
1852 }
1853 max_number_key = i;
1854 Handle<Object> value =
1855 Subclass::GetImpl(isolate, *store, InternalIndex(i));
1856 dictionary =
1857 NumberDictionary::Add(isolate, dictionary, i, value, details);
1858 j++;
1859 }
1860
1861 if (max_number_key > 0) {
1862 dictionary->UpdateMaxNumberKey(static_cast<uint32_t>(max_number_key),
1863 object);
1864 }
1865 return dictionary;
1866 }
1867
1868   static void DeleteAtEnd(Handle<JSObject> obj,
1869 Handle<BackingStore> backing_store, uint32_t entry) {
1870 uint32_t length = static_cast<uint32_t>(backing_store->length());
1871 Isolate* isolate = obj->GetIsolate();
1872 for (; entry > 0; entry--) {
1873 if (!backing_store->is_the_hole(isolate, entry - 1)) break;
1874 }
1875 if (entry == 0) {
1876 FixedArray empty = ReadOnlyRoots(isolate).empty_fixed_array();
1877 // Dynamically ask for the elements kind here since we manually redirect
1878 // the operations for argument backing stores.
1879 if (obj->GetElementsKind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS) {
1880 SloppyArgumentsElements::cast(obj->elements()).set_arguments(empty);
1881 } else {
1882 obj->set_elements(empty);
1883 }
1884 return;
1885 }
1886
1887 isolate->heap()->RightTrimFixedArray(*backing_store, length - entry);
1888 }
1889
1890   static void DeleteCommon(Handle<JSObject> obj, uint32_t entry,
1891 Handle<FixedArrayBase> store) {
1892 DCHECK(obj->HasSmiOrObjectElements() || obj->HasDoubleElements() ||
1893 obj->HasNonextensibleElements() || obj->HasFastArgumentsElements() ||
1894 obj->HasFastStringWrapperElements());
1895 Handle<BackingStore> backing_store = Handle<BackingStore>::cast(store);
1896 if (!obj->IsJSArray() &&
1897 entry == static_cast<uint32_t>(store->length()) - 1) {
1898 DeleteAtEnd(obj, backing_store, entry);
1899 return;
1900 }
1901
1902 Isolate* isolate = obj->GetIsolate();
1903 backing_store->set_the_hole(isolate, entry);
1904
1905 // TODO(verwaest): Move this out of elements.cc.
1906 // If an old space backing store is larger than a certain size and
1907 // has too few used values, normalize it.
1908 const int kMinLengthForSparsenessCheck = 64;
1909 if (backing_store->length() < kMinLengthForSparsenessCheck) return;
1910 // TODO(ulan): Check if it works with young large objects.
1911 if (ObjectInYoungGeneration(*backing_store)) return;
1912 uint32_t length = 0;
1913 if (obj->IsJSArray()) {
1914 JSArray::cast(*obj).length().ToArrayLength(&length);
1915 } else {
1916 length = static_cast<uint32_t>(store->length());
1917 }
1918
1919 // To avoid doing the check on every delete, use a counter-based heuristic.
1920 const int kLengthFraction = 16;
1921 // The above constant must be large enough to ensure that we check for
1922 // normalization frequently enough. At a minimum, it should be large
1923 // enough to reliably hit the "window" of remaining elements count where
1924 // normalization would be beneficial.
1925 STATIC_ASSERT(kLengthFraction >=
1926 NumberDictionary::kEntrySize *
1927 NumberDictionary::kPreferFastElementsSizeFactor);
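    // For example, with kLengthFraction == 16 a backing store of length 1600
    // only reaches the full sparseness scan below about once per 100
    // deletions.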
1928 size_t current_counter = isolate->elements_deletion_counter();
1929 if (current_counter < length / kLengthFraction) {
1930 isolate->set_elements_deletion_counter(current_counter + 1);
1931 return;
1932 }
1933 // Reset the counter whenever the full check is performed.
1934 isolate->set_elements_deletion_counter(0);
1935
1936 if (!obj->IsJSArray()) {
1937 uint32_t i;
1938 for (i = entry + 1; i < length; i++) {
1939 if (!backing_store->is_the_hole(isolate, i)) break;
1940 }
1941 if (i == length) {
1942 DeleteAtEnd(obj, backing_store, entry);
1943 return;
1944 }
1945 }
1946 int num_used = 0;
1947 for (int i = 0; i < backing_store->length(); ++i) {
1948 if (!backing_store->is_the_hole(isolate, i)) {
1949 ++num_used;
1950 // Bail out if a number dictionary wouldn't be able to save much space.
1951 if (NumberDictionary::kPreferFastElementsSizeFactor *
1952 NumberDictionary::ComputeCapacity(num_used) *
1953 NumberDictionary::kEntrySize >
1954 static_cast<uint32_t>(backing_store->length())) {
1955 return;
1956 }
1957 }
1958 }
1959 JSObject::NormalizeElements(obj);
1960 }
1961
1962   static void ReconfigureImpl(Handle<JSObject> object,
1963 Handle<FixedArrayBase> store, InternalIndex entry,
1964 Handle<Object> value,
1965 PropertyAttributes attributes) {
1966 Handle<NumberDictionary> dictionary = JSObject::NormalizeElements(object);
1967 entry = InternalIndex(
1968 dictionary->FindEntry(object->GetIsolate(), entry.as_uint32()));
1969 DictionaryElementsAccessor::ReconfigureImpl(object, dictionary, entry,
1970 value, attributes);
1971 }
1972
1973   static void AddImpl(Handle<JSObject> object, uint32_t index,
1974 Handle<Object> value, PropertyAttributes attributes,
1975 uint32_t new_capacity) {
1976 DCHECK_EQ(NONE, attributes);
1977 ElementsKind from_kind = object->GetElementsKind();
1978 ElementsKind to_kind = Subclass::kind();
1979 if (IsDictionaryElementsKind(from_kind) ||
1980 IsDoubleElementsKind(from_kind) != IsDoubleElementsKind(to_kind) ||
1981 Subclass::GetCapacityImpl(*object, object->elements()) !=
1982 new_capacity) {
1983 Subclass::GrowCapacityAndConvertImpl(object, new_capacity);
1984 } else {
1985 if (IsFastElementsKind(from_kind) && from_kind != to_kind) {
1986 JSObject::TransitionElementsKind(object, to_kind);
1987 }
1988 if (IsSmiOrObjectElementsKind(from_kind)) {
1989 DCHECK(IsSmiOrObjectElementsKind(to_kind));
1990 JSObject::EnsureWritableFastElements(object);
1991 }
1992 }
1993 Subclass::SetImpl(object, InternalIndex(index), *value);
1994 }
1995
1996   static void DeleteImpl(Handle<JSObject> obj, InternalIndex entry) {
1997 ElementsKind kind = KindTraits::Kind;
1998 if (IsFastPackedElementsKind(kind) ||
1999 kind == PACKED_NONEXTENSIBLE_ELEMENTS) {
2000 JSObject::TransitionElementsKind(obj, GetHoleyElementsKind(kind));
2001 }
2002 if (IsSmiOrObjectElementsKind(KindTraits::Kind) ||
2003 IsNonextensibleElementsKind(kind)) {
2004 JSObject::EnsureWritableFastElements(obj);
2005 }
2006 DeleteCommon(obj, entry.as_uint32(),
2007 handle(obj->elements(), obj->GetIsolate()));
2008 }
2009
2010   static bool HasEntryImpl(Isolate* isolate, FixedArrayBase backing_store,
2011 InternalIndex entry) {
2012 return !BackingStore::cast(backing_store)
2013 .is_the_hole(isolate, entry.as_int());
2014 }
2015
2016   static uint32_t NumberOfElementsImpl(JSObject receiver,
2017 FixedArrayBase backing_store) {
2018 size_t max_index = Subclass::GetMaxIndex(receiver, backing_store);
2019 DCHECK_LE(max_index, std::numeric_limits<uint32_t>::max());
2020 if (IsFastPackedElementsKind(Subclass::kind())) {
2021 return static_cast<uint32_t>(max_index);
2022 }
2023 Isolate* isolate = receiver.GetIsolate();
2024 uint32_t count = 0;
2025 for (size_t i = 0; i < max_index; i++) {
2026 if (Subclass::HasEntryImpl(isolate, backing_store, InternalIndex(i))) {
2027 count++;
2028 }
2029 }
2030 return count;
2031 }
2032
2033   V8_WARN_UNUSED_RESULT static ExceptionStatus AddElementsToKeyAccumulatorImpl(
2034 Handle<JSObject> receiver, KeyAccumulator* accumulator,
2035 AddKeyConversion convert) {
2036 Isolate* isolate = accumulator->isolate();
2037 Handle<FixedArrayBase> elements(receiver->elements(), isolate);
2038 size_t length = Subclass::GetMaxNumberOfEntries(*receiver, *elements);
2039 for (size_t i = 0; i < length; i++) {
2040 if (IsFastPackedElementsKind(KindTraits::Kind) ||
2041 HasEntryImpl(isolate, *elements, InternalIndex(i))) {
2042 RETURN_FAILURE_IF_NOT_SUCCESSFUL(accumulator->AddKey(
2043 Subclass::GetImpl(isolate, *elements, InternalIndex(i)), convert));
2044 }
2045 }
2046 return ExceptionStatus::kSuccess;
2047 }
2048
2049   static void ValidateContents(JSObject holder, size_t length) {
2050 #if DEBUG
2051 Isolate* isolate = holder.GetIsolate();
2052 Heap* heap = isolate->heap();
2053 FixedArrayBase elements = holder.elements();
2054 Map map = elements.map();
2055 if (IsSmiOrObjectElementsKind(KindTraits::Kind)) {
2056 DCHECK_NE(map, ReadOnlyRoots(heap).fixed_double_array_map());
2057 } else if (IsDoubleElementsKind(KindTraits::Kind)) {
2058 DCHECK_NE(map, ReadOnlyRoots(heap).fixed_cow_array_map());
2059 if (map == ReadOnlyRoots(heap).fixed_array_map()) DCHECK_EQ(0u, length);
2060 } else {
2061 UNREACHABLE();
2062 }
2063 if (length == 0u) return; // nothing to do!
2064 #if ENABLE_SLOW_DCHECKS
2065 DisallowHeapAllocation no_gc;
2066 BackingStore backing_store = BackingStore::cast(elements);
2067 DCHECK(length <= std::numeric_limits<int>::max());
2068 int length_int = static_cast<int>(length);
2069 if (IsSmiElementsKind(KindTraits::Kind)) {
2070 HandleScope scope(isolate);
2071 for (int i = 0; i < length_int; i++) {
2072 DCHECK(BackingStore::get(backing_store, i, isolate)->IsSmi() ||
2073 (IsHoleyElementsKind(KindTraits::Kind) &&
2074 backing_store.is_the_hole(isolate, i)));
2075 }
2076 } else if (KindTraits::Kind == PACKED_ELEMENTS ||
2077 KindTraits::Kind == PACKED_DOUBLE_ELEMENTS) {
2078 for (int i = 0; i < length_int; i++) {
2079 DCHECK(!backing_store.is_the_hole(isolate, i));
2080 }
2081 } else {
2082 DCHECK(IsHoleyElementsKind(KindTraits::Kind));
2083 }
2084 #endif
2085 #endif
2086 }
2087
2088   static Handle<Object> PopImpl(Handle<JSArray> receiver) {
2089 return Subclass::RemoveElement(receiver, AT_END);
2090 }
2091
2092   static Handle<Object> ShiftImpl(Handle<JSArray> receiver) {
2093 return Subclass::RemoveElement(receiver, AT_START);
2094 }
2095
2096   static uint32_t PushImpl(Handle<JSArray> receiver, BuiltinArguments* args,
2097 uint32_t push_size) {
2098 Handle<FixedArrayBase> backing_store(receiver->elements(),
2099 receiver->GetIsolate());
2100 return Subclass::AddArguments(receiver, backing_store, args, push_size,
2101 AT_END);
2102 }
2103
2104   static uint32_t UnshiftImpl(Handle<JSArray> receiver, BuiltinArguments* args,
2105 uint32_t unshift_size) {
2106 Handle<FixedArrayBase> backing_store(receiver->elements(),
2107 receiver->GetIsolate());
2108 return Subclass::AddArguments(receiver, backing_store, args, unshift_size,
2109 AT_START);
2110 }
2111
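  // Moves a range of elements within the backing store. Large shifts down to
  // index 0 are handled by left-trimming the store when the heap allows it;
  // otherwise the elements are moved in place. Any vacated range is refilled
  // with holes.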
2112   static void MoveElements(Isolate* isolate, Handle<JSArray> receiver,
2113 Handle<FixedArrayBase> backing_store, int dst_index,
2114 int src_index, int len, int hole_start,
2115 int hole_end) {
2116 Handle<BackingStore> dst_elms = Handle<BackingStore>::cast(backing_store);
2117 if (len > JSArray::kMaxCopyElements && dst_index == 0 &&
2118 isolate->heap()->CanMoveObjectStart(*dst_elms)) {
2119 // Update all the copies of this backing_store handle.
2120 *dst_elms.location() =
2121 BackingStore::cast(
2122 isolate->heap()->LeftTrimFixedArray(*dst_elms, src_index))
2123 .ptr();
2124 receiver->set_elements(*dst_elms);
2125 // Adjust the hole offset as the array has been shrunk.
2126 hole_end -= src_index;
2127 DCHECK_LE(hole_start, backing_store->length());
2128 DCHECK_LE(hole_end, backing_store->length());
2129 } else if (len != 0) {
2130 WriteBarrierMode mode = GetWriteBarrierMode(KindTraits::Kind);
2131 dst_elms->MoveElements(isolate, dst_index, src_index, len, mode);
2132 }
2133 if (hole_start != hole_end) {
2134 dst_elms->FillWithHoles(hole_start, hole_end);
2135 }
2136 }
2137
2138   static Object FillImpl(Handle<JSObject> receiver, Handle<Object> obj_value,
2139 size_t start, size_t end) {
2140 // Ensure indexes are within array bounds
2141 DCHECK_LE(0, start);
2142 DCHECK_LE(start, end);
2143
2144 // Make sure COW arrays are copied.
2145 if (IsSmiOrObjectElementsKind(Subclass::kind())) {
2146 JSObject::EnsureWritableFastElements(receiver);
2147 }
2148
2149 // Make sure we have enough space.
2150 DCHECK_LE(end, std::numeric_limits<uint32_t>::max());
2151 if (end > Subclass::GetCapacityImpl(*receiver, receiver->elements())) {
2152 Subclass::GrowCapacityAndConvertImpl(receiver,
2153 static_cast<uint32_t>(end));
2154 CHECK_EQ(Subclass::kind(), receiver->GetElementsKind());
2155 }
2156 DCHECK_LE(end, Subclass::GetCapacityImpl(*receiver, receiver->elements()));
2157
2158 for (size_t index = start; index < end; ++index) {
2159 Subclass::SetImpl(receiver, InternalIndex(index), *obj_value);
2160 }
2161 return *receiver;
2162 }
2163
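  // Scans the fast backing store for {search_value}, dispatching on whether
  // the value is undefined, a non-number, NaN, or an ordinary number so each
  // case can be handled with minimal per-element checks.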
2164   static Maybe<bool> IncludesValueImpl(Isolate* isolate,
2165 Handle<JSObject> receiver,
2166 Handle<Object> search_value,
2167 size_t start_from, size_t length) {
2168 DCHECK(JSObject::PrototypeHasNoElements(isolate, *receiver));
2169 DisallowHeapAllocation no_gc;
2170 FixedArrayBase elements_base = receiver->elements();
2171 Object the_hole = ReadOnlyRoots(isolate).the_hole_value();
2172 Object undefined = ReadOnlyRoots(isolate).undefined_value();
2173 Object value = *search_value;
2174
2175 if (start_from >= length) return Just(false);
2176
2177     // Elements beyond the backing store capacity are treated as undefined.
2178 size_t elements_length = static_cast<size_t>(elements_base.length());
2179 if (value == undefined && elements_length < length) return Just(true);
2180 if (elements_length == 0) {
2181 DCHECK_NE(value, undefined);
2182 return Just(false);
2183 }
2184
2185 length = std::min(elements_length, length);
2186 DCHECK_LE(length, std::numeric_limits<int>::max());
2187
2188 if (!value.IsNumber()) {
2189 if (value == undefined) {
2190 // Search for `undefined` or The Hole. Even in the case of
2191 // PACKED_DOUBLE_ELEMENTS or PACKED_SMI_ELEMENTS, we might encounter The
2192 // Hole here, since the {length} used here can be larger than
2193 // JSArray::length.
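        // For example, Array.prototype.includes treats holes like undefined,
        // so [1, , 3].includes(undefined) evaluates to true.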
2194 if (IsSmiOrObjectElementsKind(Subclass::kind()) ||
2195 IsAnyNonextensibleElementsKind(Subclass::kind())) {
2196 FixedArray elements = FixedArray::cast(receiver->elements());
2197
2198 for (size_t k = start_from; k < length; ++k) {
2199 Object element_k = elements.get(static_cast<int>(k));
2200
2201 if (element_k == the_hole || element_k == undefined) {
2202 return Just(true);
2203 }
2204 }
2205 return Just(false);
2206 } else {
2207 // Search for The Hole in HOLEY_DOUBLE_ELEMENTS or
2208 // PACKED_DOUBLE_ELEMENTS.
2209 DCHECK(IsDoubleElementsKind(Subclass::kind()));
2210 FixedDoubleArray elements =
2211 FixedDoubleArray::cast(receiver->elements());
2212
2213 for (size_t k = start_from; k < length; ++k) {
2214 if (elements.is_the_hole(static_cast<int>(k))) return Just(true);
2215 }
2216 return Just(false);
2217 }
2218 } else if (!IsObjectElementsKind(Subclass::kind()) &&
2219 !IsAnyNonextensibleElementsKind(Subclass::kind())) {
2220 // Search for non-number, non-Undefined value, with either
2221 // PACKED_SMI_ELEMENTS, PACKED_DOUBLE_ELEMENTS, HOLEY_SMI_ELEMENTS or
2222 // HOLEY_DOUBLE_ELEMENTS. Guaranteed to return false, since these
2223 // elements kinds can only contain Number values or undefined.
2224 return Just(false);
2225 } else {
2226 // Search for non-number, non-Undefined value with either
2227 // PACKED_ELEMENTS or HOLEY_ELEMENTS.
2228 DCHECK(IsObjectElementsKind(Subclass::kind()) ||
2229 IsAnyNonextensibleElementsKind(Subclass::kind()));
2230 FixedArray elements = FixedArray::cast(receiver->elements());
2231
2232 for (size_t k = start_from; k < length; ++k) {
2233 Object element_k = elements.get(static_cast<int>(k));
2234 if (element_k == the_hole) continue;
2235 if (value.SameValueZero(element_k)) return Just(true);
2236 }
2237 return Just(false);
2238 }
2239 } else {
2240 if (!value.IsNaN()) {
2241 double search_value = value.Number();
2242 if (IsDoubleElementsKind(Subclass::kind())) {
2243 // Search for non-NaN Number in PACKED_DOUBLE_ELEMENTS or
2244 // HOLEY_DOUBLE_ELEMENTS --- Skip TheHole, and trust UCOMISD or
2245 // similar operation for result.
2246 FixedDoubleArray elements =
2247 FixedDoubleArray::cast(receiver->elements());
2248
2249 for (size_t k = start_from; k < length; ++k) {
2250 if (elements.is_the_hole(static_cast<int>(k))) continue;
2251 if (elements.get_scalar(static_cast<int>(k)) == search_value) {
2252 return Just(true);
2253 }
2254 }
2255 return Just(false);
2256 } else {
2257 // Search for non-NaN Number in PACKED_ELEMENTS, HOLEY_ELEMENTS,
2258 // PACKED_SMI_ELEMENTS or HOLEY_SMI_ELEMENTS --- Skip non-Numbers,
2259 // and trust UCOMISD or similar operation for result
2260 FixedArray elements = FixedArray::cast(receiver->elements());
2261
2262 for (size_t k = start_from; k < length; ++k) {
2263 Object element_k = elements.get(static_cast<int>(k));
2264 if (element_k.IsNumber() && element_k.Number() == search_value) {
2265 return Just(true);
2266 }
2267 }
2268 return Just(false);
2269 }
2270 } else {
2271 // Search for NaN --- NaN cannot be represented with Smi elements, so
2272 // abort if ElementsKind is PACKED_SMI_ELEMENTS or HOLEY_SMI_ELEMENTS
2273 if (IsSmiElementsKind(Subclass::kind())) return Just(false);
2274
2275 if (IsDoubleElementsKind(Subclass::kind())) {
2276 // Search for NaN in PACKED_DOUBLE_ELEMENTS or
2277 // HOLEY_DOUBLE_ELEMENTS --- Skip The Hole and trust
2278 // std::isnan(elementK) for result
2279 FixedDoubleArray elements =
2280 FixedDoubleArray::cast(receiver->elements());
2281
2282 for (size_t k = start_from; k < length; ++k) {
2283 if (elements.is_the_hole(static_cast<int>(k))) continue;
2284 if (std::isnan(elements.get_scalar(static_cast<int>(k)))) {
2285 return Just(true);
2286 }
2287 }
2288 return Just(false);
2289 } else {
2290 // Search for NaN in PACKED_ELEMENTS or HOLEY_ELEMENTS. Return true
2291 // if elementK->IsHeapNumber() && std::isnan(elementK->Number())
2292 DCHECK(IsObjectElementsKind(Subclass::kind()) ||
2293 IsAnyNonextensibleElementsKind(Subclass::kind()));
2294 FixedArray elements = FixedArray::cast(receiver->elements());
2295
2296 for (size_t k = start_from; k < length; ++k) {
2297 if (elements.get(static_cast<int>(k)).IsNaN()) return Just(true);
2298 }
2299 return Just(false);
2300 }
2301 }
2302 }
2303 }
2304
2305   static Handle<FixedArray> CreateListFromArrayLikeImpl(Isolate* isolate,
2306 Handle<JSObject> object,
2307 uint32_t length) {
2308 Handle<FixedArray> result = isolate->factory()->NewFixedArray(length);
2309 Handle<FixedArrayBase> elements(object->elements(), isolate);
2310 for (uint32_t i = 0; i < length; i++) {
2311 InternalIndex entry(i);
2312 if (!Subclass::HasEntryImpl(isolate, *elements, entry)) continue;
2313 Handle<Object> value;
2314 value = Subclass::GetImpl(isolate, *elements, entry);
2315 if (value->IsName()) {
2316 value = isolate->factory()->InternalizeName(Handle<Name>::cast(value));
2317 }
2318 result->set(i, *value);
2319 }
2320 return result;
2321 }
2322
2323   static Handle<Object> RemoveElement(Handle<JSArray> receiver,
2324 Where remove_position) {
2325 Isolate* isolate = receiver->GetIsolate();
2326 ElementsKind kind = KindTraits::Kind;
2327 if (IsSmiOrObjectElementsKind(kind)) {
2328 HandleScope scope(isolate);
2329 JSObject::EnsureWritableFastElements(receiver);
2330 }
2331 Handle<FixedArrayBase> backing_store(receiver->elements(), isolate);
2332 uint32_t length = static_cast<uint32_t>(Smi::ToInt(receiver->length()));
2333 DCHECK_GT(length, 0);
2334 int new_length = length - 1;
2335 int remove_index = remove_position == AT_START ? 0 : new_length;
2336 Handle<Object> result =
2337 Subclass::GetImpl(isolate, *backing_store, InternalIndex(remove_index));
2338 if (remove_position == AT_START) {
2339 Subclass::MoveElements(isolate, receiver, backing_store, 0, 1, new_length,
2340 0, 0);
2341 }
2342 Subclass::SetLengthImpl(isolate, receiver, new_length, backing_store);
2343
2344 if (IsHoleyElementsKind(kind) && result->IsTheHole(isolate)) {
2345 return isolate->factory()->undefined_value();
2346 }
2347 return result;
2348 }
2349
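  // Shared implementation of push and unshift: grows or shifts the backing
  // store as needed, copies the new arguments in, updates the array length,
  // and returns the new length.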
2350   static uint32_t AddArguments(Handle<JSArray> receiver,
2351 Handle<FixedArrayBase> backing_store,
2352 BuiltinArguments* args, uint32_t add_size,
2353 Where add_position) {
2354 uint32_t length = Smi::ToInt(receiver->length());
2355 DCHECK_LT(0, add_size);
2356 uint32_t elms_len = backing_store->length();
2357 // Check we do not overflow the new_length.
2358 DCHECK(add_size <= static_cast<uint32_t>(Smi::kMaxValue - length));
2359 uint32_t new_length = length + add_size;
2360
2361 if (new_length > elms_len) {
2362 // New backing storage is needed.
2363 uint32_t capacity = JSObject::NewElementsCapacity(new_length);
2364 // If we add arguments to the start we have to shift the existing objects.
2365 int copy_dst_index = add_position == AT_START ? add_size : 0;
2366 // Copy over all objects to a new backing_store.
2367 backing_store = Subclass::ConvertElementsWithCapacity(
2368 receiver, backing_store, KindTraits::Kind, capacity, 0,
2369 copy_dst_index);
2370 receiver->set_elements(*backing_store);
2371 } else if (add_position == AT_START) {
2372 // If the backing store has enough capacity and we add elements to the
2373 // start we have to shift the existing objects.
2374 Isolate* isolate = receiver->GetIsolate();
2375 Subclass::MoveElements(isolate, receiver, backing_store, add_size, 0,
2376 length, 0, 0);
2377 }
2378
2379 int insertion_index = add_position == AT_START ? 0 : length;
2380     // Copy the arguments to the insertion position.
2381 Subclass::CopyArguments(args, backing_store, add_size, 1, insertion_index);
2382 // Set the length.
2383 receiver->set_length(Smi::FromInt(new_length));
2384 return new_length;
2385 }
2386
2387   static void CopyArguments(BuiltinArguments* args,
2388 Handle<FixedArrayBase> dst_store,
2389 uint32_t copy_size, uint32_t src_index,
2390 uint32_t dst_index) {
2391 // Add the provided values.
2392 DisallowHeapAllocation no_gc;
2393 FixedArrayBase raw_backing_store = *dst_store;
2394 WriteBarrierMode mode = raw_backing_store.GetWriteBarrierMode(no_gc);
2395 for (uint32_t i = 0; i < copy_size; i++) {
2396 Object argument = (*args)[src_index + i];
2397 DCHECK(!argument.IsTheHole());
2398 Subclass::SetImpl(raw_backing_store, InternalIndex(dst_index + i),
2399 argument, mode);
2400 }
2401 }
2402 };
2403
2404 template <typename Subclass, typename KindTraits>
2405 class FastSmiOrObjectElementsAccessor
2406 : public FastElementsAccessor<Subclass, KindTraits> {
2407 public:
2408   static inline void SetImpl(Handle<JSObject> holder, InternalIndex entry,
2409 Object value) {
2410 SetImpl(holder->elements(), entry, value);
2411 }
2412
2413   static inline void SetImpl(FixedArrayBase backing_store, InternalIndex entry,
2414 Object value) {
2415 FixedArray::cast(backing_store).set(entry.as_int(), value);
2416 }
2417
2418   static inline void SetImpl(FixedArrayBase backing_store, InternalIndex entry,
2419 Object value, WriteBarrierMode mode) {
2420 FixedArray::cast(backing_store).set(entry.as_int(), value, mode);
2421 }
2422
2423   static Object GetRaw(FixedArray backing_store, InternalIndex entry) {
2424 return backing_store.get(entry.as_int());
2425 }
2426
2427 // NOTE: this method violates the handlified function signature convention:
2428 // raw pointer parameters in the function that allocates.
2429 // See ElementsAccessor::CopyElements() for details.
2430 // This method could actually allocate if copying from double elements to
2431 // object elements.
2432   static void CopyElementsImpl(Isolate* isolate, FixedArrayBase from,
2433 uint32_t from_start, FixedArrayBase to,
2434 ElementsKind from_kind, uint32_t to_start,
2435 int packed_size, int copy_size) {
2436 DisallowHeapAllocation no_gc;
2437 ElementsKind to_kind = KindTraits::Kind;
2438 switch (from_kind) {
2439 case PACKED_SMI_ELEMENTS:
2440 case HOLEY_SMI_ELEMENTS:
2441 case PACKED_ELEMENTS:
2442 case PACKED_FROZEN_ELEMENTS:
2443 case PACKED_SEALED_ELEMENTS:
2444 case PACKED_NONEXTENSIBLE_ELEMENTS:
2445 case HOLEY_ELEMENTS:
2446 case HOLEY_FROZEN_ELEMENTS:
2447 case HOLEY_SEALED_ELEMENTS:
2448 case HOLEY_NONEXTENSIBLE_ELEMENTS:
2449 CopyObjectToObjectElements(isolate, from, from_kind, from_start, to,
2450 to_kind, to_start, copy_size);
2451 break;
2452 case PACKED_DOUBLE_ELEMENTS:
2453 case HOLEY_DOUBLE_ELEMENTS: {
2454 AllowHeapAllocation allow_allocation;
2455 DCHECK(IsObjectElementsKind(to_kind));
2456 CopyDoubleToObjectElements(isolate, from, from_start, to, to_start,
2457 copy_size);
2458 break;
2459 }
2460 case DICTIONARY_ELEMENTS:
2461 CopyDictionaryToObjectElements(isolate, from, from_start, to, to_kind,
2462 to_start, copy_size);
2463 break;
2464 case FAST_SLOPPY_ARGUMENTS_ELEMENTS:
2465 case SLOW_SLOPPY_ARGUMENTS_ELEMENTS:
2466 case FAST_STRING_WRAPPER_ELEMENTS:
2467 case SLOW_STRING_WRAPPER_ELEMENTS:
2468 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) case TYPE##_ELEMENTS:
2469 TYPED_ARRAYS(TYPED_ARRAY_CASE)
2470 #undef TYPED_ARRAY_CASE
2471 // This function is currently only used for JSArrays with non-zero
2472 // length.
2473 UNREACHABLE();
2474 case NO_ELEMENTS:
2475 break; // Nothing to do.
2476 }
2477 }
2478
2479   static Maybe<bool> CollectValuesOrEntriesImpl(
2480 Isolate* isolate, Handle<JSObject> object,
2481 Handle<FixedArray> values_or_entries, bool get_entries, int* nof_items,
2482 PropertyFilter filter) {
2483 int count = 0;
2484 if (get_entries) {
2485 // Collecting entries needs to allocate, so this code must be handlified.
2486 Handle<FixedArray> elements(FixedArray::cast(object->elements()),
2487 isolate);
2488 uint32_t length = elements->length();
2489 for (uint32_t index = 0; index < length; ++index) {
2490 InternalIndex entry(index);
2491 if (!Subclass::HasEntryImpl(isolate, *elements, entry)) continue;
2492 Handle<Object> value = Subclass::GetImpl(isolate, *elements, entry);
2493 value = MakeEntryPair(isolate, index, value);
2494 values_or_entries->set(count++, *value);
2495 }
2496 } else {
2497 // No allocations here, so we can avoid handlification overhead.
2498 DisallowHeapAllocation no_gc;
2499 FixedArray elements = FixedArray::cast(object->elements());
2500 uint32_t length = elements.length();
2501 for (uint32_t index = 0; index < length; ++index) {
2502 InternalIndex entry(index);
2503 if (!Subclass::HasEntryImpl(isolate, elements, entry)) continue;
2504 Object value = GetRaw(elements, entry);
2505 values_or_entries->set(count++, value);
2506 }
2507 }
2508 *nof_items = count;
2509 return Just(true);
2510 }
2511
2512   static Maybe<int64_t> IndexOfValueImpl(Isolate* isolate,
2513 Handle<JSObject> receiver,
2514 Handle<Object> search_value,
2515 size_t start_from, size_t length) {
2516 DCHECK(JSObject::PrototypeHasNoElements(isolate, *receiver));
2517 DisallowHeapAllocation no_gc;
2518 FixedArrayBase elements_base = receiver->elements();
2519 Object value = *search_value;
2520
2521 if (start_from >= length) return Just<int64_t>(-1);
2522
2523 length = std::min(static_cast<size_t>(elements_base.length()), length);
2524
2525 // Only FAST_{,HOLEY_}ELEMENTS can store non-numbers.
2526 if (!value.IsNumber() && !IsObjectElementsKind(Subclass::kind()) &&
2527 !IsAnyNonextensibleElementsKind(Subclass::kind())) {
2528 return Just<int64_t>(-1);
2529 }
2530 // NaN can never be found by strict equality.
2531 if (value.IsNaN()) return Just<int64_t>(-1);
2532
2533 // k can be greater than receiver->length() below, but it is bounded by
2534 // elements_base->length() so we never read out of bounds. This means that
2535 // elements->get(k) can return the hole, for which the StrictEquals will
2536 // always fail.
2537 FixedArray elements = FixedArray::cast(receiver->elements());
2538 STATIC_ASSERT(FixedArray::kMaxLength <=
2539 std::numeric_limits<uint32_t>::max());
2540 for (size_t k = start_from; k < length; ++k) {
2541 if (value.StrictEquals(elements.get(static_cast<uint32_t>(k)))) {
2542 return Just<int64_t>(k);
2543 }
2544 }
2545 return Just<int64_t>(-1);
2546 }
2547 };
2548
2549 class FastPackedSmiElementsAccessor
2550 : public FastSmiOrObjectElementsAccessor<
2551 FastPackedSmiElementsAccessor,
2552 ElementsKindTraits<PACKED_SMI_ELEMENTS>> {};
2553
2554 class FastHoleySmiElementsAccessor
2555 : public FastSmiOrObjectElementsAccessor<
2556 FastHoleySmiElementsAccessor,
2557 ElementsKindTraits<HOLEY_SMI_ELEMENTS>> {};
2558
2559 class FastPackedObjectElementsAccessor
2560 : public FastSmiOrObjectElementsAccessor<
2561 FastPackedObjectElementsAccessor,
2562 ElementsKindTraits<PACKED_ELEMENTS>> {};
2563
2564 template <typename Subclass, typename KindTraits>
2565 class FastNonextensibleObjectElementsAccessor
2566 : public FastSmiOrObjectElementsAccessor<Subclass, KindTraits> {
2567 public:
2568 using BackingStore = typename KindTraits::BackingStore;
2569
2570   static uint32_t PushImpl(Handle<JSArray> receiver, BuiltinArguments* args,
2571 uint32_t push_size) {
2572 UNREACHABLE();
2573 }
2574
2575   static void AddImpl(Handle<JSObject> object, uint32_t index,
2576 Handle<Object> value, PropertyAttributes attributes,
2577 uint32_t new_capacity) {
2578 UNREACHABLE();
2579 }
2580
2581   // TODO(duongn): refactor this to remove the code duplication with the sealed
2582   // version. Consider using JSObject::NormalizeElements(). Also consider
2583   // following the fast element logic instead of changing to dictionary mode.
2584   static void SetLengthImpl(Isolate* isolate, Handle<JSArray> array,
2585 uint32_t length,
2586 Handle<FixedArrayBase> backing_store) {
2587 uint32_t old_length = 0;
2588 CHECK(array->length().ToArrayIndex(&old_length));
2589 if (length == old_length) {
2590 // Do nothing.
2591 return;
2592 }
2593
2594 // Transition to DICTIONARY_ELEMENTS.
2595 // Convert to dictionary mode.
2596 Handle<NumberDictionary> new_element_dictionary =
2597 old_length == 0 ? isolate->factory()->empty_slow_element_dictionary()
2598 : array->GetElementsAccessor()->Normalize(array);
2599
2600 // Migrate map.
2601 Handle<Map> new_map = Map::Copy(isolate, handle(array->map(), isolate),
2602 "SlowCopyForSetLengthImpl");
2603 new_map->set_is_extensible(false);
2604 new_map->set_elements_kind(DICTIONARY_ELEMENTS);
2605 JSObject::MigrateToMap(isolate, array, new_map);
2606
2607 if (!new_element_dictionary.is_null()) {
2608 array->set_elements(*new_element_dictionary);
2609 }
2610
2611 if (array->elements() !=
2612 ReadOnlyRoots(isolate).empty_slow_element_dictionary()) {
2613 Handle<NumberDictionary> dictionary(array->element_dictionary(), isolate);
2614 // Make sure we never go back to the fast case
2615 array->RequireSlowElements(*dictionary);
2616 JSObject::ApplyAttributesToDictionary(isolate, ReadOnlyRoots(isolate),
2617 dictionary,
2618 PropertyAttributes::NONE);
2619 }
2620
2621 // Set length.
2622 Handle<FixedArrayBase> new_backing_store(array->elements(), isolate);
2623 DictionaryElementsAccessor::SetLengthImpl(isolate, array, length,
2624 new_backing_store);
2625 }
2626 };
2627
2628 class FastPackedNonextensibleObjectElementsAccessor
2629 : public FastNonextensibleObjectElementsAccessor<
2630 FastPackedNonextensibleObjectElementsAccessor,
2631 ElementsKindTraits<PACKED_NONEXTENSIBLE_ELEMENTS>> {};
2632
2633 class FastHoleyNonextensibleObjectElementsAccessor
2634 : public FastNonextensibleObjectElementsAccessor<
2635 FastHoleyNonextensibleObjectElementsAccessor,
2636 ElementsKindTraits<HOLEY_NONEXTENSIBLE_ELEMENTS>> {};
2637
2638 template <typename Subclass, typename KindTraits>
2639 class FastSealedObjectElementsAccessor
2640 : public FastSmiOrObjectElementsAccessor<Subclass, KindTraits> {
2641 public:
2642 using BackingStore = typename KindTraits::BackingStore;
2643
2644   static Handle<Object> RemoveElement(Handle<JSArray> receiver,
2645 Where remove_position) {
2646 UNREACHABLE();
2647 }
2648
2649   static void DeleteImpl(Handle<JSObject> obj, InternalIndex entry) {
2650 UNREACHABLE();
2651 }
2652
2653   static void DeleteAtEnd(Handle<JSObject> obj,
2654 Handle<BackingStore> backing_store, uint32_t entry) {
2655 UNREACHABLE();
2656 }
2657
2658   static void DeleteCommon(Handle<JSObject> obj, uint32_t entry,
2659 Handle<FixedArrayBase> store) {
2660 UNREACHABLE();
2661 }
2662
2663   static Handle<Object> PopImpl(Handle<JSArray> receiver) { UNREACHABLE(); }
2664
2665   static uint32_t PushImpl(Handle<JSArray> receiver, BuiltinArguments* args,
2666 uint32_t push_size) {
2667 UNREACHABLE();
2668 }
2669
2670   static void AddImpl(Handle<JSObject> object, uint32_t index,
2671 Handle<Object> value, PropertyAttributes attributes,
2672 uint32_t new_capacity) {
2673 UNREACHABLE();
2674 }
2675
2676 // TODO(duongn): refactor this due to code duplication of nonextensible
2677   // version. Consider using JSObject::NormalizeElements(). Also consider
2678   // following the fast element logic instead of changing to dictionary mode.
2679   static void SetLengthImpl(Isolate* isolate, Handle<JSArray> array,
2680 uint32_t length,
2681 Handle<FixedArrayBase> backing_store) {
2682 uint32_t old_length = 0;
2683 CHECK(array->length().ToArrayIndex(&old_length));
2684 if (length == old_length) {
2685 // Do nothing.
2686 return;
2687 }
2688
2689 // Transition to DICTIONARY_ELEMENTS.
2690 // Convert to dictionary mode
2691 Handle<NumberDictionary> new_element_dictionary =
2692 old_length == 0 ? isolate->factory()->empty_slow_element_dictionary()
2693 : array->GetElementsAccessor()->Normalize(array);
2694
2695 // Migrate map.
2696 Handle<Map> new_map = Map::Copy(isolate, handle(array->map(), isolate),
2697 "SlowCopyForSetLengthImpl");
2698 new_map->set_is_extensible(false);
2699 new_map->set_elements_kind(DICTIONARY_ELEMENTS);
2700 JSObject::MigrateToMap(isolate, array, new_map);
2701
2702 if (!new_element_dictionary.is_null()) {
2703 array->set_elements(*new_element_dictionary);
2704 }
2705
2706 if (array->elements() !=
2707 ReadOnlyRoots(isolate).empty_slow_element_dictionary()) {
2708 Handle<NumberDictionary> dictionary(array->element_dictionary(), isolate);
2709 // Make sure we never go back to the fast case
2710 array->RequireSlowElements(*dictionary);
2711 JSObject::ApplyAttributesToDictionary(isolate, ReadOnlyRoots(isolate),
2712 dictionary,
2713 PropertyAttributes::SEALED);
2714 }
2715
2716 // Set length
2717 Handle<FixedArrayBase> new_backing_store(array->elements(), isolate);
2718 DictionaryElementsAccessor::SetLengthImpl(isolate, array, length,
2719 new_backing_store);
2720 }
2721 };
2722
2723 class FastPackedSealedObjectElementsAccessor
2724 : public FastSealedObjectElementsAccessor<
2725 FastPackedSealedObjectElementsAccessor,
2726 ElementsKindTraits<PACKED_SEALED_ELEMENTS>> {};
2727
2728 class FastHoleySealedObjectElementsAccessor
2729 : public FastSealedObjectElementsAccessor<
2730 FastHoleySealedObjectElementsAccessor,
2731 ElementsKindTraits<HOLEY_SEALED_ELEMENTS>> {};
2732
2733 template <typename Subclass, typename KindTraits>
2734 class FastFrozenObjectElementsAccessor
2735 : public FastSmiOrObjectElementsAccessor<Subclass, KindTraits> {
2736 public:
2737 using BackingStore = typename KindTraits::BackingStore;
2738
2739   static inline void SetImpl(Handle<JSObject> holder, InternalIndex entry,
2740 Object value) {
2741 UNREACHABLE();
2742 }
2743
2744   static inline void SetImpl(FixedArrayBase backing_store, InternalIndex entry,
2745 Object value) {
2746 UNREACHABLE();
2747 }
2748
2749   static inline void SetImpl(FixedArrayBase backing_store, InternalIndex entry,
2750 Object value, WriteBarrierMode mode) {
2751 UNREACHABLE();
2752 }
2753
2754   static Handle<Object> RemoveElement(Handle<JSArray> receiver,
2755 Where remove_position) {
2756 UNREACHABLE();
2757 }
2758
2759   static void DeleteImpl(Handle<JSObject> obj, InternalIndex entry) {
2760 UNREACHABLE();
2761 }
2762
2763   static void DeleteAtEnd(Handle<JSObject> obj,
2764 Handle<BackingStore> backing_store, uint32_t entry) {
2765 UNREACHABLE();
2766 }
2767
2768   static void DeleteCommon(Handle<JSObject> obj, uint32_t entry,
2769 Handle<FixedArrayBase> store) {
2770 UNREACHABLE();
2771 }
2772
2773   static Handle<Object> PopImpl(Handle<JSArray> receiver) { UNREACHABLE(); }
2774
2775   static uint32_t PushImpl(Handle<JSArray> receiver, BuiltinArguments* args,
2776 uint32_t push_size) {
2777 UNREACHABLE();
2778 }
2779
2780   static void AddImpl(Handle<JSObject> object, uint32_t index,
2781 Handle<Object> value, PropertyAttributes attributes,
2782 uint32_t new_capacity) {
2783 UNREACHABLE();
2784 }
2785
2786   static void SetLengthImpl(Isolate* isolate, Handle<JSArray> array,
2787 uint32_t length,
2788 Handle<FixedArrayBase> backing_store) {
2789 UNREACHABLE();
2790 }
2791
2792   static void ReconfigureImpl(Handle<JSObject> object,
2793 Handle<FixedArrayBase> store, InternalIndex entry,
2794 Handle<Object> value,
2795 PropertyAttributes attributes) {
2796 UNREACHABLE();
2797 }
2798 };
2799
2800 class FastPackedFrozenObjectElementsAccessor
2801 : public FastFrozenObjectElementsAccessor<
2802 FastPackedFrozenObjectElementsAccessor,
2803 ElementsKindTraits<PACKED_FROZEN_ELEMENTS>> {};
2804
2805 class FastHoleyFrozenObjectElementsAccessor
2806 : public FastFrozenObjectElementsAccessor<
2807 FastHoleyFrozenObjectElementsAccessor,
2808 ElementsKindTraits<HOLEY_FROZEN_ELEMENTS>> {};
2809
2810 class FastHoleyObjectElementsAccessor
2811 : public FastSmiOrObjectElementsAccessor<
2812 FastHoleyObjectElementsAccessor, ElementsKindTraits<HOLEY_ELEMENTS>> {
2813 };
2814
2815 template <typename Subclass, typename KindTraits>
2816 class FastDoubleElementsAccessor
2817 : public FastElementsAccessor<Subclass, KindTraits> {
2818 public:
2819   static Handle<Object> GetImpl(Isolate* isolate, FixedArrayBase backing_store,
2820 InternalIndex entry) {
2821 return FixedDoubleArray::get(FixedDoubleArray::cast(backing_store),
2822 entry.as_int(), isolate);
2823 }
2824
2825   static inline void SetImpl(Handle<JSObject> holder, InternalIndex entry,
2826 Object value) {
2827 SetImpl(holder->elements(), entry, value);
2828 }
2829
2830   static inline void SetImpl(FixedArrayBase backing_store, InternalIndex entry,
2831 Object value) {
2832 FixedDoubleArray::cast(backing_store).set(entry.as_int(), value.Number());
2833 }
2834
2835   static inline void SetImpl(FixedArrayBase backing_store, InternalIndex entry,
2836 Object value, WriteBarrierMode mode) {
2837 FixedDoubleArray::cast(backing_store).set(entry.as_int(), value.Number());
2838 }
2839
2840   static void CopyElementsImpl(Isolate* isolate, FixedArrayBase from,
2841 uint32_t from_start, FixedArrayBase to,
2842 ElementsKind from_kind, uint32_t to_start,
2843 int packed_size, int copy_size) {
2844 DisallowHeapAllocation no_allocation;
2845 switch (from_kind) {
2846 case PACKED_SMI_ELEMENTS:
2847 CopyPackedSmiToDoubleElements(from, from_start, to, to_start,
2848 packed_size, copy_size);
2849 break;
2850 case HOLEY_SMI_ELEMENTS:
2851 CopySmiToDoubleElements(from, from_start, to, to_start, copy_size);
2852 break;
2853 case PACKED_DOUBLE_ELEMENTS:
2854 case HOLEY_DOUBLE_ELEMENTS:
2855 CopyDoubleToDoubleElements(from, from_start, to, to_start, copy_size);
2856 break;
2857 case PACKED_ELEMENTS:
2858 case PACKED_FROZEN_ELEMENTS:
2859 case PACKED_SEALED_ELEMENTS:
2860 case PACKED_NONEXTENSIBLE_ELEMENTS:
2861 case HOLEY_ELEMENTS:
2862 case HOLEY_FROZEN_ELEMENTS:
2863 case HOLEY_SEALED_ELEMENTS:
2864 case HOLEY_NONEXTENSIBLE_ELEMENTS:
2865 CopyObjectToDoubleElements(from, from_start, to, to_start, copy_size);
2866 break;
2867 case DICTIONARY_ELEMENTS:
2868 CopyDictionaryToDoubleElements(isolate, from, from_start, to, to_start,
2869 copy_size);
2870 break;
2871 case FAST_SLOPPY_ARGUMENTS_ELEMENTS:
2872 case SLOW_SLOPPY_ARGUMENTS_ELEMENTS:
2873 case FAST_STRING_WRAPPER_ELEMENTS:
2874 case SLOW_STRING_WRAPPER_ELEMENTS:
2875 case NO_ELEMENTS:
2876 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) case TYPE##_ELEMENTS:
2877 TYPED_ARRAYS(TYPED_ARRAY_CASE)
2878 #undef TYPED_ARRAY_CASE
2879 // This function is currently only used for JSArrays with non-zero
2880 // length.
2881 UNREACHABLE();
2882 }
2883 }
2884
2885   static Maybe<bool> CollectValuesOrEntriesImpl(
2886 Isolate* isolate, Handle<JSObject> object,
2887 Handle<FixedArray> values_or_entries, bool get_entries, int* nof_items,
2888 PropertyFilter filter) {
2889 Handle<FixedDoubleArray> elements(
2890 FixedDoubleArray::cast(object->elements()), isolate);
2891 int count = 0;
2892 uint32_t length = elements->length();
2893 for (uint32_t index = 0; index < length; ++index) {
2894 InternalIndex entry(index);
2895 if (!Subclass::HasEntryImpl(isolate, *elements, entry)) continue;
2896 Handle<Object> value = Subclass::GetImpl(isolate, *elements, entry);
2897 if (get_entries) {
2898 value = MakeEntryPair(isolate, index, value);
2899 }
2900 values_or_entries->set(count++, *value);
2901 }
2902 *nof_items = count;
2903 return Just(true);
2904 }
2905
2906   static Maybe<int64_t> IndexOfValueImpl(Isolate* isolate,
2907 Handle<JSObject> receiver,
2908 Handle<Object> search_value,
2909 size_t start_from, size_t length) {
2910 DCHECK(JSObject::PrototypeHasNoElements(isolate, *receiver));
2911 DisallowHeapAllocation no_gc;
2912 FixedArrayBase elements_base = receiver->elements();
2913 Object value = *search_value;
2914
2915 length = std::min(static_cast<size_t>(elements_base.length()), length);
2916
2917 if (start_from >= length) return Just<int64_t>(-1);
2918
2919 if (!value.IsNumber()) {
2920 return Just<int64_t>(-1);
2921 }
2922 if (value.IsNaN()) {
2923 return Just<int64_t>(-1);
2924 }
2925 double numeric_search_value = value.Number();
2926 FixedDoubleArray elements = FixedDoubleArray::cast(receiver->elements());
2927
2928 STATIC_ASSERT(FixedDoubleArray::kMaxLength <=
2929 std::numeric_limits<int>::max());
2930 for (size_t k = start_from; k < length; ++k) {
2931 int k_int = static_cast<int>(k);
2932 if (elements.is_the_hole(k_int)) {
2933 continue;
2934 }
2935 if (elements.get_scalar(k_int) == numeric_search_value) {
2936 return Just<int64_t>(k);
2937 }
2938 }
2939 return Just<int64_t>(-1);
2940 }
2941 };
2942
2943 class FastPackedDoubleElementsAccessor
2944 : public FastDoubleElementsAccessor<
2945 FastPackedDoubleElementsAccessor,
2946 ElementsKindTraits<PACKED_DOUBLE_ELEMENTS>> {};
2947
2948 class FastHoleyDoubleElementsAccessor
2949 : public FastDoubleElementsAccessor<
2950 FastHoleyDoubleElementsAccessor,
2951 ElementsKindTraits<HOLEY_DOUBLE_ELEMENTS>> {};
2952
2953 // Super class for all external element arrays.
2954 template <ElementsKind Kind, typename ElementType>
2955 class TypedElementsAccessor
2956 : public ElementsAccessorBase<TypedElementsAccessor<Kind, ElementType>,
2957 ElementsKindTraits<Kind>> {
2958 public:
2959 using BackingStore = typename ElementsKindTraits<Kind>::BackingStore;
2960 using AccessorClass = TypedElementsAccessor<Kind, ElementType>;
2961
2962 // Conversions from (other) scalar values.
2963   static ElementType FromScalar(int value) {
2964 return static_cast<ElementType>(value);
2965 }
2966   static ElementType FromScalar(uint32_t value) {
2967 return static_cast<ElementType>(value);
2968 }
2969   static ElementType FromScalar(double value) {
2970 return FromScalar(DoubleToInt32(value));
2971 }
2972   static ElementType FromScalar(int64_t value) { UNREACHABLE(); }
2973   static ElementType FromScalar(uint64_t value) { UNREACHABLE(); }
2974
2975 // Conversions from objects / handles.
2976   static ElementType FromObject(Object value, bool* lossless = nullptr) {
2977 if (value.IsSmi()) {
2978 return FromScalar(Smi::ToInt(value));
2979 } else if (value.IsHeapNumber()) {
2980 return FromScalar(HeapNumber::cast(value).value());
2981 } else {
2982 // Clamp undefined here as well. All other types have been
2983 // converted to a number type further up in the call chain.
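      // Example (illustrative): FromObject(undefined) becomes FromScalar(NaN),
      // which stores 0 for the integral kinds and NaN for the float kinds.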
2984 DCHECK(value.IsUndefined());
2985 return FromScalar(Oddball::cast(value).to_number_raw());
2986 }
2987 }
2988   static ElementType FromHandle(Handle<Object> value,
2989 bool* lossless = nullptr) {
2990 return FromObject(*value, lossless);
2991 }
2992
2993 // Conversion of scalar value to handlified object.
2994 static Handle<Object> ToHandle(Isolate* isolate, ElementType value);
2995
2996   static void SetImpl(Handle<JSObject> holder, InternalIndex entry,
2997 Object value) {
2998 Handle<JSTypedArray> typed_array = Handle<JSTypedArray>::cast(holder);
2999 DCHECK_LE(entry.raw_value(), typed_array->length());
3000 SetImpl(static_cast<ElementType*>(typed_array->DataPtr()),
3001 entry.raw_value(), FromObject(value));
3002 }
3003
3004   static void SetImpl(ElementType* data_ptr, size_t entry, ElementType value) {
3005 // The JavaScript memory model allows for racy reads and writes to a
3006 // SharedArrayBuffer's backing store. ThreadSanitizer will catch these
3007 // racy accesses and warn about them, so we disable TSAN for these reads
3008 // and writes using annotations.
3009 //
3010 // We don't use relaxed atomics here, as it is not a requirement of the
3011 // JavaScript memory model to have tear-free reads of overlapping accesses,
3012 // and using relaxed atomics may introduce overhead.
3013 TSAN_ANNOTATE_IGNORE_WRITES_BEGIN;
3014 if (COMPRESS_POINTERS_BOOL && alignof(ElementType) > kTaggedSize) {
3015 // TODO(ishell, v8:8875): When pointer compression is enabled 8-byte size
3016 // fields (external pointers, doubles and BigInt data) are only
3017     // kTaggedSize aligned, so we have to use an unaligned-pointer-friendly
3018     // way of accessing them in order to avoid undefined behavior in C++ code.
3019 base::WriteUnalignedValue<ElementType>(
3020 reinterpret_cast<Address>(data_ptr + entry), value);
3021 } else {
3022 data_ptr[entry] = value;
3023 }
3024 TSAN_ANNOTATE_IGNORE_WRITES_END;
3025 }
3026
3027   static Handle<Object> GetInternalImpl(Handle<JSObject> holder,
3028 InternalIndex entry) {
3029 Handle<JSTypedArray> typed_array = Handle<JSTypedArray>::cast(holder);
3030 Isolate* isolate = typed_array->GetIsolate();
3031 DCHECK_LE(entry.raw_value(), typed_array->length());
3032 DCHECK(!typed_array->WasDetached());
3033 ElementType elem = GetImpl(
3034 static_cast<ElementType*>(typed_array->DataPtr()), entry.raw_value());
3035 return ToHandle(isolate, elem);
3036 }
3037
3038   static Handle<Object> GetImpl(Isolate* isolate, FixedArrayBase backing_store,
3039 InternalIndex entry) {
3040 UNREACHABLE();
3041 }
3042
3043   static ElementType GetImpl(ElementType* data_ptr, size_t entry) {
3044 // The JavaScript memory model allows for racy reads and writes to a
3045 // SharedArrayBuffer's backing store. ThreadSanitizer will catch these
3046 // racy accesses and warn about them, so we disable TSAN for these reads
3047 // and writes using annotations.
3048 //
3049 // We don't use relaxed atomics here, as it is not a requirement of the
3050 // JavaScript memory model to have tear-free reads of overlapping accesses,
3051 // and using relaxed atomics may introduce overhead.
3052 TSAN_ANNOTATE_IGNORE_READS_BEGIN;
3053 ElementType result;
3054 if (COMPRESS_POINTERS_BOOL && alignof(ElementType) > kTaggedSize) {
3055 // TODO(ishell, v8:8875): When pointer compression is enabled 8-byte size
3056 // fields (external pointers, doubles and BigInt data) are only
3057     // kTaggedSize aligned, so we have to use an unaligned-pointer-friendly
3058     // way of accessing them in order to avoid undefined behavior in C++ code.
3059 result = base::ReadUnalignedValue<ElementType>(
3060 reinterpret_cast<Address>(data_ptr + entry));
3061 } else {
3062 result = data_ptr[entry];
3063 }
3064 TSAN_ANNOTATE_IGNORE_READS_END;
3065 return result;
3066 }
3067
3068   static PropertyDetails GetDetailsImpl(JSObject holder, InternalIndex entry) {
3069 return PropertyDetails(kData, DONT_DELETE, PropertyCellType::kNoCell);
3070 }
3071
3072   static PropertyDetails GetDetailsImpl(FixedArrayBase backing_store,
3073 InternalIndex entry) {
3074 return PropertyDetails(kData, DONT_DELETE, PropertyCellType::kNoCell);
3075 }
3076
3077   static bool HasElementImpl(Isolate* isolate, JSObject holder, size_t index,
3078 FixedArrayBase backing_store,
3079 PropertyFilter filter) {
3080 return index < AccessorClass::GetCapacityImpl(holder, backing_store);
3081 }
3082
3083   static bool HasAccessorsImpl(JSObject holder, FixedArrayBase backing_store) {
3084 return false;
3085 }
3086
3087   static void SetLengthImpl(Isolate* isolate, Handle<JSArray> array,
3088 uint32_t length,
3089 Handle<FixedArrayBase> backing_store) {
3090 // External arrays do not support changing their length.
3091 UNREACHABLE();
3092 }
3093
3094   static void DeleteImpl(Handle<JSObject> obj, InternalIndex entry) {
3095 UNREACHABLE();
3096 }
3097
3098   static InternalIndex GetEntryForIndexImpl(Isolate* isolate, JSObject holder,
3099 FixedArrayBase backing_store,
3100 size_t index,
3101 PropertyFilter filter) {
3102 return index < AccessorClass::GetCapacityImpl(holder, backing_store)
3103 ? InternalIndex(index)
3104 : InternalIndex::NotFound();
3105 }
3106
3107   static size_t GetCapacityImpl(JSObject holder, FixedArrayBase backing_store) {
3108 JSTypedArray typed_array = JSTypedArray::cast(holder);
3109 if (typed_array.WasDetached()) return 0;
3110 return typed_array.length();
3111 }
3112
3113   static size_t NumberOfElementsImpl(JSObject receiver,
3114 FixedArrayBase backing_store) {
3115 return AccessorClass::GetCapacityImpl(receiver, backing_store);
3116 }
3117
3118   V8_WARN_UNUSED_RESULT static ExceptionStatus AddElementsToKeyAccumulatorImpl(
3119 Handle<JSObject> receiver, KeyAccumulator* accumulator,
3120 AddKeyConversion convert) {
3121 Isolate* isolate = receiver->GetIsolate();
3122 Handle<FixedArrayBase> elements(receiver->elements(), isolate);
3123 size_t length = AccessorClass::GetCapacityImpl(*receiver, *elements);
3124 for (size_t i = 0; i < length; i++) {
3125 Handle<Object> value =
3126 AccessorClass::GetInternalImpl(receiver, InternalIndex(i));
3127 RETURN_FAILURE_IF_NOT_SUCCESSFUL(accumulator->AddKey(value, convert));
3128 }
3129 return ExceptionStatus::kSuccess;
3130 }
3131
3132   static Maybe<bool> CollectValuesOrEntriesImpl(
3133 Isolate* isolate, Handle<JSObject> object,
3134 Handle<FixedArray> values_or_entries, bool get_entries, int* nof_items,
3135 PropertyFilter filter) {
3136 int count = 0;
3137 if ((filter & ONLY_CONFIGURABLE) == 0) {
3138 Handle<FixedArrayBase> elements(object->elements(), isolate);
3139 size_t length = AccessorClass::GetCapacityImpl(*object, *elements);
3140 for (size_t index = 0; index < length; ++index) {
3141 Handle<Object> value =
3142 AccessorClass::GetInternalImpl(object, InternalIndex(index));
3143 if (get_entries) {
3144 value = MakeEntryPair(isolate, index, value);
3145 }
3146 values_or_entries->set(count++, *value);
3147 }
3148 }
3149 *nof_items = count;
3150 return Just(true);
3151 }
3152
3153   static Object FillImpl(Handle<JSObject> receiver, Handle<Object> value,
3154 size_t start, size_t end) {
3155 Handle<JSTypedArray> typed_array = Handle<JSTypedArray>::cast(receiver);
3156 DCHECK(!typed_array->WasDetached());
3157 DCHECK_LE(start, end);
3158 DCHECK_LE(end, typed_array->length());
3159 DisallowHeapAllocation no_gc;
3160 ElementType scalar = FromHandle(value);
3161 ElementType* data = static_cast<ElementType*>(typed_array->DataPtr());
3162 if (COMPRESS_POINTERS_BOOL && alignof(ElementType) > kTaggedSize) {
3163 // TODO(ishell, v8:8875): See UnalignedSlot<T> for details.
3164 std::fill(UnalignedSlot<ElementType>(data + start),
3165 UnalignedSlot<ElementType>(data + end), scalar);
3166 } else {
3167 std::fill(data + start, data + end, scalar);
3168 }
3169 return *typed_array;
3170 }
3171
3172   static Maybe<bool> IncludesValueImpl(Isolate* isolate,
3173 Handle<JSObject> receiver,
3174 Handle<Object> value, size_t start_from,
3175 size_t length) {
3176 DisallowHeapAllocation no_gc;
3177 JSTypedArray typed_array = JSTypedArray::cast(*receiver);
3178
3179 // TODO(caitp): return Just(false) here when implementing strict throwing on
3180 // detached views.
3181 if (typed_array.WasDetached()) {
3182 return Just(value->IsUndefined(isolate) && length > start_from);
3183 }
3184
3185 if (value->IsUndefined(isolate) && length > typed_array.length()) {
3186 return Just(true);
3187 }
3188
3189 // Prototype has no elements, and not searching for the hole --- limit
3190 // search to backing store length.
3191 if (typed_array.length() < length) {
3192 length = typed_array.length();
3193 }
3194
3195 ElementType typed_search_value;
3196 ElementType* data_ptr =
3197 reinterpret_cast<ElementType*>(typed_array.DataPtr());
3198 if (Kind == BIGINT64_ELEMENTS || Kind == BIGUINT64_ELEMENTS) {
3199 if (!value->IsBigInt()) return Just(false);
3200 bool lossless;
3201 typed_search_value = FromHandle(value, &lossless);
3202 if (!lossless) return Just(false);
3203 } else {
3204 if (!value->IsNumber()) return Just(false);
3205 double search_value = value->Number();
3206 if (!std::isfinite(search_value)) {
3207 // Integral types cannot represent +Inf or NaN.
3208 if (Kind < FLOAT32_ELEMENTS || Kind > FLOAT64_ELEMENTS) {
3209 return Just(false);
3210 }
3211 if (std::isnan(search_value)) {
3212 for (size_t k = start_from; k < length; ++k) {
3213 double elem_k =
3214 static_cast<double>(AccessorClass::GetImpl(data_ptr, k));
3215 if (std::isnan(elem_k)) return Just(true);
3216 }
3217 return Just(false);
3218 }
3219 } else if (search_value < std::numeric_limits<ElementType>::lowest() ||
3220 search_value > std::numeric_limits<ElementType>::max()) {
3221 // Return false if value can't be represented in this space.
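        // Example (illustrative): new Int8Array([1]).includes(1000) bails out
        // here without scanning the elements.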
3222 return Just(false);
3223 }
3224 typed_search_value = static_cast<ElementType>(search_value);
3225 if (static_cast<double>(typed_search_value) != search_value) {
3226 return Just(false); // Loss of precision.
3227 }
3228 }
3229
3230 for (size_t k = start_from; k < length; ++k) {
3231 ElementType elem_k = AccessorClass::GetImpl(data_ptr, k);
3232 if (elem_k == typed_search_value) return Just(true);
3233 }
3234 return Just(false);
3235 }
3236
3237   static Maybe<int64_t> IndexOfValueImpl(Isolate* isolate,
3238 Handle<JSObject> receiver,
3239 Handle<Object> value,
3240 size_t start_from, size_t length) {
3241 DisallowHeapAllocation no_gc;
3242 JSTypedArray typed_array = JSTypedArray::cast(*receiver);
3243
3244 if (typed_array.WasDetached()) return Just<int64_t>(-1);
3245
3246 ElementType typed_search_value;
3247
3248 ElementType* data_ptr =
3249 reinterpret_cast<ElementType*>(typed_array.DataPtr());
3250 if (Kind == BIGINT64_ELEMENTS || Kind == BIGUINT64_ELEMENTS) {
3251 if (!value->IsBigInt()) return Just<int64_t>(-1);
3252 bool lossless;
3253 typed_search_value = FromHandle(value, &lossless);
3254 if (!lossless) return Just<int64_t>(-1);
3255 } else {
3256 if (!value->IsNumber()) return Just<int64_t>(-1);
3257 double search_value = value->Number();
3258 if (!std::isfinite(search_value)) {
3259 // Integral types cannot represent +Inf or NaN.
3260 if (Kind < FLOAT32_ELEMENTS || Kind > FLOAT64_ELEMENTS) {
3261 return Just<int64_t>(-1);
3262 }
3263 if (std::isnan(search_value)) {
3264 return Just<int64_t>(-1);
3265 }
3266 } else if (search_value < std::numeric_limits<ElementType>::lowest() ||
3267 search_value > std::numeric_limits<ElementType>::max()) {
3268         // Return -1 if value can't be represented in this ElementsKind.
3269 return Just<int64_t>(-1);
3270 }
3271 typed_search_value = static_cast<ElementType>(search_value);
3272 if (static_cast<double>(typed_search_value) != search_value) {
3273 return Just<int64_t>(-1); // Loss of precision.
3274 }
3275 }
3276
3277 // Prototype has no elements, and not searching for the hole --- limit
3278 // search to backing store length.
3279 if (typed_array.length() < length) {
3280 length = typed_array.length();
3281 }
3282
3283 for (size_t k = start_from; k < length; ++k) {
3284 ElementType elem_k = AccessorClass::GetImpl(data_ptr, k);
3285 if (elem_k == typed_search_value) return Just<int64_t>(k);
3286 }
3287 return Just<int64_t>(-1);
3288 }
3289
3290   static Maybe<int64_t> LastIndexOfValueImpl(Handle<JSObject> receiver,
3291 Handle<Object> value,
3292 size_t start_from) {
3293 DisallowHeapAllocation no_gc;
3294 JSTypedArray typed_array = JSTypedArray::cast(*receiver);
3295
3296 DCHECK(!typed_array.WasDetached());
3297
3298 ElementType typed_search_value;
3299
3300 ElementType* data_ptr =
3301 reinterpret_cast<ElementType*>(typed_array.DataPtr());
3302 if (Kind == BIGINT64_ELEMENTS || Kind == BIGUINT64_ELEMENTS) {
3303 if (!value->IsBigInt()) return Just<int64_t>(-1);
3304 bool lossless;
3305 typed_search_value = FromHandle(value, &lossless);
3306 if (!lossless) return Just<int64_t>(-1);
3307 } else {
3308 if (!value->IsNumber()) return Just<int64_t>(-1);
3309 double search_value = value->Number();
3310 if (!std::isfinite(search_value)) {
3311 if (std::is_integral<ElementType>::value) {
3312 // Integral types cannot represent +Inf or NaN.
3313 return Just<int64_t>(-1);
3314 } else if (std::isnan(search_value)) {
3315 // Strict Equality Comparison of NaN is always false.
3316 return Just<int64_t>(-1);
3317 }
3318 } else if (search_value < std::numeric_limits<ElementType>::lowest() ||
3319 search_value > std::numeric_limits<ElementType>::max()) {
3320 // Return -1 if value can't be represented in this ElementsKind.
3321 return Just<int64_t>(-1);
3322 }
3323 typed_search_value = static_cast<ElementType>(search_value);
3324 if (static_cast<double>(typed_search_value) != search_value) {
3325 return Just<int64_t>(-1); // Loss of precision.
3326 }
3327 }
3328
3329 DCHECK_LT(start_from, typed_array.length());
3330 size_t k = start_from;
3331 do {
3332 ElementType elem_k = AccessorClass::GetImpl(data_ptr, k);
3333 if (elem_k == typed_search_value) return Just<int64_t>(k);
3334 } while (k-- != 0);
3335 return Just<int64_t>(-1);
3336 }
3337
3338   static void ReverseImpl(JSObject receiver) {
3339 DisallowHeapAllocation no_gc;
3340 JSTypedArray typed_array = JSTypedArray::cast(receiver);
3341
3342 DCHECK(!typed_array.WasDetached());
3343
3344 size_t len = typed_array.length();
3345 if (len == 0) return;
3346
3347 ElementType* data = static_cast<ElementType*>(typed_array.DataPtr());
3348 if (COMPRESS_POINTERS_BOOL && alignof(ElementType) > kTaggedSize) {
3349 // TODO(ishell, v8:8875): See UnalignedSlot<T> for details.
3350 std::reverse(UnalignedSlot<ElementType>(data),
3351 UnalignedSlot<ElementType>(data + len));
3352 } else {
3353 std::reverse(data, data + len);
3354 }
3355 }
3356
3357   static Handle<FixedArray> CreateListFromArrayLikeImpl(Isolate* isolate,
3358 Handle<JSObject> object,
3359 uint32_t length) {
3360 Handle<JSTypedArray> typed_array = Handle<JSTypedArray>::cast(object);
3361 Handle<FixedArray> result = isolate->factory()->NewFixedArray(length);
3362 for (uint32_t i = 0; i < length; i++) {
3363 Handle<Object> value =
3364 AccessorClass::GetInternalImpl(typed_array, InternalIndex(i));
3365 result->set(i, *value);
3366 }
3367 return result;
3368 }
3369
3370   static void CopyTypedArrayElementsSliceImpl(JSTypedArray source,
3371 JSTypedArray destination,
3372 size_t start, size_t end) {
3373 DisallowHeapAllocation no_gc;
3374 DCHECK_EQ(destination.GetElementsKind(), AccessorClass::kind());
3375 CHECK(!source.WasDetached());
3376 CHECK(!destination.WasDetached());
3377 DCHECK_LE(start, end);
3378 DCHECK_LE(end, source.length());
3379 size_t count = end - start;
3380 DCHECK_LE(count, destination.length());
3381 ElementType* dest_data = static_cast<ElementType*>(destination.DataPtr());
3382 switch (source.GetElementsKind()) {
3383 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) \
3384 case TYPE##_ELEMENTS: { \
3385 ctype* source_data = reinterpret_cast<ctype*>(source.DataPtr()) + start; \
3386 CopyBetweenBackingStores<TYPE##_ELEMENTS, ctype>(source_data, dest_data, \
3387 count); \
3388 break; \
3389 }
3390 TYPED_ARRAYS(TYPED_ARRAY_CASE)
3391 #undef TYPED_ARRAY_CASE
3392 default:
3393 UNREACHABLE();
3394 break;
3395 }
3396 }
3397
3398   static bool HasSimpleRepresentation(ExternalArrayType type) {
3399 return !(type == kExternalFloat32Array || type == kExternalFloat64Array ||
3400 type == kExternalUint8ClampedArray);
3401 }
3402
3403 template <ElementsKind SourceKind, typename SourceElementType>
3404   static void CopyBetweenBackingStores(SourceElementType* source_data_ptr,
3405 ElementType* dest_data_ptr,
3406 size_t length) {
3407 DisallowHeapAllocation no_gc;
3408 for (size_t i = 0; i < length; i++) {
3409 // We use scalar accessors to avoid boxing/unboxing, so there are no
3410 // allocations.
3411 SourceElementType source_elem =
3412 TypedElementsAccessor<SourceKind, SourceElementType>::GetImpl(
3413 source_data_ptr, i);
3414 ElementType dest_elem = FromScalar(source_elem);
3415 SetImpl(dest_data_ptr, i, dest_elem);
3416 }
3417 }
3418
3419   static void CopyElementsFromTypedArray(JSTypedArray source,
3420 JSTypedArray destination,
3421 size_t length, size_t offset) {
3422 // The source is a typed array, so we know we don't need to do ToNumber
3423 // side-effects, as the source elements will always be a number.
3424 DisallowHeapAllocation no_gc;
3425
3426 CHECK(!source.WasDetached());
3427 CHECK(!destination.WasDetached());
3428
3429 DCHECK_LE(offset, destination.length());
3430 DCHECK_LE(length, destination.length() - offset);
3431 DCHECK_LE(length, source.length());
3432
3433 ExternalArrayType source_type = source.type();
3434 ExternalArrayType destination_type = destination.type();
3435
3436 bool same_type = source_type == destination_type;
3437 bool same_size = source.element_size() == destination.element_size();
3438 bool both_are_simple = HasSimpleRepresentation(source_type) &&
3439 HasSimpleRepresentation(destination_type);
3440
3441 uint8_t* source_data = static_cast<uint8_t*>(source.DataPtr());
3442 uint8_t* dest_data = static_cast<uint8_t*>(destination.DataPtr());
3443 size_t source_byte_length = source.byte_length();
3444 size_t dest_byte_length = destination.byte_length();
3445
3446 // We can simply copy the backing store if the types are the same, or if
3447 // we are converting e.g. Uint8 <-> Int8, as the binary representation
3448 // will be the same. This is not the case for floats or clamped Uint8,
3449 // which have special conversion operations.
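    // Example (illustrative): new Int8Array(1).set(new Uint8Array([255])) can
    // take the memmove path because 255 and -1 share the bit pattern 0xFF,
    // whereas a Float32 or Uint8Clamped destination needs the per-element
    // conversion below.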
3450 if (same_type || (same_size && both_are_simple)) {
3451 size_t element_size = source.element_size();
3452 std::memmove(dest_data + offset * element_size, source_data,
3453 length * element_size);
3454 } else {
3455 std::unique_ptr<uint8_t[]> cloned_source_elements;
3456
3457 // If the typedarrays are overlapped, clone the source.
3458 if (dest_data + dest_byte_length > source_data &&
3459 source_data + source_byte_length > dest_data) {
3460 cloned_source_elements.reset(new uint8_t[source_byte_length]);
3461 std::memcpy(cloned_source_elements.get(), source_data,
3462 source_byte_length);
3463 source_data = cloned_source_elements.get();
3464 }
3465
3466 switch (source.GetElementsKind()) {
3467 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) \
3468 case TYPE##_ELEMENTS: \
3469 CopyBetweenBackingStores<TYPE##_ELEMENTS, ctype>( \
3470 reinterpret_cast<ctype*>(source_data), \
3471 reinterpret_cast<ElementType*>(dest_data) + offset, length); \
3472 break;
3473 TYPED_ARRAYS(TYPED_ARRAY_CASE)
3474 default:
3475 UNREACHABLE();
3476 break;
3477 }
3478 #undef TYPED_ARRAY_CASE
3479 }
3480 }
3481
3482   static bool HoleyPrototypeLookupRequired(Isolate* isolate, Context context,
3483 JSArray source) {
3484 DisallowHeapAllocation no_gc;
3485 DisallowJavascriptExecution no_js(isolate);
3486
3487 #ifdef V8_ENABLE_FORCE_SLOW_PATH
3488 if (isolate->force_slow_path()) return true;
3489 #endif
3490
3491 Object source_proto = source.map().prototype();
3492
3493 // Null prototypes are OK - we don't need to do prototype chain lookups on
3494 // them.
3495 if (source_proto.IsNull(isolate)) return false;
3496 if (source_proto.IsJSProxy()) return true;
3497 if (!context.native_context().is_initial_array_prototype(
3498 JSObject::cast(source_proto))) {
3499 return true;
3500 }
3501
3502 return !Protectors::IsNoElementsIntact(isolate);
3503 }
3504
3505   static bool TryCopyElementsFastNumber(Context context, JSArray source,
3506 JSTypedArray destination, size_t length,
3507 size_t offset) {
3508 if (Kind == BIGINT64_ELEMENTS || Kind == BIGUINT64_ELEMENTS) return false;
3509 Isolate* isolate = source.GetIsolate();
3510 DisallowHeapAllocation no_gc;
3511 DisallowJavascriptExecution no_js(isolate);
3512
3513 CHECK(!destination.WasDetached());
3514
3515 size_t current_length;
3516 DCHECK(source.length().IsNumber() &&
3517            TryNumberToSize(source.length(), &current_length) &&
3518 length <= current_length);
3519 USE(current_length);
3520
3521 size_t dest_length = destination.length();
3522 DCHECK(length + offset <= dest_length);
3523 USE(dest_length);
3524
3525 ElementsKind kind = source.GetElementsKind();
3526
3527 // When we find the hole, we normally have to look up the element on the
3528 // prototype chain, which is not handled here and we return false instead.
3529 // When the array has the original array prototype, and that prototype has
3530 // not been changed in a way that would affect lookups, we can just convert
3531 // the hole into undefined.
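    // Example (illustrative): with an untouched Array.prototype,
    // new Int32Array(2).set([1, , ]) treats the hole as undefined, which
    // converts to NaN and is stored as 0.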
3532 if (HoleyPrototypeLookupRequired(isolate, context, source)) return false;
3533
3534 Oddball undefined = ReadOnlyRoots(isolate).undefined_value();
3535 ElementType* dest_data =
3536 reinterpret_cast<ElementType*>(destination.DataPtr()) + offset;
3537
3538 // Fast-path for packed Smi kind.
3539 if (kind == PACKED_SMI_ELEMENTS) {
3540 FixedArray source_store = FixedArray::cast(source.elements());
3541
3542 for (size_t i = 0; i < length; i++) {
3543 Object elem = source_store.get(static_cast<int>(i));
3544 SetImpl(dest_data, i, FromScalar(Smi::ToInt(elem)));
3545 }
3546 return true;
3547 } else if (kind == HOLEY_SMI_ELEMENTS) {
3548 FixedArray source_store = FixedArray::cast(source.elements());
3549 for (size_t i = 0; i < length; i++) {
3550 if (source_store.is_the_hole(isolate, static_cast<int>(i))) {
3551 SetImpl(dest_data, i, FromObject(undefined));
3552 } else {
3553 Object elem = source_store.get(static_cast<int>(i));
3554 SetImpl(dest_data, i, FromScalar(Smi::ToInt(elem)));
3555 }
3556 }
3557 return true;
3558 } else if (kind == PACKED_DOUBLE_ELEMENTS) {
3559 // Fast-path for packed double kind. We avoid boxing and then immediately
3560 // unboxing the double here by using get_scalar.
3561 FixedDoubleArray source_store = FixedDoubleArray::cast(source.elements());
3562
3563 for (size_t i = 0; i < length; i++) {
3564 // Use the from_double conversion for this specific TypedArray type,
3565 // rather than relying on C++ to convert elem.
3566 double elem = source_store.get_scalar(static_cast<int>(i));
3567 SetImpl(dest_data, i, FromScalar(elem));
3568 }
3569 return true;
3570 } else if (kind == HOLEY_DOUBLE_ELEMENTS) {
3571 FixedDoubleArray source_store = FixedDoubleArray::cast(source.elements());
3572 for (size_t i = 0; i < length; i++) {
3573 if (source_store.is_the_hole(static_cast<int>(i))) {
3574 SetImpl(dest_data, i, FromObject(undefined));
3575 } else {
3576 double elem = source_store.get_scalar(static_cast<int>(i));
3577 SetImpl(dest_data, i, FromScalar(elem));
3578 }
3579 }
3580 return true;
3581 }
3582 return false;
3583 }
3584
3585   static Object CopyElementsHandleSlow(Handle<Object> source,
3586 Handle<JSTypedArray> destination,
3587 size_t length, size_t offset) {
3588 Isolate* isolate = destination->GetIsolate();
3589 for (size_t i = 0; i < length; i++) {
3590 Handle<Object> elem;
3591 LookupIterator it(isolate, source, i);
3592 ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, elem,
3593 Object::GetProperty(&it));
3594 if (Kind == BIGINT64_ELEMENTS || Kind == BIGUINT64_ELEMENTS) {
3595 ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, elem,
3596 BigInt::FromObject(isolate, elem));
3597 } else {
3598 ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, elem,
3599 Object::ToNumber(isolate, elem));
3600 }
3601
3602 if (V8_UNLIKELY(destination->WasDetached())) {
3603 const char* op = "set";
3604 const MessageTemplate message = MessageTemplate::kDetachedOperation;
3605 Handle<String> operation =
3606 isolate->factory()->NewStringFromAsciiChecked(op);
3607 THROW_NEW_ERROR_RETURN_FAILURE(isolate,
3608 NewTypeError(message, operation));
3609 }
3610 // The spec says we store the length, then get each element, so we don't
3611 // need to check changes to length.
3612 SetImpl(destination, InternalIndex(offset + i), *elem);
3613 }
3614 return *isolate->factory()->undefined_value();
3615 }
3616
3617 // This doesn't guarantee that the destination array will be completely
3618 // filled. The caller must do this by passing a source with equal length, if
3619 // that is required.
3620   static Object CopyElementsHandleImpl(Handle<Object> source,
3621 Handle<JSObject> destination,
3622 size_t length, size_t offset) {
3623 Isolate* isolate = destination->GetIsolate();
3624 Handle<JSTypedArray> destination_ta =
3625 Handle<JSTypedArray>::cast(destination);
3626 DCHECK_LE(offset + length, destination_ta->length());
3627
3628 if (length == 0) return *isolate->factory()->undefined_value();
3629
3630 // All conversions from TypedArrays can be done without allocation.
3631 if (source->IsJSTypedArray()) {
3632 CHECK(!destination_ta->WasDetached());
3633 Handle<JSTypedArray> source_ta = Handle<JSTypedArray>::cast(source);
3634 ElementsKind source_kind = source_ta->GetElementsKind();
3635 bool source_is_bigint =
3636 source_kind == BIGINT64_ELEMENTS || source_kind == BIGUINT64_ELEMENTS;
3637 bool target_is_bigint =
3638 Kind == BIGINT64_ELEMENTS || Kind == BIGUINT64_ELEMENTS;
3639 // If we have to copy more elements than we have in the source, we need to
3640 // do special handling and conversion; that happens in the slow case.
3641 if (source_is_bigint == target_is_bigint && !source_ta->WasDetached() &&
3642 length + offset <= source_ta->length()) {
3643 CopyElementsFromTypedArray(*source_ta, *destination_ta, length, offset);
3644 return *isolate->factory()->undefined_value();
3645 }
3646 } else if (source->IsJSArray()) {
3647 CHECK(!destination_ta->WasDetached());
3648 // Fast cases for packed numbers kinds where we don't need to allocate.
3649 Handle<JSArray> source_js_array = Handle<JSArray>::cast(source);
3650 size_t current_length;
3651 DCHECK(source_js_array->length().IsNumber());
3652     if (TryNumberToSize(source_js_array->length(), &current_length) &&
3653 length <= current_length) {
3654 Handle<JSArray> source_array = Handle<JSArray>::cast(source);
3655 if (TryCopyElementsFastNumber(isolate->context(), *source_array,
3656 *destination_ta, length, offset)) {
3657 return *isolate->factory()->undefined_value();
3658 }
3659 }
3660 }
3661 // Final generic case that handles prototype chain lookups, getters, proxies
3662 // and observable side effects via valueOf, etc.
3663 return CopyElementsHandleSlow(source, destination_ta, length, offset);
3664 }
3665 };
3666
3667 // static
3668 template <>
3669 Handle<Object> TypedElementsAccessor<INT8_ELEMENTS, int8_t>::ToHandle(
3670 Isolate* isolate, int8_t value) {
3671 return handle(Smi::FromInt(value), isolate);
3672 }
3673
3674 // static
3675 template <>
3676 Handle<Object> TypedElementsAccessor<UINT8_ELEMENTS, uint8_t>::ToHandle(
3677 Isolate* isolate, uint8_t value) {
3678 return handle(Smi::FromInt(value), isolate);
3679 }
3680
3681 // static
3682 template <>
3683 Handle<Object> TypedElementsAccessor<INT16_ELEMENTS, int16_t>::ToHandle(
3684 Isolate* isolate, int16_t value) {
3685 return handle(Smi::FromInt(value), isolate);
3686 }
3687
3688 // static
3689 template <>
3690 Handle<Object> TypedElementsAccessor<UINT16_ELEMENTS, uint16_t>::ToHandle(
3691 Isolate* isolate, uint16_t value) {
3692 return handle(Smi::FromInt(value), isolate);
3693 }
3694
3695 // static
3696 template <>
3697 Handle<Object> TypedElementsAccessor<INT32_ELEMENTS, int32_t>::ToHandle(
3698 Isolate* isolate, int32_t value) {
3699 return isolate->factory()->NewNumberFromInt(value);
3700 }
3701
3702 // static
3703 template <>
3704 Handle<Object> TypedElementsAccessor<UINT32_ELEMENTS, uint32_t>::ToHandle(
3705 Isolate* isolate, uint32_t value) {
3706 return isolate->factory()->NewNumberFromUint(value);
3707 }
3708
3709 // static
3710 template <>
3711 float TypedElementsAccessor<FLOAT32_ELEMENTS, float>::FromScalar(double value) {
3712 return DoubleToFloat32(value);
3713 }
3714
3715 // static
3716 template <>
3717 Handle<Object> TypedElementsAccessor<FLOAT32_ELEMENTS, float>::ToHandle(
3718 Isolate* isolate, float value) {
3719 return isolate->factory()->NewNumber(value);
3720 }
3721
3722 // static
3723 template <>
3724 double TypedElementsAccessor<FLOAT64_ELEMENTS, double>::FromScalar(
3725 double value) {
3726 return value;
3727 }
3728
3729 // static
3730 template <>
3731 Handle<Object> TypedElementsAccessor<FLOAT64_ELEMENTS, double>::ToHandle(
3732 Isolate* isolate, double value) {
3733 return isolate->factory()->NewNumber(value);
3734 }
3735
3736 // static
3737 template <>
3738 uint8_t TypedElementsAccessor<UINT8_CLAMPED_ELEMENTS, uint8_t>::FromScalar(
3739 int value) {
3740 if (value < 0x00) return 0x00;
3741 if (value > 0xFF) return 0xFF;
3742 return static_cast<uint8_t>(value);
3743 }
3744
3745 // static
3746 template <>
3747 uint8_t TypedElementsAccessor<UINT8_CLAMPED_ELEMENTS, uint8_t>::FromScalar(
3748 uint32_t value) {
3749 // We need this special case for Uint32 -> Uint8Clamped, because the highest
3750 // Uint32 values will be negative as an int, clamping to 0, rather than 255.
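  // Example (illustrative): FromScalar(0x80000000u) must clamp to 0xFF; routed
  // through the int overload it would appear negative and clamp to 0 instead.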
3751 if (value > 0xFF) return 0xFF;
3752 return static_cast<uint8_t>(value);
3753 }
3754
3755 // static
3756 template <>
3757 uint8_t TypedElementsAccessor<UINT8_CLAMPED_ELEMENTS, uint8_t>::FromScalar(
3758 double value) {
3759 // Handle NaNs and less than zero values which clamp to zero.
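  // Note (assuming the default round-to-nearest-even FP mode): lrint rounds
  // halfway cases to even, e.g. 0.5 -> 0 and 1.5 -> 2, matching ToUint8Clamp.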
3760 if (!(value > 0)) return 0;
3761 if (value > 0xFF) return 0xFF;
3762 return static_cast<uint8_t>(lrint(value));
3763 }
3764
3765 // static
3766 template <>
3767 Handle<Object> TypedElementsAccessor<UINT8_CLAMPED_ELEMENTS, uint8_t>::ToHandle(
3768 Isolate* isolate, uint8_t value) {
3769 return handle(Smi::FromInt(value), isolate);
3770 }
3771
3772 // static
3773 template <>
3774 int64_t TypedElementsAccessor<BIGINT64_ELEMENTS, int64_t>::FromScalar(
3775 int value) {
3776 UNREACHABLE();
3777 }
3778
3779 // static
3780 template <>
3781 int64_t TypedElementsAccessor<BIGINT64_ELEMENTS, int64_t>::FromScalar(
3782 uint32_t value) {
3783 UNREACHABLE();
3784 }
3785
3786 // static
3787 template <>
3788 int64_t TypedElementsAccessor<BIGINT64_ELEMENTS, int64_t>::FromScalar(
3789 double value) {
3790 UNREACHABLE();
3791 }
3792
3793 // static
3794 template <>
3795 int64_t TypedElementsAccessor<BIGINT64_ELEMENTS, int64_t>::FromScalar(
3796 int64_t value) {
3797 return value;
3798 }
3799
3800 // static
3801 template <>
3802 int64_t TypedElementsAccessor<BIGINT64_ELEMENTS, int64_t>::FromScalar(
3803 uint64_t value) {
3804 return static_cast<int64_t>(value);
3805 }
3806
3807 // static
3808 template <>
3809 int64_t TypedElementsAccessor<BIGINT64_ELEMENTS, int64_t>::FromObject(
3810 Object value, bool* lossless) {
3811 return BigInt::cast(value).AsInt64(lossless);
3812 }
3813
3814 // static
3815 template <>
3816 Handle<Object> TypedElementsAccessor<BIGINT64_ELEMENTS, int64_t>::ToHandle(
3817 Isolate* isolate, int64_t value) {
3818 return BigInt::FromInt64(isolate, value);
3819 }
3820
3821 // static
3822 template <>
3823 uint64_t TypedElementsAccessor<BIGUINT64_ELEMENTS, uint64_t>::FromScalar(
3824 int value) {
3825 UNREACHABLE();
3826 }
3827
3828 // static
3829 template <>
3830 uint64_t TypedElementsAccessor<BIGUINT64_ELEMENTS, uint64_t>::FromScalar(
3831 uint32_t value) {
3832 UNREACHABLE();
3833 }
3834
3835 // static
3836 template <>
3837 uint64_t TypedElementsAccessor<BIGUINT64_ELEMENTS, uint64_t>::FromScalar(
3838 double value) {
3839 UNREACHABLE();
3840 }
3841
3842 // static
3843 template <>
3844 uint64_t TypedElementsAccessor<BIGUINT64_ELEMENTS, uint64_t>::FromScalar(
3845 int64_t value) {
3846 return static_cast<uint64_t>(value);
3847 }
3848
3849 // static
3850 template <>
3851 uint64_t TypedElementsAccessor<BIGUINT64_ELEMENTS, uint64_t>::FromScalar(
3852 uint64_t value) {
3853 return value;
3854 }
3855
3856 // static
3857 template <>
3858 uint64_t TypedElementsAccessor<BIGUINT64_ELEMENTS, uint64_t>::FromObject(
3859 Object value, bool* lossless) {
3860 return BigInt::cast(value).AsUint64(lossless);
3861 }
3862
3863 // static
3864 template <>
3865 Handle<Object> TypedElementsAccessor<BIGUINT64_ELEMENTS, uint64_t>::ToHandle(
3866 Isolate* isolate, uint64_t value) {
3867 return BigInt::FromUint64(isolate, value);
3868 }
3869
3870 #define FIXED_ELEMENTS_ACCESSOR(Type, type, TYPE, ctype) \
3871 using Type##ElementsAccessor = TypedElementsAccessor<TYPE##_ELEMENTS, ctype>;
3872 TYPED_ARRAYS(FIXED_ELEMENTS_ACCESSOR)
3873 #undef FIXED_ELEMENTS_ACCESSOR
3874
3875 template <typename Subclass, typename ArgumentsAccessor, typename KindTraits>
3876 class SloppyArgumentsElementsAccessor
3877 : public ElementsAccessorBase<Subclass, KindTraits> {
3878 public:
3879   static void ConvertArgumentsStoreResult(
3880 Handle<SloppyArgumentsElements> elements, Handle<Object> result) {
3881 UNREACHABLE();
3882 }
3883
3884   static Handle<Object> GetImpl(Isolate* isolate, FixedArrayBase parameters,
3885 InternalIndex entry) {
3886 Handle<SloppyArgumentsElements> elements(
3887 SloppyArgumentsElements::cast(parameters), isolate);
3888 uint32_t length = elements->length();
3889 if (entry.as_uint32() < length) {
3890 // Read context mapped entry.
3891 DisallowHeapAllocation no_gc;
3892 Object probe = elements->mapped_entries(entry.as_uint32());
3893 DCHECK(!probe.IsTheHole(isolate));
3894 Context context = elements->context();
3895 int context_entry = Smi::ToInt(probe);
3896 DCHECK(!context.get(context_entry).IsTheHole(isolate));
3897 return handle(context.get(context_entry), isolate);
3898 } else {
3899 // Entry is not context mapped, defer to the arguments.
3900 Handle<Object> result = ArgumentsAccessor::GetImpl(
3901 isolate, elements->arguments(), entry.adjust_down(length));
3902 return Subclass::ConvertArgumentsStoreResult(isolate, elements, result);
3903 }
3904 }
3905
3906   static void TransitionElementsKindImpl(Handle<JSObject> object,
3907 Handle<Map> map) {
3908 UNREACHABLE();
3909 }
3910
3911   static void GrowCapacityAndConvertImpl(Handle<JSObject> object,
3912 uint32_t capacity) {
3913 UNREACHABLE();
3914 }
3915
3916   static inline void SetImpl(Handle<JSObject> holder, InternalIndex entry,
3917 Object value) {
3918 SetImpl(holder->elements(), entry, value);
3919 }
3920
3921   static inline void SetImpl(FixedArrayBase store, InternalIndex entry,
3922 Object value) {
3923 SloppyArgumentsElements elements = SloppyArgumentsElements::cast(store);
3924 uint32_t length = elements.length();
3925 if (entry.as_uint32() < length) {
3926 // Store context mapped entry.
3927 DisallowHeapAllocation no_gc;
3928 Object probe = elements.mapped_entries(entry.as_uint32());
3929 DCHECK(!probe.IsTheHole());
3930 Context context = Context::cast(elements.context());
3931 int context_entry = Smi::ToInt(probe);
3932 DCHECK(!context.get(context_entry).IsTheHole());
3933 context.set(context_entry, value);
3934 } else {
3935       // Entry is not context mapped, defer to the arguments.
3936 FixedArray arguments = elements.arguments();
3937 Object current =
3938 ArgumentsAccessor::GetRaw(arguments, entry.adjust_down(length));
3939 if (current.IsAliasedArgumentsEntry()) {
3940 AliasedArgumentsEntry alias = AliasedArgumentsEntry::cast(current);
3941 Context context = Context::cast(elements.context());
3942 int context_entry = alias.aliased_context_slot();
3943 DCHECK(!context.get(context_entry).IsTheHole());
3944 context.set(context_entry, value);
3945 } else {
3946 ArgumentsAccessor::SetImpl(arguments, entry.adjust_down(length), value);
3947 }
3948 }
3949 }
3950
3951   static void SetLengthImpl(Isolate* isolate, Handle<JSArray> array,
3952 uint32_t length,
3953 Handle<FixedArrayBase> parameter_map) {
3954 // Sloppy arguments objects are not arrays.
3955 UNREACHABLE();
3956 }
3957
3958   static uint32_t GetCapacityImpl(JSObject holder, FixedArrayBase store) {
3959 SloppyArgumentsElements elements = SloppyArgumentsElements::cast(store);
3960 FixedArray arguments = elements.arguments();
3961 return elements.length() +
3962 ArgumentsAccessor::GetCapacityImpl(holder, arguments);
3963 }
3964
3965   static uint32_t GetMaxNumberOfEntries(JSObject holder,
3966 FixedArrayBase backing_store) {
3967 SloppyArgumentsElements elements =
3968 SloppyArgumentsElements::cast(backing_store);
3969 FixedArrayBase arguments = elements.arguments();
3970 size_t max_entries =
3971 ArgumentsAccessor::GetMaxNumberOfEntries(holder, arguments);
3972 DCHECK_LE(max_entries, std::numeric_limits<uint32_t>::max());
3973 return elements.length() + static_cast<uint32_t>(max_entries);
3974 }
3975
3976   static uint32_t NumberOfElementsImpl(JSObject receiver,
3977 FixedArrayBase backing_store) {
3978 Isolate* isolate = receiver.GetIsolate();
3979 SloppyArgumentsElements elements =
3980 SloppyArgumentsElements::cast(backing_store);
3981 FixedArrayBase arguments = elements.arguments();
3982 uint32_t nof_elements = 0;
3983 uint32_t length = elements.length();
3984 for (uint32_t index = 0; index < length; index++) {
3985 if (HasParameterMapArg(isolate, elements, index)) nof_elements++;
3986 }
3987 return nof_elements +
3988 ArgumentsAccessor::NumberOfElementsImpl(receiver, arguments);
3989 }
3990
3991   V8_WARN_UNUSED_RESULT static ExceptionStatus AddElementsToKeyAccumulatorImpl(
3992 Handle<JSObject> receiver, KeyAccumulator* accumulator,
3993 AddKeyConversion convert) {
3994 Isolate* isolate = accumulator->isolate();
3995 Handle<FixedArrayBase> elements(receiver->elements(), isolate);
3996 uint32_t length = GetCapacityImpl(*receiver, *elements);
3997 for (uint32_t index = 0; index < length; index++) {
3998 InternalIndex entry(index);
3999 if (!HasEntryImpl(isolate, *elements, entry)) continue;
4000 Handle<Object> value = GetImpl(isolate, *elements, entry);
4001 RETURN_FAILURE_IF_NOT_SUCCESSFUL(accumulator->AddKey(value, convert));
4002 }
4003 return ExceptionStatus::kSuccess;
4004 }
4005
4006   static bool HasEntryImpl(Isolate* isolate, FixedArrayBase parameters,
4007 InternalIndex entry) {
4008 SloppyArgumentsElements elements =
4009 SloppyArgumentsElements::cast(parameters);
4010 uint32_t length = elements.length();
4011 if (entry.raw_value() < length) {
4012 return HasParameterMapArg(isolate, elements, entry.raw_value());
4013 }
4014 FixedArrayBase arguments = elements.arguments();
4015 return ArgumentsAccessor::HasEntryImpl(isolate, arguments,
4016 entry.adjust_down(length));
4017 }
4018
4019   static bool HasAccessorsImpl(JSObject holder, FixedArrayBase backing_store) {
4020 SloppyArgumentsElements elements =
4021 SloppyArgumentsElements::cast(backing_store);
4022 FixedArray arguments = elements.arguments();
4023 return ArgumentsAccessor::HasAccessorsImpl(holder, arguments);
4024 }
4025
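  // Maps a public element index to an internal entry: indices covered by the
  // parameter map are returned unchanged, while entries found in the arguments
  // store are shifted up by the parameter map length so the two entry ranges
  // stay disjoint.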
4026   static InternalIndex GetEntryForIndexImpl(Isolate* isolate, JSObject holder,
4027 FixedArrayBase parameters,
4028 size_t index,
4029 PropertyFilter filter) {
4030 SloppyArgumentsElements elements =
4031 SloppyArgumentsElements::cast(parameters);
4032 if (HasParameterMapArg(isolate, elements, index)) {
4033 return InternalIndex(index);
4034 }
4035 FixedArray arguments = elements.arguments();
4036 InternalIndex entry = ArgumentsAccessor::GetEntryForIndexImpl(
4037 isolate, holder, arguments, index, filter);
4038 if (entry.is_not_found()) return entry;
4039 // Arguments entries could overlap with the dictionary entries, hence offset
4040 // them by the number of context mapped entries.
4041 return entry.adjust_up(elements.length());
4042 }
4043
4044   static PropertyDetails GetDetailsImpl(JSObject holder, InternalIndex entry) {
4045 SloppyArgumentsElements elements =
4046 SloppyArgumentsElements::cast(holder.elements());
4047 uint32_t length = elements.length();
4048 if (entry.as_uint32() < length) {
4049 return PropertyDetails(kData, NONE, PropertyCellType::kNoCell);
4050 }
4051 FixedArray arguments = elements.arguments();
4052 return ArgumentsAccessor::GetDetailsImpl(arguments,
4053 entry.adjust_down(length));
4054 }
4055
4056   static bool HasParameterMapArg(Isolate* isolate,
4057 SloppyArgumentsElements elements,
4058 size_t index) {
4059 uint32_t length = elements.length();
4060 if (index >= length) return false;
4061 return !elements.mapped_entries(static_cast<uint32_t>(index))
4062 .IsTheHole(isolate);
4063 }
4064
4065   static void DeleteImpl(Handle<JSObject> obj, InternalIndex entry) {
4066 Handle<SloppyArgumentsElements> elements(
4067 SloppyArgumentsElements::cast(obj->elements()), obj->GetIsolate());
4068 uint32_t length = elements->length();
4069 InternalIndex delete_or_entry = entry;
4070 if (entry.as_uint32() < length) {
4071 delete_or_entry = InternalIndex::NotFound();
4072 }
4073 Subclass::SloppyDeleteImpl(obj, elements, delete_or_entry);
4074     // SloppyDeleteImpl allocates a new dictionary elements store. To keep heap
4075     // verification happy, we postpone clearing out the mapped entry.
4076 if (entry.as_uint32() < length) {
4077 elements->set_mapped_entries(entry.as_uint32(),
4078 obj->GetReadOnlyRoots().the_hole_value());
4079 }
4080 }
4081
4082   static void SloppyDeleteImpl(Handle<JSObject> obj,
4083 Handle<SloppyArgumentsElements> elements,
4084 InternalIndex entry) {
4085 // Implemented in subclasses.
4086 UNREACHABLE();
4087 }
4088
4089   V8_WARN_UNUSED_RESULT static ExceptionStatus CollectElementIndicesImpl(
4090 Handle<JSObject> object, Handle<FixedArrayBase> backing_store,
4091 KeyAccumulator* keys) {
4092 Isolate* isolate = keys->isolate();
4093 uint32_t nof_indices = 0;
4094 Handle<FixedArray> indices = isolate->factory()->NewFixedArray(
4095 GetCapacityImpl(*object, *backing_store));
4096 DirectCollectElementIndicesImpl(isolate, object, backing_store,
4097 GetKeysConversion::kKeepNumbers,
4098 ENUMERABLE_STRINGS, indices, &nof_indices);
4099 SortIndices(isolate, indices, nof_indices);
4100 for (uint32_t i = 0; i < nof_indices; i++) {
4101 RETURN_FAILURE_IF_NOT_SUCCESSFUL(keys->AddKey(indices->get(i)));
4102 }
4103 return ExceptionStatus::kSuccess;
4104 }
4105
4106   static Handle<FixedArray> DirectCollectElementIndicesImpl(
4107 Isolate* isolate, Handle<JSObject> object,
4108 Handle<FixedArrayBase> backing_store, GetKeysConversion convert,
4109 PropertyFilter filter, Handle<FixedArray> list, uint32_t* nof_indices,
4110 uint32_t insertion_index = 0) {
4111 Handle<SloppyArgumentsElements> elements =
4112 Handle<SloppyArgumentsElements>::cast(backing_store);
4113 uint32_t length = elements->length();
4114
4115 for (uint32_t i = 0; i < length; ++i) {
4116 if (elements->mapped_entries(i).IsTheHole(isolate)) continue;
4117 if (convert == GetKeysConversion::kConvertToString) {
4118 Handle<String> index_string = isolate->factory()->Uint32ToString(i);
4119 list->set(insertion_index, *index_string);
4120 } else {
4121 list->set(insertion_index, Smi::FromInt(i));
4122 }
4123 insertion_index++;
4124 }
4125
4126 Handle<FixedArray> store(elements->arguments(), isolate);
4127 return ArgumentsAccessor::DirectCollectElementIndicesImpl(
4128 isolate, object, store, convert, filter, list, nof_indices,
4129 insertion_index);
4130 }
4131
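  // Fast path for Array.prototype.includes on sloppy-arguments objects.
  // Searching for undefined also matches absent entries. If an accessor getter
  // changes the receiver's map, the remaining indices are handled by
  // IncludesValueSlowPath.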
4132   static Maybe<bool> IncludesValueImpl(Isolate* isolate,
4133 Handle<JSObject> object,
4134 Handle<Object> value, size_t start_from,
4135 size_t length) {
4136 DCHECK(JSObject::PrototypeHasNoElements(isolate, *object));
4137 Handle<Map> original_map(object->map(), isolate);
4138 Handle<SloppyArgumentsElements> elements(
4139 SloppyArgumentsElements::cast(object->elements()), isolate);
4140 bool search_for_hole = value->IsUndefined(isolate);
4141
4142 for (size_t k = start_from; k < length; ++k) {
4143 DCHECK_EQ(object->map(), *original_map);
4144 InternalIndex entry =
4145 GetEntryForIndexImpl(isolate, *object, *elements, k, ALL_PROPERTIES);
4146 if (entry.is_not_found()) {
4147 if (search_for_hole) return Just(true);
4148 continue;
4149 }
4150
4151 Handle<Object> element_k = Subclass::GetImpl(isolate, *elements, entry);
4152
4153 if (element_k->IsAccessorPair()) {
4154 LookupIterator it(isolate, object, k, LookupIterator::OWN);
4155 DCHECK(it.IsFound());
4156 DCHECK_EQ(it.state(), LookupIterator::ACCESSOR);
4157 ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, element_k,
4158 Object::GetPropertyWithAccessor(&it),
4159 Nothing<bool>());
4160
4161 if (value->SameValueZero(*element_k)) return Just(true);
4162
4163 if (object->map() != *original_map) {
4164           // Some mutation occurred in accessor. Abort "fast" path.
4165 return IncludesValueSlowPath(isolate, object, value, k + 1, length);
4166 }
4167 } else if (value->SameValueZero(*element_k)) {
4168 return Just(true);
4169 }
4170 }
4171 return Just(false);
4172 }
4173
4174   static Maybe<int64_t> IndexOfValueImpl(Isolate* isolate,
4175 Handle<JSObject> object,
4176 Handle<Object> value,
4177 size_t start_from, size_t length) {
4178 DCHECK(JSObject::PrototypeHasNoElements(isolate, *object));
4179 Handle<Map> original_map(object->map(), isolate);
4180 Handle<SloppyArgumentsElements> elements(
4181 SloppyArgumentsElements::cast(object->elements()), isolate);
4182
4183 for (size_t k = start_from; k < length; ++k) {
4184 DCHECK_EQ(object->map(), *original_map);
4185 InternalIndex entry =
4186 GetEntryForIndexImpl(isolate, *object, *elements, k, ALL_PROPERTIES);
4187 if (entry.is_not_found()) {
4188 continue;
4189 }
4190
4191 Handle<Object> element_k = Subclass::GetImpl(isolate, *elements, entry);
4192
4193 if (element_k->IsAccessorPair()) {
4194 LookupIterator it(isolate, object, k, LookupIterator::OWN);
4195 DCHECK(it.IsFound());
4196 DCHECK_EQ(it.state(), LookupIterator::ACCESSOR);
4197 ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, element_k,
4198 Object::GetPropertyWithAccessor(&it),
4199 Nothing<int64_t>());
4200
4201 if (value->StrictEquals(*element_k)) {
4202 return Just<int64_t>(k);
4203 }
4204
4205 if (object->map() != *original_map) {
4206 // Some mutation occurred in accessor. Abort "fast" path.
4207 return IndexOfValueSlowPath(isolate, object, value, k + 1, length);
4208 }
4209 } else if (value->StrictEquals(*element_k)) {
4210 return Just<int64_t>(k);
4211 }
4212 }
4213 return Just<int64_t>(-1);
4214 }
4215 };
4216
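// Slow sloppy arguments: the arguments store is a NumberDictionary, used once
// elements need non-default attributes. Context mapped entries can be
// represented as AliasedArgumentsEntry values ("slow aliases"), which
// ConvertArgumentsStoreResult below resolves back to the context slot.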
4217 class SlowSloppyArgumentsElementsAccessor
4218 : public SloppyArgumentsElementsAccessor<
4219 SlowSloppyArgumentsElementsAccessor, DictionaryElementsAccessor,
4220 ElementsKindTraits<SLOW_SLOPPY_ARGUMENTS_ELEMENTS>> {
4221 public:
4222   static Handle<Object> ConvertArgumentsStoreResult(
4223 Isolate* isolate, Handle<SloppyArgumentsElements> elements,
4224 Handle<Object> result) {
4225 // Elements of the arguments object in slow mode might be slow aliases.
4226 if (result->IsAliasedArgumentsEntry()) {
4227 DisallowHeapAllocation no_gc;
4228 AliasedArgumentsEntry alias = AliasedArgumentsEntry::cast(*result);
4229 Context context = elements->context();
4230 int context_entry = alias.aliased_context_slot();
4231 DCHECK(!context.get(context_entry).IsTheHole(isolate));
4232 return handle(context.get(context_entry), isolate);
4233 }
4234 return result;
4235 }
4236   static void SloppyDeleteImpl(Handle<JSObject> obj,
4237 Handle<SloppyArgumentsElements> elements,
4238 InternalIndex entry) {
4239 // No need to delete a context mapped entry from the arguments elements.
4240 if (entry.is_not_found()) return;
4241 Isolate* isolate = obj->GetIsolate();
4242 Handle<NumberDictionary> dict(NumberDictionary::cast(elements->arguments()),
4243 isolate);
4244 uint32_t length = elements->length();
4245 dict =
4246 NumberDictionary::DeleteEntry(isolate, dict, entry.adjust_down(length));
4247 elements->set_arguments(*dict);
4248 }
4249   static void AddImpl(Handle<JSObject> object, uint32_t index,
4250 Handle<Object> value, PropertyAttributes attributes,
4251 uint32_t new_capacity) {
4252 Isolate* isolate = object->GetIsolate();
4253 Handle<SloppyArgumentsElements> elements(
4254 SloppyArgumentsElements::cast(object->elements()), isolate);
4255 Handle<FixedArrayBase> old_arguments(
4256 FixedArrayBase::cast(elements->arguments()), isolate);
4257 Handle<NumberDictionary> dictionary =
4258 old_arguments->IsNumberDictionary()
4259 ? Handle<NumberDictionary>::cast(old_arguments)
4260 : JSObject::NormalizeElements(object);
4261 PropertyDetails details(kData, attributes, PropertyCellType::kNoCell);
4262 Handle<NumberDictionary> new_dictionary =
4263 NumberDictionary::Add(isolate, dictionary, index, value, details);
4264 if (attributes != NONE) object->RequireSlowElements(*new_dictionary);
4265 if (*dictionary != *new_dictionary) {
4266 elements->set_arguments(*new_dictionary);
4267 }
4268 }
4269
4270   static void ReconfigureImpl(Handle<JSObject> object,
4271 Handle<FixedArrayBase> store, InternalIndex entry,
4272 Handle<Object> value,
4273 PropertyAttributes attributes) {
4274 Isolate* isolate = object->GetIsolate();
4275 Handle<SloppyArgumentsElements> elements =
4276 Handle<SloppyArgumentsElements>::cast(store);
4277 uint32_t length = elements->length();
4278 if (entry.as_uint32() < length) {
4279 Object probe = elements->mapped_entries(entry.as_uint32());
4280 DCHECK(!probe.IsTheHole(isolate));
4281 Context context = elements->context();
4282 int context_entry = Smi::ToInt(probe);
4283 DCHECK(!context.get(context_entry).IsTheHole(isolate));
4284 context.set(context_entry, *value);
4285
4286 // Redefining attributes of an aliased element destroys fast aliasing.
4287 elements->set_mapped_entries(entry.as_uint32(),
4288 ReadOnlyRoots(isolate).the_hole_value());
4289 // For elements that are still writable we re-establish slow aliasing.
4290 if ((attributes & READ_ONLY) == 0) {
4291 value = isolate->factory()->NewAliasedArgumentsEntry(context_entry);
4292 }
4293
4294 PropertyDetails details(kData, attributes, PropertyCellType::kNoCell);
4295 Handle<NumberDictionary> arguments(
4296 NumberDictionary::cast(elements->arguments()), isolate);
4297 arguments = NumberDictionary::Add(isolate, arguments, entry.as_uint32(),
4298 value, details);
4299 // If the attributes were NONE, we would have called set rather than
4300 // reconfigure.
4301 DCHECK_NE(NONE, attributes);
4302 object->RequireSlowElements(*arguments);
4303 elements->set_arguments(*arguments);
4304 } else {
4305 Handle<FixedArrayBase> arguments(elements->arguments(), isolate);
4306 DictionaryElementsAccessor::ReconfigureImpl(
4307 object, arguments, entry.adjust_down(length), value, attributes);
4308 }
4309 }
4310 };
4311
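// Fast sloppy arguments: the arguments store is a plain holey FixedArray.
// Deleting or reconfiguring an entry first normalizes the store to a
// NumberDictionary and then defers to SlowSloppyArgumentsElementsAccessor.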
4312 class FastSloppyArgumentsElementsAccessor
4313 : public SloppyArgumentsElementsAccessor<
4314 FastSloppyArgumentsElementsAccessor, FastHoleyObjectElementsAccessor,
4315 ElementsKindTraits<FAST_SLOPPY_ARGUMENTS_ELEMENTS>> {
4316 public:
4317   static Handle<Object> ConvertArgumentsStoreResult(
4318       Isolate* isolate, Handle<SloppyArgumentsElements> parameter_map,
4319 Handle<Object> result) {
4320 DCHECK(!result->IsAliasedArgumentsEntry());
4321 return result;
4322 }
4323
4324   static Handle<FixedArray> GetArguments(Isolate* isolate,
4325 FixedArrayBase store) {
4326 SloppyArgumentsElements elements = SloppyArgumentsElements::cast(store);
4327 return Handle<FixedArray>(elements.arguments(), isolate);
4328 }
4329
4330   static Handle<NumberDictionary> NormalizeImpl(
4331 Handle<JSObject> object, Handle<FixedArrayBase> elements) {
4332 Handle<FixedArray> arguments =
4333 GetArguments(object->GetIsolate(), *elements);
4334 return FastHoleyObjectElementsAccessor::NormalizeImpl(object, arguments);
4335 }
4336
4337   static Handle<NumberDictionary> NormalizeArgumentsElements(
4338 Handle<JSObject> object, Handle<SloppyArgumentsElements> elements,
4339 InternalIndex* entry) {
4340 Handle<NumberDictionary> dictionary = JSObject::NormalizeElements(object);
4341 elements->set_arguments(*dictionary);
4342     // A NotFound entry indicates that a context mapped element got deleted. In
4343     // this case we only normalize the elements (i.e. migrate to SLOW_SLOPPY).
4344 if (entry->is_not_found()) return dictionary;
4345 uint32_t length = elements->length();
4346 if (entry->as_uint32() >= length) {
4347 *entry =
4348 dictionary
4349 ->FindEntry(object->GetIsolate(), entry->as_uint32() - length)
4350 .adjust_up(length);
4351 }
4352 return dictionary;
4353 }
4354
4355   static void SloppyDeleteImpl(Handle<JSObject> obj,
4356 Handle<SloppyArgumentsElements> elements,
4357 InternalIndex entry) {
4358     // Always normalize the elements when deleting an entry.
4359 NormalizeArgumentsElements(obj, elements, &entry);
4360 SlowSloppyArgumentsElementsAccessor::SloppyDeleteImpl(obj, elements, entry);
4361 }
4362
4363   static void AddImpl(Handle<JSObject> object, uint32_t index,
4364 Handle<Object> value, PropertyAttributes attributes,
4365 uint32_t new_capacity) {
4366 DCHECK_EQ(NONE, attributes);
4367 Isolate* isolate = object->GetIsolate();
4368 Handle<SloppyArgumentsElements> elements(
4369 SloppyArgumentsElements::cast(object->elements()), isolate);
4370 Handle<FixedArray> old_arguments(elements->arguments(), isolate);
4371 if (old_arguments->IsNumberDictionary() ||
4372 static_cast<uint32_t>(old_arguments->length()) < new_capacity) {
4373 GrowCapacityAndConvertImpl(object, new_capacity);
4374 }
4375 FixedArray arguments = elements->arguments();
4376 // For fast holey objects, the entry equals the index. The code above made
4377 // sure that there's enough space to store the value. We cannot convert
4378 // index to entry explicitly since the slot still contains the hole, so the
4379     // current EntryForIndex would indicate that it is "absent" by returning a
4380     // NotFound entry.
4381 FastHoleyObjectElementsAccessor::SetImpl(arguments, InternalIndex(index),
4382 *value);
4383 }
4384
4385   static void ReconfigureImpl(Handle<JSObject> object,
4386 Handle<FixedArrayBase> store, InternalIndex entry,
4387 Handle<Object> value,
4388 PropertyAttributes attributes) {
4389 DCHECK_EQ(object->elements(), *store);
4390 Handle<SloppyArgumentsElements> elements(
4391 SloppyArgumentsElements::cast(*store), object->GetIsolate());
4392 NormalizeArgumentsElements(object, elements, &entry);
4393 SlowSloppyArgumentsElementsAccessor::ReconfigureImpl(object, store, entry,
4394 value, attributes);
4395 }
4396
4397   static void CopyElementsImpl(Isolate* isolate, FixedArrayBase from,
4398 uint32_t from_start, FixedArrayBase to,
4399 ElementsKind from_kind, uint32_t to_start,
4400 int packed_size, int copy_size) {
4401 DCHECK(!to.IsNumberDictionary());
4402 if (from_kind == SLOW_SLOPPY_ARGUMENTS_ELEMENTS) {
4403 CopyDictionaryToObjectElements(isolate, from, from_start, to,
4404 HOLEY_ELEMENTS, to_start, copy_size);
4405 } else {
4406 DCHECK_EQ(FAST_SLOPPY_ARGUMENTS_ELEMENTS, from_kind);
4407 CopyObjectToObjectElements(isolate, from, HOLEY_ELEMENTS, from_start, to,
4408 HOLEY_ELEMENTS, to_start, copy_size);
4409 }
4410 }
4411
4412   static void GrowCapacityAndConvertImpl(Handle<JSObject> object,
4413 uint32_t capacity) {
4414 Isolate* isolate = object->GetIsolate();
4415 Handle<SloppyArgumentsElements> elements(
4416 SloppyArgumentsElements::cast(object->elements()), isolate);
4417 Handle<FixedArray> old_arguments(FixedArray::cast(elements->arguments()),
4418 isolate);
4419 ElementsKind from_kind = object->GetElementsKind();
4420 // This method should only be called if there's a reason to update the
4421 // elements.
4422 DCHECK(from_kind == SLOW_SLOPPY_ARGUMENTS_ELEMENTS ||
4423 static_cast<uint32_t>(old_arguments->length()) < capacity);
4424 Handle<FixedArrayBase> arguments =
4425 ConvertElementsWithCapacity(object, old_arguments, from_kind, capacity);
4426 Handle<Map> new_map = JSObject::GetElementsTransitionMap(
4427 object, FAST_SLOPPY_ARGUMENTS_ELEMENTS);
4428 JSObject::MigrateToMap(isolate, object, new_map);
4429 elements->set_arguments(FixedArray::cast(*arguments));
4430 JSObject::ValidateElements(*object);
4431 }
4432 };
4433
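// Elements of String wrapper objects (e.g. new String("ab")) come in two
// parts: indices below the string length resolve to read-only, non-deletable
// single-character strings (so Object("ab")[0] is "a" and cannot be
// overwritten), and everything beyond is adjusted down and handled by the
// regular backing store accessor.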
4434 template <typename Subclass, typename BackingStoreAccessor, typename KindTraits>
4435 class StringWrapperElementsAccessor
4436 : public ElementsAccessorBase<Subclass, KindTraits> {
4437 public:
4438   static Handle<Object> GetInternalImpl(Handle<JSObject> holder,
4439 InternalIndex entry) {
4440 return GetImpl(holder, entry);
4441 }
4442
4443   static Handle<Object> GetImpl(Handle<JSObject> holder, InternalIndex entry) {
4444 Isolate* isolate = holder->GetIsolate();
4445 Handle<String> string(GetString(*holder), isolate);
4446 uint32_t length = static_cast<uint32_t>(string->length());
4447 if (entry.as_uint32() < length) {
4448 return isolate->factory()->LookupSingleCharacterStringFromCode(
4449 String::Flatten(isolate, string)->Get(entry.as_int()));
4450 }
4451 return BackingStoreAccessor::GetImpl(isolate, holder->elements(),
4452 entry.adjust_down(length));
4453 }
4454
4455   static Handle<Object> GetImpl(Isolate* isolate, FixedArrayBase elements,
4456 InternalIndex entry) {
4457 UNREACHABLE();
4458 }
4459
4460   static PropertyDetails GetDetailsImpl(JSObject holder, InternalIndex entry) {
4461 uint32_t length = static_cast<uint32_t>(GetString(holder).length());
4462 if (entry.as_uint32() < length) {
4463 PropertyAttributes attributes =
4464 static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE);
4465 return PropertyDetails(kData, attributes, PropertyCellType::kNoCell);
4466 }
4467 return BackingStoreAccessor::GetDetailsImpl(holder,
4468 entry.adjust_down(length));
4469 }
4470
4471   static InternalIndex GetEntryForIndexImpl(Isolate* isolate, JSObject holder,
4472 FixedArrayBase backing_store,
4473 size_t index,
4474 PropertyFilter filter) {
4475 uint32_t length = static_cast<uint32_t>(GetString(holder).length());
4476 if (index < length) return InternalIndex(index);
4477 InternalIndex backing_store_entry =
4478 BackingStoreAccessor::GetEntryForIndexImpl(
4479 isolate, holder, backing_store, index, filter);
4480 if (backing_store_entry.is_not_found()) return backing_store_entry;
4481 return backing_store_entry.adjust_up(length);
4482 }
4483
4484   static void DeleteImpl(Handle<JSObject> holder, InternalIndex entry) {
4485 uint32_t length = static_cast<uint32_t>(GetString(*holder).length());
4486 if (entry.as_uint32() < length) {
4487 return; // String contents can't be deleted.
4488 }
4489 BackingStoreAccessor::DeleteImpl(holder, entry.adjust_down(length));
4490 }
4491
4492   static void SetImpl(Handle<JSObject> holder, InternalIndex entry,
4493 Object value) {
4494 uint32_t length = static_cast<uint32_t>(GetString(*holder).length());
4495 if (entry.as_uint32() < length) {
4496 return; // String contents are read-only.
4497 }
4498 BackingStoreAccessor::SetImpl(holder->elements(), entry.adjust_down(length),
4499 value);
4500 }
4501
4502   static void AddImpl(Handle<JSObject> object, uint32_t index,
4503 Handle<Object> value, PropertyAttributes attributes,
4504 uint32_t new_capacity) {
4505 DCHECK(index >= static_cast<uint32_t>(GetString(*object).length()));
4506 // Explicitly grow fast backing stores if needed. Dictionaries know how to
4507 // extend their capacity themselves.
4508 if (KindTraits::Kind == FAST_STRING_WRAPPER_ELEMENTS &&
4509 (object->GetElementsKind() == SLOW_STRING_WRAPPER_ELEMENTS ||
4510 BackingStoreAccessor::GetCapacityImpl(*object, object->elements()) !=
4511 new_capacity)) {
4512 GrowCapacityAndConvertImpl(object, new_capacity);
4513 }
4514 BackingStoreAccessor::AddImpl(object, index, value, attributes,
4515 new_capacity);
4516 }
4517
4518   static void ReconfigureImpl(Handle<JSObject> object,
4519 Handle<FixedArrayBase> store, InternalIndex entry,
4520 Handle<Object> value,
4521 PropertyAttributes attributes) {
4522 uint32_t length = static_cast<uint32_t>(GetString(*object).length());
4523 if (entry.as_uint32() < length) {
4524 return; // String contents can't be reconfigured.
4525 }
4526 BackingStoreAccessor::ReconfigureImpl(
4527 object, store, entry.adjust_down(length), value, attributes);
4528 }
4529
4530   V8_WARN_UNUSED_RESULT static ExceptionStatus AddElementsToKeyAccumulatorImpl(
4531 Handle<JSObject> receiver, KeyAccumulator* accumulator,
4532 AddKeyConversion convert) {
4533 Isolate* isolate = receiver->GetIsolate();
4534 Handle<String> string(GetString(*receiver), isolate);
4535 string = String::Flatten(isolate, string);
4536 uint32_t length = static_cast<uint32_t>(string->length());
4537 for (uint32_t i = 0; i < length; i++) {
4538 Handle<String> key =
4539 isolate->factory()->LookupSingleCharacterStringFromCode(
4540 string->Get(i));
4541 RETURN_FAILURE_IF_NOT_SUCCESSFUL(accumulator->AddKey(key, convert));
4542 }
4543 return BackingStoreAccessor::AddElementsToKeyAccumulatorImpl(
4544 receiver, accumulator, convert);
4545 }
4546
4547   V8_WARN_UNUSED_RESULT static ExceptionStatus CollectElementIndicesImpl(
4548 Handle<JSObject> object, Handle<FixedArrayBase> backing_store,
4549 KeyAccumulator* keys) {
4550 uint32_t length = GetString(*object).length();
4551 Factory* factory = keys->isolate()->factory();
4552 for (uint32_t i = 0; i < length; i++) {
4553 RETURN_FAILURE_IF_NOT_SUCCESSFUL(
4554 keys->AddKey(factory->NewNumberFromUint(i)));
4555 }
4556 return BackingStoreAccessor::CollectElementIndicesImpl(object,
4557 backing_store, keys);
4558 }
4559
4560   static void GrowCapacityAndConvertImpl(Handle<JSObject> object,
4561 uint32_t capacity) {
4562 Handle<FixedArrayBase> old_elements(object->elements(),
4563 object->GetIsolate());
4564 ElementsKind from_kind = object->GetElementsKind();
4565 if (from_kind == FAST_STRING_WRAPPER_ELEMENTS) {
4566 // The optimizing compiler relies on the prototype lookups of String
4567 // objects always returning undefined. If there's a store to the
4568 // initial String.prototype object, make sure all the optimizations
4569 // are invalidated.
4570 object->GetIsolate()->UpdateNoElementsProtectorOnSetLength(object);
4571 }
4572 // This method should only be called if there's a reason to update the
4573 // elements.
4574 DCHECK(from_kind == SLOW_STRING_WRAPPER_ELEMENTS ||
4575 static_cast<uint32_t>(old_elements->length()) < capacity);
4576 Subclass::BasicGrowCapacityAndConvertImpl(object, old_elements, from_kind,
4577 FAST_STRING_WRAPPER_ELEMENTS,
4578 capacity);
4579 }
4580
4581   static void CopyElementsImpl(Isolate* isolate, FixedArrayBase from,
4582 uint32_t from_start, FixedArrayBase to,
4583 ElementsKind from_kind, uint32_t to_start,
4584 int packed_size, int copy_size) {
4585 DCHECK(!to.IsNumberDictionary());
4586 if (from_kind == SLOW_STRING_WRAPPER_ELEMENTS) {
4587 CopyDictionaryToObjectElements(isolate, from, from_start, to,
4588 HOLEY_ELEMENTS, to_start, copy_size);
4589 } else {
4590 DCHECK_EQ(FAST_STRING_WRAPPER_ELEMENTS, from_kind);
4591 CopyObjectToObjectElements(isolate, from, HOLEY_ELEMENTS, from_start, to,
4592 HOLEY_ELEMENTS, to_start, copy_size);
4593 }
4594 }
4595
4596   static uint32_t NumberOfElementsImpl(JSObject object,
4597 FixedArrayBase backing_store) {
4598 uint32_t length = GetString(object).length();
4599 return length +
4600 BackingStoreAccessor::NumberOfElementsImpl(object, backing_store);
4601 }
4602
4603 private:
4604   static String GetString(JSObject holder) {
4605 DCHECK(holder.IsJSPrimitiveWrapper());
4606 JSPrimitiveWrapper js_value = JSPrimitiveWrapper::cast(holder);
4607 DCHECK(js_value.value().IsString());
4608 return String::cast(js_value.value());
4609 }
4610 };
4611
4612 class FastStringWrapperElementsAccessor
4613 : public StringWrapperElementsAccessor<
4614 FastStringWrapperElementsAccessor, FastHoleyObjectElementsAccessor,
4615 ElementsKindTraits<FAST_STRING_WRAPPER_ELEMENTS>> {
4616 public:
4617   static Handle<NumberDictionary> NormalizeImpl(
4618 Handle<JSObject> object, Handle<FixedArrayBase> elements) {
4619 return FastHoleyObjectElementsAccessor::NormalizeImpl(object, elements);
4620 }
4621 };
4622
4623 class SlowStringWrapperElementsAccessor
4624 : public StringWrapperElementsAccessor<
4625 SlowStringWrapperElementsAccessor, DictionaryElementsAccessor,
4626 ElementsKindTraits<SLOW_STRING_WRAPPER_ELEMENTS>> {
4627 public:
4628   static bool HasAccessorsImpl(JSObject holder, FixedArrayBase backing_store) {
4629 return DictionaryElementsAccessor::HasAccessorsImpl(holder, backing_store);
4630 }
4631 };
4632
4633 } // namespace
4634
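// Initializes a freshly constructed JSArray from its constructor arguments:
// no arguments yields a preallocated empty array, a single numeric argument
// is taken as the length (e.g. new Array(3) has three holes), and any other
// argument list becomes the array's elements (new Array(1, 2) is [1, 2]).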
4635 MaybeHandle<Object> ArrayConstructInitializeElements(
4636 Handle<JSArray> array, JavaScriptArguments* args) {
4637 if (args->length() == 0) {
4638 // Optimize the case where there are no parameters passed.
4639 JSArray::Initialize(array, JSArray::kPreallocatedArrayElements);
4640 return array;
4641
4642 } else if (args->length() == 1 && args->at(0)->IsNumber()) {
4643 uint32_t length;
4644 if (!args->at(0)->ToArrayLength(&length)) {
4645 return ThrowArrayLengthRangeError(array->GetIsolate());
4646 }
4647
4648 // Optimize the case where there is one argument and the argument is a small
4649 // smi.
4650 if (length > 0 && length < JSArray::kInitialMaxFastElementArray) {
4651 ElementsKind elements_kind = array->GetElementsKind();
4652 JSArray::Initialize(array, length, length);
4653
4654 if (!IsHoleyElementsKind(elements_kind)) {
4655 elements_kind = GetHoleyElementsKind(elements_kind);
4656 JSObject::TransitionElementsKind(array, elements_kind);
4657 }
4658 } else if (length == 0) {
4659 JSArray::Initialize(array, JSArray::kPreallocatedArrayElements);
4660 } else {
4661 // Take the argument as the length.
4662 JSArray::Initialize(array, 0);
4663 JSArray::SetLength(array, length);
4664 }
4665 return array;
4666 }
4667
4668 Factory* factory = array->GetIsolate()->factory();
4669
4670 // Set length and elements on the array.
4671 int number_of_elements = args->length();
4672 JSObject::EnsureCanContainElements(array, args, number_of_elements,
4673 ALLOW_CONVERTED_DOUBLE_ELEMENTS);
4674
4675 // Allocate an appropriately typed elements array.
4676 ElementsKind elements_kind = array->GetElementsKind();
4677 Handle<FixedArrayBase> elms;
4678 if (IsDoubleElementsKind(elements_kind)) {
4679 elms = Handle<FixedArrayBase>::cast(
4680 factory->NewFixedDoubleArray(number_of_elements));
4681 } else {
4682 elms = Handle<FixedArrayBase>::cast(
4683 factory->NewFixedArrayWithHoles(number_of_elements));
4684 }
4685
4686 // Fill in the content
4687 switch (elements_kind) {
4688 case HOLEY_SMI_ELEMENTS:
4689 case PACKED_SMI_ELEMENTS: {
4690 Handle<FixedArray> smi_elms = Handle<FixedArray>::cast(elms);
4691 for (int entry = 0; entry < number_of_elements; entry++) {
4692 smi_elms->set(entry, (*args)[entry], SKIP_WRITE_BARRIER);
4693 }
4694 break;
4695 }
4696 case HOLEY_ELEMENTS:
4697 case PACKED_ELEMENTS: {
4698 DisallowHeapAllocation no_gc;
4699 WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
4700 Handle<FixedArray> object_elms = Handle<FixedArray>::cast(elms);
4701 for (int entry = 0; entry < number_of_elements; entry++) {
4702 object_elms->set(entry, (*args)[entry], mode);
4703 }
4704 break;
4705 }
4706 case HOLEY_DOUBLE_ELEMENTS:
4707 case PACKED_DOUBLE_ELEMENTS: {
4708 Handle<FixedDoubleArray> double_elms =
4709 Handle<FixedDoubleArray>::cast(elms);
4710 for (int entry = 0; entry < number_of_elements; entry++) {
4711 double_elms->set(entry, (*args)[entry].Number());
4712 }
4713 break;
4714 }
4715 default:
4716 UNREACHABLE();
4717 }
4718
4719 array->set_elements(*elms);
4720 array->set_length(Smi::FromInt(number_of_elements));
4721 return array;
4722 }
4723
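// The Copy* helpers below take raw Addresses (presumably so they can be
// invoked without handles, e.g. from generated code), reconstruct the heap
// objects, and dispatch on the destination's elements kind.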
4724 void CopyFastNumberJSArrayElementsToTypedArray(Address raw_context,
4725 Address raw_source,
4726 Address raw_destination,
4727 uintptr_t length,
4728 uintptr_t offset) {
4729 Context context = Context::cast(Object(raw_context));
4730 JSArray source = JSArray::cast(Object(raw_source));
4731 JSTypedArray destination = JSTypedArray::cast(Object(raw_destination));
4732
4733 switch (destination.GetElementsKind()) {
4734 #define TYPED_ARRAYS_CASE(Type, type, TYPE, ctype) \
4735 case TYPE##_ELEMENTS: \
4736 CHECK(Type##ElementsAccessor::TryCopyElementsFastNumber( \
4737 context, source, destination, length, offset)); \
4738 break;
4739 TYPED_ARRAYS(TYPED_ARRAYS_CASE)
4740 #undef TYPED_ARRAYS_CASE
4741 default:
4742 UNREACHABLE();
4743 }
4744 }
4745
4746 void CopyTypedArrayElementsToTypedArray(Address raw_source,
4747 Address raw_destination,
4748 uintptr_t length, uintptr_t offset) {
4749 JSTypedArray source = JSTypedArray::cast(Object(raw_source));
4750 JSTypedArray destination = JSTypedArray::cast(Object(raw_destination));
4751
4752 switch (destination.GetElementsKind()) {
4753 #define TYPED_ARRAYS_CASE(Type, type, TYPE, ctype) \
4754 case TYPE##_ELEMENTS: \
4755 Type##ElementsAccessor::CopyElementsFromTypedArray(source, destination, \
4756 length, offset); \
4757 break;
4758 TYPED_ARRAYS(TYPED_ARRAYS_CASE)
4759 #undef TYPED_ARRAYS_CASE
4760 default:
4761 UNREACHABLE();
4762 }
4763 }
4764
4765 void CopyTypedArrayElementsSlice(Address raw_source, Address raw_destination,
4766 uintptr_t start, uintptr_t end) {
4767 JSTypedArray source = JSTypedArray::cast(Object(raw_source));
4768 JSTypedArray destination = JSTypedArray::cast(Object(raw_destination));
4769
4770 destination.GetElementsAccessor()->CopyTypedArrayElementsSlice(
4771 source, destination, start, end);
4772 }
4773
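// Populates a function-local static table with one heap-allocated accessor
// per ElementsKind; the STATIC_ASSERT checks that the table covers every
// kind. TearDown() deletes the accessors again and clears the table pointer.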
4774 void ElementsAccessor::InitializeOncePerProcess() {
4775 static ElementsAccessor* accessor_array[] = {
4776 #define ACCESSOR_ARRAY(Class, Kind, Store) new Class(),
4777 ELEMENTS_LIST(ACCESSOR_ARRAY)
4778 #undef ACCESSOR_ARRAY
4779 };
4780
4781 STATIC_ASSERT((sizeof(accessor_array) / sizeof(*accessor_array)) ==
4782 kElementsKindCount);
4783
4784 elements_accessors_ = accessor_array;
4785 }
4786
4787 void ElementsAccessor::TearDown() {
4788 if (elements_accessors_ == nullptr) return;
4789 #define ACCESSOR_DELETE(Class, Kind, Store) delete elements_accessors_[Kind];
4790 ELEMENTS_LIST(ACCESSOR_DELETE)
4791 #undef ACCESSOR_DELETE
4792 elements_accessors_ = nullptr;
4793 }
4794
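// Helper for Array.prototype.concat: determines the most general elements
// kind across all arguments, pre-initializes the result with holes when raw
// doubles will have to be boxed, and then bulk-copies each source array into
// the result via the accessor for the chosen kind.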
4795 Handle<JSArray> ElementsAccessor::Concat(Isolate* isolate,
4796 BuiltinArguments* args,
4797 uint32_t concat_size,
4798 uint32_t result_len) {
4799 ElementsKind result_elements_kind = GetInitialFastElementsKind();
4800 bool has_raw_doubles = false;
4801 {
4802 DisallowHeapAllocation no_gc;
4803 bool is_holey = false;
4804 for (uint32_t i = 0; i < concat_size; i++) {
4805 Object arg = (*args)[i];
4806 ElementsKind arg_kind = JSArray::cast(arg).GetElementsKind();
4807 has_raw_doubles = has_raw_doubles || IsDoubleElementsKind(arg_kind);
4808 is_holey = is_holey || IsHoleyElementsKind(arg_kind);
4809 result_elements_kind =
4810 GetMoreGeneralElementsKind(result_elements_kind, arg_kind);
4811 }
4812 if (is_holey) {
4813 result_elements_kind = GetHoleyElementsKind(result_elements_kind);
4814 }
4815 }
4816
4817   // If a double array is concatenated into a fast elements array, the fast
4818 // elements array needs to be initialized to contain proper holes, since
4819 // boxing doubles may cause incremental marking.
4820 bool requires_double_boxing =
4821 has_raw_doubles && !IsDoubleElementsKind(result_elements_kind);
4822 ArrayStorageAllocationMode mode = requires_double_boxing
4823 ? INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE
4824 : DONT_INITIALIZE_ARRAY_ELEMENTS;
4825 Handle<JSArray> result_array = isolate->factory()->NewJSArray(
4826 result_elements_kind, result_len, result_len, mode);
4827 if (result_len == 0) return result_array;
4828
4829 uint32_t insertion_index = 0;
4830 Handle<FixedArrayBase> storage(result_array->elements(), isolate);
4831 ElementsAccessor* accessor = ElementsAccessor::ForKind(result_elements_kind);
4832 for (uint32_t i = 0; i < concat_size; i++) {
4833 // It is crucial to keep |array| in a raw pointer form to avoid
4834 // performance degradation.
4835 JSArray array = JSArray::cast((*args)[i]);
4836 uint32_t len = 0;
4837 array.length().ToArrayLength(&len);
4838 if (len == 0) continue;
4839 ElementsKind from_kind = array.GetElementsKind();
4840 accessor->CopyElements(array, 0, from_kind, storage, insertion_index, len);
4841 insertion_index += len;
4842 }
4843
4844 DCHECK_EQ(insertion_index, result_len);
4845 return result_array;
4846 }
4847
4848 ElementsAccessor** ElementsAccessor::elements_accessors_ = nullptr;
4849
4850 #undef ELEMENTS_LIST
4851 #undef RETURN_NOTHING_IF_NOT_SUCCESSFUL
4852 #undef RETURN_FAILURE_IF_NOT_SUCCESSFUL
4853 } // namespace internal
4854 } // namespace v8
4855