1 // Copyright 2016 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/builtins/builtins.h"
6 #include "src/builtins/builtins-utils.h"
7
8 #include "src/code-factory.h"
9 #include "src/code-stub-assembler.h"
10 #include "src/contexts.h"
11 #include "src/counters.h"
12 #include "src/elements.h"
13 #include "src/isolate.h"
14 #include "src/lookup.h"
15 #include "src/objects-inl.h"
16 #include "src/prototype.h"
17
18 namespace v8 {
19 namespace internal {
20
21 namespace {
22
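// Note on the helper below (a summary of the code, not a spec reference):
// Smis pass through unchanged, NaN and null/undefined become 0, booleans
// become 0/1, out-of-range doubles saturate at kMinInt/kMaxInt, and any other
// input is rejected so the caller can take the generic JS path instead.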
23 inline bool ClampedToInteger(Isolate* isolate, Object* object, int* out) {
24 // This is an extended version of ECMA-262 7.1.11 handling signed values
25 // Try to convert object to a number and clamp values to [kMinInt, kMaxInt]
26 if (object->IsSmi()) {
27 *out = Smi::cast(object)->value();
28 return true;
29 } else if (object->IsHeapNumber()) {
30 double value = HeapNumber::cast(object)->value();
31 if (std::isnan(value)) {
32 *out = 0;
33 } else if (value > kMaxInt) {
34 *out = kMaxInt;
35 } else if (value < kMinInt) {
36 *out = kMinInt;
37 } else {
38 *out = static_cast<int>(value);
39 }
40 return true;
41 } else if (object->IsNullOrUndefined(isolate)) {
42 *out = 0;
43 return true;
44 } else if (object->IsBoolean()) {
45 *out = object->IsTrue(isolate);
46 return true;
47 }
48 return false;
49 }
50
51 inline bool GetSloppyArgumentsLength(Isolate* isolate, Handle<JSObject> object,
52 int* out) {
53 Context* context = *isolate->native_context();
54 Map* map = object->map();
55 if (map != context->sloppy_arguments_map() &&
56 map != context->strict_arguments_map() &&
57 map != context->fast_aliased_arguments_map()) {
58 return false;
59 }
60 DCHECK(object->HasFastElements() || object->HasFastArgumentsElements());
61 Object* len_obj = object->InObjectPropertyAt(JSArgumentsObject::kLengthIndex);
62 if (!len_obj->IsSmi()) return false;
63 *out = Max(0, Smi::cast(len_obj)->value());
64
65 FixedArray* parameters = FixedArray::cast(object->elements());
66 if (object->HasSloppyArgumentsElements()) {
67 FixedArray* arguments = FixedArray::cast(parameters->get(1));
68 return *out <= arguments->length();
69 }
70 return *out <= parameters->length();
71 }
72
73 inline bool IsJSArrayFastElementMovingAllowed(Isolate* isolate,
74 JSArray* receiver) {
75 return JSObject::PrototypeHasNoElements(isolate, receiver);
76 }
77
78 inline bool HasSimpleElements(JSObject* current) {
79 return current->map()->instance_type() > LAST_CUSTOM_ELEMENTS_RECEIVER &&
80 !current->GetElementsAccessor()->HasAccessors(current);
81 }
82
83 inline bool HasOnlySimpleReceiverElements(Isolate* isolate,
84 JSObject* receiver) {
85 // Check that we have no accessors on the receiver's elements.
86 if (!HasSimpleElements(receiver)) return false;
87 return JSObject::PrototypeHasNoElements(isolate, receiver);
88 }
89
90 inline bool HasOnlySimpleElements(Isolate* isolate, JSReceiver* receiver) {
91 DisallowHeapAllocation no_gc;
92 PrototypeIterator iter(isolate, receiver, kStartAtReceiver);
93 for (; !iter.IsAtEnd(); iter.Advance()) {
94 if (iter.GetCurrent()->IsJSProxy()) return false;
95 JSObject* current = iter.GetCurrent<JSObject>();
96 if (!HasSimpleElements(current)) return false;
97 }
98 return true;
99 }
100
101 // Returns |false| if not applicable.
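// The fast path applies only to extensible JSArrays with non-dictionary
// elements whose prototype chain contains no elements and which are not an
// initial Array.prototype; when arguments are being appended, the elements
// kind may first be transitioned (e.g. FAST_SMI -> FAST_DOUBLE -> FAST).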
102 MUST_USE_RESULT
103 inline bool EnsureJSArrayWithWritableFastElements(Isolate* isolate,
104 Handle<Object> receiver,
105 BuiltinArguments* args,
106 int first_added_arg) {
107 if (!receiver->IsJSArray()) return false;
108 Handle<JSArray> array = Handle<JSArray>::cast(receiver);
109 ElementsKind origin_kind = array->GetElementsKind();
110 if (IsDictionaryElementsKind(origin_kind)) return false;
111 if (!array->map()->is_extensible()) return false;
112 if (args == nullptr) return true;
113
114 // If there may be elements accessors in the prototype chain, the fast path
115 // cannot be used if there are arguments to add to the array.
116 if (!IsJSArrayFastElementMovingAllowed(isolate, *array)) return false;
117
118 // Adding elements to the array prototype would break code that makes sure
119 // it has no elements. Handle that elsewhere.
120 if (isolate->IsAnyInitialArrayPrototype(array)) return false;
121
122 // Need to ensure that the arguments passed in args can be contained in
123 // the array.
124 int args_length = args->length();
125 if (first_added_arg >= args_length) return true;
126
127 if (IsFastObjectElementsKind(origin_kind)) return true;
128 ElementsKind target_kind = origin_kind;
129 {
130 DisallowHeapAllocation no_gc;
131 for (int i = first_added_arg; i < args_length; i++) {
132 Object* arg = (*args)[i];
133 if (arg->IsHeapObject()) {
134 if (arg->IsHeapNumber()) {
135 target_kind = FAST_DOUBLE_ELEMENTS;
136 } else {
137 target_kind = FAST_ELEMENTS;
138 break;
139 }
140 }
141 }
142 }
143 if (target_kind != origin_kind) {
144 // Use a short-lived HandleScope to avoid creating several copies of the
145 // elements handle, which would cause issues when left-trimming later on.
146 HandleScope scope(isolate);
147 JSObject::TransitionElementsKind(array, target_kind);
148 }
149 return true;
150 }
151
152 MUST_USE_RESULT static Object* CallJsIntrinsic(Isolate* isolate,
153 Handle<JSFunction> function,
154 BuiltinArguments args) {
155 HandleScope handleScope(isolate);
156 int argc = args.length() - 1;
157 ScopedVector<Handle<Object>> argv(argc);
158 for (int i = 0; i < argc; ++i) {
159 argv[i] = args.at(i + 1);
160 }
161 RETURN_RESULT_OR_FAILURE(
162 isolate,
163 Execution::Call(isolate, function, args.receiver(), argc, argv.start()));
164 }
165 } // namespace
166
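// Array.prototype.push, C++ fast path. Illustrative JS behaviour (a sketch,
// not exhaustive):
//   var a = [1, 2]; a.push(3);  // returns the new length, 3; handled here
//   Object.seal(a).push(4);     // non-extensible receiver: JS fallback throws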
167 BUILTIN(ArrayPush) {
168 HandleScope scope(isolate);
169 Handle<Object> receiver = args.receiver();
170 if (!EnsureJSArrayWithWritableFastElements(isolate, receiver, &args, 1)) {
171 return CallJsIntrinsic(isolate, isolate->array_push(), args);
172 }
173 // Fast Elements Path
174 int to_add = args.length() - 1;
175 Handle<JSArray> array = Handle<JSArray>::cast(receiver);
176 int len = Smi::cast(array->length())->value();
177 if (to_add == 0) return Smi::FromInt(len);
178
179 // Currently fixed arrays cannot grow too big, so we should never hit this.
180 DCHECK_LE(to_add, Smi::kMaxValue - Smi::cast(array->length())->value());
181
182 if (JSArray::HasReadOnlyLength(array)) {
183 return CallJsIntrinsic(isolate, isolate->array_push(), args);
184 }
185
186 ElementsAccessor* accessor = array->GetElementsAccessor();
187 int new_length = accessor->Push(array, &args, to_add);
188 return Smi::FromInt(new_length);
189 }
190
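// CSA/TurboFan fast path for Array.prototype.push. Rough flow, reconstructed
// from the labels below: bail to the ArrayPush stub unless the receiver is a
// fast JSArray that is extensible, not a prototype, not in dictionary named
// property mode and has a writable length; then append per elements kind
// (SMI, then DOUBLE, then OBJECT), using Runtime::kSetProperty to transition
// on a single mismatching argument and the generic SetProperty loop once the
// fast path can no longer be used.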
191 void Builtins::Generate_FastArrayPush(compiler::CodeAssemblerState* state) {
192 typedef compiler::Node Node;
193 typedef CodeStubAssembler::Label Label;
194 typedef CodeStubAssembler::Variable Variable;
195 CodeStubAssembler assembler(state);
196 Variable arg_index(&assembler, MachineType::PointerRepresentation());
197 Label default_label(&assembler, &arg_index);
198 Label smi_transition(&assembler);
199 Label object_push_pre(&assembler);
200 Label object_push(&assembler, &arg_index);
201 Label double_push(&assembler, &arg_index);
202 Label double_transition(&assembler);
203 Label runtime(&assembler, Label::kDeferred);
204
205 Node* argc = assembler.Parameter(BuiltinDescriptor::kArgumentsCount);
206 Node* context = assembler.Parameter(BuiltinDescriptor::kContext);
207 Node* new_target = assembler.Parameter(BuiltinDescriptor::kNewTarget);
208
209 CodeStubArguments args(&assembler, assembler.ChangeInt32ToIntPtr(argc));
210 Node* receiver = args.GetReceiver();
211 Node* kind = nullptr;
212
213 Label fast(&assembler);
214 {
215 assembler.BranchIfFastJSArray(
216 receiver, context, CodeStubAssembler::FastJSArrayAccessMode::ANY_ACCESS,
217 &fast, &runtime);
218 }
219
220 assembler.Bind(&fast);
221 {
222 // Disallow pushing onto prototypes. It might be the JSArray prototype.
223 // Disallow pushing onto non-extensible objects.
224 assembler.Comment("Disallow pushing onto prototypes");
225 Node* map = assembler.LoadMap(receiver);
226 Node* bit_field2 = assembler.LoadMapBitField2(map);
227 int mask = static_cast<int>(Map::IsPrototypeMapBits::kMask) |
228 (1 << Map::kIsExtensible);
229 Node* test = assembler.Word32And(bit_field2, assembler.Int32Constant(mask));
230 assembler.GotoIf(
231 assembler.Word32NotEqual(
232 test, assembler.Int32Constant(1 << Map::kIsExtensible)),
233 &runtime);
234
235 // Disallow pushing onto arrays in dictionary named property mode. We need
236 // to figure out whether the length property is still writable.
237 assembler.Comment(
238 "Disallow pushing onto arrays in dictionary named property mode");
239 assembler.GotoIf(assembler.IsDictionaryMap(map), &runtime);
240
241 // Check whether the length property is writable. The length property is the
242 // only default named property on arrays. It's nonconfigurable, hence is
243 // guaranteed to stay the first property.
244 Node* descriptors = assembler.LoadMapDescriptors(map);
245 Node* details = assembler.LoadFixedArrayElement(
246 descriptors, DescriptorArray::ToDetailsIndex(0));
247 assembler.GotoIf(
248 assembler.IsSetSmi(details, PropertyDetails::kAttributesReadOnlyMask),
249 &runtime);
250
251 arg_index.Bind(assembler.IntPtrConstant(0));
252 kind = assembler.DecodeWord32<Map::ElementsKindBits>(bit_field2);
253
254 assembler.GotoIf(
255 assembler.Int32GreaterThan(
256 kind, assembler.Int32Constant(FAST_HOLEY_SMI_ELEMENTS)),
257 &object_push_pre);
258
259 Node* new_length = assembler.BuildAppendJSArray(
260 FAST_SMI_ELEMENTS, context, receiver, args, arg_index, &smi_transition);
261 args.PopAndReturn(new_length);
262 }
263
264 // If the argument is not a smi, then use a heavyweight SetProperty to
265 // transition the array for only the single next element. If the argument is
266 // a smi, the failure is due to some other reason and we should fall back on
267 // the most generic implementation for the rest of the array.
268 assembler.Bind(&smi_transition);
269 {
270 Node* arg = args.AtIndex(arg_index.value());
271 assembler.GotoIf(assembler.TaggedIsSmi(arg), &default_label);
272 Node* length = assembler.LoadJSArrayLength(receiver);
273 // TODO(danno): Use the KeyedStoreGeneric stub here when possible,
274 // calling into the runtime to do the elements transition is overkill.
275 assembler.CallRuntime(Runtime::kSetProperty, context, receiver, length, arg,
276 assembler.SmiConstant(STRICT));
277 assembler.Increment(arg_index);
278 // The runtime SetProperty call could have converted the array to dictionary
279 // mode, which must be detected to abort the fast-path.
280 Node* map = assembler.LoadMap(receiver);
281 Node* bit_field2 = assembler.LoadMapBitField2(map);
282 Node* kind = assembler.DecodeWord32<Map::ElementsKindBits>(bit_field2);
283 assembler.GotoIf(assembler.Word32Equal(
284 kind, assembler.Int32Constant(DICTIONARY_ELEMENTS)),
285 &default_label);
286
287 assembler.GotoIfNotNumber(arg, &object_push);
288 assembler.Goto(&double_push);
289 }
290
291 assembler.Bind(&object_push_pre);
292 {
293 assembler.Branch(assembler.Int32GreaterThan(
294 kind, assembler.Int32Constant(FAST_HOLEY_ELEMENTS)),
295 &double_push, &object_push);
296 }
297
298 assembler.Bind(&object_push);
299 {
300 Node* new_length = assembler.BuildAppendJSArray(
301 FAST_ELEMENTS, context, receiver, args, arg_index, &default_label);
302 args.PopAndReturn(new_length);
303 }
304
305 assembler.Bind(&double_push);
306 {
307 Node* new_length =
308 assembler.BuildAppendJSArray(FAST_DOUBLE_ELEMENTS, context, receiver,
309 args, arg_index, &double_transition);
310 args.PopAndReturn(new_length);
311 }
312
313 // If the argument is not a double, then use a heavyweight SetProperty to
314 // transition the array for only the single next element. If the argument is
315 // a double, the failure is due to some other reason and we should fall back
316 // on the most generic implementation for the rest of the array.
317 assembler.Bind(&double_transition);
318 {
319 Node* arg = args.AtIndex(arg_index.value());
320 assembler.GotoIfNumber(arg, &default_label);
321 Node* length = assembler.LoadJSArrayLength(receiver);
322 // TODO(danno): Use the KeyedStoreGeneric stub here when possible,
323 // calling into the runtime to do the elements transition is overkill.
324 assembler.CallRuntime(Runtime::kSetProperty, context, receiver, length, arg,
325 assembler.SmiConstant(STRICT));
326 assembler.Increment(arg_index);
327 // The runtime SetProperty call could have converted the array to dictionary
328 // mode, which must be detected to abort the fast-path.
329 Node* map = assembler.LoadMap(receiver);
330 Node* bit_field2 = assembler.LoadMapBitField2(map);
331 Node* kind = assembler.DecodeWord32<Map::ElementsKindBits>(bit_field2);
332 assembler.GotoIf(assembler.Word32Equal(
333 kind, assembler.Int32Constant(DICTIONARY_ELEMENTS)),
334 &default_label);
335 assembler.Goto(&object_push);
336 }
337
338 // Fallback that stores un-processed arguments using the full, heavyweight
339 // SetProperty machinery.
340 assembler.Bind(&default_label);
341 {
342 args.ForEach(
343 [&assembler, receiver, context](Node* arg) {
344 Node* length = assembler.LoadJSArrayLength(receiver);
345 assembler.CallRuntime(Runtime::kSetProperty, context, receiver,
346 length, arg, assembler.SmiConstant(STRICT));
347 },
348 arg_index.value());
349 args.PopAndReturn(assembler.LoadJSArrayLength(receiver));
350 }
351
352 assembler.Bind(&runtime);
353 {
354 Node* target = assembler.LoadFromFrame(
355 StandardFrameConstants::kFunctionOffset, MachineType::TaggedPointer());
356 assembler.TailCallStub(CodeFactory::ArrayPush(assembler.isolate()), context,
357 target, new_target, argc);
358 }
359 }
360
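// Array.prototype.pop. The ElementsAccessor fast path shrinks the backing
// store in place; if the prototype chain may contribute elements, the slower
// GetElement + SetLength sequence below is used instead.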
361 BUILTIN(ArrayPop) {
362 HandleScope scope(isolate);
363 Handle<Object> receiver = args.receiver();
364 if (!EnsureJSArrayWithWritableFastElements(isolate, receiver, nullptr, 0)) {
365 return CallJsIntrinsic(isolate, isolate->array_pop(), args);
366 }
367
368 Handle<JSArray> array = Handle<JSArray>::cast(receiver);
369
370 uint32_t len = static_cast<uint32_t>(Smi::cast(array->length())->value());
371 if (len == 0) return isolate->heap()->undefined_value();
372
373 if (JSArray::HasReadOnlyLength(array)) {
374 return CallJsIntrinsic(isolate, isolate->array_pop(), args);
375 }
376
377 Handle<Object> result;
378 if (IsJSArrayFastElementMovingAllowed(isolate, JSArray::cast(*receiver))) {
379 // Fast Elements Path
380 result = array->GetElementsAccessor()->Pop(array);
381 } else {
382 // Use Slow Lookup otherwise
383 uint32_t new_length = len - 1;
384 ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
385 isolate, result, JSReceiver::GetElement(isolate, array, new_length));
386 JSArray::SetLength(array, new_length);
387 }
388 return *result;
389 }
390
391 BUILTIN(ArrayShift) {
392 HandleScope scope(isolate);
393 Heap* heap = isolate->heap();
394 Handle<Object> receiver = args.receiver();
395 if (!EnsureJSArrayWithWritableFastElements(isolate, receiver, nullptr, 0) ||
396 !IsJSArrayFastElementMovingAllowed(isolate, JSArray::cast(*receiver))) {
397 return CallJsIntrinsic(isolate, isolate->array_shift(), args);
398 }
399 Handle<JSArray> array = Handle<JSArray>::cast(receiver);
400
401 int len = Smi::cast(array->length())->value();
402 if (len == 0) return heap->undefined_value();
403
404 if (JSArray::HasReadOnlyLength(array)) {
405 return CallJsIntrinsic(isolate, isolate->array_shift(), args);
406 }
407
408 Handle<Object> first = array->GetElementsAccessor()->Shift(array);
409 return *first;
410 }
411
412 BUILTIN(ArrayUnshift) {
413 HandleScope scope(isolate);
414 Handle<Object> receiver = args.receiver();
415 if (!EnsureJSArrayWithWritableFastElements(isolate, receiver, &args, 1)) {
416 return CallJsIntrinsic(isolate, isolate->array_unshift(), args);
417 }
418 Handle<JSArray> array = Handle<JSArray>::cast(receiver);
419 int to_add = args.length() - 1;
420 if (to_add == 0) return array->length();
421
422 // Currently fixed arrays cannot grow too big, so we should never hit this.
423 DCHECK_LE(to_add, Smi::kMaxValue - Smi::cast(array->length())->value());
424
425 if (JSArray::HasReadOnlyLength(array)) {
426 return CallJsIntrinsic(isolate, isolate->array_unshift(), args);
427 }
428
429 ElementsAccessor* accessor = array->GetElementsAccessor();
430 int new_length = accessor->Unshift(array, &args, to_add);
431 return Smi::FromInt(new_length);
432 }
433
434 class ForEachCodeStubAssembler : public CodeStubAssembler {
435 public:
436 explicit ForEachCodeStubAssembler(compiler::CodeAssemblerState* state)
437 : CodeStubAssembler(state) {}
438
439 void VisitOneElement(Node* context, Node* this_arg, Node* o, Node* k,
440 Node* callbackfn) {
441 Comment("begin VisitOneElement");
442
443 // a. Let Pk be ToString(k).
444 Node* p_k = ToString(context, k);
445
446 // b. Let kPresent be HasProperty(O, Pk).
447 // c. ReturnIfAbrupt(kPresent).
448 Node* k_present =
449 CallStub(CodeFactory::HasProperty(isolate()), context, p_k, o);
450
451 // d. If kPresent is true, then
452 Label not_present(this);
453 GotoIf(WordNotEqual(k_present, TrueConstant()), &not_present);
454
455 // i. Let kValue be Get(O, Pk).
456 // ii. ReturnIfAbrupt(kValue).
457 Node* k_value =
458 CallStub(CodeFactory::GetProperty(isolate()), context, o, k);
459
460 // iii. Let funcResult be Call(callbackfn, T, «kValue, k, O»).
461 // iv. ReturnIfAbrupt(funcResult).
462 CallJS(CodeFactory::Call(isolate()), context, callbackfn, this_arg, k_value,
463 k, o);
464
465 Goto(&not_present);
466 Bind(&not_present);
467 Comment("end VisitOneElement");
468 }
469
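// Iterates the receiver's backing store directly for FAST_ELEMENTS or
// FAST_DOUBLE_ELEMENTS. Each iteration re-checks the map and the length so
// that a callback which mutates the array forces a switch to the
// spec-faithful VisitOneElement path for the remaining indices.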
470 void VisitAllFastElements(Node* context, ElementsKind kind, Node* this_arg,
471 Node* o, Node* len, Node* callbackfn,
472 ParameterMode mode) {
473 Comment("begin VisitAllFastElements");
474 Variable original_map(this, MachineRepresentation::kTagged);
475 original_map.Bind(LoadMap(o));
476 VariableList list({&original_map}, zone());
477 BuildFastLoop(
478 list, IntPtrOrSmiConstant(0, mode), TaggedToParameter(len, mode),
479 [context, kind, this, o, &original_map, callbackfn, this_arg,
480 mode](Node* index) {
481 Label one_element_done(this), array_changed(this, Label::kDeferred),
482 hole_element(this);
483
484 // Check if o's map has changed during the callback. If so, we have to
485 // fall back to the slower spec implementation for the rest of the
486 // iteration.
487 Node* o_map = LoadMap(o);
488 GotoIf(WordNotEqual(o_map, original_map.value()), &array_changed);
489
490 // Check if o's length has changed during the callback and if the
491 // index is now out of range of the new length.
492 Node* tagged_index = ParameterToTagged(index, mode);
493 GotoIf(SmiGreaterThanOrEqual(tagged_index, LoadJSArrayLength(o)),
494 &array_changed);
495
496 // Re-load the elements array. It may have been resized.
497 Node* elements = LoadElements(o);
498
499 // Fast case: load the element directly from the elements FixedArray
500 // and call the callback if the element is not the hole.
501 DCHECK(kind == FAST_ELEMENTS || kind == FAST_DOUBLE_ELEMENTS);
502 int base_size = kind == FAST_ELEMENTS
503 ? FixedArray::kHeaderSize
504 : (FixedArray::kHeaderSize - kHeapObjectTag);
505 Node* offset = ElementOffsetFromIndex(index, kind, mode, base_size);
506 Node* value = nullptr;
507 if (kind == FAST_ELEMENTS) {
508 value = LoadObjectField(elements, offset);
509 GotoIf(WordEqual(value, TheHoleConstant()), &hole_element);
510 } else {
511 Node* double_value =
512 LoadDoubleWithHoleCheck(elements, offset, &hole_element);
513 value = AllocateHeapNumberWithValue(double_value);
514 }
515 CallJS(CodeFactory::Call(isolate()), context, callbackfn, this_arg,
516 value, tagged_index, o);
517 Goto(&one_element_done);
518
519 Bind(&hole_element);
520 BranchIfPrototypesHaveNoElements(o_map, &one_element_done,
521 &array_changed);
522
523 // O's map changed during the forEach callback. Use the implementation
524 // precisely specified in the spec for the rest of the iteration, also
525 // making the failed original_map sticky in case of a subsequent change
526 // that goes back to the original map.
527 Bind(&array_changed);
528 VisitOneElement(context, this_arg, o, ParameterToTagged(index, mode),
529 callbackfn);
530 original_map.Bind(UndefinedConstant());
531 Goto(&one_element_done);
532
533 Bind(&one_element_done);
534 },
535 1, mode, IndexAdvanceMode::kPost);
536 Comment("end VisitAllFastElements");
537 }
538 };
539
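// Array.prototype.forEach as a TurboFan builtin. Fast JSArrays with smi
// lengths are handled by VisitAllFastElements; everything else (proxies,
// non-smi lengths, dictionary elements and other non-fast receivers) follows
// the spec text step by step in the slow loop below.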
540 TF_BUILTIN(ArrayForEach, ForEachCodeStubAssembler) {
541 Label non_array(this), examine_elements(this), fast_elements(this),
542 slow(this), maybe_double_elements(this), fast_double_elements(this);
543
544 Node* receiver = Parameter(ForEachDescriptor::kReceiver);
545 Node* callbackfn = Parameter(ForEachDescriptor::kCallback);
546 Node* this_arg = Parameter(ForEachDescriptor::kThisArg);
547 Node* context = Parameter(ForEachDescriptor::kContext);
548
549 // TODO(danno): Seriously? Do we really need to throw the exact error message
550 // on null and undefined so that the webkit tests pass?
551 Label throw_null_undefined_exception(this, Label::kDeferred);
552 GotoIf(WordEqual(receiver, NullConstant()), &throw_null_undefined_exception);
553 GotoIf(WordEqual(receiver, UndefinedConstant()),
554 &throw_null_undefined_exception);
555
556 // By the book: taken directly from the ECMAScript 2015 specification
557
558 // 1. Let O be ToObject(this value).
559 // 2. ReturnIfAbrupt(O)
560 Node* o = CallStub(CodeFactory::ToObject(isolate()), context, receiver);
561
562 // 3. Let len be ToLength(Get(O, "length")).
563 // 4. ReturnIfAbrupt(len).
564 Variable merged_length(this, MachineRepresentation::kTagged);
565 Label has_length(this, &merged_length), not_js_array(this);
566 GotoIf(DoesntHaveInstanceType(o, JS_ARRAY_TYPE), &not_js_array);
567 merged_length.Bind(LoadJSArrayLength(o));
568 Goto(&has_length);
569 Bind(&not_js_array);
570 Node* len_property =
571 CallStub(CodeFactory::GetProperty(isolate()), context, o,
572 HeapConstant(isolate()->factory()->length_string()));
573 merged_length.Bind(
574 CallStub(CodeFactory::ToLength(isolate()), context, len_property));
575 Goto(&has_length);
576 Bind(&has_length);
577 Node* len = merged_length.value();
578
579 // 5. If IsCallable(callbackfn) is false, throw a TypeError exception.
580 Label type_exception(this, Label::kDeferred);
581 GotoIf(TaggedIsSmi(callbackfn), &type_exception);
582 GotoIfNot(IsCallableMap(LoadMap(callbackfn)), &type_exception);
583
584 // 6. If thisArg was supplied, let T be thisArg; else let T be undefined.
585 // [Already done by the arguments adapter]
586
587 // Non-smi lengths must use the slow path.
588 GotoIf(TaggedIsNotSmi(len), &slow);
589
590 BranchIfFastJSArray(o, context,
591 CodeStubAssembler::FastJSArrayAccessMode::INBOUNDS_READ,
592 &examine_elements, &slow);
593
594 Bind(&examine_elements);
595
596 ParameterMode mode = OptimalParameterMode();
597
598 // Select by ElementsKind
599 Node* o_map = LoadMap(o);
600 Node* bit_field2 = LoadMapBitField2(o_map);
601 Node* kind = DecodeWord32<Map::ElementsKindBits>(bit_field2);
602 Branch(Int32GreaterThan(kind, Int32Constant(FAST_HOLEY_ELEMENTS)),
603 &maybe_double_elements, &fast_elements);
604
605 Bind(&fast_elements);
606 {
607 VisitAllFastElements(context, FAST_ELEMENTS, this_arg, o, len, callbackfn,
608 mode);
609
610 // No exception, return success
611 Return(UndefinedConstant());
612 }
613
614 Bind(&maybe_double_elements);
615 Branch(Int32GreaterThan(kind, Int32Constant(FAST_HOLEY_DOUBLE_ELEMENTS)),
616 &slow, &fast_double_elements);
617
618 Bind(&fast_double_elements);
619 {
620 VisitAllFastElements(context, FAST_DOUBLE_ELEMENTS, this_arg, o, len,
621 callbackfn, mode);
622
623 // No exception, return success
624 Return(UndefinedConstant());
625 }
626
627 Bind(&slow);
628 {
629 // By the book: taken from the ECMAScript 2015 specification (cont.)
630
631 // 7. Let k be 0.
632 Variable k(this, MachineRepresentation::kTagged);
633 k.Bind(SmiConstant(0));
634
635 // 8. Repeat, while k < len
636 Label loop(this, &k);
637 Label after_loop(this);
638 Goto(&loop);
639 Bind(&loop);
640 {
641 GotoUnlessNumberLessThan(k.value(), len, &after_loop);
642
643 VisitOneElement(context, this_arg, o, k.value(), callbackfn);
644
645 // e. Increase k by 1.
646 k.Bind(NumberInc(k.value()));
647 Goto(&loop);
648 }
649 Bind(&after_loop);
650 Return(UndefinedConstant());
651 }
652
653 Bind(&throw_null_undefined_exception);
654 {
655 CallRuntime(Runtime::kThrowTypeError, context,
656 SmiConstant(MessageTemplate::kCalledOnNullOrUndefined),
657 HeapConstant(isolate()->factory()->NewStringFromAsciiChecked(
658 "Array.prototype.forEach")));
659 Unreachable();
660 }
661
662 Bind(&type_exception);
663 {
664 CallRuntime(Runtime::kThrowTypeError, context,
665 SmiConstant(MessageTemplate::kCalledNonCallable), callbackfn);
666 Unreachable();
667 }
668 }
669
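// Array.prototype.slice, C++ fast path for plain arrays and for sloppy
// arguments objects. Illustrative clamping of the start/end arguments
// (assuming a plain array):
//   [1, 2, 3].slice(1, -1)  // -> [2]
//   [1, 2, 3].slice(-2)     // -> [2, 3]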
670 BUILTIN(ArraySlice) {
671 HandleScope scope(isolate);
672 Handle<Object> receiver = args.receiver();
673 int len = -1;
674 int relative_start = 0;
675 int relative_end = 0;
676
677 if (receiver->IsJSArray()) {
678 DisallowHeapAllocation no_gc;
679 JSArray* array = JSArray::cast(*receiver);
680 if (V8_UNLIKELY(!array->HasFastElements() ||
681 !IsJSArrayFastElementMovingAllowed(isolate, array) ||
682 !isolate->IsArraySpeciesLookupChainIntact() ||
683 // If this is a subclass of Array, then call out to JS
684 !array->HasArrayPrototype(isolate))) {
685 AllowHeapAllocation allow_allocation;
686 return CallJsIntrinsic(isolate, isolate->array_slice(), args);
687 }
688 len = Smi::cast(array->length())->value();
689 } else if (receiver->IsJSObject() &&
690 GetSloppyArgumentsLength(isolate, Handle<JSObject>::cast(receiver),
691 &len)) {
692 // Array.prototype.slice.call(arguments, ...) is quite a common idiom
693 // (notably more than 50% of invocations in Web apps).
694 // Treat it in C++ as well.
695 DCHECK(JSObject::cast(*receiver)->HasFastElements() ||
696 JSObject::cast(*receiver)->HasFastArgumentsElements());
697 } else {
698 AllowHeapAllocation allow_allocation;
699 return CallJsIntrinsic(isolate, isolate->array_slice(), args);
700 }
701 DCHECK_LE(0, len);
702 int argument_count = args.length() - 1;
703 // Note carefully chosen defaults---if argument is missing,
704 // it's undefined which gets converted to 0 for relative_start
705 // and to len for relative_end.
706 relative_start = 0;
707 relative_end = len;
708 if (argument_count > 0) {
709 DisallowHeapAllocation no_gc;
710 if (!ClampedToInteger(isolate, args[1], &relative_start)) {
711 AllowHeapAllocation allow_allocation;
712 return CallJsIntrinsic(isolate, isolate->array_slice(), args);
713 }
714 if (argument_count > 1) {
715 Object* end_arg = args[2];
716 // slice handles the end_arg specially
717 if (end_arg->IsUndefined(isolate)) {
718 relative_end = len;
719 } else if (!ClampedToInteger(isolate, end_arg, &relative_end)) {
720 AllowHeapAllocation allow_allocation;
721 return CallJsIntrinsic(isolate, isolate->array_slice(), args);
722 }
723 }
724 }
725
726 // ECMA-262, 3rd Edition, Section 15.4.4.10, step 6.
727 uint32_t actual_start = (relative_start < 0) ? Max(len + relative_start, 0)
728 : Min(relative_start, len);
729
730 // ECMA-262, 3rd Edition, Section 15.4.4.10, step 8.
731 uint32_t actual_end =
732 (relative_end < 0) ? Max(len + relative_end, 0) : Min(relative_end, len);
733
734 Handle<JSObject> object = Handle<JSObject>::cast(receiver);
735 ElementsAccessor* accessor = object->GetElementsAccessor();
736 return *accessor->Slice(object, actual_start, actual_end);
737 }
738
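// Array.prototype.splice, C++ fast path. Illustrative behaviour (assuming a
// plain array with fast elements):
//   [1, 2, 3, 4].splice(1, 2, 'x')  // -> [2, 3]; the array becomes [1, 'x', 4]
//   [1, 2, 3].splice(1)             // single argument deletes to the end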
739 BUILTIN(ArraySplice) {
740 HandleScope scope(isolate);
741 Handle<Object> receiver = args.receiver();
742 if (V8_UNLIKELY(
743 !EnsureJSArrayWithWritableFastElements(isolate, receiver, &args, 3) ||
744 // If this is a subclass of Array, then call out to JS.
745 !Handle<JSArray>::cast(receiver)->HasArrayPrototype(isolate) ||
746 // If anything with @@species has been messed with, call out to JS.
747 !isolate->IsArraySpeciesLookupChainIntact())) {
748 return CallJsIntrinsic(isolate, isolate->array_splice(), args);
749 }
750 Handle<JSArray> array = Handle<JSArray>::cast(receiver);
751
752 int argument_count = args.length() - 1;
753 int relative_start = 0;
754 if (argument_count > 0) {
755 DisallowHeapAllocation no_gc;
756 if (!ClampedToInteger(isolate, args[1], &relative_start)) {
757 AllowHeapAllocation allow_allocation;
758 return CallJsIntrinsic(isolate, isolate->array_splice(), args);
759 }
760 }
761 int len = Smi::cast(array->length())->value();
762 // clip relative start to [0, len]
763 int actual_start = (relative_start < 0) ? Max(len + relative_start, 0)
764 : Min(relative_start, len);
765
766 int actual_delete_count;
767 if (argument_count == 1) {
768 // SpiderMonkey, TraceMonkey and JSC treat the case where no delete count
769 // is given as a request to delete all the elements from the start, which
770 // differs from the case of an undefined delete count.
771 // This does not follow ECMA-262, but we do the same for compatibility.
772 DCHECK(len - actual_start >= 0);
773 actual_delete_count = len - actual_start;
774 } else {
775 int delete_count = 0;
776 DisallowHeapAllocation no_gc;
777 if (argument_count > 1) {
778 if (!ClampedToInteger(isolate, args[2], &delete_count)) {
779 AllowHeapAllocation allow_allocation;
780 return CallJsIntrinsic(isolate, isolate->array_splice(), args);
781 }
782 }
783 actual_delete_count = Min(Max(delete_count, 0), len - actual_start);
784 }
785
786 int add_count = (argument_count > 1) ? (argument_count - 2) : 0;
787 int new_length = len - actual_delete_count + add_count;
788
789 if (new_length != len && JSArray::HasReadOnlyLength(array)) {
790 AllowHeapAllocation allow_allocation;
791 return CallJsIntrinsic(isolate, isolate->array_splice(), args);
792 }
793 ElementsAccessor* accessor = array->GetElementsAccessor();
794 Handle<JSArray> result_array = accessor->Splice(
795 array, actual_start, actual_delete_count, &args, add_count);
796 return *result_array;
797 }
798
799 // Array Concat -------------------------------------------------------------
800
801 namespace {
802
803 /**
804 * A simple visitor that visits every element of an array.
805 * The backing storage can be a fixed array for the fast-elements case,
806 * or a dictionary for sparse arrays. Since Dictionary is a subtype
807 * of FixedArray, the class can be used by both fast and slow cases.
808 * The second parameter of the constructor, fast_elements, specifies
809 * whether the storage is a FixedArray or a Dictionary.
810 *
811 * An index limit is used to deal with the situation where the result array
812 * length overflows the 32-bit non-negative integer range.
813 */
814 class ArrayConcatVisitor {
815 public:
816 ArrayConcatVisitor(Isolate* isolate, Handle<HeapObject> storage,
817 bool fast_elements)
818 : isolate_(isolate),
819 storage_(isolate->global_handles()->Create(*storage)),
820 index_offset_(0u),
821 bit_field_(
822 FastElementsField::encode(fast_elements) |
823 ExceedsLimitField::encode(false) |
824 IsFixedArrayField::encode(storage->IsFixedArray()) |
825 HasSimpleElementsField::encode(storage->IsFixedArray() ||
826 storage->map()->instance_type() >
827 LAST_CUSTOM_ELEMENTS_RECEIVER)) {
828 DCHECK(!(this->fast_elements() && !is_fixed_array()));
829 }
830
831 ~ArrayConcatVisitor() { clear_storage(); }
832
833 MUST_USE_RESULT bool visit(uint32_t i, Handle<Object> elm) {
834 uint32_t index = index_offset_ + i;
835
836 if (i >= JSObject::kMaxElementCount - index_offset_) {
837 set_exceeds_array_limit(true);
838 // Exception hasn't been thrown at this point. Return true to
839 // break out, and caller will throw. !visit would imply that
840 // there is already a pending exception.
841 return true;
842 }
843
844 if (!is_fixed_array()) {
845 LookupIterator it(isolate_, storage_, index, LookupIterator::OWN);
846 MAYBE_RETURN(
847 JSReceiver::CreateDataProperty(&it, elm, Object::THROW_ON_ERROR),
848 false);
849 return true;
850 }
851
852 if (fast_elements()) {
853 if (index < static_cast<uint32_t>(storage_fixed_array()->length())) {
854 storage_fixed_array()->set(index, *elm);
855 return true;
856 }
857 // Our initial estimate of length was foiled, possibly by
858 // getters on the arrays increasing the length of later arrays
859 // during iteration.
860 // This shouldn't happen in anything but pathological cases.
861 SetDictionaryMode();
862 // Fall-through to dictionary mode.
863 }
864 DCHECK(!fast_elements());
865 Handle<SeededNumberDictionary> dict(
866 SeededNumberDictionary::cast(*storage_));
867 // The object holding this backing store has just been allocated, so
868 // it cannot yet be used as a prototype.
869 Handle<JSObject> not_a_prototype_holder;
870 Handle<SeededNumberDictionary> result = SeededNumberDictionary::AtNumberPut(
871 dict, index, elm, not_a_prototype_holder);
872 if (!result.is_identical_to(dict)) {
873 // Dictionary needed to grow.
874 clear_storage();
875 set_storage(*result);
876 }
877 return true;
878 }
879
880 void increase_index_offset(uint32_t delta) {
881 if (JSObject::kMaxElementCount - index_offset_ < delta) {
882 index_offset_ = JSObject::kMaxElementCount;
883 } else {
884 index_offset_ += delta;
885 }
886 // If the initial length estimate was off (see special case in visit()),
887 // but the array blowing the limit didn't contain elements beyond the
888 // provided-for index range, go to dictionary mode now.
889 if (fast_elements() &&
890 index_offset_ >
891 static_cast<uint32_t>(FixedArrayBase::cast(*storage_)->length())) {
892 SetDictionaryMode();
893 }
894 }
895
896 bool exceeds_array_limit() const {
897 return ExceedsLimitField::decode(bit_field_);
898 }
899
900 Handle<JSArray> ToArray() {
901 DCHECK(is_fixed_array());
902 Handle<JSArray> array = isolate_->factory()->NewJSArray(0);
903 Handle<Object> length =
904 isolate_->factory()->NewNumber(static_cast<double>(index_offset_));
905 Handle<Map> map = JSObject::GetElementsTransitionMap(
906 array, fast_elements() ? FAST_HOLEY_ELEMENTS : DICTIONARY_ELEMENTS);
907 array->set_map(*map);
908 array->set_length(*length);
909 array->set_elements(*storage_fixed_array());
910 return array;
911 }
912
913 // Storage is either a FixedArray (if is_fixed_array()) or a JSReceiver
914 // (otherwise).
915 Handle<FixedArray> storage_fixed_array() {
916 DCHECK(is_fixed_array());
917 DCHECK(has_simple_elements());
918 return Handle<FixedArray>::cast(storage_);
919 }
920 Handle<JSReceiver> storage_jsreceiver() {
921 DCHECK(!is_fixed_array());
922 return Handle<JSReceiver>::cast(storage_);
923 }
924 bool has_simple_elements() const {
925 return HasSimpleElementsField::decode(bit_field_);
926 }
927
928 private:
929 // Convert storage to dictionary mode.
930 void SetDictionaryMode() {
931 DCHECK(fast_elements() && is_fixed_array());
932 Handle<FixedArray> current_storage = storage_fixed_array();
933 Handle<SeededNumberDictionary> slow_storage(
934 SeededNumberDictionary::New(isolate_, current_storage->length()));
935 uint32_t current_length = static_cast<uint32_t>(current_storage->length());
936 FOR_WITH_HANDLE_SCOPE(
937 isolate_, uint32_t, i = 0, i, i < current_length, i++, {
938 Handle<Object> element(current_storage->get(i), isolate_);
939 if (!element->IsTheHole(isolate_)) {
940 // The object holding this backing store has just been allocated, so
941 // it cannot yet be used as a prototype.
942 Handle<JSObject> not_a_prototype_holder;
943 Handle<SeededNumberDictionary> new_storage =
944 SeededNumberDictionary::AtNumberPut(slow_storage, i, element,
945 not_a_prototype_holder);
946 if (!new_storage.is_identical_to(slow_storage)) {
947 slow_storage = loop_scope.CloseAndEscape(new_storage);
948 }
949 }
950 });
951 clear_storage();
952 set_storage(*slow_storage);
953 set_fast_elements(false);
954 }
955
956 inline void clear_storage() { GlobalHandles::Destroy(storage_.location()); }
957
958 inline void set_storage(FixedArray* storage) {
959 DCHECK(is_fixed_array());
960 DCHECK(has_simple_elements());
961 storage_ = isolate_->global_handles()->Create(storage);
962 }
963
964 class FastElementsField : public BitField<bool, 0, 1> {};
965 class ExceedsLimitField : public BitField<bool, 1, 1> {};
966 class IsFixedArrayField : public BitField<bool, 2, 1> {};
967 class HasSimpleElementsField : public BitField<bool, 3, 1> {};
968
969 bool fast_elements() const { return FastElementsField::decode(bit_field_); }
970 void set_fast_elements(bool fast) {
971 bit_field_ = FastElementsField::update(bit_field_, fast);
972 }
973 void set_exceeds_array_limit(bool exceeds) {
974 bit_field_ = ExceedsLimitField::update(bit_field_, exceeds);
975 }
976 bool is_fixed_array() const { return IsFixedArrayField::decode(bit_field_); }
977
978 Isolate* isolate_;
979 Handle<Object> storage_; // Always a global handle.
980 // Index after last seen index. Always less than or equal to
981 // JSObject::kMaxElementCount.
982 uint32_t index_offset_;
983 uint32_t bit_field_;
984 };
985
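// Estimates the number of own, non-hole elements of |array|: fast kinds are
// scanned for holes, dictionary elements are counted by occupied slots, and
// typed arrays are dense so their length is returned directly. Inherited
// elements are deliberately ignored.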
986 uint32_t EstimateElementCount(Handle<JSArray> array) {
987 DisallowHeapAllocation no_gc;
988 uint32_t length = static_cast<uint32_t>(array->length()->Number());
989 int element_count = 0;
990 switch (array->GetElementsKind()) {
991 case FAST_SMI_ELEMENTS:
992 case FAST_HOLEY_SMI_ELEMENTS:
993 case FAST_ELEMENTS:
994 case FAST_HOLEY_ELEMENTS: {
995 // Fast elements can't have lengths that are not representable by
996 // a 32-bit signed integer.
997 DCHECK(static_cast<int32_t>(FixedArray::kMaxLength) >= 0);
998 int fast_length = static_cast<int>(length);
999 Isolate* isolate = array->GetIsolate();
1000 FixedArray* elements = FixedArray::cast(array->elements());
1001 for (int i = 0; i < fast_length; i++) {
1002 if (!elements->get(i)->IsTheHole(isolate)) element_count++;
1003 }
1004 break;
1005 }
1006 case FAST_DOUBLE_ELEMENTS:
1007 case FAST_HOLEY_DOUBLE_ELEMENTS: {
1008 // Fast elements can't have lengths that are not representable by
1009 // a 32-bit signed integer.
1010 DCHECK(static_cast<int32_t>(FixedDoubleArray::kMaxLength) >= 0);
1011 int fast_length = static_cast<int>(length);
1012 if (array->elements()->IsFixedArray()) {
1013 DCHECK(FixedArray::cast(array->elements())->length() == 0);
1014 break;
1015 }
1016 FixedDoubleArray* elements = FixedDoubleArray::cast(array->elements());
1017 for (int i = 0; i < fast_length; i++) {
1018 if (!elements->is_the_hole(i)) element_count++;
1019 }
1020 break;
1021 }
1022 case DICTIONARY_ELEMENTS: {
1023 SeededNumberDictionary* dictionary =
1024 SeededNumberDictionary::cast(array->elements());
1025 Isolate* isolate = dictionary->GetIsolate();
1026 int capacity = dictionary->Capacity();
1027 for (int i = 0; i < capacity; i++) {
1028 Object* key = dictionary->KeyAt(i);
1029 if (dictionary->IsKey(isolate, key)) {
1030 element_count++;
1031 }
1032 }
1033 break;
1034 }
1035 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) case TYPE##_ELEMENTS:
1036
1037 TYPED_ARRAYS(TYPED_ARRAY_CASE)
1038 #undef TYPED_ARRAY_CASE
1039 // External arrays are always dense.
1040 return length;
1041 case NO_ELEMENTS:
1042 return 0;
1043 case FAST_SLOPPY_ARGUMENTS_ELEMENTS:
1044 case SLOW_SLOPPY_ARGUMENTS_ELEMENTS:
1045 case FAST_STRING_WRAPPER_ELEMENTS:
1046 case SLOW_STRING_WRAPPER_ELEMENTS:
1047 UNREACHABLE();
1048 return 0;
1049 }
1050 // As an estimate, we assume that the prototype doesn't contain any
1051 // inherited elements.
1052 return element_count;
1053 }
1054
1055 // Used for sorting indices in a List<uint32_t>.
1056 int compareUInt32(const uint32_t* ap, const uint32_t* bp) {
1057 uint32_t a = *ap;
1058 uint32_t b = *bp;
1059 return (a == b) ? 0 : (a < b) ? -1 : 1;
1060 }
1061
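// Appends to |indices| every element index below |range| that exists on
// |object| or anywhere on its prototype chain. Duplicates are possible; the
// caller sorts the list and skips repeated indices.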
1062 void CollectElementIndices(Handle<JSObject> object, uint32_t range,
1063 List<uint32_t>* indices) {
1064 Isolate* isolate = object->GetIsolate();
1065 ElementsKind kind = object->GetElementsKind();
1066 switch (kind) {
1067 case FAST_SMI_ELEMENTS:
1068 case FAST_ELEMENTS:
1069 case FAST_HOLEY_SMI_ELEMENTS:
1070 case FAST_HOLEY_ELEMENTS: {
1071 DisallowHeapAllocation no_gc;
1072 FixedArray* elements = FixedArray::cast(object->elements());
1073 uint32_t length = static_cast<uint32_t>(elements->length());
1074 if (range < length) length = range;
1075 for (uint32_t i = 0; i < length; i++) {
1076 if (!elements->get(i)->IsTheHole(isolate)) {
1077 indices->Add(i);
1078 }
1079 }
1080 break;
1081 }
1082 case FAST_HOLEY_DOUBLE_ELEMENTS:
1083 case FAST_DOUBLE_ELEMENTS: {
1084 if (object->elements()->IsFixedArray()) {
1085 DCHECK(object->elements()->length() == 0);
1086 break;
1087 }
1088 Handle<FixedDoubleArray> elements(
1089 FixedDoubleArray::cast(object->elements()));
1090 uint32_t length = static_cast<uint32_t>(elements->length());
1091 if (range < length) length = range;
1092 for (uint32_t i = 0; i < length; i++) {
1093 if (!elements->is_the_hole(i)) {
1094 indices->Add(i);
1095 }
1096 }
1097 break;
1098 }
1099 case DICTIONARY_ELEMENTS: {
1100 DisallowHeapAllocation no_gc;
1101 SeededNumberDictionary* dict =
1102 SeededNumberDictionary::cast(object->elements());
1103 uint32_t capacity = dict->Capacity();
1104 FOR_WITH_HANDLE_SCOPE(isolate, uint32_t, j = 0, j, j < capacity, j++, {
1105 Object* k = dict->KeyAt(j);
1106 if (!dict->IsKey(isolate, k)) continue;
1107 DCHECK(k->IsNumber());
1108 uint32_t index = static_cast<uint32_t>(k->Number());
1109 if (index < range) {
1110 indices->Add(index);
1111 }
1112 });
1113 break;
1114 }
1115 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) case TYPE##_ELEMENTS:
1116
1117 TYPED_ARRAYS(TYPED_ARRAY_CASE)
1118 #undef TYPED_ARRAY_CASE
1119 {
1120 uint32_t length = static_cast<uint32_t>(
1121 FixedArrayBase::cast(object->elements())->length());
1122 if (range <= length) {
1123 length = range;
1124 // We will add all indices, so we might as well clear it first
1125 // and avoid duplicates.
1126 indices->Clear();
1127 }
1128 for (uint32_t i = 0; i < length; i++) {
1129 indices->Add(i);
1130 }
1131 if (length == range) return; // All indices accounted for already.
1132 break;
1133 }
1134 case FAST_SLOPPY_ARGUMENTS_ELEMENTS:
1135 case SLOW_SLOPPY_ARGUMENTS_ELEMENTS: {
1136 ElementsAccessor* accessor = object->GetElementsAccessor();
1137 for (uint32_t i = 0; i < range; i++) {
1138 if (accessor->HasElement(object, i)) {
1139 indices->Add(i);
1140 }
1141 }
1142 break;
1143 }
1144 case FAST_STRING_WRAPPER_ELEMENTS:
1145 case SLOW_STRING_WRAPPER_ELEMENTS: {
1146 DCHECK(object->IsJSValue());
1147 Handle<JSValue> js_value = Handle<JSValue>::cast(object);
1148 DCHECK(js_value->value()->IsString());
1149 Handle<String> string(String::cast(js_value->value()), isolate);
1150 uint32_t length = static_cast<uint32_t>(string->length());
1151 uint32_t i = 0;
1152 uint32_t limit = Min(length, range);
1153 for (; i < limit; i++) {
1154 indices->Add(i);
1155 }
1156 ElementsAccessor* accessor = object->GetElementsAccessor();
1157 for (; i < range; i++) {
1158 if (accessor->HasElement(object, i)) {
1159 indices->Add(i);
1160 }
1161 }
1162 break;
1163 }
1164 case NO_ELEMENTS:
1165 break;
1166 }
1167
1168 PrototypeIterator iter(isolate, object);
1169 if (!iter.IsAtEnd()) {
1170 // The prototype will usually have no inherited element indices,
1171 // but we have to check.
1172 CollectElementIndices(PrototypeIterator::GetCurrent<JSObject>(iter), range,
1173 indices);
1174 }
1175 }
1176
1177 bool IterateElementsSlow(Isolate* isolate, Handle<JSReceiver> receiver,
1178 uint32_t length, ArrayConcatVisitor* visitor) {
1179 FOR_WITH_HANDLE_SCOPE(isolate, uint32_t, i = 0, i, i < length, ++i, {
1180 Maybe<bool> maybe = JSReceiver::HasElement(receiver, i);
1181 if (!maybe.IsJust()) return false;
1182 if (maybe.FromJust()) {
1183 Handle<Object> element_value;
1184 ASSIGN_RETURN_ON_EXCEPTION_VALUE(
1185 isolate, element_value, JSReceiver::GetElement(isolate, receiver, i),
1186 false);
1187 if (!visitor->visit(i, element_value)) return false;
1188 }
1189 });
1190 visitor->increase_index_offset(length);
1191 return true;
1192 }
1193 /**
1194 * A helper function that visits "array" elements of a JSReceiver in numerical
1195 * order.
1196 *
1197 * The visitor argument is called for each existing element in the array
1198 * with the element index and the element's value.
1199 * Afterwards it increments the base-index of the visitor by the array
1200 * length.
1201 * Returns false if any access threw an exception, otherwise true.
1202 */
1203 bool IterateElements(Isolate* isolate, Handle<JSReceiver> receiver,
1204 ArrayConcatVisitor* visitor) {
1205 uint32_t length = 0;
1206
1207 if (receiver->IsJSArray()) {
1208 Handle<JSArray> array = Handle<JSArray>::cast(receiver);
1209 length = static_cast<uint32_t>(array->length()->Number());
1210 } else {
1211 Handle<Object> val;
1212 ASSIGN_RETURN_ON_EXCEPTION_VALUE(
1213 isolate, val, Object::GetLengthFromArrayLike(isolate, receiver), false);
1214 // TODO(caitp): Support larger element indexes (up to 2^53-1).
1215 if (!val->ToUint32(&length)) {
1216 length = 0;
1217 }
1218 // TODO(cbruni): handle other element kinds as well
1219 return IterateElementsSlow(isolate, receiver, length, visitor);
1220 }
1221
1222 if (!HasOnlySimpleElements(isolate, *receiver) ||
1223 !visitor->has_simple_elements()) {
1224 return IterateElementsSlow(isolate, receiver, length, visitor);
1225 }
1226 Handle<JSObject> array = Handle<JSObject>::cast(receiver);
1227
1228 switch (array->GetElementsKind()) {
1229 case FAST_SMI_ELEMENTS:
1230 case FAST_ELEMENTS:
1231 case FAST_HOLEY_SMI_ELEMENTS:
1232 case FAST_HOLEY_ELEMENTS: {
1233 // Run through the elements FixedArray and use HasElement and GetElement
1234 // to check the prototype for missing elements.
1235 Handle<FixedArray> elements(FixedArray::cast(array->elements()));
1236 int fast_length = static_cast<int>(length);
1237 DCHECK(fast_length <= elements->length());
1238 FOR_WITH_HANDLE_SCOPE(isolate, int, j = 0, j, j < fast_length, j++, {
1239 Handle<Object> element_value(elements->get(j), isolate);
1240 if (!element_value->IsTheHole(isolate)) {
1241 if (!visitor->visit(j, element_value)) return false;
1242 } else {
1243 Maybe<bool> maybe = JSReceiver::HasElement(array, j);
1244 if (!maybe.IsJust()) return false;
1245 if (maybe.FromJust()) {
1246 // Call GetElement on array, not its prototype, or getters won't
1247 // have the correct receiver.
1248 ASSIGN_RETURN_ON_EXCEPTION_VALUE(
1249 isolate, element_value,
1250 JSReceiver::GetElement(isolate, array, j), false);
1251 if (!visitor->visit(j, element_value)) return false;
1252 }
1253 }
1254 });
1255 break;
1256 }
1257 case FAST_HOLEY_DOUBLE_ELEMENTS:
1258 case FAST_DOUBLE_ELEMENTS: {
1259 // Empty array is FixedArray but not FixedDoubleArray.
1260 if (length == 0) break;
1261 // Run through the elements FixedArray and use HasElement and GetElement
1262 // to check the prototype for missing elements.
1263 if (array->elements()->IsFixedArray()) {
1264 DCHECK(array->elements()->length() == 0);
1265 break;
1266 }
1267 Handle<FixedDoubleArray> elements(
1268 FixedDoubleArray::cast(array->elements()));
1269 int fast_length = static_cast<int>(length);
1270 DCHECK(fast_length <= elements->length());
1271 FOR_WITH_HANDLE_SCOPE(isolate, int, j = 0, j, j < fast_length, j++, {
1272 if (!elements->is_the_hole(j)) {
1273 double double_value = elements->get_scalar(j);
1274 Handle<Object> element_value =
1275 isolate->factory()->NewNumber(double_value);
1276 if (!visitor->visit(j, element_value)) return false;
1277 } else {
1278 Maybe<bool> maybe = JSReceiver::HasElement(array, j);
1279 if (!maybe.IsJust()) return false;
1280 if (maybe.FromJust()) {
1281 // Call GetElement on array, not its prototype, or getters won't
1282 // have the correct receiver.
1283 Handle<Object> element_value;
1284 ASSIGN_RETURN_ON_EXCEPTION_VALUE(
1285 isolate, element_value,
1286 JSReceiver::GetElement(isolate, array, j), false);
1287 if (!visitor->visit(j, element_value)) return false;
1288 }
1289 }
1290 });
1291 break;
1292 }
1293
1294 case DICTIONARY_ELEMENTS: {
1295 Handle<SeededNumberDictionary> dict(array->element_dictionary());
1296 List<uint32_t> indices(dict->Capacity() / 2);
1297 // Collect all indices in the object and the prototypes less
1298 // than length. This might introduce duplicates in the indices list.
1299 CollectElementIndices(array, length, &indices);
1300 indices.Sort(&compareUInt32);
1301 int n = indices.length();
1302 FOR_WITH_HANDLE_SCOPE(isolate, int, j = 0, j, j < n, (void)0, {
1303 uint32_t index = indices[j];
1304 Handle<Object> element;
1305 ASSIGN_RETURN_ON_EXCEPTION_VALUE(
1306 isolate, element, JSReceiver::GetElement(isolate, array, index),
1307 false);
1308 if (!visitor->visit(index, element)) return false;
1309 // Skip to next different index (i.e., omit duplicates).
1310 do {
1311 j++;
1312 } while (j < n && indices[j] == index);
1313 });
1314 break;
1315 }
1316 case FAST_SLOPPY_ARGUMENTS_ELEMENTS:
1317 case SLOW_SLOPPY_ARGUMENTS_ELEMENTS: {
1318 FOR_WITH_HANDLE_SCOPE(
1319 isolate, uint32_t, index = 0, index, index < length, index++, {
1320 Handle<Object> element;
1321 ASSIGN_RETURN_ON_EXCEPTION_VALUE(
1322 isolate, element, JSReceiver::GetElement(isolate, array, index),
1323 false);
1324 if (!visitor->visit(index, element)) return false;
1325 });
1326 break;
1327 }
1328 case NO_ELEMENTS:
1329 break;
1330 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) case TYPE##_ELEMENTS:
1331 TYPED_ARRAYS(TYPED_ARRAY_CASE)
1332 #undef TYPED_ARRAY_CASE
1333 return IterateElementsSlow(isolate, receiver, length, visitor);
1334 case FAST_STRING_WRAPPER_ELEMENTS:
1335 case SLOW_STRING_WRAPPER_ELEMENTS:
1336 // |array| is guaranteed to be an array or typed array.
1337 UNREACHABLE();
1338 break;
1339 }
1340 visitor->increase_index_offset(length);
1341 return true;
1342 }
1343
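// @@isConcatSpreadable check for Array.prototype.concat: non-receivers are
// never spread; if the lookup-chain protector is intact only IsArray matters,
// otherwise the symbol is read and, when not undefined, coerced to boolean.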
1344 static Maybe<bool> IsConcatSpreadable(Isolate* isolate, Handle<Object> obj) {
1345 HandleScope handle_scope(isolate);
1346 if (!obj->IsJSReceiver()) return Just(false);
1347 if (!isolate->IsIsConcatSpreadableLookupChainIntact(JSReceiver::cast(*obj))) {
1348 // Slow path if @@isConcatSpreadable has been used.
1349 Handle<Symbol> key(isolate->factory()->is_concat_spreadable_symbol());
1350 Handle<Object> value;
1351 MaybeHandle<Object> maybeValue =
1352 i::Runtime::GetObjectProperty(isolate, obj, key);
1353 if (!maybeValue.ToHandle(&value)) return Nothing<bool>();
1354 if (!value->IsUndefined(isolate)) return Just(value->BooleanValue());
1355 }
1356 return Object::IsArray(obj);
1357 }
1358
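// Generic concat: pass 1 estimates the result length and element count to
// choose between a fixed-array and a dictionary backing store (plus a
// separate all-double fast path), then pass 2 walks the arguments through
// ArrayConcatVisitor. Non-array species construct their storage via the
// species constructor instead.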
1359 Object* Slow_ArrayConcat(BuiltinArguments* args, Handle<Object> species,
1360 Isolate* isolate) {
1361 int argument_count = args->length();
1362
1363 bool is_array_species = *species == isolate->context()->array_function();
1364
1365 // Pass 1: estimate the length and number of elements of the result.
1366 // The actual length can be larger if any of the arguments have getters
1367 // that mutate other arguments (but will otherwise be precise).
1368 // The number of elements is precise if there are no inherited elements.
1369
1370 ElementsKind kind = FAST_SMI_ELEMENTS;
1371
1372 uint32_t estimate_result_length = 0;
1373 uint32_t estimate_nof_elements = 0;
1374 FOR_WITH_HANDLE_SCOPE(isolate, int, i = 0, i, i < argument_count, i++, {
1375 Handle<Object> obj((*args)[i], isolate);
1376 uint32_t length_estimate;
1377 uint32_t element_estimate;
1378 if (obj->IsJSArray()) {
1379 Handle<JSArray> array(Handle<JSArray>::cast(obj));
1380 length_estimate = static_cast<uint32_t>(array->length()->Number());
1381 if (length_estimate != 0) {
1382 ElementsKind array_kind =
1383 GetPackedElementsKind(array->GetElementsKind());
1384 kind = GetMoreGeneralElementsKind(kind, array_kind);
1385 }
1386 element_estimate = EstimateElementCount(array);
1387 } else {
1388 if (obj->IsHeapObject()) {
1389 kind = GetMoreGeneralElementsKind(
1390 kind, obj->IsNumber() ? FAST_DOUBLE_ELEMENTS : FAST_ELEMENTS);
1391 }
1392 length_estimate = 1;
1393 element_estimate = 1;
1394 }
1395 // Avoid overflows by capping at kMaxElementCount.
1396 if (JSObject::kMaxElementCount - estimate_result_length < length_estimate) {
1397 estimate_result_length = JSObject::kMaxElementCount;
1398 } else {
1399 estimate_result_length += length_estimate;
1400 }
1401 if (JSObject::kMaxElementCount - estimate_nof_elements < element_estimate) {
1402 estimate_nof_elements = JSObject::kMaxElementCount;
1403 } else {
1404 estimate_nof_elements += element_estimate;
1405 }
1406 });
1407
1408 // If the estimated number of elements is more than half of the length, a
1409 // fixed array (fast case) is more time- and space-efficient than a
1410 // dictionary.
1411 bool fast_case = is_array_species &&
1412 (estimate_nof_elements * 2) >= estimate_result_length &&
1413 isolate->IsIsConcatSpreadableLookupChainIntact();
1414
1415 if (fast_case && kind == FAST_DOUBLE_ELEMENTS) {
1416 Handle<FixedArrayBase> storage =
1417 isolate->factory()->NewFixedDoubleArray(estimate_result_length);
1418 int j = 0;
1419 bool failure = false;
1420 if (estimate_result_length > 0) {
1421 Handle<FixedDoubleArray> double_storage =
1422 Handle<FixedDoubleArray>::cast(storage);
1423 for (int i = 0; i < argument_count; i++) {
1424 Handle<Object> obj((*args)[i], isolate);
1425 if (obj->IsSmi()) {
1426 double_storage->set(j, Smi::cast(*obj)->value());
1427 j++;
1428 } else if (obj->IsNumber()) {
1429 double_storage->set(j, obj->Number());
1430 j++;
1431 } else {
1432 DisallowHeapAllocation no_gc;
1433 JSArray* array = JSArray::cast(*obj);
1434 uint32_t length = static_cast<uint32_t>(array->length()->Number());
1435 switch (array->GetElementsKind()) {
1436 case FAST_HOLEY_DOUBLE_ELEMENTS:
1437 case FAST_DOUBLE_ELEMENTS: {
1438 // Empty array is FixedArray but not FixedDoubleArray.
1439 if (length == 0) break;
1440 FixedDoubleArray* elements =
1441 FixedDoubleArray::cast(array->elements());
1442 for (uint32_t i = 0; i < length; i++) {
1443 if (elements->is_the_hole(i)) {
1444 // TODO(jkummerow/verwaest): We could be a bit more clever
1445 // here: Check if there are no elements/getters on the
1446 // prototype chain, and if so, allow creation of a holey
1447 // result array.
1448 // Same thing below (holey smi case).
1449 failure = true;
1450 break;
1451 }
1452 double double_value = elements->get_scalar(i);
1453 double_storage->set(j, double_value);
1454 j++;
1455 }
1456 break;
1457 }
1458 case FAST_HOLEY_SMI_ELEMENTS:
1459 case FAST_SMI_ELEMENTS: {
1460 Object* the_hole = isolate->heap()->the_hole_value();
1461 FixedArray* elements(FixedArray::cast(array->elements()));
1462 for (uint32_t i = 0; i < length; i++) {
1463 Object* element = elements->get(i);
1464 if (element == the_hole) {
1465 failure = true;
1466 break;
1467 }
1468 int32_t int_value = Smi::cast(element)->value();
1469 double_storage->set(j, int_value);
1470 j++;
1471 }
1472 break;
1473 }
1474 case FAST_HOLEY_ELEMENTS:
1475 case FAST_ELEMENTS:
1476 case DICTIONARY_ELEMENTS:
1477 case NO_ELEMENTS:
1478 DCHECK_EQ(0u, length);
1479 break;
1480 default:
1481 UNREACHABLE();
1482 }
1483 }
1484 if (failure) break;
1485 }
1486 }
1487 if (!failure) {
1488 return *isolate->factory()->NewJSArrayWithElements(storage, kind, j);
1489 }
1490 // In case of failure, fall through.
1491 }
1492
1493 Handle<HeapObject> storage;
1494 if (fast_case) {
1495 // The backing storage array must have non-existing elements to preserve
1496 // holes across concat operations.
1497 storage =
1498 isolate->factory()->NewFixedArrayWithHoles(estimate_result_length);
1499 } else if (is_array_species) {
1500 // TODO(126): move 25% pre-allocation logic into Dictionary::Allocate
1501 uint32_t at_least_space_for =
1502 estimate_nof_elements + (estimate_nof_elements >> 2);
1503 storage = SeededNumberDictionary::New(isolate, at_least_space_for);
1504 } else {
1505 DCHECK(species->IsConstructor());
1506 Handle<Object> length(Smi::kZero, isolate);
1507 Handle<Object> storage_object;
1508 ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
1509 isolate, storage_object,
1510 Execution::New(isolate, species, species, 1, &length));
1511 storage = Handle<HeapObject>::cast(storage_object);
1512 }
1513
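// The visitor appends each argument to the chosen backing store: spreadable
// arguments contribute their elements, everything else is stored as a single
// element, e.g. [1].concat([2], 3) -> [1, 2, 3].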
1514 ArrayConcatVisitor visitor(isolate, storage, fast_case);
1515
1516 for (int i = 0; i < argument_count; i++) {
1517 Handle<Object> obj((*args)[i], isolate);
1518 Maybe<bool> spreadable = IsConcatSpreadable(isolate, obj);
1519 MAYBE_RETURN(spreadable, isolate->heap()->exception());
1520 if (spreadable.FromJust()) {
1521 Handle<JSReceiver> object = Handle<JSReceiver>::cast(obj);
1522 if (!IterateElements(isolate, object, &visitor)) {
1523 return isolate->heap()->exception();
1524 }
1525 } else {
1526 if (!visitor.visit(0, obj)) return isolate->heap()->exception();
1527 visitor.increase_index_offset(1);
1528 }
1529 }
1530
1531 if (visitor.exceeds_array_limit()) {
1532 THROW_NEW_ERROR_RETURN_FAILURE(
1533 isolate, NewRangeError(MessageTemplate::kInvalidArrayLength));
1534 }
1535
1536 if (is_array_species) {
1537 return *visitor.ToArray();
1538 } else {
1539 return *visitor.storage_jsreceiver();
1540 }
1541 }
1542
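// An array counts as "simple" if it still has the initial Array.prototype
// and 'length' is its only own descriptor, as for a plain literal like
// [1, 2, 3]; arrays with extra own properties or a subclass prototype fail.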
1543 bool IsSimpleArray(Isolate* isolate, Handle<JSArray> obj) {
1544 DisallowHeapAllocation no_gc;
1545 Map* map = obj->map();
1546 // If there is only the 'length' property we are fine.
1547 if (map->prototype() ==
1548 isolate->native_context()->initial_array_prototype() &&
1549 map->NumberOfOwnDescriptors() == 1) {
1550 return true;
1551 }
1552 // TODO(cbruni): slower lookup for array subclasses and support slow
1553 // @@IsConcatSpreadable lookup.
1554 return false;
1555 }
1556
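// Fast path for Array.prototype.concat: used only when @@isConcatSpreadable
// is untouched and every argument is a simple JSArray with fast elements,
// e.g. [1, 2].concat([3]). Returning an empty MaybeHandle means "fall back
// to Slow_ArrayConcat".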
1557 MaybeHandle<JSArray> Fast_ArrayConcat(Isolate* isolate,
1558 BuiltinArguments* args) {
1559 if (!isolate->IsIsConcatSpreadableLookupChainIntact()) {
1560 return MaybeHandle<JSArray>();
1561 }
1562 // We shouldn't overflow when adding another array's length.
1563 const int kHalfOfMaxInt = 1 << (kBitsPerInt - 2);
1564 STATIC_ASSERT(FixedArray::kMaxLength < kHalfOfMaxInt);
1565 STATIC_ASSERT(FixedDoubleArray::kMaxLength < kHalfOfMaxInt);
1566 USE(kHalfOfMaxInt);
1567
1568 int n_arguments = args->length();
1569 int result_len = 0;
1570 {
1571 DisallowHeapAllocation no_gc;
1572 // Iterate through all the arguments performing checks
1573 // and calculating total length.
1574 for (int i = 0; i < n_arguments; i++) {
1575 Object* arg = (*args)[i];
1576 if (!arg->IsJSArray()) return MaybeHandle<JSArray>();
1577 if (!HasOnlySimpleReceiverElements(isolate, JSObject::cast(arg))) {
1578 return MaybeHandle<JSArray>();
1579 }
1580 // TODO(cbruni): support fast concatenation of DICTIONARY_ELEMENTS.
1581 if (!JSObject::cast(arg)->HasFastElements()) {
1582 return MaybeHandle<JSArray>();
1583 }
1584 Handle<JSArray> array(JSArray::cast(arg), isolate);
1585 if (!IsSimpleArray(isolate, array)) {
1586 return MaybeHandle<JSArray>();
1587 }
1588 // The Array length is guaranteed to be <= kHalfOfMaxInt, thus we won't
1589 // overflow.
1590 result_len += Smi::cast(array->length())->value();
1591 DCHECK(result_len >= 0);
1592 // Throw an Error if we overflow the FixedArray limits
1593 if (FixedDoubleArray::kMaxLength < result_len ||
1594 FixedArray::kMaxLength < result_len) {
1595 AllowHeapAllocation gc;
1596 THROW_NEW_ERROR(isolate,
1597 NewRangeError(MessageTemplate::kInvalidArrayLength),
1598 JSArray);
1599 }
1600 }
1601 }
1602 return ElementsAccessor::Concat(isolate, args, n_arguments, result_len);
1603 }
1604
1605 } // namespace
1606
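// Array.prototype.concat dispatch: try the fast path (first based on the
// receiver alone, then again after reading @@species), and otherwise fall
// back to Slow_ArrayConcat, e.g. for proxies, dictionary-mode arrays, or a
// subclass with a custom @@species.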
1607 // ES6 22.1.3.1 Array.prototype.concat
1608 BUILTIN(ArrayConcat) {
1609 HandleScope scope(isolate);
1610
1611 Handle<Object> receiver = args.receiver();
1612 // TODO(bmeurer): Do we really care about the exact exception message here?
1613 if (receiver->IsNullOrUndefined(isolate)) {
1614 THROW_NEW_ERROR_RETURN_FAILURE(
1615 isolate, NewTypeError(MessageTemplate::kCalledOnNullOrUndefined,
1616 isolate->factory()->NewStringFromAsciiChecked(
1617 "Array.prototype.concat")));
1618 }
1619 ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
1620 isolate, receiver, Object::ToObject(isolate, args.receiver()));
1621 args[0] = *receiver;
1622
1623 Handle<JSArray> result_array;
1624
1625 // Avoid a real @@species read here to spare extra lookups on the array constructor.
1626 if (V8_LIKELY(receiver->IsJSArray() &&
1627 Handle<JSArray>::cast(receiver)->HasArrayPrototype(isolate) &&
1628 isolate->IsArraySpeciesLookupChainIntact())) {
1629 if (Fast_ArrayConcat(isolate, &args).ToHandle(&result_array)) {
1630 return *result_array;
1631 }
1632 if (isolate->has_pending_exception()) return isolate->heap()->exception();
1633 }
1634 // Reading @@species happens before anything else with a side effect, so
1635 // we can do it here to determine whether to take the fast path.
1636 Handle<Object> species;
1637 ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
1638 isolate, species, Object::ArraySpeciesConstructor(isolate, receiver));
1639 if (*species == *isolate->array_function()) {
1640 if (Fast_ArrayConcat(isolate, &args).ToHandle(&result_array)) {
1641 return *result_array;
1642 }
1643 if (isolate->has_pending_exception()) return isolate->heap()->exception();
1644 }
1645 return Slow_ArrayConcat(&args, species, isolate);
1646 }
1647
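// Array.isArray as a code stub: Smis and ordinary non-array receivers return
// false, JS arrays return true, and proxies go to the runtime (a proxy over
// an array answers true, and a revoked proxy throws), e.g.
// Array.isArray(new Proxy([], {})) is true.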
1648 void Builtins::Generate_ArrayIsArray(compiler::CodeAssemblerState* state) {
1649 typedef compiler::Node Node;
1650 typedef CodeStubAssembler::Label Label;
1651 CodeStubAssembler assembler(state);
1652
1653 Node* object = assembler.Parameter(1);
1654 Node* context = assembler.Parameter(4);
1655
1656 Label call_runtime(&assembler), return_true(&assembler),
1657 return_false(&assembler);
1658
1659 assembler.GotoIf(assembler.TaggedIsSmi(object), &return_false);
1660 Node* instance_type = assembler.LoadInstanceType(object);
1661
1662 assembler.GotoIf(assembler.Word32Equal(
1663 instance_type, assembler.Int32Constant(JS_ARRAY_TYPE)),
1664 &return_true);
1665
1666 // TODO(verwaest): Handle proxies in-place.
1667 assembler.Branch(assembler.Word32Equal(
1668 instance_type, assembler.Int32Constant(JS_PROXY_TYPE)),
1669 &call_runtime, &return_false);
1670
1671 assembler.Bind(&return_true);
1672 assembler.Return(assembler.BooleanConstant(true));
1673
1674 assembler.Bind(&return_false);
1675 assembler.Return(assembler.BooleanConstant(false));
1676
1677 assembler.Bind(&call_runtime);
1678 assembler.Return(
1679 assembler.CallRuntime(Runtime::kArrayIsArray, context, object));
1680 }
1681
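// Array.prototype.includes over fast JSArrays. Comparison uses SameValueZero,
// so unlike indexOf it finds NaN and treats holes like undefined, e.g.
// [NaN].includes(NaN) and [,].includes(undefined) are both true.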
1682 TF_BUILTIN(ArrayIncludes, CodeStubAssembler) {
1683 Node* const array = Parameter(0);
1684 Node* const search_element = Parameter(1);
1685 Node* const start_from = Parameter(2);
1686 Node* const context = Parameter(3 + 2);
1687
1688 Variable len_var(this, MachineType::PointerRepresentation()),
1689 index_var(this, MachineType::PointerRepresentation());
1690
1691 Label init_k(this), return_true(this), return_false(this), call_runtime(this);
1692 Label init_len(this), select_loop(this);
1693
1694 index_var.Bind(IntPtrConstant(0));
1695 len_var.Bind(IntPtrConstant(0));
1696
1697 // Take the slow path if not a JSArray, if retrieving elements requires
1698 // traversing the prototype chain, or if access checks are required.
1699 BranchIfFastJSArray(array, context,
1700 CodeStubAssembler::FastJSArrayAccessMode::INBOUNDS_READ,
1701 &init_len, &call_runtime);
1702
1703 Bind(&init_len);
1704 {
1705 // Handle case where JSArray length is not an Smi in the runtime
1706 Node* len = LoadObjectField(array, JSArray::kLengthOffset);
1707 GotoIfNot(TaggedIsSmi(len), &call_runtime);
1708
1709 len_var.Bind(SmiToWord(len));
1710
1711 GotoIf(IsUndefined(start_from), &select_loop);
1712
1713 // Bailout to slow path if startIndex is not an Smi.
1714 Branch(TaggedIsSmi(start_from), &init_k, &call_runtime);
1715 }
1716
1717 Bind(&init_k);
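// Normalize fromIndex: a negative Smi counts back from the end and is
// clamped at 0, so [1, 2, 3].includes(3, -1) is true while
// [1, 2, 3].includes(1, -1) is false.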
1718 CSA_ASSERT(this, TaggedIsSmi(start_from));
1719 Node* const untagged_start_from = SmiToWord(start_from);
1720 index_var.Bind(Select(
1721 IntPtrGreaterThanOrEqual(untagged_start_from, IntPtrConstant(0)),
1722 [=]() { return untagged_start_from; },
1723 [=]() {
1724 Node* const index = IntPtrAdd(len_var.value(), untagged_start_from);
1725 return SelectConstant(IntPtrLessThan(index, IntPtrConstant(0)),
1726 IntPtrConstant(0), index,
1727 MachineType::PointerRepresentation());
1728 },
1729 MachineType::PointerRepresentation()));
1730
1731 Goto(&select_loop);
1732 Bind(&select_loop);
1733 static int32_t kElementsKind[] = {
1734 FAST_SMI_ELEMENTS, FAST_HOLEY_SMI_ELEMENTS, FAST_ELEMENTS,
1735 FAST_HOLEY_ELEMENTS, FAST_DOUBLE_ELEMENTS, FAST_HOLEY_DOUBLE_ELEMENTS,
1736 };
1737
1738 Label if_smiorobjects(this), if_packed_doubles(this), if_holey_doubles(this);
1739 Label* element_kind_handlers[] = {&if_smiorobjects, &if_smiorobjects,
1740 &if_smiorobjects, &if_smiorobjects,
1741 &if_packed_doubles, &if_holey_doubles};
1742
1743 Node* map = LoadMap(array);
1744 Node* elements_kind = LoadMapElementsKind(map);
1745 Node* elements = LoadElements(array);
1746 Switch(elements_kind, &return_false, kElementsKind, element_kind_handlers,
1747 arraysize(kElementsKind));
1748
1749 Bind(&if_smiorobjects);
1750 {
1751 Variable search_num(this, MachineRepresentation::kFloat64);
1752 Label ident_loop(this, &index_var), heap_num_loop(this, &search_num),
1753 string_loop(this, &index_var), undef_loop(this, &index_var),
1754 not_smi(this), not_heap_num(this);
1755
1756 GotoIfNot(TaggedIsSmi(search_element), &not_smi);
1757 search_num.Bind(SmiToFloat64(search_element));
1758 Goto(&heap_num_loop);
1759
1760 Bind(&not_smi);
1761 GotoIf(WordEqual(search_element, UndefinedConstant()), &undef_loop);
1762 Node* map = LoadMap(search_element);
1763 GotoIfNot(IsHeapNumberMap(map), &not_heap_num);
1764 search_num.Bind(LoadHeapNumberValue(search_element));
1765 Goto(&heap_num_loop);
1766
1767 Bind(&not_heap_num);
1768 Node* search_type = LoadMapInstanceType(map);
1769 GotoIf(IsStringInstanceType(search_type), &string_loop);
1770 Goto(&ident_loop);
1771
1772 Bind(&ident_loop);
1773 {
1774 GotoIfNot(UintPtrLessThan(index_var.value(), len_var.value()),
1775 &return_false);
1776 Node* element_k = LoadFixedArrayElement(elements, index_var.value());
1777 GotoIf(WordEqual(element_k, search_element), &return_true);
1778
1779 index_var.Bind(IntPtrAdd(index_var.value(), IntPtrConstant(1)));
1780 Goto(&ident_loop);
1781 }
1782
1783 Bind(&undef_loop);
1784 {
1785 GotoIfNot(UintPtrLessThan(index_var.value(), len_var.value()),
1786 &return_false);
1787 Node* element_k = LoadFixedArrayElement(elements, index_var.value());
1788 GotoIf(WordEqual(element_k, UndefinedConstant()), &return_true);
1789 GotoIf(WordEqual(element_k, TheHoleConstant()), &return_true);
1790
1791 index_var.Bind(IntPtrAdd(index_var.value(), IntPtrConstant(1)));
1792 Goto(&undef_loop);
1793 }
1794
1795 Bind(&heap_num_loop);
1796 {
1797 Label nan_loop(this, &index_var), not_nan_loop(this, &index_var);
1798 BranchIfFloat64IsNaN(search_num.value(), &nan_loop, &not_nan_loop);
1799
1800 Bind(&not_nan_loop);
1801 {
1802 Label continue_loop(this), not_smi(this);
1803 GotoIfNot(UintPtrLessThan(index_var.value(), len_var.value()),
1804 &return_false);
1805 Node* element_k = LoadFixedArrayElement(elements, index_var.value());
1806 GotoIfNot(TaggedIsSmi(element_k), &not_smi);
1807 Branch(Float64Equal(search_num.value(), SmiToFloat64(element_k)),
1808 &return_true, &continue_loop);
1809
1810 Bind(&not_smi);
1811 GotoIfNot(IsHeapNumber(element_k), &continue_loop);
1812 Branch(Float64Equal(search_num.value(), LoadHeapNumberValue(element_k)),
1813 &return_true, &continue_loop);
1814
1815 Bind(&continue_loop);
1816 index_var.Bind(IntPtrAdd(index_var.value(), IntPtrConstant(1)));
1817 Goto(&not_nan_loop);
1818 }
1819
1820 Bind(&nan_loop);
1821 {
1822 Label continue_loop(this);
1823 GotoIfNot(UintPtrLessThan(index_var.value(), len_var.value()),
1824 &return_false);
1825 Node* element_k = LoadFixedArrayElement(elements, index_var.value());
1826 GotoIf(TaggedIsSmi(element_k), &continue_loop);
1827 GotoIfNot(IsHeapNumber(element_k), &continue_loop);
1828 BranchIfFloat64IsNaN(LoadHeapNumberValue(element_k), &return_true,
1829 &continue_loop);
1830
1831 Bind(&continue_loop);
1832 index_var.Bind(IntPtrAdd(index_var.value(), IntPtrConstant(1)));
1833 Goto(&nan_loop);
1834 }
1835 }
1836
1837 Bind(&string_loop);
1838 {
1839 Label continue_loop(this);
1840 GotoIfNot(UintPtrLessThan(index_var.value(), len_var.value()),
1841 &return_false);
1842 Node* element_k = LoadFixedArrayElement(elements, index_var.value());
1843 GotoIf(TaggedIsSmi(element_k), &continue_loop);
1844 GotoIfNot(IsStringInstanceType(LoadInstanceType(element_k)),
1845 &continue_loop);
1846
1847 // TODO(bmeurer): Consider inlining the StringEqual logic here.
1848 Node* result = CallStub(CodeFactory::StringEqual(isolate()), context,
1849 search_element, element_k);
1850 Branch(WordEqual(BooleanConstant(true), result), &return_true,
1851 &continue_loop);
1852
1853 Bind(&continue_loop);
1854 index_var.Bind(IntPtrAdd(index_var.value(), IntPtrConstant(1)));
1855 Goto(&string_loop);
1856 }
1857 }
1858
1859 Bind(&if_packed_doubles);
1860 {
1861 Label nan_loop(this, &index_var), not_nan_loop(this, &index_var),
1862 hole_loop(this, &index_var), search_notnan(this);
1863 Variable search_num(this, MachineRepresentation::kFloat64);
1864
1865 GotoIfNot(TaggedIsSmi(search_element), &search_notnan);
1866 search_num.Bind(SmiToFloat64(search_element));
1867 Goto(&not_nan_loop);
1868
1869 Bind(&search_notnan);
1870 GotoIfNot(IsHeapNumber(search_element), &return_false);
1871
1872 search_num.Bind(LoadHeapNumberValue(search_element));
1873
1874 BranchIfFloat64IsNaN(search_num.value(), &nan_loop, &not_nan_loop);
1875
1876 // Search for HeapNumber
1877 Bind(&not_nan_loop);
1878 {
1879 Label continue_loop(this);
1880 GotoIfNot(UintPtrLessThan(index_var.value(), len_var.value()),
1881 &return_false);
1882 Node* element_k = LoadFixedDoubleArrayElement(elements, index_var.value(),
1883 MachineType::Float64());
1884 Branch(Float64Equal(element_k, search_num.value()), &return_true,
1885 &continue_loop);
1886 Bind(&continue_loop);
1887 index_var.Bind(IntPtrAdd(index_var.value(), IntPtrConstant(1)));
1888 Goto(&not_nan_loop);
1889 }
1890
1891 // Search for NaN
1892 Bind(&nan_loop);
1893 {
1894 Label continue_loop(this);
1895 GotoIfNot(UintPtrLessThan(index_var.value(), len_var.value()),
1896 &return_false);
1897 Node* element_k = LoadFixedDoubleArrayElement(elements, index_var.value(),
1898 MachineType::Float64());
1899 BranchIfFloat64IsNaN(element_k, &return_true, &continue_loop);
1900 Bind(&continue_loop);
1901 index_var.Bind(IntPtrAdd(index_var.value(), IntPtrConstant(1)));
1902 Goto(&nan_loop);
1903 }
1904 }
1905
1906 Bind(&if_holey_doubles);
1907 {
1908 Label nan_loop(this, &index_var), not_nan_loop(this, &index_var),
1909 hole_loop(this, &index_var), search_notnan(this);
1910 Variable search_num(this, MachineRepresentation::kFloat64);
1911
1912 GotoIfNot(TaggedIsSmi(search_element), &search_notnan);
1913 search_num.Bind(SmiToFloat64(search_element));
1914 Goto(&not_nan_loop);
1915
1916 Bind(&search_notnan);
1917 GotoIf(WordEqual(search_element, UndefinedConstant()), &hole_loop);
1918 GotoIfNot(IsHeapNumber(search_element), &return_false);
1919
1920 search_num.Bind(LoadHeapNumberValue(search_element));
1921
1922 BranchIfFloat64IsNaN(search_num.value(), &nan_loop, &not_nan_loop);
1923
1924 // Search for HeapNumber
1925 Bind(&not_nan_loop);
1926 {
1927 Label continue_loop(this);
1928 GotoIfNot(UintPtrLessThan(index_var.value(), len_var.value()),
1929 &return_false);
1930
1931 // Load double value or continue if it contains a double hole.
1932 Node* element_k = LoadFixedDoubleArrayElement(
1933 elements, index_var.value(), MachineType::Float64(), 0,
1934 CodeStubAssembler::INTPTR_PARAMETERS, &continue_loop);
1935
1936 Branch(Float64Equal(element_k, search_num.value()), &return_true,
1937 &continue_loop);
1938 Bind(&continue_loop);
1939 index_var.Bind(IntPtrAdd(index_var.value(), IntPtrConstant(1)));
1940 Goto(&not_nan_loop);
1941 }
1942
1943 // Search for NaN
1944 Bind(&nan_loop);
1945 {
1946 Label continue_loop(this);
1947 GotoIfNot(UintPtrLessThan(index_var.value(), len_var.value()),
1948 &return_false);
1949
1950 // Load double value or continue if it contains a double hole.
1951 Node* element_k = LoadFixedDoubleArrayElement(
1952 elements, index_var.value(), MachineType::Float64(), 0,
1953 CodeStubAssembler::INTPTR_PARAMETERS, &continue_loop);
1954
1955 BranchIfFloat64IsNaN(element_k, &return_true, &continue_loop);
1956 Bind(&continue_loop);
1957 index_var.Bind(IntPtrAdd(index_var.value(), IntPtrConstant(1)));
1958 Goto(&nan_loop);
1959 }
1960
1961 // Search for the Hole
1962 Bind(&hole_loop);
1963 {
1964 GotoIfNot(UintPtrLessThan(index_var.value(), len_var.value()),
1965 &return_false);
1966
1967 // Check if the element is a double hole, but don't load it.
1968 LoadFixedDoubleArrayElement(
1969 elements, index_var.value(), MachineType::None(), 0,
1970 CodeStubAssembler::INTPTR_PARAMETERS, &return_true);
1971
1972 index_var.Bind(IntPtrAdd(index_var.value(), IntPtrConstant(1)));
1973 Goto(&hole_loop);
1974 }
1975 }
1976
1977 Bind(&return_true);
1978 Return(TrueConstant());
1979
1980 Bind(&return_false);
1981 Return(FalseConstant());
1982
1983 Bind(&call_runtime);
1984 Return(CallRuntime(Runtime::kArrayIncludes_Slow, context, array,
1985 search_element, start_from));
1986 }
1987
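// Array.prototype.indexOf over fast JSArrays. Comparison is strict equality,
// so NaN is never found and holes do not match undefined, e.g.
// [NaN].indexOf(NaN) === -1 and [,].indexOf(undefined) === -1, in contrast
// to includes above.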
1988 void Builtins::Generate_ArrayIndexOf(compiler::CodeAssemblerState* state) {
1989 typedef compiler::Node Node;
1990 typedef CodeStubAssembler::Label Label;
1991 typedef CodeStubAssembler::Variable Variable;
1992 CodeStubAssembler assembler(state);
1993
1994 Node* array = assembler.Parameter(0);
1995 Node* search_element = assembler.Parameter(1);
1996 Node* start_from = assembler.Parameter(2);
1997 Node* context = assembler.Parameter(3 + 2);
1998
1999 Node* intptr_zero = assembler.IntPtrConstant(0);
2000 Node* intptr_one = assembler.IntPtrConstant(1);
2001
2002 Node* undefined = assembler.UndefinedConstant();
2003
2004 Variable len_var(&assembler, MachineType::PointerRepresentation()),
2005 index_var(&assembler, MachineType::PointerRepresentation()),
2006 start_from_var(&assembler, MachineType::PointerRepresentation());
2007
2008 Label init_k(&assembler), return_found(&assembler),
2009 return_not_found(&assembler), call_runtime(&assembler);
2010
2011 Label init_len(&assembler);
2012
2013 index_var.Bind(intptr_zero);
2014 len_var.Bind(intptr_zero);
2015
2016 // Take the slow path if not a JSArray, if retrieving elements requires
2017 // traversing the prototype chain, or if access checks are required.
2018 assembler.BranchIfFastJSArray(
2019 array, context, CodeStubAssembler::FastJSArrayAccessMode::INBOUNDS_READ,
2020 &init_len, &call_runtime);
2021
2022 assembler.Bind(&init_len);
2023 {
2024 // Handle case where JSArray length is not an Smi in the runtime
2025 Node* len = assembler.LoadObjectField(array, JSArray::kLengthOffset);
2026 assembler.GotoIfNot(assembler.TaggedIsSmi(len), &call_runtime);
2027
2028 len_var.Bind(assembler.SmiToWord(len));
2029 assembler.Branch(assembler.WordEqual(len_var.value(), intptr_zero),
2030 &return_not_found, &init_k);
2031 }
2032
2033 assembler.Bind(&init_k);
2034 {
2035 // For now only deal with undefined and Smis here; we must be really careful
2036 // with side-effects from the ToInteger conversion as the side-effects might
2037 // render our assumptions about the receiver being a fast JSArray and the
2038 // length invalid.
2039 Label done(&assembler), init_k_smi(&assembler), init_k_other(&assembler),
2040 init_k_zero(&assembler), init_k_n(&assembler);
2041
2042 assembler.Branch(assembler.TaggedIsSmi(start_from), &init_k_smi,
2043 &init_k_other);
2044
2045 assembler.Bind(&init_k_smi);
2046 {
2047 start_from_var.Bind(assembler.SmiUntag(start_from));
2048 assembler.Goto(&init_k_n);
2049 }
2050
2051 assembler.Bind(&init_k_other);
2052 {
2053 // The fromIndex must be undefined here; otherwise bail out and let the
2054 // runtime deal with the full ToInteger conversion.
2055 assembler.GotoIfNot(assembler.IsUndefined(start_from), &call_runtime);
2056 start_from_var.Bind(intptr_zero);
2057 assembler.Goto(&init_k_n);
2058 }
2059
2060 assembler.Bind(&init_k_n);
2061 {
2062 Label if_positive(&assembler), if_negative(&assembler), done(&assembler);
2063 assembler.Branch(
2064 assembler.IntPtrLessThan(start_from_var.value(), intptr_zero),
2065 &if_negative, &if_positive);
2066
2067 assembler.Bind(&if_positive);
2068 {
2069 index_var.Bind(start_from_var.value());
2070 assembler.Goto(&done);
2071 }
2072
2073 assembler.Bind(&if_negative);
2074 {
2075 index_var.Bind(
2076 assembler.IntPtrAdd(len_var.value(), start_from_var.value()));
2077 assembler.Branch(
2078 assembler.IntPtrLessThan(index_var.value(), intptr_zero),
2079 &init_k_zero, &done);
2080 }
2081
2082 assembler.Bind(&init_k_zero);
2083 {
2084 index_var.Bind(intptr_zero);
2085 assembler.Goto(&done);
2086 }
2087
2088 assembler.Bind(&done);
2089 }
2090 }
2091
2092 static int32_t kElementsKind[] = {
2093 FAST_SMI_ELEMENTS, FAST_HOLEY_SMI_ELEMENTS, FAST_ELEMENTS,
2094 FAST_HOLEY_ELEMENTS, FAST_DOUBLE_ELEMENTS, FAST_HOLEY_DOUBLE_ELEMENTS,
2095 };
2096
2097 Label if_smiorobjects(&assembler), if_packed_doubles(&assembler),
2098 if_holey_doubles(&assembler);
2099 Label* element_kind_handlers[] = {&if_smiorobjects, &if_smiorobjects,
2100 &if_smiorobjects, &if_smiorobjects,
2101 &if_packed_doubles, &if_holey_doubles};
2102
2103 Node* map = assembler.LoadMap(array);
2104 Node* elements_kind = assembler.LoadMapElementsKind(map);
2105 Node* elements = assembler.LoadElements(array);
2106 assembler.Switch(elements_kind, &return_not_found, kElementsKind,
2107 element_kind_handlers, arraysize(kElementsKind));
2108
2109 assembler.Bind(&if_smiorobjects);
2110 {
2111 Variable search_num(&assembler, MachineRepresentation::kFloat64);
2112 Label ident_loop(&assembler, &index_var),
2113 heap_num_loop(&assembler, &search_num),
2114 string_loop(&assembler, &index_var), undef_loop(&assembler, &index_var),
2115 not_smi(&assembler), not_heap_num(&assembler);
2116
2117 assembler.GotoIfNot(assembler.TaggedIsSmi(search_element), &not_smi);
2118 search_num.Bind(assembler.SmiToFloat64(search_element));
2119 assembler.Goto(&heap_num_loop);
2120
2121 assembler.Bind(&not_smi);
2122 assembler.GotoIf(assembler.WordEqual(search_element, undefined),
2123 &undef_loop);
2124 Node* map = assembler.LoadMap(search_element);
2125 assembler.GotoIfNot(assembler.IsHeapNumberMap(map), &not_heap_num);
2126 search_num.Bind(assembler.LoadHeapNumberValue(search_element));
2127 assembler.Goto(&heap_num_loop);
2128
2129 assembler.Bind(&not_heap_num);
2130 Node* search_type = assembler.LoadMapInstanceType(map);
2131 assembler.GotoIf(assembler.IsStringInstanceType(search_type), &string_loop);
2132 assembler.Goto(&ident_loop);
2133
2134 assembler.Bind(&ident_loop);
2135 {
2136 assembler.GotoIfNot(
2137 assembler.UintPtrLessThan(index_var.value(), len_var.value()),
2138 &return_not_found);
2139 Node* element_k =
2140 assembler.LoadFixedArrayElement(elements, index_var.value());
2141 assembler.GotoIf(assembler.WordEqual(element_k, search_element),
2142 &return_found);
2143
2144 index_var.Bind(assembler.IntPtrAdd(index_var.value(), intptr_one));
2145 assembler.Goto(&ident_loop);
2146 }
2147
2148 assembler.Bind(&undef_loop);
2149 {
2150 assembler.GotoIfNot(
2151 assembler.UintPtrLessThan(index_var.value(), len_var.value()),
2152 &return_not_found);
2153 Node* element_k =
2154 assembler.LoadFixedArrayElement(elements, index_var.value());
2155 assembler.GotoIf(assembler.WordEqual(element_k, undefined),
2156 &return_found);
2157
2158 index_var.Bind(assembler.IntPtrAdd(index_var.value(), intptr_one));
2159 assembler.Goto(&undef_loop);
2160 }
2161
2162 assembler.Bind(&heap_num_loop);
2163 {
2164 Label not_nan_loop(&assembler, &index_var);
2165 assembler.BranchIfFloat64IsNaN(search_num.value(), &return_not_found,
2166 &not_nan_loop);
2167
2168 assembler.Bind(&not_nan_loop);
2169 {
2170 Label continue_loop(&assembler), not_smi(&assembler);
2171 assembler.GotoIfNot(
2172 assembler.UintPtrLessThan(index_var.value(), len_var.value()),
2173 &return_not_found);
2174 Node* element_k =
2175 assembler.LoadFixedArrayElement(elements, index_var.value());
2176 assembler.GotoIfNot(assembler.TaggedIsSmi(element_k), &not_smi);
2177 assembler.Branch(
2178 assembler.Float64Equal(search_num.value(),
2179 assembler.SmiToFloat64(element_k)),
2180 &return_found, &continue_loop);
2181
2182 assembler.Bind(&not_smi);
2183 assembler.GotoIfNot(
2184 assembler.IsHeapNumberMap(assembler.LoadMap(element_k)),
2185 &continue_loop);
2186 assembler.Branch(
2187 assembler.Float64Equal(search_num.value(),
2188 assembler.LoadHeapNumberValue(element_k)),
2189 &return_found, &continue_loop);
2190
2191 assembler.Bind(&continue_loop);
2192 index_var.Bind(assembler.IntPtrAdd(index_var.value(), intptr_one));
2193 assembler.Goto(&not_nan_loop);
2194 }
2195 }
2196
2197 assembler.Bind(&string_loop);
2198 {
2199 Label continue_loop(&assembler);
2200 assembler.GotoIfNot(
2201 assembler.UintPtrLessThan(index_var.value(), len_var.value()),
2202 &return_not_found);
2203 Node* element_k =
2204 assembler.LoadFixedArrayElement(elements, index_var.value());
2205 assembler.GotoIf(assembler.TaggedIsSmi(element_k), &continue_loop);
2206 assembler.GotoIfNot(
2207 assembler.IsStringInstanceType(assembler.LoadInstanceType(element_k)),
2208 &continue_loop);
2209
2210 // TODO(bmeurer): Consider inlining the StringEqual logic here.
2211 Callable callable = CodeFactory::StringEqual(assembler.isolate());
2212 Node* result =
2213 assembler.CallStub(callable, context, search_element, element_k);
2214 assembler.Branch(
2215 assembler.WordEqual(assembler.BooleanConstant(true), result),
2216 &return_found, &continue_loop);
2217
2218 assembler.Bind(&continue_loop);
2219 index_var.Bind(assembler.IntPtrAdd(index_var.value(), intptr_one));
2220 assembler.Goto(&string_loop);
2221 }
2222 }
2223
2224 assembler.Bind(&if_packed_doubles);
2225 {
2226 Label not_nan_loop(&assembler, &index_var), search_notnan(&assembler);
2227 Variable search_num(&assembler, MachineRepresentation::kFloat64);
2228
2229 assembler.GotoIfNot(assembler.TaggedIsSmi(search_element), &search_notnan);
2230 search_num.Bind(assembler.SmiToFloat64(search_element));
2231 assembler.Goto(&not_nan_loop);
2232
2233 assembler.Bind(&search_notnan);
2234 assembler.GotoIfNot(
2235 assembler.IsHeapNumberMap(assembler.LoadMap(search_element)),
2236 &return_not_found);
2237
2238 search_num.Bind(assembler.LoadHeapNumberValue(search_element));
2239
2240 assembler.BranchIfFloat64IsNaN(search_num.value(), &return_not_found,
2241 &not_nan_loop);
2242
2243 // Search for HeapNumber
2244 assembler.Bind(&not_nan_loop);
2245 {
2246 Label continue_loop(&assembler);
2247 assembler.GotoIfNot(
2248 assembler.UintPtrLessThan(index_var.value(), len_var.value()),
2249 &return_not_found);
2250 Node* element_k = assembler.LoadFixedDoubleArrayElement(
2251 elements, index_var.value(), MachineType::Float64());
2252 assembler.Branch(assembler.Float64Equal(element_k, search_num.value()),
2253 &return_found, &continue_loop);
2254 assembler.Bind(&continue_loop);
2255 index_var.Bind(assembler.IntPtrAdd(index_var.value(), intptr_one));
2256 assembler.Goto(&not_nan_loop);
2257 }
2258 }
2259
2260 assembler.Bind(&if_holey_doubles);
2261 {
2262 Label not_nan_loop(&assembler, &index_var), search_notnan(&assembler);
2263 Variable search_num(&assembler, MachineRepresentation::kFloat64);
2264
2265 assembler.GotoIfNot(assembler.TaggedIsSmi(search_element), &search_notnan);
2266 search_num.Bind(assembler.SmiToFloat64(search_element));
2267 assembler.Goto(&not_nan_loop);
2268
2269 assembler.Bind(&search_notnan);
2270 assembler.GotoIfNot(
2271 assembler.IsHeapNumberMap(assembler.LoadMap(search_element)),
2272 &return_not_found);
2273
2274 search_num.Bind(assembler.LoadHeapNumberValue(search_element));
2275
2276 assembler.BranchIfFloat64IsNaN(search_num.value(), &return_not_found,
2277 &not_nan_loop);
2278
2279 // Search for HeapNumber
2280 assembler.Bind(&not_nan_loop);
2281 {
2282 Label continue_loop(&assembler);
2283 assembler.GotoIfNot(
2284 assembler.UintPtrLessThan(index_var.value(), len_var.value()),
2285 &return_not_found);
2286
2287 // Load double value or continue if it contains a double hole.
2288 Node* element_k = assembler.LoadFixedDoubleArrayElement(
2289 elements, index_var.value(), MachineType::Float64(), 0,
2290 CodeStubAssembler::INTPTR_PARAMETERS, &continue_loop);
2291
2292 assembler.Branch(assembler.Float64Equal(element_k, search_num.value()),
2293 &return_found, &continue_loop);
2294 assembler.Bind(&continue_loop);
2295 index_var.Bind(assembler.IntPtrAdd(index_var.value(), intptr_one));
2296 assembler.Goto(&not_nan_loop);
2297 }
2298 }
2299
2300 assembler.Bind(&return_found);
2301 assembler.Return(assembler.SmiTag(index_var.value()));
2302
2303 assembler.Bind(&return_not_found);
2304 assembler.Return(assembler.NumberConstant(-1));
2305
2306 assembler.Bind(&call_runtime);
2307 assembler.Return(assembler.CallRuntime(Runtime::kArrayIndexOf, context, array,
2308 search_element, start_from));
2309 }
2310
2311 namespace {
2312
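// Shared generator for Array.prototype.{values,entries,keys}: coerce the
// receiver with ToObject if necessary, then allocate a JSArrayIterator of
// the requested kind, e.g. [10, 20].entries() yields [0, 10], then [1, 20].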
2313 template <IterationKind kIterationKind>
2314 void Generate_ArrayPrototypeIterationMethod(
2315 compiler::CodeAssemblerState* state) {
2316 typedef compiler::Node Node;
2317 typedef CodeStubAssembler::Label Label;
2318 typedef CodeStubAssembler::Variable Variable;
2319 CodeStubAssembler assembler(state);
2320
2321 Node* receiver = assembler.Parameter(0);
2322 Node* context = assembler.Parameter(3);
2323
2324 Variable var_array(&assembler, MachineRepresentation::kTagged);
2325 Variable var_map(&assembler, MachineRepresentation::kTagged);
2326 Variable var_type(&assembler, MachineRepresentation::kWord32);
2327
2328 Label if_isnotobject(&assembler, Label::kDeferred);
2329 Label create_array_iterator(&assembler);
2330
2331 assembler.GotoIf(assembler.TaggedIsSmi(receiver), &if_isnotobject);
2332 var_array.Bind(receiver);
2333 var_map.Bind(assembler.LoadMap(receiver));
2334 var_type.Bind(assembler.LoadMapInstanceType(var_map.value()));
2335 assembler.Branch(assembler.IsJSReceiverInstanceType(var_type.value()),
2336 &create_array_iterator, &if_isnotobject);
2337
2338 assembler.Bind(&if_isnotobject);
2339 {
2340 Callable callable = CodeFactory::ToObject(assembler.isolate());
2341 Node* result = assembler.CallStub(callable, context, receiver);
2342 var_array.Bind(result);
2343 var_map.Bind(assembler.LoadMap(result));
2344 var_type.Bind(assembler.LoadMapInstanceType(var_map.value()));
2345 assembler.Goto(&create_array_iterator);
2346 }
2347
2348 assembler.Bind(&create_array_iterator);
2349 assembler.Return(
2350 assembler.CreateArrayIterator(var_array.value(), var_map.value(),
2351 var_type.value(), context, kIterationKind));
2352 }
2353
2354 } // namespace
2355
2356 void Builtins::Generate_ArrayPrototypeValues(
2357 compiler::CodeAssemblerState* state) {
2358 Generate_ArrayPrototypeIterationMethod<IterationKind::kValues>(state);
2359 }
2360
2361 void Builtins::Generate_ArrayPrototypeEntries(
2362 compiler::CodeAssemblerState* state) {
2363 Generate_ArrayPrototypeIterationMethod<IterationKind::kEntries>(state);
2364 }
2365
2366 void Builtins::Generate_ArrayPrototypeKeys(
2367 compiler::CodeAssemblerState* state) {
2368 Generate_ArrayPrototypeIterationMethod<IterationKind::kKeys>(state);
2369 }
2370
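// %ArrayIteratorPrototype%.next: stays on a fast path while the iterated
// object's map is unchanged, handles typed arrays separately, and returns a
// fresh {value, done} result object, e.g. [1].values().next() is
// {value: 1, done: false} followed by {value: undefined, done: true}.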
2371 void Builtins::Generate_ArrayIteratorPrototypeNext(
2372 compiler::CodeAssemblerState* state) {
2373 typedef compiler::Node Node;
2374 typedef CodeStubAssembler::Label Label;
2375 typedef CodeStubAssembler::Variable Variable;
2376 CodeStubAssembler assembler(state);
2377
2378 Handle<String> operation = assembler.factory()->NewStringFromAsciiChecked(
2379 "Array Iterator.prototype.next", TENURED);
2380
2381 Node* iterator = assembler.Parameter(0);
2382 Node* context = assembler.Parameter(3);
2383
2384 Variable var_value(&assembler, MachineRepresentation::kTagged);
2385 Variable var_done(&assembler, MachineRepresentation::kTagged);
2386
2387 // Required, or else `throw_bad_receiver` fails a DCHECK due to these
2388 // variables not being bound along all paths, despite not being used.
2389 var_done.Bind(assembler.TrueConstant());
2390 var_value.Bind(assembler.UndefinedConstant());
2391
2392 Label throw_bad_receiver(&assembler, Label::kDeferred);
2393 Label set_done(&assembler);
2394 Label allocate_key_result(&assembler);
2395 Label allocate_entry_if_needed(&assembler);
2396 Label allocate_iterator_result(&assembler);
2397 Label generic_values(&assembler);
2398
2399 // If O does not have all of the internal slots of an Array Iterator Instance
2400 // (22.1.5.3), throw a TypeError exception
2401 assembler.GotoIf(assembler.TaggedIsSmi(iterator), &throw_bad_receiver);
2402 Node* instance_type = assembler.LoadInstanceType(iterator);
2403 assembler.GotoIf(
2404 assembler.Uint32LessThan(
2405 assembler.Int32Constant(LAST_ARRAY_ITERATOR_TYPE -
2406 FIRST_ARRAY_ITERATOR_TYPE),
2407 assembler.Int32Sub(instance_type, assembler.Int32Constant(
2408 FIRST_ARRAY_ITERATOR_TYPE))),
2409 &throw_bad_receiver);
2410
2411 // Let a be O.[[IteratedObject]].
2412 Node* array = assembler.LoadObjectField(
2413 iterator, JSArrayIterator::kIteratedObjectOffset);
2414
2415 // Let index be O.[[ArrayIteratorNextIndex]].
2416 Node* index =
2417 assembler.LoadObjectField(iterator, JSArrayIterator::kNextIndexOffset);
2418 Node* orig_map = assembler.LoadObjectField(
2419 iterator, JSArrayIterator::kIteratedObjectMapOffset);
2420 Node* array_map = assembler.LoadMap(array);
2421
2422 Label if_isfastarray(&assembler), if_isnotfastarray(&assembler),
2423 if_isdetached(&assembler, Label::kDeferred);
2424
2425 assembler.Branch(assembler.WordEqual(orig_map, array_map), &if_isfastarray,
2426 &if_isnotfastarray);
2427
2428 assembler.Bind(&if_isfastarray);
2429 {
2430 CSA_ASSERT(&assembler,
2431 assembler.Word32Equal(assembler.LoadMapInstanceType(array_map),
2432 assembler.Int32Constant(JS_ARRAY_TYPE)));
2433
2434 Node* length = assembler.LoadObjectField(array, JSArray::kLengthOffset);
2435
2436 CSA_ASSERT(&assembler, assembler.TaggedIsSmi(length));
2437 CSA_ASSERT(&assembler, assembler.TaggedIsSmi(index));
2438
2439 assembler.GotoIfNot(assembler.SmiBelow(index, length), &set_done);
2440
2441 Node* one = assembler.SmiConstant(Smi::FromInt(1));
2442 assembler.StoreObjectFieldNoWriteBarrier(iterator,
2443 JSArrayIterator::kNextIndexOffset,
2444 assembler.SmiAdd(index, one));
2445
2446 var_done.Bind(assembler.FalseConstant());
2447 Node* elements = assembler.LoadElements(array);
2448
2449 static int32_t kInstanceType[] = {
2450 JS_FAST_ARRAY_KEY_ITERATOR_TYPE,
2451 JS_FAST_SMI_ARRAY_KEY_VALUE_ITERATOR_TYPE,
2452 JS_FAST_HOLEY_SMI_ARRAY_KEY_VALUE_ITERATOR_TYPE,
2453 JS_FAST_ARRAY_KEY_VALUE_ITERATOR_TYPE,
2454 JS_FAST_HOLEY_ARRAY_KEY_VALUE_ITERATOR_TYPE,
2455 JS_FAST_DOUBLE_ARRAY_KEY_VALUE_ITERATOR_TYPE,
2456 JS_FAST_HOLEY_DOUBLE_ARRAY_KEY_VALUE_ITERATOR_TYPE,
2457 JS_FAST_SMI_ARRAY_VALUE_ITERATOR_TYPE,
2458 JS_FAST_HOLEY_SMI_ARRAY_VALUE_ITERATOR_TYPE,
2459 JS_FAST_ARRAY_VALUE_ITERATOR_TYPE,
2460 JS_FAST_HOLEY_ARRAY_VALUE_ITERATOR_TYPE,
2461 JS_FAST_DOUBLE_ARRAY_VALUE_ITERATOR_TYPE,
2462 JS_FAST_HOLEY_DOUBLE_ARRAY_VALUE_ITERATOR_TYPE,
2463 };
2464
2465 Label packed_object_values(&assembler), holey_object_values(&assembler),
2466 packed_double_values(&assembler), holey_double_values(&assembler);
2467 Label* kInstanceTypeHandlers[] = {
2468 &allocate_key_result, &packed_object_values, &holey_object_values,
2469 &packed_object_values, &holey_object_values, &packed_double_values,
2470 &holey_double_values, &packed_object_values, &holey_object_values,
2471 &packed_object_values, &holey_object_values, &packed_double_values,
2472 &holey_double_values};
2473
2474 assembler.Switch(instance_type, &throw_bad_receiver, kInstanceType,
2475 kInstanceTypeHandlers, arraysize(kInstanceType));
2476
2477 assembler.Bind(&packed_object_values);
2478 {
2479 var_value.Bind(assembler.LoadFixedArrayElement(
2480 elements, index, 0, CodeStubAssembler::SMI_PARAMETERS));
2481 assembler.Goto(&allocate_entry_if_needed);
2482 }
2483
2484 assembler.Bind(&packed_double_values);
2485 {
2486 Node* value = assembler.LoadFixedDoubleArrayElement(
2487 elements, index, MachineType::Float64(), 0,
2488 CodeStubAssembler::SMI_PARAMETERS);
2489 var_value.Bind(assembler.AllocateHeapNumberWithValue(value));
2490 assembler.Goto(&allocate_entry_if_needed);
2491 }
2492
2493 assembler.Bind(&holey_object_values);
2494 {
2495 // Check the array_protector cell, and take the slow path if it's invalid.
2496 Node* invalid =
2497 assembler.SmiConstant(Smi::FromInt(Isolate::kProtectorInvalid));
2498 Node* cell = assembler.LoadRoot(Heap::kArrayProtectorRootIndex);
2499 Node* cell_value =
2500 assembler.LoadObjectField(cell, PropertyCell::kValueOffset);
2501 assembler.GotoIf(assembler.WordEqual(cell_value, invalid),
2502 &generic_values);
2503
2504 var_value.Bind(assembler.UndefinedConstant());
2505 Node* value = assembler.LoadFixedArrayElement(
2506 elements, index, 0, CodeStubAssembler::SMI_PARAMETERS);
2507 assembler.GotoIf(assembler.WordEqual(value, assembler.TheHoleConstant()),
2508 &allocate_entry_if_needed);
2509 var_value.Bind(value);
2510 assembler.Goto(&allocate_entry_if_needed);
2511 }
2512
2513 assembler.Bind(&holey_double_values);
2514 {
2515 // Check the array_protector cell, and take the slow path if it's invalid.
2516 Node* invalid =
2517 assembler.SmiConstant(Smi::FromInt(Isolate::kProtectorInvalid));
2518 Node* cell = assembler.LoadRoot(Heap::kArrayProtectorRootIndex);
2519 Node* cell_value =
2520 assembler.LoadObjectField(cell, PropertyCell::kValueOffset);
2521 assembler.GotoIf(assembler.WordEqual(cell_value, invalid),
2522 &generic_values);
2523
2524 var_value.Bind(assembler.UndefinedConstant());
2525 Node* value = assembler.LoadFixedDoubleArrayElement(
2526 elements, index, MachineType::Float64(), 0,
2527 CodeStubAssembler::SMI_PARAMETERS, &allocate_entry_if_needed);
2528 var_value.Bind(assembler.AllocateHeapNumberWithValue(value));
2529 assembler.Goto(&allocate_entry_if_needed);
2530 }
2531 }
2532
2533 assembler.Bind(&if_isnotfastarray);
2534 {
2535 Label if_istypedarray(&assembler), if_isgeneric(&assembler);
2536
2537 // If a is undefined, return CreateIterResultObject(undefined, true)
2538 assembler.GotoIf(assembler.WordEqual(array, assembler.UndefinedConstant()),
2539 &allocate_iterator_result);
2540
2541 Node* array_type = assembler.LoadInstanceType(array);
2542 assembler.Branch(
2543 assembler.Word32Equal(array_type,
2544 assembler.Int32Constant(JS_TYPED_ARRAY_TYPE)),
2545 &if_istypedarray, &if_isgeneric);
2546
2547 assembler.Bind(&if_isgeneric);
2548 {
2549 Label if_wasfastarray(&assembler);
2550
2551 Node* length = nullptr;
2552 {
2553 Variable var_length(&assembler, MachineRepresentation::kTagged);
2554 Label if_isarray(&assembler), if_isnotarray(&assembler),
2555 done(&assembler);
2556 assembler.Branch(
2557 assembler.Word32Equal(array_type,
2558 assembler.Int32Constant(JS_ARRAY_TYPE)),
2559 &if_isarray, &if_isnotarray);
2560
2561 assembler.Bind(&if_isarray);
2562 {
2563 var_length.Bind(
2564 assembler.LoadObjectField(array, JSArray::kLengthOffset));
2565
2566 // Invalidate protector cell if needed
2567 assembler.Branch(
2568 assembler.WordNotEqual(orig_map, assembler.UndefinedConstant()),
2569 &if_wasfastarray, &done);
2570
2571 assembler.Bind(&if_wasfastarray);
2572 {
2573 Label if_invalid(&assembler, Label::kDeferred);
2574 // A fast array iterator transitioned to a slow iterator during
2575 // iteration. Invalidate the fast_array_iteration_protector cell to
2576 // prevent potential deopt loops.
2577 assembler.StoreObjectFieldNoWriteBarrier(
2578 iterator, JSArrayIterator::kIteratedObjectMapOffset,
2579 assembler.UndefinedConstant());
2580 assembler.GotoIf(
2581 assembler.Uint32LessThanOrEqual(
2582 instance_type, assembler.Int32Constant(
2583 JS_GENERIC_ARRAY_KEY_ITERATOR_TYPE)),
2584 &done);
2585
2586 Node* invalid =
2587 assembler.SmiConstant(Smi::FromInt(Isolate::kProtectorInvalid));
2588 Node* cell =
2589 assembler.LoadRoot(Heap::kFastArrayIterationProtectorRootIndex);
2590 assembler.StoreObjectFieldNoWriteBarrier(cell, Cell::kValueOffset,
2591 invalid);
2592 assembler.Goto(&done);
2593 }
2594 }
2595
2596 assembler.Bind(&if_isnotarray);
2597 {
2598 Node* length_string = assembler.HeapConstant(
2599 assembler.isolate()->factory()->length_string());
2600 Callable get_property = CodeFactory::GetProperty(assembler.isolate());
2601 Node* length =
2602 assembler.CallStub(get_property, context, array, length_string);
2603 Callable to_length = CodeFactory::ToLength(assembler.isolate());
2604 var_length.Bind(assembler.CallStub(to_length, context, length));
2605 assembler.Goto(&done);
2606 }
2607
2608 assembler.Bind(&done);
2609 length = var_length.value();
2610 }
2611
2612 assembler.GotoUnlessNumberLessThan(index, length, &set_done);
2613
2614 assembler.StoreObjectField(iterator, JSArrayIterator::kNextIndexOffset,
2615 assembler.NumberInc(index));
2616 var_done.Bind(assembler.FalseConstant());
2617
2618 assembler.Branch(
2619 assembler.Uint32LessThanOrEqual(
2620 instance_type,
2621 assembler.Int32Constant(JS_GENERIC_ARRAY_KEY_ITERATOR_TYPE)),
2622 &allocate_key_result, &generic_values);
2623
2624 assembler.Bind(&generic_values);
2625 {
2626 Callable get_property = CodeFactory::GetProperty(assembler.isolate());
2627 var_value.Bind(assembler.CallStub(get_property, context, array, index));
2628 assembler.Goto(&allocate_entry_if_needed);
2629 }
2630 }
2631
2632 assembler.Bind(&if_istypedarray);
2633 {
2634 Node* buffer =
2635 assembler.LoadObjectField(array, JSTypedArray::kBufferOffset);
2636 assembler.GotoIf(assembler.IsDetachedBuffer(buffer), &if_isdetached);
2637
2638 Node* length =
2639 assembler.LoadObjectField(array, JSTypedArray::kLengthOffset);
2640
2641 CSA_ASSERT(&assembler, assembler.TaggedIsSmi(length));
2642 CSA_ASSERT(&assembler, assembler.TaggedIsSmi(index));
2643
2644 assembler.GotoIfNot(assembler.SmiBelow(index, length), &set_done);
2645
2646 Node* one = assembler.SmiConstant(1);
2647 assembler.StoreObjectFieldNoWriteBarrier(
2648 iterator, JSArrayIterator::kNextIndexOffset,
2649 assembler.SmiAdd(index, one));
2650 var_done.Bind(assembler.FalseConstant());
2651
2652 Node* elements = assembler.LoadElements(array);
2653 Node* base_ptr = assembler.LoadObjectField(
2654 elements, FixedTypedArrayBase::kBasePointerOffset);
2655 Node* external_ptr = assembler.LoadObjectField(
2656 elements, FixedTypedArrayBase::kExternalPointerOffset,
2657 MachineType::Pointer());
2658 Node* data_ptr = assembler.IntPtrAdd(
2659 assembler.BitcastTaggedToWord(base_ptr), external_ptr);
2660
2661 static int32_t kInstanceType[] = {
2662 JS_TYPED_ARRAY_KEY_ITERATOR_TYPE,
2663 JS_UINT8_ARRAY_KEY_VALUE_ITERATOR_TYPE,
2664 JS_UINT8_CLAMPED_ARRAY_KEY_VALUE_ITERATOR_TYPE,
2665 JS_INT8_ARRAY_KEY_VALUE_ITERATOR_TYPE,
2666 JS_UINT16_ARRAY_KEY_VALUE_ITERATOR_TYPE,
2667 JS_INT16_ARRAY_KEY_VALUE_ITERATOR_TYPE,
2668 JS_UINT32_ARRAY_KEY_VALUE_ITERATOR_TYPE,
2669 JS_INT32_ARRAY_KEY_VALUE_ITERATOR_TYPE,
2670 JS_FLOAT32_ARRAY_KEY_VALUE_ITERATOR_TYPE,
2671 JS_FLOAT64_ARRAY_KEY_VALUE_ITERATOR_TYPE,
2672 JS_UINT8_ARRAY_VALUE_ITERATOR_TYPE,
2673 JS_UINT8_CLAMPED_ARRAY_VALUE_ITERATOR_TYPE,
2674 JS_INT8_ARRAY_VALUE_ITERATOR_TYPE,
2675 JS_UINT16_ARRAY_VALUE_ITERATOR_TYPE,
2676 JS_INT16_ARRAY_VALUE_ITERATOR_TYPE,
2677 JS_UINT32_ARRAY_VALUE_ITERATOR_TYPE,
2678 JS_INT32_ARRAY_VALUE_ITERATOR_TYPE,
2679 JS_FLOAT32_ARRAY_VALUE_ITERATOR_TYPE,
2680 JS_FLOAT64_ARRAY_VALUE_ITERATOR_TYPE,
2681 };
2682
2683 Label uint8_values(&assembler), int8_values(&assembler),
2684 uint16_values(&assembler), int16_values(&assembler),
2685 uint32_values(&assembler), int32_values(&assembler),
2686 float32_values(&assembler), float64_values(&assembler);
2687 Label* kInstanceTypeHandlers[] = {
2688 &allocate_key_result, &uint8_values, &uint8_values,
2689 &int8_values, &uint16_values, &int16_values,
2690 &uint32_values, &int32_values, &float32_values,
2691 &float64_values, &uint8_values, &uint8_values,
2692 &int8_values, &uint16_values, &int16_values,
2693 &uint32_values, &int32_values, &float32_values,
2694 &float64_values,
2695 };
2696
2697 var_done.Bind(assembler.FalseConstant());
2698 assembler.Switch(instance_type, &throw_bad_receiver, kInstanceType,
2699 kInstanceTypeHandlers, arraysize(kInstanceType));
2700
2701 assembler.Bind(&uint8_values);
2702 {
2703 Node* value_uint8 = assembler.LoadFixedTypedArrayElement(
2704 data_ptr, index, UINT8_ELEMENTS, CodeStubAssembler::SMI_PARAMETERS);
2705 var_value.Bind(assembler.SmiFromWord32(value_uint8));
2706 assembler.Goto(&allocate_entry_if_needed);
2707 }
2708
2709 assembler.Bind(&int8_values);
2710 {
2711 Node* value_int8 = assembler.LoadFixedTypedArrayElement(
2712 data_ptr, index, INT8_ELEMENTS, CodeStubAssembler::SMI_PARAMETERS);
2713 var_value.Bind(assembler.SmiFromWord32(value_int8));
2714 assembler.Goto(&allocate_entry_if_needed);
2715 }
2716
2717 assembler.Bind(&uint16_values);
2718 {
2719 Node* value_uint16 = assembler.LoadFixedTypedArrayElement(
2720 data_ptr, index, UINT16_ELEMENTS,
2721 CodeStubAssembler::SMI_PARAMETERS);
2722 var_value.Bind(assembler.SmiFromWord32(value_uint16));
2723 assembler.Goto(&allocate_entry_if_needed);
2724 }
2725
2726 assembler.Bind(&int16_values);
2727 {
2728 Node* value_int16 = assembler.LoadFixedTypedArrayElement(
2729 data_ptr, index, INT16_ELEMENTS, CodeStubAssembler::SMI_PARAMETERS);
2730 var_value.Bind(assembler.SmiFromWord32(value_int16));
2731 assembler.Goto(&allocate_entry_if_needed);
2732 }
2733
2734 assembler.Bind(&uint32_values);
2735 {
2736 Node* value_uint32 = assembler.LoadFixedTypedArrayElement(
2737 data_ptr, index, UINT32_ELEMENTS,
2738 CodeStubAssembler::SMI_PARAMETERS);
2739 var_value.Bind(assembler.ChangeUint32ToTagged(value_uint32));
2740 assembler.Goto(&allocate_entry_if_needed);
2741 }
2742 assembler.Bind(&int32_values);
2743 {
2744 Node* value_int32 = assembler.LoadFixedTypedArrayElement(
2745 data_ptr, index, INT32_ELEMENTS, CodeStubAssembler::SMI_PARAMETERS);
2746 var_value.Bind(assembler.ChangeInt32ToTagged(value_int32));
2747 assembler.Goto(&allocate_entry_if_needed);
2748 }
2749 assembler.Bind(&float32_values);
2750 {
2751 Node* value_float32 = assembler.LoadFixedTypedArrayElement(
2752 data_ptr, index, FLOAT32_ELEMENTS,
2753 CodeStubAssembler::SMI_PARAMETERS);
2754 var_value.Bind(assembler.AllocateHeapNumberWithValue(
2755 assembler.ChangeFloat32ToFloat64(value_float32)));
2756 assembler.Goto(&allocate_entry_if_needed);
2757 }
2758 assembler.Bind(&float64_values);
2759 {
2760 Node* value_float64 = assembler.LoadFixedTypedArrayElement(
2761 data_ptr, index, FLOAT64_ELEMENTS,
2762 CodeStubAssembler::SMI_PARAMETERS);
2763 var_value.Bind(assembler.AllocateHeapNumberWithValue(value_float64));
2764 assembler.Goto(&allocate_entry_if_needed);
2765 }
2766 }
2767 }
2768
2769 assembler.Bind(&set_done);
2770 {
2771 assembler.StoreObjectFieldNoWriteBarrier(
2772 iterator, JSArrayIterator::kIteratedObjectOffset,
2773 assembler.UndefinedConstant());
2774 assembler.Goto(&allocate_iterator_result);
2775 }
2776
2777 assembler.Bind(&allocate_key_result);
2778 {
2779 var_value.Bind(index);
2780 var_done.Bind(assembler.FalseConstant());
2781 assembler.Goto(&allocate_iterator_result);
2782 }
2783
2784 assembler.Bind(&allocate_entry_if_needed);
2785 {
2786 assembler.GotoIf(
2787 assembler.Int32GreaterThan(
2788 instance_type,
2789 assembler.Int32Constant(LAST_ARRAY_KEY_VALUE_ITERATOR_TYPE)),
2790 &allocate_iterator_result);
2791
2792 Node* elements = assembler.AllocateFixedArray(FAST_ELEMENTS,
2793 assembler.IntPtrConstant(2));
2794 assembler.StoreFixedArrayElement(elements, 0, index, SKIP_WRITE_BARRIER);
2795 assembler.StoreFixedArrayElement(elements, 1, var_value.value(),
2796 SKIP_WRITE_BARRIER);
2797
2798 Node* entry = assembler.Allocate(JSArray::kSize);
2799 Node* map =
2800 assembler.LoadContextElement(assembler.LoadNativeContext(context),
2801 Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX);
2802
2803 assembler.StoreMapNoWriteBarrier(entry, map);
2804 assembler.StoreObjectFieldRoot(entry, JSArray::kPropertiesOffset,
2805 Heap::kEmptyFixedArrayRootIndex);
2806 assembler.StoreObjectFieldNoWriteBarrier(entry, JSArray::kElementsOffset,
2807 elements);
2808 assembler.StoreObjectFieldNoWriteBarrier(
2809 entry, JSArray::kLengthOffset, assembler.SmiConstant(Smi::FromInt(2)));
2810
2811 var_value.Bind(entry);
2812 assembler.Goto(&allocate_iterator_result);
2813 }
2814
2815 assembler.Bind(&allocate_iterator_result);
2816 {
2817 Node* result = assembler.Allocate(JSIteratorResult::kSize);
2818 Node* map =
2819 assembler.LoadContextElement(assembler.LoadNativeContext(context),
2820 Context::ITERATOR_RESULT_MAP_INDEX);
2821 assembler.StoreMapNoWriteBarrier(result, map);
2822 assembler.StoreObjectFieldRoot(result, JSIteratorResult::kPropertiesOffset,
2823 Heap::kEmptyFixedArrayRootIndex);
2824 assembler.StoreObjectFieldRoot(result, JSIteratorResult::kElementsOffset,
2825 Heap::kEmptyFixedArrayRootIndex);
2826 assembler.StoreObjectFieldNoWriteBarrier(
2827 result, JSIteratorResult::kValueOffset, var_value.value());
2828 assembler.StoreObjectFieldNoWriteBarrier(
2829 result, JSIteratorResult::kDoneOffset, var_done.value());
2830 assembler.Return(result);
2831 }
2832
2833 assembler.Bind(&throw_bad_receiver);
2834 {
2835 // The {receiver} is not a valid JSArrayIterator.
2836 assembler.CallRuntime(Runtime::kThrowIncompatibleMethodReceiver, context,
2837 assembler.HeapConstant(operation), iterator);
2838 assembler.Unreachable();
2839 }
2840
2841 assembler.Bind(&if_isdetached);
2842 {
2843 Node* message = assembler.SmiConstant(MessageTemplate::kDetachedOperation);
2844 assembler.CallRuntime(Runtime::kThrowTypeError, context, message,
2845 assembler.HeapConstant(operation));
2846 assembler.Unreachable();
2847 }
2848 }
2849
2850 } // namespace internal
2851 } // namespace v8
2852