// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/code-stub-assembler.h"
#include "src/code-factory.h"
#include "src/frames-inl.h"
#include "src/frames.h"

namespace v8 {
namespace internal {

using compiler::Node;

CodeStubAssembler::CodeStubAssembler(compiler::CodeAssemblerState* state)
    : compiler::CodeAssembler(state) {
  if (DEBUG_BOOL && FLAG_csa_trap_on_node != nullptr) {
    HandleBreakOnNode();
  }
}

void CodeStubAssembler::HandleBreakOnNode() {
  // FLAG_csa_trap_on_node should be in the form "STUB,NODE" where STUB is a
  // string specifying the name of a stub and NODE is a number specifying the
  // node id.
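  // For example (illustrative value only), running with
  // --csa_trap_on_node="KeyedLoadIC,42" requests a trap when node #42 of a
  // stub named "KeyedLoadIC" is emitted.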
  const char* name = state()->name();
  size_t name_length = strlen(name);
  if (strncmp(FLAG_csa_trap_on_node, name, name_length) != 0) {
    // Different name.
    return;
  }
  size_t option_length = strlen(FLAG_csa_trap_on_node);
  if (option_length < name_length + 2 ||
      FLAG_csa_trap_on_node[name_length] != ',') {
    // Option is too short.
    return;
  }
  const char* start = &FLAG_csa_trap_on_node[name_length + 1];
  char* end;
  int node_id = static_cast<int>(strtol(start, &end, 10));
  if (start == end) {
    // Bad node id.
    return;
  }
  BreakOnNode(node_id);
}

void CodeStubAssembler::Assert(const NodeGenerator& condition_body,
                               const char* message, const char* file,
                               int line) {
#if defined(DEBUG)
  if (FLAG_debug_code) {
    Label ok(this);
    Label not_ok(this, Label::kDeferred);
    if (message != nullptr && FLAG_code_comments) {
      Comment("[ Assert: %s", message);
    } else {
      Comment("[ Assert");
    }
    Node* condition = condition_body();
    DCHECK_NOT_NULL(condition);
    Branch(condition, &ok, &not_ok);
    Bind(&not_ok);
    if (message != nullptr) {
      char chars[1024];
      Vector<char> buffer(chars);
      if (file != nullptr) {
        SNPrintF(buffer, "CSA_ASSERT failed: %s [%s:%d]\n", message, file,
                 line);
      } else {
        SNPrintF(buffer, "CSA_ASSERT failed: %s\n", message);
      }
      CallRuntime(
          Runtime::kGlobalPrint, SmiConstant(Smi::kZero),
          HeapConstant(factory()->NewStringFromAsciiChecked(&(buffer[0]))));
    }
    DebugBreak();
    Goto(&ok);
    Bind(&ok);
    Comment("] Assert");
  }
#endif
}
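
// Assert is normally reached via the CSA_ASSERT macro, which supplies the
// message, file, and line automatically, e.g.:
//   CSA_ASSERT(this, IsJSArray(array));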

Node* CodeStubAssembler::Select(Node* condition, const NodeGenerator& true_body,
                                const NodeGenerator& false_body,
                                MachineRepresentation rep) {
  Variable value(this, rep);
  Label vtrue(this), vfalse(this), end(this);
  Branch(condition, &vtrue, &vfalse);

  Bind(&vtrue);
  {
    value.Bind(true_body());
    Goto(&end);
  }
  Bind(&vfalse);
  {
    value.Bind(false_body());
    Goto(&end);
  }

  Bind(&end);
  return value.value();
}
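
// Illustrative use of Select (hypothetical nodes a and b): pick the larger
// of two int32 values without spelling out the Variable and labels:
//   Node* max = Select(Int32GreaterThan(a, b), [=] { return a; },
//                      [=] { return b; }, MachineRepresentation::kWord32);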

Node* CodeStubAssembler::SelectConstant(Node* condition, Node* true_value,
                                        Node* false_value,
                                        MachineRepresentation rep) {
  return Select(condition, [=] { return true_value; },
                [=] { return false_value; }, rep);
}

Node* CodeStubAssembler::SelectInt32Constant(Node* condition, int true_value,
                                             int false_value) {
  return SelectConstant(condition, Int32Constant(true_value),
                        Int32Constant(false_value),
                        MachineRepresentation::kWord32);
}

Node* CodeStubAssembler::SelectIntPtrConstant(Node* condition, int true_value,
                                              int false_value) {
  return SelectConstant(condition, IntPtrConstant(true_value),
                        IntPtrConstant(false_value),
                        MachineType::PointerRepresentation());
}

Node* CodeStubAssembler::SelectBooleanConstant(Node* condition) {
  return SelectConstant(condition, TrueConstant(), FalseConstant(),
                        MachineRepresentation::kTagged);
}

Node* CodeStubAssembler::SelectTaggedConstant(Node* condition, Node* true_value,
                                              Node* false_value) {
  return SelectConstant(condition, true_value, false_value,
                        MachineRepresentation::kTagged);
}

Node* CodeStubAssembler::SelectSmiConstant(Node* condition, Smi* true_value,
                                           Smi* false_value) {
  return SelectConstant(condition, SmiConstant(true_value),
                        SmiConstant(false_value),
                        MachineRepresentation::kTaggedSigned);
}

Node* CodeStubAssembler::NoContextConstant() { return NumberConstant(0); }

#define HEAP_CONSTANT_ACCESSOR(rootName, name)     \
  Node* CodeStubAssembler::name##Constant() {      \
    return LoadRoot(Heap::k##rootName##RootIndex); \
  }
HEAP_CONSTANT_LIST(HEAP_CONSTANT_ACCESSOR);
#undef HEAP_CONSTANT_ACCESSOR

#define HEAP_CONSTANT_TEST(rootName, name)         \
  Node* CodeStubAssembler::Is##name(Node* value) { \
    return WordEqual(value, name##Constant());     \
  }
HEAP_CONSTANT_LIST(HEAP_CONSTANT_TEST);
#undef HEAP_CONSTANT_TEST

Node* CodeStubAssembler::HashSeed() {
  return LoadAndUntagToWord32Root(Heap::kHashSeedRootIndex);
}

Node* CodeStubAssembler::StaleRegisterConstant() {
  return LoadRoot(Heap::kStaleRegisterRootIndex);
}

Node* CodeStubAssembler::IntPtrOrSmiConstant(int value, ParameterMode mode) {
  if (mode == SMI_PARAMETERS) {
    return SmiConstant(Smi::FromInt(value));
  } else {
    DCHECK_EQ(INTPTR_PARAMETERS, mode);
    return IntPtrConstant(value);
  }
}

bool CodeStubAssembler::IsIntPtrOrSmiConstantZero(Node* test) {
  int32_t constant_test;
  Smi* smi_test;
  if ((ToInt32Constant(test, constant_test) && constant_test == 0) ||
      (ToSmiConstant(test, smi_test) && smi_test->value() == 0)) {
    return true;
  }
  return false;
}

Node* CodeStubAssembler::IntPtrRoundUpToPowerOfTwo32(Node* value) {
  Comment("IntPtrRoundUpToPowerOfTwo32");
  CSA_ASSERT(this, UintPtrLessThanOrEqual(value, IntPtrConstant(0x80000000u)));
  value = IntPtrSub(value, IntPtrConstant(1));
  for (int i = 1; i <= 16; i *= 2) {
    value = WordOr(value, WordShr(value, IntPtrConstant(i)));
  }
  return IntPtrAdd(value, IntPtrConstant(1));
}
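
// E.g. 5 -> 8, 8 -> 8, and 0 -> 0: subtracting one and or-ing in the shifted
// copies smears the highest set bit into every lower bit, so the final
// increment lands exactly on the next power of two.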

Node* CodeStubAssembler::WordIsPowerOfTwo(Node* value) {
  // value && !(value & (value - 1))
  return WordEqual(
      Select(
          WordEqual(value, IntPtrConstant(0)),
          [=] { return IntPtrConstant(1); },
          [=] { return WordAnd(value, IntPtrSub(value, IntPtrConstant(1))); },
          MachineType::PointerRepresentation()),
      IntPtrConstant(0));
}
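
// E.g. 8 is 0b1000 and 8 & 7 == 0, so 8 is reported as a power of two; the
// Select maps an input of 0 to a non-zero word first, so 0 itself is not
// misreported as a power of two.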

Node* CodeStubAssembler::Float64Round(Node* x) {
  Node* one = Float64Constant(1.0);
  Node* one_half = Float64Constant(0.5);

  Label return_x(this);

  // Round up {x} towards Infinity.
  Variable var_x(this, MachineRepresentation::kFloat64, Float64Ceil(x));

  GotoIf(Float64LessThanOrEqual(Float64Sub(var_x.value(), one_half), x),
         &return_x);
  var_x.Bind(Float64Sub(var_x.value(), one));
  Goto(&return_x);

  Bind(&return_x);
  return var_x.value();
}

Node* CodeStubAssembler::Float64Ceil(Node* x) {
  if (IsFloat64RoundUpSupported()) {
    return Float64RoundUp(x);
  }

  Node* one = Float64Constant(1.0);
  Node* zero = Float64Constant(0.0);
  Node* two_52 = Float64Constant(4503599627370496.0E0);
  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);
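
  // Note: doubles with magnitude >= 2^52 have no fractional bits, so adding
  // and then subtracting 2^52 rounds |x| to an integer in the default
  // round-to-nearest mode; the corrections below turn that into a ceiling.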

  Variable var_x(this, MachineRepresentation::kFloat64, x);
  Label return_x(this), return_minus_x(this);

  // Check if {x} is greater than zero.
  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
         &if_xnotgreaterthanzero);

  Bind(&if_xgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]0,2^52[.
    GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);

    // Round positive {x} towards Infinity.
    var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
    GotoIfNot(Float64LessThan(var_x.value(), x), &return_x);
    var_x.Bind(Float64Add(var_x.value(), one));
    Goto(&return_x);
  }

  Bind(&if_xnotgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]-2^52,0[.
    GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
    GotoIfNot(Float64LessThan(x, zero), &return_x);

    // Round negated {x} towards Infinity and return the result negated.
    Node* minus_x = Float64Neg(x);
    var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
    GotoIfNot(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
    var_x.Bind(Float64Sub(var_x.value(), one));
    Goto(&return_minus_x);
  }

  Bind(&return_minus_x);
  var_x.Bind(Float64Neg(var_x.value()));
  Goto(&return_x);

  Bind(&return_x);
  return var_x.value();
}

Node* CodeStubAssembler::Float64Floor(Node* x) {
  if (IsFloat64RoundDownSupported()) {
    return Float64RoundDown(x);
  }

  Node* one = Float64Constant(1.0);
  Node* zero = Float64Constant(0.0);
  Node* two_52 = Float64Constant(4503599627370496.0E0);
  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);

  Variable var_x(this, MachineRepresentation::kFloat64, x);
  Label return_x(this), return_minus_x(this);

  // Check if {x} is greater than zero.
  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
         &if_xnotgreaterthanzero);

  Bind(&if_xgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]0,2^52[.
    GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);

    // Round positive {x} towards -Infinity.
    var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
    GotoIfNot(Float64GreaterThan(var_x.value(), x), &return_x);
    var_x.Bind(Float64Sub(var_x.value(), one));
    Goto(&return_x);
  }

  Bind(&if_xnotgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]-2^52,0[.
    GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
    GotoIfNot(Float64LessThan(x, zero), &return_x);

    // Round negated {x} towards -Infinity and return the result negated.
    Node* minus_x = Float64Neg(x);
    var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
    GotoIfNot(Float64LessThan(var_x.value(), minus_x), &return_minus_x);
    var_x.Bind(Float64Add(var_x.value(), one));
    Goto(&return_minus_x);
  }

  Bind(&return_minus_x);
  var_x.Bind(Float64Neg(var_x.value()));
  Goto(&return_x);

  Bind(&return_x);
  return var_x.value();
}

Node* CodeStubAssembler::Float64RoundToEven(Node* x) {
  if (IsFloat64RoundTiesEvenSupported()) {
    return Float64RoundTiesEven(x);
  }
  // See ES#sec-touint8clamp for details.
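  // E.g. 2.5 rounds to 2 and 3.5 rounds to 4: exact halves go to the even
  // neighbour, which is the only case the f_mod_2 check below needs to
  // decide.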
  Node* f = Float64Floor(x);
  Node* f_and_half = Float64Add(f, Float64Constant(0.5));

  Variable var_result(this, MachineRepresentation::kFloat64);
  Label return_f(this), return_f_plus_one(this), done(this);

  GotoIf(Float64LessThan(f_and_half, x), &return_f_plus_one);
  GotoIf(Float64LessThan(x, f_and_half), &return_f);
  {
    Node* f_mod_2 = Float64Mod(f, Float64Constant(2.0));
    Branch(Float64Equal(f_mod_2, Float64Constant(0.0)), &return_f,
           &return_f_plus_one);
  }

  Bind(&return_f);
  var_result.Bind(f);
  Goto(&done);

  Bind(&return_f_plus_one);
  var_result.Bind(Float64Add(f, Float64Constant(1.0)));
  Goto(&done);

  Bind(&done);
  return var_result.value();
}

Node* CodeStubAssembler::Float64Trunc(Node* x) {
  if (IsFloat64RoundTruncateSupported()) {
    return Float64RoundTruncate(x);
  }

  Node* one = Float64Constant(1.0);
  Node* zero = Float64Constant(0.0);
  Node* two_52 = Float64Constant(4503599627370496.0E0);
  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);

  Variable var_x(this, MachineRepresentation::kFloat64, x);
  Label return_x(this), return_minus_x(this);

  // Check if {x} is greater than 0.
  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
         &if_xnotgreaterthanzero);

  Bind(&if_xgreaterthanzero);
  {
    if (IsFloat64RoundDownSupported()) {
      var_x.Bind(Float64RoundDown(x));
    } else {
      // Just return {x} unless it's in the range ]0,2^52[.
      GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);

      // Round positive {x} towards -Infinity.
      var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
      GotoIfNot(Float64GreaterThan(var_x.value(), x), &return_x);
      var_x.Bind(Float64Sub(var_x.value(), one));
    }
    Goto(&return_x);
  }

  Bind(&if_xnotgreaterthanzero);
  {
    if (IsFloat64RoundUpSupported()) {
      var_x.Bind(Float64RoundUp(x));
      Goto(&return_x);
    } else {
      // Just return {x} unless it's in the range ]-2^52,0[.
      GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
      GotoIfNot(Float64LessThan(x, zero), &return_x);

      // Round negated {x} towards -Infinity and return result negated.
      Node* minus_x = Float64Neg(x);
      var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
      GotoIfNot(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
      var_x.Bind(Float64Sub(var_x.value(), one));
      Goto(&return_minus_x);
    }
  }

  Bind(&return_minus_x);
  var_x.Bind(Float64Neg(var_x.value()));
  Goto(&return_x);

  Bind(&return_x);
  return var_x.value();
}

Node* CodeStubAssembler::SmiShiftBitsConstant() {
  return IntPtrConstant(kSmiShiftSize + kSmiTagSize);
}
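
// On 64-bit targets with 32-bit Smis (kSmiShiftSize == 31, kSmiTagSize == 1)
// the shift is 32, so e.g. 5 is tagged as 5 << 32; on 32-bit targets the
// shift is 1 and 5 is tagged as 10. Either way the Smi tag bit stays 0.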

Node* CodeStubAssembler::SmiFromWord32(Node* value) {
  value = ChangeInt32ToIntPtr(value);
  return BitcastWordToTaggedSigned(WordShl(value, SmiShiftBitsConstant()));
}

Node* CodeStubAssembler::SmiTag(Node* value) {
  int32_t constant_value;
  if (ToInt32Constant(value, constant_value) && Smi::IsValid(constant_value)) {
    return SmiConstant(Smi::FromInt(constant_value));
  }
  return BitcastWordToTaggedSigned(WordShl(value, SmiShiftBitsConstant()));
}

Node* CodeStubAssembler::SmiUntag(Node* value) {
  return WordSar(BitcastTaggedToWord(value), SmiShiftBitsConstant());
}

Node* CodeStubAssembler::SmiToWord32(Node* value) {
  Node* result = SmiUntag(value);
  return TruncateWordToWord32(result);
}

Node* CodeStubAssembler::SmiToFloat64(Node* value) {
  return ChangeInt32ToFloat64(SmiToWord32(value));
}

Node* CodeStubAssembler::SmiMax(Node* a, Node* b) {
  return SelectTaggedConstant(SmiLessThan(a, b), b, a);
}

Node* CodeStubAssembler::SmiMin(Node* a, Node* b) {
  return SelectTaggedConstant(SmiLessThan(a, b), a, b);
}

Node* CodeStubAssembler::SmiMod(Node* a, Node* b) {
  Variable var_result(this, MachineRepresentation::kTagged);
  Label return_result(this, &var_result),
      return_minuszero(this, Label::kDeferred),
      return_nan(this, Label::kDeferred);

  // Untag {a} and {b}.
  a = SmiToWord32(a);
  b = SmiToWord32(b);

  // Return NaN if {b} is zero.
  GotoIf(Word32Equal(b, Int32Constant(0)), &return_nan);

  // Check if {a} is non-negative.
  Label if_aisnotnegative(this), if_aisnegative(this, Label::kDeferred);
  Branch(Int32LessThanOrEqual(Int32Constant(0), a), &if_aisnotnegative,
         &if_aisnegative);

  Bind(&if_aisnotnegative);
  {
    // Fast case, don't need to check any other edge cases.
    Node* r = Int32Mod(a, b);
    var_result.Bind(SmiFromWord32(r));
    Goto(&return_result);
  }

  Bind(&if_aisnegative);
  {
    if (SmiValuesAre32Bits()) {
      // Check if {a} is kMinInt and {b} is -1 (only relevant if kMinInt
      // is actually representable as a Smi).
      Label join(this);
      GotoIfNot(Word32Equal(a, Int32Constant(kMinInt)), &join);
      GotoIf(Word32Equal(b, Int32Constant(-1)), &return_minuszero);
      Goto(&join);
      Bind(&join);
    }

    // Perform the integer modulus operation.
    Node* r = Int32Mod(a, b);

    // Check if {r} is zero, and if so return -0, because we have to
    // take the sign of the left hand side {a}, which is negative.
    GotoIf(Word32Equal(r, Int32Constant(0)), &return_minuszero);

    // The remainder {r} can be outside the valid Smi range on 32-bit
    // architectures, so we cannot just say SmiFromWord32(r) here.
    var_result.Bind(ChangeInt32ToTagged(r));
    Goto(&return_result);
  }

  Bind(&return_minuszero);
  var_result.Bind(MinusZeroConstant());
  Goto(&return_result);

  Bind(&return_nan);
  var_result.Bind(NanConstant());
  Goto(&return_result);

  Bind(&return_result);
  return var_result.value();
}
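
// Note that this matches JS semantics: e.g. (-5) % 5 is -0 and 1 % 0 is NaN,
// which is why the two deferred paths above produce HeapNumber results.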

Node* CodeStubAssembler::SmiMul(Node* a, Node* b) {
  Variable var_result(this, MachineRepresentation::kTagged);
  Variable var_lhs_float64(this, MachineRepresentation::kFloat64),
      var_rhs_float64(this, MachineRepresentation::kFloat64);
  Label return_result(this, &var_result);

  // Both {a} and {b} are Smis. Convert them to integers and multiply.
  Node* lhs32 = SmiToWord32(a);
  Node* rhs32 = SmiToWord32(b);
  Node* pair = Int32MulWithOverflow(lhs32, rhs32);

  Node* overflow = Projection(1, pair);

  // Check if the multiplication overflowed.
  Label if_overflow(this, Label::kDeferred), if_notoverflow(this);
  Branch(overflow, &if_overflow, &if_notoverflow);
  Bind(&if_notoverflow);
  {
    // If the answer is zero, we may need to return -0.0, depending on the
    // input.
    Label answer_zero(this), answer_not_zero(this);
    Node* answer = Projection(0, pair);
    Node* zero = Int32Constant(0);
    Branch(Word32Equal(answer, zero), &answer_zero, &answer_not_zero);
    Bind(&answer_not_zero);
    {
      var_result.Bind(ChangeInt32ToTagged(answer));
      Goto(&return_result);
    }
    Bind(&answer_zero);
    {
      Node* or_result = Word32Or(lhs32, rhs32);
      Label if_should_be_negative_zero(this), if_should_be_zero(this);
      Branch(Int32LessThan(or_result, zero), &if_should_be_negative_zero,
             &if_should_be_zero);
      Bind(&if_should_be_negative_zero);
      {
        var_result.Bind(MinusZeroConstant());
        Goto(&return_result);
      }
      Bind(&if_should_be_zero);
      {
        var_result.Bind(SmiConstant(0));
        Goto(&return_result);
      }
    }
  }
  Bind(&if_overflow);
  {
    var_lhs_float64.Bind(SmiToFloat64(a));
    var_rhs_float64.Bind(SmiToFloat64(b));
    Node* value = Float64Mul(var_lhs_float64.value(), var_rhs_float64.value());
    Node* result = AllocateHeapNumberWithValue(value);
    var_result.Bind(result);
    Goto(&return_result);
  }

  Bind(&return_result);
  return var_result.value();
}
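
// E.g. squaring a Smi near Smi::kMaxValue overflows the 32-bit product, so
// the result is materialized as a fresh HeapNumber in the deferred path.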

Node* CodeStubAssembler::TruncateWordToWord32(Node* value) {
  if (Is64()) {
    return TruncateInt64ToInt32(value);
  }
  return value;
}

Node* CodeStubAssembler::TaggedIsSmi(Node* a) {
  return WordEqual(WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask)),
                   IntPtrConstant(0));
}

Node* CodeStubAssembler::TaggedIsNotSmi(Node* a) {
  return WordNotEqual(
      WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask)),
      IntPtrConstant(0));
}

Node* CodeStubAssembler::TaggedIsPositiveSmi(Node* a) {
  return WordEqual(WordAnd(BitcastTaggedToWord(a),
                           IntPtrConstant(kSmiTagMask | kSmiSignMask)),
                   IntPtrConstant(0));
}

Node* CodeStubAssembler::WordIsWordAligned(Node* word) {
  return WordEqual(IntPtrConstant(0),
                   WordAnd(word, IntPtrConstant((1 << kPointerSizeLog2) - 1)));
}

void CodeStubAssembler::BranchIfPrototypesHaveNoElements(
    Node* receiver_map, Label* definitely_no_elements,
    Label* possibly_elements) {
  Variable var_map(this, MachineRepresentation::kTagged, receiver_map);
  Label loop_body(this, &var_map);
  Node* empty_elements = LoadRoot(Heap::kEmptyFixedArrayRootIndex);
  Goto(&loop_body);

  Bind(&loop_body);
  {
    Node* map = var_map.value();
    Node* prototype = LoadMapPrototype(map);
    GotoIf(WordEqual(prototype, NullConstant()), definitely_no_elements);
    Node* prototype_map = LoadMap(prototype);
    // Pessimistically assume elements if a Proxy, Special API Object,
    // or JSValue wrapper is found on the prototype chain. After this
    // instance type check, it's not necessary to check for interceptors or
    // access checks.
    GotoIf(Int32LessThanOrEqual(LoadMapInstanceType(prototype_map),
                                Int32Constant(LAST_CUSTOM_ELEMENTS_RECEIVER)),
           possibly_elements);
    GotoIf(WordNotEqual(LoadElements(prototype), empty_elements),
           possibly_elements);
    var_map.Bind(prototype_map);
    Goto(&loop_body);
  }
}

void CodeStubAssembler::BranchIfJSReceiver(Node* object, Label* if_true,
                                           Label* if_false) {
  GotoIf(TaggedIsSmi(object), if_false);
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  Branch(Int32GreaterThanOrEqual(LoadInstanceType(object),
                                 Int32Constant(FIRST_JS_RECEIVER_TYPE)),
         if_true, if_false);
}

void CodeStubAssembler::BranchIfJSObject(Node* object, Label* if_true,
                                         Label* if_false) {
  GotoIf(TaggedIsSmi(object), if_false);
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  Branch(Int32GreaterThanOrEqual(LoadInstanceType(object),
                                 Int32Constant(FIRST_JS_OBJECT_TYPE)),
         if_true, if_false);
}

void CodeStubAssembler::BranchIfFastJSArray(
    Node* object, Node* context, CodeStubAssembler::FastJSArrayAccessMode mode,
    Label* if_true, Label* if_false) {
  // Bailout if receiver is a Smi.
  GotoIf(TaggedIsSmi(object), if_false);

  Node* map = LoadMap(object);

  // Bailout if instance type is not JS_ARRAY_TYPE.
  GotoIf(Word32NotEqual(LoadMapInstanceType(map), Int32Constant(JS_ARRAY_TYPE)),
         if_false);

  Node* elements_kind = LoadMapElementsKind(map);

  // Bailout if receiver has slow elements.
  GotoIfNot(IsFastElementsKind(elements_kind), if_false);

  // Check prototype chain if receiver does not have packed elements.
  if (mode == FastJSArrayAccessMode::INBOUNDS_READ) {
    GotoIfNot(IsHoleyFastElementsKind(elements_kind), if_true);
  }
  BranchIfPrototypesHaveNoElements(map, if_true, if_false);
}

Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes,
                                              AllocationFlags flags,
                                              Node* top_address,
                                              Node* limit_address) {
  Node* top = Load(MachineType::Pointer(), top_address);
  Node* limit = Load(MachineType::Pointer(), limit_address);

  // If there's not enough space, call the runtime.
  Variable result(this, MachineRepresentation::kTagged);
  Label runtime_call(this, Label::kDeferred), no_runtime_call(this);
  Label merge_runtime(this, &result);

  if (flags & kAllowLargeObjectAllocation) {
    Label next(this);
    GotoIf(IsRegularHeapObjectSize(size_in_bytes), &next);

    Node* runtime_flags = SmiConstant(
        Smi::FromInt(AllocateDoubleAlignFlag::encode(false) |
                     AllocateTargetSpace::encode(AllocationSpace::LO_SPACE)));
    Node* const runtime_result =
        CallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(),
                    SmiTag(size_in_bytes), runtime_flags);
    result.Bind(runtime_result);
    Goto(&merge_runtime);

    Bind(&next);
  }

  Node* new_top = IntPtrAdd(top, size_in_bytes);
  Branch(UintPtrGreaterThanOrEqual(new_top, limit), &runtime_call,
         &no_runtime_call);

  Bind(&runtime_call);
  Node* runtime_result;
  if (flags & kPretenured) {
    Node* runtime_flags = SmiConstant(
        Smi::FromInt(AllocateDoubleAlignFlag::encode(false) |
                     AllocateTargetSpace::encode(AllocationSpace::OLD_SPACE)));
    runtime_result =
        CallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(),
                    SmiTag(size_in_bytes), runtime_flags);
  } else {
    runtime_result = CallRuntime(Runtime::kAllocateInNewSpace,
                                 NoContextConstant(), SmiTag(size_in_bytes));
  }
  result.Bind(runtime_result);
  Goto(&merge_runtime);

  // When there is enough space, return `top' and bump it up.
  Bind(&no_runtime_call);
  Node* no_runtime_result = top;
  StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
                      new_top);
  no_runtime_result = BitcastWordToTagged(
      IntPtrAdd(no_runtime_result, IntPtrConstant(kHeapObjectTag)));
  result.Bind(no_runtime_result);
  Goto(&merge_runtime);

  Bind(&merge_runtime);
  return result.value();
}

Node* CodeStubAssembler::AllocateRawAligned(Node* size_in_bytes,
                                            AllocationFlags flags,
                                            Node* top_address,
                                            Node* limit_address) {
  Node* top = Load(MachineType::Pointer(), top_address);
  Node* limit = Load(MachineType::Pointer(), limit_address);
  Variable adjusted_size(this, MachineType::PointerRepresentation(),
                         size_in_bytes);
  if (flags & kDoubleAlignment) {
    Label aligned(this), not_aligned(this), merge(this, &adjusted_size);
    Branch(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)), &not_aligned,
           &aligned);

    Bind(&not_aligned);
    Node* not_aligned_size =
        IntPtrAdd(size_in_bytes, IntPtrConstant(kPointerSize));
    adjusted_size.Bind(not_aligned_size);
    Goto(&merge);

    Bind(&aligned);
    Goto(&merge);

    Bind(&merge);
  }

  Variable address(
      this, MachineRepresentation::kTagged,
      AllocateRawUnaligned(adjusted_size.value(), kNone, top, limit));

  Label needs_filler(this), doesnt_need_filler(this),
      merge_address(this, &address);
  Branch(IntPtrEqual(adjusted_size.value(), size_in_bytes), &doesnt_need_filler,
         &needs_filler);

  Bind(&needs_filler);
  // Store a filler and increase the address by kPointerSize.
  StoreNoWriteBarrier(MachineType::PointerRepresentation(), top,
                      LoadRoot(Heap::kOnePointerFillerMapRootIndex));
  address.Bind(BitcastWordToTagged(
      IntPtrAdd(address.value(), IntPtrConstant(kPointerSize))));
  Goto(&merge_address);

  Bind(&doesnt_need_filler);
  Goto(&merge_address);

  Bind(&merge_address);
  // Update the top.
  StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
                      IntPtrAdd(top, adjusted_size.value()));
  return address.value();
}

Node* CodeStubAssembler::Allocate(Node* size_in_bytes, AllocationFlags flags) {
  Comment("Allocate");
  bool const new_space = !(flags & kPretenured);
  Node* top_address = ExternalConstant(
      new_space
          ? ExternalReference::new_space_allocation_top_address(isolate())
          : ExternalReference::old_space_allocation_top_address(isolate()));
  DCHECK_EQ(kPointerSize,
            ExternalReference::new_space_allocation_limit_address(isolate())
                    .address() -
                ExternalReference::new_space_allocation_top_address(isolate())
                    .address());
  DCHECK_EQ(kPointerSize,
            ExternalReference::old_space_allocation_limit_address(isolate())
                    .address() -
                ExternalReference::old_space_allocation_top_address(isolate())
                    .address());
  Node* limit_address = IntPtrAdd(top_address, IntPtrConstant(kPointerSize));

#ifdef V8_HOST_ARCH_32_BIT
  if (flags & kDoubleAlignment) {
    return AllocateRawAligned(size_in_bytes, flags, top_address, limit_address);
  }
#endif

  return AllocateRawUnaligned(size_in_bytes, flags, top_address, limit_address);
}

Node* CodeStubAssembler::Allocate(int size_in_bytes, AllocationFlags flags) {
  return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
}

Node* CodeStubAssembler::InnerAllocate(Node* previous, Node* offset) {
  return BitcastWordToTagged(IntPtrAdd(BitcastTaggedToWord(previous), offset));
}

Node* CodeStubAssembler::InnerAllocate(Node* previous, int offset) {
  return InnerAllocate(previous, IntPtrConstant(offset));
}

Node* CodeStubAssembler::IsRegularHeapObjectSize(Node* size) {
  return UintPtrLessThanOrEqual(size,
                                IntPtrConstant(kMaxRegularHeapObjectSize));
}

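// Implements ES ToBoolean as a branch: false, Smi 0, -0.0, NaN, the empty
// string, and undetectable objects (null, undefined, document.all) take
// {if_false}; every other value takes {if_true}.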
void CodeStubAssembler::BranchIfToBooleanIsTrue(Node* value, Label* if_true,
                                                Label* if_false) {
  Label if_valueissmi(this), if_valueisnotsmi(this),
      if_valueisheapnumber(this, Label::kDeferred);

  // Rule out false {value}.
  GotoIf(WordEqual(value, BooleanConstant(false)), if_false);

  // Check if {value} is a Smi or a HeapObject.
  Branch(TaggedIsSmi(value), &if_valueissmi, &if_valueisnotsmi);

  Bind(&if_valueissmi);
  {
    // The {value} is a Smi, only need to check against zero.
    BranchIfSmiEqual(value, SmiConstant(0), if_false, if_true);
  }

  Bind(&if_valueisnotsmi);
  {
    // Check if {value} is the empty string.
    GotoIf(IsEmptyString(value), if_false);

    // The {value} is a HeapObject, load its map.
    Node* value_map = LoadMap(value);

    // Only null, undefined and document.all have the undetectable bit set,
    // so we can return false immediately when that bit is set.
    Node* value_map_bitfield = LoadMapBitField(value_map);
    Node* value_map_undetectable =
        Word32And(value_map_bitfield, Int32Constant(1 << Map::kIsUndetectable));

    // Check if the {value} is undetectable.
    GotoIfNot(Word32Equal(value_map_undetectable, Int32Constant(0)), if_false);

    // We still need to handle numbers specially, but all other {value}s
    // that make it here yield true.
    Branch(IsHeapNumberMap(value_map), &if_valueisheapnumber, if_true);

    Bind(&if_valueisheapnumber);
    {
      // Load the floating point value of {value}.
      Node* value_value = LoadObjectField(value, HeapNumber::kValueOffset,
                                          MachineType::Float64());

      // Check if the floating point {value} is neither 0.0, -0.0 nor NaN.
      Branch(Float64LessThan(Float64Constant(0.0), Float64Abs(value_value)),
             if_true, if_false);
    }
  }
}

Node* CodeStubAssembler::LoadFromFrame(int offset, MachineType rep) {
  Node* frame_pointer = LoadFramePointer();
  return Load(rep, frame_pointer, IntPtrConstant(offset));
}

Node* CodeStubAssembler::LoadFromParentFrame(int offset, MachineType rep) {
  Node* frame_pointer = LoadParentFramePointer();
  return Load(rep, frame_pointer, IntPtrConstant(offset));
}

Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset,
                                          MachineType rep) {
  return Load(rep, buffer, IntPtrConstant(offset));
}

Node* CodeStubAssembler::LoadObjectField(Node* object, int offset,
                                         MachineType rep) {
  return Load(rep, object, IntPtrConstant(offset - kHeapObjectTag));
}

Node* CodeStubAssembler::LoadObjectField(Node* object, Node* offset,
                                         MachineType rep) {
  return Load(rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)));
}

Node* CodeStubAssembler::LoadAndUntagObjectField(Node* object, int offset) {
  if (Is64()) {
#if V8_TARGET_LITTLE_ENDIAN
    offset += kPointerSize / 2;
#endif
    return ChangeInt32ToInt64(
        LoadObjectField(object, offset, MachineType::Int32()));
  } else {
    return SmiToWord(LoadObjectField(object, offset, MachineType::AnyTagged()));
  }
}

Node* CodeStubAssembler::LoadAndUntagToWord32ObjectField(Node* object,
                                                         int offset) {
  if (Is64()) {
#if V8_TARGET_LITTLE_ENDIAN
    offset += kPointerSize / 2;
#endif
    return LoadObjectField(object, offset, MachineType::Int32());
  } else {
    return SmiToWord32(
        LoadObjectField(object, offset, MachineType::AnyTagged()));
  }
}

Node* CodeStubAssembler::LoadAndUntagSmi(Node* base, int index) {
  if (Is64()) {
#if V8_TARGET_LITTLE_ENDIAN
    index += kPointerSize / 2;
#endif
    return ChangeInt32ToInt64(
        Load(MachineType::Int32(), base, IntPtrConstant(index)));
  } else {
    return SmiToWord(
        Load(MachineType::AnyTagged(), base, IntPtrConstant(index)));
  }
}

Node* CodeStubAssembler::LoadAndUntagToWord32Root(
    Heap::RootListIndex root_index) {
  Node* roots_array_start =
      ExternalConstant(ExternalReference::roots_array_start(isolate()));
  int index = root_index * kPointerSize;
  if (Is64()) {
#if V8_TARGET_LITTLE_ENDIAN
    index += kPointerSize / 2;
#endif
    return Load(MachineType::Int32(), roots_array_start, IntPtrConstant(index));
  } else {
    return SmiToWord32(Load(MachineType::AnyTagged(), roots_array_start,
                            IntPtrConstant(index)));
  }
}

Node* CodeStubAssembler::StoreAndTagSmi(Node* base, int offset, Node* value) {
  if (Is64()) {
    int zero_offset = offset + kPointerSize / 2;
    int payload_offset = offset;
#if V8_TARGET_LITTLE_ENDIAN
    std::swap(zero_offset, payload_offset);
#endif
    StoreNoWriteBarrier(MachineRepresentation::kWord32, base,
                        IntPtrConstant(zero_offset), Int32Constant(0));
    return StoreNoWriteBarrier(MachineRepresentation::kWord32, base,
                               IntPtrConstant(payload_offset),
                               TruncateInt64ToInt32(value));
  } else {
    return StoreNoWriteBarrier(MachineRepresentation::kTaggedSigned, base,
                               IntPtrConstant(offset), SmiTag(value));
  }
}

Node* CodeStubAssembler::LoadHeapNumberValue(Node* object) {
  return LoadObjectField(object, HeapNumber::kValueOffset,
                         MachineType::Float64());
}

Node* CodeStubAssembler::LoadMap(Node* object) {
  return LoadObjectField(object, HeapObject::kMapOffset);
}

Node* CodeStubAssembler::LoadInstanceType(Node* object) {
  return LoadMapInstanceType(LoadMap(object));
}

Node* CodeStubAssembler::HasInstanceType(Node* object,
                                         InstanceType instance_type) {
  return Word32Equal(LoadInstanceType(object), Int32Constant(instance_type));
}

Node* CodeStubAssembler::DoesntHaveInstanceType(Node* object,
                                                InstanceType instance_type) {
  return Word32NotEqual(LoadInstanceType(object), Int32Constant(instance_type));
}

Node* CodeStubAssembler::LoadProperties(Node* object) {
  return LoadObjectField(object, JSObject::kPropertiesOffset);
}

Node* CodeStubAssembler::LoadElements(Node* object) {
  return LoadObjectField(object, JSObject::kElementsOffset);
}

Node* CodeStubAssembler::LoadJSArrayLength(Node* array) {
  CSA_ASSERT(this, IsJSArray(array));
  return LoadObjectField(array, JSArray::kLengthOffset);
}

Node* CodeStubAssembler::LoadFixedArrayBaseLength(Node* array) {
  return LoadObjectField(array, FixedArrayBase::kLengthOffset);
}

Node* CodeStubAssembler::LoadAndUntagFixedArrayBaseLength(Node* array) {
  return LoadAndUntagObjectField(array, FixedArrayBase::kLengthOffset);
}

Node* CodeStubAssembler::LoadMapBitField(Node* map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  return LoadObjectField(map, Map::kBitFieldOffset, MachineType::Uint8());
}

Node* CodeStubAssembler::LoadMapBitField2(Node* map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  return LoadObjectField(map, Map::kBitField2Offset, MachineType::Uint8());
}

Node* CodeStubAssembler::LoadMapBitField3(Node* map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  return LoadObjectField(map, Map::kBitField3Offset, MachineType::Uint32());
}

Node* CodeStubAssembler::LoadMapInstanceType(Node* map) {
  return LoadObjectField(map, Map::kInstanceTypeOffset, MachineType::Uint8());
}

Node* CodeStubAssembler::LoadMapElementsKind(Node* map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  Node* bit_field2 = LoadMapBitField2(map);
  return DecodeWord32<Map::ElementsKindBits>(bit_field2);
}

Node* CodeStubAssembler::LoadMapDescriptors(Node* map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  return LoadObjectField(map, Map::kDescriptorsOffset);
}

Node* CodeStubAssembler::LoadMapPrototype(Node* map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  return LoadObjectField(map, Map::kPrototypeOffset);
}

Node* CodeStubAssembler::LoadMapPrototypeInfo(Node* map,
                                              Label* if_no_proto_info) {
  CSA_ASSERT(this, IsMap(map));
  Node* prototype_info =
      LoadObjectField(map, Map::kTransitionsOrPrototypeInfoOffset);
  GotoIf(TaggedIsSmi(prototype_info), if_no_proto_info);
  GotoIfNot(WordEqual(LoadMap(prototype_info),
                      LoadRoot(Heap::kPrototypeInfoMapRootIndex)),
            if_no_proto_info);
  return prototype_info;
}

Node* CodeStubAssembler::LoadMapInstanceSize(Node* map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  return ChangeUint32ToWord(
      LoadObjectField(map, Map::kInstanceSizeOffset, MachineType::Uint8()));
}

Node* CodeStubAssembler::LoadMapInobjectProperties(Node* map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  // See Map::GetInObjectProperties() for details.
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  CSA_ASSERT(this,
             Int32GreaterThanOrEqual(LoadMapInstanceType(map),
                                     Int32Constant(FIRST_JS_OBJECT_TYPE)));
  return ChangeUint32ToWord(LoadObjectField(
      map, Map::kInObjectPropertiesOrConstructorFunctionIndexOffset,
      MachineType::Uint8()));
}

Node* CodeStubAssembler::LoadMapConstructorFunctionIndex(Node* map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  // See Map::GetConstructorFunctionIndex() for details.
  STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
  CSA_ASSERT(this, Int32LessThanOrEqual(LoadMapInstanceType(map),
                                        Int32Constant(LAST_PRIMITIVE_TYPE)));
  return ChangeUint32ToWord(LoadObjectField(
      map, Map::kInObjectPropertiesOrConstructorFunctionIndexOffset,
      MachineType::Uint8()));
}

Node* CodeStubAssembler::LoadMapConstructor(Node* map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  Variable result(this, MachineRepresentation::kTagged,
                  LoadObjectField(map, Map::kConstructorOrBackPointerOffset));

  Label done(this), loop(this, &result);
  Goto(&loop);
  Bind(&loop);
  {
    GotoIf(TaggedIsSmi(result.value()), &done);
    Node* is_map_type =
        Word32Equal(LoadInstanceType(result.value()), Int32Constant(MAP_TYPE));
    GotoIfNot(is_map_type, &done);
    result.Bind(
        LoadObjectField(result.value(), Map::kConstructorOrBackPointerOffset));
    Goto(&loop);
  }
  Bind(&done);
  return result.value();
}

Node* CodeStubAssembler::LoadSharedFunctionInfoSpecialField(
    Node* shared, int offset, ParameterMode mode) {
  if (Is64()) {
    Node* result = LoadObjectField(shared, offset, MachineType::Int32());
    if (mode == SMI_PARAMETERS) {
      result = SmiTag(result);
    } else {
      result = ChangeUint32ToWord(result);
    }
    return result;
  } else {
    Node* result = LoadObjectField(shared, offset);
    if (mode != SMI_PARAMETERS) {
      result = SmiUntag(result);
    }
    return result;
  }
}

Node* CodeStubAssembler::LoadNameHashField(Node* name) {
  CSA_ASSERT(this, IsName(name));
  return LoadObjectField(name, Name::kHashFieldOffset, MachineType::Uint32());
}

Node* CodeStubAssembler::LoadNameHash(Node* name, Label* if_hash_not_computed) {
  Node* hash_field = LoadNameHashField(name);
  if (if_hash_not_computed != nullptr) {
    GotoIf(Word32Equal(
               Word32And(hash_field, Int32Constant(Name::kHashNotComputedMask)),
               Int32Constant(0)),
           if_hash_not_computed);
  }
  return Word32Shr(hash_field, Int32Constant(Name::kHashShift));
}

Node* CodeStubAssembler::LoadStringLength(Node* object) {
  CSA_ASSERT(this, IsString(object));
  return LoadObjectField(object, String::kLengthOffset);
}

Node* CodeStubAssembler::LoadJSValueValue(Node* object) {
  CSA_ASSERT(this, IsJSValue(object));
  return LoadObjectField(object, JSValue::kValueOffset);
}

Node* CodeStubAssembler::LoadWeakCellValueUnchecked(Node* weak_cell) {
  // TODO(ishell): fix callers.
  return LoadObjectField(weak_cell, WeakCell::kValueOffset);
}

Node* CodeStubAssembler::LoadWeakCellValue(Node* weak_cell, Label* if_cleared) {
  CSA_ASSERT(this, IsWeakCell(weak_cell));
  Node* value = LoadWeakCellValueUnchecked(weak_cell);
  if (if_cleared != nullptr) {
    GotoIf(WordEqual(value, IntPtrConstant(0)), if_cleared);
  }
  return value;
}

Node* CodeStubAssembler::LoadFixedArrayElement(Node* object, Node* index_node,
                                               int additional_offset,
                                               ParameterMode parameter_mode) {
  int32_t header_size =
      FixedArray::kHeaderSize + additional_offset - kHeapObjectTag;
  Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS,
                                        parameter_mode, header_size);
  return Load(MachineType::AnyTagged(), object, offset);
}

Node* CodeStubAssembler::LoadFixedTypedArrayElement(
    Node* data_pointer, Node* index_node, ElementsKind elements_kind,
    ParameterMode parameter_mode) {
  Node* offset =
      ElementOffsetFromIndex(index_node, elements_kind, parameter_mode, 0);
  MachineType type;
  switch (elements_kind) {
    case UINT8_ELEMENTS: /* fall through */
    case UINT8_CLAMPED_ELEMENTS:
      type = MachineType::Uint8();
      break;
    case INT8_ELEMENTS:
      type = MachineType::Int8();
      break;
    case UINT16_ELEMENTS:
      type = MachineType::Uint16();
      break;
    case INT16_ELEMENTS:
      type = MachineType::Int16();
      break;
    case UINT32_ELEMENTS:
      type = MachineType::Uint32();
      break;
    case INT32_ELEMENTS:
      type = MachineType::Int32();
      break;
    case FLOAT32_ELEMENTS:
      type = MachineType::Float32();
      break;
    case FLOAT64_ELEMENTS:
      type = MachineType::Float64();
      break;
    default:
      UNREACHABLE();
  }
  return Load(type, data_pointer, offset);
}

Node* CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement(
    Node* object, Node* index_node, int additional_offset,
    ParameterMode parameter_mode) {
  int32_t header_size =
      FixedArray::kHeaderSize + additional_offset - kHeapObjectTag;
#if V8_TARGET_LITTLE_ENDIAN
  if (Is64()) {
    header_size += kPointerSize / 2;
  }
#endif
  Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS,
                                        parameter_mode, header_size);
  if (Is64()) {
    return Load(MachineType::Int32(), object, offset);
  } else {
    return SmiToWord32(Load(MachineType::AnyTagged(), object, offset));
  }
}

Node* CodeStubAssembler::LoadFixedDoubleArrayElement(
    Node* object, Node* index_node, MachineType machine_type,
    int additional_offset, ParameterMode parameter_mode, Label* if_hole) {
  CSA_ASSERT(this, IsFixedDoubleArray(object));
  int32_t header_size =
      FixedDoubleArray::kHeaderSize + additional_offset - kHeapObjectTag;
  Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_DOUBLE_ELEMENTS,
                                        parameter_mode, header_size);
  return LoadDoubleWithHoleCheck(object, offset, if_hole, machine_type);
}

Node* CodeStubAssembler::LoadDoubleWithHoleCheck(Node* base, Node* offset,
                                                 Label* if_hole,
                                                 MachineType machine_type) {
  if (if_hole) {
    // TODO(ishell): Compare only the upper part for the hole once the
    // compiler is able to fold addition of already complex |offset| with
    // |kIeeeDoubleExponentWordOffset| into one addressing mode.
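    // The hole is stored as a NaN with the distinguished bit pattern
    // kHoleNanInt64; comparing the raw bits (or just the upper word on
    // 32-bit targets) is enough to detect it without FP comparisons.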
1275 if (Is64()) {
1276 Node* element = Load(MachineType::Uint64(), base, offset);
1277 GotoIf(Word64Equal(element, Int64Constant(kHoleNanInt64)), if_hole);
1278 } else {
1279 Node* element_upper = Load(
1280 MachineType::Uint32(), base,
1281 IntPtrAdd(offset, IntPtrConstant(kIeeeDoubleExponentWordOffset)));
1282 GotoIf(Word32Equal(element_upper, Int32Constant(kHoleNanUpper32)),
1283 if_hole);
1284 }
1285 }
1286 if (machine_type.IsNone()) {
1287 // This means the actual value is not needed.
1288 return nullptr;
1289 }
1290 return Load(machine_type, base, offset);
1291 }
1292
LoadContextElement(Node * context,int slot_index)1293 Node* CodeStubAssembler::LoadContextElement(Node* context, int slot_index) {
1294 int offset = Context::SlotOffset(slot_index);
1295 return Load(MachineType::AnyTagged(), context, IntPtrConstant(offset));
1296 }
1297
LoadContextElement(Node * context,Node * slot_index)1298 Node* CodeStubAssembler::LoadContextElement(Node* context, Node* slot_index) {
1299 Node* offset =
1300 IntPtrAdd(WordShl(slot_index, kPointerSizeLog2),
1301 IntPtrConstant(Context::kHeaderSize - kHeapObjectTag));
1302 return Load(MachineType::AnyTagged(), context, offset);
1303 }
1304
StoreContextElement(Node * context,int slot_index,Node * value)1305 Node* CodeStubAssembler::StoreContextElement(Node* context, int slot_index,
1306 Node* value) {
1307 int offset = Context::SlotOffset(slot_index);
1308 return Store(context, IntPtrConstant(offset), value);
1309 }
1310
StoreContextElement(Node * context,Node * slot_index,Node * value)1311 Node* CodeStubAssembler::StoreContextElement(Node* context, Node* slot_index,
1312 Node* value) {
1313 Node* offset =
1314 IntPtrAdd(WordShl(slot_index, kPointerSizeLog2),
1315 IntPtrConstant(Context::kHeaderSize - kHeapObjectTag));
1316 return Store(context, offset, value);
1317 }
1318
StoreContextElementNoWriteBarrier(Node * context,int slot_index,Node * value)1319 Node* CodeStubAssembler::StoreContextElementNoWriteBarrier(Node* context,
1320 int slot_index,
1321 Node* value) {
1322 int offset = Context::SlotOffset(slot_index);
1323 return StoreNoWriteBarrier(MachineRepresentation::kTagged, context,
1324 IntPtrConstant(offset), value);
1325 }
1326
LoadNativeContext(Node * context)1327 Node* CodeStubAssembler::LoadNativeContext(Node* context) {
1328 return LoadContextElement(context, Context::NATIVE_CONTEXT_INDEX);
1329 }
1330
LoadJSArrayElementsMap(ElementsKind kind,Node * native_context)1331 Node* CodeStubAssembler::LoadJSArrayElementsMap(ElementsKind kind,
1332 Node* native_context) {
1333 CSA_ASSERT(this, IsNativeContext(native_context));
1334 return LoadContextElement(native_context, Context::ArrayMapIndex(kind));
1335 }
1336
StoreHeapNumberValue(Node * object,Node * value)1337 Node* CodeStubAssembler::StoreHeapNumberValue(Node* object, Node* value) {
1338 return StoreObjectFieldNoWriteBarrier(object, HeapNumber::kValueOffset, value,
1339 MachineRepresentation::kFloat64);
1340 }
1341
StoreObjectField(Node * object,int offset,Node * value)1342 Node* CodeStubAssembler::StoreObjectField(
1343 Node* object, int offset, Node* value) {
1344 DCHECK_NE(HeapObject::kMapOffset, offset); // Use StoreMap instead.
1345 return Store(object, IntPtrConstant(offset - kHeapObjectTag), value);
1346 }
1347
StoreObjectField(Node * object,Node * offset,Node * value)1348 Node* CodeStubAssembler::StoreObjectField(Node* object, Node* offset,
1349 Node* value) {
1350 int const_offset;
1351 if (ToInt32Constant(offset, const_offset)) {
1352 return StoreObjectField(object, const_offset, value);
1353 }
1354 return Store(object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)),
1355 value);
1356 }
1357
StoreObjectFieldNoWriteBarrier(Node * object,int offset,Node * value,MachineRepresentation rep)1358 Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
1359 Node* object, int offset, Node* value, MachineRepresentation rep) {
1360 return StoreNoWriteBarrier(rep, object,
1361 IntPtrConstant(offset - kHeapObjectTag), value);
1362 }
1363
StoreObjectFieldNoWriteBarrier(Node * object,Node * offset,Node * value,MachineRepresentation rep)1364 Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
1365 Node* object, Node* offset, Node* value, MachineRepresentation rep) {
1366 int const_offset;
1367 if (ToInt32Constant(offset, const_offset)) {
1368 return StoreObjectFieldNoWriteBarrier(object, const_offset, value, rep);
1369 }
1370 return StoreNoWriteBarrier(
1371 rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), value);
1372 }
1373
StoreMap(Node * object,Node * map)1374 Node* CodeStubAssembler::StoreMap(Node* object, Node* map) {
1375 CSA_SLOW_ASSERT(this, IsMap(map));
1376 return StoreWithMapWriteBarrier(
1377 object, IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag), map);
1378 }
1379
StoreMapNoWriteBarrier(Node * object,Heap::RootListIndex map_root_index)1380 Node* CodeStubAssembler::StoreMapNoWriteBarrier(
1381 Node* object, Heap::RootListIndex map_root_index) {
1382 return StoreMapNoWriteBarrier(object, LoadRoot(map_root_index));
1383 }
1384
StoreMapNoWriteBarrier(Node * object,Node * map)1385 Node* CodeStubAssembler::StoreMapNoWriteBarrier(Node* object, Node* map) {
1386 CSA_SLOW_ASSERT(this, IsMap(map));
1387 return StoreNoWriteBarrier(
1388 MachineRepresentation::kTagged, object,
1389 IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag), map);
1390 }
1391
StoreObjectFieldRoot(Node * object,int offset,Heap::RootListIndex root_index)1392 Node* CodeStubAssembler::StoreObjectFieldRoot(Node* object, int offset,
1393 Heap::RootListIndex root_index) {
1394 if (Heap::RootIsImmortalImmovable(root_index)) {
1395 return StoreObjectFieldNoWriteBarrier(object, offset, LoadRoot(root_index));
1396 } else {
1397 return StoreObjectField(object, offset, LoadRoot(root_index));
1398 }
1399 }
1400
StoreFixedArrayElement(Node * object,Node * index_node,Node * value,WriteBarrierMode barrier_mode,int additional_offset,ParameterMode parameter_mode)1401 Node* CodeStubAssembler::StoreFixedArrayElement(Node* object, Node* index_node,
1402 Node* value,
1403 WriteBarrierMode barrier_mode,
1404 int additional_offset,
1405 ParameterMode parameter_mode) {
1406 DCHECK(barrier_mode == SKIP_WRITE_BARRIER ||
1407 barrier_mode == UPDATE_WRITE_BARRIER);
1408 int header_size =
1409 FixedArray::kHeaderSize + additional_offset - kHeapObjectTag;
1410 Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS,
1411 parameter_mode, header_size);
1412 if (barrier_mode == SKIP_WRITE_BARRIER) {
1413 return StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset,
1414 value);
1415 } else {
1416 return Store(object, offset, value);
1417 }
1418 }
1419
StoreFixedDoubleArrayElement(Node * object,Node * index_node,Node * value,ParameterMode parameter_mode)1420 Node* CodeStubAssembler::StoreFixedDoubleArrayElement(
1421 Node* object, Node* index_node, Node* value, ParameterMode parameter_mode) {
1422 CSA_ASSERT(this, IsFixedDoubleArray(object));
1423 Node* offset =
1424 ElementOffsetFromIndex(index_node, FAST_DOUBLE_ELEMENTS, parameter_mode,
1425 FixedArray::kHeaderSize - kHeapObjectTag);
1426 MachineRepresentation rep = MachineRepresentation::kFloat64;
1427 return StoreNoWriteBarrier(rep, object, offset, value);
1428 }
1429
BuildAppendJSArray(ElementsKind kind,Node * context,Node * array,CodeStubArguments & args,Variable & arg_index,Label * bailout)1430 Node* CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* context,
1431 Node* array,
1432 CodeStubArguments& args,
1433 Variable& arg_index,
1434 Label* bailout) {
1435 Comment("BuildAppendJSArray: %s", ElementsKindToString(kind));
1436 Label pre_bailout(this);
1437 Label success(this);
1438 Variable var_tagged_length(this, MachineRepresentation::kTagged);
1439 ParameterMode mode = OptimalParameterMode();
1440 Variable var_length(this, OptimalParameterRepresentation(),
1441 TaggedToParameter(LoadJSArrayLength(array), mode));
1442 Variable var_elements(this, MachineRepresentation::kTagged,
1443 LoadElements(array));
1444 Node* capacity =
1445 TaggedToParameter(LoadFixedArrayBaseLength(var_elements.value()), mode);

  // Resize the capacity of the fixed array if it doesn't fit.
  Label fits(this, &var_elements);
  Node* first = arg_index.value();
  Node* growth = IntPtrSub(args.GetLength(), first);
  Node* new_length =
      IntPtrOrSmiAdd(WordToParameter(growth, mode), var_length.value(), mode);
  GotoIfNot(IntPtrOrSmiGreaterThan(new_length, capacity, mode), &fits);
  Node* new_capacity = CalculateNewElementsCapacity(new_length, mode);
  var_elements.Bind(GrowElementsCapacity(array, var_elements.value(), kind,
                                         kind, capacity, new_capacity, mode,
                                         &pre_bailout));
  Goto(&fits);
  Bind(&fits);
  Node* elements = var_elements.value();

  // Push each argument onto the end of the array now that there is enough
  // capacity.
  CodeStubAssembler::VariableList push_vars({&var_length}, zone());
  args.ForEach(
      push_vars,
      [this, kind, mode, elements, &var_length, &pre_bailout](Node* arg) {
        if (IsFastSmiElementsKind(kind)) {
          GotoIf(TaggedIsNotSmi(arg), &pre_bailout);
        } else if (IsFastDoubleElementsKind(kind)) {
          GotoIfNotNumber(arg, &pre_bailout);
        }
        if (IsFastDoubleElementsKind(kind)) {
          Node* double_value = ChangeNumberToFloat64(arg);
          StoreFixedDoubleArrayElement(elements, var_length.value(),
                                       Float64SilenceNaN(double_value), mode);
        } else {
          WriteBarrierMode barrier_mode = IsFastSmiElementsKind(kind)
                                              ? SKIP_WRITE_BARRIER
                                              : UPDATE_WRITE_BARRIER;
          StoreFixedArrayElement(elements, var_length.value(), arg,
                                 barrier_mode, 0, mode);
        }
        Increment(var_length, 1, mode);
      },
      first, nullptr);
  {
    Node* length = ParameterToTagged(var_length.value(), mode);
    var_tagged_length.Bind(length);
    StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
    Goto(&success);
  }

  Bind(&pre_bailout);
  {
    Node* length = ParameterToTagged(var_length.value(), mode);
    var_tagged_length.Bind(length);
    Node* diff = SmiSub(length, LoadJSArrayLength(array));
    StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
    arg_index.Bind(IntPtrAdd(arg_index.value(), SmiUntag(diff)));
    Goto(bailout);
  }

  Bind(&success);
  return var_tagged_length.value();
}

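// Note: only the map is written here; the value field is left uninitialized
// and is expected to be filled in by the caller (see
// AllocateHeapNumberWithValue below).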
Node* CodeStubAssembler::AllocateHeapNumber(MutableMode mode) {
  Node* result = Allocate(HeapNumber::kSize, kNone);
  Heap::RootListIndex heap_map_index =
      mode == IMMUTABLE ? Heap::kHeapNumberMapRootIndex
                        : Heap::kMutableHeapNumberMapRootIndex;
  StoreMapNoWriteBarrier(result, heap_map_index);
  return result;
}

Node* CodeStubAssembler::AllocateHeapNumberWithValue(Node* value,
                                                     MutableMode mode) {
  Node* result = AllocateHeapNumber(mode);
  StoreHeapNumberValue(result, value);
  return result;
}

Node* CodeStubAssembler::AllocateSeqOneByteString(int length,
                                                  AllocationFlags flags) {
  Comment("AllocateSeqOneByteString");
  if (length == 0) {
    return LoadRoot(Heap::kempty_stringRootIndex);
  }
  Node* result = Allocate(SeqOneByteString::SizeFor(length), flags);
  DCHECK(Heap::RootIsImmortalImmovable(Heap::kOneByteStringMapRootIndex));
  StoreMapNoWriteBarrier(result, Heap::kOneByteStringMapRootIndex);
  StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
                                 SmiConstant(Smi::FromInt(length)));
  // Initialize both used and unused parts of hash field slot at once.
  StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldSlot,
                                 IntPtrConstant(String::kEmptyHashField),
                                 MachineType::PointerRepresentation());
  return result;
}

Node* CodeStubAssembler::AllocateSeqOneByteString(Node* context, Node* length,
                                                  ParameterMode mode,
                                                  AllocationFlags flags) {
  Comment("AllocateSeqOneByteString");
  Variable var_result(this, MachineRepresentation::kTagged);

  // Compute the SeqOneByteString size and check if it fits into new space.
  Label if_lengthiszero(this), if_sizeissmall(this),
      if_notsizeissmall(this, Label::kDeferred), if_join(this);
  GotoIf(WordEqual(length, IntPtrOrSmiConstant(0, mode)), &if_lengthiszero);

  Node* raw_size = GetArrayAllocationSize(
      length, UINT8_ELEMENTS, mode,
      SeqOneByteString::kHeaderSize + kObjectAlignmentMask);
  Node* size = WordAnd(raw_size, IntPtrConstant(~kObjectAlignmentMask));
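  // Adding kObjectAlignmentMask above and masking it off here rounds the size
  // up to the next allocation boundary, e.g. with 8-byte alignment a 21-byte
  // string rounds up to 24 bytes.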
  Branch(IntPtrLessThanOrEqual(size, IntPtrConstant(kMaxRegularHeapObjectSize)),
         &if_sizeissmall, &if_notsizeissmall);

  Bind(&if_sizeissmall);
  {
    // Just allocate the SeqOneByteString in new space.
    Node* result = Allocate(size, flags);
    DCHECK(Heap::RootIsImmortalImmovable(Heap::kOneByteStringMapRootIndex));
    StoreMapNoWriteBarrier(result, Heap::kOneByteStringMapRootIndex);
    StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
                                   ParameterToTagged(length, mode));
    // Initialize both used and unused parts of hash field slot at once.
    StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldSlot,
                                   IntPtrConstant(String::kEmptyHashField),
                                   MachineType::PointerRepresentation());
    var_result.Bind(result);
    Goto(&if_join);
  }

  Bind(&if_notsizeissmall);
  {
    // We might need to allocate in large object space, go to the runtime.
    Node* result = CallRuntime(Runtime::kAllocateSeqOneByteString, context,
                               ParameterToTagged(length, mode));
    var_result.Bind(result);
    Goto(&if_join);
  }

  Bind(&if_lengthiszero);
  {
    var_result.Bind(LoadRoot(Heap::kempty_stringRootIndex));
    Goto(&if_join);
  }

  Bind(&if_join);
  return var_result.value();
}

Node* CodeStubAssembler::AllocateSeqTwoByteString(int length,
                                                  AllocationFlags flags) {
  Comment("AllocateSeqTwoByteString");
  if (length == 0) {
    return LoadRoot(Heap::kempty_stringRootIndex);
  }
  Node* result = Allocate(SeqTwoByteString::SizeFor(length), flags);
  DCHECK(Heap::RootIsImmortalImmovable(Heap::kStringMapRootIndex));
  StoreMapNoWriteBarrier(result, Heap::kStringMapRootIndex);
  StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset,
                                 SmiConstant(Smi::FromInt(length)));
  // Initialize both used and unused parts of hash field slot at once.
  StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldSlot,
                                 IntPtrConstant(String::kEmptyHashField),
                                 MachineType::PointerRepresentation());
  return result;
}

Node* CodeStubAssembler::AllocateSeqTwoByteString(Node* context, Node* length,
                                                  ParameterMode mode,
                                                  AllocationFlags flags) {
  Comment("AllocateSeqTwoByteString");
  Variable var_result(this, MachineRepresentation::kTagged);

  // Compute the SeqTwoByteString size and check if it fits into new space.
  Label if_lengthiszero(this), if_sizeissmall(this),
      if_notsizeissmall(this, Label::kDeferred), if_join(this);
  GotoIf(WordEqual(length, IntPtrOrSmiConstant(0, mode)), &if_lengthiszero);

  Node* raw_size = GetArrayAllocationSize(
      length, UINT16_ELEMENTS, mode,
      SeqTwoByteString::kHeaderSize + kObjectAlignmentMask);
  Node* size = WordAnd(raw_size, IntPtrConstant(~kObjectAlignmentMask));
  Branch(IntPtrLessThanOrEqual(size, IntPtrConstant(kMaxRegularHeapObjectSize)),
         &if_sizeissmall, &if_notsizeissmall);

  Bind(&if_sizeissmall);
  {
    // Just allocate the SeqTwoByteString in new space.
    Node* result = Allocate(size, flags);
    DCHECK(Heap::RootIsImmortalImmovable(Heap::kStringMapRootIndex));
    StoreMapNoWriteBarrier(result, Heap::kStringMapRootIndex);
    StoreObjectFieldNoWriteBarrier(
        result, SeqTwoByteString::kLengthOffset,
        mode == SMI_PARAMETERS ? length : SmiFromWord(length));
    // Initialize both used and unused parts of hash field slot at once.
    StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldSlot,
                                   IntPtrConstant(String::kEmptyHashField),
                                   MachineType::PointerRepresentation());
    var_result.Bind(result);
    Goto(&if_join);
  }

  Bind(&if_notsizeissmall);
  {
    // We might need to allocate in large object space, go to the runtime.
    Node* result =
        CallRuntime(Runtime::kAllocateSeqTwoByteString, context,
                    mode == SMI_PARAMETERS ? length : SmiFromWord(length));
    var_result.Bind(result);
    Goto(&if_join);
  }

  Bind(&if_lengthiszero);
  {
    var_result.Bind(LoadRoot(Heap::kempty_stringRootIndex));
    Goto(&if_join);
  }

  Bind(&if_join);
  return var_result.value();
}

Node* CodeStubAssembler::AllocateSlicedString(
    Heap::RootListIndex map_root_index, Node* length, Node* parent,
    Node* offset) {
  CSA_ASSERT(this, TaggedIsSmi(length));
  Node* result = Allocate(SlicedString::kSize);
  DCHECK(Heap::RootIsImmortalImmovable(map_root_index));
  StoreMapNoWriteBarrier(result, map_root_index);
  StoreObjectFieldNoWriteBarrier(result, SlicedString::kLengthOffset, length,
                                 MachineRepresentation::kTagged);
  // Initialize both used and unused parts of hash field slot at once.
  StoreObjectFieldNoWriteBarrier(result, SlicedString::kHashFieldSlot,
                                 IntPtrConstant(String::kEmptyHashField),
                                 MachineType::PointerRepresentation());
  StoreObjectFieldNoWriteBarrier(result, SlicedString::kParentOffset, parent,
                                 MachineRepresentation::kTagged);
  StoreObjectFieldNoWriteBarrier(result, SlicedString::kOffsetOffset, offset,
                                 MachineRepresentation::kTagged);
  return result;
}

Node* CodeStubAssembler::AllocateSlicedOneByteString(Node* length, Node* parent,
                                                     Node* offset) {
  return AllocateSlicedString(Heap::kSlicedOneByteStringMapRootIndex, length,
                              parent, offset);
}

Node* CodeStubAssembler::AllocateSlicedTwoByteString(Node* length, Node* parent,
                                                     Node* offset) {
  return AllocateSlicedString(Heap::kSlicedStringMapRootIndex, length, parent,
                              offset);
}

Node* CodeStubAssembler::AllocateConsString(Heap::RootListIndex map_root_index,
                                            Node* length, Node* first,
                                            Node* second,
                                            AllocationFlags flags) {
  CSA_ASSERT(this, TaggedIsSmi(length));
  Node* result = Allocate(ConsString::kSize, flags);
  DCHECK(Heap::RootIsImmortalImmovable(map_root_index));
  StoreMapNoWriteBarrier(result, map_root_index);
  StoreObjectFieldNoWriteBarrier(result, ConsString::kLengthOffset, length,
                                 MachineRepresentation::kTagged);
  // Initialize both used and unused parts of hash field slot at once.
  StoreObjectFieldNoWriteBarrier(result, ConsString::kHashFieldSlot,
                                 IntPtrConstant(String::kEmptyHashField),
                                 MachineType::PointerRepresentation());
  bool const new_space = !(flags & kPretenured);
  if (new_space) {
    StoreObjectFieldNoWriteBarrier(result, ConsString::kFirstOffset, first,
                                   MachineRepresentation::kTagged);
    StoreObjectFieldNoWriteBarrier(result, ConsString::kSecondOffset, second,
                                   MachineRepresentation::kTagged);
  } else {
    StoreObjectField(result, ConsString::kFirstOffset, first);
    StoreObjectField(result, ConsString::kSecondOffset, second);
  }
  return result;
}

Node* CodeStubAssembler::AllocateOneByteConsString(Node* length, Node* first,
                                                   Node* second,
                                                   AllocationFlags flags) {
  return AllocateConsString(Heap::kConsOneByteStringMapRootIndex, length, first,
                            second, flags);
}

Node* CodeStubAssembler::AllocateTwoByteConsString(Node* length, Node* first,
                                                   Node* second,
                                                   AllocationFlags flags) {
  return AllocateConsString(Heap::kConsStringMapRootIndex, length, first,
                            second, flags);
}

Node* CodeStubAssembler::NewConsString(Node* context, Node* length, Node* left,
                                       Node* right, AllocationFlags flags) {
  CSA_ASSERT(this, TaggedIsSmi(length));
  // Added string can be a cons string.
  Comment("Allocating ConsString");
  Node* left_instance_type = LoadInstanceType(left);
  Node* right_instance_type = LoadInstanceType(right);

  // Compute intersection and difference of instance types.
  Node* anded_instance_types =
      Word32And(left_instance_type, right_instance_type);
  Node* xored_instance_types =
      Word32Xor(left_instance_type, right_instance_type);

  // We create a one-byte cons string if
  // 1. both strings are one-byte, or
  // 2. at least one of the strings is two-byte, but happens to contain only
  //    one-byte characters.
  // To do this, we check
  // 1. if both strings are one-byte, or if the one-byte data hint is set in
  //    both strings, or
  // 2. if one of the strings has the one-byte data hint set and the other
  //    string is one-byte.
  STATIC_ASSERT(kOneByteStringTag != 0);
  STATIC_ASSERT(kOneByteDataHintTag != 0);
  Label one_byte_map(this);
  Label two_byte_map(this);
  Variable result(this, MachineRepresentation::kTagged);
  Label done(this, &result);
  GotoIf(Word32NotEqual(Word32And(anded_instance_types,
                                  Int32Constant(kStringEncodingMask |
                                                kOneByteDataHintTag)),
                        Int32Constant(0)),
         &one_byte_map);
  Branch(Word32NotEqual(Word32And(xored_instance_types,
                                  Int32Constant(kStringEncodingMask |
                                                kOneByteDataHintMask)),
                        Int32Constant(kOneByteStringTag | kOneByteDataHintTag)),
         &two_byte_map, &one_byte_map);

  Bind(&one_byte_map);
  Comment("One-byte ConsString");
  result.Bind(AllocateOneByteConsString(length, left, right, flags));
  Goto(&done);

  Bind(&two_byte_map);
  Comment("Two-byte ConsString");
  result.Bind(AllocateTwoByteConsString(length, left, right, flags));
  Goto(&done);

  Bind(&done);

  return result.value();
}

Node* CodeStubAssembler::AllocateRegExpResult(Node* context, Node* length,
                                              Node* index, Node* input) {
  Node* const max_length =
      SmiConstant(Smi::FromInt(JSArray::kInitialMaxFastElementArray));
  CSA_ASSERT(this, SmiLessThanOrEqual(length, max_length));
  USE(max_length);

  // Allocate the JSRegExpResult.
  // TODO(jgruber): Fold JSArray and FixedArray allocations, then remove
  // unneeded store of elements.
  Node* const result = Allocate(JSRegExpResult::kSize);

  // TODO(jgruber): Store map as Heap constant?
  Node* const native_context = LoadNativeContext(context);
  Node* const map =
      LoadContextElement(native_context, Context::REGEXP_RESULT_MAP_INDEX);
  StoreMapNoWriteBarrier(result, map);

  // Initialize the header before allocating the elements.
  Node* const empty_array = EmptyFixedArrayConstant();
  DCHECK(Heap::RootIsImmortalImmovable(Heap::kEmptyFixedArrayRootIndex));
  StoreObjectFieldNoWriteBarrier(result, JSArray::kPropertiesOffset,
                                 empty_array);
  StoreObjectFieldNoWriteBarrier(result, JSArray::kElementsOffset, empty_array);
  StoreObjectFieldNoWriteBarrier(result, JSArray::kLengthOffset, length);

  StoreObjectFieldNoWriteBarrier(result, JSRegExpResult::kIndexOffset, index);
  StoreObjectField(result, JSRegExpResult::kInputOffset, input);

  Node* const zero = IntPtrConstant(0);
  Node* const length_intptr = SmiUntag(length);
  const ElementsKind elements_kind = FAST_ELEMENTS;

  Node* const elements = AllocateFixedArray(elements_kind, length_intptr);
  StoreObjectField(result, JSArray::kElementsOffset, elements);

  // Fill in the elements with undefined.
  FillFixedArrayWithValue(elements_kind, elements, zero, length_intptr,
                          Heap::kUndefinedValueRootIndex);

  return result;
}

Node* CodeStubAssembler::AllocateNameDictionary(int at_least_space_for) {
  return AllocateNameDictionary(IntPtrConstant(at_least_space_for));
}

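// Layout produced below: the FixedArray header, the HashTable bookkeeping
// slots (number of elements, number of deleted elements, capacity), the
// dictionary-specific slots, and finally the entry slots (kEntrySize array
// slots per entry), which are pre-filled with undefined.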
Node* CodeStubAssembler::AllocateNameDictionary(Node* at_least_space_for) {
  CSA_ASSERT(this, UintPtrLessThanOrEqual(
                       at_least_space_for,
                       IntPtrConstant(NameDictionary::kMaxCapacity)));

  Node* capacity = HashTableComputeCapacity(at_least_space_for);
  CSA_ASSERT(this, WordIsPowerOfTwo(capacity));

  Node* length = EntryToIndex<NameDictionary>(capacity);
  Node* store_size =
      IntPtrAdd(WordShl(length, IntPtrConstant(kPointerSizeLog2)),
                IntPtrConstant(NameDictionary::kHeaderSize));

  Node* result = Allocate(store_size);
  Comment("Initialize NameDictionary");
  // Initialize FixedArray fields.
  DCHECK(Heap::RootIsImmortalImmovable(Heap::kHashTableMapRootIndex));
  StoreMapNoWriteBarrier(result, Heap::kHashTableMapRootIndex);
  StoreObjectFieldNoWriteBarrier(result, FixedArray::kLengthOffset,
                                 SmiFromWord(length));
  // Initialize HashTable fields.
  Node* zero = SmiConstant(0);
  StoreFixedArrayElement(result, NameDictionary::kNumberOfElementsIndex, zero,
                         SKIP_WRITE_BARRIER);
  StoreFixedArrayElement(result, NameDictionary::kNumberOfDeletedElementsIndex,
                         zero, SKIP_WRITE_BARRIER);
  StoreFixedArrayElement(result, NameDictionary::kCapacityIndex,
                         SmiTag(capacity), SKIP_WRITE_BARRIER);
  // Initialize Dictionary fields.
  Node* filler = LoadRoot(Heap::kUndefinedValueRootIndex);
  StoreFixedArrayElement(result, NameDictionary::kMaxNumberKeyIndex, filler,
                         SKIP_WRITE_BARRIER);
  StoreFixedArrayElement(result, NameDictionary::kNextEnumerationIndexIndex,
                         SmiConstant(PropertyDetails::kInitialIndex),
                         SKIP_WRITE_BARRIER);

  // Initialize NameDictionary elements.
  Node* result_word = BitcastTaggedToWord(result);
  Node* start_address = IntPtrAdd(
      result_word, IntPtrConstant(NameDictionary::OffsetOfElementAt(
                                      NameDictionary::kElementsStartIndex) -
                                  kHeapObjectTag));
  Node* end_address = IntPtrAdd(
      result_word, IntPtrSub(store_size, IntPtrConstant(kHeapObjectTag)));
  StoreFieldsNoWriteBarrier(start_address, end_address, filler);
  return result;
}

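// LoadMapInstanceSize returns the instance size in words, hence the
// multiplication by kPointerSize below to get the allocation size in bytes.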
Node* CodeStubAssembler::AllocateJSObjectFromMap(Node* map, Node* properties,
                                                 Node* elements,
                                                 AllocationFlags flags) {
  CSA_ASSERT(this, IsMap(map));
  Node* size =
      IntPtrMul(LoadMapInstanceSize(map), IntPtrConstant(kPointerSize));
  CSA_ASSERT(this, IsRegularHeapObjectSize(size));
  Node* object = Allocate(size, flags);
  StoreMapNoWriteBarrier(object, map);
  InitializeJSObjectFromMap(object, map, size, properties, elements);
  return object;
}

void CodeStubAssembler::InitializeJSObjectFromMap(Node* object, Node* map,
                                                  Node* size, Node* properties,
                                                  Node* elements) {
  // This helper assumes that the object is in new-space, as guarded by the
  // check in AllocateJSObjectFromMap.
  if (properties == nullptr) {
    CSA_ASSERT(this, Word32BinaryNot(IsDictionaryMap(map)));
    StoreObjectFieldRoot(object, JSObject::kPropertiesOffset,
                         Heap::kEmptyFixedArrayRootIndex);
  } else {
    StoreObjectFieldNoWriteBarrier(object, JSObject::kPropertiesOffset,
                                   properties);
  }
  if (elements == nullptr) {
    StoreObjectFieldRoot(object, JSObject::kElementsOffset,
                         Heap::kEmptyFixedArrayRootIndex);
  } else {
    StoreObjectFieldNoWriteBarrier(object, JSObject::kElementsOffset, elements);
  }
  InitializeJSObjectBody(object, map, size, JSObject::kHeaderSize);
}

void CodeStubAssembler::InitializeJSObjectBody(Node* object, Node* map,
                                               Node* size, int start_offset) {
  // TODO(cbruni): activate in-object slack tracking machinery.
  Comment("InitializeJSObjectBody");
  Node* filler = LoadRoot(Heap::kUndefinedValueRootIndex);
  // Calculate the untagged field addresses.
  object = BitcastTaggedToWord(object);
  Node* start_address =
      IntPtrAdd(object, IntPtrConstant(start_offset - kHeapObjectTag));
  Node* end_address =
      IntPtrSub(IntPtrAdd(object, size), IntPtrConstant(kHeapObjectTag));
  StoreFieldsNoWriteBarrier(start_address, end_address, filler);
}

void CodeStubAssembler::StoreFieldsNoWriteBarrier(Node* start_address,
                                                  Node* end_address,
                                                  Node* value) {
  Comment("StoreFieldsNoWriteBarrier");
  CSA_ASSERT(this, WordIsWordAligned(start_address));
  CSA_ASSERT(this, WordIsWordAligned(end_address));
  BuildFastLoop(start_address, end_address,
                [this, value](Node* current) {
                  StoreNoWriteBarrier(MachineRepresentation::kTagged, current,
                                      value);
                },
                kPointerSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
}

Node* CodeStubAssembler::AllocateUninitializedJSArrayWithoutElements(
    ElementsKind kind, Node* array_map, Node* length, Node* allocation_site) {
  Comment("begin allocation of JSArray without elements");
  int base_size = JSArray::kSize;
  if (allocation_site != nullptr) {
    base_size += AllocationMemento::kSize;
  }

  Node* size = IntPtrConstant(base_size);
  Node* array = AllocateUninitializedJSArray(kind, array_map, length,
                                             allocation_site, size);
  return array;
}

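// The JSArray and its FixedArray elements are allocated as one contiguous
// block; InnerAllocate then carves out a tagged pointer to the elements
// portion at elements_offset inside that block.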
std::pair<Node*, Node*>
CodeStubAssembler::AllocateUninitializedJSArrayWithElements(
    ElementsKind kind, Node* array_map, Node* length, Node* allocation_site,
    Node* capacity, ParameterMode capacity_mode) {
  Comment("begin allocation of JSArray with elements");
  int base_size = JSArray::kSize;

  if (allocation_site != nullptr) {
    base_size += AllocationMemento::kSize;
  }

  int elements_offset = base_size;

  // Compute space for elements.
  base_size += FixedArray::kHeaderSize;
  Node* size = ElementOffsetFromIndex(capacity, kind, capacity_mode, base_size);

  Node* array = AllocateUninitializedJSArray(kind, array_map, length,
                                             allocation_site, size);

  Node* elements = InnerAllocate(array, elements_offset);
  StoreObjectFieldNoWriteBarrier(array, JSObject::kElementsOffset, elements);

  return {array, elements};
}

Node* CodeStubAssembler::AllocateUninitializedJSArray(ElementsKind kind,
                                                      Node* array_map,
                                                      Node* length,
                                                      Node* allocation_site,
                                                      Node* size_in_bytes) {
  Node* array = Allocate(size_in_bytes);

  Comment("write JSArray headers");
  StoreMapNoWriteBarrier(array, array_map);

  CSA_ASSERT(this, TaggedIsSmi(length));
  StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);

  StoreObjectFieldRoot(array, JSArray::kPropertiesOffset,
                       Heap::kEmptyFixedArrayRootIndex);

  if (allocation_site != nullptr) {
    InitializeAllocationMemento(array, JSArray::kSize, allocation_site);
  }
  return array;
}

Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind, Node* array_map,
                                         Node* capacity, Node* length,
                                         Node* allocation_site,
                                         ParameterMode capacity_mode) {
  Node *array = nullptr, *elements = nullptr;
  if (IsIntPtrOrSmiConstantZero(capacity)) {
    // Array is empty. Use the shared empty fixed array instead of allocating a
    // new one.
    array = AllocateUninitializedJSArrayWithoutElements(kind, array_map, length,
                                                        nullptr);
    StoreObjectFieldRoot(array, JSArray::kElementsOffset,
                         Heap::kEmptyFixedArrayRootIndex);
  } else {
    // Allocate both array and elements object, and initialize the JSArray.
    std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
        kind, array_map, length, allocation_site, capacity, capacity_mode);
    // Set up the elements object.
    Heap::RootListIndex elements_map_index =
        IsFastDoubleElementsKind(kind) ? Heap::kFixedDoubleArrayMapRootIndex
                                       : Heap::kFixedArrayMapRootIndex;
    DCHECK(Heap::RootIsImmortalImmovable(elements_map_index));
    StoreMapNoWriteBarrier(elements, elements_map_index);
    StoreObjectFieldNoWriteBarrier(elements, FixedArray::kLengthOffset,
                                   ParameterToTagged(capacity, capacity_mode));
    // Fill in the elements with holes.
    FillFixedArrayWithValue(kind, elements,
                            IntPtrOrSmiConstant(0, capacity_mode), capacity,
                            Heap::kTheHoleValueRootIndex, capacity_mode);
  }

  return array;
}

Node* CodeStubAssembler::AllocateFixedArray(ElementsKind kind,
                                            Node* capacity_node,
                                            ParameterMode mode,
                                            AllocationFlags flags) {
  CSA_ASSERT(this, IntPtrOrSmiGreaterThan(capacity_node,
                                          IntPtrOrSmiConstant(0, mode), mode));
  Node* total_size = GetFixedArrayAllocationSize(capacity_node, kind, mode);

  // Allocate both array and elements object, and initialize the JSArray.
  Node* array = Allocate(total_size, flags);
  Heap::RootListIndex map_index = IsFastDoubleElementsKind(kind)
                                      ? Heap::kFixedDoubleArrayMapRootIndex
                                      : Heap::kFixedArrayMapRootIndex;
  DCHECK(Heap::RootIsImmortalImmovable(map_index));
  StoreMapNoWriteBarrier(array, map_index);
  StoreObjectFieldNoWriteBarrier(array, FixedArray::kLengthOffset,
                                 ParameterToTagged(capacity_node, mode));
  return array;
}

void CodeStubAssembler::FillFixedArrayWithValue(
    ElementsKind kind, Node* array, Node* from_node, Node* to_node,
    Heap::RootListIndex value_root_index, ParameterMode mode) {
  bool is_double = IsFastDoubleElementsKind(kind);
  DCHECK(value_root_index == Heap::kTheHoleValueRootIndex ||
         value_root_index == Heap::kUndefinedValueRootIndex);
  DCHECK_IMPLIES(is_double, value_root_index == Heap::kTheHoleValueRootIndex);
  STATIC_ASSERT(kHoleNanLower32 == kHoleNanUpper32);
  Node* double_hole =
      Is64() ? Int64Constant(kHoleNanInt64) : Int32Constant(kHoleNanLower32);
  Node* value = LoadRoot(value_root_index);

  BuildFastFixedArrayForEach(
      array, kind, from_node, to_node,
      [this, value, is_double, double_hole](Node* array, Node* offset) {
        if (is_double) {
          // Don't use doubles to store the hole double, since manipulating the
          // signaling NaN used for the hole in C++, e.g. with bit_cast, will
          // change its value on ia32 (the x87 stack is used to return values
          // and stores to the stack silently clear the signalling bit).
          //
          // TODO(danno): When we have a Float32/Float64 wrapper class that
          // preserves double bits during manipulation, remove this code/change
          // this to an indexed Float64 store.
          if (Is64()) {
            StoreNoWriteBarrier(MachineRepresentation::kWord64, array, offset,
                                double_hole);
          } else {
            StoreNoWriteBarrier(MachineRepresentation::kWord32, array, offset,
                                double_hole);
            StoreNoWriteBarrier(MachineRepresentation::kWord32, array,
                                IntPtrAdd(offset, IntPtrConstant(kPointerSize)),
                                double_hole);
          }
        } else {
          StoreNoWriteBarrier(MachineRepresentation::kTagged, array, offset,
                              value);
        }
      },
      mode);
}

void CodeStubAssembler::CopyFixedArrayElements(
    ElementsKind from_kind, Node* from_array, ElementsKind to_kind,
    Node* to_array, Node* element_count, Node* capacity,
    WriteBarrierMode barrier_mode, ParameterMode mode) {
  STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
  const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag;
  Comment("[ CopyFixedArrayElements");

  // Typed array elements are not supported.
  DCHECK(!IsFixedTypedArrayElementsKind(from_kind));
  DCHECK(!IsFixedTypedArrayElementsKind(to_kind));

  Label done(this);
  bool from_double_elements = IsFastDoubleElementsKind(from_kind);
  bool to_double_elements = IsFastDoubleElementsKind(to_kind);
  bool element_size_matches =
      Is64() ||
      IsFastDoubleElementsKind(from_kind) == IsFastDoubleElementsKind(to_kind);
  bool doubles_to_objects_conversion =
      IsFastDoubleElementsKind(from_kind) && IsFastObjectElementsKind(to_kind);
  bool needs_write_barrier =
      doubles_to_objects_conversion || (barrier_mode == UPDATE_WRITE_BARRIER &&
                                        IsFastObjectElementsKind(to_kind));
  Node* double_hole =
      Is64() ? Int64Constant(kHoleNanInt64) : Int32Constant(kHoleNanLower32);

  if (doubles_to_objects_conversion) {
    // If the copy might trigger a GC, make sure that the FixedArray is
    // pre-initialized with holes to make sure that it's always in a
    // consistent state.
    FillFixedArrayWithValue(to_kind, to_array, IntPtrOrSmiConstant(0, mode),
                            capacity, Heap::kTheHoleValueRootIndex, mode);
  } else if (element_count != capacity) {
    FillFixedArrayWithValue(to_kind, to_array, element_count, capacity,
                            Heap::kTheHoleValueRootIndex, mode);
  }

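  // The copy below runs backwards: var_from_offset starts just past the last
  // source element and is decremented until it reaches limit_offset, the
  // offset of element zero.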
  Node* limit_offset = ElementOffsetFromIndex(
      IntPtrOrSmiConstant(0, mode), from_kind, mode, first_element_offset);
  Variable var_from_offset(this, MachineType::PointerRepresentation(),
                           ElementOffsetFromIndex(element_count, from_kind,
                                                  mode, first_element_offset));
  // This second variable is used only when the element sizes of source and
  // destination arrays do not match.
  Variable var_to_offset(this, MachineType::PointerRepresentation());
  if (element_size_matches) {
    var_to_offset.Bind(var_from_offset.value());
  } else {
    var_to_offset.Bind(ElementOffsetFromIndex(element_count, to_kind, mode,
                                              first_element_offset));
  }

  Variable* vars[] = {&var_from_offset, &var_to_offset};
  Label decrement(this, 2, vars);

  Branch(WordEqual(var_from_offset.value(), limit_offset), &done, &decrement);

  Bind(&decrement);
  {
    Node* from_offset = IntPtrSub(
        var_from_offset.value(),
        IntPtrConstant(from_double_elements ? kDoubleSize : kPointerSize));
    var_from_offset.Bind(from_offset);

    Node* to_offset;
    if (element_size_matches) {
      to_offset = from_offset;
    } else {
      to_offset = IntPtrSub(
          var_to_offset.value(),
          IntPtrConstant(to_double_elements ? kDoubleSize : kPointerSize));
      var_to_offset.Bind(to_offset);
    }

    Label next_iter(this), store_double_hole(this);
    Label* if_hole;
    if (doubles_to_objects_conversion) {
      // The target elements array is already preinitialized with holes, so we
      // can just proceed with the next iteration.
      if_hole = &next_iter;
    } else if (IsFastDoubleElementsKind(to_kind)) {
      if_hole = &store_double_hole;
    } else {
      // In all the other cases don't check for holes and copy the data as is.
      if_hole = nullptr;
    }

    Node* value = LoadElementAndPrepareForStore(
        from_array, var_from_offset.value(), from_kind, to_kind, if_hole);

    if (needs_write_barrier) {
      Store(to_array, to_offset, value);
    } else if (to_double_elements) {
      StoreNoWriteBarrier(MachineRepresentation::kFloat64, to_array, to_offset,
                          value);
    } else {
      StoreNoWriteBarrier(MachineRepresentation::kTagged, to_array, to_offset,
                          value);
    }
    Goto(&next_iter);

    if (if_hole == &store_double_hole) {
      Bind(&store_double_hole);
      // Don't use doubles to store the hole double, since manipulating the
      // signaling NaN used for the hole in C++, e.g. with bit_cast, will
      // change its value on ia32 (the x87 stack is used to return values
      // and stores to the stack silently clear the signalling bit).
      //
      // TODO(danno): When we have a Float32/Float64 wrapper class that
      // preserves double bits during manipulation, remove this code/change
      // this to an indexed Float64 store.
      if (Is64()) {
        StoreNoWriteBarrier(MachineRepresentation::kWord64, to_array, to_offset,
                            double_hole);
      } else {
        StoreNoWriteBarrier(MachineRepresentation::kWord32, to_array, to_offset,
                            double_hole);
        StoreNoWriteBarrier(MachineRepresentation::kWord32, to_array,
                            IntPtrAdd(to_offset, IntPtrConstant(kPointerSize)),
                            double_hole);
      }
      Goto(&next_iter);
    }

    Bind(&next_iter);
    Node* compare = WordNotEqual(from_offset, limit_offset);
    Branch(compare, &decrement, &done);
  }

  Bind(&done);
  IncrementCounter(isolate()->counters()->inlined_copied_elements(), 1);
  Comment("] CopyFixedArrayElements");
}

void CodeStubAssembler::CopyStringCharacters(Node* from_string, Node* to_string,
                                             Node* from_index, Node* to_index,
                                             Node* character_count,
                                             String::Encoding from_encoding,
                                             String::Encoding to_encoding,
                                             ParameterMode mode) {
  bool from_one_byte = from_encoding == String::ONE_BYTE_ENCODING;
  bool to_one_byte = to_encoding == String::ONE_BYTE_ENCODING;
  DCHECK_IMPLIES(to_one_byte, from_one_byte);
  Comment("CopyStringCharacters %s -> %s",
          from_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING",
          to_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING");

  ElementsKind from_kind = from_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS;
  ElementsKind to_kind = to_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS;
  STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
  int header_size = SeqOneByteString::kHeaderSize - kHeapObjectTag;
  Node* from_offset =
      ElementOffsetFromIndex(from_index, from_kind, mode, header_size);
  Node* to_offset =
      ElementOffsetFromIndex(to_index, to_kind, mode, header_size);
  Node* byte_count = ElementOffsetFromIndex(character_count, from_kind, mode);
  Node* limit_offset = IntPtrAdd(from_offset, byte_count);

  // Prepare the fast loop.
  MachineType type =
      from_one_byte ? MachineType::Uint8() : MachineType::Uint16();
  MachineRepresentation rep = to_one_byte ? MachineRepresentation::kWord8
                                          : MachineRepresentation::kWord16;
  int from_increment = 1 << ElementsKindToShiftSize(from_kind);
  int to_increment = 1 << ElementsKindToShiftSize(to_kind);

  Variable current_to_offset(this, MachineType::PointerRepresentation(),
                             to_offset);
  VariableList vars({&current_to_offset}, zone());
  int to_index_constant = 0, from_index_constant = 0;
  Smi* to_index_smi = nullptr;
  Smi* from_index_smi = nullptr;
  bool index_same = (from_encoding == to_encoding) &&
                    (from_index == to_index ||
                     (ToInt32Constant(from_index, from_index_constant) &&
                      ToInt32Constant(to_index, to_index_constant) &&
                      from_index_constant == to_index_constant) ||
                     (ToSmiConstant(from_index, from_index_smi) &&
                      ToSmiConstant(to_index, to_index_smi) &&
                      to_index_smi == from_index_smi));
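  // When the encodings match and both copies start at the same index, the
  // source and destination offsets coincide on every iteration, so the loop
  // below reuses the single loop offset for both the load and the store and
  // skips maintaining current_to_offset.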
  BuildFastLoop(vars, from_offset, limit_offset,
                [this, from_string, to_string, &current_to_offset, to_increment,
                 type, rep, index_same](Node* offset) {
                  Node* value = Load(type, from_string, offset);
                  StoreNoWriteBarrier(
                      rep, to_string,
                      index_same ? offset : current_to_offset.value(), value);
                  if (!index_same) {
                    Increment(current_to_offset, to_increment);
                  }
                },
                from_increment, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
}

Node* CodeStubAssembler::LoadElementAndPrepareForStore(Node* array,
                                                       Node* offset,
                                                       ElementsKind from_kind,
                                                       ElementsKind to_kind,
                                                       Label* if_hole) {
  if (IsFastDoubleElementsKind(from_kind)) {
    Node* value =
        LoadDoubleWithHoleCheck(array, offset, if_hole, MachineType::Float64());
    if (!IsFastDoubleElementsKind(to_kind)) {
      value = AllocateHeapNumberWithValue(value);
    }
    return value;
  } else {
    Node* value = Load(MachineType::AnyTagged(), array, offset);
    if (if_hole) {
      GotoIf(WordEqual(value, TheHoleConstant()), if_hole);
    }
    if (IsFastDoubleElementsKind(to_kind)) {
      if (IsFastSmiElementsKind(from_kind)) {
        value = SmiToFloat64(value);
      } else {
        value = LoadHeapNumberValue(value);
      }
    }
    return value;
  }
}

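// Growth strategy: new_capacity = old_capacity + old_capacity / 2 + 16,
// i.e. roughly 1.5x with a small fixed pad, e.g. 8 -> 28, 100 -> 166.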
Node* CodeStubAssembler::CalculateNewElementsCapacity(Node* old_capacity,
                                                      ParameterMode mode) {
  Node* half_old_capacity = WordOrSmiShr(old_capacity, 1, mode);
  Node* new_capacity = IntPtrOrSmiAdd(half_old_capacity, old_capacity, mode);
  Node* padding = IntPtrOrSmiConstant(16, mode);
  return IntPtrOrSmiAdd(new_capacity, padding, mode);
}

Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
                                                 ElementsKind kind, Node* key,
                                                 Label* bailout) {
  Node* capacity = LoadFixedArrayBaseLength(elements);

  ParameterMode mode = OptimalParameterMode();
  capacity = TaggedToParameter(capacity, mode);
  key = TaggedToParameter(key, mode);

  return TryGrowElementsCapacity(object, elements, kind, key, capacity, mode,
                                 bailout);
}

Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
                                                 ElementsKind kind, Node* key,
                                                 Node* capacity,
                                                 ParameterMode mode,
                                                 Label* bailout) {
  Comment("TryGrowElementsCapacity");

  // If the gap growth is too big, fall back to the runtime.
  Node* max_gap = IntPtrOrSmiConstant(JSObject::kMaxGap, mode);
  Node* max_capacity = IntPtrOrSmiAdd(capacity, max_gap, mode);
  GotoIf(UintPtrOrSmiGreaterThanOrEqual(key, max_capacity, mode), bailout);

  // Calculate the capacity of the new backing store.
  Node* new_capacity = CalculateNewElementsCapacity(
      IntPtrOrSmiAdd(key, IntPtrOrSmiConstant(1, mode), mode), mode);
  return GrowElementsCapacity(object, elements, kind, kind, capacity,
                              new_capacity, mode, bailout);
}

Node* CodeStubAssembler::GrowElementsCapacity(
    Node* object, Node* elements, ElementsKind from_kind, ElementsKind to_kind,
    Node* capacity, Node* new_capacity, ParameterMode mode, Label* bailout) {
  Comment("[ GrowElementsCapacity");
  // If size of the allocation for the new capacity doesn't fit in a page
  // that we can bump-pointer allocate from, fall back to the runtime.
  int max_size = FixedArrayBase::GetMaxLengthForNewSpaceAllocation(to_kind);
  GotoIf(UintPtrOrSmiGreaterThanOrEqual(
             new_capacity, IntPtrOrSmiConstant(max_size, mode), mode),
         bailout);

  // Allocate the new backing store.
  Node* new_elements = AllocateFixedArray(to_kind, new_capacity, mode);

  // Copy the elements from the old elements store to the new.
  // The size-check above guarantees that the |new_elements| is allocated
  // in new space so we can skip the write barrier.
  CopyFixedArrayElements(from_kind, elements, to_kind, new_elements, capacity,
                         new_capacity, SKIP_WRITE_BARRIER, mode);

  StoreObjectField(object, JSObject::kElementsOffset, new_elements);
  Comment("] GrowElementsCapacity");
  return new_elements;
}

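// The memento lives immediately after the base object, so each memento field
// is written at its field offset plus base_allocation_size.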
void CodeStubAssembler::InitializeAllocationMemento(Node* base_allocation,
                                                    int base_allocation_size,
                                                    Node* allocation_site) {
  StoreObjectFieldNoWriteBarrier(
      base_allocation, AllocationMemento::kMapOffset + base_allocation_size,
      HeapConstant(Handle<Map>(isolate()->heap()->allocation_memento_map())));
  StoreObjectFieldNoWriteBarrier(
      base_allocation,
      AllocationMemento::kAllocationSiteOffset + base_allocation_size,
      allocation_site);
  if (FLAG_allocation_site_pretenuring) {
    Node* count = LoadObjectField(allocation_site,
                                  AllocationSite::kPretenureCreateCountOffset);
    Node* incremented_count = SmiAdd(count, SmiConstant(Smi::FromInt(1)));
    StoreObjectFieldNoWriteBarrier(allocation_site,
                                   AllocationSite::kPretenureCreateCountOffset,
                                   incremented_count);
  }
}

Node* CodeStubAssembler::TryTaggedToFloat64(Node* value,
                                            Label* if_valueisnotnumber) {
  Label out(this);
  Variable var_result(this, MachineRepresentation::kFloat64);

  // Check if the {value} is a Smi or a HeapObject.
  Label if_valueissmi(this), if_valueisnotsmi(this);
  Branch(TaggedIsSmi(value), &if_valueissmi, &if_valueisnotsmi);

  Bind(&if_valueissmi);
  {
    // Convert the Smi {value}.
    var_result.Bind(SmiToFloat64(value));
    Goto(&out);
  }

  Bind(&if_valueisnotsmi);
  {
    // Check if {value} is a HeapNumber.
    Label if_valueisheapnumber(this);
    Branch(IsHeapNumberMap(LoadMap(value)), &if_valueisheapnumber,
           if_valueisnotnumber);

    Bind(&if_valueisheapnumber);
    {
      // Load the floating point value.
      var_result.Bind(LoadHeapNumberValue(value));
      Goto(&out);
    }
  }
  Bind(&out);
  return var_result.value();
}

Node* CodeStubAssembler::TruncateTaggedToFloat64(Node* context, Node* value) {
  // We might need to loop once due to ToNumber conversion.
  Variable var_value(this, MachineRepresentation::kTagged),
      var_result(this, MachineRepresentation::kFloat64);
  Label loop(this, &var_value), done_loop(this, &var_result);
  var_value.Bind(value);
  Goto(&loop);
  Bind(&loop);
  {
    Label if_valueisnotnumber(this, Label::kDeferred);

    // Load the current {value}.
    value = var_value.value();

    // Convert {value} to Float64 if it is a number and convert it to a number
    // otherwise.
    Node* const result = TryTaggedToFloat64(value, &if_valueisnotnumber);
    var_result.Bind(result);
    Goto(&done_loop);

    Bind(&if_valueisnotnumber);
    {
      // Convert the {value} to a Number first.
      Callable callable = CodeFactory::NonNumberToNumber(isolate());
      var_value.Bind(CallStub(callable, context, value));
      Goto(&loop);
    }
  }
  Bind(&done_loop);
  return var_result.value();
}

Node* CodeStubAssembler::TruncateTaggedToWord32(Node* context, Node* value) {
  // We might need to loop once due to ToNumber conversion.
  Variable var_value(this, MachineRepresentation::kTagged, value),
      var_result(this, MachineRepresentation::kWord32);
  Label loop(this, &var_value), done_loop(this, &var_result);
  Goto(&loop);
  Bind(&loop);
  {
    // Load the current {value}.
    value = var_value.value();

    // Check if the {value} is a Smi or a HeapObject.
    Label if_valueissmi(this), if_valueisnotsmi(this);
    Branch(TaggedIsSmi(value), &if_valueissmi, &if_valueisnotsmi);

    Bind(&if_valueissmi);
    {
      // Convert the Smi {value}.
      var_result.Bind(SmiToWord32(value));
      Goto(&done_loop);
    }

    Bind(&if_valueisnotsmi);
    {
      // Check if {value} is a HeapNumber.
      Label if_valueisheapnumber(this),
          if_valueisnotheapnumber(this, Label::kDeferred);
      Branch(IsHeapNumberMap(LoadMap(value)), &if_valueisheapnumber,
             &if_valueisnotheapnumber);

      Bind(&if_valueisheapnumber);
      {
        // Truncate the floating point value.
        var_result.Bind(TruncateHeapNumberValueToWord32(value));
        Goto(&done_loop);
      }

      Bind(&if_valueisnotheapnumber);
      {
        // Convert the {value} to a Number first.
        Callable callable = CodeFactory::NonNumberToNumber(isolate());
        var_value.Bind(CallStub(callable, context, value));
        Goto(&loop);
      }
    }
  }
  Bind(&done_loop);
  return var_result.value();
}

Node* CodeStubAssembler::TruncateHeapNumberValueToWord32(Node* object) {
  Node* value = LoadHeapNumberValue(object);
  return TruncateFloat64ToWord32(value);
}

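// A double becomes a Smi only if it round-trips exactly through int32 and is
// not -0.0: when value32 == 0, the high-word sign-bit check below routes a
// negative zero to the HeapNumber path, since Smi zero cannot represent -0.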
Node* CodeStubAssembler::ChangeFloat64ToTagged(Node* value) {
  Node* value32 = RoundFloat64ToInt32(value);
  Node* value64 = ChangeInt32ToFloat64(value32);

  Label if_valueisint32(this), if_valueisheapnumber(this), if_join(this);

  Label if_valueisequal(this), if_valueisnotequal(this);
  Branch(Float64Equal(value, value64), &if_valueisequal, &if_valueisnotequal);
  Bind(&if_valueisequal);
  {
    GotoIfNot(Word32Equal(value32, Int32Constant(0)), &if_valueisint32);
    Branch(Int32LessThan(Float64ExtractHighWord32(value), Int32Constant(0)),
           &if_valueisheapnumber, &if_valueisint32);
  }
  Bind(&if_valueisnotequal);
  Goto(&if_valueisheapnumber);

  Variable var_result(this, MachineRepresentation::kTagged);
  Bind(&if_valueisint32);
  {
    if (Is64()) {
      Node* result = SmiTag(ChangeInt32ToInt64(value32));
      var_result.Bind(result);
      Goto(&if_join);
    } else {
      Node* pair = Int32AddWithOverflow(value32, value32);
      Node* overflow = Projection(1, pair);
      Label if_overflow(this, Label::kDeferred), if_notoverflow(this);
      Branch(overflow, &if_overflow, &if_notoverflow);
      Bind(&if_overflow);
      Goto(&if_valueisheapnumber);
      Bind(&if_notoverflow);
      {
        Node* result = BitcastWordToTaggedSigned(Projection(0, pair));
        var_result.Bind(result);
        Goto(&if_join);
      }
    }
  }
  Bind(&if_valueisheapnumber);
  {
    Node* result = AllocateHeapNumberWithValue(value);
    var_result.Bind(result);
    Goto(&if_join);
  }
  Bind(&if_join);
  return var_result.value();
}

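// On 32-bit targets a Smi is the value shifted left by one (tag bit 0), so
// Int32AddWithOverflow(value, value) both performs the shift and signals,
// via its overflow projection, that the value is outside the 31-bit Smi
// range and needs a HeapNumber instead.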
Node* CodeStubAssembler::ChangeInt32ToTagged(Node* value) {
  if (Is64()) {
    return SmiTag(ChangeInt32ToInt64(value));
  }
  Variable var_result(this, MachineRepresentation::kTagged);
  Node* pair = Int32AddWithOverflow(value, value);
  Node* overflow = Projection(1, pair);
  Label if_overflow(this, Label::kDeferred), if_notoverflow(this),
      if_join(this);
  Branch(overflow, &if_overflow, &if_notoverflow);
  Bind(&if_overflow);
  {
    Node* value64 = ChangeInt32ToFloat64(value);
    Node* result = AllocateHeapNumberWithValue(value64);
    var_result.Bind(result);
  }
  Goto(&if_join);
  Bind(&if_notoverflow);
  {
    Node* result = BitcastWordToTaggedSigned(Projection(0, pair));
    var_result.Bind(result);
  }
  Goto(&if_join);
  Bind(&if_join);
  return var_result.value();
}

Node* CodeStubAssembler::ChangeUint32ToTagged(Node* value) {
  Label if_overflow(this, Label::kDeferred), if_not_overflow(this),
      if_join(this);
  Variable var_result(this, MachineRepresentation::kTagged);
  // If {value} > Smi::kMaxValue, we need to store it in a HeapNumber.
  Branch(Uint32LessThan(Int32Constant(Smi::kMaxValue), value), &if_overflow,
         &if_not_overflow);

  Bind(&if_not_overflow);
  {
    if (Is64()) {
      var_result.Bind(SmiTag(ChangeUint32ToUint64(value)));
    } else {
      // If tagging {value} results in an overflow, we need to use a HeapNumber
      // to represent it.
      Node* pair = Int32AddWithOverflow(value, value);
      Node* overflow = Projection(1, pair);
      GotoIf(overflow, &if_overflow);

      Node* result = BitcastWordToTaggedSigned(Projection(0, pair));
      var_result.Bind(result);
    }
  }
  Goto(&if_join);

  Bind(&if_overflow);
  {
    Node* float64_value = ChangeUint32ToFloat64(value);
    var_result.Bind(AllocateHeapNumberWithValue(float64_value));
  }
  Goto(&if_join);

  Bind(&if_join);
  return var_result.value();
}

Node* CodeStubAssembler::ToThisString(Node* context, Node* value,
                                      char const* method_name) {
  Variable var_value(this, MachineRepresentation::kTagged, value);

  // Check if the {value} is a Smi or a HeapObject.
  Label if_valueissmi(this, Label::kDeferred), if_valueisnotsmi(this),
      if_valueisstring(this);
  Branch(TaggedIsSmi(value), &if_valueissmi, &if_valueisnotsmi);
  Bind(&if_valueisnotsmi);
  {
    // Load the instance type of the {value}.
    Node* value_instance_type = LoadInstanceType(value);

    // Check if the {value} is already a String.
    Label if_valueisnotstring(this, Label::kDeferred);
    Branch(IsStringInstanceType(value_instance_type), &if_valueisstring,
           &if_valueisnotstring);
    Bind(&if_valueisnotstring);
    {
      // Check if the {value} is null.
      Label if_valueisnullorundefined(this, Label::kDeferred),
          if_valueisnotnullorundefined(this, Label::kDeferred),
          if_valueisnotnull(this, Label::kDeferred);
      Branch(WordEqual(value, NullConstant()), &if_valueisnullorundefined,
             &if_valueisnotnull);
      Bind(&if_valueisnotnull);
      {
        // Check if the {value} is undefined.
        Branch(WordEqual(value, UndefinedConstant()),
               &if_valueisnullorundefined, &if_valueisnotnullorundefined);
        Bind(&if_valueisnotnullorundefined);
        {
          // Convert the {value} to a String.
          Callable callable = CodeFactory::ToString(isolate());
          var_value.Bind(CallStub(callable, context, value));
          Goto(&if_valueisstring);
        }
      }

      Bind(&if_valueisnullorundefined);
      {
        // The {value} is either null or undefined.
        CallRuntime(Runtime::kThrowCalledOnNullOrUndefined, context,
                    HeapConstant(factory()->NewStringFromAsciiChecked(
                        method_name, TENURED)));
        Unreachable();
      }
    }
  }
  Bind(&if_valueissmi);
  {
    // The {value} is a Smi, convert it to a String.
    Callable callable = CodeFactory::NumberToString(isolate());
    var_value.Bind(CallStub(callable, context, value));
    Goto(&if_valueisstring);
  }
  Bind(&if_valueisstring);
  return var_value.value();
}

Node* CodeStubAssembler::ChangeNumberToFloat64(compiler::Node* value) {
  Variable result(this, MachineRepresentation::kFloat64);
  Label smi(this);
  Label done(this, &result);
  GotoIf(TaggedIsSmi(value), &smi);
  result.Bind(
      LoadObjectField(value, HeapNumber::kValueOffset, MachineType::Float64()));
  Goto(&done);

  Bind(&smi);
  {
    result.Bind(SmiToFloat64(value));
    Goto(&done);
  }

  Bind(&done);
  return result.value();
}

Node* CodeStubAssembler::ToThisValue(Node* context, Node* value,
                                     PrimitiveType primitive_type,
                                     char const* method_name) {
  // We might need to loop once due to JSValue unboxing.
  Variable var_value(this, MachineRepresentation::kTagged, value);
  Label loop(this, &var_value), done_loop(this),
      done_throw(this, Label::kDeferred);
  Goto(&loop);
  Bind(&loop);
  {
    // Load the current {value}.
    value = var_value.value();

    // Check if the {value} is a Smi or a HeapObject.
    GotoIf(TaggedIsSmi(value), (primitive_type == PrimitiveType::kNumber)
                                   ? &done_loop
                                   : &done_throw);

    // Load the map of the {value}.
    Node* value_map = LoadMap(value);

    // Load the instance type of the {value}.
    Node* value_instance_type = LoadMapInstanceType(value_map);

    // Check if {value} is a JSValue.
    Label if_valueisvalue(this, Label::kDeferred), if_valueisnotvalue(this);
    Branch(Word32Equal(value_instance_type, Int32Constant(JS_VALUE_TYPE)),
           &if_valueisvalue, &if_valueisnotvalue);

    Bind(&if_valueisvalue);
    {
      // Load the actual value from the {value}.
      var_value.Bind(LoadObjectField(value, JSValue::kValueOffset));
      Goto(&loop);
    }

    Bind(&if_valueisnotvalue);
    {
      switch (primitive_type) {
        case PrimitiveType::kBoolean:
          GotoIf(WordEqual(value_map, BooleanMapConstant()), &done_loop);
          break;
        case PrimitiveType::kNumber:
          GotoIf(
              Word32Equal(value_instance_type, Int32Constant(HEAP_NUMBER_TYPE)),
              &done_loop);
          break;
        case PrimitiveType::kString:
          GotoIf(IsStringInstanceType(value_instance_type), &done_loop);
          break;
        case PrimitiveType::kSymbol:
          GotoIf(Word32Equal(value_instance_type, Int32Constant(SYMBOL_TYPE)),
                 &done_loop);
          break;
      }
      Goto(&done_throw);
    }
  }

  Bind(&done_throw);
  {
    // The {value} is not a compatible receiver for this method.
    CallRuntime(Runtime::kThrowNotGeneric, context,
                HeapConstant(factory()->NewStringFromAsciiChecked(method_name,
                                                                  TENURED)));
    Unreachable();
  }

  Bind(&done_loop);
  return var_value.value();
}

Node* CodeStubAssembler::ThrowIfNotInstanceType(Node* context, Node* value,
                                                InstanceType instance_type,
                                                char const* method_name) {
  Label out(this), throw_exception(this, Label::kDeferred);
  Variable var_value_map(this, MachineRepresentation::kTagged);

  GotoIf(TaggedIsSmi(value), &throw_exception);

  // Load the instance type of the {value}.
  var_value_map.Bind(LoadMap(value));
  Node* const value_instance_type = LoadMapInstanceType(var_value_map.value());

  Branch(Word32Equal(value_instance_type, Int32Constant(instance_type)), &out,
         &throw_exception);

  // The {value} is not a compatible receiver for this method.
  Bind(&throw_exception);
  CallRuntime(
      Runtime::kThrowIncompatibleMethodReceiver, context,
      HeapConstant(factory()->NewStringFromAsciiChecked(method_name, TENURED)),
      value);
  Unreachable();

  Bind(&out);
  return var_value_map.value();
}

Node* CodeStubAssembler::InstanceTypeEqual(Node* instance_type, int type) {
  return Word32Equal(instance_type, Int32Constant(type));
}

Node* CodeStubAssembler::IsSpecialReceiverMap(Node* map) {
  Node* is_special = IsSpecialReceiverInstanceType(LoadMapInstanceType(map));
  uint32_t mask =
      1 << Map::kHasNamedInterceptor | 1 << Map::kIsAccessCheckNeeded;
  USE(mask);
  // Interceptors or access checks imply special receiver.
  CSA_ASSERT(this,
             SelectConstant(IsSetWord32(LoadMapBitField(map), mask), is_special,
                            Int32Constant(1), MachineRepresentation::kWord32));
  return is_special;
}

Node* CodeStubAssembler::IsDictionaryMap(Node* map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  Node* bit_field3 = LoadMapBitField3(map);
  return Word32NotEqual(IsSetWord32<Map::DictionaryMap>(bit_field3),
                        Int32Constant(0));
}

Node* CodeStubAssembler::IsCallableMap(Node* map) {
  CSA_ASSERT(this, IsMap(map));
  return Word32NotEqual(
      Word32And(LoadMapBitField(map), Int32Constant(1 << Map::kIsCallable)),
      Int32Constant(0));
}

Node* CodeStubAssembler::IsCallable(Node* object) {
  return IsCallableMap(LoadMap(object));
}

Node* CodeStubAssembler::IsConstructorMap(Node* map) {
  CSA_ASSERT(this, IsMap(map));
  return Word32NotEqual(
      Word32And(LoadMapBitField(map), Int32Constant(1 << Map::kIsConstructor)),
      Int32Constant(0));
}

IsSpecialReceiverInstanceType(Node * instance_type)2871 Node* CodeStubAssembler::IsSpecialReceiverInstanceType(Node* instance_type) {
2872 STATIC_ASSERT(JS_GLOBAL_OBJECT_TYPE <= LAST_SPECIAL_RECEIVER_TYPE);
2873 return Int32LessThanOrEqual(instance_type,
2874 Int32Constant(LAST_SPECIAL_RECEIVER_TYPE));
2875 }
2876
IsStringInstanceType(Node * instance_type)2877 Node* CodeStubAssembler::IsStringInstanceType(Node* instance_type) {
2878 STATIC_ASSERT(INTERNALIZED_STRING_TYPE == FIRST_TYPE);
2879 return Int32LessThan(instance_type, Int32Constant(FIRST_NONSTRING_TYPE));
2880 }
2881
IsJSReceiverInstanceType(Node * instance_type)2882 Node* CodeStubAssembler::IsJSReceiverInstanceType(Node* instance_type) {
2883 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2884 return Int32GreaterThanOrEqual(instance_type,
2885 Int32Constant(FIRST_JS_RECEIVER_TYPE));
2886 }
2887
IsJSReceiver(Node * object)2888 Node* CodeStubAssembler::IsJSReceiver(Node* object) {
2889 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
2890 return IsJSReceiverInstanceType(LoadInstanceType(object));
2891 }
2892
IsJSReceiverMap(Node * map)2893 Node* CodeStubAssembler::IsJSReceiverMap(Node* map) {
2894 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
2895 return IsJSReceiverInstanceType(LoadMapInstanceType(map));
2896 }
2897
IsJSObject(Node * object)2898 Node* CodeStubAssembler::IsJSObject(Node* object) {
2899 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
2900 return Int32GreaterThanOrEqual(LoadInstanceType(object),
2901 Int32Constant(FIRST_JS_RECEIVER_TYPE));
2902 }
2903
IsJSGlobalProxy(Node * object)2904 Node* CodeStubAssembler::IsJSGlobalProxy(Node* object) {
2905 return Word32Equal(LoadInstanceType(object),
2906 Int32Constant(JS_GLOBAL_PROXY_TYPE));
2907 }
2908
IsMap(Node * map)2909 Node* CodeStubAssembler::IsMap(Node* map) {
2910 return HasInstanceType(map, MAP_TYPE);
2911 }
2912
IsJSValue(Node * map)2913 Node* CodeStubAssembler::IsJSValue(Node* map) {
2914 return HasInstanceType(map, JS_VALUE_TYPE);
2915 }
2916
IsJSArray(Node * object)2917 Node* CodeStubAssembler::IsJSArray(Node* object) {
2918 return HasInstanceType(object, JS_ARRAY_TYPE);
2919 }
2920
IsWeakCell(Node * object)2921 Node* CodeStubAssembler::IsWeakCell(Node* object) {
2922 return HasInstanceType(object, WEAK_CELL_TYPE);
2923 }
2924
IsBoolean(Node * object)2925 Node* CodeStubAssembler::IsBoolean(Node* object) {
2926 return IsBooleanMap(LoadMap(object));
2927 }
2928
IsHeapNumber(Node * object)2929 Node* CodeStubAssembler::IsHeapNumber(Node* object) {
2930 return IsHeapNumberMap(LoadMap(object));
2931 }
2932
IsName(Node * object)2933 Node* CodeStubAssembler::IsName(Node* object) {
2934 return Int32LessThanOrEqual(LoadInstanceType(object),
2935 Int32Constant(LAST_NAME_TYPE));
2936 }
2937
Node* CodeStubAssembler::IsString(Node* object) {
  return Int32LessThan(LoadInstanceType(object),
                       Int32Constant(FIRST_NONSTRING_TYPE));
}

Node* CodeStubAssembler::IsSymbol(Node* object) {
  return IsSymbolMap(LoadMap(object));
}

Node* CodeStubAssembler::IsPrivateSymbol(Node* object) {
  return Select(
      IsSymbol(object),
      [=] {
        Node* const flags =
            SmiToWord32(LoadObjectField(object, Symbol::kFlagsOffset));
        const int kPrivateMask = 1 << Symbol::kPrivateBit;
        return IsSetWord32(flags, kPrivateMask);
      },
      [=] { return Int32Constant(0); }, MachineRepresentation::kWord32);
}

Node* CodeStubAssembler::IsNativeContext(Node* object) {
  return WordEqual(LoadMap(object), LoadRoot(Heap::kNativeContextMapRootIndex));
}

Node* CodeStubAssembler::IsFixedDoubleArray(Node* object) {
  return WordEqual(LoadMap(object), FixedDoubleArrayMapConstant());
}

Node* CodeStubAssembler::IsHashTable(Node* object) {
  return WordEqual(LoadMap(object), LoadRoot(Heap::kHashTableMapRootIndex));
}

Node* CodeStubAssembler::IsDictionary(Node* object) {
  return Word32Or(IsHashTable(object), IsUnseededNumberDictionary(object));
}

Node* CodeStubAssembler::IsUnseededNumberDictionary(Node* object) {
  return WordEqual(LoadMap(object),
                   LoadRoot(Heap::kUnseededNumberDictionaryMapRootIndex));
}

Node* CodeStubAssembler::IsJSFunction(Node* object) {
  return HasInstanceType(object, JS_FUNCTION_TYPE);
}

Node* CodeStubAssembler::StringCharCodeAt(Node* string, Node* index,
                                          ParameterMode parameter_mode) {
  CSA_ASSERT(this, IsString(string));
  // Translate the {index} into a Word.
  index = ParameterToWord(index, parameter_mode);

  // We may need to loop in case of cons, thin, or sliced strings.
  Variable var_index(this, MachineType::PointerRepresentation(), index);
  Variable var_string(this, MachineRepresentation::kTagged, string);
  Variable var_result(this, MachineRepresentation::kWord32);
  Variable* loop_vars[] = {&var_index, &var_string};
  Label done_loop(this, &var_result), loop(this, 2, loop_vars);
  Goto(&loop);
  Bind(&loop);
  {
    // Load the current {index}.
    index = var_index.value();

    // Load the current {string}.
    string = var_string.value();

    // Load the instance type of the {string}.
    Node* string_instance_type = LoadInstanceType(string);

    // Check if the {string} is a SeqString.
    Label if_stringissequential(this), if_stringisnotsequential(this);
    Branch(Word32Equal(Word32And(string_instance_type,
                                 Int32Constant(kStringRepresentationMask)),
                       Int32Constant(kSeqStringTag)),
           &if_stringissequential, &if_stringisnotsequential);

    Bind(&if_stringissequential);
    {
      // Check if the {string} is a TwoByteSeqString or a OneByteSeqString.
      Label if_stringistwobyte(this), if_stringisonebyte(this);
      Branch(Word32Equal(Word32And(string_instance_type,
                                   Int32Constant(kStringEncodingMask)),
                         Int32Constant(kTwoByteStringTag)),
             &if_stringistwobyte, &if_stringisonebyte);

      Bind(&if_stringisonebyte);
      {
        var_result.Bind(
            Load(MachineType::Uint8(), string,
                 IntPtrAdd(index, IntPtrConstant(SeqOneByteString::kHeaderSize -
                                                 kHeapObjectTag))));
        Goto(&done_loop);
      }

      Bind(&if_stringistwobyte);
      {
        var_result.Bind(
            Load(MachineType::Uint16(), string,
                 IntPtrAdd(WordShl(index, IntPtrConstant(1)),
                           IntPtrConstant(SeqTwoByteString::kHeaderSize -
                                          kHeapObjectTag))));
        Goto(&done_loop);
      }
    }

    Bind(&if_stringisnotsequential);
    {
      // Check if the {string} is a ConsString.
      Label if_stringiscons(this), if_stringisnotcons(this);
      Branch(Word32Equal(Word32And(string_instance_type,
                                   Int32Constant(kStringRepresentationMask)),
                         Int32Constant(kConsStringTag)),
             &if_stringiscons, &if_stringisnotcons);

      Bind(&if_stringiscons);
      {
        // Check whether the right hand side is the empty string (i.e. if
        // this is really a flat string in a cons string). If that is not
        // the case we flatten the string first.
        Label if_rhsisempty(this), if_rhsisnotempty(this, Label::kDeferred);
        Node* rhs = LoadObjectField(string, ConsString::kSecondOffset);
        Branch(WordEqual(rhs, EmptyStringConstant()), &if_rhsisempty,
               &if_rhsisnotempty);

        Bind(&if_rhsisempty);
        {
          // Just operate on the left hand side of the {string}.
          var_string.Bind(LoadObjectField(string, ConsString::kFirstOffset));
          Goto(&loop);
        }

        Bind(&if_rhsisnotempty);
        {
          // Flatten the {string} and lookup in the resulting string.
          var_string.Bind(CallRuntime(Runtime::kFlattenString,
                                      NoContextConstant(), string));
          Goto(&loop);
        }
      }

      Bind(&if_stringisnotcons);
      {
        // Check if the {string} is an ExternalString.
        Label if_stringisexternal(this), if_stringisnotexternal(this);
        Branch(Word32Equal(Word32And(string_instance_type,
                                     Int32Constant(kStringRepresentationMask)),
                           Int32Constant(kExternalStringTag)),
               &if_stringisexternal, &if_stringisnotexternal);

        Bind(&if_stringisexternal);
        {
          // Check if the {string} is a short external string.
          Label if_stringisnotshort(this),
              if_stringisshort(this, Label::kDeferred);
          Branch(Word32Equal(Word32And(string_instance_type,
                                       Int32Constant(kShortExternalStringMask)),
                             Int32Constant(0)),
                 &if_stringisnotshort, &if_stringisshort);

          Bind(&if_stringisnotshort);
          {
            // Load the actual resource data from the {string}.
            Node* string_resource_data =
                LoadObjectField(string, ExternalString::kResourceDataOffset,
                                MachineType::Pointer());

            // Check if the {string} is a TwoByteExternalString or a
            // OneByteExternalString.
            Label if_stringistwobyte(this), if_stringisonebyte(this);
            Branch(Word32Equal(Word32And(string_instance_type,
                                         Int32Constant(kStringEncodingMask)),
                               Int32Constant(kTwoByteStringTag)),
                   &if_stringistwobyte, &if_stringisonebyte);

            Bind(&if_stringisonebyte);
            {
              var_result.Bind(
                  Load(MachineType::Uint8(), string_resource_data, index));
              Goto(&done_loop);
            }

            Bind(&if_stringistwobyte);
            {
              var_result.Bind(Load(MachineType::Uint16(), string_resource_data,
                                   WordShl(index, IntPtrConstant(1))));
              Goto(&done_loop);
            }
          }

          Bind(&if_stringisshort);
          {
            // The {string} might be compressed, so call the runtime.
            var_result.Bind(SmiToWord32(
                CallRuntime(Runtime::kExternalStringGetChar,
                            NoContextConstant(), string, SmiTag(index))));
            Goto(&done_loop);
          }
        }

        Bind(&if_stringisnotexternal);
        {
          Label if_stringissliced(this), if_stringisthin(this);
          Branch(
              Word32Equal(Word32And(string_instance_type,
                                    Int32Constant(kStringRepresentationMask)),
                          Int32Constant(kSlicedStringTag)),
              &if_stringissliced, &if_stringisthin);
          Bind(&if_stringissliced);
          {
            // The {string} is a SlicedString, continue with its parent.
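            // A SlicedString is a (parent, offset, length) view into another
            // string, so the lookup reduces to index arithmetic: e.g. with
            // offset 3, charCodeAt(2) reads parent index 3 + 2 == 5.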
            Node* string_offset =
                LoadAndUntagObjectField(string, SlicedString::kOffsetOffset);
            Node* string_parent =
                LoadObjectField(string, SlicedString::kParentOffset);
            var_index.Bind(IntPtrAdd(index, string_offset));
            var_string.Bind(string_parent);
            Goto(&loop);
          }
          Bind(&if_stringisthin);
          {
            // The {string} is a ThinString, continue with its actual value.
            var_string.Bind(LoadObjectField(string, ThinString::kActualOffset));
            Goto(&loop);
          }
        }
      }
    }
  }

  Bind(&done_loop);
  return var_result.value();
}

Node* CodeStubAssembler::StringFromCharCode(Node* code) {
  Variable var_result(this, MachineRepresentation::kTagged);

  // Check if the {code} is a one-byte char code.
  Label if_codeisonebyte(this), if_codeistwobyte(this, Label::kDeferred),
      if_done(this);
  Branch(Int32LessThanOrEqual(code, Int32Constant(String::kMaxOneByteCharCode)),
         &if_codeisonebyte, &if_codeistwobyte);
  Bind(&if_codeisonebyte);
  {
    // Load the isolate wide single character string cache.
    Node* cache = LoadRoot(Heap::kSingleCharacterStringCacheRootIndex);
    Node* code_index = ChangeUint32ToWord(code);

    // Check if we have an entry for the {code} in the single character string
    // cache already.
    Label if_entryisundefined(this, Label::kDeferred),
        if_entryisnotundefined(this);
    Node* entry = LoadFixedArrayElement(cache, code_index);
    Branch(WordEqual(entry, UndefinedConstant()), &if_entryisundefined,
           &if_entryisnotundefined);

    Bind(&if_entryisundefined);
    {
      // Allocate a new SeqOneByteString for {code} and store it in the {cache}.
      Node* result = AllocateSeqOneByteString(1);
      StoreNoWriteBarrier(
          MachineRepresentation::kWord8, result,
          IntPtrConstant(SeqOneByteString::kHeaderSize - kHeapObjectTag), code);
      StoreFixedArrayElement(cache, code_index, result);
      var_result.Bind(result);
      Goto(&if_done);
    }

    Bind(&if_entryisnotundefined);
    {
      // Return the entry from the {cache}.
      var_result.Bind(entry);
      Goto(&if_done);
    }
  }

  Bind(&if_codeistwobyte);
  {
    // Allocate a new SeqTwoByteString for {code}.
    Node* result = AllocateSeqTwoByteString(1);
    StoreNoWriteBarrier(
        MachineRepresentation::kWord16, result,
        IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag), code);
    var_result.Bind(result);
    Goto(&if_done);
  }

  Bind(&if_done);
  return var_result.value();
}

namespace {

// A wrapper around CopyStringCharacters which determines the correct string
// encoding, allocates a corresponding sequential string, and then copies the
// given character range using CopyStringCharacters.
// |from_string| must be a sequential string. |from_index| and
// |character_count| must be Smis such that
// 0 <= |from_index| <= |from_index| + |character_count| <= from_string.length.
Node* AllocAndCopyStringCharacters(CodeStubAssembler* a, Node* context,
                                   Node* from, Node* from_instance_type,
                                   Node* from_index, Node* character_count) {
  typedef CodeStubAssembler::Label Label;
  typedef CodeStubAssembler::Variable Variable;

  Label end(a), two_byte_sequential(a);
  Variable var_result(a, MachineRepresentation::kTagged);

  Node* const smi_zero = a->SmiConstant(Smi::kZero);

  STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
  a->GotoIf(a->Word32Equal(a->Word32And(from_instance_type,
                                        a->Int32Constant(kStringEncodingMask)),
                           a->Int32Constant(0)),
            &two_byte_sequential);

  // The subject string is a sequential one-byte string.
  {
    Node* result =
        a->AllocateSeqOneByteString(context, a->SmiToWord(character_count));
    a->CopyStringCharacters(from, result, from_index, smi_zero, character_count,
                            String::ONE_BYTE_ENCODING,
                            String::ONE_BYTE_ENCODING,
                            CodeStubAssembler::SMI_PARAMETERS);
    var_result.Bind(result);

    a->Goto(&end);
  }

  // The subject string is a sequential two-byte string.
  a->Bind(&two_byte_sequential);
  {
    Node* result =
        a->AllocateSeqTwoByteString(context, a->SmiToWord(character_count));
    a->CopyStringCharacters(from, result, from_index, smi_zero, character_count,
                            String::TWO_BYTE_ENCODING,
                            String::TWO_BYTE_ENCODING,
                            CodeStubAssembler::SMI_PARAMETERS);
    var_result.Bind(result);

    a->Goto(&end);
  }

  a->Bind(&end);
  return var_result.value();
}

}  // namespace

Node* CodeStubAssembler::SubString(Node* context, Node* string, Node* from,
                                   Node* to) {
  Label end(this);
  Label runtime(this);

  Node* const int_zero = Int32Constant(0);

  // Int32 variables.
  Variable var_instance_type(this, MachineRepresentation::kWord32, int_zero);
  Variable var_representation(this, MachineRepresentation::kWord32, int_zero);

  Variable var_from(this, MachineRepresentation::kTagged, from);      // Smi.
  Variable var_string(this, MachineRepresentation::kTagged, string);  // String.
  Variable var_result(this, MachineRepresentation::kTagged);          // String.

  // Make sure first argument is a string.
  CSA_ASSERT(this, TaggedIsNotSmi(string));
  CSA_ASSERT(this, IsString(string));

  // Load the instance type of the {string}.
  Node* const instance_type = LoadInstanceType(string);
  var_instance_type.Bind(instance_type);

  // Make sure that both from and to are non-negative smis.

  GotoIfNot(TaggedIsPositiveSmi(from), &runtime);
  GotoIfNot(TaggedIsPositiveSmi(to), &runtime);

  Node* const substr_length = SmiSub(to, from);
  Node* const string_length = LoadStringLength(string);

  // Begin dispatching based on substring length.

  Label original_string_or_invalid_length(this);
  GotoIf(SmiAboveOrEqual(substr_length, string_length),
         &original_string_or_invalid_length);

  // A real substring (substr_length < string_length).

  Label single_char(this);
  GotoIf(SmiEqual(substr_length, SmiConstant(Smi::FromInt(1))), &single_char);

  // TODO(jgruber): Add an additional case for substring of length == 0?

  // Deal with different string types: update the index if necessary
  // and put the underlying string into var_string.

  // If the string is not indirect, it can only be sequential or external.
  STATIC_ASSERT(kIsIndirectStringMask ==
                (kSlicedStringTag & kConsStringTag & kThinStringTag));
  STATIC_ASSERT(kIsIndirectStringMask != 0);
  Label underlying_unpacked(this);
  GotoIf(Word32Equal(
             Word32And(instance_type, Int32Constant(kIsIndirectStringMask)),
             Int32Constant(0)),
         &underlying_unpacked);

  // The subject string is a sliced, cons, or thin string.

  Label thin_string(this), thin_or_sliced(this);
  var_representation.Bind(
      Word32And(instance_type, Int32Constant(kStringRepresentationMask)));
  GotoIf(
      Word32NotEqual(var_representation.value(), Int32Constant(kConsStringTag)),
      &thin_or_sliced);

  // Cons string. Check whether it is flat, then fetch first part.
  // Flat cons strings have an empty second part.
  {
    GotoIf(WordNotEqual(LoadObjectField(string, ConsString::kSecondOffset),
                        EmptyStringConstant()),
           &runtime);

    Node* first_string_part = LoadObjectField(string, ConsString::kFirstOffset);
    var_string.Bind(first_string_part);
    var_instance_type.Bind(LoadInstanceType(first_string_part));
    var_representation.Bind(Word32And(
        var_instance_type.value(), Int32Constant(kStringRepresentationMask)));

    // The loaded first part might be a thin string.
    Branch(Word32Equal(Word32And(var_instance_type.value(),
                                 Int32Constant(kIsIndirectStringMask)),
                       Int32Constant(0)),
           &underlying_unpacked, &thin_string);
  }

  Bind(&thin_or_sliced);
  {
    GotoIf(
        Word32Equal(var_representation.value(), Int32Constant(kThinStringTag)),
        &thin_string);
    // Otherwise it's a sliced string.
    // Fetch parent and correct start index by offset.
    Node* sliced_offset =
        LoadObjectField(var_string.value(), SlicedString::kOffsetOffset);
    var_from.Bind(SmiAdd(from, sliced_offset));

    Node* slice_parent = LoadObjectField(string, SlicedString::kParentOffset);
    var_string.Bind(slice_parent);

    Node* slice_parent_instance_type = LoadInstanceType(slice_parent);
    var_instance_type.Bind(slice_parent_instance_type);

    // The loaded parent might be a thin string.
    Branch(Word32Equal(Word32And(var_instance_type.value(),
                                 Int32Constant(kIsIndirectStringMask)),
                       Int32Constant(0)),
           &underlying_unpacked, &thin_string);
  }

  Bind(&thin_string);
  {
    Node* actual_string =
        LoadObjectField(var_string.value(), ThinString::kActualOffset);
    var_string.Bind(actual_string);
    var_instance_type.Bind(LoadInstanceType(actual_string));
    Goto(&underlying_unpacked);
  }

  // The subject string can only be external or sequential string of either
  // encoding at this point.
  Label external_string(this);
  Bind(&underlying_unpacked);
  {
    if (FLAG_string_slices) {
      Label copy_routine(this);

      // Short slice. Copy instead of slicing.
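      // (A slice below SlicedString::kMinLength would keep the whole parent
      // string alive for little gain, so the characters are copied into a
      // fresh sequential string instead.)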
      GotoIf(SmiLessThan(substr_length,
                         SmiConstant(Smi::FromInt(SlicedString::kMinLength))),
             &copy_routine);

      // Allocate new sliced string.

      Label two_byte_slice(this);
      STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
      STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);

      Counters* counters = isolate()->counters();
      IncrementCounter(counters->sub_string_native(), 1);

      GotoIf(Word32Equal(Word32And(var_instance_type.value(),
                                   Int32Constant(kStringEncodingMask)),
                         Int32Constant(0)),
             &two_byte_slice);

      var_result.Bind(AllocateSlicedOneByteString(
          substr_length, var_string.value(), var_from.value()));
      Goto(&end);

      Bind(&two_byte_slice);

      var_result.Bind(AllocateSlicedTwoByteString(
          substr_length, var_string.value(), var_from.value()));
      Goto(&end);

      Bind(&copy_routine);
    }

    // The subject string can only be external or sequential string of either
    // encoding at this point.
    STATIC_ASSERT(kExternalStringTag != 0);
    STATIC_ASSERT(kSeqStringTag == 0);
    GotoIfNot(Word32Equal(Word32And(var_instance_type.value(),
                                    Int32Constant(kExternalStringTag)),
                          Int32Constant(0)),
              &external_string);

    var_result.Bind(AllocAndCopyStringCharacters(
        this, context, var_string.value(), var_instance_type.value(),
        var_from.value(), substr_length));

    Counters* counters = isolate()->counters();
    IncrementCounter(counters->sub_string_native(), 1);

    Goto(&end);
  }

  // Handle external string.
  Bind(&external_string);
  {
    Node* const fake_sequential_string = TryDerefExternalString(
        var_string.value(), var_instance_type.value(), &runtime);

    var_result.Bind(AllocAndCopyStringCharacters(
        this, context, fake_sequential_string, var_instance_type.value(),
        var_from.value(), substr_length));

    Counters* counters = isolate()->counters();
    IncrementCounter(counters->sub_string_native(), 1);

    Goto(&end);
  }

  // Substrings of length 1 are generated through CharCodeAt and FromCharCode.
  Bind(&single_char);
  {
    Node* char_code = StringCharCodeAt(var_string.value(), var_from.value());
    var_result.Bind(StringFromCharCode(char_code));
    Goto(&end);
  }

  Bind(&original_string_or_invalid_length);
  {
    // Longer than original string's length or negative: unsafe arguments.
    GotoIf(SmiAbove(substr_length, string_length), &runtime);

    // Equal length - check if {from, to} == {0, str.length}.
    GotoIf(SmiAbove(from, SmiConstant(Smi::kZero)), &runtime);

    // Return the original string (substr_length == string_length).

    Counters* counters = isolate()->counters();
    IncrementCounter(counters->sub_string_native(), 1);

    var_result.Bind(string);
    Goto(&end);
  }

  // Fall back to a runtime call.
  Bind(&runtime);
  {
    var_result.Bind(
        CallRuntime(Runtime::kSubString, context, string, from, to));
    Goto(&end);
  }

  Bind(&end);
  return var_result.value();
}

namespace {

Node* IsExternalStringInstanceType(CodeStubAssembler* a,
                                   Node* const instance_type) {
  CSA_ASSERT(a, a->IsStringInstanceType(instance_type));
  return a->Word32Equal(
      a->Word32And(instance_type, a->Int32Constant(kStringRepresentationMask)),
      a->Int32Constant(kExternalStringTag));
}

Node* IsShortExternalStringInstanceType(CodeStubAssembler* a,
                                        Node* const instance_type) {
  CSA_ASSERT(a, a->IsStringInstanceType(instance_type));
  STATIC_ASSERT(kShortExternalStringTag != 0);
  return a->Word32NotEqual(
      a->Word32And(instance_type, a->Int32Constant(kShortExternalStringMask)),
      a->Int32Constant(0));
}

}  // namespace

Node* CodeStubAssembler::TryDerefExternalString(Node* const string,
                                                Node* const instance_type,
                                                Label* if_bailout) {
  Label out(this);

  USE(IsExternalStringInstanceType);
  CSA_ASSERT(this, IsExternalStringInstanceType(this, instance_type));
  GotoIf(IsShortExternalStringInstanceType(this, instance_type), if_bailout);

  // Move the pointer so that offset-wise, it looks like a sequential string.
  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);

  Node* resource_data = LoadObjectField(
      string, ExternalString::kResourceDataOffset, MachineType::Pointer());
  Node* const fake_sequential_string =
      IntPtrSub(resource_data,
                IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag));

  return fake_sequential_string;
}

void CodeStubAssembler::MaybeDerefIndirectString(Variable* var_string,
                                                 Node* instance_type,
                                                 Variable* var_did_something) {
  Label deref(this), done(this, var_did_something);
  Node* representation =
      Word32And(instance_type, Int32Constant(kStringRepresentationMask));
  GotoIf(Word32Equal(representation, Int32Constant(kThinStringTag)), &deref);
  GotoIf(Word32NotEqual(representation, Int32Constant(kConsStringTag)), &done);
  // Cons string.
  Node* rhs = LoadObjectField(var_string->value(), ConsString::kSecondOffset);
  GotoIf(WordEqual(rhs, EmptyStringConstant()), &deref);
  Goto(&done);

  Bind(&deref);
  STATIC_ASSERT(ThinString::kActualOffset == ConsString::kFirstOffset);
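  // Thanks to the assert above, a single field load handles both cases:
  // ThinString::actual and the first part of a flat ConsString live at the
  // same offset.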
  var_string->Bind(
      LoadObjectField(var_string->value(), ThinString::kActualOffset));
  var_did_something->Bind(IntPtrConstant(1));
  Goto(&done);

  Bind(&done);
}

void CodeStubAssembler::MaybeDerefIndirectStrings(Variable* var_left,
                                                  Node* left_instance_type,
                                                  Variable* var_right,
                                                  Node* right_instance_type,
                                                  Label* did_something) {
  Variable var_did_something(this, MachineType::PointerRepresentation(),
                             IntPtrConstant(0));
  MaybeDerefIndirectString(var_left, left_instance_type, &var_did_something);
  MaybeDerefIndirectString(var_right, right_instance_type, &var_did_something);

  GotoIf(WordNotEqual(var_did_something.value(), IntPtrConstant(0)),
         did_something);
  // Fall through if neither string was an indirect string.
}

Node* CodeStubAssembler::StringAdd(Node* context, Node* left, Node* right,
                                   AllocationFlags flags) {
  Label check_right(this);
  Label runtime(this, Label::kDeferred);
  Label cons(this);
  Variable result(this, MachineRepresentation::kTagged);
  Label done(this, &result);
  Label done_native(this, &result);
  Counters* counters = isolate()->counters();

  Node* left_length = LoadStringLength(left);
  GotoIf(WordNotEqual(IntPtrConstant(0), left_length), &check_right);
  result.Bind(right);
  Goto(&done_native);

  Bind(&check_right);
  Node* right_length = LoadStringLength(right);
  GotoIf(WordNotEqual(IntPtrConstant(0), right_length), &cons);
  result.Bind(left);
  Goto(&done_native);

  Bind(&cons);
  {
    CSA_ASSERT(this, TaggedIsSmi(left_length));
    CSA_ASSERT(this, TaggedIsSmi(right_length));
    Node* new_length = SmiAdd(left_length, right_length);
    GotoIf(SmiAboveOrEqual(new_length, SmiConstant(String::kMaxLength)),
           &runtime);

    Variable var_left(this, MachineRepresentation::kTagged, left);
    Variable var_right(this, MachineRepresentation::kTagged, right);
    Variable* input_vars[2] = {&var_left, &var_right};
    Label non_cons(this, 2, input_vars);
    Label slow(this, Label::kDeferred);
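    // Results shorter than ConsString::kMinLength are built by copying the
    // characters; only longer results get a ConsString, which defers the copy
    // at the cost of an extra indirection.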
    GotoIf(SmiLessThan(new_length, SmiConstant(ConsString::kMinLength)),
           &non_cons);

    result.Bind(NewConsString(context, new_length, var_left.value(),
                              var_right.value(), flags));
    Goto(&done_native);

    Bind(&non_cons);

    Comment("Full string concatenate");
    Node* left_instance_type = LoadInstanceType(var_left.value());
    Node* right_instance_type = LoadInstanceType(var_right.value());
    // Compute intersection and difference of instance types.

    Node* ored_instance_types =
        Word32Or(left_instance_type, right_instance_type);
    Node* xored_instance_types =
        Word32Xor(left_instance_type, right_instance_type);

    // Check if both strings have the same encoding and both are sequential.
    GotoIf(Word32NotEqual(Word32And(xored_instance_types,
                                    Int32Constant(kStringEncodingMask)),
                          Int32Constant(0)),
           &runtime);
    GotoIf(Word32NotEqual(Word32And(ored_instance_types,
                                    Int32Constant(kStringRepresentationMask)),
                          Int32Constant(0)),
           &slow);

    Label two_byte(this);
    GotoIf(Word32Equal(Word32And(ored_instance_types,
                                 Int32Constant(kStringEncodingMask)),
                       Int32Constant(kTwoByteStringTag)),
           &two_byte);
    // One-byte sequential string case
    Node* new_string =
        AllocateSeqOneByteString(context, new_length, SMI_PARAMETERS);
    CopyStringCharacters(var_left.value(), new_string, SmiConstant(Smi::kZero),
                         SmiConstant(Smi::kZero), left_length,
                         String::ONE_BYTE_ENCODING, String::ONE_BYTE_ENCODING,
                         SMI_PARAMETERS);
    CopyStringCharacters(var_right.value(), new_string, SmiConstant(Smi::kZero),
                         left_length, right_length, String::ONE_BYTE_ENCODING,
                         String::ONE_BYTE_ENCODING, SMI_PARAMETERS);
    result.Bind(new_string);
    Goto(&done_native);

    Bind(&two_byte);
    {
      // Two-byte sequential string case
      new_string =
          AllocateSeqTwoByteString(context, new_length, SMI_PARAMETERS);
      CopyStringCharacters(var_left.value(), new_string,
                           SmiConstant(Smi::kZero), SmiConstant(Smi::kZero),
                           left_length, String::TWO_BYTE_ENCODING,
                           String::TWO_BYTE_ENCODING, SMI_PARAMETERS);
      CopyStringCharacters(var_right.value(), new_string,
                           SmiConstant(Smi::kZero), left_length, right_length,
                           String::TWO_BYTE_ENCODING, String::TWO_BYTE_ENCODING,
                           SMI_PARAMETERS);
      result.Bind(new_string);
      Goto(&done_native);
    }

    Bind(&slow);
    {
      // Try to unwrap indirect strings, restart the above attempt on success.
      MaybeDerefIndirectStrings(&var_left, left_instance_type, &var_right,
                                right_instance_type, &non_cons);
      Goto(&runtime);
    }
  }
  Bind(&runtime);
  {
    result.Bind(CallRuntime(Runtime::kStringAdd, context, left, right));
    Goto(&done);
  }

  Bind(&done_native);
  {
    IncrementCounter(counters->string_add_native(), 1);
    Goto(&done);
  }

  Bind(&done);
  return result.value();
}

Node* CodeStubAssembler::StringFromCodePoint(Node* codepoint,
                                             UnicodeEncoding encoding) {
  Variable var_result(this, MachineRepresentation::kTagged,
                      EmptyStringConstant());

  Label if_isword16(this), if_isword32(this), return_result(this);

  Branch(Uint32LessThan(codepoint, Int32Constant(0x10000)), &if_isword16,
         &if_isword32);

  Bind(&if_isword16);
  {
    var_result.Bind(StringFromCharCode(codepoint));
    Goto(&return_result);
  }

  Bind(&if_isword32);
  {
    switch (encoding) {
      case UnicodeEncoding::UTF16:
        break;
      case UnicodeEncoding::UTF32: {
        // Convert UTF32 to UTF16 code units, and store as a 32 bit word.
        Node* lead_offset = Int32Constant(0xD800 - (0x10000 >> 10));
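        // Worked example: for U+1F600, lead == (0x1F600 >> 10) + 0xD7C0 ==
        // 0xD83D and trail == (0x1F600 & 0x3FF) + 0xDC00 == 0xDE00, i.e. the
        // usual surrogate pair.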

        // lead = (codepoint >> 10) + LEAD_OFFSET
        Node* lead =
            Int32Add(WordShr(codepoint, Int32Constant(10)), lead_offset);

        // trail = (codepoint & 0x3FF) + 0xDC00;
        Node* trail = Int32Add(Word32And(codepoint, Int32Constant(0x3FF)),
                               Int32Constant(0xDC00));

        // codepoint = (trail << 16) | lead;
        codepoint = Word32Or(WordShl(trail, Int32Constant(16)), lead);
        break;
      }
    }

    Node* value = AllocateSeqTwoByteString(2);
    StoreNoWriteBarrier(
        MachineRepresentation::kWord32, value,
        IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag),
        codepoint);
    var_result.Bind(value);
    Goto(&return_result);
  }

  Bind(&return_result);
  return var_result.value();
}

Node* CodeStubAssembler::StringToNumber(Node* context, Node* input) {
  Label runtime(this, Label::kDeferred);
  Label end(this);

  Variable var_result(this, MachineRepresentation::kTagged);

  // Check if string has a cached array index.
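  // Strings that are known to be array indices (e.g. "123") cache the numeric
  // value in their hash field, so the common case needs no parsing at all.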
  Node* hash = LoadNameHashField(input);
  Node* bit =
      Word32And(hash, Int32Constant(String::kContainsCachedArrayIndexMask));
  GotoIf(Word32NotEqual(bit, Int32Constant(0)), &runtime);

  var_result.Bind(
      SmiTag(DecodeWordFromWord32<String::ArrayIndexValueBits>(hash)));
  Goto(&end);

  Bind(&runtime);
  {
    var_result.Bind(CallRuntime(Runtime::kStringToNumber, context, input));
    Goto(&end);
  }

  Bind(&end);
  return var_result.value();
}

Node* CodeStubAssembler::NumberToString(Node* context, Node* argument) {
  Variable result(this, MachineRepresentation::kTagged);
  Label runtime(this, Label::kDeferred);
  Label smi(this);
  Label done(this, &result);

  // Load the number string cache.
  Node* number_string_cache = LoadRoot(Heap::kNumberStringCacheRootIndex);

  // Make the hash mask from the length of the number string cache. It
  // contains two elements (number and string) for each cache entry.
  // TODO(ishell): cleanup mask handling.
  Node* mask =
      BitcastTaggedToWord(LoadFixedArrayBaseLength(number_string_cache));
  Node* one = IntPtrConstant(1);
  mask = IntPtrSub(mask, one);

  GotoIf(TaggedIsSmi(argument), &smi);

  // Argument isn't smi, check to see if it's a heap-number.
  Node* map = LoadMap(argument);
  GotoIfNot(IsHeapNumberMap(map), &runtime);

  // Make a hash from the two 32-bit values of the double.
  Node* low =
      LoadObjectField(argument, HeapNumber::kValueOffset, MachineType::Int32());
  Node* high = LoadObjectField(argument, HeapNumber::kValueOffset + kIntSize,
                               MachineType::Int32());
  Node* hash = Word32Xor(low, high);
  hash = ChangeInt32ToIntPtr(hash);
  hash = WordShl(hash, one);
  Node* index = WordAnd(hash, SmiUntag(BitcastWordToTagged(mask)));
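  // {index} now points at the key slot of a cache entry; the cached string
  // lives in the following slot (hence the kPointerSize offset when the value
  // is loaded below).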

  // Cache entry's key must be a heap number
  Node* number_key = LoadFixedArrayElement(number_string_cache, index);
  GotoIf(TaggedIsSmi(number_key), &runtime);
  map = LoadMap(number_key);
  GotoIfNot(IsHeapNumberMap(map), &runtime);

  // Cache entry's key must match the heap number value we're looking for.
  Node* low_compare = LoadObjectField(number_key, HeapNumber::kValueOffset,
                                      MachineType::Int32());
  Node* high_compare = LoadObjectField(
      number_key, HeapNumber::kValueOffset + kIntSize, MachineType::Int32());
  GotoIfNot(Word32Equal(low, low_compare), &runtime);
  GotoIfNot(Word32Equal(high, high_compare), &runtime);

  // Heap number match, return value from cache entry.
  IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
  result.Bind(LoadFixedArrayElement(number_string_cache, index, kPointerSize));
  Goto(&done);

  Bind(&runtime);
  {
    // No cache entry, go to the runtime.
    result.Bind(CallRuntime(Runtime::kNumberToString, context, argument));
  }
  Goto(&done);

  Bind(&smi);
  {
    // Load the smi key, make sure it matches the smi we're looking for.
    Node* smi_index = BitcastWordToTagged(
        WordAnd(WordShl(BitcastTaggedToWord(argument), one), mask));
    Node* smi_key = LoadFixedArrayElement(number_string_cache, smi_index, 0,
                                          SMI_PARAMETERS);
    GotoIf(WordNotEqual(smi_key, argument), &runtime);

    // Smi match, return value from cache entry.
    IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
    result.Bind(LoadFixedArrayElement(number_string_cache, smi_index,
                                      kPointerSize, SMI_PARAMETERS));
    Goto(&done);
  }

  Bind(&done);
  return result.value();
}

Node* CodeStubAssembler::ToName(Node* context, Node* value) {
  Label end(this);
  Variable var_result(this, MachineRepresentation::kTagged);

  Label is_number(this);
  GotoIf(TaggedIsSmi(value), &is_number);

  Label not_name(this);
  Node* value_instance_type = LoadInstanceType(value);
  STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
  GotoIf(Int32GreaterThan(value_instance_type, Int32Constant(LAST_NAME_TYPE)),
         &not_name);

  var_result.Bind(value);
  Goto(&end);

  Bind(&is_number);
  {
    Callable callable = CodeFactory::NumberToString(isolate());
    var_result.Bind(CallStub(callable, context, value));
    Goto(&end);
  }

  Bind(&not_name);
  {
    GotoIf(Word32Equal(value_instance_type, Int32Constant(HEAP_NUMBER_TYPE)),
           &is_number);

    Label not_oddball(this);
    GotoIf(Word32NotEqual(value_instance_type, Int32Constant(ODDBALL_TYPE)),
           &not_oddball);

    var_result.Bind(LoadObjectField(value, Oddball::kToStringOffset));
    Goto(&end);

    Bind(&not_oddball);
    {
      var_result.Bind(CallRuntime(Runtime::kToName, context, value));
      Goto(&end);
    }
  }

  Bind(&end);
  return var_result.value();
}

Node* CodeStubAssembler::NonNumberToNumber(Node* context, Node* input) {
  // Assert input is a HeapObject (not smi or heap number)
  CSA_ASSERT(this, Word32BinaryNot(TaggedIsSmi(input)));
  CSA_ASSERT(this, Word32BinaryNot(IsHeapNumberMap(LoadMap(input))));

  // We might need to loop once here due to ToPrimitive conversions.
  Variable var_input(this, MachineRepresentation::kTagged, input);
  Variable var_result(this, MachineRepresentation::kTagged);
  Label loop(this, &var_input);
  Label end(this);
  Goto(&loop);
  Bind(&loop);
  {
    // Load the current {input} value (known to be a HeapObject).
    Node* input = var_input.value();

    // Dispatch on the {input} instance type.
    Node* input_instance_type = LoadInstanceType(input);
    Label if_inputisstring(this), if_inputisoddball(this),
        if_inputisreceiver(this, Label::kDeferred),
        if_inputisother(this, Label::kDeferred);
    GotoIf(IsStringInstanceType(input_instance_type), &if_inputisstring);
    GotoIf(Word32Equal(input_instance_type, Int32Constant(ODDBALL_TYPE)),
           &if_inputisoddball);
    Branch(IsJSReceiverInstanceType(input_instance_type), &if_inputisreceiver,
           &if_inputisother);

    Bind(&if_inputisstring);
    {
      // The {input} is a String, use the fast stub to convert it to a Number.
      var_result.Bind(StringToNumber(context, input));
      Goto(&end);
    }

    Bind(&if_inputisoddball);
    {
      // The {input} is an Oddball; we just need to load its Number value.
      var_result.Bind(LoadObjectField(input, Oddball::kToNumberOffset));
      Goto(&end);
    }

    Bind(&if_inputisreceiver);
    {
      // The {input} is a JSReceiver, we need to convert it to a Primitive first
      // using the ToPrimitive type conversion, preferably yielding a Number.
      Callable callable = CodeFactory::NonPrimitiveToPrimitive(
          isolate(), ToPrimitiveHint::kNumber);
      Node* result = CallStub(callable, context, input);

      // Check if the {result} is already a Number.
      Label if_resultisnumber(this), if_resultisnotnumber(this);
      GotoIf(TaggedIsSmi(result), &if_resultisnumber);
      Node* result_map = LoadMap(result);
      Branch(IsHeapNumberMap(result_map), &if_resultisnumber,
             &if_resultisnotnumber);

      Bind(&if_resultisnumber);
      {
        // The ToPrimitive conversion already gave us a Number, so we're done.
        var_result.Bind(result);
        Goto(&end);
      }

      Bind(&if_resultisnotnumber);
      {
        // We now have a Primitive {result}, but it's not yet a Number.
        var_input.Bind(result);
        Goto(&loop);
      }
    }

    Bind(&if_inputisother);
    {
      // The {input} is something else (e.g. a Symbol); let the runtime figure
      // out the correct exception.
      // Note: We cannot tail call to the runtime here, as js-to-wasm
      // trampolines also use this code currently, and they declare all
      // outgoing parameters as untagged, while we would push a tagged
      // object here.
      var_result.Bind(CallRuntime(Runtime::kToNumber, context, input));
      Goto(&end);
    }
  }

  Bind(&end);
  return var_result.value();
}

Node* CodeStubAssembler::ToNumber(Node* context, Node* input) {
  Variable var_result(this, MachineRepresentation::kTagged);
  Label end(this);

  Label not_smi(this, Label::kDeferred);
  GotoIfNot(TaggedIsSmi(input), &not_smi);
  var_result.Bind(input);
  Goto(&end);

  Bind(&not_smi);
  {
    Label not_heap_number(this, Label::kDeferred);
    Node* input_map = LoadMap(input);
    GotoIfNot(IsHeapNumberMap(input_map), &not_heap_number);

    var_result.Bind(input);
    Goto(&end);

    Bind(&not_heap_number);
    {
      var_result.Bind(NonNumberToNumber(context, input));
      Goto(&end);
    }
  }

  Bind(&end);
  return var_result.value();
}

Node* CodeStubAssembler::ToUint32(Node* context, Node* input) {
  Node* const float_zero = Float64Constant(0.0);
  Node* const float_two_32 = Float64Constant(static_cast<double>(1ULL << 32));

  Label out(this);

  Variable var_result(this, MachineRepresentation::kTagged, input);

  // Early exit for positive smis.
  {
    // TODO(jgruber): This branch and the recheck below can be removed once we
    // have a ToNumber with multiple exits.
    Label next(this, Label::kDeferred);
    Branch(TaggedIsPositiveSmi(input), &out, &next);
    Bind(&next);
  }

  Node* const number = ToNumber(context, input);
  var_result.Bind(number);

  // Perhaps we have a positive smi now.
  {
    Label next(this, Label::kDeferred);
    Branch(TaggedIsPositiveSmi(number), &out, &next);
    Bind(&next);
  }

  Label if_isnegativesmi(this), if_isheapnumber(this);
  Branch(TaggedIsSmi(number), &if_isnegativesmi, &if_isheapnumber);

  Bind(&if_isnegativesmi);
  {
    // floor({input}) mod 2^32 === {input} + 2^32.
    Node* const float_number = SmiToFloat64(number);
    Node* const float_result = Float64Add(float_number, float_two_32);
    Node* const result = ChangeFloat64ToTagged(float_result);
    var_result.Bind(result);
    Goto(&out);
  }

  Bind(&if_isheapnumber);
  {
    Label return_zero(this);
    Node* const value = LoadHeapNumberValue(number);

    {
      // +-0.
      Label next(this);
      Branch(Float64Equal(value, float_zero), &return_zero, &next);
      Bind(&next);
    }

    {
      // NaN.
      Label next(this);
      Branch(Float64Equal(value, value), &next, &return_zero);
      Bind(&next);
    }

    {
      // +Infinity.
      Label next(this);
      Node* const positive_infinity =
          Float64Constant(std::numeric_limits<double>::infinity());
      Branch(Float64Equal(value, positive_infinity), &return_zero, &next);
      Bind(&next);
    }

    {
      // -Infinity.
      Label next(this);
      Node* const negative_infinity =
          Float64Constant(-1.0 * std::numeric_limits<double>::infinity());
      Branch(Float64Equal(value, negative_infinity), &return_zero, &next);
      Bind(&next);
    }

    // Return floor({input}) mod 2^32 (assuming mod semantics that always return
    // positive results).
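    // Float64Mod has C fmod semantics and can yield a negative result here;
    // adding 2^32 once and reducing again normalizes into [0, 2^32). For
    // example, -1.5 floors to -2 and comes out as 4294967294.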
    {
      Node* x = Float64Floor(value);
      x = Float64Mod(x, float_two_32);
      x = Float64Add(x, float_two_32);
      x = Float64Mod(x, float_two_32);

      Node* const result = ChangeFloat64ToTagged(x);
      var_result.Bind(result);
      Goto(&out);
    }

    Bind(&return_zero);
    {
      var_result.Bind(SmiConstant(Smi::kZero));
      Goto(&out);
    }
  }

  Bind(&out);
  return var_result.value();
}

Node* CodeStubAssembler::ToString(Node* context, Node* input) {
  Label is_number(this);
  Label runtime(this, Label::kDeferred);
  Variable result(this, MachineRepresentation::kTagged);
  Label done(this, &result);

  GotoIf(TaggedIsSmi(input), &is_number);

  Node* input_map = LoadMap(input);
  Node* input_instance_type = LoadMapInstanceType(input_map);

  result.Bind(input);
  GotoIf(IsStringInstanceType(input_instance_type), &done);

  Label not_heap_number(this);
  Branch(IsHeapNumberMap(input_map), &is_number, &not_heap_number);

  Bind(&is_number);
  result.Bind(NumberToString(context, input));
  Goto(&done);

  Bind(&not_heap_number);
  {
    GotoIf(Word32NotEqual(input_instance_type, Int32Constant(ODDBALL_TYPE)),
           &runtime);
    result.Bind(LoadObjectField(input, Oddball::kToStringOffset));
    Goto(&done);
  }

  Bind(&runtime);
  {
    result.Bind(CallRuntime(Runtime::kToString, context, input));
    Goto(&done);
  }

  Bind(&done);
  return result.value();
}

Node* CodeStubAssembler::JSReceiverToPrimitive(Node* context, Node* input) {
  Label if_isreceiver(this, Label::kDeferred), if_isnotreceiver(this);
  Variable result(this, MachineRepresentation::kTagged);
  Label done(this, &result);

  BranchIfJSReceiver(input, &if_isreceiver, &if_isnotreceiver);

  Bind(&if_isreceiver);
  {
    // Convert {input} to a primitive first (using the default hint).
    Callable callable = CodeFactory::NonPrimitiveToPrimitive(isolate());
    result.Bind(CallStub(callable, context, input));
    Goto(&done);
  }

  Bind(&if_isnotreceiver);
  {
    result.Bind(input);
    Goto(&done);
  }

  Bind(&done);
  return result.value();
}

Node* CodeStubAssembler::ToInteger(Node* context, Node* input,
                                   ToIntegerTruncationMode mode) {
  // We might need to loop once for ToNumber conversion.
  Variable var_arg(this, MachineRepresentation::kTagged, input);
  Label loop(this, &var_arg), out(this);
  Goto(&loop);
  Bind(&loop);
  {
    // Shared entry points.
    Label return_zero(this, Label::kDeferred);

    // Load the current {arg} value.
    Node* arg = var_arg.value();

    // Check if {arg} is a Smi.
    GotoIf(TaggedIsSmi(arg), &out);

    // Check if {arg} is a HeapNumber.
    Label if_argisheapnumber(this),
        if_argisnotheapnumber(this, Label::kDeferred);
    Branch(IsHeapNumberMap(LoadMap(arg)), &if_argisheapnumber,
           &if_argisnotheapnumber);

    Bind(&if_argisheapnumber);
    {
      // Load the floating-point value of {arg}.
      Node* arg_value = LoadHeapNumberValue(arg);

      // Check if {arg} is NaN.
      GotoIfNot(Float64Equal(arg_value, arg_value), &return_zero);

      // Truncate {arg} towards zero.
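      // E.g. 3.7 becomes 3.0 and -3.7 becomes -3.0. Note that -0.5 truncates
      // to -0.0, which the kTruncateMinusZero mode below maps to +0.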
      Node* value = Float64Trunc(arg_value);

      if (mode == kTruncateMinusZero) {
        // Truncate -0.0 to 0.
        GotoIf(Float64Equal(value, Float64Constant(0.0)), &return_zero);
      }

      var_arg.Bind(ChangeFloat64ToTagged(value));
      Goto(&out);
    }

    Bind(&if_argisnotheapnumber);
    {
      // Need to convert {arg} to a Number first.
      Callable callable = CodeFactory::NonNumberToNumber(isolate());
      var_arg.Bind(CallStub(callable, context, arg));
      Goto(&loop);
    }

    Bind(&return_zero);
    var_arg.Bind(SmiConstant(Smi::kZero));
    Goto(&out);
  }

  Bind(&out);
  return var_arg.value();
}

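// Extracts the bit field ({word32} & {mask}) >> {shift}; e.g. shift == 3 and
// mask == 0b11000 select a two-bit field stored in bits 3 and 4.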
Node* CodeStubAssembler::DecodeWord32(Node* word32, uint32_t shift,
                                      uint32_t mask) {
  return Word32Shr(Word32And(word32, Int32Constant(mask)),
                   static_cast<int>(shift));
}

Node* CodeStubAssembler::DecodeWord(Node* word, uint32_t shift, uint32_t mask) {
  return WordShr(WordAnd(word, IntPtrConstant(mask)), static_cast<int>(shift));
}

void CodeStubAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    Node* counter_address = ExternalConstant(ExternalReference(counter));
    StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address,
                        Int32Constant(value));
  }
}

void CodeStubAssembler::IncrementCounter(StatsCounter* counter, int delta) {
  DCHECK(delta > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Node* counter_address = ExternalConstant(ExternalReference(counter));
    Node* value = Load(MachineType::Int32(), counter_address);
    value = Int32Add(value, Int32Constant(delta));
    StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
  }
}

void CodeStubAssembler::DecrementCounter(StatsCounter* counter, int delta) {
  DCHECK(delta > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Node* counter_address = ExternalConstant(ExternalReference(counter));
    Node* value = Load(MachineType::Int32(), counter_address);
    value = Int32Sub(value, Int32Constant(delta));
    StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
  }
}

void CodeStubAssembler::Increment(Variable& variable, int value,
                                  ParameterMode mode) {
  DCHECK_IMPLIES(mode == INTPTR_PARAMETERS,
                 variable.rep() == MachineType::PointerRepresentation());
  DCHECK_IMPLIES(mode == SMI_PARAMETERS,
                 variable.rep() == MachineRepresentation::kTagged ||
                     variable.rep() == MachineRepresentation::kTaggedSigned);
  variable.Bind(
      IntPtrOrSmiAdd(variable.value(), IntPtrOrSmiConstant(value, mode), mode));
}

void CodeStubAssembler::Use(Label* label) {
  GotoIf(Word32Equal(Int32Constant(0), Int32Constant(1)), label);
}

void CodeStubAssembler::TryToName(Node* key, Label* if_keyisindex,
                                  Variable* var_index, Label* if_keyisunique,
                                  Variable* var_unique, Label* if_bailout) {
  DCHECK_EQ(MachineType::PointerRepresentation(), var_index->rep());
  DCHECK_EQ(MachineRepresentation::kTagged, var_unique->rep());
  Comment("TryToName");

  Label if_hascachedindex(this), if_keyisnotindex(this), if_thinstring(this);
  // Handle Smi and HeapNumber keys.
  var_index->Bind(TryToIntptr(key, &if_keyisnotindex));
  Goto(if_keyisindex);

  Bind(&if_keyisnotindex);
  Node* key_map = LoadMap(key);
  var_unique->Bind(key);
  // Symbols are unique.
  GotoIf(IsSymbolMap(key_map), if_keyisunique);
  Node* key_instance_type = LoadMapInstanceType(key_map);
  // Miss if |key| is not a String.
  STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
  GotoIfNot(IsStringInstanceType(key_instance_type), if_bailout);
  // |key| is a String. Check if it has a cached array index.
  Node* hash = LoadNameHashField(key);
  Node* contains_index =
      Word32And(hash, Int32Constant(Name::kContainsCachedArrayIndexMask));
  GotoIf(Word32Equal(contains_index, Int32Constant(0)), &if_hascachedindex);
  // No cached array index. If the string knows that it contains an index,
  // then it must be an uncacheable index. Handle this case in the runtime.
  Node* not_an_index =
      Word32And(hash, Int32Constant(Name::kIsNotArrayIndexMask));
  GotoIf(Word32Equal(not_an_index, Int32Constant(0)), if_bailout);
  // Check if we have a ThinString.
  GotoIf(Word32Equal(key_instance_type, Int32Constant(THIN_STRING_TYPE)),
         &if_thinstring);
  GotoIf(
      Word32Equal(key_instance_type, Int32Constant(THIN_ONE_BYTE_STRING_TYPE)),
      &if_thinstring);
  // Finally, check if |key| is internalized.
  STATIC_ASSERT(kNotInternalizedTag != 0);
  Node* not_internalized =
      Word32And(key_instance_type, Int32Constant(kIsNotInternalizedMask));
  GotoIf(Word32NotEqual(not_internalized, Int32Constant(0)), if_bailout);
  Goto(if_keyisunique);

  Bind(&if_thinstring);
  var_unique->Bind(LoadObjectField(key, ThinString::kActualOffset));
  Goto(if_keyisunique);

  Bind(&if_hascachedindex);
  var_index->Bind(DecodeWordFromWord32<Name::ArrayIndexValueBits>(hash));
  Goto(if_keyisindex);
}

template <typename Dictionary>
Node* CodeStubAssembler::EntryToIndex(Node* entry, int field_index) {
  Node* entry_index = IntPtrMul(entry, IntPtrConstant(Dictionary::kEntrySize));
  return IntPtrAdd(entry_index, IntPtrConstant(Dictionary::kElementsStartIndex +
                                               field_index));
}

template Node* CodeStubAssembler::EntryToIndex<NameDictionary>(Node*, int);
template Node* CodeStubAssembler::EntryToIndex<GlobalDictionary>(Node*, int);
template Node* CodeStubAssembler::EntryToIndex<SeededNumberDictionary>(Node*,
                                                                       int);

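// Computes max(kMinCapacity, RoundUpToPowerOfTwo(2 * at_least_space_for)),
// i.e. the resulting table is kept at most half full; e.g. space for 5
// entries yields a capacity of 16.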
Node* CodeStubAssembler::HashTableComputeCapacity(Node* at_least_space_for) {
  Node* capacity = IntPtrRoundUpToPowerOfTwo32(
      WordShl(at_least_space_for, IntPtrConstant(1)));
  return IntPtrMax(capacity, IntPtrConstant(HashTableBase::kMinCapacity));
}

Node* CodeStubAssembler::IntPtrMax(Node* left, Node* right) {
  return SelectConstant(IntPtrGreaterThanOrEqual(left, right), left, right,
                        MachineType::PointerRepresentation());
}

Node* CodeStubAssembler::IntPtrMin(Node* left, Node* right) {
  return SelectConstant(IntPtrLessThanOrEqual(left, right), left, right,
                        MachineType::PointerRepresentation());
}

template <class Dictionary>
Node* CodeStubAssembler::GetNumberOfElements(Node* dictionary) {
  return LoadFixedArrayElement(dictionary, Dictionary::kNumberOfElementsIndex);
}

template <class Dictionary>
void CodeStubAssembler::SetNumberOfElements(Node* dictionary,
                                            Node* num_elements_smi) {
  StoreFixedArrayElement(dictionary, Dictionary::kNumberOfElementsIndex,
                         num_elements_smi, SKIP_WRITE_BARRIER);
}

template <class Dictionary>
Node* CodeStubAssembler::GetNumberOfDeletedElements(Node* dictionary) {
  return LoadFixedArrayElement(dictionary,
                               Dictionary::kNumberOfDeletedElementsIndex);
}

template <class Dictionary>
Node* CodeStubAssembler::GetCapacity(Node* dictionary) {
  return LoadFixedArrayElement(dictionary, Dictionary::kCapacityIndex);
}

template <class Dictionary>
Node* CodeStubAssembler::GetNextEnumerationIndex(Node* dictionary) {
  return LoadFixedArrayElement(dictionary,
                               Dictionary::kNextEnumerationIndexIndex);
}

template <class Dictionary>
void CodeStubAssembler::SetNextEnumerationIndex(Node* dictionary,
                                                Node* next_enum_index_smi) {
  StoreFixedArrayElement(dictionary, Dictionary::kNextEnumerationIndexIndex,
                         next_enum_index_smi, SKIP_WRITE_BARRIER);
}

template <typename Dictionary>
void CodeStubAssembler::NameDictionaryLookup(Node* dictionary,
                                             Node* unique_name, Label* if_found,
                                             Variable* var_name_index,
                                             Label* if_not_found,
                                             int inlined_probes,
                                             LookupMode mode) {
  CSA_ASSERT(this, IsDictionary(dictionary));
  DCHECK_EQ(MachineType::PointerRepresentation(), var_name_index->rep());
  DCHECK_IMPLIES(mode == kFindInsertionIndex,
                 inlined_probes == 0 && if_found == nullptr);
  Comment("NameDictionaryLookup");

  Node* capacity = SmiUntag(GetCapacity<Dictionary>(dictionary));
  Node* mask = IntPtrSub(capacity, IntPtrConstant(1));
  Node* hash = ChangeUint32ToWord(LoadNameHash(unique_name));

  // See Dictionary::FirstProbe().
  Node* count = IntPtrConstant(0);
  Node* entry = WordAnd(hash, mask);
4455
4456 for (int i = 0; i < inlined_probes; i++) {
4457 Node* index = EntryToIndex<Dictionary>(entry);
4458 var_name_index->Bind(index);
4459
4460 Node* current = LoadFixedArrayElement(dictionary, index);
4461 GotoIf(WordEqual(current, unique_name), if_found);
4462
4463 // See Dictionary::NextProbe().
4464 count = IntPtrConstant(i + 1);
4465 entry = WordAnd(IntPtrAdd(entry, count), mask);
4466 }
4467 if (mode == kFindInsertionIndex) {
4468 // Appease the variable merging algorithm for "Goto(&loop)" below.
4469 var_name_index->Bind(IntPtrConstant(0));
4470 }
4471
4472 Node* undefined = UndefinedConstant();
4473 Node* the_hole = mode == kFindExisting ? nullptr : TheHoleConstant();
4474
4475 Variable var_count(this, MachineType::PointerRepresentation(), count);
4476 Variable var_entry(this, MachineType::PointerRepresentation(), entry);
4477 Variable* loop_vars[] = {&var_count, &var_entry, var_name_index};
4478 Label loop(this, 3, loop_vars);
4479 Goto(&loop);
4480 Bind(&loop);
4481 {
4482 Node* entry = var_entry.value();
4483
4484 Node* index = EntryToIndex<Dictionary>(entry);
4485 var_name_index->Bind(index);
4486
4487 Node* current = LoadFixedArrayElement(dictionary, index);
4488 GotoIf(WordEqual(current, undefined), if_not_found);
4489 if (mode == kFindExisting) {
4490 GotoIf(WordEqual(current, unique_name), if_found);
4491 } else {
4492 DCHECK_EQ(kFindInsertionIndex, mode);
4493 GotoIf(WordEqual(current, the_hole), if_not_found);
4494 }
4495
4496 // See Dictionary::NextProbe().
4497 Increment(var_count);
4498 entry = WordAnd(IntPtrAdd(entry, var_count.value()), mask);
4499
4500 var_entry.Bind(entry);
4501 Goto(&loop);
4502 }
4503 }
4504
// Instantiate template methods to work around a GCC compilation issue.
template void CodeStubAssembler::NameDictionaryLookup<NameDictionary>(
    Node*, Node*, Label*, Variable*, Label*, int, LookupMode);
template void CodeStubAssembler::NameDictionaryLookup<GlobalDictionary>(
    Node*, Node*, Label*, Variable*, Label*, int, LookupMode);
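// The probe sequence above, as a scalar sketch (hypothetical helper, not
// part of the CSA graph): with a power-of-two capacity the step grows by
// one on every miss, so the probe offsets are triangular numbers and every
// slot is visited once before the sequence repeats:
//
//   uintptr_t entry = hash & mask;
//   for (uintptr_t count = 1; !IsMatchOrFree(entry); count++) {
//     entry = (entry + count) & mask;  // See Dictionary::NextProbe().
//   }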

Node* CodeStubAssembler::ComputeIntegerHash(Node* key, Node* seed) {
  // See v8::internal::ComputeIntegerHash()
  Node* hash = TruncateWordToWord32(key);
  hash = Word32Xor(hash, seed);
  hash = Int32Add(Word32Xor(hash, Int32Constant(0xffffffff)),
                  Word32Shl(hash, Int32Constant(15)));
  hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(12)));
  hash = Int32Add(hash, Word32Shl(hash, Int32Constant(2)));
  hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(4)));
  hash = Int32Mul(hash, Int32Constant(2057));
  hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(16)));
  return Word32And(hash, Int32Constant(0x3fffffff));
}
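// In scalar form the node graph above computes (a sketch; XOR with
// 0xffffffff is a bitwise NOT):
//
//   uint32_t hash = static_cast<uint32_t>(key) ^ seed;
//   hash = ~hash + (hash << 15);
//   hash = hash ^ (hash >> 12);
//   hash = hash + (hash << 2);
//   hash = hash ^ (hash >> 4);
//   hash = hash * 2057;
//   hash = hash ^ (hash >> 16);
//   return hash & 0x3fffffff;  // Keep 30 bits so the hash fits in a Smi.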

template <typename Dictionary>
void CodeStubAssembler::NumberDictionaryLookup(Node* dictionary,
                                               Node* intptr_index,
                                               Label* if_found,
                                               Variable* var_entry,
                                               Label* if_not_found) {
  CSA_ASSERT(this, IsDictionary(dictionary));
  DCHECK_EQ(MachineType::PointerRepresentation(), var_entry->rep());
  Comment("NumberDictionaryLookup");

  Node* capacity = SmiUntag(GetCapacity<Dictionary>(dictionary));
  Node* mask = IntPtrSub(capacity, IntPtrConstant(1));

  Node* int32_seed;
  if (Dictionary::ShapeT::UsesSeed) {
    int32_seed = HashSeed();
  } else {
    int32_seed = Int32Constant(kZeroHashSeed);
  }
  Node* hash = ChangeUint32ToWord(ComputeIntegerHash(intptr_index, int32_seed));
  Node* key_as_float64 = RoundIntPtrToFloat64(intptr_index);

  // See Dictionary::FirstProbe().
  Node* count = IntPtrConstant(0);
  Node* entry = WordAnd(hash, mask);

  Node* undefined = UndefinedConstant();
  Node* the_hole = TheHoleConstant();

  Variable var_count(this, MachineType::PointerRepresentation(), count);
  Variable* loop_vars[] = {&var_count, var_entry};
  Label loop(this, 2, loop_vars);
  var_entry->Bind(entry);
  Goto(&loop);
  Bind(&loop);
  {
    Node* entry = var_entry->value();

    Node* index = EntryToIndex<Dictionary>(entry);
    Node* current = LoadFixedArrayElement(dictionary, index);
    GotoIf(WordEqual(current, undefined), if_not_found);
    Label next_probe(this);
    {
      Label if_currentissmi(this), if_currentisnotsmi(this);
      Branch(TaggedIsSmi(current), &if_currentissmi, &if_currentisnotsmi);
      Bind(&if_currentissmi);
      {
        Node* current_value = SmiUntag(current);
        Branch(WordEqual(current_value, intptr_index), if_found, &next_probe);
      }
      Bind(&if_currentisnotsmi);
      {
        GotoIf(WordEqual(current, the_hole), &next_probe);
        // Otherwise |current| must be a heap number; compare it by value.
        Node* current_value = LoadHeapNumberValue(current);
        Branch(Float64Equal(current_value, key_as_float64), if_found,
               &next_probe);
      }
    }

    Bind(&next_probe);
    // See Dictionary::NextProbe().
    Increment(var_count);
    entry = WordAnd(IntPtrAdd(entry, var_count.value()), mask);

    var_entry->Bind(entry);
    Goto(&loop);
  }
}

template <class Dictionary>
void CodeStubAssembler::FindInsertionEntry(Node* dictionary, Node* key,
                                           Variable* var_key_index) {
  UNREACHABLE();
}

template <>
void CodeStubAssembler::FindInsertionEntry<NameDictionary>(
    Node* dictionary, Node* key, Variable* var_key_index) {
  Label done(this);
  NameDictionaryLookup<NameDictionary>(dictionary, key, nullptr, var_key_index,
                                       &done, 0, kFindInsertionIndex);
  Bind(&done);
}

template <class Dictionary>
void CodeStubAssembler::InsertEntry(Node* dictionary, Node* key, Node* value,
                                    Node* index, Node* enum_index) {
  UNREACHABLE();  // Use specializations instead.
}

template <>
void CodeStubAssembler::InsertEntry<NameDictionary>(Node* dictionary,
                                                    Node* name, Node* value,
                                                    Node* index,
                                                    Node* enum_index) {
  // Store name and value.
  StoreFixedArrayElement(dictionary, index, name);
  StoreValueByKeyIndex<NameDictionary>(dictionary, index, value);

  // Prepare details of the new property.
  const int kInitialIndex = 0;
  PropertyDetails d(kData, NONE, kInitialIndex, PropertyCellType::kNoCell);
  enum_index =
      SmiShl(enum_index, PropertyDetails::DictionaryStorageField::kShift);
  STATIC_ASSERT(kInitialIndex == 0);
  Variable var_details(this, MachineRepresentation::kTaggedSigned,
                       SmiOr(SmiConstant(d.AsSmi()), enum_index));

  // Private names must be marked non-enumerable.
  Label not_private(this, &var_details);
  GotoIfNot(IsSymbolMap(LoadMap(name)), &not_private);
  Node* flags = SmiToWord32(LoadObjectField(name, Symbol::kFlagsOffset));
  const int kPrivateMask = 1 << Symbol::kPrivateBit;
  GotoIfNot(IsSetWord32(flags, kPrivateMask), &not_private);
  Node* dont_enum =
      SmiShl(SmiConstant(DONT_ENUM), PropertyDetails::AttributesField::kShift);
  var_details.Bind(SmiOr(var_details.value(), dont_enum));
  Goto(&not_private);
  Bind(&not_private);

  // Finally, store the details.
  StoreDetailsByKeyIndex<NameDictionary>(dictionary, index,
                                         var_details.value());
}

template <>
void CodeStubAssembler::InsertEntry<GlobalDictionary>(Node* dictionary,
                                                      Node* key, Node* value,
                                                      Node* index,
                                                      Node* enum_index) {
  UNIMPLEMENTED();
}

template <class Dictionary>
void CodeStubAssembler::Add(Node* dictionary, Node* key, Node* value,
                            Label* bailout) {
  Node* capacity = GetCapacity<Dictionary>(dictionary);
  Node* nof = GetNumberOfElements<Dictionary>(dictionary);
  Node* new_nof = SmiAdd(nof, SmiConstant(1));
  // Require 33% of the capacity to still be free after adding the new
  // element. Computing "x + (x >> 1)" on a Smi x does not return a valid
  // Smi! But that is OK here because the result is only used for a
  // comparison.
  Node* required_capacity_pseudo_smi = SmiAdd(new_nof, SmiShr(new_nof, 1));
  GotoIf(SmiBelow(capacity, required_capacity_pseudo_smi), bailout);
  // Require rehashing if more than 50% of free elements are deleted elements.
  Node* deleted = GetNumberOfDeletedElements<Dictionary>(dictionary);
  CSA_ASSERT(this, SmiAbove(capacity, new_nof));
  Node* half_of_free_elements = SmiShr(SmiSub(capacity, new_nof), 1);
  GotoIf(SmiAbove(deleted, half_of_free_elements), bailout);
  Node* enum_index = nullptr;
  if (Dictionary::kIsEnumerable) {
    enum_index = GetNextEnumerationIndex<Dictionary>(dictionary);
    Node* new_enum_index = SmiAdd(enum_index, SmiConstant(1));
    Node* max_enum_index =
        SmiConstant(PropertyDetails::DictionaryStorageField::kMax);
    GotoIf(SmiAbove(new_enum_index, max_enum_index), bailout);

    // No more bailouts after this point.
    // Operations from here on can have side effects.

    SetNextEnumerationIndex<Dictionary>(dictionary, new_enum_index);
  } else {
    USE(enum_index);
  }
  SetNumberOfElements<Dictionary>(dictionary, new_nof);

  Variable var_key_index(this, MachineType::PointerRepresentation());
  FindInsertionEntry<Dictionary>(dictionary, key, &var_key_index);
  InsertEntry<Dictionary>(dictionary, key, value, var_key_index.value(),
                          enum_index);
}

template void CodeStubAssembler::Add<NameDictionary>(Node*, Node*, Node*,
                                                     Label*);
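// Worked example of the capacity check in Add (sketch): with capacity == 16
// and new_nof == 11 the pseudo-smi requirement is 11 + (11 >> 1) == 16, so
// SmiBelow(16, 16) is false and the add proceeds in place; with
// new_nof == 12 the requirement is 18 > 16 and we bail out so the caller
// can grow the dictionary.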

void CodeStubAssembler::DescriptorLookupLinear(Node* unique_name,
                                               Node* descriptors, Node* nof,
                                               Label* if_found,
                                               Variable* var_name_index,
                                               Label* if_not_found) {
  Comment("DescriptorLookupLinear");
  Node* first_inclusive = IntPtrConstant(DescriptorArray::ToKeyIndex(0));
  Node* factor = IntPtrConstant(DescriptorArray::kEntrySize);
  Node* last_exclusive = IntPtrAdd(first_inclusive, IntPtrMul(nof, factor));

  BuildFastLoop(last_exclusive, first_inclusive,
                [this, descriptors, unique_name, if_found,
                 var_name_index](Node* name_index) {
                  Node* candidate_name =
                      LoadFixedArrayElement(descriptors, name_index);
                  var_name_index->Bind(name_index);
                  GotoIf(WordEqual(candidate_name, unique_name), if_found);
                },
                -DescriptorArray::kEntrySize, INTPTR_PARAMETERS,
                IndexAdvanceMode::kPre);
  Goto(if_not_found);
}

Node* CodeStubAssembler::DescriptorArrayNumberOfEntries(Node* descriptors) {
  return LoadAndUntagToWord32FixedArrayElement(
      descriptors, IntPtrConstant(DescriptorArray::kDescriptorLengthIndex));
}

namespace {

Node* DescriptorNumberToIndex(CodeStubAssembler* a, Node* descriptor_number) {
  Node* descriptor_size = a->Int32Constant(DescriptorArray::kEntrySize);
  Node* index = a->Int32Mul(descriptor_number, descriptor_size);
  return a->ChangeInt32ToIntPtr(index);
}

}  // namespace

Node* CodeStubAssembler::DescriptorArrayToKeyIndex(Node* descriptor_number) {
  return IntPtrAdd(IntPtrConstant(DescriptorArray::ToKeyIndex(0)),
                   DescriptorNumberToIndex(this, descriptor_number));
}

Node* CodeStubAssembler::DescriptorArrayGetSortedKeyIndex(
    Node* descriptors, Node* descriptor_number) {
  const int details_offset = DescriptorArray::ToDetailsIndex(0) * kPointerSize;
  Node* details = LoadAndUntagToWord32FixedArrayElement(
      descriptors, DescriptorNumberToIndex(this, descriptor_number),
      details_offset);
  return DecodeWord32<PropertyDetails::DescriptorPointer>(details);
}

Node* CodeStubAssembler::DescriptorArrayGetKey(Node* descriptors,
                                               Node* descriptor_number) {
  const int key_offset = DescriptorArray::ToKeyIndex(0) * kPointerSize;
  return LoadFixedArrayElement(descriptors,
                               DescriptorNumberToIndex(this, descriptor_number),
                               key_offset);
}

void CodeStubAssembler::DescriptorLookupBinary(Node* unique_name,
                                               Node* descriptors, Node* nof,
                                               Label* if_found,
                                               Variable* var_name_index,
                                               Label* if_not_found) {
  Comment("DescriptorLookupBinary");
  Variable var_low(this, MachineRepresentation::kWord32, Int32Constant(0));
  Node* limit =
      Int32Sub(DescriptorArrayNumberOfEntries(descriptors), Int32Constant(1));
  Variable var_high(this, MachineRepresentation::kWord32, limit);
  Node* hash = LoadNameHashField(unique_name);
  CSA_ASSERT(this, Word32NotEqual(hash, Int32Constant(0)));

  // Assume non-empty array.
  CSA_ASSERT(this, Uint32LessThanOrEqual(var_low.value(), var_high.value()));

  Variable* loop_vars[] = {&var_high, &var_low};
  Label binary_loop(this, 2, loop_vars);
  Goto(&binary_loop);
  Bind(&binary_loop);
  {
    // mid = low + (high - low) / 2 (to avoid overflow in "(low + high) / 2").
    Node* mid =
        Int32Add(var_low.value(),
                 Word32Shr(Int32Sub(var_high.value(), var_low.value()), 1));
    // mid_name = descriptors->GetSortedKey(mid).
    Node* sorted_key_index = DescriptorArrayGetSortedKeyIndex(descriptors, mid);
    Node* mid_name = DescriptorArrayGetKey(descriptors, sorted_key_index);

    Node* mid_hash = LoadNameHashField(mid_name);

    Label mid_greater(this), mid_less(this), merge(this);
    Branch(Uint32GreaterThanOrEqual(mid_hash, hash), &mid_greater, &mid_less);
    Bind(&mid_greater);
    {
      var_high.Bind(mid);
      Goto(&merge);
    }
    Bind(&mid_less);
    {
      var_low.Bind(Int32Add(mid, Int32Constant(1)));
      Goto(&merge);
    }
    Bind(&merge);
    GotoIf(Word32NotEqual(var_low.value(), var_high.value()), &binary_loop);
  }

  Label scan_loop(this, &var_low);
  Goto(&scan_loop);
  Bind(&scan_loop);
  {
    GotoIf(Int32GreaterThan(var_low.value(), limit), if_not_found);

    Node* sort_index =
        DescriptorArrayGetSortedKeyIndex(descriptors, var_low.value());
    Node* current_name = DescriptorArrayGetKey(descriptors, sort_index);
    Node* current_hash = LoadNameHashField(current_name);
    GotoIf(Word32NotEqual(current_hash, hash), if_not_found);
    Label next(this);
    GotoIf(WordNotEqual(current_name, unique_name), &next);
    GotoIf(Int32GreaterThanOrEqual(sort_index, nof), if_not_found);
    var_name_index->Bind(DescriptorArrayToKeyIndex(sort_index));
    Goto(if_found);

    Bind(&next);
    var_low.Bind(Int32Add(var_low.value(), Int32Constant(1)));
    Goto(&scan_loop);
  }
}
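// The binary loop above is a lower-bound search over the hash-sorted order,
// roughly (scalar sketch, hash_at() is a hypothetical accessor):
//
//   int low = 0, high = number_of_entries - 1;
//   while (low != high) {
//     int mid = low + (high - low) / 2;
//     if (hash_at(mid) >= hash) high = mid; else low = mid + 1;
//   }
//
// It converges on the first sorted entry whose hash is >= the key's hash;
// the scan loop then walks forward over hash collisions, comparing names by
// pointer identity and rejecting entries beyond the map's own descriptors.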

void CodeStubAssembler::DescriptorLookup(Node* unique_name, Node* descriptors,
                                         Node* bitfield3, Label* if_found,
                                         Variable* var_name_index,
                                         Label* if_not_found) {
  Comment("DescriptorArrayLookup");
  Node* nof = DecodeWord32<Map::NumberOfOwnDescriptorsBits>(bitfield3);
  GotoIf(Word32Equal(nof, Int32Constant(0)), if_not_found);
  Label linear_search(this), binary_search(this);
  const int kMaxElementsForLinearSearch = 32;
  Branch(Int32LessThanOrEqual(nof, Int32Constant(kMaxElementsForLinearSearch)),
         &linear_search, &binary_search);
  Bind(&linear_search);
  {
    DescriptorLookupLinear(unique_name, descriptors, ChangeInt32ToIntPtr(nof),
                           if_found, var_name_index, if_not_found);
  }
  Bind(&binary_search);
  {
    DescriptorLookupBinary(unique_name, descriptors, nof, if_found,
                           var_name_index, if_not_found);
  }
}

void CodeStubAssembler::TryLookupProperty(
    Node* object, Node* map, Node* instance_type, Node* unique_name,
    Label* if_found_fast, Label* if_found_dict, Label* if_found_global,
    Variable* var_meta_storage, Variable* var_name_index, Label* if_not_found,
    Label* if_bailout) {
  DCHECK_EQ(MachineRepresentation::kTagged, var_meta_storage->rep());
  DCHECK_EQ(MachineType::PointerRepresentation(), var_name_index->rep());

  Label if_objectisspecial(this);
  STATIC_ASSERT(JS_GLOBAL_OBJECT_TYPE <= LAST_SPECIAL_RECEIVER_TYPE);
  GotoIf(Int32LessThanOrEqual(instance_type,
                              Int32Constant(LAST_SPECIAL_RECEIVER_TYPE)),
         &if_objectisspecial);

  uint32_t mask =
      1 << Map::kHasNamedInterceptor | 1 << Map::kIsAccessCheckNeeded;
  CSA_ASSERT(this, Word32BinaryNot(IsSetWord32(LoadMapBitField(map), mask)));
  USE(mask);

  Node* bit_field3 = LoadMapBitField3(map);
  Label if_isfastmap(this), if_isslowmap(this);
  Branch(IsSetWord32<Map::DictionaryMap>(bit_field3), &if_isslowmap,
         &if_isfastmap);
  Bind(&if_isfastmap);
  {
    Node* descriptors = LoadMapDescriptors(map);
    var_meta_storage->Bind(descriptors);

    DescriptorLookup(unique_name, descriptors, bit_field3, if_found_fast,
                     var_name_index, if_not_found);
  }
  Bind(&if_isslowmap);
  {
    Node* dictionary = LoadProperties(object);
    var_meta_storage->Bind(dictionary);

    NameDictionaryLookup<NameDictionary>(dictionary, unique_name, if_found_dict,
                                         var_name_index, if_not_found);
  }
  Bind(&if_objectisspecial);
  {
    // Handle global object here and other special objects in runtime.
    GotoIfNot(Word32Equal(instance_type, Int32Constant(JS_GLOBAL_OBJECT_TYPE)),
              if_bailout);

    // Handle interceptors and access checks in runtime.
    Node* bit_field = LoadMapBitField(map);
    Node* mask = Int32Constant(1 << Map::kHasNamedInterceptor |
                               1 << Map::kIsAccessCheckNeeded);
    GotoIf(Word32NotEqual(Word32And(bit_field, mask), Int32Constant(0)),
           if_bailout);

    Node* dictionary = LoadProperties(object);
    var_meta_storage->Bind(dictionary);

    NameDictionaryLookup<GlobalDictionary>(
        dictionary, unique_name, if_found_global, var_name_index, if_not_found);
  }
}

void CodeStubAssembler::TryHasOwnProperty(Node* object, Node* map,
                                          Node* instance_type,
                                          Node* unique_name, Label* if_found,
                                          Label* if_not_found,
                                          Label* if_bailout) {
  Comment("TryHasOwnProperty");
  Variable var_meta_storage(this, MachineRepresentation::kTagged);
  Variable var_name_index(this, MachineType::PointerRepresentation());

  Label if_found_global(this);
  TryLookupProperty(object, map, instance_type, unique_name, if_found, if_found,
                    &if_found_global, &var_meta_storage, &var_name_index,
                    if_not_found, if_bailout);
  Bind(&if_found_global);
  {
    Variable var_value(this, MachineRepresentation::kTagged);
    Variable var_details(this, MachineRepresentation::kWord32);
    // Check that the property cell has not been deleted.
    LoadPropertyFromGlobalDictionary(var_meta_storage.value(),
                                     var_name_index.value(), &var_value,
                                     &var_details, if_not_found);
    Goto(if_found);
  }
}

void CodeStubAssembler::LoadPropertyFromFastObject(Node* object, Node* map,
                                                   Node* descriptors,
                                                   Node* name_index,
                                                   Variable* var_details,
                                                   Variable* var_value) {
  DCHECK_EQ(MachineRepresentation::kWord32, var_details->rep());
  DCHECK_EQ(MachineRepresentation::kTagged, var_value->rep());
  Comment("[ LoadPropertyFromFastObject");

  Node* details =
      LoadDetailsByKeyIndex<DescriptorArray>(descriptors, name_index);
  var_details->Bind(details);

  Node* location = DecodeWord32<PropertyDetails::LocationField>(details);

  Label if_in_field(this), if_in_descriptor(this), done(this);
  Branch(Word32Equal(location, Int32Constant(kField)), &if_in_field,
         &if_in_descriptor);
  Bind(&if_in_field);
  {
    Node* field_index =
        DecodeWordFromWord32<PropertyDetails::FieldIndexField>(details);
    Node* representation =
        DecodeWord32<PropertyDetails::RepresentationField>(details);

    Node* inobject_properties = LoadMapInobjectProperties(map);

    Label if_inobject(this), if_backing_store(this);
    Variable var_double_value(this, MachineRepresentation::kFloat64);
    Label rebox_double(this, &var_double_value);
    Branch(UintPtrLessThan(field_index, inobject_properties), &if_inobject,
           &if_backing_store);
    Bind(&if_inobject);
    {
      Comment("if_inobject");
      Node* field_offset =
          IntPtrMul(IntPtrSub(LoadMapInstanceSize(map),
                              IntPtrSub(inobject_properties, field_index)),
                    IntPtrConstant(kPointerSize));

      Label if_double(this), if_tagged(this);
      Branch(Word32NotEqual(representation,
                            Int32Constant(Representation::kDouble)),
             &if_tagged, &if_double);
      Bind(&if_tagged);
      {
        var_value->Bind(LoadObjectField(object, field_offset));
        Goto(&done);
      }
      Bind(&if_double);
      {
        if (FLAG_unbox_double_fields) {
          var_double_value.Bind(
              LoadObjectField(object, field_offset, MachineType::Float64()));
        } else {
          Node* mutable_heap_number = LoadObjectField(object, field_offset);
          var_double_value.Bind(LoadHeapNumberValue(mutable_heap_number));
        }
        Goto(&rebox_double);
      }
    }
    Bind(&if_backing_store);
    {
      Comment("if_backing_store");
      Node* properties = LoadProperties(object);
      field_index = IntPtrSub(field_index, inobject_properties);
      Node* value = LoadFixedArrayElement(properties, field_index);

      Label if_double(this), if_tagged(this);
      Branch(Word32NotEqual(representation,
                            Int32Constant(Representation::kDouble)),
             &if_tagged, &if_double);
      Bind(&if_tagged);
      {
        var_value->Bind(value);
        Goto(&done);
      }
      Bind(&if_double);
      {
        var_double_value.Bind(LoadHeapNumberValue(value));
        Goto(&rebox_double);
      }
    }
    Bind(&rebox_double);
    {
      Comment("rebox_double");
      Node* heap_number = AllocateHeapNumberWithValue(var_double_value.value());
      var_value->Bind(heap_number);
      Goto(&done);
    }
  }
  Bind(&if_in_descriptor);
  {
    var_value->Bind(
        LoadValueByKeyIndex<DescriptorArray>(descriptors, name_index));
    Goto(&done);
  }
  Bind(&done);

  Comment("] LoadPropertyFromFastObject");
}

void CodeStubAssembler::LoadPropertyFromNameDictionary(Node* dictionary,
                                                       Node* name_index,
                                                       Variable* var_details,
                                                       Variable* var_value) {
  Comment("[ LoadPropertyFromNameDictionary");
  CSA_ASSERT(this, IsDictionary(dictionary));

  var_details->Bind(
      LoadDetailsByKeyIndex<NameDictionary>(dictionary, name_index));
  var_value->Bind(LoadValueByKeyIndex<NameDictionary>(dictionary, name_index));

  Comment("] LoadPropertyFromNameDictionary");
}

void CodeStubAssembler::LoadPropertyFromGlobalDictionary(Node* dictionary,
                                                         Node* name_index,
                                                         Variable* var_details,
                                                         Variable* var_value,
                                                         Label* if_deleted) {
  Comment("[ LoadPropertyFromGlobalDictionary");
  CSA_ASSERT(this, IsDictionary(dictionary));

  Node* property_cell =
      LoadValueByKeyIndex<GlobalDictionary>(dictionary, name_index);

  Node* value = LoadObjectField(property_cell, PropertyCell::kValueOffset);
  GotoIf(WordEqual(value, TheHoleConstant()), if_deleted);

  var_value->Bind(value);

  Node* details = LoadAndUntagToWord32ObjectField(property_cell,
                                                  PropertyCell::kDetailsOffset);
  var_details->Bind(details);

  Comment("] LoadPropertyFromGlobalDictionary");
}

// |value| is the property backing store's contents, which is either a value
// or an accessor pair, as specified by |details|.
// Returns either the original value, or the result of the getter call.
Node* CodeStubAssembler::CallGetterIfAccessor(Node* value, Node* details,
                                              Node* context, Node* receiver,
                                              Label* if_bailout) {
  Variable var_value(this, MachineRepresentation::kTagged, value);
  Label done(this);

  Node* kind = DecodeWord32<PropertyDetails::KindField>(details);
  GotoIf(Word32Equal(kind, Int32Constant(kData)), &done);

  // Accessor case.
  {
    Node* accessor_pair = value;
    GotoIf(Word32Equal(LoadInstanceType(accessor_pair),
                       Int32Constant(ACCESSOR_INFO_TYPE)),
           if_bailout);
    CSA_ASSERT(this, HasInstanceType(accessor_pair, ACCESSOR_PAIR_TYPE));
    Node* getter = LoadObjectField(accessor_pair, AccessorPair::kGetterOffset);
    Node* getter_map = LoadMap(getter);
    Node* instance_type = LoadMapInstanceType(getter_map);
    // FunctionTemplateInfo getters are not supported yet.
    GotoIf(
        Word32Equal(instance_type, Int32Constant(FUNCTION_TEMPLATE_INFO_TYPE)),
        if_bailout);

    // Return undefined if the {getter} is not callable.
    var_value.Bind(UndefinedConstant());
    GotoIfNot(IsCallableMap(getter_map), &done);

    // Call the accessor.
    Callable callable = CodeFactory::Call(isolate());
    Node* result = CallJS(callable, context, getter, receiver);
    var_value.Bind(result);
    Goto(&done);
  }

  Bind(&done);
  return var_value.value();
}

void CodeStubAssembler::TryGetOwnProperty(
    Node* context, Node* receiver, Node* object, Node* map, Node* instance_type,
    Node* unique_name, Label* if_found_value, Variable* var_value,
    Label* if_not_found, Label* if_bailout) {
  DCHECK_EQ(MachineRepresentation::kTagged, var_value->rep());
  Comment("TryGetOwnProperty");

  Variable var_meta_storage(this, MachineRepresentation::kTagged);
  Variable var_entry(this, MachineType::PointerRepresentation());

  Label if_found_fast(this), if_found_dict(this), if_found_global(this);

  Variable var_details(this, MachineRepresentation::kWord32);
  Variable* vars[] = {var_value, &var_details};
  Label if_found(this, 2, vars);

  TryLookupProperty(object, map, instance_type, unique_name, &if_found_fast,
                    &if_found_dict, &if_found_global, &var_meta_storage,
                    &var_entry, if_not_found, if_bailout);
  Bind(&if_found_fast);
  {
    Node* descriptors = var_meta_storage.value();
    Node* name_index = var_entry.value();

    LoadPropertyFromFastObject(object, map, descriptors, name_index,
                               &var_details, var_value);
    Goto(&if_found);
  }
  Bind(&if_found_dict);
  {
    Node* dictionary = var_meta_storage.value();
    Node* entry = var_entry.value();
    LoadPropertyFromNameDictionary(dictionary, entry, &var_details, var_value);
    Goto(&if_found);
  }
  Bind(&if_found_global);
  {
    Node* dictionary = var_meta_storage.value();
    Node* entry = var_entry.value();

    LoadPropertyFromGlobalDictionary(dictionary, entry, &var_details, var_value,
                                     if_not_found);
    Goto(&if_found);
  }
  // Here we have details and value which could be an accessor.
  Bind(&if_found);
  {
    Node* value = CallGetterIfAccessor(var_value->value(), var_details.value(),
                                       context, receiver, if_bailout);
    var_value->Bind(value);
    Goto(if_found_value);
  }
}

void CodeStubAssembler::TryLookupElement(Node* object, Node* map,
                                         Node* instance_type,
                                         Node* intptr_index, Label* if_found,
                                         Label* if_not_found,
                                         Label* if_bailout) {
  // Handle special objects in runtime.
  GotoIf(Int32LessThanOrEqual(instance_type,
                              Int32Constant(LAST_SPECIAL_RECEIVER_TYPE)),
         if_bailout);

  Node* elements_kind = LoadMapElementsKind(map);

  // TODO(verwaest): Support other elements kinds as well.
  Label if_isobjectorsmi(this), if_isdouble(this), if_isdictionary(this),
      if_isfaststringwrapper(this), if_isslowstringwrapper(this), if_oob(this);
  // clang-format off
  int32_t values[] = {
      // Handled by {if_isobjectorsmi}.
      FAST_SMI_ELEMENTS, FAST_HOLEY_SMI_ELEMENTS, FAST_ELEMENTS,
          FAST_HOLEY_ELEMENTS,
      // Handled by {if_isdouble}.
      FAST_DOUBLE_ELEMENTS, FAST_HOLEY_DOUBLE_ELEMENTS,
      // Handled by {if_isdictionary}.
      DICTIONARY_ELEMENTS,
      // Handled by {if_isfaststringwrapper}.
      FAST_STRING_WRAPPER_ELEMENTS,
      // Handled by {if_isslowstringwrapper}.
      SLOW_STRING_WRAPPER_ELEMENTS,
      // Handled by {if_not_found}.
      NO_ELEMENTS,
  };
  Label* labels[] = {
      &if_isobjectorsmi, &if_isobjectorsmi, &if_isobjectorsmi,
          &if_isobjectorsmi,
      &if_isdouble, &if_isdouble,
      &if_isdictionary,
      &if_isfaststringwrapper,
      &if_isslowstringwrapper,
      if_not_found,
  };
  // clang-format on
  STATIC_ASSERT(arraysize(values) == arraysize(labels));
  Switch(elements_kind, if_bailout, values, labels, arraysize(values));

  Bind(&if_isobjectorsmi);
  {
    Node* elements = LoadElements(object);
    Node* length = LoadAndUntagFixedArrayBaseLength(elements);

    GotoIfNot(UintPtrLessThan(intptr_index, length), &if_oob);

    Node* element = LoadFixedArrayElement(elements, intptr_index);
    Node* the_hole = TheHoleConstant();
    Branch(WordEqual(element, the_hole), if_not_found, if_found);
  }
  Bind(&if_isdouble);
  {
    Node* elements = LoadElements(object);
    Node* length = LoadAndUntagFixedArrayBaseLength(elements);

    GotoIfNot(UintPtrLessThan(intptr_index, length), &if_oob);

    // Check if the element is a double hole, but don't load it.
    LoadFixedDoubleArrayElement(elements, intptr_index, MachineType::None(), 0,
                                INTPTR_PARAMETERS, if_not_found);
    Goto(if_found);
  }
  Bind(&if_isdictionary);
  {
    // Negative keys must be converted to property names.
    GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), if_bailout);

    Variable var_entry(this, MachineType::PointerRepresentation());
    Node* elements = LoadElements(object);
    NumberDictionaryLookup<SeededNumberDictionary>(
        elements, intptr_index, if_found, &var_entry, if_not_found);
  }
  Bind(&if_isfaststringwrapper);
  {
    CSA_ASSERT(this, HasInstanceType(object, JS_VALUE_TYPE));
    Node* string = LoadJSValueValue(object);
    CSA_ASSERT(this, IsStringInstanceType(LoadInstanceType(string)));
    Node* length = LoadStringLength(string);
    GotoIf(UintPtrLessThan(intptr_index, SmiUntag(length)), if_found);
    Goto(&if_isobjectorsmi);
  }
  Bind(&if_isslowstringwrapper);
  {
    CSA_ASSERT(this, HasInstanceType(object, JS_VALUE_TYPE));
    Node* string = LoadJSValueValue(object);
    CSA_ASSERT(this, IsStringInstanceType(LoadInstanceType(string)));
    Node* length = LoadStringLength(string);
    GotoIf(UintPtrLessThan(intptr_index, SmiUntag(length)), if_found);
    Goto(&if_isdictionary);
  }
  Bind(&if_oob);
  {
    // Positive OOB indices mean "not found", negative indices must be
    // converted to property names.
    GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), if_bailout);
    Goto(if_not_found);
  }
}

// Instantiate template methods to work around a GCC compilation issue.
template void CodeStubAssembler::NumberDictionaryLookup<SeededNumberDictionary>(
    Node*, Node*, Label*, Variable*, Label*);
template void CodeStubAssembler::NumberDictionaryLookup<
    UnseededNumberDictionary>(Node*, Node*, Label*, Variable*, Label*);

void CodeStubAssembler::TryPrototypeChainLookup(
    Node* receiver, Node* key, const LookupInHolder& lookup_property_in_holder,
    const LookupInHolder& lookup_element_in_holder, Label* if_end,
    Label* if_bailout) {
  // Ensure receiver is JSReceiver, otherwise bailout.
  Label if_objectisnotsmi(this);
  Branch(TaggedIsSmi(receiver), if_bailout, &if_objectisnotsmi);
  Bind(&if_objectisnotsmi);

  Node* map = LoadMap(receiver);
  Node* instance_type = LoadMapInstanceType(map);
  {
    Label if_objectisreceiver(this);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    STATIC_ASSERT(FIRST_JS_RECEIVER_TYPE == JS_PROXY_TYPE);
    Branch(
        Int32GreaterThan(instance_type, Int32Constant(FIRST_JS_RECEIVER_TYPE)),
        &if_objectisreceiver, if_bailout);
    Bind(&if_objectisreceiver);
  }

  Variable var_index(this, MachineType::PointerRepresentation());
  Variable var_unique(this, MachineRepresentation::kTagged);

  Label if_keyisindex(this), if_iskeyunique(this);
  TryToName(key, &if_keyisindex, &var_index, &if_iskeyunique, &var_unique,
            if_bailout);

  Bind(&if_iskeyunique);
  {
    Variable var_holder(this, MachineRepresentation::kTagged, receiver);
    Variable var_holder_map(this, MachineRepresentation::kTagged, map);
    Variable var_holder_instance_type(this, MachineRepresentation::kWord32,
                                      instance_type);

    Variable* merged_variables[] = {&var_holder, &var_holder_map,
                                    &var_holder_instance_type};
    Label loop(this, arraysize(merged_variables), merged_variables);
    Goto(&loop);
    Bind(&loop);
    {
      Node* holder_map = var_holder_map.value();
      Node* holder_instance_type = var_holder_instance_type.value();

      Label next_proto(this);
      lookup_property_in_holder(receiver, var_holder.value(), holder_map,
                                holder_instance_type, var_unique.value(),
                                &next_proto, if_bailout);
      Bind(&next_proto);

      // Bailout if it can be an integer indexed exotic case.
      GotoIf(
          Word32Equal(holder_instance_type, Int32Constant(JS_TYPED_ARRAY_TYPE)),
          if_bailout);

      Node* proto = LoadMapPrototype(holder_map);

      Label if_not_null(this);
      Branch(WordEqual(proto, NullConstant()), if_end, &if_not_null);
      Bind(&if_not_null);

      Node* map = LoadMap(proto);
      Node* instance_type = LoadMapInstanceType(map);

      var_holder.Bind(proto);
      var_holder_map.Bind(map);
      var_holder_instance_type.Bind(instance_type);
      Goto(&loop);
    }
  }
  Bind(&if_keyisindex);
  {
    Variable var_holder(this, MachineRepresentation::kTagged, receiver);
    Variable var_holder_map(this, MachineRepresentation::kTagged, map);
    Variable var_holder_instance_type(this, MachineRepresentation::kWord32,
                                      instance_type);

    Variable* merged_variables[] = {&var_holder, &var_holder_map,
                                    &var_holder_instance_type};
    Label loop(this, arraysize(merged_variables), merged_variables);
    Goto(&loop);
    Bind(&loop);
    {
      Label next_proto(this);
      lookup_element_in_holder(receiver, var_holder.value(),
                               var_holder_map.value(),
                               var_holder_instance_type.value(),
                               var_index.value(), &next_proto, if_bailout);
      Bind(&next_proto);

      Node* proto = LoadMapPrototype(var_holder_map.value());

      Label if_not_null(this);
      Branch(WordEqual(proto, NullConstant()), if_end, &if_not_null);
      Bind(&if_not_null);

      Node* map = LoadMap(proto);
      Node* instance_type = LoadMapInstanceType(map);

      var_holder.Bind(proto);
      var_holder_map.Bind(map);
      var_holder_instance_type.Bind(instance_type);
      Goto(&loop);
    }
  }
}

Node* CodeStubAssembler::OrdinaryHasInstance(Node* context, Node* callable,
                                             Node* object) {
  Variable var_result(this, MachineRepresentation::kTagged);
  Label return_false(this), return_true(this),
      return_runtime(this, Label::kDeferred), return_result(this);

  // Goto runtime if {object} is a Smi.
  GotoIf(TaggedIsSmi(object), &return_runtime);

  // Load map of {object}.
  Node* object_map = LoadMap(object);

  // Lookup the {callable} and {object} map in the global instanceof cache.
  // Note: This is safe because we clear the global instanceof cache whenever
  // we change the prototype of any object.
  Node* instanceof_cache_function =
      LoadRoot(Heap::kInstanceofCacheFunctionRootIndex);
  Node* instanceof_cache_map = LoadRoot(Heap::kInstanceofCacheMapRootIndex);
  {
    Label instanceof_cache_miss(this);
    GotoIfNot(WordEqual(instanceof_cache_function, callable),
              &instanceof_cache_miss);
    GotoIfNot(WordEqual(instanceof_cache_map, object_map),
              &instanceof_cache_miss);
    var_result.Bind(LoadRoot(Heap::kInstanceofCacheAnswerRootIndex));
    Goto(&return_result);
    Bind(&instanceof_cache_miss);
  }

  // Goto runtime if {callable} is a Smi.
  GotoIf(TaggedIsSmi(callable), &return_runtime);

  // Load map of {callable}.
  Node* callable_map = LoadMap(callable);

  // Goto runtime if {callable} is not a JSFunction.
  Node* callable_instance_type = LoadMapInstanceType(callable_map);
  GotoIfNot(
      Word32Equal(callable_instance_type, Int32Constant(JS_FUNCTION_TYPE)),
      &return_runtime);

  // Goto runtime if {callable} is not a constructor or has
  // a non-instance "prototype".
  Node* callable_bitfield = LoadMapBitField(callable_map);
  GotoIfNot(
      Word32Equal(Word32And(callable_bitfield,
                            Int32Constant((1 << Map::kHasNonInstancePrototype) |
                                          (1 << Map::kIsConstructor))),
                  Int32Constant(1 << Map::kIsConstructor)),
      &return_runtime);

  // Get the "prototype" (or initial map) of the {callable}.
  Node* callable_prototype =
      LoadObjectField(callable, JSFunction::kPrototypeOrInitialMapOffset);
  {
    Label callable_prototype_valid(this);
    Variable var_callable_prototype(this, MachineRepresentation::kTagged,
                                    callable_prototype);

    // Resolve the "prototype" if the {callable} has an initial map. Afterwards
    // the {callable_prototype} will be either the JSReceiver prototype object
    // or the hole value, which means that no instances of the {callable} were
    // created so far and hence we should return false.
    Node* callable_prototype_instance_type =
        LoadInstanceType(callable_prototype);
    GotoIfNot(
        Word32Equal(callable_prototype_instance_type, Int32Constant(MAP_TYPE)),
        &callable_prototype_valid);
    var_callable_prototype.Bind(
        LoadObjectField(callable_prototype, Map::kPrototypeOffset));
    Goto(&callable_prototype_valid);
    Bind(&callable_prototype_valid);
    callable_prototype = var_callable_prototype.value();
  }

  // Update the global instanceof cache with the current {object} map and
  // {callable}. The cached answer will be set when it is known below.
  StoreRoot(Heap::kInstanceofCacheFunctionRootIndex, callable);
  StoreRoot(Heap::kInstanceofCacheMapRootIndex, object_map);

  // Loop through the prototype chain looking for the {callable} prototype.
  Variable var_object_map(this, MachineRepresentation::kTagged, object_map);
  Label loop(this, &var_object_map);
  Goto(&loop);
  Bind(&loop);
  {
    Node* object_map = var_object_map.value();

    // Check if the current {object} needs to be access checked.
    Node* object_bitfield = LoadMapBitField(object_map);
    GotoIfNot(
        Word32Equal(Word32And(object_bitfield,
                              Int32Constant(1 << Map::kIsAccessCheckNeeded)),
                    Int32Constant(0)),
        &return_runtime);

    // Check if the current {object} is a proxy.
    Node* object_instance_type = LoadMapInstanceType(object_map);
    GotoIf(Word32Equal(object_instance_type, Int32Constant(JS_PROXY_TYPE)),
           &return_runtime);

    // Check the current {object} prototype.
    Node* object_prototype = LoadMapPrototype(object_map);
    GotoIf(WordEqual(object_prototype, NullConstant()), &return_false);
    GotoIf(WordEqual(object_prototype, callable_prototype), &return_true);

    // Continue with the prototype.
    var_object_map.Bind(LoadMap(object_prototype));
    Goto(&loop);
  }

  Bind(&return_true);
  StoreRoot(Heap::kInstanceofCacheAnswerRootIndex, BooleanConstant(true));
  var_result.Bind(BooleanConstant(true));
  Goto(&return_result);

  Bind(&return_false);
  StoreRoot(Heap::kInstanceofCacheAnswerRootIndex, BooleanConstant(false));
  var_result.Bind(BooleanConstant(false));
  Goto(&return_result);

  Bind(&return_runtime);
  {
    // Invalidate the global instanceof cache.
    StoreRoot(Heap::kInstanceofCacheFunctionRootIndex, SmiConstant(0));
    // Fallback to the runtime implementation.
    var_result.Bind(
        CallRuntime(Runtime::kOrdinaryHasInstance, context, callable, object));
  }
  Goto(&return_result);

  Bind(&return_result);
  return var_result.value();
}

Node* CodeStubAssembler::ElementOffsetFromIndex(Node* index_node,
                                                ElementsKind kind,
                                                ParameterMode mode,
                                                int base_size) {
  int element_size_shift = ElementsKindToShiftSize(kind);
  int element_size = 1 << element_size_shift;
  int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
  intptr_t index = 0;
  bool constant_index = false;
  if (mode == SMI_PARAMETERS) {
    element_size_shift -= kSmiShiftBits;
    Smi* smi_index;
    constant_index = ToSmiConstant(index_node, smi_index);
    if (constant_index) index = smi_index->value();
    index_node = BitcastTaggedToWord(index_node);
  } else {
    DCHECK(mode == INTPTR_PARAMETERS);
    constant_index = ToIntPtrConstant(index_node, index);
  }
  if (constant_index) {
    return IntPtrConstant(base_size + element_size * index);
  }

  Node* shifted_index =
      (element_size_shift == 0)
          ? index_node
          : ((element_size_shift > 0)
                 ? WordShl(index_node, IntPtrConstant(element_size_shift))
                 : WordShr(index_node, IntPtrConstant(-element_size_shift)));
  return IntPtrAdd(IntPtrConstant(base_size), shifted_index);
}
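// Example (sketch): for FAST_HOLEY_ELEMENTS on a 64-bit target the shift is
// kPointerSizeLog2 == 3, so with INTPTR_PARAMETERS the offset is
// base_size + (index << 3). In SMI_PARAMETERS mode the index arrives
// pre-shifted by kSmiShiftBits, so the effective shift is reduced and, on
// targets where Smis live in the upper word, becomes a right shift.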

Node* CodeStubAssembler::LoadFeedbackVectorForStub() {
  Node* function =
      LoadFromParentFrame(JavaScriptFrameConstants::kFunctionOffset);
  Node* cell = LoadObjectField(function, JSFunction::kFeedbackVectorOffset);
  return LoadObjectField(cell, Cell::kValueOffset);
}

void CodeStubAssembler::UpdateFeedback(Node* feedback, Node* feedback_vector,
                                       Node* slot_id) {
  // This method is used for binary op and compare feedback. These
  // vector nodes are initialized with a smi 0, so we can simply OR
  // our new feedback in place.
  Node* previous_feedback = LoadFixedArrayElement(feedback_vector, slot_id);
  Node* combined_feedback = SmiOr(previous_feedback, feedback);
  StoreFixedArrayElement(feedback_vector, slot_id, combined_feedback,
                         SKIP_WRITE_BARRIER);
}
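// E.g. (illustrative bit values): a slot holding feedback 0b01 combined
// with new feedback 0b10 is stored as 0b11; feedback bits only ever
// accumulate. The write barrier can be skipped because the combined value
// is always a Smi.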

Node* CodeStubAssembler::LoadReceiverMap(Node* receiver) {
  Variable var_receiver_map(this, MachineRepresentation::kTagged);
  Label load_smi_map(this, Label::kDeferred), load_receiver_map(this),
      if_result(this);

  Branch(TaggedIsSmi(receiver), &load_smi_map, &load_receiver_map);
  Bind(&load_smi_map);
  {
    var_receiver_map.Bind(LoadRoot(Heap::kHeapNumberMapRootIndex));
    Goto(&if_result);
  }
  Bind(&load_receiver_map);
  {
    var_receiver_map.Bind(LoadMap(receiver));
    Goto(&if_result);
  }
  Bind(&if_result);
  return var_receiver_map.value();
}

Node* CodeStubAssembler::TryToIntptr(Node* key, Label* miss) {
  Variable var_intptr_key(this, MachineType::PointerRepresentation());
  Label done(this, &var_intptr_key), key_is_smi(this);
  GotoIf(TaggedIsSmi(key), &key_is_smi);
  // Try to convert a heap number key to an intptr; miss unless it holds an
  // exact int32 value.
  GotoIfNot(IsHeapNumberMap(LoadMap(key)), miss);
  {
    Node* value = LoadHeapNumberValue(key);
    Node* int_value = RoundFloat64ToInt32(value);
    GotoIfNot(Float64Equal(value, ChangeInt32ToFloat64(int_value)), miss);
    var_intptr_key.Bind(ChangeInt32ToIntPtr(int_value));
    Goto(&done);
  }

  Bind(&key_is_smi);
  {
    var_intptr_key.Bind(SmiUntag(key));
    Goto(&done);
  }

  Bind(&done);
  return var_intptr_key.value();
}

Node* CodeStubAssembler::EmitKeyedSloppyArguments(Node* receiver, Node* key,
                                                  Node* value, Label* bailout) {
  // Mapped arguments are actual arguments. Unmapped arguments are values added
  // to the arguments object after it was created for the call. Mapped
  // arguments are stored in the context at indexes given by elements[key + 2].
  // Unmapped arguments are stored as regular indexed properties in the
  // arguments array, held at elements[1]. See NewSloppyArguments() in
  // runtime.cc for a detailed look at argument object construction.
  //
  // The sloppy arguments elements array has a special format:
  //
  // 0: context
  // 1: unmapped arguments array
  // 2: mapped_index0,
  // 3: mapped_index1,
  // ...
  //
  // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
  // If key + 2 >= elements.length then attempt to look in the unmapped
  // arguments array (given by elements[1]) and return the value at key,
  // missing to the runtime if the unmapped arguments array is not a fixed
  // array or if key >= unmapped_arguments_array.length.
  //
  // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
  // in the unmapped arguments array, as described above. Otherwise, t is a Smi
  // index into the context array given at elements[0]. Return the value at
  // context[t].
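  //
  // For example (illustrative, not a dump of a real object): for
  //   function f(a) {}  called as  f(1, 2)
  // min(2, 1) == 1 index is mapped, so elements has length 3:
  //   0: context, 1: unmapped arguments array, 2: mapped_index0
  // and key 0 goes through the context mapping (unless mapped_index0 is the
  // hole), while keys >= 1 fall through to the unmapped arguments array.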

  bool is_load = value == nullptr;

  GotoIfNot(TaggedIsSmi(key), bailout);
  key = SmiUntag(key);
  GotoIf(IntPtrLessThan(key, IntPtrConstant(0)), bailout);

  Node* elements = LoadElements(receiver);
  Node* elements_length = LoadAndUntagFixedArrayBaseLength(elements);

  Variable var_result(this, MachineRepresentation::kTagged);
  if (!is_load) {
    var_result.Bind(value);
  }
  Label if_mapped(this), if_unmapped(this), end(this, &var_result);
  Node* intptr_two = IntPtrConstant(2);
  Node* adjusted_length = IntPtrSub(elements_length, intptr_two);

  GotoIf(UintPtrGreaterThanOrEqual(key, adjusted_length), &if_unmapped);

  Node* mapped_index =
      LoadFixedArrayElement(elements, IntPtrAdd(key, intptr_two));
  Branch(WordEqual(mapped_index, TheHoleConstant()), &if_unmapped, &if_mapped);

  Bind(&if_mapped);
  {
    CSA_ASSERT(this, TaggedIsSmi(mapped_index));
    mapped_index = SmiUntag(mapped_index);
    Node* the_context = LoadFixedArrayElement(elements, 0);
    // Assert that we can use LoadFixedArrayElement/StoreFixedArrayElement
    // methods for accessing Context.
    STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize);
    DCHECK_EQ(Context::SlotOffset(0) + kHeapObjectTag,
              FixedArray::OffsetOfElementAt(0));
    if (is_load) {
      Node* result = LoadFixedArrayElement(the_context, mapped_index);
      CSA_ASSERT(this, WordNotEqual(result, TheHoleConstant()));
      var_result.Bind(result);
    } else {
      StoreFixedArrayElement(the_context, mapped_index, value);
    }
    Goto(&end);
  }

  Bind(&if_unmapped);
  {
    Node* backing_store = LoadFixedArrayElement(elements, 1);
    GotoIf(WordNotEqual(LoadMap(backing_store), FixedArrayMapConstant()),
           bailout);

    Node* backing_store_length =
        LoadAndUntagFixedArrayBaseLength(backing_store);
    GotoIf(UintPtrGreaterThanOrEqual(key, backing_store_length), bailout);

    // The key falls into unmapped range.
    if (is_load) {
      Node* result = LoadFixedArrayElement(backing_store, key);
      GotoIf(WordEqual(result, TheHoleConstant()), bailout);
      var_result.Bind(result);
    } else {
      StoreFixedArrayElement(backing_store, key, value);
    }
    Goto(&end);
  }

  Bind(&end);
  return var_result.value();
}

Node* CodeStubAssembler::LoadScriptContext(Node* context, int context_index) {
  Node* native_context = LoadNativeContext(context);
  Node* script_context_table =
      LoadContextElement(native_context, Context::SCRIPT_CONTEXT_TABLE_INDEX);

  int offset =
      ScriptContextTable::GetContextOffset(context_index) - kHeapObjectTag;
  return Load(MachineType::AnyTagged(), script_context_table,
              IntPtrConstant(offset));
}

namespace {

// Converts a typed array elements kind to a machine representation.
MachineRepresentation ElementsKindToMachineRepresentation(ElementsKind kind) {
  switch (kind) {
    case UINT8_CLAMPED_ELEMENTS:
    case UINT8_ELEMENTS:
    case INT8_ELEMENTS:
      return MachineRepresentation::kWord8;
    case UINT16_ELEMENTS:
    case INT16_ELEMENTS:
      return MachineRepresentation::kWord16;
    case UINT32_ELEMENTS:
    case INT32_ELEMENTS:
      return MachineRepresentation::kWord32;
    case FLOAT32_ELEMENTS:
      return MachineRepresentation::kFloat32;
    case FLOAT64_ELEMENTS:
      return MachineRepresentation::kFloat64;
    default:
      UNREACHABLE();
      return MachineRepresentation::kNone;
  }
}

}  // namespace

void CodeStubAssembler::StoreElement(Node* elements, ElementsKind kind,
                                     Node* index, Node* value,
                                     ParameterMode mode) {
  if (IsFixedTypedArrayElementsKind(kind)) {
    if (kind == UINT8_CLAMPED_ELEMENTS) {
      CSA_ASSERT(this,
                 Word32Equal(value, Word32And(Int32Constant(0xff), value)));
    }
    Node* offset = ElementOffsetFromIndex(index, kind, mode, 0);
    MachineRepresentation rep = ElementsKindToMachineRepresentation(kind);
    StoreNoWriteBarrier(rep, elements, offset, value);
    return;
  }

  WriteBarrierMode barrier_mode =
      IsFastSmiElementsKind(kind) ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER;
  if (IsFastDoubleElementsKind(kind)) {
    // Make sure we do not store signalling NaNs into double arrays.
    value = Float64SilenceNaN(value);
    StoreFixedDoubleArrayElement(elements, index, value, mode);
  } else {
    StoreFixedArrayElement(elements, index, value, barrier_mode, 0, mode);
  }
}

Node* CodeStubAssembler::Int32ToUint8Clamped(Node* int32_value) {
  Label done(this);
  Node* int32_zero = Int32Constant(0);
  Node* int32_255 = Int32Constant(255);
  Variable var_value(this, MachineRepresentation::kWord32, int32_value);
  GotoIf(Uint32LessThanOrEqual(int32_value, int32_255), &done);
  var_value.Bind(int32_zero);
  GotoIf(Int32LessThan(int32_value, int32_zero), &done);
  var_value.Bind(int32_255);
  Goto(&done);
  Bind(&done);
  return var_value.value();
}
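// Scalar sketch of the clamp above: values already in [0, 255] pass the
// first unsigned compare directly, since any negative int32 reinterpreted
// as uint32 is far greater than 255; after that, a single signed compare
// against 0 decides between the 0 and 255 clamp results.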

Node* CodeStubAssembler::Float64ToUint8Clamped(Node* float64_value) {
  Label done(this);
  Variable var_value(this, MachineRepresentation::kWord32, Int32Constant(0));
  GotoIf(Float64LessThanOrEqual(float64_value, Float64Constant(0.0)), &done);
  var_value.Bind(Int32Constant(255));
  GotoIf(Float64LessThanOrEqual(Float64Constant(255.0), float64_value), &done);
  {
    Node* rounded_value = Float64RoundToEven(float64_value);
    var_value.Bind(TruncateFloat64ToWord32(rounded_value));
    Goto(&done);
  }
  Bind(&done);
  return var_value.value();
}
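// Scalar sketch: values <= 0.0 clamp to 0, values >= 255.0 clamp to 255,
// and everything in between is rounded to the nearest integer with ties to
// even (e.g. 2.5 -> 2, 3.5 -> 4) before truncation to word32.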

Node* CodeStubAssembler::PrepareValueForWriteToTypedArray(
    Node* input, ElementsKind elements_kind, Label* bailout) {
  DCHECK(IsFixedTypedArrayElementsKind(elements_kind));

  MachineRepresentation rep;
  switch (elements_kind) {
    case UINT8_ELEMENTS:
    case INT8_ELEMENTS:
    case UINT16_ELEMENTS:
    case INT16_ELEMENTS:
    case UINT32_ELEMENTS:
    case INT32_ELEMENTS:
    case UINT8_CLAMPED_ELEMENTS:
      rep = MachineRepresentation::kWord32;
      break;
    case FLOAT32_ELEMENTS:
      rep = MachineRepresentation::kFloat32;
      break;
    case FLOAT64_ELEMENTS:
      rep = MachineRepresentation::kFloat64;
      break;
    default:
      UNREACHABLE();
      return nullptr;
  }

  Variable var_result(this, rep);
  Label done(this, &var_result), if_smi(this);
  GotoIf(TaggedIsSmi(input), &if_smi);
  // Otherwise the input must be a heap number; bail out if it is not.
5838 GotoIfNot(IsHeapNumberMap(LoadMap(input)), bailout);
5839 {
5840 Node* value = LoadHeapNumberValue(input);
5841 if (rep == MachineRepresentation::kWord32) {
5842 if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
5843 value = Float64ToUint8Clamped(value);
5844 } else {
5845 value = TruncateFloat64ToWord32(value);
5846 }
5847 } else if (rep == MachineRepresentation::kFloat32) {
5848 value = TruncateFloat64ToFloat32(value);
5849 } else {
5850 DCHECK_EQ(MachineRepresentation::kFloat64, rep);
5851 }
5852 var_result.Bind(value);
5853 Goto(&done);
5854 }
5855
5856 Bind(&if_smi);
5857 {
5858 Node* value = SmiToWord32(input);
5859 if (rep == MachineRepresentation::kFloat32) {
5860 value = RoundInt32ToFloat32(value);
5861 } else if (rep == MachineRepresentation::kFloat64) {
5862 value = ChangeInt32ToFloat64(value);
5863 } else {
5864 DCHECK_EQ(MachineRepresentation::kWord32, rep);
5865 if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
5866 value = Int32ToUint8Clamped(value);
5867 }
5868 }
5869 var_result.Bind(value);
5870 Goto(&done);
5871 }
5872
5873 Bind(&done);
5874 return var_result.value();
5875 }
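// Conversion summary (descriptive note): Smi inputs are converted directly
// (SmiToWord32, RoundInt32ToFloat32 or ChangeInt32ToFloat64), HeapNumber
// inputs are loaded and then truncated, narrowed or clamped as required by
// the elements kind, and any other input (e.g. a String) takes the |bailout|
// path so the caller can run a full ToNumber first.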
5876
5877 void CodeStubAssembler::EmitElementStore(Node* object, Node* key, Node* value,
5878 bool is_jsarray,
5879 ElementsKind elements_kind,
5880 KeyedAccessStoreMode store_mode,
5881 Label* bailout) {
5882 Node* elements = LoadElements(object);
5883 if (IsFastSmiOrObjectElementsKind(elements_kind) &&
5884 store_mode != STORE_NO_TRANSITION_HANDLE_COW) {
5885 // Bailout in case of COW elements.
5886 GotoIf(WordNotEqual(LoadMap(elements),
5887 LoadRoot(Heap::kFixedArrayMapRootIndex)),
5888 bailout);
5889 }
5890 // TODO(ishell): introduce TryToIntPtrOrSmi() and use OptimalParameterMode().
5891 ParameterMode parameter_mode = INTPTR_PARAMETERS;
5892 key = TryToIntptr(key, bailout);
5893
5894 if (IsFixedTypedArrayElementsKind(elements_kind)) {
5895 Label done(this);
5896 // TODO(ishell): call ToNumber() on value and don't bail out, but be careful
5897 // to call it only once if we decide to bail out because of bounds checks.
5898
5899 value = PrepareValueForWriteToTypedArray(value, elements_kind, bailout);
5900
5901 // There must be no allocations between the buffer load and the actual
5902 // store to the backing store, because the GC may decide that the buffer
5903 // is not alive or may move the elements.
5904 // TODO(ishell): introduce DisallowHeapAllocationCode scope here.
5905
5906 // Check if buffer has been neutered.
5907 Node* buffer = LoadObjectField(object, JSArrayBufferView::kBufferOffset);
5908 GotoIf(IsDetachedBuffer(buffer), bailout);
5909
5910 // Bounds check.
5911 Node* length = TaggedToParameter(
5912 LoadObjectField(object, JSTypedArray::kLengthOffset), parameter_mode);
5913
5914 if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
5915 // Skip the store if we write beyond the length.
5916 GotoIfNot(IntPtrLessThan(key, length), &done);
5917 // ... but bail out if the key is negative.
5918 } else {
5919 DCHECK_EQ(STANDARD_STORE, store_mode);
5920 }
5921 GotoIfNot(UintPtrLessThan(key, length), bailout);
5922
5923 // Backing store = external_pointer + base_pointer.
5924 Node* external_pointer =
5925 LoadObjectField(elements, FixedTypedArrayBase::kExternalPointerOffset,
5926 MachineType::Pointer());
5927 Node* base_pointer =
5928 LoadObjectField(elements, FixedTypedArrayBase::kBasePointerOffset);
5929 Node* backing_store =
5930 IntPtrAdd(external_pointer, BitcastTaggedToWord(base_pointer));
5931 StoreElement(backing_store, elements_kind, key, value, parameter_mode);
5932 Goto(&done);
5933
5934 Bind(&done);
5935 return;
5936 }
5937 DCHECK(IsFastSmiOrObjectElementsKind(elements_kind) ||
5938 IsFastDoubleElementsKind(elements_kind));
5939
5940 Node* length = is_jsarray ? LoadObjectField(object, JSArray::kLengthOffset)
5941 : LoadFixedArrayBaseLength(elements);
5942 length = TaggedToParameter(length, parameter_mode);
5943
5944 // In case the value is stored into a fast Smi array, ensure that it is
5945 // a Smi before manipulating the backing store. Otherwise the backing store
5946 // may be left in an invalid state.
5947 if (IsFastSmiElementsKind(elements_kind)) {
5948 GotoIfNot(TaggedIsSmi(value), bailout);
5949 } else if (IsFastDoubleElementsKind(elements_kind)) {
5950 value = TryTaggedToFloat64(value, bailout);
5951 }
5952
5953 if (IsGrowStoreMode(store_mode)) {
5954 elements = CheckForCapacityGrow(object, elements, elements_kind, length,
5955 key, parameter_mode, is_jsarray, bailout);
5956 } else {
5957 GotoIfNot(UintPtrLessThan(key, length), bailout);
5958
5959 if ((store_mode == STORE_NO_TRANSITION_HANDLE_COW) &&
5960 IsFastSmiOrObjectElementsKind(elements_kind)) {
5961 elements = CopyElementsOnWrite(object, elements, elements_kind, length,
5962 parameter_mode, bailout);
5963 }
5964 }
5965 StoreElement(elements, elements_kind, key, value, parameter_mode);
5966 }
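// Store-mode summary for the code above (assumed semantics of
// KeyedAccessStoreMode): STANDARD_STORE bails out on any out-of-bounds key;
// STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS silently skips typed-array writes
// at or past the length (negative keys still bail out); grow modes
// (IsGrowStoreMode) enlarge the backing store via CheckForCapacityGrow; and
// STORE_NO_TRANSITION_HANDLE_COW copies copy-on-write elements via
// CopyElementsOnWrite before writing.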
5967
5968 Node* CodeStubAssembler::CheckForCapacityGrow(Node* object, Node* elements,
5969 ElementsKind kind, Node* length,
5970 Node* key, ParameterMode mode,
5971 bool is_js_array,
5972 Label* bailout) {
5973 Variable checked_elements(this, MachineRepresentation::kTagged);
5974 Label grow_case(this), no_grow_case(this), done(this);
5975
5976 Node* condition;
5977 if (IsHoleyElementsKind(kind)) {
5978 condition = UintPtrGreaterThanOrEqual(key, length);
5979 } else {
5980 condition = WordEqual(key, length);
5981 }
5982 Branch(condition, &grow_case, &no_grow_case);
5983
5984 Bind(&grow_case);
5985 {
5986 Node* current_capacity =
5987 TaggedToParameter(LoadFixedArrayBaseLength(elements), mode);
5988
5989 checked_elements.Bind(elements);
5990
5991 Label fits_capacity(this);
5992 GotoIf(UintPtrLessThan(key, current_capacity), &fits_capacity);
5993 {
5994 Node* new_elements = TryGrowElementsCapacity(
5995 object, elements, kind, key, current_capacity, mode, bailout);
5996
5997 checked_elements.Bind(new_elements);
5998 Goto(&fits_capacity);
5999 }
6000 Bind(&fits_capacity);
6001
6002 if (is_js_array) {
6003 Node* new_length = IntPtrAdd(key, IntPtrOrSmiConstant(1, mode));
6004 StoreObjectFieldNoWriteBarrier(object, JSArray::kLengthOffset,
6005 ParameterToTagged(new_length, mode));
6006 }
6007 Goto(&done);
6008 }
6009
6010 Bind(&no_grow_case);
6011 {
6012 GotoIfNot(UintPtrLessThan(key, length), bailout);
6013 checked_elements.Bind(elements);
6014 Goto(&done);
6015 }
6016
6017 Bind(&done);
6018 return checked_elements.value();
6019 }
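// Example (sketch): storing to a FAST_ELEMENTS JSArray of length 3 at key 3
// takes the grow path above: the capacity is enlarged via
// TryGrowElementsCapacity if needed and the JSArray length is bumped to 4.
// Holey kinds grow for any key >= length; packed kinds only for key == length,
// which keeps them free of holes.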
6020
6021 Node* CodeStubAssembler::CopyElementsOnWrite(Node* object, Node* elements,
6022 ElementsKind kind, Node* length,
6023 ParameterMode mode,
6024 Label* bailout) {
6025 Variable new_elements_var(this, MachineRepresentation::kTagged, elements);
6026 Label done(this);
6027
6028 GotoIfNot(
6029 WordEqual(LoadMap(elements), LoadRoot(Heap::kFixedCOWArrayMapRootIndex)),
6030 &done);
6031 {
6032 Node* capacity =
6033 TaggedToParameter(LoadFixedArrayBaseLength(elements), mode);
6034 Node* new_elements = GrowElementsCapacity(object, elements, kind, kind,
6035 length, capacity, mode, bailout);
6036
6037 new_elements_var.Bind(new_elements);
6038 Goto(&done);
6039 }
6040
6041 Bind(&done);
6042 return new_elements_var.value();
6043 }
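// Note: elements with the fixed_cow_array_map are shared copy-on-write
// backing stores (e.g. between arrays created from the same literal
// boilerplate), so they must be copied before the first in-place write;
// GrowElementsCapacity with the unchanged capacity performs exactly that copy.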
6044
6045 void CodeStubAssembler::TransitionElementsKind(Node* object, Node* map,
6046 ElementsKind from_kind,
6047 ElementsKind to_kind,
6048 bool is_jsarray,
6049 Label* bailout) {
6050 DCHECK(!IsFastHoleyElementsKind(from_kind) ||
6051 IsFastHoleyElementsKind(to_kind));
6052 if (AllocationSite::GetMode(from_kind, to_kind) == TRACK_ALLOCATION_SITE) {
6053 TrapAllocationMemento(object, bailout);
6054 }
6055
6056 if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
6057 Comment("Non-simple map transition");
6058 Node* elements = LoadElements(object);
6059
6060 Node* empty_fixed_array =
6061 HeapConstant(isolate()->factory()->empty_fixed_array());
6062
6063 Label done(this);
6064 GotoIf(WordEqual(elements, empty_fixed_array), &done);
6065
6066 // TODO(ishell): Use OptimalParameterMode().
6067 ParameterMode mode = INTPTR_PARAMETERS;
6068 Node* elements_length = SmiUntag(LoadFixedArrayBaseLength(elements));
6069 Node* array_length =
6070 is_jsarray ? SmiUntag(LoadObjectField(object, JSArray::kLengthOffset))
6071 : elements_length;
6072
6073 GrowElementsCapacity(object, elements, from_kind, to_kind, array_length,
6074 elements_length, mode, bailout);
6075 Goto(&done);
6076 Bind(&done);
6077 }
6078
6079 StoreMap(object, map);
6080 }
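// Example (sketch): FAST_SMI_ELEMENTS -> FAST_ELEMENTS is a simple map change
// (same backing-store layout), so only the StoreMap at the end is needed,
// whereas FAST_SMI_ELEMENTS -> FAST_DOUBLE_ELEMENTS switches from tagged Smis
// to unboxed doubles, so the branch above re-allocates and converts the
// elements via GrowElementsCapacity before installing the new map.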
6081
6082 void CodeStubAssembler::TrapAllocationMemento(Node* object,
6083 Label* memento_found) {
6084 Comment("[ TrapAllocationMemento");
6085 Label no_memento_found(this);
6086 Label top_check(this), map_check(this);
6087
6088 Node* new_space_top_address = ExternalConstant(
6089 ExternalReference::new_space_allocation_top_address(isolate()));
6090 const int kMementoMapOffset = JSArray::kSize;
6091 const int kMementoLastWordOffset =
6092 kMementoMapOffset + AllocationMemento::kSize - kPointerSize;
6093
6094 // Bail out if the object is not in new space.
6095 Node* object_word = BitcastTaggedToWord(object);
6096 Node* object_page = PageFromAddress(object_word);
6097 {
6098 Node* page_flags = Load(MachineType::IntPtr(), object_page,
6099 IntPtrConstant(Page::kFlagsOffset));
6100 GotoIf(WordEqual(WordAnd(page_flags,
6101 IntPtrConstant(MemoryChunk::kIsInNewSpaceMask)),
6102 IntPtrConstant(0)),
6103 &no_memento_found);
6104 }
6105
6106 Node* memento_last_word = IntPtrAdd(
6107 object_word, IntPtrConstant(kMementoLastWordOffset - kHeapObjectTag));
6108 Node* memento_last_word_page = PageFromAddress(memento_last_word);
6109
6110 Node* new_space_top = Load(MachineType::Pointer(), new_space_top_address);
6111 Node* new_space_top_page = PageFromAddress(new_space_top);
6112
6113 // If the object is in new space, we need to check whether the respective
6114 // potential memento object is on the same page as the current top.
6115 GotoIf(WordEqual(memento_last_word_page, new_space_top_page), &top_check);
6116
6117 // The object is on a different page than allocation top. Bail out if the
6118 // object sits on the page boundary as no memento can follow and we cannot
6119 // touch the memory following it.
6120 Branch(WordEqual(object_page, memento_last_word_page), &map_check,
6121 &no_memento_found);
6122
6123 // If top is on the same page as the current object, we need to check whether
6124 // we are below top.
6125 Bind(&top_check);
6126 {
6127 Branch(UintPtrGreaterThanOrEqual(memento_last_word, new_space_top),
6128 &no_memento_found, &map_check);
6129 }
6130
6131 // Memento map check.
6132 Bind(&map_check);
6133 {
6134 Node* memento_map = LoadObjectField(object, kMementoMapOffset);
6135 Branch(
6136 WordEqual(memento_map, LoadRoot(Heap::kAllocationMementoMapRootIndex)),
6137 memento_found, &no_memento_found);
6138 }
6139 Bind(&no_memento_found);
6140 Comment("] TrapAllocationMemento");
6141 }
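// Memory layout assumed by the offsets above: a memento, when present,
// immediately follows the JSArray in new space,
//
//   | JSArray (JSArray::kSize) | AllocationMemento (map, allocation site) |
//
// hence the memento map word is probed at offset JSArray::kSize and the last
// memento word at JSArray::kSize + AllocationMemento::kSize - kPointerSize.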
6142
6143 Node* CodeStubAssembler::PageFromAddress(Node* address) {
6144 return WordAnd(address, IntPtrConstant(~Page::kPageAlignmentMask));
6145 }
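// Sketch: Page::kPageAlignmentMask is Page::kPageSize - 1, so this simply
// masks off the low bits of an interior pointer. Assuming the usual 512 KB
// page size:
//   PageFromAddress(0x12345678)  // -> 0x12300000 (0x12345678 & ~0x7ffff)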
6146
6147 Node* CodeStubAssembler::EnumLength(Node* map) {
6148 CSA_ASSERT(this, IsMap(map));
6149 Node* bitfield_3 = LoadMapBitField3(map);
6150 Node* enum_length = DecodeWordFromWord32<Map::EnumLengthBits>(bitfield_3);
6151 return SmiTag(enum_length);
6152 }
6153
6154 void CodeStubAssembler::CheckEnumCache(Node* receiver, Label* use_cache,
6155 Label* use_runtime) {
6156 Variable current_js_object(this, MachineRepresentation::kTagged, receiver);
6157
6158 Variable current_map(this, MachineRepresentation::kTagged,
6159 LoadMap(current_js_object.value()));
6160
6161 // These variables are updated in the loop below.
6162 Variable* loop_vars[2] = {&current_js_object, &current_map};
6163 Label loop(this, 2, loop_vars), next(this);
6164
6165 // Check if the enum length field is properly initialized, indicating that
6166 // there is an enum cache.
6167 {
6168 Node* invalid_enum_cache_sentinel =
6169 SmiConstant(Smi::FromInt(kInvalidEnumCacheSentinel));
6170 Node* enum_length = EnumLength(current_map.value());
6171 Branch(WordEqual(enum_length, invalid_enum_cache_sentinel), use_runtime,
6172 &loop);
6173 }
6174
6175 // Check that there are no elements. |current_js_object| contains
6176 // the current JS object we've reached through the prototype chain.
6177 Bind(&loop);
6178 {
6179 Label if_elements(this), if_no_elements(this);
6180 Node* elements = LoadElements(current_js_object.value());
6181 Node* empty_fixed_array = LoadRoot(Heap::kEmptyFixedArrayRootIndex);
6182 // Check that there are no elements.
6183 Branch(WordEqual(elements, empty_fixed_array), &if_no_elements,
6184 &if_elements);
6185 Bind(&if_elements);
6186 {
6187 // Second chance, the object may be using the empty slow element
6188 // dictionary.
6189 Node* slow_empty_dictionary =
6190 LoadRoot(Heap::kEmptySlowElementDictionaryRootIndex);
6191 Branch(WordNotEqual(elements, slow_empty_dictionary), use_runtime,
6192 &if_no_elements);
6193 }
6194
6195 Bind(&if_no_elements);
6196 {
6197 // Update map prototype.
6198 current_js_object.Bind(LoadMapPrototype(current_map.value()));
6199 Branch(WordEqual(current_js_object.value(), NullConstant()), use_cache,
6200 &next);
6201 }
6202 }
6203
6204 Bind(&next);
6205 {
6206 // For all objects but the receiver, check that the cache is empty.
6207 current_map.Bind(LoadMap(current_js_object.value()));
6208 Node* enum_length = EnumLength(current_map.value());
6209 Node* zero_constant = SmiConstant(Smi::kZero);
6210 Branch(WordEqual(enum_length, zero_constant), &loop, use_runtime);
6211 }
6212 }
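// Summary (descriptive note): the enum cache may be used only if the
// receiver's map has a valid EnumLength (not kInvalidEnumCacheSentinel), the
// receiver and all of its prototypes have no elements, and every prototype's
// EnumLength is zero (no enumerable properties that would need merging);
// every other configuration defers to |use_runtime|.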
6213
6214 Node* CodeStubAssembler::CreateAllocationSiteInFeedbackVector(
6215 Node* feedback_vector, Node* slot) {
6216 Node* size = IntPtrConstant(AllocationSite::kSize);
6217 Node* site = Allocate(size, CodeStubAssembler::kPretenured);
6218
6219 StoreMap(site, AllocationSiteMapConstant());
6220 Node* kind = SmiConstant(GetInitialFastElementsKind());
6221 StoreObjectFieldNoWriteBarrier(site, AllocationSite::kTransitionInfoOffset,
6222 kind);
6223
6224 // Unlike literals, constructed arrays don't have nested sites
6225 Node* zero = SmiConstant(0);
6226 StoreObjectFieldNoWriteBarrier(site, AllocationSite::kNestedSiteOffset, zero);
6227
6228 // Pretenuring calculation field.
6229 StoreObjectFieldNoWriteBarrier(site, AllocationSite::kPretenureDataOffset,
6230 zero);
6231
6232 // Pretenuring memento creation count field.
6233 StoreObjectFieldNoWriteBarrier(
6234 site, AllocationSite::kPretenureCreateCountOffset, zero);
6235
6236 // Store an empty fixed array for the code dependency.
6237 StoreObjectFieldRoot(site, AllocationSite::kDependentCodeOffset,
6238 Heap::kEmptyFixedArrayRootIndex);
6239
6240 // Link the object to the allocation site list
6241 Node* site_list = ExternalConstant(
6242 ExternalReference::allocation_sites_list_address(isolate()));
6243 Node* next_site = LoadBufferObject(site_list, 0);
6244
6245 // TODO(mvstanton): This is a store to a weak pointer, which we may want to
6246 // mark as such in order to skip the write barrier, once we have a unified
6247 // system for weakness. For now we decided to keep it like this because having
6248 // an initial write barrier backed store makes this pointer strong until the
6249 // next GC, and allocation sites are designed to survive several GCs anyway.
6250 StoreObjectField(site, AllocationSite::kWeakNextOffset, next_site);
6251 StoreNoWriteBarrier(MachineRepresentation::kTagged, site_list, site);
6252
6253 StoreFixedArrayElement(feedback_vector, slot, site, UPDATE_WRITE_BARRIER, 0,
6254 CodeStubAssembler::SMI_PARAMETERS);
6255 return site;
6256 }
6257
6258 Node* CodeStubAssembler::CreateWeakCellInFeedbackVector(Node* feedback_vector,
6259 Node* slot,
6260 Node* value) {
6261 Node* size = IntPtrConstant(WeakCell::kSize);
6262 Node* cell = Allocate(size, CodeStubAssembler::kPretenured);
6263
6264 // Initialize the WeakCell.
6265 DCHECK(Heap::RootIsImmortalImmovable(Heap::kWeakCellMapRootIndex));
6266 StoreMapNoWriteBarrier(cell, Heap::kWeakCellMapRootIndex);
6267 StoreObjectField(cell, WeakCell::kValueOffset, value);
6268 StoreObjectFieldRoot(cell, WeakCell::kNextOffset,
6269 Heap::kTheHoleValueRootIndex);
6270
6271 // Store the WeakCell in the feedback vector.
6272 StoreFixedArrayElement(feedback_vector, slot, cell, UPDATE_WRITE_BARRIER, 0,
6273 CodeStubAssembler::SMI_PARAMETERS);
6274 return cell;
6275 }
6276
6277 Node* CodeStubAssembler::BuildFastLoop(
6278 const CodeStubAssembler::VariableList& vars, Node* start_index,
6279 Node* end_index, const FastLoopBody& body, int increment,
6280 ParameterMode parameter_mode, IndexAdvanceMode advance_mode) {
6281 MachineRepresentation index_rep = (parameter_mode == INTPTR_PARAMETERS)
6282 ? MachineType::PointerRepresentation()
6283 : MachineRepresentation::kTaggedSigned;
6284 Variable var(this, index_rep, start_index);
6285 VariableList vars_copy(vars, zone());
6286 vars_copy.Add(&var, zone());
6287 Label loop(this, vars_copy);
6288 Label after_loop(this);
6289 // Introduce an explicit second check of the termination condition before the
6290 // loop that helps TurboFan generate better code. If there's only a single
6291 // check, then the CodeStubAssembler forces it to be at the beginning of the
6292 // loop requiring a backwards branch at the end of the loop (it's not possible
6293 // to force the loop header check at the end of the loop and branch forward to
6294 // it from the pre-header). The extra branch is slower in the case that the
6295 // loop actually iterates.
6296 Branch(WordEqual(var.value(), end_index), &after_loop, &loop);
6297 Bind(&loop);
6298 {
6299 if (advance_mode == IndexAdvanceMode::kPre) {
6300 Increment(var, increment, parameter_mode);
6301 }
6302 body(var.value());
6303 if (advance_mode == IndexAdvanceMode::kPost) {
6304 Increment(var, increment, parameter_mode);
6305 }
6306 Branch(WordNotEqual(var.value(), end_index), &loop, &after_loop);
6307 }
6308 Bind(&after_loop);
6309 return var.value();
6310 }
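// Usage sketch (hypothetical; mirrors the call in InitializeFieldsWithRoot
// below): walk the byte offsets [start_offset, end_offset) of |object| in
// word steps and zero each field:
//
//   BuildFastLoop(start_offset, end_offset,
//                 [=](Node* offset) {
//                   StoreNoWriteBarrier(MachineRepresentation::kTagged,
//                                       object, offset, SmiConstant(0));
//                 },
//                 kPointerSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);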
6311
6312 void CodeStubAssembler::BuildFastFixedArrayForEach(
6313 const CodeStubAssembler::VariableList& vars, Node* fixed_array,
6314 ElementsKind kind, Node* first_element_inclusive,
6315 Node* last_element_exclusive, const FastFixedArrayForEachBody& body,
6316 ParameterMode mode, ForEachDirection direction) {
6317 STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
6318 int32_t first_val;
6319 bool constant_first = ToInt32Constant(first_element_inclusive, first_val);
6320 int32_t last_val;
6321 bool constant_last = ToInt32Constant(last_element_exclusive, last_val);
6322 if (constant_first && constant_last) {
6323 int delta = last_val - first_val;
6324 DCHECK_GE(delta, 0);
6325 if (delta <= kElementLoopUnrollThreshold) {
6326 if (direction == ForEachDirection::kForward) {
6327 for (int i = first_val; i < last_val; ++i) {
6328 Node* index = IntPtrConstant(i);
6329 Node* offset =
6330 ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS,
6331 FixedArray::kHeaderSize - kHeapObjectTag);
6332 body(fixed_array, offset);
6333 }
6334 } else {
6335 for (int i = last_val - 1; i >= first_val; --i) {
6336 Node* index = IntPtrConstant(i);
6337 Node* offset =
6338 ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS,
6339 FixedArray::kHeaderSize - kHeapObjectTag);
6340 body(fixed_array, offset);
6341 }
6342 }
6343 return;
6344 }
6345 }
6346
6347 Node* start =
6348 ElementOffsetFromIndex(first_element_inclusive, kind, mode,
6349 FixedArray::kHeaderSize - kHeapObjectTag);
6350 Node* limit =
6351 ElementOffsetFromIndex(last_element_exclusive, kind, mode,
6352 FixedArray::kHeaderSize - kHeapObjectTag);
6353 if (direction == ForEachDirection::kReverse) std::swap(start, limit);
6354
6355 int increment = IsFastDoubleElementsKind(kind) ? kDoubleSize : kPointerSize;
6356 BuildFastLoop(
6357 vars, start, limit,
6358 [fixed_array, &body](Node* offset) { body(fixed_array, offset); },
6359 direction == ForEachDirection::kReverse ? -increment : increment,
6360 INTPTR_PARAMETERS,
6361 direction == ForEachDirection::kReverse ? IndexAdvanceMode::kPre
6362 : IndexAdvanceMode::kPost);
6363 }
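// Note: when both bounds are compile-time constants and the trip count is at
// most kElementLoopUnrollThreshold, the loop is fully unrolled above into
// straight-line calls to |body|; otherwise it falls back to a BuildFastLoop
// over byte offsets, with swapped bounds and a negative increment for
// ForEachDirection::kReverse.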
6364
6365 void CodeStubAssembler::GotoIfFixedArraySizeDoesntFitInNewSpace(
6366 Node* element_count, Label* doesnt_fit, int base_size, ParameterMode mode) {
6367 int max_newspace_parameters =
6368 (kMaxRegularHeapObjectSize - base_size) / kPointerSize;
6369 GotoIf(IntPtrOrSmiGreaterThan(
6370 element_count, IntPtrOrSmiConstant(max_newspace_parameters, mode),
6371 mode),
6372 doesnt_fit);
6373 }
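// Worked example (sketch): with base_size == FixedArray::kHeaderSize, the
// largest element count that still fits a regular new-space object is
//   (kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) / kPointerSize
// and any larger count jumps to |doesnt_fit|, letting the caller choose a
// runtime call or an old-space allocation instead.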
6374
6375 void CodeStubAssembler::InitializeFieldsWithRoot(
6376 Node* object, Node* start_offset, Node* end_offset,
6377 Heap::RootListIndex root_index) {
6378 start_offset = IntPtrAdd(start_offset, IntPtrConstant(-kHeapObjectTag));
6379 end_offset = IntPtrAdd(end_offset, IntPtrConstant(-kHeapObjectTag));
6380 Node* root_value = LoadRoot(root_index);
6381 BuildFastLoop(end_offset, start_offset,
6382 [this, object, root_value](Node* current) {
6383 StoreNoWriteBarrier(MachineRepresentation::kTagged, object,
6384 current, root_value);
6385 },
6386 -kPointerSize, INTPTR_PARAMETERS,
6387 CodeStubAssembler::IndexAdvanceMode::kPre);
6388 }
6389
6390 void CodeStubAssembler::BranchIfNumericRelationalComparison(
6391 RelationalComparisonMode mode, Node* lhs, Node* rhs, Label* if_true,
6392 Label* if_false) {
6393 Label end(this);
6394 Variable result(this, MachineRepresentation::kTagged);
6395
6396 // Shared entry for floating point comparison.
6397 Label do_fcmp(this);
6398 Variable var_fcmp_lhs(this, MachineRepresentation::kFloat64),
6399 var_fcmp_rhs(this, MachineRepresentation::kFloat64);
6400
6401 // Check if the {lhs} is a Smi or a HeapObject.
6402 Label if_lhsissmi(this), if_lhsisnotsmi(this);
6403 Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisnotsmi);
6404
6405 Bind(&if_lhsissmi);
6406 {
6407 // Check if {rhs} is a Smi or a HeapObject.
6408 Label if_rhsissmi(this), if_rhsisnotsmi(this);
6409 Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
6410
6411 Bind(&if_rhsissmi);
6412 {
6413 // Both {lhs} and {rhs} are Smi, so just perform a fast Smi comparison.
6414 switch (mode) {
6415 case kLessThan:
6416 BranchIfSmiLessThan(lhs, rhs, if_true, if_false);
6417 break;
6418 case kLessThanOrEqual:
6419 BranchIfSmiLessThanOrEqual(lhs, rhs, if_true, if_false);
6420 break;
6421 case kGreaterThan:
6422 BranchIfSmiLessThan(rhs, lhs, if_true, if_false);
6423 break;
6424 case kGreaterThanOrEqual:
6425 BranchIfSmiLessThanOrEqual(rhs, lhs, if_true, if_false);
6426 break;
6427 }
6428 }
6429
6430 Bind(&if_rhsisnotsmi);
6431 {
6432 CSA_ASSERT(this, IsHeapNumberMap(LoadMap(rhs)));
6433 // Convert the {lhs} and {rhs} to floating point values, and
6434 // perform a floating point comparison.
6435 var_fcmp_lhs.Bind(SmiToFloat64(lhs));
6436 var_fcmp_rhs.Bind(LoadHeapNumberValue(rhs));
6437 Goto(&do_fcmp);
6438 }
6439 }
6440
6441 Bind(&if_lhsisnotsmi);
6442 {
6443 CSA_ASSERT(this, IsHeapNumberMap(LoadMap(lhs)));
6444
6445 // Check if {rhs} is a Smi or a HeapObject.
6446 Label if_rhsissmi(this), if_rhsisnotsmi(this);
6447 Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
6448
6449 Bind(&if_rhsissmi);
6450 {
6451 // Convert the {lhs} and {rhs} to floating point values, and
6452 // perform a floating point comparison.
6453 var_fcmp_lhs.Bind(LoadHeapNumberValue(lhs));
6454 var_fcmp_rhs.Bind(SmiToFloat64(rhs));
6455 Goto(&do_fcmp);
6456 }
6457
6458 Bind(&if_rhsisnotsmi);
6459 {
6460 CSA_ASSERT(this, IsHeapNumberMap(LoadMap(rhs)));
6461
6462 // Convert the {lhs} and {rhs} to floating point values, and
6463 // perform a floating point comparison.
6464 var_fcmp_lhs.Bind(LoadHeapNumberValue(lhs));
6465 var_fcmp_rhs.Bind(LoadHeapNumberValue(rhs));
6466 Goto(&do_fcmp);
6467 }
6468 }
6469
6470 Bind(&do_fcmp);
6471 {
6472 // Load the {lhs} and {rhs} floating point values.
6473 Node* lhs = var_fcmp_lhs.value();
6474 Node* rhs = var_fcmp_rhs.value();
6475
6476 // Perform a fast floating point comparison.
6477 switch (mode) {
6478 case kLessThan:
6479 Branch(Float64LessThan(lhs, rhs), if_true, if_false);
6480 break;
6481 case kLessThanOrEqual:
6482 Branch(Float64LessThanOrEqual(lhs, rhs), if_true, if_false);
6483 break;
6484 case kGreaterThan:
6485 Branch(Float64GreaterThan(lhs, rhs), if_true, if_false);
6486 break;
6487 case kGreaterThanOrEqual:
6488 Branch(Float64GreaterThanOrEqual(lhs, rhs), if_true, if_false);
6489 break;
6490 }
6491 }
6492 }
6493
6494 void CodeStubAssembler::GotoUnlessNumberLessThan(Node* lhs, Node* rhs,
6495 Label* if_false) {
6496 Label if_true(this);
6497 BranchIfNumericRelationalComparison(kLessThan, lhs, rhs, &if_true, if_false);
6498 Bind(&if_true);
6499 }
6500
6501 Node* CodeStubAssembler::RelationalComparison(RelationalComparisonMode mode,
6502 Node* lhs, Node* rhs,
6503 Node* context) {
6504 Label return_true(this), return_false(this), end(this);
6505 Variable result(this, MachineRepresentation::kTagged);
6506
6507 // Shared entry for floating point comparison.
6508 Label do_fcmp(this);
6509 Variable var_fcmp_lhs(this, MachineRepresentation::kFloat64),
6510 var_fcmp_rhs(this, MachineRepresentation::kFloat64);
6511
6512 // We might need to loop several times due to ToPrimitive and/or ToNumber
6513 // conversions.
6514 Variable var_lhs(this, MachineRepresentation::kTagged, lhs),
6515 var_rhs(this, MachineRepresentation::kTagged, rhs);
6516 Variable* loop_vars[2] = {&var_lhs, &var_rhs};
6517 Label loop(this, 2, loop_vars);
6518 Goto(&loop);
6519 Bind(&loop);
6520 {
6521 // Load the current {lhs} and {rhs} values.
6522 lhs = var_lhs.value();
6523 rhs = var_rhs.value();
6524
6525 // Check if the {lhs} is a Smi or a HeapObject.
6526 Label if_lhsissmi(this), if_lhsisnotsmi(this);
6527 Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisnotsmi);
6528
6529 Bind(&if_lhsissmi);
6530 {
6531 // Check if {rhs} is a Smi or a HeapObject.
6532 Label if_rhsissmi(this), if_rhsisnotsmi(this);
6533 Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
6534
6535 Bind(&if_rhsissmi);
6536 {
6537 // Both {lhs} and {rhs} are Smi, so just perform a fast Smi comparison.
6538 switch (mode) {
6539 case kLessThan:
6540 BranchIfSmiLessThan(lhs, rhs, &return_true, &return_false);
6541 break;
6542 case kLessThanOrEqual:
6543 BranchIfSmiLessThanOrEqual(lhs, rhs, &return_true, &return_false);
6544 break;
6545 case kGreaterThan:
6546 BranchIfSmiLessThan(rhs, lhs, &return_true, &return_false);
6547 break;
6548 case kGreaterThanOrEqual:
6549 BranchIfSmiLessThanOrEqual(rhs, lhs, &return_true, &return_false);
6550 break;
6551 }
6552 }
6553
6554 Bind(&if_rhsisnotsmi);
6555 {
6556 // Load the map of {rhs}.
6557 Node* rhs_map = LoadMap(rhs);
6558
6559 // Check if the {rhs} is a HeapNumber.
6560 Label if_rhsisnumber(this), if_rhsisnotnumber(this, Label::kDeferred);
6561 Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber, &if_rhsisnotnumber);
6562
6563 Bind(&if_rhsisnumber);
6564 {
6565 // Convert the {lhs} and {rhs} to floating point values, and
6566 // perform a floating point comparison.
6567 var_fcmp_lhs.Bind(SmiToFloat64(lhs));
6568 var_fcmp_rhs.Bind(LoadHeapNumberValue(rhs));
6569 Goto(&do_fcmp);
6570 }
6571
6572 Bind(&if_rhsisnotnumber);
6573 {
6574 // Convert the {rhs} to a Number; we don't need to perform the
6575 // dedicated ToPrimitive(rhs, hint Number) operation, as the
6576 // ToNumber(rhs) will by itself already invoke ToPrimitive with
6577 // a Number hint.
6578 Callable callable = CodeFactory::NonNumberToNumber(isolate());
6579 var_rhs.Bind(CallStub(callable, context, rhs));
6580 Goto(&loop);
6581 }
6582 }
6583 }
6584
6585 Bind(&if_lhsisnotsmi);
6586 {
6587 // Load the map of {lhs}.
6588 Node* lhs_map = LoadMap(lhs);
6589
6590 // Check if {rhs} is a Smi or a HeapObject.
6591 Label if_rhsissmi(this), if_rhsisnotsmi(this);
6592 Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
6593
6594 Bind(&if_rhsissmi);
6595 {
6596 // Check if the {lhs} is a HeapNumber.
6597 Label if_lhsisnumber(this), if_lhsisnotnumber(this, Label::kDeferred);
6598 Branch(IsHeapNumberMap(lhs_map), &if_lhsisnumber, &if_lhsisnotnumber);
6599
6600 Bind(&if_lhsisnumber);
6601 {
6602 // Convert the {lhs} and {rhs} to floating point values, and
6603 // perform a floating point comparison.
6604 var_fcmp_lhs.Bind(LoadHeapNumberValue(lhs));
6605 var_fcmp_rhs.Bind(SmiToFloat64(rhs));
6606 Goto(&do_fcmp);
6607 }
6608
6609 Bind(&if_lhsisnotnumber);
6610 {
6611 // Convert the {lhs} to a Number; we don't need to perform the
6612 // dedicated ToPrimitive(lhs, hint Number) operation, as the
6613 // ToNumber(lhs) will by itself already invoke ToPrimitive with
6614 // a Number hint.
6615 Callable callable = CodeFactory::NonNumberToNumber(isolate());
6616 var_lhs.Bind(CallStub(callable, context, lhs));
6617 Goto(&loop);
6618 }
6619 }
6620
6621 Bind(&if_rhsisnotsmi);
6622 {
6623 // Load the map of {rhs}.
6624 Node* rhs_map = LoadMap(rhs);
6625
6626 // Check if {lhs} is a HeapNumber.
6627 Label if_lhsisnumber(this), if_lhsisnotnumber(this);
6628 Branch(IsHeapNumberMap(lhs_map), &if_lhsisnumber, &if_lhsisnotnumber);
6629
6630 Bind(&if_lhsisnumber);
6631 {
6632 // Check if {rhs} is also a HeapNumber.
6633 Label if_rhsisnumber(this), if_rhsisnotnumber(this, Label::kDeferred);
6634 Branch(WordEqual(lhs_map, rhs_map), &if_rhsisnumber,
6635 &if_rhsisnotnumber);
6636
6637 Bind(&if_rhsisnumber);
6638 {
6639 // Convert the {lhs} and {rhs} to floating point values, and
6640 // perform a floating point comparison.
6641 var_fcmp_lhs.Bind(LoadHeapNumberValue(lhs));
6642 var_fcmp_rhs.Bind(LoadHeapNumberValue(rhs));
6643 Goto(&do_fcmp);
6644 }
6645
6646 Bind(&if_rhsisnotnumber);
6647 {
6648 // Convert the {rhs} to a Number; we don't need to perform the
6649 // dedicated ToPrimitive(rhs, hint Number) operation, as the
6650 // ToNumber(rhs) will by itself already invoke ToPrimitive with
6651 // a Number hint.
6652 Callable callable = CodeFactory::NonNumberToNumber(isolate());
6653 var_rhs.Bind(CallStub(callable, context, rhs));
6654 Goto(&loop);
6655 }
6656 }
6657
6658 Bind(&if_lhsisnotnumber);
6659 {
6660 // Load the instance type of {lhs}.
6661 Node* lhs_instance_type = LoadMapInstanceType(lhs_map);
6662
6663 // Check if {lhs} is a String.
6664 Label if_lhsisstring(this), if_lhsisnotstring(this, Label::kDeferred);
6665 Branch(IsStringInstanceType(lhs_instance_type), &if_lhsisstring,
6666 &if_lhsisnotstring);
6667
6668 Bind(&if_lhsisstring);
6669 {
6670 // Load the instance type of {rhs}.
6671 Node* rhs_instance_type = LoadMapInstanceType(rhs_map);
6672
6673 // Check if {rhs} is also a String.
6674 Label if_rhsisstring(this, Label::kDeferred),
6675 if_rhsisnotstring(this, Label::kDeferred);
6676 Branch(IsStringInstanceType(rhs_instance_type), &if_rhsisstring,
6677 &if_rhsisnotstring);
6678
6679 Bind(&if_rhsisstring);
6680 {
6681 // Both {lhs} and {rhs} are strings.
6682 switch (mode) {
6683 case kLessThan:
6684 result.Bind(CallStub(CodeFactory::StringLessThan(isolate()),
6685 context, lhs, rhs));
6686 Goto(&end);
6687 break;
6688 case kLessThanOrEqual:
6689 result.Bind(
6690 CallStub(CodeFactory::StringLessThanOrEqual(isolate()),
6691 context, lhs, rhs));
6692 Goto(&end);
6693 break;
6694 case kGreaterThan:
6695 result.Bind(
6696 CallStub(CodeFactory::StringGreaterThan(isolate()),
6697 context, lhs, rhs));
6698 Goto(&end);
6699 break;
6700 case kGreaterThanOrEqual:
6701 result.Bind(
6702 CallStub(CodeFactory::StringGreaterThanOrEqual(isolate()),
6703 context, lhs, rhs));
6704 Goto(&end);
6705 break;
6706 }
6707 }
6708
6709 Bind(&if_rhsisnotstring);
6710 {
6711 // The {lhs} is a String, while {rhs} is neither a Number nor a
6712 // String, so we need to call ToPrimitive(rhs, hint Number) if
6713 // {rhs} is a receiver or ToNumber(lhs) and ToNumber(rhs) in the
6714 // other cases.
6715 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
6716 Label if_rhsisreceiver(this, Label::kDeferred),
6717 if_rhsisnotreceiver(this, Label::kDeferred);
6718 Branch(IsJSReceiverInstanceType(rhs_instance_type),
6719 &if_rhsisreceiver, &if_rhsisnotreceiver);
6720
6721 Bind(&if_rhsisreceiver);
6722 {
6723 // Convert {rhs} to a primitive first passing Number hint.
6724 Callable callable = CodeFactory::NonPrimitiveToPrimitive(
6725 isolate(), ToPrimitiveHint::kNumber);
6726 var_rhs.Bind(CallStub(callable, context, rhs));
6727 Goto(&loop);
6728 }
6729
6730 Bind(&if_rhsisnotreceiver);
6731 {
6732 // Convert both {lhs} and {rhs} to Number.
6733 Callable callable = CodeFactory::ToNumber(isolate());
6734 var_lhs.Bind(CallStub(callable, context, lhs));
6735 var_rhs.Bind(CallStub(callable, context, rhs));
6736 Goto(&loop);
6737 }
6738 }
6739 }
6740
6741 Bind(&if_lhsisnotstring);
6742 {
6743 // The {lhs} is neither a Number nor a String, so we need to call
6744 // ToPrimitive(lhs, hint Number) if {lhs} is a receiver or
6745 // ToNumber(lhs) and ToNumber(rhs) in the other cases.
6746 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
6747 Label if_lhsisreceiver(this, Label::kDeferred),
6748 if_lhsisnotreceiver(this, Label::kDeferred);
6749 Branch(IsJSReceiverInstanceType(lhs_instance_type),
6750 &if_lhsisreceiver, &if_lhsisnotreceiver);
6751
6752 Bind(&if_lhsisreceiver);
6753 {
6754 // Convert {lhs} to a primitive first passing Number hint.
6755 Callable callable = CodeFactory::NonPrimitiveToPrimitive(
6756 isolate(), ToPrimitiveHint::kNumber);
6757 var_lhs.Bind(CallStub(callable, context, lhs));
6758 Goto(&loop);
6759 }
6760
6761 Bind(&if_lhsisnotreceiver);
6762 {
6763 // Convert both {lhs} and {rhs} to Number.
6764 Callable callable = CodeFactory::ToNumber(isolate());
6765 var_lhs.Bind(CallStub(callable, context, lhs));
6766 var_rhs.Bind(CallStub(callable, context, rhs));
6767 Goto(&loop);
6768 }
6769 }
6770 }
6771 }
6772 }
6773 }
6774
6775 Bind(&do_fcmp);
6776 {
6777 // Load the {lhs} and {rhs} floating point values.
6778 Node* lhs = var_fcmp_lhs.value();
6779 Node* rhs = var_fcmp_rhs.value();
6780
6781 // Perform a fast floating point comparison.
6782 switch (mode) {
6783 case kLessThan:
6784 Branch(Float64LessThan(lhs, rhs), &return_true, &return_false);
6785 break;
6786 case kLessThanOrEqual:
6787 Branch(Float64LessThanOrEqual(lhs, rhs), &return_true, &return_false);
6788 break;
6789 case kGreaterThan:
6790 Branch(Float64GreaterThan(lhs, rhs), &return_true, &return_false);
6791 break;
6792 case kGreaterThanOrEqual:
6793 Branch(Float64GreaterThanOrEqual(lhs, rhs), &return_true,
6794 &return_false);
6795 break;
6796 }
6797 }
6798
6799 Bind(&return_true);
6800 {
6801 result.Bind(BooleanConstant(true));
6802 Goto(&end);
6803 }
6804
6805 Bind(&return_false);
6806 {
6807 result.Bind(BooleanConstant(false));
6808 Goto(&end);
6809 }
6810
6811 Bind(&end);
6812 return result.value();
6813 }
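// Examples of the conversion plumbing above (ES2015 Abstract Relational
// Comparison): 1 < "2" sends "2" through NonNumberToNumber and loops;
// "a" < "b" dispatches to CodeFactory::StringLessThan; ({}) < 1 first runs
// ToPrimitive(lhs, hint Number) and then re-enters the loop.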
6814
6815 namespace {
6816
6817 void GenerateEqual_Same(CodeStubAssembler* assembler, Node* value,
6818 CodeStubAssembler::Label* if_equal,
6819 CodeStubAssembler::Label* if_notequal) {
6820 // In case of abstract or strict equality checks, we need additional checks
6821 // for NaN values because they are not considered equal, even if both the
6822 // left and the right hand side reference exactly the same value.
6823
6824 typedef CodeStubAssembler::Label Label;
6825
6826 // Check if {value} is a Smi or a HeapObject.
6827 Label if_valueissmi(assembler), if_valueisnotsmi(assembler);
6828 assembler->Branch(assembler->TaggedIsSmi(value), &if_valueissmi,
6829 &if_valueisnotsmi);
6830
6831 assembler->Bind(&if_valueisnotsmi);
6832 {
6833 // Load the map of {value}.
6834 Node* value_map = assembler->LoadMap(value);
6835
6836 // Check if {value} (and therefore {rhs}) is a HeapNumber.
6837 Label if_valueisnumber(assembler), if_valueisnotnumber(assembler);
6838 assembler->Branch(assembler->IsHeapNumberMap(value_map), &if_valueisnumber,
6839 &if_valueisnotnumber);
6840
6841 assembler->Bind(&if_valueisnumber);
6842 {
6843 // Convert {value} (and therefore {rhs}) to floating point value.
6844 Node* value_value = assembler->LoadHeapNumberValue(value);
6845
6846 // Check if the HeapNumber value is a NaN.
6847 assembler->BranchIfFloat64IsNaN(value_value, if_notequal, if_equal);
6848 }
6849
6850 assembler->Bind(&if_valueisnotnumber);
6851 assembler->Goto(if_equal);
6852 }
6853
6854 assembler->Bind(&if_valueissmi);
6855 assembler->Goto(if_equal);
6856 }
6857 } // namespace
6858
6859 // ES6 section 7.2.12 Abstract Equality Comparison
6860 Node* CodeStubAssembler::Equal(ResultMode mode, Node* lhs, Node* rhs,
6861 Node* context) {
6862 // This is a slightly optimized version of Object::Equals, represented as
6863 // a scheduled TurboFan graph utilizing the CodeStubAssembler. Whenever you
6864 // change something functionality-wise in here, remember to update the
6865 // Object::Equals method as well.
6866
6867 Label if_equal(this), if_notequal(this),
6868 do_rhsstringtonumber(this, Label::kDeferred), end(this);
6869 Variable result(this, MachineRepresentation::kTagged);
6870
6871 // Shared entry for floating point comparison.
6872 Label do_fcmp(this);
6873 Variable var_fcmp_lhs(this, MachineRepresentation::kFloat64),
6874 var_fcmp_rhs(this, MachineRepresentation::kFloat64);
6875
6876 // We might need to loop several times due to ToPrimitive and/or ToNumber
6877 // conversions.
6878 Variable var_lhs(this, MachineRepresentation::kTagged, lhs),
6879 var_rhs(this, MachineRepresentation::kTagged, rhs);
6880 Variable* loop_vars[2] = {&var_lhs, &var_rhs};
6881 Label loop(this, 2, loop_vars);
6882 Goto(&loop);
6883 Bind(&loop);
6884 {
6885 // Load the current {lhs} and {rhs} values.
6886 lhs = var_lhs.value();
6887 rhs = var_rhs.value();
6888
6889 // Check if {lhs} and {rhs} refer to the same object.
6890 Label if_same(this), if_notsame(this);
6891 Branch(WordEqual(lhs, rhs), &if_same, &if_notsame);
6892
6893 Bind(&if_same);
6894 {
6895 // The {lhs} and {rhs} reference the exact same value, yet we need special
6896 // treatment for HeapNumber, as NaN is not equal to NaN.
6897 GenerateEqual_Same(this, lhs, &if_equal, &if_notequal);
6898 }
6899
6900 Bind(&if_notsame);
6901 {
6902 // Check if {lhs} is a Smi or a HeapObject.
6903 Label if_lhsissmi(this), if_lhsisnotsmi(this);
6904 Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisnotsmi);
6905
6906 Bind(&if_lhsissmi);
6907 {
6908 // Check if {rhs} is a Smi or a HeapObject.
6909 Label if_rhsissmi(this), if_rhsisnotsmi(this);
6910 Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
6911
6912 Bind(&if_rhsissmi);
6913 // We have already checked for {lhs} and {rhs} being the same value, so
6914 // if both are Smis when we get here they must not be equal.
6915 Goto(&if_notequal);
6916
6917 Bind(&if_rhsisnotsmi);
6918 {
6919 // Load the map of {rhs}.
6920 Node* rhs_map = LoadMap(rhs);
6921
6922 // Check if {rhs} is a HeapNumber.
6923 Label if_rhsisnumber(this), if_rhsisnotnumber(this);
6924 Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber, &if_rhsisnotnumber);
6925
6926 Bind(&if_rhsisnumber);
6927 {
6928 // Convert {lhs} and {rhs} to floating point values, and
6929 // perform a floating point comparison.
6930 var_fcmp_lhs.Bind(SmiToFloat64(lhs));
6931 var_fcmp_rhs.Bind(LoadHeapNumberValue(rhs));
6932 Goto(&do_fcmp);
6933 }
6934
6935 Bind(&if_rhsisnotnumber);
6936 {
6937 // Load the instance type of the {rhs}.
6938 Node* rhs_instance_type = LoadMapInstanceType(rhs_map);
6939
6940 // Check if the {rhs} is a String.
6941 Label if_rhsisstring(this, Label::kDeferred),
6942 if_rhsisnotstring(this);
6943 Branch(IsStringInstanceType(rhs_instance_type), &if_rhsisstring,
6944 &if_rhsisnotstring);
6945
6946 Bind(&if_rhsisstring);
6947 {
6948 // The {rhs} is a String and the {lhs} is a Smi; we need
6949 // to convert the {rhs} to a Number and compare the output to
6950 // the Number on the {lhs}.
6951 Goto(&do_rhsstringtonumber);
6952 }
6953
6954 Bind(&if_rhsisnotstring);
6955 {
6956 // Check if the {rhs} is a Boolean.
6957 Label if_rhsisboolean(this), if_rhsisnotboolean(this);
6958 Branch(IsBooleanMap(rhs_map), &if_rhsisboolean,
6959 &if_rhsisnotboolean);
6960
6961 Bind(&if_rhsisboolean);
6962 {
6963 // The {rhs} is a Boolean, load its number value.
6964 var_rhs.Bind(LoadObjectField(rhs, Oddball::kToNumberOffset));
6965 Goto(&loop);
6966 }
6967
6968 Bind(&if_rhsisnotboolean);
6969 {
6970 // Check if the {rhs} is a Receiver.
6971 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
6972 Label if_rhsisreceiver(this, Label::kDeferred),
6973 if_rhsisnotreceiver(this);
6974 Branch(IsJSReceiverInstanceType(rhs_instance_type),
6975 &if_rhsisreceiver, &if_rhsisnotreceiver);
6976
6977 Bind(&if_rhsisreceiver);
6978 {
6979 // Convert {rhs} to a primitive first (passing no hint).
6980 Callable callable =
6981 CodeFactory::NonPrimitiveToPrimitive(isolate());
6982 var_rhs.Bind(CallStub(callable, context, rhs));
6983 Goto(&loop);
6984 }
6985
6986 Bind(&if_rhsisnotreceiver);
6987 Goto(&if_notequal);
6988 }
6989 }
6990 }
6991 }
6992 }
6993
6994 Bind(&if_lhsisnotsmi);
6995 {
6996 // Check if {rhs} is a Smi or a HeapObject.
6997 Label if_rhsissmi(this), if_rhsisnotsmi(this);
6998 Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
6999
7000 Bind(&if_rhsissmi);
7001 {
7002 // The {lhs} is a HeapObject and the {rhs} is a Smi; swapping {lhs}
7003 // and {rhs} is not observable and doesn't matter for the result, so
7004 // we can just swap them and use the Smi handling above (for {lhs}
7005 // being a Smi).
7006 var_lhs.Bind(rhs);
7007 var_rhs.Bind(lhs);
7008 Goto(&loop);
7009 }
7010
7011 Bind(&if_rhsisnotsmi);
7012 {
7013 Label if_lhsisstring(this), if_lhsisnumber(this),
7014 if_lhsissymbol(this), if_lhsisoddball(this),
7015 if_lhsisreceiver(this);
7016
7017 // Both {lhs} and {rhs} are HeapObjects, load their maps
7018 // and their instance types.
7019 Node* lhs_map = LoadMap(lhs);
7020 Node* rhs_map = LoadMap(rhs);
7021
7022 // Load the instance types of {lhs} and {rhs}.
7023 Node* lhs_instance_type = LoadMapInstanceType(lhs_map);
7024 Node* rhs_instance_type = LoadMapInstanceType(rhs_map);
7025
7026 // Dispatch based on the instance type of {lhs}.
7027 size_t const kNumCases = FIRST_NONSTRING_TYPE + 3;
7028 Label* case_labels[kNumCases];
7029 int32_t case_values[kNumCases];
7030 for (int32_t i = 0; i < FIRST_NONSTRING_TYPE; ++i) {
7031 case_labels[i] = new Label(this);
7032 case_values[i] = i;
7033 }
7034 case_labels[FIRST_NONSTRING_TYPE + 0] = &if_lhsisnumber;
7035 case_values[FIRST_NONSTRING_TYPE + 0] = HEAP_NUMBER_TYPE;
7036 case_labels[FIRST_NONSTRING_TYPE + 1] = &if_lhsissymbol;
7037 case_values[FIRST_NONSTRING_TYPE + 1] = SYMBOL_TYPE;
7038 case_labels[FIRST_NONSTRING_TYPE + 2] = &if_lhsisoddball;
7039 case_values[FIRST_NONSTRING_TYPE + 2] = ODDBALL_TYPE;
7040 Switch(lhs_instance_type, &if_lhsisreceiver, case_values, case_labels,
7041 arraysize(case_values));
7042 for (int32_t i = 0; i < FIRST_NONSTRING_TYPE; ++i) {
7043 Bind(case_labels[i]);
7044 Goto(&if_lhsisstring);
7045 delete case_labels[i];
7046 }
7047
7048 Bind(&if_lhsisstring);
7049 {
7050 // Check if {rhs} is also a String.
7051 Label if_rhsisstring(this, Label::kDeferred),
7052 if_rhsisnotstring(this);
7053 Branch(IsStringInstanceType(rhs_instance_type), &if_rhsisstring,
7054 &if_rhsisnotstring);
7055
7056 Bind(&if_rhsisstring);
7057 {
7058 // Both {lhs} and {rhs} are of type String, just do the
7059 // string comparison then.
7060 Callable callable = (mode == kDontNegateResult)
7061 ? CodeFactory::StringEqual(isolate())
7062 : CodeFactory::StringNotEqual(isolate());
7063 result.Bind(CallStub(callable, context, lhs, rhs));
7064 Goto(&end);
7065 }
7066
7067 Bind(&if_rhsisnotstring);
7068 {
7069 // The {lhs} is a String and the {rhs} is some other HeapObject.
7070 // Swapping {lhs} and {rhs} is not observable and doesn't matter
7071 // for the result, so we can just swap them and use the String
7072 // handling below (for {rhs} being a String).
7073 var_lhs.Bind(rhs);
7074 var_rhs.Bind(lhs);
7075 Goto(&loop);
7076 }
7077 }
7078
7079 Bind(&if_lhsisnumber);
7080 {
7081 // Check if {rhs} is also a HeapNumber.
7082 Label if_rhsisnumber(this), if_rhsisnotnumber(this);
7083 Branch(Word32Equal(lhs_instance_type, rhs_instance_type),
7084 &if_rhsisnumber, &if_rhsisnotnumber);
7085
7086 Bind(&if_rhsisnumber);
7087 {
7088 // Convert {lhs} and {rhs} to floating point values, and
7089 // perform a floating point comparison.
7090 var_fcmp_lhs.Bind(LoadHeapNumberValue(lhs));
7091 var_fcmp_rhs.Bind(LoadHeapNumberValue(rhs));
7092 Goto(&do_fcmp);
7093 }
7094
7095 Bind(&if_rhsisnotnumber);
7096 {
7097 // The {lhs} is a Number, the {rhs} is some other HeapObject.
7098 Label if_rhsisstring(this, Label::kDeferred),
7099 if_rhsisnotstring(this);
7100 Branch(IsStringInstanceType(rhs_instance_type), &if_rhsisstring,
7101 &if_rhsisnotstring);
7102
7103 Bind(&if_rhsisstring);
7104 {
7105 // The {rhs} is a String and the {lhs} is a HeapNumber; we need
7106 // to convert the {rhs} to a Number and compare the output to
7107 // the Number on the {lhs}.
7108 Goto(&do_rhsstringtonumber);
7109 }
7110
7111 Bind(&if_rhsisnotstring);
7112 {
7113 // Check if the {rhs} is a JSReceiver.
7114 Label if_rhsisreceiver(this), if_rhsisnotreceiver(this);
7115 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
7116 Branch(IsJSReceiverInstanceType(rhs_instance_type),
7117 &if_rhsisreceiver, &if_rhsisnotreceiver);
7118
7119 Bind(&if_rhsisreceiver);
7120 {
7121 // The {lhs} is a Primitive and the {rhs} is a JSReceiver.
7122 // Swapping {lhs} and {rhs} is not observable and doesn't
7123 // matter for the result, so we can just swap them and use
7124 // the JSReceiver handling below (for {lhs} being a
7125 // JSReceiver).
7126 var_lhs.Bind(rhs);
7127 var_rhs.Bind(lhs);
7128 Goto(&loop);
7129 }
7130
7131 Bind(&if_rhsisnotreceiver);
7132 {
7133 // Check if {rhs} is a Boolean.
7134 Label if_rhsisboolean(this), if_rhsisnotboolean(this);
7135 Branch(IsBooleanMap(rhs_map), &if_rhsisboolean,
7136 &if_rhsisnotboolean);
7137
7138 Bind(&if_rhsisboolean);
7139 {
7140 // The {rhs} is a Boolean, convert it to a Smi first.
7141 var_rhs.Bind(
7142 LoadObjectField(rhs, Oddball::kToNumberOffset));
7143 Goto(&loop);
7144 }
7145
7146 Bind(&if_rhsisnotboolean);
7147 Goto(&if_notequal);
7148 }
7149 }
7150 }
7151 }
7152
7153 Bind(&if_lhsisoddball);
7154 {
7155 // The {lhs} is an Oddball and {rhs} is some other HeapObject.
7156 Label if_lhsisboolean(this), if_lhsisnotboolean(this);
7157 Node* boolean_map = BooleanMapConstant();
7158 Branch(WordEqual(lhs_map, boolean_map), &if_lhsisboolean,
7159 &if_lhsisnotboolean);
7160
7161 Bind(&if_lhsisboolean);
7162 {
7163 // The {lhs} is a Boolean, check if {rhs} is also a Boolean.
7164 Label if_rhsisboolean(this), if_rhsisnotboolean(this);
7165 Branch(WordEqual(rhs_map, boolean_map), &if_rhsisboolean,
7166 &if_rhsisnotboolean);
7167
7168 Bind(&if_rhsisboolean);
7169 {
7170 // Both {lhs} and {rhs} are distinct Boolean values.
7171 Goto(&if_notequal);
7172 }
7173
7174 Bind(&if_rhsisnotboolean);
7175 {
7176 // Convert the {lhs} to a Number first.
7177 var_lhs.Bind(LoadObjectField(lhs, Oddball::kToNumberOffset));
7178 Goto(&loop);
7179 }
7180 }
7181
7182 Bind(&if_lhsisnotboolean);
7183 {
7184 // The {lhs} is either Null or Undefined; check if the {rhs} is
7185 // undetectable (i.e. either also Null or Undefined or some
7186 // undetectable JSReceiver).
7187 Node* rhs_bitfield = LoadMapBitField(rhs_map);
7188 Branch(Word32Equal(
7189 Word32And(rhs_bitfield,
7190 Int32Constant(1 << Map::kIsUndetectable)),
7191 Int32Constant(0)),
7192 &if_notequal, &if_equal);
7193 }
7194 }
7195
7196 Bind(&if_lhsissymbol);
7197 {
7198 // Check if the {rhs} is a JSReceiver.
7199 Label if_rhsisreceiver(this), if_rhsisnotreceiver(this);
7200 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
7201 Branch(IsJSReceiverInstanceType(rhs_instance_type),
7202 &if_rhsisreceiver, &if_rhsisnotreceiver);
7203
7204 Bind(&if_rhsisreceiver);
7205 {
7206 // The {lhs} is a Primitive and the {rhs} is a JSReceiver.
7207 // Swapping {lhs} and {rhs} is not observable and doesn't
7208 // matter for the result, so we can just swap them and use
7209 // the JSReceiver handling below (for {lhs} being a JSReceiver).
7210 var_lhs.Bind(rhs);
7211 var_rhs.Bind(lhs);
7212 Goto(&loop);
7213 }
7214
7215 Bind(&if_rhsisnotreceiver);
7216 {
7217 // The {rhs} is not a JSReceiver and also not the same Symbol
7218 // as the {lhs}, so this equality check is considered false.
7219 Goto(&if_notequal);
7220 }
7221 }
7222
7223 Bind(&if_lhsisreceiver);
7224 {
7225 // Check if the {rhs} is also a JSReceiver.
7226 Label if_rhsisreceiver(this), if_rhsisnotreceiver(this);
7227 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
7228 Branch(IsJSReceiverInstanceType(rhs_instance_type),
7229 &if_rhsisreceiver, &if_rhsisnotreceiver);
7230
7231 Bind(&if_rhsisreceiver);
7232 {
7233 // Both {lhs} and {rhs} are different JSReceiver references, so
7234 // this cannot be considered equal.
7235 Goto(&if_notequal);
7236 }
7237
7238 Bind(&if_rhsisnotreceiver);
7239 {
7240 // Check if {rhs} is Null or Undefined (an undetectable check
7241 // is sufficient here, since we already know that {rhs} is not
7242 // a JSReceiver).
7243 Label if_rhsisundetectable(this),
7244 if_rhsisnotundetectable(this, Label::kDeferred);
7245 Node* rhs_bitfield = LoadMapBitField(rhs_map);
7246 Branch(Word32Equal(
7247 Word32And(rhs_bitfield,
7248 Int32Constant(1 << Map::kIsUndetectable)),
7249 Int32Constant(0)),
7250 &if_rhsisnotundetectable, &if_rhsisundetectable);
7251
7252 Bind(&if_rhsisundetectable);
7253 {
7254 // Check if {lhs} is an undetectable JSReceiver.
7255 Node* lhs_bitfield = LoadMapBitField(lhs_map);
7256 Branch(Word32Equal(
7257 Word32And(lhs_bitfield,
7258 Int32Constant(1 << Map::kIsUndetectable)),
7259 Int32Constant(0)),
7260 &if_notequal, &if_equal);
7261 }
7262
7263 Bind(&if_rhsisnotundetectable);
7264 {
7265 // The {rhs} is some Primitive different from Null and
7266 // Undefined, need to convert {lhs} to Primitive first.
7267 Callable callable =
7268 CodeFactory::NonPrimitiveToPrimitive(isolate());
7269 var_lhs.Bind(CallStub(callable, context, lhs));
7270 Goto(&loop);
7271 }
7272 }
7273 }
7274 }
7275 }
7276 }
7277
7278 Bind(&do_rhsstringtonumber);
7279 {
7280 Callable callable = CodeFactory::StringToNumber(isolate());
7281 var_rhs.Bind(CallStub(callable, context, rhs));
7282 Goto(&loop);
7283 }
7284 }
7285
7286 Bind(&do_fcmp);
7287 {
7288 // Load the {lhs} and {rhs} floating point values.
7289 Node* lhs = var_fcmp_lhs.value();
7290 Node* rhs = var_fcmp_rhs.value();
7291
7292 // Perform a fast floating point comparison.
7293 Branch(Float64Equal(lhs, rhs), &if_equal, &if_notequal);
7294 }
7295
7296 Bind(&if_equal);
7297 {
7298 result.Bind(BooleanConstant(mode == kDontNegateResult));
7299 Goto(&end);
7300 }
7301
7302 Bind(&if_notequal);
7303 {
7304 result.Bind(BooleanConstant(mode == kNegateResult));
7305 Goto(&end);
7306 }
7307
7308 Bind(&end);
7309 return result.value();
7310 }
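// Examples of the abstract-equality plumbing above: 1 == "1" sends the string
// through StringToNumber; true == 1 loads the boolean's
// Oddball::kToNumberOffset and loops; null == undefined succeeds via the
// undetectable map-bit check; [] == "" converts the receiver with
// NonPrimitiveToPrimitive (no hint).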
7311
7312 Node* CodeStubAssembler::StrictEqual(ResultMode mode, Node* lhs, Node* rhs,
7313 Node* context) {
7314 // Here's pseudo-code for the algorithm below in case of kDontNegateResult
7315 // mode; for kNegateResult mode we properly negate the result.
7316 //
7317 // if (lhs == rhs) {
7318 // if (lhs->IsHeapNumber()) return HeapNumber::cast(lhs)->value() != NaN;
7319 // return true;
7320 // }
7321 // if (!lhs->IsSmi()) {
7322 // if (lhs->IsHeapNumber()) {
7323 // if (rhs->IsSmi()) {
7324 // return Smi::cast(rhs)->value() == HeapNumber::cast(lhs)->value();
7325 // } else if (rhs->IsHeapNumber()) {
7326 // return HeapNumber::cast(rhs)->value() ==
7327 // HeapNumber::cast(lhs)->value();
7328 // } else {
7329 // return false;
7330 // }
7331 // } else {
7332 // if (rhs->IsSmi()) {
7333 // return false;
7334 // } else {
  //       if (lhs->IsString()) {
  //         if (rhs->IsString()) {
  //           return %StringEqual(lhs, rhs);
  //         } else {
  //           return false;
  //         }
  //       } else {
  //         return false;
  //       }
  //     }
  //   }
  // } else {
  //   if (rhs->IsSmi()) {
  //     return false;
  //   } else {
  //     if (rhs->IsHeapNumber()) {
  //       return Smi::cast(lhs)->value() == HeapNumber::cast(rhs)->value();
  //     } else {
  //       return false;
  //     }
  //   }
  // }

  Label if_equal(this), if_notequal(this), end(this);
  Variable result(this, MachineRepresentation::kTagged);

  // Check if {lhs} and {rhs} refer to the same object.
  Label if_same(this), if_notsame(this);
  Branch(WordEqual(lhs, rhs), &if_same, &if_notsame);

  Bind(&if_same);
  {
    // The {lhs} and {rhs} reference the exact same value, yet we need special
    // treatment for HeapNumber, as NaN is not equal to NaN.
    GenerateEqual_Same(this, lhs, &if_equal, &if_notequal);
  }

  Bind(&if_notsame);
  {
    // The {lhs} and {rhs} reference different objects, yet for Smi, HeapNumber
    // and String they can still be considered equal.

    // Check if {lhs} is a Smi or a HeapObject.
    Label if_lhsissmi(this), if_lhsisnotsmi(this);
    Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisnotsmi);

    Bind(&if_lhsisnotsmi);
    {
      // Load the map of {lhs}.
      Node* lhs_map = LoadMap(lhs);

      // Check if {lhs} is a HeapNumber.
      Label if_lhsisnumber(this), if_lhsisnotnumber(this);
      Branch(IsHeapNumberMap(lhs_map), &if_lhsisnumber, &if_lhsisnotnumber);

      Bind(&if_lhsisnumber);
      {
        // Check if {rhs} is a Smi or a HeapObject.
        Label if_rhsissmi(this), if_rhsisnotsmi(this);
        Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);

        Bind(&if_rhsissmi);
        {
          // Convert {lhs} and {rhs} to floating point values.
          Node* lhs_value = LoadHeapNumberValue(lhs);
          Node* rhs_value = SmiToFloat64(rhs);

          // Perform a floating point comparison of {lhs} and {rhs}.
          Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
        }

        Bind(&if_rhsisnotsmi);
        {
          // Load the map of {rhs}.
          Node* rhs_map = LoadMap(rhs);

          // Check if {rhs} is also a HeapNumber.
          Label if_rhsisnumber(this), if_rhsisnotnumber(this);
          Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber,
                 &if_rhsisnotnumber);

          Bind(&if_rhsisnumber);
          {
            // Convert {lhs} and {rhs} to floating point values.
            Node* lhs_value = LoadHeapNumberValue(lhs);
            Node* rhs_value = LoadHeapNumberValue(rhs);

            // Perform a floating point comparison of {lhs} and {rhs}.
            Branch(Float64Equal(lhs_value, rhs_value), &if_equal,
                   &if_notequal);
          }

          Bind(&if_rhsisnotnumber);
          Goto(&if_notequal);
        }
      }

      Bind(&if_lhsisnotnumber);
      {
        // Check if {rhs} is a Smi or a HeapObject.
        Label if_rhsissmi(this), if_rhsisnotsmi(this);
        Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);

        Bind(&if_rhsissmi);
        Goto(&if_notequal);

        Bind(&if_rhsisnotsmi);
        {
          // Load the instance type of {lhs}.
          Node* lhs_instance_type = LoadMapInstanceType(lhs_map);

          // Check if {lhs} is a String.
          Label if_lhsisstring(this), if_lhsisnotstring(this);
          Branch(IsStringInstanceType(lhs_instance_type), &if_lhsisstring,
                 &if_lhsisnotstring);

          Bind(&if_lhsisstring);
          {
            // Load the instance type of {rhs}.
            Node* rhs_instance_type = LoadInstanceType(rhs);

            // Check if {rhs} is also a String.
            Label if_rhsisstring(this, Label::kDeferred),
                if_rhsisnotstring(this);
            Branch(IsStringInstanceType(rhs_instance_type), &if_rhsisstring,
                   &if_rhsisnotstring);

            Bind(&if_rhsisstring);
            {
              Callable callable = (mode == kDontNegateResult)
                                      ? CodeFactory::StringEqual(isolate())
                                      : CodeFactory::StringNotEqual(isolate());
              result.Bind(CallStub(callable, context, lhs, rhs));
              Goto(&end);
            }

            Bind(&if_rhsisnotstring);
            Goto(&if_notequal);
          }

          Bind(&if_lhsisnotstring);
          Goto(&if_notequal);
        }
      }
    }

    Bind(&if_lhsissmi);
    {
      // We already know that {lhs} and {rhs} are not reference equal, and
      // {lhs} is a Smi; so {lhs} and {rhs} can only be strictly equal if
      // {rhs} is a HeapNumber with an equal floating point value.

      // Check if {rhs} is a Smi or a HeapObject.
      Label if_rhsissmi(this), if_rhsisnotsmi(this);
      Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);

      Bind(&if_rhsissmi);
      Goto(&if_notequal);

      Bind(&if_rhsisnotsmi);
      {
        // Load the map of the {rhs}.
        Node* rhs_map = LoadMap(rhs);

        // The {rhs} could be a HeapNumber with the same value as {lhs}.
        Label if_rhsisnumber(this), if_rhsisnotnumber(this);
        Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber, &if_rhsisnotnumber);

        Bind(&if_rhsisnumber);
        {
          // Convert {lhs} and {rhs} to floating point values.
          Node* lhs_value = SmiToFloat64(lhs);
          Node* rhs_value = LoadHeapNumberValue(rhs);

          // Perform a floating point comparison of {lhs} and {rhs}.
          Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
        }

        Bind(&if_rhsisnotnumber);
        Goto(&if_notequal);
      }
    }
  }

  Bind(&if_equal);
  {
    result.Bind(BooleanConstant(mode == kDontNegateResult));
    Goto(&end);
  }

  Bind(&if_notequal);
  {
    result.Bind(BooleanConstant(mode == kNegateResult));
    Goto(&end);
  }

  Bind(&end);
  return result.value();
}

// ES6 #sec-samevalue
// This algorithm differs from the Strict Equality Comparison Algorithm in its
// treatment of signed zeroes and NaNs.
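// For example (per the spec; illustrative only):
//   SameValue(NaN, NaN) -> true,  whereas NaN === NaN is false
//   SameValue(+0, -0)   -> false, whereas +0 === -0 is true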
Node* CodeStubAssembler::SameValue(Node* lhs, Node* rhs, Node* context) {
  Variable var_result(this, MachineRepresentation::kWord32);
  Label strict_equal(this), out(this);

  Node* const int_false = Int32Constant(0);
  Node* const int_true = Int32Constant(1);

  Label if_equal(this), if_notequal(this);
  Branch(WordEqual(lhs, rhs), &if_equal, &if_notequal);

  Bind(&if_equal);
  {
    // This covers the case when {lhs} == {rhs}. We can simply return true
    // because SameValue considers two NaNs to be equal.

    var_result.Bind(int_true);
    Goto(&out);
  }

  Bind(&if_notequal);
  {
    // This covers the case when {lhs} != {rhs}. We only handle numbers here
    // and defer to StrictEqual for the rest.

    Node* const lhs_float = TryTaggedToFloat64(lhs, &strict_equal);
    Node* const rhs_float = TryTaggedToFloat64(rhs, &strict_equal);

    Label if_lhsisnan(this), if_lhsnotnan(this);
    BranchIfFloat64IsNaN(lhs_float, &if_lhsisnan, &if_lhsnotnan);

    Bind(&if_lhsisnan);
    {
      // Return true iff {rhs} is NaN.

      Node* const result =
          SelectConstant(Float64Equal(rhs_float, rhs_float), int_false,
                         int_true, MachineRepresentation::kWord32);
      var_result.Bind(result);
      Goto(&out);
    }

    Bind(&if_lhsnotnan);
    {
      Label if_floatisequal(this), if_floatnotequal(this);
      Branch(Float64Equal(lhs_float, rhs_float), &if_floatisequal,
             &if_floatnotequal);

      Bind(&if_floatisequal);
      {
        // We still need to handle the case when {lhs} and {rhs} are -0.0 and
        // 0.0 (or vice versa). Compare the high word to distinguish between
        // the two.
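        // The sign bit lives in the high word: bit_cast<uint64_t>(+0.0) is
        // 0x0000000000000000 while bit_cast<uint64_t>(-0.0) is
        // 0x8000000000000000, so equal-comparing the high words tells the
        // two zeroes apart (a standard IEEE-754 fact, noted for clarity).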

        Node* const lhs_hi_word = Float64ExtractHighWord32(lhs_float);
        Node* const rhs_hi_word = Float64ExtractHighWord32(rhs_float);

        // If x is +0 and y is -0, return false.
        // If x is -0 and y is +0, return false.

        Node* const result = Word32Equal(lhs_hi_word, rhs_hi_word);
        var_result.Bind(result);
        Goto(&out);
      }

      Bind(&if_floatnotequal);
      {
        var_result.Bind(int_false);
        Goto(&out);
      }
    }
  }

  Bind(&strict_equal);
  {
    Node* const is_equal = StrictEqual(kDontNegateResult, lhs, rhs, context);
    Node* const result = WordEqual(is_equal, TrueConstant());
    var_result.Bind(result);
    Goto(&out);
  }

  Bind(&out);
  return var_result.value();
}

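// ForInFilter implements the filtering step of for-in iteration: a key from
// the enum cache is reported only if {object} still has a property with that
// key. A JS-level sketch of the observable behavior (illustrative only):
//   for (var key in o) { delete o.other; }
//   // Keys removed mid-iteration make HasProperty fail, so ForInFilter
//   // yields undefined for them and the iteration skips them.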
Node* CodeStubAssembler::ForInFilter(Node* key, Node* object, Node* context) {
  Label return_undefined(this, Label::kDeferred), return_to_name(this),
      end(this);

  Variable var_result(this, MachineRepresentation::kTagged);

  Node* has_property =
      HasProperty(object, key, context, Runtime::kForInHasProperty);

  Branch(WordEqual(has_property, BooleanConstant(true)), &return_to_name,
         &return_undefined);

  Bind(&return_to_name);
  {
    var_result.Bind(ToName(context, key));
    Goto(&end);
  }

  Bind(&return_undefined);
  {
    var_result.Bind(UndefinedConstant());
    Goto(&end);
  }

  Bind(&end);
  return var_result.value();
}

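// HasProperty walks the prototype chain, checking own properties and elements
// of each holder in turn; special receivers (e.g. proxies) bail out to the
// runtime. Roughly, as a sketch (not the exact lookup path):
//   while (holder != null) {
//     if (HasOwnPropertyOrElement(holder, key)) return true;
//     holder = holder.__proto__;
//   }
//   return false;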
Node* CodeStubAssembler::HasProperty(
    Node* object, Node* key, Node* context,
    Runtime::FunctionId fallback_runtime_function_id) {
  Label call_runtime(this, Label::kDeferred), return_true(this),
      return_false(this), end(this);

  CodeStubAssembler::LookupInHolder lookup_property_in_holder =
      [this, &return_true](Node* receiver, Node* holder, Node* holder_map,
                           Node* holder_instance_type, Node* unique_name,
                           Label* next_holder, Label* if_bailout) {
        TryHasOwnProperty(holder, holder_map, holder_instance_type,
                          unique_name, &return_true, next_holder, if_bailout);
      };

  CodeStubAssembler::LookupInHolder lookup_element_in_holder =
      [this, &return_true](Node* receiver, Node* holder, Node* holder_map,
                           Node* holder_instance_type, Node* index,
                           Label* next_holder, Label* if_bailout) {
        TryLookupElement(holder, holder_map, holder_instance_type, index,
                         &return_true, next_holder, if_bailout);
      };

  TryPrototypeChainLookup(object, key, lookup_property_in_holder,
                          lookup_element_in_holder, &return_false,
                          &call_runtime);

  Variable result(this, MachineRepresentation::kTagged);
  Bind(&return_true);
  {
    result.Bind(BooleanConstant(true));
    Goto(&end);
  }

  Bind(&return_false);
  {
    result.Bind(BooleanConstant(false));
    Goto(&end);
  }

  Bind(&call_runtime);
  {
    result.Bind(
        CallRuntime(fallback_runtime_function_id, context, object, key));
    Goto(&end);
  }

  Bind(&end);
  return result.value();
}

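// ClassOf computes the internal "class name" of {value}, e.g. as used by
// Object.prototype.toString. Illustrative results (not exhaustive):
//   ClassOf([])               -> "Array"
//   ClassOf(function f() {})  -> "Function"
//   ClassOf(42)               -> null (primitives have no class)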
Node* CodeStubAssembler::ClassOf(Node* value) {
  Variable var_result(this, MachineRepresentation::kTaggedPointer);
  Label if_function(this, Label::kDeferred), if_object(this, Label::kDeferred),
      if_primitive(this, Label::kDeferred), return_result(this);

  // Check if {value} is a Smi.
  GotoIf(TaggedIsSmi(value), &if_primitive);

  Node* value_map = LoadMap(value);
  Node* value_instance_type = LoadMapInstanceType(value_map);

  // Check if {value} is a JSFunction or JSBoundFunction.
  STATIC_ASSERT(LAST_TYPE == LAST_FUNCTION_TYPE);
  GotoIf(Uint32LessThanOrEqual(Int32Constant(FIRST_FUNCTION_TYPE),
                               value_instance_type),
         &if_function);

  // Check if {value} is a primitive HeapObject.
  STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
  GotoIf(Uint32LessThan(value_instance_type,
                        Int32Constant(FIRST_JS_RECEIVER_TYPE)),
         &if_primitive);

  // Load the {value}s constructor, and check that it's a JSFunction.
  Node* constructor = LoadMapConstructor(value_map);
  GotoIfNot(IsJSFunction(constructor), &if_object);

  // Return the instance class name for the {constructor}.
  Node* shared_info =
      LoadObjectField(constructor, JSFunction::kSharedFunctionInfoOffset);
  Node* instance_class_name = LoadObjectField(
      shared_info, SharedFunctionInfo::kInstanceClassNameOffset);
  var_result.Bind(instance_class_name);
  Goto(&return_result);

  Bind(&if_function);
  var_result.Bind(LoadRoot(Heap::kFunction_stringRootIndex));
  Goto(&return_result);

  Bind(&if_object);
  var_result.Bind(LoadRoot(Heap::kObject_stringRootIndex));
  Goto(&return_result);

  Bind(&if_primitive);
  var_result.Bind(NullConstant());
  Goto(&return_result);

  Bind(&return_result);
  return var_result.value();
}

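// Typeof implements the JS `typeof` operator. For example:
//   typeof 1            -> "number"      typeof undefined -> "undefined"
//   typeof "a"          -> "string"      typeof Symbol()  -> "symbol"
//   typeof {}           -> "object"      typeof null      -> "object"
//   typeof (() => {})   -> "function"
//   typeof document.all -> "undefined"   (callable && undetectable)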
Node* CodeStubAssembler::Typeof(Node* value, Node* context) {
  Variable result_var(this, MachineRepresentation::kTagged);

  Label return_number(this, Label::kDeferred), if_oddball(this),
      return_function(this), return_undefined(this), return_object(this),
      return_string(this), return_result(this);

  GotoIf(TaggedIsSmi(value), &return_number);

  Node* map = LoadMap(value);

  GotoIf(IsHeapNumberMap(map), &return_number);

  Node* instance_type = LoadMapInstanceType(map);

  GotoIf(Word32Equal(instance_type, Int32Constant(ODDBALL_TYPE)), &if_oddball);

  Node* callable_or_undetectable_mask = Word32And(
      LoadMapBitField(map),
      Int32Constant(1 << Map::kIsCallable | 1 << Map::kIsUndetectable));

  GotoIf(Word32Equal(callable_or_undetectable_mask,
                     Int32Constant(1 << Map::kIsCallable)),
         &return_function);

  GotoIfNot(Word32Equal(callable_or_undetectable_mask, Int32Constant(0)),
            &return_undefined);

  GotoIf(IsJSReceiverInstanceType(instance_type), &return_object);

  GotoIf(IsStringInstanceType(instance_type), &return_string);

  CSA_ASSERT(this, Word32Equal(instance_type, Int32Constant(SYMBOL_TYPE)));
  result_var.Bind(HeapConstant(isolate()->factory()->symbol_string()));
  Goto(&return_result);

  Bind(&return_number);
  {
    result_var.Bind(HeapConstant(isolate()->factory()->number_string()));
    Goto(&return_result);
  }

  Bind(&if_oddball);
  {
    Node* type = LoadObjectField(value, Oddball::kTypeOfOffset);
    result_var.Bind(type);
    Goto(&return_result);
  }

  Bind(&return_function);
  {
    result_var.Bind(HeapConstant(isolate()->factory()->function_string()));
    Goto(&return_result);
  }

  Bind(&return_undefined);
  {
    result_var.Bind(HeapConstant(isolate()->factory()->undefined_string()));
    Goto(&return_result);
  }

  Bind(&return_object);
  {
    result_var.Bind(HeapConstant(isolate()->factory()->object_string()));
    Goto(&return_result);
  }

  Bind(&return_string);
  {
    result_var.Bind(HeapConstant(isolate()->factory()->string_string()));
    Goto(&return_result);
  }

  Bind(&return_result);
  return result_var.value();
}

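// GetSuperConstructor loads the [[Prototype]] of the active function, which
// for class constructors is the `super` constructor; e.g. (illustrative):
//   class A {}
//   class B extends A { constructor() { super(); } }
//   // While executing B's constructor, GetSuperConstructor yields A.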
Node* CodeStubAssembler::GetSuperConstructor(Node* active_function,
                                             Node* context) {
  CSA_ASSERT(this, IsJSFunction(active_function));

  Label is_not_constructor(this, Label::kDeferred), out(this);
  Variable result(this, MachineRepresentation::kTagged);

  Node* map = LoadMap(active_function);
  Node* prototype = LoadMapPrototype(map);
  Node* prototype_map = LoadMap(prototype);
  GotoIfNot(IsConstructorMap(prototype_map), &is_not_constructor);

  result.Bind(prototype);
  Goto(&out);

  Bind(&is_not_constructor);
  {
    CallRuntime(Runtime::kThrowNotSuperConstructor, context, prototype,
                active_function);
    Unreachable();
  }

  Bind(&out);
  return result.value();
}

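// InstanceOf implements the ES6 InstanceofOperator: prefer the @@hasInstance
// handler on {callable}, else fall back to OrdinaryHasInstance. A JS-level
// sketch of the spec steps (names illustrative, not the exact spec text):
//   function instanceOf(object, callable) {
//     if (callable === null || typeof callable !== "object" &&
//         typeof callable !== "function") throw new TypeError();
//     const handler = callable[Symbol.hasInstance];
//     if (handler != null) return !!handler.call(callable, object);
//     if (typeof callable !== "function") throw new TypeError();
//     return OrdinaryHasInstance(callable, object);
//   }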
Node* CodeStubAssembler::InstanceOf(Node* object, Node* callable,
                                    Node* context) {
  Variable var_result(this, MachineRepresentation::kTagged);
  Label if_notcallable(this, Label::kDeferred),
      if_notreceiver(this, Label::kDeferred), if_otherhandler(this),
      if_nohandler(this, Label::kDeferred), return_true(this),
      return_false(this), return_result(this, &var_result);

  // Ensure that the {callable} is actually a JSReceiver.
  GotoIf(TaggedIsSmi(callable), &if_notreceiver);
  GotoIfNot(IsJSReceiver(callable), &if_notreceiver);

  // Load the @@hasInstance property from {callable}.
  Node* inst_of_handler = CallStub(CodeFactory::GetProperty(isolate()),
                                   context, callable,
                                   HasInstanceSymbolConstant());

  // Optimize for the likely case where {inst_of_handler} is the builtin
  // Function.prototype[@@hasInstance] method, and emit a direct call in
  // that case without any additional checking.
  Node* native_context = LoadNativeContext(context);
  Node* function_has_instance =
      LoadContextElement(native_context, Context::FUNCTION_HAS_INSTANCE_INDEX);
  GotoIfNot(WordEqual(inst_of_handler, function_has_instance),
            &if_otherhandler);
  {
    // Call to Function.prototype[@@hasInstance] directly.
    Callable builtin(isolate()->builtins()->FunctionPrototypeHasInstance(),
                     CallTrampolineDescriptor(isolate()));
    Node* result = CallJS(builtin, context, inst_of_handler, callable, object);
    var_result.Bind(result);
    Goto(&return_result);
  }

  Bind(&if_otherhandler);
  {
    // Check if there's actually an {inst_of_handler}.
    GotoIf(IsNull(inst_of_handler), &if_nohandler);
    GotoIf(IsUndefined(inst_of_handler), &if_nohandler);

    // Call the {inst_of_handler} for {callable} and {object}.
    Node* result = CallJS(
        CodeFactory::Call(isolate(), ConvertReceiverMode::kNotNullOrUndefined),
        context, inst_of_handler, callable, object);

    // Convert the {result} to a Boolean.
    BranchIfToBooleanIsTrue(result, &return_true, &return_false);
  }

  Bind(&if_nohandler);
  {
    // Ensure that the {callable} is actually Callable.
    GotoIfNot(IsCallable(callable), &if_notcallable);

    // Use the OrdinaryHasInstance algorithm.
    Node* result = CallStub(CodeFactory::OrdinaryHasInstance(isolate()),
                            context, callable, object);
    var_result.Bind(result);
    Goto(&return_result);
  }

  Bind(&if_notcallable);
  {
    CallRuntime(Runtime::kThrowNonCallableInInstanceOfCheck, context);
    Unreachable();
  }

  Bind(&if_notreceiver);
  {
    CallRuntime(Runtime::kThrowNonObjectInInstanceOfCheck, context);
    Unreachable();
  }

  Bind(&return_true);
  var_result.Bind(TrueConstant());
  Goto(&return_result);

  Bind(&return_false);
  var_result.Bind(FalseConstant());
  Goto(&return_result);

  Bind(&return_result);
  return var_result.value();
}

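// NumberInc adds one to a Number, producing a Smi when possible and falling
// back to a fresh HeapNumber when the Smi addition overflows. For example
// (illustrative; the Smi range depends on the target architecture):
//   NumberInc(Smi(5))              -> Smi(6)
//   NumberInc(Smi(Smi::kMaxValue)) -> HeapNumber(Smi::kMaxValue + 1.0)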
Node* CodeStubAssembler::NumberInc(Node* value) {
  Variable var_result(this, MachineRepresentation::kTagged),
      var_finc_value(this, MachineRepresentation::kFloat64);
  Label if_issmi(this), if_isnotsmi(this), do_finc(this), end(this);
  Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi);

  Bind(&if_issmi);
  {
    // Try fast Smi addition first.
    Node* one = SmiConstant(Smi::FromInt(1));
    Node* pair = IntPtrAddWithOverflow(BitcastTaggedToWord(value),
                                       BitcastTaggedToWord(one));
    Node* overflow = Projection(1, pair);

    // Check if the Smi addition overflowed.
    Label if_overflow(this), if_notoverflow(this);
    Branch(overflow, &if_overflow, &if_notoverflow);

    Bind(&if_notoverflow);
    var_result.Bind(BitcastWordToTaggedSigned(Projection(0, pair)));
    Goto(&end);

    Bind(&if_overflow);
    {
      var_finc_value.Bind(SmiToFloat64(value));
      Goto(&do_finc);
    }
  }

  Bind(&if_isnotsmi);
  {
    // Check if the value is a HeapNumber.
    CSA_ASSERT(this, IsHeapNumberMap(LoadMap(value)));

    // Load the HeapNumber value.
    var_finc_value.Bind(LoadHeapNumberValue(value));
    Goto(&do_finc);
  }

  Bind(&do_finc);
  {
    Node* finc_value = var_finc_value.value();
    Node* one = Float64Constant(1.0);
    Node* finc_result = Float64Add(finc_value, one);
    var_result.Bind(AllocateHeapNumberWithValue(finc_result));
    Goto(&end);
  }

  Bind(&end);
  return var_result.value();
}

void CodeStubAssembler::GotoIfNotNumber(Node* input, Label* is_not_number) {
  Label is_number(this);
  GotoIf(TaggedIsSmi(input), &is_number);
  Node* input_map = LoadMap(input);
  Branch(IsHeapNumberMap(input_map), &is_number, is_not_number);
  Bind(&is_number);
}

void CodeStubAssembler::GotoIfNumber(Node* input, Label* is_number) {
  GotoIf(TaggedIsSmi(input), is_number);
  Node* input_map = LoadMap(input);
  GotoIf(IsHeapNumberMap(input_map), is_number);
}

Node* CodeStubAssembler::CreateArrayIterator(Node* array, Node* array_map,
                                             Node* array_type, Node* context,
                                             IterationKind mode) {
  int kBaseMapIndex = 0;
  switch (mode) {
    case IterationKind::kKeys:
      kBaseMapIndex = Context::TYPED_ARRAY_KEY_ITERATOR_MAP_INDEX;
      break;
    case IterationKind::kValues:
      kBaseMapIndex = Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX;
      break;
    case IterationKind::kEntries:
      kBaseMapIndex = Context::UINT8_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX;
      break;
  }

  // Fast Array iterator map index:
  // (kBaseIndex + kFastIteratorOffset) + ElementsKind (for JSArrays)
  // kBaseIndex + (ElementsKind - UINT8_ELEMENTS) (for JSTypedArrays)
  const int kFastIteratorOffset =
      Context::FAST_SMI_ARRAY_VALUE_ITERATOR_MAP_INDEX -
      Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX;
  STATIC_ASSERT(kFastIteratorOffset ==
                (Context::FAST_SMI_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX -
                 Context::UINT8_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX));

  // Slow Array iterator map index: (kBaseIndex + kSlowIteratorOffset)
  const int kSlowIteratorOffset =
      Context::GENERIC_ARRAY_VALUE_ITERATOR_MAP_INDEX -
      Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX;
  STATIC_ASSERT(kSlowIteratorOffset ==
                (Context::GENERIC_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX -
                 Context::UINT8_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX));
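  // Worked example (a sketch; the concrete enum values are version-specific):
  // for a packed FAST_ELEMENTS JSArray iterated in kValues mode, the iterator
  // map index is
  //   kBaseMapIndex + kFastIteratorOffset + FAST_ELEMENTS
  // whereas a dictionary-mode array falls back to
  //   kBaseMapIndex + kSlowIteratorOffset.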

  // Assert: Type(array) is Object
  CSA_ASSERT(this, IsJSReceiverInstanceType(array_type));

  Variable var_result(this, MachineRepresentation::kTagged);
  Variable var_map_index(this, MachineType::PointerRepresentation());
  Variable var_array_map(this, MachineRepresentation::kTagged);

  Label return_result(this);
  Label allocate_iterator(this);

  if (mode == IterationKind::kKeys) {
    // There are only two key iterator maps, so branch on whether the receiver
    // is a TypedArray or not.

    Label if_istypedarray(this), if_isgeneric(this);

    Branch(Word32Equal(array_type, Int32Constant(JS_TYPED_ARRAY_TYPE)),
           &if_istypedarray, &if_isgeneric);

    Bind(&if_isgeneric);
    {
      Label if_isfast(this), if_isslow(this);
      BranchIfFastJSArray(array, context, FastJSArrayAccessMode::INBOUNDS_READ,
                          &if_isfast, &if_isslow);

      Bind(&if_isfast);
      {
        var_map_index.Bind(
            IntPtrConstant(Context::FAST_ARRAY_KEY_ITERATOR_MAP_INDEX));
        var_array_map.Bind(array_map);
        Goto(&allocate_iterator);
      }

      Bind(&if_isslow);
      {
        var_map_index.Bind(
            IntPtrConstant(Context::GENERIC_ARRAY_KEY_ITERATOR_MAP_INDEX));
        var_array_map.Bind(UndefinedConstant());
        Goto(&allocate_iterator);
      }
    }

    Bind(&if_istypedarray);
    {
      var_map_index.Bind(
          IntPtrConstant(Context::TYPED_ARRAY_KEY_ITERATOR_MAP_INDEX));
      var_array_map.Bind(UndefinedConstant());
      Goto(&allocate_iterator);
    }
  } else {
    Label if_istypedarray(this), if_isgeneric(this);
    Branch(Word32Equal(array_type, Int32Constant(JS_TYPED_ARRAY_TYPE)),
           &if_istypedarray, &if_isgeneric);

    Bind(&if_isgeneric);
    {
      Label if_isfast(this), if_isslow(this);
      BranchIfFastJSArray(array, context, FastJSArrayAccessMode::INBOUNDS_READ,
                          &if_isfast, &if_isslow);

      Bind(&if_isfast);
      {
        Label if_ispacked(this), if_isholey(this);
        Node* elements_kind = LoadMapElementsKind(array_map);
        Branch(IsHoleyFastElementsKind(elements_kind), &if_isholey,
               &if_ispacked);

        Bind(&if_isholey);
        {
          // Fast holey JSArrays can treat the hole as undefined if the
          // protector cell is valid and the prototype chain is unchanged from
          // its initial state (because the protector cell is only tracked for
          // the initial Array and Object prototypes). Check these conditions
          // here, and take the slow path if any of them fail.
          Node* protector_cell = LoadRoot(Heap::kArrayProtectorRootIndex);
          DCHECK(isolate()->heap()->array_protector()->IsPropertyCell());
          GotoIfNot(
              WordEqual(
                  LoadObjectField(protector_cell, PropertyCell::kValueOffset),
                  SmiConstant(Smi::FromInt(Isolate::kProtectorValid))),
              &if_isslow);

          Node* native_context = LoadNativeContext(context);

          Node* prototype = LoadMapPrototype(array_map);
          Node* array_prototype = LoadContextElement(
              native_context, Context::INITIAL_ARRAY_PROTOTYPE_INDEX);
          GotoIfNot(WordEqual(prototype, array_prototype), &if_isslow);

          Node* map = LoadMap(prototype);
          prototype = LoadMapPrototype(map);
          Node* object_prototype = LoadContextElement(
              native_context, Context::INITIAL_OBJECT_PROTOTYPE_INDEX);
          GotoIfNot(WordEqual(prototype, object_prototype), &if_isslow);

          map = LoadMap(prototype);
          prototype = LoadMapPrototype(map);
          Branch(IsNull(prototype), &if_ispacked, &if_isslow);
        }
        Bind(&if_ispacked);
        {
          Node* map_index =
              IntPtrAdd(IntPtrConstant(kBaseMapIndex + kFastIteratorOffset),
                        ChangeUint32ToWord(LoadMapElementsKind(array_map)));
          CSA_ASSERT(this, IntPtrGreaterThanOrEqual(
                               map_index, IntPtrConstant(kBaseMapIndex +
                                                         kFastIteratorOffset)));
          CSA_ASSERT(this, IntPtrLessThan(map_index,
                                          IntPtrConstant(kBaseMapIndex +
                                                         kSlowIteratorOffset)));

          var_map_index.Bind(map_index);
          var_array_map.Bind(array_map);
          Goto(&allocate_iterator);
        }
      }

      Bind(&if_isslow);
      {
        Node* map_index = IntPtrAdd(IntPtrConstant(kBaseMapIndex),
                                    IntPtrConstant(kSlowIteratorOffset));
        var_map_index.Bind(map_index);
        var_array_map.Bind(UndefinedConstant());
        Goto(&allocate_iterator);
      }
    }

    Bind(&if_istypedarray);
    {
      Node* map_index =
          IntPtrAdd(IntPtrConstant(kBaseMapIndex - UINT8_ELEMENTS),
                    ChangeUint32ToWord(LoadMapElementsKind(array_map)));
      CSA_ASSERT(
          this, IntPtrLessThan(map_index, IntPtrConstant(kBaseMapIndex +
                                                         kFastIteratorOffset)));
      CSA_ASSERT(this, IntPtrGreaterThanOrEqual(map_index,
                                                IntPtrConstant(kBaseMapIndex)));
      var_map_index.Bind(map_index);
      var_array_map.Bind(UndefinedConstant());
      Goto(&allocate_iterator);
    }
  }

  Bind(&allocate_iterator);
  {
    Node* map = LoadFixedArrayElement(LoadNativeContext(context),
                                      var_map_index.value());
    var_result.Bind(AllocateJSArrayIterator(array, var_array_map.value(), map));
    Goto(&return_result);
  }

  Bind(&return_result);
  return var_result.value();
}

Node* CodeStubAssembler::AllocateJSArrayIterator(Node* array, Node* array_map,
                                                 Node* map) {
  Node* iterator = Allocate(JSArrayIterator::kSize);
  StoreMapNoWriteBarrier(iterator, map);
  StoreObjectFieldRoot(iterator, JSArrayIterator::kPropertiesOffset,
                       Heap::kEmptyFixedArrayRootIndex);
  StoreObjectFieldRoot(iterator, JSArrayIterator::kElementsOffset,
                       Heap::kEmptyFixedArrayRootIndex);
  StoreObjectFieldNoWriteBarrier(iterator,
                                 JSArrayIterator::kIteratedObjectOffset, array);
  StoreObjectFieldNoWriteBarrier(iterator, JSArrayIterator::kNextIndexOffset,
                                 SmiConstant(Smi::FromInt(0)));
  StoreObjectFieldNoWriteBarrier(
      iterator, JSArrayIterator::kIteratedObjectMapOffset, array_map);
  return iterator;
}

Node* CodeStubAssembler::IsDetachedBuffer(Node* buffer) {
  CSA_ASSERT(this, HasInstanceType(buffer, JS_ARRAY_BUFFER_TYPE));

  Node* buffer_bit_field = LoadObjectField(
      buffer, JSArrayBuffer::kBitFieldOffset, MachineType::Uint32());
  return IsSetWord32<JSArrayBuffer::WasNeutered>(buffer_bit_field);
}

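// CodeStubArguments gives access to the JS arguments of the current stub
// frame. The layout assumed by the address arithmetic below (a sketch; exact
// slot counts come from StandardFrameConstants):
//
//   receiver         <- arguments_ + kPointerSize (highest address)
//   argument 0       <- arguments_
//   ...
//   argument argc-1  <- arguments_ - (argc - 1) * kPointerSize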
CodeStubArguments::CodeStubArguments(CodeStubAssembler* assembler, Node* argc,
                                     Node* fp,
                                     CodeStubAssembler::ParameterMode mode)
    : assembler_(assembler),
      argc_mode_(mode),
      argc_(argc),
      arguments_(nullptr),
      fp_(fp != nullptr ? fp : assembler->LoadFramePointer()) {
  Node* offset = assembler->ElementOffsetFromIndex(
      argc_, FAST_ELEMENTS, mode,
      (StandardFrameConstants::kFixedSlotCountAboveFp - 1) * kPointerSize);
  arguments_ = assembler_->IntPtrAdd(fp_, offset);
}

Node* CodeStubArguments::GetReceiver() const {
  return assembler_->Load(MachineType::AnyTagged(), arguments_,
                          assembler_->IntPtrConstant(kPointerSize));
}

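// Computes the address of argument {index}. Arguments live at decreasing
// addresses below {arguments_}, so the offset is derived from the negated
// index, i.e. address = arguments_ - index * kPointerSize (for intptr
// indices).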
Node* CodeStubArguments::AtIndexPtr(
    Node* index, CodeStubAssembler::ParameterMode mode) const {
  typedef compiler::Node Node;
  Node* negated_index = assembler_->IntPtrOrSmiSub(
      assembler_->IntPtrOrSmiConstant(0, mode), index, mode);
  Node* offset =
      assembler_->ElementOffsetFromIndex(negated_index, FAST_ELEMENTS, mode, 0);
  return assembler_->IntPtrAdd(arguments_, offset);
}

Node* CodeStubArguments::AtIndex(Node* index,
                                 CodeStubAssembler::ParameterMode mode) const {
  DCHECK_EQ(argc_mode_, mode);
  CSA_ASSERT(assembler_,
             assembler_->UintPtrOrSmiLessThan(index, GetLength(), mode));
  return assembler_->Load(MachineType::AnyTagged(), AtIndexPtr(index, mode));
}

Node* CodeStubArguments::AtIndex(int index) const {
  return AtIndex(assembler_->IntPtrConstant(index));
}

void CodeStubArguments::ForEach(
    const CodeStubAssembler::VariableList& vars,
    const CodeStubArguments::ForEachBodyFunction& body, Node* first, Node* last,
    CodeStubAssembler::ParameterMode mode) {
  assembler_->Comment("CodeStubArguments::ForEach");
  if (first == nullptr) {
    first = assembler_->IntPtrOrSmiConstant(0, mode);
  }
  if (last == nullptr) {
    DCHECK_EQ(mode, argc_mode_);
    last = argc_;
  }
  Node* start = assembler_->IntPtrSub(
      arguments_,
      assembler_->ElementOffsetFromIndex(first, FAST_ELEMENTS, mode));
  Node* end = assembler_->IntPtrSub(
      arguments_,
      assembler_->ElementOffsetFromIndex(last, FAST_ELEMENTS, mode));
  assembler_->BuildFastLoop(vars, start, end,
                            [this, &body](Node* current) {
                              Node* arg = assembler_->Load(
                                  MachineType::AnyTagged(), current);
                              body(arg);
                            },
                            -kPointerSize, CodeStubAssembler::INTPTR_PARAMETERS,
                            CodeStubAssembler::IndexAdvanceMode::kPost);
}

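// Typical use from inside a builtin (a sketch; assumes a stub that has
// already constructed CodeStubArguments `args` over its JS arguments):
//   CodeStubAssembler::VariableList vars(0, zone());
//   args.ForEach(vars, [&](Node* arg) {
//     // ... visit each argument in order, first to last ...
//   });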
void CodeStubArguments::PopAndReturn(Node* value) {
  assembler_->PopAndReturn(
      assembler_->IntPtrAdd(argc_, assembler_->IntPtrConstant(1)), value);
}

Node* CodeStubAssembler::IsFastElementsKind(Node* elements_kind) {
  return Uint32LessThanOrEqual(elements_kind,
                               Int32Constant(LAST_FAST_ELEMENTS_KIND));
}

Node* CodeStubAssembler::IsHoleyFastElementsKind(Node* elements_kind) {
  CSA_ASSERT(this, IsFastElementsKind(elements_kind));

  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == (FAST_SMI_ELEMENTS | 1));
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == (FAST_ELEMENTS | 1));
  STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == (FAST_DOUBLE_ELEMENTS | 1));
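  // With this encoding, e.g. FAST_SMI_ELEMENTS (packed) and
  // FAST_HOLEY_SMI_ELEMENTS (holey) differ only in the low bit, so testing
  // that bit suffices to classify any fast kind.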

  // Holey fast elements kinds have the low bit set (see the asserts above).
  Node* holey_elements = Word32And(elements_kind, Int32Constant(1));
  return Word32Equal(holey_elements, Int32Constant(1));
}

Node* CodeStubAssembler::IsDebugActive() {
  Node* is_debug_active = Load(
      MachineType::Uint8(),
      ExternalConstant(ExternalReference::debug_is_active_address(isolate())));
  return Word32NotEqual(is_debug_active, Int32Constant(0));
}

Node* CodeStubAssembler::IsPromiseHookEnabledOrDebugIsActive() {
  Node* const promise_hook_or_debug_is_active =
      Load(MachineType::Uint8(),
           ExternalConstant(
               ExternalReference::promise_hook_or_debug_is_active_address(
                   isolate())));
  return Word32NotEqual(promise_hook_or_debug_is_active, Int32Constant(0));
}

Node* CodeStubAssembler::AllocateFunctionWithMapAndContext(Node* map,
                                                           Node* shared_info,
                                                           Node* context) {
  Node* const code = BitcastTaggedToWord(
      LoadObjectField(shared_info, SharedFunctionInfo::kCodeOffset));
  Node* const code_entry =
      IntPtrAdd(code, IntPtrConstant(Code::kHeaderSize - kHeapObjectTag));

  Node* const fun = Allocate(JSFunction::kSize);
  StoreMapNoWriteBarrier(fun, map);
  StoreObjectFieldRoot(fun, JSObject::kPropertiesOffset,
                       Heap::kEmptyFixedArrayRootIndex);
  StoreObjectFieldRoot(fun, JSObject::kElementsOffset,
                       Heap::kEmptyFixedArrayRootIndex);
  StoreObjectFieldRoot(fun, JSFunction::kFeedbackVectorOffset,
                       Heap::kUndefinedCellRootIndex);
  StoreObjectFieldRoot(fun, JSFunction::kPrototypeOrInitialMapOffset,
                       Heap::kTheHoleValueRootIndex);
  StoreObjectFieldNoWriteBarrier(fun, JSFunction::kSharedFunctionInfoOffset,
                                 shared_info);
  StoreObjectFieldNoWriteBarrier(fun, JSFunction::kContextOffset, context);
  StoreObjectFieldNoWriteBarrier(fun, JSFunction::kCodeEntryOffset, code_entry,
                                 MachineType::PointerRepresentation());
  StoreObjectFieldRoot(fun, JSFunction::kNextFunctionLinkOffset,
                       Heap::kUndefinedValueRootIndex);

  return fun;
}

Node* CodeStubAssembler::AllocatePromiseReactionJobInfo(
    Node* value, Node* tasks, Node* deferred_promise, Node* deferred_on_resolve,
    Node* deferred_on_reject, Node* context) {
  Node* const result = Allocate(PromiseReactionJobInfo::kSize);
  StoreMapNoWriteBarrier(result, Heap::kPromiseReactionJobInfoMapRootIndex);
  StoreObjectFieldNoWriteBarrier(result, PromiseReactionJobInfo::kValueOffset,
                                 value);
  StoreObjectFieldNoWriteBarrier(result, PromiseReactionJobInfo::kTasksOffset,
                                 tasks);
  StoreObjectFieldNoWriteBarrier(
      result, PromiseReactionJobInfo::kDeferredPromiseOffset, deferred_promise);
  StoreObjectFieldNoWriteBarrier(
      result, PromiseReactionJobInfo::kDeferredOnResolveOffset,
      deferred_on_resolve);
  StoreObjectFieldNoWriteBarrier(
      result, PromiseReactionJobInfo::kDeferredOnRejectOffset,
      deferred_on_reject);
  StoreObjectFieldNoWriteBarrier(result, PromiseReactionJobInfo::kContextOffset,
                                 context);
  return result;
}

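// Stack frames encode their type as a tagged marker word in a fixed frame
// slot, so a marker can be told apart from a function pointer; see
// StackFrame::TypeToMarker for the exact encoding (roughly a Smi-tagged
// frame type).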
Node* CodeStubAssembler::MarkerIsFrameType(Node* marker_or_function,
                                           StackFrame::Type frame_type) {
  return WordEqual(marker_or_function,
                   IntPtrConstant(StackFrame::TypeToMarker(frame_type)));
}

Node* CodeStubAssembler::MarkerIsNotFrameType(Node* marker_or_function,
                                              StackFrame::Type frame_type) {
  return WordNotEqual(marker_or_function,
                      IntPtrConstant(StackFrame::TypeToMarker(frame_type)));
}

void CodeStubAssembler::Print(const char* s) {
#ifdef DEBUG
  std::string formatted(s);
  formatted += "\n";
  Handle<String> string = isolate()->factory()->NewStringFromAsciiChecked(
      formatted.c_str(), TENURED);
  CallRuntime(Runtime::kGlobalPrint, NoContextConstant(), HeapConstant(string));
#endif
}

void CodeStubAssembler::Print(const char* prefix, Node* tagged_value) {
#ifdef DEBUG
  if (prefix != nullptr) {
    std::string formatted(prefix);
    formatted += ": ";
    Handle<String> string = isolate()->factory()->NewStringFromAsciiChecked(
        formatted.c_str(), TENURED);
    CallRuntime(Runtime::kGlobalPrint, NoContextConstant(),
                HeapConstant(string));
  }
  CallRuntime(Runtime::kDebugPrint, NoContextConstant(), tagged_value);
#endif
}

}  // namespace internal
}  // namespace v8