1 // Copyright 2016 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/code-stub-assembler.h"
6 
7 #include "src/code-factory.h"
8 #include "src/frames-inl.h"
9 #include "src/frames.h"
10 #include "src/objects/api-callbacks.h"
11 #include "src/objects/descriptor-array.h"
12 #include "src/objects/ordered-hash-table-inl.h"
13 #include "src/wasm/wasm-objects.h"
14 
15 namespace v8 {
16 namespace internal {
17 
18 using compiler::Node;
19 template <class T>
20 using TNode = compiler::TNode<T>;
21 template <class T>
22 using SloppyTNode = compiler::SloppyTNode<T>;
23 
24 CodeStubAssembler::CodeStubAssembler(compiler::CodeAssemblerState* state)
25     : compiler::CodeAssembler(state) {
26   if (DEBUG_BOOL && FLAG_csa_trap_on_node != nullptr) {
27     HandleBreakOnNode();
28   }
29 }
30 
31 void CodeStubAssembler::HandleBreakOnNode() {
32   // FLAG_csa_trap_on_node should be in the form "STUB,NODE", where STUB is
33   // the name of a stub and NODE is a number specifying the node id.
34   const char* name = state()->name();
35   size_t name_length = strlen(name);
36   if (strncmp(FLAG_csa_trap_on_node, name, name_length) != 0) {
37     // Different name.
38     return;
39   }
40   size_t option_length = strlen(FLAG_csa_trap_on_node);
41   if (option_length < name_length + 2 ||
42       FLAG_csa_trap_on_node[name_length] != ',') {
43     // Option is too short.
44     return;
45   }
46   const char* start = &FLAG_csa_trap_on_node[name_length + 1];
47   char* end;
48   int node_id = static_cast<int>(strtol(start, &end, 10));
49   if (start == end) {
50     // Bad node id.
51     return;
52   }
53   BreakOnNode(node_id);
54 }
55 
56 void CodeStubAssembler::Assert(const BranchGenerator& branch,
57                                const char* message, const char* file, int line,
58                                Node* extra_node1, const char* extra_node1_name,
59                                Node* extra_node2, const char* extra_node2_name,
60                                Node* extra_node3, const char* extra_node3_name,
61                                Node* extra_node4, const char* extra_node4_name,
62                                Node* extra_node5,
63                                const char* extra_node5_name) {
64 #if defined(DEBUG)
65   if (FLAG_debug_code) {
66     Check(branch, message, file, line, extra_node1, extra_node1_name,
67           extra_node2, extra_node2_name, extra_node3, extra_node3_name,
68           extra_node4, extra_node4_name, extra_node5, extra_node5_name);
69   }
70 #endif
71 }
72 
73 void CodeStubAssembler::Assert(const NodeGenerator& condition_body,
74                                const char* message, const char* file, int line,
75                                Node* extra_node1, const char* extra_node1_name,
76                                Node* extra_node2, const char* extra_node2_name,
77                                Node* extra_node3, const char* extra_node3_name,
78                                Node* extra_node4, const char* extra_node4_name,
79                                Node* extra_node5,
80                                const char* extra_node5_name) {
81 #if defined(DEBUG)
82   if (FLAG_debug_code) {
83     Check(condition_body, message, file, line, extra_node1, extra_node1_name,
84           extra_node2, extra_node2_name, extra_node3, extra_node3_name,
85           extra_node4, extra_node4_name, extra_node5, extra_node5_name);
86   }
87 #endif
88 }
89 
90 #ifdef DEBUG
91 namespace {
92 void MaybePrintNodeWithName(CodeStubAssembler* csa, Node* node,
93                             const char* node_name) {
94   if (node != nullptr) {
95     csa->CallRuntime(Runtime::kPrintWithNameForAssert, csa->SmiConstant(0),
96                      csa->StringConstant(node_name), node);
97   }
98 }
99 }  // namespace
100 #endif
101 
102 void CodeStubAssembler::Check(const BranchGenerator& branch,
103                               const char* message, const char* file, int line,
104                               Node* extra_node1, const char* extra_node1_name,
105                               Node* extra_node2, const char* extra_node2_name,
106                               Node* extra_node3, const char* extra_node3_name,
107                               Node* extra_node4, const char* extra_node4_name,
108                               Node* extra_node5, const char* extra_node5_name) {
109   Label ok(this);
110   Label not_ok(this, Label::kDeferred);
111   if (message != nullptr && FLAG_code_comments) {
112     Comment("[ Assert: %s", message);
113   } else {
114     Comment("[ Assert");
115   }
116   branch(&ok, &not_ok);
117 
118   BIND(&not_ok);
119   DCHECK_NOT_NULL(message);
120   char chars[1024];
121   Vector<char> buffer(chars);
122   if (file != nullptr) {
123     SNPrintF(buffer, "CSA_ASSERT failed: %s [%s:%d]\n", message, file, line);
124   } else {
125     SNPrintF(buffer, "CSA_ASSERT failed: %s\n", message);
126   }
127   Node* message_node = StringConstant(&(buffer[0]));
128 
129 #ifdef DEBUG
130   // Only print the extra nodes in debug builds.
131   MaybePrintNodeWithName(this, extra_node1, extra_node1_name);
132   MaybePrintNodeWithName(this, extra_node2, extra_node2_name);
133   MaybePrintNodeWithName(this, extra_node3, extra_node3_name);
134   MaybePrintNodeWithName(this, extra_node4, extra_node4_name);
135   MaybePrintNodeWithName(this, extra_node5, extra_node5_name);
136 #endif
137 
138   DebugAbort(message_node);
139   Unreachable();
140 
141   BIND(&ok);
142   Comment("] Assert");
143 }
144 
145 void CodeStubAssembler::Check(const NodeGenerator& condition_body,
146                               const char* message, const char* file, int line,
147                               Node* extra_node1, const char* extra_node1_name,
148                               Node* extra_node2, const char* extra_node2_name,
149                               Node* extra_node3, const char* extra_node3_name,
150                               Node* extra_node4, const char* extra_node4_name,
151                               Node* extra_node5, const char* extra_node5_name) {
152   BranchGenerator branch = [=](Label* ok, Label* not_ok) {
153     Node* condition = condition_body();
154     DCHECK_NOT_NULL(condition);
155     Branch(condition, ok, not_ok);
156   };
157 
158   Check(branch, message, file, line, extra_node1, extra_node1_name, extra_node2,
159         extra_node2_name, extra_node3, extra_node3_name, extra_node4,
160         extra_node4_name, extra_node5, extra_node5_name);
161 }
162 
163 void CodeStubAssembler::FastCheck(TNode<BoolT> condition) {
164   Label ok(this);
165   GotoIf(condition, &ok);
166   DebugBreak();
167   Goto(&ok);
168   BIND(&ok);
169 }
170 
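// Shared implementation for Select: branches on {condition}, evaluates
// {true_body} or {false_body} in the taken branch, and merges the two
// results into a single variable of representation {rep}.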
171 Node* CodeStubAssembler::SelectImpl(TNode<BoolT> condition,
172                                     const NodeGenerator& true_body,
173                                     const NodeGenerator& false_body,
174                                     MachineRepresentation rep) {
175   VARIABLE(value, rep);
176   Label vtrue(this), vfalse(this), end(this);
177   Branch(condition, &vtrue, &vfalse);
178 
179   BIND(&vtrue);
180   {
181     value.Bind(true_body());
182     Goto(&end);
183   }
184   BIND(&vfalse);
185   {
186     value.Bind(false_body());
187     Goto(&end);
188   }
189 
190   BIND(&end);
191   return value.value();
192 }
193 
194 TNode<Int32T> CodeStubAssembler::SelectInt32Constant(
195     SloppyTNode<BoolT> condition, int true_value, int false_value) {
196   return SelectConstant<Int32T>(condition, Int32Constant(true_value),
197                                 Int32Constant(false_value));
198 }
199 
200 TNode<IntPtrT> CodeStubAssembler::SelectIntPtrConstant(
201     SloppyTNode<BoolT> condition, int true_value, int false_value) {
202   return SelectConstant<IntPtrT>(condition, IntPtrConstant(true_value),
203                                  IntPtrConstant(false_value));
204 }
205 
206 TNode<Oddball> CodeStubAssembler::SelectBooleanConstant(
207     SloppyTNode<BoolT> condition) {
208   return SelectConstant<Oddball>(condition, TrueConstant(), FalseConstant());
209 }
210 
211 TNode<Smi> CodeStubAssembler::SelectSmiConstant(SloppyTNode<BoolT> condition,
212                                                 Smi* true_value,
213                                                 Smi* false_value) {
214   return SelectConstant<Smi>(condition, SmiConstant(true_value),
215                              SmiConstant(false_value));
216 }
217 
218 TNode<Object> CodeStubAssembler::NoContextConstant() {
219   return SmiConstant(Context::kNoContext);
220 }
221 
222 #define HEAP_CONSTANT_ACCESSOR(rootIndexName, rootAccessorName, name) \
223   compiler::TNode<std::remove_reference<decltype(                     \
224       *std::declval<Heap>().rootAccessorName())>::type>               \
225       CodeStubAssembler::name##Constant() {                           \
226     return UncheckedCast<std::remove_reference<decltype(              \
227         *std::declval<Heap>().rootAccessorName())>::type>(            \
228         LoadRoot(Heap::k##rootIndexName##RootIndex));                 \
229   }
230 HEAP_MUTABLE_IMMOVABLE_OBJECT_LIST(HEAP_CONSTANT_ACCESSOR);
231 #undef HEAP_CONSTANT_ACCESSOR
232 
233 #define HEAP_CONSTANT_ACCESSOR(rootIndexName, rootAccessorName, name) \
234   compiler::TNode<std::remove_reference<decltype(                     \
235       *std::declval<ReadOnlyRoots>().rootAccessorName())>::type>      \
236       CodeStubAssembler::name##Constant() {                           \
237     return UncheckedCast<std::remove_reference<decltype(              \
238         *std::declval<ReadOnlyRoots>().rootAccessorName())>::type>(   \
239         LoadRoot(Heap::k##rootIndexName##RootIndex));                 \
240   }
241 HEAP_IMMUTABLE_IMMOVABLE_OBJECT_LIST(HEAP_CONSTANT_ACCESSOR);
242 #undef HEAP_CONSTANT_ACCESSOR
243 
244 #define HEAP_CONSTANT_TEST(rootIndexName, rootAccessorName, name) \
245   compiler::TNode<BoolT> CodeStubAssembler::Is##name(             \
246       SloppyTNode<Object> value) {                                \
247     return WordEqual(value, name##Constant());                    \
248   }                                                               \
249   compiler::TNode<BoolT> CodeStubAssembler::IsNot##name(          \
250       SloppyTNode<Object> value) {                                \
251     return WordNotEqual(value, name##Constant());                 \
252   }
253 HEAP_IMMOVABLE_OBJECT_LIST(HEAP_CONSTANT_TEST);
254 #undef HEAP_CONSTANT_TEST
255 
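// Loads the 64-bit string hash seed from the hash seed ByteArray root.
// Valid only on 64-bit targets; 32-bit targets read the two halves via
// HashSeedHigh/HashSeedLow below.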
256 TNode<Int64T> CodeStubAssembler::HashSeed() {
257   DCHECK(Is64());
258   TNode<HeapObject> hash_seed_root =
259       TNode<HeapObject>::UncheckedCast(LoadRoot(Heap::kHashSeedRootIndex));
260   return TNode<Int64T>::UncheckedCast(LoadObjectField(
261       hash_seed_root, ByteArray::kHeaderSize, MachineType::Int64()));
262 }
263 
264 TNode<Int32T> CodeStubAssembler::HashSeedHigh() {
265   DCHECK(!Is64());
266 #ifdef V8_TARGET_BIG_ENDIAN
267   static int kOffset = 0;
268 #else
269   static int kOffset = kInt32Size;
270 #endif
271   TNode<HeapObject> hash_seed_root =
272       TNode<HeapObject>::UncheckedCast(LoadRoot(Heap::kHashSeedRootIndex));
273   return TNode<Int32T>::UncheckedCast(LoadObjectField(
274       hash_seed_root, ByteArray::kHeaderSize + kOffset, MachineType::Int32()));
275 }
276 
277 TNode<Int32T> CodeStubAssembler::HashSeedLow() {
278   DCHECK(!Is64());
279 #ifdef V8_TARGET_BIG_ENDIAN
280   static int kOffset = kInt32Size;
281 #else
282   static int kOffset = 0;
283 #endif
284   TNode<HeapObject> hash_seed_root =
285       TNode<HeapObject>::UncheckedCast(LoadRoot(Heap::kHashSeedRootIndex));
286   return TNode<Int32T>::UncheckedCast(LoadObjectField(
287       hash_seed_root, ByteArray::kHeaderSize + kOffset, MachineType::Int32()));
288 }
289 
290 Node* CodeStubAssembler::IntPtrOrSmiConstant(int value, ParameterMode mode) {
291   if (mode == SMI_PARAMETERS) {
292     return SmiConstant(value);
293   } else {
294     DCHECK_EQ(INTPTR_PARAMETERS, mode);
295     return IntPtrConstant(value);
296   }
297 }
298 
299 bool CodeStubAssembler::IsIntPtrOrSmiConstantZero(Node* test,
300                                                   ParameterMode mode) {
301   int32_t constant_test;
302   Smi* smi_test;
303   if (mode == INTPTR_PARAMETERS) {
304     if (ToInt32Constant(test, constant_test) && constant_test == 0) {
305       return true;
306     }
307   } else {
308     DCHECK_EQ(mode, SMI_PARAMETERS);
309     if (ToSmiConstant(test, smi_test) && smi_test->value() == 0) {
310       return true;
311     }
312   }
313   return false;
314 }
315 
316 bool CodeStubAssembler::TryGetIntPtrOrSmiConstantValue(Node* maybe_constant,
317                                                        int* value,
318                                                        ParameterMode mode) {
319   int32_t int32_constant;
320   if (mode == INTPTR_PARAMETERS) {
321     if (ToInt32Constant(maybe_constant, int32_constant)) {
322       *value = int32_constant;
323       return true;
324     }
325   } else {
326     DCHECK_EQ(mode, SMI_PARAMETERS);
327     Smi* smi_constant;
328     if (ToSmiConstant(maybe_constant, smi_constant)) {
329       *value = Smi::ToInt(smi_constant);
330       return true;
331     }
332   }
333   return false;
334 }
335 
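// Rounds {value} up to the next power of two using the classic bit trick:
// subtract one, smear the highest set bit into all lower positions by OR-ing
// with progressively larger right shifts, then add one back.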
336 TNode<IntPtrT> CodeStubAssembler::IntPtrRoundUpToPowerOfTwo32(
337     TNode<IntPtrT> value) {
338   Comment("IntPtrRoundUpToPowerOfTwo32");
339   CSA_ASSERT(this, UintPtrLessThanOrEqual(value, IntPtrConstant(0x80000000u)));
340   value = Signed(IntPtrSub(value, IntPtrConstant(1)));
341   for (int i = 1; i <= 16; i *= 2) {
342     value = Signed(WordOr(value, WordShr(value, IntPtrConstant(i))));
343   }
344   return Signed(IntPtrAdd(value, IntPtrConstant(1)));
345 }
346 
347 Node* CodeStubAssembler::MatchesParameterMode(Node* value, ParameterMode mode) {
348   if (mode == SMI_PARAMETERS) {
349     return TaggedIsSmi(value);
350   } else {
351     return Int32Constant(1);
352   }
353 }
354 
355 TNode<BoolT> CodeStubAssembler::WordIsPowerOfTwo(SloppyTNode<IntPtrT> value) {
356   // value && !(value & (value - 1))
357   return WordEqual(
358       Select<IntPtrT>(
359           WordEqual(value, IntPtrConstant(0)),
360           [=] { return IntPtrConstant(1); },
361           [=] { return WordAnd(value, IntPtrSub(value, IntPtrConstant(1))); }),
362       IntPtrConstant(0));
363 }
364 
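// Rounds {x} to the nearest integer with ties rounded towards +Infinity:
// computes ceil(x) and subtracts one when the fractional part of {x} is
// below one half.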
365 TNode<Float64T> CodeStubAssembler::Float64Round(SloppyTNode<Float64T> x) {
366   Node* one = Float64Constant(1.0);
367   Node* one_half = Float64Constant(0.5);
368 
369   Label return_x(this);
370 
371   // Round up {x} towards Infinity.
372   VARIABLE(var_x, MachineRepresentation::kFloat64, Float64Ceil(x));
373 
374   GotoIf(Float64LessThanOrEqual(Float64Sub(var_x.value(), one_half), x),
375          &return_x);
376   var_x.Bind(Float64Sub(var_x.value(), one));
377   Goto(&return_x);
378 
379   BIND(&return_x);
380   return TNode<Float64T>::UncheckedCast(var_x.value());
381 }
382 
383 TNode<Float64T> CodeStubAssembler::Float64Ceil(SloppyTNode<Float64T> x) {
384   if (IsFloat64RoundUpSupported()) {
385     return Float64RoundUp(x);
386   }
387 
388   Node* one = Float64Constant(1.0);
389   Node* zero = Float64Constant(0.0);
390   Node* two_52 = Float64Constant(4503599627370496.0E0);
391   Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);
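  // Adding and then subtracting 2^52 rounds a positive double to an integer,
  // because doubles with magnitude >= 2^52 have no fractional bits left in
  // the mantissa. The same trick is used in Float64Floor and Float64Trunc.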
392 
393   VARIABLE(var_x, MachineRepresentation::kFloat64, x);
394   Label return_x(this), return_minus_x(this);
395 
396   // Check if {x} is greater than zero.
397   Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
398   Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
399          &if_xnotgreaterthanzero);
400 
401   BIND(&if_xgreaterthanzero);
402   {
403     // Just return {x} unless it's in the range ]0,2^52[.
404     GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);
405 
406     // Round positive {x} towards Infinity.
407     var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
408     GotoIfNot(Float64LessThan(var_x.value(), x), &return_x);
409     var_x.Bind(Float64Add(var_x.value(), one));
410     Goto(&return_x);
411   }
412 
413   BIND(&if_xnotgreaterthanzero);
414   {
415     // Just return {x} unless it's in the range ]-2^52,0[
416     GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
417     GotoIfNot(Float64LessThan(x, zero), &return_x);
418 
419     // Round negated {x} towards Infinity and return the result negated.
420     Node* minus_x = Float64Neg(x);
421     var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
422     GotoIfNot(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
423     var_x.Bind(Float64Sub(var_x.value(), one));
424     Goto(&return_minus_x);
425   }
426 
427   BIND(&return_minus_x);
428   var_x.Bind(Float64Neg(var_x.value()));
429   Goto(&return_x);
430 
431   BIND(&return_x);
432   return TNode<Float64T>::UncheckedCast(var_x.value());
433 }
434 
435 TNode<Float64T> CodeStubAssembler::Float64Floor(SloppyTNode<Float64T> x) {
436   if (IsFloat64RoundDownSupported()) {
437     return Float64RoundDown(x);
438   }
439 
440   Node* one = Float64Constant(1.0);
441   Node* zero = Float64Constant(0.0);
442   Node* two_52 = Float64Constant(4503599627370496.0E0);
443   Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);
444 
445   VARIABLE(var_x, MachineRepresentation::kFloat64, x);
446   Label return_x(this), return_minus_x(this);
447 
448   // Check if {x} is greater than zero.
449   Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
450   Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
451          &if_xnotgreaterthanzero);
452 
453   BIND(&if_xgreaterthanzero);
454   {
455     // Just return {x} unless it's in the range ]0,2^52[.
456     GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);
457 
458     // Round positive {x} towards -Infinity.
459     var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
460     GotoIfNot(Float64GreaterThan(var_x.value(), x), &return_x);
461     var_x.Bind(Float64Sub(var_x.value(), one));
462     Goto(&return_x);
463   }
464 
465   BIND(&if_xnotgreaterthanzero);
466   {
467     // Just return {x} unless it's in the range ]-2^52,0[
468     GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
469     GotoIfNot(Float64LessThan(x, zero), &return_x);
470 
471     // Round negated {x} towards -Infinity and return the result negated.
472     Node* minus_x = Float64Neg(x);
473     var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
474     GotoIfNot(Float64LessThan(var_x.value(), minus_x), &return_minus_x);
475     var_x.Bind(Float64Add(var_x.value(), one));
476     Goto(&return_minus_x);
477   }
478 
479   BIND(&return_minus_x);
480   var_x.Bind(Float64Neg(var_x.value()));
481   Goto(&return_x);
482 
483   BIND(&return_x);
484   return TNode<Float64T>::UncheckedCast(var_x.value());
485 }
486 
487 TNode<Float64T> CodeStubAssembler::Float64RoundToEven(SloppyTNode<Float64T> x) {
488   if (IsFloat64RoundTiesEvenSupported()) {
489     return Float64RoundTiesEven(x);
490   }
491   // See ES#sec-touint8clamp for details.
492   Node* f = Float64Floor(x);
493   Node* f_and_half = Float64Add(f, Float64Constant(0.5));
494 
495   VARIABLE(var_result, MachineRepresentation::kFloat64);
496   Label return_f(this), return_f_plus_one(this), done(this);
497 
498   GotoIf(Float64LessThan(f_and_half, x), &return_f_plus_one);
499   GotoIf(Float64LessThan(x, f_and_half), &return_f);
500   {
501     Node* f_mod_2 = Float64Mod(f, Float64Constant(2.0));
502     Branch(Float64Equal(f_mod_2, Float64Constant(0.0)), &return_f,
503            &return_f_plus_one);
504   }
505 
506   BIND(&return_f);
507   var_result.Bind(f);
508   Goto(&done);
509 
510   BIND(&return_f_plus_one);
511   var_result.Bind(Float64Add(f, Float64Constant(1.0)));
512   Goto(&done);
513 
514   BIND(&done);
515   return TNode<Float64T>::UncheckedCast(var_result.value());
516 }
517 
518 TNode<Float64T> CodeStubAssembler::Float64Trunc(SloppyTNode<Float64T> x) {
519   if (IsFloat64RoundTruncateSupported()) {
520     return Float64RoundTruncate(x);
521   }
522 
523   Node* one = Float64Constant(1.0);
524   Node* zero = Float64Constant(0.0);
525   Node* two_52 = Float64Constant(4503599627370496.0E0);
526   Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);
527 
528   VARIABLE(var_x, MachineRepresentation::kFloat64, x);
529   Label return_x(this), return_minus_x(this);
530 
531   // Check if {x} is greater than 0.
532   Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
533   Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
534          &if_xnotgreaterthanzero);
535 
536   BIND(&if_xgreaterthanzero);
537   {
538     if (IsFloat64RoundDownSupported()) {
539       var_x.Bind(Float64RoundDown(x));
540     } else {
541       // Just return {x} unless it's in the range ]0,2^52[.
542       GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);
543 
544       // Round positive {x} towards -Infinity.
545       var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
546       GotoIfNot(Float64GreaterThan(var_x.value(), x), &return_x);
547       var_x.Bind(Float64Sub(var_x.value(), one));
548     }
549     Goto(&return_x);
550   }
551 
552   BIND(&if_xnotgreaterthanzero);
553   {
554     if (IsFloat64RoundUpSupported()) {
555       var_x.Bind(Float64RoundUp(x));
556       Goto(&return_x);
557     } else {
558       // Just return {x} unless it's in the range ]-2^52,0[.
559       GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
560       GotoIfNot(Float64LessThan(x, zero), &return_x);
561 
562       // Round negated {x} towards -Infinity and return result negated.
563       Node* minus_x = Float64Neg(x);
564       var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
565       GotoIfNot(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
566       var_x.Bind(Float64Sub(var_x.value(), one));
567       Goto(&return_minus_x);
568     }
569   }
570 
571   BIND(&return_minus_x);
572   var_x.Bind(Float64Neg(var_x.value()));
573   Goto(&return_x);
574 
575   BIND(&return_x);
576   return TNode<Float64T>::UncheckedCast(var_x.value());
577 }
578 
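// With 31-bit Smis on a 64-bit target, a valid Smi must be properly
// sign-extended: sign-extending the low 32 bits of the tagged word must
// reproduce the full word. On other configurations this is always true.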
579 TNode<BoolT> CodeStubAssembler::IsValidSmi(TNode<Smi> smi) {
580   if (SmiValuesAre31Bits() && kPointerSize == kInt64Size) {
581     // Check that the Smi value is properly sign-extended.
582     TNode<IntPtrT> value = Signed(BitcastTaggedToWord(smi));
583     return WordEqual(value, ChangeInt32ToIntPtr(TruncateIntPtrToInt32(value)));
584   }
585   return Int32TrueConstant();
586 }
587 
588 Node* CodeStubAssembler::SmiShiftBitsConstant() {
589   return IntPtrConstant(kSmiShiftSize + kSmiTagSize);
590 }
591 
592 TNode<Smi> CodeStubAssembler::SmiFromInt32(SloppyTNode<Int32T> value) {
593   TNode<IntPtrT> value_intptr = ChangeInt32ToIntPtr(value);
594   TNode<Smi> smi =
595       BitcastWordToTaggedSigned(WordShl(value_intptr, SmiShiftBitsConstant()));
596 #if V8_COMPRESS_POINTERS
597   CSA_ASSERT(this, IsValidSmi(smi));
598 #endif
599   return smi;
600 }
601 
602 TNode<BoolT> CodeStubAssembler::IsValidPositiveSmi(TNode<IntPtrT> value) {
603   intptr_t constant_value;
604   if (ToIntPtrConstant(value, constant_value)) {
605     return (static_cast<uintptr_t>(constant_value) <=
606             static_cast<uintptr_t>(Smi::kMaxValue))
607                ? Int32TrueConstant()
608                : Int32FalseConstant();
609   }
610 
611   return UintPtrLessThanOrEqual(value, IntPtrConstant(Smi::kMaxValue));
612 }
613 
614 TNode<Smi> CodeStubAssembler::SmiTag(SloppyTNode<IntPtrT> value) {
615   int32_t constant_value;
616   if (ToInt32Constant(value, constant_value) && Smi::IsValid(constant_value)) {
617     return SmiConstant(constant_value);
618   }
619   TNode<Smi> smi =
620       BitcastWordToTaggedSigned(WordShl(value, SmiShiftBitsConstant()));
621 #if V8_COMPRESS_POINTERS
622   CSA_ASSERT(this, IsValidSmi(smi));
623 #endif
624   return smi;
625 }
626 
627 TNode<IntPtrT> CodeStubAssembler::SmiUntag(SloppyTNode<Smi> value) {
628 #if V8_COMPRESS_POINTERS
629   CSA_ASSERT(this, IsValidSmi(value));
630 #endif
631   intptr_t constant_value;
632   if (ToIntPtrConstant(value, constant_value)) {
633     return IntPtrConstant(constant_value >> (kSmiShiftSize + kSmiTagSize));
634   }
635   return Signed(WordSar(BitcastTaggedToWord(value), SmiShiftBitsConstant()));
636 }
637 
638 TNode<Int32T> CodeStubAssembler::SmiToInt32(SloppyTNode<Smi> value) {
639   TNode<IntPtrT> result = SmiUntag(value);
640   return TruncateIntPtrToInt32(result);
641 }
642 
643 TNode<Float64T> CodeStubAssembler::SmiToFloat64(SloppyTNode<Smi> value) {
644   return ChangeInt32ToFloat64(SmiToInt32(value));
645 }
646 
647 TNode<Smi> CodeStubAssembler::SmiMax(TNode<Smi> a, TNode<Smi> b) {
648   return SelectConstant<Smi>(SmiLessThan(a, b), b, a);
649 }
650 
651 TNode<Smi> CodeStubAssembler::SmiMin(TNode<Smi> a, TNode<Smi> b) {
652   return SelectConstant<Smi>(SmiLessThan(a, b), a, b);
653 }
654 
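// Adds two Smis directly on their tagged word representation (the zero tag
// bits cancel out in the addition) and jumps to {if_overflow} when the
// result does not fit in a Smi.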
655 TNode<Smi> CodeStubAssembler::TrySmiAdd(TNode<Smi> lhs, TNode<Smi> rhs,
656                                         Label* if_overflow) {
657   if (SmiValuesAre32Bits()) {
658     TNode<PairT<IntPtrT, BoolT>> pair = IntPtrAddWithOverflow(
659         BitcastTaggedToWord(lhs), BitcastTaggedToWord(rhs));
660     TNode<BoolT> overflow = Projection<1>(pair);
661     GotoIf(overflow, if_overflow);
662     TNode<IntPtrT> result = Projection<0>(pair);
663     return BitcastWordToTaggedSigned(result);
664   } else {
665     DCHECK(SmiValuesAre31Bits());
666     TNode<PairT<Int32T, BoolT>> pair =
667         Int32AddWithOverflow(TruncateIntPtrToInt32(BitcastTaggedToWord(lhs)),
668                              TruncateIntPtrToInt32(BitcastTaggedToWord(rhs)));
669     TNode<BoolT> overflow = Projection<1>(pair);
670     GotoIf(overflow, if_overflow);
671     TNode<Int32T> result = Projection<0>(pair);
672     return BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(result));
673   }
674 }
675 
676 TNode<Smi> CodeStubAssembler::TrySmiSub(TNode<Smi> lhs, TNode<Smi> rhs,
677                                         Label* if_overflow) {
678   if (SmiValuesAre32Bits()) {
679     TNode<PairT<IntPtrT, BoolT>> pair = IntPtrSubWithOverflow(
680         BitcastTaggedToWord(lhs), BitcastTaggedToWord(rhs));
681     TNode<BoolT> overflow = Projection<1>(pair);
682     GotoIf(overflow, if_overflow);
683     TNode<IntPtrT> result = Projection<0>(pair);
684     return BitcastWordToTaggedSigned(result);
685   } else {
686     DCHECK(SmiValuesAre31Bits());
687     TNode<PairT<Int32T, BoolT>> pair =
688         Int32SubWithOverflow(TruncateIntPtrToInt32(BitcastTaggedToWord(lhs)),
689                              TruncateIntPtrToInt32(BitcastTaggedToWord(rhs)));
690     TNode<BoolT> overflow = Projection<1>(pair);
691     GotoIf(overflow, if_overflow);
692     TNode<Int32T> result = Projection<0>(pair);
693     return BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(result));
694   }
695 }
696 
697 TNode<Object> CodeStubAssembler::NumberMax(SloppyTNode<Object> a,
698                                            SloppyTNode<Object> b) {
699   // TODO(danno): This could be optimized by specifically handling smi cases.
700   VARIABLE(result, MachineRepresentation::kTagged);
701   Label done(this), greater_than_equal_a(this), greater_than_equal_b(this);
702   GotoIfNumberGreaterThanOrEqual(a, b, &greater_than_equal_a);
703   GotoIfNumberGreaterThanOrEqual(b, a, &greater_than_equal_b);
704   result.Bind(NanConstant());
705   Goto(&done);
706   BIND(&greater_than_equal_a);
707   result.Bind(a);
708   Goto(&done);
709   BIND(&greater_than_equal_b);
710   result.Bind(b);
711   Goto(&done);
712   BIND(&done);
713   return TNode<Object>::UncheckedCast(result.value());
714 }
715 
716 TNode<Object> CodeStubAssembler::NumberMin(SloppyTNode<Object> a,
717                                            SloppyTNode<Object> b) {
718   // TODO(danno): This could be optimized by specifically handling smi cases.
719   VARIABLE(result, MachineRepresentation::kTagged);
720   Label done(this), greater_than_equal_a(this), greater_than_equal_b(this);
721   GotoIfNumberGreaterThanOrEqual(a, b, &greater_than_equal_a);
722   GotoIfNumberGreaterThanOrEqual(b, a, &greater_than_equal_b);
723   result.Bind(NanConstant());
724   Goto(&done);
725   BIND(&greater_than_equal_a);
726   result.Bind(b);
727   Goto(&done);
728   BIND(&greater_than_equal_b);
729   result.Bind(a);
730   Goto(&done);
731   BIND(&done);
732   return TNode<Object>::UncheckedCast(result.value());
733 }
734 
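// Converts {index} to an integer and clamps it into [0, length]. Negative
// indices are interpreted as offsets from {length} (relative-index
// semantics).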
735 TNode<IntPtrT> CodeStubAssembler::ConvertToRelativeIndex(
736     TNode<Context> context, TNode<Object> index, TNode<IntPtrT> length) {
737   TVARIABLE(IntPtrT, result);
738 
739   TNode<Number> const index_int =
740       ToInteger_Inline(context, index, CodeStubAssembler::kTruncateMinusZero);
741   TNode<IntPtrT> zero = IntPtrConstant(0);
742 
743   Label done(this);
744   Label if_issmi(this), if_isheapnumber(this, Label::kDeferred);
745   Branch(TaggedIsSmi(index_int), &if_issmi, &if_isheapnumber);
746 
747   BIND(&if_issmi);
748   {
749     TNode<Smi> const index_smi = CAST(index_int);
750     result = Select<IntPtrT>(
751         IntPtrLessThan(SmiUntag(index_smi), zero),
752         [=] { return IntPtrMax(IntPtrAdd(length, SmiUntag(index_smi)), zero); },
753         [=] { return IntPtrMin(SmiUntag(index_smi), length); });
754     Goto(&done);
755   }
756 
757   BIND(&if_isheapnumber);
758   {
759     // If {index} is a heap number, it is definitely out of bounds. If it is
760     // negative, {index} = max({length} + {index}, 0) = 0. If it is positive,
761     // set {index} to {length}.
762     TNode<HeapNumber> const index_hn = CAST(index_int);
763     TNode<Float64T> const float_zero = Float64Constant(0.);
764     TNode<Float64T> const index_float = LoadHeapNumberValue(index_hn);
765     result = SelectConstant<IntPtrT>(Float64LessThan(index_float, float_zero),
766                                      zero, length);
767     Goto(&done);
768   }
769   BIND(&done);
770   return result.value();
771 }
772 
773 TNode<Number> CodeStubAssembler::SmiMod(TNode<Smi> a, TNode<Smi> b) {
774   TVARIABLE(Number, var_result);
775   Label return_result(this, &var_result),
776       return_minuszero(this, Label::kDeferred),
777       return_nan(this, Label::kDeferred);
778 
779   // Untag {a} and {b}.
780   TNode<Int32T> int_a = SmiToInt32(a);
781   TNode<Int32T> int_b = SmiToInt32(b);
782 
783   // Return NaN if {b} is zero.
784   GotoIf(Word32Equal(int_b, Int32Constant(0)), &return_nan);
785 
786   // Check if {a} is non-negative.
787   Label if_aisnotnegative(this), if_aisnegative(this, Label::kDeferred);
788   Branch(Int32LessThanOrEqual(Int32Constant(0), int_a), &if_aisnotnegative,
789          &if_aisnegative);
790 
791   BIND(&if_aisnotnegative);
792   {
793     // Fast case, don't need to check any other edge cases.
794     TNode<Int32T> r = Int32Mod(int_a, int_b);
795     var_result = SmiFromInt32(r);
796     Goto(&return_result);
797   }
798 
799   BIND(&if_aisnegative);
800   {
801     if (SmiValuesAre32Bits()) {
802       // Check if {a} is kMinInt and {b} is -1 (only relevant if the
803       // kMinInt is actually representable as a Smi).
804       Label join(this);
805       GotoIfNot(Word32Equal(int_a, Int32Constant(kMinInt)), &join);
806       GotoIf(Word32Equal(int_b, Int32Constant(-1)), &return_minuszero);
807       Goto(&join);
808       BIND(&join);
809     }
810 
811     // Perform the integer modulus operation.
812     TNode<Int32T> r = Int32Mod(int_a, int_b);
813 
814     // Check if {r} is zero, and if so return -0, because we have to
815     // take the sign of the left hand side {a}, which is negative.
816     GotoIf(Word32Equal(r, Int32Constant(0)), &return_minuszero);
817 
818     // The remainder {r} can be outside the valid Smi range on 32bit
819     // architectures, so we cannot just say SmiFromInt32(r) here.
820     var_result = ChangeInt32ToTagged(r);
821     Goto(&return_result);
822   }
823 
824   BIND(&return_minuszero);
825   var_result = MinusZeroConstant();
826   Goto(&return_result);
827 
828   BIND(&return_nan);
829   var_result = NanConstant();
830   Goto(&return_result);
831 
832   BIND(&return_result);
833   return var_result.value();
834 }
835 
836 TNode<Number> CodeStubAssembler::SmiMul(TNode<Smi> a, TNode<Smi> b) {
837   TVARIABLE(Number, var_result);
838   VARIABLE(var_lhs_float64, MachineRepresentation::kFloat64);
839   VARIABLE(var_rhs_float64, MachineRepresentation::kFloat64);
840   Label return_result(this, &var_result);
841 
842   // Both {a} and {b} are Smis. Convert them to integers and multiply.
843   Node* lhs32 = SmiToInt32(a);
844   Node* rhs32 = SmiToInt32(b);
845   Node* pair = Int32MulWithOverflow(lhs32, rhs32);
846 
847   Node* overflow = Projection(1, pair);
848 
849   // Check if the multiplication overflowed.
850   Label if_overflow(this, Label::kDeferred), if_notoverflow(this);
851   Branch(overflow, &if_overflow, &if_notoverflow);
852   BIND(&if_notoverflow);
853   {
854     // If the answer is zero, we may need to return -0.0, depending on the
855     // input.
856     Label answer_zero(this), answer_not_zero(this);
857     Node* answer = Projection(0, pair);
858     Node* zero = Int32Constant(0);
859     Branch(Word32Equal(answer, zero), &answer_zero, &answer_not_zero);
860     BIND(&answer_not_zero);
861     {
862       var_result = ChangeInt32ToTagged(answer);
863       Goto(&return_result);
864     }
865     BIND(&answer_zero);
866     {
867       Node* or_result = Word32Or(lhs32, rhs32);
868       Label if_should_be_negative_zero(this), if_should_be_zero(this);
869       Branch(Int32LessThan(or_result, zero), &if_should_be_negative_zero,
870              &if_should_be_zero);
871       BIND(&if_should_be_negative_zero);
872       {
873         var_result = MinusZeroConstant();
874         Goto(&return_result);
875       }
876       BIND(&if_should_be_zero);
877       {
878         var_result = SmiConstant(0);
879         Goto(&return_result);
880       }
881     }
882   }
883   BIND(&if_overflow);
884   {
885     var_lhs_float64.Bind(SmiToFloat64(a));
886     var_rhs_float64.Bind(SmiToFloat64(b));
887     Node* value = Float64Mul(var_lhs_float64.value(), var_rhs_float64.value());
888     var_result = AllocateHeapNumberWithValue(value);
889     Goto(&return_result);
890   }
891 
892   BIND(&return_result);
893   return var_result.value();
894 }
895 
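// Divides {dividend} by {divisor} and returns the quotient as a Smi, jumping
// to {bailout} whenever the result cannot be represented as a Smi: division
// by zero, a -0 result, kMinInt / -1 overflow, or a non-integral quotient.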
896 TNode<Smi> CodeStubAssembler::TrySmiDiv(TNode<Smi> dividend, TNode<Smi> divisor,
897                                         Label* bailout) {
898   // Both {dividend} and {divisor} are Smis. Bail out to floating point
899   // division if {divisor} is zero.
900   GotoIf(WordEqual(divisor, SmiConstant(0)), bailout);
901 
902   // Do floating point division if {dividend} is zero and {divisor} is
903   // negative.
904   Label dividend_is_zero(this), dividend_is_not_zero(this);
905   Branch(WordEqual(dividend, SmiConstant(0)), &dividend_is_zero,
906          &dividend_is_not_zero);
907 
908   BIND(&dividend_is_zero);
909   {
910     GotoIf(SmiLessThan(divisor, SmiConstant(0)), bailout);
911     Goto(&dividend_is_not_zero);
912   }
913   BIND(&dividend_is_not_zero);
914 
915   TNode<Int32T> untagged_divisor = SmiToInt32(divisor);
916   TNode<Int32T> untagged_dividend = SmiToInt32(dividend);
917 
918   // Do floating point division if {dividend} is kMinInt (or kMinInt - 1
919   // if the Smi size is 31) and {divisor} is -1.
920   Label divisor_is_minus_one(this), divisor_is_not_minus_one(this);
921   Branch(Word32Equal(untagged_divisor, Int32Constant(-1)),
922          &divisor_is_minus_one, &divisor_is_not_minus_one);
923 
924   BIND(&divisor_is_minus_one);
925   {
926     GotoIf(Word32Equal(
927                untagged_dividend,
928                Int32Constant(kSmiValueSize == 32 ? kMinInt : (kMinInt >> 1))),
929            bailout);
930     Goto(&divisor_is_not_minus_one);
931   }
932   BIND(&divisor_is_not_minus_one);
933 
934   TNode<Int32T> untagged_result = Int32Div(untagged_dividend, untagged_divisor);
935   TNode<Int32T> truncated = Signed(Int32Mul(untagged_result, untagged_divisor));
936 
937   // Do floating point division if the remainder is not 0.
938   GotoIf(Word32NotEqual(untagged_dividend, truncated), bailout);
939 
940   return SmiFromInt32(untagged_result);
941 }
942 
943 TNode<Int32T> CodeStubAssembler::TruncateIntPtrToInt32(
944     SloppyTNode<IntPtrT> value) {
945   if (Is64()) {
946     return TruncateInt64ToInt32(ReinterpretCast<Int64T>(value));
947   }
948   return ReinterpretCast<Int32T>(value);
949 }
950 
951 TNode<BoolT> CodeStubAssembler::TaggedIsSmi(SloppyTNode<Object> a) {
952   return WordEqual(WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask)),
953                    IntPtrConstant(0));
954 }
955 
956 TNode<BoolT> CodeStubAssembler::TaggedIsSmi(TNode<MaybeObject> a) {
957   return WordEqual(
958       WordAnd(BitcastMaybeObjectToWord(a), IntPtrConstant(kSmiTagMask)),
959       IntPtrConstant(0));
960 }
961 
962 TNode<BoolT> CodeStubAssembler::TaggedIsNotSmi(SloppyTNode<Object> a) {
963   return WordNotEqual(
964       WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask)),
965       IntPtrConstant(0));
966 }
967 
968 TNode<BoolT> CodeStubAssembler::TaggedIsPositiveSmi(SloppyTNode<Object> a) {
969   return WordEqual(WordAnd(BitcastTaggedToWord(a),
970                            IntPtrConstant(kSmiTagMask | kSmiSignMask)),
971                    IntPtrConstant(0));
972 }
973 
974 TNode<BoolT> CodeStubAssembler::WordIsWordAligned(SloppyTNode<WordT> word) {
975   return WordEqual(IntPtrConstant(0),
976                    WordAnd(word, IntPtrConstant(kPointerSize - 1)));
977 }
978 
979 #if DEBUG
980 void CodeStubAssembler::Bind(Label* label, AssemblerDebugInfo debug_info) {
981   CodeAssembler::Bind(label, debug_info);
982 }
983 #else
984 void CodeStubAssembler::Bind(Label* label) { CodeAssembler::Bind(label); }
985 #endif  // DEBUG
986 
987 TNode<Float64T> CodeStubAssembler::LoadDoubleWithHoleCheck(
988     TNode<FixedDoubleArray> array, TNode<Smi> index, Label* if_hole) {
989   return LoadFixedDoubleArrayElement(array, index, MachineType::Float64(), 0,
990                                      SMI_PARAMETERS, if_hole);
991 }
992 
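// Walks the prototype chain starting from {receiver_map} and branches to
// {definitely_no_elements} if every prototype up to null has empty elements;
// Proxies, special API objects and non-empty JSValue string wrappers are
// treated pessimistically and branch to {possibly_elements}.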
993 void CodeStubAssembler::BranchIfPrototypesHaveNoElements(
994     Node* receiver_map, Label* definitely_no_elements,
995     Label* possibly_elements) {
996   CSA_SLOW_ASSERT(this, IsMap(receiver_map));
997   VARIABLE(var_map, MachineRepresentation::kTagged, receiver_map);
998   Label loop_body(this, &var_map);
999   Node* empty_fixed_array = LoadRoot(Heap::kEmptyFixedArrayRootIndex);
1000   Node* empty_slow_element_dictionary =
1001       LoadRoot(Heap::kEmptySlowElementDictionaryRootIndex);
1002   Goto(&loop_body);
1003 
1004   BIND(&loop_body);
1005   {
1006     Node* map = var_map.value();
1007     Node* prototype = LoadMapPrototype(map);
1008     GotoIf(IsNull(prototype), definitely_no_elements);
1009     Node* prototype_map = LoadMap(prototype);
1010     TNode<Int32T> prototype_instance_type = LoadMapInstanceType(prototype_map);
1011 
1012     // Pessimistically assume elements if a Proxy, Special API Object,
1013     // or JSValue wrapper is found on the prototype chain. After this
1014     // instance type check, it's not necessary to check for interceptors or
1015     // access checks.
1016     Label if_custom(this, Label::kDeferred), if_notcustom(this);
1017     Branch(IsCustomElementsReceiverInstanceType(prototype_instance_type),
1018            &if_custom, &if_notcustom);
1019 
1020     BIND(&if_custom);
1021     {
1022       // For string JSValue wrappers we still support the checks as long
1023       // as they wrap the empty string.
1024       GotoIfNot(InstanceTypeEqual(prototype_instance_type, JS_VALUE_TYPE),
1025                 possibly_elements);
1026       Node* prototype_value = LoadJSValueValue(prototype);
1027       Branch(IsEmptyString(prototype_value), &if_notcustom, possibly_elements);
1028     }
1029 
1030     BIND(&if_notcustom);
1031     {
1032       Node* prototype_elements = LoadElements(prototype);
1033       var_map.Bind(prototype_map);
1034       GotoIf(WordEqual(prototype_elements, empty_fixed_array), &loop_body);
1035       Branch(WordEqual(prototype_elements, empty_slow_element_dictionary),
1036              &loop_body, possibly_elements);
1037     }
1038   }
1039 }
1040 
1041 void CodeStubAssembler::BranchIfJSReceiver(Node* object, Label* if_true,
1042                                            Label* if_false) {
1043   GotoIf(TaggedIsSmi(object), if_false);
1044   STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
1045   Branch(IsJSReceiver(object), if_true, if_false);
1046 }
1047 
1048 TNode<BoolT> CodeStubAssembler::IsFastJSArray(SloppyTNode<Object> object,
1049                                               SloppyTNode<Context> context) {
1050   Label if_true(this), if_false(this, Label::kDeferred), exit(this);
1051   BranchIfFastJSArray(object, context, &if_true, &if_false);
1052   TVARIABLE(BoolT, var_result);
1053   BIND(&if_true);
1054   {
1055     var_result = Int32TrueConstant();
1056     Goto(&exit);
1057   }
1058   BIND(&if_false);
1059   {
1060     var_result = Int32FalseConstant();
1061     Goto(&exit);
1062   }
1063   BIND(&exit);
1064   return var_result.value();
1065 }
1066 
1067 TNode<BoolT> CodeStubAssembler::IsFastJSArrayWithNoCustomIteration(
1068     TNode<Object> object, TNode<Context> context) {
1069   Label if_false(this, Label::kDeferred), if_fast(this), exit(this);
1070   TVARIABLE(BoolT, var_result);
1071   BranchIfFastJSArray(object, context, &if_fast, &if_false, true);
1072   BIND(&if_fast);
1073   {
1074     // Check that the Array.prototype hasn't been modified in a way that would
1075     // affect iteration.
1076     Node* protector_cell = LoadRoot(Heap::kArrayIteratorProtectorRootIndex);
1077     DCHECK(isolate()->heap()->array_iterator_protector()->IsPropertyCell());
1078     var_result =
1079         WordEqual(LoadObjectField(protector_cell, PropertyCell::kValueOffset),
1080                   SmiConstant(Isolate::kProtectorValid));
1081     Goto(&exit);
1082   }
1083   BIND(&if_false);
1084   {
1085     var_result = Int32FalseConstant();
1086     Goto(&exit);
1087   }
1088   BIND(&exit);
1089   return var_result.value();
1090 }
1091 
1092 void CodeStubAssembler::BranchIfFastJSArray(Node* object, Node* context,
1093                                             Label* if_true, Label* if_false,
1094                                             bool iteration_only) {
1095   GotoIfForceSlowPath(if_false);
1096 
1097   // Bailout if receiver is a Smi.
1098   GotoIf(TaggedIsSmi(object), if_false);
1099 
1100   Node* map = LoadMap(object);
1101   GotoIfNot(IsJSArrayMap(map), if_false);
1102 
1103   // Bailout if receiver has slow elements.
1104   Node* elements_kind = LoadMapElementsKind(map);
1105   GotoIfNot(IsFastElementsKind(elements_kind), if_false);
1106 
1107   // Verify that our prototype is the initial array prototype.
1108   GotoIfNot(IsPrototypeInitialArrayPrototype(context, map), if_false);
1109 
1110   if (iteration_only) {
1111     // If we are only iterating over the array, there is no need to check
1112     // the NoElements protector if the array is not holey.
1113     GotoIfNot(IsHoleyFastElementsKind(elements_kind), if_true);
1114   }
1115   Branch(IsNoElementsProtectorCellInvalid(), if_false, if_true);
1116 }
1117 
1118 void CodeStubAssembler::BranchIfFastJSArrayForCopy(Node* object, Node* context,
1119                                                    Label* if_true,
1120                                                    Label* if_false) {
1121   GotoIf(IsArraySpeciesProtectorCellInvalid(), if_false);
1122   BranchIfFastJSArray(object, context, if_true, if_false);
1123 }
1124 
1125 void CodeStubAssembler::GotoIfForceSlowPath(Label* if_true) {
1126 #ifdef V8_ENABLE_FORCE_SLOW_PATH
1127   Node* const force_slow_path_addr =
1128       ExternalConstant(ExternalReference::force_slow_path(isolate()));
1129   Node* const force_slow = Load(MachineType::Uint8(), force_slow_path_addr);
1130 
1131   GotoIf(force_slow, if_true);
1132 #endif
1133 }
1134 
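// Bump-pointer allocation: loads the current allocation top and limit from
// {top_address}/{limit_address}, bumps the top by the (possibly
// alignment-adjusted) size when it fits, and otherwise calls into the
// runtime. With kAllowLargeObjectAllocation, sizes above the regular heap
// object limit go straight to the runtime; double alignment on 32-bit
// targets is handled by inserting a one-pointer filler.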
1135 Node* CodeStubAssembler::AllocateRaw(Node* size_in_bytes, AllocationFlags flags,
1136                                      Node* top_address, Node* limit_address) {
1137   // TODO(jgruber, chromium:848672): TNodeify AllocateRaw.
1138   // TODO(jgruber, chromium:848672): Call FatalProcessOutOfMemory if this fails.
1139   {
1140     intptr_t constant_value;
1141     if (ToIntPtrConstant(size_in_bytes, constant_value)) {
1142       CHECK(Internals::IsValidSmi(constant_value));
1143       CHECK_GT(constant_value, 0);
1144     } else {
1145       CSA_CHECK(this,
1146                 IsValidPositiveSmi(UncheckedCast<IntPtrT>(size_in_bytes)));
1147     }
1148   }
1149 
1150   Node* top = Load(MachineType::Pointer(), top_address);
1151   Node* limit = Load(MachineType::Pointer(), limit_address);
1152 
1153   // If there's not enough space, call the runtime.
1154   VARIABLE(result, MachineRepresentation::kTagged);
1155   Label runtime_call(this, Label::kDeferred), no_runtime_call(this);
1156   Label merge_runtime(this, &result);
1157 
1158   bool needs_double_alignment = flags & kDoubleAlignment;
1159 
1160   if (flags & kAllowLargeObjectAllocation) {
1161     Label next(this);
1162     GotoIf(IsRegularHeapObjectSize(size_in_bytes), &next);
1163 
1164     Node* runtime_flags = SmiConstant(
1165         Smi::FromInt(AllocateDoubleAlignFlag::encode(needs_double_alignment) |
1166                      AllocateTargetSpace::encode(AllocationSpace::LO_SPACE)));
1167     Node* const runtime_result =
1168         CallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(),
1169                     SmiTag(size_in_bytes), runtime_flags);
1170     result.Bind(runtime_result);
1171     Goto(&merge_runtime);
1172 
1173     BIND(&next);
1174   }
1175 
1176   VARIABLE(adjusted_size, MachineType::PointerRepresentation(), size_in_bytes);
1177 
1178   if (needs_double_alignment) {
1179     Label not_aligned(this), done_alignment(this, &adjusted_size);
1180 
1181     Branch(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)), &not_aligned,
1182            &done_alignment);
1183 
1184     BIND(&not_aligned);
1185     Node* not_aligned_size = IntPtrAdd(size_in_bytes, IntPtrConstant(4));
1186     adjusted_size.Bind(not_aligned_size);
1187     Goto(&done_alignment);
1188 
1189     BIND(&done_alignment);
1190   }
1191 
1192   Node* new_top = IntPtrAdd(top, adjusted_size.value());
1193 
1194   Branch(UintPtrGreaterThanOrEqual(new_top, limit), &runtime_call,
1195          &no_runtime_call);
1196 
1197   BIND(&runtime_call);
1198   Node* runtime_result;
1199   if (flags & kPretenured) {
1200     Node* runtime_flags = SmiConstant(
1201         Smi::FromInt(AllocateDoubleAlignFlag::encode(needs_double_alignment) |
1202                      AllocateTargetSpace::encode(AllocationSpace::OLD_SPACE)));
1203     runtime_result =
1204         CallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(),
1205                     SmiTag(size_in_bytes), runtime_flags);
1206   } else {
1207     runtime_result = CallRuntime(Runtime::kAllocateInNewSpace,
1208                                  NoContextConstant(), SmiTag(size_in_bytes));
1209   }
1210   result.Bind(runtime_result);
1211   Goto(&merge_runtime);
1212 
1213   // When there is enough space, return `top' and bump it up.
1214   BIND(&no_runtime_call);
1215   Node* no_runtime_result = top;
1216   StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
1217                       new_top);
1218 
1219   VARIABLE(address, MachineType::PointerRepresentation(), no_runtime_result);
1220 
1221   if (needs_double_alignment) {
1222     Label needs_filler(this), done_filling(this, &address);
1223     Branch(IntPtrEqual(adjusted_size.value(), size_in_bytes), &done_filling,
1224            &needs_filler);
1225 
1226     BIND(&needs_filler);
1227     // Store a filler and increase the address by kPointerSize.
1228     StoreNoWriteBarrier(MachineRepresentation::kTagged, top,
1229                         LoadRoot(Heap::kOnePointerFillerMapRootIndex));
1230     address.Bind(IntPtrAdd(no_runtime_result, IntPtrConstant(4)));
1231 
1232     Goto(&done_filling);
1233 
1234     BIND(&done_filling);
1235   }
1236 
1237   no_runtime_result = BitcastWordToTagged(
1238       IntPtrAdd(address.value(), IntPtrConstant(kHeapObjectTag)));
1239 
1240   result.Bind(no_runtime_result);
1241   Goto(&merge_runtime);
1242 
1243   BIND(&merge_runtime);
1244   return result.value();
1245 }
1246 
1247 Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes,
1248                                               AllocationFlags flags,
1249                                               Node* top_address,
1250                                               Node* limit_address) {
1251   DCHECK_EQ(flags & kDoubleAlignment, 0);
1252   return AllocateRaw(size_in_bytes, flags, top_address, limit_address);
1253 }
1254 
1255 Node* CodeStubAssembler::AllocateRawDoubleAligned(Node* size_in_bytes,
1256                                                   AllocationFlags flags,
1257                                                   Node* top_address,
1258                                                   Node* limit_address) {
1259 #if defined(V8_HOST_ARCH_32_BIT)
1260   return AllocateRaw(size_in_bytes, flags | kDoubleAlignment, top_address,
1261                      limit_address);
1262 #elif defined(V8_HOST_ARCH_64_BIT)
1263   // Allocation on a 64-bit machine is naturally double-aligned.
1264   return AllocateRaw(size_in_bytes, flags & ~kDoubleAlignment, top_address,
1265                      limit_address);
1266 #else
1267 #error Architecture not supported
1268 #endif
1269 }
1270 
1271 Node* CodeStubAssembler::AllocateInNewSpace(Node* size_in_bytes,
1272                                             AllocationFlags flags) {
1273   DCHECK(flags == kNone || flags == kDoubleAlignment);
1274   CSA_ASSERT(this, IsRegularHeapObjectSize(size_in_bytes));
1275   return Allocate(size_in_bytes, flags);
1276 }
1277 
1278 Node* CodeStubAssembler::Allocate(Node* size_in_bytes, AllocationFlags flags) {
1279   Comment("Allocate");
1280   bool const new_space = !(flags & kPretenured);
1281   Node* top_address = ExternalConstant(
1282       new_space
1283           ? ExternalReference::new_space_allocation_top_address(isolate())
1284           : ExternalReference::old_space_allocation_top_address(isolate()));
1285   DCHECK_EQ(kPointerSize,
1286             ExternalReference::new_space_allocation_limit_address(isolate())
1287                     .address() -
1288                 ExternalReference::new_space_allocation_top_address(isolate())
1289                     .address());
1290   DCHECK_EQ(kPointerSize,
1291             ExternalReference::old_space_allocation_limit_address(isolate())
1292                     .address() -
1293                 ExternalReference::old_space_allocation_top_address(isolate())
1294                     .address());
1295   Node* limit_address = IntPtrAdd(top_address, IntPtrConstant(kPointerSize));
1296 
1297   if (flags & kDoubleAlignment) {
1298     return AllocateRawDoubleAligned(size_in_bytes, flags, top_address,
1299                                     limit_address);
1300   } else {
1301     return AllocateRawUnaligned(size_in_bytes, flags, top_address,
1302                                 limit_address);
1303   }
1304 }
1305 
1306 Node* CodeStubAssembler::AllocateInNewSpace(int size_in_bytes,
1307                                             AllocationFlags flags) {
1308   CHECK(flags == kNone || flags == kDoubleAlignment);
1309   DCHECK_LE(size_in_bytes, kMaxRegularHeapObjectSize);
1310   return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
1311 }
1312 
Allocate(int size_in_bytes,AllocationFlags flags)1313 Node* CodeStubAssembler::Allocate(int size_in_bytes, AllocationFlags flags) {
1314   return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
1315 }
1316 
InnerAllocate(Node * previous,Node * offset)1317 Node* CodeStubAssembler::InnerAllocate(Node* previous, Node* offset) {
1318   return BitcastWordToTagged(IntPtrAdd(BitcastTaggedToWord(previous), offset));
1319 }
1320 
InnerAllocate(Node * previous,int offset)1321 Node* CodeStubAssembler::InnerAllocate(Node* previous, int offset) {
1322   return InnerAllocate(previous, IntPtrConstant(offset));
1323 }
1324 
IsRegularHeapObjectSize(Node * size)1325 Node* CodeStubAssembler::IsRegularHeapObjectSize(Node* size) {
1326   return UintPtrLessThanOrEqual(size,
1327                                 IntPtrConstant(kMaxRegularHeapObjectSize));
1328 }
1329 
BranchIfToBooleanIsTrue(Node * value,Label * if_true,Label * if_false)1330 void CodeStubAssembler::BranchIfToBooleanIsTrue(Node* value, Label* if_true,
1331                                                 Label* if_false) {
1332   Label if_smi(this), if_notsmi(this), if_heapnumber(this, Label::kDeferred),
1333       if_bigint(this, Label::kDeferred);
1334   // Rule out false {value}.
1335   GotoIf(WordEqual(value, FalseConstant()), if_false);
1336 
1337   // Check if {value} is a Smi or a HeapObject.
1338   Branch(TaggedIsSmi(value), &if_smi, &if_notsmi);
1339 
1340   BIND(&if_smi);
1341   {
1342     // The {value} is a Smi, only need to check against zero.
1343     BranchIfSmiEqual(CAST(value), SmiConstant(0), if_false, if_true);
1344   }
1345 
1346   BIND(&if_notsmi);
1347   {
1348     // Check if {value} is the empty string.
1349     GotoIf(IsEmptyString(value), if_false);
1350 
1351     // The {value} is a HeapObject, load its map.
1352     Node* value_map = LoadMap(value);
1353 
1354     // Only null, undefined and document.all have the undetectable bit set,
1355     // so we can return false immediately when that bit is set.
1356     GotoIf(IsUndetectableMap(value_map), if_false);
1357 
1358     // We still need to handle numbers specially, but all other {value}s
1359     // that make it here yield true.
1360     GotoIf(IsHeapNumberMap(value_map), &if_heapnumber);
1361     Branch(IsBigInt(value), &if_bigint, if_true);
1362 
1363     BIND(&if_heapnumber);
1364     {
1365       // Load the floating point value of {value}.
1366       Node* value_value = LoadObjectField(value, HeapNumber::kValueOffset,
1367                                           MachineType::Float64());
1368 
1369       // Check if the floating point {value} is neither 0.0, -0.0 nor NaN.
1370       Branch(Float64LessThan(Float64Constant(0.0), Float64Abs(value_value)),
1371              if_true, if_false);
1372     }
1373 
1374     BIND(&if_bigint);
1375     {
1376       Node* result =
1377           CallRuntime(Runtime::kBigIntToBoolean, NoContextConstant(), value);
1378       CSA_ASSERT(this, IsBoolean(result));
1379       Branch(WordEqual(result, TrueConstant()), if_true, if_false);
1380     }
1381   }
1382 }
1383 
LoadFromFrame(int offset,MachineType rep)1384 Node* CodeStubAssembler::LoadFromFrame(int offset, MachineType rep) {
1385   Node* frame_pointer = LoadFramePointer();
1386   return Load(rep, frame_pointer, IntPtrConstant(offset));
1387 }
1388 
LoadFromParentFrame(int offset,MachineType rep)1389 Node* CodeStubAssembler::LoadFromParentFrame(int offset, MachineType rep) {
1390   Node* frame_pointer = LoadParentFramePointer();
1391   return Load(rep, frame_pointer, IntPtrConstant(offset));
1392 }
1393 
LoadTargetFromFrame()1394 TNode<JSFunction> CodeStubAssembler::LoadTargetFromFrame() {
1395   DCHECK(IsJSFunctionCall());
1396   return CAST(LoadFromFrame(StandardFrameConstants::kFunctionOffset,
1397                             MachineType::TaggedPointer()));
1398 }
1399 
LoadBufferObject(Node * buffer,int offset,MachineType rep)1400 Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset,
1401                                           MachineType rep) {
1402   return Load(rep, buffer, IntPtrConstant(offset));
1403 }
1404 
LoadObjectField(SloppyTNode<HeapObject> object,int offset,MachineType rep)1405 Node* CodeStubAssembler::LoadObjectField(SloppyTNode<HeapObject> object,
1406                                          int offset, MachineType rep) {
1407   CSA_ASSERT(this, IsStrongHeapObject(object));
1408   return Load(rep, object, IntPtrConstant(offset - kHeapObjectTag));
1409 }
1410 
LoadObjectField(SloppyTNode<HeapObject> object,SloppyTNode<IntPtrT> offset,MachineType rep)1411 Node* CodeStubAssembler::LoadObjectField(SloppyTNode<HeapObject> object,
1412                                          SloppyTNode<IntPtrT> offset,
1413                                          MachineType rep) {
1414   CSA_ASSERT(this, IsStrongHeapObject(object));
1415   return Load(rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)));
1416 }
1417 
LoadAndUntagObjectField(SloppyTNode<HeapObject> object,int offset)1418 TNode<IntPtrT> CodeStubAssembler::LoadAndUntagObjectField(
1419     SloppyTNode<HeapObject> object, int offset) {
1420   if (SmiValuesAre32Bits()) {
1421 #if V8_TARGET_LITTLE_ENDIAN
1422     offset += kPointerSize / 2;
1423 #endif
1424     return ChangeInt32ToIntPtr(
1425         LoadObjectField(object, offset, MachineType::Int32()));
1426   } else {
1427     return SmiToIntPtr(
1428         LoadObjectField(object, offset, MachineType::AnyTagged()));
1429   }
1430 }
1431 
LoadAndUntagToWord32ObjectField(Node * object,int offset)1432 TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32ObjectField(Node* object,
1433                                                                  int offset) {
1434   if (SmiValuesAre32Bits()) {
1435 #if V8_TARGET_LITTLE_ENDIAN
1436     offset += kPointerSize / 2;
1437 #endif
1438     return UncheckedCast<Int32T>(
1439         LoadObjectField(object, offset, MachineType::Int32()));
1440   } else {
1441     return SmiToInt32(
1442         LoadObjectField(object, offset, MachineType::AnyTagged()));
1443   }
1444 }
1445 
LoadAndUntagSmi(Node * base,int index)1446 TNode<IntPtrT> CodeStubAssembler::LoadAndUntagSmi(Node* base, int index) {
1447   if (SmiValuesAre32Bits()) {
1448 #if V8_TARGET_LITTLE_ENDIAN
1449     index += kPointerSize / 2;
1450 #endif
1451     return ChangeInt32ToIntPtr(
1452         Load(MachineType::Int32(), base, IntPtrConstant(index)));
1453   } else {
1454     return SmiToIntPtr(
1455         Load(MachineType::AnyTagged(), base, IntPtrConstant(index)));
1456   }
1457 }
1458 
LoadAndUntagToWord32Root(Heap::RootListIndex root_index)1459 TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32Root(
1460     Heap::RootListIndex root_index) {
1461   Node* roots_array_start =
1462       ExternalConstant(ExternalReference::roots_array_start(isolate()));
1463   int index = root_index * kPointerSize;
1464   if (SmiValuesAre32Bits()) {
1465 #if V8_TARGET_LITTLE_ENDIAN
1466     index += kPointerSize / 2;
1467 #endif
1468     return UncheckedCast<Int32T>(
1469         Load(MachineType::Int32(), roots_array_start, IntPtrConstant(index)));
1470   } else {
1471     return SmiToInt32(Load(MachineType::AnyTagged(), roots_array_start,
1472                            IntPtrConstant(index)));
1473   }
1474 }
1475 
StoreAndTagSmi(Node * base,int offset,Node * value)1476 Node* CodeStubAssembler::StoreAndTagSmi(Node* base, int offset, Node* value) {
1477   if (SmiValuesAre32Bits()) {
1478     int zero_offset = offset + kPointerSize / 2;
1479     int payload_offset = offset;
1480 #if V8_TARGET_LITTLE_ENDIAN
1481     std::swap(zero_offset, payload_offset);
1482 #endif
1483     StoreNoWriteBarrier(MachineRepresentation::kWord32, base,
1484                         IntPtrConstant(zero_offset), Int32Constant(0));
1485     return StoreNoWriteBarrier(MachineRepresentation::kWord32, base,
1486                                IntPtrConstant(payload_offset),
1487                                TruncateInt64ToInt32(value));
1488   } else {
1489     return StoreNoWriteBarrier(MachineRepresentation::kTaggedSigned, base,
1490                                IntPtrConstant(offset), SmiTag(value));
1491   }
1492 }
1493 
LoadHeapNumberValue(SloppyTNode<HeapNumber> object)1494 TNode<Float64T> CodeStubAssembler::LoadHeapNumberValue(
1495     SloppyTNode<HeapNumber> object) {
1496   return TNode<Float64T>::UncheckedCast(LoadObjectField(
1497       object, HeapNumber::kValueOffset, MachineType::Float64()));
1498 }
1499 
LoadMap(SloppyTNode<HeapObject> object)1500 TNode<Map> CodeStubAssembler::LoadMap(SloppyTNode<HeapObject> object) {
1501   return UncheckedCast<Map>(LoadObjectField(object, HeapObject::kMapOffset));
1502 }
1503 
LoadInstanceType(SloppyTNode<HeapObject> object)1504 TNode<Int32T> CodeStubAssembler::LoadInstanceType(
1505     SloppyTNode<HeapObject> object) {
1506   return LoadMapInstanceType(LoadMap(object));
1507 }
1508 
HasInstanceType(SloppyTNode<HeapObject> object,InstanceType instance_type)1509 TNode<BoolT> CodeStubAssembler::HasInstanceType(SloppyTNode<HeapObject> object,
1510                                                 InstanceType instance_type) {
1511   return InstanceTypeEqual(LoadInstanceType(object), instance_type);
1512 }
1513 
DoesntHaveInstanceType(SloppyTNode<HeapObject> object,InstanceType instance_type)1514 TNode<BoolT> CodeStubAssembler::DoesntHaveInstanceType(
1515     SloppyTNode<HeapObject> object, InstanceType instance_type) {
1516   return Word32NotEqual(LoadInstanceType(object), Int32Constant(instance_type));
1517 }
1518 
TaggedDoesntHaveInstanceType(SloppyTNode<HeapObject> any_tagged,InstanceType type)1519 TNode<BoolT> CodeStubAssembler::TaggedDoesntHaveInstanceType(
1520     SloppyTNode<HeapObject> any_tagged, InstanceType type) {
1521   /* return Phi <TaggedIsSmi(val), DoesntHaveInstanceType(val, type)> */
1522   TNode<BoolT> tagged_is_smi = TaggedIsSmi(any_tagged);
1523   return Select<BoolT>(
1524       tagged_is_smi, [=]() { return tagged_is_smi; },
1525       [=]() { return DoesntHaveInstanceType(any_tagged, type); });
1526 }
1527 
LoadFastProperties(SloppyTNode<JSObject> object)1528 TNode<HeapObject> CodeStubAssembler::LoadFastProperties(
1529     SloppyTNode<JSObject> object) {
1530   CSA_SLOW_ASSERT(this, Word32BinaryNot(IsDictionaryMap(LoadMap(object))));
1531   TNode<Object> properties =
1532       LoadObjectField(object, JSObject::kPropertiesOrHashOffset);
1533   return Select<HeapObject>(TaggedIsSmi(properties),
1534                             [=] { return EmptyFixedArrayConstant(); },
1535                             [=] { return CAST(properties); });
1536 }
1537 
LoadSlowProperties(SloppyTNode<JSObject> object)1538 TNode<HeapObject> CodeStubAssembler::LoadSlowProperties(
1539     SloppyTNode<JSObject> object) {
1540   CSA_SLOW_ASSERT(this, IsDictionaryMap(LoadMap(object)));
1541   TNode<Object> properties =
1542       LoadObjectField(object, JSObject::kPropertiesOrHashOffset);
1543   return Select<HeapObject>(TaggedIsSmi(properties),
1544                             [=] { return EmptyPropertyDictionaryConstant(); },
1545                             [=] { return CAST(properties); });
1546 }
1547 
LoadElements(SloppyTNode<JSObject> object)1548 TNode<FixedArrayBase> CodeStubAssembler::LoadElements(
1549     SloppyTNode<JSObject> object) {
1550   return CAST(LoadObjectField(object, JSObject::kElementsOffset));
1551 }
1552 
LoadJSArrayLength(SloppyTNode<JSArray> array)1553 TNode<Number> CodeStubAssembler::LoadJSArrayLength(SloppyTNode<JSArray> array) {
1554   CSA_ASSERT(this, IsJSArray(array));
1555   return CAST(LoadObjectField(array, JSArray::kLengthOffset));
1556 }
1557 
LoadFastJSArrayLength(SloppyTNode<JSArray> array)1558 TNode<Smi> CodeStubAssembler::LoadFastJSArrayLength(
1559     SloppyTNode<JSArray> array) {
1560   TNode<Object> length = LoadJSArrayLength(array);
1561   CSA_ASSERT(this, IsFastElementsKind(LoadElementsKind(array)));
1562   // JSArray length is always a positive Smi for fast arrays.
1563   CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
1564   return UncheckedCast<Smi>(length);
1565 }
1566 
LoadFixedArrayBaseLength(SloppyTNode<FixedArrayBase> array)1567 TNode<Smi> CodeStubAssembler::LoadFixedArrayBaseLength(
1568     SloppyTNode<FixedArrayBase> array) {
1569   CSA_SLOW_ASSERT(this, IsNotWeakFixedArraySubclass(array));
1570   return CAST(LoadObjectField(array, FixedArrayBase::kLengthOffset));
1571 }
1572 
LoadAndUntagFixedArrayBaseLength(SloppyTNode<FixedArrayBase> array)1573 TNode<IntPtrT> CodeStubAssembler::LoadAndUntagFixedArrayBaseLength(
1574     SloppyTNode<FixedArrayBase> array) {
1575   return LoadAndUntagObjectField(array, FixedArrayBase::kLengthOffset);
1576 }
1577 
LoadFeedbackVectorLength(TNode<FeedbackVector> vector)1578 TNode<IntPtrT> CodeStubAssembler::LoadFeedbackVectorLength(
1579     TNode<FeedbackVector> vector) {
1580   return ChangeInt32ToIntPtr(
1581       LoadObjectField<Int32T>(vector, FeedbackVector::kLengthOffset));
1582 }
1583 
LoadWeakFixedArrayLength(TNode<WeakFixedArray> array)1584 TNode<Smi> CodeStubAssembler::LoadWeakFixedArrayLength(
1585     TNode<WeakFixedArray> array) {
1586   return CAST(LoadObjectField(array, WeakFixedArray::kLengthOffset));
1587 }
1588 
LoadAndUntagWeakFixedArrayLength(SloppyTNode<WeakFixedArray> array)1589 TNode<IntPtrT> CodeStubAssembler::LoadAndUntagWeakFixedArrayLength(
1590     SloppyTNode<WeakFixedArray> array) {
1591   return LoadAndUntagObjectField(array, WeakFixedArray::kLengthOffset);
1592 }
1593 
LoadTypedArrayLength(TNode<JSTypedArray> typed_array)1594 TNode<Smi> CodeStubAssembler::LoadTypedArrayLength(
1595     TNode<JSTypedArray> typed_array) {
1596   return CAST(LoadObjectField(typed_array, JSTypedArray::kLengthOffset));
1597 }
1598 
LoadMapBitField(SloppyTNode<Map> map)1599 TNode<Int32T> CodeStubAssembler::LoadMapBitField(SloppyTNode<Map> map) {
1600   CSA_SLOW_ASSERT(this, IsMap(map));
1601   return UncheckedCast<Int32T>(
1602       LoadObjectField(map, Map::kBitFieldOffset, MachineType::Uint8()));
1603 }
1604 
LoadMapBitField2(SloppyTNode<Map> map)1605 TNode<Int32T> CodeStubAssembler::LoadMapBitField2(SloppyTNode<Map> map) {
1606   CSA_SLOW_ASSERT(this, IsMap(map));
1607   return UncheckedCast<Int32T>(
1608       LoadObjectField(map, Map::kBitField2Offset, MachineType::Uint8()));
1609 }
1610 
LoadMapBitField3(SloppyTNode<Map> map)1611 TNode<Uint32T> CodeStubAssembler::LoadMapBitField3(SloppyTNode<Map> map) {
1612   CSA_SLOW_ASSERT(this, IsMap(map));
1613   return UncheckedCast<Uint32T>(
1614       LoadObjectField(map, Map::kBitField3Offset, MachineType::Uint32()));
1615 }
1616 
LoadMapInstanceType(SloppyTNode<Map> map)1617 TNode<Int32T> CodeStubAssembler::LoadMapInstanceType(SloppyTNode<Map> map) {
1618   return UncheckedCast<Int32T>(
1619       LoadObjectField(map, Map::kInstanceTypeOffset, MachineType::Uint16()));
1620 }
1621 
LoadMapElementsKind(SloppyTNode<Map> map)1622 TNode<Int32T> CodeStubAssembler::LoadMapElementsKind(SloppyTNode<Map> map) {
1623   CSA_SLOW_ASSERT(this, IsMap(map));
1624   Node* bit_field2 = LoadMapBitField2(map);
1625   return Signed(DecodeWord32<Map::ElementsKindBits>(bit_field2));
1626 }
1627 
LoadElementsKind(SloppyTNode<HeapObject> object)1628 TNode<Int32T> CodeStubAssembler::LoadElementsKind(
1629     SloppyTNode<HeapObject> object) {
1630   return LoadMapElementsKind(LoadMap(object));
1631 }
1632 
LoadMapDescriptors(SloppyTNode<Map> map)1633 TNode<DescriptorArray> CodeStubAssembler::LoadMapDescriptors(
1634     SloppyTNode<Map> map) {
1635   CSA_SLOW_ASSERT(this, IsMap(map));
1636   return CAST(LoadObjectField(map, Map::kDescriptorsOffset));
1637 }
1638 
LoadMapPrototype(SloppyTNode<Map> map)1639 TNode<HeapObject> CodeStubAssembler::LoadMapPrototype(SloppyTNode<Map> map) {
1640   CSA_SLOW_ASSERT(this, IsMap(map));
1641   return CAST(LoadObjectField(map, Map::kPrototypeOffset));
1642 }
1643 
LoadMapPrototypeInfo(SloppyTNode<Map> map,Label * if_no_proto_info)1644 TNode<PrototypeInfo> CodeStubAssembler::LoadMapPrototypeInfo(
1645     SloppyTNode<Map> map, Label* if_no_proto_info) {
1646   Label if_strong_heap_object(this);
1647   CSA_ASSERT(this, IsMap(map));
1648   TNode<MaybeObject> maybe_prototype_info =
1649       LoadMaybeWeakObjectField(map, Map::kTransitionsOrPrototypeInfoOffset);
1650   TVARIABLE(Object, prototype_info);
1651   DispatchMaybeObject(maybe_prototype_info, if_no_proto_info, if_no_proto_info,
1652                       if_no_proto_info, &if_strong_heap_object,
1653                       &prototype_info);
1654 
1655   BIND(&if_strong_heap_object);
1656   GotoIfNot(WordEqual(LoadMap(CAST(prototype_info.value())),
1657                       LoadRoot(Heap::kPrototypeInfoMapRootIndex)),
1658             if_no_proto_info);
1659   return CAST(prototype_info.value());
1660 }
1661 
LoadMapInstanceSizeInWords(SloppyTNode<Map> map)1662 TNode<IntPtrT> CodeStubAssembler::LoadMapInstanceSizeInWords(
1663     SloppyTNode<Map> map) {
1664   CSA_SLOW_ASSERT(this, IsMap(map));
1665   return ChangeInt32ToIntPtr(LoadObjectField(
1666       map, Map::kInstanceSizeInWordsOffset, MachineType::Uint8()));
1667 }
1668 
LoadMapInobjectPropertiesStartInWords(SloppyTNode<Map> map)1669 TNode<IntPtrT> CodeStubAssembler::LoadMapInobjectPropertiesStartInWords(
1670     SloppyTNode<Map> map) {
1671   CSA_SLOW_ASSERT(this, IsMap(map));
1672   // See Map::GetInObjectPropertiesStartInWords() for details.
1673   CSA_ASSERT(this, IsJSObjectMap(map));
1674   return ChangeInt32ToIntPtr(LoadObjectField(
1675       map, Map::kInObjectPropertiesStartOrConstructorFunctionIndexOffset,
1676       MachineType::Uint8()));
1677 }
1678 
LoadMapConstructorFunctionIndex(SloppyTNode<Map> map)1679 TNode<IntPtrT> CodeStubAssembler::LoadMapConstructorFunctionIndex(
1680     SloppyTNode<Map> map) {
1681   CSA_SLOW_ASSERT(this, IsMap(map));
1682   // See Map::GetConstructorFunctionIndex() for details.
1683   CSA_ASSERT(this, IsPrimitiveInstanceType(LoadMapInstanceType(map)));
1684   return ChangeInt32ToIntPtr(LoadObjectField(
1685       map, Map::kInObjectPropertiesStartOrConstructorFunctionIndexOffset,
1686       MachineType::Uint8()));
1687 }
1688 
LoadMapConstructor(SloppyTNode<Map> map)1689 TNode<Object> CodeStubAssembler::LoadMapConstructor(SloppyTNode<Map> map) {
1690   CSA_SLOW_ASSERT(this, IsMap(map));
1691   TVARIABLE(Object, result,
1692             LoadObjectField(map, Map::kConstructorOrBackPointerOffset));
1693 
1694   Label done(this), loop(this, &result);
1695   Goto(&loop);
1696   BIND(&loop);
1697   {
1698     GotoIf(TaggedIsSmi(result.value()), &done);
1699     Node* is_map_type =
1700         InstanceTypeEqual(LoadInstanceType(CAST(result.value())), MAP_TYPE);
1701     GotoIfNot(is_map_type, &done);
1702     result = LoadObjectField(CAST(result.value()),
1703                              Map::kConstructorOrBackPointerOffset);
1704     Goto(&loop);
1705   }
1706   BIND(&done);
1707   return result.value();
1708 }
1709 
LoadMapEnumLength(SloppyTNode<Map> map)1710 Node* CodeStubAssembler::LoadMapEnumLength(SloppyTNode<Map> map) {
1711   CSA_SLOW_ASSERT(this, IsMap(map));
1712   Node* bit_field3 = LoadMapBitField3(map);
1713   return DecodeWordFromWord32<Map::EnumLengthBits>(bit_field3);
1714 }
1715 
LoadMapBackPointer(SloppyTNode<Map> map)1716 TNode<Object> CodeStubAssembler::LoadMapBackPointer(SloppyTNode<Map> map) {
1717   TNode<HeapObject> object =
1718       CAST(LoadObjectField(map, Map::kConstructorOrBackPointerOffset));
1719   return Select<Object>(IsMap(object), [=] { return object; },
1720                         [=] { return UndefinedConstant(); });
1721 }
1722 
LoadJSReceiverIdentityHash(SloppyTNode<Object> receiver,Label * if_no_hash)1723 TNode<IntPtrT> CodeStubAssembler::LoadJSReceiverIdentityHash(
1724     SloppyTNode<Object> receiver, Label* if_no_hash) {
1725   TVARIABLE(IntPtrT, var_hash);
1726   Label done(this), if_smi(this), if_property_array(this),
1727       if_property_dictionary(this), if_fixed_array(this);
1728 
1729   TNode<Object> properties_or_hash =
1730       LoadObjectField(TNode<HeapObject>::UncheckedCast(receiver),
1731                       JSReceiver::kPropertiesOrHashOffset);
1732   GotoIf(TaggedIsSmi(properties_or_hash), &if_smi);
1733 
1734   TNode<HeapObject> properties =
1735       TNode<HeapObject>::UncheckedCast(properties_or_hash);
1736   TNode<Int32T> properties_instance_type = LoadInstanceType(properties);
1737 
1738   GotoIf(InstanceTypeEqual(properties_instance_type, PROPERTY_ARRAY_TYPE),
1739          &if_property_array);
1740   Branch(InstanceTypeEqual(properties_instance_type, NAME_DICTIONARY_TYPE),
1741          &if_property_dictionary, &if_fixed_array);
1742 
1743   BIND(&if_fixed_array);
1744   {
1745     var_hash = IntPtrConstant(PropertyArray::kNoHashSentinel);
1746     Goto(&done);
1747   }
1748 
1749   BIND(&if_smi);
1750   {
1751     var_hash = SmiUntag(TNode<Smi>::UncheckedCast(properties_or_hash));
1752     Goto(&done);
1753   }
1754 
1755   BIND(&if_property_array);
1756   {
1757     TNode<IntPtrT> length_and_hash = LoadAndUntagObjectField(
1758         properties, PropertyArray::kLengthAndHashOffset);
1759     var_hash = TNode<IntPtrT>::UncheckedCast(
1760         DecodeWord<PropertyArray::HashField>(length_and_hash));
1761     Goto(&done);
1762   }
1763 
1764   BIND(&if_property_dictionary);
1765   {
1766     var_hash = SmiUntag(CAST(LoadFixedArrayElement(
1767         CAST(properties), NameDictionary::kObjectHashIndex)));
1768     Goto(&done);
1769   }
1770 
1771   BIND(&done);
1772   if (if_no_hash != nullptr) {
1773     GotoIf(IntPtrEqual(var_hash.value(),
1774                        IntPtrConstant(PropertyArray::kNoHashSentinel)),
1775            if_no_hash);
1776   }
1777   return var_hash.value();
1778 }
1779 
LoadNameHashField(SloppyTNode<Name> name)1780 TNode<Uint32T> CodeStubAssembler::LoadNameHashField(SloppyTNode<Name> name) {
1781   CSA_ASSERT(this, IsName(name));
1782   return LoadObjectField<Uint32T>(name, Name::kHashFieldOffset);
1783 }
1784 
LoadNameHash(SloppyTNode<Name> name,Label * if_hash_not_computed)1785 TNode<Uint32T> CodeStubAssembler::LoadNameHash(SloppyTNode<Name> name,
1786                                                Label* if_hash_not_computed) {
1787   TNode<Uint32T> hash_field = LoadNameHashField(name);
1788   if (if_hash_not_computed != nullptr) {
1789     GotoIf(IsSetWord32(hash_field, Name::kHashNotComputedMask),
1790            if_hash_not_computed);
1791   }
1792   return Unsigned(Word32Shr(hash_field, Int32Constant(Name::kHashShift)));
1793 }
1794 
LoadStringLengthAsWord(SloppyTNode<String> object)1795 TNode<IntPtrT> CodeStubAssembler::LoadStringLengthAsWord(
1796     SloppyTNode<String> object) {
1797   return SmiUntag(LoadStringLengthAsSmi(object));
1798 }
1799 
LoadStringLengthAsSmi(SloppyTNode<String> object)1800 TNode<Smi> CodeStubAssembler::LoadStringLengthAsSmi(
1801     SloppyTNode<String> object) {
1802   CSA_ASSERT(this, IsString(object));
1803   return CAST(LoadObjectField(object, String::kLengthOffset,
1804                               MachineType::TaggedPointer()));
1805 }
1806 
PointerToSeqStringData(Node * seq_string)1807 Node* CodeStubAssembler::PointerToSeqStringData(Node* seq_string) {
1808   CSA_ASSERT(this, IsString(seq_string));
1809   CSA_ASSERT(this,
1810              IsSequentialStringInstanceType(LoadInstanceType(seq_string)));
1811   STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
1812   return IntPtrAdd(
1813       BitcastTaggedToWord(seq_string),
1814       IntPtrConstant(SeqOneByteString::kHeaderSize - kHeapObjectTag));
1815 }
1816 
LoadJSValueValue(Node * object)1817 Node* CodeStubAssembler::LoadJSValueValue(Node* object) {
1818   CSA_ASSERT(this, IsJSValue(object));
1819   return LoadObjectField(object, JSValue::kValueOffset);
1820 }
1821 
DispatchMaybeObject(TNode<MaybeObject> maybe_object,Label * if_smi,Label * if_cleared,Label * if_weak,Label * if_strong,TVariable<Object> * extracted)1822 void CodeStubAssembler::DispatchMaybeObject(TNode<MaybeObject> maybe_object,
1823                                             Label* if_smi, Label* if_cleared,
1824                                             Label* if_weak, Label* if_strong,
1825                                             TVariable<Object>* extracted) {
1826   Label inner_if_smi(this), inner_if_strong(this);
1827 
1828   GotoIf(TaggedIsSmi(maybe_object), &inner_if_smi);
1829 
1830   GotoIf(WordEqual(BitcastMaybeObjectToWord(maybe_object),
1831                    IntPtrConstant(reinterpret_cast<intptr_t>(
1832                        HeapObjectReference::ClearedValue()))),
1833          if_cleared);
1834 
1835   GotoIf(WordEqual(WordAnd(BitcastMaybeObjectToWord(maybe_object),
1836                            IntPtrConstant(kHeapObjectTagMask)),
1837                    IntPtrConstant(kHeapObjectTag)),
1838          &inner_if_strong);
1839 
1840   *extracted =
1841       BitcastWordToTagged(WordAnd(BitcastMaybeObjectToWord(maybe_object),
1842                                   IntPtrConstant(~kWeakHeapObjectMask)));
1843   Goto(if_weak);
1844 
1845   BIND(&inner_if_smi);
1846   *extracted = CAST(maybe_object);
1847   Goto(if_smi);
1848 
1849   BIND(&inner_if_strong);
1850   *extracted = CAST(maybe_object);
1851   Goto(if_strong);
1852 }
1853 
IsStrongHeapObject(TNode<MaybeObject> value)1854 TNode<BoolT> CodeStubAssembler::IsStrongHeapObject(TNode<MaybeObject> value) {
1855   return WordEqual(WordAnd(BitcastMaybeObjectToWord(value),
1856                            IntPtrConstant(kHeapObjectTagMask)),
1857                    IntPtrConstant(kHeapObjectTag));
1858 }
1859 
ToStrongHeapObject(TNode<MaybeObject> value,Label * if_not_strong)1860 TNode<HeapObject> CodeStubAssembler::ToStrongHeapObject(
1861     TNode<MaybeObject> value, Label* if_not_strong) {
1862   GotoIfNot(IsStrongHeapObject(value), if_not_strong);
1863   return CAST(value);
1864 }
1865 
IsWeakOrClearedHeapObject(TNode<MaybeObject> value)1866 TNode<BoolT> CodeStubAssembler::IsWeakOrClearedHeapObject(
1867     TNode<MaybeObject> value) {
1868   return WordEqual(WordAnd(BitcastMaybeObjectToWord(value),
1869                            IntPtrConstant(kHeapObjectTagMask)),
1870                    IntPtrConstant(kWeakHeapObjectTag));
1871 }
1872 
IsClearedWeakHeapObject(TNode<MaybeObject> value)1873 TNode<BoolT> CodeStubAssembler::IsClearedWeakHeapObject(
1874     TNode<MaybeObject> value) {
1875   return WordEqual(BitcastMaybeObjectToWord(value),
1876                    IntPtrConstant(kClearedWeakHeapObject));
1877 }
1878 
IsNotClearedWeakHeapObject(TNode<MaybeObject> value)1879 TNode<BoolT> CodeStubAssembler::IsNotClearedWeakHeapObject(
1880     TNode<MaybeObject> value) {
1881   return WordNotEqual(BitcastMaybeObjectToWord(value),
1882                       IntPtrConstant(kClearedWeakHeapObject));
1883 }
1884 
ToWeakHeapObject(TNode<MaybeObject> value)1885 TNode<HeapObject> CodeStubAssembler::ToWeakHeapObject(
1886     TNode<MaybeObject> value) {
1887   CSA_ASSERT(this, IsWeakOrClearedHeapObject(value));
1888   CSA_ASSERT(this, IsNotClearedWeakHeapObject(value));
1889   return UncheckedCast<HeapObject>(BitcastWordToTagged(WordAnd(
1890       BitcastMaybeObjectToWord(value), IntPtrConstant(~kWeakHeapObjectMask))));
1891 }
1892 
ToWeakHeapObject(TNode<MaybeObject> value,Label * if_cleared)1893 TNode<HeapObject> CodeStubAssembler::ToWeakHeapObject(TNode<MaybeObject> value,
1894                                                       Label* if_cleared) {
1895   GotoIf(IsClearedWeakHeapObject(value), if_cleared);
1896   return ToWeakHeapObject(value);
1897 }
1898 
IsWeakReferenceTo(TNode<MaybeObject> object,TNode<Object> value)1899 TNode<BoolT> CodeStubAssembler::IsWeakReferenceTo(TNode<MaybeObject> object,
1900                                                   TNode<Object> value) {
1901   return WordEqual(WordAnd(BitcastMaybeObjectToWord(object),
1902                            IntPtrConstant(~kWeakHeapObjectMask)),
1903                    BitcastTaggedToWord(value));
1904 }
1905 
IsStrongReferenceTo(TNode<MaybeObject> object,TNode<Object> value)1906 TNode<BoolT> CodeStubAssembler::IsStrongReferenceTo(TNode<MaybeObject> object,
1907                                                     TNode<Object> value) {
1908   return WordEqual(BitcastMaybeObjectToWord(object),
1909                    BitcastTaggedToWord(value));
1910 }
1911 
IsNotWeakReferenceTo(TNode<MaybeObject> object,TNode<Object> value)1912 TNode<BoolT> CodeStubAssembler::IsNotWeakReferenceTo(TNode<MaybeObject> object,
1913                                                      TNode<Object> value) {
1914   return WordNotEqual(WordAnd(BitcastMaybeObjectToWord(object),
1915                               IntPtrConstant(~kWeakHeapObjectMask)),
1916                       BitcastTaggedToWord(value));
1917 }
1918 
MakeWeak(TNode<HeapObject> value)1919 TNode<MaybeObject> CodeStubAssembler::MakeWeak(TNode<HeapObject> value) {
1920   return ReinterpretCast<MaybeObject>(BitcastWordToTagged(
1921       WordOr(BitcastTaggedToWord(value), IntPtrConstant(kWeakHeapObjectTag))));
1922 }
1923 
LoadArrayElement(SloppyTNode<HeapObject> array,int array_header_size,Node * index_node,int additional_offset,ParameterMode parameter_mode,LoadSensitivity needs_poisoning)1924 TNode<MaybeObject> CodeStubAssembler::LoadArrayElement(
1925     SloppyTNode<HeapObject> array, int array_header_size, Node* index_node,
1926     int additional_offset, ParameterMode parameter_mode,
1927     LoadSensitivity needs_poisoning) {
1928   CSA_ASSERT(this, IntPtrGreaterThanOrEqual(
1929                        ParameterToIntPtr(index_node, parameter_mode),
1930                        IntPtrConstant(0)));
1931   DCHECK_EQ(additional_offset % kPointerSize, 0);
1932   int32_t header_size = array_header_size + additional_offset - kHeapObjectTag;
1933   TNode<IntPtrT> offset = ElementOffsetFromIndex(index_node, HOLEY_ELEMENTS,
1934                                                  parameter_mode, header_size);
1935   STATIC_ASSERT(FixedArrayBase::kLengthOffset == WeakFixedArray::kLengthOffset);
1936   STATIC_ASSERT(FixedArrayBase::kLengthOffset ==
1937                 PropertyArray::kLengthAndHashOffset);
1938   // Check that index_node + additional_offset <= object.length.
1939   // TODO(cbruni): Use proper LoadXXLength helpers
1940   CSA_ASSERT(
1941       this,
1942       IsOffsetInBounds(
1943           offset,
1944           Select<IntPtrT>(
1945               IsPropertyArray(array),
1946               [=] {
1947                 TNode<IntPtrT> length_and_hash = LoadAndUntagObjectField(
1948                     array, PropertyArray::kLengthAndHashOffset);
1949                 return TNode<IntPtrT>::UncheckedCast(
1950                     DecodeWord<PropertyArray::LengthField>(length_and_hash));
1951               },
1952               [=] {
1953                 return LoadAndUntagObjectField(array,
1954                                                FixedArrayBase::kLengthOffset);
1955               }),
1956           FixedArray::kHeaderSize));
1957   return UncheckedCast<MaybeObject>(
1958       Load(MachineType::AnyTagged(), array, offset, needs_poisoning));
1959 }
1960 
FixedArrayBoundsCheck(TNode<FixedArrayBase> array,Node * index,int additional_offset,ParameterMode parameter_mode)1961 void CodeStubAssembler::FixedArrayBoundsCheck(TNode<FixedArrayBase> array,
1962                                               Node* index,
1963                                               int additional_offset,
1964                                               ParameterMode parameter_mode) {
1965   if (!FLAG_fixed_array_bounds_checks) return;
1966   DCHECK_EQ(0, additional_offset % kPointerSize);
1967   if (parameter_mode == ParameterMode::SMI_PARAMETERS) {
1968     TNode<Smi> effective_index;
1969     Smi* constant_index;
1970     bool index_is_constant = ToSmiConstant(index, constant_index);
1971     if (index_is_constant) {
1972       effective_index = SmiConstant(Smi::ToInt(constant_index) +
1973                                     additional_offset / kPointerSize);
1974     } else if (additional_offset != 0) {
1975       effective_index =
1976           SmiAdd(CAST(index), SmiConstant(additional_offset / kPointerSize));
1977     } else {
1978       effective_index = CAST(index);
1979     }
1980     CSA_CHECK(this, SmiBelow(effective_index, LoadFixedArrayBaseLength(array)));
1981   } else {
1982     // IntPtrAdd does constant-folding automatically.
1983     TNode<IntPtrT> effective_index =
1984         IntPtrAdd(UncheckedCast<IntPtrT>(index),
1985                   IntPtrConstant(additional_offset / kPointerSize));
1986     CSA_CHECK(this, UintPtrLessThan(effective_index,
1987                                     LoadAndUntagFixedArrayBaseLength(array)));
1988   }
1989 }
1990 
LoadFixedArrayElement(TNode<FixedArray> object,Node * index_node,int additional_offset,ParameterMode parameter_mode,LoadSensitivity needs_poisoning)1991 TNode<Object> CodeStubAssembler::LoadFixedArrayElement(
1992     TNode<FixedArray> object, Node* index_node, int additional_offset,
1993     ParameterMode parameter_mode, LoadSensitivity needs_poisoning) {
1994   CSA_ASSERT(this, IsFixedArraySubclass(object));
1995   CSA_ASSERT(this, IsNotWeakFixedArraySubclass(object));
1996   FixedArrayBoundsCheck(object, index_node, additional_offset, parameter_mode);
1997   TNode<MaybeObject> element =
1998       LoadArrayElement(object, FixedArray::kHeaderSize, index_node,
1999                        additional_offset, parameter_mode, needs_poisoning);
2000   return CAST(element);
2001 }
2002 
LoadPropertyArrayElement(SloppyTNode<PropertyArray> object,SloppyTNode<IntPtrT> index)2003 TNode<Object> CodeStubAssembler::LoadPropertyArrayElement(
2004     SloppyTNode<PropertyArray> object, SloppyTNode<IntPtrT> index) {
2005   int additional_offset = 0;
2006   ParameterMode parameter_mode = INTPTR_PARAMETERS;
2007   LoadSensitivity needs_poisoning = LoadSensitivity::kSafe;
2008   STATIC_ASSERT(PropertyArray::kHeaderSize == FixedArray::kHeaderSize);
2009 
2010   return CAST(LoadArrayElement(object, PropertyArray::kHeaderSize, index,
2011                                additional_offset, parameter_mode,
2012                                needs_poisoning));
2013 }
2014 
LoadPropertyArrayLength(TNode<PropertyArray> object)2015 TNode<IntPtrT> CodeStubAssembler::LoadPropertyArrayLength(
2016     TNode<PropertyArray> object) {
2017   TNode<IntPtrT> value =
2018       LoadAndUntagObjectField(object, PropertyArray::kLengthAndHashOffset);
2019   return Signed(DecodeWord<PropertyArray::LengthField>(value));
2020 }
2021 
LoadFixedTypedArrayBackingStore(TNode<FixedTypedArrayBase> typed_array)2022 TNode<RawPtrT> CodeStubAssembler::LoadFixedTypedArrayBackingStore(
2023     TNode<FixedTypedArrayBase> typed_array) {
2024   // Backing store = external_pointer + base_pointer.
2025   Node* external_pointer =
2026       LoadObjectField(typed_array, FixedTypedArrayBase::kExternalPointerOffset,
2027                       MachineType::Pointer());
2028   Node* base_pointer =
2029       LoadObjectField(typed_array, FixedTypedArrayBase::kBasePointerOffset);
2030   return UncheckedCast<RawPtrT>(
2031       IntPtrAdd(external_pointer, BitcastTaggedToWord(base_pointer)));
2032 }
2033 
LoadFixedBigInt64ArrayElementAsTagged(Node * data_pointer,Node * offset)2034 Node* CodeStubAssembler::LoadFixedBigInt64ArrayElementAsTagged(
2035     Node* data_pointer, Node* offset) {
2036   TVARIABLE(BigInt, var_result);
2037   Label done(this), if_zero(this);
2038   if (Is64()) {
2039     TNode<IntPtrT> value = UncheckedCast<IntPtrT>(
2040         Load(MachineType::IntPtr(), data_pointer, offset));
2041     Label if_positive(this), if_negative(this);
2042     GotoIf(IntPtrEqual(value, IntPtrConstant(0)), &if_zero);
2043     var_result = AllocateRawBigInt(IntPtrConstant(1));
2044     Branch(IntPtrGreaterThan(value, IntPtrConstant(0)), &if_positive,
2045            &if_negative);
2046 
2047     BIND(&if_positive);
2048     {
2049       StoreBigIntBitfield(var_result.value(),
2050                           IntPtrConstant(BigInt::SignBits::encode(false) |
2051                                          BigInt::LengthBits::encode(1)));
2052       StoreBigIntDigit(var_result.value(), 0, Unsigned(value));
2053       Goto(&done);
2054     }
2055 
2056     BIND(&if_negative);
2057     {
2058       StoreBigIntBitfield(var_result.value(),
2059                           IntPtrConstant(BigInt::SignBits::encode(true) |
2060                                          BigInt::LengthBits::encode(1)));
2061       StoreBigIntDigit(var_result.value(), 0,
2062                        Unsigned(IntPtrSub(IntPtrConstant(0), value)));
2063       Goto(&done);
2064     }
2065   } else {
2066     DCHECK(!Is64());
2067     TVARIABLE(WordT, var_sign, IntPtrConstant(BigInt::SignBits::encode(false)));
2068     TVARIABLE(IntPtrT, var_low);
2069     TVARIABLE(IntPtrT, var_high);
2070 #if defined(V8_TARGET_BIG_ENDIAN)
2071     var_high = UncheckedCast<IntPtrT>(
2072         Load(MachineType::UintPtr(), data_pointer, offset));
2073     var_low = UncheckedCast<IntPtrT>(
2074         Load(MachineType::UintPtr(), data_pointer,
2075              Int32Add(offset, Int32Constant(kPointerSize))));
2076 #else
2077     var_low = UncheckedCast<IntPtrT>(
2078         Load(MachineType::UintPtr(), data_pointer, offset));
2079     var_high = UncheckedCast<IntPtrT>(
2080         Load(MachineType::UintPtr(), data_pointer,
2081              Int32Add(offset, Int32Constant(kPointerSize))));
2082 #endif
2083 
2084     Label high_zero(this), negative(this), allocate_one_digit(this),
2085         allocate_two_digits(this);
2086 
2087     GotoIf(WordEqual(var_high.value(), IntPtrConstant(0)), &high_zero);
2088     Branch(IntPtrLessThan(var_high.value(), IntPtrConstant(0)), &negative,
2089            &allocate_two_digits);
2090 
2091     BIND(&high_zero);
2092     Branch(WordEqual(var_low.value(), IntPtrConstant(0)), &if_zero,
2093            &allocate_one_digit);
2094 
2095     BIND(&negative);
2096     {
2097       var_sign = IntPtrConstant(BigInt::SignBits::encode(true));
2098       // We must negate the value by computing "0 - (high|low)", performing
2099       // both parts of the subtraction separately and manually taking care
2100       // of the carry bit (which is 1 iff low != 0).
2101       var_high = IntPtrSub(IntPtrConstant(0), var_high.value());
2102       Label carry(this), no_carry(this);
2103       Branch(WordEqual(var_low.value(), IntPtrConstant(0)), &no_carry, &carry);
2104       BIND(&carry);
2105       var_high = IntPtrSub(var_high.value(), IntPtrConstant(1));
2106       Goto(&no_carry);
2107       BIND(&no_carry);
2108       var_low = IntPtrSub(IntPtrConstant(0), var_low.value());
2109       // var_high was non-zero going into this block, but subtracting the
2110       // carry bit from it could bring us back onto the "one digit" path.
2111       Branch(WordEqual(var_high.value(), IntPtrConstant(0)),
2112              &allocate_one_digit, &allocate_two_digits);
2113     }
2114 
2115     BIND(&allocate_one_digit);
2116     {
2117       var_result = AllocateRawBigInt(IntPtrConstant(1));
2118       StoreBigIntBitfield(
2119           var_result.value(),
2120           WordOr(var_sign.value(),
2121                  IntPtrConstant(BigInt::LengthBits::encode(1))));
2122       StoreBigIntDigit(var_result.value(), 0, Unsigned(var_low.value()));
2123       Goto(&done);
2124     }
2125 
2126     BIND(&allocate_two_digits);
2127     {
2128       var_result = AllocateRawBigInt(IntPtrConstant(2));
2129       StoreBigIntBitfield(
2130           var_result.value(),
2131           WordOr(var_sign.value(),
2132                  IntPtrConstant(BigInt::LengthBits::encode(2))));
2133       StoreBigIntDigit(var_result.value(), 0, Unsigned(var_low.value()));
2134       StoreBigIntDigit(var_result.value(), 1, Unsigned(var_high.value()));
2135       Goto(&done);
2136     }
2137   }
2138   BIND(&if_zero);
2139   var_result = AllocateBigInt(IntPtrConstant(0));
2140   Goto(&done);
2141 
2142   BIND(&done);
2143   return var_result.value();
2144 }
2145 
LoadFixedBigUint64ArrayElementAsTagged(Node * data_pointer,Node * offset)2146 Node* CodeStubAssembler::LoadFixedBigUint64ArrayElementAsTagged(
2147     Node* data_pointer, Node* offset) {
2148   TVARIABLE(BigInt, var_result);
2149   Label if_zero(this), done(this);
2150   if (Is64()) {
2151     TNode<UintPtrT> value = UncheckedCast<UintPtrT>(
2152         Load(MachineType::UintPtr(), data_pointer, offset));
2153     GotoIf(IntPtrEqual(value, IntPtrConstant(0)), &if_zero);
2154     var_result = AllocateBigInt(IntPtrConstant(1));
2155     StoreBigIntDigit(var_result.value(), 0, value);
2156     Goto(&done);
2157   } else {
2158     DCHECK(!Is64());
2159     Label high_zero(this);
2160 
2161 #if defined(V8_TARGET_BIG_ENDIAN)
2162     TNode<UintPtrT> high = UncheckedCast<UintPtrT>(
2163         Load(MachineType::UintPtr(), data_pointer, offset));
2164     TNode<UintPtrT> low = UncheckedCast<UintPtrT>(
2165         Load(MachineType::UintPtr(), data_pointer,
2166              Int32Add(offset, Int32Constant(kPointerSize))));
2167 #else
2168     TNode<UintPtrT> low = UncheckedCast<UintPtrT>(
2169         Load(MachineType::UintPtr(), data_pointer, offset));
2170     TNode<UintPtrT> high = UncheckedCast<UintPtrT>(
2171         Load(MachineType::UintPtr(), data_pointer,
2172              Int32Add(offset, Int32Constant(kPointerSize))));
2173 #endif
2174 
2175     GotoIf(WordEqual(high, IntPtrConstant(0)), &high_zero);
2176     var_result = AllocateBigInt(IntPtrConstant(2));
2177     StoreBigIntDigit(var_result.value(), 0, low);
2178     StoreBigIntDigit(var_result.value(), 1, high);
2179     Goto(&done);
2180 
2181     BIND(&high_zero);
2182     GotoIf(WordEqual(low, IntPtrConstant(0)), &if_zero);
2183     var_result = AllocateBigInt(IntPtrConstant(1));
2184     StoreBigIntDigit(var_result.value(), 0, low);
2185     Goto(&done);
2186   }
2187   BIND(&if_zero);
2188   var_result = AllocateBigInt(IntPtrConstant(0));
2189   Goto(&done);
2190 
2191   BIND(&done);
2192   return var_result.value();
2193 }
2194 
LoadFixedTypedArrayElementAsTagged(Node * data_pointer,Node * index_node,ElementsKind elements_kind,ParameterMode parameter_mode)2195 Node* CodeStubAssembler::LoadFixedTypedArrayElementAsTagged(
2196     Node* data_pointer, Node* index_node, ElementsKind elements_kind,
2197     ParameterMode parameter_mode) {
2198   Node* offset =
2199       ElementOffsetFromIndex(index_node, elements_kind, parameter_mode, 0);
2200   switch (elements_kind) {
2201     case UINT8_ELEMENTS: /* fall through */
2202     case UINT8_CLAMPED_ELEMENTS:
2203       return SmiFromInt32(Load(MachineType::Uint8(), data_pointer, offset));
2204     case INT8_ELEMENTS:
2205       return SmiFromInt32(Load(MachineType::Int8(), data_pointer, offset));
2206     case UINT16_ELEMENTS:
2207       return SmiFromInt32(Load(MachineType::Uint16(), data_pointer, offset));
2208     case INT16_ELEMENTS:
2209       return SmiFromInt32(Load(MachineType::Int16(), data_pointer, offset));
2210     case UINT32_ELEMENTS:
2211       return ChangeUint32ToTagged(
2212           Load(MachineType::Uint32(), data_pointer, offset));
2213     case INT32_ELEMENTS:
2214       return ChangeInt32ToTagged(
2215           Load(MachineType::Int32(), data_pointer, offset));
2216     case FLOAT32_ELEMENTS:
2217       return AllocateHeapNumberWithValue(ChangeFloat32ToFloat64(
2218           Load(MachineType::Float32(), data_pointer, offset)));
2219     case FLOAT64_ELEMENTS:
2220       return AllocateHeapNumberWithValue(
2221           Load(MachineType::Float64(), data_pointer, offset));
2222     case BIGINT64_ELEMENTS:
2223       return LoadFixedBigInt64ArrayElementAsTagged(data_pointer, offset);
2224     case BIGUINT64_ELEMENTS:
2225       return LoadFixedBigUint64ArrayElementAsTagged(data_pointer, offset);
2226     default:
2227       UNREACHABLE();
2228   }
2229 }
2230 
LoadFixedTypedArrayElementAsTagged(TNode<WordT> data_pointer,TNode<Smi> index,TNode<Int32T> elements_kind)2231 TNode<Numeric> CodeStubAssembler::LoadFixedTypedArrayElementAsTagged(
2232     TNode<WordT> data_pointer, TNode<Smi> index, TNode<Int32T> elements_kind) {
2233   TVARIABLE(Numeric, var_result);
2234   Label done(this), if_unknown_type(this, Label::kDeferred);
2235   int32_t elements_kinds[] = {
2236 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) TYPE##_ELEMENTS,
2237       TYPED_ARRAYS(TYPED_ARRAY_CASE)
2238 #undef TYPED_ARRAY_CASE
2239   };
2240 
2241 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) Label if_##type##array(this);
2242   TYPED_ARRAYS(TYPED_ARRAY_CASE)
2243 #undef TYPED_ARRAY_CASE
2244 
2245   Label* elements_kind_labels[] = {
2246 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) &if_##type##array,
2247       TYPED_ARRAYS(TYPED_ARRAY_CASE)
2248 #undef TYPED_ARRAY_CASE
2249   };
2250   STATIC_ASSERT(arraysize(elements_kinds) == arraysize(elements_kind_labels));
2251 
2252   Switch(elements_kind, &if_unknown_type, elements_kinds, elements_kind_labels,
2253          arraysize(elements_kinds));
2254 
2255   BIND(&if_unknown_type);
2256   Unreachable();
2257 
2258 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype)               \
2259   BIND(&if_##type##array);                                      \
2260   {                                                             \
2261     var_result = CAST(LoadFixedTypedArrayElementAsTagged(       \
2262         data_pointer, index, TYPE##_ELEMENTS, SMI_PARAMETERS)); \
2263     Goto(&done);                                                \
2264   }
2265   TYPED_ARRAYS(TYPED_ARRAY_CASE)
2266 #undef TYPED_ARRAY_CASE
2267 
2268   BIND(&done);
2269   return var_result.value();
2270 }
2271 
StoreFixedTypedArrayElementFromTagged(TNode<Context> context,TNode<FixedTypedArrayBase> elements,TNode<Object> index_node,TNode<Object> value,ElementsKind elements_kind,ParameterMode parameter_mode)2272 void CodeStubAssembler::StoreFixedTypedArrayElementFromTagged(
2273     TNode<Context> context, TNode<FixedTypedArrayBase> elements,
2274     TNode<Object> index_node, TNode<Object> value, ElementsKind elements_kind,
2275     ParameterMode parameter_mode) {
2276   TNode<RawPtrT> data_pointer = LoadFixedTypedArrayBackingStore(elements);
2277   switch (elements_kind) {
2278     case UINT8_ELEMENTS:
2279     case UINT8_CLAMPED_ELEMENTS:
2280     case INT8_ELEMENTS:
2281     case UINT16_ELEMENTS:
2282     case INT16_ELEMENTS:
2283       StoreElement(data_pointer, elements_kind, index_node,
2284                    SmiToInt32(CAST(value)), parameter_mode);
2285       break;
2286     case UINT32_ELEMENTS:
2287     case INT32_ELEMENTS:
2288       StoreElement(data_pointer, elements_kind, index_node,
2289                    TruncateTaggedToWord32(context, value), parameter_mode);
2290       break;
2291     case FLOAT32_ELEMENTS:
2292       StoreElement(data_pointer, elements_kind, index_node,
2293                    TruncateFloat64ToFloat32(LoadHeapNumberValue(CAST(value))),
2294                    parameter_mode);
2295       break;
2296     case FLOAT64_ELEMENTS:
2297       StoreElement(data_pointer, elements_kind, index_node,
2298                    LoadHeapNumberValue(CAST(value)), parameter_mode);
2299       break;
2300     case BIGUINT64_ELEMENTS:
2301     case BIGINT64_ELEMENTS: {
2302       TNode<IntPtrT> offset =
2303           ElementOffsetFromIndex(index_node, elements_kind, parameter_mode, 0);
2304       EmitBigTypedArrayElementStore(elements, data_pointer, offset,
2305                                     CAST(value));
2306       break;
2307     }
2308     default:
2309       UNREACHABLE();
2310   }
2311 }
2312 
LoadFeedbackVectorSlot(Node * object,Node * slot_index_node,int additional_offset,ParameterMode parameter_mode)2313 TNode<MaybeObject> CodeStubAssembler::LoadFeedbackVectorSlot(
2314     Node* object, Node* slot_index_node, int additional_offset,
2315     ParameterMode parameter_mode) {
2316   CSA_SLOW_ASSERT(this, IsFeedbackVector(object));
2317   CSA_SLOW_ASSERT(this, MatchesParameterMode(slot_index_node, parameter_mode));
2318   int32_t header_size =
2319       FeedbackVector::kFeedbackSlotsOffset + additional_offset - kHeapObjectTag;
2320   Node* offset = ElementOffsetFromIndex(slot_index_node, HOLEY_ELEMENTS,
2321                                         parameter_mode, header_size);
2322   CSA_SLOW_ASSERT(
2323       this, IsOffsetInBounds(offset, LoadFeedbackVectorLength(CAST(object)),
2324                              FeedbackVector::kHeaderSize));
2325   return UncheckedCast<MaybeObject>(
2326       Load(MachineType::AnyTagged(), object, offset));
2327 }
2328 
LoadAndUntagToWord32ArrayElement(SloppyTNode<HeapObject> object,int array_header_size,Node * index_node,int additional_offset,ParameterMode parameter_mode)2329 TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32ArrayElement(
2330     SloppyTNode<HeapObject> object, int array_header_size, Node* index_node,
2331     int additional_offset, ParameterMode parameter_mode) {
2332   CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
2333   DCHECK_EQ(additional_offset % kPointerSize, 0);
2334   int endian_correction = 0;
2335 #if V8_TARGET_LITTLE_ENDIAN
2336   if (SmiValuesAre32Bits()) endian_correction = kPointerSize / 2;
2337 #endif
2338   int32_t header_size = array_header_size + additional_offset - kHeapObjectTag +
2339                         endian_correction;
2340   Node* offset = ElementOffsetFromIndex(index_node, HOLEY_ELEMENTS,
2341                                         parameter_mode, header_size);
2342   STATIC_ASSERT(FixedArrayBase::kLengthOffset == WeakFixedArray::kLengthOffset);
2343   // Check that index_node + additional_offset <= object.length.
2344   // TODO(cbruni): Use proper LoadXXLength helpers
2345   CSA_ASSERT(this,
2346              IsOffsetInBounds(
2347                  offset,
2348                  LoadAndUntagObjectField(object, FixedArrayBase::kLengthOffset),
2349                  FixedArray::kHeaderSize + endian_correction));
2350   if (SmiValuesAre32Bits()) {
2351     return UncheckedCast<Int32T>(Load(MachineType::Int32(), object, offset));
2352   } else {
2353     return SmiToInt32(Load(MachineType::AnyTagged(), object, offset));
2354   }
2355 }
2356 
LoadAndUntagToWord32FixedArrayElement(SloppyTNode<HeapObject> object,Node * index_node,int additional_offset,ParameterMode parameter_mode)2357 TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement(
2358     SloppyTNode<HeapObject> object, Node* index_node, int additional_offset,
2359     ParameterMode parameter_mode) {
2360   CSA_SLOW_ASSERT(this, IsFixedArraySubclass(object));
2361   return LoadAndUntagToWord32ArrayElement(object, FixedArray::kHeaderSize,
2362                                           index_node, additional_offset,
2363                                           parameter_mode);
2364 }
2365 
LoadWeakFixedArrayElement(TNode<WeakFixedArray> object,Node * index,int additional_offset,ParameterMode parameter_mode,LoadSensitivity needs_poisoning)2366 TNode<MaybeObject> CodeStubAssembler::LoadWeakFixedArrayElement(
2367     TNode<WeakFixedArray> object, Node* index, int additional_offset,
2368     ParameterMode parameter_mode, LoadSensitivity needs_poisoning) {
2369   return LoadArrayElement(object, WeakFixedArray::kHeaderSize, index,
2370                           additional_offset, parameter_mode, needs_poisoning);
2371 }
2372 
LoadFixedDoubleArrayElement(SloppyTNode<FixedDoubleArray> object,Node * index_node,MachineType machine_type,int additional_offset,ParameterMode parameter_mode,Label * if_hole)2373 TNode<Float64T> CodeStubAssembler::LoadFixedDoubleArrayElement(
2374     SloppyTNode<FixedDoubleArray> object, Node* index_node,
2375     MachineType machine_type, int additional_offset,
2376     ParameterMode parameter_mode, Label* if_hole) {
2377   CSA_ASSERT(this, IsFixedDoubleArray(object));
2378   DCHECK_EQ(additional_offset % kPointerSize, 0);
2379   CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
2380   int32_t header_size =
2381       FixedDoubleArray::kHeaderSize + additional_offset - kHeapObjectTag;
2382   TNode<IntPtrT> offset = ElementOffsetFromIndex(
2383       index_node, HOLEY_DOUBLE_ELEMENTS, parameter_mode, header_size);
2384   CSA_ASSERT(this, IsOffsetInBounds(
2385                        offset, LoadAndUntagFixedArrayBaseLength(object),
2386                        FixedDoubleArray::kHeaderSize, HOLEY_DOUBLE_ELEMENTS));
2387   return LoadDoubleWithHoleCheck(object, offset, if_hole, machine_type);
2388 }
2389 
LoadFixedArrayBaseElementAsTagged(TNode<FixedArrayBase> elements,TNode<IntPtrT> index,TNode<Int32T> elements_kind,Label * if_accessor,Label * if_hole)2390 TNode<Object> CodeStubAssembler::LoadFixedArrayBaseElementAsTagged(
2391     TNode<FixedArrayBase> elements, TNode<IntPtrT> index,
2392     TNode<Int32T> elements_kind, Label* if_accessor, Label* if_hole) {
2393   TVARIABLE(Object, var_result);
2394   Label done(this), if_packed(this), if_holey(this), if_packed_double(this),
2395       if_holey_double(this), if_dictionary(this, Label::kDeferred);
2396 
2397   int32_t kinds[] = {// Handled by if_packed.
2398                      PACKED_SMI_ELEMENTS, PACKED_ELEMENTS,
2399                      // Handled by if_holey.
2400                      HOLEY_SMI_ELEMENTS, HOLEY_ELEMENTS,
2401                      // Handled by if_packed_double.
2402                      PACKED_DOUBLE_ELEMENTS,
2403                      // Handled by if_holey_double.
2404                      HOLEY_DOUBLE_ELEMENTS};
2405   Label* labels[] = {// PACKED_{SMI,}_ELEMENTS
2406                      &if_packed, &if_packed,
2407                      // HOLEY_{SMI,}_ELEMENTS
2408                      &if_holey, &if_holey,
2409                      // PACKED_DOUBLE_ELEMENTS
2410                      &if_packed_double,
2411                      // HOLEY_DOUBLE_ELEMENTS
2412                      &if_holey_double};
2413   Switch(elements_kind, &if_dictionary, kinds, labels, arraysize(kinds));
2414 
2415   BIND(&if_packed);
2416   {
2417     var_result = LoadFixedArrayElement(CAST(elements), index, 0);
2418     Goto(&done);
2419   }
2420 
2421   BIND(&if_holey);
2422   {
2423     var_result = LoadFixedArrayElement(CAST(elements), index);
2424     Branch(WordEqual(var_result.value(), TheHoleConstant()), if_hole, &done);
2425   }
2426 
2427   BIND(&if_packed_double);
2428   {
2429     var_result = AllocateHeapNumberWithValue(LoadFixedDoubleArrayElement(
2430         CAST(elements), index, MachineType::Float64()));
2431     Goto(&done);
2432   }
2433 
2434   BIND(&if_holey_double);
2435   {
2436     var_result = AllocateHeapNumberWithValue(LoadFixedDoubleArrayElement(
2437         CAST(elements), index, MachineType::Float64(), 0, INTPTR_PARAMETERS,
2438         if_hole));
2439     Goto(&done);
2440   }
2441 
2442   BIND(&if_dictionary);
2443   {
2444     CSA_ASSERT(this, IsDictionaryElementsKind(elements_kind));
2445     var_result = BasicLoadNumberDictionaryElement(CAST(elements), index,
2446                                                   if_accessor, if_hole);
2447     Goto(&done);
2448   }
2449 
2450   BIND(&done);
2451   return var_result.value();
2452 }
2453 
LoadDoubleWithHoleCheck(SloppyTNode<Object> base,SloppyTNode<IntPtrT> offset,Label * if_hole,MachineType machine_type)2454 TNode<Float64T> CodeStubAssembler::LoadDoubleWithHoleCheck(
2455     SloppyTNode<Object> base, SloppyTNode<IntPtrT> offset, Label* if_hole,
2456     MachineType machine_type) {
2457   if (if_hole) {
2458     // TODO(ishell): Compare only the upper part for the hole once the
2459     // compiler is able to fold addition of already complex |offset| with
2460     // |kIeeeDoubleExponentWordOffset| into one addressing mode.
2461     if (Is64()) {
2462       Node* element = Load(MachineType::Uint64(), base, offset);
2463       GotoIf(Word64Equal(element, Int64Constant(kHoleNanInt64)), if_hole);
2464     } else {
2465       Node* element_upper = Load(
2466           MachineType::Uint32(), base,
2467           IntPtrAdd(offset, IntPtrConstant(kIeeeDoubleExponentWordOffset)));
2468       GotoIf(Word32Equal(element_upper, Int32Constant(kHoleNanUpper32)),
2469              if_hole);
2470     }
2471   }
2472   if (machine_type.IsNone()) {
2473     // This means the actual value is not needed.
2474     return TNode<Float64T>();
2475   }
2476   return UncheckedCast<Float64T>(Load(machine_type, base, offset));
2477 }
2478 
LoadContextElement(SloppyTNode<Context> context,int slot_index)2479 TNode<Object> CodeStubAssembler::LoadContextElement(
2480     SloppyTNode<Context> context, int slot_index) {
2481   int offset = Context::SlotOffset(slot_index);
2482   return UncheckedCast<Object>(
2483       Load(MachineType::AnyTagged(), context, IntPtrConstant(offset)));
2484 }
2485 
2486 TNode<Object> CodeStubAssembler::LoadContextElement(
2487     SloppyTNode<Context> context, SloppyTNode<IntPtrT> slot_index) {
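  // A context slot lives slot_index * kPointerSize bytes past the Context
  // header; kHeapObjectTag is subtracted because tagged pointers point one
  // byte past the start of the object.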
2488   Node* offset =
2489       IntPtrAdd(TimesPointerSize(slot_index),
2490                 IntPtrConstant(Context::kHeaderSize - kHeapObjectTag));
2491   return UncheckedCast<Object>(Load(MachineType::AnyTagged(), context, offset));
2492 }
2493 
2494 void CodeStubAssembler::StoreContextElement(SloppyTNode<Context> context,
2495                                             int slot_index,
2496                                             SloppyTNode<Object> value) {
2497   int offset = Context::SlotOffset(slot_index);
2498   Store(context, IntPtrConstant(offset), value);
2499 }
2500 
2501 void CodeStubAssembler::StoreContextElement(SloppyTNode<Context> context,
2502                                             SloppyTNode<IntPtrT> slot_index,
2503                                             SloppyTNode<Object> value) {
2504   Node* offset =
2505       IntPtrAdd(TimesPointerSize(slot_index),
2506                 IntPtrConstant(Context::kHeaderSize - kHeapObjectTag));
2507   Store(context, offset, value);
2508 }
2509 
2510 void CodeStubAssembler::StoreContextElementNoWriteBarrier(
2511     SloppyTNode<Context> context, int slot_index, SloppyTNode<Object> value) {
2512   int offset = Context::SlotOffset(slot_index);
2513   StoreNoWriteBarrier(MachineRepresentation::kTagged, context,
2514                       IntPtrConstant(offset), value);
2515 }
2516 
2517 TNode<Context> CodeStubAssembler::LoadNativeContext(
2518     SloppyTNode<Context> context) {
2519   return UncheckedCast<Context>(
2520       LoadContextElement(context, Context::NATIVE_CONTEXT_INDEX));
2521 }
2522 
2523 TNode<Context> CodeStubAssembler::LoadModuleContext(
2524     SloppyTNode<Context> context) {
2525   Node* module_map = LoadRoot(Heap::kModuleContextMapRootIndex);
2526   Variable cur_context(this, MachineRepresentation::kTaggedPointer);
2527   cur_context.Bind(context);
2528 
2529   Label context_found(this);
2530 
2531   Variable* context_search_loop_variables[1] = {&cur_context};
2532   Label context_search(this, 1, context_search_loop_variables);
2533 
2534   // Loop until cur_context->map() is module_map.
2535   Goto(&context_search);
2536   BIND(&context_search);
2537   {
2538     CSA_ASSERT(this, Word32BinaryNot(IsNativeContext(cur_context.value())));
2539     GotoIf(WordEqual(LoadMap(cur_context.value()), module_map), &context_found);
2540 
2541     cur_context.Bind(
2542         LoadContextElement(cur_context.value(), Context::PREVIOUS_INDEX));
2543     Goto(&context_search);
2544   }
2545 
2546   BIND(&context_found);
2547   return UncheckedCast<Context>(cur_context.value());
2548 }
2549 
2550 TNode<Map> CodeStubAssembler::LoadJSArrayElementsMap(
2551     SloppyTNode<Int32T> kind, SloppyTNode<Context> native_context) {
2552   CSA_ASSERT(this, IsFastElementsKind(kind));
2553   CSA_ASSERT(this, IsNativeContext(native_context));
2554   Node* offset = IntPtrAdd(IntPtrConstant(Context::FIRST_JS_ARRAY_MAP_SLOT),
2555                            ChangeInt32ToIntPtr(kind));
2556   return UncheckedCast<Map>(LoadContextElement(native_context, offset));
2557 }
2558 
2559 TNode<Map> CodeStubAssembler::LoadJSArrayElementsMap(
2560     ElementsKind kind, SloppyTNode<Context> native_context) {
2561   CSA_ASSERT(this, IsNativeContext(native_context));
2562   return UncheckedCast<Map>(
2563       LoadContextElement(native_context, Context::ArrayMapIndex(kind)));
2564 }
2565 
2566 TNode<BoolT> CodeStubAssembler::IsGeneratorFunction(
2567     TNode<JSFunction> function) {
2568   TNode<SharedFunctionInfo> const shared_function_info =
2569       CAST(LoadObjectField(function, JSFunction::kSharedFunctionInfoOffset));
2570 
2571   TNode<Uint32T> const function_kind =
2572       DecodeWord32<SharedFunctionInfo::FunctionKindBits>(LoadObjectField(
2573           shared_function_info, SharedFunctionInfo::kFlagsOffset,
2574           MachineType::Uint32()));
2575 
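  // The function is treated as a generator if its kind is any of the four
  // generator-related kinds checked below: async generator function, async
  // concise generator method, generator function, or concise generator
  // method.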
2576   return TNode<BoolT>::UncheckedCast(Word32Or(
2577       Word32Or(
2578           Word32Or(
2579               Word32Equal(function_kind,
2580                           Int32Constant(FunctionKind::kAsyncGeneratorFunction)),
2581               Word32Equal(
2582                   function_kind,
2583                   Int32Constant(FunctionKind::kAsyncConciseGeneratorMethod))),
2584           Word32Equal(function_kind,
2585                       Int32Constant(FunctionKind::kGeneratorFunction))),
2586       Word32Equal(function_kind,
2587                   Int32Constant(FunctionKind::kConciseGeneratorMethod))));
2588 }
2589 
2590 TNode<BoolT> CodeStubAssembler::HasPrototypeProperty(TNode<JSFunction> function,
2591                                                      TNode<Map> map) {
2592   // (has_prototype_slot() && IsConstructor()) ||
2593   // IsGeneratorFunction(shared()->kind())
2594   uint32_t mask =
2595       Map::HasPrototypeSlotBit::kMask | Map::IsConstructorBit::kMask;
2596   return TNode<BoolT>::UncheckedCast(
2597       Word32Or(IsAllSetWord32(LoadMapBitField(map), mask),
2598                IsGeneratorFunction(function)));
2599 }
2600 
2601 void CodeStubAssembler::GotoIfPrototypeRequiresRuntimeLookup(
2602     TNode<JSFunction> function, TNode<Map> map, Label* runtime) {
2603   // !has_prototype_property() || has_non_instance_prototype()
2604   GotoIfNot(HasPrototypeProperty(function, map), runtime);
2605   GotoIf(IsSetWord32<Map::HasNonInstancePrototypeBit>(LoadMapBitField(map)),
2606          runtime);
2607 }
2608 
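// Loads the prototype of a JSFunction: the kPrototypeOrInitialMapOffset field
// holds either the prototype itself or the initial map (in which case the
// prototype is read from that map); the hole value diverts to |if_bailout|.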
2609 Node* CodeStubAssembler::LoadJSFunctionPrototype(Node* function,
2610                                                  Label* if_bailout) {
2611   CSA_ASSERT(this, TaggedIsNotSmi(function));
2612   CSA_ASSERT(this, IsJSFunction(function));
2613   CSA_ASSERT(this, IsFunctionWithPrototypeSlotMap(LoadMap(function)));
2614   CSA_ASSERT(this, IsClearWord32<Map::HasNonInstancePrototypeBit>(
2615                        LoadMapBitField(LoadMap(function))));
2616   Node* proto_or_map =
2617       LoadObjectField(function, JSFunction::kPrototypeOrInitialMapOffset);
2618   GotoIf(IsTheHole(proto_or_map), if_bailout);
2619 
2620   VARIABLE(var_result, MachineRepresentation::kTagged, proto_or_map);
2621   Label done(this, &var_result);
2622   GotoIfNot(IsMap(proto_or_map), &done);
2623 
2624   var_result.Bind(LoadMapPrototype(proto_or_map));
2625   Goto(&done);
2626 
2627   BIND(&done);
2628   return var_result.value();
2629 }
2630 
2631 Node* CodeStubAssembler::LoadSharedFunctionInfoBytecodeArray(Node* shared) {
2632   CSA_ASSERT(this, TaggedIsNotSmi(shared));
2633   CSA_ASSERT(this, IsSharedFunctionInfo(shared));
2634 
2635   Node* function_data =
2636       LoadObjectField(shared, SharedFunctionInfo::kFunctionDataOffset);
2637 
2638   VARIABLE(var_result, MachineRepresentation::kTagged, function_data);
2639   Label done(this, &var_result);
2640 
2641   GotoIfNot(HasInstanceType(function_data, INTERPRETER_DATA_TYPE), &done);
2642   Node* bytecode_array =
2643       LoadObjectField(function_data, InterpreterData::kBytecodeArrayOffset);
2644   var_result.Bind(bytecode_array);
2645   Goto(&done);
2646 
2647   BIND(&done);
2648   return var_result.value();
2649 }
2650 
2651 void CodeStubAssembler::StoreObjectByteNoWriteBarrier(TNode<HeapObject> object,
2652                                                       int offset,
2653                                                       TNode<Word32T> value) {
2654   StoreNoWriteBarrier(MachineRepresentation::kWord8, object,
2655                       IntPtrConstant(offset - kHeapObjectTag), value);
2656 }
2657 
2658 void CodeStubAssembler::StoreHeapNumberValue(SloppyTNode<HeapNumber> object,
2659                                              SloppyTNode<Float64T> value) {
2660   StoreObjectFieldNoWriteBarrier(object, HeapNumber::kValueOffset, value,
2661                                  MachineRepresentation::kFloat64);
2662 }
2663 
2664 void CodeStubAssembler::StoreMutableHeapNumberValue(
2665     SloppyTNode<MutableHeapNumber> object, SloppyTNode<Float64T> value) {
2666   StoreObjectFieldNoWriteBarrier(object, MutableHeapNumber::kValueOffset, value,
2667                                  MachineRepresentation::kFloat64);
2668 }
2669 
2670 Node* CodeStubAssembler::StoreObjectField(
2671     Node* object, int offset, Node* value) {
2672   DCHECK_NE(HeapObject::kMapOffset, offset);  // Use StoreMap instead.
2673   return Store(object, IntPtrConstant(offset - kHeapObjectTag), value);
2674 }
2675 
2676 Node* CodeStubAssembler::StoreObjectField(Node* object, Node* offset,
2677                                           Node* value) {
2678   int const_offset;
2679   if (ToInt32Constant(offset, const_offset)) {
2680     return StoreObjectField(object, const_offset, value);
2681   }
2682   return Store(object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)),
2683                value);
2684 }
2685 
2686 Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
2687     Node* object, int offset, Node* value, MachineRepresentation rep) {
2688   return StoreNoWriteBarrier(rep, object,
2689                              IntPtrConstant(offset - kHeapObjectTag), value);
2690 }
2691 
2692 Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
2693     Node* object, Node* offset, Node* value, MachineRepresentation rep) {
2694   int const_offset;
2695   if (ToInt32Constant(offset, const_offset)) {
2696     return StoreObjectFieldNoWriteBarrier(object, const_offset, value, rep);
2697   }
2698   return StoreNoWriteBarrier(
2699       rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), value);
2700 }
2701 
2702 Node* CodeStubAssembler::StoreMap(Node* object, Node* map) {
2703   CSA_SLOW_ASSERT(this, IsMap(map));
2704   return StoreWithMapWriteBarrier(
2705       object, IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag), map);
2706 }
2707 
2708 Node* CodeStubAssembler::StoreMapNoWriteBarrier(
2709     Node* object, Heap::RootListIndex map_root_index) {
2710   return StoreMapNoWriteBarrier(object, LoadRoot(map_root_index));
2711 }
2712 
2713 Node* CodeStubAssembler::StoreMapNoWriteBarrier(Node* object, Node* map) {
2714   CSA_SLOW_ASSERT(this, IsMap(map));
2715   return StoreNoWriteBarrier(
2716       MachineRepresentation::kTagged, object,
2717       IntPtrConstant(HeapObject::kMapOffset - kHeapObjectTag), map);
2718 }
2719 
2720 Node* CodeStubAssembler::StoreObjectFieldRoot(Node* object, int offset,
2721                                               Heap::RootListIndex root_index) {
2722   if (Heap::RootIsImmortalImmovable(root_index)) {
2723     return StoreObjectFieldNoWriteBarrier(object, offset, LoadRoot(root_index));
2724   } else {
2725     return StoreObjectField(object, offset, LoadRoot(root_index));
2726   }
2727 }
2728 
2729 Node* CodeStubAssembler::StoreJSArrayLength(TNode<JSArray> array,
2730                                             TNode<Smi> length) {
2731   return StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
2732 }
2733 
2734 Node* CodeStubAssembler::StoreElements(TNode<Object> object,
2735                                        TNode<FixedArrayBase> elements) {
2736   return StoreObjectField(object, JSObject::kElementsOffset, elements);
2737 }
2738 
2739 void CodeStubAssembler::StoreFixedArrayOrPropertyArrayElement(
2740     Node* object, Node* index_node, Node* value, WriteBarrierMode barrier_mode,
2741     int additional_offset, ParameterMode parameter_mode) {
2742   CSA_SLOW_ASSERT(
2743       this, Word32Or(IsFixedArraySubclass(object), IsPropertyArray(object)));
2744   CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
2745   DCHECK(barrier_mode == SKIP_WRITE_BARRIER ||
2746          barrier_mode == UPDATE_WRITE_BARRIER);
2747   DCHECK_EQ(additional_offset % kPointerSize, 0);
2748   STATIC_ASSERT(FixedArray::kHeaderSize == PropertyArray::kHeaderSize);
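  // The element offset below is computed with HOLEY_ELEMENTS scaling (one
  // tagged pointer per element) from the shared FixedArray/PropertyArray
  // header, already adjusted for additional_offset and the heap-object tag.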
2749   int header_size =
2750       FixedArray::kHeaderSize + additional_offset - kHeapObjectTag;
2751   Node* offset = ElementOffsetFromIndex(index_node, HOLEY_ELEMENTS,
2752                                         parameter_mode, header_size);
2753   STATIC_ASSERT(FixedArrayBase::kLengthOffset == WeakFixedArray::kLengthOffset);
2754   STATIC_ASSERT(FixedArrayBase::kLengthOffset ==
2755                 PropertyArray::kLengthAndHashOffset);
2756   // Check that index_node + additional_offset <= object.length.
2757   // TODO(cbruni): Use proper LoadXXLength helpers
2758   CSA_ASSERT(
2759       this,
2760       IsOffsetInBounds(
2761           offset,
2762           Select<IntPtrT>(
2763               IsPropertyArray(object),
2764               [=] {
2765                 TNode<IntPtrT> length_and_hash = LoadAndUntagObjectField(
2766                     object, PropertyArray::kLengthAndHashOffset);
2767                 return TNode<IntPtrT>::UncheckedCast(
2768                     DecodeWord<PropertyArray::LengthField>(length_and_hash));
2769               },
2770               [=] {
2771                 return LoadAndUntagObjectField(object,
2772                                                FixedArrayBase::kLengthOffset);
2773               }),
2774           FixedArray::kHeaderSize));
2775   if (barrier_mode == SKIP_WRITE_BARRIER) {
2776     StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset, value);
2777   } else {
2778     Store(object, offset, value);
2779   }
2780 }
2781 
2782 void CodeStubAssembler::StoreFixedDoubleArrayElement(
2783     TNode<FixedDoubleArray> object, Node* index_node, TNode<Float64T> value,
2784     ParameterMode parameter_mode) {
2785   CSA_ASSERT(this, IsFixedDoubleArray(object));
2786   CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
2787   FixedArrayBoundsCheck(object, index_node, 0, parameter_mode);
2788   Node* offset =
2789       ElementOffsetFromIndex(index_node, PACKED_DOUBLE_ELEMENTS, parameter_mode,
2790                              FixedArray::kHeaderSize - kHeapObjectTag);
2791   MachineRepresentation rep = MachineRepresentation::kFloat64;
2792   StoreNoWriteBarrier(rep, object, offset, value);
2793 }
2794 
2795 Node* CodeStubAssembler::StoreFeedbackVectorSlot(Node* object,
2796                                                  Node* slot_index_node,
2797                                                  Node* value,
2798                                                  WriteBarrierMode barrier_mode,
2799                                                  int additional_offset,
2800                                                  ParameterMode parameter_mode) {
2801   CSA_SLOW_ASSERT(this, IsFeedbackVector(object));
2802   CSA_SLOW_ASSERT(this, MatchesParameterMode(slot_index_node, parameter_mode));
2803   DCHECK_EQ(additional_offset % kPointerSize, 0);
2804   DCHECK(barrier_mode == SKIP_WRITE_BARRIER ||
2805          barrier_mode == UPDATE_WRITE_BARRIER);
2806   int header_size =
2807       FeedbackVector::kFeedbackSlotsOffset + additional_offset - kHeapObjectTag;
2808   Node* offset = ElementOffsetFromIndex(slot_index_node, HOLEY_ELEMENTS,
2809                                         parameter_mode, header_size);
2810   // Check that slot_index_node <= object.length.
2811   CSA_ASSERT(this,
2812              IsOffsetInBounds(offset, LoadFeedbackVectorLength(CAST(object)),
2813                               FeedbackVector::kHeaderSize));
2814   if (barrier_mode == SKIP_WRITE_BARRIER) {
2815     return StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset,
2816                                value);
2817   } else {
2818     return Store(object, offset, value);
2819   }
2820 }
2821 
2822 void CodeStubAssembler::EnsureArrayLengthWritable(TNode<Map> map,
2823                                                   Label* bailout) {
2824   // Don't support arrays in dictionary named property mode.
2825   GotoIf(IsDictionaryMap(map), bailout);
2826 
2827   // Check whether the length property is writable. The length property is the
2828   // only default named property on arrays. It's nonconfigurable, hence is
2829   // guaranteed to stay the first property.
2830   TNode<DescriptorArray> descriptors = LoadMapDescriptors(map);
2831 
2832   int length_index = JSArray::kLengthDescriptorIndex;
2833 #ifdef DEBUG
2834   TNode<Name> maybe_length = CAST(LoadWeakFixedArrayElement(
2835       descriptors, DescriptorArray::ToKeyIndex(length_index)));
2836   CSA_ASSERT(this,
2837              WordEqual(maybe_length, LoadRoot(Heap::klength_stringRootIndex)));
2838 #endif
2839 
2840   TNode<Uint32T> details = LoadDetailsByKeyIndex(
2841       descriptors, IntPtrConstant(DescriptorArray::ToKeyIndex(length_index)));
2842   GotoIf(IsSetWord32(details, PropertyDetails::kAttributesReadOnlyMask),
2843          bailout);
2844 }
2845 
2846 TNode<Int32T> CodeStubAssembler::EnsureArrayPushable(TNode<Map> map,
2847                                                      Label* bailout) {
2848   // Disallow pushing onto prototypes. It might be the JSArray prototype.
2849   // Disallow pushing onto non-extensible objects.
2850   Comment("Disallow pushing onto prototypes");
2851   Node* bit_field2 = LoadMapBitField2(map);
2852   int mask = Map::IsPrototypeMapBit::kMask | Map::IsExtensibleBit::kMask;
2853   Node* test = Word32And(bit_field2, Int32Constant(mask));
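  // Bail out unless exactly IsExtensibleBit is set in the masked bits, i.e.
  // the map is extensible and is not a prototype map.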
2854   GotoIf(Word32NotEqual(test, Int32Constant(Map::IsExtensibleBit::kMask)),
2855          bailout);
2856 
2857   EnsureArrayLengthWritable(map, bailout);
2858 
2859   TNode<Uint32T> kind = DecodeWord32<Map::ElementsKindBits>(bit_field2);
2860   return Signed(kind);
2861 }
2862 
2863 void CodeStubAssembler::PossiblyGrowElementsCapacity(
2864     ParameterMode mode, ElementsKind kind, Node* array, Node* length,
2865     Variable* var_elements, Node* growth, Label* bailout) {
2866   Label fits(this, var_elements);
2867   Node* capacity =
2868       TaggedToParameter(LoadFixedArrayBaseLength(var_elements->value()), mode);
2869   // The |length| and |growth| nodes are already in the appropriate
2870   // ParameterMode representation.
2871   Node* new_length = IntPtrOrSmiAdd(growth, length, mode);
2872   GotoIfNot(IntPtrOrSmiGreaterThan(new_length, capacity, mode), &fits);
2873   Node* new_capacity = CalculateNewElementsCapacity(new_length, mode);
2874   var_elements->Bind(GrowElementsCapacity(array, var_elements->value(), kind,
2875                                           kind, capacity, new_capacity, mode,
2876                                           bailout));
2877   Goto(&fits);
2878   BIND(&fits);
2879 }
2880 
2881 TNode<Smi> CodeStubAssembler::BuildAppendJSArray(ElementsKind kind,
2882                                                  SloppyTNode<JSArray> array,
2883                                                  CodeStubArguments* args,
2884                                                  TVariable<IntPtrT>* arg_index,
2885                                                  Label* bailout) {
2886   CSA_SLOW_ASSERT(this, IsJSArray(array));
2887   Comment("BuildAppendJSArray: %s", ElementsKindToString(kind));
2888   Label pre_bailout(this);
2889   Label success(this);
2890   TVARIABLE(Smi, var_tagged_length);
2891   ParameterMode mode = OptimalParameterMode();
2892   VARIABLE(var_length, OptimalParameterRepresentation(),
2893            TaggedToParameter(LoadFastJSArrayLength(array), mode));
2894   VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array));
2895 
2896   // Resize the capacity of the fixed array if it doesn't fit.
2897   TNode<IntPtrT> first = arg_index->value();
2898   Node* growth = IntPtrToParameter(
2899       IntPtrSub(UncheckedCast<IntPtrT>(args->GetLength(INTPTR_PARAMETERS)),
2900                 first),
2901       mode);
2902   PossiblyGrowElementsCapacity(mode, kind, array, var_length.value(),
2903                                &var_elements, growth, &pre_bailout);
2904 
2905   // Push each argument onto the end of the array now that there is enough
2906   // capacity.
2907   CodeStubAssembler::VariableList push_vars({&var_length}, zone());
2908   Node* elements = var_elements.value();
2909   args->ForEach(
2910       push_vars,
2911       [this, kind, mode, elements, &var_length, &pre_bailout](Node* arg) {
2912         TryStoreArrayElement(kind, mode, &pre_bailout, elements,
2913                              var_length.value(), arg);
2914         Increment(&var_length, 1, mode);
2915       },
2916       first, nullptr);
2917   {
2918     TNode<Smi> length = ParameterToTagged(var_length.value(), mode);
2919     var_tagged_length = length;
2920     StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
2921     Goto(&success);
2922   }
2923 
2924   BIND(&pre_bailout);
2925   {
2926     TNode<Smi> length = ParameterToTagged(var_length.value(), mode);
2927     var_tagged_length = length;
2928     Node* diff = SmiSub(length, LoadFastJSArrayLength(array));
2929     StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
2930     *arg_index = IntPtrAdd(arg_index->value(), SmiUntag(diff));
2931     Goto(bailout);
2932   }
2933 
2934   BIND(&success);
2935   return var_tagged_length.value();
2936 }
2937 
2938 void CodeStubAssembler::TryStoreArrayElement(ElementsKind kind,
2939                                              ParameterMode mode, Label* bailout,
2940                                              Node* elements, Node* index,
2941                                              Node* value) {
2942   if (IsSmiElementsKind(kind)) {
2943     GotoIf(TaggedIsNotSmi(value), bailout);
2944   } else if (IsDoubleElementsKind(kind)) {
2945     GotoIfNotNumber(value, bailout);
2946   }
2947   if (IsDoubleElementsKind(kind)) value = ChangeNumberToFloat64(value);
2948   StoreElement(elements, kind, index, value, mode);
2949 }
2950 
2951 void CodeStubAssembler::BuildAppendJSArray(ElementsKind kind, Node* array,
2952                                            Node* value, Label* bailout) {
2953   CSA_SLOW_ASSERT(this, IsJSArray(array));
2954   Comment("BuildAppendJSArray: %s", ElementsKindToString(kind));
2955   ParameterMode mode = OptimalParameterMode();
2956   VARIABLE(var_length, OptimalParameterRepresentation(),
2957            TaggedToParameter(LoadFastJSArrayLength(array), mode));
2958   VARIABLE(var_elements, MachineRepresentation::kTagged, LoadElements(array));
2959 
2960   // Resize the capacity of the fixed array if it doesn't fit.
2961   Node* growth = IntPtrOrSmiConstant(1, mode);
2962   PossiblyGrowElementsCapacity(mode, kind, array, var_length.value(),
2963                                &var_elements, growth, bailout);
2964 
2965   // Push the single value onto the end of the array now that there is
2966   // enough capacity.
2967   TryStoreArrayElement(kind, mode, bailout, var_elements.value(),
2968                        var_length.value(), value);
2969   Increment(&var_length, 1, mode);
2970 
2971   Node* length = ParameterToTagged(var_length.value(), mode);
2972   StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
2973 }
2974 
2975 Node* CodeStubAssembler::AllocateCellWithValue(Node* value,
2976                                                WriteBarrierMode mode) {
2977   Node* result = Allocate(Cell::kSize, kNone);
2978   StoreMapNoWriteBarrier(result, Heap::kCellMapRootIndex);
2979   StoreCellValue(result, value, mode);
2980   return result;
2981 }
2982 
2983 Node* CodeStubAssembler::LoadCellValue(Node* cell) {
2984   CSA_SLOW_ASSERT(this, HasInstanceType(cell, CELL_TYPE));
2985   return LoadObjectField(cell, Cell::kValueOffset);
2986 }
2987 
2988 Node* CodeStubAssembler::StoreCellValue(Node* cell, Node* value,
2989                                         WriteBarrierMode mode) {
2990   CSA_SLOW_ASSERT(this, HasInstanceType(cell, CELL_TYPE));
2991   DCHECK(mode == SKIP_WRITE_BARRIER || mode == UPDATE_WRITE_BARRIER);
2992 
2993   if (mode == UPDATE_WRITE_BARRIER) {
2994     return StoreObjectField(cell, Cell::kValueOffset, value);
2995   } else {
2996     return StoreObjectFieldNoWriteBarrier(cell, Cell::kValueOffset, value);
2997   }
2998 }
2999 
3000 TNode<HeapNumber> CodeStubAssembler::AllocateHeapNumber() {
3001   Node* result = Allocate(HeapNumber::kSize, kNone);
3002   Heap::RootListIndex heap_map_index = Heap::kHeapNumberMapRootIndex;
3003   StoreMapNoWriteBarrier(result, heap_map_index);
3004   return UncheckedCast<HeapNumber>(result);
3005 }
3006 
3007 TNode<HeapNumber> CodeStubAssembler::AllocateHeapNumberWithValue(
3008     SloppyTNode<Float64T> value) {
3009   TNode<HeapNumber> result = AllocateHeapNumber();
3010   StoreHeapNumberValue(result, value);
3011   return result;
3012 }
3013 
3014 TNode<MutableHeapNumber> CodeStubAssembler::AllocateMutableHeapNumber() {
3015   Node* result = Allocate(MutableHeapNumber::kSize, kNone);
3016   Heap::RootListIndex heap_map_index = Heap::kMutableHeapNumberMapRootIndex;
3017   StoreMapNoWriteBarrier(result, heap_map_index);
3018   return UncheckedCast<MutableHeapNumber>(result);
3019 }
3020 
3021 TNode<MutableHeapNumber> CodeStubAssembler::AllocateMutableHeapNumberWithValue(
3022     SloppyTNode<Float64T> value) {
3023   TNode<MutableHeapNumber> result = AllocateMutableHeapNumber();
3024   StoreMutableHeapNumberValue(result, value);
3025   return result;
3026 }
3027 
3028 TNode<BigInt> CodeStubAssembler::AllocateBigInt(TNode<IntPtrT> length) {
3029   TNode<BigInt> result = AllocateRawBigInt(length);
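  // Pack the length into the bitfield via BigInt::LengthBits; the remaining
  // bitfield bits (e.g. the sign) stay zero.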
3030   StoreBigIntBitfield(result, WordShl(length, BigInt::LengthBits::kShift));
3031   return result;
3032 }
3033 
3034 TNode<BigInt> CodeStubAssembler::AllocateRawBigInt(TNode<IntPtrT> length) {
3035   // This is currently used only for 64-bit wide BigInts. If more general
3036   // applicability is required, a large-object check must be added.
3037   CSA_ASSERT(this, UintPtrLessThan(length, IntPtrConstant(3)));
3038 
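  // The allocation size is the BigInt header plus |length| pointer-sized
  // digits.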
3039   TNode<IntPtrT> size = IntPtrAdd(IntPtrConstant(BigInt::kHeaderSize),
3040                                   Signed(WordShl(length, kPointerSizeLog2)));
3041   Node* raw_result = Allocate(size, kNone);
3042   StoreMapNoWriteBarrier(raw_result, Heap::kBigIntMapRootIndex);
3043   return UncheckedCast<BigInt>(raw_result);
3044 }
3045 
3046 void CodeStubAssembler::StoreBigIntBitfield(TNode<BigInt> bigint,
3047                                             TNode<WordT> bitfield) {
3048   StoreObjectFieldNoWriteBarrier(bigint, BigInt::kBitfieldOffset, bitfield,
3049                                  MachineType::PointerRepresentation());
3050 }
3051 
3052 void CodeStubAssembler::StoreBigIntDigit(TNode<BigInt> bigint, int digit_index,
3053                                          TNode<UintPtrT> digit) {
3054   StoreObjectFieldNoWriteBarrier(
3055       bigint, BigInt::kDigitsOffset + digit_index * kPointerSize, digit,
3056       UintPtrT::kMachineRepresentation);
3057 }
3058 
3059 TNode<WordT> CodeStubAssembler::LoadBigIntBitfield(TNode<BigInt> bigint) {
3060   return UncheckedCast<WordT>(
3061       LoadObjectField(bigint, BigInt::kBitfieldOffset, MachineType::UintPtr()));
3062 }
3063 
3064 TNode<UintPtrT> CodeStubAssembler::LoadBigIntDigit(TNode<BigInt> bigint,
3065                                                    int digit_index) {
3066   return UncheckedCast<UintPtrT>(LoadObjectField(
3067       bigint, BigInt::kDigitsOffset + digit_index * kPointerSize,
3068       MachineType::UintPtr()));
3069 }
3070 
3071 TNode<String> CodeStubAssembler::AllocateSeqOneByteString(
3072     int length, AllocationFlags flags) {
3073   Comment("AllocateSeqOneByteString");
3074   if (length == 0) {
3075     return CAST(LoadRoot(Heap::kempty_stringRootIndex));
3076   }
3077   Node* result = Allocate(SeqOneByteString::SizeFor(length), flags);
3078   DCHECK(Heap::RootIsImmortalImmovable(Heap::kOneByteStringMapRootIndex));
3079   StoreMapNoWriteBarrier(result, Heap::kOneByteStringMapRootIndex);
3080   StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
3081                                  SmiConstant(length),
3082                                  MachineRepresentation::kTagged);
3083   StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldSlot,
3084                                  IntPtrConstant(String::kEmptyHashField),
3085                                  MachineType::PointerRepresentation());
3086   return CAST(result);
3087 }
3088 
3089 TNode<BoolT> CodeStubAssembler::IsZeroOrContext(SloppyTNode<Object> object) {
3090   return Select<BoolT>(WordEqual(object, SmiConstant(0)),
3091                        [=] { return Int32TrueConstant(); },
3092                        [=] { return IsContext(CAST(object)); });
3093 }
3094 
3095 TNode<String> CodeStubAssembler::AllocateSeqOneByteString(
3096     Node* context, TNode<Smi> length, AllocationFlags flags) {
3097   Comment("AllocateSeqOneByteString");
3098   CSA_SLOW_ASSERT(this, IsZeroOrContext(context));
3099   VARIABLE(var_result, MachineRepresentation::kTagged);
3100 
3101   // Compute the SeqOneByteString size and check if it fits into new space.
3102   Label if_lengthiszero(this), if_sizeissmall(this),
3103       if_notsizeissmall(this, Label::kDeferred), if_join(this);
3104   GotoIf(SmiEqual(length, SmiConstant(0)), &if_lengthiszero);
3105 
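  // kObjectAlignmentMask is added to the header size so that masking below
  // rounds the total allocation size up to object alignment.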
3106   Node* raw_size = GetArrayAllocationSize(
3107       SmiUntag(length), UINT8_ELEMENTS, INTPTR_PARAMETERS,
3108       SeqOneByteString::kHeaderSize + kObjectAlignmentMask);
3109   Node* size = WordAnd(raw_size, IntPtrConstant(~kObjectAlignmentMask));
3110   Branch(IntPtrLessThanOrEqual(size, IntPtrConstant(kMaxRegularHeapObjectSize)),
3111          &if_sizeissmall, &if_notsizeissmall);
3112 
3113   BIND(&if_sizeissmall);
3114   {
3115     // Just allocate the SeqOneByteString in new space.
3116     Node* result = AllocateInNewSpace(size, flags);
3117     DCHECK(Heap::RootIsImmortalImmovable(Heap::kOneByteStringMapRootIndex));
3118     StoreMapNoWriteBarrier(result, Heap::kOneByteStringMapRootIndex);
3119     StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
3120                                    length, MachineRepresentation::kTagged);
3121     StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldSlot,
3122                                    IntPtrConstant(String::kEmptyHashField),
3123                                    MachineType::PointerRepresentation());
3124     var_result.Bind(result);
3125     Goto(&if_join);
3126   }
3127 
3128   BIND(&if_notsizeissmall);
3129   {
3130     // We might need to allocate in large object space; go to the runtime.
3131     Node* result =
3132         CallRuntime(Runtime::kAllocateSeqOneByteString, context, length);
3133     var_result.Bind(result);
3134     Goto(&if_join);
3135   }
3136 
3137   BIND(&if_lengthiszero);
3138   {
3139     var_result.Bind(LoadRoot(Heap::kempty_stringRootIndex));
3140     Goto(&if_join);
3141   }
3142 
3143   BIND(&if_join);
3144   return CAST(var_result.value());
3145 }
3146 
3147 TNode<String> CodeStubAssembler::AllocateSeqTwoByteString(
3148     int length, AllocationFlags flags) {
3149   Comment("AllocateSeqTwoByteString");
3150   if (length == 0) {
3151     return CAST(LoadRoot(Heap::kempty_stringRootIndex));
3152   }
3153   Node* result = Allocate(SeqTwoByteString::SizeFor(length), flags);
3154   DCHECK(Heap::RootIsImmortalImmovable(Heap::kStringMapRootIndex));
3155   StoreMapNoWriteBarrier(result, Heap::kStringMapRootIndex);
3156   StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset,
3157                                  SmiConstant(Smi::FromInt(length)),
3158                                  MachineRepresentation::kTagged);
3159   StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldSlot,
3160                                  IntPtrConstant(String::kEmptyHashField),
3161                                  MachineType::PointerRepresentation());
3162   return CAST(result);
3163 }
3164 
3165 TNode<String> CodeStubAssembler::AllocateSeqTwoByteString(
3166     Node* context, TNode<Smi> length, AllocationFlags flags) {
3167   CSA_SLOW_ASSERT(this, IsZeroOrContext(context));
3168   Comment("AllocateSeqTwoByteString");
3169   VARIABLE(var_result, MachineRepresentation::kTagged);
3170 
3171   // Compute the SeqTwoByteString size and check if it fits into new space.
3172   Label if_lengthiszero(this), if_sizeissmall(this),
3173       if_notsizeissmall(this, Label::kDeferred), if_join(this);
3174   GotoIf(SmiEqual(length, SmiConstant(0)), &if_lengthiszero);
3175 
3176   Node* raw_size = GetArrayAllocationSize(
3177       SmiUntag(length), UINT16_ELEMENTS, INTPTR_PARAMETERS,
3178       SeqOneByteString::kHeaderSize + kObjectAlignmentMask);
3179   Node* size = WordAnd(raw_size, IntPtrConstant(~kObjectAlignmentMask));
3180   Branch(IntPtrLessThanOrEqual(size, IntPtrConstant(kMaxRegularHeapObjectSize)),
3181          &if_sizeissmall, &if_notsizeissmall);
3182 
3183   BIND(&if_sizeissmall);
3184   {
3185     // Just allocate the SeqTwoByteString in new space.
3186     Node* result = AllocateInNewSpace(size, flags);
3187     DCHECK(Heap::RootIsImmortalImmovable(Heap::kStringMapRootIndex));
3188     StoreMapNoWriteBarrier(result, Heap::kStringMapRootIndex);
3189     StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset,
3190                                    length, MachineRepresentation::kTagged);
3191     StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldSlot,
3192                                    IntPtrConstant(String::kEmptyHashField),
3193                                    MachineType::PointerRepresentation());
3194     var_result.Bind(result);
3195     Goto(&if_join);
3196   }
3197 
3198   BIND(&if_notsizeissmall);
3199   {
3200     // We might need to allocate in large object space; go to the runtime.
3201     Node* result =
3202         CallRuntime(Runtime::kAllocateSeqTwoByteString, context, length);
3203     var_result.Bind(result);
3204     Goto(&if_join);
3205   }
3206 
3207   BIND(&if_lengthiszero);
3208   {
3209     var_result.Bind(LoadRoot(Heap::kempty_stringRootIndex));
3210     Goto(&if_join);
3211   }
3212 
3213   BIND(&if_join);
3214   return CAST(var_result.value());
3215 }
3216 
3217 TNode<String> CodeStubAssembler::AllocateSlicedString(
3218     Heap::RootListIndex map_root_index, TNode<Smi> length, TNode<String> parent,
3219     TNode<Smi> offset) {
3220   DCHECK(map_root_index == Heap::kSlicedOneByteStringMapRootIndex ||
3221          map_root_index == Heap::kSlicedStringMapRootIndex);
3222   Node* result = Allocate(SlicedString::kSize);
3223   DCHECK(Heap::RootIsImmortalImmovable(map_root_index));
3224   StoreMapNoWriteBarrier(result, map_root_index);
3225   StoreObjectFieldNoWriteBarrier(result, SlicedString::kLengthOffset, length,
3226                                  MachineRepresentation::kTagged);
3227   StoreObjectFieldNoWriteBarrier(result, SlicedString::kHashFieldSlot,
3228                                  IntPtrConstant(String::kEmptyHashField),
3229                                  MachineType::PointerRepresentation());
3230   StoreObjectFieldNoWriteBarrier(result, SlicedString::kParentOffset, parent,
3231                                  MachineRepresentation::kTagged);
3232   StoreObjectFieldNoWriteBarrier(result, SlicedString::kOffsetOffset, offset,
3233                                  MachineRepresentation::kTagged);
3234   return CAST(result);
3235 }
3236 
3237 TNode<String> CodeStubAssembler::AllocateSlicedOneByteString(
3238     TNode<Smi> length, TNode<String> parent, TNode<Smi> offset) {
3239   return AllocateSlicedString(Heap::kSlicedOneByteStringMapRootIndex, length,
3240                               parent, offset);
3241 }
3242 
3243 TNode<String> CodeStubAssembler::AllocateSlicedTwoByteString(
3244     TNode<Smi> length, TNode<String> parent, TNode<Smi> offset) {
3245   return AllocateSlicedString(Heap::kSlicedStringMapRootIndex, length, parent,
3246                               offset);
3247 }
3248 
3249 TNode<String> CodeStubAssembler::AllocateConsString(
3250     Heap::RootListIndex map_root_index, TNode<Smi> length, TNode<String> first,
3251     TNode<String> second, AllocationFlags flags) {
3252   DCHECK(map_root_index == Heap::kConsOneByteStringMapRootIndex ||
3253          map_root_index == Heap::kConsStringMapRootIndex);
3254   Node* result = Allocate(ConsString::kSize, flags);
3255   DCHECK(Heap::RootIsImmortalImmovable(map_root_index));
3256   StoreMapNoWriteBarrier(result, map_root_index);
3257   StoreObjectFieldNoWriteBarrier(result, ConsString::kLengthOffset, length,
3258                                  MachineRepresentation::kTagged);
3259   StoreObjectFieldNoWriteBarrier(result, ConsString::kHashFieldSlot,
3260                                  IntPtrConstant(String::kEmptyHashField),
3261                                  MachineType::PointerRepresentation());
3262   bool const new_space = !(flags & kPretenured);
3263   if (new_space) {
3264     StoreObjectFieldNoWriteBarrier(result, ConsString::kFirstOffset, first,
3265                                    MachineRepresentation::kTagged);
3266     StoreObjectFieldNoWriteBarrier(result, ConsString::kSecondOffset, second,
3267                                    MachineRepresentation::kTagged);
3268   } else {
3269     StoreObjectField(result, ConsString::kFirstOffset, first);
3270     StoreObjectField(result, ConsString::kSecondOffset, second);
3271   }
3272   return CAST(result);
3273 }
3274 
3275 TNode<String> CodeStubAssembler::AllocateOneByteConsString(
3276     TNode<Smi> length, TNode<String> first, TNode<String> second,
3277     AllocationFlags flags) {
3278   return AllocateConsString(Heap::kConsOneByteStringMapRootIndex, length, first,
3279                             second, flags);
3280 }
3281 
3282 TNode<String> CodeStubAssembler::AllocateTwoByteConsString(
3283     TNode<Smi> length, TNode<String> first, TNode<String> second,
3284     AllocationFlags flags) {
3285   return AllocateConsString(Heap::kConsStringMapRootIndex, length, first,
3286                             second, flags);
3287 }
3288 
3289 TNode<String> CodeStubAssembler::NewConsString(Node* context, TNode<Smi> length,
3290                                                TNode<String> left,
3291                                                TNode<String> right,
3292                                                AllocationFlags flags) {
3293   CSA_ASSERT(this, IsContext(context));
3294   // The result of the string addition can be a cons string.
3295   Comment("Allocating ConsString");
3296   Node* left_instance_type = LoadInstanceType(left);
3297   Node* right_instance_type = LoadInstanceType(right);
3298 
3299   // Compute intersection and difference of instance types.
3300   Node* anded_instance_types =
3301       Word32And(left_instance_type, right_instance_type);
3302   Node* xored_instance_types =
3303       Word32Xor(left_instance_type, right_instance_type);
3304 
3305   // We create a one-byte cons string if
3306   // 1. both strings are one-byte, or
3307   // 2. at least one of the strings is two-byte, but happens to contain only
3308   //    one-byte characters.
3309   // To do this, we check
3310   // 1. if both strings are one-byte, or if the one-byte data hint is set in
3311   //    both strings, or
3312   // 2. if one of the strings has the one-byte data hint set and the other
3313   //    string is one-byte.
3314   STATIC_ASSERT(kOneByteStringTag != 0);
3315   STATIC_ASSERT(kOneByteDataHintTag != 0);
3316   Label one_byte_map(this);
3317   Label two_byte_map(this);
3318   TVARIABLE(String, result);
3319   Label done(this, &result);
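  // The AND of the instance types is used for check 1 (both strings are
  // one-byte, or both carry the one-byte data hint); the XOR is used for
  // check 2 (exactly one string is one-byte while the other carries the
  // one-byte data hint).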
3320   GotoIf(IsSetWord32(anded_instance_types,
3321                      kStringEncodingMask | kOneByteDataHintTag),
3322          &one_byte_map);
3323   Branch(Word32NotEqual(Word32And(xored_instance_types,
3324                                   Int32Constant(kStringEncodingMask |
3325                                                 kOneByteDataHintMask)),
3326                         Int32Constant(kOneByteStringTag | kOneByteDataHintTag)),
3327          &two_byte_map, &one_byte_map);
3328 
3329   BIND(&one_byte_map);
3330   Comment("One-byte ConsString");
3331   result = AllocateOneByteConsString(length, left, right, flags);
3332   Goto(&done);
3333 
3334   BIND(&two_byte_map);
3335   Comment("Two-byte ConsString");
3336   result = AllocateTwoByteConsString(length, left, right, flags);
3337   Goto(&done);
3338 
3339   BIND(&done);
3340 
3341   return result.value();
3342 }
3343 
3344 TNode<NameDictionary> CodeStubAssembler::AllocateNameDictionary(
3345     int at_least_space_for) {
3346   return AllocateNameDictionary(IntPtrConstant(at_least_space_for));
3347 }
3348 
3349 TNode<NameDictionary> CodeStubAssembler::AllocateNameDictionary(
3350     TNode<IntPtrT> at_least_space_for) {
3351   CSA_ASSERT(this, UintPtrLessThanOrEqual(
3352                        at_least_space_for,
3353                        IntPtrConstant(NameDictionary::kMaxCapacity)));
3354   TNode<IntPtrT> capacity = HashTableComputeCapacity(at_least_space_for);
3355   return AllocateNameDictionaryWithCapacity(capacity);
3356 }
3357 
3358 TNode<NameDictionary> CodeStubAssembler::AllocateNameDictionaryWithCapacity(
3359     TNode<IntPtrT> capacity) {
3360   CSA_ASSERT(this, WordIsPowerOfTwo(capacity));
3361   CSA_ASSERT(this, IntPtrGreaterThan(capacity, IntPtrConstant(0)));
3362   TNode<IntPtrT> length = EntryToIndex<NameDictionary>(capacity);
3363   TNode<WordT> store_size = IntPtrAdd(
3364       TimesPointerSize(length), IntPtrConstant(NameDictionary::kHeaderSize));
3365 
3366   TNode<NameDictionary> result =
3367       UncheckedCast<NameDictionary>(AllocateInNewSpace(store_size));
3368   Comment("Initialize NameDictionary");
3369   // Initialize FixedArray fields.
3370   DCHECK(Heap::RootIsImmortalImmovable(Heap::kNameDictionaryMapRootIndex));
3371   StoreMapNoWriteBarrier(result, Heap::kNameDictionaryMapRootIndex);
3372   StoreObjectFieldNoWriteBarrier(result, FixedArray::kLengthOffset,
3373                                  SmiFromIntPtr(length));
3374   // Initialize HashTable fields.
3375   TNode<Smi> zero = SmiConstant(0);
3376   StoreFixedArrayElement(result, NameDictionary::kNumberOfElementsIndex, zero,
3377                          SKIP_WRITE_BARRIER);
3378   StoreFixedArrayElement(result, NameDictionary::kNumberOfDeletedElementsIndex,
3379                          zero, SKIP_WRITE_BARRIER);
3380   StoreFixedArrayElement(result, NameDictionary::kCapacityIndex,
3381                          SmiTag(capacity), SKIP_WRITE_BARRIER);
3382   // Initialize Dictionary fields.
3383   TNode<HeapObject> filler = UndefinedConstant();
3384   StoreFixedArrayElement(result, NameDictionary::kNextEnumerationIndexIndex,
3385                          SmiConstant(PropertyDetails::kInitialIndex),
3386                          SKIP_WRITE_BARRIER);
3387   StoreFixedArrayElement(result, NameDictionary::kObjectHashIndex,
3388                          SmiConstant(PropertyArray::kNoHashSentinel),
3389                          SKIP_WRITE_BARRIER);
3390 
3391   // Initialize NameDictionary elements.
3392   TNode<WordT> result_word = BitcastTaggedToWord(result);
3393   TNode<WordT> start_address = IntPtrAdd(
3394       result_word, IntPtrConstant(NameDictionary::OffsetOfElementAt(
3395                                       NameDictionary::kElementsStartIndex) -
3396                                   kHeapObjectTag));
3397   TNode<WordT> end_address = IntPtrAdd(
3398       result_word, IntPtrSub(store_size, IntPtrConstant(kHeapObjectTag)));
3399   StoreFieldsNoWriteBarrier(start_address, end_address, filler);
3400   return result;
3401 }
3402 
3403 TNode<NameDictionary> CodeStubAssembler::CopyNameDictionary(
3404     TNode<NameDictionary> dictionary, Label* large_object_fallback) {
3405   Comment("Copy boilerplate property dict");
3406   TNode<IntPtrT> capacity = SmiUntag(GetCapacity<NameDictionary>(dictionary));
3407   CSA_ASSERT(this, IntPtrGreaterThanOrEqual(capacity, IntPtrConstant(0)));
3408   GotoIf(UintPtrGreaterThan(
3409              capacity, IntPtrConstant(NameDictionary::kMaxRegularCapacity)),
3410          large_object_fallback);
3411   TNode<NameDictionary> properties =
3412       AllocateNameDictionaryWithCapacity(capacity);
3413   TNode<IntPtrT> length = SmiUntag(LoadFixedArrayBaseLength(dictionary));
3414   CopyFixedArrayElements(PACKED_ELEMENTS, dictionary, properties, length,
3415                          SKIP_WRITE_BARRIER, INTPTR_PARAMETERS);
3416   return properties;
3417 }
3418 
3419 template <typename CollectionType>
3420 Node* CodeStubAssembler::AllocateOrderedHashTable() {
3421   static const int kCapacity = CollectionType::kMinCapacity;
3422   static const int kBucketCount = kCapacity / CollectionType::kLoadFactor;
3423   static const int kDataTableLength = kCapacity * CollectionType::kEntrySize;
3424   static const int kFixedArrayLength =
3425       CollectionType::kHashTableStartIndex + kBucketCount + kDataTableLength;
3426   static const int kDataTableStartIndex =
3427       CollectionType::kHashTableStartIndex + kBucketCount;
3428 
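  // The backing FixedArray thus holds a small header (element count, deleted
  // element count, bucket count), then kBucketCount bucket heads starting at
  // kHashTableStartIndex, then kDataTableLength entry slots.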
3429   STATIC_ASSERT(base::bits::IsPowerOfTwo(kCapacity));
3430   STATIC_ASSERT(kCapacity <= CollectionType::kMaxCapacity);
3431 
3432   // Allocate the table and add the proper map.
3433   const ElementsKind elements_kind = HOLEY_ELEMENTS;
3434   TNode<IntPtrT> length_intptr = IntPtrConstant(kFixedArrayLength);
3435   TNode<Map> fixed_array_map = CAST(LoadRoot(
3436       static_cast<Heap::RootListIndex>(CollectionType::GetMapRootIndex())));
3437   TNode<FixedArray> table =
3438       CAST(AllocateFixedArray(elements_kind, length_intptr,
3439                               kAllowLargeObjectAllocation, fixed_array_map));
3440 
3441   // Initialize the OrderedHashTable fields.
3442   const WriteBarrierMode barrier_mode = SKIP_WRITE_BARRIER;
3443   StoreFixedArrayElement(table, CollectionType::kNumberOfElementsIndex,
3444                          SmiConstant(0), barrier_mode);
3445   StoreFixedArrayElement(table, CollectionType::kNumberOfDeletedElementsIndex,
3446                          SmiConstant(0), barrier_mode);
3447   StoreFixedArrayElement(table, CollectionType::kNumberOfBucketsIndex,
3448                          SmiConstant(kBucketCount), barrier_mode);
3449 
3450   // Fill the buckets with kNotFound.
3451   TNode<Smi> not_found = SmiConstant(CollectionType::kNotFound);
3452   STATIC_ASSERT(CollectionType::kHashTableStartIndex ==
3453                 CollectionType::kNumberOfBucketsIndex + 1);
3454   STATIC_ASSERT((CollectionType::kHashTableStartIndex + kBucketCount) ==
3455                 kDataTableStartIndex);
3456   for (int i = 0; i < kBucketCount; i++) {
3457     StoreFixedArrayElement(table, CollectionType::kHashTableStartIndex + i,
3458                            not_found, barrier_mode);
3459   }
3460 
3461   // Fill the data table with undefined.
3462   STATIC_ASSERT(kDataTableStartIndex + kDataTableLength == kFixedArrayLength);
3463   for (int i = 0; i < kDataTableLength; i++) {
3464     StoreFixedArrayElement(table, kDataTableStartIndex + i, UndefinedConstant(),
3465                            barrier_mode);
3466   }
3467 
3468   return table;
3469 }
3470 
3471 template Node* CodeStubAssembler::AllocateOrderedHashTable<OrderedHashMap>();
3472 template Node* CodeStubAssembler::AllocateOrderedHashTable<OrderedHashSet>();
3473 
3474 template <typename CollectionType>
3475 TNode<CollectionType> CodeStubAssembler::AllocateSmallOrderedHashTable(
3476     TNode<IntPtrT> capacity) {
3477   CSA_ASSERT(this, WordIsPowerOfTwo(capacity));
3478   CSA_ASSERT(this, IntPtrLessThan(
3479                        capacity, IntPtrConstant(CollectionType::kMaxCapacity)));
3480 
3481   TNode<IntPtrT> data_table_start_offset =
3482       IntPtrConstant(CollectionType::kDataTableStartOffset);
3483 
3484   TNode<IntPtrT> data_table_size = IntPtrMul(
3485       capacity, IntPtrConstant(CollectionType::kEntrySize * kPointerSize));
3486 
3487   TNode<Int32T> hash_table_size =
3488       Int32Div(TruncateIntPtrToInt32(capacity),
3489                Int32Constant(CollectionType::kLoadFactor));
3490 
3491   TNode<IntPtrT> hash_table_start_offset =
3492       IntPtrAdd(data_table_start_offset, data_table_size);
3493 
3494   TNode<IntPtrT> hash_table_and_chain_table_size =
3495       IntPtrAdd(ChangeInt32ToIntPtr(hash_table_size), capacity);
3496 
3497   TNode<IntPtrT> total_size =
3498       IntPtrAdd(hash_table_start_offset, hash_table_and_chain_table_size);
3499 
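  // Adding (kPointerSize - 1) and then dividing and multiplying by
  // kPointerSize rounds the total size up to a whole number of words.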
3500   TNode<IntPtrT> total_size_word_aligned =
3501       IntPtrAdd(total_size, IntPtrConstant(kPointerSize - 1));
3502   total_size_word_aligned = ChangeInt32ToIntPtr(
3503       Int32Div(TruncateIntPtrToInt32(total_size_word_aligned),
3504                Int32Constant(kPointerSize)));
3505   total_size_word_aligned =
3506       UncheckedCast<IntPtrT>(TimesPointerSize(total_size_word_aligned));
3507 
3508   // Allocate the table and add the proper map.
3509   TNode<Map> small_ordered_hash_map = CAST(LoadRoot(
3510       static_cast<Heap::RootListIndex>(CollectionType::GetMapRootIndex())));
3511   TNode<Object> table_obj = CAST(AllocateInNewSpace(total_size_word_aligned));
3512   StoreMapNoWriteBarrier(table_obj, small_ordered_hash_map);
3513   TNode<CollectionType> table = UncheckedCast<CollectionType>(table_obj);
3514 
3515   // Initialize the SmallOrderedHashTable fields.
3516   StoreObjectByteNoWriteBarrier(
3517       table, CollectionType::kNumberOfBucketsOffset,
3518       Word32And(Int32Constant(0xFF), hash_table_size));
3519   StoreObjectByteNoWriteBarrier(table, CollectionType::kNumberOfElementsOffset,
3520                                 Int32Constant(0));
3521   StoreObjectByteNoWriteBarrier(
3522       table, CollectionType::kNumberOfDeletedElementsOffset, Int32Constant(0));
3523 
3524   TNode<IntPtrT> table_address =
3525       IntPtrSub(BitcastTaggedToWord(table), IntPtrConstant(kHeapObjectTag));
3526   TNode<IntPtrT> hash_table_start_address =
3527       IntPtrAdd(table_address, hash_table_start_offset);
3528 
3529   // Initialize the HashTable part.
3530   Node* memset = ExternalConstant(ExternalReference::libc_memset_function());
3531   CallCFunction3(MachineType::AnyTagged(), MachineType::Pointer(),
3532                  MachineType::IntPtr(), MachineType::UintPtr(), memset,
3533                  hash_table_start_address, IntPtrConstant(0xFF),
3534                  hash_table_and_chain_table_size);
3535 
3536   // Initialize the DataTable part.
3537   TNode<HeapObject> filler = TheHoleConstant();
3538   TNode<WordT> data_table_start_address =
3539       IntPtrAdd(table_address, data_table_start_offset);
3540   TNode<WordT> data_table_end_address =
3541       IntPtrAdd(data_table_start_address, data_table_size);
3542   StoreFieldsNoWriteBarrier(data_table_start_address, data_table_end_address,
3543                             filler);
3544 
3545   return table;
3546 }
3547 
3548 template TNode<SmallOrderedHashMap>
3549 CodeStubAssembler::AllocateSmallOrderedHashTable<SmallOrderedHashMap>(
3550     TNode<IntPtrT> capacity);
3551 template TNode<SmallOrderedHashSet>
3552 CodeStubAssembler::AllocateSmallOrderedHashTable<SmallOrderedHashSet>(
3553     TNode<IntPtrT> capacity);
3554 
3555 template <typename CollectionType>
3556 void CodeStubAssembler::FindOrderedHashTableEntry(
3557     Node* table, Node* hash,
3558     std::function<void(Node*, Label*, Label*)> key_compare,
3559     Variable* entry_start_position, Label* entry_found, Label* not_found) {
3560   // Get the index of the bucket.
3561   Node* const number_of_buckets = SmiUntag(CAST(LoadFixedArrayElement(
3562       CAST(table), CollectionType::kNumberOfBucketsIndex)));
3563   Node* const bucket =
3564       WordAnd(hash, IntPtrSub(number_of_buckets, IntPtrConstant(1)));
3565   Node* const first_entry = SmiUntag(CAST(LoadFixedArrayElement(
3566       CAST(table), bucket,
3567       CollectionType::kHashTableStartIndex * kPointerSize)));
3568 
3569   // Walk the bucket chain.
3570   Node* entry_start;
3571   Label if_key_found(this);
3572   {
3573     VARIABLE(var_entry, MachineType::PointerRepresentation(), first_entry);
3574     Label loop(this, {&var_entry, entry_start_position}),
3575         continue_next_entry(this);
3576     Goto(&loop);
3577     BIND(&loop);
3578 
3579     // If the entry index is the not-found sentinel, we are done.
3580     GotoIf(
3581         WordEqual(var_entry.value(), IntPtrConstant(CollectionType::kNotFound)),
3582         not_found);
3583 
3584     // Make sure the entry index is within range.
3585     CSA_ASSERT(
3586         this, UintPtrLessThan(
3587                   var_entry.value(),
3588                   SmiUntag(SmiAdd(
3589                       CAST(LoadFixedArrayElement(
3590                           CAST(table), CollectionType::kNumberOfElementsIndex)),
3591                       CAST(LoadFixedArrayElement(
3592                           CAST(table),
3593                           CollectionType::kNumberOfDeletedElementsIndex))))));
3594 
3595     // Compute the index of the entry relative to kHashTableStartIndex.
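    // Entries are laid out after the bucket area, so the field index is
    // number_of_buckets + entry * kEntrySize.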
3596     entry_start =
3597         IntPtrAdd(IntPtrMul(var_entry.value(),
3598                             IntPtrConstant(CollectionType::kEntrySize)),
3599                   number_of_buckets);
3600 
3601     // Load the key from the entry.
3602     Node* const candidate_key = LoadFixedArrayElement(
3603         CAST(table), entry_start,
3604         CollectionType::kHashTableStartIndex * kPointerSize);
3605 
3606     key_compare(candidate_key, &if_key_found, &continue_next_entry);
3607 
3608     BIND(&continue_next_entry);
3609     // Load the index of the next entry in the bucket chain.
3610     var_entry.Bind(SmiUntag(CAST(LoadFixedArrayElement(
3611         CAST(table), entry_start,
3612         (CollectionType::kHashTableStartIndex + CollectionType::kChainOffset) *
3613             kPointerSize))));
3614 
3615     Goto(&loop);
3616   }
3617 
3618   BIND(&if_key_found);
3619   entry_start_position->Bind(entry_start);
3620   Goto(entry_found);
3621 }
3622 
3623 template void CodeStubAssembler::FindOrderedHashTableEntry<OrderedHashMap>(
3624     Node* table, Node* hash,
3625     std::function<void(Node*, Label*, Label*)> key_compare,
3626     Variable* entry_start_position, Label* entry_found, Label* not_found);
3627 template void CodeStubAssembler::FindOrderedHashTableEntry<OrderedHashSet>(
3628     Node* table, Node* hash,
3629     std::function<void(Node*, Label*, Label*)> key_compare,
3630     Variable* entry_start_position, Label* entry_found, Label* not_found);
3631 
3632 Node* CodeStubAssembler::AllocateStruct(Node* map, AllocationFlags flags) {
3633   Comment("AllocateStruct");
3634   CSA_ASSERT(this, IsMap(map));
3635   Node* size = TimesPointerSize(LoadMapInstanceSizeInWords(map));
3636   Node* object = Allocate(size, flags);
3637   StoreMapNoWriteBarrier(object, map);
3638   InitializeStructBody(object, map, size, Struct::kHeaderSize);
3639   return object;
3640 }
3641 
3642 void CodeStubAssembler::InitializeStructBody(Node* object, Node* map,
3643                                              Node* size, int start_offset) {
3644   CSA_SLOW_ASSERT(this, IsMap(map));
3645   Comment("InitializeStructBody");
3646   Node* filler = UndefinedConstant();
3647   // Calculate the untagged field addresses.
3648   object = BitcastTaggedToWord(object);
3649   Node* start_address =
3650       IntPtrAdd(object, IntPtrConstant(start_offset - kHeapObjectTag));
3651   Node* end_address =
3652       IntPtrSub(IntPtrAdd(object, size), IntPtrConstant(kHeapObjectTag));
3653   StoreFieldsNoWriteBarrier(start_address, end_address, filler);
3654 }
3655 
3656 Node* CodeStubAssembler::AllocateJSObjectFromMap(
3657     Node* map, Node* properties, Node* elements, AllocationFlags flags,
3658     SlackTrackingMode slack_tracking_mode) {
3659   CSA_ASSERT(this, IsMap(map));
3660   CSA_ASSERT(this, Word32BinaryNot(IsJSFunctionMap(map)));
3661   CSA_ASSERT(this, Word32BinaryNot(InstanceTypeEqual(LoadMapInstanceType(map),
3662                                                      JS_GLOBAL_OBJECT_TYPE)));
3663   Node* instance_size = TimesPointerSize(LoadMapInstanceSizeInWords(map));
3664   Node* object = AllocateInNewSpace(instance_size, flags);
3665   StoreMapNoWriteBarrier(object, map);
3666   InitializeJSObjectFromMap(object, map, instance_size, properties, elements,
3667                             slack_tracking_mode);
3668   return object;
3669 }
3670 
3671 void CodeStubAssembler::InitializeJSObjectFromMap(
3672     Node* object, Node* map, Node* instance_size, Node* properties,
3673     Node* elements, SlackTrackingMode slack_tracking_mode) {
3674   CSA_SLOW_ASSERT(this, IsMap(map));
3675   // This helper assumes that the object is in new-space, as guarded by the
3676   // check in AllocateJSObjectFromMap.
3677   if (properties == nullptr) {
3678     CSA_ASSERT(this, Word32BinaryNot(IsDictionaryMap((map))));
3679     StoreObjectFieldRoot(object, JSObject::kPropertiesOrHashOffset,
3680                          Heap::kEmptyFixedArrayRootIndex);
3681   } else {
3682     CSA_ASSERT(this, Word32Or(Word32Or(IsPropertyArray(properties),
3683                                        IsNameDictionary(properties)),
3684                               IsEmptyFixedArray(properties)));
3685     StoreObjectFieldNoWriteBarrier(object, JSObject::kPropertiesOrHashOffset,
3686                                    properties);
3687   }
3688   if (elements == nullptr) {
3689     StoreObjectFieldRoot(object, JSObject::kElementsOffset,
3690                          Heap::kEmptyFixedArrayRootIndex);
3691   } else {
3692     CSA_ASSERT(this, IsFixedArray(elements));
3693     StoreObjectFieldNoWriteBarrier(object, JSObject::kElementsOffset, elements);
3694   }
3695   if (slack_tracking_mode == kNoSlackTracking) {
3696     InitializeJSObjectBodyNoSlackTracking(object, map, instance_size);
3697   } else {
3698     DCHECK_EQ(slack_tracking_mode, kWithSlackTracking);
3699     InitializeJSObjectBodyWithSlackTracking(object, map, instance_size);
3700   }
3701 }
3702 
3703 void CodeStubAssembler::InitializeJSObjectBodyNoSlackTracking(
3704     Node* object, Node* map, Node* instance_size, int start_offset) {
3705   STATIC_ASSERT(Map::kNoSlackTracking == 0);
3706   CSA_ASSERT(
3707       this, IsClearWord32<Map::ConstructionCounterBits>(LoadMapBitField3(map)));
3708   InitializeFieldsWithRoot(object, IntPtrConstant(start_offset), instance_size,
3709                            Heap::kUndefinedValueRootIndex);
3710 }
3711 
3712 void CodeStubAssembler::InitializeJSObjectBodyWithSlackTracking(
3713     Node* object, Node* map, Node* instance_size) {
3714   CSA_SLOW_ASSERT(this, IsMap(map));
3715   Comment("InitializeJSObjectBodyNoSlackTracking");
3716 
3717   // Perform in-object slack tracking if requested.
3718   int start_offset = JSObject::kHeaderSize;
3719   Node* bit_field3 = LoadMapBitField3(map);
3720   Label end(this), slack_tracking(this), complete(this, Label::kDeferred);
3721   STATIC_ASSERT(Map::kNoSlackTracking == 0);
3722   GotoIf(IsSetWord32<Map::ConstructionCounterBits>(bit_field3),
3723          &slack_tracking);
3724   Comment("No slack tracking");
3725   InitializeJSObjectBodyNoSlackTracking(object, map, instance_size);
3726   Goto(&end);
3727 
3728   BIND(&slack_tracking);
3729   {
3730     Comment("Decrease construction counter");
3731     // Slack tracking is only done on initial maps.
3732     CSA_ASSERT(this, IsUndefined(LoadMapBackPointer(map)));
3733     STATIC_ASSERT(Map::ConstructionCounterBits::kNext == 32);
3734     Node* new_bit_field3 = Int32Sub(
3735         bit_field3, Int32Constant(1 << Map::ConstructionCounterBits::kShift));
3736     StoreObjectFieldNoWriteBarrier(map, Map::kBitField3Offset, new_bit_field3,
3737                                    MachineRepresentation::kWord32);
3738     STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
3739 
3740     // The object still has in-object slack, therefore the |used_or_unused|
3741     // field contains the "used" value.
3742     Node* used_size = TimesPointerSize(ChangeUint32ToWord(
3743         LoadObjectField(map, Map::kUsedOrUnusedInstanceSizeInWordsOffset,
3744                         MachineType::Uint8())));
3745 
3746     Comment("iInitialize filler fields");
3747     InitializeFieldsWithRoot(object, used_size, instance_size,
3748                              Heap::kOnePointerFillerMapRootIndex);
3749 
3750     Comment("Initialize undefined fields");
3751     InitializeFieldsWithRoot(object, IntPtrConstant(start_offset), used_size,
3752                              Heap::kUndefinedValueRootIndex);
3753 
3754     STATIC_ASSERT(Map::kNoSlackTracking == 0);
3755     GotoIf(IsClearWord32<Map::ConstructionCounterBits>(new_bit_field3),
3756            &complete);
3757     Goto(&end);
3758   }
3759 
3760   // Finalize the instance size.
3761   BIND(&complete);
3762   {
3763     // CompleteInobjectSlackTrackingForMap doesn't allocate and thus doesn't
3764     // need a context.
3765     CallRuntime(Runtime::kCompleteInobjectSlackTrackingForMap,
3766                 NoContextConstant(), map);
3767     Goto(&end);
3768   }
3769 
3770   BIND(&end);
3771 }
3772 
3773 void CodeStubAssembler::StoreFieldsNoWriteBarrier(Node* start_address,
3774                                                   Node* end_address,
3775                                                   Node* value) {
3776   Comment("StoreFieldsNoWriteBarrier");
3777   CSA_ASSERT(this, WordIsWordAligned(start_address));
3778   CSA_ASSERT(this, WordIsWordAligned(end_address));
3779   BuildFastLoop(start_address, end_address,
3780                 [this, value](Node* current) {
3781                   StoreNoWriteBarrier(MachineRepresentation::kTagged, current,
3782                                       value);
3783                 },
3784                 kPointerSize, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
3785 }
3786 
3787 Node* CodeStubAssembler::AllocateUninitializedJSArrayWithoutElements(
3788     Node* array_map, Node* length, Node* allocation_site) {
3789   Comment("begin allocation of JSArray without elements");
3790   CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
3791   CSA_SLOW_ASSERT(this, IsMap(array_map));
3792   int base_size = JSArray::kSize;
3793   if (allocation_site != nullptr) {
3794     base_size += AllocationMemento::kSize;
3795   }
3796 
3797   Node* size = IntPtrConstant(base_size);
3798   Node* array =
3799       AllocateUninitializedJSArray(array_map, length, allocation_site, size);
3800   return array;
3801 }
3802 
3803 std::pair<Node*, Node*>
3804 CodeStubAssembler::AllocateUninitializedJSArrayWithElements(
3805     ElementsKind kind, Node* array_map, Node* length, Node* allocation_site,
3806     Node* capacity, ParameterMode capacity_mode) {
3807   Comment("begin allocation of JSArray with elements");
3808   CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
3809   CSA_SLOW_ASSERT(this, IsMap(array_map));
3810   int base_size = JSArray::kSize;
3811 
3812   if (allocation_site != nullptr) {
3813     base_size += AllocationMemento::kSize;
3814   }
3815 
3816   int elements_offset = base_size;
3817 
3818   // Compute space for elements.
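  // The single allocation covers the JSArray header, the optional
  // AllocationMemento, the FixedArray header and |capacity| element slots.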
3819   base_size += FixedArray::kHeaderSize;
3820   Node* size = ElementOffsetFromIndex(capacity, kind, capacity_mode, base_size);
3821 
3822   Node* array =
3823       AllocateUninitializedJSArray(array_map, length, allocation_site, size);
3824 
3825   Node* elements = InnerAllocate(array, elements_offset);
3826   StoreObjectFieldNoWriteBarrier(array, JSObject::kElementsOffset, elements);
3827   // Set up the elements object.
3828   STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kPointerSize);
3829   Heap::RootListIndex elements_map_index =
3830       IsDoubleElementsKind(kind) ? Heap::kFixedDoubleArrayMapRootIndex
3831                                  : Heap::kFixedArrayMapRootIndex;
3832   DCHECK(Heap::RootIsImmortalImmovable(elements_map_index));
3833   StoreMapNoWriteBarrier(elements, elements_map_index);
3834   TNode<Smi> capacity_smi = ParameterToTagged(capacity, capacity_mode);
3835   CSA_ASSERT(this, SmiGreaterThan(capacity_smi, SmiConstant(0)));
3836   StoreObjectFieldNoWriteBarrier(elements, FixedArray::kLengthOffset,
3837                                  capacity_smi);
3838   return {array, elements};
3839 }
3840 
3841 Node* CodeStubAssembler::AllocateUninitializedJSArray(Node* array_map,
3842                                                       Node* length,
3843                                                       Node* allocation_site,
3844                                                       Node* size_in_bytes) {
3845   CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
3846   CSA_SLOW_ASSERT(this, IsMap(array_map));
3847 
3848   // Allocate space for the JSArray and the elements FixedArray in one go.
3849   Node* array = AllocateInNewSpace(size_in_bytes);
3850 
3851   Comment("write JSArray headers");
3852   StoreMapNoWriteBarrier(array, array_map);
3853 
3854   StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
3855 
3856   StoreObjectFieldRoot(array, JSArray::kPropertiesOrHashOffset,
3857                        Heap::kEmptyFixedArrayRootIndex);
3858 
3859   if (allocation_site != nullptr) {
3860     InitializeAllocationMemento(array, IntPtrConstant(JSArray::kSize),
3861                                 allocation_site);
3862   }
3863   return array;
3864 }
3865 
3866 Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind, Node* array_map,
3867                                          Node* capacity, Node* length,
3868                                          Node* allocation_site,
3869                                          ParameterMode capacity_mode) {
3870   CSA_SLOW_ASSERT(this, IsMap(array_map));
3871   CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
3872   CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, capacity_mode));
3873 
3874   int capacity_as_constant;
3875   Node *array = nullptr, *elements = nullptr;
3876   if (IsIntPtrOrSmiConstantZero(capacity, capacity_mode)) {
3877     // Array is empty. Use the shared empty fixed array instead of allocating a
3878     // new one.
3879     array = AllocateUninitializedJSArrayWithoutElements(array_map, length,
3880                                                         allocation_site);
3881     StoreObjectFieldRoot(array, JSArray::kElementsOffset,
3882                          Heap::kEmptyFixedArrayRootIndex);
3883   } else if (TryGetIntPtrOrSmiConstantValue(capacity, &capacity_as_constant,
3884                                             capacity_mode) &&
3885              capacity_as_constant > 0) {
3886     // Allocate both array and elements object, and initialize the JSArray.
3887     std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
3888         kind, array_map, length, allocation_site, capacity, capacity_mode);
3889     // Fill in the elements with holes.
3890     FillFixedArrayWithValue(kind, elements,
3891                             IntPtrOrSmiConstant(0, capacity_mode), capacity,
3892                             Heap::kTheHoleValueRootIndex, capacity_mode);
3893   } else {
3894     Label out(this), empty(this), nonempty(this);
3895     VARIABLE(var_array, MachineRepresentation::kTagged);
3896 
3897     Branch(SmiEqual(ParameterToTagged(capacity, capacity_mode), SmiConstant(0)),
3898            &empty, &nonempty);
3899 
3900     BIND(&empty);
3901     {
3902       // Array is empty. Use the shared empty fixed array instead of allocating
3903       // a new one.
3904       var_array.Bind(AllocateUninitializedJSArrayWithoutElements(
3905           array_map, length, allocation_site));
3906       StoreObjectFieldRoot(var_array.value(), JSArray::kElementsOffset,
3907                            Heap::kEmptyFixedArrayRootIndex);
3908       Goto(&out);
3909     }
3910 
3911     BIND(&nonempty);
3912     {
3913       // Allocate both array and elements object, and initialize the JSArray.
3914       Node* array;
3915       std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
3916           kind, array_map, length, allocation_site, capacity, capacity_mode);
3917       var_array.Bind(array);
3918       // Fill in the elements with holes.
3919       FillFixedArrayWithValue(kind, elements,
3920                               IntPtrOrSmiConstant(0, capacity_mode), capacity,
3921                               Heap::kTheHoleValueRootIndex, capacity_mode);
3922       Goto(&out);
3923     }
3924 
3925     BIND(&out);
3926     array = var_array.value();
3927   }
3928 
3929   return array;
3930 }
3931 
3932 Node* CodeStubAssembler::ExtractFastJSArray(Node* context, Node* array,
3933                                             Node* begin, Node* count,
3934                                             ParameterMode mode, Node* capacity,
3935                                             Node* allocation_site) {
3936   Node* original_array_map = LoadMap(array);
3937   Node* elements_kind = LoadMapElementsKind(original_array_map);
3938 
3939   // Use the canonical map for the Array's ElementsKind.
3940   Node* native_context = LoadNativeContext(context);
3941   Node* array_map = LoadJSArrayElementsMap(elements_kind, native_context);
3942 
3943   Node* new_elements =
3944       ExtractFixedArray(LoadElements(array), begin, count, capacity,
3945                         ExtractFixedArrayFlag::kAllFixedArrays, mode);
3946 
3947   Node* result = AllocateUninitializedJSArrayWithoutElements(
3948       array_map, ParameterToTagged(count, mode), allocation_site);
3949   StoreObjectField(result, JSObject::kElementsOffset, new_elements);
3950   return result;
3951 }
3952 
3953 Node* CodeStubAssembler::CloneFastJSArray(Node* context, Node* array,
3954                                           ParameterMode mode,
3955                                           Node* allocation_site) {
3956   Node* original_array_map = LoadMap(array);
3957   Node* elements_kind = LoadMapElementsKind(original_array_map);
3958 
3959   Node* length = LoadJSArrayLength(array);
3960   Node* new_elements = ExtractFixedArray(
3961       LoadElements(array), IntPtrOrSmiConstant(0, mode),
3962       TaggedToParameter(length, mode), nullptr,
3963       ExtractFixedArrayFlag::kAllFixedArraysDontCopyCOW, mode);
3964 
3965   // Use the canonical map for the Array's ElementsKind.
3966   Node* native_context = LoadNativeContext(context);
3967   Node* array_map = LoadJSArrayElementsMap(elements_kind, native_context);
3968 
3969   Node* result = AllocateUninitializedJSArrayWithoutElements(array_map, length,
3970                                                              allocation_site);
3971   StoreObjectField(result, JSObject::kElementsOffset, new_elements);
3972   return result;
3973 }
3974 
3975 TNode<FixedArrayBase> CodeStubAssembler::AllocateFixedArray(
3976     ElementsKind kind, Node* capacity, ParameterMode mode,
3977     AllocationFlags flags, SloppyTNode<Map> fixed_array_map) {
3978   Comment("AllocateFixedArray");
3979   CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
3980   CSA_ASSERT(this, IntPtrOrSmiGreaterThan(capacity,
3981                                           IntPtrOrSmiConstant(0, mode), mode));
3982   TNode<IntPtrT> total_size = GetFixedArrayAllocationSize(capacity, kind, mode);
3983 
3984   if (IsDoubleElementsKind(kind)) flags |= kDoubleAlignment;
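  // Unboxed float64 elements need 8-byte alignment, which matters on targets
  // where kPointerSize is smaller than kDoubleSize.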
3985   // Allocate both array and elements object, and initialize the JSArray.
3986   Node* array = Allocate(total_size, flags);
3987   if (fixed_array_map != nullptr) {
3988     // Conservatively only skip the write barrier if there are no allocation
3989     // flags, this ensures that the object hasn't ended up in LOS. Note that the
3990     // fixed array map is currently always immortal and technically wouldn't
3991     // need the write barrier even in LOS, but it's better to not take chances
3992     // in case this invariant changes later, since it's difficult to enforce
3993     // locally here.
3994     if (flags == CodeStubAssembler::kNone) {
3995       StoreMapNoWriteBarrier(array, fixed_array_map);
3996     } else {
3997       StoreMap(array, fixed_array_map);
3998     }
3999   } else {
4000     Heap::RootListIndex map_index = IsDoubleElementsKind(kind)
4001                                         ? Heap::kFixedDoubleArrayMapRootIndex
4002                                         : Heap::kFixedArrayMapRootIndex;
4003     DCHECK(Heap::RootIsImmortalImmovable(map_index));
4004     StoreMapNoWriteBarrier(array, map_index);
4005   }
4006   StoreObjectFieldNoWriteBarrier(array, FixedArray::kLengthOffset,
4007                                  ParameterToTagged(capacity, mode));
4008   return UncheckedCast<FixedArray>(array);
4009 }
4010 
4011 TNode<FixedArrayBase> CodeStubAssembler::ExtractFixedArray(
4012     Node* fixed_array, Node* first, Node* count, Node* capacity,
4013     ExtractFixedArrayFlags extract_flags, ParameterMode parameter_mode) {
4014   VARIABLE(var_result, MachineRepresentation::kTagged);
4015   VARIABLE(var_fixed_array_map, MachineRepresentation::kTagged);
4016   const AllocationFlags flags =
4017       (extract_flags & ExtractFixedArrayFlag::kNewSpaceAllocationOnly)
4018           ? CodeStubAssembler::kNone
4019           : CodeStubAssembler::kAllowLargeObjectAllocation;
4020   if (first == nullptr) {
4021     first = IntPtrOrSmiConstant(0, parameter_mode);
4022   }
4023   if (count == nullptr) {
4024     count =
4025         IntPtrOrSmiSub(TaggedToParameter(LoadFixedArrayBaseLength(fixed_array),
4026                                          parameter_mode),
4027                        first, parameter_mode);
4028 
4029     CSA_ASSERT(
4030         this, IntPtrOrSmiLessThanOrEqual(IntPtrOrSmiConstant(0, parameter_mode),
4031                                          count, parameter_mode));
4032   }
4033   if (capacity == nullptr) {
4034     capacity = count;
4035   } else {
4036     CSA_ASSERT(this, Word32BinaryNot(IntPtrOrSmiGreaterThan(
4037                          IntPtrOrSmiAdd(first, count, parameter_mode), capacity,
4038                          parameter_mode)));
4039   }
4040 
4041   Label if_fixed_double_array(this), empty(this), cow(this),
4042       done(this, {&var_result, &var_fixed_array_map});
4043   var_fixed_array_map.Bind(LoadMap(fixed_array));
4044   GotoIf(WordEqual(IntPtrOrSmiConstant(0, parameter_mode), capacity), &empty);
4045 
4046   if (extract_flags & ExtractFixedArrayFlag::kFixedDoubleArrays) {
4047     if (extract_flags & ExtractFixedArrayFlag::kFixedArrays) {
4048       GotoIf(IsFixedDoubleArrayMap(var_fixed_array_map.value()),
4049              &if_fixed_double_array);
4050     } else {
4051       CSA_ASSERT(this, IsFixedDoubleArrayMap(var_fixed_array_map.value()));
4052     }
4053   } else {
4054     DCHECK(extract_flags & ExtractFixedArrayFlag::kFixedArrays);
4055     CSA_ASSERT(this, Word32BinaryNot(
4056                          IsFixedDoubleArrayMap(var_fixed_array_map.value())));
4057   }
4058 
4059   if (extract_flags & ExtractFixedArrayFlag::kFixedArrays) {
4060     Label new_space_check(this, {&var_fixed_array_map});
4061     Branch(WordEqual(var_fixed_array_map.value(),
4062                      LoadRoot(Heap::kFixedCOWArrayMapRootIndex)),
4063            &cow, &new_space_check);
4064 
4065     BIND(&new_space_check);
4066 
4067     bool handle_old_space = true;
4068     if (extract_flags & ExtractFixedArrayFlag::kNewSpaceAllocationOnly) {
4069       handle_old_space = false;
4070       CSA_ASSERT(this, Word32BinaryNot(FixedArraySizeDoesntFitInNewSpace(
4071                            count, FixedArray::kHeaderSize, parameter_mode)));
4072     } else {
4073       int constant_count;
4074       handle_old_space =
4075           !TryGetIntPtrOrSmiConstantValue(count, &constant_count,
4076                                           parameter_mode) ||
4077           (constant_count >
4078            FixedArray::GetMaxLengthForNewSpaceAllocation(PACKED_ELEMENTS));
4079     }
4080 
4081     Label old_space(this, Label::kDeferred);
4082     if (handle_old_space) {
4083       GotoIfFixedArraySizeDoesntFitInNewSpace(
4084           capacity, &old_space, FixedArray::kHeaderSize, parameter_mode);
4085     }
4086 
4087     Comment("Copy PACKED_ELEMENTS new space");
4088 
4089     ElementsKind kind = PACKED_ELEMENTS;
4090     Node* to_elements =
4091         AllocateFixedArray(kind, capacity, parameter_mode,
4092                            AllocationFlag::kNone, var_fixed_array_map.value());
4093     var_result.Bind(to_elements);
4094     CopyFixedArrayElements(kind, fixed_array, kind, to_elements, first, count,
4095                            capacity, SKIP_WRITE_BARRIER, parameter_mode);
4096     Goto(&done);
4097 
4098     if (handle_old_space) {
4099       BIND(&old_space);
4100       {
4101         Comment("Copy PACKED_ELEMENTS old space");
4102 
4103         to_elements = AllocateFixedArray(kind, capacity, parameter_mode, flags,
4104                                          var_fixed_array_map.value());
4105         var_result.Bind(to_elements);
4106         CopyFixedArrayElements(kind, fixed_array, kind, to_elements, first,
4107                                count, capacity, UPDATE_WRITE_BARRIER,
4108                                parameter_mode);
4109         Goto(&done);
4110       }
4111     }
4112 
4113     BIND(&cow);
4114     {
4115       if (extract_flags & ExtractFixedArrayFlag::kDontCopyCOW) {
4116         Branch(WordNotEqual(IntPtrOrSmiConstant(0, parameter_mode), first),
4117                &new_space_check, [&] {
4118                  var_result.Bind(fixed_array);
4119                  Goto(&done);
4120                });
4121       } else {
4122         var_fixed_array_map.Bind(LoadRoot(Heap::kFixedArrayMapRootIndex));
4123         Goto(&new_space_check);
4124       }
4125     }
4126   } else {
4127     Goto(&if_fixed_double_array);
4128   }
4129 
4130   if (extract_flags & ExtractFixedArrayFlag::kFixedDoubleArrays) {
4131     BIND(&if_fixed_double_array);
4132 
4133     Comment("Copy PACKED_DOUBLE_ELEMENTS");
4134 
4135     ElementsKind kind = PACKED_DOUBLE_ELEMENTS;
4136     Node* to_elements = AllocateFixedArray(kind, capacity, parameter_mode,
4137                                            flags, var_fixed_array_map.value());
4138     var_result.Bind(to_elements);
4139     CopyFixedArrayElements(kind, fixed_array, kind, to_elements, first, count,
4140                            capacity, SKIP_WRITE_BARRIER, parameter_mode);
4141 
4142     Goto(&done);
4143   }
4144 
4145   BIND(&empty);
4146   {
4147     Comment("Copy empty array");
4148 
4149     var_result.Bind(EmptyFixedArrayConstant());
4150     Goto(&done);
4151   }
4152 
4153   BIND(&done);
4154   return UncheckedCast<FixedArray>(var_result.value());
4155 }
4156 
4157 void CodeStubAssembler::InitializePropertyArrayLength(Node* property_array,
4158                                                       Node* length,
4159                                                       ParameterMode mode) {
4160   CSA_SLOW_ASSERT(this, IsPropertyArray(property_array));
4161   CSA_ASSERT(
4162       this, IntPtrOrSmiGreaterThan(length, IntPtrOrSmiConstant(0, mode), mode));
4163   CSA_ASSERT(
4164       this,
4165       IntPtrOrSmiLessThanOrEqual(
4166           length, IntPtrOrSmiConstant(PropertyArray::LengthField::kMax, mode),
4167           mode));
4168   StoreObjectFieldNoWriteBarrier(
4169       property_array, PropertyArray::kLengthAndHashOffset,
4170       ParameterToTagged(length, mode), MachineRepresentation::kTaggedSigned);
4171 }
4172 
4173 Node* CodeStubAssembler::AllocatePropertyArray(Node* capacity_node,
4174                                                ParameterMode mode,
4175                                                AllocationFlags flags) {
4176   CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity_node, mode));
4177   CSA_ASSERT(this, IntPtrOrSmiGreaterThan(capacity_node,
4178                                           IntPtrOrSmiConstant(0, mode), mode));
4179   Node* total_size = GetPropertyArrayAllocationSize(capacity_node, mode);
4180 
4181   Node* array = Allocate(total_size, flags);
4182   Heap::RootListIndex map_index = Heap::kPropertyArrayMapRootIndex;
4183   DCHECK(Heap::RootIsImmortalImmovable(map_index));
4184   StoreMapNoWriteBarrier(array, map_index);
4185   InitializePropertyArrayLength(array, capacity_node, mode);
4186   return array;
4187 }
4188 
4189 void CodeStubAssembler::FillPropertyArrayWithUndefined(Node* array,
4190                                                        Node* from_node,
4191                                                        Node* to_node,
4192                                                        ParameterMode mode) {
4193   CSA_SLOW_ASSERT(this, MatchesParameterMode(from_node, mode));
4194   CSA_SLOW_ASSERT(this, MatchesParameterMode(to_node, mode));
4195   CSA_SLOW_ASSERT(this, IsPropertyArray(array));
4196   ElementsKind kind = PACKED_ELEMENTS;
4197   Node* value = UndefinedConstant();
4198   BuildFastFixedArrayForEach(array, kind, from_node, to_node,
4199                              [this, value](Node* array, Node* offset) {
4200                                StoreNoWriteBarrier(
4201                                    MachineRepresentation::kTagged, array,
4202                                    offset, value);
4203                              },
4204                              mode);
4205 }
4206 
4207 void CodeStubAssembler::FillFixedArrayWithValue(
4208     ElementsKind kind, Node* array, Node* from_node, Node* to_node,
4209     Heap::RootListIndex value_root_index, ParameterMode mode) {
4210   CSA_SLOW_ASSERT(this, MatchesParameterMode(from_node, mode));
4211   CSA_SLOW_ASSERT(this, MatchesParameterMode(to_node, mode));
4212   CSA_SLOW_ASSERT(this, IsFixedArrayWithKind(array, kind));
4213   DCHECK(value_root_index == Heap::kTheHoleValueRootIndex ||
4214          value_root_index == Heap::kUndefinedValueRootIndex);
4215 
4216   // Determine the value to initialize the {array} based
4217   // on the {value_root_index} and the elements {kind}.
4218   Node* value = LoadRoot(value_root_index);
4219   if (IsDoubleElementsKind(kind)) {
4220     value = LoadHeapNumberValue(value);
4221   }
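  // For double arrays the filler is written as the oddball's raw float64
  // payload rather than as a tagged pointer.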
4222 
4223   BuildFastFixedArrayForEach(
4224       array, kind, from_node, to_node,
4225       [this, value, kind](Node* array, Node* offset) {
4226         if (IsDoubleElementsKind(kind)) {
4227           StoreNoWriteBarrier(MachineRepresentation::kFloat64, array, offset,
4228                               value);
4229         } else {
4230           StoreNoWriteBarrier(MachineRepresentation::kTagged, array, offset,
4231                               value);
4232         }
4233       },
4234       mode);
4235 }
4236 
4237 void CodeStubAssembler::FillFixedArrayWithSmiZero(TNode<FixedArray> array,
4238                                                   TNode<IntPtrT> length) {
4239   CSA_ASSERT(this, WordEqual(length, LoadAndUntagFixedArrayBaseLength(array)));
4240 
4241   TNode<IntPtrT> byte_length = TimesPointerSize(length);
4242   CSA_ASSERT(this, UintPtrLessThan(length, byte_length));
4243 
4244   static const int32_t fa_base_data_offset =
4245       FixedArray::kHeaderSize - kHeapObjectTag;
4246   TNode<IntPtrT> backing_store = IntPtrAdd(BitcastTaggedToWord(array),
4247                                            IntPtrConstant(fa_base_data_offset));
4248 
4249   // Call out to memset to perform initialization.
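  // Smi zero is the all-zero bit pattern, so zeroing the backing store yields
  // an array full of valid Smi zeros.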
4250   TNode<ExternalReference> memset =
4251       ExternalConstant(ExternalReference::libc_memset_function());
4252   STATIC_ASSERT(kSizetSize == kIntptrSize);
4253   CallCFunction3(MachineType::Pointer(), MachineType::Pointer(),
4254                  MachineType::IntPtr(), MachineType::UintPtr(), memset,
4255                  backing_store, IntPtrConstant(0), byte_length);
4256 }
4257 
4258 void CodeStubAssembler::FillFixedDoubleArrayWithZero(
4259     TNode<FixedDoubleArray> array, TNode<IntPtrT> length) {
4260   CSA_ASSERT(this, WordEqual(length, LoadAndUntagFixedArrayBaseLength(array)));
4261 
4262   TNode<IntPtrT> byte_length = TimesDoubleSize(length);
4263   CSA_ASSERT(this, UintPtrLessThan(length, byte_length));
4264 
4265   static const int32_t fa_base_data_offset =
4266       FixedDoubleArray::kHeaderSize - kHeapObjectTag;
4267   TNode<IntPtrT> backing_store = IntPtrAdd(BitcastTaggedToWord(array),
4268                                            IntPtrConstant(fa_base_data_offset));
4269 
4270   // Call out to memset to perform initialization.
4271   TNode<ExternalReference> memset =
4272       ExternalConstant(ExternalReference::libc_memset_function());
4273   STATIC_ASSERT(kSizetSize == kIntptrSize);
4274   CallCFunction3(MachineType::Pointer(), MachineType::Pointer(),
4275                  MachineType::IntPtr(), MachineType::UintPtr(), memset,
4276                  backing_store, IntPtrConstant(0), byte_length);
4277 }
4278 
4279 void CodeStubAssembler::CopyFixedArrayElements(
4280     ElementsKind from_kind, Node* from_array, ElementsKind to_kind,
4281     Node* to_array, Node* first_element, Node* element_count, Node* capacity,
4282     WriteBarrierMode barrier_mode, ParameterMode mode) {
4283   CSA_SLOW_ASSERT(this, MatchesParameterMode(element_count, mode));
4284   CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
4285   CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(from_array, from_kind));
4286   CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(to_array, to_kind));
4287   STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
4288   const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag;
4289   Comment("[ CopyFixedArrayElements");
4290 
4291   // Typed array elements are not supported.
4292   DCHECK(!IsFixedTypedArrayElementsKind(from_kind));
4293   DCHECK(!IsFixedTypedArrayElementsKind(to_kind));
4294 
4295   Label done(this);
4296   bool from_double_elements = IsDoubleElementsKind(from_kind);
4297   bool to_double_elements = IsDoubleElementsKind(to_kind);
4298   bool doubles_to_objects_conversion =
4299       IsDoubleElementsKind(from_kind) && IsObjectElementsKind(to_kind);
4300   bool needs_write_barrier =
4301       doubles_to_objects_conversion ||
4302       (barrier_mode == UPDATE_WRITE_BARRIER && IsObjectElementsKind(to_kind));
4303   bool element_offset_matches =
4304       !needs_write_barrier && (Is64() || IsDoubleElementsKind(from_kind) ==
4305                                              IsDoubleElementsKind(to_kind));
4306   Node* double_hole =
4307       Is64() ? ReinterpretCast<UintPtrT>(Int64Constant(kHoleNanInt64))
4308              : ReinterpretCast<UintPtrT>(Int32Constant(kHoleNanLower32));
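  // The hole in a double array is a signaling-NaN bit pattern; on 32-bit
  // targets it is stored as two word32 halves (see store_double_hole below).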
4309 
4310   if (doubles_to_objects_conversion) {
4311     // If the copy might trigger a GC, make sure that the FixedArray is
4312     // pre-initialized with holes to make sure that it's always in a
4313     // consistent state.
4314     FillFixedArrayWithValue(to_kind, to_array, IntPtrOrSmiConstant(0, mode),
4315                             capacity, Heap::kTheHoleValueRootIndex, mode);
4316   } else if (element_count != capacity) {
4317     FillFixedArrayWithValue(to_kind, to_array, element_count, capacity,
4318                             Heap::kTheHoleValueRootIndex, mode);
4319   }
4320 
4321   Node* first_from_element_offset =
4322       ElementOffsetFromIndex(first_element, from_kind, mode, 0);
4323   Node* limit_offset = IntPtrAdd(first_from_element_offset,
4324                                  IntPtrConstant(first_element_offset));
4325   VARIABLE(
4326       var_from_offset, MachineType::PointerRepresentation(),
4327       ElementOffsetFromIndex(IntPtrOrSmiAdd(first_element, element_count, mode),
4328                              from_kind, mode, first_element_offset));
4329   // This second variable is used only when the element sizes of source and
4330   // destination arrays do not match.
4331   VARIABLE(var_to_offset, MachineType::PointerRepresentation());
4332   if (element_offset_matches) {
4333     var_to_offset.Bind(var_from_offset.value());
4334   } else {
4335     var_to_offset.Bind(ElementOffsetFromIndex(element_count, to_kind, mode,
4336                                               first_element_offset));
4337   }
4338 
4339   Variable* vars[] = {&var_from_offset, &var_to_offset};
4340   Label decrement(this, 2, vars);
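  // The copy runs backwards: both offsets start just past the last element
  // and are decremented until |var_from_offset| reaches |limit_offset|.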
4341 
4342   Node* to_array_adjusted =
4343       element_offset_matches
4344           ? IntPtrSub(BitcastTaggedToWord(to_array), first_from_element_offset)
4345           : to_array;
4346 
4347   Branch(WordEqual(var_from_offset.value(), limit_offset), &done, &decrement);
4348 
4349   BIND(&decrement);
4350   {
4351     Node* from_offset = IntPtrSub(
4352         var_from_offset.value(),
4353         IntPtrConstant(from_double_elements ? kDoubleSize : kPointerSize));
4354     var_from_offset.Bind(from_offset);
4355 
4356     Node* to_offset;
4357     if (element_offset_matches) {
4358       to_offset = from_offset;
4359     } else {
4360       to_offset = IntPtrSub(
4361           var_to_offset.value(),
4362           IntPtrConstant(to_double_elements ? kDoubleSize : kPointerSize));
4363       var_to_offset.Bind(to_offset);
4364     }
4365 
4366     Label next_iter(this), store_double_hole(this);
4367     Label* if_hole;
4368     if (doubles_to_objects_conversion) {
4369       // The target elements array is already preinitialized with holes, so we
4370       // can just proceed with the next iteration.
4371       if_hole = &next_iter;
4372     } else if (IsDoubleElementsKind(to_kind)) {
4373       if_hole = &store_double_hole;
4374     } else {
4375       // In all the other cases don't check for holes and copy the data as is.
4376       if_hole = nullptr;
4377     }
4378 
4379     Node* value = LoadElementAndPrepareForStore(
4380         from_array, var_from_offset.value(), from_kind, to_kind, if_hole);
4381 
4382     if (needs_write_barrier) {
4383       CHECK_EQ(to_array, to_array_adjusted);
4384       Store(to_array_adjusted, to_offset, value);
4385     } else if (to_double_elements) {
4386       StoreNoWriteBarrier(MachineRepresentation::kFloat64, to_array_adjusted,
4387                           to_offset, value);
4388     } else {
4389       StoreNoWriteBarrier(MachineRepresentation::kTagged, to_array_adjusted,
4390                           to_offset, value);
4391     }
4392     Goto(&next_iter);
4393 
4394     if (if_hole == &store_double_hole) {
4395       BIND(&store_double_hole);
4396       // Don't use doubles to store the hole double, since manipulating the
4397       // signaling NaN used for the hole in C++, e.g. with bit_cast, will
4398       // change its value on ia32 (the x87 stack is used to return values
4399       // and stores to the stack silently clear the signalling bit).
4400       //
4401       // TODO(danno): When we have a Float32/Float64 wrapper class that
4402       // preserves double bits during manipulation, remove this code/change
4403       // this to an indexed Float64 store.
4404       if (Is64()) {
4405         StoreNoWriteBarrier(MachineRepresentation::kWord64, to_array_adjusted,
4406                             to_offset, double_hole);
4407       } else {
4408         StoreNoWriteBarrier(MachineRepresentation::kWord32, to_array_adjusted,
4409                             to_offset, double_hole);
4410         StoreNoWriteBarrier(MachineRepresentation::kWord32, to_array_adjusted,
4411                             IntPtrAdd(to_offset, IntPtrConstant(kPointerSize)),
4412                             double_hole);
4413       }
4414       Goto(&next_iter);
4415     }
4416 
4417     BIND(&next_iter);
4418     Node* compare = WordNotEqual(from_offset, limit_offset);
4419     Branch(compare, &decrement, &done);
4420   }
4421 
4422   BIND(&done);
4423   Comment("] CopyFixedArrayElements");
4424 }
4425 
4426 TNode<FixedArray> CodeStubAssembler::HeapObjectToFixedArray(
4427     TNode<HeapObject> base, Label* cast_fail) {
4428   Label fixed_array(this);
4429   TNode<Map> map = LoadMap(base);
4430   GotoIf(WordEqual(map, LoadRoot(Heap::kFixedArrayMapRootIndex)), &fixed_array);
4431   GotoIf(WordNotEqual(map, LoadRoot(Heap::kFixedCOWArrayMapRootIndex)),
4432          cast_fail);
4433   Goto(&fixed_array);
4434   BIND(&fixed_array);
4435   return UncheckedCast<FixedArray>(base);
4436 }
4437 
4438 void CodeStubAssembler::CopyPropertyArrayValues(Node* from_array,
4439                                                 Node* to_array,
4440                                                 Node* property_count,
4441                                                 WriteBarrierMode barrier_mode,
4442                                                 ParameterMode mode) {
4443   CSA_SLOW_ASSERT(this, MatchesParameterMode(property_count, mode));
4444   CSA_SLOW_ASSERT(this, Word32Or(IsPropertyArray(from_array),
4445                                  IsEmptyFixedArray(from_array)));
4446   CSA_SLOW_ASSERT(this, IsPropertyArray(to_array));
4447   Comment("[ CopyPropertyArrayValues");
4448 
4449   bool needs_write_barrier = barrier_mode == UPDATE_WRITE_BARRIER;
4450   Node* start = IntPtrOrSmiConstant(0, mode);
4451   ElementsKind kind = PACKED_ELEMENTS;
4452   BuildFastFixedArrayForEach(
4453       from_array, kind, start, property_count,
4454       [this, to_array, needs_write_barrier](Node* array, Node* offset) {
4455         Node* value = Load(MachineType::AnyTagged(), array, offset);
4456 
4457         if (needs_write_barrier) {
4458           Store(to_array, offset, value);
4459         } else {
4460           StoreNoWriteBarrier(MachineRepresentation::kTagged, to_array, offset,
4461                               value);
4462         }
4463       },
4464       mode);
4465   Comment("] CopyPropertyArrayValues");
4466 }
4467 
4468 void CodeStubAssembler::CopyStringCharacters(Node* from_string, Node* to_string,
4469                                              TNode<IntPtrT> from_index,
4470                                              TNode<IntPtrT> to_index,
4471                                              TNode<IntPtrT> character_count,
4472                                              String::Encoding from_encoding,
4473                                              String::Encoding to_encoding) {
4474   // Cannot assert IsString(from_string) and IsString(to_string) here because
4475   // CSA::SubString can pass in faked sequential strings when handling external
4476   // subject strings.
4477   bool from_one_byte = from_encoding == String::ONE_BYTE_ENCODING;
4478   bool to_one_byte = to_encoding == String::ONE_BYTE_ENCODING;
4479   DCHECK_IMPLIES(to_one_byte, from_one_byte);
4480   Comment("CopyStringCharacters %s -> %s",
4481           from_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING",
4482           to_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING");
4483 
4484   ElementsKind from_kind = from_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS;
4485   ElementsKind to_kind = to_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS;
4486   STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
4487   int header_size = SeqOneByteString::kHeaderSize - kHeapObjectTag;
4488   Node* from_offset = ElementOffsetFromIndex(from_index, from_kind,
4489                                              INTPTR_PARAMETERS, header_size);
4490   Node* to_offset =
4491       ElementOffsetFromIndex(to_index, to_kind, INTPTR_PARAMETERS, header_size);
4492   Node* byte_count =
4493       ElementOffsetFromIndex(character_count, from_kind, INTPTR_PARAMETERS);
4494   Node* limit_offset = IntPtrAdd(from_offset, byte_count);
4495 
4496   // Prepare the fast loop
4497   MachineType type =
4498       from_one_byte ? MachineType::Uint8() : MachineType::Uint16();
4499   MachineRepresentation rep = to_one_byte ? MachineRepresentation::kWord8
4500                                           : MachineRepresentation::kWord16;
4501   int from_increment = 1 << ElementsKindToShiftSize(from_kind);
4502   int to_increment = 1 << ElementsKindToShiftSize(to_kind);
4503 
4504   VARIABLE(current_to_offset, MachineType::PointerRepresentation(), to_offset);
4505   VariableList vars({&current_to_offset}, zone());
4506   int to_index_constant = 0, from_index_constant = 0;
4507   bool index_same = (from_encoding == to_encoding) &&
4508                     (from_index == to_index ||
4509                      (ToInt32Constant(from_index, from_index_constant) &&
4510                       ToInt32Constant(to_index, to_index_constant) &&
4511                       from_index_constant == to_index_constant));
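  // When both strings share the encoding and start index, a single offset
  // variable can address source and destination alike.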
4512   BuildFastLoop(vars, from_offset, limit_offset,
4513                 [this, from_string, to_string, &current_to_offset, to_increment,
4514                  type, rep, index_same](Node* offset) {
4515                   Node* value = Load(type, from_string, offset);
4516                   StoreNoWriteBarrier(
4517                       rep, to_string,
4518                       index_same ? offset : current_to_offset.value(), value);
4519                   if (!index_same) {
4520                     Increment(&current_to_offset, to_increment);
4521                   }
4522                 },
4523                 from_increment, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
4524 }
4525 
4526 Node* CodeStubAssembler::LoadElementAndPrepareForStore(Node* array,
4527                                                        Node* offset,
4528                                                        ElementsKind from_kind,
4529                                                        ElementsKind to_kind,
4530                                                        Label* if_hole) {
4531   CSA_ASSERT(this, IsFixedArrayWithKind(array, from_kind));
4532   if (IsDoubleElementsKind(from_kind)) {
4533     Node* value =
4534         LoadDoubleWithHoleCheck(array, offset, if_hole, MachineType::Float64());
4535     if (!IsDoubleElementsKind(to_kind)) {
4536       value = AllocateHeapNumberWithValue(value);
4537     }
4538     return value;
4539 
4540   } else {
4541     Node* value = Load(MachineType::AnyTagged(), array, offset);
4542     if (if_hole) {
4543       GotoIf(WordEqual(value, TheHoleConstant()), if_hole);
4544     }
4545     if (IsDoubleElementsKind(to_kind)) {
4546       if (IsSmiElementsKind(from_kind)) {
4547         value = SmiToFloat64(value);
4548       } else {
4549         value = LoadHeapNumberValue(value);
4550       }
4551     }
4552     return value;
4553   }
4554 }
4555 
4556 Node* CodeStubAssembler::CalculateNewElementsCapacity(Node* old_capacity,
4557                                                       ParameterMode mode) {
4558   CSA_SLOW_ASSERT(this, MatchesParameterMode(old_capacity, mode));
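  // Grow by ~50% plus JSObject::kMinAddedElementsCapacity, presumably
  // mirroring the runtime's growth strategy for elements backing stores.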
4559   Node* half_old_capacity = WordOrSmiShr(old_capacity, 1, mode);
4560   Node* new_capacity = IntPtrOrSmiAdd(half_old_capacity, old_capacity, mode);
4561   Node* padding =
4562       IntPtrOrSmiConstant(JSObject::kMinAddedElementsCapacity, mode);
4563   return IntPtrOrSmiAdd(new_capacity, padding, mode);
4564 }
4565 
4566 Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
4567                                                  ElementsKind kind, Node* key,
4568                                                  Label* bailout) {
4569   CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
4570   CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, kind));
4571   CSA_SLOW_ASSERT(this, TaggedIsSmi(key));
4572   Node* capacity = LoadFixedArrayBaseLength(elements);
4573 
4574   ParameterMode mode = OptimalParameterMode();
4575   capacity = TaggedToParameter(capacity, mode);
4576   key = TaggedToParameter(key, mode);
4577 
4578   return TryGrowElementsCapacity(object, elements, kind, key, capacity, mode,
4579                                  bailout);
4580 }
4581 
4582 Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
4583                                                  ElementsKind kind, Node* key,
4584                                                  Node* capacity,
4585                                                  ParameterMode mode,
4586                                                  Label* bailout) {
4587   Comment("TryGrowElementsCapacity");
4588   CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
4589   CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, kind));
4590   CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
4591   CSA_SLOW_ASSERT(this, MatchesParameterMode(key, mode));
4592 
4593   // If the gap growth is too big, fall back to the runtime.
4594   Node* max_gap = IntPtrOrSmiConstant(JSObject::kMaxGap, mode);
4595   Node* max_capacity = IntPtrOrSmiAdd(capacity, max_gap, mode);
4596   GotoIf(UintPtrOrSmiGreaterThanOrEqual(key, max_capacity, mode), bailout);
4597 
4598   // Calculate the capacity of the new backing store.
4599   Node* new_capacity = CalculateNewElementsCapacity(
4600       IntPtrOrSmiAdd(key, IntPtrOrSmiConstant(1, mode), mode), mode);
4601   return GrowElementsCapacity(object, elements, kind, kind, capacity,
4602                               new_capacity, mode, bailout);
4603 }
4604 
4605 Node* CodeStubAssembler::GrowElementsCapacity(
4606     Node* object, Node* elements, ElementsKind from_kind, ElementsKind to_kind,
4607     Node* capacity, Node* new_capacity, ParameterMode mode, Label* bailout) {
4608   Comment("[ GrowElementsCapacity");
4609   CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
4610   CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, from_kind));
4611   CSA_SLOW_ASSERT(this, MatchesParameterMode(capacity, mode));
4612   CSA_SLOW_ASSERT(this, MatchesParameterMode(new_capacity, mode));
4613 
4614   // If the size of the allocation for the new capacity doesn't fit in a page
4615   // that we can bump-pointer allocate from, fall back to the runtime.
4616   int max_size = FixedArrayBase::GetMaxLengthForNewSpaceAllocation(to_kind);
4617   GotoIf(UintPtrOrSmiGreaterThanOrEqual(
4618              new_capacity, IntPtrOrSmiConstant(max_size, mode), mode),
4619          bailout);
4620 
4621   // Allocate the new backing store.
4622   Node* new_elements = AllocateFixedArray(to_kind, new_capacity, mode);
4623 
4624   // Copy the elements from the old elements store to the new.
4625   // The size-check above guarantees that the |new_elements| is allocated
4626   // in new space so we can skip the write barrier.
4627   CopyFixedArrayElements(from_kind, elements, to_kind, new_elements, capacity,
4628                          new_capacity, SKIP_WRITE_BARRIER, mode);
4629 
4630   StoreObjectField(object, JSObject::kElementsOffset, new_elements);
4631   Comment("] GrowElementsCapacity");
4632   return new_elements;
4633 }
4634 
4635 void CodeStubAssembler::InitializeAllocationMemento(Node* base,
4636                                                     Node* base_allocation_size,
4637                                                     Node* allocation_site) {
4638   Comment("[Initialize AllocationMemento");
4639   Node* memento = InnerAllocate(base, base_allocation_size);
4640   StoreMapNoWriteBarrier(memento, Heap::kAllocationMementoMapRootIndex);
4641   StoreObjectFieldNoWriteBarrier(
4642       memento, AllocationMemento::kAllocationSiteOffset, allocation_site);
4643   if (FLAG_allocation_site_pretenuring) {
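    // Bump the allocation site's creation count, which feeds the pretenuring
    // heuristics.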
4644     TNode<Int32T> count = UncheckedCast<Int32T>(LoadObjectField(
4645         allocation_site, AllocationSite::kPretenureCreateCountOffset,
4646         MachineType::Int32()));
4647 
4648     TNode<Int32T> incremented_count = Int32Add(count, Int32Constant(1));
4649     StoreObjectFieldNoWriteBarrier(
4650         allocation_site, AllocationSite::kPretenureCreateCountOffset,
4651         incremented_count, MachineRepresentation::kWord32);
4652   }
4653   Comment("]");
4654 }
4655 
4656 Node* CodeStubAssembler::TryTaggedToFloat64(Node* value,
4657                                             Label* if_valueisnotnumber) {
4658   Label out(this);
4659   VARIABLE(var_result, MachineRepresentation::kFloat64);
4660 
4661   // Check if the {value} is a Smi or a HeapObject.
4662   Label if_valueissmi(this), if_valueisnotsmi(this);
4663   Branch(TaggedIsSmi(value), &if_valueissmi, &if_valueisnotsmi);
4664 
4665   BIND(&if_valueissmi);
4666   {
4667     // Convert the Smi {value}.
4668     var_result.Bind(SmiToFloat64(value));
4669     Goto(&out);
4670   }
4671 
4672   BIND(&if_valueisnotsmi);
4673   {
4674     // Check if {value} is a HeapNumber.
4675     Label if_valueisheapnumber(this);
4676     Branch(IsHeapNumber(value), &if_valueisheapnumber, if_valueisnotnumber);
4677 
4678     BIND(&if_valueisheapnumber);
4679     {
4680       // Load the floating point value.
4681       var_result.Bind(LoadHeapNumberValue(value));
4682       Goto(&out);
4683     }
4684   }
4685   BIND(&out);
4686   return var_result.value();
4687 }
4688 
4689 Node* CodeStubAssembler::TruncateTaggedToFloat64(Node* context, Node* value) {
4690   // We might need to loop once due to ToNumber conversion.
4691   VARIABLE(var_value, MachineRepresentation::kTagged);
4692   VARIABLE(var_result, MachineRepresentation::kFloat64);
4693   Label loop(this, &var_value), done_loop(this, &var_result);
4694   var_value.Bind(value);
4695   Goto(&loop);
4696   BIND(&loop);
4697   {
4698     Label if_valueisnotnumber(this, Label::kDeferred);
4699 
4700     // Load the current {value}.
4701     value = var_value.value();
4702 
4703     // Convert {value} to Float64 if it is a Number; otherwise, convert it
4704     // to a Number first.
4705     Node* const result = TryTaggedToFloat64(value, &if_valueisnotnumber);
4706     var_result.Bind(result);
4707     Goto(&done_loop);
4708 
4709     BIND(&if_valueisnotnumber);
4710     {
4711       // Convert the {value} to a Number first.
4712       var_value.Bind(CallBuiltin(Builtins::kNonNumberToNumber, context, value));
4713       Goto(&loop);
4714     }
4715   }
4716   BIND(&done_loop);
4717   return var_result.value();
4718 }
4719 
4720 Node* CodeStubAssembler::TruncateTaggedToWord32(Node* context, Node* value) {
4721   VARIABLE(var_result, MachineRepresentation::kWord32);
4722   Label done(this);
4723   TaggedToWord32OrBigIntImpl<Object::Conversion::kToNumber>(context, value,
4724                                                             &done, &var_result);
4725   BIND(&done);
4726   return var_result.value();
4727 }
4728 
4729 // Truncate {value} to word32 and jump to {if_number} if it is a Number,
4730 // or find that it is a BigInt and jump to {if_bigint}.
4731 void CodeStubAssembler::TaggedToWord32OrBigInt(Node* context, Node* value,
4732                                                Label* if_number,
4733                                                Variable* var_word32,
4734                                                Label* if_bigint,
4735                                                Variable* var_bigint) {
4736   TaggedToWord32OrBigIntImpl<Object::Conversion::kToNumeric>(
4737       context, value, if_number, var_word32, if_bigint, var_bigint);
4738 }
4739 
4740 // Truncate {value} to word32 and jump to {if_number} if it is a Number,
4741 // or find that it is a BigInt and jump to {if_bigint}. In either case,
4742 // store the type feedback in {var_feedback}.
4743 void CodeStubAssembler::TaggedToWord32OrBigIntWithFeedback(
4744     Node* context, Node* value, Label* if_number, Variable* var_word32,
4745     Label* if_bigint, Variable* var_bigint, Variable* var_feedback) {
4746   TaggedToWord32OrBigIntImpl<Object::Conversion::kToNumeric>(
4747       context, value, if_number, var_word32, if_bigint, var_bigint,
4748       var_feedback);
4749 }
4750 
4751 template <Object::Conversion conversion>
4752 void CodeStubAssembler::TaggedToWord32OrBigIntImpl(
4753     Node* context, Node* value, Label* if_number, Variable* var_word32,
4754     Label* if_bigint, Variable* var_bigint, Variable* var_feedback) {
4755   DCHECK(var_word32->rep() == MachineRepresentation::kWord32);
4756   DCHECK(var_bigint == nullptr ||
4757          var_bigint->rep() == MachineRepresentation::kTagged);
4758   DCHECK(var_feedback == nullptr ||
4759          var_feedback->rep() == MachineRepresentation::kTaggedSigned);
4760 
4761   // We might need to loop after conversion.
4762   VARIABLE(var_value, MachineRepresentation::kTagged, value);
4763   OverwriteFeedback(var_feedback, BinaryOperationFeedback::kNone);
4764   Variable* loop_vars[] = {&var_value, var_feedback};
4765   int num_vars =
4766       var_feedback != nullptr ? arraysize(loop_vars) : arraysize(loop_vars) - 1;
4767   Label loop(this, num_vars, loop_vars);
4768   Goto(&loop);
4769   BIND(&loop);
4770   {
4771     value = var_value.value();
4772     Label not_smi(this), is_heap_number(this), is_oddball(this),
4773         is_bigint(this);
4774     GotoIf(TaggedIsNotSmi(value), &not_smi);
4775 
4776     // {value} is a Smi.
4777     var_word32->Bind(SmiToInt32(value));
4778     CombineFeedback(var_feedback, BinaryOperationFeedback::kSignedSmall);
4779     Goto(if_number);
4780 
4781     BIND(&not_smi);
4782     Node* map = LoadMap(value);
4783     GotoIf(IsHeapNumberMap(map), &is_heap_number);
4784     Node* instance_type = LoadMapInstanceType(map);
4785     if (conversion == Object::Conversion::kToNumeric) {
4786       GotoIf(IsBigIntInstanceType(instance_type), &is_bigint);
4787     }
4788 
4789     // Not HeapNumber (or BigInt if conversion == kToNumeric).
4790     {
4791       if (var_feedback != nullptr) {
4792         // We do not require an Or with earlier feedback here because once we
4793         // convert the value to a Numeric, we cannot reach this path. We can
4794         // only reach this path on the first pass when the feedback is kNone.
4795         CSA_ASSERT(this, SmiEqual(CAST(var_feedback->value()),
4796                                   SmiConstant(BinaryOperationFeedback::kNone)));
4797       }
4798       GotoIf(InstanceTypeEqual(instance_type, ODDBALL_TYPE), &is_oddball);
4799       // Not an oddball either -> convert.
4800       auto builtin = conversion == Object::Conversion::kToNumeric
4801                          ? Builtins::kNonNumberToNumeric
4802                          : Builtins::kNonNumberToNumber;
4803       var_value.Bind(CallBuiltin(builtin, context, value));
4804       OverwriteFeedback(var_feedback, BinaryOperationFeedback::kAny);
4805       Goto(&loop);
4806 
4807       BIND(&is_oddball);
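      // Oddballs (undefined, null, true, false) cache their ToNumber result,
      // so it can be loaded directly here instead of calling a conversion
      // builtin.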
4808       var_value.Bind(LoadObjectField(value, Oddball::kToNumberOffset));
4809       OverwriteFeedback(var_feedback,
4810                         BinaryOperationFeedback::kNumberOrOddball);
4811       Goto(&loop);
4812     }
4813 
4814     BIND(&is_heap_number);
4815     var_word32->Bind(TruncateHeapNumberValueToWord32(value));
4816     CombineFeedback(var_feedback, BinaryOperationFeedback::kNumber);
4817     Goto(if_number);
4818 
4819     if (conversion == Object::Conversion::kToNumeric) {
4820       BIND(&is_bigint);
4821       var_bigint->Bind(value);
4822       CombineFeedback(var_feedback, BinaryOperationFeedback::kBigInt);
4823       Goto(if_bigint);
4824     }
4825   }
4826 }
4827 
4828 Node* CodeStubAssembler::TruncateHeapNumberValueToWord32(Node* object) {
4829   Node* value = LoadHeapNumberValue(object);
4830   return TruncateFloat64ToWord32(value);
4831 }
4832 
4833 void CodeStubAssembler::TryHeapNumberToSmi(TNode<HeapNumber> number,
4834                                            TVariable<Smi>& var_result_smi,
4835                                            Label* if_smi) {
4836   TNode<Float64T> value = LoadHeapNumberValue(number);
4837   TryFloat64ToSmi(value, var_result_smi, if_smi);
4838 }
4839 
4840 void CodeStubAssembler::TryFloat64ToSmi(TNode<Float64T> value,
4841                                         TVariable<Smi>& var_result_smi,
4842                                         Label* if_smi) {
4843   TNode<Int32T> value32 = RoundFloat64ToInt32(value);
4844   TNode<Float64T> value64 = ChangeInt32ToFloat64(value32);
4845 
4846   Label if_int32(this), if_heap_number(this, Label::kDeferred);
4847 
4848   GotoIfNot(Float64Equal(value, value64), &if_heap_number);
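  // A zero result still needs a sign check: if the sign bit of {value} is set,
  // the input is -0.0, which cannot be represented as a Smi.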
4849   GotoIfNot(Word32Equal(value32, Int32Constant(0)), &if_int32);
4850   Branch(Int32LessThan(UncheckedCast<Int32T>(Float64ExtractHighWord32(value)),
4851                        Int32Constant(0)),
4852          &if_heap_number, &if_int32);
4853 
4854   TVARIABLE(Number, var_result);
4855   BIND(&if_int32);
4856   {
4857     if (SmiValuesAre32Bits()) {
4858       var_result_smi = SmiTag(ChangeInt32ToIntPtr(value32));
4859     } else {
4860       DCHECK(SmiValuesAre31Bits());
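      // With 31-bit Smis, tagging is a left shift by one; adding the value to
      // itself performs that shift, and the overflow bit catches values that
      // do not fit in the Smi range.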
4861       TNode<PairT<Int32T, BoolT>> pair = Int32AddWithOverflow(value32, value32);
4862       TNode<BoolT> overflow = Projection<1>(pair);
4863       GotoIf(overflow, &if_heap_number);
4864       var_result_smi =
4865           BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(Projection<0>(pair)));
4866     }
4867     Goto(if_smi);
4868   }
4869   BIND(&if_heap_number);
4870 }
4871 
4872 TNode<Number> CodeStubAssembler::ChangeFloat64ToTagged(
4873     SloppyTNode<Float64T> value) {
4874   Label if_smi(this), done(this);
4875   TVARIABLE(Smi, var_smi_result);
4876   TVARIABLE(Number, var_result);
4877   TryFloat64ToSmi(value, var_smi_result, &if_smi);
4878 
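  // TryFloat64ToSmi jumps to {if_smi} when {value} fits in a Smi; otherwise
  // control falls through here and a fresh HeapNumber is allocated.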
4879   var_result = AllocateHeapNumberWithValue(value);
4880   Goto(&done);
4881 
4882   BIND(&if_smi);
4883   {
4884     var_result = var_smi_result.value();
4885     Goto(&done);
4886   }
4887   BIND(&done);
4888   return var_result.value();
4889 }
4890 
4891 TNode<Number> CodeStubAssembler::ChangeInt32ToTagged(
4892     SloppyTNode<Int32T> value) {
4893   if (SmiValuesAre32Bits()) {
4894     return SmiTag(ChangeInt32ToIntPtr(value));
4895   }
4896   DCHECK(SmiValuesAre31Bits());
4897   TVARIABLE(Number, var_result);
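  // Adding {value} to itself is the 31-bit Smi tagging shift; an overflow
  // means the value does not fit in a Smi and must be boxed as a HeapNumber.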
4898   TNode<PairT<Int32T, BoolT>> pair = Int32AddWithOverflow(value, value);
4899   TNode<BoolT> overflow = Projection<1>(pair);
4900   Label if_overflow(this, Label::kDeferred), if_notoverflow(this),
4901       if_join(this);
4902   Branch(overflow, &if_overflow, &if_notoverflow);
4903   BIND(&if_overflow);
4904   {
4905     TNode<Float64T> value64 = ChangeInt32ToFloat64(value);
4906     TNode<HeapNumber> result = AllocateHeapNumberWithValue(value64);
4907     var_result = result;
4908     Goto(&if_join);
4909   }
4910   BIND(&if_notoverflow);
4911   {
4912     TNode<IntPtrT> almost_tagged_value =
4913         ChangeInt32ToIntPtr(Projection<0>(pair));
4914     TNode<Smi> result = BitcastWordToTaggedSigned(almost_tagged_value);
4915     var_result = result;
4916     Goto(&if_join);
4917   }
4918   BIND(&if_join);
4919   return var_result.value();
4920 }
4921 
4922 TNode<Number> CodeStubAssembler::ChangeUint32ToTagged(
4923     SloppyTNode<Uint32T> value) {
4924   Label if_overflow(this, Label::kDeferred), if_not_overflow(this),
4925       if_join(this);
4926   TVARIABLE(Number, var_result);
4927   // If {value} > Smi::kMaxValue, we need to store it in a HeapNumber.
4928   Branch(Uint32LessThan(Int32Constant(Smi::kMaxValue), value), &if_overflow,
4929          &if_not_overflow);
4930 
4931   BIND(&if_not_overflow);
4932   {
4933     if (SmiValuesAre32Bits()) {
4934       var_result =
4935           SmiTag(ReinterpretCast<IntPtrT>(ChangeUint32ToUint64(value)));
4936     } else {
4937       DCHECK(SmiValuesAre31Bits());
4938       // If tagging {value} results in an overflow, we need to use a HeapNumber
4939       // to represent it.
4940       // TODO(tebbi): This overflow can never happen.
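      // (With 31-bit Smis, Smi::kMaxValue is 2^30 - 1 and larger values were
      // already filtered out above, so doubling cannot overflow int32.)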
4941       TNode<PairT<Int32T, BoolT>> pair = Int32AddWithOverflow(
4942           UncheckedCast<Int32T>(value), UncheckedCast<Int32T>(value));
4943       TNode<BoolT> overflow = Projection<1>(pair);
4944       GotoIf(overflow, &if_overflow);
4945 
4946       TNode<IntPtrT> almost_tagged_value =
4947           ChangeInt32ToIntPtr(Projection<0>(pair));
4948       var_result = BitcastWordToTaggedSigned(almost_tagged_value);
4949     }
4950   }
4951   Goto(&if_join);
4952 
4953   BIND(&if_overflow);
4954   {
4955     TNode<Float64T> float64_value = ChangeUint32ToFloat64(value);
4956     var_result = AllocateHeapNumberWithValue(float64_value);
4957   }
4958   Goto(&if_join);
4959 
4960   BIND(&if_join);
4961   return var_result.value();
4962 }
4963 
4964 TNode<String> CodeStubAssembler::ToThisString(Node* context, Node* value,
4965                                               char const* method_name) {
4966   VARIABLE(var_value, MachineRepresentation::kTagged, value);
4967 
4968   // Check if the {value} is a Smi or a HeapObject.
4969   Label if_valueissmi(this, Label::kDeferred), if_valueisnotsmi(this),
4970       if_valueisstring(this);
4971   Branch(TaggedIsSmi(value), &if_valueissmi, &if_valueisnotsmi);
4972   BIND(&if_valueisnotsmi);
4973   {
4974     // Load the instance type of the {value}.
4975     Node* value_instance_type = LoadInstanceType(value);
4976 
4977     // Check if the {value} is already String.
4978     Label if_valueisnotstring(this, Label::kDeferred);
4979     Branch(IsStringInstanceType(value_instance_type), &if_valueisstring,
4980            &if_valueisnotstring);
4981     BIND(&if_valueisnotstring);
4982     {
4983       // Check if the {value} is null.
4984       Label if_valueisnullorundefined(this, Label::kDeferred);
4985       GotoIf(IsNullOrUndefined(value), &if_valueisnullorundefined);
4986       // Convert the {value} to a String.
4987       var_value.Bind(CallBuiltin(Builtins::kToString, context, value));
4988       Goto(&if_valueisstring);
4989 
4990       BIND(&if_valueisnullorundefined);
4991       {
4992         // The {value} is either null or undefined.
4993         ThrowTypeError(context, MessageTemplate::kCalledOnNullOrUndefined,
4994                        method_name);
4995       }
4996     }
4997   }
4998   BIND(&if_valueissmi);
4999   {
5000     // The {value} is a Smi, convert it to a String.
5001     var_value.Bind(CallBuiltin(Builtins::kNumberToString, context, value));
5002     Goto(&if_valueisstring);
5003   }
5004   BIND(&if_valueisstring);
5005   return CAST(var_value.value());
5006 }
5007 
5008 TNode<Uint32T> CodeStubAssembler::ChangeNumberToUint32(TNode<Number> value) {
5009   TVARIABLE(Uint32T, var_result);
5010   Label if_smi(this), if_heapnumber(this, Label::kDeferred), done(this);
5011   Branch(TaggedIsSmi(value), &if_smi, &if_heapnumber);
5012   BIND(&if_smi);
5013   {
5014     var_result = Unsigned(SmiToInt32(CAST(value)));
5015     Goto(&done);
5016   }
5017   BIND(&if_heapnumber);
5018   {
5019     var_result = ChangeFloat64ToUint32(LoadHeapNumberValue(CAST(value)));
5020     Goto(&done);
5021   }
5022   BIND(&done);
5023   return var_result.value();
5024 }
5025 
5026 TNode<Float64T> CodeStubAssembler::ChangeNumberToFloat64(
5027     SloppyTNode<Number> value) {
5028   // TODO(tebbi): Remove assert once argument is TNode instead of SloppyTNode.
5029   CSA_SLOW_ASSERT(this, IsNumber(value));
5030   TVARIABLE(Float64T, result);
5031   Label smi(this);
5032   Label done(this, &result);
5033   GotoIf(TaggedIsSmi(value), &smi);
5034   result = LoadHeapNumberValue(CAST(value));
5035   Goto(&done);
5036 
5037   BIND(&smi);
5038   {
5039     result = SmiToFloat64(CAST(value));
5040     Goto(&done);
5041   }
5042 
5043   BIND(&done);
5044   return result.value();
5045 }
5046 
5047 TNode<UintPtrT> CodeStubAssembler::ChangeNonnegativeNumberToUintPtr(
5048     TNode<Number> value) {
5049   TVARIABLE(UintPtrT, result);
5050   Label done(this, &result);
5051   Branch(TaggedIsSmi(value),
5052          [&] {
5053            TNode<Smi> value_smi = CAST(value);
5054            CSA_SLOW_ASSERT(this, SmiLessThan(SmiConstant(-1), value_smi));
5055            result = UncheckedCast<UintPtrT>(SmiToIntPtr(value_smi));
5056            Goto(&done);
5057          },
5058          [&] {
5059            TNode<HeapNumber> value_hn = CAST(value);
5060            result = ChangeFloat64ToUintPtr(LoadHeapNumberValue(value_hn));
5061            Goto(&done);
5062          });
5063 
5064   BIND(&done);
5065   return result.value();
5066 }
5067 
5068 TNode<WordT> CodeStubAssembler::TimesPointerSize(SloppyTNode<WordT> value) {
5069   return WordShl(value, kPointerSizeLog2);
5070 }
5071 
5072 TNode<WordT> CodeStubAssembler::TimesDoubleSize(SloppyTNode<WordT> value) {
5073   return WordShl(value, kDoubleSizeLog2);
5074 }
5075 
5076 Node* CodeStubAssembler::ToThisValue(Node* context, Node* value,
5077                                      PrimitiveType primitive_type,
5078                                      char const* method_name) {
5079   // We might need to loop once due to JSValue unboxing.
5080   VARIABLE(var_value, MachineRepresentation::kTagged, value);
5081   Label loop(this, &var_value), done_loop(this),
5082       done_throw(this, Label::kDeferred);
5083   Goto(&loop);
5084   BIND(&loop);
5085   {
5086     // Load the current {value}.
5087     value = var_value.value();
5088 
5089     // Check if the {value} is a Smi or a HeapObject.
5090     GotoIf(TaggedIsSmi(value), (primitive_type == PrimitiveType::kNumber)
5091                                    ? &done_loop
5092                                    : &done_throw);
5093 
5094     // Load the map of the {value}.
5095     Node* value_map = LoadMap(value);
5096 
5097     // Load the instance type of the {value}.
5098     Node* value_instance_type = LoadMapInstanceType(value_map);
5099 
5100     // Check if {value} is a JSValue.
5101     Label if_valueisvalue(this, Label::kDeferred), if_valueisnotvalue(this);
5102     Branch(InstanceTypeEqual(value_instance_type, JS_VALUE_TYPE),
5103            &if_valueisvalue, &if_valueisnotvalue);
5104 
5105     BIND(&if_valueisvalue);
5106     {
5107       // Load the actual value from the {value}.
5108       var_value.Bind(LoadObjectField(value, JSValue::kValueOffset));
5109       Goto(&loop);
5110     }
5111 
5112     BIND(&if_valueisnotvalue);
5113     {
5114       switch (primitive_type) {
5115         case PrimitiveType::kBoolean:
5116           GotoIf(WordEqual(value_map, BooleanMapConstant()), &done_loop);
5117           break;
5118         case PrimitiveType::kNumber:
5119           GotoIf(WordEqual(value_map, HeapNumberMapConstant()), &done_loop);
5120           break;
5121         case PrimitiveType::kString:
5122           GotoIf(IsStringInstanceType(value_instance_type), &done_loop);
5123           break;
5124         case PrimitiveType::kSymbol:
5125           GotoIf(WordEqual(value_map, SymbolMapConstant()), &done_loop);
5126           break;
5127       }
5128       Goto(&done_throw);
5129     }
5130   }
5131 
5132   BIND(&done_throw);
5133   {
5134     const char* primitive_name = nullptr;
5135     switch (primitive_type) {
5136       case PrimitiveType::kBoolean:
5137         primitive_name = "Boolean";
5138         break;
5139       case PrimitiveType::kNumber:
5140         primitive_name = "Number";
5141         break;
5142       case PrimitiveType::kString:
5143         primitive_name = "String";
5144         break;
5145       case PrimitiveType::kSymbol:
5146         primitive_name = "Symbol";
5147         break;
5148     }
5149     CHECK_NOT_NULL(primitive_name);
5150 
5151     // The {value} is not a compatible receiver for this method.
5152     ThrowTypeError(context, MessageTemplate::kNotGeneric, method_name,
5153                    primitive_name);
5154   }
5155 
5156   BIND(&done_loop);
5157   return var_value.value();
5158 }
5159 
5160 Node* CodeStubAssembler::ThrowIfNotInstanceType(Node* context, Node* value,
5161                                                 InstanceType instance_type,
5162                                                 char const* method_name) {
5163   Label out(this), throw_exception(this, Label::kDeferred);
5164   VARIABLE(var_value_map, MachineRepresentation::kTagged);
5165 
5166   GotoIf(TaggedIsSmi(value), &throw_exception);
5167 
5168   // Load the instance type of the {value}.
5169   var_value_map.Bind(LoadMap(value));
5170   Node* const value_instance_type = LoadMapInstanceType(var_value_map.value());
5171 
5172   Branch(Word32Equal(value_instance_type, Int32Constant(instance_type)), &out,
5173          &throw_exception);
5174 
5175   // The {value} is not a compatible receiver for this method.
5176   BIND(&throw_exception);
5177   ThrowTypeError(context, MessageTemplate::kIncompatibleMethodReceiver,
5178                  StringConstant(method_name), value);
5179 
5180   BIND(&out);
5181   return var_value_map.value();
5182 }
5183 
5184 Node* CodeStubAssembler::ThrowIfNotJSReceiver(
5185     Node* context, Node* value, MessageTemplate::Template msg_template,
5186     const char* method_name) {
5187   Label out(this), throw_exception(this, Label::kDeferred);
5188   VARIABLE(var_value_map, MachineRepresentation::kTagged);
5189 
5190   GotoIf(TaggedIsSmi(value), &throw_exception);
5191 
5192   // Load the instance type of the {value}.
5193   var_value_map.Bind(LoadMap(value));
5194   Node* const value_instance_type = LoadMapInstanceType(var_value_map.value());
5195 
5196   Branch(IsJSReceiverInstanceType(value_instance_type), &out, &throw_exception);
5197 
5198   // The {value} is not a compatible receiver for this method.
5199   BIND(&throw_exception);
5200   ThrowTypeError(context, msg_template, method_name);
5201 
5202   BIND(&out);
5203   return var_value_map.value();
5204 }
5205 
5206 void CodeStubAssembler::ThrowRangeError(Node* context,
5207                                         MessageTemplate::Template message,
5208                                         Node* arg0, Node* arg1, Node* arg2) {
5209   Node* template_index = SmiConstant(message);
5210   if (arg0 == nullptr) {
5211     CallRuntime(Runtime::kThrowRangeError, context, template_index);
5212   } else if (arg1 == nullptr) {
5213     CallRuntime(Runtime::kThrowRangeError, context, template_index, arg0);
5214   } else if (arg2 == nullptr) {
5215     CallRuntime(Runtime::kThrowRangeError, context, template_index, arg0, arg1);
5216   } else {
5217     CallRuntime(Runtime::kThrowRangeError, context, template_index, arg0, arg1,
5218                 arg2);
5219   }
5220   Unreachable();
5221 }
5222 
5223 void CodeStubAssembler::ThrowTypeError(Node* context,
5224                                        MessageTemplate::Template message,
5225                                        char const* arg0, char const* arg1) {
5226   Node* arg0_node = nullptr;
5227   if (arg0) arg0_node = StringConstant(arg0);
5228   Node* arg1_node = nullptr;
5229   if (arg1) arg1_node = StringConstant(arg1);
5230   ThrowTypeError(context, message, arg0_node, arg1_node);
5231 }
5232 
5233 void CodeStubAssembler::ThrowTypeError(Node* context,
5234                                        MessageTemplate::Template message,
5235                                        Node* arg0, Node* arg1, Node* arg2) {
5236   Node* template_index = SmiConstant(message);
5237   if (arg0 == nullptr) {
5238     CallRuntime(Runtime::kThrowTypeError, context, template_index);
5239   } else if (arg1 == nullptr) {
5240     CallRuntime(Runtime::kThrowTypeError, context, template_index, arg0);
5241   } else if (arg2 == nullptr) {
5242     CallRuntime(Runtime::kThrowTypeError, context, template_index, arg0, arg1);
5243   } else {
5244     CallRuntime(Runtime::kThrowTypeError, context, template_index, arg0, arg1,
5245                 arg2);
5246   }
5247   Unreachable();
5248 }
5249 
5250 TNode<BoolT> CodeStubAssembler::InstanceTypeEqual(
5251     SloppyTNode<Int32T> instance_type, int type) {
5252   return Word32Equal(instance_type, Int32Constant(type));
5253 }
5254 
5255 TNode<BoolT> CodeStubAssembler::IsDictionaryMap(SloppyTNode<Map> map) {
5256   CSA_SLOW_ASSERT(this, IsMap(map));
5257   Node* bit_field3 = LoadMapBitField3(map);
5258   return IsSetWord32<Map::IsDictionaryMapBit>(bit_field3);
5259 }
5260 
5261 TNode<BoolT> CodeStubAssembler::IsExtensibleMap(SloppyTNode<Map> map) {
5262   CSA_ASSERT(this, IsMap(map));
5263   return IsSetWord32<Map::IsExtensibleBit>(LoadMapBitField2(map));
5264 }
5265 
5266 TNode<BoolT> CodeStubAssembler::IsCallableMap(SloppyTNode<Map> map) {
5267   CSA_ASSERT(this, IsMap(map));
5268   return IsSetWord32<Map::IsCallableBit>(LoadMapBitField(map));
5269 }
5270 
5271 TNode<BoolT> CodeStubAssembler::IsDeprecatedMap(SloppyTNode<Map> map) {
5272   CSA_ASSERT(this, IsMap(map));
5273   return IsSetWord32<Map::IsDeprecatedBit>(LoadMapBitField3(map));
5274 }
5275 
5276 TNode<BoolT> CodeStubAssembler::IsUndetectableMap(SloppyTNode<Map> map) {
5277   CSA_ASSERT(this, IsMap(map));
5278   return IsSetWord32<Map::IsUndetectableBit>(LoadMapBitField(map));
5279 }
5280 
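// The protector predicates below compare a cell's value against
// Isolate::kProtectorInvalid. A protector cell starts out valid and is
// invalidated once the invariant it guards (e.g. no elements installed on
// Array.prototype) is broken; builtins consult it to decide whether a fast
// path is still safe.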
5281 TNode<BoolT> CodeStubAssembler::IsNoElementsProtectorCellInvalid() {
5282   Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
5283   Node* cell = LoadRoot(Heap::kNoElementsProtectorRootIndex);
5284   Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5285   return WordEqual(cell_value, invalid);
5286 }
5287 
5288 TNode<BoolT> CodeStubAssembler::IsPromiseResolveProtectorCellInvalid() {
5289   Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
5290   Node* cell = LoadRoot(Heap::kPromiseResolveProtectorRootIndex);
5291   Node* cell_value = LoadObjectField(cell, Cell::kValueOffset);
5292   return WordEqual(cell_value, invalid);
5293 }
5294 
5295 TNode<BoolT> CodeStubAssembler::IsPromiseThenProtectorCellInvalid() {
5296   Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
5297   Node* cell = LoadRoot(Heap::kPromiseThenProtectorRootIndex);
5298   Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5299   return WordEqual(cell_value, invalid);
5300 }
5301 
5302 TNode<BoolT> CodeStubAssembler::IsArraySpeciesProtectorCellInvalid() {
5303   Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
5304   Node* cell = LoadRoot(Heap::kArraySpeciesProtectorRootIndex);
5305   Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5306   return WordEqual(cell_value, invalid);
5307 }
5308 
5309 TNode<BoolT> CodeStubAssembler::IsTypedArraySpeciesProtectorCellInvalid() {
5310   Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
5311   Node* cell = LoadRoot(Heap::kTypedArraySpeciesProtectorRootIndex);
5312   Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5313   return WordEqual(cell_value, invalid);
5314 }
5315 
5316 TNode<BoolT> CodeStubAssembler::IsPromiseSpeciesProtectorCellInvalid() {
5317   Node* invalid = SmiConstant(Isolate::kProtectorInvalid);
5318   Node* cell = LoadRoot(Heap::kPromiseSpeciesProtectorRootIndex);
5319   Node* cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5320   return WordEqual(cell_value, invalid);
5321 }
5322 
5323 TNode<BoolT> CodeStubAssembler::IsPrototypeInitialArrayPrototype(
5324     SloppyTNode<Context> context, SloppyTNode<Map> map) {
5325   Node* const native_context = LoadNativeContext(context);
5326   Node* const initial_array_prototype = LoadContextElement(
5327       native_context, Context::INITIAL_ARRAY_PROTOTYPE_INDEX);
5328   Node* proto = LoadMapPrototype(map);
5329   return WordEqual(proto, initial_array_prototype);
5330 }
5331 
5332 TNode<BoolT> CodeStubAssembler::IsPrototypeTypedArrayPrototype(
5333     SloppyTNode<Context> context, SloppyTNode<Map> map) {
5334   TNode<Context> const native_context = LoadNativeContext(context);
5335   TNode<Object> const typed_array_prototype =
5336       LoadContextElement(native_context, Context::TYPED_ARRAY_PROTOTYPE_INDEX);
5337   TNode<HeapObject> proto = LoadMapPrototype(map);
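  // Typed array instances use a per-type prototype (e.g. Uint8Array.prototype)
  // whose own prototype is %TypedArray%.prototype, hence the extra prototype
  // hop below.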
5338   TNode<HeapObject> proto_of_proto = Select<HeapObject>(
5339       IsJSObject(proto), [=] { return LoadMapPrototype(LoadMap(proto)); },
5340       [=] { return NullConstant(); });
5341   return WordEqual(proto_of_proto, typed_array_prototype);
5342 }
5343 
5344 TNode<BoolT> CodeStubAssembler::TaggedIsCallable(TNode<Object> object) {
5345   return Select<BoolT>(
5346       TaggedIsSmi(object), [=] { return Int32FalseConstant(); },
5347       [=] {
5348         return IsCallableMap(LoadMap(UncheckedCast<HeapObject>(object)));
5349       });
5350 }
5351 
5352 TNode<BoolT> CodeStubAssembler::IsCallable(SloppyTNode<HeapObject> object) {
5353   return IsCallableMap(LoadMap(object));
5354 }
5355 
5356 TNode<BoolT> CodeStubAssembler::IsCell(SloppyTNode<HeapObject> object) {
5357   return WordEqual(LoadMap(object), LoadRoot(Heap::kCellMapRootIndex));
5358 }
5359 
5360 TNode<BoolT> CodeStubAssembler::IsCode(SloppyTNode<HeapObject> object) {
5361   return HasInstanceType(object, CODE_TYPE);
5362 }
5363 
5364 TNode<BoolT> CodeStubAssembler::IsConstructorMap(SloppyTNode<Map> map) {
5365   CSA_ASSERT(this, IsMap(map));
5366   return IsSetWord32<Map::IsConstructorBit>(LoadMapBitField(map));
5367 }
5368 
5369 TNode<BoolT> CodeStubAssembler::IsConstructor(SloppyTNode<HeapObject> object) {
5370   return IsConstructorMap(LoadMap(object));
5371 }
5372 
5373 TNode<BoolT> CodeStubAssembler::IsFunctionWithPrototypeSlotMap(
5374     SloppyTNode<Map> map) {
5375   CSA_ASSERT(this, IsMap(map));
5376   return IsSetWord32<Map::HasPrototypeSlotBit>(LoadMapBitField(map));
5377 }
5378 
5379 TNode<BoolT> CodeStubAssembler::IsSpecialReceiverInstanceType(
5380     TNode<Int32T> instance_type) {
5381   STATIC_ASSERT(JS_GLOBAL_OBJECT_TYPE <= LAST_SPECIAL_RECEIVER_TYPE);
5382   return Int32LessThanOrEqual(instance_type,
5383                               Int32Constant(LAST_SPECIAL_RECEIVER_TYPE));
5384 }
5385 
5386 TNode<BoolT> CodeStubAssembler::IsCustomElementsReceiverInstanceType(
5387     TNode<Int32T> instance_type) {
5388   return Int32LessThanOrEqual(instance_type,
5389                               Int32Constant(LAST_CUSTOM_ELEMENTS_RECEIVER));
5390 }
5391 
5392 TNode<BoolT> CodeStubAssembler::IsStringInstanceType(
5393     SloppyTNode<Int32T> instance_type) {
5394   STATIC_ASSERT(INTERNALIZED_STRING_TYPE == FIRST_TYPE);
5395   return Int32LessThan(instance_type, Int32Constant(FIRST_NONSTRING_TYPE));
5396 }
5397 
5398 TNode<BoolT> CodeStubAssembler::IsOneByteStringInstanceType(
5399     SloppyTNode<Int32T> instance_type) {
5400   CSA_ASSERT(this, IsStringInstanceType(instance_type));
5401   return Word32Equal(
5402       Word32And(instance_type, Int32Constant(kStringEncodingMask)),
5403       Int32Constant(kOneByteStringTag));
5404 }
5405 
5406 TNode<BoolT> CodeStubAssembler::IsSequentialStringInstanceType(
5407     SloppyTNode<Int32T> instance_type) {
5408   CSA_ASSERT(this, IsStringInstanceType(instance_type));
5409   return Word32Equal(
5410       Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
5411       Int32Constant(kSeqStringTag));
5412 }
5413 
5414 TNode<BoolT> CodeStubAssembler::IsConsStringInstanceType(
5415     SloppyTNode<Int32T> instance_type) {
5416   CSA_ASSERT(this, IsStringInstanceType(instance_type));
5417   return Word32Equal(
5418       Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
5419       Int32Constant(kConsStringTag));
5420 }
5421 
5422 TNode<BoolT> CodeStubAssembler::IsIndirectStringInstanceType(
5423     SloppyTNode<Int32T> instance_type) {
5424   CSA_ASSERT(this, IsStringInstanceType(instance_type));
5425   STATIC_ASSERT(kIsIndirectStringMask == 0x1);
5426   STATIC_ASSERT(kIsIndirectStringTag == 0x1);
5427   return UncheckedCast<BoolT>(
5428       Word32And(instance_type, Int32Constant(kIsIndirectStringMask)));
5429 }
5430 
5431 TNode<BoolT> CodeStubAssembler::IsExternalStringInstanceType(
5432     SloppyTNode<Int32T> instance_type) {
5433   CSA_ASSERT(this, IsStringInstanceType(instance_type));
5434   return Word32Equal(
5435       Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
5436       Int32Constant(kExternalStringTag));
5437 }
5438 
5439 TNode<BoolT> CodeStubAssembler::IsShortExternalStringInstanceType(
5440     SloppyTNode<Int32T> instance_type) {
5441   CSA_ASSERT(this, IsStringInstanceType(instance_type));
5442   STATIC_ASSERT(kShortExternalStringTag != 0);
5443   return IsSetWord32(instance_type, kShortExternalStringMask);
5444 }
5445 
5446 TNode<BoolT> CodeStubAssembler::IsJSReceiverInstanceType(
5447     SloppyTNode<Int32T> instance_type) {
5448   STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
5449   return Int32GreaterThanOrEqual(instance_type,
5450                                  Int32Constant(FIRST_JS_RECEIVER_TYPE));
5451 }
5452 
5453 TNode<BoolT> CodeStubAssembler::IsJSReceiverMap(SloppyTNode<Map> map) {
5454   return IsJSReceiverInstanceType(LoadMapInstanceType(map));
5455 }
5456 
5457 TNode<BoolT> CodeStubAssembler::IsJSReceiver(SloppyTNode<HeapObject> object) {
5458   return IsJSReceiverMap(LoadMap(object));
5459 }
5460 
5461 TNode<BoolT> CodeStubAssembler::IsNullOrJSReceiver(
5462     SloppyTNode<HeapObject> object) {
5463   return UncheckedCast<BoolT>(Word32Or(IsJSReceiver(object), IsNull(object)));
5464 }
5465 
5466 TNode<BoolT> CodeStubAssembler::IsNullOrUndefined(SloppyTNode<Object> value) {
5467   return UncheckedCast<BoolT>(Word32Or(IsUndefined(value), IsNull(value)));
5468 }
5469 
5470 TNode<BoolT> CodeStubAssembler::IsJSGlobalProxyInstanceType(
5471     SloppyTNode<Int32T> instance_type) {
5472   return InstanceTypeEqual(instance_type, JS_GLOBAL_PROXY_TYPE);
5473 }
5474 
5475 TNode<BoolT> CodeStubAssembler::IsJSObjectInstanceType(
5476     SloppyTNode<Int32T> instance_type) {
5477   STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
5478   return Int32GreaterThanOrEqual(instance_type,
5479                                  Int32Constant(FIRST_JS_OBJECT_TYPE));
5480 }
5481 
5482 TNode<BoolT> CodeStubAssembler::IsJSObjectMap(SloppyTNode<Map> map) {
5483   CSA_ASSERT(this, IsMap(map));
5484   return IsJSObjectInstanceType(LoadMapInstanceType(map));
5485 }
5486 
5487 TNode<BoolT> CodeStubAssembler::IsJSObject(SloppyTNode<HeapObject> object) {
5488   return IsJSObjectMap(LoadMap(object));
5489 }
5490 
5491 TNode<BoolT> CodeStubAssembler::IsJSPromiseMap(SloppyTNode<Map> map) {
5492   CSA_ASSERT(this, IsMap(map));
5493   return InstanceTypeEqual(LoadMapInstanceType(map), JS_PROMISE_TYPE);
5494 }
5495 
5496 TNode<BoolT> CodeStubAssembler::IsJSPromise(SloppyTNode<HeapObject> object) {
5497   return IsJSPromiseMap(LoadMap(object));
5498 }
5499 
5500 TNode<BoolT> CodeStubAssembler::IsJSProxy(SloppyTNode<HeapObject> object) {
5501   return HasInstanceType(object, JS_PROXY_TYPE);
5502 }
5503 
5504 TNode<BoolT> CodeStubAssembler::IsJSGlobalProxy(
5505     SloppyTNode<HeapObject> object) {
5506   return HasInstanceType(object, JS_GLOBAL_PROXY_TYPE);
5507 }
5508 
5509 TNode<BoolT> CodeStubAssembler::IsMap(SloppyTNode<HeapObject> map) {
5510   return IsMetaMap(LoadMap(map));
5511 }
5512 
5513 TNode<BoolT> CodeStubAssembler::IsJSValueInstanceType(
5514     SloppyTNode<Int32T> instance_type) {
5515   return InstanceTypeEqual(instance_type, JS_VALUE_TYPE);
5516 }
5517 
5518 TNode<BoolT> CodeStubAssembler::IsJSValue(SloppyTNode<HeapObject> object) {
5519   return IsJSValueMap(LoadMap(object));
5520 }
5521 
5522 TNode<BoolT> CodeStubAssembler::IsJSValueMap(SloppyTNode<Map> map) {
5523   return IsJSValueInstanceType(LoadMapInstanceType(map));
5524 }
5525 
5526 TNode<BoolT> CodeStubAssembler::IsJSArrayInstanceType(
5527     SloppyTNode<Int32T> instance_type) {
5528   return InstanceTypeEqual(instance_type, JS_ARRAY_TYPE);
5529 }
5530 
5531 TNode<BoolT> CodeStubAssembler::IsJSArray(SloppyTNode<HeapObject> object) {
5532   return IsJSArrayMap(LoadMap(object));
5533 }
5534 
5535 TNode<BoolT> CodeStubAssembler::IsJSArrayMap(SloppyTNode<Map> map) {
5536   return IsJSArrayInstanceType(LoadMapInstanceType(map));
5537 }
5538 
5539 TNode<BoolT> CodeStubAssembler::IsJSArrayIterator(
5540     SloppyTNode<HeapObject> object) {
5541   return HasInstanceType(object, JS_ARRAY_ITERATOR_TYPE);
5542 }
5543 
5544 TNode<BoolT> CodeStubAssembler::IsJSAsyncGeneratorObject(
5545     SloppyTNode<HeapObject> object) {
5546   return HasInstanceType(object, JS_ASYNC_GENERATOR_OBJECT_TYPE);
5547 }
5548 
5549 TNode<BoolT> CodeStubAssembler::IsContext(SloppyTNode<HeapObject> object) {
5550   Node* instance_type = LoadInstanceType(object);
5551   return UncheckedCast<BoolT>(Word32And(
5552       Int32GreaterThanOrEqual(instance_type, Int32Constant(FIRST_CONTEXT_TYPE)),
5553       Int32LessThanOrEqual(instance_type, Int32Constant(LAST_CONTEXT_TYPE))));
5554 }
5555 
5556 TNode<BoolT> CodeStubAssembler::IsFixedArray(SloppyTNode<HeapObject> object) {
5557   return HasInstanceType(object, FIXED_ARRAY_TYPE);
5558 }
5559 
5560 TNode<BoolT> CodeStubAssembler::IsFixedArraySubclass(
5561     SloppyTNode<HeapObject> object) {
5562   Node* instance_type = LoadInstanceType(object);
5563   return UncheckedCast<BoolT>(
5564       Word32And(Int32GreaterThanOrEqual(instance_type,
5565                                         Int32Constant(FIRST_FIXED_ARRAY_TYPE)),
5566                 Int32LessThanOrEqual(instance_type,
5567                                      Int32Constant(LAST_FIXED_ARRAY_TYPE))));
5568 }
5569 
5570 TNode<BoolT> CodeStubAssembler::IsNotWeakFixedArraySubclass(
5571     SloppyTNode<HeapObject> object) {
5572   Node* instance_type = LoadInstanceType(object);
5573   return UncheckedCast<BoolT>(Word32Or(
5574       Int32LessThan(instance_type, Int32Constant(FIRST_WEAK_FIXED_ARRAY_TYPE)),
5575       Int32GreaterThan(instance_type,
5576                        Int32Constant(LAST_WEAK_FIXED_ARRAY_TYPE))));
5577 }
5578 
5579 TNode<BoolT> CodeStubAssembler::IsPromiseCapability(
5580     SloppyTNode<HeapObject> object) {
5581   return HasInstanceType(object, PROMISE_CAPABILITY_TYPE);
5582 }
5583 
5584 TNode<BoolT> CodeStubAssembler::IsPropertyArray(
5585     SloppyTNode<HeapObject> object) {
5586   return HasInstanceType(object, PROPERTY_ARRAY_TYPE);
5587 }
5588 
5589 // This complicated check is due to elements oddities. If a smi array is empty
5590 // after Array.p.shift, it is replaced by the empty array constant. If it is
5591 // later filled with a double element, we try to grow it but pass in a double
5592 // elements kind. Usually this would cause a size mismatch (since the source
5593 // fixed array has HOLEY_ELEMENTS and destination has
5594 // HOLEY_DOUBLE_ELEMENTS), but we don't have to worry about it when the
5595 // source array is empty.
5596 // TODO(jgruber): It might be worth creating an empty_double_array constant to
5597 // simplify this case.
5598 TNode<BoolT> CodeStubAssembler::IsFixedArrayWithKindOrEmpty(
5599     SloppyTNode<HeapObject> object, ElementsKind kind) {
5600   Label out(this);
5601   TVARIABLE(BoolT, var_result, Int32TrueConstant());
5602 
5603   GotoIf(IsFixedArrayWithKind(object, kind), &out);
5604 
5605   TNode<Smi> const length = LoadFixedArrayBaseLength(CAST(object));
5606   GotoIf(SmiEqual(length, SmiConstant(0)), &out);
5607 
5608   var_result = Int32FalseConstant();
5609   Goto(&out);
5610 
5611   BIND(&out);
5612   return var_result.value();
5613 }
5614 
5615 TNode<BoolT> CodeStubAssembler::IsFixedArrayWithKind(
5616     SloppyTNode<HeapObject> object, ElementsKind kind) {
5617   if (IsDoubleElementsKind(kind)) {
5618     return IsFixedDoubleArray(object);
5619   } else {
5620     DCHECK(IsSmiOrObjectElementsKind(kind));
5621     return IsFixedArraySubclass(object);
5622   }
5623 }
5624 
5625 TNode<BoolT> CodeStubAssembler::IsBoolean(SloppyTNode<HeapObject> object) {
5626   return IsBooleanMap(LoadMap(object));
5627 }
5628 
5629 TNode<BoolT> CodeStubAssembler::IsPropertyCell(SloppyTNode<HeapObject> object) {
5630   return IsPropertyCellMap(LoadMap(object));
5631 }
5632 
5633 TNode<BoolT> CodeStubAssembler::IsAccessorInfo(SloppyTNode<HeapObject> object) {
5634   return IsAccessorInfoMap(LoadMap(object));
5635 }
5636 
5637 TNode<BoolT> CodeStubAssembler::IsAccessorPair(SloppyTNode<HeapObject> object) {
5638   return IsAccessorPairMap(LoadMap(object));
5639 }
5640 
5641 TNode<BoolT> CodeStubAssembler::IsAllocationSite(
5642     SloppyTNode<HeapObject> object) {
5643   return IsAllocationSiteInstanceType(LoadInstanceType(object));
5644 }
5645 
5646 TNode<BoolT> CodeStubAssembler::IsAnyHeapNumber(
5647     SloppyTNode<HeapObject> object) {
5648   return UncheckedCast<BoolT>(
5649       Word32Or(IsMutableHeapNumber(object), IsHeapNumber(object)));
5650 }
5651 
5652 TNode<BoolT> CodeStubAssembler::IsHeapNumber(SloppyTNode<HeapObject> object) {
5653   return IsHeapNumberMap(LoadMap(object));
5654 }
5655 
5656 TNode<BoolT> CodeStubAssembler::IsMutableHeapNumber(
5657     SloppyTNode<HeapObject> object) {
5658   return IsMutableHeapNumberMap(LoadMap(object));
5659 }
5660 
5661 TNode<BoolT> CodeStubAssembler::IsFeedbackCell(SloppyTNode<HeapObject> object) {
5662   return HasInstanceType(object, FEEDBACK_CELL_TYPE);
5663 }
5664 
5665 TNode<BoolT> CodeStubAssembler::IsFeedbackVector(
5666     SloppyTNode<HeapObject> object) {
5667   return IsFeedbackVectorMap(LoadMap(object));
5668 }
5669 
5670 TNode<BoolT> CodeStubAssembler::IsName(SloppyTNode<HeapObject> object) {
5671   return Int32LessThanOrEqual(LoadInstanceType(object),
5672                               Int32Constant(LAST_NAME_TYPE));
5673 }
5674 
5675 TNode<BoolT> CodeStubAssembler::IsString(SloppyTNode<HeapObject> object) {
5676   return IsStringInstanceType(LoadInstanceType(object));
5677 }
5678 
5679 TNode<BoolT> CodeStubAssembler::IsSymbolInstanceType(
5680     SloppyTNode<Int32T> instance_type) {
5681   return InstanceTypeEqual(instance_type, SYMBOL_TYPE);
5682 }
5683 
5684 TNode<BoolT> CodeStubAssembler::IsSymbol(SloppyTNode<HeapObject> object) {
5685   return IsSymbolMap(LoadMap(object));
5686 }
5687 
5688 TNode<BoolT> CodeStubAssembler::IsBigIntInstanceType(
5689     SloppyTNode<Int32T> instance_type) {
5690   return InstanceTypeEqual(instance_type, BIGINT_TYPE);
5691 }
5692 
5693 TNode<BoolT> CodeStubAssembler::IsBigInt(SloppyTNode<HeapObject> object) {
5694   return IsBigIntInstanceType(LoadInstanceType(object));
5695 }
5696 
5697 TNode<BoolT> CodeStubAssembler::IsPrimitiveInstanceType(
5698     SloppyTNode<Int32T> instance_type) {
5699   return Int32LessThanOrEqual(instance_type,
5700                               Int32Constant(LAST_PRIMITIVE_TYPE));
5701 }
5702 
5703 TNode<BoolT> CodeStubAssembler::IsPrivateSymbol(
5704     SloppyTNode<HeapObject> object) {
5705   return Select<BoolT>(
5706       IsSymbol(object),
5707       [=] {
5708         TNode<Symbol> symbol = CAST(object);
5709         TNode<Int32T> flags =
5710             SmiToInt32(LoadObjectField<Smi>(symbol, Symbol::kFlagsOffset));
5711         return IsSetWord32(flags, 1 << Symbol::kPrivateBit);
5712       },
5713       [=] { return Int32FalseConstant(); });
5714 }
5715 
5716 TNode<BoolT> CodeStubAssembler::IsNativeContext(
5717     SloppyTNode<HeapObject> object) {
5718   return WordEqual(LoadMap(object), LoadRoot(Heap::kNativeContextMapRootIndex));
5719 }
5720 
5721 TNode<BoolT> CodeStubAssembler::IsFixedDoubleArray(
5722     SloppyTNode<HeapObject> object) {
5723   return WordEqual(LoadMap(object), FixedDoubleArrayMapConstant());
5724 }
5725 
5726 TNode<BoolT> CodeStubAssembler::IsHashTable(SloppyTNode<HeapObject> object) {
5727   Node* instance_type = LoadInstanceType(object);
5728   return UncheckedCast<BoolT>(
5729       Word32And(Int32GreaterThanOrEqual(instance_type,
5730                                         Int32Constant(FIRST_HASH_TABLE_TYPE)),
5731                 Int32LessThanOrEqual(instance_type,
5732                                      Int32Constant(LAST_HASH_TABLE_TYPE))));
5733 }
5734 
5735 TNode<BoolT> CodeStubAssembler::IsEphemeronHashTable(
5736     SloppyTNode<HeapObject> object) {
5737   return HasInstanceType(object, EPHEMERON_HASH_TABLE_TYPE);
5738 }
5739 
5740 TNode<BoolT> CodeStubAssembler::IsNameDictionary(
5741     SloppyTNode<HeapObject> object) {
5742   return HasInstanceType(object, NAME_DICTIONARY_TYPE);
5743 }
5744 
5745 TNode<BoolT> CodeStubAssembler::IsGlobalDictionary(
5746     SloppyTNode<HeapObject> object) {
5747   return HasInstanceType(object, GLOBAL_DICTIONARY_TYPE);
5748 }
5749 
5750 TNode<BoolT> CodeStubAssembler::IsNumberDictionary(
5751     SloppyTNode<HeapObject> object) {
5752   return HasInstanceType(object, NUMBER_DICTIONARY_TYPE);
5753 }
5754 
5755 TNode<BoolT> CodeStubAssembler::IsJSGeneratorObject(
5756     SloppyTNode<HeapObject> object) {
5757   return HasInstanceType(object, JS_GENERATOR_OBJECT_TYPE);
5758 }
5759 
5760 TNode<BoolT> CodeStubAssembler::IsJSFunctionInstanceType(
5761     SloppyTNode<Int32T> instance_type) {
5762   return InstanceTypeEqual(instance_type, JS_FUNCTION_TYPE);
5763 }
5764 
5765 TNode<BoolT> CodeStubAssembler::IsAllocationSiteInstanceType(
5766     SloppyTNode<Int32T> instance_type) {
5767   return InstanceTypeEqual(instance_type, ALLOCATION_SITE_TYPE);
5768 }
5769 
5770 TNode<BoolT> CodeStubAssembler::IsJSFunction(SloppyTNode<HeapObject> object) {
5771   return IsJSFunctionMap(LoadMap(object));
5772 }
5773 
5774 TNode<BoolT> CodeStubAssembler::IsJSFunctionMap(SloppyTNode<Map> map) {
5775   return IsJSFunctionInstanceType(LoadMapInstanceType(map));
5776 }
5777 
5778 TNode<BoolT> CodeStubAssembler::IsJSTypedArray(SloppyTNode<HeapObject> object) {
5779   return HasInstanceType(object, JS_TYPED_ARRAY_TYPE);
5780 }
5781 
5782 TNode<BoolT> CodeStubAssembler::IsJSArrayBuffer(
5783     SloppyTNode<HeapObject> object) {
5784   return HasInstanceType(object, JS_ARRAY_BUFFER_TYPE);
5785 }
5786 
5787 TNode<BoolT> CodeStubAssembler::IsJSDataView(TNode<HeapObject> object) {
5788   return HasInstanceType(object, JS_DATA_VIEW_TYPE);
5789 }
5790 
5791 TNode<BoolT> CodeStubAssembler::IsFixedTypedArray(
5792     SloppyTNode<HeapObject> object) {
5793   TNode<Int32T> instance_type = LoadInstanceType(object);
5794   return UncheckedCast<BoolT>(Word32And(
5795       Int32GreaterThanOrEqual(instance_type,
5796                               Int32Constant(FIRST_FIXED_TYPED_ARRAY_TYPE)),
5797       Int32LessThanOrEqual(instance_type,
5798                            Int32Constant(LAST_FIXED_TYPED_ARRAY_TYPE))));
5799 }
5800 
5801 TNode<BoolT> CodeStubAssembler::IsJSRegExp(SloppyTNode<HeapObject> object) {
5802   return HasInstanceType(object, JS_REGEXP_TYPE);
5803 }
5804 
5805 TNode<BoolT> CodeStubAssembler::IsNumber(SloppyTNode<Object> object) {
5806   return Select<BoolT>(TaggedIsSmi(object), [=] { return Int32TrueConstant(); },
5807                        [=] { return IsHeapNumber(CAST(object)); });
5808 }
5809 
5810 TNode<BoolT> CodeStubAssembler::IsNumeric(SloppyTNode<Object> object) {
5811   return Select<BoolT>(
5812       TaggedIsSmi(object), [=] { return Int32TrueConstant(); },
5813       [=] {
5814         return UncheckedCast<BoolT>(
5815             Word32Or(IsHeapNumber(CAST(object)), IsBigInt(CAST(object))));
5816       });
5817 }
5818 
5819 TNode<BoolT> CodeStubAssembler::IsNumberNormalized(SloppyTNode<Number> number) {
5820   TVARIABLE(BoolT, var_result, Int32TrueConstant());
5821   Label out(this);
5822 
5823   GotoIf(TaggedIsSmi(number), &out);
5824 
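  // A HeapNumber is reported as normalized only if its value lies outside the
  // Smi range or is NaN; any other value (including non-integral values inside
  // the Smi range) falls through and reports false.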
5825   TNode<Float64T> value = LoadHeapNumberValue(CAST(number));
5826   TNode<Float64T> smi_min =
5827       Float64Constant(static_cast<double>(Smi::kMinValue));
5828   TNode<Float64T> smi_max =
5829       Float64Constant(static_cast<double>(Smi::kMaxValue));
5830 
5831   GotoIf(Float64LessThan(value, smi_min), &out);
5832   GotoIf(Float64GreaterThan(value, smi_max), &out);
5833   GotoIfNot(Float64Equal(value, value), &out);  // NaN.
5834 
5835   var_result = Int32FalseConstant();
5836   Goto(&out);
5837 
5838   BIND(&out);
5839   return var_result.value();
5840 }
5841 
5842 TNode<BoolT> CodeStubAssembler::IsNumberPositive(SloppyTNode<Number> number) {
5843   return Select<BoolT>(TaggedIsSmi(number),
5844                        [=] { return TaggedIsPositiveSmi(number); },
5845                        [=] { return IsHeapNumberPositive(CAST(number)); });
5846 }
5847 
5848 // TODO(cbruni): Use TNode<HeapNumber> instead of custom name.
5849 TNode<BoolT> CodeStubAssembler::IsHeapNumberPositive(TNode<HeapNumber> number) {
5850   TNode<Float64T> value = LoadHeapNumberValue(number);
5851   TNode<Float64T> float_zero = Float64Constant(0.);
5852   return Float64GreaterThanOrEqual(value, float_zero);
5853 }
5854 
5855 TNode<BoolT> CodeStubAssembler::IsNumberNonNegativeSafeInteger(
5856     TNode<Number> number) {
5857   return Select<BoolT>(
5858       // TODO(cbruni): Introduce TaggedIsNonNegativeSmi to avoid confusion.
5859       TaggedIsSmi(number), [=] { return TaggedIsPositiveSmi(number); },
5860       [=] {
5861         TNode<HeapNumber> heap_number = CAST(number);
5862         return Select<BoolT>(IsInteger(heap_number),
5863                              [=] { return IsHeapNumberPositive(heap_number); },
5864                              [=] { return Int32FalseConstant(); });
5865       });
5866 }
5867 
5868 TNode<BoolT> CodeStubAssembler::IsSafeInteger(TNode<Object> number) {
5869   return Select<BoolT>(
5870       TaggedIsSmi(number), [=] { return Int32TrueConstant(); },
5871       [=] {
5872         return Select<BoolT>(
5873             IsHeapNumber(CAST(number)),
5874             [=] { return IsSafeInteger(UncheckedCast<HeapNumber>(number)); },
5875             [=] { return Int32FalseConstant(); });
5876       });
5877 }
5878 
5879 TNode<BoolT> CodeStubAssembler::IsSafeInteger(TNode<HeapNumber> number) {
5880   // Load the actual value of {number}.
5881   TNode<Float64T> number_value = LoadHeapNumberValue(number);
5882   // Truncate the value of {number} to an integer (or an infinity).
5883   TNode<Float64T> integer = Float64Trunc(number_value);
5884 
5885   return Select<BoolT>(
5886       // Check if {number}'s value matches the integer (ruling out the
5887       // infinities).
5888       Float64Equal(Float64Sub(number_value, integer), Float64Constant(0.0)),
5889       [=] {
5890         // Check if the {integer} value is in safe integer range.
5891         return Float64LessThanOrEqual(Float64Abs(integer),
5892                                       Float64Constant(kMaxSafeInteger));
5893       },
5894       [=] { return Int32FalseConstant(); });
5895 }
5896 
5897 TNode<BoolT> CodeStubAssembler::IsInteger(TNode<Object> number) {
5898   return Select<BoolT>(
5899       TaggedIsSmi(number), [=] { return Int32TrueConstant(); },
5900       [=] {
5901         return Select<BoolT>(
5902             IsHeapNumber(CAST(number)),
5903             [=] { return IsInteger(UncheckedCast<HeapNumber>(number)); },
5904             [=] { return Int32FalseConstant(); });
5905       });
5906 }
5907 
5908 TNode<BoolT> CodeStubAssembler::IsInteger(TNode<HeapNumber> number) {
5909   TNode<Float64T> number_value = LoadHeapNumberValue(number);
5910   // Truncate the value of {number} to an integer (or an infinity).
5911   TNode<Float64T> integer = Float64Trunc(number_value);
5912   // Check if {number}'s value matches the integer (ruling out the infinities).
5913   return Float64Equal(Float64Sub(number_value, integer), Float64Constant(0.0));
5914 }
5915 
5916 TNode<BoolT> CodeStubAssembler::IsHeapNumberUint32(TNode<HeapNumber> number) {
5917   // Check that the HeapNumber is a valid uint32
5918   return Select<BoolT>(
5919       IsHeapNumberPositive(number),
5920       [=] {
5921         TNode<Float64T> value = LoadHeapNumberValue(number);
5922         TNode<Uint32T> int_value = Unsigned(TruncateFloat64ToWord32(value));
5923         return Float64Equal(value, ChangeUint32ToFloat64(int_value));
5924       },
5925       [=] { return Int32FalseConstant(); });
5926 }
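// Note (editorial): this is a round-trip check -- a positive HeapNumber holds
// a valid uint32 exactly when truncating it to 32 bits and converting back
// yields the same double. For example 4294967295.0 (2^32 - 1) passes, while
// 1.5 and 4294967296.0 do not compare equal after the round trip.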
5927 
5928 TNode<BoolT> CodeStubAssembler::IsNumberArrayIndex(TNode<Number> number) {
5929   return Select<BoolT>(TaggedIsSmi(number),
5930                        [=] { return TaggedIsPositiveSmi(number); },
5931                        [=] { return IsHeapNumberUint32(CAST(number)); });
5932 }
5933 
5934 Node* CodeStubAssembler::FixedArraySizeDoesntFitInNewSpace(Node* element_count,
5935                                                            int base_size,
5936                                                            ParameterMode mode) {
5937   int max_newspace_elements =
5938       (kMaxRegularHeapObjectSize - base_size) / kPointerSize;
5939   return IntPtrOrSmiGreaterThan(
5940       element_count, IntPtrOrSmiConstant(max_newspace_elements, mode), mode);
5941 }
5942 
5943 TNode<Int32T> CodeStubAssembler::StringCharCodeAt(SloppyTNode<String> string,
5944                                                   SloppyTNode<IntPtrT> index) {
5945   CSA_ASSERT(this, IsString(string));
5946 
5947   CSA_ASSERT(this, IntPtrGreaterThanOrEqual(index, IntPtrConstant(0)));
5948   CSA_ASSERT(this, IntPtrLessThan(index, LoadStringLengthAsWord(string)));
5949 
5950   TVARIABLE(Int32T, var_result);
5951 
5952   Label return_result(this), if_runtime(this, Label::kDeferred),
5953       if_stringistwobyte(this), if_stringisonebyte(this);
5954 
5955   ToDirectStringAssembler to_direct(state(), string);
5956   to_direct.TryToDirect(&if_runtime);
5957   Node* const offset = IntPtrAdd(index, to_direct.offset());
5958   Node* const instance_type = to_direct.instance_type();
5959 
5960   Node* const string_data = to_direct.PointerToData(&if_runtime);
5961 
5962   // Check if the {string} is a TwoByteSeqString or a OneByteSeqString.
5963   Branch(IsOneByteStringInstanceType(instance_type), &if_stringisonebyte,
5964          &if_stringistwobyte);
5965 
5966   BIND(&if_stringisonebyte);
5967   {
5968     var_result =
5969         UncheckedCast<Int32T>(Load(MachineType::Uint8(), string_data, offset));
5970     Goto(&return_result);
5971   }
5972 
5973   BIND(&if_stringistwobyte);
5974   {
5975     var_result =
5976         UncheckedCast<Int32T>(Load(MachineType::Uint16(), string_data,
5977                                    WordShl(offset, IntPtrConstant(1))));
5978     Goto(&return_result);
5979   }
5980 
5981   BIND(&if_runtime);
5982   {
5983     Node* result = CallRuntime(Runtime::kStringCharCodeAt, NoContextConstant(),
5984                                string, SmiTag(index));
5985     var_result = SmiToInt32(result);
5986     Goto(&return_result);
5987   }
5988 
5989   BIND(&return_result);
5990   return var_result.value();
5991 }
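// Note (editorial): once the receiver has been flattened by
// ToDirectStringAssembler, the character load is plain indexed memory access:
// a uint8 load at byte offset |index| for one-byte strings, or a uint16 load
// at byte offset |index| * 2 (the WordShl by 1 above) for two-byte strings,
// e.g. character 3 of a two-byte string is read from string_data + 6. Strings
// that cannot be unpacked without allocation take the deferred
// Runtime::kStringCharCodeAt path instead.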
5992 
5993 TNode<String> CodeStubAssembler::StringFromSingleCharCode(TNode<Int32T> code) {
5994   VARIABLE(var_result, MachineRepresentation::kTagged);
5995 
5996   // Check if the {code} is a one-byte char code.
5997   Label if_codeisonebyte(this), if_codeistwobyte(this, Label::kDeferred),
5998       if_done(this);
5999   Branch(Int32LessThanOrEqual(code, Int32Constant(String::kMaxOneByteCharCode)),
6000          &if_codeisonebyte, &if_codeistwobyte);
6001   BIND(&if_codeisonebyte);
6002   {
6003     // Load the isolate wide single character string cache.
6004     TNode<FixedArray> cache =
6005         CAST(LoadRoot(Heap::kSingleCharacterStringCacheRootIndex));
6006     TNode<IntPtrT> code_index = Signed(ChangeUint32ToWord(code));
6007 
6008     // Check if we have an entry for the {code} in the single character string
6009     // cache already.
6010     Label if_entryisundefined(this, Label::kDeferred),
6011         if_entryisnotundefined(this);
6012     Node* entry = LoadFixedArrayElement(cache, code_index);
6013     Branch(IsUndefined(entry), &if_entryisundefined, &if_entryisnotundefined);
6014 
6015     BIND(&if_entryisundefined);
6016     {
6017       // Allocate a new SeqOneByteString for {code} and store it in the {cache}.
6018       TNode<String> result = AllocateSeqOneByteString(1);
6019       StoreNoWriteBarrier(
6020           MachineRepresentation::kWord8, result,
6021           IntPtrConstant(SeqOneByteString::kHeaderSize - kHeapObjectTag), code);
6022       StoreFixedArrayElement(cache, code_index, result);
6023       var_result.Bind(result);
6024       Goto(&if_done);
6025     }
6026 
6027     BIND(&if_entryisnotundefined);
6028     {
6029       // Return the entry from the {cache}.
6030       var_result.Bind(entry);
6031       Goto(&if_done);
6032     }
6033   }
6034 
6035   BIND(&if_codeistwobyte);
6036   {
6037     // Allocate a new SeqTwoByteString for {code}.
6038     Node* result = AllocateSeqTwoByteString(1);
6039     StoreNoWriteBarrier(
6040         MachineRepresentation::kWord16, result,
6041         IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag), code);
6042     var_result.Bind(result);
6043     Goto(&if_done);
6044   }
6045 
6046   BIND(&if_done);
6047   CSA_ASSERT(this, IsString(var_result.value()));
6048   return CAST(var_result.value());
6049 }
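// Note (editorial): char codes up to String::kMaxOneByteCharCode (0xFF) are
// served from the isolate-wide single character string cache, so repeated
// requests for the same Latin-1 character return the same string object and
// only the first request allocates. Codes above 0xFF always allocate a fresh
// one-character SeqTwoByteString.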
6050 
6051 // A wrapper around CopyStringCharacters which determines the correct string
6052 // encoding, allocates a corresponding sequential string, and then copies the
6053 // given character range using CopyStringCharacters.
6054 // |from_string| must be a sequential string.
6055 // 0 <= |from_index| <= |from_index| + |character_count| <= from_string.length.
6056 TNode<String> CodeStubAssembler::AllocAndCopyStringCharacters(
6057     Node* from, Node* from_instance_type, TNode<IntPtrT> from_index,
6058     TNode<Smi> character_count) {
6059   Label end(this), one_byte_sequential(this), two_byte_sequential(this);
6060   TVARIABLE(String, var_result);
6061 
6062   Branch(IsOneByteStringInstanceType(from_instance_type), &one_byte_sequential,
6063          &two_byte_sequential);
6064 
6065   // The subject string is a sequential one-byte string.
6066   BIND(&one_byte_sequential);
6067   {
6068     TNode<String> result =
6069         AllocateSeqOneByteString(NoContextConstant(), character_count);
6070     CopyStringCharacters(from, result, from_index, IntPtrConstant(0),
6071                          SmiUntag(character_count), String::ONE_BYTE_ENCODING,
6072                          String::ONE_BYTE_ENCODING);
6073     var_result = result;
6074     Goto(&end);
6075   }
6076 
6077   // The subject string is a sequential two-byte string.
6078   BIND(&two_byte_sequential);
6079   {
6080     TNode<String> result =
6081         AllocateSeqTwoByteString(NoContextConstant(), character_count);
6082     CopyStringCharacters(from, result, from_index, IntPtrConstant(0),
6083                          SmiUntag(character_count), String::TWO_BYTE_ENCODING,
6084                          String::TWO_BYTE_ENCODING);
6085     var_result = result;
6086     Goto(&end);
6087   }
6088 
6089   BIND(&end);
6090   return var_result.value();
6091 }
6092 
6093 TNode<String> CodeStubAssembler::SubString(TNode<String> string,
6094                                            TNode<IntPtrT> from,
6095                                            TNode<IntPtrT> to) {
6096   TVARIABLE(String, var_result);
6097   ToDirectStringAssembler to_direct(state(), string);
6098   Label end(this), runtime(this);
6099 
6100   TNode<IntPtrT> const substr_length = IntPtrSub(to, from);
6101   TNode<IntPtrT> const string_length = LoadStringLengthAsWord(string);
6102 
6103   // Begin dispatching based on substring length.
6104 
6105   Label original_string_or_invalid_length(this);
6106   GotoIf(UintPtrGreaterThanOrEqual(substr_length, string_length),
6107          &original_string_or_invalid_length);
6108 
6109   // A real substring (substr_length < string_length).
6110 
6111   Label single_char(this);
6112   GotoIf(IntPtrEqual(substr_length, IntPtrConstant(1)), &single_char);
6113 
6114   // TODO(jgruber): Add an additional case for substring of length == 0?
6115 
6116   // Deal with different string types: update the index if necessary
6117   // and extract the underlying string.
6118 
6119   TNode<String> direct_string = to_direct.TryToDirect(&runtime);
6120   TNode<IntPtrT> offset = IntPtrAdd(from, to_direct.offset());
6121   Node* const instance_type = to_direct.instance_type();
6122 
6123   // The subject string can only be external or sequential string of either
6124   // encoding at this point.
6125   Label external_string(this);
6126   {
6127     if (FLAG_string_slices) {
6128       Label next(this);
6129 
6130       // Short slice.  Copy instead of slicing.
6131       GotoIf(IntPtrLessThan(substr_length,
6132                             IntPtrConstant(SlicedString::kMinLength)),
6133              &next);
6134 
6135       // Allocate new sliced string.
6136 
6137       Counters* counters = isolate()->counters();
6138       IncrementCounter(counters->sub_string_native(), 1);
6139 
6140       Label one_byte_slice(this), two_byte_slice(this);
6141       Branch(IsOneByteStringInstanceType(to_direct.instance_type()),
6142              &one_byte_slice, &two_byte_slice);
6143 
6144       BIND(&one_byte_slice);
6145       {
6146         var_result = AllocateSlicedOneByteString(SmiTag(substr_length),
6147                                                  direct_string, SmiTag(offset));
6148         Goto(&end);
6149       }
6150 
6151       BIND(&two_byte_slice);
6152       {
6153         var_result = AllocateSlicedTwoByteString(SmiTag(substr_length),
6154                                                  direct_string, SmiTag(offset));
6155         Goto(&end);
6156       }
6157 
6158       BIND(&next);
6159     }
6160 
6161     // The subject string can only be external or sequential string of either
6162     // encoding at this point.
6163     GotoIf(to_direct.is_external(), &external_string);
6164 
6165     var_result = AllocAndCopyStringCharacters(direct_string, instance_type,
6166                                               offset, SmiTag(substr_length));
6167 
6168     Counters* counters = isolate()->counters();
6169     IncrementCounter(counters->sub_string_native(), 1);
6170 
6171     Goto(&end);
6172   }
6173 
6174   // Handle external string.
6175   BIND(&external_string);
6176   {
6177     Node* const fake_sequential_string = to_direct.PointerToString(&runtime);
6178 
6179     var_result = AllocAndCopyStringCharacters(
6180         fake_sequential_string, instance_type, offset, SmiTag(substr_length));
6181 
6182     Counters* counters = isolate()->counters();
6183     IncrementCounter(counters->sub_string_native(), 1);
6184 
6185     Goto(&end);
6186   }
6187 
6188   // Substrings of length 1 are generated through CharCodeAt and FromCharCode.
6189   BIND(&single_char);
6190   {
6191     TNode<Int32T> char_code = StringCharCodeAt(string, from);
6192     var_result = StringFromSingleCharCode(char_code);
6193     Goto(&end);
6194   }
6195 
6196   BIND(&original_string_or_invalid_length);
6197   {
6198     CSA_ASSERT(this, IntPtrEqual(substr_length, string_length));
6199 
6200     // Equal length - check if {from, to} == {0, str.length}.
6201     GotoIf(UintPtrGreaterThan(from, IntPtrConstant(0)), &runtime);
6202 
6203     // Return the original string (substr_length == string_length).
6204 
6205     Counters* counters = isolate()->counters();
6206     IncrementCounter(counters->sub_string_native(), 1);
6207 
6208     var_result = string;
6209     Goto(&end);
6210   }
6211 
6212   // Fall back to a runtime call.
6213   BIND(&runtime);
6214   {
6215     var_result =
6216         CAST(CallRuntime(Runtime::kStringSubstring, NoContextConstant(), string,
6217                          SmiTag(from), SmiTag(to)));
6218     Goto(&end);
6219   }
6220 
6221   BIND(&end);
6222   return var_result.value();
6223 }
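// Note (editorial): summary of the dispatch above, in decreasing order of
// cheapness:
//   - from == 0 and length == string length: return the original string;
//   - length == 1: StringCharCodeAt + the single character string cache;
//   - length >= SlicedString::kMinLength (with string slices enabled):
//     allocate a SlicedString over the flattened parent;
//   - otherwise: copy the range into a fresh sequential string;
//   - anything irregular (short external strings, a non-zero |from| in the
//     equal-length case, bailouts from flattening): Runtime::kStringSubstring.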
6224 
6225 ToDirectStringAssembler::ToDirectStringAssembler(
6226     compiler::CodeAssemblerState* state, Node* string, Flags flags)
6227     : CodeStubAssembler(state),
6228       var_string_(this, MachineRepresentation::kTagged, string),
6229       var_instance_type_(this, MachineRepresentation::kWord32),
6230       var_offset_(this, MachineType::PointerRepresentation()),
6231       var_is_external_(this, MachineRepresentation::kWord32),
6232       flags_(flags) {
6233   CSA_ASSERT(this, TaggedIsNotSmi(string));
6234   CSA_ASSERT(this, IsString(string));
6235 
6236   var_string_.Bind(string);
6237   var_offset_.Bind(IntPtrConstant(0));
6238   var_instance_type_.Bind(LoadInstanceType(string));
6239   var_is_external_.Bind(Int32Constant(0));
6240 }
6241 
6242 TNode<String> ToDirectStringAssembler::TryToDirect(Label* if_bailout) {
6243   VariableList vars({&var_string_, &var_offset_, &var_instance_type_}, zone());
6244   Label dispatch(this, vars);
6245   Label if_iscons(this);
6246   Label if_isexternal(this);
6247   Label if_issliced(this);
6248   Label if_isthin(this);
6249   Label out(this);
6250 
6251   Branch(IsSequentialStringInstanceType(var_instance_type_.value()), &out,
6252          &dispatch);
6253 
6254   // Dispatch based on string representation.
6255   BIND(&dispatch);
6256   {
6257     int32_t values[] = {
6258         kSeqStringTag,    kConsStringTag, kExternalStringTag,
6259         kSlicedStringTag, kThinStringTag,
6260     };
6261     Label* labels[] = {
6262         &out, &if_iscons, &if_isexternal, &if_issliced, &if_isthin,
6263     };
6264     STATIC_ASSERT(arraysize(values) == arraysize(labels));
6265 
6266     Node* const representation = Word32And(
6267         var_instance_type_.value(), Int32Constant(kStringRepresentationMask));
6268     Switch(representation, if_bailout, values, labels, arraysize(values));
6269   }
6270 
6271   // Cons string.  Check whether it is flat, then fetch first part.
6272   // Flat cons strings have an empty second part.
6273   BIND(&if_iscons);
6274   {
6275     Node* const string = var_string_.value();
6276     GotoIfNot(IsEmptyString(LoadObjectField(string, ConsString::kSecondOffset)),
6277               if_bailout);
6278 
6279     Node* const lhs = LoadObjectField(string, ConsString::kFirstOffset);
6280     var_string_.Bind(lhs);
6281     var_instance_type_.Bind(LoadInstanceType(lhs));
6282 
6283     Goto(&dispatch);
6284   }
6285 
6286   // Sliced string. Fetch parent and correct start index by offset.
6287   BIND(&if_issliced);
6288   {
6289     if (!FLAG_string_slices || (flags_ & kDontUnpackSlicedStrings)) {
6290       Goto(if_bailout);
6291     } else {
6292       Node* const string = var_string_.value();
6293       Node* const sliced_offset =
6294           LoadAndUntagObjectField(string, SlicedString::kOffsetOffset);
6295       var_offset_.Bind(IntPtrAdd(var_offset_.value(), sliced_offset));
6296 
6297       Node* const parent = LoadObjectField(string, SlicedString::kParentOffset);
6298       var_string_.Bind(parent);
6299       var_instance_type_.Bind(LoadInstanceType(parent));
6300 
6301       Goto(&dispatch);
6302     }
6303   }
6304 
6305   // Thin string. Fetch the actual string.
6306   BIND(&if_isthin);
6307   {
6308     Node* const string = var_string_.value();
6309     Node* const actual_string =
6310         LoadObjectField(string, ThinString::kActualOffset);
6311     Node* const actual_instance_type = LoadInstanceType(actual_string);
6312 
6313     var_string_.Bind(actual_string);
6314     var_instance_type_.Bind(actual_instance_type);
6315 
6316     Goto(&dispatch);
6317   }
6318 
6319   // External string.
6320   BIND(&if_isexternal);
6321   var_is_external_.Bind(Int32Constant(1));
6322   Goto(&out);
6323 
6324   BIND(&out);
6325   return CAST(var_string_.value());
6326 }
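// Note (editorial): the dispatch loop above unwraps string indirections until
// it reaches a sequential or external string: a flattened ConsString (empty
// second part) is replaced by its first part, a SlicedString by its parent
// (adding the slice offset to var_offset_), and a ThinString by the string it
// points at. External strings set var_is_external_ and exit through |out|;
// unflattened cons strings, and sliced strings when kDontUnpackSlicedStrings
// is set, bail out to |if_bailout|.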
6327 
6328 TNode<RawPtrT> ToDirectStringAssembler::TryToSequential(
6329     StringPointerKind ptr_kind, Label* if_bailout) {
6330   CHECK(ptr_kind == PTR_TO_DATA || ptr_kind == PTR_TO_STRING);
6331 
6332   TVARIABLE(RawPtrT, var_result);
6333   Label out(this), if_issequential(this), if_isexternal(this, Label::kDeferred);
6334   Branch(is_external(), &if_isexternal, &if_issequential);
6335 
6336   BIND(&if_issequential);
6337   {
6338     STATIC_ASSERT(SeqOneByteString::kHeaderSize ==
6339                   SeqTwoByteString::kHeaderSize);
6340     TNode<IntPtrT> result = BitcastTaggedToWord(var_string_.value());
6341     if (ptr_kind == PTR_TO_DATA) {
6342       result = IntPtrAdd(result, IntPtrConstant(SeqOneByteString::kHeaderSize -
6343                                                 kHeapObjectTag));
6344     }
6345     var_result = ReinterpretCast<RawPtrT>(result);
6346     Goto(&out);
6347   }
6348 
6349   BIND(&if_isexternal);
6350   {
6351     GotoIf(IsShortExternalStringInstanceType(var_instance_type_.value()),
6352            if_bailout);
6353 
6354     TNode<String> string = CAST(var_string_.value());
6355     TNode<IntPtrT> result =
6356         LoadObjectField<IntPtrT>(string, ExternalString::kResourceDataOffset);
6357     if (ptr_kind == PTR_TO_STRING) {
6358       result = IntPtrSub(result, IntPtrConstant(SeqOneByteString::kHeaderSize -
6359                                                 kHeapObjectTag));
6360     }
6361     var_result = ReinterpretCast<RawPtrT>(result);
6362     Goto(&out);
6363   }
6364 
6365   BIND(&out);
6366   return var_result.value();
6367 }
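// Note (editorial, illustrative only): for sequential strings the PTR_TO_DATA
// result is the object address plus the SeqString header, while for external
// strings the data pointer is loaded from the backing resource and
// PTR_TO_STRING subtracts the header again so the payload can be treated as
// the body of a sequential string:
//
//   data_ptr   = string_address + kHeaderSize - kHeapObjectTag
//   string_ptr = resource_data  - (kHeaderSize - kHeapObjectTag)
//
// which is what SubString's "fake_sequential_string" relies on.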
6368 
6369 void CodeStubAssembler::BranchIfCanDerefIndirectString(Node* string,
6370                                                        Node* instance_type,
6371                                                        Label* can_deref,
6372                                                        Label* cannot_deref) {
6373   CSA_ASSERT(this, IsString(string));
6374   Node* representation =
6375       Word32And(instance_type, Int32Constant(kStringRepresentationMask));
6376   GotoIf(Word32Equal(representation, Int32Constant(kThinStringTag)), can_deref);
6377   GotoIf(Word32NotEqual(representation, Int32Constant(kConsStringTag)),
6378          cannot_deref);
6379   // Cons string.
6380   Node* rhs = LoadObjectField(string, ConsString::kSecondOffset);
6381   GotoIf(IsEmptyString(rhs), can_deref);
6382   Goto(cannot_deref);
6383 }
6384 
6385 Node* CodeStubAssembler::DerefIndirectString(TNode<String> string,
6386                                              TNode<Int32T> instance_type,
6387                                              Label* cannot_deref) {
6388   Label deref(this);
6389   BranchIfCanDerefIndirectString(string, instance_type, &deref, cannot_deref);
6390   BIND(&deref);
6391   STATIC_ASSERT(ThinString::kActualOffset == ConsString::kFirstOffset);
6392   return LoadObjectField(string, ThinString::kActualOffset);
6393 }
6394 
6395 void CodeStubAssembler::DerefIndirectString(Variable* var_string,
6396                                             Node* instance_type) {
6397 #ifdef DEBUG
6398   Label can_deref(this), cannot_deref(this);
6399   BranchIfCanDerefIndirectString(var_string->value(), instance_type, &can_deref,
6400                                  &cannot_deref);
6401   BIND(&cannot_deref);
6402   DebugBreak();  // Should be able to dereference string.
6403   Goto(&can_deref);
6404   BIND(&can_deref);
6405 #endif  // DEBUG
6406 
6407   STATIC_ASSERT(ThinString::kActualOffset == ConsString::kFirstOffset);
6408   var_string->Bind(
6409       LoadObjectField(var_string->value(), ThinString::kActualOffset));
6410 }
6411 
6412 void CodeStubAssembler::MaybeDerefIndirectString(Variable* var_string,
6413                                                  Node* instance_type,
6414                                                  Label* did_deref,
6415                                                  Label* cannot_deref) {
6416   Label deref(this);
6417   BranchIfCanDerefIndirectString(var_string->value(), instance_type, &deref,
6418                                  cannot_deref);
6419 
6420   BIND(&deref);
6421   {
6422     DerefIndirectString(var_string, instance_type);
6423     Goto(did_deref);
6424   }
6425 }
6426 
6427 void CodeStubAssembler::MaybeDerefIndirectStrings(Variable* var_left,
6428                                                   Node* left_instance_type,
6429                                                   Variable* var_right,
6430                                                   Node* right_instance_type,
6431                                                   Label* did_something) {
6432   Label did_nothing_left(this), did_something_left(this),
6433       didnt_do_anything(this);
6434   MaybeDerefIndirectString(var_left, left_instance_type, &did_something_left,
6435                            &did_nothing_left);
6436 
6437   BIND(&did_something_left);
6438   {
6439     MaybeDerefIndirectString(var_right, right_instance_type, did_something,
6440                              did_something);
6441   }
6442 
6443   BIND(&did_nothing_left);
6444   {
6445     MaybeDerefIndirectString(var_right, right_instance_type, did_something,
6446                              &didnt_do_anything);
6447   }
6448 
6449   BIND(&didnt_do_anything);
6450   // Fall through if neither string was an indirect string.
6451 }
6452 
6453 TNode<String> CodeStubAssembler::StringAdd(Node* context, TNode<String> left,
6454                                            TNode<String> right,
6455                                            AllocationFlags flags) {
6456   TVARIABLE(String, result);
6457   Label check_right(this), runtime(this, Label::kDeferred), cons(this),
6458       done(this, &result), done_native(this, &result);
6459   Counters* counters = isolate()->counters();
6460 
6461   TNode<Smi> left_length = LoadStringLengthAsSmi(left);
6462   GotoIf(SmiNotEqual(SmiConstant(0), left_length), &check_right);
6463   result = right;
6464   Goto(&done_native);
6465 
6466   BIND(&check_right);
6467   TNode<Smi> right_length = LoadStringLengthAsSmi(right);
6468   GotoIf(SmiNotEqual(SmiConstant(0), right_length), &cons);
6469   result = left;
6470   Goto(&done_native);
6471 
6472   BIND(&cons);
6473   {
6474     TNode<Smi> new_length = SmiAdd(left_length, right_length);
6475 
6476     // If new length is greater than String::kMaxLength, goto runtime to
6477     // throw. Note: we also need to invalidate the string length protector, so
6478     // can't just throw here directly.
6479     GotoIf(SmiAbove(new_length, SmiConstant(String::kMaxLength)), &runtime);
6480 
6481     TVARIABLE(String, var_left, left);
6482     TVARIABLE(String, var_right, right);
6483     Variable* input_vars[2] = {&var_left, &var_right};
6484     Label non_cons(this, 2, input_vars);
6485     Label slow(this, Label::kDeferred);
6486     GotoIf(SmiLessThan(new_length, SmiConstant(ConsString::kMinLength)),
6487            &non_cons);
6488 
6489     result = NewConsString(context, new_length, var_left.value(),
6490                            var_right.value(), flags);
6491     Goto(&done_native);
6492 
6493     BIND(&non_cons);
6494 
6495     Comment("Full string concatenate");
6496     Node* left_instance_type = LoadInstanceType(var_left.value());
6497     Node* right_instance_type = LoadInstanceType(var_right.value());
6498     // Compute intersection and difference of instance types.
6499 
6500     Node* ored_instance_types =
6501         Word32Or(left_instance_type, right_instance_type);
6502     Node* xored_instance_types =
6503         Word32Xor(left_instance_type, right_instance_type);
6504 
6505     // Check if both strings have the same encoding and both are sequential.
6506     GotoIf(IsSetWord32(xored_instance_types, kStringEncodingMask), &runtime);
6507     GotoIf(IsSetWord32(ored_instance_types, kStringRepresentationMask), &slow);
6508 
6509     TNode<IntPtrT> word_left_length = SmiUntag(left_length);
6510     TNode<IntPtrT> word_right_length = SmiUntag(right_length);
6511 
6512     Label two_byte(this);
6513     GotoIf(Word32Equal(Word32And(ored_instance_types,
6514                                  Int32Constant(kStringEncodingMask)),
6515                        Int32Constant(kTwoByteStringTag)),
6516            &two_byte);
6517     // One-byte sequential string case
6518     result = AllocateSeqOneByteString(context, new_length);
6519     CopyStringCharacters(var_left.value(), result.value(), IntPtrConstant(0),
6520                          IntPtrConstant(0), word_left_length,
6521                          String::ONE_BYTE_ENCODING, String::ONE_BYTE_ENCODING);
6522     CopyStringCharacters(var_right.value(), result.value(), IntPtrConstant(0),
6523                          word_left_length, word_right_length,
6524                          String::ONE_BYTE_ENCODING, String::ONE_BYTE_ENCODING);
6525     Goto(&done_native);
6526 
6527     BIND(&two_byte);
6528     {
6529       // Two-byte sequential string case
6530       result = AllocateSeqTwoByteString(context, new_length);
6531       CopyStringCharacters(var_left.value(), result.value(), IntPtrConstant(0),
6532                            IntPtrConstant(0), word_left_length,
6533                            String::TWO_BYTE_ENCODING,
6534                            String::TWO_BYTE_ENCODING);
6535       CopyStringCharacters(var_right.value(), result.value(), IntPtrConstant(0),
6536                            word_left_length, word_right_length,
6537                            String::TWO_BYTE_ENCODING,
6538                            String::TWO_BYTE_ENCODING);
6539       Goto(&done_native);
6540     }
6541 
6542     BIND(&slow);
6543     {
6544       // Try to unwrap indirect strings, restart the above attempt on success.
6545       MaybeDerefIndirectStrings(&var_left, left_instance_type, &var_right,
6546                                 right_instance_type, &non_cons);
6547       Goto(&runtime);
6548     }
6549   }
6550   BIND(&runtime);
6551   {
6552     result = CAST(CallRuntime(Runtime::kStringAdd, context, left, right));
6553     Goto(&done);
6554   }
6555 
6556   BIND(&done_native);
6557   {
6558     IncrementCounter(counters->string_add_native(), 1);
6559     Goto(&done);
6560   }
6561 
6562   BIND(&done);
6563   return result.value();
6564 }
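// Note (editorial): fast paths taken above, in order: an empty operand returns
// the other operand unchanged; a combined length above String::kMaxLength goes
// to the runtime (which also invalidates the string length protector before
// throwing); a combined length of at least ConsString::kMinLength gets a
// ConsString; shorter results are copied into a fresh sequential string when
// both inputs are sequential and share an encoding. Mixed encodings go
// straight to Runtime::kStringAdd, and wrapped (thin/flat-cons) inputs are
// first dereferenced and the flat copy retried.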
6565 
6566 TNode<String> CodeStubAssembler::StringFromSingleCodePoint(
6567     TNode<Int32T> codepoint, UnicodeEncoding encoding) {
6568   VARIABLE(var_result, MachineRepresentation::kTagged, EmptyStringConstant());
6569 
6570   Label if_isword16(this), if_isword32(this), return_result(this);
6571 
6572   Branch(Uint32LessThan(codepoint, Int32Constant(0x10000)), &if_isword16,
6573          &if_isword32);
6574 
6575   BIND(&if_isword16);
6576   {
6577     var_result.Bind(StringFromSingleCharCode(codepoint));
6578     Goto(&return_result);
6579   }
6580 
6581   BIND(&if_isword32);
6582   {
6583     switch (encoding) {
6584       case UnicodeEncoding::UTF16:
6585         break;
6586       case UnicodeEncoding::UTF32: {
6587         // Convert UTF32 to UTF16 code units, and store as a 32 bit word.
6588         Node* lead_offset = Int32Constant(0xD800 - (0x10000 >> 10));
6589 
6590         // lead = (codepoint >> 10) + LEAD_OFFSET
6591         Node* lead =
6592             Int32Add(Word32Shr(codepoint, Int32Constant(10)), lead_offset);
6593 
6594         // trail = (codepoint & 0x3FF) + 0xDC00;
6595         Node* trail = Int32Add(Word32And(codepoint, Int32Constant(0x3FF)),
6596                                Int32Constant(0xDC00));
6597 
6598         // codepoint = (trail << 16) | lead;
6599         codepoint = Signed(Word32Or(Word32Shl(trail, Int32Constant(16)), lead));
6600         break;
6601       }
6602     }
6603 
6604     Node* value = AllocateSeqTwoByteString(2);
6605     StoreNoWriteBarrier(
6606         MachineRepresentation::kWord32, value,
6607         IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag),
6608         codepoint);
6609     var_result.Bind(value);
6610     Goto(&return_result);
6611   }
6612 
6613   BIND(&return_result);
6614   return CAST(var_result.value());
6615 }
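// Note (editorial, illustrative only): for UTF32 input, the code above splits
// a supplementary-plane code point into a UTF-16 surrogate pair. Worked
// example for U+1F600:
//
//   lead_offset = 0xD800 - (0x10000 >> 10)   = 0xD7C0
//   lead        = (0x1F600 >> 10) + 0xD7C0   = 0xD83D
//   trail       = (0x1F600 & 0x3FF) + 0xDC00 = 0xDE00
//
// The pair is stored as a single 32-bit word, (trail << 16) | lead, into a
// two-character SeqTwoByteString (on a little-endian target this places the
// lead surrogate in the first code unit).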
6616 
6617 TNode<Number> CodeStubAssembler::StringToNumber(TNode<String> input) {
6618   Label runtime(this, Label::kDeferred);
6619   Label end(this);
6620 
6621   TVARIABLE(Number, var_result);
6622 
6623   // Check if string has a cached array index.
6624   TNode<Uint32T> hash = LoadNameHashField(input);
6625   GotoIf(IsSetWord32(hash, Name::kDoesNotContainCachedArrayIndexMask),
6626          &runtime);
6627 
6628   var_result =
6629       SmiTag(Signed(DecodeWordFromWord32<String::ArrayIndexValueBits>(hash)));
6630   Goto(&end);
6631 
6632   BIND(&runtime);
6633   {
6634     var_result =
6635         CAST(CallRuntime(Runtime::kStringToNumber, NoContextConstant(), input));
6636     Goto(&end);
6637   }
6638 
6639   BIND(&end);
6640   return var_result.value();
6641 }
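// Note (editorial): strings that spell a canonical array index cache that
// index in their hash field. When Name::kDoesNotContainCachedArrayIndexMask is
// clear, the index is decoded straight out of the hash via
// String::ArrayIndexValueBits and returned as a Smi, so only non-index strings
// pay for the Runtime::kStringToNumber call.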
6642 
6643 TNode<String> CodeStubAssembler::NumberToString(TNode<Number> input) {
6644   TVARIABLE(String, result);
6645   TVARIABLE(Smi, smi_input);
6646   Label runtime(this, Label::kDeferred), if_smi(this), if_heap_number(this),
6647       done(this, &result);
6648 
6649   // Load the number string cache.
6650   Node* number_string_cache = LoadRoot(Heap::kNumberStringCacheRootIndex);
6651 
6652   // Make the hash mask from the length of the number string cache. It
6653   // contains two elements (number and string) for each cache entry.
6654   // TODO(ishell): cleanup mask handling.
6655   Node* mask =
6656       BitcastTaggedToWord(LoadFixedArrayBaseLength(number_string_cache));
6657   TNode<IntPtrT> one = IntPtrConstant(1);
6658   mask = IntPtrSub(mask, one);
6659 
6660   GotoIfNot(TaggedIsSmi(input), &if_heap_number);
6661   smi_input = CAST(input);
6662   Goto(&if_smi);
6663 
6664   BIND(&if_heap_number);
6665   {
6666     TNode<HeapNumber> heap_number_input = CAST(input);
6667     // Try normalizing the HeapNumber.
6668     TryHeapNumberToSmi(heap_number_input, smi_input, &if_smi);
6669 
6670     // Make a hash from the two 32-bit values of the double.
6671     TNode<Int32T> low =
6672         LoadObjectField<Int32T>(heap_number_input, HeapNumber::kValueOffset);
6673     TNode<Int32T> high = LoadObjectField<Int32T>(
6674         heap_number_input, HeapNumber::kValueOffset + kIntSize);
6675     TNode<Word32T> hash = Word32Xor(low, high);
6676     TNode<WordT> word_hash = WordShl(ChangeInt32ToIntPtr(hash), one);
6677     TNode<WordT> index =
6678         WordAnd(word_hash, WordSar(mask, SmiShiftBitsConstant()));
6679 
6680     // Cache entry's key must be a heap number
6681     Node* number_key = LoadFixedArrayElement(CAST(number_string_cache), index);
6682     GotoIf(TaggedIsSmi(number_key), &runtime);
6683     GotoIfNot(IsHeapNumber(number_key), &runtime);
6684 
6685     // Cache entry's key must match the heap number value we're looking for.
6686     Node* low_compare = LoadObjectField(number_key, HeapNumber::kValueOffset,
6687                                         MachineType::Int32());
6688     Node* high_compare = LoadObjectField(
6689         number_key, HeapNumber::kValueOffset + kIntSize, MachineType::Int32());
6690     GotoIfNot(Word32Equal(low, low_compare), &runtime);
6691     GotoIfNot(Word32Equal(high, high_compare), &runtime);
6692 
6693     // Heap number match, return value from cache entry.
6694     result = CAST(
6695         LoadFixedArrayElement(CAST(number_string_cache), index, kPointerSize));
6696     Goto(&done);
6697   }
6698 
6699   BIND(&if_smi);
6700   {
6701     // Load the smi key, make sure it matches the smi we're looking for.
6702     Node* smi_index = BitcastWordToTagged(
6703         WordAnd(WordShl(BitcastTaggedToWord(smi_input.value()), one), mask));
6704     Node* smi_key = LoadFixedArrayElement(CAST(number_string_cache), smi_index,
6705                                           0, SMI_PARAMETERS);
6706     GotoIf(WordNotEqual(smi_key, smi_input.value()), &runtime);
6707 
6708     // Smi match, return value from cache entry.
6709     result = CAST(LoadFixedArrayElement(CAST(number_string_cache), smi_index,
6710                                         kPointerSize, SMI_PARAMETERS));
6711     Goto(&done);
6712   }
6713 
6714   BIND(&runtime);
6715   {
6716     // No cache entry, go to the runtime.
6717     result =
6718         CAST(CallRuntime(Runtime::kNumberToString, NoContextConstant(), input));
6719     Goto(&done);
6720   }
6721   BIND(&done);
6722   return result.value();
6723 }
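// Note (editorial, illustrative only): the cache lookup above hashes a
// HeapNumber by XOR-ing the two 32-bit halves of its float64 payload.
// Roughly, in plain C++ terms (assuming <cstring>):
//
//   uint64_t bits;
//   std::memcpy(&bits, &value, sizeof(bits));
//   uint32_t hash = static_cast<uint32_t>(bits) ^
//                   static_cast<uint32_t>(bits >> 32);
//   size_t entry = (hash & (cache_length / 2 - 1)) * 2;  // (key, value) pairs
//
// The key sits at |entry| and the cached string at |entry| + 1; Smi inputs use
// the Smi value itself in place of the hash.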
6724 
6725 TNode<Name> CodeStubAssembler::ToName(SloppyTNode<Context> context,
6726                                       SloppyTNode<Object> value) {
6727   Label end(this);
6728   TVARIABLE(Name, var_result);
6729 
6730   Label is_number(this);
6731   GotoIf(TaggedIsSmi(value), &is_number);
6732 
6733   Label not_name(this);
6734   TNode<Int32T> value_instance_type = LoadInstanceType(CAST(value));
6735   STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
6736   GotoIf(Int32GreaterThan(value_instance_type, Int32Constant(LAST_NAME_TYPE)),
6737          &not_name);
6738 
6739   var_result = CAST(value);
6740   Goto(&end);
6741 
6742   BIND(&is_number);
6743   {
6744     var_result = CAST(CallBuiltin(Builtins::kNumberToString, context, value));
6745     Goto(&end);
6746   }
6747 
6748   BIND(&not_name);
6749   {
6750     GotoIf(InstanceTypeEqual(value_instance_type, HEAP_NUMBER_TYPE),
6751            &is_number);
6752 
6753     Label not_oddball(this);
6754     GotoIfNot(InstanceTypeEqual(value_instance_type, ODDBALL_TYPE),
6755               &not_oddball);
6756 
6757     var_result = LoadObjectField<String>(CAST(value), Oddball::kToStringOffset);
6758     Goto(&end);
6759 
6760     BIND(&not_oddball);
6761     {
6762       var_result = CAST(CallRuntime(Runtime::kToName, context, value));
6763       Goto(&end);
6764     }
6765   }
6766 
6767   BIND(&end);
6768   return var_result.value();
6769 }
6770 
6771 Node* CodeStubAssembler::NonNumberToNumberOrNumeric(
6772     Node* context, Node* input, Object::Conversion mode,
6773     BigIntHandling bigint_handling) {
6774   CSA_ASSERT(this, Word32BinaryNot(TaggedIsSmi(input)));
6775   CSA_ASSERT(this, Word32BinaryNot(IsHeapNumber(input)));
6776 
6777   // We might need to loop once here due to ToPrimitive conversions.
6778   VARIABLE(var_input, MachineRepresentation::kTagged, input);
6779   VARIABLE(var_result, MachineRepresentation::kTagged);
6780   Label loop(this, &var_input);
6781   Label end(this);
6782   Goto(&loop);
6783   BIND(&loop);
6784   {
6785     // Load the current {input} value (known to be a HeapObject).
6786     Node* input = var_input.value();
6787 
6788     // Dispatch on the {input} instance type.
6789     Node* input_instance_type = LoadInstanceType(input);
6790     Label if_inputisstring(this), if_inputisoddball(this),
6791         if_inputisbigint(this), if_inputisreceiver(this, Label::kDeferred),
6792         if_inputisother(this, Label::kDeferred);
6793     GotoIf(IsStringInstanceType(input_instance_type), &if_inputisstring);
6794     GotoIf(IsBigIntInstanceType(input_instance_type), &if_inputisbigint);
6795     GotoIf(InstanceTypeEqual(input_instance_type, ODDBALL_TYPE),
6796            &if_inputisoddball);
6797     Branch(IsJSReceiverInstanceType(input_instance_type), &if_inputisreceiver,
6798            &if_inputisother);
6799 
6800     BIND(&if_inputisstring);
6801     {
6802       // The {input} is a String, use the fast stub to convert it to a Number.
6803       TNode<String> string_input = CAST(input);
6804       var_result.Bind(StringToNumber(string_input));
6805       Goto(&end);
6806     }
6807 
6808     BIND(&if_inputisbigint);
6809     if (mode == Object::Conversion::kToNumeric) {
6810       var_result.Bind(input);
6811       Goto(&end);
6812     } else {
6813       DCHECK_EQ(mode, Object::Conversion::kToNumber);
6814       if (bigint_handling == BigIntHandling::kThrow) {
6815         Goto(&if_inputisother);
6816       } else {
6817         DCHECK_EQ(bigint_handling, BigIntHandling::kConvertToNumber);
6818         var_result.Bind(CallRuntime(Runtime::kBigIntToNumber, context, input));
6819         Goto(&end);
6820       }
6821     }
6822 
6823     BIND(&if_inputisoddball);
6824     {
6825       // The {input} is an Oddball; we just need to load its Number value.
6826       var_result.Bind(LoadObjectField(input, Oddball::kToNumberOffset));
6827       Goto(&end);
6828     }
6829 
6830     BIND(&if_inputisreceiver);
6831     {
6832       // The {input} is a JSReceiver, we need to convert it to a Primitive first
6833       // using the ToPrimitive type conversion, preferably yielding a Number.
6834       Callable callable = CodeFactory::NonPrimitiveToPrimitive(
6835           isolate(), ToPrimitiveHint::kNumber);
6836       Node* result = CallStub(callable, context, input);
6837 
6838       // Check if the {result} is already a Number/Numeric.
6839       Label if_done(this), if_notdone(this);
6840       Branch(mode == Object::Conversion::kToNumber ? IsNumber(result)
6841                                                    : IsNumeric(result),
6842              &if_done, &if_notdone);
6843 
6844       BIND(&if_done);
6845       {
6846         // The ToPrimitive conversion already gave us a Number/Numeric, so we're
6847         // done.
6848         var_result.Bind(result);
6849         Goto(&end);
6850       }
6851 
6852       BIND(&if_notdone);
6853       {
6854         // We now have a Primitive {result}, but it's not yet a Number/Numeric.
6855         var_input.Bind(result);
6856         Goto(&loop);
6857       }
6858     }
6859 
6860     BIND(&if_inputisother);
6861     {
6862       // The {input} is something else (e.g. a Symbol); let the runtime figure
6863       // out the correct exception.
6864       // Note: We cannot tail call to the runtime here, as js-to-wasm
6865       // trampolines also use this code currently, and they declare all
6866       // outgoing parameters as untagged, while we would push a tagged
6867       // object here.
6868       auto function_id = mode == Object::Conversion::kToNumber
6869                              ? Runtime::kToNumber
6870                              : Runtime::kToNumeric;
6871       var_result.Bind(CallRuntime(function_id, context, input));
6872       Goto(&end);
6873     }
6874   }
6875 
6876   BIND(&end);
6877   if (mode == Object::Conversion::kToNumeric) {
6878     CSA_ASSERT(this, IsNumeric(var_result.value()));
6879   } else {
6880     DCHECK_EQ(mode, Object::Conversion::kToNumber);
6881     CSA_ASSERT(this, IsNumber(var_result.value()));
6882   }
6883   return var_result.value();
6884 }
6885 
6886 TNode<Number> CodeStubAssembler::NonNumberToNumber(
6887     SloppyTNode<Context> context, SloppyTNode<HeapObject> input,
6888     BigIntHandling bigint_handling) {
6889   return CAST(NonNumberToNumberOrNumeric(
6890       context, input, Object::Conversion::kToNumber, bigint_handling));
6891 }
6892 
6893 TNode<Numeric> CodeStubAssembler::NonNumberToNumeric(
6894     SloppyTNode<Context> context, SloppyTNode<HeapObject> input) {
6895   Node* result = NonNumberToNumberOrNumeric(context, input,
6896                                             Object::Conversion::kToNumeric);
6897   CSA_SLOW_ASSERT(this, IsNumeric(result));
6898   return UncheckedCast<Numeric>(result);
6899 }
6900 
6901 TNode<Number> CodeStubAssembler::ToNumber_Inline(SloppyTNode<Context> context,
6902                                                  SloppyTNode<Object> input) {
6903   TVARIABLE(Number, var_result);
6904   Label end(this), not_smi(this, Label::kDeferred);
6905 
6906   GotoIfNot(TaggedIsSmi(input), &not_smi);
6907   var_result = CAST(input);
6908   Goto(&end);
6909 
6910   BIND(&not_smi);
6911   {
6912     var_result =
6913         Select<Number>(IsHeapNumber(CAST(input)), [=] { return CAST(input); },
6914                        [=] {
6915                          return CAST(CallBuiltin(Builtins::kNonNumberToNumber,
6916                                                  context, input));
6917                        });
6918     Goto(&end);
6919   }
6920 
6921   BIND(&end);
6922   return var_result.value();
6923 }
6924 
6925 TNode<Number> CodeStubAssembler::ToNumber(SloppyTNode<Context> context,
6926                                           SloppyTNode<Object> input,
6927                                           BigIntHandling bigint_handling) {
6928   TVARIABLE(Number, var_result);
6929   Label end(this);
6930 
6931   Label not_smi(this, Label::kDeferred);
6932   GotoIfNot(TaggedIsSmi(input), &not_smi);
6933   TNode<Smi> input_smi = CAST(input);
6934   var_result = input_smi;
6935   Goto(&end);
6936 
6937   BIND(&not_smi);
6938   {
6939     Label not_heap_number(this, Label::kDeferred);
6940     TNode<HeapObject> input_ho = CAST(input);
6941     GotoIfNot(IsHeapNumber(input_ho), &not_heap_number);
6942 
6943     TNode<HeapNumber> input_hn = CAST(input_ho);
6944     var_result = input_hn;
6945     Goto(&end);
6946 
6947     BIND(&not_heap_number);
6948     {
6949       var_result = NonNumberToNumber(context, input_ho, bigint_handling);
6950       Goto(&end);
6951     }
6952   }
6953 
6954   BIND(&end);
6955   return var_result.value();
6956 }
6957 
6958 TNode<BigInt> CodeStubAssembler::ToBigInt(SloppyTNode<Context> context,
6959                                           SloppyTNode<Object> input) {
6960   TVARIABLE(BigInt, var_result);
6961   Label if_bigint(this), done(this), if_throw(this);
6962 
6963   GotoIf(TaggedIsSmi(input), &if_throw);
6964   GotoIf(IsBigInt(CAST(input)), &if_bigint);
6965   var_result = CAST(CallRuntime(Runtime::kToBigInt, context, input));
6966   Goto(&done);
6967 
6968   BIND(&if_bigint);
6969   var_result = CAST(input);
6970   Goto(&done);
6971 
6972   BIND(&if_throw);
6973   ThrowTypeError(context, MessageTemplate::kBigIntFromObject, input);
6974 
6975   BIND(&done);
6976   return var_result.value();
6977 }
6978 
6979 void CodeStubAssembler::TaggedToNumeric(Node* context, Node* value, Label* done,
6980                                         Variable* var_numeric) {
6981   TaggedToNumeric(context, value, done, var_numeric, nullptr);
6982 }
6983 
6984 void CodeStubAssembler::TaggedToNumericWithFeedback(Node* context, Node* value,
6985                                                     Label* done,
6986                                                     Variable* var_numeric,
6987                                                     Variable* var_feedback) {
6988   DCHECK_NOT_NULL(var_feedback);
6989   TaggedToNumeric(context, value, done, var_numeric, var_feedback);
6990 }
6991 
6992 void CodeStubAssembler::TaggedToNumeric(Node* context, Node* value, Label* done,
6993                                         Variable* var_numeric,
6994                                         Variable* var_feedback) {
6995   var_numeric->Bind(value);
6996   Label if_smi(this), if_heapnumber(this), if_bigint(this), if_oddball(this);
6997   GotoIf(TaggedIsSmi(value), &if_smi);
6998   Node* map = LoadMap(value);
6999   GotoIf(IsHeapNumberMap(map), &if_heapnumber);
7000   Node* instance_type = LoadMapInstanceType(map);
7001   GotoIf(IsBigIntInstanceType(instance_type), &if_bigint);
7002 
7003   // {value} is not a Numeric yet.
7004   GotoIf(Word32Equal(instance_type, Int32Constant(ODDBALL_TYPE)), &if_oddball);
7005   var_numeric->Bind(CallBuiltin(Builtins::kNonNumberToNumeric, context, value));
7006   OverwriteFeedback(var_feedback, BinaryOperationFeedback::kAny);
7007   Goto(done);
7008 
7009   BIND(&if_smi);
7010   OverwriteFeedback(var_feedback, BinaryOperationFeedback::kSignedSmall);
7011   Goto(done);
7012 
7013   BIND(&if_heapnumber);
7014   OverwriteFeedback(var_feedback, BinaryOperationFeedback::kNumber);
7015   Goto(done);
7016 
7017   BIND(&if_bigint);
7018   OverwriteFeedback(var_feedback, BinaryOperationFeedback::kBigInt);
7019   Goto(done);
7020 
7021   BIND(&if_oddball);
7022   OverwriteFeedback(var_feedback, BinaryOperationFeedback::kNumberOrOddball);
7023   var_numeric->Bind(LoadObjectField(value, Oddball::kToNumberOffset));
7024   Goto(done);
7025 }
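// Note (editorial): the feedback recorded above classifies the input for the
// interpreter/optimizer: Smi -> kSignedSmall, HeapNumber -> kNumber,
// BigInt -> kBigInt, Oddball -> kNumberOrOddball (the value is replaced by the
// oddball's cached ToNumber value), and everything else -> kAny after running
// through Builtins::kNonNumberToNumeric.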
7026 
7027 // ES#sec-touint32
7028 TNode<Number> CodeStubAssembler::ToUint32(SloppyTNode<Context> context,
7029                                           SloppyTNode<Object> input) {
7030   Node* const float_zero = Float64Constant(0.0);
7031   Node* const float_two_32 = Float64Constant(static_cast<double>(1ULL << 32));
7032 
7033   Label out(this);
7034 
7035   VARIABLE(var_result, MachineRepresentation::kTagged, input);
7036 
7037   // Early exit for positive smis.
7038   {
7039     // TODO(jgruber): This branch and the recheck below can be removed once we
7040     // have a ToNumber with multiple exits.
7041     Label next(this, Label::kDeferred);
7042     Branch(TaggedIsPositiveSmi(input), &out, &next);
7043     BIND(&next);
7044   }
7045 
7046   Node* const number = ToNumber(context, input);
7047   var_result.Bind(number);
7048 
7049   // Perhaps we have a positive smi now.
7050   {
7051     Label next(this, Label::kDeferred);
7052     Branch(TaggedIsPositiveSmi(number), &out, &next);
7053     BIND(&next);
7054   }
7055 
7056   Label if_isnegativesmi(this), if_isheapnumber(this);
7057   Branch(TaggedIsSmi(number), &if_isnegativesmi, &if_isheapnumber);
7058 
7059   BIND(&if_isnegativesmi);
7060   {
7061     Node* const uint32_value = SmiToInt32(number);
7062     Node* float64_value = ChangeUint32ToFloat64(uint32_value);
7063     var_result.Bind(AllocateHeapNumberWithValue(float64_value));
7064     Goto(&out);
7065   }
7066 
7067   BIND(&if_isheapnumber);
7068   {
7069     Label return_zero(this);
7070     Node* const value = LoadHeapNumberValue(number);
7071 
7072     {
7073       // +-0.
7074       Label next(this);
7075       Branch(Float64Equal(value, float_zero), &return_zero, &next);
7076       BIND(&next);
7077     }
7078 
7079     {
7080       // NaN.
7081       Label next(this);
7082       Branch(Float64Equal(value, value), &next, &return_zero);
7083       BIND(&next);
7084     }
7085 
7086     {
7087       // +Infinity.
7088       Label next(this);
7089       Node* const positive_infinity =
7090           Float64Constant(std::numeric_limits<double>::infinity());
7091       Branch(Float64Equal(value, positive_infinity), &return_zero, &next);
7092       BIND(&next);
7093     }
7094 
7095     {
7096       // -Infinity.
7097       Label next(this);
7098       Node* const negative_infinity =
7099           Float64Constant(-1.0 * std::numeric_limits<double>::infinity());
7100       Branch(Float64Equal(value, negative_infinity), &return_zero, &next);
7101       BIND(&next);
7102     }
7103 
7104     // * Let int be the mathematical value that is the same sign as number and
7105     //   whose magnitude is floor(abs(number)).
7106     // * Let int32bit be int modulo 2^32.
7107     // * Return int32bit.
7108     {
7109       Node* x = Float64Trunc(value);
7110       x = Float64Mod(x, float_two_32);
7111       x = Float64Add(x, float_two_32);
7112       x = Float64Mod(x, float_two_32);
7113 
7114       Node* const result = ChangeFloat64ToTagged(x);
7115       var_result.Bind(result);
7116       Goto(&out);
7117     }
7118 
7119     BIND(&return_zero);
7120     {
7121       var_result.Bind(SmiConstant(0));
7122       Goto(&out);
7123     }
7124   }
7125 
7126   BIND(&out);
7127   return CAST(var_result.value());
7128 }
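// Note (editorial, illustrative only): the general HeapNumber case above is
// the usual ToUint32 folding. In plain C++ terms, assuming <cmath>:
//
//   double x = std::trunc(value);
//   x = std::fmod(x, 4294967296.0);                 // 2^32
//   x = std::fmod(x + 4294967296.0, 4294967296.0);
//
// so e.g. -1.0 maps to 4294967295.0 and 4294967296.0 maps to 0.0, while NaN,
// +/-0 and +/-Infinity were already routed to |return_zero| above.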
7129 
7130 TNode<String> CodeStubAssembler::ToString(SloppyTNode<Context> context,
7131                                           SloppyTNode<Object> input) {
7132   Label is_number(this);
7133   Label runtime(this, Label::kDeferred), done(this);
7134   VARIABLE(result, MachineRepresentation::kTagged);
7135   GotoIf(TaggedIsSmi(input), &is_number);
7136 
7137   TNode<Map> input_map = LoadMap(CAST(input));
7138   TNode<Int32T> input_instance_type = LoadMapInstanceType(input_map);
7139 
7140   result.Bind(input);
7141   GotoIf(IsStringInstanceType(input_instance_type), &done);
7142 
7143   Label not_heap_number(this);
7144   Branch(IsHeapNumberMap(input_map), &is_number, &not_heap_number);
7145 
7146   BIND(&is_number);
7147   TNode<Number> number_input = CAST(input);
7148   result.Bind(NumberToString(number_input));
7149   Goto(&done);
7150 
7151   BIND(&not_heap_number);
7152   {
7153     GotoIfNot(InstanceTypeEqual(input_instance_type, ODDBALL_TYPE), &runtime);
7154     result.Bind(LoadObjectField(CAST(input), Oddball::kToStringOffset));
7155     Goto(&done);
7156   }
7157 
7158   BIND(&runtime);
7159   {
7160     result.Bind(CallRuntime(Runtime::kToString, context, input));
7161     Goto(&done);
7162   }
7163 
7164   BIND(&done);
7165   return CAST(result.value());
7166 }
7167 
7168 TNode<String> CodeStubAssembler::ToString_Inline(SloppyTNode<Context> context,
7169                                                  SloppyTNode<Object> input) {
7170   VARIABLE(var_result, MachineRepresentation::kTagged, input);
7171   Label stub_call(this, Label::kDeferred), out(this);
7172 
7173   GotoIf(TaggedIsSmi(input), &stub_call);
7174   Branch(IsString(CAST(input)), &out, &stub_call);
7175 
7176   BIND(&stub_call);
7177   var_result.Bind(CallBuiltin(Builtins::kToString, context, input));
7178   Goto(&out);
7179 
7180   BIND(&out);
7181   return CAST(var_result.value());
7182 }
7183 
7184 Node* CodeStubAssembler::JSReceiverToPrimitive(Node* context, Node* input) {
7185   Label if_isreceiver(this, Label::kDeferred), if_isnotreceiver(this);
7186   VARIABLE(result, MachineRepresentation::kTagged);
7187   Label done(this, &result);
7188 
7189   BranchIfJSReceiver(input, &if_isreceiver, &if_isnotreceiver);
7190 
7191   BIND(&if_isreceiver);
7192   {
7193     // Convert {input} to a primitive first passing Number hint.
7194     Callable callable = CodeFactory::NonPrimitiveToPrimitive(isolate());
7195     result.Bind(CallStub(callable, context, input));
7196     Goto(&done);
7197   }
7198 
7199   BIND(&if_isnotreceiver);
7200   {
7201     result.Bind(input);
7202     Goto(&done);
7203   }
7204 
7205   BIND(&done);
7206   return result.value();
7207 }
7208 
7209 TNode<JSReceiver> CodeStubAssembler::ToObject(SloppyTNode<Context> context,
7210                                               SloppyTNode<Object> input) {
7211   return CAST(CallBuiltin(Builtins::kToObject, context, input));
7212 }
7213 
7214 TNode<JSReceiver> CodeStubAssembler::ToObject_Inline(TNode<Context> context,
7215                                                      TNode<Object> input) {
7216   TVARIABLE(JSReceiver, result);
7217   Label if_isreceiver(this), if_isnotreceiver(this, Label::kDeferred);
7218   Label done(this);
7219 
7220   BranchIfJSReceiver(input, &if_isreceiver, &if_isnotreceiver);
7221 
7222   BIND(&if_isreceiver);
7223   {
7224     result = CAST(input);
7225     Goto(&done);
7226   }
7227 
7228   BIND(&if_isnotreceiver);
7229   {
7230     result = ToObject(context, input);
7231     Goto(&done);
7232   }
7233 
7234   BIND(&done);
7235   return result.value();
7236 }
7237 
7238 TNode<Smi> CodeStubAssembler::ToSmiIndex(TNode<Object> input,
7239                                          TNode<Context> context,
7240                                          Label* range_error) {
7241   TVARIABLE(Smi, result);
7242   Label check_undefined(this), return_zero(this), defined(this),
7243       negative_check(this), done(this);
7244 
7245   GotoIfNot(TaggedIsSmi(input), &check_undefined);
7246   result = CAST(input);
7247   Goto(&negative_check);
7248 
7249   BIND(&check_undefined);
7250   Branch(IsUndefined(input), &return_zero, &defined);
7251 
7252   BIND(&defined);
7253   TNode<Number> integer_input =
7254       CAST(CallBuiltin(Builtins::kToInteger_TruncateMinusZero, context, input));
7255   GotoIfNot(TaggedIsSmi(integer_input), range_error);
7256   result = CAST(integer_input);
7257   Goto(&negative_check);
7258 
7259   BIND(&negative_check);
7260   Branch(SmiLessThan(result.value(), SmiConstant(0)), range_error, &done);
7261 
7262   BIND(&return_zero);
7263   result = SmiConstant(0);
7264   Goto(&done);
7265 
7266   BIND(&done);
7267   return result.value();
7268 }
7269 
7270 TNode<Smi> CodeStubAssembler::ToSmiLength(TNode<Object> input,
7271                                           TNode<Context> context,
7272                                           Label* range_error) {
7273   TVARIABLE(Smi, result);
7274   Label to_integer(this), negative_check(this),
7275       heap_number_negative_check(this), return_zero(this), done(this);
7276 
7277   GotoIfNot(TaggedIsSmi(input), &to_integer);
7278   result = CAST(input);
7279   Goto(&negative_check);
7280 
7281   BIND(&to_integer);
7282   {
7283     TNode<Number> integer_input = CAST(
7284         CallBuiltin(Builtins::kToInteger_TruncateMinusZero, context, input));
7285     GotoIfNot(TaggedIsSmi(integer_input), &heap_number_negative_check);
7286     result = CAST(integer_input);
7287     Goto(&negative_check);
7288 
7289     // integer_input can still be a negative HeapNumber here.
7290     BIND(&heap_number_negative_check);
7291     TNode<HeapNumber> heap_number_input = CAST(integer_input);
7292     Branch(IsTrue(CallBuiltin(Builtins::kLessThan, context, heap_number_input,
7293                               SmiConstant(0))),
7294            &return_zero, range_error);
7295   }
7296 
7297   BIND(&negative_check);
7298   Branch(SmiLessThan(result.value(), SmiConstant(0)), &return_zero, &done);
7299 
7300   BIND(&return_zero);
7301   result = SmiConstant(0);
7302   Goto(&done);
7303 
7304   BIND(&done);
7305   return result.value();
7306 }
7307 
7308 TNode<Number> CodeStubAssembler::ToLength_Inline(SloppyTNode<Context> context,
7309                                                  SloppyTNode<Object> input) {
7310   TNode<Smi> smi_zero = SmiConstant(0);
7311   return Select<Number>(
7312       TaggedIsSmi(input), [=] { return SmiMax(CAST(input), smi_zero); },
7313       [=] { return CAST(CallBuiltin(Builtins::kToLength, context, input)); });
7314 }
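// Editor's note (illustrative sketch, not part of the original file): the Smi
// fast path above is just max(input, 0); everything else defers to the
// kToLength builtin, which implements the spec operation roughly as:
//
//   double ToLengthSketch(double x) {
//     double i = std::isnan(x) ? 0 : std::trunc(x);           // ToInteger
//     return std::min(std::max(i, 0.0), 9007199254740991.0);  // [0, 2^53 - 1]
//   }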
7315 
7316 TNode<Number> CodeStubAssembler::ToInteger_Inline(
7317     SloppyTNode<Context> context, SloppyTNode<Object> input,
7318     ToIntegerTruncationMode mode) {
7319   Builtins::Name builtin = (mode == kNoTruncation)
7320                                ? Builtins::kToInteger
7321                                : Builtins::kToInteger_TruncateMinusZero;
7322   return Select<Number>(
7323       TaggedIsSmi(input), [=] { return CAST(input); },
7324       [=] { return CAST(CallBuiltin(builtin, context, input)); });
7325 }
7326 
7327 TNode<Number> CodeStubAssembler::ToInteger(SloppyTNode<Context> context,
7328                                            SloppyTNode<Object> input,
7329                                            ToIntegerTruncationMode mode) {
7330   // We might need to loop once for ToNumber conversion.
7331   TVARIABLE(Object, var_arg, input);
7332   Label loop(this, &var_arg), out(this);
7333   Goto(&loop);
7334   BIND(&loop);
7335   {
7336     // Shared entry points.
7337     Label return_zero(this, Label::kDeferred);
7338 
7339     // Load the current {arg} value.
7340     TNode<Object> arg = var_arg.value();
7341 
7342     // Check if {arg} is a Smi.
7343     GotoIf(TaggedIsSmi(arg), &out);
7344 
7345     // Check if {arg} is a HeapNumber.
7346     Label if_argisheapnumber(this),
7347         if_argisnotheapnumber(this, Label::kDeferred);
7348     Branch(IsHeapNumber(CAST(arg)), &if_argisheapnumber,
7349            &if_argisnotheapnumber);
7350 
7351     BIND(&if_argisheapnumber);
7352     {
7353       TNode<HeapNumber> arg_hn = CAST(arg);
7354       // Load the floating-point value of {arg}.
7355       Node* arg_value = LoadHeapNumberValue(arg_hn);
7356 
7357       // Check if {arg} is NaN.
7358       GotoIfNot(Float64Equal(arg_value, arg_value), &return_zero);
7359 
7360       // Truncate {arg} towards zero.
7361       TNode<Float64T> value = Float64Trunc(arg_value);
7362 
7363       if (mode == kTruncateMinusZero) {
7364         // Truncate -0.0 to 0.
7365         GotoIf(Float64Equal(value, Float64Constant(0.0)), &return_zero);
7366       }
7367 
7368       var_arg = ChangeFloat64ToTagged(value);
7369       Goto(&out);
7370     }
7371 
7372     BIND(&if_argisnotheapnumber);
7373     {
7374       // Need to convert {arg} to a Number first.
7375       var_arg = UncheckedCast<Object>(
7376           CallBuiltin(Builtins::kNonNumberToNumber, context, arg));
7377       Goto(&loop);
7378     }
7379 
7380     BIND(&return_zero);
7381     var_arg = SmiConstant(0);
7382     Goto(&out);
7383   }
7384 
7385   BIND(&out);
7386   if (mode == kTruncateMinusZero) {
7387     CSA_ASSERT(this, IsNumberNormalized(CAST(var_arg.value())));
7388   }
7389   return CAST(var_arg.value());
7390 }
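// Editor's note (illustrative sketch, not part of the original file): for a
// HeapNumber input the loop above reduces to this scalar computation; the
// kTruncateMinusZero mode additionally folds -0.0 into the Smi 0:
//
//   double ToIntegerSketch(double x, bool truncate_minus_zero) {
//     if (std::isnan(x)) return 0;                   // NaN -> +0
//     double t = std::trunc(x);                      // round towards zero
//     if (truncate_minus_zero && t == 0) return 0;   // -0.0 -> +0
//     return t;
//   }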
7391 
7392 TNode<Uint32T> CodeStubAssembler::DecodeWord32(SloppyTNode<Word32T> word32,
7393                                                uint32_t shift, uint32_t mask) {
7394   return UncheckedCast<Uint32T>(Word32Shr(
7395       Word32And(word32, Int32Constant(mask)), static_cast<int>(shift)));
7396 }
7397 
7398 TNode<UintPtrT> CodeStubAssembler::DecodeWord(SloppyTNode<WordT> word,
7399                                               uint32_t shift, uint32_t mask) {
7400   return Unsigned(
7401       WordShr(WordAnd(word, IntPtrConstant(mask)), static_cast<int>(shift)));
7402 }
7403 
7404 TNode<WordT> CodeStubAssembler::UpdateWord(TNode<WordT> word,
7405                                            TNode<WordT> value, uint32_t shift,
7406                                            uint32_t mask) {
7407   TNode<WordT> encoded_value = WordShl(value, static_cast<int>(shift));
7408   TNode<IntPtrT> inverted_mask = IntPtrConstant(~static_cast<intptr_t>(mask));
7409   // Ensure the {value} fits fully in the mask.
7410   CSA_ASSERT(this, WordEqual(WordAnd(encoded_value, inverted_mask),
7411                              IntPtrConstant(0)));
7412   return WordOr(WordAnd(word, inverted_mask), encoded_value);
7413 }
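// Editor's note (illustrative sketch, not part of the original file): the
// three helpers above are the CSA counterparts of plain mask-and-shift
// bit-field access:
//
//   constexpr uint32_t DecodeSketch(uint32_t word, uint32_t shift,
//                                   uint32_t mask) {
//     return (word & mask) >> shift;             // extract the field
//   }
//   constexpr uint32_t UpdateSketch(uint32_t word, uint32_t value,
//                                   uint32_t shift, uint32_t mask) {
//     return (word & ~mask) | (value << shift);  // re-encode the field
//   }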
7414 
7415 void CodeStubAssembler::SetCounter(StatsCounter* counter, int value) {
7416   if (FLAG_native_code_counters && counter->Enabled()) {
7417     Node* counter_address =
7418         ExternalConstant(ExternalReference::Create(counter));
7419     StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address,
7420                         Int32Constant(value));
7421   }
7422 }
7423 
7424 void CodeStubAssembler::IncrementCounter(StatsCounter* counter, int delta) {
7425   DCHECK_GT(delta, 0);
7426   if (FLAG_native_code_counters && counter->Enabled()) {
7427     Node* counter_address =
7428         ExternalConstant(ExternalReference::Create(counter));
7429     Node* value = Load(MachineType::Int32(), counter_address);
7430     value = Int32Add(value, Int32Constant(delta));
7431     StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
7432   }
7433 }
7434 
7435 void CodeStubAssembler::DecrementCounter(StatsCounter* counter, int delta) {
7436   DCHECK_GT(delta, 0);
7437   if (FLAG_native_code_counters && counter->Enabled()) {
7438     Node* counter_address =
7439         ExternalConstant(ExternalReference::Create(counter));
7440     Node* value = Load(MachineType::Int32(), counter_address);
7441     value = Int32Sub(value, Int32Constant(delta));
7442     StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
7443   }
7444 }
7445 
7446 void CodeStubAssembler::Increment(Variable* variable, int value,
7447                                   ParameterMode mode) {
7448   DCHECK_IMPLIES(mode == INTPTR_PARAMETERS,
7449                  variable->rep() == MachineType::PointerRepresentation());
7450   DCHECK_IMPLIES(mode == SMI_PARAMETERS,
7451                  variable->rep() == MachineRepresentation::kTagged ||
7452                      variable->rep() == MachineRepresentation::kTaggedSigned);
7453   variable->Bind(IntPtrOrSmiAdd(variable->value(),
7454                                 IntPtrOrSmiConstant(value, mode), mode));
7455 }
7456 
7457 void CodeStubAssembler::Use(Label* label) {
7458   GotoIf(Word32Equal(Int32Constant(0), Int32Constant(1)), label);
7459 }
7460 
7461 void CodeStubAssembler::TryToName(Node* key, Label* if_keyisindex,
7462                                   Variable* var_index, Label* if_keyisunique,
7463                                   Variable* var_unique, Label* if_bailout,
7464                                   Label* if_notinternalized) {
7465   DCHECK_EQ(MachineType::PointerRepresentation(), var_index->rep());
7466   DCHECK_EQ(MachineRepresentation::kTagged, var_unique->rep());
7467   Comment("TryToName");
7468 
7469   Label if_hascachedindex(this), if_keyisnotindex(this), if_thinstring(this),
7470       if_keyisother(this, Label::kDeferred);
7471   // Handle Smi and HeapNumber keys.
7472   var_index->Bind(TryToIntptr(key, &if_keyisnotindex));
7473   Goto(if_keyisindex);
7474 
7475   BIND(&if_keyisnotindex);
7476   Node* key_map = LoadMap(key);
7477   var_unique->Bind(key);
7478   // Symbols are unique.
7479   GotoIf(IsSymbolMap(key_map), if_keyisunique);
7480   Node* key_instance_type = LoadMapInstanceType(key_map);
7481   // Miss if |key| is not a String.
7482   STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
7483   GotoIfNot(IsStringInstanceType(key_instance_type), &if_keyisother);
7484 
7485   // |key| is a String. Check if it has a cached array index.
7486   Node* hash = LoadNameHashField(key);
7487   GotoIf(IsClearWord32(hash, Name::kDoesNotContainCachedArrayIndexMask),
7488          &if_hascachedindex);
7489   // No cached array index. If the string knows that it contains an index,
7490   // then it must be an uncacheable index. Handle this case in the runtime.
7491   GotoIf(IsClearWord32(hash, Name::kIsNotArrayIndexMask), if_bailout);
7492   // Check if we have a ThinString.
7493   GotoIf(InstanceTypeEqual(key_instance_type, THIN_STRING_TYPE),
7494          &if_thinstring);
7495   GotoIf(InstanceTypeEqual(key_instance_type, THIN_ONE_BYTE_STRING_TYPE),
7496          &if_thinstring);
7497   // Finally, check if |key| is internalized.
7498   STATIC_ASSERT(kNotInternalizedTag != 0);
7499   GotoIf(IsSetWord32(key_instance_type, kIsNotInternalizedMask),
7500          if_notinternalized != nullptr ? if_notinternalized : if_bailout);
7501   Goto(if_keyisunique);
7502 
7503   BIND(&if_thinstring);
7504   var_unique->Bind(LoadObjectField(key, ThinString::kActualOffset));
7505   Goto(if_keyisunique);
7506 
7507   BIND(&if_hascachedindex);
7508   var_index->Bind(DecodeWordFromWord32<Name::ArrayIndexValueBits>(hash));
7509   Goto(if_keyisindex);
7510 
7511   BIND(&if_keyisother);
7512   GotoIfNot(InstanceTypeEqual(key_instance_type, ODDBALL_TYPE), if_bailout);
7513   var_unique->Bind(LoadObjectField(key, Oddball::kToStringOffset));
7514   Goto(if_keyisunique);
7515 }
7516 
7517 void CodeStubAssembler::TryInternalizeString(
7518     Node* string, Label* if_index, Variable* var_index, Label* if_internalized,
7519     Variable* var_internalized, Label* if_not_internalized, Label* if_bailout) {
7520   DCHECK(var_index->rep() == MachineType::PointerRepresentation());
7521   DCHECK_EQ(var_internalized->rep(), MachineRepresentation::kTagged);
7522   CSA_SLOW_ASSERT(this, IsString(string));
7523   Node* function =
7524       ExternalConstant(ExternalReference::try_internalize_string_function());
7525   Node* const isolate_ptr =
7526       ExternalConstant(ExternalReference::isolate_address(isolate()));
7527   Node* result =
7528       CallCFunction2(MachineType::AnyTagged(), MachineType::Pointer(),
7529                      MachineType::AnyTagged(), function, isolate_ptr, string);
7530   Label internalized(this);
7531   GotoIf(TaggedIsNotSmi(result), &internalized);
7532   Node* word_result = SmiUntag(result);
7533   GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kNotFound)),
7534          if_not_internalized);
7535   GotoIf(WordEqual(word_result, IntPtrConstant(ResultSentinel::kUnsupported)),
7536          if_bailout);
7537   var_index->Bind(word_result);
7538   Goto(if_index);
7539 
7540   BIND(&internalized);
7541   var_internalized->Bind(result);
7542   Goto(if_internalized);
7543 }
7544 
7545 template <typename Dictionary>
7546 TNode<IntPtrT> CodeStubAssembler::EntryToIndex(TNode<IntPtrT> entry,
7547                                                int field_index) {
7548   TNode<IntPtrT> entry_index =
7549       IntPtrMul(entry, IntPtrConstant(Dictionary::kEntrySize));
7550   return IntPtrAdd(entry_index, IntPtrConstant(Dictionary::kElementsStartIndex +
7551                                                field_index));
7552 }
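// Editor's note (illustrative sketch, not part of the original file): the
// entry-to-index translation above is simply
//
//   index = Dictionary::kElementsStartIndex + entry * Dictionary::kEntrySize
//           + field_index;
//
// i.e. the dictionary's flat backing store keeps kEntrySize consecutive slots
// (key, value, details, ...) per entry after a fixed-size header region.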
7553 
7554 TNode<Uint32T> CodeStubAssembler::LoadDetailsByKeyIndex(
7555     TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
7556   const int kKeyToDetailsOffset =
7557       (DescriptorArray::kEntryDetailsIndex - DescriptorArray::kEntryKeyIndex) *
7558       kPointerSize;
7559   return Unsigned(LoadAndUntagToWord32ArrayElement(
7560       container, WeakFixedArray::kHeaderSize, key_index, kKeyToDetailsOffset));
7561 }
7562 
7563 TNode<Object> CodeStubAssembler::LoadValueByKeyIndex(
7564     TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
7565   const int kKeyToValueOffset =
7566       (DescriptorArray::kEntryValueIndex - DescriptorArray::kEntryKeyIndex) *
7567       kPointerSize;
7568   return CAST(
7569       LoadWeakFixedArrayElement(container, key_index, kKeyToValueOffset));
7570 }
7571 
7572 TNode<MaybeObject> CodeStubAssembler::LoadFieldTypeByKeyIndex(
7573     TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
7574   const int kKeyToValueOffset =
7575       (DescriptorArray::kEntryValueIndex - DescriptorArray::kEntryKeyIndex) *
7576       kPointerSize;
7577   return LoadWeakFixedArrayElement(container, key_index, kKeyToValueOffset);
7578 }
7579 
7580 template TNode<IntPtrT> CodeStubAssembler::EntryToIndex<NameDictionary>(
7581     TNode<IntPtrT>, int);
7582 template TNode<IntPtrT> CodeStubAssembler::EntryToIndex<GlobalDictionary>(
7583     TNode<IntPtrT>, int);
7584 template TNode<IntPtrT> CodeStubAssembler::EntryToIndex<NumberDictionary>(
7585     TNode<IntPtrT>, int);
7586 
7587 // This must be kept in sync with HashTableBase::ComputeCapacity().
7588 TNode<IntPtrT> CodeStubAssembler::HashTableComputeCapacity(
7589     TNode<IntPtrT> at_least_space_for) {
7590   TNode<IntPtrT> capacity = IntPtrRoundUpToPowerOfTwo32(
7591       IntPtrAdd(at_least_space_for, WordShr(at_least_space_for, 1)));
7592   return IntPtrMax(capacity, IntPtrConstant(HashTableBase::kMinCapacity));
7593 }
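// Editor's note (illustrative sketch, not part of the original file): the
// computation above is equivalent to
//
//   capacity = max(RoundUpToPowerOfTwo32(n + (n >> 1)), kMinCapacity)
//
// i.e. reserve roughly 50% headroom over the requested element count and
// round up to a power of two so that "hash & (capacity - 1)" can be used
// instead of a modulo when probing.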
7594 
7595 TNode<IntPtrT> CodeStubAssembler::IntPtrMax(SloppyTNode<IntPtrT> left,
7596                                             SloppyTNode<IntPtrT> right) {
7597   intptr_t left_constant;
7598   intptr_t right_constant;
7599   if (ToIntPtrConstant(left, left_constant) &&
7600       ToIntPtrConstant(right, right_constant)) {
7601     return IntPtrConstant(std::max(left_constant, right_constant));
7602   }
7603   return SelectConstant<IntPtrT>(IntPtrGreaterThanOrEqual(left, right), left,
7604                                  right);
7605 }
7606 
7607 TNode<IntPtrT> CodeStubAssembler::IntPtrMin(SloppyTNode<IntPtrT> left,
7608                                             SloppyTNode<IntPtrT> right) {
7609   intptr_t left_constant;
7610   intptr_t right_constant;
7611   if (ToIntPtrConstant(left, left_constant) &&
7612       ToIntPtrConstant(right, right_constant)) {
7613     return IntPtrConstant(std::min(left_constant, right_constant));
7614   }
7615   return SelectConstant<IntPtrT>(IntPtrLessThanOrEqual(left, right), left,
7616                                  right);
7617 }
7618 
7619 template <>
7620 TNode<HeapObject> CodeStubAssembler::LoadName<NameDictionary>(
7621     TNode<HeapObject> key) {
7622   CSA_ASSERT(this, Word32Or(IsTheHole(key), IsName(key)));
7623   return key;
7624 }
7625 
7626 template <>
7627 TNode<HeapObject> CodeStubAssembler::LoadName<GlobalDictionary>(
7628     TNode<HeapObject> key) {
7629   TNode<PropertyCell> property_cell = CAST(key);
7630   return CAST(LoadObjectField(property_cell, PropertyCell::kNameOffset));
7631 }
7632 
7633 template <typename Dictionary>
7634 void CodeStubAssembler::NameDictionaryLookup(
7635     TNode<Dictionary> dictionary, TNode<Name> unique_name, Label* if_found,
7636     TVariable<IntPtrT>* var_name_index, Label* if_not_found, int inlined_probes,
7637     LookupMode mode) {
7638   static_assert(std::is_same<Dictionary, NameDictionary>::value ||
7639                     std::is_same<Dictionary, GlobalDictionary>::value,
7640                 "Unexpected NameDictionary");
7641   DCHECK_EQ(MachineType::PointerRepresentation(), var_name_index->rep());
7642   DCHECK_IMPLIES(mode == kFindInsertionIndex,
7643                  inlined_probes == 0 && if_found == nullptr);
7644   Comment("NameDictionaryLookup");
7645 
7646   TNode<IntPtrT> capacity = SmiUntag(GetCapacity<Dictionary>(dictionary));
7647   TNode<WordT> mask = IntPtrSub(capacity, IntPtrConstant(1));
7648   TNode<WordT> hash = ChangeUint32ToWord(LoadNameHash(unique_name));
7649 
7650   // See Dictionary::FirstProbe().
7651   TNode<IntPtrT> count = IntPtrConstant(0);
7652   TNode<IntPtrT> entry = Signed(WordAnd(hash, mask));
7653   Node* undefined = UndefinedConstant();
7654 
7655   for (int i = 0; i < inlined_probes; i++) {
7656     TNode<IntPtrT> index = EntryToIndex<Dictionary>(entry);
7657     *var_name_index = index;
7658 
7659     TNode<HeapObject> current = CAST(LoadFixedArrayElement(dictionary, index));
7660     GotoIf(WordEqual(current, undefined), if_not_found);
7661     current = LoadName<Dictionary>(current);
7662     GotoIf(WordEqual(current, unique_name), if_found);
7663 
7664     // See Dictionary::NextProbe().
7665     count = IntPtrConstant(i + 1);
7666     entry = Signed(WordAnd(IntPtrAdd(entry, count), mask));
7667   }
7668   if (mode == kFindInsertionIndex) {
7669     // Appease the variable merging algorithm for "Goto(&loop)" below.
7670     *var_name_index = IntPtrConstant(0);
7671   }
7672 
7673   TVARIABLE(IntPtrT, var_count, count);
7674   TVARIABLE(IntPtrT, var_entry, entry);
7675   Variable* loop_vars[] = {&var_count, &var_entry, var_name_index};
7676   Label loop(this, 3, loop_vars);
7677   Goto(&loop);
7678   BIND(&loop);
7679   {
7680     TNode<IntPtrT> entry = var_entry.value();
7681 
7682     TNode<IntPtrT> index = EntryToIndex<Dictionary>(entry);
7683     *var_name_index = index;
7684 
7685     TNode<HeapObject> current = CAST(LoadFixedArrayElement(dictionary, index));
7686     GotoIf(WordEqual(current, undefined), if_not_found);
7687     if (mode == kFindExisting) {
7688       current = LoadName<Dictionary>(current);
7689       GotoIf(WordEqual(current, unique_name), if_found);
7690     } else {
7691       DCHECK_EQ(kFindInsertionIndex, mode);
7692       GotoIf(WordEqual(current, TheHoleConstant()), if_not_found);
7693     }
7694 
7695     // See Dictionary::NextProbe().
7696     Increment(&var_count);
7697     entry = Signed(WordAnd(IntPtrAdd(entry, var_count.value()), mask));
7698 
7699     var_entry = entry;
7700     Goto(&loop);
7701   }
7702 }
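// Editor's note (illustrative sketch, not part of the original file): the
// lookup above is open addressing with the triangular-number probe sequence
// of Dictionary::FirstProbe()/NextProbe(); in scalar form, roughly:
//
//   uint32_t entry = hash & mask;                  // mask == capacity - 1
//   for (uint32_t count = 1; ; count++) {
//     if (IsUndefined(KeyAt(entry))) return kNotFound;
//     if (KeyAt(entry) == unique_name) return entry;
//     entry = (entry + count) & mask;              // next probe
//   }
//
// KeyAt() and kNotFound are hypothetical placeholders for this sketch.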
7703 
7704 // Instantiate template methods to workaround GCC compilation issue.
7705 template void CodeStubAssembler::NameDictionaryLookup<NameDictionary>(
7706     TNode<NameDictionary>, TNode<Name>, Label*, TVariable<IntPtrT>*, Label*,
7707     int, LookupMode);
7708 template void CodeStubAssembler::NameDictionaryLookup<GlobalDictionary>(
7709     TNode<GlobalDictionary>, TNode<Name>, Label*, TVariable<IntPtrT>*, Label*,
7710     int, LookupMode);
7711 
7712 Node* CodeStubAssembler::ComputeIntegerHash(Node* key) {
7713   return ComputeIntegerHash(key, IntPtrConstant(kZeroHashSeed));
7714 }
7715 
7716 Node* CodeStubAssembler::ComputeIntegerHash(Node* key, Node* seed) {
7717   // See v8::internal::ComputeIntegerHash()
7718   Node* hash = TruncateIntPtrToInt32(key);
7719   hash = Word32Xor(hash, seed);
7720   hash = Int32Add(Word32Xor(hash, Int32Constant(0xFFFFFFFF)),
7721                   Word32Shl(hash, Int32Constant(15)));
7722   hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(12)));
7723   hash = Int32Add(hash, Word32Shl(hash, Int32Constant(2)));
7724   hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(4)));
7725   hash = Int32Mul(hash, Int32Constant(2057));
7726   hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(16)));
7727   return Word32And(hash, Int32Constant(0x3FFFFFFF));
7728 }
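// Editor's note (illustrative sketch, not part of the original file): the
// node graph built above computes the same value as this scalar version of
// the integer hash (cf. v8::internal::ComputeIntegerHash()):
//
//   uint32_t ComputeIntegerHashSketch(uint32_t key, uint32_t seed) {
//     uint32_t hash = key ^ seed;
//     hash = ~hash + (hash << 15);
//     hash = hash ^ (hash >> 12);
//     hash = hash + (hash << 2);
//     hash = hash ^ (hash >> 4);
//     hash = hash * 2057;
//     hash = hash ^ (hash >> 16);
//     return hash & 0x3FFFFFFF;
//   }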
7729 
7730 void CodeStubAssembler::NumberDictionaryLookup(
7731     TNode<NumberDictionary> dictionary, TNode<IntPtrT> intptr_index,
7732     Label* if_found, TVariable<IntPtrT>* var_entry, Label* if_not_found) {
7733   CSA_ASSERT(this, IsNumberDictionary(dictionary));
7734   DCHECK_EQ(MachineType::PointerRepresentation(), var_entry->rep());
7735   Comment("NumberDictionaryLookup");
7736 
7737   TNode<IntPtrT> capacity = SmiUntag(GetCapacity<NumberDictionary>(dictionary));
7738   TNode<WordT> mask = IntPtrSub(capacity, IntPtrConstant(1));
7739 
7740   TNode<Int32T> int32_seed;
7741 
7742   if (Is64()) {
7743     int32_seed = TruncateInt64ToInt32(HashSeed());
7744   } else {
7745     int32_seed = HashSeedLow();
7746   }
7747 
7748   TNode<WordT> hash =
7749       ChangeUint32ToWord(ComputeIntegerHash(intptr_index, int32_seed));
7750   Node* key_as_float64 = RoundIntPtrToFloat64(intptr_index);
7751 
7752   // See Dictionary::FirstProbe().
7753   TNode<IntPtrT> count = IntPtrConstant(0);
7754   TNode<IntPtrT> entry = Signed(WordAnd(hash, mask));
7755 
7756   Node* undefined = UndefinedConstant();
7757   Node* the_hole = TheHoleConstant();
7758 
7759   TVARIABLE(IntPtrT, var_count, count);
7760   Variable* loop_vars[] = {&var_count, var_entry};
7761   Label loop(this, 2, loop_vars);
7762   *var_entry = entry;
7763   Goto(&loop);
7764   BIND(&loop);
7765   {
7766     TNode<IntPtrT> entry = var_entry->value();
7767 
7768     TNode<IntPtrT> index = EntryToIndex<NumberDictionary>(entry);
7769     Node* current = LoadFixedArrayElement(dictionary, index);
7770     GotoIf(WordEqual(current, undefined), if_not_found);
7771     Label next_probe(this);
7772     {
7773       Label if_currentissmi(this), if_currentisnotsmi(this);
7774       Branch(TaggedIsSmi(current), &if_currentissmi, &if_currentisnotsmi);
7775       BIND(&if_currentissmi);
7776       {
7777         Node* current_value = SmiUntag(current);
7778         Branch(WordEqual(current_value, intptr_index), if_found, &next_probe);
7779       }
7780       BIND(&if_currentisnotsmi);
7781       {
7782         GotoIf(WordEqual(current, the_hole), &next_probe);
7783         // Current must be a HeapNumber.
7784         Node* current_value = LoadHeapNumberValue(current);
7785         Branch(Float64Equal(current_value, key_as_float64), if_found,
7786                &next_probe);
7787       }
7788     }
7789 
7790     BIND(&next_probe);
7791     // See Dictionary::NextProbe().
7792     Increment(&var_count);
7793     entry = Signed(WordAnd(IntPtrAdd(entry, var_count.value()), mask));
7794 
7795     *var_entry = entry;
7796     Goto(&loop);
7797   }
7798 }
7799 
7800 TNode<Object> CodeStubAssembler::BasicLoadNumberDictionaryElement(
7801     TNode<NumberDictionary> dictionary, TNode<IntPtrT> intptr_index,
7802     Label* not_data, Label* if_hole) {
7803   TVARIABLE(IntPtrT, var_entry);
7804   Label if_found(this);
7805   NumberDictionaryLookup(dictionary, intptr_index, &if_found, &var_entry,
7806                          if_hole);
7807   BIND(&if_found);
7808 
7809   // Check that the value is a data property.
7810   TNode<IntPtrT> index = EntryToIndex<NumberDictionary>(var_entry.value());
7811   TNode<Uint32T> details =
7812       LoadDetailsByKeyIndex<NumberDictionary>(dictionary, index);
7813   TNode<Uint32T> kind = DecodeWord32<PropertyDetails::KindField>(details);
7814   // TODO(jkummerow): Support accessors without missing?
7815   GotoIfNot(Word32Equal(kind, Int32Constant(kData)), not_data);
7816   // Finally, load the value.
7817   return LoadValueByKeyIndex<NumberDictionary>(dictionary, index);
7818 }
7819 
7820 void CodeStubAssembler::BasicStoreNumberDictionaryElement(
7821     TNode<NumberDictionary> dictionary, TNode<IntPtrT> intptr_index,
7822     TNode<Object> value, Label* not_data, Label* if_hole, Label* read_only) {
7823   TVARIABLE(IntPtrT, var_entry);
7824   Label if_found(this);
7825   NumberDictionaryLookup(dictionary, intptr_index, &if_found, &var_entry,
7826                          if_hole);
7827   BIND(&if_found);
7828 
7829   // Check that the value is a data property.
7830   TNode<IntPtrT> index = EntryToIndex<NumberDictionary>(var_entry.value());
7831   TNode<Uint32T> details =
7832       LoadDetailsByKeyIndex<NumberDictionary>(dictionary, index);
7833   TNode<Uint32T> kind = DecodeWord32<PropertyDetails::KindField>(details);
7834   // TODO(jkummerow): Support accessors without missing?
7835   GotoIfNot(Word32Equal(kind, Int32Constant(kData)), not_data);
7836 
7837   // Check that the property is writable.
7838   GotoIf(IsSetWord32(details, PropertyDetails::kAttributesReadOnlyMask),
7839          read_only);
7840 
7841   // Finally, store the value.
7842   StoreValueByKeyIndex<NumberDictionary>(dictionary, index, value);
7843 }
7844 
7845 template <class Dictionary>
7846 void CodeStubAssembler::FindInsertionEntry(TNode<Dictionary> dictionary,
7847                                            TNode<Name> key,
7848                                            TVariable<IntPtrT>* var_key_index) {
7849   UNREACHABLE();
7850 }
7851 
7852 template <>
7853 void CodeStubAssembler::FindInsertionEntry<NameDictionary>(
7854     TNode<NameDictionary> dictionary, TNode<Name> key,
7855     TVariable<IntPtrT>* var_key_index) {
7856   Label done(this);
7857   NameDictionaryLookup<NameDictionary>(dictionary, key, nullptr, var_key_index,
7858                                        &done, 0, kFindInsertionIndex);
7859   BIND(&done);
7860 }
7861 
7862 template <class Dictionary>
7863 void CodeStubAssembler::InsertEntry(TNode<Dictionary> dictionary,
7864                                     TNode<Name> key, TNode<Object> value,
7865                                     TNode<IntPtrT> index,
7866                                     TNode<Smi> enum_index) {
7867   UNREACHABLE();  // Use specializations instead.
7868 }
7869 
7870 template <>
7871 void CodeStubAssembler::InsertEntry<NameDictionary>(
7872     TNode<NameDictionary> dictionary, TNode<Name> name, TNode<Object> value,
7873     TNode<IntPtrT> index, TNode<Smi> enum_index) {
7874   // Store name and value.
7875   StoreFixedArrayElement(dictionary, index, name);
7876   StoreValueByKeyIndex<NameDictionary>(dictionary, index, value);
7877 
7878   // Prepare details of the new property.
7879   PropertyDetails d(kData, NONE, PropertyCellType::kNoCell);
7880   enum_index =
7881       SmiShl(enum_index, PropertyDetails::DictionaryStorageField::kShift);
7882   // We OR over the actual index below, so we expect the initial value to be 0.
7883   DCHECK_EQ(0, d.dictionary_index());
7884   TVARIABLE(Smi, var_details, SmiOr(SmiConstant(d.AsSmi()), enum_index));
7885 
7886   // Private names must be marked non-enumerable.
7887   Label not_private(this, &var_details);
7888   GotoIfNot(IsPrivateSymbol(name), &not_private);
7889   TNode<Smi> dont_enum =
7890       SmiShl(SmiConstant(DONT_ENUM), PropertyDetails::AttributesField::kShift);
7891   var_details = SmiOr(var_details.value(), dont_enum);
7892   Goto(&not_private);
7893   BIND(&not_private);
7894 
7895   // Finally, store the details.
7896   StoreDetailsByKeyIndex<NameDictionary>(dictionary, index,
7897                                          var_details.value());
7898 }
7899 
7900 template <>
7901 void CodeStubAssembler::InsertEntry<GlobalDictionary>(
7902     TNode<GlobalDictionary> dictionary, TNode<Name> key, TNode<Object> value,
7903     TNode<IntPtrT> index, TNode<Smi> enum_index) {
7904   UNIMPLEMENTED();
7905 }
7906 
7907 template <class Dictionary>
7908 void CodeStubAssembler::Add(TNode<Dictionary> dictionary, TNode<Name> key,
7909                             TNode<Object> value, Label* bailout) {
7910   CSA_ASSERT(this, Word32BinaryNot(IsEmptyPropertyDictionary(dictionary)));
7911   TNode<Smi> capacity = GetCapacity<Dictionary>(dictionary);
7912   TNode<Smi> nof = GetNumberOfElements<Dictionary>(dictionary);
7913   TNode<Smi> new_nof = SmiAdd(nof, SmiConstant(1));
7914   // Require 33% to still be free after adding the new element.
7915   // Computing "x + (x >> 1)" on a Smi x does not return a valid Smi!
7916   // But that's OK here because it's only used for a comparison.
7917   TNode<Smi> required_capacity_pseudo_smi = SmiAdd(new_nof, SmiShr(new_nof, 1));
7918   GotoIf(SmiBelow(capacity, required_capacity_pseudo_smi), bailout);
7919   // Require rehashing if more than 50% of free elements are deleted elements.
7920   TNode<Smi> deleted = GetNumberOfDeletedElements<Dictionary>(dictionary);
7921   CSA_ASSERT(this, SmiAbove(capacity, new_nof));
7922   TNode<Smi> half_of_free_elements = SmiShr(SmiSub(capacity, new_nof), 1);
7923   GotoIf(SmiAbove(deleted, half_of_free_elements), bailout);
7924 
7925   TNode<Smi> enum_index = GetNextEnumerationIndex<Dictionary>(dictionary);
7926   TNode<Smi> new_enum_index = SmiAdd(enum_index, SmiConstant(1));
7927   TNode<Smi> max_enum_index =
7928       SmiConstant(PropertyDetails::DictionaryStorageField::kMax);
7929   GotoIf(SmiAbove(new_enum_index, max_enum_index), bailout);
7930 
7931   // No more bailouts after this point.
7932   // Operations from here on can have side effects.
7933 
7934   SetNextEnumerationIndex<Dictionary>(dictionary, new_enum_index);
7935   SetNumberOfElements<Dictionary>(dictionary, new_nof);
7936 
7937   TVARIABLE(IntPtrT, var_key_index);
7938   FindInsertionEntry<Dictionary>(dictionary, key, &var_key_index);
7939   InsertEntry<Dictionary>(dictionary, key, value, var_key_index.value(),
7940                           enum_index);
7941 }
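// Editor's note (illustrative sketch, not part of the original file): the two
// bailout conditions above correspond to
//
//   capacity < new_nof + (new_nof >> 1)    // less than ~33% would stay free
//   deleted  > (capacity - new_nof) >> 1   // >50% of the free slots deleted
//
// either of which sends the caller to the runtime so the dictionary can be
// grown or rehashed before inserting.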
7942 
7943 template void CodeStubAssembler::Add<NameDictionary>(TNode<NameDictionary>,
7944                                                      TNode<Name>, TNode<Object>,
7945                                                      Label*);
7946 
7947 template <typename Array>
7948 void CodeStubAssembler::LookupLinear(TNode<Name> unique_name,
7949                                      TNode<Array> array,
7950                                      TNode<Uint32T> number_of_valid_entries,
7951                                      Label* if_found,
7952                                      TVariable<IntPtrT>* var_name_index,
7953                                      Label* if_not_found) {
7954   static_assert(std::is_base_of<FixedArray, Array>::value ||
7955                     std::is_base_of<WeakFixedArray, Array>::value,
7956                 "T must be a descendant of FixedArray or a WeakFixedArray");
7957   Comment("LookupLinear");
7958   TNode<IntPtrT> first_inclusive = IntPtrConstant(Array::ToKeyIndex(0));
7959   TNode<IntPtrT> factor = IntPtrConstant(Array::kEntrySize);
7960   TNode<IntPtrT> last_exclusive = IntPtrAdd(
7961       first_inclusive,
7962       IntPtrMul(ChangeInt32ToIntPtr(number_of_valid_entries), factor));
7963 
7964   BuildFastLoop(last_exclusive, first_inclusive,
7965                 [=](SloppyTNode<IntPtrT> name_index) {
7966                   TNode<MaybeObject> element =
7967                       LoadArrayElement(array, Array::kHeaderSize, name_index);
7968                   TNode<Name> candidate_name = CAST(element);
7969                   *var_name_index = name_index;
7970                   GotoIf(WordEqual(candidate_name, unique_name), if_found);
7971                 },
7972                 -Array::kEntrySize, INTPTR_PARAMETERS, IndexAdvanceMode::kPre);
7973   Goto(if_not_found);
7974 }
7975 
7976 template <>
7977 TNode<Uint32T> CodeStubAssembler::NumberOfEntries<DescriptorArray>(
7978     TNode<DescriptorArray> descriptors) {
7979   return Unsigned(LoadAndUntagToWord32ArrayElement(
7980       descriptors, WeakFixedArray::kHeaderSize,
7981       IntPtrConstant(DescriptorArray::kDescriptorLengthIndex)));
7982 }
7983 
7984 template <>
7985 TNode<Uint32T> CodeStubAssembler::NumberOfEntries<TransitionArray>(
7986     TNode<TransitionArray> transitions) {
7987   TNode<IntPtrT> length = LoadAndUntagWeakFixedArrayLength(transitions);
7988   return Select<Uint32T>(
7989       UintPtrLessThan(length, IntPtrConstant(TransitionArray::kFirstIndex)),
7990       [=] { return Unsigned(Int32Constant(0)); },
7991       [=] {
7992         return Unsigned(LoadAndUntagToWord32ArrayElement(
7993             transitions, WeakFixedArray::kHeaderSize,
7994             IntPtrConstant(TransitionArray::kTransitionLengthIndex)));
7995       });
7996 }
7997 
7998 template <typename Array>
7999 TNode<IntPtrT> CodeStubAssembler::EntryIndexToIndex(
8000     TNode<Uint32T> entry_index) {
8001   TNode<Int32T> entry_size = Int32Constant(Array::kEntrySize);
8002   TNode<Word32T> index = Int32Mul(entry_index, entry_size);
8003   return ChangeInt32ToIntPtr(index);
8004 }
8005 
8006 template <typename Array>
8007 TNode<IntPtrT> CodeStubAssembler::ToKeyIndex(TNode<Uint32T> entry_index) {
8008   return IntPtrAdd(IntPtrConstant(Array::ToKeyIndex(0)),
8009                    EntryIndexToIndex<Array>(entry_index));
8010 }
8011 
8012 template TNode<IntPtrT> CodeStubAssembler::ToKeyIndex<DescriptorArray>(
8013     TNode<Uint32T>);
8014 template TNode<IntPtrT> CodeStubAssembler::ToKeyIndex<TransitionArray>(
8015     TNode<Uint32T>);
8016 
8017 template <>
8018 TNode<Uint32T> CodeStubAssembler::GetSortedKeyIndex<DescriptorArray>(
8019     TNode<DescriptorArray> descriptors, TNode<Uint32T> descriptor_number) {
8020   TNode<Uint32T> details =
8021       DescriptorArrayGetDetails(descriptors, descriptor_number);
8022   return DecodeWord32<PropertyDetails::DescriptorPointer>(details);
8023 }
8024 
8025 template <>
8026 TNode<Uint32T> CodeStubAssembler::GetSortedKeyIndex<TransitionArray>(
8027     TNode<TransitionArray> transitions, TNode<Uint32T> transition_number) {
8028   return transition_number;
8029 }
8030 
8031 template <typename Array>
8032 TNode<Name> CodeStubAssembler::GetKey(TNode<Array> array,
8033                                       TNode<Uint32T> entry_index) {
8034   static_assert(std::is_base_of<FixedArray, Array>::value ||
8035                     std::is_base_of<WeakFixedArray, Array>::value,
8036                 "T must be a descendant of FixedArray or a WeakFixedArray");
8037   const int key_offset = Array::ToKeyIndex(0) * kPointerSize;
8038   TNode<MaybeObject> element =
8039       LoadArrayElement(array, Array::kHeaderSize,
8040                        EntryIndexToIndex<Array>(entry_index), key_offset);
8041   return CAST(element);
8042 }
8043 
8044 template TNode<Name> CodeStubAssembler::GetKey<DescriptorArray>(
8045     TNode<DescriptorArray>, TNode<Uint32T>);
8046 template TNode<Name> CodeStubAssembler::GetKey<TransitionArray>(
8047     TNode<TransitionArray>, TNode<Uint32T>);
8048 
8049 TNode<Uint32T> CodeStubAssembler::DescriptorArrayGetDetails(
8050     TNode<DescriptorArray> descriptors, TNode<Uint32T> descriptor_number) {
8051   const int details_offset = DescriptorArray::ToDetailsIndex(0) * kPointerSize;
8052   return Unsigned(LoadAndUntagToWord32ArrayElement(
8053       descriptors, WeakFixedArray::kHeaderSize,
8054       EntryIndexToIndex<DescriptorArray>(descriptor_number), details_offset));
8055 }
8056 
8057 template <typename Array>
8058 void CodeStubAssembler::LookupBinary(TNode<Name> unique_name,
8059                                      TNode<Array> array,
8060                                      TNode<Uint32T> number_of_valid_entries,
8061                                      Label* if_found,
8062                                      TVariable<IntPtrT>* var_name_index,
8063                                      Label* if_not_found) {
8064   Comment("LookupBinary");
8065   TVARIABLE(Uint32T, var_low, Unsigned(Int32Constant(0)));
8066   TNode<Uint32T> limit =
8067       Unsigned(Int32Sub(NumberOfEntries<Array>(array), Int32Constant(1)));
8068   TVARIABLE(Uint32T, var_high, limit);
8069   TNode<Uint32T> hash = LoadNameHashField(unique_name);
8070   CSA_ASSERT(this, Word32NotEqual(hash, Int32Constant(0)));
8071 
8072   // Assume non-empty array.
8073   CSA_ASSERT(this, Uint32LessThanOrEqual(var_low.value(), var_high.value()));
8074 
8075   Label binary_loop(this, {&var_high, &var_low});
8076   Goto(&binary_loop);
8077   BIND(&binary_loop);
8078   {
8079     // mid = low + (high - low) / 2 (to avoid overflow in "(low + high) / 2").
8080     TNode<Uint32T> mid = Unsigned(
8081         Int32Add(var_low.value(),
8082                  Word32Shr(Int32Sub(var_high.value(), var_low.value()), 1)));
8083     // mid_name = array->GetSortedKey(mid).
8084     TNode<Uint32T> sorted_key_index = GetSortedKeyIndex<Array>(array, mid);
8085     TNode<Name> mid_name = GetKey<Array>(array, sorted_key_index);
8086 
8087     TNode<Uint32T> mid_hash = LoadNameHashField(mid_name);
8088 
8089     Label mid_greater(this), mid_less(this), merge(this);
8090     Branch(Uint32GreaterThanOrEqual(mid_hash, hash), &mid_greater, &mid_less);
8091     BIND(&mid_greater);
8092     {
8093       var_high = mid;
8094       Goto(&merge);
8095     }
8096     BIND(&mid_less);
8097     {
8098       var_low = Unsigned(Int32Add(mid, Int32Constant(1)));
8099       Goto(&merge);
8100     }
8101     BIND(&merge);
8102     GotoIf(Word32NotEqual(var_low.value(), var_high.value()), &binary_loop);
8103   }
8104 
8105   Label scan_loop(this, &var_low);
8106   Goto(&scan_loop);
8107   BIND(&scan_loop);
8108   {
8109     GotoIf(Int32GreaterThan(var_low.value(), limit), if_not_found);
8110 
8111     TNode<Uint32T> sort_index =
8112         GetSortedKeyIndex<Array>(array, var_low.value());
8113     TNode<Name> current_name = GetKey<Array>(array, sort_index);
8114     TNode<Uint32T> current_hash = LoadNameHashField(current_name);
8115     GotoIf(Word32NotEqual(current_hash, hash), if_not_found);
8116     Label next(this);
8117     GotoIf(WordNotEqual(current_name, unique_name), &next);
8118     GotoIf(Uint32GreaterThanOrEqual(sort_index, number_of_valid_entries),
8119            if_not_found);
8120     *var_name_index = ToKeyIndex<Array>(sort_index);
8121     Goto(if_found);
8122 
8123     BIND(&next);
8124     var_low = Unsigned(Int32Add(var_low.value(), Int32Constant(1)));
8125     Goto(&scan_loop);
8126   }
8127 }
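// Editor's note (illustrative sketch, not part of the original file): the
// lookup above binary-searches the keys sorted by hash and then scans the run
// of entries with an equal hash; in scalar form, roughly:
//
//   uint32_t low = 0, high = limit;           // limit == NumberOfEntries - 1
//   while (low != high) {
//     uint32_t mid = low + (high - low) / 2;  // avoids overflowing low + high
//     if (HashAt(mid) >= hash) high = mid; else low = mid + 1;
//   }
//   for (; low <= limit && HashAt(low) == hash; low++) {
//     if (KeyAt(low) == unique_name) return low;
//   }
//   return kNotFound;
//
// HashAt(), KeyAt() and kNotFound are hypothetical placeholders.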
8128 
8129 void CodeStubAssembler::DescriptorArrayForEach(
8130     VariableList& variable_list, TNode<Uint32T> start_descriptor,
8131     TNode<Uint32T> end_descriptor, const ForEachDescriptorBodyFunction& body) {
8132   TNode<IntPtrT> start_index =
8133       IntPtrAdd(IntPtrConstant(DescriptorArray::ToKeyIndex(0)),
8134                 EntryIndexToIndex<DescriptorArray>(start_descriptor));
8135 
8136   TNode<IntPtrT> end_index =
8137       IntPtrAdd(IntPtrConstant(DescriptorArray::ToKeyIndex(0)),
8138                 EntryIndexToIndex<DescriptorArray>(end_descriptor));
8139 
8140   BuildFastLoop(variable_list, start_index, end_index,
8141                 [=](Node* index) {
8142                   TNode<UintPtrT> descriptor_key_index =
8143                       TNode<UintPtrT>::UncheckedCast(index);
8144                   body(descriptor_key_index);
8145                 },
8146                 DescriptorArray::kEntrySize, INTPTR_PARAMETERS,
8147                 IndexAdvanceMode::kPost);
8148 }
8149 
8150 void CodeStubAssembler::DescriptorLookup(
8151     SloppyTNode<Name> unique_name, SloppyTNode<DescriptorArray> descriptors,
8152     SloppyTNode<Uint32T> bitfield3, Label* if_found,
8153     TVariable<IntPtrT>* var_name_index, Label* if_not_found) {
8154   Comment("DescriptorArrayLookup");
8155   TNode<Uint32T> nof = DecodeWord32<Map::NumberOfOwnDescriptorsBits>(bitfield3);
8156   Lookup<DescriptorArray>(unique_name, descriptors, nof, if_found,
8157                           var_name_index, if_not_found);
8158 }
8159 
8160 void CodeStubAssembler::TransitionLookup(
8161     SloppyTNode<Name> unique_name, SloppyTNode<TransitionArray> transitions,
8162     Label* if_found, TVariable<IntPtrT>* var_name_index, Label* if_not_found) {
8163   Comment("TransitionArrayLookup");
8164   TNode<Uint32T> number_of_valid_transitions =
8165       NumberOfEntries<TransitionArray>(transitions);
8166   Lookup<TransitionArray>(unique_name, transitions, number_of_valid_transitions,
8167                           if_found, var_name_index, if_not_found);
8168 }
8169 
8170 template <typename Array>
8171 void CodeStubAssembler::Lookup(TNode<Name> unique_name, TNode<Array> array,
8172                                TNode<Uint32T> number_of_valid_entries,
8173                                Label* if_found,
8174                                TVariable<IntPtrT>* var_name_index,
8175                                Label* if_not_found) {
8176   Comment("ArrayLookup");
8177   if (!number_of_valid_entries) {
8178     number_of_valid_entries = NumberOfEntries(array);
8179   }
8180   GotoIf(Word32Equal(number_of_valid_entries, Int32Constant(0)), if_not_found);
8181   Label linear_search(this), binary_search(this);
8182   const int kMaxElementsForLinearSearch = 32;
8183   Branch(Uint32LessThanOrEqual(number_of_valid_entries,
8184                                Int32Constant(kMaxElementsForLinearSearch)),
8185          &linear_search, &binary_search);
8186   BIND(&linear_search);
8187   {
8188     LookupLinear<Array>(unique_name, array, number_of_valid_entries, if_found,
8189                         var_name_index, if_not_found);
8190   }
8191   BIND(&binary_search);
8192   {
8193     LookupBinary<Array>(unique_name, array, number_of_valid_entries, if_found,
8194                         var_name_index, if_not_found);
8195   }
8196 }
8197 
8198 TNode<BoolT> CodeStubAssembler::IsSimpleObjectMap(TNode<Map> map) {
8199   uint32_t mask =
8200       Map::HasNamedInterceptorBit::kMask | Map::IsAccessCheckNeededBit::kMask;
8201   // !IsSpecialReceiverType && !IsNamedInterceptor && !IsAccessCheckNeeded
8202   return Select<BoolT>(
8203       IsSpecialReceiverInstanceType(LoadMapInstanceType(map)),
8204       [=] { return Int32FalseConstant(); },
8205       [=] { return IsClearWord32(LoadMapBitField(map), mask); });
8206 }
8207 
8208 void CodeStubAssembler::TryLookupPropertyInSimpleObject(
8209     TNode<JSObject> object, TNode<Map> map, TNode<Name> unique_name,
8210     Label* if_found_fast, Label* if_found_dict,
8211     TVariable<HeapObject>* var_meta_storage, TVariable<IntPtrT>* var_name_index,
8212     Label* if_not_found) {
8213   CSA_ASSERT(this, IsSimpleObjectMap(map));
8214 
8215   TNode<Uint32T> bit_field3 = LoadMapBitField3(map);
8216   Label if_isfastmap(this), if_isslowmap(this);
8217   Branch(IsSetWord32<Map::IsDictionaryMapBit>(bit_field3), &if_isslowmap,
8218          &if_isfastmap);
8219   BIND(&if_isfastmap);
8220   {
8221     TNode<DescriptorArray> descriptors = LoadMapDescriptors(map);
8222     *var_meta_storage = descriptors;
8223 
8224     DescriptorLookup(unique_name, descriptors, bit_field3, if_found_fast,
8225                      var_name_index, if_not_found);
8226   }
8227   BIND(&if_isslowmap);
8228   {
8229     TNode<NameDictionary> dictionary = CAST(LoadSlowProperties(object));
8230     *var_meta_storage = dictionary;
8231 
8232     NameDictionaryLookup<NameDictionary>(dictionary, unique_name, if_found_dict,
8233                                          var_name_index, if_not_found);
8234   }
8235 }
8236 
8237 void CodeStubAssembler::TryLookupProperty(
8238     SloppyTNode<JSObject> object, SloppyTNode<Map> map,
8239     SloppyTNode<Int32T> instance_type, SloppyTNode<Name> unique_name,
8240     Label* if_found_fast, Label* if_found_dict, Label* if_found_global,
8241     TVariable<HeapObject>* var_meta_storage, TVariable<IntPtrT>* var_name_index,
8242     Label* if_not_found, Label* if_bailout) {
8243   Label if_objectisspecial(this);
8244   GotoIf(IsSpecialReceiverInstanceType(instance_type), &if_objectisspecial);
8245 
8246   TryLookupPropertyInSimpleObject(object, map, unique_name, if_found_fast,
8247                                   if_found_dict, var_meta_storage,
8248                                   var_name_index, if_not_found);
8249 
8250   BIND(&if_objectisspecial);
8251   {
8252     // Handle global object here and bailout for other special objects.
8253     GotoIfNot(InstanceTypeEqual(instance_type, JS_GLOBAL_OBJECT_TYPE),
8254               if_bailout);
8255 
8256     // Handle interceptors and access checks in runtime.
8257     TNode<Int32T> bit_field = LoadMapBitField(map);
8258     int mask =
8259         Map::HasNamedInterceptorBit::kMask | Map::IsAccessCheckNeededBit::kMask;
8260     GotoIf(IsSetWord32(bit_field, mask), if_bailout);
8261 
8262     TNode<GlobalDictionary> dictionary = CAST(LoadSlowProperties(object));
8263     *var_meta_storage = dictionary;
8264 
8265     NameDictionaryLookup<GlobalDictionary>(
8266         dictionary, unique_name, if_found_global, var_name_index, if_not_found);
8267   }
8268 }
8269 
8270 void CodeStubAssembler::TryHasOwnProperty(Node* object, Node* map,
8271                                           Node* instance_type,
8272                                           Node* unique_name, Label* if_found,
8273                                           Label* if_not_found,
8274                                           Label* if_bailout) {
8275   Comment("TryHasOwnProperty");
8276   TVARIABLE(HeapObject, var_meta_storage);
8277   TVARIABLE(IntPtrT, var_name_index);
8278 
8279   Label if_found_global(this);
8280   TryLookupProperty(object, map, instance_type, unique_name, if_found, if_found,
8281                     &if_found_global, &var_meta_storage, &var_name_index,
8282                     if_not_found, if_bailout);
8283 
8284   BIND(&if_found_global);
8285   {
8286     VARIABLE(var_value, MachineRepresentation::kTagged);
8287     VARIABLE(var_details, MachineRepresentation::kWord32);
8288     // Check if the property cell is not deleted.
8289     LoadPropertyFromGlobalDictionary(var_meta_storage.value(),
8290                                      var_name_index.value(), &var_value,
8291                                      &var_details, if_not_found);
8292     Goto(if_found);
8293   }
8294 }
8295 
8296 Node* CodeStubAssembler::GetMethod(Node* context, Node* object,
8297                                    Handle<Name> name,
8298                                    Label* if_null_or_undefined) {
8299   Node* method = GetProperty(context, object, name);
8300 
8301   GotoIf(IsUndefined(method), if_null_or_undefined);
8302   GotoIf(IsNull(method), if_null_or_undefined);
8303 
8304   return method;
8305 }
8306 
8307 void CodeStubAssembler::LoadPropertyFromFastObject(
8308     Node* object, Node* map, TNode<DescriptorArray> descriptors,
8309     Node* name_index, Variable* var_details, Variable* var_value) {
8310   DCHECK_EQ(MachineRepresentation::kWord32, var_details->rep());
8311   DCHECK_EQ(MachineRepresentation::kTagged, var_value->rep());
8312 
8313   Node* details =
8314       LoadDetailsByKeyIndex(descriptors, UncheckedCast<IntPtrT>(name_index));
8315   var_details->Bind(details);
8316 
8317   LoadPropertyFromFastObject(object, map, descriptors, name_index, details,
8318                              var_value);
8319 }
8320 
8321 void CodeStubAssembler::LoadPropertyFromFastObject(
8322     Node* object, Node* map, TNode<DescriptorArray> descriptors,
8323     Node* name_index, Node* details, Variable* var_value) {
8324   Comment("[ LoadPropertyFromFastObject");
8325 
8326   Node* location = DecodeWord32<PropertyDetails::LocationField>(details);
8327 
8328   Label if_in_field(this), if_in_descriptor(this), done(this);
8329   Branch(Word32Equal(location, Int32Constant(kField)), &if_in_field,
8330          &if_in_descriptor);
8331   BIND(&if_in_field);
8332   {
8333     Node* field_index =
8334         DecodeWordFromWord32<PropertyDetails::FieldIndexField>(details);
8335     Node* representation =
8336         DecodeWord32<PropertyDetails::RepresentationField>(details);
8337 
8338     field_index =
8339         IntPtrAdd(field_index, LoadMapInobjectPropertiesStartInWords(map));
8340     Node* instance_size_in_words = LoadMapInstanceSizeInWords(map);
8341 
8342     Label if_inobject(this), if_backing_store(this);
8343     VARIABLE(var_double_value, MachineRepresentation::kFloat64);
8344     Label rebox_double(this, &var_double_value);
8345     Branch(UintPtrLessThan(field_index, instance_size_in_words), &if_inobject,
8346            &if_backing_store);
8347     BIND(&if_inobject);
8348     {
8349       Comment("if_inobject");
8350       Node* field_offset = TimesPointerSize(field_index);
8351 
8352       Label if_double(this), if_tagged(this);
8353       Branch(Word32NotEqual(representation,
8354                             Int32Constant(Representation::kDouble)),
8355              &if_tagged, &if_double);
8356       BIND(&if_tagged);
8357       {
8358         var_value->Bind(LoadObjectField(object, field_offset));
8359         Goto(&done);
8360       }
8361       BIND(&if_double);
8362       {
8363         if (FLAG_unbox_double_fields) {
8364           var_double_value.Bind(
8365               LoadObjectField(object, field_offset, MachineType::Float64()));
8366         } else {
8367           Node* mutable_heap_number = LoadObjectField(object, field_offset);
8368           var_double_value.Bind(LoadHeapNumberValue(mutable_heap_number));
8369         }
8370         Goto(&rebox_double);
8371       }
8372     }
8373     BIND(&if_backing_store);
8374     {
8375       Comment("if_backing_store");
8376       Node* properties = LoadFastProperties(object);
8377       field_index = IntPtrSub(field_index, instance_size_in_words);
8378       Node* value = LoadPropertyArrayElement(properties, field_index);
8379 
8380       Label if_double(this), if_tagged(this);
8381       Branch(Word32NotEqual(representation,
8382                             Int32Constant(Representation::kDouble)),
8383              &if_tagged, &if_double);
8384       BIND(&if_tagged);
8385       {
8386         var_value->Bind(value);
8387         Goto(&done);
8388       }
8389       BIND(&if_double);
8390       {
8391         var_double_value.Bind(LoadHeapNumberValue(value));
8392         Goto(&rebox_double);
8393       }
8394     }
8395     BIND(&rebox_double);
8396     {
8397       Comment("rebox_double");
8398       Node* heap_number = AllocateHeapNumberWithValue(var_double_value.value());
8399       var_value->Bind(heap_number);
8400       Goto(&done);
8401     }
8402   }
8403   BIND(&if_in_descriptor);
8404   {
8405     var_value->Bind(
8406         LoadValueByKeyIndex(descriptors, UncheckedCast<IntPtrT>(name_index)));
8407     Goto(&done);
8408   }
8409   BIND(&done);
8410 
8411   Comment("] LoadPropertyFromFastObject");
8412 }
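// Editor's note (illustrative sketch, not part of the original file): for a
// property stored in a field, the load above boils down to
//
//   slot = field_index + inobject_properties_start_in_words;
//   if (slot < instance_size_in_words)
//     value = <in-object field at byte offset slot * kPointerSize>;
//   else
//     value = property_backing_store[slot - instance_size_in_words];
//
// with double-representation fields being re-boxed into a freshly allocated
// HeapNumber before they are handed back.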
8413 
8414 void CodeStubAssembler::LoadPropertyFromNameDictionary(Node* dictionary,
8415                                                        Node* name_index,
8416                                                        Variable* var_details,
8417                                                        Variable* var_value) {
8418   Comment("[ LoadPropertyFromNameDictionary");
8419   CSA_ASSERT(this, IsNameDictionary(dictionary));
8420 
8421   var_details->Bind(
8422       LoadDetailsByKeyIndex<NameDictionary>(dictionary, name_index));
8423   var_value->Bind(LoadValueByKeyIndex<NameDictionary>(dictionary, name_index));
8424 
8425   Comment("] LoadPropertyFromNameDictionary");
8426 }
8427 
8428 void CodeStubAssembler::LoadPropertyFromGlobalDictionary(Node* dictionary,
8429                                                          Node* name_index,
8430                                                          Variable* var_details,
8431                                                          Variable* var_value,
8432                                                          Label* if_deleted) {
8433   Comment("[ LoadPropertyFromGlobalDictionary");
8434   CSA_ASSERT(this, IsGlobalDictionary(dictionary));
8435 
8436   Node* property_cell = LoadFixedArrayElement(CAST(dictionary), name_index);
8437   CSA_ASSERT(this, IsPropertyCell(property_cell));
8438 
8439   Node* value = LoadObjectField(property_cell, PropertyCell::kValueOffset);
8440   GotoIf(WordEqual(value, TheHoleConstant()), if_deleted);
8441 
8442   var_value->Bind(value);
8443 
8444   Node* details = LoadAndUntagToWord32ObjectField(property_cell,
8445                                                   PropertyCell::kDetailsOffset);
8446   var_details->Bind(details);
8447 
8448   Comment("] LoadPropertyFromGlobalDictionary");
8449 }
8450 
8451 // |value| is the property backing store's contents, which is either a value
8452 // or an accessor pair, as specified by |details|.
8453 // Returns either the original value, or the result of the getter call.
8454 TNode<Object> CodeStubAssembler::CallGetterIfAccessor(
8455     Node* value, Node* details, Node* context, Node* receiver,
8456     Label* if_bailout, GetOwnPropertyMode mode) {
8457   VARIABLE(var_value, MachineRepresentation::kTagged, value);
8458   Label done(this), if_accessor_info(this, Label::kDeferred);
8459 
8460   Node* kind = DecodeWord32<PropertyDetails::KindField>(details);
8461   GotoIf(Word32Equal(kind, Int32Constant(kData)), &done);
8462 
8463   // Accessor case.
8464   GotoIfNot(IsAccessorPair(value), &if_accessor_info);
8465 
8466   // AccessorPair case.
8467   {
8468     if (mode == kCallJSGetter) {
8469       Node* accessor_pair = value;
8470       Node* getter =
8471           LoadObjectField(accessor_pair, AccessorPair::kGetterOffset);
8472       Node* getter_map = LoadMap(getter);
8473       Node* instance_type = LoadMapInstanceType(getter_map);
8474       // FunctionTemplateInfo getters are not supported yet.
8475       GotoIf(InstanceTypeEqual(instance_type, FUNCTION_TEMPLATE_INFO_TYPE),
8476              if_bailout);
8477 
8478       // Return undefined if the {getter} is not callable.
8479       var_value.Bind(UndefinedConstant());
8480       GotoIfNot(IsCallableMap(getter_map), &done);
8481 
8482       // Call the accessor.
8483       Callable callable = CodeFactory::Call(isolate());
8484       Node* result = CallJS(callable, context, getter, receiver);
8485       var_value.Bind(result);
8486     }
8487     Goto(&done);
8488   }
8489 
8490   // AccessorInfo case.
8491   BIND(&if_accessor_info);
8492   {
8493     Node* accessor_info = value;
8494     CSA_ASSERT(this, IsAccessorInfo(value));
8495     CSA_ASSERT(this, TaggedIsNotSmi(receiver));
8496     Label if_array(this), if_function(this), if_value(this);
8497 
8498     // Dispatch based on {receiver} instance type.
8499     Node* receiver_map = LoadMap(receiver);
8500     Node* receiver_instance_type = LoadMapInstanceType(receiver_map);
8501     GotoIf(IsJSArrayInstanceType(receiver_instance_type), &if_array);
8502     GotoIf(IsJSFunctionInstanceType(receiver_instance_type), &if_function);
8503     Branch(IsJSValueInstanceType(receiver_instance_type), &if_value,
8504            if_bailout);
8505 
8506     // JSArray AccessorInfo case.
8507     BIND(&if_array);
8508     {
8509       // We only deal with the "length" accessor on JSArray.
8510       GotoIfNot(IsLengthString(
8511                     LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
8512                 if_bailout);
8513       var_value.Bind(LoadJSArrayLength(receiver));
8514       Goto(&done);
8515     }
8516 
8517     // JSFunction AccessorInfo case.
8518     BIND(&if_function);
8519     {
8520       // We only deal with the "prototype" accessor on JSFunction here.
8521       GotoIfNot(IsPrototypeString(
8522                     LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
8523                 if_bailout);
8524 
8525       GotoIfPrototypeRequiresRuntimeLookup(CAST(receiver), CAST(receiver_map),
8526                                            if_bailout);
8527       var_value.Bind(LoadJSFunctionPrototype(receiver, if_bailout));
8528       Goto(&done);
8529     }
8530 
8531     // JSValue AccessorInfo case.
8532     BIND(&if_value);
8533     {
8534       // We only deal with the "length" accessor on JSValue string wrappers.
8535       GotoIfNot(IsLengthString(
8536                     LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
8537                 if_bailout);
8538       Node* receiver_value = LoadJSValueValue(receiver);
8539       GotoIfNot(TaggedIsNotSmi(receiver_value), if_bailout);
8540       GotoIfNot(IsString(receiver_value), if_bailout);
8541       var_value.Bind(LoadStringLengthAsSmi(receiver_value));
8542       Goto(&done);
8543     }
8544   }
8545 
8546   BIND(&done);
8547   return UncheckedCast<Object>(var_value.value());
8548 }
8549 
8550 void CodeStubAssembler::TryGetOwnProperty(
8551     Node* context, Node* receiver, Node* object, Node* map, Node* instance_type,
8552     Node* unique_name, Label* if_found_value, Variable* var_value,
8553     Label* if_not_found, Label* if_bailout) {
8554   TryGetOwnProperty(context, receiver, object, map, instance_type, unique_name,
8555                     if_found_value, var_value, nullptr, nullptr, if_not_found,
8556                     if_bailout, kCallJSGetter);
8557 }
8558 
8559 void CodeStubAssembler::TryGetOwnProperty(
8560     Node* context, Node* receiver, Node* object, Node* map, Node* instance_type,
8561     Node* unique_name, Label* if_found_value, Variable* var_value,
8562     Variable* var_details, Variable* var_raw_value, Label* if_not_found,
8563     Label* if_bailout, GetOwnPropertyMode mode) {
8564   DCHECK_EQ(MachineRepresentation::kTagged, var_value->rep());
8565   Comment("TryGetOwnProperty");
8566 
8567   TVARIABLE(HeapObject, var_meta_storage);
8568   TVARIABLE(IntPtrT, var_entry);
8569 
8570   Label if_found_fast(this), if_found_dict(this), if_found_global(this);
8571 
8572   VARIABLE(local_var_details, MachineRepresentation::kWord32);
8573   if (!var_details) {
8574     var_details = &local_var_details;
8575   }
8576   Label if_found(this);
8577 
8578   TryLookupProperty(object, map, instance_type, unique_name, &if_found_fast,
8579                     &if_found_dict, &if_found_global, &var_meta_storage,
8580                     &var_entry, if_not_found, if_bailout);
8581   BIND(&if_found_fast);
8582   {
8583     TNode<DescriptorArray> descriptors = CAST(var_meta_storage.value());
8584     Node* name_index = var_entry.value();
8585 
8586     LoadPropertyFromFastObject(object, map, descriptors, name_index,
8587                                var_details, var_value);
8588     Goto(&if_found);
8589   }
8590   BIND(&if_found_dict);
8591   {
8592     Node* dictionary = var_meta_storage.value();
8593     Node* entry = var_entry.value();
8594     LoadPropertyFromNameDictionary(dictionary, entry, var_details, var_value);
8595     Goto(&if_found);
8596   }
8597   BIND(&if_found_global);
8598   {
8599     Node* dictionary = var_meta_storage.value();
8600     Node* entry = var_entry.value();
8601 
8602     LoadPropertyFromGlobalDictionary(dictionary, entry, var_details, var_value,
8603                                      if_not_found);
8604     Goto(&if_found);
8605   }
8606   // Here we have details and value which could be an accessor.
8607   BIND(&if_found);
8608   {
8609     // TODO(ishell): Execute C++ accessor in case of accessor info
8610     if (var_raw_value) {
8611       var_raw_value->Bind(var_value->value());
8612     }
8613     Node* value = CallGetterIfAccessor(var_value->value(), var_details->value(),
8614                                        context, receiver, if_bailout, mode);
8615     var_value->Bind(value);
8616     Goto(if_found_value);
8617   }
8618 }
8619 
8620 void CodeStubAssembler::TryLookupElement(Node* object, Node* map,
8621                                          SloppyTNode<Int32T> instance_type,
8622                                          SloppyTNode<IntPtrT> intptr_index,
8623                                          Label* if_found, Label* if_absent,
8624                                          Label* if_not_found,
8625                                          Label* if_bailout) {
8626   // Handle special objects in runtime.
8627   GotoIf(IsSpecialReceiverInstanceType(instance_type), if_bailout);
8628 
8629   Node* elements_kind = LoadMapElementsKind(map);
8630 
8631   // TODO(verwaest): Support other elements kinds as well.
8632   Label if_isobjectorsmi(this), if_isdouble(this), if_isdictionary(this),
8633       if_isfaststringwrapper(this), if_isslowstringwrapper(this), if_oob(this),
8634       if_typedarray(this);
8635   // clang-format off
8636   int32_t values[] = {
8637       // Handled by {if_isobjectorsmi}.
8638       PACKED_SMI_ELEMENTS, HOLEY_SMI_ELEMENTS, PACKED_ELEMENTS,
8639           HOLEY_ELEMENTS,
8640       // Handled by {if_isdouble}.
8641       PACKED_DOUBLE_ELEMENTS, HOLEY_DOUBLE_ELEMENTS,
8642       // Handled by {if_isdictionary}.
8643       DICTIONARY_ELEMENTS,
8644       // Handled by {if_isfaststringwrapper}.
8645       FAST_STRING_WRAPPER_ELEMENTS,
8646       // Handled by {if_isslowstringwrapper}.
8647       SLOW_STRING_WRAPPER_ELEMENTS,
8648       // Handled by {if_not_found}.
8649       NO_ELEMENTS,
8650       // Handled by {if_typedarray}.
8651       UINT8_ELEMENTS,
8652       INT8_ELEMENTS,
8653       UINT16_ELEMENTS,
8654       INT16_ELEMENTS,
8655       UINT32_ELEMENTS,
8656       INT32_ELEMENTS,
8657       FLOAT32_ELEMENTS,
8658       FLOAT64_ELEMENTS,
8659       UINT8_CLAMPED_ELEMENTS,
8660       BIGUINT64_ELEMENTS,
8661       BIGINT64_ELEMENTS,
8662   };
8663   Label* labels[] = {
8664       &if_isobjectorsmi, &if_isobjectorsmi, &if_isobjectorsmi,
8665           &if_isobjectorsmi,
8666       &if_isdouble, &if_isdouble,
8667       &if_isdictionary,
8668       &if_isfaststringwrapper,
8669       &if_isslowstringwrapper,
8670       if_not_found,
8671       &if_typedarray,
8672       &if_typedarray,
8673       &if_typedarray,
8674       &if_typedarray,
8675       &if_typedarray,
8676       &if_typedarray,
8677       &if_typedarray,
8678       &if_typedarray,
8679       &if_typedarray,
8680       &if_typedarray,
8681       &if_typedarray,
8682   };
8683   // clang-format on
8684   STATIC_ASSERT(arraysize(values) == arraysize(labels));
8685   Switch(elements_kind, if_bailout, values, labels, arraysize(values));
8686 
8687   BIND(&if_isobjectorsmi);
8688   {
8689     TNode<FixedArray> elements = CAST(LoadElements(object));
8690     TNode<IntPtrT> length = LoadAndUntagFixedArrayBaseLength(elements);
8691 
8692     GotoIfNot(UintPtrLessThan(intptr_index, length), &if_oob);
8693 
8694     TNode<Object> element = LoadFixedArrayElement(elements, intptr_index);
8695     TNode<Oddball> the_hole = TheHoleConstant();
8696     Branch(WordEqual(element, the_hole), if_not_found, if_found);
8697   }
8698   BIND(&if_isdouble);
8699   {
8700     TNode<FixedArrayBase> elements = LoadElements(object);
8701     TNode<IntPtrT> length = LoadAndUntagFixedArrayBaseLength(elements);
8702 
8703     GotoIfNot(UintPtrLessThan(intptr_index, length), &if_oob);
8704 
8705     // Check if the element is a double hole, but don't load it.
8706     LoadFixedDoubleArrayElement(CAST(elements), intptr_index,
8707                                 MachineType::None(), 0, INTPTR_PARAMETERS,
8708                                 if_not_found);
8709     Goto(if_found);
8710   }
8711   BIND(&if_isdictionary);
8712   {
8713     // Negative keys must be converted to property names.
8714     GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), if_bailout);
8715 
8716     TVARIABLE(IntPtrT, var_entry);
8717     TNode<NumberDictionary> elements = CAST(LoadElements(object));
8718     NumberDictionaryLookup(elements, intptr_index, if_found, &var_entry,
8719                            if_not_found);
8720   }
8721   BIND(&if_isfaststringwrapper);
8722   {
8723     CSA_ASSERT(this, HasInstanceType(object, JS_VALUE_TYPE));
8724     Node* string = LoadJSValueValue(object);
8725     CSA_ASSERT(this, IsString(string));
8726     Node* length = LoadStringLengthAsWord(string);
8727     GotoIf(UintPtrLessThan(intptr_index, length), if_found);
8728     Goto(&if_isobjectorsmi);
8729   }
8730   BIND(&if_isslowstringwrapper);
8731   {
8732     CSA_ASSERT(this, HasInstanceType(object, JS_VALUE_TYPE));
8733     Node* string = LoadJSValueValue(object);
8734     CSA_ASSERT(this, IsString(string));
8735     Node* length = LoadStringLengthAsWord(string);
8736     GotoIf(UintPtrLessThan(intptr_index, length), if_found);
8737     Goto(&if_isdictionary);
8738   }
8739   BIND(&if_typedarray);
8740   {
8741     Node* buffer = LoadObjectField(object, JSArrayBufferView::kBufferOffset);
8742     GotoIf(IsDetachedBuffer(buffer), if_absent);
8743 
8744     Node* length = SmiUntag(LoadTypedArrayLength(CAST(object)));
8745     Branch(UintPtrLessThan(intptr_index, length), if_found, if_absent);
8746   }
8747   BIND(&if_oob);
8748   {
8749     // Positive OOB indices mean "not found", negative indices must be
8750     // converted to property names.
8751     GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), if_bailout);
8752     Goto(if_not_found);
8753   }
8754 }
8755 
8756 void CodeStubAssembler::BranchIfMaybeSpecialIndex(TNode<String> name_string,
8757                                                   Label* if_maybe_special_index,
8758                                                   Label* if_not_special_index) {
8759   // TODO(cwhan.tunz): Implement fast cases more.
8760 
8761   // If a name is empty or too long, it's not a special index
8762   // Max length of canonical double: -X.XXXXXXXXXXXXXXXXX-eXXX
8763   const int kBufferSize = 24;
8764   TNode<Smi> string_length = LoadStringLengthAsSmi(name_string);
8765   GotoIf(SmiEqual(string_length, SmiConstant(0)), if_not_special_index);
8766   GotoIf(SmiGreaterThan(string_length, SmiConstant(kBufferSize)),
8767          if_not_special_index);
8768 
8769   // If the first character of name is not a digit or '-', or we can't match it
8770   // to Infinity or NaN, then this is not a special index.
8771   TNode<Int32T> first_char = StringCharCodeAt(name_string, IntPtrConstant(0));
8772   // If the name starts with '-', it can be a negative index.
8773   GotoIf(Word32Equal(first_char, Int32Constant('-')), if_maybe_special_index);
8774   // If the name starts with 'I', it can be "Infinity".
8775   GotoIf(Word32Equal(first_char, Int32Constant('I')), if_maybe_special_index);
8776   // If the name starts with 'N', it can be "NaN".
8777   GotoIf(Word32Equal(first_char, Int32Constant('N')), if_maybe_special_index);
8778   // Finally, if the first character is not a digit either, then we are sure
8779   // that the name is not a special index.
8780   GotoIf(Uint32LessThan(first_char, Int32Constant('0')), if_not_special_index);
8781   GotoIf(Uint32LessThan(Int32Constant('9'), first_char), if_not_special_index);
8782   Goto(if_maybe_special_index);
8783 }
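// Editorial sketch (not part of the original source): the fast path above only
// looks at the name's length and first character. A minimal scalar model of
// that first-character filter, assuming plain ASCII input (the helper name is
// hypothetical):
//
//   bool MaybeSpecialIndexFirstChar(char c) {
//     // '-' covers negative indices, 'I' covers "Infinity", 'N' covers "NaN",
//     // and digits cover ordinary canonical numbers.
//     return c == '-' || c == 'I' || c == 'N' || (c >= '0' && c <= '9');
//   }
//
// Names such as "NaN", "-1" or "2.5" therefore reach if_maybe_special_index,
// while e.g. "length" is immediately classified as not a special index.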
8784 
8785 void CodeStubAssembler::TryPrototypeChainLookup(
8786     Node* receiver, Node* key, const LookupInHolder& lookup_property_in_holder,
8787     const LookupInHolder& lookup_element_in_holder, Label* if_end,
8788     Label* if_bailout, Label* if_proxy) {
8789   // Ensure receiver is JSReceiver, otherwise bailout.
8790   Label if_objectisnotsmi(this);
8791   Branch(TaggedIsSmi(receiver), if_bailout, &if_objectisnotsmi);
8792   BIND(&if_objectisnotsmi);
8793 
8794   Node* map = LoadMap(receiver);
8795   Node* instance_type = LoadMapInstanceType(map);
8796   {
8797     Label if_objectisreceiver(this);
8798     STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
8799     STATIC_ASSERT(FIRST_JS_RECEIVER_TYPE == JS_PROXY_TYPE);
8800     Branch(IsJSReceiverInstanceType(instance_type), &if_objectisreceiver,
8801            if_bailout);
8802     BIND(&if_objectisreceiver);
8803 
8804     if (if_proxy) {
8805       GotoIf(InstanceTypeEqual(instance_type, JS_PROXY_TYPE), if_proxy);
8806     }
8807   }
8808 
8809   VARIABLE(var_index, MachineType::PointerRepresentation());
8810   VARIABLE(var_unique, MachineRepresentation::kTagged);
8811 
8812   Label if_keyisindex(this), if_iskeyunique(this);
8813   TryToName(key, &if_keyisindex, &var_index, &if_iskeyunique, &var_unique,
8814             if_bailout);
8815 
8816   BIND(&if_iskeyunique);
8817   {
8818     VARIABLE(var_holder, MachineRepresentation::kTagged, receiver);
8819     VARIABLE(var_holder_map, MachineRepresentation::kTagged, map);
8820     VARIABLE(var_holder_instance_type, MachineRepresentation::kWord32,
8821              instance_type);
8822 
8823     Variable* merged_variables[] = {&var_holder, &var_holder_map,
8824                                     &var_holder_instance_type};
8825     Label loop(this, arraysize(merged_variables), merged_variables);
8826     Goto(&loop);
8827     BIND(&loop);
8828     {
8829       Node* holder_map = var_holder_map.value();
8830       Node* holder_instance_type = var_holder_instance_type.value();
8831 
8832       Label next_proto(this), check_integer_indexed_exotic(this);
8833       lookup_property_in_holder(receiver, var_holder.value(), holder_map,
8834                                 holder_instance_type, var_unique.value(),
8835                                 &check_integer_indexed_exotic, if_bailout);
8836 
8837       BIND(&check_integer_indexed_exotic);
8838       {
8839         // Bail out if this could be an integer-indexed exotic case.
8840         GotoIfNot(InstanceTypeEqual(holder_instance_type, JS_TYPED_ARRAY_TYPE),
8841                   &next_proto);
8842         GotoIfNot(IsString(var_unique.value()), &next_proto);
8843         BranchIfMaybeSpecialIndex(CAST(var_unique.value()), if_bailout,
8844                                   &next_proto);
8845       }
8846 
8847       BIND(&next_proto);
8848 
8849       Node* proto = LoadMapPrototype(holder_map);
8850 
8851       GotoIf(IsNull(proto), if_end);
8852 
8853       Node* map = LoadMap(proto);
8854       Node* instance_type = LoadMapInstanceType(map);
8855 
8856       var_holder.Bind(proto);
8857       var_holder_map.Bind(map);
8858       var_holder_instance_type.Bind(instance_type);
8859       Goto(&loop);
8860     }
8861   }
8862   BIND(&if_keyisindex);
8863   {
8864     VARIABLE(var_holder, MachineRepresentation::kTagged, receiver);
8865     VARIABLE(var_holder_map, MachineRepresentation::kTagged, map);
8866     VARIABLE(var_holder_instance_type, MachineRepresentation::kWord32,
8867              instance_type);
8868 
8869     Variable* merged_variables[] = {&var_holder, &var_holder_map,
8870                                     &var_holder_instance_type};
8871     Label loop(this, arraysize(merged_variables), merged_variables);
8872     Goto(&loop);
8873     BIND(&loop);
8874     {
8875       Label next_proto(this);
8876       lookup_element_in_holder(receiver, var_holder.value(),
8877                                var_holder_map.value(),
8878                                var_holder_instance_type.value(),
8879                                var_index.value(), &next_proto, if_bailout);
8880       BIND(&next_proto);
8881 
8882       Node* proto = LoadMapPrototype(var_holder_map.value());
8883 
8884       GotoIf(IsNull(proto), if_end);
8885 
8886       Node* map = LoadMap(proto);
8887       Node* instance_type = LoadMapInstanceType(map);
8888 
8889       var_holder.Bind(proto);
8890       var_holder_map.Bind(map);
8891       var_holder_instance_type.Bind(instance_type);
8892       Goto(&loop);
8893     }
8894   }
8895 }
8896 
8897 Node* CodeStubAssembler::HasInPrototypeChain(Node* context, Node* object,
8898                                              Node* prototype) {
8899   CSA_ASSERT(this, TaggedIsNotSmi(object));
8900   VARIABLE(var_result, MachineRepresentation::kTagged);
8901   Label return_false(this), return_true(this),
8902       return_runtime(this, Label::kDeferred), return_result(this);
8903 
8904   // Loop through the prototype chain looking for the {prototype}.
8905   VARIABLE(var_object_map, MachineRepresentation::kTagged, LoadMap(object));
8906   Label loop(this, &var_object_map);
8907   Goto(&loop);
8908   BIND(&loop);
8909   {
8910     // Check if we can determine the prototype directly from the {object_map}.
8911     Label if_objectisdirect(this), if_objectisspecial(this, Label::kDeferred);
8912     Node* object_map = var_object_map.value();
8913     TNode<Int32T> object_instance_type = LoadMapInstanceType(object_map);
8914     Branch(IsSpecialReceiverInstanceType(object_instance_type),
8915            &if_objectisspecial, &if_objectisdirect);
8916     BIND(&if_objectisspecial);
8917     {
8918       // The {object_map} is a special receiver map or a primitive map, check
8919       // if we need to use the if_objectisspecial path in the runtime.
8920       GotoIf(InstanceTypeEqual(object_instance_type, JS_PROXY_TYPE),
8921              &return_runtime);
8922       Node* object_bitfield = LoadMapBitField(object_map);
8923       int mask = Map::HasNamedInterceptorBit::kMask |
8924                  Map::IsAccessCheckNeededBit::kMask;
8925       Branch(IsSetWord32(object_bitfield, mask), &return_runtime,
8926              &if_objectisdirect);
8927     }
8928     BIND(&if_objectisdirect);
8929 
8930     // Check the current {object} prototype.
8931     Node* object_prototype = LoadMapPrototype(object_map);
8932     GotoIf(IsNull(object_prototype), &return_false);
8933     GotoIf(WordEqual(object_prototype, prototype), &return_true);
8934 
8935     // Continue with the prototype.
8936     CSA_ASSERT(this, TaggedIsNotSmi(object_prototype));
8937     var_object_map.Bind(LoadMap(object_prototype));
8938     Goto(&loop);
8939   }
8940 
8941   BIND(&return_true);
8942   var_result.Bind(TrueConstant());
8943   Goto(&return_result);
8944 
8945   BIND(&return_false);
8946   var_result.Bind(FalseConstant());
8947   Goto(&return_result);
8948 
8949   BIND(&return_runtime);
8950   {
8951     // Fallback to the runtime implementation.
8952     var_result.Bind(
8953         CallRuntime(Runtime::kHasInPrototypeChain, context, object, prototype));
8954   }
8955   Goto(&return_result);
8956 
8957   BIND(&return_result);
8958   return var_result.value();
8959 }
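// Editorial sketch (not part of the original source): ignoring the deferred
// proxy/interceptor/access-check bailouts to the runtime, the loop above is a
// plain prototype-chain walk. Scalar model, where Obj and the map()/prototype()
// accessors are hypothetical stand-ins for the heap object layout used here:
//
//   bool HasInPrototypeChainScalar(Obj* object, Obj* prototype) {
//     for (Obj* p = object->map()->prototype(); p != nullptr;
//          p = p->map()->prototype()) {
//       if (p == prototype) return true;
//     }
//     return false;  // reached the null prototype without finding {prototype}
//   }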
8960 
8961 Node* CodeStubAssembler::OrdinaryHasInstance(Node* context, Node* callable,
8962                                              Node* object) {
8963   VARIABLE(var_result, MachineRepresentation::kTagged);
8964   Label return_runtime(this, Label::kDeferred), return_result(this);
8965 
8966   // Goto runtime if {object} is a Smi.
8967   GotoIf(TaggedIsSmi(object), &return_runtime);
8968 
8969   // Goto runtime if {callable} is a Smi.
8970   GotoIf(TaggedIsSmi(callable), &return_runtime);
8971 
8972   // Load map of {callable}.
8973   Node* callable_map = LoadMap(callable);
8974 
8975   // Goto runtime if {callable} is not a JSFunction.
8976   Node* callable_instance_type = LoadMapInstanceType(callable_map);
8977   GotoIfNot(InstanceTypeEqual(callable_instance_type, JS_FUNCTION_TYPE),
8978             &return_runtime);
8979 
8980   GotoIfPrototypeRequiresRuntimeLookup(CAST(callable), CAST(callable_map),
8981                                        &return_runtime);
8982 
8983   // Get the "prototype" (or initial map) of the {callable}.
8984   Node* callable_prototype =
8985       LoadObjectField(callable, JSFunction::kPrototypeOrInitialMapOffset);
8986   {
8987     Label callable_prototype_valid(this);
8988     VARIABLE(var_callable_prototype, MachineRepresentation::kTagged,
8989              callable_prototype);
8990 
8991     // Resolve the "prototype" if the {callable} has an initial map.  Afterwards
8992     // the {callable_prototype} will be either the JSReceiver prototype object
8993     // or the hole value, which means that no instances of the {callable} were
8994     // created so far and hence we should return false.
8995     Node* callable_prototype_instance_type =
8996         LoadInstanceType(callable_prototype);
8997     GotoIfNot(InstanceTypeEqual(callable_prototype_instance_type, MAP_TYPE),
8998               &callable_prototype_valid);
8999     var_callable_prototype.Bind(
9000         LoadObjectField(callable_prototype, Map::kPrototypeOffset));
9001     Goto(&callable_prototype_valid);
9002     BIND(&callable_prototype_valid);
9003     callable_prototype = var_callable_prototype.value();
9004   }
9005 
9006   // Loop through the prototype chain looking for the {callable} prototype.
9007   var_result.Bind(HasInPrototypeChain(context, object, callable_prototype));
9008   Goto(&return_result);
9009 
9010   BIND(&return_runtime);
9011   {
9012     // Fallback to the runtime implementation.
9013     var_result.Bind(
9014         CallRuntime(Runtime::kOrdinaryHasInstance, context, callable, object));
9015   }
9016   Goto(&return_result);
9017 
9018   BIND(&return_result);
9019   return var_result.value();
9020 }
9021 
9022 TNode<IntPtrT> CodeStubAssembler::ElementOffsetFromIndex(Node* index_node,
9023                                                          ElementsKind kind,
9024                                                          ParameterMode mode,
9025                                                          int base_size) {
9026   CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, mode));
9027   int element_size_shift = ElementsKindToShiftSize(kind);
9028   int element_size = 1 << element_size_shift;
9029   int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
9030   intptr_t index = 0;
9031   bool constant_index = false;
9032   if (mode == SMI_PARAMETERS) {
9033     element_size_shift -= kSmiShiftBits;
9034     Smi* smi_index;
9035     constant_index = ToSmiConstant(index_node, smi_index);
9036     if (constant_index) index = smi_index->value();
9037     index_node = BitcastTaggedToWord(index_node);
9038   } else {
9039     DCHECK(mode == INTPTR_PARAMETERS);
9040     constant_index = ToIntPtrConstant(index_node, index);
9041   }
9042   if (constant_index) {
9043     return IntPtrConstant(base_size + element_size * index);
9044   }
9045 
9046   TNode<WordT> shifted_index =
9047       (element_size_shift == 0)
9048           ? UncheckedCast<WordT>(index_node)
9049           : ((element_size_shift > 0)
9050                  ? WordShl(index_node, IntPtrConstant(element_size_shift))
9051                  : WordSar(index_node, IntPtrConstant(-element_size_shift)));
9052   return IntPtrAdd(IntPtrConstant(base_size), Signed(shifted_index));
9053 }
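// Editorial sketch (not part of the original source): for INTPTR_PARAMETERS
// the computation above reduces to a shift-and-add, assuming the element size
// is 1 << ElementsKindToShiftSize(kind):
//
//   intptr_t ElementOffsetScalar(intptr_t index, int shift, int base_size) {
//     return base_size + (index << shift);
//   }
//
// e.g. with base_size == FixedArray::kHeaderSize and shift == kPointerSizeLog2,
// element 2 lives at kHeaderSize + 2 * kPointerSize. For SMI_PARAMETERS the Smi
// tag shift is folded into element_size_shift instead of untagging the index
// first, which is why the shift can become negative.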
9054 
9055 TNode<BoolT> CodeStubAssembler::IsOffsetInBounds(SloppyTNode<IntPtrT> offset,
9056                                                  SloppyTNode<IntPtrT> length,
9057                                                  int header_size,
9058                                                  ElementsKind kind) {
9059   // Make sure we point to the last field.
9060   int element_size = 1 << ElementsKindToShiftSize(kind);
9061   int correction = header_size - kHeapObjectTag - element_size;
9062   TNode<IntPtrT> last_offset =
9063       ElementOffsetFromIndex(length, kind, INTPTR_PARAMETERS, correction);
9064   return IntPtrLessThanOrEqual(offset, last_offset);
9065 }
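// Editorial sketch (not part of the original source): the check above compares
// against the (untagged) offset of the last element. Scalar model:
//
//   bool IsOffsetInBoundsScalar(intptr_t offset, intptr_t length,
//                               int header_size, int element_size) {
//     intptr_t last_offset =
//         header_size - kHeapObjectTag + (length - 1) * element_size;
//     return offset <= last_offset;
//   }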
9066 
9067 TNode<FeedbackVector> CodeStubAssembler::LoadFeedbackVector(
9068     SloppyTNode<JSFunction> closure, Label* if_undefined) {
9069   TNode<FeedbackCell> feedback_cell =
9070       CAST(LoadObjectField(closure, JSFunction::kFeedbackCellOffset));
9071   TNode<Object> maybe_vector =
9072       LoadObjectField(feedback_cell, FeedbackCell::kValueOffset);
9073   if (if_undefined) {
9074     GotoIf(IsUndefined(maybe_vector), if_undefined);
9075   }
9076   return CAST(maybe_vector);
9077 }
9078 
9079 TNode<FeedbackVector> CodeStubAssembler::LoadFeedbackVectorForStub() {
9080   TNode<JSFunction> function =
9081       CAST(LoadFromParentFrame(JavaScriptFrameConstants::kFunctionOffset));
9082   return LoadFeedbackVector(function);
9083 }
9084 
9085 void CodeStubAssembler::UpdateFeedback(Node* feedback, Node* feedback_vector,
9086                                        Node* slot_id) {
9087   // This method is used for binary op and compare feedback. These
9088   // vector nodes are initialized with a smi 0, so we can simply OR
9089   // our new feedback in place.
9090   TNode<MaybeObject> feedback_element =
9091       LoadFeedbackVectorSlot(feedback_vector, slot_id);
9092   TNode<Smi> previous_feedback = CAST(feedback_element);
9093   TNode<Smi> combined_feedback = SmiOr(previous_feedback, CAST(feedback));
9094   Label end(this);
9095 
9096   GotoIf(SmiEqual(previous_feedback, combined_feedback), &end);
9097   {
9098     StoreFeedbackVectorSlot(feedback_vector, slot_id, combined_feedback,
9099                             SKIP_WRITE_BARRIER);
9100     ReportFeedbackUpdate(feedback_vector, slot_id, "UpdateFeedback");
9101     Goto(&end);
9102   }
9103 
9104   BIND(&end);
9105 }
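// Editorial note (not part of the original source): feedback values are small
// Smi bit sets, so the accumulation above is a monotone bitwise OR and the slot
// is only rewritten when new bits actually appear. Scalar sketch with
// hypothetical bit values:
//
//   int CombineFeedbackScalar(int previous, int feedback) {
//     return previous | feedback;  // never clears previously recorded bits
//   }
//
//   // e.g. CombineFeedbackScalar(0b001, 0b010) == 0b011, while
//   // CombineFeedbackScalar(0b011, 0b001) == 0b011 and the store is skipped.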
9106 
9107 void CodeStubAssembler::ReportFeedbackUpdate(
9108     SloppyTNode<FeedbackVector> feedback_vector, SloppyTNode<IntPtrT> slot_id,
9109     const char* reason) {
9110   // Reset profiler ticks.
9111   StoreObjectFieldNoWriteBarrier(
9112       feedback_vector, FeedbackVector::kProfilerTicksOffset, Int32Constant(0),
9113       MachineRepresentation::kWord32);
9114 
9115 #ifdef V8_TRACE_FEEDBACK_UPDATES
9116   // Trace the update.
9117   CallRuntime(Runtime::kInterpreterTraceUpdateFeedback, NoContextConstant(),
9118               LoadFromParentFrame(JavaScriptFrameConstants::kFunctionOffset),
9119               SmiTag(slot_id), StringConstant(reason));
9120 #endif  // V8_TRACE_FEEDBACK_UPDATES
9121 }
9122 
9123 void CodeStubAssembler::OverwriteFeedback(Variable* existing_feedback,
9124                                           int new_feedback) {
9125   if (existing_feedback == nullptr) return;
9126   existing_feedback->Bind(SmiConstant(new_feedback));
9127 }
9128 
9129 void CodeStubAssembler::CombineFeedback(Variable* existing_feedback,
9130                                         int feedback) {
9131   if (existing_feedback == nullptr) return;
9132   existing_feedback->Bind(
9133       SmiOr(CAST(existing_feedback->value()), SmiConstant(feedback)));
9134 }
9135 
9136 void CodeStubAssembler::CombineFeedback(Variable* existing_feedback,
9137                                         Node* feedback) {
9138   if (existing_feedback == nullptr) return;
9139   existing_feedback->Bind(
9140       SmiOr(CAST(existing_feedback->value()), CAST(feedback)));
9141 }
9142 
9143 void CodeStubAssembler::CheckForAssociatedProtector(Node* name,
9144                                                     Label* if_protector) {
9145   // This list must be kept in sync with LookupIterator::UpdateProtector!
9146   // TODO(jkummerow): Would it be faster to have a bit in Symbol::flags()?
9147   GotoIf(WordEqual(name, LoadRoot(Heap::kconstructor_stringRootIndex)),
9148          if_protector);
9149   GotoIf(WordEqual(name, LoadRoot(Heap::kiterator_symbolRootIndex)),
9150          if_protector);
9151   GotoIf(WordEqual(name, LoadRoot(Heap::knext_stringRootIndex)), if_protector);
9152   GotoIf(WordEqual(name, LoadRoot(Heap::kspecies_symbolRootIndex)),
9153          if_protector);
9154   GotoIf(WordEqual(name, LoadRoot(Heap::kis_concat_spreadable_symbolRootIndex)),
9155          if_protector);
9156   GotoIf(WordEqual(name, LoadRoot(Heap::kresolve_stringRootIndex)),
9157          if_protector);
9158   GotoIf(WordEqual(name, LoadRoot(Heap::kthen_stringRootIndex)), if_protector);
9159   // Fall through if no case matched.
9160 }
9161 
9162 TNode<Map> CodeStubAssembler::LoadReceiverMap(SloppyTNode<Object> receiver) {
9163   return Select<Map>(
9164       TaggedIsSmi(receiver),
9165       [=] { return CAST(LoadRoot(Heap::kHeapNumberMapRootIndex)); },
9166       [=] { return LoadMap(UncheckedCast<HeapObject>(receiver)); });
9167 }
9168 
9169 TNode<IntPtrT> CodeStubAssembler::TryToIntptr(Node* key, Label* miss) {
9170   TVARIABLE(IntPtrT, var_intptr_key);
9171   Label done(this, &var_intptr_key), key_is_smi(this);
9172   GotoIf(TaggedIsSmi(key), &key_is_smi);
9173   // Try to convert a heap number to a Smi.
9174   GotoIfNot(IsHeapNumber(key), miss);
9175   {
9176     TNode<Float64T> value = LoadHeapNumberValue(key);
9177     TNode<Int32T> int_value = RoundFloat64ToInt32(value);
9178     GotoIfNot(Float64Equal(value, ChangeInt32ToFloat64(int_value)), miss);
9179     var_intptr_key = ChangeInt32ToIntPtr(int_value);
9180     Goto(&done);
9181   }
9182 
9183   BIND(&key_is_smi);
9184   {
9185     var_intptr_key = SmiUntag(key);
9186     Goto(&done);
9187   }
9188 
9189   BIND(&done);
9190   return var_intptr_key.value();
9191 }
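// Editorial sketch (not part of the original source): the HeapNumber path
// above only succeeds when the double is exactly representable as an int32.
// A scalar approximation (the CSA code uses RoundFloat64ToInt32 plus an exact
// float comparison, which rejects the same inputs):
//
//   bool TryToIntptrScalar(double value, intptr_t* out) {
//     if (!(value >= INT32_MIN && value <= INT32_MAX)) return false;  // + NaN
//     int32_t as_int = static_cast<int32_t>(value);
//     if (static_cast<double>(as_int) != value) return false;  // fractional
//     *out = as_int;
//     return true;
//   }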
9192 
9193 Node* CodeStubAssembler::EmitKeyedSloppyArguments(Node* receiver, Node* key,
9194                                                   Node* value, Label* bailout) {
9195   // Mapped arguments are actual arguments. Unmapped arguments are values added
9196   // to the arguments object after it was created for the call. Mapped arguments
9197   // are stored in the context at indexes given by elements[key + 2]. Unmapped
9198   // arguments are stored as regular indexed properties in the arguments array,
9199   // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed
9200   // look at argument object construction.
9201   //
9202   // The sloppy arguments elements array has a special format:
9203   //
9204   // 0: context
9205   // 1: unmapped arguments array
9206   // 2: mapped_index0,
9207   // 3: mapped_index1,
9208   // ...
9209   //
9210   // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
9211   // If key + 2 >= elements.length then attempt to look in the unmapped
9212   // arguments array (given by elements[1]) and return the value at key, missing
9213   // to the runtime if the unmapped arguments array is not a fixed array or if
9214   // key >= unmapped_arguments_array.length.
9215   //
9216   // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
9217   // in the unmapped arguments array, as described above. Otherwise, t is a Smi
9218   // index into the context array given at elements[0]. Return the value at
9219   // context[t].
9220 
9221   bool is_load = value == nullptr;
9222 
9223   GotoIfNot(TaggedIsSmi(key), bailout);
9224   key = SmiUntag(key);
9225   GotoIf(IntPtrLessThan(key, IntPtrConstant(0)), bailout);
9226 
9227   TNode<FixedArray> elements = CAST(LoadElements(receiver));
9228   TNode<IntPtrT> elements_length = LoadAndUntagFixedArrayBaseLength(elements);
9229 
9230   VARIABLE(var_result, MachineRepresentation::kTagged);
9231   if (!is_load) {
9232     var_result.Bind(value);
9233   }
9234   Label if_mapped(this), if_unmapped(this), end(this, &var_result);
9235   Node* intptr_two = IntPtrConstant(2);
9236   Node* adjusted_length = IntPtrSub(elements_length, intptr_two);
9237 
9238   GotoIf(UintPtrGreaterThanOrEqual(key, adjusted_length), &if_unmapped);
9239 
9240   TNode<Object> mapped_index =
9241       LoadFixedArrayElement(elements, IntPtrAdd(key, intptr_two));
9242   Branch(WordEqual(mapped_index, TheHoleConstant()), &if_unmapped, &if_mapped);
9243 
9244   BIND(&if_mapped);
9245   {
9246     TNode<IntPtrT> mapped_index_intptr = SmiUntag(CAST(mapped_index));
9247     TNode<Context> the_context = CAST(LoadFixedArrayElement(elements, 0));
9248     // Assert that we can use LoadFixedArrayElement/StoreFixedArrayElement
9249     // methods for accessing Context.
9250     STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize);
9251     DCHECK_EQ(Context::SlotOffset(0) + kHeapObjectTag,
9252               FixedArray::OffsetOfElementAt(0));
9253     if (is_load) {
9254       Node* result = LoadFixedArrayElement(the_context, mapped_index_intptr);
9255       CSA_ASSERT(this, WordNotEqual(result, TheHoleConstant()));
9256       var_result.Bind(result);
9257     } else {
9258       StoreFixedArrayElement(the_context, mapped_index_intptr, value);
9259     }
9260     Goto(&end);
9261   }
9262 
9263   BIND(&if_unmapped);
9264   {
9265     TNode<HeapObject> backing_store_ho =
9266         CAST(LoadFixedArrayElement(elements, 1));
9267     GotoIf(WordNotEqual(LoadMap(backing_store_ho), FixedArrayMapConstant()),
9268            bailout);
9269     TNode<FixedArray> backing_store = CAST(backing_store_ho);
9270 
9271     TNode<IntPtrT> backing_store_length =
9272         LoadAndUntagFixedArrayBaseLength(backing_store);
9273     GotoIf(UintPtrGreaterThanOrEqual(key, backing_store_length), bailout);
9274 
9275     // The key falls into unmapped range.
9276     if (is_load) {
9277       Node* result = LoadFixedArrayElement(backing_store, key);
9278       GotoIf(WordEqual(result, TheHoleConstant()), bailout);
9279       var_result.Bind(result);
9280     } else {
9281       StoreFixedArrayElement(backing_store, key, value);
9282     }
9283     Goto(&end);
9284   }
9285 
9286   BIND(&end);
9287   return var_result.value();
9288 }
9289 
9290 TNode<Context> CodeStubAssembler::LoadScriptContext(
9291     TNode<Context> context, TNode<IntPtrT> context_index) {
9292   TNode<Context> native_context = LoadNativeContext(context);
9293   TNode<ScriptContextTable> script_context_table = CAST(
9294       LoadContextElement(native_context, Context::SCRIPT_CONTEXT_TABLE_INDEX));
9295 
9296   TNode<Context> script_context = CAST(LoadFixedArrayElement(
9297       script_context_table, context_index,
9298       ScriptContextTable::kFirstContextSlotIndex * kPointerSize));
9299   return script_context;
9300 }
9301 
9302 namespace {
9303 
9304 // Converts a typed array elements kind to a machine representation.
9305 MachineRepresentation ElementsKindToMachineRepresentation(ElementsKind kind) {
9306   switch (kind) {
9307     case UINT8_CLAMPED_ELEMENTS:
9308     case UINT8_ELEMENTS:
9309     case INT8_ELEMENTS:
9310       return MachineRepresentation::kWord8;
9311     case UINT16_ELEMENTS:
9312     case INT16_ELEMENTS:
9313       return MachineRepresentation::kWord16;
9314     case UINT32_ELEMENTS:
9315     case INT32_ELEMENTS:
9316       return MachineRepresentation::kWord32;
9317     case FLOAT32_ELEMENTS:
9318       return MachineRepresentation::kFloat32;
9319     case FLOAT64_ELEMENTS:
9320       return MachineRepresentation::kFloat64;
9321     default:
9322       UNREACHABLE();
9323   }
9324 }
9325 
9326 }  // namespace
9327 
9328 void CodeStubAssembler::StoreElement(Node* elements, ElementsKind kind,
9329                                      Node* index, Node* value,
9330                                      ParameterMode mode) {
9331   if (IsFixedTypedArrayElementsKind(kind)) {
9332     if (kind == UINT8_CLAMPED_ELEMENTS) {
9333       CSA_ASSERT(this,
9334                  Word32Equal(value, Word32And(Int32Constant(0xFF), value)));
9335     }
9336     Node* offset = ElementOffsetFromIndex(index, kind, mode, 0);
9337     // TODO(cbruni): Add OOB check once typed.
9338     MachineRepresentation rep = ElementsKindToMachineRepresentation(kind);
9339     StoreNoWriteBarrier(rep, elements, offset, value);
9340     return;
9341   } else if (IsDoubleElementsKind(kind)) {
9342     // Make sure we do not store signalling NaNs into double arrays.
9343     TNode<Float64T> value_silenced = Float64SilenceNaN(value);
9344     StoreFixedDoubleArrayElement(CAST(elements), index, value_silenced, mode);
9345   } else {
9346     WriteBarrierMode barrier_mode =
9347         IsSmiElementsKind(kind) ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER;
9348     StoreFixedArrayElement(CAST(elements), index, value, barrier_mode, 0, mode);
9349   }
9350 }
9351 
9352 Node* CodeStubAssembler::Int32ToUint8Clamped(Node* int32_value) {
9353   Label done(this);
9354   Node* int32_zero = Int32Constant(0);
9355   Node* int32_255 = Int32Constant(255);
9356   VARIABLE(var_value, MachineRepresentation::kWord32, int32_value);
9357   GotoIf(Uint32LessThanOrEqual(int32_value, int32_255), &done);
9358   var_value.Bind(int32_zero);
9359   GotoIf(Int32LessThan(int32_value, int32_zero), &done);
9360   var_value.Bind(int32_255);
9361   Goto(&done);
9362   BIND(&done);
9363   return var_value.value();
9364 }
9365 
9366 Node* CodeStubAssembler::Float64ToUint8Clamped(Node* float64_value) {
9367   Label done(this);
9368   VARIABLE(var_value, MachineRepresentation::kWord32, Int32Constant(0));
9369   GotoIf(Float64LessThanOrEqual(float64_value, Float64Constant(0.0)), &done);
9370   var_value.Bind(Int32Constant(255));
9371   GotoIf(Float64LessThanOrEqual(Float64Constant(255.0), float64_value), &done);
9372   {
9373     Node* rounded_value = Float64RoundToEven(float64_value);
9374     var_value.Bind(TruncateFloat64ToWord32(rounded_value));
9375     Goto(&done);
9376   }
9377   BIND(&done);
9378   return var_value.value();
9379 }
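// Editorial sketch (not part of the original source): scalar models of the two
// Uint8Clamped conversions above (negative values clamp to 0, values above 255
// clamp to 255, doubles round to nearest with ties to even; NaN yields 0).
// <cmath> is assumed for std::isnan / std::nearbyint:
//
//   uint8_t Int32ToUint8ClampedScalar(int32_t v) {
//     if (v < 0) return 0;
//     if (v > 255) return 255;
//     return static_cast<uint8_t>(v);
//   }
//
//   uint8_t Float64ToUint8ClampedScalar(double v) {
//     if (std::isnan(v) || v <= 0.0) return 0;
//     if (v >= 255.0) return 255;
//     return static_cast<uint8_t>(std::nearbyint(v));  // ties-to-even rounding
//   }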
9380 
9381 Node* CodeStubAssembler::PrepareValueForWriteToTypedArray(
9382     TNode<Object> input, ElementsKind elements_kind, TNode<Context> context) {
9383   DCHECK(IsFixedTypedArrayElementsKind(elements_kind));
9384 
9385   MachineRepresentation rep;
9386   switch (elements_kind) {
9387     case UINT8_ELEMENTS:
9388     case INT8_ELEMENTS:
9389     case UINT16_ELEMENTS:
9390     case INT16_ELEMENTS:
9391     case UINT32_ELEMENTS:
9392     case INT32_ELEMENTS:
9393     case UINT8_CLAMPED_ELEMENTS:
9394       rep = MachineRepresentation::kWord32;
9395       break;
9396     case FLOAT32_ELEMENTS:
9397       rep = MachineRepresentation::kFloat32;
9398       break;
9399     case FLOAT64_ELEMENTS:
9400       rep = MachineRepresentation::kFloat64;
9401       break;
9402     case BIGINT64_ELEMENTS:
9403     case BIGUINT64_ELEMENTS:
9404       return ToBigInt(context, input);
9405     default:
9406       UNREACHABLE();
9407   }
9408 
9409   VARIABLE(var_result, rep);
9410   VARIABLE(var_input, MachineRepresentation::kTagged, input);
9411   Label done(this, &var_result), if_smi(this), if_heapnumber_or_oddball(this),
9412       convert(this), loop(this, &var_input);
9413   Goto(&loop);
9414   BIND(&loop);
9415   GotoIf(TaggedIsSmi(var_input.value()), &if_smi);
9416   // We can handle both HeapNumber and Oddball here, since Oddball has the
9417   // same layout as the HeapNumber for the HeapNumber::value field. This
9418   // way we can also properly optimize stores of oddballs to typed arrays.
9419   GotoIf(IsHeapNumber(var_input.value()), &if_heapnumber_or_oddball);
9420   STATIC_ASSERT(HeapNumber::kValueOffset == Oddball::kToNumberRawOffset);
9421   Branch(HasInstanceType(var_input.value(), ODDBALL_TYPE),
9422          &if_heapnumber_or_oddball, &convert);
9423 
9424   BIND(&if_heapnumber_or_oddball);
9425   {
9426     Node* value = UncheckedCast<Float64T>(LoadObjectField(
9427         var_input.value(), HeapNumber::kValueOffset, MachineType::Float64()));
9428     if (rep == MachineRepresentation::kWord32) {
9429       if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
9430         value = Float64ToUint8Clamped(value);
9431       } else {
9432         value = TruncateFloat64ToWord32(value);
9433       }
9434     } else if (rep == MachineRepresentation::kFloat32) {
9435       value = TruncateFloat64ToFloat32(value);
9436     } else {
9437       DCHECK_EQ(MachineRepresentation::kFloat64, rep);
9438     }
9439     var_result.Bind(value);
9440     Goto(&done);
9441   }
9442 
9443   BIND(&if_smi);
9444   {
9445     Node* value = SmiToInt32(var_input.value());
9446     if (rep == MachineRepresentation::kFloat32) {
9447       value = RoundInt32ToFloat32(value);
9448     } else if (rep == MachineRepresentation::kFloat64) {
9449       value = ChangeInt32ToFloat64(value);
9450     } else {
9451       DCHECK_EQ(MachineRepresentation::kWord32, rep);
9452       if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
9453         value = Int32ToUint8Clamped(value);
9454       }
9455     }
9456     var_result.Bind(value);
9457     Goto(&done);
9458   }
9459 
9460   BIND(&convert);
9461   {
9462     var_input.Bind(CallBuiltin(Builtins::kNonNumberToNumber, context, input));
9463     Goto(&loop);
9464   }
9465 
9466   BIND(&done);
9467   return var_result.value();
9468 }
9469 
9470 void CodeStubAssembler::EmitBigTypedArrayElementStore(
9471     TNode<JSTypedArray> object, TNode<FixedTypedArrayBase> elements,
9472     TNode<IntPtrT> intptr_key, TNode<Object> value, TNode<Context> context,
9473     Label* opt_if_neutered) {
9474   TNode<BigInt> bigint_value = ToBigInt(context, value);
9475 
9476   if (opt_if_neutered != nullptr) {
9477     // Check if buffer has been neutered. Must happen after {ToBigInt}!
9478     Node* buffer = LoadObjectField(object, JSArrayBufferView::kBufferOffset);
9479     GotoIf(IsDetachedBuffer(buffer), opt_if_neutered);
9480   }
9481 
9482   TNode<RawPtrT> backing_store = LoadFixedTypedArrayBackingStore(elements);
9483   TNode<IntPtrT> offset = ElementOffsetFromIndex(intptr_key, BIGINT64_ELEMENTS,
9484                                                  INTPTR_PARAMETERS, 0);
9485   EmitBigTypedArrayElementStore(elements, backing_store, offset, bigint_value);
9486 }
9487 
9488 void CodeStubAssembler::EmitBigTypedArrayElementStore(
9489     TNode<FixedTypedArrayBase> elements, TNode<RawPtrT> backing_store,
9490     TNode<IntPtrT> offset, TNode<BigInt> bigint_value) {
9491   TNode<WordT> bitfield = LoadBigIntBitfield(bigint_value);
9492   TNode<UintPtrT> length = DecodeWord<BigIntBase::LengthBits>(bitfield);
9493   TNode<UintPtrT> sign = DecodeWord<BigIntBase::SignBits>(bitfield);
9494   TVARIABLE(UintPtrT, var_low, Unsigned(IntPtrConstant(0)));
9495   // Only used on 32-bit platforms.
9496   TVARIABLE(UintPtrT, var_high, Unsigned(IntPtrConstant(0)));
9497   Label do_store(this);
9498   GotoIf(WordEqual(length, IntPtrConstant(0)), &do_store);
9499   var_low = LoadBigIntDigit(bigint_value, 0);
9500   if (!Is64()) {
9501     Label load_done(this);
9502     GotoIf(WordEqual(length, IntPtrConstant(1)), &load_done);
9503     var_high = LoadBigIntDigit(bigint_value, 1);
9504     Goto(&load_done);
9505     BIND(&load_done);
9506   }
9507   GotoIf(WordEqual(sign, IntPtrConstant(0)), &do_store);
9508   // Negative value. Simulate two's complement.
9509   if (!Is64()) {
9510     var_high = Unsigned(IntPtrSub(IntPtrConstant(0), var_high.value()));
9511     Label no_carry(this);
9512     GotoIf(WordEqual(var_low.value(), IntPtrConstant(0)), &no_carry);
9513     var_high = Unsigned(IntPtrSub(var_high.value(), IntPtrConstant(1)));
9514     Goto(&no_carry);
9515     BIND(&no_carry);
9516   }
9517   var_low = Unsigned(IntPtrSub(IntPtrConstant(0), var_low.value()));
9518   Goto(&do_store);
9519   BIND(&do_store);
9520 
9521   // Assert that offset < elements.length. Given that it's an offset for a raw
9522   // pointer we correct it by the usual kHeapObjectTag offset.
9523   CSA_ASSERT(
9524       this, IsOffsetInBounds(offset, LoadAndUntagFixedArrayBaseLength(elements),
9525                              kHeapObjectTag, BIGINT64_ELEMENTS));
9526 
9527   MachineRepresentation rep = WordT::kMachineRepresentation;
9528 #if defined(V8_TARGET_BIG_ENDIAN)
9529   if (!Is64()) {
9530     StoreNoWriteBarrier(rep, backing_store, offset, var_high.value());
9531     StoreNoWriteBarrier(rep, backing_store,
9532                         IntPtrAdd(offset, IntPtrConstant(kPointerSize)),
9533                         var_low.value());
9534   } else {
9535     StoreNoWriteBarrier(rep, backing_store, offset, var_low.value());
9536   }
9537 #else
9538   StoreNoWriteBarrier(rep, backing_store, offset, var_low.value());
9539   if (!Is64()) {
9540     StoreNoWriteBarrier(rep, backing_store,
9541                         IntPtrAdd(offset, IntPtrConstant(kPointerSize)),
9542                         var_high.value());
9543   }
9544 #endif
9545 }
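// Editorial sketch (not part of the original source): the "simulate two's
// complement" sequence above negates a (high:low) digit pair in place. Scalar
// model for the 32-bit case (the helper name is hypothetical):
//
//   void NegateDigitPair(uint32_t* low, uint32_t* high) {
//     *high = 0u - *high;
//     if (*low != 0) *high -= 1;  // borrow propagates into the high digit
//     *low = 0u - *low;
//   }
//
// e.g. a magnitude of 1 (high == 0, low == 1) becomes 0xFFFFFFFF:0xFFFFFFFF,
// i.e. the 64-bit two's complement encoding of -1.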
9546 
9547 void CodeStubAssembler::EmitElementStore(Node* object, Node* key, Node* value,
9548                                          bool is_jsarray,
9549                                          ElementsKind elements_kind,
9550                                          KeyedAccessStoreMode store_mode,
9551                                          Label* bailout, Node* context) {
9552   CSA_ASSERT(this, Word32BinaryNot(IsJSProxy(object)));
9553 
9554   Node* elements = LoadElements(object);
9555   if (!IsSmiOrObjectElementsKind(elements_kind)) {
9556     CSA_ASSERT(this, Word32BinaryNot(IsFixedCOWArrayMap(LoadMap(elements))));
9557   } else if (!IsCOWHandlingStoreMode(store_mode)) {
9558     GotoIf(IsFixedCOWArrayMap(LoadMap(elements)), bailout);
9559   }
9560 
9561   // TODO(ishell): introduce TryToIntPtrOrSmi() and use OptimalParameterMode().
9562   ParameterMode parameter_mode = INTPTR_PARAMETERS;
9563   TNode<IntPtrT> intptr_key = TryToIntptr(key, bailout);
9564 
9565   if (IsFixedTypedArrayElementsKind(elements_kind)) {
9566     Label done(this);
9567 
9568     // IntegerIndexedElementSet converts value to a Number/BigInt prior to the
9569     // bounds check.
9570     value = PrepareValueForWriteToTypedArray(CAST(value), elements_kind,
9571                                              CAST(context));
9572 
9573     // There must be no allocations between the buffer load and
9574     // the actual store to the backing store, because GC may decide that
9575     // the buffer is not alive or move the elements.
9576     // TODO(ishell): introduce DisallowHeapAllocationCode scope here.
9577 
9578     // Check if buffer has been neutered.
9579     Node* buffer = LoadObjectField(object, JSArrayBufferView::kBufferOffset);
9580     GotoIf(IsDetachedBuffer(buffer), bailout);
9581 
9582     // Bounds check.
9583     Node* length =
9584         TaggedToParameter(LoadTypedArrayLength(CAST(object)), parameter_mode);
9585 
9586     if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
9587       // Skip the store if we write beyond the length or
9588       // to a property with a negative integer index.
9589       GotoIfNot(UintPtrLessThan(intptr_key, length), &done);
9590     } else {
9591       DCHECK_EQ(STANDARD_STORE, store_mode);
9592       GotoIfNot(UintPtrLessThan(intptr_key, length), bailout);
9593     }
9594 
9595     if (elements_kind == BIGINT64_ELEMENTS ||
9596         elements_kind == BIGUINT64_ELEMENTS) {
9597       TNode<BigInt> bigint_value = UncheckedCast<BigInt>(value);
9598 
9599       TNode<RawPtrT> backing_store =
9600           LoadFixedTypedArrayBackingStore(CAST(elements));
9601       TNode<IntPtrT> offset = ElementOffsetFromIndex(
9602           intptr_key, BIGINT64_ELEMENTS, INTPTR_PARAMETERS, 0);
9603       EmitBigTypedArrayElementStore(CAST(elements), backing_store, offset,
9604                                     bigint_value);
9605     } else {
9606       Node* backing_store = LoadFixedTypedArrayBackingStore(CAST(elements));
9607       StoreElement(backing_store, elements_kind, intptr_key, value,
9608                    parameter_mode);
9609     }
9610     Goto(&done);
9611 
9612     BIND(&done);
9613     return;
9614   }
9615   DCHECK(IsSmiOrObjectElementsKind(elements_kind) ||
9616          IsDoubleElementsKind(elements_kind));
9617 
9618   Node* length = is_jsarray ? LoadJSArrayLength(object)
9619                             : LoadFixedArrayBaseLength(elements);
9620   length = TaggedToParameter(length, parameter_mode);
9621 
9622   // In case value is stored into a fast smi array, ensure that the value is
9623   // a smi before manipulating the backing store. Otherwise the backing store
9624   // may be left in an invalid state.
9625   if (IsSmiElementsKind(elements_kind)) {
9626     GotoIfNot(TaggedIsSmi(value), bailout);
9627   } else if (IsDoubleElementsKind(elements_kind)) {
9628     value = TryTaggedToFloat64(value, bailout);
9629   }
9630 
9631   if (IsGrowStoreMode(store_mode)) {
9632     elements = CheckForCapacityGrow(object, elements, elements_kind, store_mode,
9633                                     length, intptr_key, parameter_mode,
9634                                     is_jsarray, bailout);
9635   } else {
9636     GotoIfNot(UintPtrLessThan(intptr_key, length), bailout);
9637   }
9638 
9639   // If we didn't grow {elements}, it might still be COW, in which case we
9640   // copy it now.
9641   if (!IsSmiOrObjectElementsKind(elements_kind)) {
9642     CSA_ASSERT(this, Word32BinaryNot(IsFixedCOWArrayMap(LoadMap(elements))));
9643   } else if (IsCOWHandlingStoreMode(store_mode)) {
9644     elements = CopyElementsOnWrite(object, elements, elements_kind, length,
9645                                    parameter_mode, bailout);
9646   }
9647 
9648   CSA_ASSERT(this, Word32BinaryNot(IsFixedCOWArrayMap(LoadMap(elements))));
9649   StoreElement(elements, elements_kind, intptr_key, value, parameter_mode);
9650 }
9651 
9652 Node* CodeStubAssembler::CheckForCapacityGrow(
9653     Node* object, Node* elements, ElementsKind kind,
9654     KeyedAccessStoreMode store_mode, Node* length, Node* key,
9655     ParameterMode mode, bool is_js_array, Label* bailout) {
9656   DCHECK(IsFastElementsKind(kind));
9657   VARIABLE(checked_elements, MachineRepresentation::kTagged);
9658   Label grow_case(this), no_grow_case(this), done(this),
9659       grow_bailout(this, Label::kDeferred);
9660 
9661   Node* condition;
9662   if (IsHoleyElementsKind(kind)) {
9663     condition = UintPtrGreaterThanOrEqual(key, length);
9664   } else {
9665     // We don't support growing here unless the value is being appended.
9666     condition = WordEqual(key, length);
9667   }
9668   Branch(condition, &grow_case, &no_grow_case);
9669 
9670   BIND(&grow_case);
9671   {
9672     Node* current_capacity =
9673         TaggedToParameter(LoadFixedArrayBaseLength(elements), mode);
9674     checked_elements.Bind(elements);
9675     Label fits_capacity(this);
9676     // If key is negative, we will notice in Runtime::kGrowArrayElements.
9677     GotoIf(UintPtrLessThan(key, current_capacity), &fits_capacity);
9678 
9679     {
9680       Node* new_elements = TryGrowElementsCapacity(
9681           object, elements, kind, key, current_capacity, mode, &grow_bailout);
9682       checked_elements.Bind(new_elements);
9683       Goto(&fits_capacity);
9684     }
9685 
9686     BIND(&grow_bailout);
9687     {
9688       Node* tagged_key = mode == SMI_PARAMETERS
9689                              ? key
9690                              : ChangeInt32ToTagged(TruncateIntPtrToInt32(key));
9691       Node* maybe_elements = CallRuntime(
9692           Runtime::kGrowArrayElements, NoContextConstant(), object, tagged_key);
9693       GotoIf(TaggedIsSmi(maybe_elements), bailout);
9694       CSA_ASSERT(this, IsFixedArrayWithKind(maybe_elements, kind));
9695       checked_elements.Bind(maybe_elements);
9696       Goto(&fits_capacity);
9697     }
9698 
9699     BIND(&fits_capacity);
9700     if (is_js_array) {
9701       Node* new_length = IntPtrAdd(key, IntPtrOrSmiConstant(1, mode));
9702       StoreObjectFieldNoWriteBarrier(object, JSArray::kLengthOffset,
9703                                      ParameterToTagged(new_length, mode));
9704     }
9705     Goto(&done);
9706   }
9707 
9708   BIND(&no_grow_case);
9709   {
9710     GotoIfNot(UintPtrLessThan(key, length), bailout);
9711     checked_elements.Bind(elements);
9712     Goto(&done);
9713   }
9714 
9715   BIND(&done);
9716   return checked_elements.value();
9717 }
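
// Illustrative note on the fast path above: in a grow store mode, packed
// kinds only grow on a pure append, i.e. a store like a[a.length] = v, while
// holey kinds accept any key >= length. Keys beyond the current capacity go
// through TryGrowElementsCapacity and, failing that, through
// Runtime::kGrowArrayElements; a Smi result from the runtime call transfers
// control to {bailout}.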
9718 
9719 Node* CodeStubAssembler::CopyElementsOnWrite(Node* object, Node* elements,
9720                                              ElementsKind kind, Node* length,
9721                                              ParameterMode mode,
9722                                              Label* bailout) {
9723   VARIABLE(new_elements_var, MachineRepresentation::kTagged, elements);
9724   Label done(this);
9725 
9726   GotoIfNot(IsFixedCOWArrayMap(LoadMap(elements)), &done);
9727   {
9728     Node* capacity =
9729         TaggedToParameter(LoadFixedArrayBaseLength(elements), mode);
9730     Node* new_elements = GrowElementsCapacity(object, elements, kind, kind,
9731                                               length, capacity, mode, bailout);
9732     new_elements_var.Bind(new_elements);
9733     Goto(&done);
9734   }
9735 
9736   BIND(&done);
9737   return new_elements_var.value();
9738 }
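
// Illustrative note: a backing store whose map is the fixed-COW-array map is
// shared copy-on-write, e.g. between arrays cloned from the same literal
// boilerplate. The first mutating store therefore replaces it via the
// GrowElementsCapacity call above, which copies {length} elements into a
// fresh store of the same {capacity}.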
9739 
9740 void CodeStubAssembler::TransitionElementsKind(Node* object, Node* map,
9741                                                ElementsKind from_kind,
9742                                                ElementsKind to_kind,
9743                                                bool is_jsarray,
9744                                                Label* bailout) {
9745   DCHECK(!IsHoleyElementsKind(from_kind) || IsHoleyElementsKind(to_kind));
9746   if (AllocationSite::ShouldTrack(from_kind, to_kind)) {
9747     TrapAllocationMemento(object, bailout);
9748   }
9749 
9750   if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
9751     Comment("Non-simple map transition");
9752     Node* elements = LoadElements(object);
9753 
9754     Label done(this);
9755     GotoIf(WordEqual(elements, EmptyFixedArrayConstant()), &done);
9756 
9757     // TODO(ishell): Use OptimalParameterMode().
9758     ParameterMode mode = INTPTR_PARAMETERS;
9759     Node* elements_length = SmiUntag(LoadFixedArrayBaseLength(elements));
9760     Node* array_length =
9761         is_jsarray ? SmiUntag(LoadFastJSArrayLength(object)) : elements_length;
9762 
9763     CSA_ASSERT(this, WordNotEqual(elements_length, IntPtrConstant(0)));
9764 
9765     GrowElementsCapacity(object, elements, from_kind, to_kind, array_length,
9766                          elements_length, mode, bailout);
9767     Goto(&done);
9768     BIND(&done);
9769   }
9770 
9771   StoreMap(object, map);
9772 }
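
// Illustrative note: a simple map-change transition (e.g. PACKED_SMI_ELEMENTS
// -> PACKED_ELEMENTS) can reuse the tagged backing store, so only the map is
// stored. A non-simple transition such as PACKED_SMI_ELEMENTS ->
// PACKED_DOUBLE_ELEMENTS (triggered by e.g. a[0] = 1.5 on a Smi-only array)
// reallocates and converts the elements via GrowElementsCapacity before the
// new map is installed.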
9773 
9774 void CodeStubAssembler::TrapAllocationMemento(Node* object,
9775                                               Label* memento_found) {
9776   Comment("[ TrapAllocationMemento");
9777   Label no_memento_found(this);
9778   Label top_check(this), map_check(this);
9779 
9780   TNode<ExternalReference> new_space_top_address = ExternalConstant(
9781       ExternalReference::new_space_allocation_top_address(isolate()));
9782   const int kMementoMapOffset = JSArray::kSize;
9783   const int kMementoLastWordOffset =
9784       kMementoMapOffset + AllocationMemento::kSize - kPointerSize;
9785 
9786   // Bail out if the object is not in new space.
9787   TNode<IntPtrT> object_word = BitcastTaggedToWord(object);
9788   TNode<IntPtrT> object_page = PageFromAddress(object_word);
9789   {
9790     TNode<IntPtrT> page_flags =
9791         UncheckedCast<IntPtrT>(Load(MachineType::IntPtr(), object_page,
9792                                     IntPtrConstant(Page::kFlagsOffset)));
9793     GotoIf(WordEqual(WordAnd(page_flags,
9794                              IntPtrConstant(MemoryChunk::kIsInNewSpaceMask)),
9795                      IntPtrConstant(0)),
9796            &no_memento_found);
9797   }
9798 
9799   TNode<IntPtrT> memento_last_word = IntPtrAdd(
9800       object_word, IntPtrConstant(kMementoLastWordOffset - kHeapObjectTag));
9801   TNode<IntPtrT> memento_last_word_page = PageFromAddress(memento_last_word);
9802 
9803   TNode<IntPtrT> new_space_top = UncheckedCast<IntPtrT>(
9804       Load(MachineType::Pointer(), new_space_top_address));
9805   TNode<IntPtrT> new_space_top_page = PageFromAddress(new_space_top);
9806 
9807   // If the object is in new space, we need to check whether the respective
9808   // potential memento object is on the same page as the current top.
9809   GotoIf(WordEqual(memento_last_word_page, new_space_top_page), &top_check);
9810 
9811   // The object is on a different page than allocation top. Bail out if the
9812   // object sits on the page boundary as no memento can follow and we cannot
9813   // touch the memory following it.
9814   Branch(WordEqual(object_page, memento_last_word_page), &map_check,
9815          &no_memento_found);
9816 
9817   // If top is on the same page as the current object, we need to check whether
9818   // we are below top.
9819   BIND(&top_check);
9820   {
9821     Branch(UintPtrGreaterThanOrEqual(memento_last_word, new_space_top),
9822            &no_memento_found, &map_check);
9823   }
9824 
9825   // Memento map check.
9826   BIND(&map_check);
9827   {
9828     TNode<Object> memento_map = LoadObjectField(object, kMementoMapOffset);
9829     Branch(
9830         WordEqual(memento_map, LoadRoot(Heap::kAllocationMementoMapRootIndex)),
9831         memento_found, &no_memento_found);
9832   }
9833   BIND(&no_memento_found);
9834   Comment("] TrapAllocationMemento");
9835 }
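
// Layout assumed by the checks above (illustrative):
//
//   | JSArray (JSArray::kSize bytes) | AllocationMemento |
//                                    ^-- kMementoMapOffset
//
// A memento, if present, starts immediately after the array, so its map word
// is read at offset JSArray::kSize. The page and new-space-top checks only
// ensure that this word may be safely read at all.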
9836 
9837 TNode<IntPtrT> CodeStubAssembler::PageFromAddress(TNode<IntPtrT> address) {
9838   return WordAnd(address, IntPtrConstant(~kPageAlignmentMask));
9839 }
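
// Example (illustrative; assumes the default 512 KiB page, i.e. a 19-bit
// kPageAlignmentMask): the mask clears the offset within the page, so
//
//   PageFromAddress(0x2A34567) == 0x2A00000
//
// which is how TrapAllocationMemento above decides whether two addresses lie
// on the same page.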
9840 
9841 TNode<AllocationSite> CodeStubAssembler::CreateAllocationSiteInFeedbackVector(
9842     SloppyTNode<FeedbackVector> feedback_vector, TNode<Smi> slot) {
9843   TNode<IntPtrT> size = IntPtrConstant(AllocationSite::kSizeWithWeakNext);
9844   Node* site = Allocate(size, CodeStubAssembler::kPretenured);
9845   StoreMapNoWriteBarrier(site, Heap::kAllocationSiteWithWeakNextMapRootIndex);
9846   // Should match AllocationSite::Initialize.
9847   TNode<WordT> field = UpdateWord<AllocationSite::ElementsKindBits>(
9848       IntPtrConstant(0), IntPtrConstant(GetInitialFastElementsKind()));
9849   StoreObjectFieldNoWriteBarrier(
9850       site, AllocationSite::kTransitionInfoOrBoilerplateOffset,
9851       SmiTag(Signed(field)));
9852 
9853   // Unlike literals, constructed arrays don't have nested sites
9854   TNode<Smi> zero = SmiConstant(0);
9855   StoreObjectFieldNoWriteBarrier(site, AllocationSite::kNestedSiteOffset, zero);
9856 
9857   // Pretenuring calculation field.
9858   StoreObjectFieldNoWriteBarrier(site, AllocationSite::kPretenureDataOffset,
9859                                  Int32Constant(0),
9860                                  MachineRepresentation::kWord32);
9861 
9862   // Pretenuring memento creation count field.
9863   StoreObjectFieldNoWriteBarrier(
9864       site, AllocationSite::kPretenureCreateCountOffset, Int32Constant(0),
9865       MachineRepresentation::kWord32);
9866 
9867   // Store an empty fixed array for the code dependency.
9868   StoreObjectFieldRoot(site, AllocationSite::kDependentCodeOffset,
9869                        Heap::kEmptyWeakFixedArrayRootIndex);
9870 
9871   // Link the object to the allocation site list
9872   TNode<ExternalReference> site_list = ExternalConstant(
9873       ExternalReference::allocation_sites_list_address(isolate()));
9874   TNode<Object> next_site = CAST(LoadBufferObject(site_list, 0));
9875 
9876   // TODO(mvstanton): This is a store to a weak pointer, which we may want to
9877   // mark as such in order to skip the write barrier, once we have a unified
9878   // system for weakness. For now we decided to keep it like this because having
9879   // an initial write barrier backed store makes this pointer strong until the
9880   // next GC, and allocation sites are designed to survive several GCs anyway.
9881   StoreObjectField(site, AllocationSite::kWeakNextOffset, next_site);
9882   StoreNoWriteBarrier(MachineRepresentation::kTagged, site_list, site);
9883 
9884   StoreFeedbackVectorSlot(feedback_vector, slot, site, UPDATE_WRITE_BARRIER, 0,
9885                           SMI_PARAMETERS);
9886   return CAST(site);
9887 }
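
// Illustrative sketch of the list update above: the new site is pushed onto
// the head of the isolate's allocation-sites list, roughly
//
//   site->weak_next = *allocation_sites_list_address;  // old head
//   *allocation_sites_list_address = site;             // new head
//
// and is additionally recorded in the given feedback vector slot.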
9888 
9889 TNode<MaybeObject> CodeStubAssembler::StoreWeakReferenceInFeedbackVector(
9890     SloppyTNode<FeedbackVector> feedback_vector, Node* slot,
9891     SloppyTNode<HeapObject> value, int additional_offset,
9892     ParameterMode parameter_mode) {
9893   TNode<MaybeObject> weak_value = MakeWeak(value);
9894   StoreFeedbackVectorSlot(feedback_vector, slot, weak_value,
9895                           UPDATE_WRITE_BARRIER, additional_offset,
9896                           parameter_mode);
9897   return weak_value;
9898 }
9899 
9900 TNode<BoolT> CodeStubAssembler::NotHasBoilerplate(
9901     TNode<Object> maybe_literal_site) {
9902   return TaggedIsSmi(maybe_literal_site);
9903 }
9904 
9905 TNode<Smi> CodeStubAssembler::LoadTransitionInfo(
9906     TNode<AllocationSite> allocation_site) {
9907   TNode<Smi> transition_info = CAST(LoadObjectField(
9908       allocation_site, AllocationSite::kTransitionInfoOrBoilerplateOffset));
9909   return transition_info;
9910 }
9911 
9912 TNode<JSObject> CodeStubAssembler::LoadBoilerplate(
9913     TNode<AllocationSite> allocation_site) {
9914   TNode<JSObject> boilerplate = CAST(LoadObjectField(
9915       allocation_site, AllocationSite::kTransitionInfoOrBoilerplateOffset));
9916   return boilerplate;
9917 }
9918 
9919 TNode<Int32T> CodeStubAssembler::LoadElementsKind(
9920     TNode<AllocationSite> allocation_site) {
9921   TNode<Smi> transition_info = LoadTransitionInfo(allocation_site);
9922   TNode<Int32T> elements_kind =
9923       Signed(DecodeWord32<AllocationSite::ElementsKindBits>(
9924           SmiToInt32(transition_info)));
9925   CSA_ASSERT(this, IsFastElementsKind(elements_kind));
9926   return elements_kind;
9927 }
9928 
9929 Node* CodeStubAssembler::BuildFastLoop(
9930     const CodeStubAssembler::VariableList& vars, Node* start_index,
9931     Node* end_index, const FastLoopBody& body, int increment,
9932     ParameterMode parameter_mode, IndexAdvanceMode advance_mode) {
9933   CSA_SLOW_ASSERT(this, MatchesParameterMode(start_index, parameter_mode));
9934   CSA_SLOW_ASSERT(this, MatchesParameterMode(end_index, parameter_mode));
9935   MachineRepresentation index_rep = (parameter_mode == INTPTR_PARAMETERS)
9936                                         ? MachineType::PointerRepresentation()
9937                                         : MachineRepresentation::kTaggedSigned;
9938   VARIABLE(var, index_rep, start_index);
9939   VariableList vars_copy(vars.begin(), vars.end(), zone());
9940   vars_copy.push_back(&var);
9941   Label loop(this, vars_copy);
9942   Label after_loop(this);
9943   // Introduce an explicit second check of the termination condition before the
9944   // loop; this helps TurboFan generate better code. If there's only a single
9945   // check, then the CodeStubAssembler forces it to be at the beginning of the
9946   // loop requiring a backwards branch at the end of the loop (it's not possible
9947   // to force the loop header check at the end of the loop and branch forward to
9948   // it from the pre-header). The extra branch is slower in the case that the
9949   // loop actually iterates.
9950   Node* first_check = WordEqual(var.value(), end_index);
9951   int32_t first_check_val;
9952   if (ToInt32Constant(first_check, first_check_val)) {
9953     if (first_check_val) return var.value();
9954     Goto(&loop);
9955   } else {
9956     Branch(first_check, &after_loop, &loop);
9957   }
9958 
9959   BIND(&loop);
9960   {
9961     if (advance_mode == IndexAdvanceMode::kPre) {
9962       Increment(&var, increment, parameter_mode);
9963     }
9964     body(var.value());
9965     if (advance_mode == IndexAdvanceMode::kPost) {
9966       Increment(&var, increment, parameter_mode);
9967     }
9968     Branch(WordNotEqual(var.value(), end_index), &loop, &after_loop);
9969   }
9970   BIND(&after_loop);
9971   return var.value();
9972 }
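
// Usage sketch (illustrative; |count| stands for any IntPtrT node the caller
// already has, analogous to the call in InitializeFieldsWithRoot below):
//
//   BuildFastLoop(IntPtrConstant(0), count,
//                 [=](Node* index) {
//                   // per-iteration work on {index}
//                 },
//                 1, INTPTR_PARAMETERS, IndexAdvanceMode::kPost);
//
// With kPost the body sees 0, 1, ..., count - 1; with kPre the index is
// advanced before the body runs, as in the field-initialization loop below.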
9973 
9974 void CodeStubAssembler::BuildFastFixedArrayForEach(
9975     const CodeStubAssembler::VariableList& vars, Node* fixed_array,
9976     ElementsKind kind, Node* first_element_inclusive,
9977     Node* last_element_exclusive, const FastFixedArrayForEachBody& body,
9978     ParameterMode mode, ForEachDirection direction) {
9979   STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
9980   CSA_SLOW_ASSERT(this, MatchesParameterMode(first_element_inclusive, mode));
9981   CSA_SLOW_ASSERT(this, MatchesParameterMode(last_element_exclusive, mode));
9982   CSA_SLOW_ASSERT(this, Word32Or(IsFixedArrayWithKind(fixed_array, kind),
9983                                  IsPropertyArray(fixed_array)));
9984   int32_t first_val;
9985   bool constant_first = ToInt32Constant(first_element_inclusive, first_val);
9986   int32_t last_val;
9987   bool constant_last = ToInt32Constant(last_element_exclusive, last_val);
9988   if (constant_first && constant_last) {
9989     int delta = last_val - first_val;
9990     DCHECK_GE(delta, 0);
9991     if (delta <= kElementLoopUnrollThreshold) {
9992       if (direction == ForEachDirection::kForward) {
9993         for (int i = first_val; i < last_val; ++i) {
9994           Node* index = IntPtrConstant(i);
9995           Node* offset =
9996               ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS,
9997                                      FixedArray::kHeaderSize - kHeapObjectTag);
9998           body(fixed_array, offset);
9999         }
10000       } else {
10001         for (int i = last_val - 1; i >= first_val; --i) {
10002           Node* index = IntPtrConstant(i);
10003           Node* offset =
10004               ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS,
10005                                      FixedArray::kHeaderSize - kHeapObjectTag);
10006           body(fixed_array, offset);
10007         }
10008       }
10009       return;
10010     }
10011   }
10012 
10013   Node* start =
10014       ElementOffsetFromIndex(first_element_inclusive, kind, mode,
10015                              FixedArray::kHeaderSize - kHeapObjectTag);
10016   Node* limit =
10017       ElementOffsetFromIndex(last_element_exclusive, kind, mode,
10018                              FixedArray::kHeaderSize - kHeapObjectTag);
10019   if (direction == ForEachDirection::kReverse) std::swap(start, limit);
10020 
10021   int increment = IsDoubleElementsKind(kind) ? kDoubleSize : kPointerSize;
10022   BuildFastLoop(
10023       vars, start, limit,
10024       [fixed_array, &body](Node* offset) { body(fixed_array, offset); },
10025       direction == ForEachDirection::kReverse ? -increment : increment,
10026       INTPTR_PARAMETERS,
10027       direction == ForEachDirection::kReverse ? IndexAdvanceMode::kPre
10028                                               : IndexAdvanceMode::kPost);
10029 }
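
// Usage sketch (illustrative; assumes the overload without an explicit
// VariableList). The body receives a byte offset that already includes
// FixedArray::kHeaderSize - kHeapObjectTag, so it can load/store directly:
//
//   BuildFastFixedArrayForEach(
//       array, PACKED_ELEMENTS, IntPtrConstant(0), length,
//       [=](Node* fixed_array, Node* offset) {
//         Node* element = Load(MachineType::AnyTagged(), fixed_array, offset);
//         // ... use {element} ...
//       },
//       INTPTR_PARAMETERS);
//
// Constant ranges of at most kElementLoopUnrollThreshold elements are fully
// unrolled instead of emitting a loop.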
10030 
10031 void CodeStubAssembler::GotoIfFixedArraySizeDoesntFitInNewSpace(
10032     Node* element_count, Label* doesnt_fit, int base_size, ParameterMode mode) {
10033   GotoIf(FixedArraySizeDoesntFitInNewSpace(element_count, base_size, mode),
10034          doesnt_fit);
10035 }
10036 
10037 void CodeStubAssembler::InitializeFieldsWithRoot(
10038     Node* object, Node* start_offset, Node* end_offset,
10039     Heap::RootListIndex root_index) {
10040   CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
10041   start_offset = IntPtrAdd(start_offset, IntPtrConstant(-kHeapObjectTag));
10042   end_offset = IntPtrAdd(end_offset, IntPtrConstant(-kHeapObjectTag));
10043   Node* root_value = LoadRoot(root_index);
10044   BuildFastLoop(end_offset, start_offset,
10045                 [this, object, root_value](Node* current) {
10046                   StoreNoWriteBarrier(MachineRepresentation::kTagged, object,
10047                                       current, root_value);
10048                 },
10049                 -kPointerSize, INTPTR_PARAMETERS,
10050                 CodeStubAssembler::IndexAdvanceMode::kPre);
10051 }
10052 
10053 void CodeStubAssembler::BranchIfNumberRelationalComparison(
10054     Operation op, Node* left, Node* right, Label* if_true, Label* if_false) {
10055   CSA_SLOW_ASSERT(this, IsNumber(left));
10056   CSA_SLOW_ASSERT(this, IsNumber(right));
10057 
10058   Label do_float_comparison(this);
10059   TVARIABLE(Float64T, var_left_float);
10060   TVARIABLE(Float64T, var_right_float);
10061 
10062   Branch(TaggedIsSmi(left),
10063          [&] {
10064            TNode<Smi> smi_left = CAST(left);
10065 
10066            Branch(TaggedIsSmi(right),
10067                   [&] {
10068                     TNode<Smi> smi_right = CAST(right);
10069 
10070                     // Both {left} and {right} are Smi, so just perform a fast
10071                     // Smi comparison.
10072                     switch (op) {
10073                       case Operation::kLessThan:
10074                         BranchIfSmiLessThan(smi_left, smi_right, if_true,
10075                                             if_false);
10076                         break;
10077                       case Operation::kLessThanOrEqual:
10078                         BranchIfSmiLessThanOrEqual(smi_left, smi_right, if_true,
10079                                                    if_false);
10080                         break;
10081                       case Operation::kGreaterThan:
10082                         BranchIfSmiLessThan(smi_right, smi_left, if_true,
10083                                             if_false);
10084                         break;
10085                       case Operation::kGreaterThanOrEqual:
10086                         BranchIfSmiLessThanOrEqual(smi_right, smi_left, if_true,
10087                                                    if_false);
10088                         break;
10089                       default:
10090                         UNREACHABLE();
10091                     }
10092                   },
10093                   [&] {
10094                     CSA_ASSERT(this, IsHeapNumber(right));
10095                     var_left_float = SmiToFloat64(smi_left);
10096                     var_right_float = LoadHeapNumberValue(right);
10097                     Goto(&do_float_comparison);
10098                   });
10099          },
10100          [&] {
10101            CSA_ASSERT(this, IsHeapNumber(left));
10102            var_left_float = LoadHeapNumberValue(left);
10103 
10104            Branch(TaggedIsSmi(right),
10105                   [&] {
10106                     var_right_float = SmiToFloat64(right);
10107                     Goto(&do_float_comparison);
10108                   },
10109                   [&] {
10110                     CSA_ASSERT(this, IsHeapNumber(right));
10111                     var_right_float = LoadHeapNumberValue(right);
10112                     Goto(&do_float_comparison);
10113                   });
10114          });
10115 
10116   BIND(&do_float_comparison);
10117   {
10118     switch (op) {
10119       case Operation::kLessThan:
10120         Branch(Float64LessThan(var_left_float.value(), var_right_float.value()),
10121                if_true, if_false);
10122         break;
10123       case Operation::kLessThanOrEqual:
10124         Branch(Float64LessThanOrEqual(var_left_float.value(),
10125                                       var_right_float.value()),
10126                if_true, if_false);
10127         break;
10128       case Operation::kGreaterThan:
10129         Branch(
10130             Float64GreaterThan(var_left_float.value(), var_right_float.value()),
10131             if_true, if_false);
10132         break;
10133       case Operation::kGreaterThanOrEqual:
10134         Branch(Float64GreaterThanOrEqual(var_left_float.value(),
10135                                          var_right_float.value()),
10136                if_true, if_false);
10137         break;
10138       default:
10139         UNREACHABLE();
10140     }
10141   }
10142 }
10143 
10144 void CodeStubAssembler::GotoIfNumberGreaterThanOrEqual(Node* left, Node* right,
10145                                                        Label* if_true) {
10146   Label if_false(this);
10147   BranchIfNumberRelationalComparison(Operation::kGreaterThanOrEqual, left,
10148                                      right, if_true, &if_false);
10149   BIND(&if_false);
10150 }
10151 
10152 namespace {
10153 Operation Reverse(Operation op) {
10154   switch (op) {
10155     case Operation::kLessThan:
10156       return Operation::kGreaterThan;
10157     case Operation::kLessThanOrEqual:
10158       return Operation::kGreaterThanOrEqual;
10159     case Operation::kGreaterThan:
10160       return Operation::kLessThan;
10161     case Operation::kGreaterThanOrEqual:
10162       return Operation::kLessThanOrEqual;
10163     default:
10164       break;
10165   }
10166   UNREACHABLE();
10167 }
10168 }  // anonymous namespace
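
// Illustrative note: Reverse is needed because the BigInt comparison runtime
// functions used below always take the BigInt as their first argument. A
// comparison with the BigInt on the right, e.g. smi < bigint, is therefore
// evaluated as kBigIntCompareToNumber(Reverse(kLessThan), bigint, smi), i.e.
// as bigint > smi, which yields the same answer.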
10169 
10170 Node* CodeStubAssembler::RelationalComparison(Operation op, Node* left,
10171                                               Node* right, Node* context,
10172                                               Variable* var_type_feedback) {
10173   Label return_true(this), return_false(this), do_float_comparison(this),
10174       end(this);
10175   TVARIABLE(Oddball, var_result);  // Actually only "true" or "false".
10176   TVARIABLE(Float64T, var_left_float);
10177   TVARIABLE(Float64T, var_right_float);
10178 
10179   // We might need to loop several times due to ToPrimitive and/or ToNumeric
10180   // conversions.
10181   VARIABLE(var_left, MachineRepresentation::kTagged, left);
10182   VARIABLE(var_right, MachineRepresentation::kTagged, right);
10183   VariableList loop_variable_list({&var_left, &var_right}, zone());
10184   if (var_type_feedback != nullptr) {
10185     // Initialize the type feedback to None. The current feedback is combined
10186     // with the previous feedback.
10187     var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kNone));
10188     loop_variable_list.push_back(var_type_feedback);
10189   }
10190   Label loop(this, loop_variable_list);
10191   Goto(&loop);
10192   BIND(&loop);
10193   {
10194     left = var_left.value();
10195     right = var_right.value();
10196 
10197     Label if_left_smi(this), if_left_not_smi(this);
10198     Branch(TaggedIsSmi(left), &if_left_smi, &if_left_not_smi);
10199 
10200     BIND(&if_left_smi);
10201     {
10202       TNode<Smi> smi_left = CAST(left);
10203       Label if_right_smi(this), if_right_heapnumber(this),
10204           if_right_bigint(this, Label::kDeferred),
10205           if_right_not_numeric(this, Label::kDeferred);
10206       GotoIf(TaggedIsSmi(right), &if_right_smi);
10207       Node* right_map = LoadMap(right);
10208       GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
10209       Node* right_instance_type = LoadMapInstanceType(right_map);
10210       Branch(IsBigIntInstanceType(right_instance_type), &if_right_bigint,
10211              &if_right_not_numeric);
10212 
10213       BIND(&if_right_smi);
10214       {
10215         TNode<Smi> smi_right = CAST(right);
10216         CombineFeedback(var_type_feedback,
10217                         CompareOperationFeedback::kSignedSmall);
10218         switch (op) {
10219           case Operation::kLessThan:
10220             BranchIfSmiLessThan(smi_left, smi_right, &return_true,
10221                                 &return_false);
10222             break;
10223           case Operation::kLessThanOrEqual:
10224             BranchIfSmiLessThanOrEqual(smi_left, smi_right, &return_true,
10225                                        &return_false);
10226             break;
10227           case Operation::kGreaterThan:
10228             BranchIfSmiLessThan(smi_right, smi_left, &return_true,
10229                                 &return_false);
10230             break;
10231           case Operation::kGreaterThanOrEqual:
10232             BranchIfSmiLessThanOrEqual(smi_right, smi_left, &return_true,
10233                                        &return_false);
10234             break;
10235           default:
10236             UNREACHABLE();
10237         }
10238       }
10239 
10240       BIND(&if_right_heapnumber);
10241       {
10242         CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
10243         var_left_float = SmiToFloat64(smi_left);
10244         var_right_float = LoadHeapNumberValue(right);
10245         Goto(&do_float_comparison);
10246       }
10247 
10248       BIND(&if_right_bigint);
10249       {
10250         OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
10251         var_result = CAST(CallRuntime(Runtime::kBigIntCompareToNumber,
10252                                       NoContextConstant(),
10253                                       SmiConstant(Reverse(op)), right, left));
10254         Goto(&end);
10255       }
10256 
10257       BIND(&if_right_not_numeric);
10258       {
10259         OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
10260         // Convert {right} to a Numeric; we don't need to perform the
10261         // dedicated ToPrimitive(right, hint Number) operation, as the
10262         // ToNumeric(right) will by itself already invoke ToPrimitive with
10263         // a Number hint.
10264         var_right.Bind(
10265             CallBuiltin(Builtins::kNonNumberToNumeric, context, right));
10266         Goto(&loop);
10267       }
10268     }
10269 
10270     BIND(&if_left_not_smi);
10271     {
10272       Node* left_map = LoadMap(left);
10273 
10274       Label if_right_smi(this), if_right_not_smi(this);
10275       Branch(TaggedIsSmi(right), &if_right_smi, &if_right_not_smi);
10276 
10277       BIND(&if_right_smi);
10278       {
10279         Label if_left_heapnumber(this), if_left_bigint(this, Label::kDeferred),
10280             if_left_not_numeric(this, Label::kDeferred);
10281         GotoIf(IsHeapNumberMap(left_map), &if_left_heapnumber);
10282         Node* left_instance_type = LoadMapInstanceType(left_map);
10283         Branch(IsBigIntInstanceType(left_instance_type), &if_left_bigint,
10284                &if_left_not_numeric);
10285 
10286         BIND(&if_left_heapnumber);
10287         {
10288           CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
10289           var_left_float = LoadHeapNumberValue(left);
10290           var_right_float = SmiToFloat64(right);
10291           Goto(&do_float_comparison);
10292         }
10293 
10294         BIND(&if_left_bigint);
10295         {
10296           OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
10297           var_result = CAST(CallRuntime(Runtime::kBigIntCompareToNumber,
10298                                         NoContextConstant(), SmiConstant(op),
10299                                         left, right));
10300           Goto(&end);
10301         }
10302 
10303         BIND(&if_left_not_numeric);
10304         {
10305           OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
10306           // Convert {left} to a Numeric; we don't need to perform the
10307           // dedicated ToPrimitive(left, hint Number) operation, as the
10308           // ToNumeric(left) will by itself already invoke ToPrimitive with
10309           // a Number hint.
10310           var_left.Bind(
10311               CallBuiltin(Builtins::kNonNumberToNumeric, context, left));
10312           Goto(&loop);
10313         }
10314       }
10315 
10316       BIND(&if_right_not_smi);
10317       {
10318         Node* right_map = LoadMap(right);
10319 
10320         Label if_left_heapnumber(this), if_left_bigint(this, Label::kDeferred),
10321             if_left_string(this), if_left_other(this, Label::kDeferred);
10322         GotoIf(IsHeapNumberMap(left_map), &if_left_heapnumber);
10323         Node* left_instance_type = LoadMapInstanceType(left_map);
10324         GotoIf(IsBigIntInstanceType(left_instance_type), &if_left_bigint);
10325         Branch(IsStringInstanceType(left_instance_type), &if_left_string,
10326                &if_left_other);
10327 
10328         BIND(&if_left_heapnumber);
10329         {
10330           Label if_right_heapnumber(this),
10331               if_right_bigint(this, Label::kDeferred),
10332               if_right_not_numeric(this, Label::kDeferred);
10333           GotoIf(WordEqual(right_map, left_map), &if_right_heapnumber);
10334           Node* right_instance_type = LoadMapInstanceType(right_map);
10335           Branch(IsBigIntInstanceType(right_instance_type), &if_right_bigint,
10336                  &if_right_not_numeric);
10337 
10338           BIND(&if_right_heapnumber);
10339           {
10340             CombineFeedback(var_type_feedback,
10341                             CompareOperationFeedback::kNumber);
10342             var_left_float = LoadHeapNumberValue(left);
10343             var_right_float = LoadHeapNumberValue(right);
10344             Goto(&do_float_comparison);
10345           }
10346 
10347           BIND(&if_right_bigint);
10348           {
10349             OverwriteFeedback(var_type_feedback,
10350                               CompareOperationFeedback::kAny);
10351             var_result = CAST(CallRuntime(
10352                 Runtime::kBigIntCompareToNumber, NoContextConstant(),
10353                 SmiConstant(Reverse(op)), right, left));
10354             Goto(&end);
10355           }
10356 
10357           BIND(&if_right_not_numeric);
10358           {
10359             OverwriteFeedback(var_type_feedback,
10360                               CompareOperationFeedback::kAny);
10361             // Convert {right} to a Numeric; we don't need to perform the
10362             // dedicated ToPrimitive(right, hint Number) operation, as the
10363             // ToNumeric(right) will by itself already invoke ToPrimitive with
10364             // a Number hint.
10365             var_right.Bind(
10366                 CallBuiltin(Builtins::kNonNumberToNumeric, context, right));
10367             Goto(&loop);
10368           }
10369         }
10370 
10371         BIND(&if_left_bigint);
10372         {
10373           Label if_right_heapnumber(this), if_right_bigint(this),
10374               if_right_string(this), if_right_other(this);
10375           GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
10376           Node* right_instance_type = LoadMapInstanceType(right_map);
10377           GotoIf(IsBigIntInstanceType(right_instance_type), &if_right_bigint);
10378           Branch(IsStringInstanceType(right_instance_type), &if_right_string,
10379                  &if_right_other);
10380 
10381           BIND(&if_right_heapnumber);
10382           {
10383             OverwriteFeedback(var_type_feedback,
10384                               CompareOperationFeedback::kAny);
10385             var_result = CAST(CallRuntime(Runtime::kBigIntCompareToNumber,
10386                                           NoContextConstant(), SmiConstant(op),
10387                                           left, right));
10388             Goto(&end);
10389           }
10390 
10391           BIND(&if_right_bigint);
10392           {
10393             CombineFeedback(var_type_feedback,
10394                             CompareOperationFeedback::kBigInt);
10395             var_result = CAST(CallRuntime(Runtime::kBigIntCompareToBigInt,
10396                                           NoContextConstant(), SmiConstant(op),
10397                                           left, right));
10398             Goto(&end);
10399           }
10400 
10401           BIND(&if_right_string);
10402           {
10403             OverwriteFeedback(var_type_feedback,
10404                               CompareOperationFeedback::kAny);
10405             var_result = CAST(CallRuntime(Runtime::kBigIntCompareToString,
10406                                           NoContextConstant(), SmiConstant(op),
10407                                           left, right));
10408             Goto(&end);
10409           }
10410 
10411           // {right} is not a Number, BigInt, or String.
10412           BIND(&if_right_other);
10413           {
10414             OverwriteFeedback(var_type_feedback,
10415                               CompareOperationFeedback::kAny);
10416             // Convert {right} to a Numeric; we don't need to perform the
10417             // dedicated ToPrimitive(right, hint Number) operation, as the
10418             // ToNumeric(right) will by itself already invoke ToPrimitive with
10419             // a Number hint.
10420             var_right.Bind(
10421                 CallBuiltin(Builtins::kNonNumberToNumeric, context, right));
10422             Goto(&loop);
10423           }
10424         }
10425 
10426         BIND(&if_left_string);
10427         {
10428           Node* right_instance_type = LoadMapInstanceType(right_map);
10429 
10430           Label if_right_not_string(this, Label::kDeferred);
10431           GotoIfNot(IsStringInstanceType(right_instance_type),
10432                     &if_right_not_string);
10433 
10434           // Both {left} and {right} are strings.
10435           CombineFeedback(var_type_feedback, CompareOperationFeedback::kString);
10436           Builtins::Name builtin;
10437           switch (op) {
10438             case Operation::kLessThan:
10439               builtin = Builtins::kStringLessThan;
10440               break;
10441             case Operation::kLessThanOrEqual:
10442               builtin = Builtins::kStringLessThanOrEqual;
10443               break;
10444             case Operation::kGreaterThan:
10445               builtin = Builtins::kStringGreaterThan;
10446               break;
10447             case Operation::kGreaterThanOrEqual:
10448               builtin = Builtins::kStringGreaterThanOrEqual;
10449               break;
10450             default:
10451               UNREACHABLE();
10452           }
10453           var_result = CAST(CallBuiltin(builtin, context, left, right));
10454           Goto(&end);
10455 
10456           BIND(&if_right_not_string);
10457           {
10458             OverwriteFeedback(var_type_feedback,
10459                               CompareOperationFeedback::kAny);
10460             // {left} is a String, while {right} isn't. Check if {right} is
10461             // a BigInt, otherwise call ToPrimitive(right, hint Number) if
10462             // {right} is a receiver, or ToNumeric(left) and then
10463             // ToNumeric(right) in the other cases.
10464             STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
10465             Label if_right_bigint(this),
10466                 if_right_receiver(this, Label::kDeferred);
10467             GotoIf(IsBigIntInstanceType(right_instance_type), &if_right_bigint);
10468             GotoIf(IsJSReceiverInstanceType(right_instance_type),
10469                    &if_right_receiver);
10470 
10471             var_left.Bind(
10472                 CallBuiltin(Builtins::kNonNumberToNumeric, context, left));
10473             var_right.Bind(CallBuiltin(Builtins::kToNumeric, context, right));
10474             Goto(&loop);
10475 
10476             BIND(&if_right_bigint);
10477             {
10478               var_result = CAST(CallRuntime(
10479                   Runtime::kBigIntCompareToString, NoContextConstant(),
10480                   SmiConstant(Reverse(op)), right, left));
10481               Goto(&end);
10482             }
10483 
10484             BIND(&if_right_receiver);
10485             {
10486               Callable callable = CodeFactory::NonPrimitiveToPrimitive(
10487                   isolate(), ToPrimitiveHint::kNumber);
10488               var_right.Bind(CallStub(callable, context, right));
10489               Goto(&loop);
10490             }
10491           }
10492         }
10493 
10494         BIND(&if_left_other);
10495         {
10496           // {left} is neither a Numeric nor a String, and {right} is not a Smi.
10497           if (var_type_feedback != nullptr) {
10498             // Collect NumberOrOddball feedback if {left} is an Oddball
10499             // and {right} is either a HeapNumber or Oddball. Otherwise collect
10500             // Any feedback.
10501             Label collect_any_feedback(this), collect_oddball_feedback(this),
10502                 collect_feedback_done(this);
10503             GotoIfNot(InstanceTypeEqual(left_instance_type, ODDBALL_TYPE),
10504                       &collect_any_feedback);
10505 
10506             GotoIf(IsHeapNumberMap(right_map), &collect_oddball_feedback);
10507             Node* right_instance_type = LoadMapInstanceType(right_map);
10508             Branch(InstanceTypeEqual(right_instance_type, ODDBALL_TYPE),
10509                    &collect_oddball_feedback, &collect_any_feedback);
10510 
10511             BIND(&collect_oddball_feedback);
10512             {
10513               CombineFeedback(var_type_feedback,
10514                               CompareOperationFeedback::kNumberOrOddball);
10515               Goto(&collect_feedback_done);
10516             }
10517 
10518             BIND(&collect_any_feedback);
10519             {
10520               OverwriteFeedback(var_type_feedback,
10521                                 CompareOperationFeedback::kAny);
10522               Goto(&collect_feedback_done);
10523             }
10524 
10525             BIND(&collect_feedback_done);
10526           }
10527 
10528           // If {left} is a receiver, call ToPrimitive(left, hint Number).
10529           // Otherwise call ToNumeric(left) and then ToNumeric(right).
10530           STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
10531           Label if_left_receiver(this, Label::kDeferred);
10532           GotoIf(IsJSReceiverInstanceType(left_instance_type),
10533                  &if_left_receiver);
10534 
10535           var_left.Bind(
10536               CallBuiltin(Builtins::kNonNumberToNumeric, context, left));
10537           var_right.Bind(CallBuiltin(Builtins::kToNumeric, context, right));
10538           Goto(&loop);
10539 
10540           BIND(&if_left_receiver);
10541           {
10542             Callable callable = CodeFactory::NonPrimitiveToPrimitive(
10543                 isolate(), ToPrimitiveHint::kNumber);
10544             var_left.Bind(CallStub(callable, context, left));
10545             Goto(&loop);
10546           }
10547         }
10548       }
10549     }
10550   }
10551 
10552   BIND(&do_float_comparison);
10553   {
10554     switch (op) {
10555       case Operation::kLessThan:
10556         Branch(Float64LessThan(var_left_float.value(), var_right_float.value()),
10557                &return_true, &return_false);
10558         break;
10559       case Operation::kLessThanOrEqual:
10560         Branch(Float64LessThanOrEqual(var_left_float.value(),
10561                                       var_right_float.value()),
10562                &return_true, &return_false);
10563         break;
10564       case Operation::kGreaterThan:
10565         Branch(
10566             Float64GreaterThan(var_left_float.value(), var_right_float.value()),
10567             &return_true, &return_false);
10568         break;
10569       case Operation::kGreaterThanOrEqual:
10570         Branch(Float64GreaterThanOrEqual(var_left_float.value(),
10571                                          var_right_float.value()),
10572                &return_true, &return_false);
10573         break;
10574       default:
10575         UNREACHABLE();
10576     }
10577   }
10578 
10579   BIND(&return_true);
10580   {
10581     var_result = TrueConstant();
10582     Goto(&end);
10583   }
10584 
10585   BIND(&return_false);
10586   {
10587     var_result = FalseConstant();
10588     Goto(&end);
10589   }
10590 
10591   BIND(&end);
10592   return var_result.value();
10593 }
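
// Worked examples for the conversion loop above (illustrative):
//   1 < "2"  : {right} is a String, so NonNumberToNumeric turns it into the
//              number 2 and the loop re-enters with two Numbers.
//   {} < 1   : {left} is a receiver; NonNumberToNumeric (which itself applies
//              ToPrimitive with a Number hint) converts it first.
//   1n < 2   : dispatched directly to Runtime::kBigIntCompareToNumber.
// The collected feedback widens accordingly (kSignedSmall, kNumber, kBigInt,
// kString, ..., kAny).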
10594 
10595 TNode<Smi> CodeStubAssembler::CollectFeedbackForString(
10596     SloppyTNode<Int32T> instance_type) {
10597   TNode<Smi> feedback = SelectSmiConstant(
10598       Word32Equal(
10599           Word32And(instance_type, Int32Constant(kIsNotInternalizedMask)),
10600           Int32Constant(kInternalizedTag)),
10601       CompareOperationFeedback::kInternalizedString,
10602       CompareOperationFeedback::kString);
10603   return feedback;
10604 }
10605 
10606 void CodeStubAssembler::GenerateEqual_Same(Node* value, Label* if_equal,
10607                                            Label* if_notequal,
10608                                            Variable* var_type_feedback) {
10609   // In case of abstract or strict equality checks, we need additional checks
10610   // for NaN values because they are not considered equal, even if both the
10611   // left and the right hand side reference exactly the same value.
10612 
10613   Label if_smi(this), if_heapnumber(this);
10614   GotoIf(TaggedIsSmi(value), &if_smi);
10615 
10616   Node* value_map = LoadMap(value);
10617   GotoIf(IsHeapNumberMap(value_map), &if_heapnumber);
10618 
10619   // For non-HeapNumbers, all we do is collect type feedback.
10620   if (var_type_feedback != nullptr) {
10621     Node* instance_type = LoadMapInstanceType(value_map);
10622 
10623     Label if_string(this), if_receiver(this), if_symbol(this), if_bigint(this),
10624         if_other(this, Label::kDeferred);
10625     GotoIf(IsStringInstanceType(instance_type), &if_string);
10626     GotoIf(IsJSReceiverInstanceType(instance_type), &if_receiver);
10627     GotoIf(IsBigIntInstanceType(instance_type), &if_bigint);
10628     Branch(IsSymbolInstanceType(instance_type), &if_symbol, &if_other);
10629 
10630     BIND(&if_string);
10631     {
10632       CombineFeedback(var_type_feedback,
10633                       CollectFeedbackForString(instance_type));
10634       Goto(if_equal);
10635     }
10636 
10637     BIND(&if_symbol);
10638     {
10639       CombineFeedback(var_type_feedback, CompareOperationFeedback::kSymbol);
10640       Goto(if_equal);
10641     }
10642 
10643     BIND(&if_receiver);
10644     {
10645       CombineFeedback(var_type_feedback, CompareOperationFeedback::kReceiver);
10646       Goto(if_equal);
10647     }
10648 
10649     BIND(&if_bigint);
10650     {
10651       CombineFeedback(var_type_feedback, CompareOperationFeedback::kBigInt);
10652       Goto(if_equal);
10653     }
10654 
10655     BIND(&if_other);
10656     {
10657       CombineFeedback(var_type_feedback, CompareOperationFeedback::kAny);
10658       Goto(if_equal);
10659     }
10660   } else {
10661     Goto(if_equal);
10662   }
10663 
10664   BIND(&if_heapnumber);
10665   {
10666     CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
10667     Node* number_value = LoadHeapNumberValue(value);
10668     BranchIfFloat64IsNaN(number_value, if_notequal, if_equal);
10669   }
10670 
10671   BIND(&if_smi);
10672   {
10673     CombineFeedback(var_type_feedback, CompareOperationFeedback::kSignedSmall);
10674     Goto(if_equal);
10675   }
10676 }
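
// Illustrative note: identical references compare equal for every type except
// HeapNumber, whose stored value may be NaN. E.g. for a HeapNumber holding
// NaN, both v == v and v === v are false, which is why the HeapNumber case
// above re-checks the value with BranchIfFloat64IsNaN.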
10677 
10678 // ES6 section 7.2.12 Abstract Equality Comparison
10679 Node* CodeStubAssembler::Equal(Node* left, Node* right, Node* context,
10680                                Variable* var_type_feedback) {
10681   // This is a slightly optimized version of Object::Equals. Whenever you
10682   // change something functionality-wise in here, remember to update the
10683   // Object::Equals method as well.
10684 
10685   Label if_equal(this), if_notequal(this), do_float_comparison(this),
10686       do_right_stringtonumber(this, Label::kDeferred), end(this);
10687   VARIABLE(result, MachineRepresentation::kTagged);
10688   TVARIABLE(Float64T, var_left_float);
10689   TVARIABLE(Float64T, var_right_float);
10690 
10691   // We can avoid code duplication by exploiting the fact that abstract equality
10692   // is symmetric.
10693   Label use_symmetry(this);
10694 
10695   // We might need to loop several times due to ToPrimitive and/or ToNumber
10696   // conversions.
10697   VARIABLE(var_left, MachineRepresentation::kTagged, left);
10698   VARIABLE(var_right, MachineRepresentation::kTagged, right);
10699   VariableList loop_variable_list({&var_left, &var_right}, zone());
10700   if (var_type_feedback != nullptr) {
10701     // Initialize the type feedback to None. The current feedback will be
10702     // combined with the previous feedback.
10703     OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kNone);
10704     loop_variable_list.push_back(var_type_feedback);
10705   }
10706   Label loop(this, loop_variable_list);
10707   Goto(&loop);
10708   BIND(&loop);
10709   {
10710     left = var_left.value();
10711     right = var_right.value();
10712 
10713     Label if_notsame(this);
10714     GotoIf(WordNotEqual(left, right), &if_notsame);
10715     {
10716       // {left} and {right} reference the exact same value, yet we need special
10717       // treatment for HeapNumber, as NaN is not equal to NaN.
10718       GenerateEqual_Same(left, &if_equal, &if_notequal, var_type_feedback);
10719     }
10720 
10721     BIND(&if_notsame);
10722     Label if_left_smi(this), if_left_not_smi(this);
10723     Branch(TaggedIsSmi(left), &if_left_smi, &if_left_not_smi);
10724 
10725     BIND(&if_left_smi);
10726     {
10727       Label if_right_smi(this), if_right_not_smi(this);
10728       Branch(TaggedIsSmi(right), &if_right_smi, &if_right_not_smi);
10729 
10730       BIND(&if_right_smi);
10731       {
10732         // We have already checked for {left} and {right} being the same value,
10733         // so when we get here they must be different Smis.
10734         CombineFeedback(var_type_feedback,
10735                         CompareOperationFeedback::kSignedSmall);
10736         Goto(&if_notequal);
10737       }
10738 
10739       BIND(&if_right_not_smi);
10740       Node* right_map = LoadMap(right);
10741       Label if_right_heapnumber(this), if_right_boolean(this),
10742           if_right_bigint(this, Label::kDeferred),
10743           if_right_receiver(this, Label::kDeferred);
10744       GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
10745       // {left} is a Smi and {right} is neither a HeapNumber nor a Smi.
10746       if (var_type_feedback != nullptr) {
10747         var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kAny));
10748       }
10749       GotoIf(IsBooleanMap(right_map), &if_right_boolean);
10750       Node* right_type = LoadMapInstanceType(right_map);
10751       GotoIf(IsStringInstanceType(right_type), &do_right_stringtonumber);
10752       GotoIf(IsBigIntInstanceType(right_type), &if_right_bigint);
10753       Branch(IsJSReceiverInstanceType(right_type), &if_right_receiver,
10754              &if_notequal);
10755 
10756       BIND(&if_right_heapnumber);
10757       {
10758         var_left_float = SmiToFloat64(left);
10759         var_right_float = LoadHeapNumberValue(right);
10760         CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
10761         Goto(&do_float_comparison);
10762       }
10763 
10764       BIND(&if_right_boolean);
10765       {
10766         var_right.Bind(LoadObjectField(right, Oddball::kToNumberOffset));
10767         Goto(&loop);
10768       }
10769 
10770       BIND(&if_right_bigint);
10771       {
10772         result.Bind(CallRuntime(Runtime::kBigIntEqualToNumber,
10773                                 NoContextConstant(), right, left));
10774         Goto(&end);
10775       }
10776 
10777       BIND(&if_right_receiver);
10778       {
10779         Callable callable = CodeFactory::NonPrimitiveToPrimitive(isolate());
10780         var_right.Bind(CallStub(callable, context, right));
10781         Goto(&loop);
10782       }
10783     }
10784 
10785     BIND(&if_left_not_smi);
10786     {
10787       GotoIf(TaggedIsSmi(right), &use_symmetry);
10788 
10789       Label if_left_symbol(this), if_left_number(this), if_left_string(this),
10790           if_left_bigint(this, Label::kDeferred), if_left_oddball(this),
10791           if_left_receiver(this);
10792 
10793       Node* left_map = LoadMap(left);
10794       Node* right_map = LoadMap(right);
10795       Node* left_type = LoadMapInstanceType(left_map);
10796       Node* right_type = LoadMapInstanceType(right_map);
10797 
10798       GotoIf(Int32LessThan(left_type, Int32Constant(FIRST_NONSTRING_TYPE)),
10799              &if_left_string);
10800       GotoIf(InstanceTypeEqual(left_type, SYMBOL_TYPE), &if_left_symbol);
10801       GotoIf(InstanceTypeEqual(left_type, HEAP_NUMBER_TYPE), &if_left_number);
10802       GotoIf(InstanceTypeEqual(left_type, ODDBALL_TYPE), &if_left_oddball);
10803       GotoIf(InstanceTypeEqual(left_type, BIGINT_TYPE), &if_left_bigint);
10804       Goto(&if_left_receiver);
10805 
10806       BIND(&if_left_string);
10807       {
10808         GotoIfNot(IsStringInstanceType(right_type), &use_symmetry);
10809         result.Bind(CallBuiltin(Builtins::kStringEqual, context, left, right));
10810         CombineFeedback(var_type_feedback,
10811                         SmiOr(CollectFeedbackForString(left_type),
10812                               CollectFeedbackForString(right_type)));
10813         Goto(&end);
10814       }
10815 
10816       BIND(&if_left_number);
10817       {
10818         Label if_right_not_number(this);
10819         GotoIf(Word32NotEqual(left_type, right_type), &if_right_not_number);
10820 
10821         var_left_float = LoadHeapNumberValue(left);
10822         var_right_float = LoadHeapNumberValue(right);
10823         CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
10824         Goto(&do_float_comparison);
10825 
10826         BIND(&if_right_not_number);
10827         {
10828           Label if_right_boolean(this);
10829           if (var_type_feedback != nullptr) {
10830             var_type_feedback->Bind(
10831                 SmiConstant(CompareOperationFeedback::kAny));
10832           }
10833           GotoIf(IsStringInstanceType(right_type), &do_right_stringtonumber);
10834           GotoIf(IsBooleanMap(right_map), &if_right_boolean);
10835           GotoIf(IsBigIntInstanceType(right_type), &use_symmetry);
10836           Branch(IsJSReceiverInstanceType(right_type), &use_symmetry,
10837                  &if_notequal);
10838 
10839           BIND(&if_right_boolean);
10840           {
10841             var_right.Bind(LoadObjectField(right, Oddball::kToNumberOffset));
10842             Goto(&loop);
10843           }
10844         }
10845       }
10846 
10847       BIND(&if_left_bigint);
10848       {
10849         Label if_right_heapnumber(this), if_right_bigint(this),
10850             if_right_string(this), if_right_boolean(this);
10851         GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
10852         GotoIf(IsBigIntInstanceType(right_type), &if_right_bigint);
10853         GotoIf(IsStringInstanceType(right_type), &if_right_string);
10854         GotoIf(IsBooleanMap(right_map), &if_right_boolean);
10855         Branch(IsJSReceiverInstanceType(right_type), &use_symmetry,
10856                &if_notequal);
10857 
10858         BIND(&if_right_heapnumber);
10859         {
10860           if (var_type_feedback != nullptr) {
10861             var_type_feedback->Bind(
10862                 SmiConstant(CompareOperationFeedback::kAny));
10863           }
10864           result.Bind(CallRuntime(Runtime::kBigIntEqualToNumber,
10865                                   NoContextConstant(), left, right));
10866           Goto(&end);
10867         }
10868 
10869         BIND(&if_right_bigint);
10870         {
10871           CombineFeedback(var_type_feedback, CompareOperationFeedback::kBigInt);
10872           result.Bind(CallRuntime(Runtime::kBigIntEqualToBigInt,
10873                                   NoContextConstant(), left, right));
10874           Goto(&end);
10875         }
10876 
10877         BIND(&if_right_string);
10878         {
10879           if (var_type_feedback != nullptr) {
10880             var_type_feedback->Bind(
10881                 SmiConstant(CompareOperationFeedback::kAny));
10882           }
10883           result.Bind(CallRuntime(Runtime::kBigIntEqualToString,
10884                                   NoContextConstant(), left, right));
10885           Goto(&end);
10886         }
10887 
10888         BIND(&if_right_boolean);
10889         {
10890           if (var_type_feedback != nullptr) {
10891             var_type_feedback->Bind(
10892                 SmiConstant(CompareOperationFeedback::kAny));
10893           }
10894           var_right.Bind(LoadObjectField(right, Oddball::kToNumberOffset));
10895           Goto(&loop);
10896         }
10897       }
10898 
10899       BIND(&if_left_oddball);
10900       {
10901         if (var_type_feedback != nullptr) {
10902           var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kAny));
10903         }
10904 
10905         Label if_left_boolean(this);
10906         GotoIf(IsBooleanMap(left_map), &if_left_boolean);
10907         // {left} is either Null or Undefined. Check if {right} is
10908         // undetectable (which includes Null and Undefined).
10909         Branch(IsUndetectableMap(right_map), &if_equal, &if_notequal);
10910 
10911         BIND(&if_left_boolean);
10912         {
10913           // If {right} is a Boolean too, it must be a different Boolean.
10914           GotoIf(WordEqual(right_map, left_map), &if_notequal);
10915           // Otherwise, convert {left} to number and try again.
10916           var_left.Bind(LoadObjectField(left, Oddball::kToNumberOffset));
10917           Goto(&loop);
10918         }
10919       }
10920 
10921       BIND(&if_left_symbol);
10922       {
10923         Label if_right_receiver(this);
10924         GotoIf(IsJSReceiverInstanceType(right_type), &if_right_receiver);
10925         // {right} is not a JSReceiver and also not the same Symbol as {left},
10926         // so the result is "not equal".
10927         if (var_type_feedback != nullptr) {
10928           Label if_right_symbol(this);
10929           GotoIf(IsSymbolInstanceType(right_type), &if_right_symbol);
10930           var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kAny));
10931           Goto(&if_notequal);
10932 
10933           BIND(&if_right_symbol);
10934           {
10935             CombineFeedback(var_type_feedback,
10936                             CompareOperationFeedback::kSymbol);
10937             Goto(&if_notequal);
10938           }
10939         } else {
10940           Goto(&if_notequal);
10941         }
10942 
10943         BIND(&if_right_receiver);
10944         {
10945           // {left} is a Primitive and {right} is a JSReceiver, so swapping
10946           // the order is not observable.
10947           if (var_type_feedback != nullptr) {
10948             var_type_feedback->Bind(
10949                 SmiConstant(CompareOperationFeedback::kAny));
10950           }
10951           Goto(&use_symmetry);
10952         }
10953       }
10954 
10955       BIND(&if_left_receiver);
10956       {
10957         CSA_ASSERT(this, IsJSReceiverInstanceType(left_type));
10958         Label if_right_not_receiver(this);
10959         GotoIfNot(IsJSReceiverInstanceType(right_type), &if_right_not_receiver);
10960 
10961         // {left} and {right} are different JSReceiver references.
10962         CombineFeedback(var_type_feedback, CompareOperationFeedback::kReceiver);
10963         Goto(&if_notequal);
10964 
10965         BIND(&if_right_not_receiver);
10966         {
10967           if (var_type_feedback != nullptr) {
10968             var_type_feedback->Bind(
10969                 SmiConstant(CompareOperationFeedback::kAny));
10970           }
10971           Label if_right_null_or_undefined(this);
10972           GotoIf(IsUndetectableMap(right_map), &if_right_null_or_undefined);
10973 
10974           // {right} is a Primitive; convert {left} to Primitive too.
10975           Callable callable = CodeFactory::NonPrimitiveToPrimitive(isolate());
10976           var_left.Bind(CallStub(callable, context, left));
10977           Goto(&loop);
10978 
10979           BIND(&if_right_null_or_undefined);
10980           Branch(IsUndetectableMap(left_map), &if_equal, &if_notequal);
10981         }
10982       }
10983     }
10984 
10985     BIND(&do_right_stringtonumber);
10986     {
10987       var_right.Bind(CallBuiltin(Builtins::kStringToNumber, context, right));
10988       Goto(&loop);
10989     }
10990 
10991     BIND(&use_symmetry);
10992     {
10993       var_left.Bind(right);
10994       var_right.Bind(left);
10995       Goto(&loop);
10996     }
10997   }
10998 
10999   BIND(&do_float_comparison);
11000   {
11001     Branch(Float64Equal(var_left_float.value(), var_right_float.value()),
11002            &if_equal, &if_notequal);
11003   }
11004 
11005   BIND(&if_equal);
11006   {
11007     result.Bind(TrueConstant());
11008     Goto(&end);
11009   }
11010 
11011   BIND(&if_notequal);
11012   {
11013     result.Bind(FalseConstant());
11014     Goto(&end);
11015   }
11016 
11017   BIND(&end);
11018   return result.value();
11019 }
11020 
11021 Node* CodeStubAssembler::StrictEqual(Node* lhs, Node* rhs,
11022                                      Variable* var_type_feedback) {
11023   // Pseudo-code for the algorithm below:
11024   //
11025   // if (lhs == rhs) {
11026   //   if (lhs->IsHeapNumber()) return HeapNumber::cast(lhs)->value() != NaN;
11027   //   return true;
11028   // }
11029   // if (!lhs->IsSmi()) {
11030   //   if (lhs->IsHeapNumber()) {
11031   //     if (rhs->IsSmi()) {
11032   //       return Smi::ToInt(rhs) == HeapNumber::cast(lhs)->value();
11033   //     } else if (rhs->IsHeapNumber()) {
11034   //       return HeapNumber::cast(rhs)->value() ==
11035   //       HeapNumber::cast(lhs)->value();
11036   //     } else {
11037   //       return false;
11038   //     }
11039   //   } else {
11040   //     if (rhs->IsSmi()) {
11041   //       return false;
11042   //     } else {
11043   //       if (lhs->IsString()) {
11044   //         if (rhs->IsString()) {
11045   //           return %StringEqual(lhs, rhs);
11046   //         } else {
11047   //           return false;
11048   //         }
11049   //       } else if (lhs->IsBigInt()) {
11050   //         if (rhs->IsBigInt()) {
11051   //           return %BigIntEqualToBigInt(lhs, rhs);
11052   //         } else {
11053   //           return false;
11054   //         }
11055   //       } else {
11056   //         return false;
11057   //       }
11058   //     }
11059   //   }
11060   // } else {
11061   //   if (rhs->IsSmi()) {
11062   //     return false;
11063   //   } else {
11064   //     if (rhs->IsHeapNumber()) {
11065   //       return Smi::ToInt(lhs) == HeapNumber::cast(rhs)->value();
11066   //     } else {
11067   //       return false;
11068   //     }
11069   //   }
11070   // }
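  //
  // A few illustrative JavaScript cases covered by the pseudo-code above
  // (reference note added; not part of the original comment):
  //   1 === 1.0        // true:  Smi vs. HeapNumber holding the same value
  //   NaN === NaN      // false: NaN is not strictly equal to anything
  //   'foo' === 'foo'  // true:  distinct Strings, same character sequence
  //   10n === 10n      // true:  distinct BigInts, same value
  //   ({}) === ({})    // false: distinct references are never strictly equal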
11071 
11072   Label if_equal(this), if_notequal(this), end(this);
11073   VARIABLE(result, MachineRepresentation::kTagged);
11074 
11075   // Check if {lhs} and {rhs} refer to the same object.
11076   Label if_same(this), if_notsame(this);
11077   Branch(WordEqual(lhs, rhs), &if_same, &if_notsame);
11078 
11079   BIND(&if_same);
11080   {
11081     // The {lhs} and {rhs} reference the exact same value, yet we need special
11082     // treatment for HeapNumber, as NaN is not equal to NaN.
11083     if (var_type_feedback != nullptr) {
11084       var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kNone));
11085     }
11086     GenerateEqual_Same(lhs, &if_equal, &if_notequal, var_type_feedback);
11087   }
11088 
11089   BIND(&if_notsame);
11090   {
11091     // The {lhs} and {rhs} reference different objects, yet for Smi, HeapNumber,
11092     // BigInt and String they can still be considered equal.
11093 
11094     if (var_type_feedback != nullptr) {
11095       var_type_feedback->Bind(SmiConstant(CompareOperationFeedback::kAny));
11096     }
11097 
11098     // Check if {lhs} is a Smi or a HeapObject.
11099     Label if_lhsissmi(this), if_lhsisnotsmi(this);
11100     Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisnotsmi);
11101 
11102     BIND(&if_lhsisnotsmi);
11103     {
11104       // Load the map of {lhs}.
11105       Node* lhs_map = LoadMap(lhs);
11106 
11107       // Check if {lhs} is a HeapNumber.
11108       Label if_lhsisnumber(this), if_lhsisnotnumber(this);
11109       Branch(IsHeapNumberMap(lhs_map), &if_lhsisnumber, &if_lhsisnotnumber);
11110 
11111       BIND(&if_lhsisnumber);
11112       {
11113         // Check if {rhs} is a Smi or a HeapObject.
11114         Label if_rhsissmi(this), if_rhsisnotsmi(this);
11115         Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
11116 
11117         BIND(&if_rhsissmi);
11118         {
11119           // Convert {lhs} and {rhs} to floating point values.
11120           Node* lhs_value = LoadHeapNumberValue(lhs);
11121           Node* rhs_value = SmiToFloat64(rhs);
11122 
11123           if (var_type_feedback != nullptr) {
11124             var_type_feedback->Bind(
11125                 SmiConstant(CompareOperationFeedback::kNumber));
11126           }
11127 
11128           // Perform a floating point comparison of {lhs} and {rhs}.
11129           Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
11130         }
11131 
11132         BIND(&if_rhsisnotsmi);
11133         {
11134           // Load the map of {rhs}.
11135           Node* rhs_map = LoadMap(rhs);
11136 
11137           // Check if {rhs} is also a HeapNumber.
11138           Label if_rhsisnumber(this), if_rhsisnotnumber(this);
11139           Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber, &if_rhsisnotnumber);
11140 
11141           BIND(&if_rhsisnumber);
11142           {
11143             // Convert {lhs} and {rhs} to floating point values.
11144             Node* lhs_value = LoadHeapNumberValue(lhs);
11145             Node* rhs_value = LoadHeapNumberValue(rhs);
11146 
11147             if (var_type_feedback != nullptr) {
11148               var_type_feedback->Bind(
11149                   SmiConstant(CompareOperationFeedback::kNumber));
11150             }
11151 
11152             // Perform a floating point comparison of {lhs} and {rhs}.
11153             Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
11154           }
11155 
11156           BIND(&if_rhsisnotnumber);
11157           Goto(&if_notequal);
11158         }
11159       }
11160 
11161       BIND(&if_lhsisnotnumber);
11162       {
11163         // Check if {rhs} is a Smi or a HeapObject.
11164         Label if_rhsissmi(this), if_rhsisnotsmi(this);
11165         Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
11166 
11167         BIND(&if_rhsissmi);
11168         Goto(&if_notequal);
11169 
11170         BIND(&if_rhsisnotsmi);
11171         {
11172           // Load the instance type of {lhs}.
11173           Node* lhs_instance_type = LoadMapInstanceType(lhs_map);
11174 
11175           // Check if {lhs} is a String.
11176           Label if_lhsisstring(this), if_lhsisnotstring(this);
11177           Branch(IsStringInstanceType(lhs_instance_type), &if_lhsisstring,
11178                  &if_lhsisnotstring);
11179 
11180           BIND(&if_lhsisstring);
11181           {
11182             // Load the instance type of {rhs}.
11183             Node* rhs_instance_type = LoadInstanceType(rhs);
11184 
11185             // Check if {rhs} is also a String.
11186             Label if_rhsisstring(this, Label::kDeferred),
11187                 if_rhsisnotstring(this);
11188             Branch(IsStringInstanceType(rhs_instance_type), &if_rhsisstring,
11189                    &if_rhsisnotstring);
11190 
11191             BIND(&if_rhsisstring);
11192             {
11193               if (var_type_feedback != nullptr) {
11194                 TNode<Smi> lhs_feedback =
11195                     CollectFeedbackForString(lhs_instance_type);
11196                 TNode<Smi> rhs_feedback =
11197                     CollectFeedbackForString(rhs_instance_type);
11198                 var_type_feedback->Bind(SmiOr(lhs_feedback, rhs_feedback));
11199               }
11200               result.Bind(CallBuiltin(Builtins::kStringEqual,
11201                                       NoContextConstant(), lhs, rhs));
11202               Goto(&end);
11203             }
11204 
11205             BIND(&if_rhsisnotstring);
11206             Goto(&if_notequal);
11207           }
11208 
11209           BIND(&if_lhsisnotstring);
11210 
11211           // Check if {lhs} is a BigInt.
11212           Label if_lhsisbigint(this), if_lhsisnotbigint(this);
11213           Branch(IsBigIntInstanceType(lhs_instance_type), &if_lhsisbigint,
11214                  &if_lhsisnotbigint);
11215 
11216           BIND(&if_lhsisbigint);
11217           {
11218             // Load the instance type of {rhs}.
11219             Node* rhs_instance_type = LoadInstanceType(rhs);
11220 
11221             // Check if {rhs} is also a BigInt.
11222             Label if_rhsisbigint(this, Label::kDeferred),
11223                 if_rhsisnotbigint(this);
11224             Branch(IsBigIntInstanceType(rhs_instance_type), &if_rhsisbigint,
11225                    &if_rhsisnotbigint);
11226 
11227             BIND(&if_rhsisbigint);
11228             {
11229               if (var_type_feedback != nullptr) {
11230                 var_type_feedback->Bind(
11231                     SmiConstant(CompareOperationFeedback::kBigInt));
11232               }
11233               result.Bind(CallRuntime(Runtime::kBigIntEqualToBigInt,
11234                                       NoContextConstant(), lhs, rhs));
11235               Goto(&end);
11236             }
11237 
11238             BIND(&if_rhsisnotbigint);
11239             Goto(&if_notequal);
11240           }
11241 
11242           BIND(&if_lhsisnotbigint);
11243           if (var_type_feedback != nullptr) {
11244             // Load the instance type of {rhs}.
11245             Node* rhs_instance_type = LoadInstanceType(rhs);
11246 
11247             Label if_lhsissymbol(this), if_lhsisreceiver(this);
11248             GotoIf(IsJSReceiverInstanceType(lhs_instance_type),
11249                    &if_lhsisreceiver);
11250             Branch(IsSymbolInstanceType(lhs_instance_type), &if_lhsissymbol,
11251                    &if_notequal);
11252 
11253             BIND(&if_lhsisreceiver);
11254             {
11255               GotoIfNot(IsJSReceiverInstanceType(rhs_instance_type),
11256                         &if_notequal);
11257               var_type_feedback->Bind(
11258                   SmiConstant(CompareOperationFeedback::kReceiver));
11259               Goto(&if_notequal);
11260             }
11261 
11262             BIND(&if_lhsissymbol);
11263             {
11264               GotoIfNot(IsSymbolInstanceType(rhs_instance_type), &if_notequal);
11265               var_type_feedback->Bind(
11266                   SmiConstant(CompareOperationFeedback::kSymbol));
11267               Goto(&if_notequal);
11268             }
11269           } else {
11270             Goto(&if_notequal);
11271           }
11272         }
11273       }
11274     }
11275 
11276     BIND(&if_lhsissmi);
11277     {
11278       // We already know that {lhs} and {rhs} are not reference equal, and {lhs}
11279       // is a Smi; so {lhs} and {rhs} can only be strictly equal if {rhs} is a
11280       // HeapNumber with an equal floating point value.
11281 
11282       // Check if {rhs} is a Smi or a HeapObject.
11283       Label if_rhsissmi(this), if_rhsisnotsmi(this);
11284       Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
11285 
11286       BIND(&if_rhsissmi);
11287       if (var_type_feedback != nullptr) {
11288         var_type_feedback->Bind(
11289             SmiConstant(CompareOperationFeedback::kSignedSmall));
11290       }
11291       Goto(&if_notequal);
11292 
11293       BIND(&if_rhsisnotsmi);
11294       {
11295         // Load the map of the {rhs}.
11296         Node* rhs_map = LoadMap(rhs);
11297 
11298         // The {rhs} could be a HeapNumber with the same value as {lhs}.
11299         Label if_rhsisnumber(this), if_rhsisnotnumber(this);
11300         Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber, &if_rhsisnotnumber);
11301 
11302         BIND(&if_rhsisnumber);
11303         {
11304           // Convert {lhs} and {rhs} to floating point values.
11305           Node* lhs_value = SmiToFloat64(lhs);
11306           Node* rhs_value = LoadHeapNumberValue(rhs);
11307 
11308           if (var_type_feedback != nullptr) {
11309             var_type_feedback->Bind(
11310                 SmiConstant(CompareOperationFeedback::kNumber));
11311           }
11312 
11313           // Perform a floating point comparison of {lhs} and {rhs}.
11314           Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
11315         }
11316 
11317         BIND(&if_rhsisnotnumber);
11318         Goto(&if_notequal);
11319       }
11320     }
11321   }
11322 
11323   BIND(&if_equal);
11324   {
11325     result.Bind(TrueConstant());
11326     Goto(&end);
11327   }
11328 
11329   BIND(&if_notequal);
11330   {
11331     result.Bind(FalseConstant());
11332     Goto(&end);
11333   }
11334 
11335   BIND(&end);
11336   return result.value();
11337 }
11338 
11339 // ECMA#sec-samevalue
11340 // This algorithm differs from the Strict Equality Comparison Algorithm in its
11341 // treatment of signed zeroes and NaNs.
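// For example (illustrative note added):
//   Object.is(NaN, NaN)  // true,  whereas NaN === NaN is false
//   Object.is(+0, -0)    // false, whereas +0 === -0 is true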
11342 void CodeStubAssembler::BranchIfSameValue(Node* lhs, Node* rhs, Label* if_true,
11343                                           Label* if_false) {
11344   VARIABLE(var_lhs_value, MachineRepresentation::kFloat64);
11345   VARIABLE(var_rhs_value, MachineRepresentation::kFloat64);
11346   Label do_fcmp(this);
11347 
11348   // Immediately jump to {if_true} if {lhs} == {rhs}, because - unlike
11349   // StrictEqual - SameValue considers two NaNs to be equal.
11350   GotoIf(WordEqual(lhs, rhs), if_true);
11351 
11352   // Check if the {lhs} is a Smi.
11353   Label if_lhsissmi(this), if_lhsisheapobject(this);
11354   Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisheapobject);
11355 
11356   BIND(&if_lhsissmi);
11357   {
11358     // Since {lhs} is a Smi, the comparison can only yield true
11359     // iff the {rhs} is a HeapNumber with the same float64 value.
11360     Branch(TaggedIsSmi(rhs), if_false, [&] {
11361       GotoIfNot(IsHeapNumber(rhs), if_false);
11362       var_lhs_value.Bind(SmiToFloat64(lhs));
11363       var_rhs_value.Bind(LoadHeapNumberValue(rhs));
11364       Goto(&do_fcmp);
11365     });
11366   }
11367 
11368   BIND(&if_lhsisheapobject);
11369   {
11370     // Check if the {rhs} is a Smi.
11371     Branch(TaggedIsSmi(rhs),
11372            [&] {
11373              // Since {rhs} is a Smi, the comparison can only yield true
11374              // iff the {lhs} is a HeapNumber with the same float64 value.
11375              GotoIfNot(IsHeapNumber(lhs), if_false);
11376              var_lhs_value.Bind(LoadHeapNumberValue(lhs));
11377              var_rhs_value.Bind(SmiToFloat64(rhs));
11378              Goto(&do_fcmp);
11379            },
11380            [&] {
11381              // Now this can only yield true if either both {lhs} and {rhs} are
11382              // HeapNumbers with the same value, or both are Strings with the
11383              // same character sequence, or both are BigInts with the same
11384              // value.
11385              Label if_lhsisheapnumber(this), if_lhsisstring(this),
11386                  if_lhsisbigint(this);
11387              Node* const lhs_map = LoadMap(lhs);
11388              GotoIf(IsHeapNumberMap(lhs_map), &if_lhsisheapnumber);
11389              Node* const lhs_instance_type = LoadMapInstanceType(lhs_map);
11390              GotoIf(IsStringInstanceType(lhs_instance_type), &if_lhsisstring);
11391              Branch(IsBigIntInstanceType(lhs_instance_type), &if_lhsisbigint,
11392                     if_false);
11393 
11394              BIND(&if_lhsisheapnumber);
11395              {
11396                GotoIfNot(IsHeapNumber(rhs), if_false);
11397                var_lhs_value.Bind(LoadHeapNumberValue(lhs));
11398                var_rhs_value.Bind(LoadHeapNumberValue(rhs));
11399                Goto(&do_fcmp);
11400              }
11401 
11402              BIND(&if_lhsisstring);
11403              {
11404                // Now we can only yield true if {rhs} is also a String
11405                // with the same sequence of characters.
11406                GotoIfNot(IsString(rhs), if_false);
11407                Node* const result = CallBuiltin(Builtins::kStringEqual,
11408                                                 NoContextConstant(), lhs, rhs);
11409                Branch(IsTrue(result), if_true, if_false);
11410              }
11411 
11412              BIND(&if_lhsisbigint);
11413              {
11414                GotoIfNot(IsBigInt(rhs), if_false);
11415                Node* const result = CallRuntime(Runtime::kBigIntEqualToBigInt,
11416                                                 NoContextConstant(), lhs, rhs);
11417                Branch(IsTrue(result), if_true, if_false);
11418              }
11419            });
11420   }
11421 
11422   BIND(&do_fcmp);
11423   {
11424     Node* const lhs_value = var_lhs_value.value();
11425     Node* const rhs_value = var_rhs_value.value();
11426 
11427     Label if_equal(this), if_notequal(this);
11428     Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
11429 
11430     BIND(&if_equal);
11431     {
11432       // We still need to handle the case when {lhs} and {rhs} are -0.0 and
11433       // 0.0 (or vice versa). Compare the high word to
11434       // distinguish between the two.
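      // (-0.0 and +0.0 differ only in the sign bit, which lives in the upper
      // 32 bits of the IEEE 754 double representation, so comparing the high
      // words is sufficient; note added.)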
11435       Node* const lhs_hi_word = Float64ExtractHighWord32(lhs_value);
11436       Node* const rhs_hi_word = Float64ExtractHighWord32(rhs_value);
11437 
11438       // If x is +0 and y is -0, return false.
11439       // If x is -0 and y is +0, return false.
11440       Branch(Word32Equal(lhs_hi_word, rhs_hi_word), if_true, if_false);
11441     }
11442 
11443     BIND(&if_notequal);
11444     {
11445       // Return true iff both {rhs} and {lhs} are NaN.
11446       GotoIf(Float64Equal(lhs_value, lhs_value), if_false);
11447       Branch(Float64Equal(rhs_value, rhs_value), if_false, if_true);
11448     }
11449   }
11450 }
11451 
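// Summary (note added): walks the prototype chain with the fast property and
// element lookups defined below; proxies go to the ProxyHasProperty builtin
// (or back to the runtime for the for-in case), and anything the fast path
// cannot decide falls back to the runtime as well.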
11452 TNode<Oddball> CodeStubAssembler::HasProperty(SloppyTNode<Context> context,
11453                                               SloppyTNode<Object> object,
11454                                               SloppyTNode<Object> key,
11455                                               HasPropertyLookupMode mode) {
11456   Label call_runtime(this, Label::kDeferred), return_true(this),
11457       return_false(this), end(this), if_proxy(this, Label::kDeferred);
11458 
11459   CodeStubAssembler::LookupInHolder lookup_property_in_holder =
11460       [this, &return_true](Node* receiver, Node* holder, Node* holder_map,
11461                            Node* holder_instance_type, Node* unique_name,
11462                            Label* next_holder, Label* if_bailout) {
11463         TryHasOwnProperty(holder, holder_map, holder_instance_type, unique_name,
11464                           &return_true, next_holder, if_bailout);
11465       };
11466 
11467   CodeStubAssembler::LookupInHolder lookup_element_in_holder =
11468       [this, &return_true, &return_false](
11469           Node* receiver, Node* holder, Node* holder_map,
11470           Node* holder_instance_type, Node* index, Label* next_holder,
11471           Label* if_bailout) {
11472         TryLookupElement(holder, holder_map, holder_instance_type, index,
11473                          &return_true, &return_false, next_holder, if_bailout);
11474       };
11475 
11476   TryPrototypeChainLookup(object, key, lookup_property_in_holder,
11477                           lookup_element_in_holder, &return_false,
11478                           &call_runtime, &if_proxy);
11479 
11480   TVARIABLE(Oddball, result);
11481 
11482   BIND(&if_proxy);
11483   {
11484     TNode<Name> name = ToName(context, key);
11485     switch (mode) {
11486       case kHasProperty:
11487         GotoIf(IsPrivateSymbol(name), &return_false);
11488 
11489         result = CAST(
11490             CallBuiltin(Builtins::kProxyHasProperty, context, object, name));
11491         Goto(&end);
11492         break;
11493       case kForInHasProperty:
11494         Goto(&call_runtime);
11495         break;
11496     }
11497   }
11498 
11499   BIND(&return_true);
11500   {
11501     result = TrueConstant();
11502     Goto(&end);
11503   }
11504 
11505   BIND(&return_false);
11506   {
11507     result = FalseConstant();
11508     Goto(&end);
11509   }
11510 
11511   BIND(&call_runtime);
11512   {
11513     Runtime::FunctionId fallback_runtime_function_id;
11514     switch (mode) {
11515       case kHasProperty:
11516         fallback_runtime_function_id = Runtime::kHasProperty;
11517         break;
11518       case kForInHasProperty:
11519         fallback_runtime_function_id = Runtime::kForInHasProperty;
11520         break;
11521     }
11522 
11523     result =
11524         CAST(CallRuntime(fallback_runtime_function_id, context, object, key));
11525     Goto(&end);
11526   }
11527 
11528   BIND(&end);
11529   CSA_ASSERT(this, IsBoolean(result.value()));
11530   return result.value();
11531 }
11532 
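// Implements the typeof operator (ES #sec-typeof-operator). Rough mapping
// (reference note added):
//   Smi / HeapNumber                      -> "number"
//   Oddball (undefined, null, booleans)   -> its Oddball::kTypeOfOffset value
//   callable, non-undetectable map        -> "function"
//   undetectable map (e.g. document.all)  -> "undefined"
//   other JSReceiver                      -> "object"
//   String / BigInt / Symbol              -> "string" / "bigint" / "symbol"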
11533 Node* CodeStubAssembler::Typeof(Node* value) {
11534   VARIABLE(result_var, MachineRepresentation::kTagged);
11535 
11536   Label return_number(this, Label::kDeferred), if_oddball(this),
11537       return_function(this), return_undefined(this), return_object(this),
11538       return_string(this), return_bigint(this), return_result(this);
11539 
11540   GotoIf(TaggedIsSmi(value), &return_number);
11541 
11542   Node* map = LoadMap(value);
11543 
11544   GotoIf(IsHeapNumberMap(map), &return_number);
11545 
11546   Node* instance_type = LoadMapInstanceType(map);
11547 
11548   GotoIf(InstanceTypeEqual(instance_type, ODDBALL_TYPE), &if_oddball);
11549 
11550   Node* callable_or_undetectable_mask = Word32And(
11551       LoadMapBitField(map),
11552       Int32Constant(Map::IsCallableBit::kMask | Map::IsUndetectableBit::kMask));
11553 
11554   GotoIf(Word32Equal(callable_or_undetectable_mask,
11555                      Int32Constant(Map::IsCallableBit::kMask)),
11556          &return_function);
11557 
11558   GotoIfNot(Word32Equal(callable_or_undetectable_mask, Int32Constant(0)),
11559             &return_undefined);
11560 
11561   GotoIf(IsJSReceiverInstanceType(instance_type), &return_object);
11562 
11563   GotoIf(IsStringInstanceType(instance_type), &return_string);
11564 
11565   GotoIf(IsBigIntInstanceType(instance_type), &return_bigint);
11566 
11567   CSA_ASSERT(this, InstanceTypeEqual(instance_type, SYMBOL_TYPE));
11568   result_var.Bind(HeapConstant(isolate()->factory()->symbol_string()));
11569   Goto(&return_result);
11570 
11571   BIND(&return_number);
11572   {
11573     result_var.Bind(HeapConstant(isolate()->factory()->number_string()));
11574     Goto(&return_result);
11575   }
11576 
11577   BIND(&if_oddball);
11578   {
11579     Node* type = LoadObjectField(value, Oddball::kTypeOfOffset);
11580     result_var.Bind(type);
11581     Goto(&return_result);
11582   }
11583 
11584   BIND(&return_function);
11585   {
11586     result_var.Bind(HeapConstant(isolate()->factory()->function_string()));
11587     Goto(&return_result);
11588   }
11589 
11590   BIND(&return_undefined);
11591   {
11592     result_var.Bind(HeapConstant(isolate()->factory()->undefined_string()));
11593     Goto(&return_result);
11594   }
11595 
11596   BIND(&return_object);
11597   {
11598     result_var.Bind(HeapConstant(isolate()->factory()->object_string()));
11599     Goto(&return_result);
11600   }
11601 
11602   BIND(&return_string);
11603   {
11604     result_var.Bind(HeapConstant(isolate()->factory()->string_string()));
11605     Goto(&return_result);
11606   }
11607 
11608   BIND(&return_bigint);
11609   {
11610     result_var.Bind(HeapConstant(isolate()->factory()->bigint_string()));
11611     Goto(&return_result);
11612   }
11613 
11614   BIND(&return_result);
11615   return result_var.value();
11616 }
11617 
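// Loads the [[Prototype]] of {active_function} (the super constructor) and
// throws via Runtime::kThrowNotSuperConstructor if it is not a constructor
// (summary note added).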
11618 TNode<Object> CodeStubAssembler::GetSuperConstructor(
11619     SloppyTNode<Context> context, SloppyTNode<JSFunction> active_function) {
11620   Label is_not_constructor(this, Label::kDeferred), out(this);
11621   TVARIABLE(Object, result);
11622 
11623   TNode<Map> map = LoadMap(active_function);
11624   TNode<Object> prototype = LoadMapPrototype(map);
11625   TNode<Map> prototype_map = LoadMap(CAST(prototype));
11626   GotoIfNot(IsConstructorMap(prototype_map), &is_not_constructor);
11627 
11628   result = prototype;
11629   Goto(&out);
11630 
11631   BIND(&is_not_constructor);
11632   {
11633     CallRuntime(Runtime::kThrowNotSuperConstructor, context, prototype,
11634                 active_function);
11635     Unreachable();
11636   }
11637 
11638   BIND(&out);
11639   return result.value();
11640 }
11641 
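// ES #sec-speciesconstructor; the numbered spec steps are annotated inline.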
11642 TNode<Object> CodeStubAssembler::SpeciesConstructor(
11643     SloppyTNode<Context> context, SloppyTNode<Object> object,
11644     SloppyTNode<Object> default_constructor) {
11645   Isolate* isolate = this->isolate();
11646   TVARIABLE(Object, var_result, default_constructor);
11647 
11648   // 2. Let C be ? Get(O, "constructor").
11649   TNode<Object> constructor =
11650       GetProperty(context, object, isolate->factory()->constructor_string());
11651 
11652   // 3. If C is undefined, return defaultConstructor.
11653   Label out(this);
11654   GotoIf(IsUndefined(constructor), &out);
11655 
11656   // 4. If Type(C) is not Object, throw a TypeError exception.
11657   ThrowIfNotJSReceiver(context, constructor,
11658                        MessageTemplate::kConstructorNotReceiver);
11659 
11660   // 5. Let S be ? Get(C, @@species).
11661   TNode<Object> species =
11662       GetProperty(context, constructor, isolate->factory()->species_symbol());
11663 
11664   // 6. If S is either undefined or null, return defaultConstructor.
11665   GotoIf(IsNullOrUndefined(species), &out);
11666 
11667   // 7. If IsConstructor(S) is true, return S.
11668   Label throw_error(this);
11669   GotoIf(TaggedIsSmi(species), &throw_error);
11670   GotoIfNot(IsConstructorMap(LoadMap(CAST(species))), &throw_error);
11671   var_result = species;
11672   Goto(&out);
11673 
11674   // 8. Throw a TypeError exception.
11675   BIND(&throw_error);
11676   ThrowTypeError(context, MessageTemplate::kSpeciesNotConstructor);
11677 
11678   BIND(&out);
11679   return var_result.value();
11680 }
11681 
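// Implements the instanceof operator: looks up @@hasInstance on {callable},
// calls it when present (with a fast path for the default
// Function.prototype[@@hasInstance]), and otherwise falls back to
// OrdinaryHasInstance; non-receivers and non-callables throw a TypeError
// (summary note added).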
11682 Node* CodeStubAssembler::InstanceOf(Node* object, Node* callable,
11683                                     Node* context) {
11684   VARIABLE(var_result, MachineRepresentation::kTagged);
11685   Label if_notcallable(this, Label::kDeferred),
11686       if_notreceiver(this, Label::kDeferred), if_otherhandler(this),
11687       if_nohandler(this, Label::kDeferred), return_true(this),
11688       return_false(this), return_result(this, &var_result);
11689 
11690   // Ensure that the {callable} is actually a JSReceiver.
11691   GotoIf(TaggedIsSmi(callable), &if_notreceiver);
11692   GotoIfNot(IsJSReceiver(callable), &if_notreceiver);
11693 
11694   // Load the @@hasInstance property from {callable}.
11695   Node* inst_of_handler =
11696       GetProperty(context, callable, HasInstanceSymbolConstant());
11697 
11698   // Optimize for the likely case where {inst_of_handler} is the builtin
11699   // Function.prototype[@@hasInstance] method, and emit a direct call in
11700   // that case without any additional checking.
11701   Node* native_context = LoadNativeContext(context);
11702   Node* function_has_instance =
11703       LoadContextElement(native_context, Context::FUNCTION_HAS_INSTANCE_INDEX);
11704   GotoIfNot(WordEqual(inst_of_handler, function_has_instance),
11705             &if_otherhandler);
11706   {
11707     // TODO(6786): A direct call to a TFJ builtin breaks the lazy
11708     // deserialization mechanism in two ways: first, we always pass in a
11709     // callable containing the DeserializeLazy code object (assuming that
11710     // FunctionPrototypeHasInstance is lazy). Second, a direct call (without
11711     // going through CodeFactory::Call) to DeserializeLazy will not initialize
11712     // new_target properly. For now we can avoid this by marking
11713     // FunctionPrototypeHasInstance as eager, but this should be fixed at some
11714     // point.
11715     //
11716     // Call to Function.prototype[@@hasInstance] directly.
11717     Callable builtin(BUILTIN_CODE(isolate(), FunctionPrototypeHasInstance),
11718                      CallTrampolineDescriptor{});
11719     Node* result = CallJS(builtin, context, inst_of_handler, callable, object);
11720     var_result.Bind(result);
11721     Goto(&return_result);
11722   }
11723 
11724   BIND(&if_otherhandler);
11725   {
11726     // Check if there's actually an {inst_of_handler}.
11727     GotoIf(IsNull(inst_of_handler), &if_nohandler);
11728     GotoIf(IsUndefined(inst_of_handler), &if_nohandler);
11729 
11730     // Call the {inst_of_handler} for {callable} and {object}.
11731     Node* result = CallJS(
11732         CodeFactory::Call(isolate(), ConvertReceiverMode::kNotNullOrUndefined),
11733         context, inst_of_handler, callable, object);
11734 
11735     // Convert the {result} to a Boolean.
11736     BranchIfToBooleanIsTrue(result, &return_true, &return_false);
11737   }
11738 
11739   BIND(&if_nohandler);
11740   {
11741     // Ensure that the {callable} is actually Callable.
11742     GotoIfNot(IsCallable(callable), &if_notcallable);
11743 
11744     // Use the OrdinaryHasInstance algorithm.
11745     Node* result =
11746         CallBuiltin(Builtins::kOrdinaryHasInstance, context, callable, object);
11747     var_result.Bind(result);
11748     Goto(&return_result);
11749   }
11750 
11751   BIND(&if_notcallable);
11752   { ThrowTypeError(context, MessageTemplate::kNonCallableInInstanceOfCheck); }
11753 
11754   BIND(&if_notreceiver);
11755   { ThrowTypeError(context, MessageTemplate::kNonObjectInInstanceOfCheck); }
11756 
11757   BIND(&return_true);
11758   var_result.Bind(TrueConstant());
11759   Goto(&return_result);
11760 
11761   BIND(&return_false);
11762   var_result.Bind(FalseConstant());
11763   Goto(&return_result);
11764 
11765   BIND(&return_result);
11766   return var_result.value();
11767 }
11768 
11769 TNode<Number> CodeStubAssembler::NumberInc(SloppyTNode<Number> value) {
11770   TVARIABLE(Number, var_result);
11771   TVARIABLE(Float64T, var_finc_value);
11772   Label if_issmi(this), if_isnotsmi(this), do_finc(this), end(this);
11773   Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi);
11774 
11775   BIND(&if_issmi);
11776   {
11777     Label if_overflow(this);
11778     TNode<Smi> smi_value = CAST(value);
11779     TNode<Smi> one = SmiConstant(1);
11780     var_result = TrySmiAdd(smi_value, one, &if_overflow);
11781     Goto(&end);
11782 
11783     BIND(&if_overflow);
11784     {
11785       var_finc_value = SmiToFloat64(smi_value);
11786       Goto(&do_finc);
11787     }
11788   }
11789 
11790   BIND(&if_isnotsmi);
11791   {
11792     TNode<HeapNumber> heap_number_value = CAST(value);
11793 
11794     // Load the HeapNumber value.
11795     var_finc_value = LoadHeapNumberValue(heap_number_value);
11796     Goto(&do_finc);
11797   }
11798 
11799   BIND(&do_finc);
11800   {
11801     TNode<Float64T> finc_value = var_finc_value.value();
11802     TNode<Float64T> one = Float64Constant(1.0);
11803     TNode<Float64T> finc_result = Float64Add(finc_value, one);
11804     var_result = AllocateHeapNumberWithValue(finc_result);
11805     Goto(&end);
11806   }
11807 
11808   BIND(&end);
11809   return var_result.value();
11810 }
11811 
11812 TNode<Number> CodeStubAssembler::NumberDec(SloppyTNode<Number> value) {
11813   TVARIABLE(Number, var_result);
11814   TVARIABLE(Float64T, var_fdec_value);
11815   Label if_issmi(this), if_isnotsmi(this), do_fdec(this), end(this);
11816   Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi);
11817 
11818   BIND(&if_issmi);
11819   {
11820     TNode<Smi> smi_value = CAST(value);
11821     TNode<Smi> one = SmiConstant(1);
11822     Label if_overflow(this);
11823     var_result = TrySmiSub(smi_value, one, &if_overflow);
11824     Goto(&end);
11825 
11826     BIND(&if_overflow);
11827     {
11828       var_fdec_value = SmiToFloat64(smi_value);
11829       Goto(&do_fdec);
11830     }
11831   }
11832 
11833   BIND(&if_isnotsmi);
11834   {
11835     TNode<HeapNumber> heap_number_value = CAST(value);
11836 
11837     // Load the HeapNumber value.
11838     var_fdec_value = LoadHeapNumberValue(heap_number_value);
11839     Goto(&do_fdec);
11840   }
11841 
11842   BIND(&do_fdec);
11843   {
11844     TNode<Float64T> fdec_value = var_fdec_value.value();
11845     TNode<Float64T> minus_one = Float64Constant(-1.0);
11846     TNode<Float64T> fdec_result = Float64Add(fdec_value, minus_one);
11847     var_result = AllocateHeapNumberWithValue(fdec_result);
11848     Goto(&end);
11849   }
11850 
11851   BIND(&end);
11852   return var_result.value();
11853 }
11854 
11855 TNode<Number> CodeStubAssembler::NumberAdd(SloppyTNode<Number> a,
11856                                            SloppyTNode<Number> b) {
11857   TVARIABLE(Number, var_result);
11858   Label float_add(this, Label::kDeferred), end(this);
11859   GotoIf(TaggedIsNotSmi(a), &float_add);
11860   GotoIf(TaggedIsNotSmi(b), &float_add);
11861 
11862   // Try fast Smi addition first.
11863   var_result = TrySmiAdd(CAST(a), CAST(b), &float_add);
11864   Goto(&end);
11865 
11866   BIND(&float_add);
11867   {
11868     var_result = ChangeFloat64ToTagged(
11869         Float64Add(ChangeNumberToFloat64(a), ChangeNumberToFloat64(b)));
11870     Goto(&end);
11871   }
11872 
11873   BIND(&end);
11874   return var_result.value();
11875 }
11876 
11877 TNode<Number> CodeStubAssembler::NumberSub(SloppyTNode<Number> a,
11878                                            SloppyTNode<Number> b) {
11879   TVARIABLE(Number, var_result);
11880   Label float_sub(this, Label::kDeferred), end(this);
11881   GotoIf(TaggedIsNotSmi(a), &float_sub);
11882   GotoIf(TaggedIsNotSmi(b), &float_sub);
11883 
11884   // Try fast Smi subtraction first.
11885   var_result = TrySmiSub(CAST(a), CAST(b), &float_sub);
11886   Goto(&end);
11887 
11888   BIND(&float_sub);
11889   {
11890     var_result = ChangeFloat64ToTagged(
11891         Float64Sub(ChangeNumberToFloat64(a), ChangeNumberToFloat64(b)));
11892     Goto(&end);
11893   }
11894 
11895   BIND(&end);
11896   return var_result.value();
11897 }
11898 
11899 void CodeStubAssembler::GotoIfNotNumber(Node* input, Label* is_not_number) {
11900   Label is_number(this);
11901   GotoIf(TaggedIsSmi(input), &is_number);
11902   Branch(IsHeapNumber(input), &is_number, is_not_number);
11903   BIND(&is_number);
11904 }
11905 
11906 void CodeStubAssembler::GotoIfNumber(Node* input, Label* is_number) {
11907   GotoIf(TaggedIsSmi(input), is_number);
11908   GotoIf(IsHeapNumber(input), is_number);
11909 }
11910 
11911 TNode<Number> CodeStubAssembler::BitwiseOp(Node* left32, Node* right32,
11912                                            Operation bitwise_op) {
11913   switch (bitwise_op) {
11914     case Operation::kBitwiseAnd:
11915       return ChangeInt32ToTagged(Signed(Word32And(left32, right32)));
11916     case Operation::kBitwiseOr:
11917       return ChangeInt32ToTagged(Signed(Word32Or(left32, right32)));
11918     case Operation::kBitwiseXor:
11919       return ChangeInt32ToTagged(Signed(Word32Xor(left32, right32)));
11920     case Operation::kShiftLeft:
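      // JavaScript shift operators use only the low five bits of the shift
      // count, so mask explicitly on targets whose machine shift instruction
      // does not already do so (note added).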
11921       if (!Word32ShiftIsSafe()) {
11922         right32 = Word32And(right32, Int32Constant(0x1F));
11923       }
11924       return ChangeInt32ToTagged(Signed(Word32Shl(left32, right32)));
11925     case Operation::kShiftRight:
11926       if (!Word32ShiftIsSafe()) {
11927         right32 = Word32And(right32, Int32Constant(0x1F));
11928       }
11929       return ChangeInt32ToTagged(Signed(Word32Sar(left32, right32)));
11930     case Operation::kShiftRightLogical:
11931       if (!Word32ShiftIsSafe()) {
11932         right32 = Word32And(right32, Int32Constant(0x1F));
11933       }
11934       return ChangeUint32ToTagged(Unsigned(Word32Shr(left32, right32)));
11935     default:
11936       break;
11937   }
11938   UNREACHABLE();
11939 }
11940 
11941 // ES #sec-createarrayiterator
11942 TNode<JSArrayIterator> CodeStubAssembler::CreateArrayIterator(
11943     TNode<Context> context, TNode<Object> object, IterationKind kind) {
11944   TNode<Context> native_context = LoadNativeContext(context);
11945   TNode<Map> iterator_map = CAST(LoadContextElement(
11946       native_context, Context::INITIAL_ARRAY_ITERATOR_MAP_INDEX));
11947   Node* iterator = Allocate(JSArrayIterator::kSize);
11948   StoreMapNoWriteBarrier(iterator, iterator_map);
11949   StoreObjectFieldRoot(iterator, JSArrayIterator::kPropertiesOrHashOffset,
11950                        Heap::kEmptyFixedArrayRootIndex);
11951   StoreObjectFieldRoot(iterator, JSArrayIterator::kElementsOffset,
11952                        Heap::kEmptyFixedArrayRootIndex);
11953   StoreObjectFieldNoWriteBarrier(
11954       iterator, JSArrayIterator::kIteratedObjectOffset, object);
11955   StoreObjectFieldNoWriteBarrier(iterator, JSArrayIterator::kNextIndexOffset,
11956                                  SmiConstant(0));
11957   StoreObjectFieldNoWriteBarrier(
11958       iterator, JSArrayIterator::kKindOffset,
11959       SmiConstant(Smi::FromInt(static_cast<int>(kind))));
11960   return CAST(iterator);
11961 }
11962 
11963 Node* CodeStubAssembler::AllocateJSIteratorResult(Node* context, Node* value,
11964                                                   Node* done) {
11965   CSA_ASSERT(this, IsBoolean(done));
11966   Node* native_context = LoadNativeContext(context);
11967   Node* map =
11968       LoadContextElement(native_context, Context::ITERATOR_RESULT_MAP_INDEX);
11969   Node* result = Allocate(JSIteratorResult::kSize);
11970   StoreMapNoWriteBarrier(result, map);
11971   StoreObjectFieldRoot(result, JSIteratorResult::kPropertiesOrHashOffset,
11972                        Heap::kEmptyFixedArrayRootIndex);
11973   StoreObjectFieldRoot(result, JSIteratorResult::kElementsOffset,
11974                        Heap::kEmptyFixedArrayRootIndex);
11975   StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kValueOffset, value);
11976   StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kDoneOffset, done);
11977   return result;
11978 }
11979 
11980 Node* CodeStubAssembler::AllocateJSIteratorResultForEntry(Node* context,
11981                                                           Node* key,
11982                                                           Node* value) {
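  // The FixedArray backing store, the JSArray and the JSIteratorResult are
  // carved out of one contiguous allocation below and initialized field by
  // field (note added).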
11983   Node* native_context = LoadNativeContext(context);
11984   Node* length = SmiConstant(2);
11985   int const elements_size = FixedArray::SizeFor(2);
11986   TNode<FixedArray> elements = UncheckedCast<FixedArray>(
11987       Allocate(elements_size + JSArray::kSize + JSIteratorResult::kSize));
11988   StoreObjectFieldRoot(elements, FixedArray::kMapOffset,
11989                        Heap::kFixedArrayMapRootIndex);
11990   StoreObjectFieldNoWriteBarrier(elements, FixedArray::kLengthOffset, length);
11991   StoreFixedArrayElement(elements, 0, key);
11992   StoreFixedArrayElement(elements, 1, value);
11993   Node* array_map = LoadContextElement(
11994       native_context, Context::JS_ARRAY_PACKED_ELEMENTS_MAP_INDEX);
11995   Node* array = InnerAllocate(elements, elements_size);
11996   StoreMapNoWriteBarrier(array, array_map);
11997   StoreObjectFieldRoot(array, JSArray::kPropertiesOrHashOffset,
11998                        Heap::kEmptyFixedArrayRootIndex);
11999   StoreObjectFieldNoWriteBarrier(array, JSArray::kElementsOffset, elements);
12000   StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
12001   Node* iterator_map =
12002       LoadContextElement(native_context, Context::ITERATOR_RESULT_MAP_INDEX);
12003   Node* result = InnerAllocate(array, JSArray::kSize);
12004   StoreMapNoWriteBarrier(result, iterator_map);
12005   StoreObjectFieldRoot(result, JSIteratorResult::kPropertiesOrHashOffset,
12006                        Heap::kEmptyFixedArrayRootIndex);
12007   StoreObjectFieldRoot(result, JSIteratorResult::kElementsOffset,
12008                        Heap::kEmptyFixedArrayRootIndex);
12009   StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kValueOffset, array);
12010   StoreObjectFieldRoot(result, JSIteratorResult::kDoneOffset,
12011                        Heap::kFalseValueRootIndex);
12012   return result;
12013 }
12014 
12015 Node* CodeStubAssembler::ArraySpeciesCreate(TNode<Context> context,
12016                                             TNode<Object> o,
12017                                             TNode<Number> len) {
12018   Node* constructor =
12019       CallRuntime(Runtime::kArraySpeciesConstructor, context, o);
12020   return ConstructJS(CodeFactory::Construct(isolate()), context, constructor,
12021                      len);
12022 }
12023 
12024 Node* CodeStubAssembler::IsDetachedBuffer(Node* buffer) {
12025   CSA_ASSERT(this, HasInstanceType(buffer, JS_ARRAY_BUFFER_TYPE));
12026 
12027   Node* buffer_bit_field = LoadObjectField(
12028       buffer, JSArrayBuffer::kBitFieldOffset, MachineType::Uint32());
12029   return IsSetWord32<JSArrayBuffer::WasNeutered>(buffer_bit_field);
12030 }
12031 
12032 void CodeStubAssembler::ThrowIfArrayBufferIsDetached(
12033     SloppyTNode<Context> context, TNode<JSArrayBuffer> array_buffer,
12034     const char* method_name) {
12035   Label if_detached(this, Label::kDeferred), if_not_detached(this);
12036   Branch(IsDetachedBuffer(array_buffer), &if_detached, &if_not_detached);
12037   BIND(&if_detached);
12038   ThrowTypeError(context, MessageTemplate::kDetachedOperation, method_name);
12039   BIND(&if_not_detached);
12040 }
12041 
12042 void CodeStubAssembler::ThrowIfArrayBufferViewBufferIsDetached(
12043     SloppyTNode<Context> context, TNode<JSArrayBufferView> array_buffer_view,
12044     const char* method_name) {
12045   TNode<JSArrayBuffer> buffer = LoadArrayBufferViewBuffer(array_buffer_view);
12046   ThrowIfArrayBufferIsDetached(context, buffer, method_name);
12047 }
12048 
12049 TNode<JSArrayBuffer> CodeStubAssembler::LoadArrayBufferViewBuffer(
12050     TNode<JSArrayBufferView> array_buffer_view) {
12051   return LoadObjectField<JSArrayBuffer>(array_buffer_view,
12052                                         JSArrayBufferView::kBufferOffset);
12053 }
12054 
12055 TNode<RawPtrT> CodeStubAssembler::LoadArrayBufferBackingStore(
12056     TNode<JSArrayBuffer> array_buffer) {
12057   return LoadObjectField<RawPtrT>(array_buffer,
12058                                   JSArrayBuffer::kBackingStoreOffset);
12059 }
12060 
12061 CodeStubArguments::CodeStubArguments(
12062     CodeStubAssembler* assembler, Node* argc, Node* fp,
12063     CodeStubAssembler::ParameterMode param_mode, ReceiverMode receiver_mode)
12064     : assembler_(assembler),
12065       argc_mode_(param_mode),
12066       receiver_mode_(receiver_mode),
12067       argc_(argc),
12068       arguments_(),
12069       fp_(fp != nullptr ? fp : assembler_->LoadFramePointer()) {
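  // arguments_ ends up pointing at the argument with index 0; AtIndexPtr()
  // below walks towards lower addresses for higher indices, and the receiver
  // (when present) sits one slot above arguments_ (note added, derived from
  // the accessors below).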
12070   Node* offset = assembler_->ElementOffsetFromIndex(
12071       argc_, PACKED_ELEMENTS, param_mode,
12072       (StandardFrameConstants::kFixedSlotCountAboveFp - 1) * kPointerSize);
12073   arguments_ = assembler_->UncheckedCast<RawPtr<Object>>(
12074       assembler_->IntPtrAdd(fp_, offset));
12075 }
12076 
12077 TNode<Object> CodeStubArguments::GetReceiver() const {
12078   DCHECK_EQ(receiver_mode_, ReceiverMode::kHasReceiver);
12079   return assembler_->UncheckedCast<Object>(
12080       assembler_->Load(MachineType::AnyTagged(), arguments_,
12081                        assembler_->IntPtrConstant(kPointerSize)));
12082 }
12083 
12084 void CodeStubArguments::SetReceiver(TNode<Object> object) const {
12085   DCHECK_EQ(receiver_mode_, ReceiverMode::kHasReceiver);
12086   assembler_->StoreNoWriteBarrier(MachineRepresentation::kTagged, arguments_,
12087                                   assembler_->IntPtrConstant(kPointerSize),
12088                                   object);
12089 }
12090 
12091 TNode<RawPtr<Object>> CodeStubArguments::AtIndexPtr(
12092     Node* index, CodeStubAssembler::ParameterMode mode) const {
12093   typedef compiler::Node Node;
12094   Node* negated_index = assembler_->IntPtrOrSmiSub(
12095       assembler_->IntPtrOrSmiConstant(0, mode), index, mode);
12096   Node* offset = assembler_->ElementOffsetFromIndex(negated_index,
12097                                                     PACKED_ELEMENTS, mode, 0);
12098   return assembler_->UncheckedCast<RawPtr<Object>>(assembler_->IntPtrAdd(
12099       assembler_->UncheckedCast<IntPtrT>(arguments_), offset));
12100 }
12101 
12102 TNode<Object> CodeStubArguments::AtIndex(
12103     Node* index, CodeStubAssembler::ParameterMode mode) const {
12104   DCHECK_EQ(argc_mode_, mode);
12105   CSA_ASSERT(assembler_,
12106              assembler_->UintPtrOrSmiLessThan(index, GetLength(mode), mode));
12107   return assembler_->UncheckedCast<Object>(
12108       assembler_->Load(MachineType::AnyTagged(), AtIndexPtr(index, mode)));
12109 }
12110 
12111 TNode<Object> CodeStubArguments::AtIndex(int index) const {
12112   return AtIndex(assembler_->IntPtrConstant(index));
12113 }
12114 
12115 TNode<Object> CodeStubArguments::GetOptionalArgumentValue(
12116     int index, TNode<Object> default_value) {
12117   CodeStubAssembler::TVariable<Object> result(assembler_);
12118   CodeStubAssembler::Label argument_missing(assembler_),
12119       argument_done(assembler_, &result);
12120 
12121   assembler_->GotoIf(assembler_->UintPtrOrSmiGreaterThanOrEqual(
12122                          assembler_->IntPtrOrSmiConstant(index, argc_mode_),
12123                          argc_, argc_mode_),
12124                      &argument_missing);
12125   result = AtIndex(index);
12126   assembler_->Goto(&argument_done);
12127 
12128   assembler_->BIND(&argument_missing);
12129   result = default_value;
12130   assembler_->Goto(&argument_done);
12131 
12132   assembler_->BIND(&argument_done);
12133   return result.value();
12134 }
12135 
12136 TNode<Object> CodeStubArguments::GetOptionalArgumentValue(
12137     TNode<IntPtrT> index, TNode<Object> default_value) {
12138   CodeStubAssembler::TVariable<Object> result(assembler_);
12139   CodeStubAssembler::Label argument_missing(assembler_),
12140       argument_done(assembler_, &result);
12141 
12142   assembler_->GotoIf(
12143       assembler_->UintPtrOrSmiGreaterThanOrEqual(
12144           assembler_->IntPtrToParameter(index, argc_mode_), argc_, argc_mode_),
12145       &argument_missing);
12146   result = AtIndex(index);
12147   assembler_->Goto(&argument_done);
12148 
12149   assembler_->BIND(&argument_missing);
12150   result = default_value;
12151   assembler_->Goto(&argument_done);
12152 
12153   assembler_->BIND(&argument_done);
12154   return result.value();
12155 }
12156 
12157 void CodeStubArguments::ForEach(
12158     const CodeStubAssembler::VariableList& vars,
12159     const CodeStubArguments::ForEachBodyFunction& body, Node* first, Node* last,
12160     CodeStubAssembler::ParameterMode mode) {
12161   assembler_->Comment("CodeStubArguments::ForEach");
12162   if (first == nullptr) {
12163     first = assembler_->IntPtrOrSmiConstant(0, mode);
12164   }
12165   if (last == nullptr) {
12166     DCHECK_EQ(mode, argc_mode_);
12167     last = argc_;
12168   }
12169   Node* start = assembler_->IntPtrSub(
12170       assembler_->UncheckedCast<IntPtrT>(arguments_),
12171       assembler_->ElementOffsetFromIndex(first, PACKED_ELEMENTS, mode));
12172   Node* end = assembler_->IntPtrSub(
12173       assembler_->UncheckedCast<IntPtrT>(arguments_),
12174       assembler_->ElementOffsetFromIndex(last, PACKED_ELEMENTS, mode));
12175   assembler_->BuildFastLoop(vars, start, end,
12176                             [this, &body](Node* current) {
12177                               Node* arg = assembler_->Load(
12178                                   MachineType::AnyTagged(), current);
12179                               body(arg);
12180                             },
12181                             -kPointerSize, CodeStubAssembler::INTPTR_PARAMETERS,
12182                             CodeStubAssembler::IndexAdvanceMode::kPost);
12183 }
12184 
12185 void CodeStubArguments::PopAndReturn(Node* value) {
12186   Node* pop_count;
12187   if (receiver_mode_ == ReceiverMode::kHasReceiver) {
12188     pop_count = assembler_->IntPtrOrSmiAdd(
12189         argc_, assembler_->IntPtrOrSmiConstant(1, argc_mode_), argc_mode_);
12190   } else {
12191     pop_count = argc_;
12192   }
12193 
12194   assembler_->PopAndReturn(assembler_->ParameterToIntPtr(pop_count, argc_mode_),
12195                            value);
12196 }
12197 
12198 Node* CodeStubAssembler::IsFastElementsKind(Node* elements_kind) {
12199   STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND);
12200   return Uint32LessThanOrEqual(elements_kind,
12201                                Int32Constant(LAST_FAST_ELEMENTS_KIND));
12202 }
12203 
12204 TNode<BoolT> CodeStubAssembler::IsDoubleElementsKind(
12205     TNode<Int32T> elements_kind) {
12206   STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND);
12207   STATIC_ASSERT((PACKED_DOUBLE_ELEMENTS & 1) == 0);
12208   STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS + 1 == HOLEY_DOUBLE_ELEMENTS);
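  // Dropping the low (holeyness) bit maps PACKED_DOUBLE_ELEMENTS and
  // HOLEY_DOUBLE_ELEMENTS to the same value, so a single comparison covers
  // both double kinds (note added).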
12209   return Word32Equal(Word32Shr(elements_kind, Int32Constant(1)),
12210                      Int32Constant(PACKED_DOUBLE_ELEMENTS / 2));
12211 }
12212 
12213 Node* CodeStubAssembler::IsFastSmiOrTaggedElementsKind(Node* elements_kind) {
12214   STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND);
12215   STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS > TERMINAL_FAST_ELEMENTS_KIND);
12216   STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS > TERMINAL_FAST_ELEMENTS_KIND);
12217   return Uint32LessThanOrEqual(elements_kind,
12218                                Int32Constant(TERMINAL_FAST_ELEMENTS_KIND));
12219 }
12220 
12221 Node* CodeStubAssembler::IsFastSmiElementsKind(Node* elements_kind) {
12222   return Uint32LessThanOrEqual(elements_kind,
12223                                Int32Constant(HOLEY_SMI_ELEMENTS));
12224 }
12225 
12226 Node* CodeStubAssembler::IsHoleyFastElementsKind(Node* elements_kind) {
12227   CSA_ASSERT(this, IsFastElementsKind(elements_kind));
12228 
12229   STATIC_ASSERT(HOLEY_SMI_ELEMENTS == (PACKED_SMI_ELEMENTS | 1));
12230   STATIC_ASSERT(HOLEY_ELEMENTS == (PACKED_ELEMENTS | 1));
12231   STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS == (PACKED_DOUBLE_ELEMENTS | 1));
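  // For fast elements kinds the low bit encodes holeyness, as asserted above
  // (note added).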
12232   return IsSetWord32(elements_kind, 1);
12233 }
12234 
12235 Node* CodeStubAssembler::IsElementsKindGreaterThan(
12236     Node* target_kind, ElementsKind reference_kind) {
12237   return Int32GreaterThan(target_kind, Int32Constant(reference_kind));
12238 }
12239 
12240 Node* CodeStubAssembler::IsDebugActive() {
12241   Node* is_debug_active = Load(
12242       MachineType::Uint8(),
12243       ExternalConstant(ExternalReference::debug_is_active_address(isolate())));
12244   return Word32NotEqual(is_debug_active, Int32Constant(0));
12245 }
12246 
12247 TNode<BoolT> CodeStubAssembler::IsRuntimeCallStatsEnabled() {
12248   TNode<Word32T> flag_value = UncheckedCast<Word32T>(Load(
12249       MachineType::Int32(),
12250       ExternalConstant(ExternalReference::address_of_runtime_stats_flag())));
12251   return Word32NotEqual(flag_value, Int32Constant(0));
12252 }
12253 
12254 Node* CodeStubAssembler::IsPromiseHookEnabled() {
12255   Node* const promise_hook = Load(
12256       MachineType::Pointer(),
12257       ExternalConstant(ExternalReference::promise_hook_address(isolate())));
12258   return WordNotEqual(promise_hook, IntPtrConstant(0));
12259 }
12260 
12261 Node* CodeStubAssembler::HasAsyncEventDelegate() {
12262   Node* const async_event_delegate =
12263       Load(MachineType::Pointer(),
12264            ExternalConstant(
12265                ExternalReference::async_event_delegate_address(isolate())));
12266   return WordNotEqual(async_event_delegate, IntPtrConstant(0));
12267 }
12268 
12269 Node* CodeStubAssembler::IsPromiseHookEnabledOrHasAsyncEventDelegate() {
12270   Node* const promise_hook_or_async_event_delegate =
12271       Load(MachineType::Uint8(),
12272            ExternalConstant(
12273                ExternalReference::promise_hook_or_async_event_delegate_address(
12274                    isolate())));
12275   return Word32NotEqual(promise_hook_or_async_event_delegate, Int32Constant(0));
12276 }
12277 
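// Note on the shift arithmetic in LoadBuiltin below: |builtin_id| is a Smi,
// i.e. the builtin index shifted left by kSmiShiftSize + kSmiTagSize, while
// the builtins table is indexed in units of kPointerSize. The bit-cast word
// is therefore shifted by kPointerSizeLog2 - kSmiShiftBits, to the left or
// to the right depending on the sign of that difference (on 64-bit targets
// the Smi shift typically exceeds kPointerSizeLog2, so the shift goes right).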
TNode<Code> CodeStubAssembler::LoadBuiltin(TNode<Smi> builtin_id) {
  CSA_ASSERT(this, SmiGreaterThanOrEqual(builtin_id, SmiConstant(0)));
  CSA_ASSERT(this,
             SmiLessThan(builtin_id, SmiConstant(Builtins::builtin_count)));

  int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
  int index_shift = kPointerSizeLog2 - kSmiShiftBits;
  TNode<WordT> table_index =
      index_shift >= 0 ? WordShl(BitcastTaggedToWord(builtin_id), index_shift)
                       : WordSar(BitcastTaggedToWord(builtin_id), -index_shift);

  return CAST(
      Load(MachineType::TaggedPointer(),
           ExternalConstant(ExternalReference::builtins_address(isolate())),
           table_index));
}

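// Derives the Code object to execute from the SharedFunctionInfo's
// function_data field: a Smi holds a builtin id; a BytecodeArray or
// InterpreterData means the function runs through the interpreter;
// WasmExportedFunctionData supplies its wrapper code; a FixedArray (asm.js
// data) routes to InstantiateAsmJs; UncompiledData routes to CompileLazy;
// and FunctionTemplateInfo routes to HandleApiCall.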
TNode<Code> CodeStubAssembler::GetSharedFunctionInfoCode(
    SloppyTNode<SharedFunctionInfo> shared_info, Label* if_compile_lazy) {
  TNode<Object> sfi_data =
      LoadObjectField(shared_info, SharedFunctionInfo::kFunctionDataOffset);

  TVARIABLE(Code, sfi_code);

  Label done(this);
  Label check_instance_type(this);

  // IsSmi: Is builtin
  GotoIf(TaggedIsNotSmi(sfi_data), &check_instance_type);
  if (if_compile_lazy) {
    GotoIf(SmiEqual(CAST(sfi_data), SmiConstant(Builtins::kCompileLazy)),
           if_compile_lazy);
  }
  sfi_code = LoadBuiltin(CAST(sfi_data));
  Goto(&done);

  // Switch on data's instance type.
  BIND(&check_instance_type);
  TNode<Int32T> data_type = LoadInstanceType(CAST(sfi_data));

  int32_t case_values[] = {BYTECODE_ARRAY_TYPE,
                           WASM_EXPORTED_FUNCTION_DATA_TYPE,
                           FIXED_ARRAY_TYPE,
                           UNCOMPILED_DATA_WITHOUT_PRE_PARSED_SCOPE_TYPE,
                           UNCOMPILED_DATA_WITH_PRE_PARSED_SCOPE_TYPE,
                           FUNCTION_TEMPLATE_INFO_TYPE};
  Label check_is_bytecode_array(this);
  Label check_is_exported_function_data(this);
  Label check_is_fixed_array(this);
  Label check_is_uncompiled_data_without_pre_parsed_scope(this);
  Label check_is_uncompiled_data_with_pre_parsed_scope(this);
  Label check_is_function_template_info(this);
  Label check_is_interpreter_data(this);
  Label* case_labels[] = {&check_is_bytecode_array,
                          &check_is_exported_function_data,
                          &check_is_fixed_array,
                          &check_is_uncompiled_data_without_pre_parsed_scope,
                          &check_is_uncompiled_data_with_pre_parsed_scope,
                          &check_is_function_template_info};
  STATIC_ASSERT(arraysize(case_values) == arraysize(case_labels));
  Switch(data_type, &check_is_interpreter_data, case_values, case_labels,
         arraysize(case_labels));

  // IsBytecodeArray: Interpret bytecode
  BIND(&check_is_bytecode_array);
  DCHECK(!Builtins::IsLazy(Builtins::kInterpreterEntryTrampoline));
  sfi_code = HeapConstant(BUILTIN_CODE(isolate(), InterpreterEntryTrampoline));
  Goto(&done);

  // IsWasmExportedFunctionData: Use the wrapper code
  BIND(&check_is_exported_function_data);
  sfi_code = CAST(LoadObjectField(
      CAST(sfi_data), WasmExportedFunctionData::kWrapperCodeOffset));
  Goto(&done);

  // IsFixedArray: Instantiate using AsmWasmData
  BIND(&check_is_fixed_array);
  DCHECK(!Builtins::IsLazy(Builtins::kInstantiateAsmJs));
  sfi_code = HeapConstant(BUILTIN_CODE(isolate(), InstantiateAsmJs));
  Goto(&done);

  // IsUncompiledDataWithPreParsedScope | IsUncompiledDataWithoutPreParsedScope:
  // Compile lazy
  BIND(&check_is_uncompiled_data_with_pre_parsed_scope);
  Goto(&check_is_uncompiled_data_without_pre_parsed_scope);
  BIND(&check_is_uncompiled_data_without_pre_parsed_scope);
  DCHECK(!Builtins::IsLazy(Builtins::kCompileLazy));
  sfi_code = HeapConstant(BUILTIN_CODE(isolate(), CompileLazy));
  Goto(if_compile_lazy ? if_compile_lazy : &done);

  // IsFunctionTemplateInfo: API call
  BIND(&check_is_function_template_info);
  DCHECK(!Builtins::IsLazy(Builtins::kHandleApiCall));
  sfi_code = HeapConstant(BUILTIN_CODE(isolate(), HandleApiCall));
  Goto(&done);

  // IsInterpreterData: Interpret bytecode
  BIND(&check_is_interpreter_data);
  // This is the default branch, so assert that we have the expected data type.
  CSA_ASSERT(this,
             Word32Equal(data_type, Int32Constant(INTERPRETER_DATA_TYPE)));
  sfi_code = CAST(LoadObjectField(
      CAST(sfi_data), InterpreterData::kInterpreterTrampolineOffset));
  Goto(&done);

  BIND(&done);
  return sfi_code.value();
}

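// Allocates a JSFunction without a prototype slot and initializes every
// field explicitly: the given map, empty properties and elements, the
// many-closures feedback cell, the shared function info, the context, and
// the code derived from the shared function info.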
Node* CodeStubAssembler::AllocateFunctionWithMapAndContext(Node* map,
                                                           Node* shared_info,
                                                           Node* context) {
  CSA_SLOW_ASSERT(this, IsMap(map));

  Node* const code = GetSharedFunctionInfoCode(shared_info);

  // TODO(ishell): All the callers of this function pass a map loaded from
  // Context::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX, so we can remove
  // the map parameter.
  CSA_ASSERT(this, Word32BinaryNot(IsConstructorMap(map)));
  CSA_ASSERT(this, Word32BinaryNot(IsFunctionWithPrototypeSlotMap(map)));
  Node* const fun = Allocate(JSFunction::kSizeWithoutPrototype);
  STATIC_ASSERT(JSFunction::kSizeWithoutPrototype == 7 * kPointerSize);
  StoreMapNoWriteBarrier(fun, map);
  StoreObjectFieldRoot(fun, JSObject::kPropertiesOrHashOffset,
                       Heap::kEmptyFixedArrayRootIndex);
  StoreObjectFieldRoot(fun, JSObject::kElementsOffset,
                       Heap::kEmptyFixedArrayRootIndex);
  StoreObjectFieldRoot(fun, JSFunction::kFeedbackCellOffset,
                       Heap::kManyClosuresCellRootIndex);
  StoreObjectFieldNoWriteBarrier(fun, JSFunction::kSharedFunctionInfoOffset,
                                 shared_info);
  StoreObjectFieldNoWriteBarrier(fun, JSFunction::kContextOffset, context);
  StoreObjectFieldNoWriteBarrier(fun, JSFunction::kCodeOffset, code);
  return fun;
}

Node* CodeStubAssembler::MarkerIsFrameType(Node* marker_or_function,
                                           StackFrame::Type frame_type) {
  return WordEqual(marker_or_function,
                   IntPtrConstant(StackFrame::TypeToMarker(frame_type)));
}

Node* CodeStubAssembler::MarkerIsNotFrameType(Node* marker_or_function,
                                              StackFrame::Type frame_type) {
  return WordNotEqual(marker_or_function,
                      IntPtrConstant(StackFrame::TypeToMarker(frame_type)));
}

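// Walks the prototype chain starting at {receiver}: jumps to {if_slow} as
// soon as an object with elements (other than an empty JSArray) or a
// prototype with a non-zero enum cache length is found, and to {if_fast}
// once the chain ends in null.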
void CodeStubAssembler::CheckPrototypeEnumCache(Node* receiver,
                                                Node* receiver_map,
                                                Label* if_fast,
                                                Label* if_slow) {
  VARIABLE(var_object, MachineRepresentation::kTagged, receiver);
  VARIABLE(var_object_map, MachineRepresentation::kTagged, receiver_map);

  Label loop(this, {&var_object, &var_object_map}), done_loop(this);
  Goto(&loop);
  BIND(&loop);
  {
    // Check that there are no elements on the current {object}.
    Label if_no_elements(this);
    Node* object = var_object.value();
    Node* object_map = var_object_map.value();

    // The following relies on the elements only aliasing with JSProxy::target,
    // which is a JavaScript value and hence cannot be confused with an
    // elements backing store.
    STATIC_ASSERT(JSObject::kElementsOffset == JSProxy::kTargetOffset);
    Node* object_elements = LoadObjectField(object, JSObject::kElementsOffset);
    GotoIf(IsEmptyFixedArray(object_elements), &if_no_elements);
    GotoIf(IsEmptySlowElementDictionary(object_elements), &if_no_elements);

    // It might still be an empty JSArray.
    GotoIfNot(IsJSArrayMap(object_map), if_slow);
    Node* object_length = LoadJSArrayLength(object);
    Branch(WordEqual(object_length, SmiConstant(0)), &if_no_elements, if_slow);

    // Continue with the {object}'s prototype.
    BIND(&if_no_elements);
    object = LoadMapPrototype(object_map);
    GotoIf(IsNull(object), if_fast);

    // For all {object}s but the {receiver}, check that the cache is empty.
    var_object.Bind(object);
    object_map = LoadMap(object);
    var_object_map.Bind(object_map);
    Node* object_enum_length = LoadMapEnumLength(object_map);
    Branch(WordEqual(object_enum_length, IntPtrConstant(0)), &loop, if_slow);
  }
}

Node* CodeStubAssembler::CheckEnumCache(Node* receiver, Label* if_empty,
                                        Label* if_runtime) {
  Label if_fast(this), if_cache(this), if_no_cache(this, Label::kDeferred);
  Node* receiver_map = LoadMap(receiver);

  // Check if the enum length field of the {receiver} is properly initialized,
  // indicating that there is an enum cache.
  Node* receiver_enum_length = LoadMapEnumLength(receiver_map);
  Branch(WordEqual(receiver_enum_length,
                   IntPtrConstant(kInvalidEnumCacheSentinel)),
         &if_no_cache, &if_cache);

  BIND(&if_no_cache);
  {
    // Avoid a runtime call for empty dictionary receivers.
    GotoIfNot(IsDictionaryMap(receiver_map), if_runtime);
    TNode<NameDictionary> properties = CAST(LoadSlowProperties(receiver));
    TNode<Smi> length = GetNumberOfElements(properties);
    GotoIfNot(WordEqual(length, SmiConstant(0)), if_runtime);
    // Check that there are no elements on the {receiver} and its prototype
    // chain. Given that we do not create an EnumCache for dict-mode objects,
    // directly jump to {if_empty} if there are no elements and no properties
    // on the {receiver}.
    CheckPrototypeEnumCache(receiver, receiver_map, if_empty, if_runtime);
  }

  // Check that there are no elements on the fast {receiver} and its
  // prototype chain.
  BIND(&if_cache);
  CheckPrototypeEnumCache(receiver, receiver_map, &if_fast, if_runtime);

  BIND(&if_fast);
  return receiver_map;
}

TNode<IntPtrT> CodeStubAssembler::GetArgumentsLength(CodeStubArguments* args) {
  return args->GetLength();
}

TNode<Object> CodeStubAssembler::GetArgumentValue(CodeStubArguments* args,
                                                  TNode<IntPtrT> index) {
  return args->GetOptionalArgumentValue(index);
}

void CodeStubAssembler::Print(const char* s) {
  std::string formatted(s);
  formatted += "\n";
  CallRuntime(Runtime::kGlobalPrint, NoContextConstant(),
              StringConstant(formatted.c_str()));
}

void CodeStubAssembler::Print(const char* prefix, Node* tagged_value) {
  if (prefix != nullptr) {
    std::string formatted(prefix);
    formatted += ": ";
    Handle<String> string = isolate()->factory()->NewStringFromAsciiChecked(
        formatted.c_str(), TENURED);
    CallRuntime(Runtime::kGlobalPrint, NoContextConstant(),
                HeapConstant(string));
  }
  CallRuntime(Runtime::kDebugPrint, NoContextConstant(), tagged_value);
}

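// The machine stack grows downwards, so the stack pointer is still within
// the limit as long as it lies strictly above the limit address; otherwise
// Runtime::kStackGuard is called, which handles interrupt requests and real
// stack overflows.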
void CodeStubAssembler::PerformStackCheck(TNode<Context> context) {
  Label ok(this), stack_check_interrupt(this, Label::kDeferred);

  // The instruction sequence below is carefully crafted to hit our pattern
  // matcher for stack checks within instruction selection.
  // See StackCheckMatcher::Matched and JSGenericLowering::LowerJSStackCheck.

  TNode<UintPtrT> sp = UncheckedCast<UintPtrT>(LoadStackPointer());
  TNode<UintPtrT> stack_limit = UncheckedCast<UintPtrT>(Load(
      MachineType::Pointer(),
      ExternalConstant(ExternalReference::address_of_stack_limit(isolate()))));
  TNode<BoolT> sp_within_limit = UintPtrLessThan(stack_limit, sp);

  Branch(sp_within_limit, &ok, &stack_check_interrupt);

  BIND(&stack_check_interrupt);
  CallRuntime(Runtime::kStackGuard, context);
  Goto(&ok);

  BIND(&ok);
}

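// Writes the fixed header slots of a freshly allocated function context:
// map and length first, then scope info, previous context (undefined),
// extension (the hole) and the native context. Slots beyond the header are
// left for the caller to initialize.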
void CodeStubAssembler::InitializeFunctionContext(Node* native_context,
                                                  Node* context, int slots) {
  DCHECK_GE(slots, Context::MIN_CONTEXT_SLOTS);
  StoreMapNoWriteBarrier(context, Heap::kFunctionContextMapRootIndex);
  StoreObjectFieldNoWriteBarrier(context, FixedArray::kLengthOffset,
                                 SmiConstant(slots));

  Node* const empty_scope_info =
      LoadContextElement(native_context, Context::SCOPE_INFO_INDEX);
  StoreContextElementNoWriteBarrier(context, Context::SCOPE_INFO_INDEX,
                                    empty_scope_info);
  StoreContextElementNoWriteBarrier(context, Context::PREVIOUS_INDEX,
                                    UndefinedConstant());
  StoreContextElementNoWriteBarrier(context, Context::EXTENSION_INDEX,
                                    TheHoleConstant());
  StoreContextElementNoWriteBarrier(context, Context::NATIVE_CONTEXT_INDEX,
                                    native_context);
}

}  // namespace internal
}  // namespace v8