1 // Copyright 2016 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/codegen/code-stub-assembler.h"
6 
7 #include "include/v8-internal.h"
8 #include "src/base/macros.h"
9 #include "src/codegen/code-factory.h"
10 #include "src/codegen/tnode.h"
11 #include "src/common/globals.h"
12 #include "src/execution/frames-inl.h"
13 #include "src/execution/frames.h"
14 #include "src/execution/protectors.h"
15 #include "src/heap/heap-inl.h"  // For MemoryChunk. TODO(jkummerow): Drop.
16 #include "src/heap/memory-chunk.h"
17 #include "src/logging/counters.h"
18 #include "src/objects/api-callbacks.h"
19 #include "src/objects/cell.h"
20 #include "src/objects/descriptor-array.h"
21 #include "src/objects/function-kind.h"
22 #include "src/objects/heap-number.h"
23 #include "src/objects/js-generator.h"
24 #include "src/objects/oddball.h"
25 #include "src/objects/ordered-hash-table-inl.h"
26 #include "src/objects/property-cell.h"
27 #include "src/roots/roots.h"
28 #include "src/wasm/wasm-objects.h"
29 
30 namespace v8 {
31 namespace internal {
32 
33 using compiler::Node;
34 
35 CodeStubAssembler::CodeStubAssembler(compiler::CodeAssemblerState* state)
36     : compiler::CodeAssembler(state),
37       TorqueGeneratedExportedMacrosAssembler(state) {
38   if (DEBUG_BOOL && FLAG_csa_trap_on_node != nullptr) {
39     HandleBreakOnNode();
40   }
41 }
42 
43 void CodeStubAssembler::HandleBreakOnNode() {
44   // FLAG_csa_trap_on_node should be in the form "STUB,NODE", where STUB is
45   // the name of a stub and NODE is a number specifying the node id.
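  // For example, passing --csa-trap-on-node="MyStub,42" (hypothetical stub
  // name) would break when node #42 of a stub named "MyStub" is created.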
46   const char* name = state()->name();
47   size_t name_length = strlen(name);
48   if (strncmp(FLAG_csa_trap_on_node, name, name_length) != 0) {
49     // Different name.
50     return;
51   }
52   size_t option_length = strlen(FLAG_csa_trap_on_node);
53   if (option_length < name_length + 2 ||
54       FLAG_csa_trap_on_node[name_length] != ',') {
55     // Option is too short.
56     return;
57   }
58   const char* start = &FLAG_csa_trap_on_node[name_length + 1];
59   char* end;
60   int node_id = static_cast<int>(strtol(start, &end, 10));
61   if (start == end) {
62     // Bad node id.
63     return;
64   }
65   BreakOnNode(node_id);
66 }
67 
68 void CodeStubAssembler::Assert(const BranchGenerator& branch,
69                                const char* message, const char* file, int line,
70                                std::initializer_list<ExtraNode> extra_nodes) {
71 #if defined(DEBUG)
72   if (FLAG_debug_code) {
73     Check(branch, message, file, line, extra_nodes);
74   }
75 #endif
76 }
77 
78 void CodeStubAssembler::Assert(const NodeGenerator<BoolT>& condition_body,
79                                const char* message, const char* file, int line,
80                                std::initializer_list<ExtraNode> extra_nodes) {
81 #if defined(DEBUG)
82   if (FLAG_debug_code) {
83     Check(condition_body, message, file, line, extra_nodes);
84   }
85 #endif
86 }
87 
88 void CodeStubAssembler::Assert(TNode<Word32T> condition_node,
89                                const char* message, const char* file, int line,
90                                std::initializer_list<ExtraNode> extra_nodes) {
91 #if defined(DEBUG)
92   if (FLAG_debug_code) {
93     Check(condition_node, message, file, line, extra_nodes);
94   }
95 #endif
96 }
97 
98 void CodeStubAssembler::Check(const BranchGenerator& branch,
99                               const char* message, const char* file, int line,
100                               std::initializer_list<ExtraNode> extra_nodes) {
101   Label ok(this);
102   Label not_ok(this, Label::kDeferred);
103   if (message != nullptr && FLAG_code_comments) {
104     Comment("[ Assert: ", message);
105   } else {
106     Comment("[ Assert");
107   }
108   branch(&ok, &not_ok);
109 
110   BIND(&not_ok);
111   std::vector<FileAndLine> file_and_line;
112   if (file != nullptr) {
113     file_and_line.push_back({file, line});
114   }
115   FailAssert(message, file_and_line, extra_nodes);
116 
117   BIND(&ok);
118   Comment("] Assert");
119 }
120 
121 void CodeStubAssembler::Check(const NodeGenerator<BoolT>& condition_body,
122                               const char* message, const char* file, int line,
123                               std::initializer_list<ExtraNode> extra_nodes) {
124   BranchGenerator branch = [=](Label* ok, Label* not_ok) {
125     TNode<BoolT> condition = condition_body();
126     Branch(condition, ok, not_ok);
127   };
128 
129   Check(branch, message, file, line, extra_nodes);
130 }
131 
132 void CodeStubAssembler::Check(TNode<Word32T> condition_node,
133                               const char* message, const char* file, int line,
134                               std::initializer_list<ExtraNode> extra_nodes) {
135   BranchGenerator branch = [=](Label* ok, Label* not_ok) {
136     Branch(condition_node, ok, not_ok);
137   };
138 
139   Check(branch, message, file, line, extra_nodes);
140 }
141 
142 void CodeStubAssembler::IncrementCallCount(
143     TNode<FeedbackVector> feedback_vector, TNode<UintPtrT> slot_id) {
144   Comment("increment call count");
145   TNode<Smi> call_count =
146       CAST(LoadFeedbackVectorSlot(feedback_vector, slot_id, kTaggedSize));
147   // The lowest {FeedbackNexus::CallCountField::kShift} bits of the call
148   // count are used as flags. To increment the call count by 1 we hence
149   // have to increment by 1 << {FeedbackNexus::CallCountField::kShift}.
150   TNode<Smi> new_count = SmiAdd(
151       call_count, SmiConstant(1 << FeedbackNexus::CallCountField::kShift));
152   // Count is Smi, so we don't need a write barrier.
153   StoreFeedbackVectorSlot(feedback_vector, slot_id, new_count,
154                           SKIP_WRITE_BARRIER, kTaggedSize);
155 }
156 
157 void CodeStubAssembler::FastCheck(TNode<BoolT> condition) {
158   Label ok(this), not_ok(this, Label::kDeferred);
159   Branch(condition, &ok, &not_ok);
160   BIND(&not_ok);
161   Unreachable();
162   BIND(&ok);
163 }
164 
165 void CodeStubAssembler::FailAssert(
166     const char* message, const std::vector<FileAndLine>& files_and_lines,
167     std::initializer_list<ExtraNode> extra_nodes) {
168   DCHECK_NOT_NULL(message);
169   EmbeddedVector<char, 1024> chars;
170   std::stringstream stream;
171   for (auto it = files_and_lines.rbegin(); it != files_and_lines.rend(); ++it) {
172     if (it->first != nullptr) {
173       stream << " [" << it->first << ":" << it->second << "]";
174 #ifndef DEBUG
175       // To limit the size of these strings in release builds, we include only
176       // the innermost macro's file name and line number.
177       break;
178 #endif
179     }
180   }
181   std::string files_and_lines_text = stream.str();
182   if (files_and_lines_text.size() != 0) {
183     SNPrintF(chars, "%s%s", message, files_and_lines_text.c_str());
184     message = chars.begin();
185   }
186   TNode<String> message_node = StringConstant(message);
187 
188 #ifdef DEBUG
189   // Only print the extra nodes in debug builds.
190   for (auto& node : extra_nodes) {
191     CallRuntime(Runtime::kPrintWithNameForAssert, SmiConstant(0),
192                 StringConstant(node.second), node.first);
193   }
194 #endif
195 
196   AbortCSAAssert(message_node);
197   Unreachable();
198 }
199 
200 TNode<Int32T> CodeStubAssembler::SelectInt32Constant(TNode<BoolT> condition,
201                                                      int true_value,
202                                                      int false_value) {
203   return SelectConstant<Int32T>(condition, Int32Constant(true_value),
204                                 Int32Constant(false_value));
205 }
206 
207 TNode<IntPtrT> CodeStubAssembler::SelectIntPtrConstant(TNode<BoolT> condition,
208                                                        int true_value,
209                                                        int false_value) {
210   return SelectConstant<IntPtrT>(condition, IntPtrConstant(true_value),
211                                  IntPtrConstant(false_value));
212 }
213 
214 TNode<Oddball> CodeStubAssembler::SelectBooleanConstant(
215     TNode<BoolT> condition) {
216   return SelectConstant<Oddball>(condition, TrueConstant(), FalseConstant());
217 }
218 
219 TNode<Smi> CodeStubAssembler::SelectSmiConstant(TNode<BoolT> condition,
220                                                 Smi true_value,
221                                                 Smi false_value) {
222   return SelectConstant<Smi>(condition, SmiConstant(true_value),
223                              SmiConstant(false_value));
224 }
225 
226 TNode<Smi> CodeStubAssembler::NoContextConstant() {
227   return SmiConstant(Context::kNoContext);
228 }
229 
230 #define HEAP_CONSTANT_ACCESSOR(rootIndexName, rootAccessorName, name)        \
231   TNode<std::remove_pointer<std::remove_reference<decltype(                  \
232       std::declval<Heap>().rootAccessorName())>::type>::type>                \
233       CodeStubAssembler::name##Constant() {                                  \
234     return UncheckedCast<std::remove_pointer<std::remove_reference<decltype( \
235         std::declval<Heap>().rootAccessorName())>::type>::type>(             \
236         LoadRoot(RootIndex::k##rootIndexName));                              \
237   }
238 HEAP_MUTABLE_IMMOVABLE_OBJECT_LIST(HEAP_CONSTANT_ACCESSOR)
239 #undef HEAP_CONSTANT_ACCESSOR
240 
241 #define HEAP_CONSTANT_ACCESSOR(rootIndexName, rootAccessorName, name)        \
242   TNode<std::remove_pointer<std::remove_reference<decltype(                  \
243       std::declval<ReadOnlyRoots>().rootAccessorName())>::type>::type>       \
244       CodeStubAssembler::name##Constant() {                                  \
245     return UncheckedCast<std::remove_pointer<std::remove_reference<decltype( \
246         std::declval<ReadOnlyRoots>().rootAccessorName())>::type>::type>(    \
247         LoadRoot(RootIndex::k##rootIndexName));                              \
248   }
249 HEAP_IMMUTABLE_IMMOVABLE_OBJECT_LIST(HEAP_CONSTANT_ACCESSOR)
250 #undef HEAP_CONSTANT_ACCESSOR
251 
252 #define HEAP_CONSTANT_TEST(rootIndexName, rootAccessorName, name)          \
253   TNode<BoolT> CodeStubAssembler::Is##name(SloppyTNode<Object> value) {    \
254     return TaggedEqual(value, name##Constant());                           \
255   }                                                                        \
256   TNode<BoolT> CodeStubAssembler::IsNot##name(SloppyTNode<Object> value) { \
257     return TaggedNotEqual(value, name##Constant());                        \
258   }
259 HEAP_IMMOVABLE_OBJECT_LIST(HEAP_CONSTANT_TEST)
260 #undef HEAP_CONSTANT_TEST
261 
262 TNode<BInt> CodeStubAssembler::BIntConstant(int value) {
263 #if defined(BINT_IS_SMI)
264   return SmiConstant(value);
265 #elif defined(BINT_IS_INTPTR)
266   return IntPtrConstant(value);
267 #else
268 #error Unknown architecture.
269 #endif
270 }
271 
272 template <>
273 TNode<Smi> CodeStubAssembler::IntPtrOrSmiConstant<Smi>(int value) {
274   return SmiConstant(value);
275 }
276 
277 template <>
278 TNode<IntPtrT> CodeStubAssembler::IntPtrOrSmiConstant<IntPtrT>(int value) {
279   return IntPtrConstant(value);
280 }
281 
282 template <>
283 TNode<UintPtrT> CodeStubAssembler::IntPtrOrSmiConstant<UintPtrT>(int value) {
284   return Unsigned(IntPtrConstant(value));
285 }
286 
287 template <>
288 TNode<RawPtrT> CodeStubAssembler::IntPtrOrSmiConstant<RawPtrT>(int value) {
289   return ReinterpretCast<RawPtrT>(IntPtrConstant(value));
290 }
291 
292 bool CodeStubAssembler::TryGetIntPtrOrSmiConstantValue(
293     TNode<Smi> maybe_constant, int* value) {
294   Smi smi_constant;
295   if (ToSmiConstant(maybe_constant, &smi_constant)) {
296     *value = Smi::ToInt(smi_constant);
297     return true;
298   }
299   return false;
300 }
301 
302 bool CodeStubAssembler::TryGetIntPtrOrSmiConstantValue(
303     TNode<IntPtrT> maybe_constant, int* value) {
304   int32_t int32_constant;
305   if (ToInt32Constant(maybe_constant, &int32_constant)) {
306     *value = int32_constant;
307     return true;
308   }
309   return false;
310 }
311 
312 TNode<IntPtrT> CodeStubAssembler::IntPtrRoundUpToPowerOfTwo32(
313     TNode<IntPtrT> value) {
314   Comment("IntPtrRoundUpToPowerOfTwo32");
315   CSA_ASSERT(this, UintPtrLessThanOrEqual(value, IntPtrConstant(0x80000000u)));
316   value = Signed(IntPtrSub(value, IntPtrConstant(1)));
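  // The shift-and-or cascade below smears the highest set bit of (value - 1)
  // into every lower bit position; adding 1 afterwards yields the next power
  // of two (the classic round-up bit trick).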
317   for (int i = 1; i <= 16; i *= 2) {
318     value = Signed(WordOr(value, WordShr(value, IntPtrConstant(i))));
319   }
320   return Signed(IntPtrAdd(value, IntPtrConstant(1)));
321 }
322 
323 TNode<BoolT> CodeStubAssembler::WordIsPowerOfTwo(SloppyTNode<IntPtrT> value) {
324   intptr_t constant;
325   if (ToIntPtrConstant(value, &constant)) {
326     return BoolConstant(base::bits::IsPowerOfTwo(constant));
327   }
328   // value && !(value & (value - 1))
329   return IntPtrEqual(
330       Select<IntPtrT>(
331           IntPtrEqual(value, IntPtrConstant(0)),
332           [=] { return IntPtrConstant(1); },
333           [=] { return WordAnd(value, IntPtrSub(value, IntPtrConstant(1))); }),
334       IntPtrConstant(0));
335 }
336 
337 TNode<Float64T> CodeStubAssembler::Float64Round(SloppyTNode<Float64T> x) {
338   TNode<Float64T> one = Float64Constant(1.0);
339   TNode<Float64T> one_half = Float64Constant(0.5);
340 
341   Label return_x(this);
342 
343   // Round up {x} towards Infinity.
344   TVARIABLE(Float64T, var_x, Float64Ceil(x));
345 
346   GotoIf(Float64LessThanOrEqual(Float64Sub(var_x.value(), one_half), x),
347          &return_x);
348   var_x = Float64Sub(var_x.value(), one);
349   Goto(&return_x);
350 
351   BIND(&return_x);
352   return TNode<Float64T>::UncheckedCast(var_x.value());
353 }
354 
355 TNode<Float64T> CodeStubAssembler::Float64Ceil(SloppyTNode<Float64T> x) {
356   if (IsFloat64RoundUpSupported()) {
357     return Float64RoundUp(x);
358   }
359 
360   TNode<Float64T> one = Float64Constant(1.0);
361   TNode<Float64T> zero = Float64Constant(0.0);
362   TNode<Float64T> two_52 = Float64Constant(4503599627370496.0E0);
363   TNode<Float64T> minus_two_52 = Float64Constant(-4503599627370496.0E0);
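  // Adding 2^52 and then subtracting it again drops the fractional bits,
  // because a double's 52-bit mantissa cannot represent fractions at that
  // magnitude; inputs with |x| >= 2^52 are already integral and are returned
  // unchanged.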
364 
365   TVARIABLE(Float64T, var_x, x);
366   Label return_x(this), return_minus_x(this);
367 
368   // Check if {x} is greater than zero.
369   Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
370   Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
371          &if_xnotgreaterthanzero);
372 
373   BIND(&if_xgreaterthanzero);
374   {
375     // Just return {x} unless it's in the range ]0,2^52[.
376     GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);
377 
378     // Round positive {x} towards Infinity.
379     var_x = Float64Sub(Float64Add(two_52, x), two_52);
380     GotoIfNot(Float64LessThan(var_x.value(), x), &return_x);
381     var_x = Float64Add(var_x.value(), one);
382     Goto(&return_x);
383   }
384 
385   BIND(&if_xnotgreaterthanzero);
386   {
387     // Just return {x} unless it's in the range ]-2^52,0[
388     GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
389     GotoIfNot(Float64LessThan(x, zero), &return_x);
390 
391     // Round negated {x} towards Infinity and return the result negated.
392     TNode<Float64T> minus_x = Float64Neg(x);
393     var_x = Float64Sub(Float64Add(two_52, minus_x), two_52);
394     GotoIfNot(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
395     var_x = Float64Sub(var_x.value(), one);
396     Goto(&return_minus_x);
397   }
398 
399   BIND(&return_minus_x);
400   var_x = Float64Neg(var_x.value());
401   Goto(&return_x);
402 
403   BIND(&return_x);
404   return TNode<Float64T>::UncheckedCast(var_x.value());
405 }
406 
407 TNode<Float64T> CodeStubAssembler::Float64Floor(SloppyTNode<Float64T> x) {
408   if (IsFloat64RoundDownSupported()) {
409     return Float64RoundDown(x);
410   }
411 
412   TNode<Float64T> one = Float64Constant(1.0);
413   TNode<Float64T> zero = Float64Constant(0.0);
414   TNode<Float64T> two_52 = Float64Constant(4503599627370496.0E0);
415   TNode<Float64T> minus_two_52 = Float64Constant(-4503599627370496.0E0);
416 
417   TVARIABLE(Float64T, var_x, x);
418   Label return_x(this), return_minus_x(this);
419 
420   // Check if {x} is greater than zero.
421   Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
422   Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
423          &if_xnotgreaterthanzero);
424 
425   BIND(&if_xgreaterthanzero);
426   {
427     // Just return {x} unless it's in the range ]0,2^52[.
428     GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);
429 
430     // Round positive {x} towards -Infinity.
431     var_x = Float64Sub(Float64Add(two_52, x), two_52);
432     GotoIfNot(Float64GreaterThan(var_x.value(), x), &return_x);
433     var_x = Float64Sub(var_x.value(), one);
434     Goto(&return_x);
435   }
436 
437   BIND(&if_xnotgreaterthanzero);
438   {
439     // Just return {x} unless it's in the range ]-2^52,0[
440     GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
441     GotoIfNot(Float64LessThan(x, zero), &return_x);
442 
443     // Round negated {x} towards -Infinity and return the result negated.
444     TNode<Float64T> minus_x = Float64Neg(x);
445     var_x = Float64Sub(Float64Add(two_52, minus_x), two_52);
446     GotoIfNot(Float64LessThan(var_x.value(), minus_x), &return_minus_x);
447     var_x = Float64Add(var_x.value(), one);
448     Goto(&return_minus_x);
449   }
450 
451   BIND(&return_minus_x);
452   var_x = Float64Neg(var_x.value());
453   Goto(&return_x);
454 
455   BIND(&return_x);
456   return TNode<Float64T>::UncheckedCast(var_x.value());
457 }
458 
459 TNode<Float64T> CodeStubAssembler::Float64RoundToEven(SloppyTNode<Float64T> x) {
460   if (IsFloat64RoundTiesEvenSupported()) {
461     return Float64RoundTiesEven(x);
462   }
463   // See ES#sec-touint8clamp for details.
464   TNode<Float64T> f = Float64Floor(x);
465   TNode<Float64T> f_and_half = Float64Add(f, Float64Constant(0.5));
466 
467   TVARIABLE(Float64T, var_result);
468   Label return_f(this), return_f_plus_one(this), done(this);
469 
470   GotoIf(Float64LessThan(f_and_half, x), &return_f_plus_one);
471   GotoIf(Float64LessThan(x, f_and_half), &return_f);
472   {
473     TNode<Float64T> f_mod_2 = Float64Mod(f, Float64Constant(2.0));
474     Branch(Float64Equal(f_mod_2, Float64Constant(0.0)), &return_f,
475            &return_f_plus_one);
476   }
477 
478   BIND(&return_f);
479   var_result = f;
480   Goto(&done);
481 
482   BIND(&return_f_plus_one);
483   var_result = Float64Add(f, Float64Constant(1.0));
484   Goto(&done);
485 
486   BIND(&done);
487   return TNode<Float64T>::UncheckedCast(var_result.value());
488 }
489 
490 TNode<Float64T> CodeStubAssembler::Float64Trunc(SloppyTNode<Float64T> x) {
491   if (IsFloat64RoundTruncateSupported()) {
492     return Float64RoundTruncate(x);
493   }
494 
495   TNode<Float64T> one = Float64Constant(1.0);
496   TNode<Float64T> zero = Float64Constant(0.0);
497   TNode<Float64T> two_52 = Float64Constant(4503599627370496.0E0);
498   TNode<Float64T> minus_two_52 = Float64Constant(-4503599627370496.0E0);
499 
500   TVARIABLE(Float64T, var_x, x);
501   Label return_x(this), return_minus_x(this);
502 
503   // Check if {x} is greater than 0.
504   Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
505   Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
506          &if_xnotgreaterthanzero);
507 
508   BIND(&if_xgreaterthanzero);
509   {
510     if (IsFloat64RoundDownSupported()) {
511       var_x = Float64RoundDown(x);
512     } else {
513       // Just return {x} unless it's in the range ]0,2^52[.
514       GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);
515 
516       // Round positive {x} towards -Infinity.
517       var_x = Float64Sub(Float64Add(two_52, x), two_52);
518       GotoIfNot(Float64GreaterThan(var_x.value(), x), &return_x);
519       var_x = Float64Sub(var_x.value(), one);
520     }
521     Goto(&return_x);
522   }
523 
524   BIND(&if_xnotgreaterthanzero);
525   {
526     if (IsFloat64RoundUpSupported()) {
527       var_x = Float64RoundUp(x);
528       Goto(&return_x);
529     } else {
530       // Just return {x} unless it's in the range ]-2^52,0[.
531       GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
532       GotoIfNot(Float64LessThan(x, zero), &return_x);
533 
534       // Round negated {x} towards -Infinity and return result negated.
535       TNode<Float64T> minus_x = Float64Neg(x);
536       var_x = Float64Sub(Float64Add(two_52, minus_x), two_52);
537       GotoIfNot(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
538       var_x = Float64Sub(var_x.value(), one);
539       Goto(&return_minus_x);
540     }
541   }
542 
543   BIND(&return_minus_x);
544   var_x = Float64Neg(var_x.value());
545   Goto(&return_x);
546 
547   BIND(&return_x);
548   return TNode<Float64T>::UncheckedCast(var_x.value());
549 }
550 
551 template <>
552 TNode<Smi> CodeStubAssembler::TaggedToParameter(TNode<Smi> value) {
553   return value;
554 }
555 
556 template <>
557 TNode<IntPtrT> CodeStubAssembler::TaggedToParameter(TNode<Smi> value) {
558   return SmiUntag(value);
559 }
560 
561 TNode<IntPtrT> CodeStubAssembler::TaggedIndexToIntPtr(
562     TNode<TaggedIndex> value) {
563   return Signed(WordSarShiftOutZeros(BitcastTaggedToWordForTagAndSmiBits(value),
564                                      IntPtrConstant(kSmiTagSize)));
565 }
566 
567 TNode<TaggedIndex> CodeStubAssembler::IntPtrToTaggedIndex(
568     TNode<IntPtrT> value) {
569   return ReinterpretCast<TaggedIndex>(
570       BitcastWordToTaggedSigned(WordShl(value, IntPtrConstant(kSmiTagSize))));
571 }
572 
573 TNode<Smi> CodeStubAssembler::TaggedIndexToSmi(TNode<TaggedIndex> value) {
574   if (SmiValuesAre32Bits()) {
575     DCHECK_EQ(kSmiShiftSize, 31);
576     return BitcastWordToTaggedSigned(
577         WordShl(BitcastTaggedToWordForTagAndSmiBits(value),
578                 IntPtrConstant(kSmiShiftSize)));
579   }
580   DCHECK(SmiValuesAre31Bits());
581   DCHECK_EQ(kSmiShiftSize, 0);
582   return ReinterpretCast<Smi>(value);
583 }
584 
585 TNode<TaggedIndex> CodeStubAssembler::SmiToTaggedIndex(TNode<Smi> value) {
586   if (kSystemPointerSize == kInt32Size) {
587     return ReinterpretCast<TaggedIndex>(value);
588   }
589   if (SmiValuesAre32Bits()) {
590     DCHECK_EQ(kSmiShiftSize, 31);
591     return ReinterpretCast<TaggedIndex>(BitcastWordToTaggedSigned(
592         WordSar(BitcastTaggedToWordForTagAndSmiBits(value),
593                 IntPtrConstant(kSmiShiftSize))));
594   }
595   DCHECK(SmiValuesAre31Bits());
596   DCHECK_EQ(kSmiShiftSize, 0);
597   // Just sign-extend the lower 32 bits.
598   TNode<Int32T> raw =
599       TruncateWordToInt32(BitcastTaggedToWordForTagAndSmiBits(value));
600   return ReinterpretCast<TaggedIndex>(
601       BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(raw)));
602 }
603 
604 TNode<Smi> CodeStubAssembler::NormalizeSmiIndex(TNode<Smi> smi_index) {
605   if (COMPRESS_POINTERS_BOOL) {
606     TNode<Int32T> raw =
607         TruncateWordToInt32(BitcastTaggedToWordForTagAndSmiBits(smi_index));
608     smi_index = BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(raw));
609   }
610   return smi_index;
611 }
612 
613 TNode<Smi> CodeStubAssembler::SmiFromInt32(SloppyTNode<Int32T> value) {
614   if (COMPRESS_POINTERS_BOOL) {
615     static_assert(!COMPRESS_POINTERS_BOOL || (kSmiShiftSize + kSmiTagSize == 1),
616                   "Use shifting instead of add");
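    // Adding the value to itself is the same as shifting it left by one bit,
    // which is exactly the Smi tagging step when kSmiShiftSize + kSmiTagSize
    // equals 1 (the pointer-compression configuration asserted above).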
617     return BitcastWordToTaggedSigned(
618         ChangeUint32ToWord(Int32Add(value, value)));
619   }
620   return SmiTag(ChangeInt32ToIntPtr(value));
621 }
622 
623 TNode<Smi> CodeStubAssembler::SmiFromUint32(TNode<Uint32T> value) {
624   CSA_ASSERT(this, IntPtrLessThan(ChangeUint32ToWord(value),
625                                   IntPtrConstant(Smi::kMaxValue)));
626   return SmiFromInt32(Signed(value));
627 }
628 
629 TNode<BoolT> CodeStubAssembler::IsValidPositiveSmi(TNode<IntPtrT> value) {
630   intptr_t constant_value;
631   if (ToIntPtrConstant(value, &constant_value)) {
632     return (static_cast<uintptr_t>(constant_value) <=
633             static_cast<uintptr_t>(Smi::kMaxValue))
634                ? Int32TrueConstant()
635                : Int32FalseConstant();
636   }
637 
638   return UintPtrLessThanOrEqual(value, IntPtrConstant(Smi::kMaxValue));
639 }
640 
641 TNode<Smi> CodeStubAssembler::SmiTag(SloppyTNode<IntPtrT> value) {
642   int32_t constant_value;
643   if (ToInt32Constant(value, &constant_value) && Smi::IsValid(constant_value)) {
644     return SmiConstant(constant_value);
645   }
646   if (COMPRESS_POINTERS_BOOL) {
647     return SmiFromInt32(TruncateIntPtrToInt32(value));
648   }
649   TNode<Smi> smi =
650       BitcastWordToTaggedSigned(WordShl(value, SmiShiftBitsConstant()));
651   return smi;
652 }
653 
654 TNode<IntPtrT> CodeStubAssembler::SmiUntag(SloppyTNode<Smi> value) {
655   intptr_t constant_value;
656   if (ToIntPtrConstant(value, &constant_value)) {
657     return IntPtrConstant(constant_value >> (kSmiShiftSize + kSmiTagSize));
658   }
659   TNode<IntPtrT> raw_bits = BitcastTaggedToWordForTagAndSmiBits(value);
660   if (COMPRESS_POINTERS_BOOL) {
661     // Clear the upper half using sign-extension.
662     raw_bits = ChangeInt32ToIntPtr(TruncateIntPtrToInt32(raw_bits));
663   }
664   return Signed(WordSarShiftOutZeros(raw_bits, SmiShiftBitsConstant()));
665 }
666 
667 TNode<Int32T> CodeStubAssembler::SmiToInt32(SloppyTNode<Smi> value) {
668   if (COMPRESS_POINTERS_BOOL) {
669     return Signed(Word32SarShiftOutZeros(
670         TruncateIntPtrToInt32(BitcastTaggedToWordForTagAndSmiBits(value)),
671         SmiShiftBitsConstant32()));
672   }
673   TNode<IntPtrT> result = SmiUntag(value);
674   return TruncateIntPtrToInt32(result);
675 }
676 
677 TNode<Float64T> CodeStubAssembler::SmiToFloat64(SloppyTNode<Smi> value) {
678   return ChangeInt32ToFloat64(SmiToInt32(value));
679 }
680 
681 TNode<Smi> CodeStubAssembler::SmiMax(TNode<Smi> a, TNode<Smi> b) {
682   return SelectConstant<Smi>(SmiLessThan(a, b), b, a);
683 }
684 
685 TNode<Smi> CodeStubAssembler::SmiMin(TNode<Smi> a, TNode<Smi> b) {
686   return SelectConstant<Smi>(SmiLessThan(a, b), a, b);
687 }
688 
689 TNode<IntPtrT> CodeStubAssembler::TryIntPtrAdd(TNode<IntPtrT> a,
690                                                TNode<IntPtrT> b,
691                                                Label* if_overflow) {
692   TNode<PairT<IntPtrT, BoolT>> pair = IntPtrAddWithOverflow(a, b);
693   TNode<BoolT> overflow = Projection<1>(pair);
694   GotoIf(overflow, if_overflow);
695   return Projection<0>(pair);
696 }
697 
698 TNode<IntPtrT> CodeStubAssembler::TryIntPtrSub(TNode<IntPtrT> a,
699                                                TNode<IntPtrT> b,
700                                                Label* if_overflow) {
701   TNode<PairT<IntPtrT, BoolT>> pair = IntPtrSubWithOverflow(a, b);
702   TNode<BoolT> overflow = Projection<1>(pair);
703   GotoIf(overflow, if_overflow);
704   return Projection<0>(pair);
705 }
706 
707 TNode<Int32T> CodeStubAssembler::TryInt32Mul(TNode<Int32T> a, TNode<Int32T> b,
708                                              Label* if_overflow) {
709   TNode<PairT<Int32T, BoolT>> pair = Int32MulWithOverflow(a, b);
710   TNode<BoolT> overflow = Projection<1>(pair);
711   GotoIf(overflow, if_overflow);
712   return Projection<0>(pair);
713 }
714 
715 TNode<Smi> CodeStubAssembler::TrySmiAdd(TNode<Smi> lhs, TNode<Smi> rhs,
716                                         Label* if_overflow) {
717   if (SmiValuesAre32Bits()) {
718     return BitcastWordToTaggedSigned(
719         TryIntPtrAdd(BitcastTaggedToWordForTagAndSmiBits(lhs),
720                      BitcastTaggedToWordForTagAndSmiBits(rhs), if_overflow));
721   } else {
722     DCHECK(SmiValuesAre31Bits());
723     TNode<PairT<Int32T, BoolT>> pair = Int32AddWithOverflow(
724         TruncateIntPtrToInt32(BitcastTaggedToWordForTagAndSmiBits(lhs)),
725         TruncateIntPtrToInt32(BitcastTaggedToWordForTagAndSmiBits(rhs)));
726     TNode<BoolT> overflow = Projection<1>(pair);
727     GotoIf(overflow, if_overflow);
728     TNode<Int32T> result = Projection<0>(pair);
729     return BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(result));
730   }
731 }
732 
733 TNode<Smi> CodeStubAssembler::TrySmiSub(TNode<Smi> lhs, TNode<Smi> rhs,
734                                         Label* if_overflow) {
735   if (SmiValuesAre32Bits()) {
736     TNode<PairT<IntPtrT, BoolT>> pair =
737         IntPtrSubWithOverflow(BitcastTaggedToWordForTagAndSmiBits(lhs),
738                               BitcastTaggedToWordForTagAndSmiBits(rhs));
739     TNode<BoolT> overflow = Projection<1>(pair);
740     GotoIf(overflow, if_overflow);
741     TNode<IntPtrT> result = Projection<0>(pair);
742     return BitcastWordToTaggedSigned(result);
743   } else {
744     DCHECK(SmiValuesAre31Bits());
745     TNode<PairT<Int32T, BoolT>> pair = Int32SubWithOverflow(
746         TruncateIntPtrToInt32(BitcastTaggedToWordForTagAndSmiBits(lhs)),
747         TruncateIntPtrToInt32(BitcastTaggedToWordForTagAndSmiBits(rhs)));
748     TNode<BoolT> overflow = Projection<1>(pair);
749     GotoIf(overflow, if_overflow);
750     TNode<Int32T> result = Projection<0>(pair);
751     return BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(result));
752   }
753 }
754 
755 TNode<Smi> CodeStubAssembler::TrySmiAbs(TNode<Smi> a, Label* if_overflow) {
756   if (SmiValuesAre32Bits()) {
757     TNode<PairT<IntPtrT, BoolT>> pair =
758         IntPtrAbsWithOverflow(BitcastTaggedToWordForTagAndSmiBits(a));
759     TNode<BoolT> overflow = Projection<1>(pair);
760     GotoIf(overflow, if_overflow);
761     TNode<IntPtrT> result = Projection<0>(pair);
762     return BitcastWordToTaggedSigned(result);
763   } else {
764     CHECK(SmiValuesAre31Bits());
765     CHECK(IsInt32AbsWithOverflowSupported());
766     TNode<PairT<Int32T, BoolT>> pair = Int32AbsWithOverflow(
767         TruncateIntPtrToInt32(BitcastTaggedToWordForTagAndSmiBits(a)));
768     TNode<BoolT> overflow = Projection<1>(pair);
769     GotoIf(overflow, if_overflow);
770     TNode<Int32T> result = Projection<0>(pair);
771     return BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(result));
772   }
773 }
774 
775 TNode<Number> CodeStubAssembler::NumberMax(TNode<Number> a, TNode<Number> b) {
776   // TODO(danno): This could be optimized by specifically handling smi cases.
777   TVARIABLE(Number, result);
778   Label done(this), greater_than_equal_a(this), greater_than_equal_b(this);
779   GotoIfNumberGreaterThanOrEqual(a, b, &greater_than_equal_a);
780   GotoIfNumberGreaterThanOrEqual(b, a, &greater_than_equal_b);
781   result = NanConstant();
782   Goto(&done);
783   BIND(&greater_than_equal_a);
784   result = a;
785   Goto(&done);
786   BIND(&greater_than_equal_b);
787   result = b;
788   Goto(&done);
789   BIND(&done);
790   return result.value();
791 }
792 
793 TNode<Number> CodeStubAssembler::NumberMin(TNode<Number> a, TNode<Number> b) {
794   // TODO(danno): This could be optimized by specifically handling smi cases.
795   TVARIABLE(Number, result);
796   Label done(this), greater_than_equal_a(this), greater_than_equal_b(this);
797   GotoIfNumberGreaterThanOrEqual(a, b, &greater_than_equal_a);
798   GotoIfNumberGreaterThanOrEqual(b, a, &greater_than_equal_b);
799   result = NanConstant();
800   Goto(&done);
801   BIND(&greater_than_equal_a);
802   result = b;
803   Goto(&done);
804   BIND(&greater_than_equal_b);
805   result = a;
806   Goto(&done);
807   BIND(&done);
808   return result.value();
809 }
810 
811 TNode<Number> CodeStubAssembler::SmiMod(TNode<Smi> a, TNode<Smi> b) {
812   TVARIABLE(Number, var_result);
813   Label return_result(this, &var_result),
814       return_minuszero(this, Label::kDeferred),
815       return_nan(this, Label::kDeferred);
816 
817   // Untag {a} and {b}.
818   TNode<Int32T> int_a = SmiToInt32(a);
819   TNode<Int32T> int_b = SmiToInt32(b);
820 
821   // Return NaN if {b} is zero.
822   GotoIf(Word32Equal(int_b, Int32Constant(0)), &return_nan);
823 
824   // Check if {a} is non-negative.
825   Label if_aisnotnegative(this), if_aisnegative(this, Label::kDeferred);
826   Branch(Int32LessThanOrEqual(Int32Constant(0), int_a), &if_aisnotnegative,
827          &if_aisnegative);
828 
829   BIND(&if_aisnotnegative);
830   {
831     // Fast case, don't need to check any other edge cases.
832     TNode<Int32T> r = Int32Mod(int_a, int_b);
833     var_result = SmiFromInt32(r);
834     Goto(&return_result);
835   }
836 
837   BIND(&if_aisnegative);
838   {
839     if (SmiValuesAre32Bits()) {
840       // Check if {a} is kMinInt and {b} is -1 (only relevant if the
841       // kMinInt is actually representable as a Smi).
842       Label join(this);
843       GotoIfNot(Word32Equal(int_a, Int32Constant(kMinInt)), &join);
844       GotoIf(Word32Equal(int_b, Int32Constant(-1)), &return_minuszero);
845       Goto(&join);
846       BIND(&join);
847     }
848 
849     // Perform the integer modulus operation.
850     TNode<Int32T> r = Int32Mod(int_a, int_b);
851 
852     // Check if {r} is zero, and if so return -0, because we have to
853     // take the sign of the left hand side {a}, which is negative.
854     GotoIf(Word32Equal(r, Int32Constant(0)), &return_minuszero);
855 
856     // The remainder {r} can be outside the valid Smi range on 32bit
857     // architectures, so we cannot just say SmiFromInt32(r) here.
858     var_result = ChangeInt32ToTagged(r);
859     Goto(&return_result);
860   }
861 
862   BIND(&return_minuszero);
863   var_result = MinusZeroConstant();
864   Goto(&return_result);
865 
866   BIND(&return_nan);
867   var_result = NanConstant();
868   Goto(&return_result);
869 
870   BIND(&return_result);
871   return var_result.value();
872 }
873 
874 TNode<Number> CodeStubAssembler::SmiMul(TNode<Smi> a, TNode<Smi> b) {
875   TVARIABLE(Number, var_result);
876   TVARIABLE(Float64T, var_lhs_float64);
877   TVARIABLE(Float64T, var_rhs_float64);
878   Label return_result(this, &var_result);
879 
880   // Both {a} and {b} are Smis. Convert them to integers and multiply.
881   TNode<Int32T> lhs32 = SmiToInt32(a);
882   TNode<Int32T> rhs32 = SmiToInt32(b);
883   auto pair = Int32MulWithOverflow(lhs32, rhs32);
884 
885   TNode<BoolT> overflow = Projection<1>(pair);
886 
887   // Check if the multiplication overflowed.
888   Label if_overflow(this, Label::kDeferred), if_notoverflow(this);
889   Branch(overflow, &if_overflow, &if_notoverflow);
890   BIND(&if_notoverflow);
891   {
892     // If the answer is zero, we may need to return -0.0, depending on the
893     // input.
894     Label answer_zero(this), answer_not_zero(this);
895     TNode<Int32T> answer = Projection<0>(pair);
896     TNode<Int32T> zero = Int32Constant(0);
897     Branch(Word32Equal(answer, zero), &answer_zero, &answer_not_zero);
898     BIND(&answer_not_zero);
899     {
900       var_result = ChangeInt32ToTagged(answer);
901       Goto(&return_result);
902     }
903     BIND(&answer_zero);
904     {
905       TNode<Int32T> or_result = Word32Or(lhs32, rhs32);
906       Label if_should_be_negative_zero(this), if_should_be_zero(this);
907       Branch(Int32LessThan(or_result, zero), &if_should_be_negative_zero,
908              &if_should_be_zero);
909       BIND(&if_should_be_negative_zero);
910       {
911         var_result = MinusZeroConstant();
912         Goto(&return_result);
913       }
914       BIND(&if_should_be_zero);
915       {
916         var_result = SmiConstant(0);
917         Goto(&return_result);
918       }
919     }
920   }
921   BIND(&if_overflow);
922   {
923     var_lhs_float64 = SmiToFloat64(a);
924     var_rhs_float64 = SmiToFloat64(b);
925     TNode<Float64T> value =
926         Float64Mul(var_lhs_float64.value(), var_rhs_float64.value());
927     var_result = AllocateHeapNumberWithValue(value);
928     Goto(&return_result);
929   }
930 
931   BIND(&return_result);
932   return var_result.value();
933 }
934 
935 TNode<Smi> CodeStubAssembler::TrySmiDiv(TNode<Smi> dividend, TNode<Smi> divisor,
936                                         Label* bailout) {
937   // Both {dividend} and {divisor} are Smis. Bail out to floating point
938   // division if {divisor} is zero.
939   GotoIf(TaggedEqual(divisor, SmiConstant(0)), bailout);
940 
941   // Do floating point division if {dividend} is zero and {divisor} is
942   // negative.
943   Label dividend_is_zero(this), dividend_is_not_zero(this);
944   Branch(TaggedEqual(dividend, SmiConstant(0)), &dividend_is_zero,
945          &dividend_is_not_zero);
946 
947   BIND(&dividend_is_zero);
948   {
949     GotoIf(SmiLessThan(divisor, SmiConstant(0)), bailout);
950     Goto(&dividend_is_not_zero);
951   }
952   BIND(&dividend_is_not_zero);
953 
954   TNode<Int32T> untagged_divisor = SmiToInt32(divisor);
955   TNode<Int32T> untagged_dividend = SmiToInt32(dividend);
956 
957   // Do floating point division if {dividend} is kMinInt (or kMinInt >> 1
958   // if the Smi size is 31) and {divisor} is -1.
959   Label divisor_is_minus_one(this), divisor_is_not_minus_one(this);
960   Branch(Word32Equal(untagged_divisor, Int32Constant(-1)),
961          &divisor_is_minus_one, &divisor_is_not_minus_one);
962 
963   BIND(&divisor_is_minus_one);
964   {
965     GotoIf(Word32Equal(
966                untagged_dividend,
967                Int32Constant(kSmiValueSize == 32 ? kMinInt : (kMinInt >> 1))),
968            bailout);
969     Goto(&divisor_is_not_minus_one);
970   }
971   BIND(&divisor_is_not_minus_one);
972 
973   TNode<Int32T> untagged_result = Int32Div(untagged_dividend, untagged_divisor);
974   TNode<Int32T> truncated = Int32Mul(untagged_result, untagged_divisor);
975 
976   // Do floating point division if the remainder is not 0.
977   GotoIf(Word32NotEqual(untagged_dividend, truncated), bailout);
978 
979   return SmiFromInt32(untagged_result);
980 }
981 
982 TNode<Smi> CodeStubAssembler::SmiLexicographicCompare(TNode<Smi> x,
983                                                       TNode<Smi> y) {
984   TNode<ExternalReference> smi_lexicographic_compare =
985       ExternalConstant(ExternalReference::smi_lexicographic_compare_function());
986   TNode<ExternalReference> isolate_ptr =
987       ExternalConstant(ExternalReference::isolate_address(isolate()));
988   return CAST(CallCFunction(smi_lexicographic_compare, MachineType::AnyTagged(),
989                             std::make_pair(MachineType::Pointer(), isolate_ptr),
990                             std::make_pair(MachineType::AnyTagged(), x),
991                             std::make_pair(MachineType::AnyTagged(), y)));
992 }
993 
994 TNode<Int32T> CodeStubAssembler::TruncateWordToInt32(SloppyTNode<WordT> value) {
995   if (Is64()) {
996     return TruncateInt64ToInt32(ReinterpretCast<Int64T>(value));
997   }
998   return ReinterpretCast<Int32T>(value);
999 }
1000 
1001 TNode<Int32T> CodeStubAssembler::TruncateIntPtrToInt32(
1002     SloppyTNode<IntPtrT> value) {
1003   if (Is64()) {
1004     return TruncateInt64ToInt32(ReinterpretCast<Int64T>(value));
1005   }
1006   return ReinterpretCast<Int32T>(value);
1007 }
1008 
1009 TNode<BoolT> CodeStubAssembler::TaggedIsSmi(TNode<MaybeObject> a) {
1010   STATIC_ASSERT(kSmiTagMask < kMaxUInt32);
1011   return Word32Equal(
1012       Word32And(TruncateIntPtrToInt32(BitcastTaggedToWordForTagAndSmiBits(a)),
1013                 Int32Constant(kSmiTagMask)),
1014       Int32Constant(0));
1015 }
1016 
1017 TNode<BoolT> CodeStubAssembler::TaggedIsNotSmi(TNode<MaybeObject> a) {
1018   return Word32BinaryNot(TaggedIsSmi(a));
1019 }
1020 
1021 TNode<BoolT> CodeStubAssembler::TaggedIsPositiveSmi(SloppyTNode<Object> a) {
1022 #if defined(V8_HOST_ARCH_32_BIT) || defined(V8_31BIT_SMIS_ON_64BIT_ARCH)
1023   return Word32Equal(
1024       Word32And(
1025           TruncateIntPtrToInt32(BitcastTaggedToWordForTagAndSmiBits(a)),
1026           Uint32Constant(static_cast<uint32_t>(kSmiTagMask | kSmiSignMask))),
1027       Int32Constant(0));
1028 #else
1029   return WordEqual(WordAnd(BitcastTaggedToWordForTagAndSmiBits(a),
1030                            IntPtrConstant(kSmiTagMask | kSmiSignMask)),
1031                    IntPtrConstant(0));
1032 #endif
1033 }
1034 
1035 TNode<BoolT> CodeStubAssembler::WordIsAligned(SloppyTNode<WordT> word,
1036                                               size_t alignment) {
1037   DCHECK(base::bits::IsPowerOfTwo(alignment));
1038   DCHECK_LE(alignment, kMaxUInt32);
1039   return Word32Equal(
1040       Int32Constant(0),
1041       Word32And(TruncateWordToInt32(word),
1042                 Uint32Constant(static_cast<uint32_t>(alignment) - 1)));
1043 }
1044 
1045 #if DEBUG
1046 void CodeStubAssembler::Bind(Label* label, AssemblerDebugInfo debug_info) {
1047   CodeAssembler::Bind(label, debug_info);
1048 }
1049 #endif  // DEBUG
1050 
1051 void CodeStubAssembler::Bind(Label* label) { CodeAssembler::Bind(label); }
1052 
1053 TNode<Float64T> CodeStubAssembler::LoadDoubleWithHoleCheck(
1054     TNode<FixedDoubleArray> array, TNode<IntPtrT> index, Label* if_hole) {
1055   return LoadFixedDoubleArrayElement(array, index, if_hole);
1056 }
1057 
1058 void CodeStubAssembler::BranchIfJSReceiver(SloppyTNode<Object> object,
1059                                            Label* if_true, Label* if_false) {
1060   GotoIf(TaggedIsSmi(object), if_false);
1061   STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
1062   Branch(IsJSReceiver(CAST(object)), if_true, if_false);
1063 }
1064 
1065 void CodeStubAssembler::GotoIfForceSlowPath(Label* if_true) {
1066 #ifdef V8_ENABLE_FORCE_SLOW_PATH
1067   const TNode<ExternalReference> force_slow_path_addr =
1068       ExternalConstant(ExternalReference::force_slow_path(isolate()));
1069   const TNode<Uint8T> force_slow = Load<Uint8T>(force_slow_path_addr);
1070 
1071   GotoIf(force_slow, if_true);
1072 #endif
1073 }
1074 
1075 TNode<HeapObject> CodeStubAssembler::AllocateRaw(TNode<IntPtrT> size_in_bytes,
1076                                                  AllocationFlags flags,
1077                                                  TNode<RawPtrT> top_address,
1078                                                  TNode<RawPtrT> limit_address) {
1079   Label if_out_of_memory(this, Label::kDeferred);
1080 
1081   // TODO(jgruber,jkummerow): Extract the slow paths (= probably everything
1082   // but bump pointer allocation) into a builtin to save code space. The
1083   // size_in_bytes check may be moved there as well since a non-smi
1084   // size_in_bytes probably doesn't fit into the bump pointer region
1085   // (double-check that).
1086 
1087   intptr_t size_in_bytes_constant;
1088   bool size_in_bytes_is_constant = false;
1089   if (ToIntPtrConstant(size_in_bytes, &size_in_bytes_constant)) {
1090     size_in_bytes_is_constant = true;
1091     CHECK(Internals::IsValidSmi(size_in_bytes_constant));
1092     CHECK_GT(size_in_bytes_constant, 0);
1093   } else {
1094     GotoIfNot(IsValidPositiveSmi(size_in_bytes), &if_out_of_memory);
1095   }
1096 
1097   TNode<RawPtrT> top = Load<RawPtrT>(top_address);
1098   TNode<RawPtrT> limit = Load<RawPtrT>(limit_address);
1099 
1100   // If there's not enough space, call the runtime.
1101   TVARIABLE(Object, result);
1102   Label runtime_call(this, Label::kDeferred), no_runtime_call(this), out(this);
1103 
1104   bool needs_double_alignment = flags & kDoubleAlignment;
1105   bool allow_large_object_allocation = flags & kAllowLargeObjectAllocation;
1106 
1107   if (allow_large_object_allocation) {
1108     Label next(this);
1109     GotoIf(IsRegularHeapObjectSize(size_in_bytes), &next);
1110 
1111     TNode<Smi> runtime_flags = SmiConstant(Smi::FromInt(
1112         AllocateDoubleAlignFlag::encode(needs_double_alignment) |
1113         AllowLargeObjectAllocationFlag::encode(allow_large_object_allocation)));
1114     if (FLAG_young_generation_large_objects) {
1115       result =
1116           CallRuntime(Runtime::kAllocateInYoungGeneration, NoContextConstant(),
1117                       SmiTag(size_in_bytes), runtime_flags);
1118     } else {
1119       result =
1120           CallRuntime(Runtime::kAllocateInOldGeneration, NoContextConstant(),
1121                       SmiTag(size_in_bytes), runtime_flags);
1122     }
1123     Goto(&out);
1124 
1125     BIND(&next);
1126   }
1127 
1128   TVARIABLE(IntPtrT, adjusted_size, size_in_bytes);
1129 
1130   if (needs_double_alignment) {
1131     Label next(this);
1132     GotoIfNot(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)), &next);
1133 
1134     adjusted_size = IntPtrAdd(size_in_bytes, IntPtrConstant(4));
1135     Goto(&next);
1136 
1137     BIND(&next);
1138   }
1139 
1140   TNode<IntPtrT> new_top =
1141       IntPtrAdd(UncheckedCast<IntPtrT>(top), adjusted_size.value());
1142 
1143   Branch(UintPtrGreaterThanOrEqual(new_top, limit), &runtime_call,
1144          &no_runtime_call);
1145 
1146   BIND(&runtime_call);
1147   {
1148     TNode<Smi> runtime_flags = SmiConstant(Smi::FromInt(
1149         AllocateDoubleAlignFlag::encode(needs_double_alignment) |
1150         AllowLargeObjectAllocationFlag::encode(allow_large_object_allocation)));
1151     if (flags & kPretenured) {
1152       result =
1153           CallRuntime(Runtime::kAllocateInOldGeneration, NoContextConstant(),
1154                       SmiTag(size_in_bytes), runtime_flags);
1155     } else {
1156       result =
1157           CallRuntime(Runtime::kAllocateInYoungGeneration, NoContextConstant(),
1158                       SmiTag(size_in_bytes), runtime_flags);
1159     }
1160     Goto(&out);
1161   }
1162 
1163   // When there is enough space, return `top' and bump it up.
1164   BIND(&no_runtime_call);
1165   {
1166     StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
1167                         new_top);
1168 
1169     TVARIABLE(IntPtrT, address, UncheckedCast<IntPtrT>(top));
1170 
1171     if (needs_double_alignment) {
1172       Label next(this);
1173       GotoIf(IntPtrEqual(adjusted_size.value(), size_in_bytes), &next);
1174 
1175       // Store a filler and increase the address by 4.
1176       StoreNoWriteBarrier(MachineRepresentation::kTagged, top,
1177                           OnePointerFillerMapConstant());
1178       address = IntPtrAdd(UncheckedCast<IntPtrT>(top), IntPtrConstant(4));
1179       Goto(&next);
1180 
1181       BIND(&next);
1182     }
1183 
1184     result = BitcastWordToTagged(
1185         IntPtrAdd(address.value(), IntPtrConstant(kHeapObjectTag)));
1186     Goto(&out);
1187   }
1188 
1189   if (!size_in_bytes_is_constant) {
1190     BIND(&if_out_of_memory);
1191     CallRuntime(Runtime::kFatalProcessOutOfMemoryInAllocateRaw,
1192                 NoContextConstant());
1193     Unreachable();
1194   }
1195 
1196   BIND(&out);
1197   return UncheckedCast<HeapObject>(result.value());
1198 }
1199 
1200 TNode<HeapObject> CodeStubAssembler::AllocateRawUnaligned(
1201     TNode<IntPtrT> size_in_bytes, AllocationFlags flags,
1202     TNode<RawPtrT> top_address, TNode<RawPtrT> limit_address) {
1203   DCHECK_EQ(flags & kDoubleAlignment, 0);
1204   return AllocateRaw(size_in_bytes, flags, top_address, limit_address);
1205 }
1206 
1207 TNode<HeapObject> CodeStubAssembler::AllocateRawDoubleAligned(
1208     TNode<IntPtrT> size_in_bytes, AllocationFlags flags,
1209     TNode<RawPtrT> top_address, TNode<RawPtrT> limit_address) {
1210 #if defined(V8_HOST_ARCH_32_BIT)
1211   return AllocateRaw(size_in_bytes, flags | kDoubleAlignment, top_address,
1212                      limit_address);
1213 #elif defined(V8_HOST_ARCH_64_BIT)
1214 #ifdef V8_COMPRESS_POINTERS
1215   // TODO(ishell, v8:8875): Consider using aligned allocations once the
1216   // allocation alignment inconsistency is fixed. For now we keep using
1217   // unaligned access since both x64 and arm64 architectures (where pointer
1218   // compression is supported) allow unaligned access to doubles and full words.
1219 #endif  // V8_COMPRESS_POINTERS
1220   // Allocation on a 64-bit machine is naturally double-aligned.
1221   return AllocateRaw(size_in_bytes, flags & ~kDoubleAlignment, top_address,
1222                      limit_address);
1223 #else
1224 #error Architecture not supported
1225 #endif
1226 }
1227 
1228 TNode<HeapObject> CodeStubAssembler::AllocateInNewSpace(
1229     TNode<IntPtrT> size_in_bytes, AllocationFlags flags) {
1230   DCHECK(flags == kNone || flags == kDoubleAlignment);
1231   CSA_ASSERT(this, IsRegularHeapObjectSize(size_in_bytes));
1232   return Allocate(size_in_bytes, flags);
1233 }
1234 
1235 TNode<HeapObject> CodeStubAssembler::Allocate(TNode<IntPtrT> size_in_bytes,
1236                                               AllocationFlags flags) {
1237   Comment("Allocate");
1238   bool const new_space = !(flags & kPretenured);
1239   bool const allow_large_objects = flags & kAllowLargeObjectAllocation;
1240   // For optimized allocations, we don't allow the allocation to happen in a
1241   // different generation than requested.
1242   bool const always_allocated_in_requested_space =
1243       !new_space || !allow_large_objects || FLAG_young_generation_large_objects;
1244   if (!allow_large_objects) {
1245     intptr_t size_constant;
1246     if (ToIntPtrConstant(size_in_bytes, &size_constant)) {
1247       CHECK_LE(size_constant, kMaxRegularHeapObjectSize);
1248     } else {
1249       CSA_ASSERT(this, IsRegularHeapObjectSize(size_in_bytes));
1250     }
1251   }
1252   if (!(flags & kDoubleAlignment) && always_allocated_in_requested_space) {
1253     return OptimizedAllocate(
1254         size_in_bytes,
1255         new_space ? AllocationType::kYoung : AllocationType::kOld,
1256         allow_large_objects ? AllowLargeObjects::kTrue
1257                             : AllowLargeObjects::kFalse);
1258   }
1259   TNode<ExternalReference> top_address = ExternalConstant(
1260       new_space
1261           ? ExternalReference::new_space_allocation_top_address(isolate())
1262           : ExternalReference::old_space_allocation_top_address(isolate()));
1263   DCHECK_EQ(kSystemPointerSize,
1264             ExternalReference::new_space_allocation_limit_address(isolate())
1265                     .address() -
1266                 ExternalReference::new_space_allocation_top_address(isolate())
1267                     .address());
1268   DCHECK_EQ(kSystemPointerSize,
1269             ExternalReference::old_space_allocation_limit_address(isolate())
1270                     .address() -
1271                 ExternalReference::old_space_allocation_top_address(isolate())
1272                     .address());
1273   TNode<IntPtrT> limit_address =
1274       IntPtrAdd(ReinterpretCast<IntPtrT>(top_address),
1275                 IntPtrConstant(kSystemPointerSize));
1276 
1277   if (flags & kDoubleAlignment) {
1278     return AllocateRawDoubleAligned(size_in_bytes, flags,
1279                                     ReinterpretCast<RawPtrT>(top_address),
1280                                     ReinterpretCast<RawPtrT>(limit_address));
1281   } else {
1282     return AllocateRawUnaligned(size_in_bytes, flags,
1283                                 ReinterpretCast<RawPtrT>(top_address),
1284                                 ReinterpretCast<RawPtrT>(limit_address));
1285   }
1286 }
1287 
1288 TNode<HeapObject> CodeStubAssembler::AllocateInNewSpace(int size_in_bytes,
1289                                                         AllocationFlags flags) {
1290   CHECK(flags == kNone || flags == kDoubleAlignment);
1291   DCHECK_LE(size_in_bytes, kMaxRegularHeapObjectSize);
1292   return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
1293 }
1294 
1295 TNode<HeapObject> CodeStubAssembler::Allocate(int size_in_bytes,
1296                                               AllocationFlags flags) {
1297   return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
1298 }
1299 
1300 TNode<HeapObject> CodeStubAssembler::InnerAllocate(TNode<HeapObject> previous,
1301                                                    TNode<IntPtrT> offset) {
1302   return UncheckedCast<HeapObject>(
1303       BitcastWordToTagged(IntPtrAdd(BitcastTaggedToWord(previous), offset)));
1304 }
1305 
1306 TNode<HeapObject> CodeStubAssembler::InnerAllocate(TNode<HeapObject> previous,
1307                                                    int offset) {
1308   return InnerAllocate(previous, IntPtrConstant(offset));
1309 }
1310 
1311 TNode<BoolT> CodeStubAssembler::IsRegularHeapObjectSize(TNode<IntPtrT> size) {
1312   return UintPtrLessThanOrEqual(size,
1313                                 IntPtrConstant(kMaxRegularHeapObjectSize));
1314 }
1315 
1316 void CodeStubAssembler::BranchIfToBooleanIsTrue(SloppyTNode<Object> value,
1317                                                 Label* if_true,
1318                                                 Label* if_false) {
1319   Label if_smi(this), if_notsmi(this), if_heapnumber(this, Label::kDeferred),
1320       if_bigint(this, Label::kDeferred);
1321   // Rule out false {value}.
1322   GotoIf(TaggedEqual(value, FalseConstant()), if_false);
1323 
1324   // Check if {value} is a Smi or a HeapObject.
1325   Branch(TaggedIsSmi(value), &if_smi, &if_notsmi);
1326 
1327   BIND(&if_smi);
1328   {
1329     // The {value} is a Smi, only need to check against zero.
1330     BranchIfSmiEqual(CAST(value), SmiConstant(0), if_false, if_true);
1331   }
1332 
1333   BIND(&if_notsmi);
1334   {
1335     TNode<HeapObject> value_heapobject = CAST(value);
1336 
1337     // Check if {value} is the empty string.
1338     GotoIf(IsEmptyString(value_heapobject), if_false);
1339 
1340     // The {value} is a HeapObject, load its map.
1341     TNode<Map> value_map = LoadMap(value_heapobject);
1342 
1343     // Only null, undefined and document.all have the undetectable bit set,
1344     // so we can return false immediately when that bit is set.
1345     GotoIf(IsUndetectableMap(value_map), if_false);
1346 
1347     // We still need to handle numbers specially, but all other {value}s
1348     // that make it here yield true.
1349     GotoIf(IsHeapNumberMap(value_map), &if_heapnumber);
1350     Branch(IsBigInt(value_heapobject), &if_bigint, if_true);
1351 
1352     BIND(&if_heapnumber);
1353     {
1354       // Load the floating point value of {value}.
1355       TNode<Float64T> value_value =
1356           LoadObjectField<Float64T>(value_heapobject, HeapNumber::kValueOffset);
1357 
1358       // Check if the floating point {value} is neither 0.0, -0.0 nor NaN.
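      // A single comparison covers all three cases: 0.0 < |value| is false
      // for +0.0 and -0.0 (their absolute value is 0.0) and also for NaN,
      // since any comparison involving NaN is false.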
1359       Branch(Float64LessThan(Float64Constant(0.0), Float64Abs(value_value)),
1360              if_true, if_false);
1361     }
1362 
1363     BIND(&if_bigint);
1364     {
1365       TNode<BigInt> bigint = CAST(value);
1366       TNode<Word32T> bitfield = LoadBigIntBitfield(bigint);
1367       TNode<Uint32T> length = DecodeWord32<BigIntBase::LengthBits>(bitfield);
1368       Branch(Word32Equal(length, Int32Constant(0)), if_false, if_true);
1369     }
1370   }
1371 }
1372 
1373 TNode<ExternalPointerT> CodeStubAssembler::ChangeUint32ToExternalPointer(
1374     TNode<Uint32T> value) {
1375   STATIC_ASSERT(kExternalPointerSize == kSystemPointerSize);
1376   return ReinterpretCast<ExternalPointerT>(ChangeUint32ToWord(value));
1377 }
1378 
1379 TNode<Uint32T> CodeStubAssembler::ChangeExternalPointerToUint32(
1380     TNode<ExternalPointerT> value) {
1381   STATIC_ASSERT(kExternalPointerSize == kSystemPointerSize);
1382   return Unsigned(TruncateWordToInt32(ReinterpretCast<UintPtrT>(value)));
1383 }
1384 
1385 void CodeStubAssembler::InitializeExternalPointerField(TNode<HeapObject> object,
1386                                                        TNode<IntPtrT> offset) {
1387 #ifdef V8_HEAP_SANDBOX
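  // With the heap sandbox, external pointers are not stored in the object
  // itself; the field instead receives an index into the per-isolate external
  // pointer table. This block reserves a fresh table slot: it grows the table
  // if it is full, bumps the length, and stores the new index into the field.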
1388   TNode<ExternalReference> external_pointer_table_address = ExternalConstant(
1389       ExternalReference::external_pointer_table_address(isolate()));
1390   TNode<Uint32T> table_length = UncheckedCast<Uint32T>(
1391       Load(MachineType::Uint32(), external_pointer_table_address,
1392            UintPtrConstant(Internals::kExternalPointerTableLengthOffset)));
1393   TNode<Uint32T> table_capacity = UncheckedCast<Uint32T>(
1394       Load(MachineType::Uint32(), external_pointer_table_address,
1395            UintPtrConstant(Internals::kExternalPointerTableCapacityOffset)));
1396 
1397   Label grow_table(this, Label::kDeferred), finish(this);
1398 
1399   TNode<BoolT> compare = Uint32LessThan(table_length, table_capacity);
1400   Branch(compare, &finish, &grow_table);
1401 
1402   BIND(&grow_table);
1403   {
1404     TNode<ExternalReference> table_grow_function = ExternalConstant(
1405         ExternalReference::external_pointer_table_grow_table_function());
1406     CallCFunction(
1407         table_grow_function, MachineType::Pointer(),
1408         std::make_pair(MachineType::Pointer(), external_pointer_table_address));
1409     Goto(&finish);
1410   }
1411   BIND(&finish);
1412 
1413   TNode<Uint32T> new_table_length = Uint32Add(table_length, Uint32Constant(1));
1414   StoreNoWriteBarrier(
1415       MachineRepresentation::kWord32, external_pointer_table_address,
1416       UintPtrConstant(Internals::kExternalPointerTableLengthOffset),
1417       new_table_length);
1418 
1419   TNode<Uint32T> index = table_length;
1420   TNode<ExternalPointerT> encoded = ChangeUint32ToExternalPointer(index);
1421   StoreObjectFieldNoWriteBarrier<ExternalPointerT>(object, offset, encoded);
1422 #endif
1423 }
1424 
1425 TNode<RawPtrT> CodeStubAssembler::LoadExternalPointerFromObject(
1426     TNode<HeapObject> object, TNode<IntPtrT> offset,
1427     ExternalPointerTag external_pointer_tag) {
1428 #ifdef V8_HEAP_SANDBOX
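  // Sandbox mode: the field holds an index into the external pointer table.
  // Load the corresponding table entry and strip the type tag (entries are
  // stored XOR'ed with their ExternalPointerTag) to recover the raw pointer.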
1429   TNode<ExternalReference> external_pointer_table_address = ExternalConstant(
1430       ExternalReference::external_pointer_table_address(isolate()));
1431   TNode<RawPtrT> table = UncheckedCast<RawPtrT>(
1432       Load(MachineType::Pointer(), external_pointer_table_address,
1433            UintPtrConstant(Internals::kExternalPointerTableBufferOffset)));
1434 
1435   TNode<ExternalPointerT> encoded =
1436       LoadObjectField<ExternalPointerT>(object, offset);
1437   TNode<Word32T> index = ChangeExternalPointerToUint32(encoded);
1438   // TODO(v8:10391, saelo): bounds check if table is not caged
1439   TNode<IntPtrT> table_offset = ElementOffsetFromIndex(
1440       ChangeUint32ToWord(index), SYSTEM_POINTER_ELEMENTS, 0);
1441 
1442   TNode<UintPtrT> entry = Load<UintPtrT>(table, table_offset);
1443   if (external_pointer_tag != 0) {
1444     TNode<UintPtrT> tag = UintPtrConstant(external_pointer_tag);
1445     entry = UncheckedCast<UintPtrT>(WordXor(entry, tag));
1446   }
1447   return UncheckedCast<RawPtrT>(UncheckedCast<WordT>(entry));
1448 #else
1449   return LoadObjectField<RawPtrT>(object, offset);
1450 #endif  // V8_HEAP_SANDBOX
1451 }
1452 
1453 void CodeStubAssembler::StoreExternalPointerToObject(
1454     TNode<HeapObject> object, TNode<IntPtrT> offset, TNode<RawPtrT> pointer,
1455     ExternalPointerTag external_pointer_tag) {
1456 #ifdef V8_HEAP_SANDBOX
1457   TNode<ExternalReference> external_pointer_table_address = ExternalConstant(
1458       ExternalReference::external_pointer_table_address(isolate()));
1459   TNode<RawPtrT> table = UncheckedCast<RawPtrT>(
1460       Load(MachineType::Pointer(), external_pointer_table_address,
1461            UintPtrConstant(Internals::kExternalPointerTableBufferOffset)));
1462 
1463   TNode<ExternalPointerT> encoded =
1464       LoadObjectField<ExternalPointerT>(object, offset);
1465   TNode<Word32T> index = ChangeExternalPointerToUint32(encoded);
1466   // TODO(v8:10391, saelo): bounds check if table is not caged
1467   TNode<IntPtrT> table_offset = ElementOffsetFromIndex(
1468       ChangeUint32ToWord(index), SYSTEM_POINTER_ELEMENTS, 0);
1469 
1470   TNode<UintPtrT> value = UncheckedCast<UintPtrT>(pointer);
1471   if (external_pointer_tag != 0) {
1472     TNode<UintPtrT> tag = UintPtrConstant(external_pointer_tag);
1473     value = UncheckedCast<UintPtrT>(WordXor(pointer, tag));
1474   }
1475   StoreNoWriteBarrier(MachineType::PointerRepresentation(), table, table_offset,
1476                       value);
1477 #else
1478   StoreObjectFieldNoWriteBarrier<RawPtrT>(object, offset, pointer);
1479 #endif  // V8_HEAP_SANDBOX
1480 }
1481 
1482 TNode<Object> CodeStubAssembler::LoadFromParentFrame(int offset) {
1483   TNode<RawPtrT> frame_pointer = LoadParentFramePointer();
1484   return LoadFullTagged(frame_pointer, IntPtrConstant(offset));
1485 }
1486 
1487 TNode<IntPtrT> CodeStubAssembler::LoadAndUntagObjectField(
1488     TNode<HeapObject> object, int offset) {
1489   if (SmiValuesAre32Bits()) {
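    // With 32-bit Smi values the payload lives in the upper half of the
    // tagged 64-bit word, so on little-endian targets the 32-bit load below
    // must be offset by 4 bytes to skip the zero lower half.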
1490 #if V8_TARGET_LITTLE_ENDIAN
1491     offset += 4;
1492 #endif
1493     return ChangeInt32ToIntPtr(LoadObjectField<Int32T>(object, offset));
1494   } else {
1495     return SmiToIntPtr(LoadObjectField<Smi>(object, offset));
1496   }
1497 }
1498 
1499 TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32ObjectField(
1500     TNode<HeapObject> object, int offset) {
1501   if (SmiValuesAre32Bits()) {
1502 #if V8_TARGET_LITTLE_ENDIAN
1503     offset += 4;
1504 #endif
1505     return LoadObjectField<Int32T>(object, offset);
1506   } else {
1507     return SmiToInt32(LoadObjectField<Smi>(object, offset));
1508   }
1509 }
1510 
1511 TNode<Float64T> CodeStubAssembler::LoadHeapNumberValue(
1512     TNode<HeapObject> object) {
1513   CSA_ASSERT(this, Word32Or(IsHeapNumber(object), IsOddball(object)));
1514   STATIC_ASSERT(HeapNumber::kValueOffset == Oddball::kToNumberRawOffset);
1515   return LoadObjectField<Float64T>(object, HeapNumber::kValueOffset);
1516 }
1517 
1518 TNode<Map> CodeStubAssembler::GetInstanceTypeMap(InstanceType instance_type) {
1519   Handle<Map> map_handle(
1520       Map::GetInstanceTypeMap(ReadOnlyRoots(isolate()), instance_type),
1521       isolate());
1522   return HeapConstant(map_handle);
1523 }
1524 
1525 TNode<Map> CodeStubAssembler::LoadMap(TNode<HeapObject> object) {
1526   return LoadObjectField<Map>(object, HeapObject::kMapOffset);
1527 }
1528 
1529 TNode<Uint16T> CodeStubAssembler::LoadInstanceType(TNode<HeapObject> object) {
1530   return LoadMapInstanceType(LoadMap(object));
1531 }
1532 
1533 TNode<BoolT> CodeStubAssembler::HasInstanceType(TNode<HeapObject> object,
1534                                                 InstanceType instance_type) {
1535   return InstanceTypeEqual(LoadInstanceType(object), instance_type);
1536 }
1537 
1538 TNode<BoolT> CodeStubAssembler::DoesntHaveInstanceType(
1539     TNode<HeapObject> object, InstanceType instance_type) {
1540   return Word32NotEqual(LoadInstanceType(object), Int32Constant(instance_type));
1541 }
1542 
1543 TNode<BoolT> CodeStubAssembler::TaggedDoesntHaveInstanceType(
1544     TNode<HeapObject> any_tagged, InstanceType type) {
1545   /* return Phi <TaggedIsSmi(val), DoesntHaveInstanceType(val, type)> */
1546   TNode<BoolT> tagged_is_smi = TaggedIsSmi(any_tagged);
1547   return Select<BoolT>(
1548       tagged_is_smi, [=]() { return tagged_is_smi; },
1549       [=]() { return DoesntHaveInstanceType(any_tagged, type); });
1550 }
1551 
1552 TNode<BoolT> CodeStubAssembler::IsSpecialReceiverMap(TNode<Map> map) {
1553   TNode<BoolT> is_special =
1554       IsSpecialReceiverInstanceType(LoadMapInstanceType(map));
1555   uint32_t mask = Map::Bits1::HasNamedInterceptorBit::kMask |
1556                   Map::Bits1::IsAccessCheckNeededBit::kMask;
1557   USE(mask);
1558   // Interceptors or access checks imply special receiver.
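  // The assert encodes the implication "mask bit set => is_special" as
  // Select(bit_set, is_special, true), which is trivially satisfied when no
  // bit from |mask| is set.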
1559   CSA_ASSERT(this,
1560              SelectConstant<BoolT>(IsSetWord32(LoadMapBitField(map), mask),
1561                                    is_special, Int32TrueConstant()));
1562   return is_special;
1563 }
1564 
1565 TNode<Word32T> CodeStubAssembler::IsStringWrapperElementsKind(TNode<Map> map) {
1566   TNode<Int32T> kind = LoadMapElementsKind(map);
1567   return Word32Or(
1568       Word32Equal(kind, Int32Constant(FAST_STRING_WRAPPER_ELEMENTS)),
1569       Word32Equal(kind, Int32Constant(SLOW_STRING_WRAPPER_ELEMENTS)));
1570 }
1571 
1572 void CodeStubAssembler::GotoIfMapHasSlowProperties(TNode<Map> map,
1573                                                    Label* if_slow) {
1574   GotoIf(IsStringWrapperElementsKind(map), if_slow);
1575   GotoIf(IsSpecialReceiverMap(map), if_slow);
1576   GotoIf(IsDictionaryMap(map), if_slow);
1577 }
1578 
1579 TNode<HeapObject> CodeStubAssembler::LoadFastProperties(
1580     TNode<JSReceiver> object) {
1581   CSA_SLOW_ASSERT(this, Word32BinaryNot(IsDictionaryMap(LoadMap(object))));
1582   TNode<Object> properties = LoadJSReceiverPropertiesOrHash(object);
1583   return Select<HeapObject>(
1584       TaggedIsSmi(properties), [=] { return EmptyFixedArrayConstant(); },
1585       [=] { return CAST(properties); });
1586 }
1587 
1588 TNode<HeapObject> CodeStubAssembler::LoadSlowProperties(
1589     TNode<JSReceiver> object) {
1590   CSA_SLOW_ASSERT(this, IsDictionaryMap(LoadMap(object)));
1591   TNode<Object> properties = LoadJSReceiverPropertiesOrHash(object);
1592   return Select<HeapObject>(
1593       TaggedIsSmi(properties),
1594       [=] { return EmptyPropertyDictionaryConstant(); },
1595       [=] { return CAST(properties); });
1596 }
1597 
1598 TNode<Object> CodeStubAssembler::LoadJSArgumentsObjectLength(
1599     TNode<Context> context, TNode<JSArgumentsObject> array) {
1600   CSA_ASSERT(this, IsJSArgumentsObjectWithLength(context, array));
1601   constexpr int offset = JSStrictArgumentsObject::kLengthOffset;
1602   STATIC_ASSERT(offset == JSSloppyArgumentsObject::kLengthOffset);
1603   return LoadObjectField(array, offset);
1604 }
1605 
1606 TNode<Smi> CodeStubAssembler::LoadFastJSArrayLength(TNode<JSArray> array) {
1607   TNode<Number> length = LoadJSArrayLength(array);
1608   CSA_ASSERT(this, Word32Or(IsFastElementsKind(LoadElementsKind(array)),
1609                             IsElementsKindInRange(
1610                                 LoadElementsKind(array),
1611                                 FIRST_ANY_NONEXTENSIBLE_ELEMENTS_KIND,
1612                                 LAST_ANY_NONEXTENSIBLE_ELEMENTS_KIND)));
1613   // JSArray length is always a positive Smi for fast arrays.
1614   CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
1615   return CAST(length);
1616 }
1617 
1618 TNode<Smi> CodeStubAssembler::LoadFixedArrayBaseLength(
1619     TNode<FixedArrayBase> array) {
1620   CSA_SLOW_ASSERT(this, IsNotWeakFixedArraySubclass(array));
1621   return LoadObjectField<Smi>(array, FixedArrayBase::kLengthOffset);
1622 }
1623 
1624 TNode<IntPtrT> CodeStubAssembler::LoadAndUntagFixedArrayBaseLength(
1625     TNode<FixedArrayBase> array) {
1626   return LoadAndUntagObjectField(array, FixedArrayBase::kLengthOffset);
1627 }
1628 
1629 TNode<IntPtrT> CodeStubAssembler::LoadFeedbackVectorLength(
1630     TNode<FeedbackVector> vector) {
1631   return ChangeInt32ToIntPtr(
1632       LoadObjectField<Int32T>(vector, FeedbackVector::kLengthOffset));
1633 }
1634 
1635 TNode<Smi> CodeStubAssembler::LoadWeakFixedArrayLength(
1636     TNode<WeakFixedArray> array) {
1637   return LoadObjectField<Smi>(array, WeakFixedArray::kLengthOffset);
1638 }
1639 
1640 TNode<IntPtrT> CodeStubAssembler::LoadAndUntagWeakFixedArrayLength(
1641     TNode<WeakFixedArray> array) {
1642   return LoadAndUntagObjectField(array, WeakFixedArray::kLengthOffset);
1643 }
1644 
1645 TNode<Int32T> CodeStubAssembler::LoadNumberOfDescriptors(
1646     TNode<DescriptorArray> array) {
1647   return UncheckedCast<Int32T>(LoadObjectField<Int16T>(
1648       array, DescriptorArray::kNumberOfDescriptorsOffset));
1649 }
1650 
1651 TNode<Int32T> CodeStubAssembler::LoadNumberOfOwnDescriptors(TNode<Map> map) {
1652   TNode<Uint32T> bit_field3 = LoadMapBitField3(map);
1653   return UncheckedCast<Int32T>(
1654       DecodeWord32<Map::Bits3::NumberOfOwnDescriptorsBits>(bit_field3));
1655 }
1656 
1657 TNode<Int32T> CodeStubAssembler::LoadMapBitField(TNode<Map> map) {
1658   return UncheckedCast<Int32T>(
1659       LoadObjectField<Uint8T>(map, Map::kBitFieldOffset));
1660 }
1661 
1662 TNode<Int32T> CodeStubAssembler::LoadMapBitField2(TNode<Map> map) {
1663   return UncheckedCast<Int32T>(
1664       LoadObjectField<Uint8T>(map, Map::kBitField2Offset));
1665 }
1666 
1667 TNode<Uint32T> CodeStubAssembler::LoadMapBitField3(TNode<Map> map) {
1668   return LoadObjectField<Uint32T>(map, Map::kBitField3Offset);
1669 }
1670 
1671 TNode<Uint16T> CodeStubAssembler::LoadMapInstanceType(TNode<Map> map) {
1672   return LoadObjectField<Uint16T>(map, Map::kInstanceTypeOffset);
1673 }
1674 
1675 TNode<Int32T> CodeStubAssembler::LoadMapElementsKind(TNode<Map> map) {
1676   TNode<Int32T> bit_field2 = LoadMapBitField2(map);
1677   return Signed(DecodeWord32<Map::Bits2::ElementsKindBits>(bit_field2));
1678 }
1679 
1680 TNode<Int32T> CodeStubAssembler::LoadElementsKind(TNode<HeapObject> object) {
1681   return LoadMapElementsKind(LoadMap(object));
1682 }
1683 
1684 TNode<DescriptorArray> CodeStubAssembler::LoadMapDescriptors(TNode<Map> map) {
1685   return LoadObjectField<DescriptorArray>(map, Map::kInstanceDescriptorsOffset);
1686 }
1687 
1688 TNode<HeapObject> CodeStubAssembler::LoadMapPrototype(TNode<Map> map) {
1689   return LoadObjectField<HeapObject>(map, Map::kPrototypeOffset);
1690 }
1691 
1692 TNode<IntPtrT> CodeStubAssembler::LoadMapInstanceSizeInWords(TNode<Map> map) {
1693   return ChangeInt32ToIntPtr(
1694       LoadObjectField<Uint8T>(map, Map::kInstanceSizeInWordsOffset));
1695 }
1696 
1697 TNode<IntPtrT> CodeStubAssembler::LoadMapInobjectPropertiesStartInWords(
1698     TNode<Map> map) {
1699   // See Map::GetInObjectPropertiesStartInWords() for details.
1700   CSA_ASSERT(this, IsJSObjectMap(map));
1701   return ChangeInt32ToIntPtr(LoadObjectField<Uint8T>(
1702       map, Map::kInObjectPropertiesStartOrConstructorFunctionIndexOffset));
1703 }
1704 
1705 TNode<IntPtrT> CodeStubAssembler::LoadMapConstructorFunctionIndex(
1706     TNode<Map> map) {
1707   // See Map::GetConstructorFunctionIndex() for details.
1708   CSA_ASSERT(this, IsPrimitiveInstanceType(LoadMapInstanceType(map)));
1709   return ChangeInt32ToIntPtr(LoadObjectField<Uint8T>(
1710       map, Map::kInObjectPropertiesStartOrConstructorFunctionIndexOffset));
1711 }
1712 
1713 TNode<Object> CodeStubAssembler::LoadMapConstructor(TNode<Map> map) {
1714   TVARIABLE(Object, result,
1715             LoadObjectField(
1716                 map, Map::kConstructorOrBackPointerOrNativeContextOffset));
1717 
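  // The constructor-or-back-pointer field holds either the constructor or,
  // for transitioned maps, a back pointer to another Map. Follow the chain
  // of Maps until something that is not a Map (the constructor, or a Smi
  // sentinel) is reached.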
1718   Label done(this), loop(this, &result);
1719   Goto(&loop);
1720   BIND(&loop);
1721   {
1722     GotoIf(TaggedIsSmi(result.value()), &done);
1723     TNode<BoolT> is_map_type =
1724         InstanceTypeEqual(LoadInstanceType(CAST(result.value())), MAP_TYPE);
1725     GotoIfNot(is_map_type, &done);
1726     result =
1727         LoadObjectField(CAST(result.value()),
1728                         Map::kConstructorOrBackPointerOrNativeContextOffset);
1729     Goto(&loop);
1730   }
1731   BIND(&done);
1732   return result.value();
1733 }
1734 
1735 TNode<WordT> CodeStubAssembler::LoadMapEnumLength(TNode<Map> map) {
1736   TNode<Uint32T> bit_field3 = LoadMapBitField3(map);
1737   return DecodeWordFromWord32<Map::Bits3::EnumLengthBits>(bit_field3);
1738 }
1739 
1740 TNode<Object> CodeStubAssembler::LoadMapBackPointer(TNode<Map> map) {
1741   TNode<HeapObject> object = CAST(LoadObjectField(
1742       map, Map::kConstructorOrBackPointerOrNativeContextOffset));
1743   return Select<Object>(
1744       IsMap(object), [=] { return object; },
1745       [=] { return UndefinedConstant(); });
1746 }
1747 
1748 TNode<Uint32T> CodeStubAssembler::EnsureOnlyHasSimpleProperties(
1749     TNode<Map> map, TNode<Int32T> instance_type, Label* bailout) {
1750   // This check can have false positives, since it applies to any
1751   // JSPrimitiveWrapper type.
1752   GotoIf(IsCustomElementsReceiverInstanceType(instance_type), bailout);
1753 
1754   TNode<Uint32T> bit_field3 = LoadMapBitField3(map);
1755   GotoIf(IsSetWord32(bit_field3, Map::Bits3::IsDictionaryMapBit::kMask),
1756          bailout);
1757 
1758   return bit_field3;
1759 }
1760 
1761 TNode<IntPtrT> CodeStubAssembler::LoadJSReceiverIdentityHash(
1762     SloppyTNode<Object> receiver, Label* if_no_hash) {
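  // The properties-or-hash field is overloaded: it may be a Smi identity
  // hash, a PropertyArray (hash packed into its length-and-hash word), a
  // NameDictionary (hash stored at kObjectHashIndex), or an empty FixedArray,
  // which means no identity hash has been assigned yet.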
1763   TVARIABLE(IntPtrT, var_hash);
1764   Label done(this), if_smi(this), if_property_array(this),
1765       if_property_dictionary(this), if_fixed_array(this);
1766 
1767   TNode<Object> properties_or_hash =
1768       LoadObjectField(TNode<HeapObject>::UncheckedCast(receiver),
1769                       JSReceiver::kPropertiesOrHashOffset);
1770   GotoIf(TaggedIsSmi(properties_or_hash), &if_smi);
1771 
1772   TNode<HeapObject> properties =
1773       TNode<HeapObject>::UncheckedCast(properties_or_hash);
1774   TNode<Uint16T> properties_instance_type = LoadInstanceType(properties);
1775 
1776   GotoIf(InstanceTypeEqual(properties_instance_type, PROPERTY_ARRAY_TYPE),
1777          &if_property_array);
1778   Branch(InstanceTypeEqual(properties_instance_type, NAME_DICTIONARY_TYPE),
1779          &if_property_dictionary, &if_fixed_array);
1780 
1781   BIND(&if_fixed_array);
1782   {
1783     var_hash = IntPtrConstant(PropertyArray::kNoHashSentinel);
1784     Goto(&done);
1785   }
1786 
1787   BIND(&if_smi);
1788   {
1789     var_hash = SmiUntag(TNode<Smi>::UncheckedCast(properties_or_hash));
1790     Goto(&done);
1791   }
1792 
1793   BIND(&if_property_array);
1794   {
1795     TNode<IntPtrT> length_and_hash = LoadAndUntagObjectField(
1796         properties, PropertyArray::kLengthAndHashOffset);
1797     var_hash = TNode<IntPtrT>::UncheckedCast(
1798         DecodeWord<PropertyArray::HashField>(length_and_hash));
1799     Goto(&done);
1800   }
1801 
1802   BIND(&if_property_dictionary);
1803   {
1804     var_hash = SmiUntag(CAST(LoadFixedArrayElement(
1805         CAST(properties), NameDictionary::kObjectHashIndex)));
1806     Goto(&done);
1807   }
1808 
1809   BIND(&done);
1810   if (if_no_hash != nullptr) {
1811     GotoIf(IntPtrEqual(var_hash.value(),
1812                        IntPtrConstant(PropertyArray::kNoHashSentinel)),
1813            if_no_hash);
1814   }
1815   return var_hash.value();
1816 }
1817 
1818 TNode<Uint32T> CodeStubAssembler::LoadNameHashAssumeComputed(TNode<Name> name) {
1819   TNode<Uint32T> hash_field = LoadNameHashField(name);
1820   CSA_ASSERT(this, IsClearWord32(hash_field, Name::kHashNotComputedMask));
1821   return Unsigned(Word32Shr(hash_field, Int32Constant(Name::kHashShift)));
1822 }
1823 
1824 TNode<Uint32T> CodeStubAssembler::LoadNameHash(TNode<Name> name,
1825                                                Label* if_hash_not_computed) {
1826   TNode<Uint32T> hash_field = LoadNameHashField(name);
1827   if (if_hash_not_computed != nullptr) {
1828     GotoIf(IsSetWord32(hash_field, Name::kHashNotComputedMask),
1829            if_hash_not_computed);
1830   }
1831   return Unsigned(Word32Shr(hash_field, Int32Constant(Name::kHashShift)));
1832 }
1833 
1834 TNode<Smi> CodeStubAssembler::LoadStringLengthAsSmi(TNode<String> string) {
1835   return SmiFromIntPtr(LoadStringLengthAsWord(string));
1836 }
1837 
1838 TNode<IntPtrT> CodeStubAssembler::LoadStringLengthAsWord(TNode<String> string) {
1839   return Signed(ChangeUint32ToWord(LoadStringLengthAsWord32(string)));
1840 }
1841 
1842 TNode<Uint32T> CodeStubAssembler::LoadStringLengthAsWord32(
1843     TNode<String> string) {
1844   return LoadObjectField<Uint32T>(string, String::kLengthOffset);
1845 }
1846 
1847 TNode<Object> CodeStubAssembler::LoadJSPrimitiveWrapperValue(
1848     TNode<JSPrimitiveWrapper> object) {
1849   return LoadObjectField(object, JSPrimitiveWrapper::kValueOffset);
1850 }
1851 
1852 void CodeStubAssembler::DispatchMaybeObject(TNode<MaybeObject> maybe_object,
1853                                             Label* if_smi, Label* if_cleared,
1854                                             Label* if_weak, Label* if_strong,
1855                                             TVariable<Object>* extracted) {
1856   Label inner_if_smi(this), inner_if_strong(this);
1857 
1858   GotoIf(TaggedIsSmi(maybe_object), &inner_if_smi);
1859 
1860   GotoIf(IsCleared(maybe_object), if_cleared);
1861 
1862   GotoIf(IsStrong(maybe_object), &inner_if_strong);
1863 
1864   *extracted = GetHeapObjectAssumeWeak(maybe_object);
1865   Goto(if_weak);
1866 
1867   BIND(&inner_if_smi);
1868   *extracted = CAST(maybe_object);
1869   Goto(if_smi);
1870 
1871   BIND(&inner_if_strong);
1872   *extracted = CAST(maybe_object);
1873   Goto(if_strong);
1874 }
1875 
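// The predicates below inspect the low tag bits of a MaybeObject: strong
// references carry the regular heap object tag, weak references carry the
// weak heap object tag, and a cleared weak reference is the dedicated
// kClearedWeakHeapObjectLower32 value (see the kHeapObjectTag and
// kWeakHeapObjectTag constants for the exact bit patterns).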
1876 TNode<BoolT> CodeStubAssembler::IsStrong(TNode<MaybeObject> value) {
1877   return Word32Equal(Word32And(TruncateIntPtrToInt32(
1878                                    BitcastTaggedToWordForTagAndSmiBits(value)),
1879                                Int32Constant(kHeapObjectTagMask)),
1880                      Int32Constant(kHeapObjectTag));
1881 }
1882 
1883 TNode<HeapObject> CodeStubAssembler::GetHeapObjectIfStrong(
1884     TNode<MaybeObject> value, Label* if_not_strong) {
1885   GotoIfNot(IsStrong(value), if_not_strong);
1886   return CAST(value);
1887 }
1888 
1889 TNode<BoolT> CodeStubAssembler::IsWeakOrCleared(TNode<MaybeObject> value) {
1890   return Word32Equal(Word32And(TruncateIntPtrToInt32(
1891                                    BitcastTaggedToWordForTagAndSmiBits(value)),
1892                                Int32Constant(kHeapObjectTagMask)),
1893                      Int32Constant(kWeakHeapObjectTag));
1894 }
1895 
1896 TNode<BoolT> CodeStubAssembler::IsCleared(TNode<MaybeObject> value) {
1897   return Word32Equal(TruncateIntPtrToInt32(BitcastMaybeObjectToWord(value)),
1898                      Int32Constant(kClearedWeakHeapObjectLower32));
1899 }
1900 
1901 TNode<HeapObject> CodeStubAssembler::GetHeapObjectAssumeWeak(
1902     TNode<MaybeObject> value) {
1903   CSA_ASSERT(this, IsWeakOrCleared(value));
1904   CSA_ASSERT(this, IsNotCleared(value));
1905   return UncheckedCast<HeapObject>(BitcastWordToTagged(WordAnd(
1906       BitcastMaybeObjectToWord(value), IntPtrConstant(~kWeakHeapObjectMask))));
1907 }
1908 
1909 TNode<HeapObject> CodeStubAssembler::GetHeapObjectAssumeWeak(
1910     TNode<MaybeObject> value, Label* if_cleared) {
1911   GotoIf(IsCleared(value), if_cleared);
1912   return GetHeapObjectAssumeWeak(value);
1913 }
1914 
1915 // This version generates
1916 //   (maybe_object & ~mask) == value
1917 // It works for non-Smi |maybe_object| and for both Smi and HeapObject values
1918 // but requires a big constant for ~mask.
1919 TNode<BoolT> CodeStubAssembler::IsWeakReferenceToObject(
1920     TNode<MaybeObject> maybe_object, TNode<Object> value) {
1921   CSA_ASSERT(this, TaggedIsNotSmi(maybe_object));
1922   if (COMPRESS_POINTERS_BOOL) {
1923     return Word32Equal(
1924         Word32And(TruncateWordToInt32(BitcastMaybeObjectToWord(maybe_object)),
1925                   Uint32Constant(~static_cast<uint32_t>(kWeakHeapObjectMask))),
1926         TruncateWordToInt32(BitcastTaggedToWord(value)));
1927   } else {
1928     return WordEqual(WordAnd(BitcastMaybeObjectToWord(maybe_object),
1929                              IntPtrConstant(~kWeakHeapObjectMask)),
1930                      BitcastTaggedToWord(value));
1931   }
1932 }
1933 
1934 // This version generates
1935 //   maybe_object == (heap_object | mask)
1936 // It works for any |maybe_object| values and generates better code because it
1937 // uses a small constant for mask.
1938 TNode<BoolT> CodeStubAssembler::IsWeakReferenceTo(
1939     TNode<MaybeObject> maybe_object, TNode<HeapObject> heap_object) {
1940   if (COMPRESS_POINTERS_BOOL) {
1941     return Word32Equal(
1942         TruncateWordToInt32(BitcastMaybeObjectToWord(maybe_object)),
1943         Word32Or(TruncateWordToInt32(BitcastTaggedToWord(heap_object)),
1944                  Int32Constant(kWeakHeapObjectMask)));
1945   } else {
1946     return WordEqual(BitcastMaybeObjectToWord(maybe_object),
1947                      WordOr(BitcastTaggedToWord(heap_object),
1948                             IntPtrConstant(kWeakHeapObjectMask)));
1949   }
1950 }
1951 
1952 TNode<MaybeObject> CodeStubAssembler::MakeWeak(TNode<HeapObject> value) {
1953   return ReinterpretCast<MaybeObject>(BitcastWordToTagged(
1954       WordOr(BitcastTaggedToWord(value), IntPtrConstant(kWeakHeapObjectTag))));
1955 }
1956 
1957 template <>
1958 TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(TNode<FixedArray> array) {
1959   return LoadAndUntagFixedArrayBaseLength(array);
1960 }
1961 
1962 template <>
1963 TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(TNode<WeakFixedArray> array) {
1964   return LoadAndUntagWeakFixedArrayLength(array);
1965 }
1966 
1967 template <>
1968 TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(TNode<PropertyArray> array) {
1969   return LoadPropertyArrayLength(array);
1970 }
1971 
1972 template <>
1973 TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(
1974     TNode<DescriptorArray> array) {
1975   return IntPtrMul(ChangeInt32ToIntPtr(LoadNumberOfDescriptors(array)),
1976                    IntPtrConstant(DescriptorArray::kEntrySize));
1977 }
1978 
1979 template <>
1980 TNode<IntPtrT> CodeStubAssembler::LoadArrayLength(
1981     TNode<TransitionArray> array) {
1982   return LoadAndUntagWeakFixedArrayLength(array);
1983 }
1984 
1985 template <typename Array, typename TIndex, typename TValue>
1986 TNode<TValue> CodeStubAssembler::LoadArrayElement(
1987     TNode<Array> array, int array_header_size, TNode<TIndex> index_node,
1988     int additional_offset, LoadSensitivity needs_poisoning) {
1989   // TODO(v8:9708): Do we want to keep both IntPtrT and UintPtrT variants?
1990   static_assert(std::is_same<TIndex, Smi>::value ||
1991                     std::is_same<TIndex, UintPtrT>::value ||
1992                     std::is_same<TIndex, IntPtrT>::value,
1993                 "Only Smi, UintPtrT or IntPtrT indices are allowed");
1994   CSA_ASSERT(this, IntPtrGreaterThanOrEqual(ParameterToIntPtr(index_node),
1995                                             IntPtrConstant(0)));
1996   DCHECK(IsAligned(additional_offset, kTaggedSize));
1997   int32_t header_size = array_header_size + additional_offset - kHeapObjectTag;
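  // |array| is a tagged pointer, i.e. it points kHeapObjectTag bytes past the
  // actual object start, so the tag is subtracted here to make |offset| a
  // plain byte offset from the tagged pointer to the requested element.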
1998   TNode<IntPtrT> offset =
1999       ElementOffsetFromIndex(index_node, HOLEY_ELEMENTS, header_size);
2000   CSA_ASSERT(this, IsOffsetInBounds(offset, LoadArrayLength(array),
2001                                     array_header_size));
2002   constexpr MachineType machine_type = MachineTypeOf<TValue>::value;
2003   // TODO(gsps): Remove the Load case once LoadFromObject supports poisoning
2004   if (needs_poisoning == LoadSensitivity::kSafe) {
2005     return UncheckedCast<TValue>(LoadFromObject(machine_type, array, offset));
2006   } else {
2007     return UncheckedCast<TValue>(
2008         Load(machine_type, array, offset, needs_poisoning));
2009   }
2010 }
2011 
2012 template V8_EXPORT_PRIVATE TNode<MaybeObject>
2013 CodeStubAssembler::LoadArrayElement<TransitionArray, IntPtrT>(
2014     TNode<TransitionArray>, int, TNode<IntPtrT>, int, LoadSensitivity);
2015 
2016 template <typename TIndex>
2017 TNode<Object> CodeStubAssembler::LoadFixedArrayElement(
2018     TNode<FixedArray> object, TNode<TIndex> index, int additional_offset,
2019     LoadSensitivity needs_poisoning, CheckBounds check_bounds) {
2020   // TODO(v8:9708): Do we want to keep both IntPtrT and UintPtrT variants?
2021   static_assert(std::is_same<TIndex, Smi>::value ||
2022                     std::is_same<TIndex, UintPtrT>::value ||
2023                     std::is_same<TIndex, IntPtrT>::value,
2024                 "Only Smi, UintPtrT or IntPtrT indexes are allowed");
2025   CSA_ASSERT(this, IsFixedArraySubclass(object));
2026   CSA_ASSERT(this, IsNotWeakFixedArraySubclass(object));
2027 
2028   if (NeedsBoundsCheck(check_bounds)) {
2029     FixedArrayBoundsCheck(object, index, additional_offset);
2030   }
2031   TNode<MaybeObject> element =
2032       LoadArrayElement(object, FixedArray::kHeaderSize, index,
2033                        additional_offset, needs_poisoning);
2034   return CAST(element);
2035 }
2036 
2037 template V8_EXPORT_PRIVATE TNode<Object>
2038 CodeStubAssembler::LoadFixedArrayElement<Smi>(TNode<FixedArray>, TNode<Smi>,
2039                                               int, LoadSensitivity,
2040                                               CheckBounds);
2041 template V8_EXPORT_PRIVATE TNode<Object>
2042 CodeStubAssembler::LoadFixedArrayElement<UintPtrT>(TNode<FixedArray>,
2043                                                    TNode<UintPtrT>, int,
2044                                                    LoadSensitivity,
2045                                                    CheckBounds);
2046 template V8_EXPORT_PRIVATE TNode<Object>
2047 CodeStubAssembler::LoadFixedArrayElement<IntPtrT>(TNode<FixedArray>,
2048                                                   TNode<IntPtrT>, int,
2049                                                   LoadSensitivity, CheckBounds);
2050 
2051 void CodeStubAssembler::FixedArrayBoundsCheck(TNode<FixedArrayBase> array,
2052                                               TNode<Smi> index,
2053                                               int additional_offset) {
2054   if (!FLAG_fixed_array_bounds_checks) return;
2055   DCHECK(IsAligned(additional_offset, kTaggedSize));
2056   TNode<Smi> effective_index;
2057   Smi constant_index;
2058   bool index_is_constant = ToSmiConstant(index, &constant_index);
2059   if (index_is_constant) {
2060     effective_index = SmiConstant(Smi::ToInt(constant_index) +
2061                                   additional_offset / kTaggedSize);
2062   } else {
2063     effective_index =
2064         SmiAdd(index, SmiConstant(additional_offset / kTaggedSize));
2065   }
2066   CSA_CHECK(this, SmiBelow(effective_index, LoadFixedArrayBaseLength(array)));
2067 }
2068 
2069 void CodeStubAssembler::FixedArrayBoundsCheck(TNode<FixedArrayBase> array,
2070                                               TNode<IntPtrT> index,
2071                                               int additional_offset) {
2072   if (!FLAG_fixed_array_bounds_checks) return;
2073   DCHECK(IsAligned(additional_offset, kTaggedSize));
2074   // IntPtrAdd does constant-folding automatically.
2075   TNode<IntPtrT> effective_index =
2076       IntPtrAdd(index, IntPtrConstant(additional_offset / kTaggedSize));
2077   CSA_CHECK(this, UintPtrLessThan(effective_index,
2078                                   LoadAndUntagFixedArrayBaseLength(array)));
2079 }
2080 
2081 TNode<Object> CodeStubAssembler::LoadPropertyArrayElement(
2082     TNode<PropertyArray> object, SloppyTNode<IntPtrT> index) {
2083   int additional_offset = 0;
2084   LoadSensitivity needs_poisoning = LoadSensitivity::kSafe;
2085   return CAST(LoadArrayElement(object, PropertyArray::kHeaderSize, index,
2086                                additional_offset, needs_poisoning));
2087 }
2088 
2089 TNode<IntPtrT> CodeStubAssembler::LoadPropertyArrayLength(
2090     TNode<PropertyArray> object) {
2091   TNode<IntPtrT> value =
2092       LoadAndUntagObjectField(object, PropertyArray::kLengthAndHashOffset);
2093   return Signed(DecodeWord<PropertyArray::LengthField>(value));
2094 }
2095 
2096 TNode<RawPtrT> CodeStubAssembler::LoadJSTypedArrayDataPtr(
2097     TNode<JSTypedArray> typed_array) {
2098   // Data pointer = external_pointer + static_cast<Tagged_t>(base_pointer).
2099   TNode<RawPtrT> external_pointer =
2100       LoadJSTypedArrayExternalPointerPtr(typed_array);
2101 
2102   TNode<IntPtrT> base_pointer;
2103   if (COMPRESS_POINTERS_BOOL) {
2104     TNode<Int32T> compressed_base =
2105         LoadObjectField<Int32T>(typed_array, JSTypedArray::kBasePointerOffset);
2106     // Zero-extend TaggedT to WordT according to current compression scheme
2107     // so that the addition with |external_pointer| (which already contains
2108     // compensated offset value) below will decompress the tagged value.
2109     // See JSTypedArray::ExternalPointerCompensationForOnHeapArray() for
2110     // details.
2111     base_pointer = Signed(ChangeUint32ToWord(compressed_base));
2112   } else {
2113     base_pointer =
2114         LoadObjectField<IntPtrT>(typed_array, JSTypedArray::kBasePointerOffset);
2115   }
2116   return RawPtrAdd(external_pointer, base_pointer);
2117 }
2118 
2119 TNode<BigInt> CodeStubAssembler::LoadFixedBigInt64ArrayElementAsTagged(
2120     SloppyTNode<RawPtrT> data_pointer, SloppyTNode<IntPtrT> offset) {
2121   if (Is64()) {
2122     TNode<IntPtrT> value = Load<IntPtrT>(data_pointer, offset);
2123     return BigIntFromInt64(value);
2124   } else {
2125     DCHECK(!Is64());
2126 #if defined(V8_TARGET_BIG_ENDIAN)
2127     TNode<IntPtrT> high = Load<IntPtrT>(data_pointer, offset);
2128     TNode<IntPtrT> low = Load<IntPtrT>(
2129         data_pointer, IntPtrAdd(offset, IntPtrConstant(kSystemPointerSize)));
2130 #else
2131     TNode<IntPtrT> low = Load<IntPtrT>(data_pointer, offset);
2132     TNode<IntPtrT> high = Load<IntPtrT>(
2133         data_pointer, IntPtrAdd(offset, IntPtrConstant(kSystemPointerSize)));
2134 #endif
2135     return BigIntFromInt32Pair(low, high);
2136   }
2137 }
2138 
2139 TNode<BigInt> CodeStubAssembler::BigIntFromInt32Pair(TNode<IntPtrT> low,
2140                                                      TNode<IntPtrT> high) {
2141   DCHECK(!Is64());
2142   TVARIABLE(BigInt, var_result);
2143   TVARIABLE(Word32T, var_sign, Int32Constant(BigInt::SignBits::encode(false)));
2144   TVARIABLE(IntPtrT, var_high, high);
2145   TVARIABLE(IntPtrT, var_low, low);
2146   Label high_zero(this), negative(this), allocate_one_digit(this),
2147       allocate_two_digits(this), if_zero(this), done(this);
2148 
2149   GotoIf(IntPtrEqual(var_high.value(), IntPtrConstant(0)), &high_zero);
2150   Branch(IntPtrLessThan(var_high.value(), IntPtrConstant(0)), &negative,
2151          &allocate_two_digits);
2152 
2153   BIND(&high_zero);
2154   Branch(IntPtrEqual(var_low.value(), IntPtrConstant(0)), &if_zero,
2155          &allocate_one_digit);
2156 
2157   BIND(&negative);
2158   {
2159     var_sign = Int32Constant(BigInt::SignBits::encode(true));
2160     // We must negate the value by computing "0 - (high|low)", performing
2161     // both parts of the subtraction separately and manually taking care
2162     // of the carry bit (which is 1 iff low != 0).
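    // Example: high:low = 0xFFFFFFFF:0xFFFFFFFF (i.e. -1). Negating gives
    // high' = 0 - 0xFFFFFFFF = 1, the carry (low != 0) reduces it to 0, and
    // low' = 0 - 0xFFFFFFFF = 1, yielding the magnitude 0:1 = +1 as expected.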
2163     var_high = IntPtrSub(IntPtrConstant(0), var_high.value());
2164     Label carry(this), no_carry(this);
2165     Branch(IntPtrEqual(var_low.value(), IntPtrConstant(0)), &no_carry, &carry);
2166     BIND(&carry);
2167     var_high = IntPtrSub(var_high.value(), IntPtrConstant(1));
2168     Goto(&no_carry);
2169     BIND(&no_carry);
2170     var_low = IntPtrSub(IntPtrConstant(0), var_low.value());
2171     // var_high was non-zero going into this block, but subtracting the
2172     // carry bit from it could bring us back onto the "one digit" path.
2173     Branch(IntPtrEqual(var_high.value(), IntPtrConstant(0)),
2174            &allocate_one_digit, &allocate_two_digits);
2175   }
2176 
2177   BIND(&allocate_one_digit);
2178   {
2179     var_result = AllocateRawBigInt(IntPtrConstant(1));
2180     StoreBigIntBitfield(var_result.value(),
2181                         Word32Or(var_sign.value(),
2182                                  Int32Constant(BigInt::LengthBits::encode(1))));
2183     StoreBigIntDigit(var_result.value(), 0, Unsigned(var_low.value()));
2184     Goto(&done);
2185   }
2186 
2187   BIND(&allocate_two_digits);
2188   {
2189     var_result = AllocateRawBigInt(IntPtrConstant(2));
2190     StoreBigIntBitfield(var_result.value(),
2191                         Word32Or(var_sign.value(),
2192                                  Int32Constant(BigInt::LengthBits::encode(2))));
2193     StoreBigIntDigit(var_result.value(), 0, Unsigned(var_low.value()));
2194     StoreBigIntDigit(var_result.value(), 1, Unsigned(var_high.value()));
2195     Goto(&done);
2196   }
2197 
2198   BIND(&if_zero);
2199   var_result = AllocateBigInt(IntPtrConstant(0));
2200   Goto(&done);
2201 
2202   BIND(&done);
2203   return var_result.value();
2204 }
2205 
2206 TNode<BigInt> CodeStubAssembler::BigIntFromInt64(TNode<IntPtrT> value) {
2207   DCHECK(Is64());
2208   TVARIABLE(BigInt, var_result);
2209   Label done(this), if_positive(this), if_negative(this), if_zero(this);
2210   GotoIf(IntPtrEqual(value, IntPtrConstant(0)), &if_zero);
2211   var_result = AllocateRawBigInt(IntPtrConstant(1));
2212   Branch(IntPtrGreaterThan(value, IntPtrConstant(0)), &if_positive,
2213          &if_negative);
2214 
2215   BIND(&if_positive);
2216   {
2217     StoreBigIntBitfield(var_result.value(),
2218                         Int32Constant(BigInt::SignBits::encode(false) |
2219                                       BigInt::LengthBits::encode(1)));
2220     StoreBigIntDigit(var_result.value(), 0, Unsigned(value));
2221     Goto(&done);
2222   }
2223 
2224   BIND(&if_negative);
2225   {
2226     StoreBigIntBitfield(var_result.value(),
2227                         Int32Constant(BigInt::SignBits::encode(true) |
2228                                       BigInt::LengthBits::encode(1)));
2229     StoreBigIntDigit(var_result.value(), 0,
2230                      Unsigned(IntPtrSub(IntPtrConstant(0), value)));
2231     Goto(&done);
2232   }
2233 
2234   BIND(&if_zero);
2235   {
2236     var_result = AllocateBigInt(IntPtrConstant(0));
2237     Goto(&done);
2238   }
2239 
2240   BIND(&done);
2241   return var_result.value();
2242 }
2243 
2244 TNode<BigInt> CodeStubAssembler::LoadFixedBigUint64ArrayElementAsTagged(
2245     SloppyTNode<RawPtrT> data_pointer, SloppyTNode<IntPtrT> offset) {
2246   Label if_zero(this), done(this);
2247   if (Is64()) {
2248     TNode<UintPtrT> value = Load<UintPtrT>(data_pointer, offset);
2249     return BigIntFromUint64(value);
2250   } else {
2251     DCHECK(!Is64());
2252 #if defined(V8_TARGET_BIG_ENDIAN)
2253     TNode<UintPtrT> high = Load<UintPtrT>(data_pointer, offset);
2254     TNode<UintPtrT> low = Load<UintPtrT>(
2255         data_pointer, IntPtrAdd(offset, IntPtrConstant(kSystemPointerSize)));
2256 #else
2257     TNode<UintPtrT> low = Load<UintPtrT>(data_pointer, offset);
2258     TNode<UintPtrT> high = Load<UintPtrT>(
2259         data_pointer, IntPtrAdd(offset, IntPtrConstant(kSystemPointerSize)));
2260 #endif
2261     return BigIntFromUint32Pair(low, high);
2262   }
2263 }
2264 
2265 TNode<BigInt> CodeStubAssembler::BigIntFromUint32Pair(TNode<UintPtrT> low,
2266                                                       TNode<UintPtrT> high) {
2267   DCHECK(!Is64());
2268   TVARIABLE(BigInt, var_result);
2269   Label high_zero(this), if_zero(this), done(this);
2270 
2271   GotoIf(IntPtrEqual(high, IntPtrConstant(0)), &high_zero);
2272   var_result = AllocateBigInt(IntPtrConstant(2));
2273   StoreBigIntDigit(var_result.value(), 0, low);
2274   StoreBigIntDigit(var_result.value(), 1, high);
2275   Goto(&done);
2276 
2277   BIND(&high_zero);
2278   GotoIf(IntPtrEqual(low, IntPtrConstant(0)), &if_zero);
2279   var_result = AllocateBigInt(IntPtrConstant(1));
2280   StoreBigIntDigit(var_result.value(), 0, low);
2281   Goto(&done);
2282 
2283   BIND(&if_zero);
2284   var_result = AllocateBigInt(IntPtrConstant(0));
2285   Goto(&done);
2286 
2287   BIND(&done);
2288   return var_result.value();
2289 }
2290 
2291 TNode<BigInt> CodeStubAssembler::BigIntFromUint64(TNode<UintPtrT> value) {
2292   DCHECK(Is64());
2293   TVARIABLE(BigInt, var_result);
2294   Label done(this), if_zero(this);
2295   GotoIf(IntPtrEqual(value, IntPtrConstant(0)), &if_zero);
2296   var_result = AllocateBigInt(IntPtrConstant(1));
2297   StoreBigIntDigit(var_result.value(), 0, value);
2298   Goto(&done);
2299 
2300   BIND(&if_zero);
2301   var_result = AllocateBigInt(IntPtrConstant(0));
2302   Goto(&done);
2303   BIND(&done);
2304   return var_result.value();
2305 }
2306 
2307 TNode<Numeric> CodeStubAssembler::LoadFixedTypedArrayElementAsTagged(
2308     TNode<RawPtrT> data_pointer, TNode<UintPtrT> index,
2309     ElementsKind elements_kind) {
2310   TNode<IntPtrT> offset =
2311       ElementOffsetFromIndex(Signed(index), elements_kind, 0);
2312   switch (elements_kind) {
2313     case UINT8_ELEMENTS: /* fall through */
2314     case UINT8_CLAMPED_ELEMENTS:
2315       return SmiFromInt32(Load<Uint8T>(data_pointer, offset));
2316     case INT8_ELEMENTS:
2317       return SmiFromInt32(Load<Int8T>(data_pointer, offset));
2318     case UINT16_ELEMENTS:
2319       return SmiFromInt32(Load<Uint16T>(data_pointer, offset));
2320     case INT16_ELEMENTS:
2321       return SmiFromInt32(Load<Int16T>(data_pointer, offset));
2322     case UINT32_ELEMENTS:
2323       return ChangeUint32ToTagged(Load<Uint32T>(data_pointer, offset));
2324     case INT32_ELEMENTS:
2325       return ChangeInt32ToTagged(Load<Int32T>(data_pointer, offset));
2326     case FLOAT32_ELEMENTS:
2327       return AllocateHeapNumberWithValue(
2328           ChangeFloat32ToFloat64(Load<Float32T>(data_pointer, offset)));
2329     case FLOAT64_ELEMENTS:
2330       return AllocateHeapNumberWithValue(Load<Float64T>(data_pointer, offset));
2331     case BIGINT64_ELEMENTS:
2332       return LoadFixedBigInt64ArrayElementAsTagged(data_pointer, offset);
2333     case BIGUINT64_ELEMENTS:
2334       return LoadFixedBigUint64ArrayElementAsTagged(data_pointer, offset);
2335     default:
2336       UNREACHABLE();
2337   }
2338 }
2339 
2340 TNode<Numeric> CodeStubAssembler::LoadFixedTypedArrayElementAsTagged(
2341     TNode<RawPtrT> data_pointer, TNode<UintPtrT> index,
2342     TNode<Int32T> elements_kind) {
2343   TVARIABLE(Numeric, var_result);
2344   Label done(this), if_unknown_type(this, Label::kDeferred);
2345   int32_t elements_kinds[] = {
2346 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) TYPE##_ELEMENTS,
2347       TYPED_ARRAYS(TYPED_ARRAY_CASE)
2348 #undef TYPED_ARRAY_CASE
2349   };
2350 
2351 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) Label if_##type##array(this);
2352   TYPED_ARRAYS(TYPED_ARRAY_CASE)
2353 #undef TYPED_ARRAY_CASE
2354 
2355   Label* elements_kind_labels[] = {
2356 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) &if_##type##array,
2357       TYPED_ARRAYS(TYPED_ARRAY_CASE)
2358 #undef TYPED_ARRAY_CASE
2359   };
2360   STATIC_ASSERT(arraysize(elements_kinds) == arraysize(elements_kind_labels));
2361 
2362   Switch(elements_kind, &if_unknown_type, elements_kinds, elements_kind_labels,
2363          arraysize(elements_kinds));
2364 
2365   BIND(&if_unknown_type);
2366   Unreachable();
2367 
2368 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype)                        \
2369   BIND(&if_##type##array);                                               \
2370   {                                                                      \
2371     var_result = LoadFixedTypedArrayElementAsTagged(data_pointer, index, \
2372                                                     TYPE##_ELEMENTS);    \
2373     Goto(&done);                                                         \
2374   }
2375   TYPED_ARRAYS(TYPED_ARRAY_CASE)
2376 #undef TYPED_ARRAY_CASE
2377 
2378   BIND(&done);
2379   return var_result.value();
2380 }
2381 
2382 template <typename TIndex>
2383 TNode<MaybeObject> CodeStubAssembler::LoadFeedbackVectorSlot(
2384     TNode<FeedbackVector> feedback_vector, TNode<TIndex> slot,
2385     int additional_offset) {
2386   int32_t header_size = FeedbackVector::kRawFeedbackSlotsOffset +
2387                         additional_offset - kHeapObjectTag;
2388   TNode<IntPtrT> offset =
2389       ElementOffsetFromIndex(slot, HOLEY_ELEMENTS, header_size);
2390   CSA_SLOW_ASSERT(
2391       this, IsOffsetInBounds(offset, LoadFeedbackVectorLength(feedback_vector),
2392                              FeedbackVector::kHeaderSize));
2393   return Load<MaybeObject>(feedback_vector, offset);
2394 }
2395 
2396 template TNode<MaybeObject> CodeStubAssembler::LoadFeedbackVectorSlot(
2397     TNode<FeedbackVector> feedback_vector, TNode<TaggedIndex> slot,
2398     int additional_offset);
2399 template TNode<MaybeObject> CodeStubAssembler::LoadFeedbackVectorSlot(
2400     TNode<FeedbackVector> feedback_vector, TNode<IntPtrT> slot,
2401     int additional_offset);
2402 template TNode<MaybeObject> CodeStubAssembler::LoadFeedbackVectorSlot(
2403     TNode<FeedbackVector> feedback_vector, TNode<UintPtrT> slot,
2404     int additional_offset);
2405 
2406 template <typename Array>
2407 TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32ArrayElement(
2408     TNode<Array> object, int array_header_size, TNode<IntPtrT> index,
2409     int additional_offset) {
2410   DCHECK(IsAligned(additional_offset, kTaggedSize));
2411   int endian_correction = 0;
2412 #if V8_TARGET_LITTLE_ENDIAN
2413   if (SmiValuesAre32Bits()) endian_correction = 4;
2414 #endif
2415   int32_t header_size = array_header_size + additional_offset - kHeapObjectTag +
2416                         endian_correction;
2417   TNode<IntPtrT> offset =
2418       ElementOffsetFromIndex(index, HOLEY_ELEMENTS, header_size);
2419   CSA_ASSERT(this, IsOffsetInBounds(offset, LoadArrayLength(object),
2420                                     array_header_size + endian_correction));
2421   if (SmiValuesAre32Bits()) {
2422     return Load<Int32T>(object, offset);
2423   } else {
2424     return SmiToInt32(Load(MachineType::TaggedSigned(), object, offset));
2425   }
2426 }
2427 
2428 TNode<Int32T> CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement(
2429     TNode<FixedArray> object, TNode<IntPtrT> index, int additional_offset) {
2430   CSA_SLOW_ASSERT(this, IsFixedArraySubclass(object));
2431   return LoadAndUntagToWord32ArrayElement(object, FixedArray::kHeaderSize,
2432                                           index, additional_offset);
2433 }
2434 
2435 TNode<MaybeObject> CodeStubAssembler::LoadWeakFixedArrayElement(
2436     TNode<WeakFixedArray> object, TNode<IntPtrT> index, int additional_offset) {
2437   return LoadArrayElement(object, WeakFixedArray::kHeaderSize, index,
2438                           additional_offset, LoadSensitivity::kSafe);
2439 }
2440 
2441 TNode<Float64T> CodeStubAssembler::LoadFixedDoubleArrayElement(
2442     TNode<FixedDoubleArray> object, TNode<IntPtrT> index, Label* if_hole,
2443     MachineType machine_type) {
2444   int32_t header_size = FixedDoubleArray::kHeaderSize - kHeapObjectTag;
2445   TNode<IntPtrT> offset =
2446       ElementOffsetFromIndex(index, HOLEY_DOUBLE_ELEMENTS, header_size);
2447   CSA_ASSERT(this, IsOffsetInBounds(
2448                        offset, LoadAndUntagFixedArrayBaseLength(object),
2449                        FixedDoubleArray::kHeaderSize, HOLEY_DOUBLE_ELEMENTS));
2450   return LoadDoubleWithHoleCheck(object, offset, if_hole, machine_type);
2451 }
2452 
2453 TNode<Object> CodeStubAssembler::LoadFixedArrayBaseElementAsTagged(
2454     TNode<FixedArrayBase> elements, TNode<IntPtrT> index,
2455     TNode<Int32T> elements_kind, Label* if_accessor, Label* if_hole) {
2456   TVARIABLE(Object, var_result);
2457   Label done(this), if_packed(this), if_holey(this), if_packed_double(this),
2458       if_holey_double(this), if_dictionary(this, Label::kDeferred);
2459 
2460   int32_t kinds[] = {
2461       // Handled by if_packed.
2462       PACKED_SMI_ELEMENTS, PACKED_ELEMENTS, PACKED_NONEXTENSIBLE_ELEMENTS,
2463       PACKED_SEALED_ELEMENTS, PACKED_FROZEN_ELEMENTS,
2464       // Handled by if_holey.
2465       HOLEY_SMI_ELEMENTS, HOLEY_ELEMENTS, HOLEY_NONEXTENSIBLE_ELEMENTS,
2466       HOLEY_SEALED_ELEMENTS, HOLEY_FROZEN_ELEMENTS,
2467       // Handled by if_packed_double.
2468       PACKED_DOUBLE_ELEMENTS,
2469       // Handled by if_holey_double.
2470       HOLEY_DOUBLE_ELEMENTS};
2471   Label* labels[] = {// PACKED_{SMI,}_ELEMENTS
2472                      &if_packed, &if_packed, &if_packed, &if_packed, &if_packed,
2473                      // HOLEY_{SMI,}_ELEMENTS
2474                      &if_holey, &if_holey, &if_holey, &if_holey, &if_holey,
2475                      // PACKED_DOUBLE_ELEMENTS
2476                      &if_packed_double,
2477                      // HOLEY_DOUBLE_ELEMENTS
2478                      &if_holey_double};
2479   Switch(elements_kind, &if_dictionary, kinds, labels, arraysize(kinds));
2480 
2481   BIND(&if_packed);
2482   {
2483     var_result = LoadFixedArrayElement(CAST(elements), index, 0);
2484     Goto(&done);
2485   }
2486 
2487   BIND(&if_holey);
2488   {
2489     var_result = LoadFixedArrayElement(CAST(elements), index);
2490     Branch(TaggedEqual(var_result.value(), TheHoleConstant()), if_hole, &done);
2491   }
2492 
2493   BIND(&if_packed_double);
2494   {
2495     var_result = AllocateHeapNumberWithValue(
2496         LoadFixedDoubleArrayElement(CAST(elements), index));
2497     Goto(&done);
2498   }
2499 
2500   BIND(&if_holey_double);
2501   {
2502     var_result = AllocateHeapNumberWithValue(
2503         LoadFixedDoubleArrayElement(CAST(elements), index, if_hole));
2504     Goto(&done);
2505   }
2506 
2507   BIND(&if_dictionary);
2508   {
2509     CSA_ASSERT(this, IsDictionaryElementsKind(elements_kind));
2510     var_result = BasicLoadNumberDictionaryElement(CAST(elements), index,
2511                                                   if_accessor, if_hole);
2512     Goto(&done);
2513   }
2514 
2515   BIND(&done);
2516   return var_result.value();
2517 }
2518 
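// Returns true if the float64 at |base| + |offset| holds the hole NaN. On
// 64-bit targets the whole bit pattern is compared; on 32-bit targets only the
// upper (exponent) word is compared against kHoleNanUpper32.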
2519 TNode<BoolT> CodeStubAssembler::IsDoubleHole(TNode<Object> base,
2520                                              TNode<IntPtrT> offset) {
2521   // TODO(ishell): Compare only the upper part for the hole once the
2522   // compiler is able to fold addition of already complex |offset| with
2523   // |kIeeeDoubleExponentWordOffset| into one addressing mode.
2524   if (Is64()) {
2525     TNode<Uint64T> element = Load<Uint64T>(base, offset);
2526     return Word64Equal(element, Int64Constant(kHoleNanInt64));
2527   } else {
2528     TNode<Uint32T> element_upper = Load<Uint32T>(
2529         base, IntPtrAdd(offset, IntPtrConstant(kIeeeDoubleExponentWordOffset)));
2530     return Word32Equal(element_upper, Int32Constant(kHoleNanUpper32));
2531   }
2532 }
2533 
2534 TNode<Float64T> CodeStubAssembler::LoadDoubleWithHoleCheck(
2535     TNode<Object> base, TNode<IntPtrT> offset, Label* if_hole,
2536     MachineType machine_type) {
2537   if (if_hole) {
2538     GotoIf(IsDoubleHole(base, offset), if_hole);
2539   }
2540   if (machine_type.IsNone()) {
2541     // This means the actual value is not needed.
2542     return TNode<Float64T>();
2543   }
2544   return UncheckedCast<Float64T>(Load(machine_type, base, offset));
2545 }
2546 
2547 TNode<ScopeInfo> CodeStubAssembler::LoadScopeInfo(TNode<Context> context) {
2548   return CAST(LoadContextElement(context, Context::SCOPE_INFO_INDEX));
2549 }
2550 
2551 TNode<BoolT> CodeStubAssembler::LoadScopeInfoHasExtensionField(
2552     TNode<ScopeInfo> scope_info) {
2553   TNode<IntPtrT> value =
2554       LoadAndUntagObjectField(scope_info, ScopeInfo::kFlagsOffset);
2555   return IsSetWord<ScopeInfo::HasContextExtensionSlotBit>(value);
2556 }
2557 
2558 void CodeStubAssembler::StoreContextElementNoWriteBarrier(
2559     TNode<Context> context, int slot_index, SloppyTNode<Object> value) {
2560   int offset = Context::SlotOffset(slot_index);
2561   StoreNoWriteBarrier(MachineRepresentation::kTagged, context,
2562                       IntPtrConstant(offset), value);
2563 }
2564 
2565 TNode<NativeContext> CodeStubAssembler::LoadNativeContext(
2566     TNode<Context> context) {
2567   TNode<Map> map = LoadMap(context);
2568   return CAST(LoadObjectField(
2569       map, Map::kConstructorOrBackPointerOrNativeContextOffset));
2570 }
2571 
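// Walks the context chain starting at |context| and returns the enclosing
// module context, identified by the native context's module context map; the
// loop asserts that the native context is never reached during the search.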
2572 TNode<Context> CodeStubAssembler::LoadModuleContext(TNode<Context> context) {
2573   TNode<NativeContext> native_context = LoadNativeContext(context);
2574   TNode<Map> module_map = CAST(
2575       LoadContextElement(native_context, Context::MODULE_CONTEXT_MAP_INDEX));
2576   TVariable<Object> cur_context(context, this);
2577 
2578   Label context_found(this);
2579 
2580   Label context_search(this, &cur_context);
2581 
2582   // Loop until cur_context->map() is module_map.
2583   Goto(&context_search);
2584   BIND(&context_search);
2585   {
2586     CSA_ASSERT(this, Word32BinaryNot(
2587                          TaggedEqual(cur_context.value(), native_context)));
2588     GotoIf(TaggedEqual(LoadMap(CAST(cur_context.value())), module_map),
2589            &context_found);
2590 
2591     cur_context =
2592         LoadContextElement(CAST(cur_context.value()), Context::PREVIOUS_INDEX);
2593     Goto(&context_search);
2594   }
2595 
2596   BIND(&context_found);
2597   return UncheckedCast<Context>(cur_context.value());
2598 }
2599 
2600 TNode<Map> CodeStubAssembler::LoadObjectFunctionInitialMap(
2601     TNode<NativeContext> native_context) {
2602   TNode<JSFunction> object_function =
2603       CAST(LoadContextElement(native_context, Context::OBJECT_FUNCTION_INDEX));
2604   return CAST(LoadJSFunctionPrototypeOrInitialMap(object_function));
2605 }
2606 
2607 TNode<Map> CodeStubAssembler::LoadSlowObjectWithNullPrototypeMap(
2608     TNode<NativeContext> native_context) {
2609   TNode<Map> map = CAST(LoadContextElement(
2610       native_context, Context::SLOW_OBJECT_WITH_NULL_PROTOTYPE_MAP));
2611   return map;
2612 }
2613 
2614 TNode<Map> CodeStubAssembler::LoadJSArrayElementsMap(
2615     SloppyTNode<Int32T> kind, TNode<NativeContext> native_context) {
2616   CSA_ASSERT(this, IsFastElementsKind(kind));
2617   TNode<IntPtrT> offset =
2618       IntPtrAdd(IntPtrConstant(Context::FIRST_JS_ARRAY_MAP_SLOT),
2619                 ChangeInt32ToIntPtr(kind));
2620   return UncheckedCast<Map>(LoadContextElement(native_context, offset));
2621 }
2622 
2623 TNode<Map> CodeStubAssembler::LoadJSArrayElementsMap(
2624     ElementsKind kind, TNode<NativeContext> native_context) {
2625   return UncheckedCast<Map>(
2626       LoadContextElement(native_context, Context::ArrayMapIndex(kind)));
2627 }
2628 
2629 TNode<BoolT> CodeStubAssembler::IsGeneratorFunction(
2630     TNode<JSFunction> function) {
2631   const TNode<SharedFunctionInfo> shared_function_info =
2632       LoadObjectField<SharedFunctionInfo>(
2633           function, JSFunction::kSharedFunctionInfoOffset);
2634 
2635   const TNode<Uint32T> function_kind =
2636       DecodeWord32<SharedFunctionInfo::FunctionKindBits>(
2637           LoadObjectField<Uint32T>(shared_function_info,
2638                                    SharedFunctionInfo::kFlagsOffset));
2639 
2640   // See IsGeneratorFunction(FunctionKind kind).
2641   return IsInRange(function_kind, FunctionKind::kAsyncConciseGeneratorMethod,
2642                    FunctionKind::kConciseGeneratorMethod);
2643 }
2644 
2645 TNode<BoolT> CodeStubAssembler::IsJSFunctionWithPrototypeSlot(
2646     TNode<HeapObject> object) {
2647   // Only JSFunction maps may have HasPrototypeSlotBit set.
2648   return TNode<BoolT>::UncheckedCast(
2649       IsSetWord32<Map::Bits1::HasPrototypeSlotBit>(
2650           LoadMapBitField(LoadMap(object))));
2651 }
2652 
2653 void CodeStubAssembler::BranchIfHasPrototypeProperty(
2654     TNode<JSFunction> function, TNode<Int32T> function_map_bit_field,
2655     Label* if_true, Label* if_false) {
2656   // (has_prototype_slot() && IsConstructor()) ||
2657   // IsGeneratorFunction(shared()->kind())
2658   uint32_t mask = Map::Bits1::HasPrototypeSlotBit::kMask |
2659                   Map::Bits1::IsConstructorBit::kMask;
2660 
2661   GotoIf(IsAllSetWord32(function_map_bit_field, mask), if_true);
2662   Branch(IsGeneratorFunction(function), if_true, if_false);
2663 }
2664 
2665 void CodeStubAssembler::GotoIfPrototypeRequiresRuntimeLookup(
2666     TNode<JSFunction> function, TNode<Map> map, Label* runtime) {
2667   // !has_prototype_property() || has_non_instance_prototype()
2668   TNode<Int32T> map_bit_field = LoadMapBitField(map);
2669   Label next_check(this);
2670   BranchIfHasPrototypeProperty(function, map_bit_field, &next_check, runtime);
2671   BIND(&next_check);
2672   GotoIf(IsSetWord32<Map::Bits1::HasNonInstancePrototypeBit>(map_bit_field),
2673          runtime);
2674 }
2675 
2676 TNode<HeapObject> CodeStubAssembler::LoadJSFunctionPrototype(
2677     TNode<JSFunction> function, Label* if_bailout) {
2678   CSA_ASSERT(this, IsFunctionWithPrototypeSlotMap(LoadMap(function)));
2679   CSA_ASSERT(this, IsClearWord32<Map::Bits1::HasNonInstancePrototypeBit>(
2680                        LoadMapBitField(LoadMap(function))));
2681   TNode<HeapObject> proto_or_map = LoadObjectField<HeapObject>(
2682       function, JSFunction::kPrototypeOrInitialMapOffset);
2683   GotoIf(IsTheHole(proto_or_map), if_bailout);
2684 
2685   TVARIABLE(HeapObject, var_result, proto_or_map);
2686   Label done(this, &var_result);
2687   GotoIfNot(IsMap(proto_or_map), &done);
2688 
2689   var_result = LoadMapPrototype(CAST(proto_or_map));
2690   Goto(&done);
2691 
2692   BIND(&done);
2693   return var_result.value();
2694 }
2695 
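// Returns the BytecodeArray of a SharedFunctionInfo. The function data is
// either the BytecodeArray itself or an InterpreterData wrapper, in which case
// the BytecodeArray is unwrapped from it.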
2696 TNode<BytecodeArray> CodeStubAssembler::LoadSharedFunctionInfoBytecodeArray(
2697     TNode<SharedFunctionInfo> shared) {
2698   TNode<HeapObject> function_data = LoadObjectField<HeapObject>(
2699       shared, SharedFunctionInfo::kFunctionDataOffset);
2700 
2701   TVARIABLE(HeapObject, var_result, function_data);
2702   Label done(this, &var_result);
2703 
2704   GotoIfNot(HasInstanceType(function_data, INTERPRETER_DATA_TYPE), &done);
2705   TNode<BytecodeArray> bytecode_array = LoadObjectField<BytecodeArray>(
2706       function_data, InterpreterData::kBytecodeArrayOffset);
2707   var_result = bytecode_array;
2708   Goto(&done);
2709 
2710   BIND(&done);
2711   return CAST(var_result.value());
2712 }
2713 
2714 void CodeStubAssembler::StoreObjectByteNoWriteBarrier(TNode<HeapObject> object,
2715                                                       int offset,
2716                                                       TNode<Word32T> value) {
2717   StoreNoWriteBarrier(MachineRepresentation::kWord8, object,
2718                       IntPtrConstant(offset - kHeapObjectTag), value);
2719 }
2720 
2721 void CodeStubAssembler::StoreHeapNumberValue(SloppyTNode<HeapNumber> object,
2722                                              SloppyTNode<Float64T> value) {
2723   StoreObjectFieldNoWriteBarrier(object, HeapNumber::kValueOffset, value);
2724 }
2725 
2726 void CodeStubAssembler::StoreObjectField(TNode<HeapObject> object, int offset,
2727                                          TNode<Object> value) {
2728   DCHECK_NE(HeapObject::kMapOffset, offset);  // Use StoreMap instead.
2729 
2730   OptimizedStoreField(MachineRepresentation::kTagged,
2731                       UncheckedCast<HeapObject>(object), offset, value);
2732 }
2733 
2734 void CodeStubAssembler::StoreObjectField(TNode<HeapObject> object,
2735                                          TNode<IntPtrT> offset,
2736                                          TNode<Object> value) {
2737   int const_offset;
2738   if (ToInt32Constant(offset, &const_offset)) {
2739     StoreObjectField(object, const_offset, value);
2740   } else {
2741     Store(object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), value);
2742   }
2743 }
2744 
2745 void CodeStubAssembler::UnsafeStoreObjectFieldNoWriteBarrier(
2746     TNode<HeapObject> object, int offset, TNode<Object> value) {
2747   OptimizedStoreFieldUnsafeNoWriteBarrier(MachineRepresentation::kTagged,
2748                                           object, offset, value);
2749 }
2750 
2751 void CodeStubAssembler::StoreMap(TNode<HeapObject> object, TNode<Map> map) {
2752   OptimizedStoreMap(object, map);
2753 }
2754 
2755 void CodeStubAssembler::StoreMapNoWriteBarrier(TNode<HeapObject> object,
2756                                                RootIndex map_root_index) {
2757   StoreMapNoWriteBarrier(object, CAST(LoadRoot(map_root_index)));
2758 }
2759 
2760 void CodeStubAssembler::StoreMapNoWriteBarrier(TNode<HeapObject> object,
2761                                                TNode<Map> map) {
2762   OptimizedStoreFieldAssertNoWriteBarrier(MachineRepresentation::kTaggedPointer,
2763                                           object, HeapObject::kMapOffset, map);
2764 }
2765 
2766 void CodeStubAssembler::StoreObjectFieldRoot(TNode<HeapObject> object,
2767                                              int offset, RootIndex root_index) {
2768   if (RootsTable::IsImmortalImmovable(root_index)) {
2769     StoreObjectFieldNoWriteBarrier(object, offset, LoadRoot(root_index));
2770   } else {
2771     StoreObjectField(object, offset, LoadRoot(root_index));
2772   }
2773 }
2774 
2775 template <typename TIndex>
2776 void CodeStubAssembler::StoreFixedArrayOrPropertyArrayElement(
2777     TNode<UnionT<FixedArray, PropertyArray>> object, TNode<TIndex> index_node,
2778     TNode<Object> value, WriteBarrierMode barrier_mode, int additional_offset) {
2779   // TODO(v8:9708): Do we want to keep both IntPtrT and UintPtrT variants?
2780   static_assert(std::is_same<TIndex, Smi>::value ||
2781                     std::is_same<TIndex, UintPtrT>::value ||
2782                     std::is_same<TIndex, IntPtrT>::value,
2783                 "Only Smi, UintPtrT or IntPtrT index is allowed");
2784   DCHECK(barrier_mode == SKIP_WRITE_BARRIER ||
2785          barrier_mode == UNSAFE_SKIP_WRITE_BARRIER ||
2786          barrier_mode == UPDATE_WRITE_BARRIER ||
2787          barrier_mode == UPDATE_EPHEMERON_KEY_WRITE_BARRIER);
2788   DCHECK(IsAligned(additional_offset, kTaggedSize));
2789   STATIC_ASSERT(static_cast<int>(FixedArray::kHeaderSize) ==
2790                 static_cast<int>(PropertyArray::kHeaderSize));
2791   int header_size =
2792       FixedArray::kHeaderSize + additional_offset - kHeapObjectTag;
2793   TNode<IntPtrT> offset =
2794       ElementOffsetFromIndex(index_node, HOLEY_ELEMENTS, header_size);
2795   STATIC_ASSERT(static_cast<int>(FixedArrayBase::kLengthOffset) ==
2796                 static_cast<int>(WeakFixedArray::kLengthOffset));
2797   STATIC_ASSERT(static_cast<int>(FixedArrayBase::kLengthOffset) ==
2798                 static_cast<int>(PropertyArray::kLengthAndHashOffset));
2799   // Check that index_node + additional_offset <= object.length.
2800   // TODO(cbruni): Use proper LoadXXLength helpers
2801   CSA_ASSERT(
2802       this,
2803       IsOffsetInBounds(
2804           offset,
2805           Select<IntPtrT>(
2806               IsPropertyArray(object),
2807               [=] {
2808                 TNode<IntPtrT> length_and_hash = LoadAndUntagObjectField(
2809                     object, PropertyArray::kLengthAndHashOffset);
2810                 return TNode<IntPtrT>::UncheckedCast(
2811                     DecodeWord<PropertyArray::LengthField>(length_and_hash));
2812               },
2813               [=] {
2814                 return LoadAndUntagObjectField(object,
2815                                                FixedArrayBase::kLengthOffset);
2816               }),
2817           FixedArray::kHeaderSize));
2818   if (barrier_mode == SKIP_WRITE_BARRIER) {
2819     StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset, value);
2820   } else if (barrier_mode == UNSAFE_SKIP_WRITE_BARRIER) {
2821     UnsafeStoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset,
2822                               value);
2823   } else if (barrier_mode == UPDATE_EPHEMERON_KEY_WRITE_BARRIER) {
2824     StoreEphemeronKey(object, offset, value);
2825   } else {
2826     Store(object, offset, value);
2827   }
2828 }
2829 
2830 template V8_EXPORT_PRIVATE void
2831 CodeStubAssembler::StoreFixedArrayOrPropertyArrayElement<Smi>(
2832     TNode<UnionT<FixedArray, PropertyArray>>, TNode<Smi>, TNode<Object>,
2833     WriteBarrierMode, int);
2834 
2835 template V8_EXPORT_PRIVATE void
2836 CodeStubAssembler::StoreFixedArrayOrPropertyArrayElement<IntPtrT>(
2837     TNode<UnionT<FixedArray, PropertyArray>>, TNode<IntPtrT>, TNode<Object>,
2838     WriteBarrierMode, int);
2839 
2840 template V8_EXPORT_PRIVATE void
2841 CodeStubAssembler::StoreFixedArrayOrPropertyArrayElement<UintPtrT>(
2842     TNode<UnionT<FixedArray, PropertyArray>>, TNode<UintPtrT>, TNode<Object>,
2843     WriteBarrierMode, int);
2844 
2845 template <typename TIndex>
2846 void CodeStubAssembler::StoreFixedDoubleArrayElement(
2847     TNode<FixedDoubleArray> object, TNode<TIndex> index, TNode<Float64T> value,
2848     CheckBounds check_bounds) {
2849   // TODO(v8:9708): Do we want to keep both IntPtrT and UintPtrT variants?
2850   static_assert(std::is_same<TIndex, Smi>::value ||
2851                     std::is_same<TIndex, UintPtrT>::value ||
2852                     std::is_same<TIndex, IntPtrT>::value,
2853                 "Only Smi, UintPtrT or IntPtrT index is allowed");
2854   if (NeedsBoundsCheck(check_bounds)) {
2855     FixedArrayBoundsCheck(object, index, 0);
2856   }
2857   TNode<IntPtrT> offset = ElementOffsetFromIndex(
2858       index, PACKED_DOUBLE_ELEMENTS, FixedArray::kHeaderSize - kHeapObjectTag);
2859   MachineRepresentation rep = MachineRepresentation::kFloat64;
2860   // Make sure we do not store signalling NaNs into double arrays.
2861   TNode<Float64T> value_silenced = Float64SilenceNaN(value);
2862   StoreNoWriteBarrier(rep, object, offset, value_silenced);
2863 }
2864 
2865 // Export the Smi version which is used outside of code-stub-assembler.
2866 template V8_EXPORT_PRIVATE void CodeStubAssembler::StoreFixedDoubleArrayElement<
2867     Smi>(TNode<FixedDoubleArray>, TNode<Smi>, TNode<Float64T>, CheckBounds);
2868 
2869 void CodeStubAssembler::StoreFeedbackVectorSlot(
2870     TNode<FeedbackVector> feedback_vector, TNode<UintPtrT> slot,
2871     TNode<AnyTaggedT> value, WriteBarrierMode barrier_mode,
2872     int additional_offset) {
2873   DCHECK(IsAligned(additional_offset, kTaggedSize));
2874   DCHECK(barrier_mode == SKIP_WRITE_BARRIER ||
2875          barrier_mode == UNSAFE_SKIP_WRITE_BARRIER ||
2876          barrier_mode == UPDATE_WRITE_BARRIER);
2877   int header_size = FeedbackVector::kRawFeedbackSlotsOffset +
2878                     additional_offset - kHeapObjectTag;
2879   TNode<IntPtrT> offset =
2880       ElementOffsetFromIndex(Signed(slot), HOLEY_ELEMENTS, header_size);
2881   // Check that slot <= feedback_vector.length.
2882   CSA_ASSERT(this,
2883              IsOffsetInBounds(offset, LoadFeedbackVectorLength(feedback_vector),
2884                               FeedbackVector::kHeaderSize));
2885   if (barrier_mode == SKIP_WRITE_BARRIER) {
2886     StoreNoWriteBarrier(MachineRepresentation::kTagged, feedback_vector, offset,
2887                         value);
2888   } else if (barrier_mode == UNSAFE_SKIP_WRITE_BARRIER) {
2889     UnsafeStoreNoWriteBarrier(MachineRepresentation::kTagged, feedback_vector,
2890                               offset, value);
2891   } else {
2892     Store(feedback_vector, offset, value);
2893   }
2894 }
2895 
2896 TNode<Int32T> CodeStubAssembler::EnsureArrayPushable(TNode<Context> context,
2897                                                      TNode<Map> map,
2898                                                      Label* bailout) {
2899   // Disallow pushing onto prototypes. It might be the JSArray prototype.
2900   // Disallow pushing onto non-extensible objects.
2901   Comment("Disallow pushing onto prototypes");
2902   GotoIfNot(IsExtensibleNonPrototypeMap(map), bailout);
2903 
2904   EnsureArrayLengthWritable(context, map, bailout);
2905 
2906   TNode<Uint32T> kind =
2907       DecodeWord32<Map::Bits2::ElementsKindBits>(LoadMapBitField2(map));
2908   return Signed(kind);
2909 }
2910 
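// Grows the elements backing store of |array| if |length| + |growth| exceeds
// the current capacity, updating |var_elements| with the (possibly new)
// backing store; jumps to |bailout| if growing fails.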
2911 void CodeStubAssembler::PossiblyGrowElementsCapacity(
2912     ElementsKind kind, TNode<HeapObject> array, TNode<BInt> length,
2913     TVariable<FixedArrayBase>* var_elements, TNode<BInt> growth,
2914     Label* bailout) {
2915   Label fits(this, var_elements);
2916   TNode<BInt> capacity =
2917       TaggedToParameter<BInt>(LoadFixedArrayBaseLength(var_elements->value()));
2918 
2919   TNode<BInt> new_length = IntPtrOrSmiAdd(growth, length);
2920   GotoIfNot(IntPtrOrSmiGreaterThan(new_length, capacity), &fits);
2921   TNode<BInt> new_capacity = CalculateNewElementsCapacity(new_length);
2922   *var_elements = GrowElementsCapacity(array, var_elements->value(), kind, kind,
2923                                        capacity, new_capacity, bailout);
2924   Goto(&fits);
2925   BIND(&fits);
2926 }
2927 
2928 TNode<Smi> CodeStubAssembler::BuildAppendJSArray(ElementsKind kind,
2929                                                  TNode<JSArray> array,
2930                                                  CodeStubArguments* args,
2931                                                  TVariable<IntPtrT>* arg_index,
2932                                                  Label* bailout) {
2933   Comment("BuildAppendJSArray: ", ElementsKindToString(kind));
2934   Label pre_bailout(this);
2935   Label success(this);
2936   TVARIABLE(Smi, var_tagged_length);
2937   TVARIABLE(BInt, var_length, SmiToBInt(LoadFastJSArrayLength(array)));
2938   TVARIABLE(FixedArrayBase, var_elements, LoadElements(array));
2939 
2940   // Resize the capacity of the fixed array if it doesn't fit.
2941   TNode<IntPtrT> first = arg_index->value();
2942   TNode<BInt> growth = IntPtrToBInt(IntPtrSub(args->GetLength(), first));
2943   PossiblyGrowElementsCapacity(kind, array, var_length.value(), &var_elements,
2944                                growth, &pre_bailout);
2945 
2946   // Push each argument onto the end of the array now that there is enough
2947   // capacity.
2948   CodeStubAssembler::VariableList push_vars({&var_length}, zone());
2949   TNode<FixedArrayBase> elements = var_elements.value();
2950   args->ForEach(
2951       push_vars,
2952       [&](TNode<Object> arg) {
2953         TryStoreArrayElement(kind, &pre_bailout, elements, var_length.value(),
2954                              arg);
2955         Increment(&var_length);
2956       },
2957       first);
2958   {
2959     TNode<Smi> length = BIntToSmi(var_length.value());
2960     var_tagged_length = length;
2961     StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
2962     Goto(&success);
2963   }
2964 
2965   BIND(&pre_bailout);
2966   {
2967     TNode<Smi> length = ParameterToTagged(var_length.value());
2968     var_tagged_length = length;
2969     TNode<Smi> diff = SmiSub(length, LoadFastJSArrayLength(array));
2970     StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
2971     *arg_index = IntPtrAdd(arg_index->value(), SmiUntag(diff));
2972     Goto(bailout);
2973   }
2974 
2975   BIND(&success);
2976   return var_tagged_length.value();
2977 }
2978 
2979 void CodeStubAssembler::TryStoreArrayElement(ElementsKind kind, Label* bailout,
2980                                              TNode<FixedArrayBase> elements,
2981                                              TNode<BInt> index,
2982                                              TNode<Object> value) {
2983   if (IsSmiElementsKind(kind)) {
2984     GotoIf(TaggedIsNotSmi(value), bailout);
2985   } else if (IsDoubleElementsKind(kind)) {
2986     GotoIfNotNumber(value, bailout);
2987   }
2988 
2989   if (IsDoubleElementsKind(kind)) {
2990     StoreElement(elements, kind, index, ChangeNumberToFloat64(CAST(value)));
2991   } else {
2992     StoreElement(elements, kind, index, value);
2993   }
2994 }
2995 
2996 void CodeStubAssembler::BuildAppendJSArray(ElementsKind kind,
2997                                            TNode<JSArray> array,
2998                                            TNode<Object> value,
2999                                            Label* bailout) {
3000   Comment("BuildAppendJSArray: ", ElementsKindToString(kind));
3001   TVARIABLE(BInt, var_length, SmiToBInt(LoadFastJSArrayLength(array)));
3002   TVARIABLE(FixedArrayBase, var_elements, LoadElements(array));
3003 
3004   // Resize the capacity of the fixed array if it doesn't fit.
3005   TNode<BInt> growth = IntPtrOrSmiConstant<BInt>(1);
3006   PossiblyGrowElementsCapacity(kind, array, var_length.value(), &var_elements,
3007                                growth, bailout);
3008 
3009   // Push the value onto the end of the array now that there is enough
3010   // capacity.
3011   TryStoreArrayElement(kind, bailout, var_elements.value(), var_length.value(),
3012                        value);
3013   Increment(&var_length);
3014 
3015   TNode<Smi> length = BIntToSmi(var_length.value());
3016   StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
3017 }
3018 
3019 TNode<Cell> CodeStubAssembler::AllocateCellWithValue(TNode<Object> value,
3020                                                      WriteBarrierMode mode) {
3021   TNode<HeapObject> result = Allocate(Cell::kSize, kNone);
3022   StoreMapNoWriteBarrier(result, RootIndex::kCellMap);
3023   TNode<Cell> cell = CAST(result);
3024   StoreCellValue(cell, value, mode);
3025   return cell;
3026 }
3027 
3028 TNode<Object> CodeStubAssembler::LoadCellValue(TNode<Cell> cell) {
3029   return LoadObjectField(cell, Cell::kValueOffset);
3030 }
3031 
3032 void CodeStubAssembler::StoreCellValue(TNode<Cell> cell, TNode<Object> value,
3033                                        WriteBarrierMode mode) {
3034   DCHECK(mode == SKIP_WRITE_BARRIER || mode == UPDATE_WRITE_BARRIER);
3035 
3036   if (mode == UPDATE_WRITE_BARRIER) {
3037     StoreObjectField(cell, Cell::kValueOffset, value);
3038   } else {
3039     StoreObjectFieldNoWriteBarrier(cell, Cell::kValueOffset, value);
3040   }
3041 }
3042 
3043 TNode<HeapNumber> CodeStubAssembler::AllocateHeapNumber() {
3044   TNode<HeapObject> result = Allocate(HeapNumber::kSize, kNone);
3045   RootIndex heap_map_index = RootIndex::kHeapNumberMap;
3046   StoreMapNoWriteBarrier(result, heap_map_index);
3047   return UncheckedCast<HeapNumber>(result);
3048 }
3049 
3050 TNode<HeapNumber> CodeStubAssembler::AllocateHeapNumberWithValue(
3051     SloppyTNode<Float64T> value) {
3052   TNode<HeapNumber> result = AllocateHeapNumber();
3053   StoreHeapNumberValue(result, value);
3054   return result;
3055 }
3056 
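// Returns |object| unchanged unless it is a heap number, in which case a fresh
// HeapNumber with the same value is allocated and returned.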
3057 TNode<Object> CodeStubAssembler::CloneIfMutablePrimitive(TNode<Object> object) {
3058   TVARIABLE(Object, result, object);
3059   Label done(this);
3060 
3061   GotoIf(TaggedIsSmi(object), &done);
3062   // TODO(leszeks): Read the field descriptor to decide if this heap number is
3063   // mutable or not.
3064   GotoIfNot(IsHeapNumber(UncheckedCast<HeapObject>(object)), &done);
3065   {
3066     // Mutable heap number found --- allocate a clone.
3067     TNode<Float64T> value =
3068         LoadHeapNumberValue(UncheckedCast<HeapNumber>(object));
3069     result = AllocateHeapNumberWithValue(value);
3070     Goto(&done);
3071   }
3072 
3073   BIND(&done);
3074   return result.value();
3075 }
3076 
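// Allocates a BigInt with room for |length| digits and a bitfield encoding the
// length; the sign defaults to positive and the digits are left uninitialized.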
3077 TNode<BigInt> CodeStubAssembler::AllocateBigInt(TNode<IntPtrT> length) {
3078   TNode<BigInt> result = AllocateRawBigInt(length);
3079   StoreBigIntBitfield(result,
3080                       Word32Shl(TruncateIntPtrToInt32(length),
3081                                 Int32Constant(BigInt::LengthBits::kShift)));
3082   return result;
3083 }
3084 
3085 TNode<BigInt> CodeStubAssembler::AllocateRawBigInt(TNode<IntPtrT> length) {
3086   TNode<IntPtrT> size =
3087       IntPtrAdd(IntPtrConstant(BigInt::kHeaderSize),
3088                 Signed(WordShl(length, kSystemPointerSizeLog2)));
3089   TNode<HeapObject> raw_result = Allocate(size, kAllowLargeObjectAllocation);
3090   StoreMapNoWriteBarrier(raw_result, RootIndex::kBigIntMap);
3091   if (FIELD_SIZE(BigInt::kOptionalPaddingOffset) != 0) {
3092     DCHECK_EQ(4, FIELD_SIZE(BigInt::kOptionalPaddingOffset));
3093     StoreObjectFieldNoWriteBarrier(raw_result, BigInt::kOptionalPaddingOffset,
3094                                    Int32Constant(0));
3095   }
3096   return UncheckedCast<BigInt>(raw_result);
3097 }
3098 
3099 void CodeStubAssembler::StoreBigIntBitfield(TNode<BigInt> bigint,
3100                                             TNode<Word32T> bitfield) {
3101   StoreObjectFieldNoWriteBarrier(bigint, BigInt::kBitfieldOffset, bitfield);
3102 }
3103 
3104 void CodeStubAssembler::StoreBigIntDigit(TNode<BigInt> bigint,
3105                                          intptr_t digit_index,
3106                                          TNode<UintPtrT> digit) {
3107   CHECK_LE(0, digit_index);
3108   CHECK_LT(digit_index, BigInt::kMaxLength);
3109   StoreObjectFieldNoWriteBarrier(
3110       bigint,
3111       BigInt::kDigitsOffset +
3112           static_cast<int>(digit_index) * kSystemPointerSize,
3113       digit);
3114 }
3115 
3116 void CodeStubAssembler::StoreBigIntDigit(TNode<BigInt> bigint,
3117                                          TNode<IntPtrT> digit_index,
3118                                          TNode<UintPtrT> digit) {
3119   TNode<IntPtrT> offset =
3120       IntPtrAdd(IntPtrConstant(BigInt::kDigitsOffset),
3121                 IntPtrMul(digit_index, IntPtrConstant(kSystemPointerSize)));
3122   StoreObjectFieldNoWriteBarrier(bigint, offset, digit);
3123 }
3124 
3125 TNode<Word32T> CodeStubAssembler::LoadBigIntBitfield(TNode<BigInt> bigint) {
3126   return UncheckedCast<Word32T>(
3127       LoadObjectField<Uint32T>(bigint, BigInt::kBitfieldOffset));
3128 }
3129 
3130 TNode<UintPtrT> CodeStubAssembler::LoadBigIntDigit(TNode<BigInt> bigint,
3131                                                    intptr_t digit_index) {
3132   CHECK_LE(0, digit_index);
3133   CHECK_LT(digit_index, BigInt::kMaxLength);
3134   return LoadObjectField<UintPtrT>(
3135       bigint, BigInt::kDigitsOffset +
3136                   static_cast<int>(digit_index) * kSystemPointerSize);
3137 }
3138 
3139 TNode<UintPtrT> CodeStubAssembler::LoadBigIntDigit(TNode<BigInt> bigint,
3140                                                    TNode<IntPtrT> digit_index) {
3141   TNode<IntPtrT> offset =
3142       IntPtrAdd(IntPtrConstant(BigInt::kDigitsOffset),
3143                 IntPtrMul(digit_index, IntPtrConstant(kSystemPointerSize)));
3144   return LoadObjectField<UintPtrT>(bigint, offset);
3145 }
3146 
3147 TNode<ByteArray> CodeStubAssembler::AllocateByteArray(TNode<UintPtrT> length,
3148                                                       AllocationFlags flags) {
3149   Comment("AllocateByteArray");
3150   TVARIABLE(Object, var_result);
3151 
3152   // Compute the ByteArray size and check if it fits into new space.
3153   Label if_lengthiszero(this), if_sizeissmall(this),
3154       if_notsizeissmall(this, Label::kDeferred), if_join(this);
3155   GotoIf(WordEqual(length, UintPtrConstant(0)), &if_lengthiszero);
3156 
3157   TNode<IntPtrT> raw_size =
3158       GetArrayAllocationSize(Signed(length), UINT8_ELEMENTS,
3159                              ByteArray::kHeaderSize + kObjectAlignmentMask);
3160   TNode<IntPtrT> size =
3161       WordAnd(raw_size, IntPtrConstant(~kObjectAlignmentMask));
3162   Branch(IntPtrLessThanOrEqual(size, IntPtrConstant(kMaxRegularHeapObjectSize)),
3163          &if_sizeissmall, &if_notsizeissmall);
3164 
3165   BIND(&if_sizeissmall);
3166   {
3167     // Just allocate the ByteArray in new space.
3168     TNode<HeapObject> result =
3169         AllocateInNewSpace(UncheckedCast<IntPtrT>(size), flags);
3170     DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kByteArrayMap));
3171     StoreMapNoWriteBarrier(result, RootIndex::kByteArrayMap);
3172     StoreObjectFieldNoWriteBarrier(result, ByteArray::kLengthOffset,
3173                                    SmiTag(Signed(length)));
3174     var_result = result;
3175     Goto(&if_join);
3176   }
3177 
3178   BIND(&if_notsizeissmall);
3179   {
3180     // We might need to allocate in large object space; go to the runtime.
3181     TNode<Object> result =
3182         CallRuntime(Runtime::kAllocateByteArray, NoContextConstant(),
3183                     ChangeUintPtrToTagged(length));
3184     var_result = result;
3185     Goto(&if_join);
3186   }
3187 
3188   BIND(&if_lengthiszero);
3189   {
3190     var_result = EmptyByteArrayConstant();
3191     Goto(&if_join);
3192   }
3193 
3194   BIND(&if_join);
3195   return CAST(var_result.value());
3196 }
3197 
3198 TNode<String> CodeStubAssembler::AllocateSeqOneByteString(
3199     uint32_t length, AllocationFlags flags) {
3200   Comment("AllocateSeqOneByteString");
3201   if (length == 0) {
3202     return EmptyStringConstant();
3203   }
3204   TNode<HeapObject> result = Allocate(SeqOneByteString::SizeFor(length), flags);
3205   DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kOneByteStringMap));
3206   StoreMapNoWriteBarrier(result, RootIndex::kOneByteStringMap);
3207   StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
3208                                  Uint32Constant(length));
3209   StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldOffset,
3210                                  Int32Constant(String::kEmptyHashField));
3211   return CAST(result);
3212 }
3213 
3214 TNode<BoolT> CodeStubAssembler::IsZeroOrContext(SloppyTNode<Object> object) {
3215   return Select<BoolT>(
3216       TaggedEqual(object, SmiConstant(0)), [=] { return Int32TrueConstant(); },
3217       [=] { return IsContext(CAST(object)); });
3218 }
3219 
3220 TNode<String> CodeStubAssembler::AllocateSeqTwoByteString(
3221     uint32_t length, AllocationFlags flags) {
3222   Comment("AllocateSeqTwoByteString");
3223   if (length == 0) {
3224     return EmptyStringConstant();
3225   }
3226   TNode<HeapObject> result = Allocate(SeqTwoByteString::SizeFor(length), flags);
3227   DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kStringMap));
3228   StoreMapNoWriteBarrier(result, RootIndex::kStringMap);
3229   StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset,
3230                                  Uint32Constant(length));
3231   StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldOffset,
3232                                  Int32Constant(String::kEmptyHashField));
3233   return CAST(result);
3234 }
3235 
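// Allocates a SlicedString that shares the characters of |parent| via the
// (parent, offset) pair rather than copying them; |map_root_index| selects the
// one-byte or two-byte sliced string map.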
3236 TNode<String> CodeStubAssembler::AllocateSlicedString(RootIndex map_root_index,
3237                                                       TNode<Uint32T> length,
3238                                                       TNode<String> parent,
3239                                                       TNode<Smi> offset) {
3240   DCHECK(map_root_index == RootIndex::kSlicedOneByteStringMap ||
3241          map_root_index == RootIndex::kSlicedStringMap);
3242   TNode<HeapObject> result = Allocate(SlicedString::kSize);
3243   DCHECK(RootsTable::IsImmortalImmovable(map_root_index));
3244   StoreMapNoWriteBarrier(result, map_root_index);
3245   StoreObjectFieldNoWriteBarrier(result, SlicedString::kHashFieldOffset,
3246                                  Int32Constant(String::kEmptyHashField));
3247   StoreObjectFieldNoWriteBarrier(result, SlicedString::kLengthOffset, length);
3248   StoreObjectFieldNoWriteBarrier(result, SlicedString::kParentOffset, parent);
3249   StoreObjectFieldNoWriteBarrier(result, SlicedString::kOffsetOffset, offset);
3250   return CAST(result);
3251 }
3252 
3253 TNode<String> CodeStubAssembler::AllocateSlicedOneByteString(
3254     TNode<Uint32T> length, TNode<String> parent, TNode<Smi> offset) {
3255   return AllocateSlicedString(RootIndex::kSlicedOneByteStringMap, length,
3256                               parent, offset);
3257 }
3258 
3259 TNode<String> CodeStubAssembler::AllocateSlicedTwoByteString(
3260     TNode<Uint32T> length, TNode<String> parent, TNode<Smi> offset) {
3261   return AllocateSlicedString(RootIndex::kSlicedStringMap, length, parent,
3262                               offset);
3263 }
3264 
3265 TNode<NameDictionary> CodeStubAssembler::AllocateNameDictionary(
3266     int at_least_space_for) {
3267   return AllocateNameDictionary(IntPtrConstant(at_least_space_for));
3268 }
3269 
3270 TNode<NameDictionary> CodeStubAssembler::AllocateNameDictionary(
3271     TNode<IntPtrT> at_least_space_for, AllocationFlags flags) {
3272   CSA_ASSERT(this, UintPtrLessThanOrEqual(
3273                        at_least_space_for,
3274                        IntPtrConstant(NameDictionary::kMaxCapacity)));
3275   TNode<IntPtrT> capacity = HashTableComputeCapacity(at_least_space_for);
3276   return AllocateNameDictionaryWithCapacity(capacity, flags);
3277 }
3278 
3279 TNode<NameDictionary> CodeStubAssembler::AllocateNameDictionaryWithCapacity(
3280     TNode<IntPtrT> capacity, AllocationFlags flags) {
3281   CSA_ASSERT(this, WordIsPowerOfTwo(capacity));
3282   CSA_ASSERT(this, IntPtrGreaterThan(capacity, IntPtrConstant(0)));
3283   TNode<IntPtrT> length = EntryToIndex<NameDictionary>(capacity);
3284   TNode<IntPtrT> store_size = IntPtrAdd(
3285       TimesTaggedSize(length), IntPtrConstant(NameDictionary::kHeaderSize));
3286 
3287   TNode<NameDictionary> result =
3288       UncheckedCast<NameDictionary>(Allocate(store_size, flags));
3289 
3290   // Initialize FixedArray fields.
3291   {
3292     DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kNameDictionaryMap));
3293     StoreMapNoWriteBarrier(result, RootIndex::kNameDictionaryMap);
3294     StoreObjectFieldNoWriteBarrier(result, FixedArray::kLengthOffset,
3295                                    SmiFromIntPtr(length));
3296   }
3297 
3298   // Initialize HashTable fields.
3299   {
3300     TNode<Smi> zero = SmiConstant(0);
3301     StoreFixedArrayElement(result, NameDictionary::kNumberOfElementsIndex, zero,
3302                            SKIP_WRITE_BARRIER);
3303     StoreFixedArrayElement(result,
3304                            NameDictionary::kNumberOfDeletedElementsIndex, zero,
3305                            SKIP_WRITE_BARRIER);
3306     StoreFixedArrayElement(result, NameDictionary::kCapacityIndex,
3307                            SmiTag(capacity), SKIP_WRITE_BARRIER);
3308     // Initialize Dictionary fields.
3309     StoreFixedArrayElement(result, NameDictionary::kNextEnumerationIndexIndex,
3310                            SmiConstant(PropertyDetails::kInitialIndex),
3311                            SKIP_WRITE_BARRIER);
3312     StoreFixedArrayElement(result, NameDictionary::kObjectHashIndex,
3313                            SmiConstant(PropertyArray::kNoHashSentinel),
3314                            SKIP_WRITE_BARRIER);
3315   }
3316 
3317   // Initialize NameDictionary elements.
3318   {
3319     TNode<IntPtrT> result_word = BitcastTaggedToWord(result);
3320     TNode<IntPtrT> start_address = IntPtrAdd(
3321         result_word, IntPtrConstant(NameDictionary::OffsetOfElementAt(
3322                                         NameDictionary::kElementsStartIndex) -
3323                                     kHeapObjectTag));
3324     TNode<IntPtrT> end_address = IntPtrAdd(
3325         result_word, IntPtrSub(store_size, IntPtrConstant(kHeapObjectTag)));
3326 
3327     TNode<Oddball> filler = UndefinedConstant();
3328     DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kUndefinedValue));
3329 
3330     StoreFieldsNoWriteBarrier(start_address, end_address, filler);
3331   }
3332 
3333   return result;
3334 }
3335 
3336 TNode<NameDictionary> CodeStubAssembler::CopyNameDictionary(
3337     TNode<NameDictionary> dictionary, Label* large_object_fallback) {
3338   Comment("Copy boilerplate property dict");
3339   TNode<IntPtrT> capacity = SmiUntag(GetCapacity<NameDictionary>(dictionary));
3340   CSA_ASSERT(this, IntPtrGreaterThanOrEqual(capacity, IntPtrConstant(0)));
3341   GotoIf(UintPtrGreaterThan(
3342              capacity, IntPtrConstant(NameDictionary::kMaxRegularCapacity)),
3343          large_object_fallback);
3344   TNode<NameDictionary> properties =
3345       AllocateNameDictionaryWithCapacity(capacity);
3346   TNode<IntPtrT> length = SmiUntag(LoadFixedArrayBaseLength(dictionary));
3347   CopyFixedArrayElements(PACKED_ELEMENTS, dictionary, properties, length,
3348                          SKIP_WRITE_BARRIER);
3349   return properties;
3350 }
3351 
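// Allocates an empty OrderedHashMap or OrderedHashSet with the initial
// capacity: the element counts are zeroed, every bucket is filled with
// kNotFound, and the data table entries are filled with undefined.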
3352 template <typename CollectionType>
3353 TNode<CollectionType> CodeStubAssembler::AllocateOrderedHashTable() {
3354   static const int kCapacity = CollectionType::kInitialCapacity;
3355   static const int kBucketCount = kCapacity / CollectionType::kLoadFactor;
3356   static const int kDataTableLength = kCapacity * CollectionType::kEntrySize;
3357   static const int kFixedArrayLength =
3358       CollectionType::HashTableStartIndex() + kBucketCount + kDataTableLength;
3359   static const int kDataTableStartIndex =
3360       CollectionType::HashTableStartIndex() + kBucketCount;
3361 
3362   STATIC_ASSERT(base::bits::IsPowerOfTwo(kCapacity));
3363   STATIC_ASSERT(kCapacity <= CollectionType::MaxCapacity());
3364 
3365   // Allocate the table and add the proper map.
3366   const ElementsKind elements_kind = HOLEY_ELEMENTS;
3367   TNode<IntPtrT> length_intptr = IntPtrConstant(kFixedArrayLength);
3368   TNode<Map> fixed_array_map =
3369       HeapConstant(CollectionType::GetMap(ReadOnlyRoots(isolate())));
3370   TNode<CollectionType> table =
3371       CAST(AllocateFixedArray(elements_kind, length_intptr,
3372                               kAllowLargeObjectAllocation, fixed_array_map));
3373 
3374   // Initialize the OrderedHashTable fields.
3375   const WriteBarrierMode barrier_mode = SKIP_WRITE_BARRIER;
3376   StoreFixedArrayElement(table, CollectionType::NumberOfElementsIndex(),
3377                          SmiConstant(0), barrier_mode);
3378   StoreFixedArrayElement(table, CollectionType::NumberOfDeletedElementsIndex(),
3379                          SmiConstant(0), barrier_mode);
3380   StoreFixedArrayElement(table, CollectionType::NumberOfBucketsIndex(),
3381                          SmiConstant(kBucketCount), barrier_mode);
3382 
3383   // Fill the buckets with kNotFound.
3384   TNode<Smi> not_found = SmiConstant(CollectionType::kNotFound);
3385   STATIC_ASSERT(CollectionType::HashTableStartIndex() ==
3386                 CollectionType::NumberOfBucketsIndex() + 1);
3387   STATIC_ASSERT((CollectionType::HashTableStartIndex() + kBucketCount) ==
3388                 kDataTableStartIndex);
3389   for (int i = 0; i < kBucketCount; i++) {
3390     StoreFixedArrayElement(table, CollectionType::HashTableStartIndex() + i,
3391                            not_found, barrier_mode);
3392   }
3393 
3394   // Fill the data table with undefined.
3395   STATIC_ASSERT(kDataTableStartIndex + kDataTableLength == kFixedArrayLength);
3396   for (int i = 0; i < kDataTableLength; i++) {
3397     StoreFixedArrayElement(table, kDataTableStartIndex + i, UndefinedConstant(),
3398                            barrier_mode);
3399   }
3400 
3401   return table;
3402 }
3403 
3404 template TNode<OrderedHashMap>
3405 CodeStubAssembler::AllocateOrderedHashTable<OrderedHashMap>();
3406 template TNode<OrderedHashSet>
3407 CodeStubAssembler::AllocateOrderedHashTable<OrderedHashSet>();
3408 
3409 TNode<JSObject> CodeStubAssembler::AllocateJSObjectFromMap(
3410     TNode<Map> map, base::Optional<TNode<HeapObject>> properties,
3411     base::Optional<TNode<FixedArray>> elements, AllocationFlags flags,
3412     SlackTrackingMode slack_tracking_mode) {
3413   CSA_ASSERT(this, Word32BinaryNot(IsJSFunctionMap(map)));
3414   CSA_ASSERT(this, Word32BinaryNot(InstanceTypeEqual(LoadMapInstanceType(map),
3415                                                      JS_GLOBAL_OBJECT_TYPE)));
3416   TNode<IntPtrT> instance_size =
3417       TimesTaggedSize(LoadMapInstanceSizeInWords(map));
3418   TNode<HeapObject> object = AllocateInNewSpace(instance_size, flags);
3419   StoreMapNoWriteBarrier(object, map);
3420   InitializeJSObjectFromMap(object, map, instance_size, properties, elements,
3421                             slack_tracking_mode);
3422   return CAST(object);
3423 }
3424 
3425 void CodeStubAssembler::InitializeJSObjectFromMap(
3426     TNode<HeapObject> object, TNode<Map> map, TNode<IntPtrT> instance_size,
3427     base::Optional<TNode<HeapObject>> properties,
3428     base::Optional<TNode<FixedArray>> elements,
3429     SlackTrackingMode slack_tracking_mode) {
3430   // This helper assumes that the object is in new-space, as guarded by the
3431   // check in AllocateJSObjectFromMap.
3432   if (!properties) {
3433     CSA_ASSERT(this, Word32BinaryNot(IsDictionaryMap((map))));
3434     StoreObjectFieldRoot(object, JSObject::kPropertiesOrHashOffset,
3435                          RootIndex::kEmptyFixedArray);
3436   } else {
3437     CSA_ASSERT(this, Word32Or(Word32Or(IsPropertyArray(*properties),
3438                                        IsNameDictionary(*properties)),
3439                               IsEmptyFixedArray(*properties)));
3440     StoreObjectFieldNoWriteBarrier(object, JSObject::kPropertiesOrHashOffset,
3441                                    *properties);
3442   }
3443   if (!elements) {
3444     StoreObjectFieldRoot(object, JSObject::kElementsOffset,
3445                          RootIndex::kEmptyFixedArray);
3446   } else {
3447     StoreObjectFieldNoWriteBarrier(object, JSObject::kElementsOffset,
3448                                    *elements);
3449   }
3450   if (slack_tracking_mode == kNoSlackTracking) {
3451     InitializeJSObjectBodyNoSlackTracking(object, map, instance_size);
3452   } else {
3453     DCHECK_EQ(slack_tracking_mode, kWithSlackTracking);
3454     InitializeJSObjectBodyWithSlackTracking(object, map, instance_size);
3455   }
3456 }
3457 
3458 void CodeStubAssembler::InitializeJSObjectBodyNoSlackTracking(
3459     TNode<HeapObject> object, TNode<Map> map,
3460     SloppyTNode<IntPtrT> instance_size, int start_offset) {
3461   STATIC_ASSERT(Map::kNoSlackTracking == 0);
3462   CSA_ASSERT(this, IsClearWord32<Map::Bits3::ConstructionCounterBits>(
3463                        LoadMapBitField3(map)));
3464   InitializeFieldsWithRoot(object, IntPtrConstant(start_offset), instance_size,
3465                            RootIndex::kUndefinedValue);
3466 }
3467 
3468 void CodeStubAssembler::InitializeJSObjectBodyWithSlackTracking(
3469     TNode<HeapObject> object, TNode<Map> map,
3470     SloppyTNode<IntPtrT> instance_size) {
3471   Comment("InitializeJSObjectBodyNoSlackTracking");
3472 
3473   // Perform in-object slack tracking if requested.
3474   int start_offset = JSObject::kHeaderSize;
3475   TNode<Uint32T> bit_field3 = LoadMapBitField3(map);
3476   Label end(this), slack_tracking(this), complete(this, Label::kDeferred);
3477   STATIC_ASSERT(Map::kNoSlackTracking == 0);
3478   GotoIf(IsSetWord32<Map::Bits3::ConstructionCounterBits>(bit_field3),
3479          &slack_tracking);
3480   Comment("No slack tracking");
3481   InitializeJSObjectBodyNoSlackTracking(object, map, instance_size);
3482   Goto(&end);
3483 
3484   BIND(&slack_tracking);
3485   {
3486     Comment("Decrease construction counter");
3487     // Slack tracking is only done on initial maps.
3488     CSA_ASSERT(this, IsUndefined(LoadMapBackPointer(map)));
3489     STATIC_ASSERT(Map::Bits3::ConstructionCounterBits::kLastUsedBit == 31);
3490     TNode<Word32T> new_bit_field3 = Int32Sub(
3491         bit_field3,
3492         Int32Constant(1 << Map::Bits3::ConstructionCounterBits::kShift));
3493     StoreObjectFieldNoWriteBarrier(map, Map::kBitField3Offset, new_bit_field3);
3494     STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
3495 
3496     // The object still has in-object slack, therefore the |used_or_unused|
3497     // field contains the "used" value.
3498     TNode<IntPtrT> used_size =
3499         Signed(TimesTaggedSize(ChangeUint32ToWord(LoadObjectField<Uint8T>(
3500             map, Map::kUsedOrUnusedInstanceSizeInWordsOffset))));
3501 
3502     Comment("iInitialize filler fields");
3503     InitializeFieldsWithRoot(object, used_size, instance_size,
3504                              RootIndex::kOnePointerFillerMap);
3505 
3506     Comment("Initialize undefined fields");
3507     InitializeFieldsWithRoot(object, IntPtrConstant(start_offset), used_size,
3508                              RootIndex::kUndefinedValue);
3509 
3510     STATIC_ASSERT(Map::kNoSlackTracking == 0);
3511     GotoIf(IsClearWord32<Map::Bits3::ConstructionCounterBits>(new_bit_field3),
3512            &complete);
3513     Goto(&end);
3514   }
3515 
3516   // Finalize the instance size.
3517   BIND(&complete);
3518   {
3519     // CompleteInobjectSlackTracking doesn't allocate and thus doesn't need a
3520     // context.
3521     CallRuntime(Runtime::kCompleteInobjectSlackTrackingForMap,
3522                 NoContextConstant(), map);
3523     Goto(&end);
3524   }
3525 
3526   BIND(&end);
3527 }
3528 
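// Stores |value| into every tagged-size slot in the half-open range
// [start_address, end_address) without a write barrier; both bounds must be
// kTaggedSize-aligned field addresses.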
3529 void CodeStubAssembler::StoreFieldsNoWriteBarrier(TNode<IntPtrT> start_address,
3530                                                   TNode<IntPtrT> end_address,
3531                                                   TNode<Object> value) {
3532   Comment("StoreFieldsNoWriteBarrier");
3533   CSA_ASSERT(this, WordIsAligned(start_address, kTaggedSize));
3534   CSA_ASSERT(this, WordIsAligned(end_address, kTaggedSize));
3535   BuildFastLoop<IntPtrT>(
3536       start_address, end_address,
3537       [=](TNode<IntPtrT> current) {
3538         UnsafeStoreNoWriteBarrier(MachineRepresentation::kTagged, current,
3539                                   value);
3540       },
3541       kTaggedSize, IndexAdvanceMode::kPost);
3542 }
3543 
3544 void CodeStubAssembler::MakeFixedArrayCOW(TNode<FixedArray> array) {
3545   CSA_ASSERT(this, IsFixedArrayMap(LoadMap(array)));
3546   Label done(this);
3547   // The empty fixed array is not modifiable anyway, and we shouldn't change
3548   // its map.
3549   GotoIf(TaggedEqual(array, EmptyFixedArrayConstant()), &done);
3550   StoreMap(array, FixedCOWArrayMapConstant());
3551   Goto(&done);
3552   BIND(&done);
3553 }
3554 
3555 TNode<BoolT> CodeStubAssembler::IsValidFastJSArrayCapacity(
3556     TNode<IntPtrT> capacity) {
3557   return UintPtrLessThanOrEqual(capacity,
3558                                 UintPtrConstant(JSArray::kMaxFastArrayLength));
3559 }
3560 
3561 TNode<JSArray> CodeStubAssembler::AllocateJSArray(
3562     TNode<Map> array_map, TNode<FixedArrayBase> elements, TNode<Smi> length,
3563     base::Optional<TNode<AllocationSite>> allocation_site,
3564     int array_header_size) {
3565   Comment("begin allocation of JSArray passing in elements");
3566   CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
3567 
3568   int base_size = array_header_size;
3569   if (allocation_site) {
3570     base_size += AllocationMemento::kSize;
3571   }
3572 
3573   TNode<IntPtrT> size = IntPtrConstant(base_size);
3574   TNode<JSArray> result =
3575       AllocateUninitializedJSArray(array_map, length, allocation_site, size);
3576   StoreObjectFieldNoWriteBarrier(result, JSArray::kElementsOffset, elements);
3577   return result;
3578 }
3579 
3580 std::pair<TNode<JSArray>, TNode<FixedArrayBase>>
3581 CodeStubAssembler::AllocateUninitializedJSArrayWithElements(
3582     ElementsKind kind, TNode<Map> array_map, TNode<Smi> length,
3583     base::Optional<TNode<AllocationSite>> allocation_site,
3584     TNode<IntPtrT> capacity, AllocationFlags allocation_flags,
3585     int array_header_size) {
3586   Comment("begin allocation of JSArray with elements");
3587   CHECK_EQ(allocation_flags & ~kAllowLargeObjectAllocation, 0);
3588   CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
3589 
3590   TVARIABLE(JSArray, array);
3591   TVARIABLE(FixedArrayBase, elements);
3592 
3593   Label out(this), empty(this), nonempty(this);
3594 
3595   int capacity_int;
3596   if (ToInt32Constant(capacity, &capacity_int)) {
3597     if (capacity_int == 0) {
3598       TNode<FixedArray> empty_array = EmptyFixedArrayConstant();
3599       array = AllocateJSArray(array_map, empty_array, length, allocation_site,
3600                               array_header_size);
3601       return {array.value(), empty_array};
3602     } else {
3603       Goto(&nonempty);
3604     }
3605   } else {
3606     Branch(WordEqual(capacity, IntPtrConstant(0)), &empty, &nonempty);
3607 
3608     BIND(&empty);
3609     {
3610       TNode<FixedArray> empty_array = EmptyFixedArrayConstant();
3611       array = AllocateJSArray(array_map, empty_array, length, allocation_site,
3612                               array_header_size);
3613       elements = empty_array;
3614       Goto(&out);
3615     }
3616   }
3617 
3618   BIND(&nonempty);
3619   {
3620     int base_size = array_header_size;
3621     if (allocation_site) {
3622       base_size += AllocationMemento::kSize;
3623     }
3624 
3625     const int elements_offset = base_size;
3626 
3627     // Compute space for elements
3628     base_size += FixedArray::kHeaderSize;
3629     TNode<IntPtrT> size = ElementOffsetFromIndex(capacity, kind, base_size);
3630 
3631     // For very large arrays in which the requested allocation exceeds the
3632     // maximal size of a regular heap object, we cannot use the allocation
3633     // folding trick. Instead, we first allocate the elements in large object
3634     // space, and then allocate the JSArray (and possibly the allocation
3635     // memento) in new space.
3636     if (allocation_flags & kAllowLargeObjectAllocation) {
3637       Label next(this);
3638       GotoIf(IsRegularHeapObjectSize(size), &next);
3639 
3640       CSA_CHECK(this, IsValidFastJSArrayCapacity(capacity));
3641 
3642       // Allocate and initialize the elements first. Full initialization is
3643       // needed because the upcoming JSArray allocation could trigger GC.
3644       elements = AllocateFixedArray(kind, capacity, allocation_flags);
3645 
3646       if (IsDoubleElementsKind(kind)) {
3647         FillFixedDoubleArrayWithZero(CAST(elements.value()), capacity);
3648       } else {
3649         FillFixedArrayWithSmiZero(CAST(elements.value()), capacity);
3650       }
3651 
3652       // Next, allocate the JSArray and possibly the allocation memento. Note
3653       // that allocation_flags are *not* passed on here and the resulting
3654       // JSArray will always be in new space.
3655       array = AllocateJSArray(array_map, elements.value(), length,
3656                               allocation_site, array_header_size);
3657 
3658       Goto(&out);
3659 
3660       BIND(&next);
3661     }
3662 
3663     // Fold all objects into a single new space allocation.
3664     array =
3665         AllocateUninitializedJSArray(array_map, length, allocation_site, size);
3666     elements = UncheckedCast<FixedArrayBase>(
3667         InnerAllocate(array.value(), elements_offset));
3668 
3669     StoreObjectFieldNoWriteBarrier(array.value(), JSObject::kElementsOffset,
3670                                    elements.value());
3671 
3672     // Set up the elements object.
3673     STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kTaggedSize);
3674     RootIndex elements_map_index = IsDoubleElementsKind(kind)
3675                                        ? RootIndex::kFixedDoubleArrayMap
3676                                        : RootIndex::kFixedArrayMap;
3677     DCHECK(RootsTable::IsImmortalImmovable(elements_map_index));
3678     StoreMapNoWriteBarrier(elements.value(), elements_map_index);
3679 
3680     CSA_ASSERT(this, WordNotEqual(capacity, IntPtrConstant(0)));
3681     TNode<Smi> capacity_smi = SmiTag(capacity);
3682     StoreObjectFieldNoWriteBarrier(elements.value(), FixedArray::kLengthOffset,
3683                                    capacity_smi);
3684     Goto(&out);
3685   }
3686 
3687   BIND(&out);
3688   return {array.value(), elements.value()};
3689 }
3690 
3691 TNode<JSArray> CodeStubAssembler::AllocateUninitializedJSArray(
3692     TNode<Map> array_map, TNode<Smi> length,
3693     base::Optional<TNode<AllocationSite>> allocation_site,
3694     TNode<IntPtrT> size_in_bytes) {
3695   CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
3696 
3697   // Allocate space for the JSArray and the elements FixedArray in one go.
3698   TNode<HeapObject> array = AllocateInNewSpace(size_in_bytes);
3699 
3700   StoreMapNoWriteBarrier(array, array_map);
3701   StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
3702   StoreObjectFieldRoot(array, JSArray::kPropertiesOrHashOffset,
3703                        RootIndex::kEmptyFixedArray);
3704 
3705   if (allocation_site) {
3706     InitializeAllocationMemento(array, IntPtrConstant(JSArray::kHeaderSize),
3707                                 *allocation_site);
3708   }
3709 
3710   return CAST(array);
3711 }
3712 
3713 TNode<JSArray> CodeStubAssembler::AllocateJSArray(
3714     ElementsKind kind, TNode<Map> array_map, TNode<IntPtrT> capacity,
3715     TNode<Smi> length, base::Optional<TNode<AllocationSite>> allocation_site,
3716     AllocationFlags allocation_flags) {
3717   CSA_SLOW_ASSERT(this, TaggedIsPositiveSmi(length));
3718 
3719   TNode<JSArray> array;
3720   TNode<FixedArrayBase> elements;
3721 
3722   std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
3723       kind, array_map, length, allocation_site, capacity, allocation_flags);
3724 
3725   Label out(this), nonempty(this);
3726 
3727   Branch(WordEqual(capacity, IntPtrConstant(0)), &out, &nonempty);
3728 
3729   BIND(&nonempty);
3730   {
3731     FillFixedArrayWithValue(kind, elements, IntPtrConstant(0), capacity,
3732                             RootIndex::kTheHoleValue);
3733     Goto(&out);
3734   }
3735 
3736   BIND(&out);
3737   return array;
3738 }
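
// A minimal usage sketch (hypothetical caller, not part of this file): a CSA
// builtin fragment that allocates a fresh JSArray with room for |capacity|
// elements; the backing store starts out filled with the hole, so the caller
// is expected to store real values before exposing the array. Assumes
// |context| (TNode<Context>) and |capacity| (TNode<IntPtrT>) are in scope.
//
//   TNode<NativeContext> native_context = LoadNativeContext(context);
//   TNode<Map> array_map =
//       LoadJSArrayElementsMap(Int32Constant(HOLEY_ELEMENTS), native_context);
//   TNode<JSArray> array =
//       AllocateJSArray(HOLEY_ELEMENTS, array_map, capacity,
//                       SmiTag(capacity), base::nullopt,
//                       CodeStubAssembler::kNone);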
3739 
3740 TNode<JSArray> CodeStubAssembler::ExtractFastJSArray(TNode<Context> context,
3741                                                      TNode<JSArray> array,
3742                                                      TNode<BInt> begin,
3743                                                      TNode<BInt> count) {
3744   TNode<Map> original_array_map = LoadMap(array);
3745   TNode<Int32T> elements_kind = LoadMapElementsKind(original_array_map);
3746 
3747   // Use the canonical map for the Array's ElementsKind
3748   TNode<NativeContext> native_context = LoadNativeContext(context);
3749   TNode<Map> array_map = LoadJSArrayElementsMap(elements_kind, native_context);
3750 
3751   TNode<FixedArrayBase> new_elements = ExtractFixedArray(
3752       LoadElements(array), base::Optional<TNode<BInt>>(begin),
3753       base::Optional<TNode<BInt>>(count),
3754       base::Optional<TNode<BInt>>(base::nullopt),
3755       ExtractFixedArrayFlag::kAllFixedArrays, nullptr, elements_kind);
3756 
3757   TNode<JSArray> result = AllocateJSArray(
3758       array_map, new_elements, ParameterToTagged(count), base::nullopt);
3759   return result;
3760 }
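
// A minimal usage sketch (hypothetical, not part of this file): extract the
// slice [begin, begin + count) of a fast JSArray, as a slice-style builtin
// might. |context|, |array|, |begin| and |count| (both TNode<BInt>) are
// assumed to be in scope.
//
//   TNode<JSArray> slice = ExtractFastJSArray(context, array, begin, count);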
3761 
3762 TNode<JSArray> CodeStubAssembler::CloneFastJSArray(
3763     TNode<Context> context, TNode<JSArray> array,
3764     base::Optional<TNode<AllocationSite>> allocation_site,
3765     HoleConversionMode convert_holes) {
3766   // TODO(dhai): we should be able to assert IsFastJSArray(array) here, but this
3767   // function is also used to copy boilerplates even when the no-elements
3768   // protector is invalid. This function should be renamed to reflect its uses.
3769 
3770   TNode<Number> length = LoadJSArrayLength(array);
3771   TNode<FixedArrayBase> new_elements;
3772   TVARIABLE(FixedArrayBase, var_new_elements);
3773   TVARIABLE(Int32T, var_elements_kind, LoadMapElementsKind(LoadMap(array)));
3774 
3775   Label allocate_jsarray(this), holey_extract(this),
3776       allocate_jsarray_main(this);
3777 
3778   bool need_conversion =
3779       convert_holes == HoleConversionMode::kConvertToUndefined;
3780   if (need_conversion) {
3781     // We need to take care of holes, if the array is of holey elements kind.
3782     GotoIf(IsHoleyFastElementsKindForRead(var_elements_kind.value()),
3783            &holey_extract);
3784   }
3785 
3786   // Simple extraction that preserves holes.
3787   new_elements = ExtractFixedArray(
3788       LoadElements(array),
3789       base::Optional<TNode<BInt>>(IntPtrOrSmiConstant<BInt>(0)),
3790       base::Optional<TNode<BInt>>(TaggedToParameter<BInt>(CAST(length))),
3791       base::Optional<TNode<BInt>>(base::nullopt),
3792       ExtractFixedArrayFlag::kAllFixedArraysDontCopyCOW, nullptr,
3793       var_elements_kind.value());
3794   var_new_elements = new_elements;
3795   Goto(&allocate_jsarray);
3796 
3797   if (need_conversion) {
3798     BIND(&holey_extract);
3799     // Convert holes to undefined.
3800     TVARIABLE(BoolT, var_holes_converted, Int32FalseConstant());
3801     // Copy |array|'s elements store. The copy will be compatible with the
3802     // original elements kind unless there are holes in the source. Any holes
3803     // get converted to undefined, hence in that case the copy is compatible
3804     // only with PACKED_ELEMENTS and HOLEY_ELEMENTS, and we will choose
3805     // PACKED_ELEMENTS. Also, if we want to replace holes, we must not use
3806     // ExtractFixedArrayFlag::kDontCopyCOW.
3807     new_elements = ExtractFixedArray(
3808         LoadElements(array),
3809         base::Optional<TNode<BInt>>(IntPtrOrSmiConstant<BInt>(0)),
3810         base::Optional<TNode<BInt>>(TaggedToParameter<BInt>(CAST(length))),
3811         base::Optional<TNode<BInt>>(base::nullopt),
3812         ExtractFixedArrayFlag::kAllFixedArrays, &var_holes_converted);
3813     var_new_elements = new_elements;
3814     // If the array type didn't change, use the original elements kind.
3815     GotoIfNot(var_holes_converted.value(), &allocate_jsarray);
3816     // Otherwise use PACKED_ELEMENTS for the target's elements kind.
3817     var_elements_kind = Int32Constant(PACKED_ELEMENTS);
3818     Goto(&allocate_jsarray);
3819   }
3820 
3821   BIND(&allocate_jsarray);
3822 
3823   // Handle any nonextensible elements kinds
3824   CSA_ASSERT(this, IsElementsKindLessThanOrEqual(
3825                        var_elements_kind.value(),
3826                        LAST_ANY_NONEXTENSIBLE_ELEMENTS_KIND));
3827   GotoIf(IsElementsKindLessThanOrEqual(var_elements_kind.value(),
3828                                        LAST_FAST_ELEMENTS_KIND),
3829          &allocate_jsarray_main);
3830   var_elements_kind = Int32Constant(PACKED_ELEMENTS);
3831   Goto(&allocate_jsarray_main);
3832 
3833   BIND(&allocate_jsarray_main);
3834   // Use the canonical map for the chosen elements kind.
3835   TNode<NativeContext> native_context = LoadNativeContext(context);
3836   TNode<Map> array_map =
3837       LoadJSArrayElementsMap(var_elements_kind.value(), native_context);
3838 
3839   TNode<JSArray> result = AllocateJSArray(array_map, var_new_elements.value(),
3840                                           CAST(length), allocation_site);
3841   return result;
3842 }
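
// A minimal usage sketch (hypothetical, not part of this file): clone a fast
// JSArray while turning any holes into undefined, so reads from the clone
// never have to consult the prototype chain. |context| and |array| are
// assumed to be in scope.
//
//   TNode<JSArray> clone = CloneFastJSArray(
//       context, array, base::nullopt,
//       HoleConversionMode::kConvertToUndefined);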
3843 
3844 template <typename TIndex>
3845 TNode<FixedArrayBase> CodeStubAssembler::AllocateFixedArray(
3846     ElementsKind kind, TNode<TIndex> capacity, AllocationFlags flags,
3847     base::Optional<TNode<Map>> fixed_array_map) {
3848   static_assert(
3849       std::is_same<TIndex, Smi>::value || std::is_same<TIndex, IntPtrT>::value,
3850       "Only Smi or IntPtrT capacity is allowed");
3851   Comment("AllocateFixedArray");
3852   CSA_ASSERT(this,
3853              IntPtrOrSmiGreaterThan(capacity, IntPtrOrSmiConstant<TIndex>(0)));
3854 
3855   const intptr_t kMaxLength = IsDoubleElementsKind(kind)
3856                                   ? FixedDoubleArray::kMaxLength
3857                                   : FixedArray::kMaxLength;
3858   intptr_t capacity_constant;
3859   if (ToParameterConstant(capacity, &capacity_constant)) {
3860     CHECK_LE(capacity_constant, kMaxLength);
3861   } else {
3862     Label if_out_of_memory(this, Label::kDeferred), next(this);
3863     Branch(IntPtrOrSmiGreaterThan(capacity, IntPtrOrSmiConstant<TIndex>(
3864                                                 static_cast<int>(kMaxLength))),
3865            &if_out_of_memory, &next);
3866 
3867     BIND(&if_out_of_memory);
3868     CallRuntime(Runtime::kFatalProcessOutOfMemoryInvalidArrayLength,
3869                 NoContextConstant());
3870     Unreachable();
3871 
3872     BIND(&next);
3873   }
3874 
3875   TNode<IntPtrT> total_size = GetFixedArrayAllocationSize(capacity, kind);
3876 
3877   if (IsDoubleElementsKind(kind)) flags |= kDoubleAlignment;
3878   // Allocate both array and elements object, and initialize the JSArray.
3879   TNode<HeapObject> array = Allocate(total_size, flags);
3880   if (fixed_array_map) {
3881     // Conservatively, only skip the write barrier if there are no allocation
3882     // flags; this ensures that the object hasn't ended up in LOS. Note that the
3883     // fixed array map is currently always immortal and technically wouldn't
3884     // need the write barrier even in LOS, but it's better to not take chances
3885     // in case this invariant changes later, since it's difficult to enforce
3886     // locally here.
3887     if (flags == CodeStubAssembler::kNone) {
3888       StoreMapNoWriteBarrier(array, *fixed_array_map);
3889     } else {
3890       StoreMap(array, *fixed_array_map);
3891     }
3892   } else {
3893     RootIndex map_index = IsDoubleElementsKind(kind)
3894                               ? RootIndex::kFixedDoubleArrayMap
3895                               : RootIndex::kFixedArrayMap;
3896     DCHECK(RootsTable::IsImmortalImmovable(map_index));
3897     StoreMapNoWriteBarrier(array, map_index);
3898   }
3899   StoreObjectFieldNoWriteBarrier(array, FixedArrayBase::kLengthOffset,
3900                                  ParameterToTagged(capacity));
3901   return UncheckedCast<FixedArrayBase>(array);
3902 }
3903 
3904 // There is no need to export the Smi version since it is only used inside
3905 // code-stub-assembler.
3906 template V8_EXPORT_PRIVATE TNode<FixedArrayBase>
3907     CodeStubAssembler::AllocateFixedArray<IntPtrT>(ElementsKind, TNode<IntPtrT>,
3908                                                    AllocationFlags,
3909                                                    base::Optional<TNode<Map>>);
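
// A minimal usage sketch (hypothetical, not part of this file): allocate a
// 16-element FixedArray and pre-fill it with the hole so it is in a
// consistent state before individual stores. The trailing map parameter is
// assumed to keep its default.
//
//   TNode<IntPtrT> capacity = IntPtrConstant(16);
//   TNode<FixedArrayBase> elements = AllocateFixedArray(
//       PACKED_ELEMENTS, capacity, CodeStubAssembler::kNone);
//   FillFixedArrayWithValue(PACKED_ELEMENTS, elements, IntPtrConstant(0),
//                           capacity, RootIndex::kTheHoleValue);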
3910 
3911 template <typename TIndex>
3912 TNode<FixedArray> CodeStubAssembler::ExtractToFixedArray(
3913     TNode<FixedArrayBase> source, TNode<TIndex> first, TNode<TIndex> count,
3914     TNode<TIndex> capacity, TNode<Map> source_map, ElementsKind from_kind,
3915     AllocationFlags allocation_flags, ExtractFixedArrayFlags extract_flags,
3916     HoleConversionMode convert_holes, TVariable<BoolT>* var_holes_converted,
3917     base::Optional<TNode<Int32T>> source_elements_kind) {
3918   static_assert(
3919       std::is_same<TIndex, Smi>::value || std::is_same<TIndex, IntPtrT>::value,
3920       "Only Smi or IntPtrT first, count, and capacity are allowed");
3921 
3922   DCHECK(extract_flags & ExtractFixedArrayFlag::kFixedArrays);
3923   CSA_ASSERT(this,
3924              IntPtrOrSmiNotEqual(IntPtrOrSmiConstant<TIndex>(0), capacity));
3925   CSA_ASSERT(this, TaggedEqual(source_map, LoadMap(source)));
3926 
3927   TVARIABLE(FixedArrayBase, var_result);
3928   TVARIABLE(Map, var_target_map, source_map);
3929 
3930   Label done(this, {&var_result}), is_cow(this),
3931       new_space_check(this, {&var_target_map});
3932 
3933   // If source_map is FixedDoubleArrayMap, or FixedCOWArrayMap but we can't
3934   // just reuse the COW array, use FixedArrayMap as the target map. Otherwise,
3935   // use source_map as the target map.
3936   if (IsDoubleElementsKind(from_kind)) {
3937     CSA_ASSERT(this, IsFixedDoubleArrayMap(source_map));
3938     var_target_map = FixedArrayMapConstant();
3939     Goto(&new_space_check);
3940   } else {
3941     CSA_ASSERT(this, Word32BinaryNot(IsFixedDoubleArrayMap(source_map)));
3942     Branch(TaggedEqual(var_target_map.value(), FixedCOWArrayMapConstant()),
3943            &is_cow, &new_space_check);
3944 
3945     BIND(&is_cow);
3946     {
3947       // |source| is a COW array, so we don't actually need to allocate a new
3948       // array unless:
3949       // 1) |extract_flags| forces us to, or
3950       // 2) we're asked to extract only part of the |source| (|first| != 0).
3951       if (extract_flags & ExtractFixedArrayFlag::kDontCopyCOW) {
3952         Branch(IntPtrOrSmiNotEqual(IntPtrOrSmiConstant<TIndex>(0), first),
3953                &new_space_check, [&] {
3954                  var_result = source;
3955                  Goto(&done);
3956                });
3957       } else {
3958         var_target_map = FixedArrayMapConstant();
3959         Goto(&new_space_check);
3960       }
3961     }
3962   }
3963 
3964   BIND(&new_space_check);
3965   {
3966     bool handle_old_space = !FLAG_young_generation_large_objects;
3967     if (handle_old_space) {
3968       if (extract_flags & ExtractFixedArrayFlag::kNewSpaceAllocationOnly) {
3969         handle_old_space = false;
3970         CSA_ASSERT(this, Word32BinaryNot(FixedArraySizeDoesntFitInNewSpace(
3971                              count, FixedArray::kHeaderSize)));
3972       } else {
3973         int constant_count;
3974         handle_old_space =
3975             !TryGetIntPtrOrSmiConstantValue(count, &constant_count) ||
3976             (constant_count >
3977              FixedArray::GetMaxLengthForNewSpaceAllocation(PACKED_ELEMENTS));
3978       }
3979     }
3980 
3981     Label old_space(this, Label::kDeferred);
3982     if (handle_old_space) {
3983       GotoIfFixedArraySizeDoesntFitInNewSpace(capacity, &old_space,
3984                                               FixedArray::kHeaderSize);
3985     }
3986 
3987     Comment("Copy FixedArray in young generation");
3988     // We use PACKED_ELEMENTS to tell AllocateFixedArray and
3989     // CopyFixedArrayElements that we want a FixedArray.
3990     const ElementsKind to_kind = PACKED_ELEMENTS;
3991     TNode<FixedArrayBase> to_elements = AllocateFixedArray(
3992         to_kind, capacity, allocation_flags, var_target_map.value());
3993     var_result = to_elements;
3994 
3995 #ifndef V8_ENABLE_SINGLE_GENERATION
3996 #ifdef DEBUG
3997     TNode<IntPtrT> object_word = BitcastTaggedToWord(to_elements);
3998     TNode<IntPtrT> object_page = PageFromAddress(object_word);
3999     TNode<IntPtrT> page_flags =
4000         Load<IntPtrT>(object_page, IntPtrConstant(Page::kFlagsOffset));
4001     CSA_ASSERT(
4002         this,
4003         WordNotEqual(
4004             WordAnd(page_flags,
4005                     IntPtrConstant(MemoryChunk::kIsInYoungGenerationMask)),
4006             IntPtrConstant(0)));
4007 #endif
4008 #endif
4009 
4010     if (convert_holes == HoleConversionMode::kDontConvert &&
4011         !IsDoubleElementsKind(from_kind)) {
4012       // We can use CopyElements (memcpy) because we don't need to replace or
4013       // convert any values. Since {to_elements} is in new-space, CopyElements
4014       // will efficiently use memcpy.
4015       FillFixedArrayWithValue(to_kind, to_elements, count, capacity,
4016                               RootIndex::kTheHoleValue);
4017       CopyElements(to_kind, to_elements, IntPtrConstant(0), source,
4018                    ParameterToIntPtr(first), ParameterToIntPtr(count),
4019                    SKIP_WRITE_BARRIER);
4020     } else {
4021       CopyFixedArrayElements(from_kind, source, to_kind, to_elements, first,
4022                              count, capacity, SKIP_WRITE_BARRIER, convert_holes,
4023                              var_holes_converted);
4024     }
4025     Goto(&done);
4026 
4027     if (handle_old_space) {
4028       BIND(&old_space);
4029       {
4030         Comment("Copy FixedArray in old generation");
4031         Label copy_one_by_one(this);
4032 
4033         // Try to use memcpy if we don't need to convert holes to undefined.
4034         if (convert_holes == HoleConversionMode::kDontConvert &&
4035             source_elements_kind) {
4036           // Only try memcpy if we're not copying object pointers.
4037           GotoIfNot(IsFastSmiElementsKind(*source_elements_kind),
4038                     &copy_one_by_one);
4039 
4040           const ElementsKind to_smi_kind = PACKED_SMI_ELEMENTS;
4041           to_elements = AllocateFixedArray(
4042               to_smi_kind, capacity, allocation_flags, var_target_map.value());
4043           var_result = to_elements;
4044 
4045           FillFixedArrayWithValue(to_smi_kind, to_elements, count, capacity,
4046                                   RootIndex::kTheHoleValue);
4047           // CopyElements will try to use memcpy if it's not conflicting with
4048           // GC. Otherwise it will copy element by element, but skip write
4049           // barriers (since we're copying smis to smis).
4050           CopyElements(to_smi_kind, to_elements, IntPtrConstant(0), source,
4051                        ParameterToIntPtr(first), ParameterToIntPtr(count),
4052                        SKIP_WRITE_BARRIER);
4053           Goto(&done);
4054         } else {
4055           Goto(&copy_one_by_one);
4056         }
4057 
4058         BIND(&copy_one_by_one);
4059         {
4060           to_elements = AllocateFixedArray(to_kind, capacity, allocation_flags,
4061                                            var_target_map.value());
4062           var_result = to_elements;
4063           CopyFixedArrayElements(from_kind, source, to_kind, to_elements, first,
4064                                  count, capacity, UPDATE_WRITE_BARRIER,
4065                                  convert_holes, var_holes_converted);
4066           Goto(&done);
4067         }
4068       }
4069     }
4070   }
4071 
4072   BIND(&done);
4073   return UncheckedCast<FixedArray>(var_result.value());
4074 }
4075 
4076 template <typename TIndex>
4077 TNode<FixedArrayBase> CodeStubAssembler::ExtractFixedDoubleArrayFillingHoles(
4078     TNode<FixedArrayBase> from_array, TNode<TIndex> first, TNode<TIndex> count,
4079     TNode<TIndex> capacity, TNode<Map> fixed_array_map,
4080     TVariable<BoolT>* var_holes_converted, AllocationFlags allocation_flags,
4081     ExtractFixedArrayFlags extract_flags) {
4082   static_assert(
4083       std::is_same<TIndex, Smi>::value || std::is_same<TIndex, IntPtrT>::value,
4084       "Only Smi or IntPtrT first, count, and capacity are allowed");
4085 
4086   DCHECK_NE(var_holes_converted, nullptr);
4087   CSA_ASSERT(this, IsFixedDoubleArrayMap(fixed_array_map));
4088 
4089   TVARIABLE(FixedArrayBase, var_result);
4090   const ElementsKind kind = PACKED_DOUBLE_ELEMENTS;
4091   TNode<FixedArrayBase> to_elements =
4092       AllocateFixedArray(kind, capacity, allocation_flags, fixed_array_map);
4093   var_result = to_elements;
4094   // We first try to copy the FixedDoubleArray to a new FixedDoubleArray.
4095   // |var_holes_converted| is set to False preliminarily.
4096   *var_holes_converted = Int32FalseConstant();
4097 
4098   // The construction of the loop and the offsets for double elements are
4099   // extracted from CopyFixedArrayElements.
4100   CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(from_array, kind));
4101   STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
4102 
4103   Comment("[ ExtractFixedDoubleArrayFillingHoles");
4104 
4105   // This copy can trigger GC, so we pre-initialize the array with holes.
4106   FillFixedArrayWithValue(kind, to_elements, IntPtrOrSmiConstant<TIndex>(0),
4107                           capacity, RootIndex::kTheHoleValue);
4108 
4109   const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag;
4110   TNode<IntPtrT> first_from_element_offset =
4111       ElementOffsetFromIndex(first, kind, 0);
4112   TNode<IntPtrT> limit_offset = IntPtrAdd(first_from_element_offset,
4113                                           IntPtrConstant(first_element_offset));
4114   TVARIABLE(IntPtrT, var_from_offset,
4115             ElementOffsetFromIndex(IntPtrOrSmiAdd(first, count), kind,
4116                                    first_element_offset));
4117 
4118   Label decrement(this, {&var_from_offset}), done(this);
4119   TNode<IntPtrT> to_array_adjusted =
4120       IntPtrSub(BitcastTaggedToWord(to_elements), first_from_element_offset);
4121 
4122   Branch(WordEqual(var_from_offset.value(), limit_offset), &done, &decrement);
4123 
4124   BIND(&decrement);
4125   {
4126     TNode<IntPtrT> from_offset =
4127         IntPtrSub(var_from_offset.value(), IntPtrConstant(kDoubleSize));
4128     var_from_offset = from_offset;
4129 
4130     TNode<IntPtrT> to_offset = from_offset;
4131 
4132     Label if_hole(this);
4133 
4134     TNode<Float64T> value = LoadDoubleWithHoleCheck(
4135         from_array, var_from_offset.value(), &if_hole, MachineType::Float64());
4136 
4137     StoreNoWriteBarrier(MachineRepresentation::kFloat64, to_array_adjusted,
4138                         to_offset, value);
4139 
4140     TNode<BoolT> compare = WordNotEqual(from_offset, limit_offset);
4141     Branch(compare, &decrement, &done);
4142 
4143     BIND(&if_hole);
4144     // We are unlucky: there are holes! We need to restart the copy; this time
4145     // we copy the FixedDoubleArray to a new FixedArray, replacing holes with
4146     // undefined. We signal this to the caller through
4147     // |var_holes_converted|.
4148     *var_holes_converted = Int32TrueConstant();
4149     to_elements =
4150         ExtractToFixedArray(from_array, first, count, capacity, fixed_array_map,
4151                             kind, allocation_flags, extract_flags,
4152                             HoleConversionMode::kConvertToUndefined);
4153     var_result = to_elements;
4154     Goto(&done);
4155   }
4156 
4157   BIND(&done);
4158   Comment("] ExtractFixedDoubleArrayFillingHoles");
4159   return var_result.value();
4160 }
4161 
4162 template <typename TIndex>
4163 TNode<FixedArrayBase> CodeStubAssembler::ExtractFixedArray(
4164     TNode<FixedArrayBase> source, base::Optional<TNode<TIndex>> first,
4165     base::Optional<TNode<TIndex>> count, base::Optional<TNode<TIndex>> capacity,
4166     ExtractFixedArrayFlags extract_flags, TVariable<BoolT>* var_holes_converted,
4167     base::Optional<TNode<Int32T>> source_elements_kind) {
4168   static_assert(
4169       std::is_same<TIndex, Smi>::value || std::is_same<TIndex, IntPtrT>::value,
4170       "Only Smi or IntPtrT first, count, and capacity are allowed");
4171   DCHECK(extract_flags & ExtractFixedArrayFlag::kFixedArrays ||
4172          extract_flags & ExtractFixedArrayFlag::kFixedDoubleArrays);
4173   // If we want to replace holes, ExtractFixedArrayFlag::kDontCopyCOW should
4174   // not be used, because that disables the iteration which detects holes.
4175   DCHECK_IMPLIES(var_holes_converted != nullptr,
4176                  !(extract_flags & ExtractFixedArrayFlag::kDontCopyCOW));
4177   HoleConversionMode convert_holes =
4178       var_holes_converted != nullptr ? HoleConversionMode::kConvertToUndefined
4179                                      : HoleConversionMode::kDontConvert;
4180   TVARIABLE(FixedArrayBase, var_result);
4181   const AllocationFlags allocation_flags =
4182       (extract_flags & ExtractFixedArrayFlag::kNewSpaceAllocationOnly)
4183           ? CodeStubAssembler::kNone
4184           : CodeStubAssembler::kAllowLargeObjectAllocation;
4185   if (!first) {
4186     first = IntPtrOrSmiConstant<TIndex>(0);
4187   }
4188   if (!count) {
4189     count = IntPtrOrSmiSub(
4190         TaggedToParameter<TIndex>(LoadFixedArrayBaseLength(source)), *first);
4191 
4192     CSA_ASSERT(this, IntPtrOrSmiLessThanOrEqual(IntPtrOrSmiConstant<TIndex>(0),
4193                                                 *count));
4194   }
4195   if (!capacity) {
4196     capacity = *count;
4197   } else {
4198     CSA_ASSERT(this, Word32BinaryNot(IntPtrOrSmiGreaterThan(
4199                          IntPtrOrSmiAdd(*first, *count), *capacity)));
4200   }
4201 
4202   Label if_fixed_double_array(this), empty(this), done(this, &var_result);
4203   TNode<Map> source_map = LoadMap(source);
4204   GotoIf(IntPtrOrSmiEqual(IntPtrOrSmiConstant<TIndex>(0), *capacity), &empty);
4205 
4206   if (extract_flags & ExtractFixedArrayFlag::kFixedDoubleArrays) {
4207     if (extract_flags & ExtractFixedArrayFlag::kFixedArrays) {
4208       GotoIf(IsFixedDoubleArrayMap(source_map), &if_fixed_double_array);
4209     } else {
4210       CSA_ASSERT(this, IsFixedDoubleArrayMap(source_map));
4211     }
4212   }
4213 
4214   if (extract_flags & ExtractFixedArrayFlag::kFixedArrays) {
4215     // Here we can only get |source| as FixedArray, never FixedDoubleArray.
4216     // PACKED_ELEMENTS is used to signify that the source is a FixedArray.
4217     TNode<FixedArray> to_elements = ExtractToFixedArray(
4218         source, *first, *count, *capacity, source_map, PACKED_ELEMENTS,
4219         allocation_flags, extract_flags, convert_holes, var_holes_converted,
4220         source_elements_kind);
4221     var_result = to_elements;
4222     Goto(&done);
4223   }
4224 
4225   if (extract_flags & ExtractFixedArrayFlag::kFixedDoubleArrays) {
4226     BIND(&if_fixed_double_array);
4227     Comment("Copy FixedDoubleArray");
4228 
4229     if (convert_holes == HoleConversionMode::kConvertToUndefined) {
4230       TNode<FixedArrayBase> to_elements = ExtractFixedDoubleArrayFillingHoles(
4231           source, *first, *count, *capacity, source_map, var_holes_converted,
4232           allocation_flags, extract_flags);
4233       var_result = to_elements;
4234     } else {
4235       // We use PACKED_DOUBLE_ELEMENTS to signify that both the source and
4236       // the target are FixedDoubleArray. That it is PACKED or HOLEY does not
4237       // matter.
4238       ElementsKind kind = PACKED_DOUBLE_ELEMENTS;
4239       TNode<FixedArrayBase> to_elements =
4240           AllocateFixedArray(kind, *capacity, allocation_flags, source_map);
4241       FillFixedArrayWithValue(kind, to_elements, *count, *capacity,
4242                               RootIndex::kTheHoleValue);
4243       CopyElements(kind, to_elements, IntPtrConstant(0), source,
4244                    ParameterToIntPtr(*first), ParameterToIntPtr(*count));
4245       var_result = to_elements;
4246     }
4247 
4248     Goto(&done);
4249   }
4250 
4251   BIND(&empty);
4252   {
4253     Comment("Copy empty array");
4254 
4255     var_result = EmptyFixedArrayConstant();
4256     Goto(&done);
4257   }
4258 
4259   BIND(&done);
4260   return var_result.value();
4261 }
4262 
4263 template V8_EXPORT_PRIVATE TNode<FixedArrayBase>
4264 CodeStubAssembler::ExtractFixedArray<Smi>(
4265     TNode<FixedArrayBase>, base::Optional<TNode<Smi>>,
4266     base::Optional<TNode<Smi>>, base::Optional<TNode<Smi>>,
4267     ExtractFixedArrayFlags, TVariable<BoolT>*, base::Optional<TNode<Int32T>>);
4268 
4269 template V8_EXPORT_PRIVATE TNode<FixedArrayBase>
4270 CodeStubAssembler::ExtractFixedArray<IntPtrT>(
4271     TNode<FixedArrayBase>, base::Optional<TNode<IntPtrT>>,
4272     base::Optional<TNode<IntPtrT>>, base::Optional<TNode<IntPtrT>>,
4273     ExtractFixedArrayFlags, TVariable<BoolT>*, base::Optional<TNode<Int32T>>);
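
// A minimal usage sketch (hypothetical, not part of this file): copy the
// first |count| elements of a JSArray's backing store, letting the COW and
// double-array dispatch above pick the right copy path. |array| and |count|
// (TNode<BInt>) are assumed to be in scope; the trailing parameters are
// assumed to keep their defaults (no hole tracking).
//
//   TNode<FixedArrayBase> copy = ExtractFixedArray(
//       LoadElements(array),
//       base::Optional<TNode<BInt>>(IntPtrOrSmiConstant<BInt>(0)),
//       base::Optional<TNode<BInt>>(count),
//       base::Optional<TNode<BInt>>(base::nullopt),
//       ExtractFixedArrayFlag::kAllFixedArrays);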
4274 
4275 void CodeStubAssembler::InitializePropertyArrayLength(
4276     TNode<PropertyArray> property_array, TNode<IntPtrT> length) {
4277   CSA_ASSERT(this, IntPtrGreaterThan(length, IntPtrConstant(0)));
4278   CSA_ASSERT(this,
4279              IntPtrLessThanOrEqual(
4280                  length, IntPtrConstant(PropertyArray::LengthField::kMax)));
4281   StoreObjectFieldNoWriteBarrier(
4282       property_array, PropertyArray::kLengthAndHashOffset, SmiTag(length));
4283 }
4284 
4285 TNode<PropertyArray> CodeStubAssembler::AllocatePropertyArray(
4286     TNode<IntPtrT> capacity) {
4287   CSA_ASSERT(this, IntPtrGreaterThan(capacity, IntPtrConstant(0)));
4288   TNode<IntPtrT> total_size = GetPropertyArrayAllocationSize(capacity);
4289 
4290   TNode<HeapObject> array = Allocate(total_size, kNone);
4291   RootIndex map_index = RootIndex::kPropertyArrayMap;
4292   DCHECK(RootsTable::IsImmortalImmovable(map_index));
4293   StoreMapNoWriteBarrier(array, map_index);
4294   TNode<PropertyArray> property_array = CAST(array);
4295   InitializePropertyArrayLength(property_array, capacity);
4296   return property_array;
4297 }
4298 
4299 void CodeStubAssembler::FillPropertyArrayWithUndefined(
4300     TNode<PropertyArray> array, TNode<IntPtrT> from_index,
4301     TNode<IntPtrT> to_index) {
4302   ElementsKind kind = PACKED_ELEMENTS;
4303   TNode<Oddball> value = UndefinedConstant();
4304   BuildFastArrayForEach(
4305       array, kind, from_index, to_index,
4306       [this, value](TNode<HeapObject> array, TNode<IntPtrT> offset) {
4307         StoreNoWriteBarrier(MachineRepresentation::kTagged, array, offset,
4308                             value);
4309       });
4310 }
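
// A minimal usage sketch (hypothetical, not part of this file): allocate an
// out-of-object property backing store and initialize every slot with
// undefined. |field_count| (TNode<IntPtrT>, > 0) is assumed to be in scope.
//
//   TNode<PropertyArray> properties = AllocatePropertyArray(field_count);
//   FillPropertyArrayWithUndefined(properties, IntPtrConstant(0),
//                                  field_count);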
4311 
4312 template <typename TIndex>
4313 void CodeStubAssembler::FillFixedArrayWithValue(ElementsKind kind,
4314                                                 TNode<FixedArrayBase> array,
4315                                                 TNode<TIndex> from_index,
4316                                                 TNode<TIndex> to_index,
4317                                                 RootIndex value_root_index) {
4318   static_assert(
4319       std::is_same<TIndex, Smi>::value || std::is_same<TIndex, IntPtrT>::value,
4320       "Only Smi or IntPtrT from and to are allowed");
4321   CSA_SLOW_ASSERT(this, IsFixedArrayWithKind(array, kind));
4322   DCHECK(value_root_index == RootIndex::kTheHoleValue ||
4323          value_root_index == RootIndex::kUndefinedValue);
4324 
4325   // Determine the value to initialize the {array} based
4326   // on the {value_root_index} and the elements {kind}.
4327   TNode<Object> value = LoadRoot(value_root_index);
4328   TNode<Float64T> float_value;
4329   if (IsDoubleElementsKind(kind)) {
4330     float_value = LoadHeapNumberValue(CAST(value));
4331   }
4332 
4333   BuildFastArrayForEach(
4334       array, kind, from_index, to_index,
4335       [this, value, float_value, kind](TNode<HeapObject> array,
4336                                        TNode<IntPtrT> offset) {
4337         if (IsDoubleElementsKind(kind)) {
4338           StoreNoWriteBarrier(MachineRepresentation::kFloat64, array, offset,
4339                               float_value);
4340         } else {
4341           StoreNoWriteBarrier(MachineRepresentation::kTagged, array, offset,
4342                               value);
4343         }
4344       });
4345 }
4346 
4347 template V8_EXPORT_PRIVATE void
4348     CodeStubAssembler::FillFixedArrayWithValue<IntPtrT>(ElementsKind,
4349                                                         TNode<FixedArrayBase>,
4350                                                         TNode<IntPtrT>,
4351                                                         TNode<IntPtrT>,
4352                                                         RootIndex);
4353 template V8_EXPORT_PRIVATE void CodeStubAssembler::FillFixedArrayWithValue<Smi>(
4354     ElementsKind, TNode<FixedArrayBase>, TNode<Smi>, TNode<Smi>, RootIndex);
4355 
4356 void CodeStubAssembler::StoreDoubleHole(TNode<HeapObject> object,
4357                                         TNode<IntPtrT> offset) {
4358   TNode<UintPtrT> double_hole =
4359       Is64() ? ReinterpretCast<UintPtrT>(Int64Constant(kHoleNanInt64))
4360              : ReinterpretCast<UintPtrT>(Int32Constant(kHoleNanLower32));
4361   // TODO(danno): When we have a Float32/Float64 wrapper class that
4362   // preserves double bits during manipulation, remove this code/change
4363   // this to an indexed Float64 store.
4364   if (Is64()) {
4365     StoreNoWriteBarrier(MachineRepresentation::kWord64, object, offset,
4366                         double_hole);
4367   } else {
4368     StoreNoWriteBarrier(MachineRepresentation::kWord32, object, offset,
4369                         double_hole);
4370     StoreNoWriteBarrier(MachineRepresentation::kWord32, object,
4371                         IntPtrAdd(offset, IntPtrConstant(kInt32Size)),
4372                         double_hole);
4373   }
4374 }
4375 
4376 void CodeStubAssembler::StoreFixedDoubleArrayHole(TNode<FixedDoubleArray> array,
4377                                                   TNode<IntPtrT> index) {
4378   TNode<IntPtrT> offset = ElementOffsetFromIndex(
4379       index, PACKED_DOUBLE_ELEMENTS, FixedArray::kHeaderSize - kHeapObjectTag);
4380   CSA_ASSERT(this, IsOffsetInBounds(
4381                        offset, LoadAndUntagFixedArrayBaseLength(array),
4382                        FixedDoubleArray::kHeaderSize, PACKED_DOUBLE_ELEMENTS));
4383   StoreDoubleHole(array, offset);
4384 }
4385 
4386 void CodeStubAssembler::FillFixedArrayWithSmiZero(TNode<FixedArray> array,
4387                                                   TNode<IntPtrT> length) {
4388   CSA_ASSERT(this, WordEqual(length, LoadAndUntagFixedArrayBaseLength(array)));
4389 
4390   TNode<IntPtrT> byte_length = TimesTaggedSize(length);
4391   CSA_ASSERT(this, UintPtrLessThan(length, byte_length));
4392 
4393   static const int32_t fa_base_data_offset =
4394       FixedArray::kHeaderSize - kHeapObjectTag;
4395   TNode<IntPtrT> backing_store = IntPtrAdd(BitcastTaggedToWord(array),
4396                                            IntPtrConstant(fa_base_data_offset));
4397 
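  // Smi zero is the all-zero bit pattern (both the payload and the tag are
  // zero), so a byte-wise memset of 0 yields a valid array of Smi zeros.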
4398   // Call out to memset to perform initialization.
4399   TNode<ExternalReference> memset =
4400       ExternalConstant(ExternalReference::libc_memset_function());
4401   STATIC_ASSERT(kSizetSize == kIntptrSize);
4402   CallCFunction(memset, MachineType::Pointer(),
4403                 std::make_pair(MachineType::Pointer(), backing_store),
4404                 std::make_pair(MachineType::IntPtr(), IntPtrConstant(0)),
4405                 std::make_pair(MachineType::UintPtr(), byte_length));
4406 }
4407 
4408 void CodeStubAssembler::FillFixedDoubleArrayWithZero(
4409     TNode<FixedDoubleArray> array, TNode<IntPtrT> length) {
4410   CSA_ASSERT(this, WordEqual(length, LoadAndUntagFixedArrayBaseLength(array)));
4411 
4412   TNode<IntPtrT> byte_length = TimesDoubleSize(length);
4413   CSA_ASSERT(this, UintPtrLessThan(length, byte_length));
4414 
4415   static const int32_t fa_base_data_offset =
4416       FixedDoubleArray::kHeaderSize - kHeapObjectTag;
4417   TNode<IntPtrT> backing_store = IntPtrAdd(BitcastTaggedToWord(array),
4418                                            IntPtrConstant(fa_base_data_offset));
4419 
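  // The IEEE 754 encoding of +0.0 is the all-zero bit pattern, so a byte-wise
  // memset of 0 yields a valid array of double zeros.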
4420   // Call out to memset to perform initialization.
4421   TNode<ExternalReference> memset =
4422       ExternalConstant(ExternalReference::libc_memset_function());
4423   STATIC_ASSERT(kSizetSize == kIntptrSize);
4424   CallCFunction(memset, MachineType::Pointer(),
4425                 std::make_pair(MachineType::Pointer(), backing_store),
4426                 std::make_pair(MachineType::IntPtr(), IntPtrConstant(0)),
4427                 std::make_pair(MachineType::UintPtr(), byte_length));
4428 }
4429 
4430 void CodeStubAssembler::JumpIfPointersFromHereAreInteresting(
4431     TNode<Object> object, Label* interesting) {
4432   Label finished(this);
4433   TNode<IntPtrT> object_word = BitcastTaggedToWord(object);
4434   TNode<IntPtrT> object_page = PageFromAddress(object_word);
4435   TNode<IntPtrT> page_flags = UncheckedCast<IntPtrT>(Load(
4436       MachineType::IntPtr(), object_page, IntPtrConstant(Page::kFlagsOffset)));
4437   Branch(
4438       WordEqual(WordAnd(page_flags,
4439                         IntPtrConstant(
4440                             MemoryChunk::kPointersFromHereAreInterestingMask)),
4441                 IntPtrConstant(0)),
4442       &finished, interesting);
4443   BIND(&finished);
4444 }
4445 
4446 void CodeStubAssembler::MoveElements(ElementsKind kind,
4447                                      TNode<FixedArrayBase> elements,
4448                                      TNode<IntPtrT> dst_index,
4449                                      TNode<IntPtrT> src_index,
4450                                      TNode<IntPtrT> length) {
4451   Label finished(this);
4452   Label needs_barrier(this);
4453   const bool needs_barrier_check = !IsDoubleElementsKind(kind);
4454 
4455   DCHECK(IsFastElementsKind(kind));
4456   CSA_ASSERT(this, IsFixedArrayWithKind(elements, kind));
4457   CSA_ASSERT(this,
4458              IntPtrLessThanOrEqual(IntPtrAdd(dst_index, length),
4459                                    LoadAndUntagFixedArrayBaseLength(elements)));
4460   CSA_ASSERT(this,
4461              IntPtrLessThanOrEqual(IntPtrAdd(src_index, length),
4462                                    LoadAndUntagFixedArrayBaseLength(elements)));
4463 
4464   // The write barrier can be ignored if {elements} is in new space, or if
4465   // the elements pointer is FixedDoubleArray.
4466   if (needs_barrier_check) {
4467     JumpIfPointersFromHereAreInteresting(elements, &needs_barrier);
4468   }
4469 
4470   const TNode<IntPtrT> source_byte_length =
4471       IntPtrMul(length, IntPtrConstant(ElementsKindToByteSize(kind)));
4472   static const int32_t fa_base_data_offset =
4473       FixedArrayBase::kHeaderSize - kHeapObjectTag;
4474   TNode<IntPtrT> elements_intptr = BitcastTaggedToWord(elements);
4475   TNode<IntPtrT> target_data_ptr =
4476       IntPtrAdd(elements_intptr,
4477                 ElementOffsetFromIndex(dst_index, kind, fa_base_data_offset));
4478   TNode<IntPtrT> source_data_ptr =
4479       IntPtrAdd(elements_intptr,
4480                 ElementOffsetFromIndex(src_index, kind, fa_base_data_offset));
4481   TNode<ExternalReference> memmove =
4482       ExternalConstant(ExternalReference::libc_memmove_function());
4483   CallCFunction(memmove, MachineType::Pointer(),
4484                 std::make_pair(MachineType::Pointer(), target_data_ptr),
4485                 std::make_pair(MachineType::Pointer(), source_data_ptr),
4486                 std::make_pair(MachineType::UintPtr(), source_byte_length));
4487 
4488   if (needs_barrier_check) {
4489     Goto(&finished);
4490 
4491     BIND(&needs_barrier);
4492     {
4493       const TNode<IntPtrT> begin = src_index;
4494       const TNode<IntPtrT> end = IntPtrAdd(begin, length);
4495 
4496       // If dst_index is less than src_index, then walk forward.
4497       const TNode<IntPtrT> delta =
4498           IntPtrMul(IntPtrSub(dst_index, begin),
4499                     IntPtrConstant(ElementsKindToByteSize(kind)));
4500       auto loop_body = [&](TNode<HeapObject> array, TNode<IntPtrT> offset) {
4501         const TNode<AnyTaggedT> element = Load<AnyTaggedT>(array, offset);
4502         const TNode<WordT> delta_offset = IntPtrAdd(offset, delta);
4503         Store(array, delta_offset, element);
4504       };
4505 
4506       Label iterate_forward(this);
4507       Label iterate_backward(this);
4508       Branch(IntPtrLessThan(delta, IntPtrConstant(0)), &iterate_forward,
4509              &iterate_backward);
4510       BIND(&iterate_forward);
4511       {
4512         // Make a loop for the stores.
4513         BuildFastArrayForEach(elements, kind, begin, end, loop_body,
4514                               ForEachDirection::kForward);
4515         Goto(&finished);
4516       }
4517 
4518       BIND(&iterate_backward);
4519       {
4520         BuildFastArrayForEach(elements, kind, begin, end, loop_body,
4521                               ForEachDirection::kReverse);
4522         Goto(&finished);
4523       }
4524     }
4525     BIND(&finished);
4526   }
4527 }
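
// A minimal usage sketch (hypothetical, not part of this file): shift the
// contents of a backing store left by one element, as a shift-style builtin
// might, letting MoveElements choose between memmove and the barrier-aware
// loop. |elements| and |length| (TNode<IntPtrT>, >= 1) are assumed to be in
// scope.
//
//   MoveElements(PACKED_ELEMENTS, elements, IntPtrConstant(0),
//                IntPtrConstant(1), IntPtrSub(length, IntPtrConstant(1)));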
4528 
4529 void CodeStubAssembler::CopyElements(ElementsKind kind,
4530                                      TNode<FixedArrayBase> dst_elements,
4531                                      TNode<IntPtrT> dst_index,
4532                                      TNode<FixedArrayBase> src_elements,
4533                                      TNode<IntPtrT> src_index,
4534                                      TNode<IntPtrT> length,
4535                                      WriteBarrierMode write_barrier) {
4536   Label finished(this);
4537   Label needs_barrier(this);
4538   const bool needs_barrier_check = !IsDoubleElementsKind(kind);
4539 
4540   DCHECK(IsFastElementsKind(kind));
4541   CSA_ASSERT(this, IsFixedArrayWithKind(dst_elements, kind));
4542   CSA_ASSERT(this, IsFixedArrayWithKind(src_elements, kind));
4543   CSA_ASSERT(this, IntPtrLessThanOrEqual(
4544                        IntPtrAdd(dst_index, length),
4545                        LoadAndUntagFixedArrayBaseLength(dst_elements)));
4546   CSA_ASSERT(this, IntPtrLessThanOrEqual(
4547                        IntPtrAdd(src_index, length),
4548                        LoadAndUntagFixedArrayBaseLength(src_elements)));
4549   CSA_ASSERT(this, Word32Or(TaggedNotEqual(dst_elements, src_elements),
4550                             IntPtrEqual(length, IntPtrConstant(0))));
4551 
4552   // The write barrier can be ignored if {dst_elements} is in new space, or if
4553   // the elements pointer is FixedDoubleArray.
4554   if (needs_barrier_check) {
4555     JumpIfPointersFromHereAreInteresting(dst_elements, &needs_barrier);
4556   }
4557 
4558   TNode<IntPtrT> source_byte_length =
4559       IntPtrMul(length, IntPtrConstant(ElementsKindToByteSize(kind)));
4560   static const int32_t fa_base_data_offset =
4561       FixedArrayBase::kHeaderSize - kHeapObjectTag;
4562   TNode<IntPtrT> src_offset_start =
4563       ElementOffsetFromIndex(src_index, kind, fa_base_data_offset);
4564   TNode<IntPtrT> dst_offset_start =
4565       ElementOffsetFromIndex(dst_index, kind, fa_base_data_offset);
4566   TNode<IntPtrT> src_elements_intptr = BitcastTaggedToWord(src_elements);
4567   TNode<IntPtrT> source_data_ptr =
4568       IntPtrAdd(src_elements_intptr, src_offset_start);
4569   TNode<IntPtrT> dst_elements_intptr = BitcastTaggedToWord(dst_elements);
4570   TNode<IntPtrT> dst_data_ptr =
4571       IntPtrAdd(dst_elements_intptr, dst_offset_start);
4572   TNode<ExternalReference> memcpy =
4573       ExternalConstant(ExternalReference::libc_memcpy_function());
4574   CallCFunction(memcpy, MachineType::Pointer(),
4575                 std::make_pair(MachineType::Pointer(), dst_data_ptr),
4576                 std::make_pair(MachineType::Pointer(), source_data_ptr),
4577                 std::make_pair(MachineType::UintPtr(), source_byte_length));
4578 
4579   if (needs_barrier_check) {
4580     Goto(&finished);
4581 
4582     BIND(&needs_barrier);
4583     {
4584       const TNode<IntPtrT> begin = src_index;
4585       const TNode<IntPtrT> end = IntPtrAdd(begin, length);
4586       const TNode<IntPtrT> delta =
4587           IntPtrMul(IntPtrSub(dst_index, src_index),
4588                     IntPtrConstant(ElementsKindToByteSize(kind)));
4589       BuildFastArrayForEach(
4590           src_elements, kind, begin, end,
4591           [&](TNode<HeapObject> array, TNode<IntPtrT> offset) {
4592             const TNode<AnyTaggedT> element = Load<AnyTaggedT>(array, offset);
4593             const TNode<WordT> delta_offset = IntPtrAdd(offset, delta);
4594             if (write_barrier == SKIP_WRITE_BARRIER) {
4595               StoreNoWriteBarrier(MachineRepresentation::kTagged, dst_elements,
4596                                   delta_offset, element);
4597             } else {
4598               Store(dst_elements, delta_offset, element);
4599             }
4600           },
4601           ForEachDirection::kForward);
4602       Goto(&finished);
4603     }
4604     BIND(&finished);
4605   }
4606 }
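
// A minimal usage sketch (hypothetical, not part of this file): copy |length|
// tagged elements from one backing store to another, starting at index 0 in
// both. |dst_elements|, |src_elements| and |length| are assumed to be in
// scope; the default write barrier mode is assumed.
//
//   CopyElements(PACKED_ELEMENTS, dst_elements, IntPtrConstant(0),
//                src_elements, IntPtrConstant(0), length);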
4607 
4608 template <typename TIndex>
4609 void CodeStubAssembler::CopyFixedArrayElements(
4610     ElementsKind from_kind, TNode<FixedArrayBase> from_array,
4611     ElementsKind to_kind, TNode<FixedArrayBase> to_array,
4612     TNode<TIndex> first_element, TNode<TIndex> element_count,
4613     TNode<TIndex> capacity, WriteBarrierMode barrier_mode,
4614     HoleConversionMode convert_holes, TVariable<BoolT>* var_holes_converted) {
4615   DCHECK_IMPLIES(var_holes_converted != nullptr,
4616                  convert_holes == HoleConversionMode::kConvertToUndefined);
4617   CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(from_array, from_kind));
4618   CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(to_array, to_kind));
4619   STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
4620   static_assert(
4621       std::is_same<TIndex, Smi>::value || std::is_same<TIndex, IntPtrT>::value,
4622       "Only Smi or IntPtrT indices are allowed");
4623 
4624   const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag;
4625   Comment("[ CopyFixedArrayElements");
4626 
4627   // Typed array elements are not supported.
4628   DCHECK(!IsTypedArrayElementsKind(from_kind));
4629   DCHECK(!IsTypedArrayElementsKind(to_kind));
4630 
4631   Label done(this);
4632   bool from_double_elements = IsDoubleElementsKind(from_kind);
4633   bool to_double_elements = IsDoubleElementsKind(to_kind);
4634   bool doubles_to_objects_conversion =
4635       IsDoubleElementsKind(from_kind) && IsObjectElementsKind(to_kind);
4636   bool needs_write_barrier =
4637       doubles_to_objects_conversion ||
4638       (barrier_mode == UPDATE_WRITE_BARRIER && IsObjectElementsKind(to_kind));
4639   bool element_offset_matches =
4640       !needs_write_barrier &&
4641       (kTaggedSize == kDoubleSize ||
4642        IsDoubleElementsKind(from_kind) == IsDoubleElementsKind(to_kind));
4643   TNode<UintPtrT> double_hole =
4644       Is64() ? ReinterpretCast<UintPtrT>(Int64Constant(kHoleNanInt64))
4645              : ReinterpretCast<UintPtrT>(Int32Constant(kHoleNanLower32));
4646 
4647   // If copying might trigger a GC, we pre-initialize the FixedArray such that
4648   // it's always in a consistent state.
4649   if (convert_holes == HoleConversionMode::kConvertToUndefined) {
4650     DCHECK(IsObjectElementsKind(to_kind));
4651     // Use undefined for the part that we copy and holes for the rest.
4652     // Later if we run into a hole in the source we can just skip the writing
4653     // to the target and are still guaranteed that we get an undefined.
4654     FillFixedArrayWithValue(to_kind, to_array, IntPtrOrSmiConstant<TIndex>(0),
4655                             element_count, RootIndex::kUndefinedValue);
4656     FillFixedArrayWithValue(to_kind, to_array, element_count, capacity,
4657                             RootIndex::kTheHoleValue);
4658   } else if (doubles_to_objects_conversion) {
4659     // Pre-initialize the target with holes so that later, if we run into a
4660     // hole in the source, we can just skip the write to the target.
4661     FillFixedArrayWithValue(to_kind, to_array, IntPtrOrSmiConstant<TIndex>(0),
4662                             capacity, RootIndex::kTheHoleValue);
4663   } else if (element_count != capacity) {
4664     FillFixedArrayWithValue(to_kind, to_array, element_count, capacity,
4665                             RootIndex::kTheHoleValue);
4666   }
4667 
4668   TNode<IntPtrT> first_from_element_offset =
4669       ElementOffsetFromIndex(first_element, from_kind, 0);
4670   TNode<IntPtrT> limit_offset = Signed(IntPtrAdd(
4671       first_from_element_offset, IntPtrConstant(first_element_offset)));
4672   TVARIABLE(IntPtrT, var_from_offset,
4673             ElementOffsetFromIndex(IntPtrOrSmiAdd(first_element, element_count),
4674                                    from_kind, first_element_offset));
4675   // This second variable is used only when the element sizes of source and
4676   // destination arrays do not match.
4677   TVARIABLE(IntPtrT, var_to_offset);
4678   if (element_offset_matches) {
4679     var_to_offset = var_from_offset.value();
4680   } else {
4681     var_to_offset =
4682         ElementOffsetFromIndex(element_count, to_kind, first_element_offset);
4683   }
4684 
4685   VariableList vars({&var_from_offset, &var_to_offset}, zone());
4686   if (var_holes_converted != nullptr) vars.push_back(var_holes_converted);
4687   Label decrement(this, vars);
4688 
4689   TNode<IntPtrT> to_array_adjusted =
4690       element_offset_matches
4691           ? IntPtrSub(BitcastTaggedToWord(to_array), first_from_element_offset)
4692           : ReinterpretCast<IntPtrT>(to_array);
4693 
4694   Branch(WordEqual(var_from_offset.value(), limit_offset), &done, &decrement);
4695 
4696   BIND(&decrement);
4697   {
4698     TNode<IntPtrT> from_offset = Signed(IntPtrSub(
4699         var_from_offset.value(),
4700         IntPtrConstant(from_double_elements ? kDoubleSize : kTaggedSize)));
4701     var_from_offset = from_offset;
4702 
4703     TNode<IntPtrT> to_offset;
4704     if (element_offset_matches) {
4705       to_offset = from_offset;
4706     } else {
4707       to_offset = IntPtrSub(
4708           var_to_offset.value(),
4709           IntPtrConstant(to_double_elements ? kDoubleSize : kTaggedSize));
4710       var_to_offset = to_offset;
4711     }
4712 
4713     Label next_iter(this), store_double_hole(this), signal_hole(this);
4714     Label* if_hole;
4715     if (convert_holes == HoleConversionMode::kConvertToUndefined) {
4716       // The target elements array is already preinitialized with undefined
4717       // so we only need to signal that a hole was found and continue the loop.
4718       if_hole = &signal_hole;
4719     } else if (doubles_to_objects_conversion) {
4720       // The target elements array is already preinitialized with holes, so we
4721       // can just proceed with the next iteration.
4722       if_hole = &next_iter;
4723     } else if (IsDoubleElementsKind(to_kind)) {
4724       if_hole = &store_double_hole;
4725     } else {
4726       // In all other cases, don't check for holes; copy the data as is.
4727       if_hole = nullptr;
4728     }
4729 
4730     Node* value = LoadElementAndPrepareForStore(
4731         from_array, var_from_offset.value(), from_kind, to_kind, if_hole);
4732 
4733     if (needs_write_barrier) {
4734       CHECK_EQ(to_array, to_array_adjusted);
4735       Store(to_array_adjusted, to_offset, value);
4736     } else if (to_double_elements) {
4737       StoreNoWriteBarrier(MachineRepresentation::kFloat64, to_array_adjusted,
4738                           to_offset, value);
4739     } else {
4740       UnsafeStoreNoWriteBarrier(MachineRepresentation::kTagged,
4741                                 to_array_adjusted, to_offset, value);
4742     }
4743     Goto(&next_iter);
4744 
4745     if (if_hole == &store_double_hole) {
4746       BIND(&store_double_hole);
4747       // Don't use doubles to store the hole double, since manipulating the
4748       // signaling NaN used for the hole in C++, e.g. with bit_cast, will
4749       // change its value on ia32 (the x87 stack is used to return values
4750       // and stores to the stack silently clear the signaling bit).
4751       //
4752       // TODO(danno): When we have a Float32/Float64 wrapper class that
4753       // preserves double bits during manipulation, remove this code/change
4754       // this to an indexed Float64 store.
4755       if (Is64()) {
4756         StoreNoWriteBarrier(MachineRepresentation::kWord64, to_array_adjusted,
4757                             to_offset, double_hole);
4758       } else {
4759         StoreNoWriteBarrier(MachineRepresentation::kWord32, to_array_adjusted,
4760                             to_offset, double_hole);
4761         StoreNoWriteBarrier(MachineRepresentation::kWord32, to_array_adjusted,
4762                             IntPtrAdd(to_offset, IntPtrConstant(kInt32Size)),
4763                             double_hole);
4764       }
4765       Goto(&next_iter);
4766     } else if (if_hole == &signal_hole) {
4767       // This case happens only when IsObjectElementsKind(to_kind).
4768       BIND(&signal_hole);
4769       if (var_holes_converted != nullptr) {
4770         *var_holes_converted = Int32TrueConstant();
4771       }
4772       Goto(&next_iter);
4773     }
4774 
4775     BIND(&next_iter);
4776     TNode<BoolT> compare = WordNotEqual(from_offset, limit_offset);
4777     Branch(compare, &decrement, &done);
4778   }
4779 
4780   BIND(&done);
4781   Comment("] CopyFixedArrayElements");
4782 }
4783 
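// Casts {base} to a FixedArray: succeeds if its map is the FixedArray map or
// the copy-on-write FixedArray map, otherwise jumps to {cast_fail}.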
4784 TNode<FixedArray> CodeStubAssembler::HeapObjectToFixedArray(
4785     TNode<HeapObject> base, Label* cast_fail) {
4786   Label fixed_array(this);
4787   TNode<Map> map = LoadMap(base);
4788   GotoIf(TaggedEqual(map, FixedArrayMapConstant()), &fixed_array);
4789   GotoIf(TaggedNotEqual(map, FixedCOWArrayMapConstant()), cast_fail);
4790   Goto(&fixed_array);
4791   BIND(&fixed_array);
4792   return UncheckedCast<FixedArray>(base);
4793 }
4794 
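// Copies {property_count} tagged slots from {from_array} into {to_array}.
// When the source is kept alive (DestroySource::kNo), mutable HeapNumbers are
// cloned and a write barrier is forced; in debug builds a destroyed source is
// zapped with undefined afterwards.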
4795 void CodeStubAssembler::CopyPropertyArrayValues(TNode<HeapObject> from_array,
4796                                                 TNode<PropertyArray> to_array,
4797                                                 TNode<IntPtrT> property_count,
4798                                                 WriteBarrierMode barrier_mode,
4799                                                 DestroySource destroy_source) {
4800   CSA_SLOW_ASSERT(this, Word32Or(IsPropertyArray(from_array),
4801                                  IsEmptyFixedArray(from_array)));
4802   Comment("[ CopyPropertyArrayValues");
4803 
4804   bool needs_write_barrier = barrier_mode == UPDATE_WRITE_BARRIER;
4805 
4806   if (destroy_source == DestroySource::kNo) {
4807     // PropertyArray may contain mutable HeapNumbers, which will be cloned on
4808     // the heap, requiring a write barrier.
4809     needs_write_barrier = true;
4810   }
4811 
4812   TNode<IntPtrT> start = IntPtrConstant(0);
4813   ElementsKind kind = PACKED_ELEMENTS;
4814   BuildFastArrayForEach(
4815       from_array, kind, start, property_count,
4816       [this, to_array, needs_write_barrier, destroy_source](
4817           TNode<HeapObject> array, TNode<IntPtrT> offset) {
4818         TNode<AnyTaggedT> value = Load<AnyTaggedT>(array, offset);
4819 
4820         if (destroy_source == DestroySource::kNo) {
4821           value = CloneIfMutablePrimitive(CAST(value));
4822         }
4823 
4824         if (needs_write_barrier) {
4825           Store(to_array, offset, value);
4826         } else {
4827           StoreNoWriteBarrier(MachineRepresentation::kTagged, to_array, offset,
4828                               value);
4829         }
4830       });
4831 
4832 #ifdef DEBUG
4833   // Zap {from_array} if the copying above has made it invalid.
4834   if (destroy_source == DestroySource::kYes) {
4835     Label did_zap(this);
4836     GotoIf(IsEmptyFixedArray(from_array), &did_zap);
4837     FillPropertyArrayWithUndefined(CAST(from_array), start, property_count);
4838 
4839     Goto(&did_zap);
4840     BIND(&did_zap);
4841   }
4842 #endif
4843   Comment("] CopyPropertyArrayValues");
4844 }
4845 
4846 TNode<FixedArrayBase> CodeStubAssembler::CloneFixedArray(
4847     TNode<FixedArrayBase> source, ExtractFixedArrayFlags flags) {
4848   return ExtractFixedArray(
4849       source, base::Optional<TNode<BInt>>(IntPtrOrSmiConstant<BInt>(0)),
4850       base::Optional<TNode<BInt>>(base::nullopt),
4851       base::Optional<TNode<BInt>>(base::nullopt), flags);
4852 }
4853 
4854 Node* CodeStubAssembler::LoadElementAndPrepareForStore(
4855     TNode<FixedArrayBase> array, TNode<IntPtrT> offset, ElementsKind from_kind,
4856     ElementsKind to_kind, Label* if_hole) {
4857   CSA_ASSERT(this, IsFixedArrayWithKind(array, from_kind));
4858   if (IsDoubleElementsKind(from_kind)) {
4859     TNode<Float64T> value =
4860         LoadDoubleWithHoleCheck(array, offset, if_hole, MachineType::Float64());
4861     if (!IsDoubleElementsKind(to_kind)) {
4862       return AllocateHeapNumberWithValue(value);
4863     }
4864     return value;
4865 
4866   } else {
4867     TNode<Object> value = Load<Object>(array, offset);
4868     if (if_hole) {
4869       GotoIf(TaggedEqual(value, TheHoleConstant()), if_hole);
4870     }
4871     if (IsDoubleElementsKind(to_kind)) {
4872       if (IsSmiElementsKind(from_kind)) {
4873         return SmiToFloat64(CAST(value));
4874       }
4875       return LoadHeapNumberValue(CAST(value));
4876     }
4877     return value;
4878   }
4879 }
4880 
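// Growth policy for elements backing stores: new_capacity =
// old_capacity + old_capacity / 2 + JSObject::kMinAddedElementsCapacity,
// i.e. roughly 1.5x growth plus a fixed amount of headroom for small arrays.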
4881 template <typename TIndex>
4882 TNode<TIndex> CodeStubAssembler::CalculateNewElementsCapacity(
4883     TNode<TIndex> old_capacity) {
4884   static_assert(
4885       std::is_same<TIndex, Smi>::value || std::is_same<TIndex, IntPtrT>::value,
4886       "Only Smi or IntPtrT old_capacity is allowed");
4887   Comment("TryGrowElementsCapacity");
4888   TNode<TIndex> half_old_capacity = WordOrSmiShr(old_capacity, 1);
4889   TNode<TIndex> new_capacity = IntPtrOrSmiAdd(half_old_capacity, old_capacity);
4890   TNode<TIndex> padding =
4891       IntPtrOrSmiConstant<TIndex>(JSObject::kMinAddedElementsCapacity);
4892   return IntPtrOrSmiAdd(new_capacity, padding);
4893 }
4894 
4895 template V8_EXPORT_PRIVATE TNode<IntPtrT>
4896     CodeStubAssembler::CalculateNewElementsCapacity<IntPtrT>(TNode<IntPtrT>);
4897 template V8_EXPORT_PRIVATE TNode<Smi>
4898     CodeStubAssembler::CalculateNewElementsCapacity<Smi>(TNode<Smi>);
4899 
4900 TNode<FixedArrayBase> CodeStubAssembler::TryGrowElementsCapacity(
4901     TNode<HeapObject> object, TNode<FixedArrayBase> elements, ElementsKind kind,
4902     TNode<Smi> key, Label* bailout) {
4903   CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, kind));
4904   TNode<Smi> capacity = LoadFixedArrayBaseLength(elements);
4905 
4906   return TryGrowElementsCapacity(object, elements, kind,
4907                                  TaggedToParameter<BInt>(key),
4908                                  TaggedToParameter<BInt>(capacity), bailout);
4909 }
4910 
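// Grows {elements} so that {key} fits. Bails out to {bailout} if the requested
// index would leave a gap larger than JSObject::kMaxGap or if the grown
// backing store would be too large for new-space allocation.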
4911 template <typename TIndex>
4912 TNode<FixedArrayBase> CodeStubAssembler::TryGrowElementsCapacity(
4913     TNode<HeapObject> object, TNode<FixedArrayBase> elements, ElementsKind kind,
4914     TNode<TIndex> key, TNode<TIndex> capacity, Label* bailout) {
4915   static_assert(
4916       std::is_same<TIndex, Smi>::value || std::is_same<TIndex, IntPtrT>::value,
4917       "Only Smi or IntPtrT key and capacity nodes are allowed");
4918   Comment("TryGrowElementsCapacity");
4919   CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, kind));
4920 
4921   // If the gap growth is too big, fall back to the runtime.
4922   TNode<TIndex> max_gap = IntPtrOrSmiConstant<TIndex>(JSObject::kMaxGap);
4923   TNode<TIndex> max_capacity = IntPtrOrSmiAdd(capacity, max_gap);
4924   GotoIf(UintPtrOrSmiGreaterThanOrEqual(key, max_capacity), bailout);
4925 
4926   // Calculate the capacity of the new backing store.
4927   TNode<TIndex> new_capacity = CalculateNewElementsCapacity(
4928       IntPtrOrSmiAdd(key, IntPtrOrSmiConstant<TIndex>(1)));
4929 
4930   return GrowElementsCapacity(object, elements, kind, kind, capacity,
4931                               new_capacity, bailout);
4932 }
4933 
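// Allocates a new backing store of {new_capacity} elements in {to_kind},
// copies over the existing {capacity} elements, and installs the new store on
// {object}. Falls back to {bailout} if the allocation would not fit in new
// space.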
4934 template <typename TIndex>
4935 TNode<FixedArrayBase> CodeStubAssembler::GrowElementsCapacity(
4936     TNode<HeapObject> object, TNode<FixedArrayBase> elements,
4937     ElementsKind from_kind, ElementsKind to_kind, TNode<TIndex> capacity,
4938     TNode<TIndex> new_capacity, Label* bailout) {
4939   static_assert(
4940       std::is_same<TIndex, Smi>::value || std::is_same<TIndex, IntPtrT>::value,
4941       "Only Smi or IntPtrT capacities are allowed");
4942   Comment("[ GrowElementsCapacity");
4943   CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(elements, from_kind));
4944 
4945   // If the size of the allocation for the new capacity doesn't fit in a page
4946   // that we can bump-pointer allocate from, fall back to the runtime.
4947   int max_size = FixedArrayBase::GetMaxLengthForNewSpaceAllocation(to_kind);
4948   GotoIf(UintPtrOrSmiGreaterThanOrEqual(new_capacity,
4949                                         IntPtrOrSmiConstant<TIndex>(max_size)),
4950          bailout);
4951 
4952   // Allocate the new backing store.
4953   TNode<FixedArrayBase> new_elements =
4954       AllocateFixedArray(to_kind, new_capacity);
4955 
4956   // Copy the elements from the old elements store to the new.
4957   // The size check above guarantees that |new_elements| is allocated
4958   // in new space, so we can skip the write barrier.
4959   CopyFixedArrayElements(from_kind, elements, to_kind, new_elements, capacity,
4960                          new_capacity, SKIP_WRITE_BARRIER);
4961 
4962   StoreObjectField(object, JSObject::kElementsOffset, new_elements);
4963   Comment("] GrowElementsCapacity");
4964   return new_elements;
4965 }
4966 
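// Places an AllocationMemento directly after {base} (at offset
// {base_allocation_size}) pointing at {allocation_site}; with pretenuring
// enabled, the site's create count is bumped as well.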
4967 void CodeStubAssembler::InitializeAllocationMemento(
4968     TNode<HeapObject> base, TNode<IntPtrT> base_allocation_size,
4969     TNode<AllocationSite> allocation_site) {
4970   Comment("[Initialize AllocationMemento");
4971   TNode<HeapObject> memento = InnerAllocate(base, base_allocation_size);
4972   StoreMapNoWriteBarrier(memento, RootIndex::kAllocationMementoMap);
4973   StoreObjectFieldNoWriteBarrier(
4974       memento, AllocationMemento::kAllocationSiteOffset, allocation_site);
4975   if (FLAG_allocation_site_pretenuring) {
4976     TNode<Int32T> count = LoadObjectField<Int32T>(
4977         allocation_site, AllocationSite::kPretenureCreateCountOffset);
4978 
4979     TNode<Int32T> incremented_count = Int32Add(count, Int32Constant(1));
4980     StoreObjectFieldNoWriteBarrier(allocation_site,
4981                                    AllocationSite::kPretenureCreateCountOffset,
4982                                    incremented_count);
4983   }
4984   Comment("]");
4985 }
4986 
4987 TNode<Float64T> CodeStubAssembler::TryTaggedToFloat64(
4988     TNode<Object> value, Label* if_valueisnotnumber) {
4989   return Select<Float64T>(
4990       TaggedIsSmi(value), [&]() { return SmiToFloat64(CAST(value)); },
4991       [&]() {
4992         GotoIfNot(IsHeapNumber(CAST(value)), if_valueisnotnumber);
4993         return LoadHeapNumberValue(CAST(value));
4994       });
4995 }
4996 
4997 TNode<Float64T> CodeStubAssembler::TruncateTaggedToFloat64(
4998     TNode<Context> context, SloppyTNode<Object> value) {
4999   // We might need to loop once due to ToNumber conversion.
5000   TVARIABLE(Object, var_value, value);
5001   TVARIABLE(Float64T, var_result);
5002   Label loop(this, &var_value), done_loop(this, &var_result);
5003   Goto(&loop);
5004   BIND(&loop);
5005   {
5006     Label if_valueisnotnumber(this, Label::kDeferred);
5007 
5008     // Load the current {value}.
5009     value = var_value.value();
5010 
5011     // If {value} is a Number, convert it directly to Float64; otherwise
5012     // convert it to a Number first and loop.
5013     var_result = TryTaggedToFloat64(value, &if_valueisnotnumber);
5014     Goto(&done_loop);
5015 
5016     BIND(&if_valueisnotnumber);
5017     {
5018       // Convert the {value} to a Number first.
5019       var_value = CallBuiltin(Builtins::kNonNumberToNumber, context, value);
5020       Goto(&loop);
5021     }
5022   }
5023   BIND(&done_loop);
5024   return var_result.value();
5025 }
5026 
5027 TNode<Word32T> CodeStubAssembler::TruncateTaggedToWord32(
5028     TNode<Context> context, SloppyTNode<Object> value) {
5029   TVARIABLE(Word32T, var_result);
5030   Label done(this);
5031   TaggedToWord32OrBigIntImpl<Object::Conversion::kToNumber>(context, value,
5032                                                             &done, &var_result);
5033   BIND(&done);
5034   return var_result.value();
5035 }
5036 
5037 // Truncate {value} to word32 and jump to {if_number} if it is a Number,
5038 // or find that it is a BigInt and jump to {if_bigint}.
5039 void CodeStubAssembler::TaggedToWord32OrBigInt(
5040     TNode<Context> context, TNode<Object> value, Label* if_number,
5041     TVariable<Word32T>* var_word32, Label* if_bigint,
5042     TVariable<BigInt>* var_maybe_bigint) {
5043   TaggedToWord32OrBigIntImpl<Object::Conversion::kToNumeric>(
5044       context, value, if_number, var_word32, if_bigint, var_maybe_bigint);
5045 }
5046 
5047 // Truncate {value} to word32 and jump to {if_number} if it is a Number,
5048 // or find that it is a BigInt and jump to {if_bigint}. In either case,
5049 // store the type feedback in {var_feedback}.
5050 void CodeStubAssembler::TaggedToWord32OrBigIntWithFeedback(
5051     TNode<Context> context, TNode<Object> value, Label* if_number,
5052     TVariable<Word32T>* var_word32, Label* if_bigint,
5053     TVariable<BigInt>* var_maybe_bigint, TVariable<Smi>* var_feedback) {
5054   TaggedToWord32OrBigIntImpl<Object::Conversion::kToNumeric>(
5055       context, value, if_number, var_word32, if_bigint, var_maybe_bigint,
5056       var_feedback);
5057 }
5058 
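// Shared implementation for the TaggedToWord32OrBigInt* helpers above: loops,
// converting non-numeric inputs via NonNumberToNumber/NonNumberToNumeric,
// until {value} is a Smi, a HeapNumber, or (for kToNumeric) a BigInt, and
// records binary-operation feedback when {var_feedback} is provided.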
5059 template <Object::Conversion conversion>
5060 void CodeStubAssembler::TaggedToWord32OrBigIntImpl(
5061     TNode<Context> context, TNode<Object> value, Label* if_number,
5062     TVariable<Word32T>* var_word32, Label* if_bigint,
5063     TVariable<BigInt>* var_maybe_bigint, TVariable<Smi>* var_feedback) {
5064   // We might need to loop after conversion.
5065   TVARIABLE(Object, var_value, value);
5066   OverwriteFeedback(var_feedback, BinaryOperationFeedback::kNone);
5067   VariableList loop_vars({&var_value}, zone());
5068   if (var_feedback != nullptr) loop_vars.push_back(var_feedback);
5069   Label loop(this, loop_vars);
5070   Goto(&loop);
5071   BIND(&loop);
5072   {
5073     value = var_value.value();
5074     Label not_smi(this), is_heap_number(this), is_oddball(this),
5075         is_bigint(this);
5076     GotoIf(TaggedIsNotSmi(value), &not_smi);
5077 
5078     // {value} is a Smi.
5079     *var_word32 = SmiToInt32(CAST(value));
5080     CombineFeedback(var_feedback, BinaryOperationFeedback::kSignedSmall);
5081     Goto(if_number);
5082 
5083     BIND(&not_smi);
5084     TNode<HeapObject> value_heap_object = CAST(value);
5085     TNode<Map> map = LoadMap(value_heap_object);
5086     GotoIf(IsHeapNumberMap(map), &is_heap_number);
5087     TNode<Uint16T> instance_type = LoadMapInstanceType(map);
5088     if (conversion == Object::Conversion::kToNumeric) {
5089       GotoIf(IsBigIntInstanceType(instance_type), &is_bigint);
5090     }
5091 
5092     // Not HeapNumber (or BigInt if conversion == kToNumeric).
5093     {
5094       if (var_feedback != nullptr) {
5095         // We do not require an Or with earlier feedback here because once we
5096         // convert the value to a Numeric, we cannot reach this path. We can
5097         // only reach this path on the first pass when the feedback is kNone.
5098         CSA_ASSERT(this, SmiEqual(var_feedback->value(),
5099                                   SmiConstant(BinaryOperationFeedback::kNone)));
5100       }
5101       GotoIf(InstanceTypeEqual(instance_type, ODDBALL_TYPE), &is_oddball);
5102       // Not an oddball either -> convert.
5103       auto builtin = conversion == Object::Conversion::kToNumeric
5104                          ? Builtins::kNonNumberToNumeric
5105                          : Builtins::kNonNumberToNumber;
5106       var_value = CallBuiltin(builtin, context, value);
5107       OverwriteFeedback(var_feedback, BinaryOperationFeedback::kAny);
5108       Goto(&loop);
5109 
5110       BIND(&is_oddball);
5111       var_value = LoadObjectField(value_heap_object, Oddball::kToNumberOffset);
5112       OverwriteFeedback(var_feedback,
5113                         BinaryOperationFeedback::kNumberOrOddball);
5114       Goto(&loop);
5115     }
5116 
5117     BIND(&is_heap_number);
5118     *var_word32 = TruncateHeapNumberValueToWord32(CAST(value));
5119     CombineFeedback(var_feedback, BinaryOperationFeedback::kNumber);
5120     Goto(if_number);
5121 
5122     if (conversion == Object::Conversion::kToNumeric) {
5123       BIND(&is_bigint);
5124       *var_maybe_bigint = CAST(value);
5125       CombineFeedback(var_feedback, BinaryOperationFeedback::kBigInt);
5126       Goto(if_bigint);
5127     }
5128   }
5129 }
5130 
5131 TNode<Int32T> CodeStubAssembler::TruncateNumberToWord32(TNode<Number> number) {
5132   TVARIABLE(Int32T, var_result);
5133   Label done(this), if_heapnumber(this);
5134   GotoIfNot(TaggedIsSmi(number), &if_heapnumber);
5135   var_result = SmiToInt32(CAST(number));
5136   Goto(&done);
5137 
5138   BIND(&if_heapnumber);
5139   TNode<Float64T> value = LoadHeapNumberValue(CAST(number));
5140   var_result = Signed(TruncateFloat64ToWord32(value));
5141   Goto(&done);
5142 
5143   BIND(&done);
5144   return var_result.value();
5145 }
5146 
5147 TNode<Int32T> CodeStubAssembler::TruncateHeapNumberValueToWord32(
5148     TNode<HeapNumber> object) {
5149   TNode<Float64T> value = LoadHeapNumberValue(object);
5150   return Signed(TruncateFloat64ToWord32(value));
5151 }
5152 
5153 void CodeStubAssembler::TryHeapNumberToSmi(TNode<HeapNumber> number,
5154                                            TVariable<Smi>* var_result_smi,
5155                                            Label* if_smi) {
5156   TNode<Float64T> value = LoadHeapNumberValue(number);
5157   TryFloat64ToSmi(value, var_result_smi, if_smi);
5158 }
5159 
5160 void CodeStubAssembler::TryFloat32ToSmi(TNode<Float32T> value,
5161                                         TVariable<Smi>* var_result_smi,
5162                                         Label* if_smi) {
5163   TNode<Int32T> ivalue = TruncateFloat32ToInt32(value);
5164   TNode<Float32T> fvalue = RoundInt32ToFloat32(ivalue);
5165 
5166   Label if_int32(this), if_heap_number(this);
5167 
5168   GotoIfNot(Float32Equal(value, fvalue), &if_heap_number);
5169   GotoIfNot(Word32Equal(ivalue, Int32Constant(0)), &if_int32);
5170   Branch(Int32LessThan(UncheckedCast<Int32T>(BitcastFloat32ToInt32(value)),
5171                        Int32Constant(0)),
5172          &if_heap_number, &if_int32);
5173 
5174   TVARIABLE(Number, var_result);
5175   BIND(&if_int32);
5176   {
5177     if (SmiValuesAre32Bits()) {
5178       *var_result_smi = SmiTag(ChangeInt32ToIntPtr(ivalue));
5179     } else {
5180       DCHECK(SmiValuesAre31Bits());
5181       TNode<PairT<Int32T, BoolT>> pair = Int32AddWithOverflow(ivalue, ivalue);
5182       TNode<BoolT> overflow = Projection<1>(pair);
5183       GotoIf(overflow, &if_heap_number);
5184       *var_result_smi =
5185           BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(Projection<0>(pair)));
5186     }
5187     Goto(if_smi);
5188   }
5189   BIND(&if_heap_number);
5190 }
5191 
5192 void CodeStubAssembler::TryFloat64ToSmi(TNode<Float64T> value,
5193                                         TVariable<Smi>* var_result_smi,
5194                                         Label* if_smi) {
5195   TNode<Int32T> value32 = RoundFloat64ToInt32(value);
5196   TNode<Float64T> value64 = ChangeInt32ToFloat64(value32);
5197 
5198   Label if_int32(this), if_heap_number(this, Label::kDeferred);
5199 
5200   GotoIfNot(Float64Equal(value, value64), &if_heap_number);
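  // A round-tripped value of 0 needs one more check: if the sign bit of the
  // original double is set, the input was -0.0, which cannot be represented
  // as a Smi and therefore needs a HeapNumber.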
5201   GotoIfNot(Word32Equal(value32, Int32Constant(0)), &if_int32);
5202   Branch(Int32LessThan(UncheckedCast<Int32T>(Float64ExtractHighWord32(value)),
5203                        Int32Constant(0)),
5204          &if_heap_number, &if_int32);
5205 
5206   TVARIABLE(Number, var_result);
5207   BIND(&if_int32);
5208   {
5209     if (SmiValuesAre32Bits()) {
5210       *var_result_smi = SmiTag(ChangeInt32ToIntPtr(value32));
5211     } else {
5212       DCHECK(SmiValuesAre31Bits());
5213       TNode<PairT<Int32T, BoolT>> pair = Int32AddWithOverflow(value32, value32);
5214       TNode<BoolT> overflow = Projection<1>(pair);
5215       GotoIf(overflow, &if_heap_number);
5216       *var_result_smi =
5217           BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(Projection<0>(pair)));
5218     }
5219     Goto(if_smi);
5220   }
5221   BIND(&if_heap_number);
5222 }
5223 
5224 TNode<Number> CodeStubAssembler::ChangeFloat32ToTagged(TNode<Float32T> value) {
5225   Label if_smi(this), done(this);
5226   TVARIABLE(Smi, var_smi_result);
5227   TVARIABLE(Number, var_result);
5228   TryFloat32ToSmi(value, &var_smi_result, &if_smi);
5229 
5230   var_result = AllocateHeapNumberWithValue(ChangeFloat32ToFloat64(value));
5231   Goto(&done);
5232 
5233   BIND(&if_smi);
5234   {
5235     var_result = var_smi_result.value();
5236     Goto(&done);
5237   }
5238   BIND(&done);
5239   return var_result.value();
5240 }
5241 
5242 TNode<Number> CodeStubAssembler::ChangeFloat64ToTagged(
5243     SloppyTNode<Float64T> value) {
5244   Label if_smi(this), done(this);
5245   TVARIABLE(Smi, var_smi_result);
5246   TVARIABLE(Number, var_result);
5247   TryFloat64ToSmi(value, &var_smi_result, &if_smi);
5248 
5249   var_result = AllocateHeapNumberWithValue(value);
5250   Goto(&done);
5251 
5252   BIND(&if_smi);
5253   {
5254     var_result = var_smi_result.value();
5255     Goto(&done);
5256   }
5257   BIND(&done);
5258   return var_result.value();
5259 }
5260 
5261 TNode<Number> CodeStubAssembler::ChangeInt32ToTagged(
5262     SloppyTNode<Int32T> value) {
5263   if (SmiValuesAre32Bits()) {
5264     return SmiTag(ChangeInt32ToIntPtr(value));
5265   }
5266   DCHECK(SmiValuesAre31Bits());
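  // With 31-bit Smis, adding the value to itself is equivalent to shifting it
  // left by one, i.e. Smi-tagging it; the overflow flag tells us whether the
  // value fits in a Smi at all.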
5267   TVARIABLE(Number, var_result);
5268   TNode<PairT<Int32T, BoolT>> pair = Int32AddWithOverflow(value, value);
5269   TNode<BoolT> overflow = Projection<1>(pair);
5270   Label if_overflow(this, Label::kDeferred), if_notoverflow(this),
5271       if_join(this);
5272   Branch(overflow, &if_overflow, &if_notoverflow);
5273   BIND(&if_overflow);
5274   {
5275     TNode<Float64T> value64 = ChangeInt32ToFloat64(value);
5276     TNode<HeapNumber> result = AllocateHeapNumberWithValue(value64);
5277     var_result = result;
5278     Goto(&if_join);
5279   }
5280   BIND(&if_notoverflow);
5281   {
5282     TNode<IntPtrT> almost_tagged_value =
5283         ChangeInt32ToIntPtr(Projection<0>(pair));
5284     TNode<Smi> result = BitcastWordToTaggedSigned(almost_tagged_value);
5285     var_result = result;
5286     Goto(&if_join);
5287   }
5288   BIND(&if_join);
5289   return var_result.value();
5290 }
5291 
5292 TNode<Number> CodeStubAssembler::ChangeUint32ToTagged(
5293     SloppyTNode<Uint32T> value) {
5294   Label if_overflow(this, Label::kDeferred), if_not_overflow(this),
5295       if_join(this);
5296   TVARIABLE(Number, var_result);
5297   // If {value} exceeds Smi::kMaxValue, we need to store it in a HeapNumber.
5298   Branch(Uint32LessThan(Uint32Constant(Smi::kMaxValue), value), &if_overflow,
5299          &if_not_overflow);
5300 
5301   BIND(&if_not_overflow);
5302   {
5303     // The {value} is definitely in valid Smi range.
5304     var_result = SmiTag(Signed(ChangeUint32ToWord(value)));
5305   }
5306   Goto(&if_join);
5307 
5308   BIND(&if_overflow);
5309   {
5310     TNode<Float64T> float64_value = ChangeUint32ToFloat64(value);
5311     var_result = AllocateHeapNumberWithValue(float64_value);
5312   }
5313   Goto(&if_join);
5314 
5315   BIND(&if_join);
5316   return var_result.value();
5317 }
5318 
5319 TNode<Number> CodeStubAssembler::ChangeUintPtrToTagged(TNode<UintPtrT> value) {
5320   Label if_overflow(this, Label::kDeferred), if_not_overflow(this),
5321       if_join(this);
5322   TVARIABLE(Number, var_result);
5323   // If {value} exceeds Smi::kMaxValue, we need to store it in a HeapNumber.
5324   Branch(UintPtrLessThan(UintPtrConstant(Smi::kMaxValue), value), &if_overflow,
5325          &if_not_overflow);
5326 
5327   BIND(&if_not_overflow);
5328   {
5329     // The {value} is definitely in valid Smi range.
5330     var_result = SmiTag(Signed(value));
5331   }
5332   Goto(&if_join);
5333 
5334   BIND(&if_overflow);
5335   {
5336     TNode<Float64T> float64_value = ChangeUintPtrToFloat64(value);
5337     var_result = AllocateHeapNumberWithValue(float64_value);
5338   }
5339   Goto(&if_join);
5340 
5341   BIND(&if_join);
5342   return var_result.value();
5343 }
5344 
5345 TNode<String> CodeStubAssembler::ToThisString(TNode<Context> context,
5346                                               TNode<Object> value,
5347                                               TNode<String> method_name) {
5348   TVARIABLE(Object, var_value, value);
5349 
5350   // Check if the {value} is a Smi or a HeapObject.
5351   Label if_valueissmi(this, Label::kDeferred), if_valueisnotsmi(this),
5352       if_valueisstring(this);
5353   Branch(TaggedIsSmi(value), &if_valueissmi, &if_valueisnotsmi);
5354   BIND(&if_valueisnotsmi);
5355   {
5356     // Load the instance type of the {value}.
5357     TNode<Uint16T> value_instance_type = LoadInstanceType(CAST(value));
5358 
5359     // Check if the {value} is already a String.
5360     Label if_valueisnotstring(this, Label::kDeferred);
5361     Branch(IsStringInstanceType(value_instance_type), &if_valueisstring,
5362            &if_valueisnotstring);
5363     BIND(&if_valueisnotstring);
5364     {
5365       // Check if the {value} is null or undefined.
5366       Label if_valueisnullorundefined(this, Label::kDeferred);
5367       GotoIf(IsNullOrUndefined(value), &if_valueisnullorundefined);
5368       // Convert the {value} to a String.
5369       var_value = CallBuiltin(Builtins::kToString, context, value);
5370       Goto(&if_valueisstring);
5371 
5372       BIND(&if_valueisnullorundefined);
5373       {
5374         // The {value} is either null or undefined.
5375         ThrowTypeError(context, MessageTemplate::kCalledOnNullOrUndefined,
5376                        method_name);
5377       }
5378     }
5379   }
5380   BIND(&if_valueissmi);
5381   {
5382     // The {value} is a Smi, convert it to a String.
5383     var_value = CallBuiltin(Builtins::kNumberToString, context, value);
5384     Goto(&if_valueisstring);
5385   }
5386   BIND(&if_valueisstring);
5387   return CAST(var_value.value());
5388 }
5389 
5390 TNode<Uint32T> CodeStubAssembler::ChangeNumberToUint32(TNode<Number> value) {
5391   TVARIABLE(Uint32T, var_result);
5392   Label if_smi(this), if_heapnumber(this, Label::kDeferred), done(this);
5393   Branch(TaggedIsSmi(value), &if_smi, &if_heapnumber);
5394   BIND(&if_smi);
5395   {
5396     var_result = Unsigned(SmiToInt32(CAST(value)));
5397     Goto(&done);
5398   }
5399   BIND(&if_heapnumber);
5400   {
5401     var_result = ChangeFloat64ToUint32(LoadHeapNumberValue(CAST(value)));
5402     Goto(&done);
5403   }
5404   BIND(&done);
5405   return var_result.value();
5406 }
5407 
5408 TNode<Float64T> CodeStubAssembler::ChangeNumberToFloat64(TNode<Number> value) {
5409   TVARIABLE(Float64T, result);
5410   Label smi(this);
5411   Label done(this, &result);
5412   GotoIf(TaggedIsSmi(value), &smi);
5413   result = LoadHeapNumberValue(CAST(value));
5414   Goto(&done);
5415 
5416   BIND(&smi);
5417   {
5418     result = SmiToFloat64(CAST(value));
5419     Goto(&done);
5420   }
5421 
5422   BIND(&done);
5423   return result.value();
5424 }
5425 
5426 TNode<Int32T> CodeStubAssembler::ChangeTaggedNonSmiToInt32(
5427     TNode<Context> context, TNode<HeapObject> input) {
5428   return Select<Int32T>(
5429       IsHeapNumber(input),
5430       [=] {
5431         return Signed(TruncateFloat64ToWord32(LoadHeapNumberValue(input)));
5432       },
5433       [=] {
5434         return TruncateNumberToWord32(
5435             CAST(CallBuiltin(Builtins::kNonNumberToNumber, context, input)));
5436       });
5437 }
5438 
5439 TNode<Float64T> CodeStubAssembler::ChangeTaggedToFloat64(TNode<Context> context,
5440                                                          TNode<Object> input) {
5441   TVARIABLE(Float64T, var_result);
5442   Label end(this), not_smi(this);
5443 
5444   GotoIfNot(TaggedIsSmi(input), &not_smi);
5445   var_result = SmiToFloat64(CAST(input));
5446   Goto(&end);
5447 
5448   BIND(&not_smi);
5449   var_result = Select<Float64T>(
5450       IsHeapNumber(CAST(input)),
5451       [=] { return LoadHeapNumberValue(CAST(input)); },
5452       [=] {
5453         return ChangeNumberToFloat64(
5454             CAST(CallBuiltin(Builtins::kNonNumberToNumber, context, input)));
5455       });
5456   Goto(&end);
5457 
5458   BIND(&end);
5459   return var_result.value();
5460 }
5461 
5462 TNode<WordT> CodeStubAssembler::TimesSystemPointerSize(
5463     SloppyTNode<WordT> value) {
5464   return WordShl(value, kSystemPointerSizeLog2);
5465 }
5466 
5467 TNode<WordT> CodeStubAssembler::TimesTaggedSize(SloppyTNode<WordT> value) {
5468   return WordShl(value, kTaggedSizeLog2);
5469 }
5470 
5471 TNode<WordT> CodeStubAssembler::TimesDoubleSize(SloppyTNode<WordT> value) {
5472   return WordShl(value, kDoubleSizeLog2);
5473 }
5474 
5475 TNode<Object> CodeStubAssembler::ToThisValue(TNode<Context> context,
5476                                              TNode<Object> value,
5477                                              PrimitiveType primitive_type,
5478                                              char const* method_name) {
5479   // We might need to loop once due to JSPrimitiveWrapper unboxing.
5480   TVARIABLE(Object, var_value, value);
5481   Label loop(this, &var_value), done_loop(this),
5482       done_throw(this, Label::kDeferred);
5483   Goto(&loop);
5484   BIND(&loop);
5485   {
5486     // Check if the {value} is a Smi or a HeapObject.
5487     GotoIf(
5488         TaggedIsSmi(var_value.value()),
5489         (primitive_type == PrimitiveType::kNumber) ? &done_loop : &done_throw);
5490 
5491     TNode<HeapObject> value = CAST(var_value.value());
5492 
5493     // Load the map of the {value}.
5494     TNode<Map> value_map = LoadMap(value);
5495 
5496     // Load the instance type of the {value}.
5497     TNode<Uint16T> value_instance_type = LoadMapInstanceType(value_map);
5498 
5499     // Check if {value} is a JSPrimitiveWrapper.
5500     Label if_valueiswrapper(this, Label::kDeferred), if_valueisnotwrapper(this);
5501     Branch(InstanceTypeEqual(value_instance_type, JS_PRIMITIVE_WRAPPER_TYPE),
5502            &if_valueiswrapper, &if_valueisnotwrapper);
5503 
5504     BIND(&if_valueiswrapper);
5505     {
5506       // Load the actual value from the {value}.
5507       var_value = LoadObjectField(value, JSPrimitiveWrapper::kValueOffset);
5508       Goto(&loop);
5509     }
5510 
5511     BIND(&if_valueisnotwrapper);
5512     {
5513       switch (primitive_type) {
5514         case PrimitiveType::kBoolean:
5515           GotoIf(TaggedEqual(value_map, BooleanMapConstant()), &done_loop);
5516           break;
5517         case PrimitiveType::kNumber:
5518           GotoIf(TaggedEqual(value_map, HeapNumberMapConstant()), &done_loop);
5519           break;
5520         case PrimitiveType::kString:
5521           GotoIf(IsStringInstanceType(value_instance_type), &done_loop);
5522           break;
5523         case PrimitiveType::kSymbol:
5524           GotoIf(TaggedEqual(value_map, SymbolMapConstant()), &done_loop);
5525           break;
5526       }
5527       Goto(&done_throw);
5528     }
5529   }
5530 
5531   BIND(&done_throw);
5532   {
5533     const char* primitive_name = nullptr;
5534     switch (primitive_type) {
5535       case PrimitiveType::kBoolean:
5536         primitive_name = "Boolean";
5537         break;
5538       case PrimitiveType::kNumber:
5539         primitive_name = "Number";
5540         break;
5541       case PrimitiveType::kString:
5542         primitive_name = "String";
5543         break;
5544       case PrimitiveType::kSymbol:
5545         primitive_name = "Symbol";
5546         break;
5547     }
5548     CHECK_NOT_NULL(primitive_name);
5549 
5550     // The {value} is not a compatible receiver for this method.
5551     ThrowTypeError(context, MessageTemplate::kNotGeneric, method_name,
5552                    primitive_name);
5553   }
5554 
5555   BIND(&done_loop);
5556   return var_value.value();
5557 }
5558 
5559 void CodeStubAssembler::ThrowIfNotInstanceType(TNode<Context> context,
5560                                                TNode<Object> value,
5561                                                InstanceType instance_type,
5562                                                char const* method_name) {
5563   Label out(this), throw_exception(this, Label::kDeferred);
5564 
5565   GotoIf(TaggedIsSmi(value), &throw_exception);
5566 
5567   // Load the instance type of the {value}.
5568   TNode<Map> map = LoadMap(CAST(value));
5569   const TNode<Uint16T> value_instance_type = LoadMapInstanceType(map);
5570 
5571   Branch(Word32Equal(value_instance_type, Int32Constant(instance_type)), &out,
5572          &throw_exception);
5573 
5574   // The {value} is not a compatible receiver for this method.
5575   BIND(&throw_exception);
5576   ThrowTypeError(context, MessageTemplate::kIncompatibleMethodReceiver,
5577                  StringConstant(method_name), value);
5578 
5579   BIND(&out);
5580 }
5581 
5582 void CodeStubAssembler::ThrowIfNotJSReceiver(TNode<Context> context,
5583                                              TNode<Object> value,
5584                                              MessageTemplate msg_template,
5585                                              const char* method_name) {
5586   Label done(this), throw_exception(this, Label::kDeferred);
5587 
5588   GotoIf(TaggedIsSmi(value), &throw_exception);
5589 
5590   // Load the instance type of the {value}.
5591   TNode<Map> value_map = LoadMap(CAST(value));
5592   const TNode<Uint16T> value_instance_type = LoadMapInstanceType(value_map);
5593 
5594   Branch(IsJSReceiverInstanceType(value_instance_type), &done,
5595          &throw_exception);
5596 
5597   // The {value} is not a compatible receiver for this method.
5598   BIND(&throw_exception);
5599   ThrowTypeError(context, msg_template, StringConstant(method_name), value);
5600 
5601   BIND(&done);
5602 }
5603 
5604 void CodeStubAssembler::ThrowIfNotCallable(TNode<Context> context,
5605                                            TNode<Object> value,
5606                                            const char* method_name) {
5607   Label out(this), throw_exception(this, Label::kDeferred);
5608 
5609   GotoIf(TaggedIsSmi(value), &throw_exception);
5610   Branch(IsCallable(CAST(value)), &out, &throw_exception);
5611 
5612   // The {value} is not a compatible receiver for this method.
5613   BIND(&throw_exception);
5614   ThrowTypeError(context, MessageTemplate::kCalledNonCallable, method_name);
5615 
5616   BIND(&out);
5617 }
5618 
5619 void CodeStubAssembler::ThrowRangeError(TNode<Context> context,
5620                                         MessageTemplate message,
5621                                         base::Optional<TNode<Object>> arg0,
5622                                         base::Optional<TNode<Object>> arg1,
5623                                         base::Optional<TNode<Object>> arg2) {
5624   TNode<Smi> template_index = SmiConstant(static_cast<int>(message));
5625   if (!arg0) {
5626     CallRuntime(Runtime::kThrowRangeError, context, template_index);
5627   } else if (!arg1) {
5628     CallRuntime(Runtime::kThrowRangeError, context, template_index, *arg0);
5629   } else if (!arg2) {
5630     CallRuntime(Runtime::kThrowRangeError, context, template_index, *arg0,
5631                 *arg1);
5632   } else {
5633     CallRuntime(Runtime::kThrowRangeError, context, template_index, *arg0,
5634                 *arg1, *arg2);
5635   }
5636   Unreachable();
5637 }
5638 
5639 void CodeStubAssembler::ThrowTypeError(TNode<Context> context,
5640                                        MessageTemplate message,
5641                                        char const* arg0, char const* arg1) {
5642   base::Optional<TNode<Object>> arg0_node;
5643   if (arg0) arg0_node = StringConstant(arg0);
5644   base::Optional<TNode<Object>> arg1_node;
5645   if (arg1) arg1_node = StringConstant(arg1);
5646   ThrowTypeError(context, message, arg0_node, arg1_node);
5647 }
5648 
5649 void CodeStubAssembler::ThrowTypeError(TNode<Context> context,
5650                                        MessageTemplate message,
5651                                        base::Optional<TNode<Object>> arg0,
5652                                        base::Optional<TNode<Object>> arg1,
5653                                        base::Optional<TNode<Object>> arg2) {
5654   TNode<Smi> template_index = SmiConstant(static_cast<int>(message));
5655   if (!arg0) {
5656     CallRuntime(Runtime::kThrowTypeError, context, template_index);
5657   } else if (!arg1) {
5658     CallRuntime(Runtime::kThrowTypeError, context, template_index, *arg0);
5659   } else if (!arg2) {
5660     CallRuntime(Runtime::kThrowTypeError, context, template_index, *arg0,
5661                 *arg1);
5662   } else {
5663     CallRuntime(Runtime::kThrowTypeError, context, template_index, *arg0, *arg1,
5664                 *arg2);
5665   }
5666   Unreachable();
5667 }
5668 
5669 TNode<BoolT> CodeStubAssembler::InstanceTypeEqual(
5670     SloppyTNode<Int32T> instance_type, int type) {
5671   return Word32Equal(instance_type, Int32Constant(type));
5672 }
5673 
5674 TNode<BoolT> CodeStubAssembler::IsDictionaryMap(TNode<Map> map) {
5675   return IsSetWord32<Map::Bits3::IsDictionaryMapBit>(LoadMapBitField3(map));
5676 }
5677 
5678 TNode<BoolT> CodeStubAssembler::IsExtensibleMap(TNode<Map> map) {
5679   return IsSetWord32<Map::Bits3::IsExtensibleBit>(LoadMapBitField3(map));
5680 }
5681 
5682 TNode<BoolT> CodeStubAssembler::IsExtensibleNonPrototypeMap(TNode<Map> map) {
5683   int kMask =
5684       Map::Bits3::IsExtensibleBit::kMask | Map::Bits3::IsPrototypeMapBit::kMask;
5685   int kExpected = Map::Bits3::IsExtensibleBit::kMask;
5686   return Word32Equal(Word32And(LoadMapBitField3(map), Int32Constant(kMask)),
5687                      Int32Constant(kExpected));
5688 }
5689 
5690 TNode<BoolT> CodeStubAssembler::IsCallableMap(TNode<Map> map) {
5691   return IsSetWord32<Map::Bits1::IsCallableBit>(LoadMapBitField(map));
5692 }
5693 
5694 TNode<BoolT> CodeStubAssembler::IsDeprecatedMap(TNode<Map> map) {
5695   return IsSetWord32<Map::Bits3::IsDeprecatedBit>(LoadMapBitField3(map));
5696 }
5697 
5698 TNode<BoolT> CodeStubAssembler::IsUndetectableMap(TNode<Map> map) {
5699   return IsSetWord32<Map::Bits1::IsUndetectableBit>(LoadMapBitField(map));
5700 }
5701 
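// Each protector predicate below loads the PropertyCell backing the protector
// and compares its value slot against Protectors::kProtectorInvalid; a match
// means the protector has been invalidated.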
5702 TNode<BoolT> CodeStubAssembler::IsNoElementsProtectorCellInvalid() {
5703   TNode<Smi> invalid = SmiConstant(Protectors::kProtectorInvalid);
5704   TNode<PropertyCell> cell = NoElementsProtectorConstant();
5705   TNode<Object> cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5706   return TaggedEqual(cell_value, invalid);
5707 }
5708 
5709 TNode<BoolT> CodeStubAssembler::IsArrayIteratorProtectorCellInvalid() {
5710   TNode<Smi> invalid = SmiConstant(Protectors::kProtectorInvalid);
5711   TNode<PropertyCell> cell = ArrayIteratorProtectorConstant();
5712   TNode<Object> cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5713   return TaggedEqual(cell_value, invalid);
5714 }
5715 
5716 TNode<BoolT> CodeStubAssembler::IsPromiseResolveProtectorCellInvalid() {
5717   TNode<Smi> invalid = SmiConstant(Protectors::kProtectorInvalid);
5718   TNode<PropertyCell> cell = PromiseResolveProtectorConstant();
5719   TNode<Object> cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5720   return TaggedEqual(cell_value, invalid);
5721 }
5722 
5723 TNode<BoolT> CodeStubAssembler::IsPromiseThenProtectorCellInvalid() {
5724   TNode<Smi> invalid = SmiConstant(Protectors::kProtectorInvalid);
5725   TNode<PropertyCell> cell = PromiseThenProtectorConstant();
5726   TNode<Object> cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5727   return TaggedEqual(cell_value, invalid);
5728 }
5729 
5730 TNode<BoolT> CodeStubAssembler::IsArraySpeciesProtectorCellInvalid() {
5731   TNode<Smi> invalid = SmiConstant(Protectors::kProtectorInvalid);
5732   TNode<PropertyCell> cell = ArraySpeciesProtectorConstant();
5733   TNode<Object> cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5734   return TaggedEqual(cell_value, invalid);
5735 }
5736 
5737 TNode<BoolT> CodeStubAssembler::IsTypedArraySpeciesProtectorCellInvalid() {
5738   TNode<Smi> invalid = SmiConstant(Protectors::kProtectorInvalid);
5739   TNode<PropertyCell> cell = TypedArraySpeciesProtectorConstant();
5740   TNode<Object> cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5741   return TaggedEqual(cell_value, invalid);
5742 }
5743 
5744 TNode<BoolT> CodeStubAssembler::IsRegExpSpeciesProtectorCellInvalid() {
5745   TNode<Smi> invalid = SmiConstant(Protectors::kProtectorInvalid);
5746   TNode<PropertyCell> cell = RegExpSpeciesProtectorConstant();
5747   TNode<Object> cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5748   return TaggedEqual(cell_value, invalid);
5749 }
5750 
5751 TNode<BoolT> CodeStubAssembler::IsPromiseSpeciesProtectorCellInvalid() {
5752   TNode<Smi> invalid = SmiConstant(Protectors::kProtectorInvalid);
5753   TNode<PropertyCell> cell = PromiseSpeciesProtectorConstant();
5754   TNode<Object> cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
5755   return TaggedEqual(cell_value, invalid);
5756 }
5757 
5758 TNode<BoolT> CodeStubAssembler::IsPrototypeInitialArrayPrototype(
5759     TNode<Context> context, TNode<Map> map) {
5760   const TNode<NativeContext> native_context = LoadNativeContext(context);
5761   const TNode<Object> initial_array_prototype = LoadContextElement(
5762       native_context, Context::INITIAL_ARRAY_PROTOTYPE_INDEX);
5763   TNode<HeapObject> proto = LoadMapPrototype(map);
5764   return TaggedEqual(proto, initial_array_prototype);
5765 }
5766 
5767 TNode<BoolT> CodeStubAssembler::IsPrototypeTypedArrayPrototype(
5768     TNode<Context> context, TNode<Map> map) {
5769   const TNode<NativeContext> native_context = LoadNativeContext(context);
5770   const TNode<Object> typed_array_prototype =
5771       LoadContextElement(native_context, Context::TYPED_ARRAY_PROTOTYPE_INDEX);
5772   TNode<HeapObject> proto = LoadMapPrototype(map);
5773   TNode<HeapObject> proto_of_proto = Select<HeapObject>(
5774       IsJSObject(proto), [=] { return LoadMapPrototype(LoadMap(proto)); },
5775       [=] { return NullConstant(); });
5776   return TaggedEqual(proto_of_proto, typed_array_prototype);
5777 }
5778 
5779 TNode<BoolT> CodeStubAssembler::IsFastAliasedArgumentsMap(
5780     TNode<Context> context, TNode<Map> map) {
5781   const TNode<NativeContext> native_context = LoadNativeContext(context);
5782   const TNode<Object> arguments_map = LoadContextElement(
5783       native_context, Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
5784   return TaggedEqual(arguments_map, map);
5785 }
5786 
5787 TNode<BoolT> CodeStubAssembler::IsSlowAliasedArgumentsMap(
5788     TNode<Context> context, TNode<Map> map) {
5789   const TNode<NativeContext> native_context = LoadNativeContext(context);
5790   const TNode<Object> arguments_map = LoadContextElement(
5791       native_context, Context::SLOW_ALIASED_ARGUMENTS_MAP_INDEX);
5792   return TaggedEqual(arguments_map, map);
5793 }
5794 
5795 TNode<BoolT> CodeStubAssembler::IsSloppyArgumentsMap(TNode<Context> context,
5796                                                      TNode<Map> map) {
5797   const TNode<NativeContext> native_context = LoadNativeContext(context);
5798   const TNode<Object> arguments_map =
5799       LoadContextElement(native_context, Context::SLOPPY_ARGUMENTS_MAP_INDEX);
5800   return TaggedEqual(arguments_map, map);
5801 }
5802 
5803 TNode<BoolT> CodeStubAssembler::IsStrictArgumentsMap(TNode<Context> context,
5804                                                      TNode<Map> map) {
5805   const TNode<NativeContext> native_context = LoadNativeContext(context);
5806   const TNode<Object> arguments_map =
5807       LoadContextElement(native_context, Context::STRICT_ARGUMENTS_MAP_INDEX);
5808   return TaggedEqual(arguments_map, map);
5809 }
5810 
5811 TNode<BoolT> CodeStubAssembler::TaggedIsCallable(TNode<Object> object) {
5812   return Select<BoolT>(
5813       TaggedIsSmi(object), [=] { return Int32FalseConstant(); },
5814       [=] {
5815         return IsCallableMap(LoadMap(UncheckedCast<HeapObject>(object)));
5816       });
5817 }
5818 
5819 TNode<BoolT> CodeStubAssembler::IsCallable(TNode<HeapObject> object) {
5820   return IsCallableMap(LoadMap(object));
5821 }
5822 
5823 TNode<BoolT> CodeStubAssembler::IsConstructorMap(TNode<Map> map) {
5824   return IsSetWord32<Map::Bits1::IsConstructorBit>(LoadMapBitField(map));
5825 }
5826 
5827 TNode<BoolT> CodeStubAssembler::IsConstructor(TNode<HeapObject> object) {
5828   return IsConstructorMap(LoadMap(object));
5829 }
5830 
5831 TNode<BoolT> CodeStubAssembler::IsFunctionWithPrototypeSlotMap(TNode<Map> map) {
5832   return IsSetWord32<Map::Bits1::HasPrototypeSlotBit>(LoadMapBitField(map));
5833 }
5834 
5835 TNode<BoolT> CodeStubAssembler::IsSpecialReceiverInstanceType(
5836     TNode<Int32T> instance_type) {
5837   STATIC_ASSERT(JS_GLOBAL_OBJECT_TYPE <= LAST_SPECIAL_RECEIVER_TYPE);
5838   return Int32LessThanOrEqual(instance_type,
5839                               Int32Constant(LAST_SPECIAL_RECEIVER_TYPE));
5840 }
5841 
5842 TNode<BoolT> CodeStubAssembler::IsCustomElementsReceiverInstanceType(
5843     TNode<Int32T> instance_type) {
5844   return Int32LessThanOrEqual(instance_type,
5845                               Int32Constant(LAST_CUSTOM_ELEMENTS_RECEIVER));
5846 }
5847 
5848 TNode<BoolT> CodeStubAssembler::IsStringInstanceType(
5849     SloppyTNode<Int32T> instance_type) {
5850   STATIC_ASSERT(INTERNALIZED_STRING_TYPE == FIRST_TYPE);
5851   return Int32LessThan(instance_type, Int32Constant(FIRST_NONSTRING_TYPE));
5852 }
5853 
5854 TNode<BoolT> CodeStubAssembler::IsOneByteStringInstanceType(
5855     TNode<Int32T> instance_type) {
5856   CSA_ASSERT(this, IsStringInstanceType(instance_type));
5857   return Word32Equal(
5858       Word32And(instance_type, Int32Constant(kStringEncodingMask)),
5859       Int32Constant(kOneByteStringTag));
5860 }
5861 
5862 TNode<BoolT> CodeStubAssembler::IsSequentialStringInstanceType(
5863     SloppyTNode<Int32T> instance_type) {
5864   CSA_ASSERT(this, IsStringInstanceType(instance_type));
5865   return Word32Equal(
5866       Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
5867       Int32Constant(kSeqStringTag));
5868 }
5869 
5870 TNode<BoolT> CodeStubAssembler::IsSeqOneByteStringInstanceType(
5871     TNode<Int32T> instance_type) {
5872   CSA_ASSERT(this, IsStringInstanceType(instance_type));
5873   return Word32Equal(
5874       Word32And(instance_type,
5875                 Int32Constant(kStringRepresentationMask | kStringEncodingMask)),
5876       Int32Constant(kSeqStringTag | kOneByteStringTag));
5877 }
5878 
5879 TNode<BoolT> CodeStubAssembler::IsConsStringInstanceType(
5880     SloppyTNode<Int32T> instance_type) {
5881   CSA_ASSERT(this, IsStringInstanceType(instance_type));
5882   return Word32Equal(
5883       Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
5884       Int32Constant(kConsStringTag));
5885 }
5886 
5887 TNode<BoolT> CodeStubAssembler::IsIndirectStringInstanceType(
5888     SloppyTNode<Int32T> instance_type) {
5889   CSA_ASSERT(this, IsStringInstanceType(instance_type));
5890   STATIC_ASSERT(kIsIndirectStringMask == 0x1);
5891   STATIC_ASSERT(kIsIndirectStringTag == 0x1);
5892   return UncheckedCast<BoolT>(
5893       Word32And(instance_type, Int32Constant(kIsIndirectStringMask)));
5894 }
5895 
5896 TNode<BoolT> CodeStubAssembler::IsExternalStringInstanceType(
5897     SloppyTNode<Int32T> instance_type) {
5898   CSA_ASSERT(this, IsStringInstanceType(instance_type));
5899   return Word32Equal(
5900       Word32And(instance_type, Int32Constant(kStringRepresentationMask)),
5901       Int32Constant(kExternalStringTag));
5902 }
5903 
5904 TNode<BoolT> CodeStubAssembler::IsUncachedExternalStringInstanceType(
5905     SloppyTNode<Int32T> instance_type) {
5906   CSA_ASSERT(this, IsStringInstanceType(instance_type));
5907   STATIC_ASSERT(kUncachedExternalStringTag != 0);
5908   return IsSetWord32(instance_type, kUncachedExternalStringMask);
5909 }
5910 
5911 TNode<BoolT> CodeStubAssembler::IsJSReceiverInstanceType(
5912     SloppyTNode<Int32T> instance_type) {
5913   STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
5914   return Int32GreaterThanOrEqual(instance_type,
5915                                  Int32Constant(FIRST_JS_RECEIVER_TYPE));
5916 }
5917 
5918 TNode<BoolT> CodeStubAssembler::IsJSReceiverMap(TNode<Map> map) {
5919   return IsJSReceiverInstanceType(LoadMapInstanceType(map));
5920 }
5921 
5922 TNode<BoolT> CodeStubAssembler::IsJSReceiver(TNode<HeapObject> object) {
5923   return IsJSReceiverMap(LoadMap(object));
5924 }
5925 
5926 TNode<BoolT> CodeStubAssembler::IsNullOrJSReceiver(TNode<HeapObject> object) {
5927   return UncheckedCast<BoolT>(Word32Or(IsJSReceiver(object), IsNull(object)));
5928 }
5929 
5930 TNode<BoolT> CodeStubAssembler::IsNullOrUndefined(SloppyTNode<Object> value) {
5931   return UncheckedCast<BoolT>(Word32Or(IsUndefined(value), IsNull(value)));
5932 }
5933 
5934 TNode<BoolT> CodeStubAssembler::IsJSGlobalProxyInstanceType(
5935     SloppyTNode<Int32T> instance_type) {
5936   return InstanceTypeEqual(instance_type, JS_GLOBAL_PROXY_TYPE);
5937 }
5938 
5939 TNode<BoolT> CodeStubAssembler::IsJSGlobalProxyMap(TNode<Map> map) {
5940   return IsJSGlobalProxyInstanceType(LoadMapInstanceType(map));
5941 }
5942 
5943 TNode<BoolT> CodeStubAssembler::IsJSGlobalProxy(TNode<HeapObject> object) {
5944   return IsJSGlobalProxyMap(LoadMap(object));
5945 }
5946 
5947 TNode<BoolT> CodeStubAssembler::IsJSGeneratorMap(TNode<Map> map) {
5948   return InstanceTypeEqual(LoadMapInstanceType(map), JS_GENERATOR_OBJECT_TYPE);
5949 }
5950 
5951 TNode<BoolT> CodeStubAssembler::IsJSObjectInstanceType(
5952     SloppyTNode<Int32T> instance_type) {
5953   STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
5954   return Int32GreaterThanOrEqual(instance_type,
5955                                  Int32Constant(FIRST_JS_OBJECT_TYPE));
5956 }
5957 
5958 TNode<BoolT> CodeStubAssembler::IsJSObjectMap(TNode<Map> map) {
5959   return IsJSObjectInstanceType(LoadMapInstanceType(map));
5960 }
5961 
5962 TNode<BoolT> CodeStubAssembler::IsJSObject(TNode<HeapObject> object) {
5963   return IsJSObjectMap(LoadMap(object));
5964 }
5965 
5966 TNode<BoolT> CodeStubAssembler::IsJSFinalizationRegistryMap(TNode<Map> map) {
5967   return InstanceTypeEqual(LoadMapInstanceType(map),
5968                            JS_FINALIZATION_REGISTRY_TYPE);
5969 }
5970 
5971 TNode<BoolT> CodeStubAssembler::IsJSFinalizationRegistry(
5972     TNode<HeapObject> object) {
5973   return IsJSFinalizationRegistryMap(LoadMap(object));
5974 }
5975 
5976 TNode<BoolT> CodeStubAssembler::IsJSPromiseMap(TNode<Map> map) {
5977   return InstanceTypeEqual(LoadMapInstanceType(map), JS_PROMISE_TYPE);
5978 }
5979 
5980 TNode<BoolT> CodeStubAssembler::IsJSPromise(TNode<HeapObject> object) {
5981   return IsJSPromiseMap(LoadMap(object));
5982 }
5983 
5984 TNode<BoolT> CodeStubAssembler::IsJSProxy(TNode<HeapObject> object) {
5985   return HasInstanceType(object, JS_PROXY_TYPE);
5986 }
5987 
5988 TNode<BoolT> CodeStubAssembler::IsJSStringIterator(TNode<HeapObject> object) {
5989   return HasInstanceType(object, JS_STRING_ITERATOR_TYPE);
5990 }
5991 
5992 TNode<BoolT> CodeStubAssembler::IsJSRegExpStringIterator(
5993     TNode<HeapObject> object) {
5994   return HasInstanceType(object, JS_REG_EXP_STRING_ITERATOR_TYPE);
5995 }
5996 
5997 TNode<BoolT> CodeStubAssembler::IsMap(TNode<HeapObject> map) {
5998   return IsMetaMap(LoadMap(map));
5999 }
6000 
6001 TNode<BoolT> CodeStubAssembler::IsJSPrimitiveWrapperInstanceType(
6002     SloppyTNode<Int32T> instance_type) {
6003   return InstanceTypeEqual(instance_type, JS_PRIMITIVE_WRAPPER_TYPE);
6004 }
6005 
6006 TNode<BoolT> CodeStubAssembler::IsJSPrimitiveWrapper(TNode<HeapObject> object) {
6007   return IsJSPrimitiveWrapperMap(LoadMap(object));
6008 }
6009 
6010 TNode<BoolT> CodeStubAssembler::IsJSPrimitiveWrapperMap(TNode<Map> map) {
6011   return IsJSPrimitiveWrapperInstanceType(LoadMapInstanceType(map));
6012 }
6013 
6014 TNode<BoolT> CodeStubAssembler::IsJSArrayInstanceType(
6015     SloppyTNode<Int32T> instance_type) {
6016   return InstanceTypeEqual(instance_type, JS_ARRAY_TYPE);
6017 }
6018 
6019 TNode<BoolT> CodeStubAssembler::IsJSArray(TNode<HeapObject> object) {
6020   return IsJSArrayMap(LoadMap(object));
6021 }
6022 
6023 TNode<BoolT> CodeStubAssembler::IsJSArrayMap(TNode<Map> map) {
6024   return IsJSArrayInstanceType(LoadMapInstanceType(map));
6025 }
6026 
6027 TNode<BoolT> CodeStubAssembler::IsJSArrayIterator(TNode<HeapObject> object) {
6028   return HasInstanceType(object, JS_ARRAY_ITERATOR_TYPE);
6029 }
6030 
6031 TNode<BoolT> CodeStubAssembler::IsJSAsyncGeneratorObject(
6032     TNode<HeapObject> object) {
6033   return HasInstanceType(object, JS_ASYNC_GENERATOR_OBJECT_TYPE);
6034 }
6035 
6036 TNode<BoolT> CodeStubAssembler::IsFixedArray(TNode<HeapObject> object) {
6037   return HasInstanceType(object, FIXED_ARRAY_TYPE);
6038 }
6039 
6040 TNode<BoolT> CodeStubAssembler::IsFixedArraySubclass(TNode<HeapObject> object) {
6041   TNode<Uint16T> instance_type = LoadInstanceType(object);
6042   return UncheckedCast<BoolT>(
6043       Word32And(Int32GreaterThanOrEqual(instance_type,
6044                                         Int32Constant(FIRST_FIXED_ARRAY_TYPE)),
6045                 Int32LessThanOrEqual(instance_type,
6046                                      Int32Constant(LAST_FIXED_ARRAY_TYPE))));
6047 }
6048 
6049 TNode<BoolT> CodeStubAssembler::IsNotWeakFixedArraySubclass(
6050     TNode<HeapObject> object) {
6051   TNode<Uint16T> instance_type = LoadInstanceType(object);
6052   return UncheckedCast<BoolT>(Word32Or(
6053       Int32LessThan(instance_type, Int32Constant(FIRST_WEAK_FIXED_ARRAY_TYPE)),
6054       Int32GreaterThan(instance_type,
6055                        Int32Constant(LAST_WEAK_FIXED_ARRAY_TYPE))));
6056 }
6057 
6058 TNode<BoolT> CodeStubAssembler::IsPropertyArray(TNode<HeapObject> object) {
6059   return HasInstanceType(object, PROPERTY_ARRAY_TYPE);
6060 }
6061 
6062 TNode<BoolT> CodeStubAssembler::IsPromiseReactionJobTask(
6063     TNode<HeapObject> object) {
6064   TNode<Uint16T> instance_type = LoadInstanceType(object);
6065   return IsInRange(instance_type, FIRST_PROMISE_REACTION_JOB_TASK_TYPE,
6066                    LAST_PROMISE_REACTION_JOB_TASK_TYPE);
6067 }
6068 
6069 // This complicated check is due to elements oddities. If a smi array is empty
6070 // after Array.p.shift, it is replaced by the empty array constant. If it is
6071 // later filled with a double element, we try to grow it but pass in a double
6072 // elements kind. Usually this would cause a size mismatch (since the source
6073 // fixed array has HOLEY_ELEMENTS and destination has
6074 // HOLEY_DOUBLE_ELEMENTS), but we don't have to worry about it when the
6075 // source array is empty.
6076 // TODO(jgruber): It might be worth creating an empty_double_array constant to
6077 // simplify this case.
6078 TNode<BoolT> CodeStubAssembler::IsFixedArrayWithKindOrEmpty(
6079     TNode<FixedArrayBase> object, ElementsKind kind) {
6080   Label out(this);
6081   TVARIABLE(BoolT, var_result, Int32TrueConstant());
6082 
6083   GotoIf(IsFixedArrayWithKind(object, kind), &out);
6084 
6085   const TNode<Smi> length = LoadFixedArrayBaseLength(object);
6086   GotoIf(SmiEqual(length, SmiConstant(0)), &out);
6087 
6088   var_result = Int32FalseConstant();
6089   Goto(&out);
6090 
6091   BIND(&out);
6092   return var_result.value();
6093 }
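// Usage sketch (hypothetical caller; the names below are illustrative): a
// helper that copies elements into a HOLEY_DOUBLE_ELEMENTS backing store
// could document its assumption with
//   CSA_SLOW_ASSERT(this, IsFixedArrayWithKindOrEmpty(source_elements,
//                                                     HOLEY_DOUBLE_ELEMENTS));
// which accepts both a FixedDoubleArray source and the empty FixedArray left
// behind by Array.prototype.shift, as described in the comment above.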
6094 
6095 TNode<BoolT> CodeStubAssembler::IsFixedArrayWithKind(TNode<HeapObject> object,
6096                                                      ElementsKind kind) {
6097   if (IsDoubleElementsKind(kind)) {
6098     return IsFixedDoubleArray(object);
6099   } else {
6100     DCHECK(IsSmiOrObjectElementsKind(kind) || IsSealedElementsKind(kind) ||
6101            IsNonextensibleElementsKind(kind));
6102     return IsFixedArraySubclass(object);
6103   }
6104 }
6105 
6106 TNode<BoolT> CodeStubAssembler::IsBoolean(TNode<HeapObject> object) {
6107   return IsBooleanMap(LoadMap(object));
6108 }
6109 
6110 TNode<BoolT> CodeStubAssembler::IsPropertyCell(TNode<HeapObject> object) {
6111   return IsPropertyCellMap(LoadMap(object));
6112 }
6113 
6114 TNode<BoolT> CodeStubAssembler::IsHeapNumberInstanceType(
6115     SloppyTNode<Int32T> instance_type) {
6116   return InstanceTypeEqual(instance_type, HEAP_NUMBER_TYPE);
6117 }
6118 
6119 TNode<BoolT> CodeStubAssembler::IsOddball(TNode<HeapObject> object) {
6120   return IsOddballInstanceType(LoadInstanceType(object));
6121 }
6122 
6123 TNode<BoolT> CodeStubAssembler::IsOddballInstanceType(
6124     SloppyTNode<Int32T> instance_type) {
6125   return InstanceTypeEqual(instance_type, ODDBALL_TYPE);
6126 }
6127 
6128 TNode<BoolT> CodeStubAssembler::IsName(TNode<HeapObject> object) {
6129   return IsNameInstanceType(LoadInstanceType(object));
6130 }
6131 
6132 TNode<BoolT> CodeStubAssembler::IsNameInstanceType(
6133     SloppyTNode<Int32T> instance_type) {
6134   return Int32LessThanOrEqual(instance_type, Int32Constant(LAST_NAME_TYPE));
6135 }
6136 
6137 TNode<BoolT> CodeStubAssembler::IsString(TNode<HeapObject> object) {
6138   return IsStringInstanceType(LoadInstanceType(object));
6139 }
6140 
6141 TNode<BoolT> CodeStubAssembler::IsSeqOneByteString(TNode<HeapObject> object) {
6142   return IsSeqOneByteStringInstanceType(LoadInstanceType(object));
6143 }
6144 
6145 TNode<BoolT> CodeStubAssembler::IsSymbolInstanceType(
6146     SloppyTNode<Int32T> instance_type) {
6147   return InstanceTypeEqual(instance_type, SYMBOL_TYPE);
6148 }
6149 
6150 TNode<BoolT> CodeStubAssembler::IsInternalizedStringInstanceType(
6151     TNode<Int32T> instance_type) {
6152   STATIC_ASSERT(kNotInternalizedTag != 0);
6153   return Word32Equal(
6154       Word32And(instance_type,
6155                 Int32Constant(kIsNotStringMask | kIsNotInternalizedMask)),
6156       Int32Constant(kStringTag | kInternalizedTag));
6157 }
6158 
6159 TNode<BoolT> CodeStubAssembler::IsUniqueName(TNode<HeapObject> object) {
6160   TNode<Uint16T> instance_type = LoadInstanceType(object);
6161   return Select<BoolT>(
6162       IsInternalizedStringInstanceType(instance_type),
6163       [=] { return Int32TrueConstant(); },
6164       [=] { return IsSymbolInstanceType(instance_type); });
6165 }
6166 
6167 // Semantics: guaranteed not to be an integer index (i.e. contains non-digit
6168 // characters, or is outside MAX_SAFE_INTEGER/size_t range). Note that for
6169 // non-TypedArray receivers, there are additional strings that must be treated
6170 // as named property keys, namely the range [0xFFFFFFFF, MAX_SAFE_INTEGER].
6171 TNode<BoolT> CodeStubAssembler::IsUniqueNameNoIndex(TNode<HeapObject> object) {
6172   TNode<Uint16T> instance_type = LoadInstanceType(object);
6173   return Select<BoolT>(
6174       IsInternalizedStringInstanceType(instance_type),
6175       [=] {
6176         return IsSetWord32(LoadNameHashField(CAST(object)),
6177                            Name::kIsNotIntegerIndexMask);
6178       },
6179       [=] { return IsSymbolInstanceType(instance_type); });
6180 }
6181 
6182 // Semantics: {object} is a Symbol, or a String that doesn't have a cached
6183 // index. This returns {true} for strings containing representations of
6184 // integers in the range above 9999999 (per kMaxCachedArrayIndexLength)
6185 // and below MAX_SAFE_INTEGER. For CSA_ASSERTs ensuring correct usage, this is
6186 // better than no checking; and we don't have a good/fast way to accurately
6187 // check such strings for being within "array index" (uint32_t) range.
6188 TNode<BoolT> CodeStubAssembler::IsUniqueNameNoCachedIndex(
6189     TNode<HeapObject> object) {
6190   TNode<Uint16T> instance_type = LoadInstanceType(object);
6191   return Select<BoolT>(
6192       IsInternalizedStringInstanceType(instance_type),
6193       [=] {
6194         return IsSetWord32(LoadNameHashField(CAST(object)),
6195                            Name::kDoesNotContainCachedArrayIndexMask);
6196       },
6197       [=] { return IsSymbolInstanceType(instance_type); });
6198 }
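// Illustrative contrast between the two predicates above, assuming the usual
// seven-digit limit on cached array indices: for an internalized string
// "1234567" both return false (it has a cached array index); for "12345678"
// (a valid array index, but too long to be cached) IsUniqueNameNoIndex
// returns false while IsUniqueNameNoCachedIndex returns true; for "foo" or
// any Symbol both return true.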
6199 
6200 TNode<BoolT> CodeStubAssembler::IsBigIntInstanceType(
6201     SloppyTNode<Int32T> instance_type) {
6202   return InstanceTypeEqual(instance_type, BIGINT_TYPE);
6203 }
6204 
6205 TNode<BoolT> CodeStubAssembler::IsBigInt(TNode<HeapObject> object) {
6206   return IsBigIntInstanceType(LoadInstanceType(object));
6207 }
6208 
6209 TNode<BoolT> CodeStubAssembler::IsPrimitiveInstanceType(
6210     SloppyTNode<Int32T> instance_type) {
6211   return Int32LessThanOrEqual(instance_type,
6212                               Int32Constant(LAST_PRIMITIVE_HEAP_OBJECT_TYPE));
6213 }
6214 
6215 TNode<BoolT> CodeStubAssembler::IsPrivateName(SloppyTNode<Symbol> symbol) {
6216   TNode<Uint32T> flags = LoadObjectField<Uint32T>(symbol, Symbol::kFlagsOffset);
6217   return IsSetWord32<Symbol::IsPrivateNameBit>(flags);
6218 }
6219 
6220 TNode<BoolT> CodeStubAssembler::IsHashTable(TNode<HeapObject> object) {
6221   TNode<Uint16T> instance_type = LoadInstanceType(object);
6222   return UncheckedCast<BoolT>(
6223       Word32And(Int32GreaterThanOrEqual(instance_type,
6224                                         Int32Constant(FIRST_HASH_TABLE_TYPE)),
6225                 Int32LessThanOrEqual(instance_type,
6226                                      Int32Constant(LAST_HASH_TABLE_TYPE))));
6227 }
6228 
6229 TNode<BoolT> CodeStubAssembler::IsEphemeronHashTable(TNode<HeapObject> object) {
6230   return HasInstanceType(object, EPHEMERON_HASH_TABLE_TYPE);
6231 }
6232 
6233 TNode<BoolT> CodeStubAssembler::IsNameDictionary(TNode<HeapObject> object) {
6234   return HasInstanceType(object, NAME_DICTIONARY_TYPE);
6235 }
6236 
6237 TNode<BoolT> CodeStubAssembler::IsGlobalDictionary(TNode<HeapObject> object) {
6238   return HasInstanceType(object, GLOBAL_DICTIONARY_TYPE);
6239 }
6240 
6241 TNode<BoolT> CodeStubAssembler::IsNumberDictionary(TNode<HeapObject> object) {
6242   return HasInstanceType(object, NUMBER_DICTIONARY_TYPE);
6243 }
6244 
6245 TNode<BoolT> CodeStubAssembler::IsJSGeneratorObject(TNode<HeapObject> object) {
6246   return HasInstanceType(object, JS_GENERATOR_OBJECT_TYPE);
6247 }
6248 
6249 TNode<BoolT> CodeStubAssembler::IsJSFunctionInstanceType(
6250     SloppyTNode<Int32T> instance_type) {
6251   return InstanceTypeEqual(instance_type, JS_FUNCTION_TYPE);
6252 }
6253 
6254 TNode<BoolT> CodeStubAssembler::IsJSFunction(TNode<HeapObject> object) {
6255   return IsJSFunctionMap(LoadMap(object));
6256 }
6257 
6258 TNode<BoolT> CodeStubAssembler::IsJSBoundFunction(TNode<HeapObject> object) {
6259   return HasInstanceType(object, JS_BOUND_FUNCTION_TYPE);
6260 }
6261 
6262 TNode<BoolT> CodeStubAssembler::IsJSFunctionMap(TNode<Map> map) {
6263   return IsJSFunctionInstanceType(LoadMapInstanceType(map));
6264 }
6265 
6266 TNode<BoolT> CodeStubAssembler::IsJSTypedArrayInstanceType(
6267     SloppyTNode<Int32T> instance_type) {
6268   return InstanceTypeEqual(instance_type, JS_TYPED_ARRAY_TYPE);
6269 }
6270 
6271 TNode<BoolT> CodeStubAssembler::IsJSTypedArrayMap(TNode<Map> map) {
6272   return IsJSTypedArrayInstanceType(LoadMapInstanceType(map));
6273 }
6274 
6275 TNode<BoolT> CodeStubAssembler::IsJSTypedArray(TNode<HeapObject> object) {
6276   return IsJSTypedArrayMap(LoadMap(object));
6277 }
6278 
6279 TNode<BoolT> CodeStubAssembler::IsJSArrayBuffer(TNode<HeapObject> object) {
6280   return HasInstanceType(object, JS_ARRAY_BUFFER_TYPE);
6281 }
6282 
6283 TNode<BoolT> CodeStubAssembler::IsJSDataView(TNode<HeapObject> object) {
6284   return HasInstanceType(object, JS_DATA_VIEW_TYPE);
6285 }
6286 
6287 TNode<BoolT> CodeStubAssembler::IsJSRegExp(TNode<HeapObject> object) {
6288   return HasInstanceType(object, JS_REG_EXP_TYPE);
6289 }
6290 
6291 TNode<BoolT> CodeStubAssembler::IsNumeric(SloppyTNode<Object> object) {
6292   return Select<BoolT>(
6293       TaggedIsSmi(object), [=] { return Int32TrueConstant(); },
6294       [=] {
6295         return UncheckedCast<BoolT>(
6296             Word32Or(IsHeapNumber(CAST(object)), IsBigInt(CAST(object))));
6297       });
6298 }
6299 
6300 TNode<BoolT> CodeStubAssembler::IsNumberNormalized(TNode<Number> number) {
6301   TVARIABLE(BoolT, var_result, Int32TrueConstant());
6302   Label out(this);
6303 
6304   GotoIf(TaggedIsSmi(number), &out);
6305 
6306   TNode<Float64T> value = LoadHeapNumberValue(CAST(number));
6307   TNode<Float64T> smi_min =
6308       Float64Constant(static_cast<double>(Smi::kMinValue));
6309   TNode<Float64T> smi_max =
6310       Float64Constant(static_cast<double>(Smi::kMaxValue));
6311 
6312   GotoIf(Float64LessThan(value, smi_min), &out);
6313   GotoIf(Float64GreaterThan(value, smi_max), &out);
6314   GotoIfNot(Float64Equal(value, value), &out);  // NaN.
6315 
6316   var_result = Int32FalseConstant();
6317   Goto(&out);
6318 
6319   BIND(&out);
6320   return var_result.value();
6321 }
6322 
6323 TNode<BoolT> CodeStubAssembler::IsNumberPositive(TNode<Number> number) {
6324   return Select<BoolT>(
6325       TaggedIsSmi(number), [=] { return TaggedIsPositiveSmi(number); },
6326       [=] { return IsHeapNumberPositive(CAST(number)); });
6327 }
6328 
6329 // TODO(cbruni): Use TNode<HeapNumber> instead of custom name.
6330 TNode<BoolT> CodeStubAssembler::IsHeapNumberPositive(TNode<HeapNumber> number) {
6331   TNode<Float64T> value = LoadHeapNumberValue(number);
6332   TNode<Float64T> float_zero = Float64Constant(0.);
6333   return Float64GreaterThanOrEqual(value, float_zero);
6334 }
6335 
6336 TNode<BoolT> CodeStubAssembler::IsNumberNonNegativeSafeInteger(
6337     TNode<Number> number) {
6338   return Select<BoolT>(
6339       // TODO(cbruni): Introduce TaggedIsNonNegativeSmi to avoid confusion.
6340       TaggedIsSmi(number), [=] { return TaggedIsPositiveSmi(number); },
6341       [=] {
6342         TNode<HeapNumber> heap_number = CAST(number);
6343         return Select<BoolT>(
6344             IsInteger(heap_number),
6345             [=] { return IsHeapNumberPositive(heap_number); },
6346             [=] { return Int32FalseConstant(); });
6347       });
6348 }
6349 
6350 TNode<BoolT> CodeStubAssembler::IsSafeInteger(TNode<Object> number) {
6351   return Select<BoolT>(
6352       TaggedIsSmi(number), [=] { return Int32TrueConstant(); },
6353       [=] {
6354         return Select<BoolT>(
6355             IsHeapNumber(CAST(number)),
6356             [=] { return IsSafeInteger(UncheckedCast<HeapNumber>(number)); },
6357             [=] { return Int32FalseConstant(); });
6358       });
6359 }
6360 
6361 TNode<BoolT> CodeStubAssembler::IsSafeInteger(TNode<HeapNumber> number) {
6362   // Load the actual value of {number}.
6363   TNode<Float64T> number_value = LoadHeapNumberValue(number);
6364   // Truncate the value of {number} to an integer (or an infinity).
6365   TNode<Float64T> integer = Float64Trunc(number_value);
6366 
6367   return Select<BoolT>(
6368       // Check if {number}'s value matches the integer (ruling out the
6369       // infinities).
6370       Float64Equal(Float64Sub(number_value, integer), Float64Constant(0.0)),
6371       [=] {
6372         // Check if the {integer} value is in safe integer range.
6373         return Float64LessThanOrEqual(Float64Abs(integer),
6374                                       Float64Constant(kMaxSafeInteger));
6375       },
6376       [=] { return Int32FalseConstant(); });
6377 }
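// Illustrative: kMaxSafeInteger (2^53 - 1) passes both checks above, while
// 2^53 fails the Float64Abs comparison and 1.5 fails the truncation check, so
// the latter two are rejected.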
6378 
6379 TNode<BoolT> CodeStubAssembler::IsInteger(TNode<Object> number) {
6380   return Select<BoolT>(
6381       TaggedIsSmi(number), [=] { return Int32TrueConstant(); },
6382       [=] {
6383         return Select<BoolT>(
6384             IsHeapNumber(CAST(number)),
6385             [=] { return IsInteger(UncheckedCast<HeapNumber>(number)); },
6386             [=] { return Int32FalseConstant(); });
6387       });
6388 }
6389 
6390 TNode<BoolT> CodeStubAssembler::IsInteger(TNode<HeapNumber> number) {
6391   TNode<Float64T> number_value = LoadHeapNumberValue(number);
6392   // Truncate the value of {number} to an integer (or an infinity).
6393   TNode<Float64T> integer = Float64Trunc(number_value);
6394   // Check if {number}'s value matches the integer (ruling out the infinities).
6395   return Float64Equal(Float64Sub(number_value, integer), Float64Constant(0.0));
6396 }
6397 
6398 TNode<BoolT> CodeStubAssembler::IsHeapNumberUint32(TNode<HeapNumber> number) {
6399   // Check that the HeapNumber is a valid uint32
6400   return Select<BoolT>(
6401       IsHeapNumberPositive(number),
6402       [=] {
6403         TNode<Float64T> value = LoadHeapNumberValue(number);
6404         TNode<Uint32T> int_value = TruncateFloat64ToWord32(value);
6405         return Float64Equal(value, ChangeUint32ToFloat64(int_value));
6406       },
6407       [=] { return Int32FalseConstant(); });
6408 }
6409 
6410 TNode<BoolT> CodeStubAssembler::IsNumberArrayIndex(TNode<Number> number) {
6411   return Select<BoolT>(
6412       TaggedIsSmi(number), [=] { return TaggedIsPositiveSmi(number); },
6413       [=] { return IsHeapNumberUint32(CAST(number)); });
6414 }
6415 
6416 template <typename TIndex>
6417 TNode<BoolT> CodeStubAssembler::FixedArraySizeDoesntFitInNewSpace(
6418     TNode<TIndex> element_count, int base_size) {
6419   static_assert(
6420       std::is_same<TIndex, Smi>::value || std::is_same<TIndex, IntPtrT>::value,
6421       "Only Smi or IntPtrT element_count is allowed");
6422   int max_newspace_elements =
6423       (kMaxRegularHeapObjectSize - base_size) / kTaggedSize;
6424   return IntPtrOrSmiGreaterThan(
6425       element_count, IntPtrOrSmiConstant<TIndex>(max_newspace_elements));
6426 }
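// Illustrative arithmetic (assumed values, for intuition only): if
// kMaxRegularHeapObjectSize were 128 KiB, kTaggedSize 8 bytes and base_size
// FixedArray::kHeaderSize (16 bytes), the threshold above would be
// (131072 - 16) / 8 = 16382 elements; callers use the true/false result to
// decide whether the backing store needs a slower, non-new-space allocation
// path.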
6427 
6428 TNode<Int32T> CodeStubAssembler::StringCharCodeAt(TNode<String> string,
6429                                                   TNode<UintPtrT> index) {
6430   CSA_ASSERT(this, UintPtrLessThan(index, LoadStringLengthAsWord(string)));
6431 
6432   TVARIABLE(Int32T, var_result);
6433 
6434   Label return_result(this), if_runtime(this, Label::kDeferred),
6435       if_stringistwobyte(this), if_stringisonebyte(this);
6436 
6437   ToDirectStringAssembler to_direct(state(), string);
6438   to_direct.TryToDirect(&if_runtime);
6439   const TNode<UintPtrT> offset =
6440       UintPtrAdd(index, Unsigned(to_direct.offset()));
6441   const TNode<Int32T> instance_type = to_direct.instance_type();
6442   const TNode<RawPtrT> string_data = to_direct.PointerToData(&if_runtime);
6443 
6444   // Check if the {string} is a TwoByteSeqString or a OneByteSeqString.
6445   Branch(IsOneByteStringInstanceType(instance_type), &if_stringisonebyte,
6446          &if_stringistwobyte);
6447 
6448   BIND(&if_stringisonebyte);
6449   {
6450     var_result = UncheckedCast<Int32T>(Load<Uint8T>(string_data, offset));
6451     Goto(&return_result);
6452   }
6453 
6454   BIND(&if_stringistwobyte);
6455   {
6456     var_result = UncheckedCast<Int32T>(
6457         Load<Uint16T>(string_data, WordShl(offset, IntPtrConstant(1))));
6458     Goto(&return_result);
6459   }
6460 
6461   BIND(&if_runtime);
6462   {
6463     TNode<Object> result =
6464         CallRuntime(Runtime::kStringCharCodeAt, NoContextConstant(), string,
6465                     ChangeUintPtrToTagged(index));
6466     var_result = SmiToInt32(CAST(result));
6467     Goto(&return_result);
6468   }
6469 
6470   BIND(&return_result);
6471   return var_result.value();
6472 }
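// Usage sketch (hypothetical caller): after bounds-checking {index} against
// LoadStringLengthAsWord(string), a builtin can fetch a single UTF-16 code
// unit with
//   TNode<Int32T> code = StringCharCodeAt(string, UintPtrConstant(0));
// The result is in the range [0, 0xFFFF]; surrogate pairs come back as two
// separate code units.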
6473 
6474 TNode<String> CodeStubAssembler::StringFromSingleCharCode(TNode<Int32T> code) {
6475   TVARIABLE(String, var_result);
6476 
6477   // Check if the {code} is a one-byte char code.
6478   Label if_codeisonebyte(this), if_codeistwobyte(this, Label::kDeferred),
6479       if_done(this);
6480   Branch(Int32LessThanOrEqual(code, Int32Constant(String::kMaxOneByteCharCode)),
6481          &if_codeisonebyte, &if_codeistwobyte);
6482   BIND(&if_codeisonebyte);
6483   {
6484     // Load the isolate wide single character string cache.
6485     TNode<FixedArray> cache = SingleCharacterStringCacheConstant();
6486     TNode<IntPtrT> code_index = Signed(ChangeUint32ToWord(code));
6487 
6488     // Check if we have an entry for the {code} in the single character string
6489     // cache already.
6490     Label if_entryisundefined(this, Label::kDeferred),
6491         if_entryisnotundefined(this);
6492     TNode<Object> entry = UnsafeLoadFixedArrayElement(cache, code_index);
6493     Branch(IsUndefined(entry), &if_entryisundefined, &if_entryisnotundefined);
6494 
6495     BIND(&if_entryisundefined);
6496     {
6497       // Allocate a new SeqOneByteString for {code} and store it in the {cache}.
6498       TNode<String> result = AllocateSeqOneByteString(1);
6499       StoreNoWriteBarrier(
6500           MachineRepresentation::kWord8, result,
6501           IntPtrConstant(SeqOneByteString::kHeaderSize - kHeapObjectTag), code);
6502       StoreFixedArrayElement(cache, code_index, result);
6503       var_result = result;
6504       Goto(&if_done);
6505     }
6506 
6507     BIND(&if_entryisnotundefined);
6508     {
6509       // Return the entry from the {cache}.
6510       var_result = CAST(entry);
6511       Goto(&if_done);
6512     }
6513   }
6514 
6515   BIND(&if_codeistwobyte);
6516   {
6517     // Allocate a new SeqTwoByteString for {code}.
6518     TNode<String> result = AllocateSeqTwoByteString(1);
6519     StoreNoWriteBarrier(
6520         MachineRepresentation::kWord16, result,
6521         IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag), code);
6522     var_result = result;
6523     Goto(&if_done);
6524   }
6525 
6526   BIND(&if_done);
6527   return var_result.value();
6528 }
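// Illustrative: the first call with a given one-byte code (say 'a') allocates
// a fresh SeqOneByteString and stores it in the single character string
// cache; later calls for the same code return the cached string. Codes above
// String::kMaxOneByteCharCode (e.g. 0x20AC) always allocate a new two-byte
// string and bypass the cache.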
6529 
6530 ToDirectStringAssembler::ToDirectStringAssembler(
6531     compiler::CodeAssemblerState* state, TNode<String> string, Flags flags)
6532     : CodeStubAssembler(state),
6533       var_string_(string, this),
6534       var_instance_type_(LoadInstanceType(string), this),
6535       var_offset_(IntPtrConstant(0), this),
6536       var_is_external_(Int32Constant(0), this),
6537       flags_(flags) {}
6538 
6539 TNode<String> ToDirectStringAssembler::TryToDirect(Label* if_bailout) {
6540   Label dispatch(this, {&var_string_, &var_offset_, &var_instance_type_});
6541   Label if_iscons(this);
6542   Label if_isexternal(this);
6543   Label if_issliced(this);
6544   Label if_isthin(this);
6545   Label out(this);
6546 
6547   Branch(IsSequentialStringInstanceType(var_instance_type_.value()), &out,
6548          &dispatch);
6549 
6550   // Dispatch based on string representation.
6551   BIND(&dispatch);
6552   {
6553     int32_t values[] = {
6554         kSeqStringTag,    kConsStringTag, kExternalStringTag,
6555         kSlicedStringTag, kThinStringTag,
6556     };
6557     Label* labels[] = {
6558         &out, &if_iscons, &if_isexternal, &if_issliced, &if_isthin,
6559     };
6560     STATIC_ASSERT(arraysize(values) == arraysize(labels));
6561 
6562     const TNode<Int32T> representation = Word32And(
6563         var_instance_type_.value(), Int32Constant(kStringRepresentationMask));
6564     Switch(representation, if_bailout, values, labels, arraysize(values));
6565   }
6566 
6567   // Cons string.  Check whether it is flat, then fetch first part.
6568   // Flat cons strings have an empty second part.
6569   BIND(&if_iscons);
6570   {
6571     const TNode<String> string = var_string_.value();
6572     GotoIfNot(IsEmptyString(
6573                   LoadObjectField<String>(string, ConsString::kSecondOffset)),
6574               if_bailout);
6575 
6576     const TNode<String> lhs =
6577         LoadObjectField<String>(string, ConsString::kFirstOffset);
6578     var_string_ = lhs;
6579     var_instance_type_ = LoadInstanceType(lhs);
6580 
6581     Goto(&dispatch);
6582   }
6583 
6584   // Sliced string. Fetch parent and correct start index by offset.
6585   BIND(&if_issliced);
6586   {
6587     if (!FLAG_string_slices || (flags_ & kDontUnpackSlicedStrings)) {
6588       Goto(if_bailout);
6589     } else {
6590       const TNode<String> string = var_string_.value();
6591       const TNode<IntPtrT> sliced_offset =
6592           LoadAndUntagObjectField(string, SlicedString::kOffsetOffset);
6593       var_offset_ = IntPtrAdd(var_offset_.value(), sliced_offset);
6594 
6595       const TNode<String> parent =
6596           LoadObjectField<String>(string, SlicedString::kParentOffset);
6597       var_string_ = parent;
6598       var_instance_type_ = LoadInstanceType(parent);
6599 
6600       Goto(&dispatch);
6601     }
6602   }
6603 
6604   // Thin string. Fetch the actual string.
6605   BIND(&if_isthin);
6606   {
6607     const TNode<String> string = var_string_.value();
6608     const TNode<String> actual_string =
6609         LoadObjectField<String>(string, ThinString::kActualOffset);
6610     const TNode<Uint16T> actual_instance_type = LoadInstanceType(actual_string);
6611 
6612     var_string_ = actual_string;
6613     var_instance_type_ = actual_instance_type;
6614 
6615     Goto(&dispatch);
6616   }
6617 
6618   // External string.
6619   BIND(&if_isexternal);
6620   var_is_external_ = Int32Constant(1);
6621   Goto(&out);
6622 
6623   BIND(&out);
6624   return var_string_.value();
6625 }
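// A typical use of this assembler appears in StringCharCodeAt above:
// construct a ToDirectStringAssembler for the string, call TryToDirect() with
// a bailout label, then read offset(), instance_type() and PointerToData() to
// access the flattened string payload.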
6626 
6627 TNode<RawPtrT> ToDirectStringAssembler::TryToSequential(
6628     StringPointerKind ptr_kind, Label* if_bailout) {
6629   CHECK(ptr_kind == PTR_TO_DATA || ptr_kind == PTR_TO_STRING);
6630 
6631   TVARIABLE(RawPtrT, var_result);
6632   Label out(this), if_issequential(this), if_isexternal(this, Label::kDeferred);
6633   Branch(is_external(), &if_isexternal, &if_issequential);
6634 
6635   BIND(&if_issequential);
6636   {
6637     STATIC_ASSERT(SeqOneByteString::kHeaderSize ==
6638                   SeqTwoByteString::kHeaderSize);
6639     TNode<RawPtrT> result =
6640         ReinterpretCast<RawPtrT>(BitcastTaggedToWord(var_string_.value()));
6641     if (ptr_kind == PTR_TO_DATA) {
6642       result = RawPtrAdd(result, IntPtrConstant(SeqOneByteString::kHeaderSize -
6643                                                 kHeapObjectTag));
6644     }
6645     var_result = result;
6646     Goto(&out);
6647   }
6648 
6649   BIND(&if_isexternal);
6650   {
6651     GotoIf(IsUncachedExternalStringInstanceType(var_instance_type_.value()),
6652            if_bailout);
6653 
6654     TNode<String> string = var_string_.value();
6655     TNode<RawPtrT> result = LoadExternalStringResourceDataPtr(CAST(string));
6656     if (ptr_kind == PTR_TO_STRING) {
6657       result = RawPtrSub(result, IntPtrConstant(SeqOneByteString::kHeaderSize -
6658                                                 kHeapObjectTag));
6659     }
6660     var_result = result;
6661     Goto(&out);
6662   }
6663 
6664   BIND(&out);
6665   return var_result.value();
6666 }
6667 
6668 TNode<Number> CodeStubAssembler::StringToNumber(TNode<String> input) {
6669   Label runtime(this, Label::kDeferred);
6670   Label end(this);
6671 
6672   TVARIABLE(Number, var_result);
6673 
6674   // Check if string has a cached array index.
6675   TNode<Uint32T> hash = LoadNameHashField(input);
6676   GotoIf(IsSetWord32(hash, Name::kDoesNotContainCachedArrayIndexMask),
6677          &runtime);
6678 
6679   var_result =
6680       SmiTag(Signed(DecodeWordFromWord32<String::ArrayIndexValueBits>(hash)));
6681   Goto(&end);
6682 
6683   BIND(&runtime);
6684   {
6685     var_result =
6686         CAST(CallRuntime(Runtime::kStringToNumber, NoContextConstant(), input));
6687     Goto(&end);
6688   }
6689 
6690   BIND(&end);
6691   return var_result.value();
6692 }
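// Illustrative: for a short integer-index string such as "123" whose hash
// field carries a cached array index, the fast path above decodes the Smi 123
// directly from the hash field; strings without a cached index (e.g. "1e3",
// or an index too long to be cached) fall through to
// Runtime::kStringToNumber.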
6693 
6694 TNode<String> CodeStubAssembler::NumberToString(TNode<Number> input,
6695                                                 Label* bailout) {
6696   TVARIABLE(String, result);
6697   TVARIABLE(Smi, smi_input);
6698   Label if_smi(this), if_heap_number(this), done(this, &result);
6699 
6700   // Load the number string cache.
6701   TNode<FixedArray> number_string_cache = NumberStringCacheConstant();
6702 
6703   // Make the hash mask from the length of the number string cache. It
6704   // contains two elements (number and string) for each cache entry.
6705   TNode<IntPtrT> number_string_cache_length =
6706       LoadAndUntagFixedArrayBaseLength(number_string_cache);
6707   TNode<Int32T> one = Int32Constant(1);
6708   TNode<Word32T> mask = Int32Sub(
6709       Word32Shr(TruncateWordToInt32(number_string_cache_length), one), one);
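  // Illustrative: for a cache FixedArray of length 2 * N (N entries, each a
  // (number, string) pair), the mask computed above is N - 1 and entry_index
  // below is 2 * (hash & (N - 1)), i.e. the index of an entry's key slot; the
  // cached string lives in the following slot at entry_index + 1.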
6710 
6711   GotoIfNot(TaggedIsSmi(input), &if_heap_number);
6712   smi_input = CAST(input);
6713   Goto(&if_smi);
6714 
6715   BIND(&if_heap_number);
6716   {
6717     Comment("NumberToString - HeapNumber");
6718     TNode<HeapNumber> heap_number_input = CAST(input);
6719     // Try normalizing the HeapNumber.
6720     TryHeapNumberToSmi(heap_number_input, &smi_input, &if_smi);
6721 
6722     // Make a hash from the two 32-bit values of the double.
6723     TNode<Int32T> low =
6724         LoadObjectField<Int32T>(heap_number_input, HeapNumber::kValueOffset);
6725     TNode<Int32T> high = LoadObjectField<Int32T>(
6726         heap_number_input, HeapNumber::kValueOffset + kIntSize);
6727     TNode<Word32T> hash = Word32And(Word32Xor(low, high), mask);
6728     TNode<IntPtrT> entry_index =
6729         Signed(ChangeUint32ToWord(Int32Add(hash, hash)));
6730 
6731     // Cache entry's key must be a heap number
6732     TNode<Object> number_key =
6733         UnsafeLoadFixedArrayElement(number_string_cache, entry_index);
6734     GotoIf(TaggedIsSmi(number_key), bailout);
6735     TNode<HeapObject> number_key_heap_object = CAST(number_key);
6736     GotoIfNot(IsHeapNumber(number_key_heap_object), bailout);
6737 
6738     // Cache entry's key must match the heap number value we're looking for.
6739     TNode<Int32T> low_compare = LoadObjectField<Int32T>(
6740         number_key_heap_object, HeapNumber::kValueOffset);
6741     TNode<Int32T> high_compare = LoadObjectField<Int32T>(
6742         number_key_heap_object, HeapNumber::kValueOffset + kIntSize);
6743     GotoIfNot(Word32Equal(low, low_compare), bailout);
6744     GotoIfNot(Word32Equal(high, high_compare), bailout);
6745 
6746     // Heap number match, return value from cache entry.
6747     result = CAST(UnsafeLoadFixedArrayElement(number_string_cache, entry_index,
6748                                               kTaggedSize));
6749     Goto(&done);
6750   }
6751 
6752   BIND(&if_smi);
6753   {
6754     Comment("NumberToString - Smi");
6755     // Load the smi key, make sure it matches the smi we're looking for.
6756     TNode<Word32T> hash = Word32And(SmiToInt32(smi_input.value()), mask);
6757     TNode<IntPtrT> entry_index =
6758         Signed(ChangeUint32ToWord(Int32Add(hash, hash)));
6759     TNode<Object> smi_key =
6760         UnsafeLoadFixedArrayElement(number_string_cache, entry_index);
6761     Label if_smi_cache_missed(this);
6762     GotoIf(TaggedNotEqual(smi_key, smi_input.value()), &if_smi_cache_missed);
6763 
6764     // Smi match, return value from cache entry.
6765     result = CAST(UnsafeLoadFixedArrayElement(number_string_cache, entry_index,
6766                                               kTaggedSize));
6767     Goto(&done);
6768 
6769     BIND(&if_smi_cache_missed);
6770     {
6771       Label store_to_cache(this);
6772 
6773       // Bailout when the cache is not full-size.
6774       const int kFullCacheSize =
6775           isolate()->heap()->MaxNumberToStringCacheSize();
6776       Branch(IntPtrLessThan(number_string_cache_length,
6777                             IntPtrConstant(kFullCacheSize)),
6778              bailout, &store_to_cache);
6779 
6780       BIND(&store_to_cache);
6781       {
6782         // Generate string and update string hash field.
6783         result = NumberToStringSmi(SmiToInt32(smi_input.value()),
6784                                    Int32Constant(10), bailout);
6785 
6786         // Store string into cache.
6787         StoreFixedArrayElement(number_string_cache, entry_index,
6788                                smi_input.value());
6789         StoreFixedArrayElement(number_string_cache,
6790                                IntPtrAdd(entry_index, IntPtrConstant(1)),
6791                                result.value());
6792         Goto(&done);
6793       }
6794     }
6795   }
6796   BIND(&done);
6797   return result.value();
6798 }
6799 
6800 TNode<String> CodeStubAssembler::NumberToString(TNode<Number> input) {
6801   TVARIABLE(String, result);
6802   Label runtime(this, Label::kDeferred), done(this, &result);
6803 
6804   GotoIfForceSlowPath(&runtime);
6805 
6806   result = NumberToString(input, &runtime);
6807   Goto(&done);
6808 
6809   BIND(&runtime);
6810   {
6811     // No cache entry, go to the runtime.
6812     result = CAST(
6813         CallRuntime(Runtime::kNumberToStringSlow, NoContextConstant(), input));
6814     Goto(&done);
6815   }
6816   BIND(&done);
6817   return result.value();
6818 }
6819 
6820 TNode<Numeric> CodeStubAssembler::NonNumberToNumberOrNumeric(
6821     TNode<Context> context, TNode<HeapObject> input, Object::Conversion mode,
6822     BigIntHandling bigint_handling) {
6823   CSA_ASSERT(this, Word32BinaryNot(IsHeapNumber(input)));
6824 
6825   TVARIABLE(HeapObject, var_input, input);
6826   TVARIABLE(Numeric, var_result);
6827   TVARIABLE(Uint16T, instance_type, LoadInstanceType(var_input.value()));
6828   Label end(this), if_inputisreceiver(this, Label::kDeferred),
6829       if_inputisnotreceiver(this);
6830 
6831   // We need to handle JSReceiver first since we might need to do two
6832   // conversions due to ToPrimitive.
6833   Branch(IsJSReceiverInstanceType(instance_type.value()), &if_inputisreceiver,
6834          &if_inputisnotreceiver);
6835 
6836   BIND(&if_inputisreceiver);
6837   {
6838     // The {var_input.value()} is a JSReceiver, we need to convert it to a
6839     // Primitive first using the ToPrimitive type conversion, preferably
6840     // yielding a Number.
6841     Callable callable = CodeFactory::NonPrimitiveToPrimitive(
6842         isolate(), ToPrimitiveHint::kNumber);
6843     TNode<Object> result = CallStub(callable, context, var_input.value());
6844 
6845     // Check if the {result} is already a Number/Numeric.
6846     Label if_done(this), if_notdone(this);
6847     Branch(mode == Object::Conversion::kToNumber ? IsNumber(result)
6848                                                  : IsNumeric(result),
6849            &if_done, &if_notdone);
6850 
6851     BIND(&if_done);
6852     {
6853       // The ToPrimitive conversion already gave us a Number/Numeric, so
6854       // we're done.
6855       var_result = CAST(result);
6856       Goto(&end);
6857     }
6858 
6859     BIND(&if_notdone);
6860     {
6861       // We now have a Primitive {result}, but it's not yet a
6862       // Number/Numeric.
6863       var_input = CAST(result);
6864       // We have a new input. Redo the check and reload instance_type.
6865       CSA_ASSERT(this, Word32BinaryNot(IsHeapNumber(var_input.value())));
6866       instance_type = LoadInstanceType(var_input.value());
6867       Goto(&if_inputisnotreceiver);
6868     }
6869   }
6870 
6871   BIND(&if_inputisnotreceiver);
6872   {
6873     Label not_plain_primitive(this), if_inputisbigint(this),
6874         if_inputisother(this, Label::kDeferred);
6875 
6876     // String and Oddball cases.
6877     TVARIABLE(Number, var_result_number);
6878     TryPlainPrimitiveNonNumberToNumber(var_input.value(), &var_result_number,
6879                                        &not_plain_primitive);
6880     var_result = var_result_number.value();
6881     Goto(&end);
6882 
6883     BIND(&not_plain_primitive);
6884     {
6885       Branch(IsBigIntInstanceType(instance_type.value()), &if_inputisbigint,
6886              &if_inputisother);
6887 
6888       BIND(&if_inputisbigint);
6889       {
6890         if (mode == Object::Conversion::kToNumeric) {
6891           var_result = CAST(var_input.value());
6892           Goto(&end);
6893         } else {
6894           DCHECK_EQ(mode, Object::Conversion::kToNumber);
6895           if (bigint_handling == BigIntHandling::kThrow) {
6896             Goto(&if_inputisother);
6897           } else {
6898             DCHECK_EQ(bigint_handling, BigIntHandling::kConvertToNumber);
6899             var_result = CAST(CallRuntime(Runtime::kBigIntToNumber, context,
6900                                           var_input.value()));
6901             Goto(&end);
6902           }
6903         }
6904       }
6905 
6906       BIND(&if_inputisother);
6907       {
6908         // The {var_input.value()} is something else (e.g. Symbol), let the
6909         // runtime figure out the correct exception. Note: We cannot tail call
6910         // to the runtime here, as js-to-wasm trampolines also use this code
6911         // currently, and they declare all outgoing parameters as untagged,
6912         // while we would push a tagged object here.
6913         auto function_id = mode == Object::Conversion::kToNumber
6914                                ? Runtime::kToNumber
6915                                : Runtime::kToNumeric;
6916         var_result = CAST(CallRuntime(function_id, context, var_input.value()));
6917         Goto(&end);
6918       }
6919     }
6920   }
6921 
6922   BIND(&end);
6923   if (mode == Object::Conversion::kToNumber) {
6924     CSA_ASSERT(this, IsNumber(var_result.value()));
6925   }
6926   return var_result.value();
6927 }
6928 
6929 TNode<Number> CodeStubAssembler::NonNumberToNumber(
6930     TNode<Context> context, TNode<HeapObject> input,
6931     BigIntHandling bigint_handling) {
6932   return CAST(NonNumberToNumberOrNumeric(
6933       context, input, Object::Conversion::kToNumber, bigint_handling));
6934 }
6935 
6936 void CodeStubAssembler::TryPlainPrimitiveNonNumberToNumber(
6937     TNode<HeapObject> input, TVariable<Number>* var_result, Label* if_bailout) {
6938   CSA_ASSERT(this, Word32BinaryNot(IsHeapNumber(input)));
6939   Label done(this);
6940 
6941   // Dispatch on the {input} instance type.
6942   TNode<Uint16T> input_instance_type = LoadInstanceType(input);
6943   Label if_inputisstring(this);
6944   GotoIf(IsStringInstanceType(input_instance_type), &if_inputisstring);
6945   GotoIfNot(InstanceTypeEqual(input_instance_type, ODDBALL_TYPE), if_bailout);
6946 
6947   // The {input} is an Oddball, we just need to load the Number value of it.
6948   *var_result = LoadObjectField<Number>(input, Oddball::kToNumberOffset);
6949   Goto(&done);
6950 
6951   BIND(&if_inputisstring);
6952   {
6953     // The {input} is a String, use the fast stub to convert it to a Number.
6954     *var_result = StringToNumber(CAST(input));
6955     Goto(&done);
6956   }
6957 
6958   BIND(&done);
6959 }
6960 
6961 TNode<Numeric> CodeStubAssembler::NonNumberToNumeric(TNode<Context> context,
6962                                                      TNode<HeapObject> input) {
6963   return NonNumberToNumberOrNumeric(context, input,
6964                                     Object::Conversion::kToNumeric);
6965 }
6966 
6967 TNode<Number> CodeStubAssembler::ToNumber_Inline(TNode<Context> context,
6968                                                  SloppyTNode<Object> input) {
6969   TVARIABLE(Number, var_result);
6970   Label end(this), not_smi(this, Label::kDeferred);
6971 
6972   GotoIfNot(TaggedIsSmi(input), &not_smi);
6973   var_result = CAST(input);
6974   Goto(&end);
6975 
6976   BIND(&not_smi);
6977   {
6978     var_result = Select<Number>(
6979         IsHeapNumber(CAST(input)), [=] { return CAST(input); },
6980         [=] {
6981           return CAST(
6982               CallBuiltin(Builtins::kNonNumberToNumber, context, input));
6983         });
6984     Goto(&end);
6985   }
6986 
6987   BIND(&end);
6988   return var_result.value();
6989 }
6990 
6991 TNode<Number> CodeStubAssembler::ToNumber(TNode<Context> context,
6992                                           SloppyTNode<Object> input,
6993                                           BigIntHandling bigint_handling) {
6994   TVARIABLE(Number, var_result);
6995   Label end(this);
6996 
6997   Label not_smi(this, Label::kDeferred);
6998   GotoIfNot(TaggedIsSmi(input), &not_smi);
6999   TNode<Smi> input_smi = CAST(input);
7000   var_result = input_smi;
7001   Goto(&end);
7002 
7003   BIND(&not_smi);
7004   {
7005     Label not_heap_number(this, Label::kDeferred);
7006     TNode<HeapObject> input_ho = CAST(input);
7007     GotoIfNot(IsHeapNumber(input_ho), &not_heap_number);
7008 
7009     TNode<HeapNumber> input_hn = CAST(input_ho);
7010     var_result = input_hn;
7011     Goto(&end);
7012 
7013     BIND(&not_heap_number);
7014     {
7015       var_result = NonNumberToNumber(context, input_ho, bigint_handling);
7016       Goto(&end);
7017     }
7018   }
7019 
7020   BIND(&end);
7021   return var_result.value();
7022 }
7023 
7024 TNode<Number> CodeStubAssembler::PlainPrimitiveToNumber(TNode<Object> input) {
7025   TVARIABLE(Number, var_result);
7026   Label end(this), fallback(this);
7027 
7028   Label not_smi(this, Label::kDeferred);
7029   GotoIfNot(TaggedIsSmi(input), &not_smi);
7030   TNode<Smi> input_smi = CAST(input);
7031   var_result = input_smi;
7032   Goto(&end);
7033 
7034   BIND(&not_smi);
7035   {
7036     Label not_heap_number(this, Label::kDeferred);
7037     TNode<HeapObject> input_ho = CAST(input);
7038     GotoIfNot(IsHeapNumber(input_ho), &not_heap_number);
7039 
7040     TNode<HeapNumber> input_hn = CAST(input_ho);
7041     var_result = input_hn;
7042     Goto(&end);
7043 
7044     BIND(&not_heap_number);
7045     {
7046       TryPlainPrimitiveNonNumberToNumber(input_ho, &var_result, &fallback);
7047       Goto(&end);
7048       BIND(&fallback);
7049       Unreachable();
7050     }
7051   }
7052 
7053   BIND(&end);
7054   return var_result.value();
7055 }
7056 
7057 TNode<BigInt> CodeStubAssembler::ToBigInt(TNode<Context> context,
7058                                           TNode<Object> input) {
7059   TVARIABLE(BigInt, var_result);
7060   Label if_bigint(this), done(this), if_throw(this);
7061 
7062   GotoIf(TaggedIsSmi(input), &if_throw);
7063   GotoIf(IsBigInt(CAST(input)), &if_bigint);
7064   var_result = CAST(CallRuntime(Runtime::kToBigInt, context, input));
7065   Goto(&done);
7066 
7067   BIND(&if_bigint);
7068   var_result = CAST(input);
7069   Goto(&done);
7070 
7071   BIND(&if_throw);
7072   ThrowTypeError(context, MessageTemplate::kBigIntFromObject, input);
7073 
7074   BIND(&done);
7075   return var_result.value();
7076 }
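// Illustrative: a Smi input (e.g. SmiConstant(1)) throws a TypeError here,
// since Numbers are never implicitly converted to BigInt; a BigInt input is
// returned unchanged, and everything else is delegated to Runtime::kToBigInt.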
7077 
7078 void CodeStubAssembler::TaggedToNumeric(TNode<Context> context,
7079                                         TNode<Object> value,
7080                                         TVariable<Numeric>* var_numeric) {
7081   TaggedToNumeric(context, value, var_numeric, nullptr);
7082 }
7083 
7084 void CodeStubAssembler::TaggedToNumericWithFeedback(
7085     TNode<Context> context, TNode<Object> value,
7086     TVariable<Numeric>* var_numeric, TVariable<Smi>* var_feedback) {
7087   DCHECK_NOT_NULL(var_feedback);
7088   TaggedToNumeric(context, value, var_numeric, var_feedback);
7089 }
7090 
7091 void CodeStubAssembler::TaggedToNumeric(TNode<Context> context,
7092                                         TNode<Object> value,
7093                                         TVariable<Numeric>* var_numeric,
7094                                         TVariable<Smi>* var_feedback) {
7095   Label done(this), if_smi(this), if_heapnumber(this), if_bigint(this),
7096       if_oddball(this);
7097   GotoIf(TaggedIsSmi(value), &if_smi);
7098   TNode<HeapObject> heap_object_value = CAST(value);
7099   TNode<Map> map = LoadMap(heap_object_value);
7100   GotoIf(IsHeapNumberMap(map), &if_heapnumber);
7101   TNode<Uint16T> instance_type = LoadMapInstanceType(map);
7102   GotoIf(IsBigIntInstanceType(instance_type), &if_bigint);
7103 
7104   // {heap_object_value} is not a Numeric yet.
7105   GotoIf(Word32Equal(instance_type, Int32Constant(ODDBALL_TYPE)), &if_oddball);
7106   *var_numeric = CAST(
7107       CallBuiltin(Builtins::kNonNumberToNumeric, context, heap_object_value));
7108   OverwriteFeedback(var_feedback, BinaryOperationFeedback::kAny);
7109   Goto(&done);
7110 
7111   BIND(&if_smi);
7112   *var_numeric = CAST(value);
7113   OverwriteFeedback(var_feedback, BinaryOperationFeedback::kSignedSmall);
7114   Goto(&done);
7115 
7116   BIND(&if_heapnumber);
7117   *var_numeric = CAST(value);
7118   OverwriteFeedback(var_feedback, BinaryOperationFeedback::kNumber);
7119   Goto(&done);
7120 
7121   BIND(&if_bigint);
7122   *var_numeric = CAST(value);
7123   OverwriteFeedback(var_feedback, BinaryOperationFeedback::kBigInt);
7124   Goto(&done);
7125 
7126   BIND(&if_oddball);
7127   OverwriteFeedback(var_feedback, BinaryOperationFeedback::kNumberOrOddball);
7128   *var_numeric =
7129       CAST(LoadObjectField(heap_object_value, Oddball::kToNumberOffset));
7130   Goto(&done);
7131 
7132   Bind(&done);
7133 }
7134 
7135 // ES#sec-touint32
7136 TNode<Number> CodeStubAssembler::ToUint32(TNode<Context> context,
7137                                           SloppyTNode<Object> input) {
7138   const TNode<Float64T> float_zero = Float64Constant(0.0);
7139   const TNode<Float64T> float_two_32 =
7140       Float64Constant(static_cast<double>(1ULL << 32));
7141 
7142   Label out(this);
7143 
7144   TVARIABLE(Object, var_result, input);
7145 
7146   // Early exit for positive smis.
7147   {
7148     // TODO(jgruber): This branch and the recheck below can be removed once we
7149     // have a ToNumber with multiple exits.
7150     Label next(this, Label::kDeferred);
7151     Branch(TaggedIsPositiveSmi(input), &out, &next);
7152     BIND(&next);
7153   }
7154 
7155   const TNode<Number> number = ToNumber(context, input);
7156   var_result = number;
7157 
7158   // Perhaps we have a positive smi now.
7159   {
7160     Label next(this, Label::kDeferred);
7161     Branch(TaggedIsPositiveSmi(number), &out, &next);
7162     BIND(&next);
7163   }
7164 
7165   Label if_isnegativesmi(this), if_isheapnumber(this);
7166   Branch(TaggedIsSmi(number), &if_isnegativesmi, &if_isheapnumber);
7167 
7168   BIND(&if_isnegativesmi);
7169   {
7170     const TNode<Int32T> uint32_value = SmiToInt32(CAST(number));
7171     TNode<Float64T> float64_value = ChangeUint32ToFloat64(uint32_value);
7172     var_result = AllocateHeapNumberWithValue(float64_value);
7173     Goto(&out);
7174   }
7175 
7176   BIND(&if_isheapnumber);
7177   {
7178     Label return_zero(this);
7179     const TNode<Float64T> value = LoadHeapNumberValue(CAST(number));
7180 
7181     {
7182       // +-0.
7183       Label next(this);
7184       Branch(Float64Equal(value, float_zero), &return_zero, &next);
7185       BIND(&next);
7186     }
7187 
7188     {
7189       // NaN.
7190       Label next(this);
7191       Branch(Float64Equal(value, value), &next, &return_zero);
7192       BIND(&next);
7193     }
7194 
7195     {
7196       // +Infinity.
7197       Label next(this);
7198       const TNode<Float64T> positive_infinity =
7199           Float64Constant(std::numeric_limits<double>::infinity());
7200       Branch(Float64Equal(value, positive_infinity), &return_zero, &next);
7201       BIND(&next);
7202     }
7203 
7204     {
7205       // -Infinity.
7206       Label next(this);
7207       const TNode<Float64T> negative_infinity =
7208           Float64Constant(-1.0 * std::numeric_limits<double>::infinity());
7209       Branch(Float64Equal(value, negative_infinity), &return_zero, &next);
7210       BIND(&next);
7211     }
7212 
7213     // * Let int be the mathematical value that is the same sign as number and
7214     //   whose magnitude is floor(abs(number)).
7215     // * Let int32bit be int modulo 2^32.
7216     // * Return int32bit.
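    // Worked example (illustrative): for number = -1.5, Float64Trunc yields
    // -1.0, and ((-1.0 mod 2^32) + 2^32) mod 2^32 = 4294967295, so the result
    // below is the tagged number 4294967295 (0xFFFFFFFF), as the spec
    // requires for ToUint32(-1.5).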
7217     {
7218       TNode<Float64T> x = Float64Trunc(value);
7219       x = Float64Mod(x, float_two_32);
7220       x = Float64Add(x, float_two_32);
7221       x = Float64Mod(x, float_two_32);
7222 
7223       const TNode<Number> result = ChangeFloat64ToTagged(x);
7224       var_result = result;
7225       Goto(&out);
7226     }
7227 
7228     BIND(&return_zero);
7229     {
7230       var_result = SmiConstant(0);
7231       Goto(&out);
7232     }
7233   }
7234 
7235   BIND(&out);
7236   return CAST(var_result.value());
7237 }
7238 
7239 TNode<String> CodeStubAssembler::ToString_Inline(TNode<Context> context,
7240                                                  SloppyTNode<Object> input) {
7241   TVARIABLE(Object, var_result, input);
7242   Label stub_call(this, Label::kDeferred), out(this);
7243 
7244   GotoIf(TaggedIsSmi(input), &stub_call);
7245   Branch(IsString(CAST(input)), &out, &stub_call);
7246 
7247   BIND(&stub_call);
7248   var_result = CallBuiltin(Builtins::kToString, context, input);
7249   Goto(&out);
7250 
7251   BIND(&out);
7252   return CAST(var_result.value());
7253 }
7254 
7255 TNode<JSReceiver> CodeStubAssembler::ToObject(TNode<Context> context,
7256                                               SloppyTNode<Object> input) {
7257   return CAST(CallBuiltin(Builtins::kToObject, context, input));
7258 }
7259 
7260 TNode<JSReceiver> CodeStubAssembler::ToObject_Inline(TNode<Context> context,
7261                                                      TNode<Object> input) {
7262   TVARIABLE(JSReceiver, result);
7263   Label if_isreceiver(this), if_isnotreceiver(this, Label::kDeferred);
7264   Label done(this);
7265 
7266   BranchIfJSReceiver(input, &if_isreceiver, &if_isnotreceiver);
7267 
7268   BIND(&if_isreceiver);
7269   {
7270     result = CAST(input);
7271     Goto(&done);
7272   }
7273 
7274   BIND(&if_isnotreceiver);
7275   {
7276     result = ToObject(context, input);
7277     Goto(&done);
7278   }
7279 
7280   BIND(&done);
7281   return result.value();
7282 }
7283 
7284 TNode<Number> CodeStubAssembler::ToLength_Inline(TNode<Context> context,
7285                                                  SloppyTNode<Object> input) {
7286   TNode<Smi> smi_zero = SmiConstant(0);
7287   return Select<Number>(
7288       TaggedIsSmi(input), [=] { return SmiMax(CAST(input), smi_zero); },
7289       [=] { return CAST(CallBuiltin(Builtins::kToLength, context, input)); });
7290 }
7291 
7292 TNode<Object> CodeStubAssembler::OrdinaryToPrimitive(
7293     TNode<Context> context, TNode<Object> input, OrdinaryToPrimitiveHint hint) {
7294   Callable callable = CodeFactory::OrdinaryToPrimitive(isolate(), hint);
7295   return CallStub(callable, context, input);
7296 }
7297 
7298 TNode<Uint32T> CodeStubAssembler::DecodeWord32(TNode<Word32T> word32,
7299                                                uint32_t shift, uint32_t mask) {
7300   DCHECK_EQ((mask >> shift) << shift, mask);
7301   return Unsigned(Word32And(Word32Shr(word32, static_cast<int>(shift)),
7302                             Int32Constant(mask >> shift)));
7303 }
7304 
7305 TNode<UintPtrT> CodeStubAssembler::DecodeWord(SloppyTNode<WordT> word,
7306                                               uint32_t shift, uintptr_t mask) {
7307   DCHECK_EQ((mask >> shift) << shift, mask);
7308   return Unsigned(WordAnd(WordShr(word, static_cast<int>(shift)),
7309                           IntPtrConstant(mask >> shift)));
7310 }
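// Worked example for the decode helpers: with shift == 3 and mask == 0x18
// (bits 3 and 4), decoding the word 0b10110 yields
// (0b10110 >> 3) & (0x18 >> 3) == 0b10 == 2. These overloads are usually
// reached through the BitField-templated DecodeWord32<T>/DecodeWord<T>
// wrappers which, assuming the declarations in code-stub-assembler.h, pass
// T::kShift and T::kMask here.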
7311 
7312 TNode<Word32T> CodeStubAssembler::UpdateWord32(TNode<Word32T> word,
7313                                                TNode<Uint32T> value,
7314                                                uint32_t shift, uint32_t mask,
7315                                                bool starts_as_zero) {
7316   DCHECK_EQ((mask >> shift) << shift, mask);
7317   // Ensure the {value} fits fully in the mask.
7318   CSA_ASSERT(this, Uint32LessThanOrEqual(value, Uint32Constant(mask >> shift)));
7319   TNode<Word32T> encoded_value = Word32Shl(value, Int32Constant(shift));
7320   TNode<Word32T> masked_word;
7321   if (starts_as_zero) {
7322     CSA_ASSERT(this, Word32Equal(Word32And(word, Int32Constant(~mask)), word));
7323     masked_word = word;
7324   } else {
7325     masked_word = Word32And(word, Int32Constant(~mask));
7326   }
7327   return Word32Or(masked_word, encoded_value);
7328 }
7329 
7330 TNode<WordT> CodeStubAssembler::UpdateWord(TNode<WordT> word,
7331                                            TNode<UintPtrT> value,
7332                                            uint32_t shift, uintptr_t mask,
7333                                            bool starts_as_zero) {
7334   DCHECK_EQ((mask >> shift) << shift, mask);
7335   // Ensure the {value} fits fully in the mask.
7336   CSA_ASSERT(this,
7337              UintPtrLessThanOrEqual(value, UintPtrConstant(mask >> shift)));
7338   TNode<WordT> encoded_value = WordShl(value, static_cast<int>(shift));
7339   TNode<WordT> masked_word;
7340   if (starts_as_zero) {
7341     CSA_ASSERT(this, WordEqual(WordAnd(word, UintPtrConstant(~mask)), word));
7342     masked_word = word;
7343   } else {
7344     masked_word = WordAnd(word, UintPtrConstant(~mask));
7345   }
7346   return WordOr(masked_word, encoded_value);
7347 }
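// Worked example for the update helpers: writing value == 1 into the same
// bits-3..4 field of word == 0b10110 first clears the field
// (masked_word == 0b00110), shifts the new value into place
// (encoded_value == 0b01000), and ORs them together to give 0b01110. With
// starts_as_zero == true the clearing step is skipped and merely asserted.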
7348 
7349 void CodeStubAssembler::SetCounter(StatsCounter* counter, int value) {
7350   if (FLAG_native_code_counters && counter->Enabled()) {
7351     TNode<ExternalReference> counter_address =
7352         ExternalConstant(ExternalReference::Create(counter));
7353     StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address,
7354                         Int32Constant(value));
7355   }
7356 }
7357 
7358 void CodeStubAssembler::IncrementCounter(StatsCounter* counter, int delta) {
7359   DCHECK_GT(delta, 0);
7360   if (FLAG_native_code_counters && counter->Enabled()) {
7361     TNode<ExternalReference> counter_address =
7362         ExternalConstant(ExternalReference::Create(counter));
7363     // This operation has to be exactly 32-bit wide in case the external
7364     // reference table redirects the counter to a uint32_t dummy_stats_counter_
7365     // field.
7366     TNode<Int32T> value = Load<Int32T>(counter_address);
7367     value = Int32Add(value, Int32Constant(delta));
7368     StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
7369   }
7370 }
7371 
7372 void CodeStubAssembler::DecrementCounter(StatsCounter* counter, int delta) {
7373   DCHECK_GT(delta, 0);
7374   if (FLAG_native_code_counters && counter->Enabled()) {
7375     TNode<ExternalReference> counter_address =
7376         ExternalConstant(ExternalReference::Create(counter));
7377     // This operation has to be exactly 32-bit wide in case the external
7378     // reference table redirects the counter to a uint32_t dummy_stats_counter_
7379     // field.
7380     TNode<Int32T> value = Load<Int32T>(counter_address);
7381     value = Int32Sub(value, Int32Constant(delta));
7382     StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
7383   }
7384 }
7385 
7386 template <typename TIndex>
7387 void CodeStubAssembler::Increment(TVariable<TIndex>* variable, int value) {
7388   *variable =
7389       IntPtrOrSmiAdd(variable->value(), IntPtrOrSmiConstant<TIndex>(value));
7390 }
7391 
7392 // Instantiate Increment for Smi, IntPtrT and RawPtrT.
7393 // TODO(v8:9708): Consider renaming to [Smi|IntPtrT|RawPtrT]Increment.
7394 template void CodeStubAssembler::Increment<Smi>(TVariable<Smi>* variable,
7395                                                 int value);
7396 template void CodeStubAssembler::Increment<IntPtrT>(
7397     TVariable<IntPtrT>* variable, int value);
7398 template void CodeStubAssembler::Increment<RawPtrT>(
7399     TVariable<RawPtrT>* variable, int value);
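// Illustrative usage: the dictionary probe loops below advance their counters
// with the single-argument form, e.g. Increment(&var_count), which relies on
// the default value argument (assumed here, per the header declaration, to be
// 1) so each probe step grows by one.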
7400 
7401 void CodeStubAssembler::Use(Label* label) {
7402   GotoIf(Word32Equal(Int32Constant(0), Int32Constant(1)), label);
7403 }
7404 
7405 void CodeStubAssembler::TryToName(SloppyTNode<Object> key, Label* if_keyisindex,
7406                                   TVariable<IntPtrT>* var_index,
7407                                   Label* if_keyisunique,
7408                                   TVariable<Name>* var_unique,
7409                                   Label* if_bailout,
7410                                   Label* if_notinternalized) {
7411   Comment("TryToName");
7412 
7413   TVARIABLE(Int32T, var_instance_type);
7414   Label if_keyisnotindex(this);
7415   *var_index = TryToIntptr(key, &if_keyisnotindex, &var_instance_type);
7416   Goto(if_keyisindex);
7417 
7418   BIND(&if_keyisnotindex);
7419   {
7420     Label if_symbol(this), if_string(this),
7421         if_keyisother(this, Label::kDeferred);
7422 
7423     // Symbols are unique.
7424     GotoIf(IsSymbolInstanceType(var_instance_type.value()), &if_symbol);
7425 
7426     // Miss if |key| is not a String.
7427     STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
7428     Branch(IsStringInstanceType(var_instance_type.value()), &if_string,
7429            &if_keyisother);
7430 
7431     // Symbols are unique.
7432     BIND(&if_symbol);
7433     {
7434       *var_unique = CAST(key);
7435       Goto(if_keyisunique);
7436     }
7437 
7438     BIND(&if_string);
7439     {
7440       Label if_thinstring(this), if_has_cached_index(this);
7441 
7442       TNode<Uint32T> hash = LoadNameHashField(CAST(key));
7443       GotoIf(IsClearWord32(hash, Name::kDoesNotContainCachedArrayIndexMask),
7444              &if_has_cached_index);
7445       // No cached array index. If the string knows that it contains an index,
7446       // then it must be an uncacheable index. Handle this case in the runtime.
7447       GotoIf(IsClearWord32(hash, Name::kIsNotIntegerIndexMask), if_bailout);
7448 
7449       GotoIf(InstanceTypeEqual(var_instance_type.value(), THIN_STRING_TYPE),
7450              &if_thinstring);
7451       GotoIf(InstanceTypeEqual(var_instance_type.value(),
7452                                THIN_ONE_BYTE_STRING_TYPE),
7453              &if_thinstring);
7454       // Finally, check if |key| is internalized.
7455       STATIC_ASSERT(kNotInternalizedTag != 0);
7456       GotoIf(IsSetWord32(var_instance_type.value(), kIsNotInternalizedMask),
7457              if_notinternalized != nullptr ? if_notinternalized : if_bailout);
7458 
7459       *var_unique = CAST(key);
7460       Goto(if_keyisunique);
7461 
7462       BIND(&if_thinstring);
7463       {
7464         *var_unique =
7465             LoadObjectField<String>(CAST(key), ThinString::kActualOffset);
7466         Goto(if_keyisunique);
7467       }
7468 
7469       BIND(&if_has_cached_index);
7470       {
7471         TNode<IntPtrT> index =
7472             Signed(DecodeWordFromWord32<String::ArrayIndexValueBits>(hash));
7473         CSA_ASSERT(this, IntPtrLessThan(index, IntPtrConstant(INT_MAX)));
7474         *var_index = index;
7475         Goto(if_keyisindex);
7476       }
7477     }
7478 
7479     BIND(&if_keyisother);
7480     {
7481       GotoIfNot(InstanceTypeEqual(var_instance_type.value(), ODDBALL_TYPE),
7482                 if_bailout);
7483       *var_unique =
7484           LoadObjectField<String>(CAST(key), Oddball::kToStringOffset);
7485       Goto(if_keyisunique);
7486     }
7487   }
7488 }
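// To summarize the dispatch above: |key| ends up either as an integer index
// (array indices and strings with a cached array index), as a unique name
// (symbols, internalized strings, the actual string behind a ThinString, and
// oddball ToString values), or at one of the bailout labels for anything that
// must be handled in the runtime.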
7489 
7490 void CodeStubAssembler::TryInternalizeString(
7491     TNode<String> string, Label* if_index, TVariable<IntPtrT>* var_index,
7492     Label* if_internalized, TVariable<Name>* var_internalized,
7493     Label* if_not_internalized, Label* if_bailout) {
7494   TNode<ExternalReference> function = ExternalConstant(
7495       ExternalReference::try_string_to_index_or_lookup_existing());
7496   const TNode<ExternalReference> isolate_ptr =
7497       ExternalConstant(ExternalReference::isolate_address(isolate()));
7498   TNode<Object> result =
7499       CAST(CallCFunction(function, MachineType::AnyTagged(),
7500                          std::make_pair(MachineType::Pointer(), isolate_ptr),
7501                          std::make_pair(MachineType::AnyTagged(), string)));
7502   Label internalized(this);
7503   GotoIf(TaggedIsNotSmi(result), &internalized);
7504   TNode<IntPtrT> word_result = SmiUntag(CAST(result));
7505   GotoIf(IntPtrEqual(word_result, IntPtrConstant(ResultSentinel::kNotFound)),
7506          if_not_internalized);
7507   GotoIf(IntPtrEqual(word_result, IntPtrConstant(ResultSentinel::kUnsupported)),
7508          if_bailout);
7509   *var_index = word_result;
7510   Goto(if_index);
7511 
7512   BIND(&internalized);
7513   *var_internalized = CAST(result);
7514   Goto(if_internalized);
7515 }
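// The C call above returns either a Smi (an array index, or one of the
// ResultSentinel values kNotFound / kUnsupported) or an existing internalized
// string, which is why the result is first classified with TaggedIsNotSmi
// before the sentinel comparisons.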
7516 
7517 template <typename Dictionary>
7518 TNode<IntPtrT> CodeStubAssembler::EntryToIndex(TNode<IntPtrT> entry,
7519                                                int field_index) {
7520   TNode<IntPtrT> entry_index =
7521       IntPtrMul(entry, IntPtrConstant(Dictionary::kEntrySize));
7522   return IntPtrAdd(entry_index, IntPtrConstant(Dictionary::kElementsStartIndex +
7523                                                field_index));
7524 }
7525 
7526 template <typename T>
7527 TNode<T> CodeStubAssembler::LoadDescriptorArrayElement(
7528     TNode<DescriptorArray> object, TNode<IntPtrT> index,
7529     int additional_offset) {
7530   return LoadArrayElement<DescriptorArray, IntPtrT, T>(
7531       object, DescriptorArray::kHeaderSize, index, additional_offset);
7532 }
7533 
7534 TNode<Name> CodeStubAssembler::LoadKeyByKeyIndex(
7535     TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
7536   return CAST(LoadDescriptorArrayElement<HeapObject>(container, key_index, 0));
7537 }
7538 
7539 TNode<Uint32T> CodeStubAssembler::LoadDetailsByKeyIndex(
7540     TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
7541   const int kKeyToDetailsOffset =
7542       DescriptorArray::kEntryDetailsOffset - DescriptorArray::kEntryKeyOffset;
7543   return Unsigned(LoadAndUntagToWord32ArrayElement(
7544       container, DescriptorArray::kHeaderSize, key_index, kKeyToDetailsOffset));
7545 }
7546 
7547 TNode<Object> CodeStubAssembler::LoadValueByKeyIndex(
7548     TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
7549   const int kKeyToValueOffset =
7550       DescriptorArray::kEntryValueOffset - DescriptorArray::kEntryKeyOffset;
7551   return LoadDescriptorArrayElement<Object>(container, key_index,
7552                                             kKeyToValueOffset);
7553 }
7554 
7555 TNode<MaybeObject> CodeStubAssembler::LoadFieldTypeByKeyIndex(
7556     TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
7557   const int kKeyToValueOffset =
7558       DescriptorArray::kEntryValueOffset - DescriptorArray::kEntryKeyOffset;
7559   return LoadDescriptorArrayElement<MaybeObject>(container, key_index,
7560                                                  kKeyToValueOffset);
7561 }
7562 
7563 TNode<IntPtrT> CodeStubAssembler::DescriptorEntryToIndex(
7564     TNode<IntPtrT> descriptor_entry) {
7565   return IntPtrMul(descriptor_entry,
7566                    IntPtrConstant(DescriptorArray::kEntrySize));
7567 }
7568 
7569 TNode<Name> CodeStubAssembler::LoadKeyByDescriptorEntry(
7570     TNode<DescriptorArray> container, TNode<IntPtrT> descriptor_entry) {
7571   return CAST(LoadDescriptorArrayElement<HeapObject>(
7572       container, DescriptorEntryToIndex(descriptor_entry),
7573       DescriptorArray::ToKeyIndex(0) * kTaggedSize));
7574 }
7575 
7576 TNode<Name> CodeStubAssembler::LoadKeyByDescriptorEntry(
7577     TNode<DescriptorArray> container, int descriptor_entry) {
7578   return CAST(LoadDescriptorArrayElement<HeapObject>(
7579       container, IntPtrConstant(0),
7580       DescriptorArray::ToKeyIndex(descriptor_entry) * kTaggedSize));
7581 }
7582 
7583 TNode<Uint32T> CodeStubAssembler::LoadDetailsByDescriptorEntry(
7584     TNode<DescriptorArray> container, TNode<IntPtrT> descriptor_entry) {
7585   return Unsigned(LoadAndUntagToWord32ArrayElement(
7586       container, DescriptorArray::kHeaderSize,
7587       DescriptorEntryToIndex(descriptor_entry),
7588       DescriptorArray::ToDetailsIndex(0) * kTaggedSize));
7589 }
7590 
7591 TNode<Uint32T> CodeStubAssembler::LoadDetailsByDescriptorEntry(
7592     TNode<DescriptorArray> container, int descriptor_entry) {
7593   return Unsigned(LoadAndUntagToWord32ArrayElement(
7594       container, DescriptorArray::kHeaderSize, IntPtrConstant(0),
7595       DescriptorArray::ToDetailsIndex(descriptor_entry) * kTaggedSize));
7596 }
7597 
7598 TNode<Object> CodeStubAssembler::LoadValueByDescriptorEntry(
7599     TNode<DescriptorArray> container, int descriptor_entry) {
7600   return LoadDescriptorArrayElement<Object>(
7601       container, IntPtrConstant(0),
7602       DescriptorArray::ToValueIndex(descriptor_entry) * kTaggedSize);
7603 }
7604 
7605 TNode<MaybeObject> CodeStubAssembler::LoadFieldTypeByDescriptorEntry(
7606     TNode<DescriptorArray> container, TNode<IntPtrT> descriptor_entry) {
7607   return LoadDescriptorArrayElement<MaybeObject>(
7608       container, DescriptorEntryToIndex(descriptor_entry),
7609       DescriptorArray::ToValueIndex(0) * kTaggedSize);
7610 }
7611 
7612 template V8_EXPORT_PRIVATE TNode<IntPtrT>
7613 CodeStubAssembler::EntryToIndex<NameDictionary>(TNode<IntPtrT>, int);
7614 template V8_EXPORT_PRIVATE TNode<IntPtrT>
7615 CodeStubAssembler::EntryToIndex<GlobalDictionary>(TNode<IntPtrT>, int);
7616 template V8_EXPORT_PRIVATE TNode<IntPtrT>
7617 CodeStubAssembler::EntryToIndex<NumberDictionary>(TNode<IntPtrT>, int);
7618 
7619 // This must be kept in sync with HashTableBase::ComputeCapacity().
7620 TNode<IntPtrT> CodeStubAssembler::HashTableComputeCapacity(
7621     TNode<IntPtrT> at_least_space_for) {
7622   TNode<IntPtrT> capacity = IntPtrRoundUpToPowerOfTwo32(
7623       IntPtrAdd(at_least_space_for, WordShr(at_least_space_for, 1)));
7624   return IntPtrMax(capacity, IntPtrConstant(HashTableBase::kMinCapacity));
7625 }
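// Worked example: at_least_space_for == 5 gives 5 + (5 >> 1) == 7, which
// IntPtrRoundUpToPowerOfTwo32 rounds up to 8; the result is then clamped from
// below by HashTableBase::kMinCapacity.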
7626 
7627 TNode<IntPtrT> CodeStubAssembler::IntPtrMax(SloppyTNode<IntPtrT> left,
7628                                             SloppyTNode<IntPtrT> right) {
7629   intptr_t left_constant;
7630   intptr_t right_constant;
7631   if (ToIntPtrConstant(left, &left_constant) &&
7632       ToIntPtrConstant(right, &right_constant)) {
7633     return IntPtrConstant(std::max(left_constant, right_constant));
7634   }
7635   return SelectConstant<IntPtrT>(IntPtrGreaterThanOrEqual(left, right), left,
7636                                  right);
7637 }
7638 
7639 TNode<IntPtrT> CodeStubAssembler::IntPtrMin(SloppyTNode<IntPtrT> left,
7640                                             SloppyTNode<IntPtrT> right) {
7641   intptr_t left_constant;
7642   intptr_t right_constant;
7643   if (ToIntPtrConstant(left, &left_constant) &&
7644       ToIntPtrConstant(right, &right_constant)) {
7645     return IntPtrConstant(std::min(left_constant, right_constant));
7646   }
7647   return SelectConstant<IntPtrT>(IntPtrLessThanOrEqual(left, right), left,
7648                                  right);
7649 }
7650 
7651 TNode<UintPtrT> CodeStubAssembler::UintPtrMin(TNode<UintPtrT> left,
7652                                               TNode<UintPtrT> right) {
7653   intptr_t left_constant;
7654   intptr_t right_constant;
7655   if (ToIntPtrConstant(left, &left_constant) &&
7656       ToIntPtrConstant(right, &right_constant)) {
7657     return UintPtrConstant(std::min(static_cast<uintptr_t>(left_constant),
7658                                     static_cast<uintptr_t>(right_constant)));
7659   }
7660   return SelectConstant<UintPtrT>(UintPtrLessThanOrEqual(left, right), left,
7661                                   right);
7662 }
7663 
7664 template <>
7665 TNode<HeapObject> CodeStubAssembler::LoadName<NameDictionary>(
7666     TNode<HeapObject> key) {
7667   CSA_ASSERT(this, Word32Or(IsTheHole(key), IsName(key)));
7668   return key;
7669 }
7670 
7671 template <>
7672 TNode<HeapObject> CodeStubAssembler::LoadName<GlobalDictionary>(
7673     TNode<HeapObject> key) {
7674   TNode<PropertyCell> property_cell = CAST(key);
7675   return CAST(LoadObjectField(property_cell, PropertyCell::kNameOffset));
7676 }
7677 
7678 template <typename Dictionary>
7679 void CodeStubAssembler::NameDictionaryLookup(
7680     TNode<Dictionary> dictionary, TNode<Name> unique_name, Label* if_found,
7681     TVariable<IntPtrT>* var_name_index, Label* if_not_found, LookupMode mode) {
7682   static_assert(std::is_same<Dictionary, NameDictionary>::value ||
7683                     std::is_same<Dictionary, GlobalDictionary>::value,
7684                 "Unexpected NameDictionary");
7685   DCHECK_EQ(MachineType::PointerRepresentation(), var_name_index->rep());
7686   DCHECK_IMPLIES(mode == kFindInsertionIndex, if_found == nullptr);
7687   Comment("NameDictionaryLookup");
7688   CSA_ASSERT(this, IsUniqueName(unique_name));
7689 
7690   TNode<IntPtrT> capacity = SmiUntag(GetCapacity<Dictionary>(dictionary));
7691   TNode<IntPtrT> mask = IntPtrSub(capacity, IntPtrConstant(1));
7692   TNode<UintPtrT> hash = ChangeUint32ToWord(LoadNameHash(unique_name));
7693 
7694   // See Dictionary::FirstProbe().
7695   TNode<IntPtrT> count = IntPtrConstant(0);
7696   TNode<IntPtrT> entry = Signed(WordAnd(hash, mask));
7697   TNode<Oddball> undefined = UndefinedConstant();
7698 
7699   // Appease the variable merging algorithm for "Goto(&loop)" below.
7700   *var_name_index = IntPtrConstant(0);
7701 
7702   TVARIABLE(IntPtrT, var_count, count);
7703   TVARIABLE(IntPtrT, var_entry, entry);
7704   Label loop(this, {&var_count, &var_entry, var_name_index});
7705   Goto(&loop);
7706   BIND(&loop);
7707   {
7708     Label next_probe(this);
7709     TNode<IntPtrT> entry = var_entry.value();
7710 
7711     TNode<IntPtrT> index = EntryToIndex<Dictionary>(entry);
7712     *var_name_index = index;
7713 
7714     TNode<HeapObject> current =
7715         CAST(UnsafeLoadFixedArrayElement(dictionary, index));
7716     GotoIf(TaggedEqual(current, undefined), if_not_found);
7717     if (mode == kFindExisting) {
7718       if (Dictionary::ShapeT::kMatchNeedsHoleCheck) {
7719         GotoIf(TaggedEqual(current, TheHoleConstant()), &next_probe);
7720       }
7721       current = LoadName<Dictionary>(current);
7722       GotoIf(TaggedEqual(current, unique_name), if_found);
7723     } else {
7724       DCHECK_EQ(kFindInsertionIndex, mode);
7725       GotoIf(TaggedEqual(current, TheHoleConstant()), if_not_found);
7726     }
7727     Goto(&next_probe);
7728 
7729     BIND(&next_probe);
7730     // See Dictionary::NextProbe().
7731     Increment(&var_count);
7732     entry = Signed(WordAnd(IntPtrAdd(entry, var_count.value()), mask));
7733 
7734     var_entry = entry;
7735     Goto(&loop);
7736   }
7737 }
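// The probe sequence above is entry, entry + 1, entry + 1 + 2, ... (all
// masked by capacity - 1), i.e. triangular-number probing as in
// Dictionary::NextProbe(); with a power-of-two capacity this eventually
// visits every slot, and the loop terminates because the dictionary is never
// allowed to become completely full (see Add() below).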
7738 
7739 // Instantiate template methods to work around a GCC compilation issue.
7740 template V8_EXPORT_PRIVATE void
7741 CodeStubAssembler::NameDictionaryLookup<NameDictionary>(TNode<NameDictionary>,
7742                                                         TNode<Name>, Label*,
7743                                                         TVariable<IntPtrT>*,
7744                                                         Label*, LookupMode);
7745 template V8_EXPORT_PRIVATE void CodeStubAssembler::NameDictionaryLookup<
7746     GlobalDictionary>(TNode<GlobalDictionary>, TNode<Name>, Label*,
7747                       TVariable<IntPtrT>*, Label*, LookupMode);
7748 
7749 TNode<Word32T> CodeStubAssembler::ComputeSeededHash(TNode<IntPtrT> key) {
7750   const TNode<ExternalReference> function_addr =
7751       ExternalConstant(ExternalReference::compute_integer_hash());
7752   const TNode<ExternalReference> isolate_ptr =
7753       ExternalConstant(ExternalReference::isolate_address(isolate()));
7754 
7755   MachineType type_ptr = MachineType::Pointer();
7756   MachineType type_uint32 = MachineType::Uint32();
7757   MachineType type_int32 = MachineType::Int32();
7758 
7759   return UncheckedCast<Word32T>(CallCFunction(
7760       function_addr, type_uint32, std::make_pair(type_ptr, isolate_ptr),
7761       std::make_pair(type_int32, TruncateIntPtrToInt32(key))));
7762 }
7763 
7764 void CodeStubAssembler::NumberDictionaryLookup(
7765     TNode<NumberDictionary> dictionary, TNode<IntPtrT> intptr_index,
7766     Label* if_found, TVariable<IntPtrT>* var_entry, Label* if_not_found) {
7767   CSA_ASSERT(this, IsNumberDictionary(dictionary));
7768   DCHECK_EQ(MachineType::PointerRepresentation(), var_entry->rep());
7769   Comment("NumberDictionaryLookup");
7770 
7771   TNode<IntPtrT> capacity = SmiUntag(GetCapacity<NumberDictionary>(dictionary));
7772   TNode<IntPtrT> mask = IntPtrSub(capacity, IntPtrConstant(1));
7773 
7774   TNode<UintPtrT> hash = ChangeUint32ToWord(ComputeSeededHash(intptr_index));
7775   TNode<Float64T> key_as_float64 = RoundIntPtrToFloat64(intptr_index);
7776 
7777   // See Dictionary::FirstProbe().
7778   TNode<IntPtrT> count = IntPtrConstant(0);
7779   TNode<IntPtrT> entry = Signed(WordAnd(hash, mask));
7780 
7781   TNode<Oddball> undefined = UndefinedConstant();
7782   TNode<Oddball> the_hole = TheHoleConstant();
7783 
7784   TVARIABLE(IntPtrT, var_count, count);
7785   Label loop(this, {&var_count, var_entry});
7786   *var_entry = entry;
7787   Goto(&loop);
7788   BIND(&loop);
7789   {
7790     TNode<IntPtrT> entry = var_entry->value();
7791 
7792     TNode<IntPtrT> index = EntryToIndex<NumberDictionary>(entry);
7793     TNode<Object> current = UnsafeLoadFixedArrayElement(dictionary, index);
7794     GotoIf(TaggedEqual(current, undefined), if_not_found);
7795     Label next_probe(this);
7796     {
7797       Label if_currentissmi(this), if_currentisnotsmi(this);
7798       Branch(TaggedIsSmi(current), &if_currentissmi, &if_currentisnotsmi);
7799       BIND(&if_currentissmi);
7800       {
7801         TNode<IntPtrT> current_value = SmiUntag(CAST(current));
7802         Branch(WordEqual(current_value, intptr_index), if_found, &next_probe);
7803       }
7804       BIND(&if_currentisnotsmi);
7805       {
7806         GotoIf(TaggedEqual(current, the_hole), &next_probe);
7807         // Otherwise |current| must be a heap number key.
7808         TNode<Float64T> current_value = LoadHeapNumberValue(CAST(current));
7809         Branch(Float64Equal(current_value, key_as_float64), if_found,
7810                &next_probe);
7811       }
7812     }
7813 
7814     BIND(&next_probe);
7815     // See Dictionary::NextProbe().
7816     Increment(&var_count);
7817     entry = Signed(WordAnd(IntPtrAdd(entry, var_count.value()), mask));
7818 
7819     *var_entry = entry;
7820     Goto(&loop);
7821   }
7822 }
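// Keys in a NumberDictionary are stored either as Smis or as heap numbers, so
// the probe loop above compares Smi entries word-for-word against
// intptr_index and heap-number entries against key_as_float64; the starting
// slot comes from ComputeSeededHash on the index.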
7823 
7824 TNode<Object> CodeStubAssembler::BasicLoadNumberDictionaryElement(
7825     TNode<NumberDictionary> dictionary, TNode<IntPtrT> intptr_index,
7826     Label* not_data, Label* if_hole) {
7827   TVARIABLE(IntPtrT, var_entry);
7828   Label if_found(this);
7829   NumberDictionaryLookup(dictionary, intptr_index, &if_found, &var_entry,
7830                          if_hole);
7831   BIND(&if_found);
7832 
7833   // Check that the value is a data property.
7834   TNode<IntPtrT> index = EntryToIndex<NumberDictionary>(var_entry.value());
7835   TNode<Uint32T> details = LoadDetailsByKeyIndex(dictionary, index);
7836   TNode<Uint32T> kind = DecodeWord32<PropertyDetails::KindField>(details);
7837   // TODO(jkummerow): Support accessors without missing?
7838   GotoIfNot(Word32Equal(kind, Int32Constant(kData)), not_data);
7839   // Finally, load the value.
7840   return LoadValueByKeyIndex(dictionary, index);
7841 }
7842 
7843 template <class Dictionary>
7844 void CodeStubAssembler::FindInsertionEntry(TNode<Dictionary> dictionary,
7845                                            TNode<Name> key,
7846                                            TVariable<IntPtrT>* var_key_index) {
7847   UNREACHABLE();
7848 }
7849 
7850 template <>
7851 void CodeStubAssembler::FindInsertionEntry<NameDictionary>(
7852     TNode<NameDictionary> dictionary, TNode<Name> key,
7853     TVariable<IntPtrT>* var_key_index) {
7854   Label done(this);
7855   NameDictionaryLookup<NameDictionary>(dictionary, key, nullptr, var_key_index,
7856                                        &done, kFindInsertionIndex);
7857   BIND(&done);
7858 }
7859 
7860 template <class Dictionary>
7861 void CodeStubAssembler::InsertEntry(TNode<Dictionary> dictionary,
7862                                     TNode<Name> key, TNode<Object> value,
7863                                     TNode<IntPtrT> index,
7864                                     TNode<Smi> enum_index) {
7865   UNREACHABLE();  // Use specializations instead.
7866 }
7867 
7868 template <>
7869 void CodeStubAssembler::InsertEntry<NameDictionary>(
7870     TNode<NameDictionary> dictionary, TNode<Name> name, TNode<Object> value,
7871     TNode<IntPtrT> index, TNode<Smi> enum_index) {
7872   // Store name and value.
7873   StoreFixedArrayElement(dictionary, index, name);
7874   StoreValueByKeyIndex<NameDictionary>(dictionary, index, value);
7875 
7876   // Prepare details of the new property.
7877   PropertyDetails d(kData, NONE, PropertyCellType::kNoCell);
7878   enum_index =
7879       SmiShl(enum_index, PropertyDetails::DictionaryStorageField::kShift);
7880   // We OR over the actual index below, so we expect the initial value to be 0.
7881   DCHECK_EQ(0, d.dictionary_index());
7882   TVARIABLE(Smi, var_details, SmiOr(SmiConstant(d.AsSmi()), enum_index));
7883 
7884   // Private names must be marked non-enumerable.
7885   Label not_private(this, &var_details);
7886   GotoIfNot(IsPrivateSymbol(name), &not_private);
7887   TNode<Smi> dont_enum =
7888       SmiShl(SmiConstant(DONT_ENUM), PropertyDetails::AttributesField::kShift);
7889   var_details = SmiOr(var_details.value(), dont_enum);
7890   Goto(&not_private);
7891   BIND(&not_private);
7892 
7893   // Finally, store the details.
7894   StoreDetailsByKeyIndex<NameDictionary>(dictionary, index,
7895                                          var_details.value());
7896 }
7897 
7898 template <>
7899 void CodeStubAssembler::InsertEntry<GlobalDictionary>(
7900     TNode<GlobalDictionary> dictionary, TNode<Name> key, TNode<Object> value,
7901     TNode<IntPtrT> index, TNode<Smi> enum_index) {
7902   UNIMPLEMENTED();
7903 }
7904 
7905 template <class Dictionary>
7906 void CodeStubAssembler::Add(TNode<Dictionary> dictionary, TNode<Name> key,
7907                             TNode<Object> value, Label* bailout) {
7908   CSA_ASSERT(this, Word32BinaryNot(IsEmptyPropertyDictionary(dictionary)));
7909   TNode<Smi> capacity = GetCapacity<Dictionary>(dictionary);
7910   TNode<Smi> nof = GetNumberOfElements<Dictionary>(dictionary);
7911   TNode<Smi> new_nof = SmiAdd(nof, SmiConstant(1));
7912   // Require 33% to still be free after adding additional_elements.
7913   // Computing "x + (x >> 1)" on a Smi x does not return a valid Smi!
7914   // But that's OK here because it's only used for a comparison.
7915   TNode<Smi> required_capacity_pseudo_smi = SmiAdd(new_nof, SmiShr(new_nof, 1));
7916   GotoIf(SmiBelow(capacity, required_capacity_pseudo_smi), bailout);
7917   // Require rehashing if more than 50% of free elements are deleted elements.
7918   TNode<Smi> deleted = GetNumberOfDeletedElements<Dictionary>(dictionary);
7919   CSA_ASSERT(this, SmiAbove(capacity, new_nof));
7920   TNode<Smi> half_of_free_elements = SmiShr(SmiSub(capacity, new_nof), 1);
7921   GotoIf(SmiAbove(deleted, half_of_free_elements), bailout);
7922 
7923   TNode<Smi> enum_index = GetNextEnumerationIndex<Dictionary>(dictionary);
7924   TNode<Smi> new_enum_index = SmiAdd(enum_index, SmiConstant(1));
7925   TNode<Smi> max_enum_index =
7926       SmiConstant(PropertyDetails::DictionaryStorageField::kMax);
7927   GotoIf(SmiAbove(new_enum_index, max_enum_index), bailout);
7928 
7929   // No more bailouts after this point.
7930   // Operations from here on can have side effects.
7931 
7932   SetNextEnumerationIndex<Dictionary>(dictionary, new_enum_index);
7933   SetNumberOfElements<Dictionary>(dictionary, new_nof);
7934 
7935   TVARIABLE(IntPtrT, var_key_index);
7936   FindInsertionEntry<Dictionary>(dictionary, key, &var_key_index);
7937   InsertEntry<Dictionary>(dictionary, key, value, var_key_index.value(),
7938                           enum_index);
7939 }
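// Worked example of the "33% free" check above: with capacity == 16 and
// nof == 10, new_nof == 11 and required_capacity_pseudo_smi == 11 + 5 == 16,
// so the insertion proceeds; with nof == 11 the requirement becomes
// 12 + 6 == 18 > 16 and we bail out, leaving the caller (typically runtime
// code) to grow the dictionary first.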
7940 
7941 template void CodeStubAssembler::Add<NameDictionary>(TNode<NameDictionary>,
7942                                                      TNode<Name>, TNode<Object>,
7943                                                      Label*);
7944 
7945 template <typename Array>
7946 void CodeStubAssembler::LookupLinear(TNode<Name> unique_name,
7947                                      TNode<Array> array,
7948                                      TNode<Uint32T> number_of_valid_entries,
7949                                      Label* if_found,
7950                                      TVariable<IntPtrT>* var_name_index,
7951                                      Label* if_not_found) {
7952   static_assert(std::is_base_of<FixedArray, Array>::value ||
7953                     std::is_base_of<WeakFixedArray, Array>::value ||
7954                     std::is_base_of<DescriptorArray, Array>::value,
7955                 "T must be a descendant of FixedArray, WeakFixedArray or DescriptorArray");
7956   Comment("LookupLinear");
7957   CSA_ASSERT(this, IsUniqueName(unique_name));
7958   TNode<IntPtrT> first_inclusive = IntPtrConstant(Array::ToKeyIndex(0));
7959   TNode<IntPtrT> factor = IntPtrConstant(Array::kEntrySize);
7960   TNode<IntPtrT> last_exclusive = IntPtrAdd(
7961       first_inclusive,
7962       IntPtrMul(ChangeInt32ToIntPtr(number_of_valid_entries), factor));
7963 
7964   BuildFastLoop<IntPtrT>(
7965       last_exclusive, first_inclusive,
7966       [=](TNode<IntPtrT> name_index) {
7967         TNode<MaybeObject> element =
7968             LoadArrayElement(array, Array::kHeaderSize, name_index);
7969         TNode<Name> candidate_name = CAST(element);
7970         *var_name_index = name_index;
7971         GotoIf(TaggedEqual(candidate_name, unique_name), if_found);
7972       },
7973       -Array::kEntrySize, IndexAdvanceMode::kPre);
7974   Goto(if_not_found);
7975 }
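// Note that the loop above walks the entries from the end of the array
// towards the start (a negative step with IndexAdvanceMode::kPre), so later
// entries are checked before earlier ones; the first match wins.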
7976 
7977 template <>
7978 TNode<Uint32T> CodeStubAssembler::NumberOfEntries<DescriptorArray>(
7979     TNode<DescriptorArray> descriptors) {
7980   return Unsigned(LoadNumberOfDescriptors(descriptors));
7981 }
7982 
7983 template <>
7984 TNode<Uint32T> CodeStubAssembler::NumberOfEntries<TransitionArray>(
7985     TNode<TransitionArray> transitions) {
7986   TNode<IntPtrT> length = LoadAndUntagWeakFixedArrayLength(transitions);
7987   return Select<Uint32T>(
7988       UintPtrLessThan(length, IntPtrConstant(TransitionArray::kFirstIndex)),
7989       [=] { return Unsigned(Int32Constant(0)); },
7990       [=] {
7991         return Unsigned(LoadAndUntagToWord32ArrayElement(
7992             transitions, WeakFixedArray::kHeaderSize,
7993             IntPtrConstant(TransitionArray::kTransitionLengthIndex)));
7994       });
7995 }
7996 
7997 template <typename Array>
7998 TNode<IntPtrT> CodeStubAssembler::EntryIndexToIndex(
7999     TNode<Uint32T> entry_index) {
8000   TNode<Int32T> entry_size = Int32Constant(Array::kEntrySize);
8001   TNode<Word32T> index = Int32Mul(entry_index, entry_size);
8002   return ChangeInt32ToIntPtr(index);
8003 }
8004 
8005 template <typename Array>
8006 TNode<IntPtrT> CodeStubAssembler::ToKeyIndex(TNode<Uint32T> entry_index) {
8007   return IntPtrAdd(IntPtrConstant(Array::ToKeyIndex(0)),
8008                    EntryIndexToIndex<Array>(entry_index));
8009 }
8010 
8011 template TNode<IntPtrT> CodeStubAssembler::ToKeyIndex<DescriptorArray>(
8012     TNode<Uint32T>);
8013 template TNode<IntPtrT> CodeStubAssembler::ToKeyIndex<TransitionArray>(
8014     TNode<Uint32T>);
8015 
8016 template <>
8017 TNode<Uint32T> CodeStubAssembler::GetSortedKeyIndex<DescriptorArray>(
8018     TNode<DescriptorArray> descriptors, TNode<Uint32T> descriptor_number) {
8019   TNode<Uint32T> details =
8020       DescriptorArrayGetDetails(descriptors, descriptor_number);
8021   return DecodeWord32<PropertyDetails::DescriptorPointer>(details);
8022 }
8023 
8024 template <>
8025 TNode<Uint32T> CodeStubAssembler::GetSortedKeyIndex<TransitionArray>(
8026     TNode<TransitionArray> transitions, TNode<Uint32T> transition_number) {
8027   return transition_number;
8028 }
8029 
8030 template <typename Array>
8031 TNode<Name> CodeStubAssembler::GetKey(TNode<Array> array,
8032                                       TNode<Uint32T> entry_index) {
8033   static_assert(std::is_base_of<TransitionArray, Array>::value ||
8034                     std::is_base_of<DescriptorArray, Array>::value,
8035                 "T must be a descendant of DescriptorArray or TransitionArray");
8036   const int key_offset = Array::ToKeyIndex(0) * kTaggedSize;
8037   TNode<MaybeObject> element =
8038       LoadArrayElement(array, Array::kHeaderSize,
8039                        EntryIndexToIndex<Array>(entry_index), key_offset);
8040   return CAST(element);
8041 }
8042 
8043 template TNode<Name> CodeStubAssembler::GetKey<DescriptorArray>(
8044     TNode<DescriptorArray>, TNode<Uint32T>);
8045 template TNode<Name> CodeStubAssembler::GetKey<TransitionArray>(
8046     TNode<TransitionArray>, TNode<Uint32T>);
8047 
8048 TNode<Uint32T> CodeStubAssembler::DescriptorArrayGetDetails(
8049     TNode<DescriptorArray> descriptors, TNode<Uint32T> descriptor_number) {
8050   const int details_offset = DescriptorArray::ToDetailsIndex(0) * kTaggedSize;
8051   return Unsigned(LoadAndUntagToWord32ArrayElement(
8052       descriptors, DescriptorArray::kHeaderSize,
8053       EntryIndexToIndex<DescriptorArray>(descriptor_number), details_offset));
8054 }
8055 
8056 template <typename Array>
8057 void CodeStubAssembler::LookupBinary(TNode<Name> unique_name,
8058                                      TNode<Array> array,
8059                                      TNode<Uint32T> number_of_valid_entries,
8060                                      Label* if_found,
8061                                      TVariable<IntPtrT>* var_name_index,
8062                                      Label* if_not_found) {
8063   Comment("LookupBinary");
8064   TVARIABLE(Uint32T, var_low, Unsigned(Int32Constant(0)));
8065   TNode<Uint32T> limit =
8066       Unsigned(Int32Sub(NumberOfEntries<Array>(array), Int32Constant(1)));
8067   TVARIABLE(Uint32T, var_high, limit);
8068   TNode<Uint32T> hash = LoadNameHashAssumeComputed(unique_name);
8069   CSA_ASSERT(this, Word32NotEqual(hash, Int32Constant(0)));
8070 
8071   // Assume non-empty array.
8072   CSA_ASSERT(this, Uint32LessThanOrEqual(var_low.value(), var_high.value()));
8073 
8074   Label binary_loop(this, {&var_high, &var_low});
8075   Goto(&binary_loop);
8076   BIND(&binary_loop);
8077   {
8078     // mid = low + (high - low) / 2 (to avoid overflow in "(low + high) / 2").
8079     TNode<Uint32T> mid = Unsigned(
8080         Int32Add(var_low.value(),
8081                  Word32Shr(Int32Sub(var_high.value(), var_low.value()), 1)));
8082     // mid_name = array->GetSortedKey(mid).
8083     TNode<Uint32T> sorted_key_index = GetSortedKeyIndex<Array>(array, mid);
8084     TNode<Name> mid_name = GetKey<Array>(array, sorted_key_index);
8085 
8086     TNode<Uint32T> mid_hash = LoadNameHashAssumeComputed(mid_name);
8087 
8088     Label mid_greater(this), mid_less(this), merge(this);
8089     Branch(Uint32GreaterThanOrEqual(mid_hash, hash), &mid_greater, &mid_less);
8090     BIND(&mid_greater);
8091     {
8092       var_high = mid;
8093       Goto(&merge);
8094     }
8095     BIND(&mid_less);
8096     {
8097       var_low = Unsigned(Int32Add(mid, Int32Constant(1)));
8098       Goto(&merge);
8099     }
8100     BIND(&merge);
8101     GotoIf(Word32NotEqual(var_low.value(), var_high.value()), &binary_loop);
8102   }
8103 
8104   Label scan_loop(this, &var_low);
8105   Goto(&scan_loop);
8106   BIND(&scan_loop);
8107   {
8108     GotoIf(Int32GreaterThan(var_low.value(), limit), if_not_found);
8109 
8110     TNode<Uint32T> sort_index =
8111         GetSortedKeyIndex<Array>(array, var_low.value());
8112     TNode<Name> current_name = GetKey<Array>(array, sort_index);
8113     TNode<Uint32T> current_hash = LoadNameHashAssumeComputed(current_name);
8114     GotoIf(Word32NotEqual(current_hash, hash), if_not_found);
8115     Label next(this);
8116     GotoIf(TaggedNotEqual(current_name, unique_name), &next);
8117     GotoIf(Uint32GreaterThanOrEqual(sort_index, number_of_valid_entries),
8118            if_not_found);
8119     *var_name_index = ToKeyIndex<Array>(sort_index);
8120     Goto(if_found);
8121 
8122     BIND(&next);
8123     var_low = Unsigned(Int32Add(var_low.value(), Int32Constant(1)));
8124     Goto(&scan_loop);
8125   }
8126 }
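// The binary search above computes mid as low + (high - low) / 2 to avoid
// overflow and converges on the first sorted entry whose hash is >= the
// target hash; the scan loop then walks forward through entries with the same
// hash until it either matches |unique_name| (and verifies the entry is below
// number_of_valid_entries) or runs out of equal-hash entries.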
8127 
8128 void CodeStubAssembler::ForEachEnumerableOwnProperty(
8129     TNode<Context> context, TNode<Map> map, TNode<JSObject> object,
8130     ForEachEnumerationMode mode, const ForEachKeyValueFunction& body,
8131     Label* bailout) {
8132   TNode<Uint16T> type = LoadMapInstanceType(map);
8133   TNode<Uint32T> bit_field3 = EnsureOnlyHasSimpleProperties(map, type, bailout);
8134 
8135   TVARIABLE(DescriptorArray, var_descriptors, LoadMapDescriptors(map));
8136   TNode<Uint32T> nof_descriptors =
8137       DecodeWord32<Map::Bits3::NumberOfOwnDescriptorsBits>(bit_field3);
8138 
8139   TVARIABLE(BoolT, var_stable, Int32TrueConstant());
8140 
8141   TVARIABLE(BoolT, var_has_symbol, Int32FalseConstant());
8142   // false - iterate only string properties, true - iterate only symbol
8143   // properties
8144   TVARIABLE(BoolT, var_is_symbol_processing_loop, Int32FalseConstant());
8145   TVARIABLE(IntPtrT, var_start_key_index,
8146             ToKeyIndex<DescriptorArray>(Unsigned(Int32Constant(0))));
8147   // Note: var_end_key_index is exclusive for the loop
8148   TVARIABLE(IntPtrT, var_end_key_index,
8149             ToKeyIndex<DescriptorArray>(nof_descriptors));
8150   VariableList list({&var_descriptors, &var_stable, &var_has_symbol,
8151                      &var_is_symbol_processing_loop, &var_start_key_index,
8152                      &var_end_key_index},
8153                     zone());
8154   Label descriptor_array_loop(this, list);
8155 
8156   Goto(&descriptor_array_loop);
8157   BIND(&descriptor_array_loop);
8158 
8159   BuildFastLoop<IntPtrT>(
8160       list, var_start_key_index.value(), var_end_key_index.value(),
8161       [&](TNode<IntPtrT> descriptor_key_index) {
8162         TNode<Name> next_key =
8163             LoadKeyByKeyIndex(var_descriptors.value(), descriptor_key_index);
8164 
8165         TVARIABLE(Object, var_value, SmiConstant(0));
8166         Label callback(this), next_iteration(this);
8167 
8168         if (mode == kEnumerationOrder) {
8169           // |next_key| is either a string or a symbol
8170           // Skip strings or symbols depending on
8171           // |var_is_symbol_processing_loop|.
8172           Label if_string(this), if_symbol(this), if_name_ok(this);
8173           Branch(IsSymbol(next_key), &if_symbol, &if_string);
8174           BIND(&if_symbol);
8175           {
8176             // Process symbol property when |var_is_symbol_processing_loop| is
8177             // true.
8178             GotoIf(var_is_symbol_processing_loop.value(), &if_name_ok);
8179             // The first iteration needs to compute a smaller range for
8180             // processing symbols.
8181             Label if_first_symbol(this);
8182             // var_end_key_index is still inclusive at this point.
8183             var_end_key_index = descriptor_key_index;
8184             Branch(var_has_symbol.value(), &next_iteration, &if_first_symbol);
8185             BIND(&if_first_symbol);
8186             {
8187               var_start_key_index = descriptor_key_index;
8188               var_has_symbol = Int32TrueConstant();
8189               Goto(&next_iteration);
8190             }
8191           }
8192           BIND(&if_string);
8193           {
8194             CSA_ASSERT(this, IsString(next_key));
8195             // Process string property when |var_is_symbol_processing_loop| is
8196             // false.
8197             Branch(var_is_symbol_processing_loop.value(), &next_iteration,
8198                    &if_name_ok);
8199           }
8200           BIND(&if_name_ok);
8201         }
8202         {
8203           TVARIABLE(Map, var_map);
8204           TVARIABLE(HeapObject, var_meta_storage);
8205           TVARIABLE(IntPtrT, var_entry);
8206           TVARIABLE(Uint32T, var_details);
8207           Label if_found(this);
8208 
8209           Label if_found_fast(this), if_found_dict(this);
8210 
8211           Label if_stable(this), if_not_stable(this);
8212           Branch(var_stable.value(), &if_stable, &if_not_stable);
8213           BIND(&if_stable);
8214           {
8215             // Directly decode from the descriptor array if |object| did not
8216             // change shape.
8217             var_map = map;
8218             var_meta_storage = var_descriptors.value();
8219             var_entry = Signed(descriptor_key_index);
8220             Goto(&if_found_fast);
8221           }
8222           BIND(&if_not_stable);
8223           {
8224             // If the map did change, do a slower lookup. We are still
8225             // guaranteed that the object has a simple shape, and that the key
8226             // is a name.
8227             var_map = LoadMap(object);
8228             TryLookupPropertyInSimpleObject(
8229                 object, var_map.value(), next_key, &if_found_fast,
8230                 &if_found_dict, &var_meta_storage, &var_entry, &next_iteration);
8231           }
8232 
8233           BIND(&if_found_fast);
8234           {
8235             TNode<DescriptorArray> descriptors = CAST(var_meta_storage.value());
8236             TNode<IntPtrT> name_index = var_entry.value();
8237 
8238             // Skip non-enumerable properties.
8239             var_details = LoadDetailsByKeyIndex(descriptors, name_index);
8240             GotoIf(IsSetWord32(var_details.value(),
8241                                PropertyDetails::kAttributesDontEnumMask),
8242                    &next_iteration);
8243 
8244             LoadPropertyFromFastObject(object, var_map.value(), descriptors,
8245                                        name_index, var_details.value(),
8246                                        &var_value);
8247             Goto(&if_found);
8248           }
8249           BIND(&if_found_dict);
8250           {
8251             TNode<NameDictionary> dictionary = CAST(var_meta_storage.value());
8252             TNode<IntPtrT> entry = var_entry.value();
8253 
8254             TNode<Uint32T> details = LoadDetailsByKeyIndex(dictionary, entry);
8255             // Skip non-enumerable properties.
8256             GotoIf(
8257                 IsSetWord32(details, PropertyDetails::kAttributesDontEnumMask),
8258                 &next_iteration);
8259 
8260             var_details = details;
8261             var_value = LoadValueByKeyIndex<NameDictionary>(dictionary, entry);
8262             Goto(&if_found);
8263           }
8264 
8265           // Here we have details and value which could be an accessor.
8266           BIND(&if_found);
8267           {
8268             Label slow_load(this, Label::kDeferred);
8269 
8270             var_value = CallGetterIfAccessor(var_value.value(), object,
8271                                              var_details.value(), context,
8272                                              object, &slow_load, kCallJSGetter);
8273             Goto(&callback);
8274 
8275             BIND(&slow_load);
8276             var_value =
8277                 CallRuntime(Runtime::kGetProperty, context, object, next_key);
8278             Goto(&callback);
8279 
8280             BIND(&callback);
8281             body(next_key, var_value.value());
8282 
8283             // Check if |object| is still stable, i.e. the descriptors in the
8284             // preloaded |descriptors| are still the same modulo in-place
8285             // representation changes.
8286             GotoIfNot(var_stable.value(), &next_iteration);
8287             var_stable = TaggedEqual(LoadMap(object), map);
8288             // Reload the descriptors just in case the actual array changed, and
8289             // any of the field representations changed in-place.
8290             var_descriptors = LoadMapDescriptors(map);
8291 
8292             Goto(&next_iteration);
8293           }
8294         }
8295         BIND(&next_iteration);
8296       },
8297       DescriptorArray::kEntrySize, IndexAdvanceMode::kPost);
8298 
8299   if (mode == kEnumerationOrder) {
8300     Label done(this);
8301     GotoIf(var_is_symbol_processing_loop.value(), &done);
8302     GotoIfNot(var_has_symbol.value(), &done);
8303     // All string properties are processed, now process symbol properties.
8304     var_is_symbol_processing_loop = Int32TrueConstant();
8305     // Add DescriptorArray::kEntrySize to make the var_end_key_index exclusive
8306     // as BuildFastLoop() expects.
8307     Increment(&var_end_key_index, DescriptorArray::kEntrySize);
8308     Goto(&descriptor_array_loop);
8309 
8310     BIND(&done);
8311   }
8312 }
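// In kEnumerationOrder mode the descriptor array is effectively walked twice:
// the first pass visits string-named properties and records the key-index
// range occupied by symbols, and the second pass (with
// var_is_symbol_processing_loop set) revisits only that range to emit the
// symbol-named properties, so string keys are reported before symbol keys.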
8313 
8314 TNode<Object> CodeStubAssembler::GetConstructor(TNode<Map> map) {
8315   TVARIABLE(HeapObject, var_maybe_constructor);
8316   var_maybe_constructor = map;
8317   Label loop(this, &var_maybe_constructor), done(this);
8318   GotoIfNot(IsMap(var_maybe_constructor.value()), &done);
8319   Goto(&loop);
8320 
8321   BIND(&loop);
8322   {
8323     var_maybe_constructor = CAST(
8324         LoadObjectField(var_maybe_constructor.value(),
8325                         Map::kConstructorOrBackPointerOrNativeContextOffset));
8326     GotoIf(IsMap(var_maybe_constructor.value()), &loop);
8327     Goto(&done);
8328   }
8329 
8330   BIND(&done);
8331   return var_maybe_constructor.value();
8332 }
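// The loop above follows Map::kConstructorOrBackPointerOrNativeContextOffset
// through any chain of back-pointer maps until it reaches a non-map value,
// i.e. whatever constructor-or-sentinel is stored on the initial map;
// GetCreationContext below then inspects that value's map.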
8333 
8334 TNode<NativeContext> CodeStubAssembler::GetCreationContext(
8335     TNode<JSReceiver> receiver, Label* if_bailout) {
8336   TNode<Map> receiver_map = LoadMap(receiver);
8337   TNode<Object> constructor = GetConstructor(receiver_map);
8338 
8339   TVARIABLE(JSFunction, var_function);
8340 
8341   Label done(this), if_jsfunction(this), if_jsgenerator(this);
8342   GotoIf(TaggedIsSmi(constructor), if_bailout);
8343 
8344   TNode<Map> function_map = LoadMap(CAST(constructor));
8345   GotoIf(IsJSFunctionMap(function_map), &if_jsfunction);
8346   GotoIf(IsJSGeneratorMap(function_map), &if_jsgenerator);
8347   // Remote objects don't have a creation context.
8348   GotoIf(IsFunctionTemplateInfoMap(function_map), if_bailout);
8349 
8350   CSA_ASSERT(this, IsJSFunctionMap(receiver_map));
8351   var_function = CAST(receiver);
8352   Goto(&done);
8353 
8354   BIND(&if_jsfunction);
8355   {
8356     var_function = CAST(constructor);
8357     Goto(&done);
8358   }
8359 
8360   BIND(&if_jsgenerator);
8361   {
8362     var_function = LoadJSGeneratorObjectFunction(CAST(receiver));
8363     Goto(&done);
8364   }
8365 
8366   BIND(&done);
8367   TNode<Context> context = LoadJSFunctionContext(var_function.value());
8368 
8369   GotoIfNot(IsContext(context), if_bailout);
8370 
8371   TNode<NativeContext> native_context = LoadNativeContext(context);
8372   return native_context;
8373 }
8374 
8375 void CodeStubAssembler::DescriptorLookup(TNode<Name> unique_name,
8376                                          TNode<DescriptorArray> descriptors,
8377                                          TNode<Uint32T> bitfield3,
8378                                          Label* if_found,
8379                                          TVariable<IntPtrT>* var_name_index,
8380                                          Label* if_not_found) {
8381   Comment("DescriptorArrayLookup");
8382   TNode<Uint32T> nof =
8383       DecodeWord32<Map::Bits3::NumberOfOwnDescriptorsBits>(bitfield3);
8384   Lookup<DescriptorArray>(unique_name, descriptors, nof, if_found,
8385                           var_name_index, if_not_found);
8386 }
8387 
8388 void CodeStubAssembler::TransitionLookup(TNode<Name> unique_name,
8389                                          TNode<TransitionArray> transitions,
8390                                          Label* if_found,
8391                                          TVariable<IntPtrT>* var_name_index,
8392                                          Label* if_not_found) {
8393   Comment("TransitionArrayLookup");
8394   TNode<Uint32T> number_of_valid_transitions =
8395       NumberOfEntries<TransitionArray>(transitions);
8396   Lookup<TransitionArray>(unique_name, transitions, number_of_valid_transitions,
8397                           if_found, var_name_index, if_not_found);
8398 }
8399 
8400 template <typename Array>
8401 void CodeStubAssembler::Lookup(TNode<Name> unique_name, TNode<Array> array,
8402                                TNode<Uint32T> number_of_valid_entries,
8403                                Label* if_found,
8404                                TVariable<IntPtrT>* var_name_index,
8405                                Label* if_not_found) {
8406   Comment("ArrayLookup");
8407   if (!number_of_valid_entries) {
8408     number_of_valid_entries = NumberOfEntries(array);
8409   }
8410   GotoIf(Word32Equal(number_of_valid_entries, Int32Constant(0)), if_not_found);
8411   Label linear_search(this), binary_search(this);
8412   const int kMaxElementsForLinearSearch = 32;
8413   Branch(Uint32LessThanOrEqual(number_of_valid_entries,
8414                                Int32Constant(kMaxElementsForLinearSearch)),
8415          &linear_search, &binary_search);
8416   BIND(&linear_search);
8417   {
8418     LookupLinear<Array>(unique_name, array, number_of_valid_entries, if_found,
8419                         var_name_index, if_not_found);
8420   }
8421   BIND(&binary_search);
8422   {
8423     LookupBinary<Array>(unique_name, array, number_of_valid_entries, if_found,
8424                         var_name_index, if_not_found);
8425   }
8426 }
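// Lookup() picks the search strategy from the entry count: with at most
// kMaxElementsForLinearSearch (32) valid entries a simple linear scan is
// assumed to be cheaper than binary search, beyond that LookupBinary() is
// used. DescriptorLookup() and TransitionLookup() above are thin wrappers
// that only differ in how the number of valid entries is obtained.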
8427 
8428 void CodeStubAssembler::TryLookupPropertyInSimpleObject(
8429     TNode<JSObject> object, TNode<Map> map, TNode<Name> unique_name,
8430     Label* if_found_fast, Label* if_found_dict,
8431     TVariable<HeapObject>* var_meta_storage, TVariable<IntPtrT>* var_name_index,
8432     Label* if_not_found) {
8433   CSA_ASSERT(this, IsSimpleObjectMap(map));
8434   CSA_ASSERT(this, IsUniqueNameNoCachedIndex(unique_name));
8435 
8436   TNode<Uint32T> bit_field3 = LoadMapBitField3(map);
8437   Label if_isfastmap(this), if_isslowmap(this);
8438   Branch(IsSetWord32<Map::Bits3::IsDictionaryMapBit>(bit_field3), &if_isslowmap,
8439          &if_isfastmap);
8440   BIND(&if_isfastmap);
8441   {
8442     TNode<DescriptorArray> descriptors = LoadMapDescriptors(map);
8443     *var_meta_storage = descriptors;
8444 
8445     DescriptorLookup(unique_name, descriptors, bit_field3, if_found_fast,
8446                      var_name_index, if_not_found);
8447   }
8448   BIND(&if_isslowmap);
8449   {
8450     TNode<NameDictionary> dictionary = CAST(LoadSlowProperties(object));
8451     *var_meta_storage = dictionary;
8452 
8453     NameDictionaryLookup<NameDictionary>(dictionary, unique_name, if_found_dict,
8454                                          var_name_index, if_not_found);
8455   }
8456 }
8457 
8458 void CodeStubAssembler::TryLookupProperty(
8459     TNode<HeapObject> object, TNode<Map> map, SloppyTNode<Int32T> instance_type,
8460     TNode<Name> unique_name, Label* if_found_fast, Label* if_found_dict,
8461     Label* if_found_global, TVariable<HeapObject>* var_meta_storage,
8462     TVariable<IntPtrT>* var_name_index, Label* if_not_found,
8463     Label* if_bailout) {
8464   Label if_objectisspecial(this);
8465   GotoIf(IsSpecialReceiverInstanceType(instance_type), &if_objectisspecial);
8466 
8467   TryLookupPropertyInSimpleObject(CAST(object), map, unique_name, if_found_fast,
8468                                   if_found_dict, var_meta_storage,
8469                                   var_name_index, if_not_found);
8470 
8471   BIND(&if_objectisspecial);
8472   {
8473     // Handle the global object here and bail out for other special objects.
8474     GotoIfNot(InstanceTypeEqual(instance_type, JS_GLOBAL_OBJECT_TYPE),
8475               if_bailout);
8476 
8477     // Handle interceptors and access checks in runtime.
8478     TNode<Int32T> bit_field = LoadMapBitField(map);
8479     int mask = Map::Bits1::HasNamedInterceptorBit::kMask |
8480                Map::Bits1::IsAccessCheckNeededBit::kMask;
8481     GotoIf(IsSetWord32(bit_field, mask), if_bailout);
8482 
8483     TNode<GlobalDictionary> dictionary = CAST(LoadSlowProperties(CAST(object)));
8484     *var_meta_storage = dictionary;
8485 
8486     NameDictionaryLookup<GlobalDictionary>(
8487         dictionary, unique_name, if_found_global, var_name_index, if_not_found);
8488   }
8489 }
8490 
8491 void CodeStubAssembler::TryHasOwnProperty(TNode<HeapObject> object,
8492                                           TNode<Map> map,
8493                                           TNode<Int32T> instance_type,
8494                                           TNode<Name> unique_name,
8495                                           Label* if_found, Label* if_not_found,
8496                                           Label* if_bailout) {
8497   Comment("TryHasOwnProperty");
8498   CSA_ASSERT(this, IsUniqueNameNoCachedIndex(unique_name));
8499   TVARIABLE(HeapObject, var_meta_storage);
8500   TVARIABLE(IntPtrT, var_name_index);
8501 
8502   Label if_found_global(this);
8503   TryLookupProperty(object, map, instance_type, unique_name, if_found, if_found,
8504                     &if_found_global, &var_meta_storage, &var_name_index,
8505                     if_not_found, if_bailout);
8506 
8507   BIND(&if_found_global);
8508   {
8509     TVARIABLE(Object, var_value);
8510     TVARIABLE(Uint32T, var_details);
8511     // Check if the property cell is not deleted.
8512     LoadPropertyFromGlobalDictionary(CAST(var_meta_storage.value()),
8513                                      var_name_index.value(), &var_details,
8514                                      &var_value, if_not_found);
8515     Goto(if_found);
8516   }
8517 }
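// A sketch of a typical caller (hypothetical labels, illustrative only):
//
//   Label has_property(this), no_property(this),
//       call_runtime(this, Label::kDeferred);
//   TryHasOwnProperty(object, map, instance_type, unique_name, &has_property,
//                     &no_property, &call_runtime);
//
// For the global object the property cell is loaded solely so that deleted
// cells (holding TheHole) end up in {if_not_found} rather than {if_found}.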
8518 
8519 TNode<Object> CodeStubAssembler::GetMethod(TNode<Context> context,
8520                                            TNode<Object> object,
8521                                            Handle<Name> name,
8522                                            Label* if_null_or_undefined) {
8523   TNode<Object> method = GetProperty(context, object, name);
8524 
8525   GotoIf(IsUndefined(method), if_null_or_undefined);
8526   GotoIf(IsNull(method), if_null_or_undefined);
8527 
8528   return method;
8529 }
8530 
8531 TNode<Object> CodeStubAssembler::GetIteratorMethod(
8532     TNode<Context> context, TNode<HeapObject> heap_obj,
8533     Label* if_iteratorundefined) {
8534   return GetMethod(context, heap_obj, isolate()->factory()->iterator_symbol(),
8535                    if_iteratorundefined);
8536 }
8537 
8538 void CodeStubAssembler::LoadPropertyFromFastObject(
8539     TNode<HeapObject> object, TNode<Map> map,
8540     TNode<DescriptorArray> descriptors, TNode<IntPtrT> name_index,
8541     TVariable<Uint32T>* var_details, TVariable<Object>* var_value) {
8542   TNode<Uint32T> details = LoadDetailsByKeyIndex(descriptors, name_index);
8543   *var_details = details;
8544 
8545   LoadPropertyFromFastObject(object, map, descriptors, name_index, details,
8546                              var_value);
8547 }
8548 
8549 void CodeStubAssembler::LoadPropertyFromFastObject(
8550     TNode<HeapObject> object, TNode<Map> map,
8551     TNode<DescriptorArray> descriptors, TNode<IntPtrT> name_index,
8552     TNode<Uint32T> details, TVariable<Object>* var_value) {
8553   Comment("[ LoadPropertyFromFastObject");
8554 
8555   TNode<Uint32T> location =
8556       DecodeWord32<PropertyDetails::LocationField>(details);
8557 
8558   Label if_in_field(this), if_in_descriptor(this), done(this);
8559   Branch(Word32Equal(location, Int32Constant(kField)), &if_in_field,
8560          &if_in_descriptor);
8561   BIND(&if_in_field);
8562   {
8563     TNode<IntPtrT> field_index =
8564         Signed(DecodeWordFromWord32<PropertyDetails::FieldIndexField>(details));
8565     TNode<Uint32T> representation =
8566         DecodeWord32<PropertyDetails::RepresentationField>(details);
8567 
8568     field_index =
8569         IntPtrAdd(field_index, LoadMapInobjectPropertiesStartInWords(map));
8570     TNode<IntPtrT> instance_size_in_words = LoadMapInstanceSizeInWords(map);
8571 
8572     Label if_inobject(this), if_backing_store(this);
8573     TVARIABLE(Float64T, var_double_value);
8574     Label rebox_double(this, &var_double_value);
8575     Branch(UintPtrLessThan(field_index, instance_size_in_words), &if_inobject,
8576            &if_backing_store);
8577     BIND(&if_inobject);
8578     {
8579       Comment("if_inobject");
8580       TNode<IntPtrT> field_offset = TimesTaggedSize(field_index);
8581 
8582       Label if_double(this), if_tagged(this);
8583       Branch(Word32NotEqual(representation,
8584                             Int32Constant(Representation::kDouble)),
8585              &if_tagged, &if_double);
8586       BIND(&if_tagged);
8587       {
8588         *var_value = LoadObjectField(object, field_offset);
8589         Goto(&done);
8590       }
8591       BIND(&if_double);
8592       {
8593         if (FLAG_unbox_double_fields) {
8594           var_double_value = LoadObjectField<Float64T>(object, field_offset);
8595         } else {
8596           TNode<HeapNumber> heap_number =
8597               CAST(LoadObjectField(object, field_offset));
8598           var_double_value = LoadHeapNumberValue(heap_number);
8599         }
8600         Goto(&rebox_double);
8601       }
8602     }
8603     BIND(&if_backing_store);
8604     {
8605       Comment("if_backing_store");
8606       TNode<HeapObject> properties = LoadFastProperties(CAST(object));
8607       field_index = Signed(IntPtrSub(field_index, instance_size_in_words));
8608       TNode<Object> value =
8609           LoadPropertyArrayElement(CAST(properties), field_index);
8610 
8611       Label if_double(this), if_tagged(this);
8612       Branch(Word32NotEqual(representation,
8613                             Int32Constant(Representation::kDouble)),
8614              &if_tagged, &if_double);
8615       BIND(&if_tagged);
8616       {
8617         *var_value = value;
8618         Goto(&done);
8619       }
8620       BIND(&if_double);
8621       {
8622         var_double_value = LoadHeapNumberValue(CAST(value));
8623         Goto(&rebox_double);
8624       }
8625     }
8626     BIND(&rebox_double);
8627     {
8628       Comment("rebox_double");
8629       TNode<HeapNumber> heap_number =
8630           AllocateHeapNumberWithValue(var_double_value.value());
8631       *var_value = heap_number;
8632       Goto(&done);
8633     }
8634   }
8635   BIND(&if_in_descriptor);
8636   {
8637     *var_value = LoadValueByKeyIndex(descriptors, name_index);
8638     Goto(&done);
8639   }
8640   BIND(&done);
8641 
8642   Comment("] LoadPropertyFromFastObject");
8643 }
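// Worked example of the field-index arithmetic above (numbers are
// illustrative): with in-object properties starting at word 3 of the instance
// and an instance size of 7 words, FieldIndexField value 2 gives
// field_index = 2 + 3 = 5 < 7, i.e. an in-object slot at byte offset
// 5 * kTaggedSize, while FieldIndexField value 5 gives 5 + 3 = 8 >= 7 and is
// therefore read from the out-of-line PropertyArray at slot 8 - 7 = 1.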
8644 
8645 void CodeStubAssembler::LoadPropertyFromNameDictionary(
8646     TNode<NameDictionary> dictionary, TNode<IntPtrT> name_index,
8647     TVariable<Uint32T>* var_details, TVariable<Object>* var_value) {
8648   Comment("LoadPropertyFromNameDictionary");
8649   *var_details = LoadDetailsByKeyIndex(dictionary, name_index);
8650   *var_value = LoadValueByKeyIndex(dictionary, name_index);
8651 
8652   Comment("] LoadPropertyFromNameDictionary");
8653 }
8654 
8655 void CodeStubAssembler::LoadPropertyFromGlobalDictionary(
8656     TNode<GlobalDictionary> dictionary, TNode<IntPtrT> name_index,
8657     TVariable<Uint32T>* var_details, TVariable<Object>* var_value,
8658     Label* if_deleted) {
8659   Comment("[ LoadPropertyFromGlobalDictionary");
8660   TNode<PropertyCell> property_cell =
8661       CAST(LoadFixedArrayElement(dictionary, name_index));
8662 
8663   TNode<Object> value =
8664       LoadObjectField(property_cell, PropertyCell::kValueOffset);
8665   GotoIf(TaggedEqual(value, TheHoleConstant()), if_deleted);
8666 
8667   *var_value = value;
8668 
8669   TNode<Uint32T> details = Unsigned(LoadAndUntagToWord32ObjectField(
8670       property_cell, PropertyCell::kPropertyDetailsRawOffset));
8671   *var_details = details;
8672 
8673   Comment("] LoadPropertyFromGlobalDictionary");
8674 }
8675 
8676 // |value| is the property backing store's contents, which is either a value or
8677 // an accessor pair, as specified by |details|. |holder| is a JSObject or a
8678 // PropertyCell (TODO: use UnionT). Returns either the original value, or the
8679 // result of the getter call.
8680 TNode<Object> CodeStubAssembler::CallGetterIfAccessor(
8681     TNode<Object> value, TNode<HeapObject> holder, TNode<Uint32T> details,
8682     TNode<Context> context, TNode<Object> receiver, Label* if_bailout,
8683     GetOwnPropertyMode mode) {
8684   TVARIABLE(Object, var_value, value);
8685   Label done(this), if_accessor_info(this, Label::kDeferred);
8686 
8687   TNode<Uint32T> kind = DecodeWord32<PropertyDetails::KindField>(details);
8688   GotoIf(Word32Equal(kind, Int32Constant(kData)), &done);
8689 
8690   // Accessor case.
8691   GotoIfNot(IsAccessorPair(CAST(value)), &if_accessor_info);
8692 
8693   // AccessorPair case.
8694   {
8695     if (mode == kCallJSGetter) {
8696       Label if_callable(this), if_function_template_info(this);
8697       TNode<AccessorPair> accessor_pair = CAST(value);
8698       TNode<HeapObject> getter =
8699           CAST(LoadObjectField(accessor_pair, AccessorPair::kGetterOffset));
8700       TNode<Map> getter_map = LoadMap(getter);
8701 
8702       GotoIf(IsCallableMap(getter_map), &if_callable);
8703       GotoIf(IsFunctionTemplateInfoMap(getter_map), &if_function_template_info);
8704 
8705       // Return undefined if the {getter} is not callable.
8706       var_value = UndefinedConstant();
8707       Goto(&done);
8708 
8709       BIND(&if_callable);
8710       {
8711         // Call the accessor.
8712         var_value = Call(context, getter, receiver);
8713         Goto(&done);
8714       }
8715 
8716       BIND(&if_function_template_info);
8717       {
8718         TNode<HeapObject> cached_property_name = LoadObjectField<HeapObject>(
8719             getter, FunctionTemplateInfo::kCachedPropertyNameOffset);
8720         GotoIfNot(IsTheHole(cached_property_name), if_bailout);
8721 
8722         TNode<NativeContext> creation_context =
8723             GetCreationContext(CAST(holder), if_bailout);
8724         var_value = CallBuiltin(
8725             Builtins::kCallFunctionTemplate_CheckAccessAndCompatibleReceiver,
8726             creation_context, getter, IntPtrConstant(0), receiver);
8727         Goto(&done);
8728       }
8729     } else {
8730       Goto(&done);
8731     }
8732   }
8733 
8734   // AccessorInfo case.
8735   BIND(&if_accessor_info);
8736   {
8737     TNode<AccessorInfo> accessor_info = CAST(value);
8738     Label if_array(this), if_function(this), if_wrapper(this);
8739 
8740     // Dispatch based on {holder} instance type.
8741     TNode<Map> holder_map = LoadMap(holder);
8742     TNode<Uint16T> holder_instance_type = LoadMapInstanceType(holder_map);
8743     GotoIf(IsJSArrayInstanceType(holder_instance_type), &if_array);
8744     GotoIf(IsJSFunctionInstanceType(holder_instance_type), &if_function);
8745     Branch(IsJSPrimitiveWrapperInstanceType(holder_instance_type), &if_wrapper,
8746            if_bailout);
8747 
8748     // JSArray AccessorInfo case.
8749     BIND(&if_array);
8750     {
8751       // We only deal with the "length" accessor on JSArray.
8752       GotoIfNot(IsLengthString(
8753                     LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
8754                 if_bailout);
8755       TNode<JSArray> array = CAST(holder);
8756       var_value = LoadJSArrayLength(array);
8757       Goto(&done);
8758     }
8759 
8760     // JSFunction AccessorInfo case.
8761     BIND(&if_function);
8762     {
8763       // We only deal with the "prototype" accessor on JSFunction here.
8764       GotoIfNot(IsPrototypeString(
8765                     LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
8766                 if_bailout);
8767 
8768       TNode<JSFunction> function = CAST(holder);
8769       GotoIfPrototypeRequiresRuntimeLookup(function, holder_map, if_bailout);
8770       var_value = LoadJSFunctionPrototype(function, if_bailout);
8771       Goto(&done);
8772     }
8773 
8774     // JSPrimitiveWrapper AccessorInfo case.
8775     BIND(&if_wrapper);
8776     {
8777       // We only deal with the "length" accessor on JSPrimitiveWrapper string
8778       // wrappers.
8779       GotoIfNot(IsLengthString(
8780                     LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
8781                 if_bailout);
8782       TNode<Object> holder_value = LoadJSPrimitiveWrapperValue(CAST(holder));
8783       GotoIfNot(TaggedIsNotSmi(holder_value), if_bailout);
8784       GotoIfNot(IsString(CAST(holder_value)), if_bailout);
8785       var_value = LoadStringLengthAsSmi(CAST(holder_value));
8786       Goto(&done);
8787     }
8788   }
8789 
8790   BIND(&done);
8791   return var_value.value();
8792 }
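// Note on {mode}: with kCallJSGetter an AccessorPair getter is actually
// invoked (JS getters via Call(), API getters via the
// CallFunctionTemplate_CheckAccessAndCompatibleReceiver builtin), whereas the
// other mode leaves the AccessorPair itself in the result so callers that
// need the raw pair (e.g. property-descriptor style code) can inspect it.
// AccessorInfo (C++) accessors are only handled for the whitelisted cases
// above (JSArray length, JSFunction prototype, String wrapper length);
// everything else jumps to {if_bailout}.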
8793 
8794 void CodeStubAssembler::TryGetOwnProperty(
8795     TNode<Context> context, TNode<Object> receiver, TNode<JSReceiver> object,
8796     TNode<Map> map, TNode<Int32T> instance_type, TNode<Name> unique_name,
8797     Label* if_found_value, TVariable<Object>* var_value, Label* if_not_found,
8798     Label* if_bailout) {
8799   TryGetOwnProperty(context, receiver, object, map, instance_type, unique_name,
8800                     if_found_value, var_value, nullptr, nullptr, if_not_found,
8801                     if_bailout, kCallJSGetter);
8802 }
8803 
8804 void CodeStubAssembler::TryGetOwnProperty(
8805     TNode<Context> context, TNode<Object> receiver, TNode<JSReceiver> object,
8806     TNode<Map> map, TNode<Int32T> instance_type, TNode<Name> unique_name,
8807     Label* if_found_value, TVariable<Object>* var_value,
8808     TVariable<Uint32T>* var_details, TVariable<Object>* var_raw_value,
8809     Label* if_not_found, Label* if_bailout, GetOwnPropertyMode mode) {
8810   DCHECK_EQ(MachineRepresentation::kTagged, var_value->rep());
8811   Comment("TryGetOwnProperty");
8812   CSA_ASSERT(this, IsUniqueNameNoCachedIndex(unique_name));
8813   TVARIABLE(HeapObject, var_meta_storage);
8814   TVARIABLE(IntPtrT, var_entry);
8815 
8816   Label if_found_fast(this), if_found_dict(this), if_found_global(this);
8817 
8818   TVARIABLE(Uint32T, local_var_details);
8819   if (!var_details) {
8820     var_details = &local_var_details;
8821   }
8822   Label if_found(this);
8823 
8824   TryLookupProperty(object, map, instance_type, unique_name, &if_found_fast,
8825                     &if_found_dict, &if_found_global, &var_meta_storage,
8826                     &var_entry, if_not_found, if_bailout);
8827   BIND(&if_found_fast);
8828   {
8829     TNode<DescriptorArray> descriptors = CAST(var_meta_storage.value());
8830     TNode<IntPtrT> name_index = var_entry.value();
8831 
8832     LoadPropertyFromFastObject(object, map, descriptors, name_index,
8833                                var_details, var_value);
8834     Goto(&if_found);
8835   }
8836   BIND(&if_found_dict);
8837   {
8838     TNode<NameDictionary> dictionary = CAST(var_meta_storage.value());
8839     TNode<IntPtrT> entry = var_entry.value();
8840     LoadPropertyFromNameDictionary(dictionary, entry, var_details, var_value);
8841     Goto(&if_found);
8842   }
8843   BIND(&if_found_global);
8844   {
8845     TNode<GlobalDictionary> dictionary = CAST(var_meta_storage.value());
8846     TNode<IntPtrT> entry = var_entry.value();
8847 
8848     LoadPropertyFromGlobalDictionary(dictionary, entry, var_details, var_value,
8849                                      if_not_found);
8850     Goto(&if_found);
8851   }
8852   // Here we have details and value which could be an accessor.
8853   BIND(&if_found);
8854   {
8855     // TODO(ishell): Execute C++ accessor in case of accessor info
8856     if (var_raw_value) {
8857       *var_raw_value = *var_value;
8858     }
8859     TNode<Object> value =
8860         CallGetterIfAccessor(var_value->value(), object, var_details->value(),
8861                              context, receiver, if_bailout, mode);
8862     *var_value = value;
8863     Goto(if_found_value);
8864   }
8865 }
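// Illustrative caller of the two-label form above (hypothetical labels, not
// taken from this file):
//
//   TVARIABLE(Object, var_value);
//   Label if_found(this), if_not_found(this), bailout(this, Label::kDeferred);
//   TryGetOwnProperty(context, receiver, CAST(receiver), map, instance_type,
//                     unique_name, &if_found, &var_value, &if_not_found,
//                     &bailout);
//   BIND(&if_found);
//   // {var_value} holds the data value or the result of calling the getter.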
8866 
8867 void CodeStubAssembler::TryLookupElement(
8868     TNode<HeapObject> object, TNode<Map> map, SloppyTNode<Int32T> instance_type,
8869     SloppyTNode<IntPtrT> intptr_index, Label* if_found, Label* if_absent,
8870     Label* if_not_found, Label* if_bailout) {
8871   // Handle special objects in runtime.
8872   GotoIf(IsSpecialReceiverInstanceType(instance_type), if_bailout);
8873 
8874   TNode<Int32T> elements_kind = LoadMapElementsKind(map);
8875 
8876   // TODO(verwaest): Support other elements kinds as well.
8877   Label if_isobjectorsmi(this), if_isdouble(this), if_isdictionary(this),
8878       if_isfaststringwrapper(this), if_isslowstringwrapper(this), if_oob(this),
8879       if_typedarray(this);
8880   // clang-format off
8881   int32_t values[] = {
8882       // Handled by {if_isobjectorsmi}.
8883       PACKED_SMI_ELEMENTS, HOLEY_SMI_ELEMENTS, PACKED_ELEMENTS, HOLEY_ELEMENTS,
8884       PACKED_NONEXTENSIBLE_ELEMENTS, PACKED_SEALED_ELEMENTS,
8885       HOLEY_NONEXTENSIBLE_ELEMENTS, HOLEY_SEALED_ELEMENTS,
8886       PACKED_FROZEN_ELEMENTS, HOLEY_FROZEN_ELEMENTS,
8887       // Handled by {if_isdouble}.
8888       PACKED_DOUBLE_ELEMENTS, HOLEY_DOUBLE_ELEMENTS,
8889       // Handled by {if_isdictionary}.
8890       DICTIONARY_ELEMENTS,
8891       // Handled by {if_isfaststringwrapper}.
8892       FAST_STRING_WRAPPER_ELEMENTS,
8893       // Handled by {if_isslowstringwrapper}.
8894       SLOW_STRING_WRAPPER_ELEMENTS,
8895       // Handled by {if_not_found}.
8896       NO_ELEMENTS,
8897       // Handled by {if_typedarray}.
8898       UINT8_ELEMENTS,
8899       INT8_ELEMENTS,
8900       UINT16_ELEMENTS,
8901       INT16_ELEMENTS,
8902       UINT32_ELEMENTS,
8903       INT32_ELEMENTS,
8904       FLOAT32_ELEMENTS,
8905       FLOAT64_ELEMENTS,
8906       UINT8_CLAMPED_ELEMENTS,
8907       BIGUINT64_ELEMENTS,
8908       BIGINT64_ELEMENTS,
8909   };
8910   Label* labels[] = {
8911       &if_isobjectorsmi, &if_isobjectorsmi, &if_isobjectorsmi,
8912       &if_isobjectorsmi, &if_isobjectorsmi, &if_isobjectorsmi,
8913       &if_isobjectorsmi, &if_isobjectorsmi, &if_isobjectorsmi,
8914       &if_isobjectorsmi,
8915       &if_isdouble, &if_isdouble,
8916       &if_isdictionary,
8917       &if_isfaststringwrapper,
8918       &if_isslowstringwrapper,
8919       if_not_found,
8920       &if_typedarray,
8921       &if_typedarray,
8922       &if_typedarray,
8923       &if_typedarray,
8924       &if_typedarray,
8925       &if_typedarray,
8926       &if_typedarray,
8927       &if_typedarray,
8928       &if_typedarray,
8929       &if_typedarray,
8930       &if_typedarray,
8931   };
8932   // clang-format on
8933   STATIC_ASSERT(arraysize(values) == arraysize(labels));
8934   Switch(elements_kind, if_bailout, values, labels, arraysize(values));
8935 
8936   BIND(&if_isobjectorsmi);
8937   {
8938     TNode<FixedArray> elements = CAST(LoadElements(CAST(object)));
8939     TNode<IntPtrT> length = LoadAndUntagFixedArrayBaseLength(elements);
8940 
8941     GotoIfNot(UintPtrLessThan(intptr_index, length), &if_oob);
8942 
8943     TNode<Object> element = UnsafeLoadFixedArrayElement(elements, intptr_index);
8944     TNode<Oddball> the_hole = TheHoleConstant();
8945     Branch(TaggedEqual(element, the_hole), if_not_found, if_found);
8946   }
8947   BIND(&if_isdouble);
8948   {
8949     TNode<FixedArrayBase> elements = LoadElements(CAST(object));
8950     TNode<IntPtrT> length = LoadAndUntagFixedArrayBaseLength(elements);
8951 
8952     GotoIfNot(UintPtrLessThan(intptr_index, length), &if_oob);
8953 
8954     // Check if the element is a double hole, but don't load it.
8955     LoadFixedDoubleArrayElement(CAST(elements), intptr_index, if_not_found,
8956                                 MachineType::None());
8957     Goto(if_found);
8958   }
8959   BIND(&if_isdictionary);
8960   {
8961     // Negative and too-large keys must be converted to property names.
8962     if (Is64()) {
8963       GotoIf(UintPtrLessThan(IntPtrConstant(JSArray::kMaxArrayIndex),
8964                              intptr_index),
8965              if_bailout);
8966     } else {
8967       GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), if_bailout);
8968     }
8969 
8970     TVARIABLE(IntPtrT, var_entry);
8971     TNode<NumberDictionary> elements = CAST(LoadElements(CAST(object)));
8972     NumberDictionaryLookup(elements, intptr_index, if_found, &var_entry,
8973                            if_not_found);
8974   }
8975   BIND(&if_isfaststringwrapper);
8976   {
8977     TNode<String> string = CAST(LoadJSPrimitiveWrapperValue(CAST(object)));
8978     TNode<IntPtrT> length = LoadStringLengthAsWord(string);
8979     GotoIf(UintPtrLessThan(intptr_index, length), if_found);
8980     Goto(&if_isobjectorsmi);
8981   }
8982   BIND(&if_isslowstringwrapper);
8983   {
8984     TNode<String> string = CAST(LoadJSPrimitiveWrapperValue(CAST(object)));
8985     TNode<IntPtrT> length = LoadStringLengthAsWord(string);
8986     GotoIf(UintPtrLessThan(intptr_index, length), if_found);
8987     Goto(&if_isdictionary);
8988   }
8989   BIND(&if_typedarray);
8990   {
8991     TNode<JSArrayBuffer> buffer = LoadJSArrayBufferViewBuffer(CAST(object));
8992     GotoIf(IsDetachedBuffer(buffer), if_absent);
8993 
8994     TNode<UintPtrT> length = LoadJSTypedArrayLength(CAST(object));
8995     Branch(UintPtrLessThan(intptr_index, length), if_found, if_absent);
8996   }
8997   BIND(&if_oob);
8998   {
8999     // Positive OOB indices mean "not found"; negative indices and indices
9000     // outside the array index range must be converted to property names.
9001     if (Is64()) {
9002       GotoIf(UintPtrLessThan(IntPtrConstant(JSArray::kMaxArrayIndex),
9003                              intptr_index),
9004              if_bailout);
9005     } else {
9006       GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), if_bailout);
9007     }
9008     Goto(if_not_found);
9009   }
9010 }
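// Element lookup on String wrappers above first checks the wrapped string:
// indices below the string length are always "found", larger indices fall
// through to the wrapper's own elements (fast or dictionary). For typed
// arrays a detached buffer makes every index "absent" rather than "not
// found", since such indices must not continue the prototype walk.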
9011 
9012 void CodeStubAssembler::BranchIfMaybeSpecialIndex(TNode<String> name_string,
9013                                                   Label* if_maybe_special_index,
9014                                                   Label* if_not_special_index) {
9015   // TODO(cwhan.tunz): Implement fast cases more.
9016 
9017   // If a name is empty or too long, it's not a special index
9018   // Max length of canonical double: -X.XXXXXXXXXXXXXXXXX-eXXX
9019   const int kBufferSize = 24;
9020   TNode<Smi> string_length = LoadStringLengthAsSmi(name_string);
9021   GotoIf(SmiEqual(string_length, SmiConstant(0)), if_not_special_index);
9022   GotoIf(SmiGreaterThan(string_length, SmiConstant(kBufferSize)),
9023          if_not_special_index);
9024 
9025   // If the first character of name is not a digit or '-', or we can't match it
9026   // to Infinity or NaN, then this is not a special index.
9027   TNode<Int32T> first_char = StringCharCodeAt(name_string, UintPtrConstant(0));
9028   // If the name starts with '-', it can be a negative index.
9029   GotoIf(Word32Equal(first_char, Int32Constant('-')), if_maybe_special_index);
9030   // If the name starts with 'I', it can be "Infinity".
9031   GotoIf(Word32Equal(first_char, Int32Constant('I')), if_maybe_special_index);
9032   // If the name starts with 'N', it can be "NaN".
9033   GotoIf(Word32Equal(first_char, Int32Constant('N')), if_maybe_special_index);
9034   // Finally, if the first character is not a digit either, then we are sure
9035   // that the name is not a special index.
9036   GotoIf(Uint32LessThan(first_char, Int32Constant('0')), if_not_special_index);
9037   GotoIf(Uint32LessThan(Int32Constant('9'), first_char), if_not_special_index);
9038   Goto(if_maybe_special_index);
9039 }
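// Illustrative classification under the checks above: "-0", "-1", "NaN",
// "Infinity", "1.5" and "4294967296" all reach {if_maybe_special_index},
// while "", "foo" or any string longer than kBufferSize characters goes
// straight to {if_not_special_index}.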
9040 
9041 void CodeStubAssembler::TryPrototypeChainLookup(
9042     TNode<Object> receiver, TNode<Object> object_arg, TNode<Object> key,
9043     const LookupPropertyInHolder& lookup_property_in_holder,
9044     const LookupElementInHolder& lookup_element_in_holder, Label* if_end,
9045     Label* if_bailout, Label* if_proxy) {
9046   // Ensure receiver is JSReceiver, otherwise bailout.
9047   GotoIf(TaggedIsSmi(receiver), if_bailout);
9048   TNode<HeapObject> object = CAST(object_arg);
9049 
9050   TNode<Map> map = LoadMap(object);
9051   TNode<Uint16T> instance_type = LoadMapInstanceType(map);
9052   {
9053     Label if_objectisreceiver(this);
9054     STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
9055     STATIC_ASSERT(FIRST_JS_RECEIVER_TYPE == JS_PROXY_TYPE);
9056     Branch(IsJSReceiverInstanceType(instance_type), &if_objectisreceiver,
9057            if_bailout);
9058     BIND(&if_objectisreceiver);
9059 
9060     GotoIf(InstanceTypeEqual(instance_type, JS_PROXY_TYPE), if_proxy);
9061   }
9062 
9063   TVARIABLE(IntPtrT, var_index);
9064   TVARIABLE(Name, var_unique);
9065 
9066   Label if_keyisindex(this), if_iskeyunique(this);
9067   TryToName(key, &if_keyisindex, &var_index, &if_iskeyunique, &var_unique,
9068             if_bailout);
9069 
9070   BIND(&if_iskeyunique);
9071   {
9072     TVARIABLE(HeapObject, var_holder, object);
9073     TVARIABLE(Map, var_holder_map, map);
9074     TVARIABLE(Int32T, var_holder_instance_type, instance_type);
9075 
9076     Label loop(this, {&var_holder, &var_holder_map, &var_holder_instance_type});
9077     Goto(&loop);
9078     BIND(&loop);
9079     {
9080       TNode<Map> holder_map = var_holder_map.value();
9081       TNode<Int32T> holder_instance_type = var_holder_instance_type.value();
9082 
9083       Label next_proto(this), check_integer_indexed_exotic(this);
9084       lookup_property_in_holder(CAST(receiver), var_holder.value(), holder_map,
9085                                 holder_instance_type, var_unique.value(),
9086                                 &check_integer_indexed_exotic, if_bailout);
9087 
9088       BIND(&check_integer_indexed_exotic);
9089       {
9090         // Bailout if it can be an integer indexed exotic case.
9091         GotoIfNot(InstanceTypeEqual(holder_instance_type, JS_TYPED_ARRAY_TYPE),
9092                   &next_proto);
9093         GotoIfNot(IsString(var_unique.value()), &next_proto);
9094         BranchIfMaybeSpecialIndex(CAST(var_unique.value()), if_bailout,
9095                                   &next_proto);
9096       }
9097 
9098       BIND(&next_proto);
9099 
9100       TNode<HeapObject> proto = LoadMapPrototype(holder_map);
9101 
9102       GotoIf(IsNull(proto), if_end);
9103 
9104       TNode<Map> map = LoadMap(proto);
9105       TNode<Uint16T> instance_type = LoadMapInstanceType(map);
9106 
9107       var_holder = proto;
9108       var_holder_map = map;
9109       var_holder_instance_type = instance_type;
9110       Goto(&loop);
9111     }
9112   }
9113   BIND(&if_keyisindex);
9114   {
9115     TVARIABLE(HeapObject, var_holder, object);
9116     TVARIABLE(Map, var_holder_map, map);
9117     TVARIABLE(Int32T, var_holder_instance_type, instance_type);
9118 
9119     Label loop(this, {&var_holder, &var_holder_map, &var_holder_instance_type});
9120     Goto(&loop);
9121     BIND(&loop);
9122     {
9123       Label next_proto(this);
9124       lookup_element_in_holder(CAST(receiver), var_holder.value(),
9125                                var_holder_map.value(),
9126                                var_holder_instance_type.value(),
9127                                var_index.value(), &next_proto, if_bailout);
9128       BIND(&next_proto);
9129 
9130       TNode<HeapObject> proto = LoadMapPrototype(var_holder_map.value());
9131 
9132       GotoIf(IsNull(proto), if_end);
9133 
9134       TNode<Map> map = LoadMap(proto);
9135       TNode<Uint16T> instance_type = LoadMapInstanceType(map);
9136 
9137       var_holder = proto;
9138       var_holder_map = map;
9139       var_holder_instance_type = instance_type;
9140       Goto(&loop);
9141     }
9142   }
9143 }
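// Both loops above perform the same prototype walk and differ only in the
// per-holder callback: a named-property lookup for unique names and an
// element lookup for integer indices. Callers (for example HasProperty- or
// GetProperty-style builtins) supply those callbacks as lambdas that jump to
// their own "found" labels and use the provided continuation label to move
// on to the next holder; the extra typed-array check exists because names
// that may be canonical numeric strings must be resolved by the runtime on
// integer-indexed exotic objects.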
9144 
9145 TNode<Oddball> CodeStubAssembler::HasInPrototypeChain(TNode<Context> context,
9146                                                       TNode<HeapObject> object,
9147                                                       TNode<Object> prototype) {
9148   TVARIABLE(Oddball, var_result);
9149   Label return_false(this), return_true(this),
9150       return_runtime(this, Label::kDeferred), return_result(this);
9151 
9152   // Loop through the prototype chain looking for the {prototype}.
9153   TVARIABLE(Map, var_object_map, LoadMap(object));
9154   Label loop(this, &var_object_map);
9155   Goto(&loop);
9156   BIND(&loop);
9157   {
9158     // Check if we can determine the prototype directly from the {object_map}.
9159     Label if_objectisdirect(this), if_objectisspecial(this, Label::kDeferred);
9160     TNode<Map> object_map = var_object_map.value();
9161     TNode<Uint16T> object_instance_type = LoadMapInstanceType(object_map);
9162     Branch(IsSpecialReceiverInstanceType(object_instance_type),
9163            &if_objectisspecial, &if_objectisdirect);
9164     BIND(&if_objectisspecial);
9165     {
9166       // The {object_map} is a special receiver map or a primitive map, check
9167       // if we need to use the if_objectisspecial path in the runtime.
9168       GotoIf(InstanceTypeEqual(object_instance_type, JS_PROXY_TYPE),
9169              &return_runtime);
9170       TNode<Int32T> object_bitfield = LoadMapBitField(object_map);
9171       int mask = Map::Bits1::HasNamedInterceptorBit::kMask |
9172                  Map::Bits1::IsAccessCheckNeededBit::kMask;
9173       Branch(IsSetWord32(object_bitfield, mask), &return_runtime,
9174              &if_objectisdirect);
9175     }
9176     BIND(&if_objectisdirect);
9177 
9178     // Check the current {object} prototype.
9179     TNode<HeapObject> object_prototype = LoadMapPrototype(object_map);
9180     GotoIf(IsNull(object_prototype), &return_false);
9181     GotoIf(TaggedEqual(object_prototype, prototype), &return_true);
9182 
9183     // Continue with the prototype.
9184     CSA_ASSERT(this, TaggedIsNotSmi(object_prototype));
9185     var_object_map = LoadMap(object_prototype);
9186     Goto(&loop);
9187   }
9188 
9189   BIND(&return_true);
9190   var_result = TrueConstant();
9191   Goto(&return_result);
9192 
9193   BIND(&return_false);
9194   var_result = FalseConstant();
9195   Goto(&return_result);
9196 
9197   BIND(&return_runtime);
9198   {
9199     // Fallback to the runtime implementation.
9200     var_result = CAST(
9201         CallRuntime(Runtime::kHasInPrototypeChain, context, object, prototype));
9202   }
9203   Goto(&return_result);
9204 
9205   BIND(&return_result);
9206   return var_result.value();
9207 }
9208 
9209 TNode<Oddball> CodeStubAssembler::OrdinaryHasInstance(
9210     TNode<Context> context, TNode<Object> callable_maybe_smi,
9211     TNode<Object> object_maybe_smi) {
9212   TVARIABLE(Oddball, var_result);
9213   Label return_runtime(this, Label::kDeferred), return_result(this);
9214 
9215   GotoIfForceSlowPath(&return_runtime);
9216 
9217   // Goto runtime if {object} is a Smi.
9218   GotoIf(TaggedIsSmi(object_maybe_smi), &return_runtime);
9219 
9220   // Goto runtime if {callable} is a Smi.
9221   GotoIf(TaggedIsSmi(callable_maybe_smi), &return_runtime);
9222 
9223   {
9224     // Load map of {callable}.
9225     TNode<HeapObject> object = CAST(object_maybe_smi);
9226     TNode<HeapObject> callable = CAST(callable_maybe_smi);
9227     TNode<Map> callable_map = LoadMap(callable);
9228 
9229     // Goto runtime if {callable} is not a JSFunction.
9230     TNode<Uint16T> callable_instance_type = LoadMapInstanceType(callable_map);
9231     GotoIfNot(InstanceTypeEqual(callable_instance_type, JS_FUNCTION_TYPE),
9232               &return_runtime);
9233 
9234     GotoIfPrototypeRequiresRuntimeLookup(CAST(callable), callable_map,
9235                                          &return_runtime);
9236 
9237     // Get the "prototype" (or initial map) of the {callable}.
9238     TNode<HeapObject> callable_prototype = LoadObjectField<HeapObject>(
9239         callable, JSFunction::kPrototypeOrInitialMapOffset);
9240     {
9241       Label no_initial_map(this), walk_prototype_chain(this);
9242       TVARIABLE(HeapObject, var_callable_prototype, callable_prototype);
9243 
9244       // Resolve the "prototype" if the {callable} has an initial map.
9245       GotoIfNot(IsMap(callable_prototype), &no_initial_map);
9246       var_callable_prototype = LoadObjectField<HeapObject>(
9247           callable_prototype, Map::kPrototypeOffset);
9248       Goto(&walk_prototype_chain);
9249 
9250       BIND(&no_initial_map);
9251       // {callable_prototype} is the hole if the "prototype" property hasn't
9252       // been requested so far.
9253       Branch(TaggedEqual(callable_prototype, TheHoleConstant()),
9254              &return_runtime, &walk_prototype_chain);
9255 
9256       BIND(&walk_prototype_chain);
9257       callable_prototype = var_callable_prototype.value();
9258     }
9259 
9260     // Loop through the prototype chain looking for the {callable} prototype.
9261     var_result = HasInPrototypeChain(context, object, callable_prototype);
9262     Goto(&return_result);
9263   }
9264 
9265   BIND(&return_runtime);
9266   {
9267     // Fallback to the runtime implementation.
9268     var_result = CAST(CallRuntime(Runtime::kOrdinaryHasInstance, context,
9269                                   callable_maybe_smi, object_maybe_smi));
9270   }
9271   Goto(&return_result);
9272 
9273   BIND(&return_result);
9274   return var_result.value();
9275 }
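// In JS terms this is the default OrdinaryHasInstance behaviour of
// `instanceof`, e.g. (illustrative):
//
//   function C() {}
//   const o = new C();
//   o instanceof C;     // true: C.prototype is on o's prototype chain.
//   ({}) instanceof C;  // false: C.prototype is never reached.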
9276 
9277 template <typename TIndex>
9278 TNode<IntPtrT> CodeStubAssembler::ElementOffsetFromIndex(
9279     TNode<TIndex> index_node, ElementsKind kind, int base_size) {
9280   // TODO(v8:9708): Remove IntPtrT variant in favor of UintPtrT.
9281   static_assert(std::is_same<TIndex, Smi>::value ||
9282                     std::is_same<TIndex, TaggedIndex>::value ||
9283                     std::is_same<TIndex, IntPtrT>::value ||
9284                     std::is_same<TIndex, UintPtrT>::value,
9285                 "Only Smi, UintPtrT or IntPtrT index nodes are allowed");
9286   int element_size_shift = ElementsKindToShiftSize(kind);
9287   int element_size = 1 << element_size_shift;
9288   intptr_t index = 0;
9289   TNode<IntPtrT> intptr_index_node;
9290   bool constant_index = false;
9291   if (std::is_same<TIndex, Smi>::value) {
9292     TNode<Smi> smi_index_node = ReinterpretCast<Smi>(index_node);
9293     int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
9294     element_size_shift -= kSmiShiftBits;
9295     Smi smi_index;
9296     constant_index = ToSmiConstant(smi_index_node, &smi_index);
9297     if (constant_index) {
9298       index = smi_index.value();
9299     } else {
9300       if (COMPRESS_POINTERS_BOOL) {
9301         smi_index_node = NormalizeSmiIndex(smi_index_node);
9302       }
9303     }
9304     intptr_index_node = BitcastTaggedToWordForTagAndSmiBits(smi_index_node);
9305   } else if (std::is_same<TIndex, TaggedIndex>::value) {
9306     TNode<TaggedIndex> tagged_index_node =
9307         ReinterpretCast<TaggedIndex>(index_node);
9308     element_size_shift -= kSmiTagSize;
9309     intptr_index_node = BitcastTaggedToWordForTagAndSmiBits(tagged_index_node);
9310     constant_index = ToIntPtrConstant(intptr_index_node, &index);
9311   } else {
9312     intptr_index_node = ReinterpretCast<IntPtrT>(index_node);
9313     constant_index = ToIntPtrConstant(intptr_index_node, &index);
9314   }
9315   if (constant_index) {
9316     return IntPtrConstant(base_size + element_size * index);
9317   }
9318 
9319   TNode<IntPtrT> shifted_index =
9320       (element_size_shift == 0)
9321           ? intptr_index_node
9322           : ((element_size_shift > 0)
9323                  ? WordShl(intptr_index_node,
9324                            IntPtrConstant(element_size_shift))
9325                  : WordSar(intptr_index_node,
9326                            IntPtrConstant(-element_size_shift)));
9327   return IntPtrAdd(IntPtrConstant(base_size), Signed(shifted_index));
9328 }
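// For non-constant untagged indices the computation above amounts to
//   offset = base_size + (index << ElementsKindToShiftSize(kind)),
// e.g. index 5 in a HOLEY_DOUBLE_ELEMENTS array (shift 3) with base_size 0
// yields byte offset 40 (illustrative). Smi and TaggedIndex inputs fold the
// tag shift into the element shift instead of untagging first, and constant
// indices are folded into a single IntPtrConstant.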
9329 
9330 // Instantiate ElementOffsetFromIndex for Smi, TaggedIndex and IntPtrT.
9331 template V8_EXPORT_PRIVATE TNode<IntPtrT>
9332 CodeStubAssembler::ElementOffsetFromIndex<Smi>(TNode<Smi> index_node,
9333                                                ElementsKind kind,
9334                                                int base_size);
9335 template V8_EXPORT_PRIVATE TNode<IntPtrT>
9336 CodeStubAssembler::ElementOffsetFromIndex<TaggedIndex>(
9337     TNode<TaggedIndex> index_node, ElementsKind kind, int base_size);
9338 template V8_EXPORT_PRIVATE TNode<IntPtrT>
9339 CodeStubAssembler::ElementOffsetFromIndex<IntPtrT>(TNode<IntPtrT> index_node,
9340                                                    ElementsKind kind,
9341                                                    int base_size);
9342 
9343 TNode<BoolT> CodeStubAssembler::IsOffsetInBounds(SloppyTNode<IntPtrT> offset,
9344                                                  SloppyTNode<IntPtrT> length,
9345                                                  int header_size,
9346                                                  ElementsKind kind) {
9347   // Make sure we point to the last field.
9348   int element_size = 1 << ElementsKindToShiftSize(kind);
9349   int correction = header_size - kHeapObjectTag - element_size;
9350   TNode<IntPtrT> last_offset = ElementOffsetFromIndex(length, kind, correction);
9351   return IntPtrLessThanOrEqual(offset, last_offset);
9352 }
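// Worked example (illustrative, assuming 8-byte tagged values): for a
// FixedArray with a 16-byte header the correction is 16 - 1 - 8 = 7, so the
// check becomes offset <= 7 + length * 8, which is exactly the tag-adjusted
// offset of the last element; any larger offset is out of bounds.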
9353 
9354 TNode<HeapObject> CodeStubAssembler::LoadFeedbackCellValue(
9355     TNode<JSFunction> closure) {
9356   TNode<FeedbackCell> feedback_cell =
9357       LoadObjectField<FeedbackCell>(closure, JSFunction::kFeedbackCellOffset);
9358   return LoadObjectField<HeapObject>(feedback_cell, FeedbackCell::kValueOffset);
9359 }
9360 
9361 TNode<HeapObject> CodeStubAssembler::LoadFeedbackVector(
9362     TNode<JSFunction> closure) {
9363   TVARIABLE(HeapObject, maybe_vector, LoadFeedbackCellValue(closure));
9364   Label done(this);
9365 
9366   // If the closure doesn't have a feedback vector allocated yet, return
9367   // undefined. FeedbackCell can contain Undefined / FixedArray (for lazy
9368   // allocations) / FeedbackVector.
9369   GotoIf(IsFeedbackVector(maybe_vector.value()), &done);
9370 
9371   // In all other cases return Undefined.
9372   maybe_vector = UndefinedConstant();
9373   Goto(&done);
9374 
9375   BIND(&done);
9376   return maybe_vector.value();
9377 }
9378 
9379 TNode<ClosureFeedbackCellArray> CodeStubAssembler::LoadClosureFeedbackArray(
9380     TNode<JSFunction> closure) {
9381   TVARIABLE(HeapObject, feedback_cell_array, LoadFeedbackCellValue(closure));
9382   Label end(this);
9383 
9384   // When feedback vectors are not yet allocated, the feedback cell contains
9385   // an array of feedback cells used when creating closures.
9386   GotoIf(HasInstanceType(feedback_cell_array.value(),
9387                          CLOSURE_FEEDBACK_CELL_ARRAY_TYPE),
9388          &end);
9389 
9390   // Load FeedbackCellArray from feedback vector.
9391   TNode<FeedbackVector> vector = CAST(feedback_cell_array.value());
9392   feedback_cell_array = CAST(
9393       LoadObjectField(vector, FeedbackVector::kClosureFeedbackCellArrayOffset));
9394   Goto(&end);
9395 
9396   BIND(&end);
9397   return CAST(feedback_cell_array.value());
9398 }
9399 
9400 TNode<FeedbackVector> CodeStubAssembler::LoadFeedbackVectorForStub() {
9401   TNode<JSFunction> function =
9402       CAST(LoadFromParentFrame(StandardFrameConstants::kFunctionOffset));
9403   return CAST(LoadFeedbackVector(function));
9404 }
9405 
9406 void CodeStubAssembler::UpdateFeedback(TNode<Smi> feedback,
9407                                        TNode<HeapObject> maybe_vector,
9408                                        TNode<UintPtrT> slot_id) {
9409   Label end(this);
9410   // If feedback_vector is not valid, then nothing to do.
9411   GotoIf(IsUndefined(maybe_vector), &end);
9412 
9413   // This method is used for binary op and compare feedback. These
9414   // feedback slots are initialized with Smi zero, so we can simply OR
9415   // in the new feedback.
9416   TNode<FeedbackVector> feedback_vector = CAST(maybe_vector);
9417   TNode<MaybeObject> feedback_element =
9418       LoadFeedbackVectorSlot(feedback_vector, slot_id);
9419   TNode<Smi> previous_feedback = CAST(feedback_element);
9420   TNode<Smi> combined_feedback = SmiOr(previous_feedback, feedback);
9421 
9422   GotoIf(SmiEqual(previous_feedback, combined_feedback), &end);
9423   {
9424     StoreFeedbackVectorSlot(feedback_vector, slot_id, combined_feedback,
9425                             SKIP_WRITE_BARRIER);
9426     ReportFeedbackUpdate(feedback_vector, slot_id, "UpdateFeedback");
9427     Goto(&end);
9428   }
9429 
9430   BIND(&end);
9431 }
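// Feedback bits form a monotonic lattice: OR-ing in new feedback never drops
// previously recorded kinds, and the early SmiEqual() exit avoids redundant
// stores. SKIP_WRITE_BARRIER is safe here because the combined feedback is
// always a Smi.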
9432 
9433 void CodeStubAssembler::ReportFeedbackUpdate(
9434     TNode<FeedbackVector> feedback_vector, SloppyTNode<UintPtrT> slot_id,
9435     const char* reason) {
9436   // Reset profiler ticks.
9437   StoreObjectFieldNoWriteBarrier(
9438       feedback_vector, FeedbackVector::kProfilerTicksOffset, Int32Constant(0));
9439 
9440 #ifdef V8_TRACE_FEEDBACK_UPDATES
9441   // Trace the update.
9442   CallRuntime(Runtime::kInterpreterTraceUpdateFeedback, NoContextConstant(),
9443               LoadFromParentFrame(StandardFrameConstants::kFunctionOffset),
9444               SmiTag(Signed(slot_id)), StringConstant(reason));
9445 #endif  // V8_TRACE_FEEDBACK_UPDATES
9446 }
9447 
9448 void CodeStubAssembler::OverwriteFeedback(TVariable<Smi>* existing_feedback,
9449                                           int new_feedback) {
9450   if (existing_feedback == nullptr) return;
9451   *existing_feedback = SmiConstant(new_feedback);
9452 }
9453 
9454 void CodeStubAssembler::CombineFeedback(TVariable<Smi>* existing_feedback,
9455                                         int feedback) {
9456   if (existing_feedback == nullptr) return;
9457   *existing_feedback = SmiOr(existing_feedback->value(), SmiConstant(feedback));
9458 }
9459 
9460 void CodeStubAssembler::CombineFeedback(TVariable<Smi>* existing_feedback,
9461                                         TNode<Smi> feedback) {
9462   if (existing_feedback == nullptr) return;
9463   *existing_feedback = SmiOr(existing_feedback->value(), feedback);
9464 }
9465 
9466 void CodeStubAssembler::CheckForAssociatedProtector(TNode<Name> name,
9467                                                     Label* if_protector) {
9468   // This list must be kept in sync with LookupIterator::UpdateProtector!
9469   // TODO(jkummerow): Would it be faster to have a bit in Symbol::flags()?
9470   GotoIf(TaggedEqual(name, ConstructorStringConstant()), if_protector);
9471   GotoIf(TaggedEqual(name, IteratorSymbolConstant()), if_protector);
9472   GotoIf(TaggedEqual(name, NextStringConstant()), if_protector);
9473   GotoIf(TaggedEqual(name, SpeciesSymbolConstant()), if_protector);
9474   GotoIf(TaggedEqual(name, IsConcatSpreadableSymbolConstant()), if_protector);
9475   GotoIf(TaggedEqual(name, ResolveStringConstant()), if_protector);
9476   GotoIf(TaggedEqual(name, ThenStringConstant()), if_protector);
9477   // Fall through if no case matched.
9478 }
9479 
9480 TNode<Map> CodeStubAssembler::LoadReceiverMap(SloppyTNode<Object> receiver) {
9481   return Select<Map>(
9482       TaggedIsSmi(receiver), [=] { return HeapNumberMapConstant(); },
9483       [=] { return LoadMap(UncheckedCast<HeapObject>(receiver)); });
9484 }
9485 
9486 TNode<IntPtrT> CodeStubAssembler::TryToIntptr(
9487     SloppyTNode<Object> key, Label* if_not_intptr,
9488     TVariable<Int32T>* var_instance_type) {
9489   TVARIABLE(IntPtrT, var_intptr_key);
9490   Label done(this, &var_intptr_key), key_is_smi(this), key_is_heapnumber(this);
9491   GotoIf(TaggedIsSmi(key), &key_is_smi);
9492 
9493   TNode<Int32T> instance_type = LoadInstanceType(CAST(key));
9494   if (var_instance_type != nullptr) {
9495     *var_instance_type = instance_type;
9496   }
9497 
9498   Branch(IsHeapNumberInstanceType(instance_type), &key_is_heapnumber,
9499          if_not_intptr);
9500 
9501   BIND(&key_is_smi);
9502   {
9503     var_intptr_key = SmiUntag(CAST(key));
9504     Goto(&done);
9505   }
9506 
9507   BIND(&key_is_heapnumber);
9508   {
9509     TNode<Float64T> value = LoadHeapNumberValue(CAST(key));
9510     TNode<IntPtrT> int_value = ChangeFloat64ToIntPtr(value);
9511     GotoIfNot(Float64Equal(value, RoundIntPtrToFloat64(int_value)),
9512               if_not_intptr);
9513 #if V8_TARGET_ARCH_64_BIT
9514     // We can't rely on Is64() alone because 32-bit compilers rightly complain
9515     // about kMaxSafeIntegerUint64 not fitting into an intptr_t.
9516     DCHECK(Is64());
9517     // TODO(jkummerow): Investigate whether we can drop support for
9518     // negative indices.
9519     GotoIfNot(IsInRange(int_value, static_cast<intptr_t>(-kMaxSafeInteger),
9520                         static_cast<intptr_t>(kMaxSafeIntegerUint64)),
9521               if_not_intptr);
9522 #else
9523     DCHECK(!Is64());
9524 #endif
9525     var_intptr_key = int_value;
9526     Goto(&done);
9527   }
9528 
9529   BIND(&done);
9530   return var_intptr_key.value();
9531 }
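// Illustrative conversions performed above: a Smi key 7 yields 7; a
// HeapNumber 3.0 yields 3; 3.5, NaN or -1.5 go to {if_not_intptr}; on 64-bit
// targets integral values outside the safe-integer range (about +/- 2^53)
// also go to {if_not_intptr} even though they would fit in an intptr_t.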
9532 
9533 TNode<Context> CodeStubAssembler::LoadScriptContext(
9534     TNode<Context> context, TNode<IntPtrT> context_index) {
9535   TNode<NativeContext> native_context = LoadNativeContext(context);
9536   TNode<ScriptContextTable> script_context_table = CAST(
9537       LoadContextElement(native_context, Context::SCRIPT_CONTEXT_TABLE_INDEX));
9538 
9539   TNode<Context> script_context = CAST(LoadFixedArrayElement(
9540       script_context_table, context_index,
9541       ScriptContextTable::kFirstContextSlotIndex * kTaggedSize));
9542   return script_context;
9543 }
9544 
9545 namespace {
9546 
9547 // Converts a typed array elements kind to a machine representation.
9548 MachineRepresentation ElementsKindToMachineRepresentation(ElementsKind kind) {
9549   switch (kind) {
9550     case UINT8_CLAMPED_ELEMENTS:
9551     case UINT8_ELEMENTS:
9552     case INT8_ELEMENTS:
9553       return MachineRepresentation::kWord8;
9554     case UINT16_ELEMENTS:
9555     case INT16_ELEMENTS:
9556       return MachineRepresentation::kWord16;
9557     case UINT32_ELEMENTS:
9558     case INT32_ELEMENTS:
9559       return MachineRepresentation::kWord32;
9560     case FLOAT32_ELEMENTS:
9561       return MachineRepresentation::kFloat32;
9562     case FLOAT64_ELEMENTS:
9563       return MachineRepresentation::kFloat64;
9564     default:
9565       UNREACHABLE();
9566   }
9567 }
9568 
9569 }  // namespace
9570 
9571 template <typename TArray, typename TIndex>
9572 void CodeStubAssembler::StoreElementBigIntOrTypedArray(TNode<TArray> elements,
9573                                                        ElementsKind kind,
9574                                                        TNode<TIndex> index,
9575                                                        Node* value) {
9576   // TODO(v8:9708): Do we want to keep both IntPtrT and UintPtrT variants?
9577   static_assert(std::is_same<TIndex, Smi>::value ||
9578                     std::is_same<TIndex, UintPtrT>::value ||
9579                     std::is_same<TIndex, IntPtrT>::value,
9580                 "Only Smi, UintPtrT or IntPtrT index is allowed");
9581   static_assert(std::is_same<TArray, RawPtrT>::value ||
9582                     std::is_same<TArray, FixedArrayBase>::value,
9583                 "Only RawPtrT or FixedArrayBase elements are allowed");
9584   if (kind == BIGINT64_ELEMENTS || kind == BIGUINT64_ELEMENTS) {
9585     TNode<IntPtrT> offset = ElementOffsetFromIndex(index, kind, 0);
9586     TVARIABLE(UintPtrT, var_low);
9587     // Only used on 32-bit platforms.
9588     TVARIABLE(UintPtrT, var_high);
9589     BigIntToRawBytes(CAST(value), &var_low, &var_high);
9590 
9591     MachineRepresentation rep = WordT::kMachineRepresentation;
9592 #if defined(V8_TARGET_BIG_ENDIAN)
9593     if (!Is64()) {
9594       StoreNoWriteBarrier(rep, elements, offset, var_high.value());
9595       StoreNoWriteBarrier(rep, elements,
9596                           IntPtrAdd(offset, IntPtrConstant(kSystemPointerSize)),
9597                           var_low.value());
9598     } else {
9599       StoreNoWriteBarrier(rep, elements, offset, var_low.value());
9600     }
9601 #else
9602     StoreNoWriteBarrier(rep, elements, offset, var_low.value());
9603     if (!Is64()) {
9604       StoreNoWriteBarrier(rep, elements,
9605                           IntPtrAdd(offset, IntPtrConstant(kSystemPointerSize)),
9606                           var_high.value());
9607     }
9608 #endif
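    // Note: on 32-bit targets the 64-bit BigInt payload is written as two
    // machine words, so the digit order above depends on endianness: the low
    // digit ends up at the lower address on little-endian targets and at the
    // higher address on big-endian ones.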
9609   } else {
9610     DCHECK(IsTypedArrayElementsKind(kind));
9611     if (kind == UINT8_CLAMPED_ELEMENTS) {
9612       CSA_ASSERT(this, Word32Equal(UncheckedCast<Word32T>(value),
9613                                    Word32And(Int32Constant(0xFF), value)));
9614     }
9615     TNode<IntPtrT> offset = ElementOffsetFromIndex(index, kind, 0);
9616     // TODO(cbruni): Add OOB check once typed.
9617     MachineRepresentation rep = ElementsKindToMachineRepresentation(kind);
9618     StoreNoWriteBarrier(rep, elements, offset, value);
9619   }
9620 }
9621 
9622 template <typename TIndex>
9623 void CodeStubAssembler::StoreElement(TNode<FixedArrayBase> elements,
9624                                      ElementsKind kind, TNode<TIndex> index,
9625                                      Node* value) {
9626   if (kind == BIGINT64_ELEMENTS || kind == BIGUINT64_ELEMENTS ||
9627       IsTypedArrayElementsKind(kind)) {
9628     StoreElementBigIntOrTypedArray(elements, kind, index, value);
9629   } else if (IsDoubleElementsKind(kind)) {
9630     TNode<Float64T> value_float64 = UncheckedCast<Float64T>(value);
9631     StoreFixedDoubleArrayElement(CAST(elements), index, value_float64);
9632   } else {
9633     WriteBarrierMode barrier_mode = IsSmiElementsKind(kind)
9634                                         ? UNSAFE_SKIP_WRITE_BARRIER
9635                                         : UPDATE_WRITE_BARRIER;
9636     StoreFixedArrayElement(CAST(elements), index, value, barrier_mode, 0);
9637   }
9638 }
9639 
9640 template <typename TIndex>
9641 void CodeStubAssembler::StoreElement(TNode<RawPtrT> elements, ElementsKind kind,
9642                                      TNode<TIndex> index, Node* value) {
9643   DCHECK(kind == BIGINT64_ELEMENTS || kind == BIGUINT64_ELEMENTS ||
9644          IsTypedArrayElementsKind(kind));
9645   StoreElementBigIntOrTypedArray(elements, kind, index, value);
9646 }
9647 template V8_EXPORT_PRIVATE void CodeStubAssembler::StoreElement<UintPtrT>(
9648     TNode<RawPtrT>, ElementsKind, TNode<UintPtrT>, Node*);
9649 
9650 TNode<Uint8T> CodeStubAssembler::Int32ToUint8Clamped(
9651     TNode<Int32T> int32_value) {
9652   Label done(this);
9653   TNode<Int32T> int32_zero = Int32Constant(0);
9654   TNode<Int32T> int32_255 = Int32Constant(255);
9655   TVARIABLE(Word32T, var_value, int32_value);
9656   GotoIf(Uint32LessThanOrEqual(int32_value, int32_255), &done);
9657   var_value = int32_zero;
9658   GotoIf(Int32LessThan(int32_value, int32_zero), &done);
9659   var_value = int32_255;
9660   Goto(&done);
9661   BIND(&done);
9662   return UncheckedCast<Uint8T>(var_value.value());
9663 }
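// For illustration, the graph built above clamps an int32 the same way
// ToUint8Clamp does for integral inputs: values below 0 map to 0, values above
// 255 map to 255, and anything in between is passed through unchanged
// (e.g. -5 -> 0, 42 -> 42, 300 -> 255).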
9664 
9665 TNode<Uint8T> CodeStubAssembler::Float64ToUint8Clamped(
9666     TNode<Float64T> float64_value) {
9667   Label done(this);
9668   TVARIABLE(Word32T, var_value, Int32Constant(0));
9669   GotoIf(Float64LessThanOrEqual(float64_value, Float64Constant(0.0)), &done);
9670   var_value = Int32Constant(255);
9671   GotoIf(Float64LessThanOrEqual(Float64Constant(255.0), float64_value), &done);
9672   {
9673     TNode<Float64T> rounded_value = Float64RoundToEven(float64_value);
9674     var_value = TruncateFloat64ToWord32(rounded_value);
9675     Goto(&done);
9676   }
9677   BIND(&done);
9678   return UncheckedCast<Uint8T>(var_value.value());
9679 }
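// Double variant of the clamping above: inputs <= 0.0 produce 0, inputs
// >= 255.0 produce 255, and everything in between is rounded with
// round-half-to-even semantics before truncation (e.g. 0.5 -> 0, 1.5 -> 2,
// 254.5 -> 254), matching ToUint8Clamp for Uint8ClampedArray stores.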
9680 
9681 template <>
9682 TNode<Word32T> CodeStubAssembler::PrepareValueForWriteToTypedArray<Word32T>(
9683     TNode<Object> input, ElementsKind elements_kind, TNode<Context> context) {
9684   DCHECK(IsTypedArrayElementsKind(elements_kind));
9685 
9686   switch (elements_kind) {
9687     case UINT8_ELEMENTS:
9688     case INT8_ELEMENTS:
9689     case UINT16_ELEMENTS:
9690     case INT16_ELEMENTS:
9691     case UINT32_ELEMENTS:
9692     case INT32_ELEMENTS:
9693     case UINT8_CLAMPED_ELEMENTS:
9694       break;
9695     default:
9696       UNREACHABLE();
9697   }
9698 
9699   TVARIABLE(Word32T, var_result);
9700   TVARIABLE(Object, var_input, input);
9701   Label done(this, &var_result), if_smi(this), if_heapnumber_or_oddball(this),
9702       convert(this), loop(this, &var_input);
9703   Goto(&loop);
9704   BIND(&loop);
9705   GotoIf(TaggedIsSmi(var_input.value()), &if_smi);
9706   // We can handle both HeapNumber and Oddball here, since Oddball has the
9707   // same layout as the HeapNumber for the HeapNumber::value field. This
9708   // way we can also properly optimize stores of oddballs to typed arrays.
9709   TNode<HeapObject> heap_object = CAST(var_input.value());
9710   GotoIf(IsHeapNumber(heap_object), &if_heapnumber_or_oddball);
9711   STATIC_ASSERT_FIELD_OFFSETS_EQUAL(HeapNumber::kValueOffset,
9712                                     Oddball::kToNumberRawOffset);
9713   Branch(HasInstanceType(heap_object, ODDBALL_TYPE), &if_heapnumber_or_oddball,
9714          &convert);
9715 
9716   BIND(&if_heapnumber_or_oddball);
9717   {
9718     TNode<Float64T> value =
9719         LoadObjectField<Float64T>(heap_object, HeapNumber::kValueOffset);
9720     if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
9721       var_result = Float64ToUint8Clamped(value);
9722     } else {
9723       var_result = TruncateFloat64ToWord32(value);
9724     }
9725     Goto(&done);
9726   }
9727 
9728   BIND(&if_smi);
9729   {
9730     TNode<Int32T> value = SmiToInt32(CAST(var_input.value()));
9731     if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
9732       var_result = Int32ToUint8Clamped(value);
9733     } else {
9734       var_result = value;
9735     }
9736     Goto(&done);
9737   }
9738 
9739   BIND(&convert);
9740   {
9741     var_input = CallBuiltin(Builtins::kNonNumberToNumber, context, input);
9742     Goto(&loop);
9743   }
9744 
9745   BIND(&done);
9746   return var_result.value();
9747 }
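// As a consequence of the conversion loop above, storing `true` into e.g. a
// Uint8Array writes 1 (the oddball's cached to_number_raw value), while
// storing a plain object first goes through NonNumberToNumber (which may run
// user code via valueOf/toString) and then re-enters the numeric fast paths.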
9748 
9749 template <>
9750 TNode<Float32T> CodeStubAssembler::PrepareValueForWriteToTypedArray<Float32T>(
9751     TNode<Object> input, ElementsKind elements_kind, TNode<Context> context) {
9752   DCHECK(IsTypedArrayElementsKind(elements_kind));
9753   CHECK_EQ(elements_kind, FLOAT32_ELEMENTS);
9754 
9755   TVARIABLE(Float32T, var_result);
9756   TVARIABLE(Object, var_input, input);
9757   Label done(this, &var_result), if_smi(this), if_heapnumber_or_oddball(this),
9758       convert(this), loop(this, &var_input);
9759   Goto(&loop);
9760   BIND(&loop);
9761   GotoIf(TaggedIsSmi(var_input.value()), &if_smi);
9762   // We can handle both HeapNumber and Oddball here, since Oddball has the
9763   // same layout as the HeapNumber for the HeapNumber::value field. This
9764   // way we can also properly optimize stores of oddballs to typed arrays.
9765   TNode<HeapObject> heap_object = CAST(var_input.value());
9766   GotoIf(IsHeapNumber(heap_object), &if_heapnumber_or_oddball);
9767   STATIC_ASSERT_FIELD_OFFSETS_EQUAL(HeapNumber::kValueOffset,
9768                                     Oddball::kToNumberRawOffset);
9769   Branch(HasInstanceType(heap_object, ODDBALL_TYPE), &if_heapnumber_or_oddball,
9770          &convert);
9771 
9772   BIND(&if_heapnumber_or_oddball);
9773   {
9774     TNode<Float64T> value =
9775         LoadObjectField<Float64T>(heap_object, HeapNumber::kValueOffset);
9776     var_result = TruncateFloat64ToFloat32(value);
9777     Goto(&done);
9778   }
9779 
9780   BIND(&if_smi);
9781   {
9782     TNode<Int32T> value = SmiToInt32(CAST(var_input.value()));
9783     var_result = RoundInt32ToFloat32(value);
9784     Goto(&done);
9785   }
9786 
9787   BIND(&convert);
9788   {
9789     var_input = CallBuiltin(Builtins::kNonNumberToNumber, context, input);
9790     Goto(&loop);
9791   }
9792 
9793   BIND(&done);
9794   return var_result.value();
9795 }
9796 
9797 template <>
9798 TNode<Float64T> CodeStubAssembler::PrepareValueForWriteToTypedArray<Float64T>(
9799     TNode<Object> input, ElementsKind elements_kind, TNode<Context> context) {
9800   DCHECK(IsTypedArrayElementsKind(elements_kind));
9801   CHECK_EQ(elements_kind, FLOAT64_ELEMENTS);
9802 
9803   TVARIABLE(Float64T, var_result);
9804   TVARIABLE(Object, var_input, input);
9805   Label done(this, &var_result), if_smi(this), if_heapnumber_or_oddball(this),
9806       convert(this), loop(this, &var_input);
9807   Goto(&loop);
9808   BIND(&loop);
9809   GotoIf(TaggedIsSmi(var_input.value()), &if_smi);
9810   // We can handle both HeapNumber and Oddball here, since Oddball has the
9811   // same layout as the HeapNumber for the HeapNumber::value field. This
9812   // way we can also properly optimize stores of oddballs to typed arrays.
9813   TNode<HeapObject> heap_object = CAST(var_input.value());
9814   GotoIf(IsHeapNumber(heap_object), &if_heapnumber_or_oddball);
9815   STATIC_ASSERT_FIELD_OFFSETS_EQUAL(HeapNumber::kValueOffset,
9816                                     Oddball::kToNumberRawOffset);
9817   Branch(HasInstanceType(heap_object, ODDBALL_TYPE), &if_heapnumber_or_oddball,
9818          &convert);
9819 
9820   BIND(&if_heapnumber_or_oddball);
9821   {
9822     var_result =
9823         LoadObjectField<Float64T>(heap_object, HeapNumber::kValueOffset);
9824     Goto(&done);
9825   }
9826 
9827   BIND(&if_smi);
9828   {
9829     TNode<Int32T> value = SmiToInt32(CAST(var_input.value()));
9830     var_result = ChangeInt32ToFloat64(value);
9831     Goto(&done);
9832   }
9833 
9834   BIND(&convert);
9835   {
9836     var_input = CallBuiltin(Builtins::kNonNumberToNumber, context, input);
9837     Goto(&loop);
9838   }
9839 
9840   BIND(&done);
9841   return var_result.value();
9842 }
9843 
9844 Node* CodeStubAssembler::PrepareValueForWriteToTypedArray(
9845     TNode<Object> input, ElementsKind elements_kind, TNode<Context> context) {
9846   DCHECK(IsTypedArrayElementsKind(elements_kind));
9847 
9848   switch (elements_kind) {
9849     case UINT8_ELEMENTS:
9850     case INT8_ELEMENTS:
9851     case UINT16_ELEMENTS:
9852     case INT16_ELEMENTS:
9853     case UINT32_ELEMENTS:
9854     case INT32_ELEMENTS:
9855     case UINT8_CLAMPED_ELEMENTS:
9856       return PrepareValueForWriteToTypedArray<Word32T>(input, elements_kind,
9857                                                        context);
9858     case FLOAT32_ELEMENTS:
9859       return PrepareValueForWriteToTypedArray<Float32T>(input, elements_kind,
9860                                                         context);
9861     case FLOAT64_ELEMENTS:
9862       return PrepareValueForWriteToTypedArray<Float64T>(input, elements_kind,
9863                                                         context);
9864     case BIGINT64_ELEMENTS:
9865     case BIGUINT64_ELEMENTS:
9866       return ToBigInt(context, input);
9867     default:
9868       UNREACHABLE();
9869   }
9870 }
9871 
9872 void CodeStubAssembler::BigIntToRawBytes(TNode<BigInt> bigint,
9873                                          TVariable<UintPtrT>* var_low,
9874                                          TVariable<UintPtrT>* var_high) {
9875   Label done(this);
9876   *var_low = Unsigned(IntPtrConstant(0));
9877   *var_high = Unsigned(IntPtrConstant(0));
9878   TNode<Word32T> bitfield = LoadBigIntBitfield(bigint);
9879   TNode<Uint32T> length = DecodeWord32<BigIntBase::LengthBits>(bitfield);
9880   TNode<Uint32T> sign = DecodeWord32<BigIntBase::SignBits>(bitfield);
9881   GotoIf(Word32Equal(length, Int32Constant(0)), &done);
9882   *var_low = LoadBigIntDigit(bigint, 0);
9883   if (!Is64()) {
9884     Label load_done(this);
9885     GotoIf(Word32Equal(length, Int32Constant(1)), &load_done);
9886     *var_high = LoadBigIntDigit(bigint, 1);
9887     Goto(&load_done);
9888     BIND(&load_done);
9889   }
9890   GotoIf(Word32Equal(sign, Int32Constant(0)), &done);
9891   // Negative value. Simulate two's complement.
9892   if (!Is64()) {
9893     *var_high = Unsigned(IntPtrSub(IntPtrConstant(0), var_high->value()));
9894     Label no_carry(this);
9895     GotoIf(IntPtrEqual(var_low->value(), IntPtrConstant(0)), &no_carry);
9896     *var_high = Unsigned(IntPtrSub(var_high->value(), IntPtrConstant(1)));
9897     Goto(&no_carry);
9898     BIND(&no_carry);
9899   }
9900   *var_low = Unsigned(IntPtrSub(IntPtrConstant(0), var_low->value()));
9901   Goto(&done);
9902   BIND(&done);
9903 }
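// Example: for -1n on a 64-bit target the single digit 1 is negated, so
// {var_low} ends up as ~0 and {var_high} stays 0 (it is only meaningful on
// 32-bit targets); on 32-bit targets the negation is carried through both
// digits, yielding the proper 64-bit two's complement encoding.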
9904 
9905 void CodeStubAssembler::EmitElementStore(
9906     TNode<JSObject> object, TNode<Object> key, TNode<Object> value,
9907     ElementsKind elements_kind, KeyedAccessStoreMode store_mode, Label* bailout,
9908     TNode<Context> context, TVariable<Object>* maybe_converted_value) {
9909   CSA_ASSERT(this, Word32BinaryNot(IsJSProxy(object)));
9910 
9911   TNode<FixedArrayBase> elements = LoadElements(object);
9912   if (!(IsSmiOrObjectElementsKind(elements_kind) ||
9913         IsSealedElementsKind(elements_kind) ||
9914         IsNonextensibleElementsKind(elements_kind))) {
9915     CSA_ASSERT(this, Word32BinaryNot(IsFixedCOWArrayMap(LoadMap(elements))));
9916   } else if (!IsCOWHandlingStoreMode(store_mode)) {
9917     GotoIf(IsFixedCOWArrayMap(LoadMap(elements)), bailout);
9918   }
9919 
9920   // TODO(ishell): introduce TryToIntPtrOrSmi() and use BInt.
9921   TNode<IntPtrT> intptr_key = TryToIntptr(key, bailout);
9922 
9923   // TODO(rmcilroy): TNodify the converted value once this function and
9924   // StoreElement are templated based on the elements_kind type.
9925   Node* converted_value = value;
9926   if (IsTypedArrayElementsKind(elements_kind)) {
9927     Label done(this), update_value_and_bailout(this, Label::kDeferred);
9928 
9929     // IntegerIndexedElementSet converts value to a Number/BigInt prior to the
9930     // bounds check.
9931     converted_value =
9932         PrepareValueForWriteToTypedArray(value, elements_kind, context);
9933     TNode<JSTypedArray> typed_array = CAST(object);
9934 
9935     // There must be no allocations between the buffer load and
9936     // the actual store to the backing store, because the GC may decide that
9937     // the buffer is not alive or may move the elements.
9938     // TODO(ishell): introduce DisallowHeapAllocationCode scope here.
9939 
9940     // Check if buffer has been detached.
9941     TNode<JSArrayBuffer> buffer = LoadJSArrayBufferViewBuffer(typed_array);
9942     if (maybe_converted_value) {
9943       GotoIf(IsDetachedBuffer(buffer), &update_value_and_bailout);
9944     } else {
9945       GotoIf(IsDetachedBuffer(buffer), bailout);
9946     }
9947 
9948     // Bounds check.
9949     TNode<UintPtrT> length = LoadJSTypedArrayLength(typed_array);
9950 
9951     if (store_mode == STORE_IGNORE_OUT_OF_BOUNDS) {
9952       // Skip the store if we write beyond the length or
9953       // to a property with a negative integer index.
9954       GotoIfNot(UintPtrLessThan(intptr_key, length), &done);
9955     } else {
9956       DCHECK_EQ(store_mode, STANDARD_STORE);
9957       GotoIfNot(UintPtrLessThan(intptr_key, length), &update_value_and_bailout);
9958     }
9959 
9960     TNode<RawPtrT> data_ptr = LoadJSTypedArrayDataPtr(typed_array);
9961     StoreElement(data_ptr, elements_kind, intptr_key, converted_value);
9962     Goto(&done);
9963 
9964     BIND(&update_value_and_bailout);
9965     // We already prepared the incoming value for storing into a typed array.
9966     // This might involve calling ToNumber in some cases. We shouldn't call
9967     // ToNumber again in the runtime, so pass the converted value to the runtime.
9968     // The prepared value is an untagged value. Convert it to a tagged value
9969     // to pass it to the runtime. It is not possible to do the detached buffer check
9970     // before we prepare the value, since ToNumber can detach the ArrayBuffer.
9971     // The spec specifies the order of these operations.
9972     if (maybe_converted_value != nullptr) {
9973       switch (elements_kind) {
9974         case UINT8_ELEMENTS:
9975         case INT8_ELEMENTS:
9976         case UINT16_ELEMENTS:
9977         case INT16_ELEMENTS:
9978         case UINT8_CLAMPED_ELEMENTS:
9979           *maybe_converted_value = SmiFromInt32(converted_value);
9980           break;
9981         case UINT32_ELEMENTS:
9982           *maybe_converted_value = ChangeUint32ToTagged(converted_value);
9983           break;
9984         case INT32_ELEMENTS:
9985           *maybe_converted_value = ChangeInt32ToTagged(converted_value);
9986           break;
9987         case FLOAT32_ELEMENTS: {
9988           Label dont_allocate_heap_number(this), end(this);
9989           GotoIf(TaggedIsSmi(value), &dont_allocate_heap_number);
9990           GotoIf(IsHeapNumber(CAST(value)), &dont_allocate_heap_number);
9991           {
9992             *maybe_converted_value = AllocateHeapNumberWithValue(
9993                 ChangeFloat32ToFloat64(converted_value));
9994             Goto(&end);
9995           }
9996           BIND(&dont_allocate_heap_number);
9997           {
9998             *maybe_converted_value = value;
9999             Goto(&end);
10000           }
10001           BIND(&end);
10002           break;
10003         }
10004         case FLOAT64_ELEMENTS: {
10005           Label dont_allocate_heap_number(this), end(this);
10006           GotoIf(TaggedIsSmi(value), &dont_allocate_heap_number);
10007           GotoIf(IsHeapNumber(CAST(value)), &dont_allocate_heap_number);
10008           {
10009             *maybe_converted_value =
10010                 AllocateHeapNumberWithValue(converted_value);
10011             Goto(&end);
10012           }
10013           BIND(&dont_allocate_heap_number);
10014           {
10015             *maybe_converted_value = value;
10016             Goto(&end);
10017           }
10018           BIND(&end);
10019           break;
10020         }
10021         case BIGINT64_ELEMENTS:
10022         case BIGUINT64_ELEMENTS:
10023           *maybe_converted_value = CAST(converted_value);
10024           break;
10025         default:
10026           UNREACHABLE();
10027       }
10028     }
10029     Goto(bailout);
10030 
10031     BIND(&done);
10032     return;
10033   }
10034   DCHECK(IsFastElementsKind(elements_kind) ||
10035          IsSealedElementsKind(elements_kind) ||
10036          IsNonextensibleElementsKind(elements_kind));
10037 
10038   // In case the value is stored into a fast smi array, ensure that the value
10039   // is a smi before manipulating the backing store. Otherwise the backing store
10040   // may be left in an invalid state.
10041   if (IsSmiElementsKind(elements_kind)) {
10042     GotoIfNot(TaggedIsSmi(value), bailout);
10043   } else if (IsDoubleElementsKind(elements_kind)) {
10044     converted_value = TryTaggedToFloat64(value, bailout);
10045   }
10046 
10047   TNode<Smi> smi_length = Select<Smi>(
10048       IsJSArray(object),
10049       [=]() {
10050         // This is casting Number -> Smi which may not actually be safe.
10051         return CAST(LoadJSArrayLength(CAST(object)));
10052       },
10053       [=]() { return LoadFixedArrayBaseLength(elements); });
10054 
10055   TNode<UintPtrT> length = Unsigned(SmiUntag(smi_length));
10056   if (IsGrowStoreMode(store_mode) &&
10057       !(IsSealedElementsKind(elements_kind) ||
10058         IsNonextensibleElementsKind(elements_kind))) {
10059     elements = CheckForCapacityGrow(object, elements, elements_kind, length,
10060                                     intptr_key, bailout);
10061   } else {
10062     GotoIfNot(UintPtrLessThan(Unsigned(intptr_key), length), bailout);
10063   }
10064 
10065   // Cannot store to a hole in holey sealed elements, so bail out.
10066   if (elements_kind == HOLEY_SEALED_ELEMENTS ||
10067       elements_kind == HOLEY_NONEXTENSIBLE_ELEMENTS) {
10068     TNode<Object> target_value =
10069         LoadFixedArrayElement(CAST(elements), intptr_key);
10070     GotoIf(IsTheHole(target_value), bailout);
10071   }
10072 
10073   // If we didn't grow {elements}, it might still be COW, in which case we
10074   // copy it now.
10075   if (!(IsSmiOrObjectElementsKind(elements_kind) ||
10076         IsSealedElementsKind(elements_kind) ||
10077         IsNonextensibleElementsKind(elements_kind))) {
10078     CSA_ASSERT(this, Word32BinaryNot(IsFixedCOWArrayMap(LoadMap(elements))));
10079   } else if (IsCOWHandlingStoreMode(store_mode)) {
10080     elements = CopyElementsOnWrite(object, elements, elements_kind,
10081                                    Signed(length), bailout);
10082   }
10083 
10084   CSA_ASSERT(this, Word32BinaryNot(IsFixedCOWArrayMap(LoadMap(elements))));
10085   StoreElement(elements, elements_kind, intptr_key, converted_value);
10086 }
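// Note on {maybe_converted_value}: if the typed array fast path bails out
// after the value has already been converted (detached buffer or out-of-bounds
// index), the converted result is re-tagged and handed back so that the slow
// path does not re-run ToNumber/ToBigInt, which could otherwise observably
// invoke user code a second time.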
10087 
10088 TNode<FixedArrayBase> CodeStubAssembler::CheckForCapacityGrow(
10089     TNode<JSObject> object, TNode<FixedArrayBase> elements, ElementsKind kind,
10090     TNode<UintPtrT> length, TNode<IntPtrT> key, Label* bailout) {
10091   DCHECK(IsFastElementsKind(kind));
10092   TVARIABLE(FixedArrayBase, checked_elements);
10093   Label grow_case(this), no_grow_case(this), done(this),
10094       grow_bailout(this, Label::kDeferred);
10095 
10096   TNode<BoolT> condition;
10097   if (IsHoleyElementsKind(kind)) {
10098     condition = UintPtrGreaterThanOrEqual(key, length);
10099   } else {
10100     // We don't support growing here unless the value is being appended.
10101     condition = WordEqual(key, length);
10102   }
10103   Branch(condition, &grow_case, &no_grow_case);
10104 
10105   BIND(&grow_case);
10106   {
10107     TNode<IntPtrT> current_capacity =
10108         SmiUntag(LoadFixedArrayBaseLength(elements));
10109     checked_elements = elements;
10110     Label fits_capacity(this);
10111     // If key is negative, we will notice in Runtime::kGrowArrayElements.
10112     GotoIf(UintPtrLessThan(key, current_capacity), &fits_capacity);
10113 
10114     {
10115       TNode<FixedArrayBase> new_elements = TryGrowElementsCapacity(
10116           object, elements, kind, key, current_capacity, &grow_bailout);
10117       checked_elements = new_elements;
10118       Goto(&fits_capacity);
10119     }
10120 
10121     BIND(&grow_bailout);
10122     {
10123       GotoIf(IntPtrLessThan(key, IntPtrConstant(0)), bailout);
10124       TNode<Number> tagged_key = ChangeUintPtrToTagged(Unsigned(key));
10125       TNode<Object> maybe_elements = CallRuntime(
10126           Runtime::kGrowArrayElements, NoContextConstant(), object, tagged_key);
10127       GotoIf(TaggedIsSmi(maybe_elements), bailout);
10128       TNode<FixedArrayBase> new_elements = CAST(maybe_elements);
10129       CSA_ASSERT(this, IsFixedArrayWithKind(new_elements, kind));
10130       checked_elements = new_elements;
10131       Goto(&fits_capacity);
10132     }
10133 
10134     BIND(&fits_capacity);
10135     GotoIfNot(IsJSArray(object), &done);
10136 
10137     TNode<IntPtrT> new_length = IntPtrAdd(key, IntPtrConstant(1));
10138     StoreObjectFieldNoWriteBarrier(object, JSArray::kLengthOffset,
10139                                    SmiTag(new_length));
10140     Goto(&done);
10141   }
10142 
10143   BIND(&no_grow_case);
10144   {
10145     GotoIfNot(UintPtrLessThan(key, length), bailout);
10146     checked_elements = elements;
10147     Goto(&done);
10148   }
10149 
10150   BIND(&done);
10151   return checked_elements.value();
10152 }
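// In other words: for packed element kinds growth is only attempted when the
// store appends directly at {length} (key == length), while holey kinds also
// allow stores past the end. Keys beyond the current capacity first try
// TryGrowElementsCapacity and fall back to Runtime::kGrowArrayElements.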
10153 
10154 TNode<FixedArrayBase> CodeStubAssembler::CopyElementsOnWrite(
10155     TNode<HeapObject> object, TNode<FixedArrayBase> elements, ElementsKind kind,
10156     TNode<IntPtrT> length, Label* bailout) {
10157   TVARIABLE(FixedArrayBase, new_elements_var, elements);
10158   Label done(this);
10159 
10160   GotoIfNot(IsFixedCOWArrayMap(LoadMap(elements)), &done);
10161   {
10162     TNode<IntPtrT> capacity = SmiUntag(LoadFixedArrayBaseLength(elements));
10163     TNode<FixedArrayBase> new_elements = GrowElementsCapacity(
10164         object, elements, kind, kind, length, capacity, bailout);
10165     new_elements_var = new_elements;
10166     Goto(&done);
10167   }
10168 
10169   BIND(&done);
10170   return new_elements_var.value();
10171 }
10172 
10173 void CodeStubAssembler::TransitionElementsKind(TNode<JSObject> object,
10174                                                TNode<Map> map,
10175                                                ElementsKind from_kind,
10176                                                ElementsKind to_kind,
10177                                                Label* bailout) {
10178   DCHECK(!IsHoleyElementsKind(from_kind) || IsHoleyElementsKind(to_kind));
10179   if (AllocationSite::ShouldTrack(from_kind, to_kind)) {
10180     TrapAllocationMemento(object, bailout);
10181   }
10182 
10183   if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
10184     Comment("Non-simple map transition");
10185     TNode<FixedArrayBase> elements = LoadElements(object);
10186 
10187     Label done(this);
10188     GotoIf(TaggedEqual(elements, EmptyFixedArrayConstant()), &done);
10189 
10190     // TODO(ishell): Use BInt for elements_length and array_length.
10191     TNode<IntPtrT> elements_length =
10192         SmiUntag(LoadFixedArrayBaseLength(elements));
10193     TNode<IntPtrT> array_length = Select<IntPtrT>(
10194         IsJSArray(object),
10195         [=]() {
10196           CSA_ASSERT(this, IsFastElementsKind(LoadElementsKind(object)));
10197           return SmiUntag(LoadFastJSArrayLength(CAST(object)));
10198         },
10199         [=]() { return elements_length; });
10200 
10201     CSA_ASSERT(this, WordNotEqual(elements_length, IntPtrConstant(0)));
10202 
10203     GrowElementsCapacity(object, elements, from_kind, to_kind, array_length,
10204                          elements_length, bailout);
10205     Goto(&done);
10206     BIND(&done);
10207   }
10208 
10209   StoreMap(object, map);
10210 }
10211 
10212 void CodeStubAssembler::TrapAllocationMemento(TNode<JSObject> object,
10213                                               Label* memento_found) {
10214   Comment("[ TrapAllocationMemento");
10215   Label no_memento_found(this);
10216   Label top_check(this), map_check(this);
10217 
10218   TNode<ExternalReference> new_space_top_address = ExternalConstant(
10219       ExternalReference::new_space_allocation_top_address(isolate()));
10220   const int kMementoMapOffset = JSArray::kHeaderSize;
10221   const int kMementoLastWordOffset =
10222       kMementoMapOffset + AllocationMemento::kSize - kTaggedSize;
10223 
10224   // Bail out if the object is not in new space.
10225   TNode<IntPtrT> object_word = BitcastTaggedToWord(object);
10226   TNode<IntPtrT> object_page = PageFromAddress(object_word);
10227   {
10228     TNode<IntPtrT> page_flags =
10229         Load<IntPtrT>(object_page, IntPtrConstant(Page::kFlagsOffset));
10230     GotoIf(WordEqual(
10231                WordAnd(page_flags,
10232                        IntPtrConstant(MemoryChunk::kIsInYoungGenerationMask)),
10233                IntPtrConstant(0)),
10234            &no_memento_found);
10235     // TODO(ulan): Support allocation memento for a large object by allocating
10236     // an additional word for the memento after the large object.
10237     GotoIf(WordNotEqual(WordAnd(page_flags,
10238                                 IntPtrConstant(MemoryChunk::kIsLargePageMask)),
10239                         IntPtrConstant(0)),
10240            &no_memento_found);
10241   }
10242 
10243   TNode<IntPtrT> memento_last_word = IntPtrAdd(
10244       object_word, IntPtrConstant(kMementoLastWordOffset - kHeapObjectTag));
10245   TNode<IntPtrT> memento_last_word_page = PageFromAddress(memento_last_word);
10246 
10247   TNode<IntPtrT> new_space_top = Load<IntPtrT>(new_space_top_address);
10248   TNode<IntPtrT> new_space_top_page = PageFromAddress(new_space_top);
10249 
10250   // If the object is in new space, we need to check whether the respective
10251   // potential memento object is on the same page as the current top.
10252   GotoIf(WordEqual(memento_last_word_page, new_space_top_page), &top_check);
10253 
10254   // The object is on a different page than allocation top. Bail out if the
10255   // object sits on the page boundary as no memento can follow and we cannot
10256   // touch the memory following it.
10257   Branch(WordEqual(object_page, memento_last_word_page), &map_check,
10258          &no_memento_found);
10259 
10260   // If top is on the same page as the current object, we need to check whether
10261   // we are below top.
10262   BIND(&top_check);
10263   {
10264     Branch(UintPtrGreaterThanOrEqual(memento_last_word, new_space_top),
10265            &no_memento_found, &map_check);
10266   }
10267 
10268   // Memento map check.
10269   BIND(&map_check);
10270   {
10271     TNode<Object> memento_map = LoadObjectField(object, kMementoMapOffset);
10272     Branch(TaggedEqual(memento_map, AllocationMementoMapConstant()),
10273            memento_found, &no_memento_found);
10274   }
10275   BIND(&no_memento_found);
10276   Comment("] TrapAllocationMemento");
10277 }
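// Finding a memento directly behind the array means the array was allocated
// recently from a tracked AllocationSite, so callers such as
// TransitionElementsKind bail out to the runtime, which can keep the site's
// ElementsKind feedback in sync instead of transitioning the object silently.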
10278 
10279 TNode<IntPtrT> CodeStubAssembler::PageFromAddress(TNode<IntPtrT> address) {
10280   return WordAnd(address, IntPtrConstant(~kPageAlignmentMask));
10281 }
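// For example, assuming V8's usual 256 KiB pages (kPageAlignmentMask covering
// the low 18 bits), an address such as 0x....5a430 maps back to the page start
// 0x....40000, where the MemoryChunk header (including the flags read in
// TrapAllocationMemento above) is located.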
10282 
10283 TNode<AllocationSite> CodeStubAssembler::CreateAllocationSiteInFeedbackVector(
10284     TNode<FeedbackVector> feedback_vector, TNode<UintPtrT> slot) {
10285   TNode<IntPtrT> size = IntPtrConstant(AllocationSite::kSizeWithWeakNext);
10286   TNode<HeapObject> site = Allocate(size, CodeStubAssembler::kPretenured);
10287   StoreMapNoWriteBarrier(site, RootIndex::kAllocationSiteWithWeakNextMap);
10288   // Should match AllocationSite::Initialize.
10289   TNode<WordT> field = UpdateWord<AllocationSite::ElementsKindBits>(
10290       IntPtrConstant(0), UintPtrConstant(GetInitialFastElementsKind()));
10291   StoreObjectFieldNoWriteBarrier(
10292       site, AllocationSite::kTransitionInfoOrBoilerplateOffset,
10293       SmiTag(Signed(field)));
10294 
10295   // Unlike literals, constructed arrays don't have nested sites
10296   TNode<Smi> zero = SmiConstant(0);
10297   StoreObjectFieldNoWriteBarrier(site, AllocationSite::kNestedSiteOffset, zero);
10298 
10299   // Pretenuring calculation field.
10300   StoreObjectFieldNoWriteBarrier(site, AllocationSite::kPretenureDataOffset,
10301                                  Int32Constant(0));
10302 
10303   // Pretenuring memento creation count field.
10304   StoreObjectFieldNoWriteBarrier(
10305       site, AllocationSite::kPretenureCreateCountOffset, Int32Constant(0));
10306 
10307   // Store an empty fixed array for the code dependency.
10308   StoreObjectFieldRoot(site, AllocationSite::kDependentCodeOffset,
10309                        RootIndex::kEmptyWeakFixedArray);
10310 
10311   // Link the object to the allocation site list
10312   TNode<ExternalReference> site_list = ExternalConstant(
10313       ExternalReference::allocation_sites_list_address(isolate()));
10314   TNode<Object> next_site =
10315       LoadBufferObject(ReinterpretCast<RawPtrT>(site_list), 0);
10316 
10317   // TODO(mvstanton): This is a store to a weak pointer, which we may want to
10318   // mark as such in order to skip the write barrier, once we have a unified
10319   // system for weakness. For now we decided to keep it like this because having
10320   // an initial write barrier backed store makes this pointer strong until the
10321   // next GC, and allocation sites are designed to survive several GCs anyway.
10322   StoreObjectField(site, AllocationSite::kWeakNextOffset, next_site);
10323   StoreFullTaggedNoWriteBarrier(site_list, site);
10324 
10325   StoreFeedbackVectorSlot(feedback_vector, slot, site);
10326   return CAST(site);
10327 }
10328 
10329 TNode<MaybeObject> CodeStubAssembler::StoreWeakReferenceInFeedbackVector(
10330     TNode<FeedbackVector> feedback_vector, TNode<UintPtrT> slot,
10331     TNode<HeapObject> value, int additional_offset) {
10332   TNode<MaybeObject> weak_value = MakeWeak(value);
10333   StoreFeedbackVectorSlot(feedback_vector, slot, weak_value,
10334                           UPDATE_WRITE_BARRIER, additional_offset);
10335   return weak_value;
10336 }
10337 
10338 TNode<BoolT> CodeStubAssembler::HasBoilerplate(
10339     TNode<Object> maybe_literal_site) {
10340   return TaggedIsNotSmi(maybe_literal_site);
10341 }
10342 
10343 TNode<Smi> CodeStubAssembler::LoadTransitionInfo(
10344     TNode<AllocationSite> allocation_site) {
10345   TNode<Smi> transition_info = CAST(LoadObjectField(
10346       allocation_site, AllocationSite::kTransitionInfoOrBoilerplateOffset));
10347   return transition_info;
10348 }
10349 
10350 TNode<JSObject> CodeStubAssembler::LoadBoilerplate(
10351     TNode<AllocationSite> allocation_site) {
10352   TNode<JSObject> boilerplate = CAST(LoadObjectField(
10353       allocation_site, AllocationSite::kTransitionInfoOrBoilerplateOffset));
10354   return boilerplate;
10355 }
10356 
10357 TNode<Int32T> CodeStubAssembler::LoadElementsKind(
10358     TNode<AllocationSite> allocation_site) {
10359   TNode<Smi> transition_info = LoadTransitionInfo(allocation_site);
10360   TNode<Int32T> elements_kind =
10361       Signed(DecodeWord32<AllocationSite::ElementsKindBits>(
10362           SmiToInt32(transition_info)));
10363   CSA_ASSERT(this, IsFastElementsKind(elements_kind));
10364   return elements_kind;
10365 }
10366 
10367 template <typename TIndex>
10368 TNode<TIndex> CodeStubAssembler::BuildFastLoop(const VariableList& vars,
10369                                                TNode<TIndex> start_index,
10370                                                TNode<TIndex> end_index,
10371                                                const FastLoopBody<TIndex>& body,
10372                                                int increment,
10373                                                IndexAdvanceMode advance_mode) {
10374   TVARIABLE(TIndex, var, start_index);
10375   VariableList vars_copy(vars.begin(), vars.end(), zone());
10376   vars_copy.push_back(&var);
10377   Label loop(this, vars_copy);
10378   Label after_loop(this);
10379   // Introduce an explicit second check of the termination condition before the
10380   // loop that helps TurboFan generate better code. If there's only a single
10381   // check, then the CodeStubAssembler forces it to be at the beginning of the
10382   // loop requiring a backwards branch at the end of the loop (it's not possible
10383   // to force the loop header check at the end of the loop and branch forward to
10384   // it from the pre-header). The extra branch is slower in the case that the
10385   // loop actually iterates.
10386   TNode<BoolT> first_check = IntPtrOrSmiEqual(var.value(), end_index);
10387   int32_t first_check_val;
10388   if (ToInt32Constant(first_check, &first_check_val)) {
10389     if (first_check_val) return var.value();
10390     Goto(&loop);
10391   } else {
10392     Branch(first_check, &after_loop, &loop);
10393   }
10394 
10395   BIND(&loop);
10396   {
10397     if (advance_mode == IndexAdvanceMode::kPre) {
10398       Increment(&var, increment);
10399     }
10400     body(var.value());
10401     if (advance_mode == IndexAdvanceMode::kPost) {
10402       Increment(&var, increment);
10403     }
10404     Branch(IntPtrOrSmiNotEqual(var.value(), end_index), &loop, &after_loop);
10405   }
10406   BIND(&after_loop);
10407   return var.value();
10408 }
10409 
10410 // Instantiate BuildFastLoop for IntPtrT and UintPtrT.
10411 template V8_EXPORT_PRIVATE TNode<IntPtrT>
10412 CodeStubAssembler::BuildFastLoop<IntPtrT>(const VariableList& vars,
10413                                           TNode<IntPtrT> start_index,
10414                                           TNode<IntPtrT> end_index,
10415                                           const FastLoopBody<IntPtrT>& body,
10416                                           int increment,
10417                                           IndexAdvanceMode advance_mode);
10418 template V8_EXPORT_PRIVATE TNode<UintPtrT>
10419 CodeStubAssembler::BuildFastLoop<UintPtrT>(const VariableList& vars,
10420                                            TNode<UintPtrT> start_index,
10421                                            TNode<UintPtrT> end_index,
10422                                            const FastLoopBody<UintPtrT>& body,
10423                                            int increment,
10424                                            IndexAdvanceMode advance_mode);
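// A typical call, using the overload without explicit loop variables (as in
// BuildFastArrayForEach below), walks tagged offsets in kTaggedSize steps:
//
//   BuildFastLoop<IntPtrT>(
//       start_offset, limit_offset,
//       [&](TNode<IntPtrT> offset) { /* one load/store per iteration */ },
//       kTaggedSize, IndexAdvanceMode::kPost);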
10425 
10426 template <typename TIndex>
10427 void CodeStubAssembler::BuildFastArrayForEach(
10428     TNode<UnionT<UnionT<FixedArray, PropertyArray>, HeapObject>> array,
10429     ElementsKind kind, TNode<TIndex> first_element_inclusive,
10430     TNode<TIndex> last_element_exclusive, const FastArrayForEachBody& body,
10431     ForEachDirection direction) {
10432   STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
10433   CSA_SLOW_ASSERT(this, Word32Or(IsFixedArrayWithKind(array, kind),
10434                                  IsPropertyArray(array)));
10435 
10436   int32_t first_val;
10437   bool constant_first = ToInt32Constant(first_element_inclusive, &first_val);
10438   int32_t last_val;
10439   bool constant_last = ToInt32Constant(last_element_exclusive, &last_val);
10440   if (constant_first && constant_last) {
10441     int delta = last_val - first_val;
10442     DCHECK_GE(delta, 0);
10443     if (delta <= kElementLoopUnrollThreshold) {
10444       if (direction == ForEachDirection::kForward) {
10445         for (int i = first_val; i < last_val; ++i) {
10446           TNode<IntPtrT> index = IntPtrConstant(i);
10447           TNode<IntPtrT> offset = ElementOffsetFromIndex(
10448               index, kind, FixedArray::kHeaderSize - kHeapObjectTag);
10449           body(array, offset);
10450         }
10451       } else {
10452         for (int i = last_val - 1; i >= first_val; --i) {
10453           TNode<IntPtrT> index = IntPtrConstant(i);
10454           TNode<IntPtrT> offset = ElementOffsetFromIndex(
10455               index, kind, FixedArray::kHeaderSize - kHeapObjectTag);
10456           body(array, offset);
10457         }
10458       }
10459       return;
10460     }
10461   }
10462 
10463   TNode<IntPtrT> start = ElementOffsetFromIndex(
10464       first_element_inclusive, kind, FixedArray::kHeaderSize - kHeapObjectTag);
10465   TNode<IntPtrT> limit = ElementOffsetFromIndex(
10466       last_element_exclusive, kind, FixedArray::kHeaderSize - kHeapObjectTag);
10467   if (direction == ForEachDirection::kReverse) std::swap(start, limit);
10468 
10469   int increment = IsDoubleElementsKind(kind) ? kDoubleSize : kTaggedSize;
10470   BuildFastLoop<IntPtrT>(
10471       start, limit, [&](TNode<IntPtrT> offset) { body(array, offset); },
10472       direction == ForEachDirection::kReverse ? -increment : increment,
10473       direction == ForEachDirection::kReverse ? IndexAdvanceMode::kPre
10474                                               : IndexAdvanceMode::kPost);
10475 }
10476 
10477 template <typename TIndex>
10478 void CodeStubAssembler::GotoIfFixedArraySizeDoesntFitInNewSpace(
10479     TNode<TIndex> element_count, Label* doesnt_fit, int base_size) {
10480   GotoIf(FixedArraySizeDoesntFitInNewSpace(element_count, base_size),
10481          doesnt_fit);
10482 }
10483 
10484 void CodeStubAssembler::InitializeFieldsWithRoot(TNode<HeapObject> object,
10485                                                  TNode<IntPtrT> start_offset,
10486                                                  TNode<IntPtrT> end_offset,
10487                                                  RootIndex root_index) {
10488   CSA_SLOW_ASSERT(this, TaggedIsNotSmi(object));
10489   start_offset = IntPtrAdd(start_offset, IntPtrConstant(-kHeapObjectTag));
10490   end_offset = IntPtrAdd(end_offset, IntPtrConstant(-kHeapObjectTag));
10491   TNode<Object> root_value = LoadRoot(root_index);
10492   BuildFastLoop<IntPtrT>(
10493       end_offset, start_offset,
10494       [=](TNode<IntPtrT> current) {
10495         StoreNoWriteBarrier(MachineRepresentation::kTagged, object, current,
10496                             root_value);
10497       },
10498       -kTaggedSize, CodeStubAssembler::IndexAdvanceMode::kPre);
10499 }
10500 
10501 void CodeStubAssembler::BranchIfNumberRelationalComparison(Operation op,
10502                                                            TNode<Number> left,
10503                                                            TNode<Number> right,
10504                                                            Label* if_true,
10505                                                            Label* if_false) {
10506   Label do_float_comparison(this);
10507   TVARIABLE(Float64T, var_left_float);
10508   TVARIABLE(Float64T, var_right_float);
10509 
10510   Branch(
10511       TaggedIsSmi(left),
10512       [&] {
10513         TNode<Smi> smi_left = CAST(left);
10514 
10515         Branch(
10516             TaggedIsSmi(right),
10517             [&] {
10518               TNode<Smi> smi_right = CAST(right);
10519 
10520               // Both {left} and {right} are Smi, so just perform a fast
10521               // Smi comparison.
10522               switch (op) {
10523                 case Operation::kEqual:
10524                   BranchIfSmiEqual(smi_left, smi_right, if_true, if_false);
10525                   break;
10526                 case Operation::kLessThan:
10527                   BranchIfSmiLessThan(smi_left, smi_right, if_true, if_false);
10528                   break;
10529                 case Operation::kLessThanOrEqual:
10530                   BranchIfSmiLessThanOrEqual(smi_left, smi_right, if_true,
10531                                              if_false);
10532                   break;
10533                 case Operation::kGreaterThan:
10534                   BranchIfSmiLessThan(smi_right, smi_left, if_true, if_false);
10535                   break;
10536                 case Operation::kGreaterThanOrEqual:
10537                   BranchIfSmiLessThanOrEqual(smi_right, smi_left, if_true,
10538                                              if_false);
10539                   break;
10540                 default:
10541                   UNREACHABLE();
10542               }
10543             },
10544             [&] {
10545               var_left_float = SmiToFloat64(smi_left);
10546               var_right_float = LoadHeapNumberValue(CAST(right));
10547               Goto(&do_float_comparison);
10548             });
10549       },
10550       [&] {
10551         var_left_float = LoadHeapNumberValue(CAST(left));
10552 
10553         Branch(
10554             TaggedIsSmi(right),
10555             [&] {
10556               var_right_float = SmiToFloat64(CAST(right));
10557               Goto(&do_float_comparison);
10558             },
10559             [&] {
10560               var_right_float = LoadHeapNumberValue(CAST(right));
10561               Goto(&do_float_comparison);
10562             });
10563       });
10564 
10565   BIND(&do_float_comparison);
10566   {
10567     switch (op) {
10568       case Operation::kEqual:
10569         Branch(Float64Equal(var_left_float.value(), var_right_float.value()),
10570                if_true, if_false);
10571         break;
10572       case Operation::kLessThan:
10573         Branch(Float64LessThan(var_left_float.value(), var_right_float.value()),
10574                if_true, if_false);
10575         break;
10576       case Operation::kLessThanOrEqual:
10577         Branch(Float64LessThanOrEqual(var_left_float.value(),
10578                                       var_right_float.value()),
10579                if_true, if_false);
10580         break;
10581       case Operation::kGreaterThan:
10582         Branch(
10583             Float64GreaterThan(var_left_float.value(), var_right_float.value()),
10584             if_true, if_false);
10585         break;
10586       case Operation::kGreaterThanOrEqual:
10587         Branch(Float64GreaterThanOrEqual(var_left_float.value(),
10588                                          var_right_float.value()),
10589                if_true, if_false);
10590         break;
10591       default:
10592         UNREACHABLE();
10593     }
10594   }
10595 }
10596 
10597 void CodeStubAssembler::GotoIfNumberGreaterThanOrEqual(TNode<Number> left,
10598                                                        TNode<Number> right,
10599                                                        Label* if_true) {
10600   Label if_false(this);
10601   BranchIfNumberRelationalComparison(Operation::kGreaterThanOrEqual, left,
10602                                      right, if_true, &if_false);
10603   BIND(&if_false);
10604 }
10605 
10606 namespace {
10607 Operation Reverse(Operation op) {
10608   switch (op) {
10609     case Operation::kLessThan:
10610       return Operation::kGreaterThan;
10611     case Operation::kLessThanOrEqual:
10612       return Operation::kGreaterThanOrEqual;
10613     case Operation::kGreaterThan:
10614       return Operation::kLessThan;
10615     case Operation::kGreaterThanOrEqual:
10616       return Operation::kLessThanOrEqual;
10617     default:
10618       break;
10619   }
10620   UNREACHABLE();
10621 }
10622 }  // anonymous namespace
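// Reverse(op) swaps the operand order of a comparison. It is used below when
// the BigInt operand is on the right-hand side: RelationalComparison calls
// Runtime::kBigIntCompareToNumber with Reverse(op) and the operands swapped,
// so e.g. "smi < bigint" is evaluated as "bigint > smi".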
10623 
10624 TNode<Oddball> CodeStubAssembler::RelationalComparison(
10625     Operation op, TNode<Object> left, TNode<Object> right,
10626     TNode<Context> context, TVariable<Smi>* var_type_feedback) {
10627   Label return_true(this), return_false(this), do_float_comparison(this),
10628       end(this);
10629   TVARIABLE(Oddball, var_result);  // Actually only "true" or "false".
10630   TVARIABLE(Float64T, var_left_float);
10631   TVARIABLE(Float64T, var_right_float);
10632 
10633   // We might need to loop several times due to ToPrimitive and/or ToNumeric
10634   // conversions.
10635   TVARIABLE(Object, var_left, left);
10636   TVARIABLE(Object, var_right, right);
10637   VariableList loop_variable_list({&var_left, &var_right}, zone());
10638   if (var_type_feedback != nullptr) {
10639     // Initialize the type feedback to None. The current feedback is combined
10640     // with the previous feedback.
10641     *var_type_feedback = SmiConstant(CompareOperationFeedback::kNone);
10642     loop_variable_list.push_back(var_type_feedback);
10643   }
10644   Label loop(this, loop_variable_list);
10645   Goto(&loop);
10646   BIND(&loop);
10647   {
10648     left = var_left.value();
10649     right = var_right.value();
10650 
10651     Label if_left_smi(this), if_left_not_smi(this);
10652     Branch(TaggedIsSmi(left), &if_left_smi, &if_left_not_smi);
10653 
10654     BIND(&if_left_smi);
10655     {
10656       TNode<Smi> smi_left = CAST(left);
10657       Label if_right_smi(this), if_right_heapnumber(this),
10658           if_right_bigint(this, Label::kDeferred),
10659           if_right_not_numeric(this, Label::kDeferred);
10660       GotoIf(TaggedIsSmi(right), &if_right_smi);
10661       TNode<Map> right_map = LoadMap(CAST(right));
10662       GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
10663       TNode<Uint16T> right_instance_type = LoadMapInstanceType(right_map);
10664       Branch(IsBigIntInstanceType(right_instance_type), &if_right_bigint,
10665              &if_right_not_numeric);
10666 
10667       BIND(&if_right_smi);
10668       {
10669         TNode<Smi> smi_right = CAST(right);
10670         CombineFeedback(var_type_feedback,
10671                         CompareOperationFeedback::kSignedSmall);
10672         switch (op) {
10673           case Operation::kLessThan:
10674             BranchIfSmiLessThan(smi_left, smi_right, &return_true,
10675                                 &return_false);
10676             break;
10677           case Operation::kLessThanOrEqual:
10678             BranchIfSmiLessThanOrEqual(smi_left, smi_right, &return_true,
10679                                        &return_false);
10680             break;
10681           case Operation::kGreaterThan:
10682             BranchIfSmiLessThan(smi_right, smi_left, &return_true,
10683                                 &return_false);
10684             break;
10685           case Operation::kGreaterThanOrEqual:
10686             BranchIfSmiLessThanOrEqual(smi_right, smi_left, &return_true,
10687                                        &return_false);
10688             break;
10689           default:
10690             UNREACHABLE();
10691         }
10692       }
10693 
10694       BIND(&if_right_heapnumber);
10695       {
10696         CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
10697         var_left_float = SmiToFloat64(smi_left);
10698         var_right_float = LoadHeapNumberValue(CAST(right));
10699         Goto(&do_float_comparison);
10700       }
10701 
10702       BIND(&if_right_bigint);
10703       {
10704         OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
10705         var_result = CAST(CallRuntime(Runtime::kBigIntCompareToNumber,
10706                                       NoContextConstant(),
10707                                       SmiConstant(Reverse(op)), right, left));
10708         Goto(&end);
10709       }
10710 
10711       BIND(&if_right_not_numeric);
10712       {
10713         OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
10714         // Convert {right} to a Numeric; we don't need to perform the
10715         // dedicated ToPrimitive(right, hint Number) operation, as the
10716         // ToNumeric(right) will by itself already invoke ToPrimitive with
10717         // a Number hint.
10718         var_right = CallBuiltin(Builtins::kNonNumberToNumeric, context, right);
10719         Goto(&loop);
10720       }
10721     }
10722 
10723     BIND(&if_left_not_smi);
10724     {
10725       TNode<Map> left_map = LoadMap(CAST(left));
10726 
10727       Label if_right_smi(this), if_right_not_smi(this);
10728       Branch(TaggedIsSmi(right), &if_right_smi, &if_right_not_smi);
10729 
10730       BIND(&if_right_smi);
10731       {
10732         Label if_left_heapnumber(this), if_left_bigint(this, Label::kDeferred),
10733             if_left_not_numeric(this, Label::kDeferred);
10734         GotoIf(IsHeapNumberMap(left_map), &if_left_heapnumber);
10735         TNode<Uint16T> left_instance_type = LoadMapInstanceType(left_map);
10736         Branch(IsBigIntInstanceType(left_instance_type), &if_left_bigint,
10737                &if_left_not_numeric);
10738 
10739         BIND(&if_left_heapnumber);
10740         {
10741           CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
10742           var_left_float = LoadHeapNumberValue(CAST(left));
10743           var_right_float = SmiToFloat64(CAST(right));
10744           Goto(&do_float_comparison);
10745         }
10746 
10747         BIND(&if_left_bigint);
10748         {
10749           OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
10750           var_result = CAST(CallRuntime(Runtime::kBigIntCompareToNumber,
10751                                         NoContextConstant(), SmiConstant(op),
10752                                         left, right));
10753           Goto(&end);
10754         }
10755 
10756         BIND(&if_left_not_numeric);
10757         {
10758           OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
10759           // Convert {left} to a Numeric; we don't need to perform the
10760           // dedicated ToPrimitive(left, hint Number) operation, as the
10761           // ToNumeric(left) will by itself already invoke ToPrimitive with
10762           // a Number hint.
10763           var_left = CallBuiltin(Builtins::kNonNumberToNumeric, context, left);
10764           Goto(&loop);
10765         }
10766       }
10767 
10768       BIND(&if_right_not_smi);
10769       {
10770         TNode<Map> right_map = LoadMap(CAST(right));
10771 
10772         Label if_left_heapnumber(this), if_left_bigint(this, Label::kDeferred),
10773             if_left_string(this, Label::kDeferred),
10774             if_left_other(this, Label::kDeferred);
10775         GotoIf(IsHeapNumberMap(left_map), &if_left_heapnumber);
10776         TNode<Uint16T> left_instance_type = LoadMapInstanceType(left_map);
10777         GotoIf(IsBigIntInstanceType(left_instance_type), &if_left_bigint);
10778         Branch(IsStringInstanceType(left_instance_type), &if_left_string,
10779                &if_left_other);
10780 
10781         BIND(&if_left_heapnumber);
10782         {
10783           Label if_right_heapnumber(this),
10784               if_right_bigint(this, Label::kDeferred),
10785               if_right_not_numeric(this, Label::kDeferred);
10786           GotoIf(TaggedEqual(right_map, left_map), &if_right_heapnumber);
10787           TNode<Uint16T> right_instance_type = LoadMapInstanceType(right_map);
10788           Branch(IsBigIntInstanceType(right_instance_type), &if_right_bigint,
10789                  &if_right_not_numeric);
10790 
10791           BIND(&if_right_heapnumber);
10792           {
10793             CombineFeedback(var_type_feedback,
10794                             CompareOperationFeedback::kNumber);
10795             var_left_float = LoadHeapNumberValue(CAST(left));
10796             var_right_float = LoadHeapNumberValue(CAST(right));
10797             Goto(&do_float_comparison);
10798           }
10799 
10800           BIND(&if_right_bigint);
10801           {
10802             OverwriteFeedback(var_type_feedback,
10803                               CompareOperationFeedback::kAny);
10804             var_result = CAST(CallRuntime(
10805                 Runtime::kBigIntCompareToNumber, NoContextConstant(),
10806                 SmiConstant(Reverse(op)), right, left));
10807             Goto(&end);
10808           }
10809 
10810           BIND(&if_right_not_numeric);
10811           {
10812             OverwriteFeedback(var_type_feedback,
10813                               CompareOperationFeedback::kAny);
10814             // Convert {right} to a Numeric; we don't need to perform the
10815             // dedicated ToPrimitive(right, hint Number) operation, as the
10816             // ToNumeric(right) will by itself already invoke ToPrimitive with
10817             // a Number hint.
10818             var_right =
10819                 CallBuiltin(Builtins::kNonNumberToNumeric, context, right);
10820             Goto(&loop);
10821           }
10822         }
10823 
10824         BIND(&if_left_bigint);
10825         {
10826           Label if_right_heapnumber(this), if_right_bigint(this),
10827               if_right_string(this), if_right_other(this);
10828           GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
10829           TNode<Uint16T> right_instance_type = LoadMapInstanceType(right_map);
10830           GotoIf(IsBigIntInstanceType(right_instance_type), &if_right_bigint);
10831           Branch(IsStringInstanceType(right_instance_type), &if_right_string,
10832                  &if_right_other);
10833 
10834           BIND(&if_right_heapnumber);
10835           {
10836             OverwriteFeedback(var_type_feedback,
10837                               CompareOperationFeedback::kAny);
10838             var_result = CAST(CallRuntime(Runtime::kBigIntCompareToNumber,
10839                                           NoContextConstant(), SmiConstant(op),
10840                                           left, right));
10841             Goto(&end);
10842           }
10843 
10844           BIND(&if_right_bigint);
10845           {
10846             CombineFeedback(var_type_feedback,
10847                             CompareOperationFeedback::kBigInt);
10848             var_result = CAST(CallRuntime(Runtime::kBigIntCompareToBigInt,
10849                                           NoContextConstant(), SmiConstant(op),
10850                                           left, right));
10851             Goto(&end);
10852           }
10853 
10854           BIND(&if_right_string);
10855           {
10856             OverwriteFeedback(var_type_feedback,
10857                               CompareOperationFeedback::kAny);
10858             var_result = CAST(CallRuntime(Runtime::kBigIntCompareToString,
10859                                           NoContextConstant(), SmiConstant(op),
10860                                           left, right));
10861             Goto(&end);
10862           }
10863 
10864           // {right} is not a Number, BigInt, or String.
10865           BIND(&if_right_other);
10866           {
10867             OverwriteFeedback(var_type_feedback,
10868                               CompareOperationFeedback::kAny);
10869             // Convert {right} to a Numeric; we don't need to perform the
10870             // dedicated ToPrimitive(right, hint Number) operation, as the
10871             // ToNumeric(right) will by itself already invoke ToPrimitive with
10872             // a Number hint.
10873             var_right =
10874                 CallBuiltin(Builtins::kNonNumberToNumeric, context, right);
10875             Goto(&loop);
10876           }
10877         }
10878 
10879         BIND(&if_left_string);
10880         {
10881           TNode<Uint16T> right_instance_type = LoadMapInstanceType(right_map);
10882 
10883           Label if_right_not_string(this, Label::kDeferred);
10884           GotoIfNot(IsStringInstanceType(right_instance_type),
10885                     &if_right_not_string);
10886 
10887           // Both {left} and {right} are strings.
10888           CombineFeedback(var_type_feedback, CompareOperationFeedback::kString);
10889           Builtins::Name builtin;
10890           switch (op) {
10891             case Operation::kLessThan:
10892               builtin = Builtins::kStringLessThan;
10893               break;
10894             case Operation::kLessThanOrEqual:
10895               builtin = Builtins::kStringLessThanOrEqual;
10896               break;
10897             case Operation::kGreaterThan:
10898               builtin = Builtins::kStringGreaterThan;
10899               break;
10900             case Operation::kGreaterThanOrEqual:
10901               builtin = Builtins::kStringGreaterThanOrEqual;
10902               break;
10903             default:
10904               UNREACHABLE();
10905           }
10906           var_result = CAST(CallBuiltin(builtin, context, left, right));
10907           Goto(&end);
10908 
10909           BIND(&if_right_not_string);
10910           {
10911             OverwriteFeedback(var_type_feedback,
10912                               CompareOperationFeedback::kAny);
10913             // {left} is a String, while {right} isn't. Check if {right} is
10914             // a BigInt; otherwise call ToPrimitive(right, hint Number) if
10915             // {right} is a receiver, or ToNumeric(left) and then
10916             // ToNumeric(right) in the other cases.
10917             STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
10918             Label if_right_bigint(this),
10919                 if_right_receiver(this, Label::kDeferred);
10920             GotoIf(IsBigIntInstanceType(right_instance_type), &if_right_bigint);
10921             GotoIf(IsJSReceiverInstanceType(right_instance_type),
10922                    &if_right_receiver);
10923 
10924             var_left =
10925                 CallBuiltin(Builtins::kNonNumberToNumeric, context, left);
10926             var_right = CallBuiltin(Builtins::kToNumeric, context, right);
10927             Goto(&loop);
10928 
10929             BIND(&if_right_bigint);
10930             {
10931               var_result = CAST(CallRuntime(
10932                   Runtime::kBigIntCompareToString, NoContextConstant(),
10933                   SmiConstant(Reverse(op)), right, left));
10934               Goto(&end);
10935             }
10936 
10937             BIND(&if_right_receiver);
10938             {
10939               Callable callable = CodeFactory::NonPrimitiveToPrimitive(
10940                   isolate(), ToPrimitiveHint::kNumber);
10941               var_right = CallStub(callable, context, right);
10942               Goto(&loop);
10943             }
10944           }
10945         }
10946 
10947         BIND(&if_left_other);
10948         {
10949           // {left} is neither a Numeric nor a String, and {right} is not a Smi.
10950           if (var_type_feedback != nullptr) {
10951             // Collect NumberOrOddball feedback if {left} is an Oddball
10952             // and {right} is either a HeapNumber or Oddball. Otherwise collect
10953             // Any feedback.
10954             Label collect_any_feedback(this), collect_oddball_feedback(this),
10955                 collect_feedback_done(this);
10956             GotoIfNot(InstanceTypeEqual(left_instance_type, ODDBALL_TYPE),
10957                       &collect_any_feedback);
10958 
10959             GotoIf(IsHeapNumberMap(right_map), &collect_oddball_feedback);
10960             TNode<Uint16T> right_instance_type = LoadMapInstanceType(right_map);
10961             Branch(InstanceTypeEqual(right_instance_type, ODDBALL_TYPE),
10962                    &collect_oddball_feedback, &collect_any_feedback);
10963 
10964             BIND(&collect_oddball_feedback);
10965             {
10966               CombineFeedback(var_type_feedback,
10967                               CompareOperationFeedback::kNumberOrOddball);
10968               Goto(&collect_feedback_done);
10969             }
10970 
10971             BIND(&collect_any_feedback);
10972             {
10973               OverwriteFeedback(var_type_feedback,
10974                                 CompareOperationFeedback::kAny);
10975               Goto(&collect_feedback_done);
10976             }
10977 
10978             BIND(&collect_feedback_done);
10979           }
10980 
10981           // If {left} is a receiver, call ToPrimitive(left, hint Number).
10982           // Otherwise call ToNumeric(right) and then ToNumeric(left); the
10983           // order here is important, as it is observable by user code.
10984           STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
10985           Label if_left_receiver(this, Label::kDeferred);
10986           GotoIf(IsJSReceiverInstanceType(left_instance_type),
10987                  &if_left_receiver);
10988 
10989           var_right = CallBuiltin(Builtins::kToNumeric, context, right);
10990           var_left = CallBuiltin(Builtins::kNonNumberToNumeric, context, left);
10991           Goto(&loop);
10992 
10993           BIND(&if_left_receiver);
10994           {
10995             Callable callable = CodeFactory::NonPrimitiveToPrimitive(
10996                 isolate(), ToPrimitiveHint::kNumber);
10997             var_left = CallStub(callable, context, left);
10998             Goto(&loop);
10999           }
11000         }
11001       }
11002     }
11003   }
11004 
11005   BIND(&do_float_comparison);
11006   {
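          // All four Float64 comparisons below are false when either operand is
          // NaN, matching JS semantics: e.g. `NaN < 1` and `1 < NaN` are both false.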
11007     switch (op) {
11008       case Operation::kLessThan:
11009         Branch(Float64LessThan(var_left_float.value(), var_right_float.value()),
11010                &return_true, &return_false);
11011         break;
11012       case Operation::kLessThanOrEqual:
11013         Branch(Float64LessThanOrEqual(var_left_float.value(),
11014                                       var_right_float.value()),
11015                &return_true, &return_false);
11016         break;
11017       case Operation::kGreaterThan:
11018         Branch(
11019             Float64GreaterThan(var_left_float.value(), var_right_float.value()),
11020             &return_true, &return_false);
11021         break;
11022       case Operation::kGreaterThanOrEqual:
11023         Branch(Float64GreaterThanOrEqual(var_left_float.value(),
11024                                          var_right_float.value()),
11025                &return_true, &return_false);
11026         break;
11027       default:
11028         UNREACHABLE();
11029     }
11030   }
11031 
11032   BIND(&return_true);
11033   {
11034     var_result = TrueConstant();
11035     Goto(&end);
11036   }
11037 
11038   BIND(&return_false);
11039   {
11040     var_result = FalseConstant();
11041     Goto(&end);
11042   }
11043 
11044   BIND(&end);
11045   return var_result.value();
11046 }
11047 
11048 TNode<Smi> CodeStubAssembler::CollectFeedbackForString(
11049     SloppyTNode<Int32T> instance_type) {
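        // Internalized strings are unique per content and can therefore be
        // compared by pointer identity, so they get the more precise
        // kInternalizedString feedback; any other string widens it to kString.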
11050   TNode<Smi> feedback = SelectSmiConstant(
11051       Word32Equal(
11052           Word32And(instance_type, Int32Constant(kIsNotInternalizedMask)),
11053           Int32Constant(kInternalizedTag)),
11054       CompareOperationFeedback::kInternalizedString,
11055       CompareOperationFeedback::kString);
11056   return feedback;
11057 }
11058 
11059 void CodeStubAssembler::GenerateEqual_Same(SloppyTNode<Object> value,
11060                                            Label* if_equal, Label* if_notequal,
11061                                            TVariable<Smi>* var_type_feedback) {
11062   // For both abstract and strict equality checks, we need an additional check
11063   // for NaN values, because NaN is not considered equal to NaN, even if the
11064   // left- and right-hand side reference exactly the same value.
11065 
11066   Label if_smi(this), if_heapnumber(this);
11067   GotoIf(TaggedIsSmi(value), &if_smi);
11068 
11069   TNode<HeapObject> value_heapobject = CAST(value);
11070   TNode<Map> value_map = LoadMap(value_heapobject);
11071   GotoIf(IsHeapNumberMap(value_map), &if_heapnumber);
11072 
11073   // For non-HeapNumbers, all we do is collect type feedback.
11074   if (var_type_feedback != nullptr) {
11075     TNode<Uint16T> instance_type = LoadMapInstanceType(value_map);
11076 
11077     Label if_string(this), if_receiver(this), if_oddball(this), if_symbol(this),
11078         if_bigint(this);
11079     GotoIf(IsStringInstanceType(instance_type), &if_string);
11080     GotoIf(IsJSReceiverInstanceType(instance_type), &if_receiver);
11081     GotoIf(IsOddballInstanceType(instance_type), &if_oddball);
11082     Branch(IsBigIntInstanceType(instance_type), &if_bigint, &if_symbol);
11083 
11084     BIND(&if_string);
11085     {
11086       CSA_ASSERT(this, IsString(value_heapobject));
11087       CombineFeedback(var_type_feedback,
11088                       CollectFeedbackForString(instance_type));
11089       Goto(if_equal);
11090     }
11091 
11092     BIND(&if_symbol);
11093     {
11094       CSA_ASSERT(this, IsSymbol(value_heapobject));
11095       CombineFeedback(var_type_feedback, CompareOperationFeedback::kSymbol);
11096       Goto(if_equal);
11097     }
11098 
11099     BIND(&if_receiver);
11100     {
11101       CSA_ASSERT(this, IsJSReceiver(value_heapobject));
11102       CombineFeedback(var_type_feedback, CompareOperationFeedback::kReceiver);
11103       Goto(if_equal);
11104     }
11105 
11106     BIND(&if_bigint);
11107     {
11108       CSA_ASSERT(this, IsBigInt(value_heapobject));
11109       CombineFeedback(var_type_feedback, CompareOperationFeedback::kBigInt);
11110       Goto(if_equal);
11111     }
11112 
11113     BIND(&if_oddball);
11114     {
11115       CSA_ASSERT(this, IsOddball(value_heapobject));
11116       Label if_boolean(this), if_not_boolean(this);
11117       Branch(IsBooleanMap(value_map), &if_boolean, &if_not_boolean);
11118 
11119       BIND(&if_boolean);
11120       {
11121         CombineFeedback(var_type_feedback, CompareOperationFeedback::kBoolean);
11122         Goto(if_equal);
11123       }
11124 
11125       BIND(&if_not_boolean);
11126       {
11127         CSA_ASSERT(this, IsNullOrUndefined(value_heapobject));
11128         CombineFeedback(var_type_feedback,
11129                         CompareOperationFeedback::kReceiverOrNullOrUndefined);
11130         Goto(if_equal);
11131       }
11132     }
11133   } else {
11134     Goto(if_equal);
11135   }
11136 
11137   BIND(&if_heapnumber);
11138   {
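          // A NaN HeapNumber is not equal to itself even though {value} is the
          // same reference: in JS, `const x = NaN; x == x` and `x === x` are both
          // false.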
11139     CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
11140     TNode<Float64T> number_value = LoadHeapNumberValue(value_heapobject);
11141     BranchIfFloat64IsNaN(number_value, if_notequal, if_equal);
11142   }
11143 
11144   BIND(&if_smi);
11145   {
11146     CombineFeedback(var_type_feedback, CompareOperationFeedback::kSignedSmall);
11147     Goto(if_equal);
11148   }
11149 }
11150 
11151 // ES6 section 7.2.12 Abstract Equality Comparison
11152 TNode<Oddball> CodeStubAssembler::Equal(SloppyTNode<Object> left,
11153                                         SloppyTNode<Object> right,
11154                                         TNode<Context> context,
11155                                         TVariable<Smi>* var_type_feedback) {
11156   // This is a slightly optimized version of Object::Equals. Whenever you
11157   // change something functionality-wise in here, remember to update the
11158   // Object::Equals method as well.
11159 
11160   Label if_equal(this), if_notequal(this), do_float_comparison(this),
11161       do_right_stringtonumber(this, Label::kDeferred), end(this);
11162   TVARIABLE(Oddball, result);
11163   TVARIABLE(Float64T, var_left_float);
11164   TVARIABLE(Float64T, var_right_float);
11165 
11166   // We can avoid code duplication by exploiting the fact that abstract equality
11167   // is symmetric.
11168   Label use_symmetry(this);
11169 
11170   // We might need to loop several times due to ToPrimitive and/or ToNumber
11171   // conversions.
11172   TVARIABLE(Object, var_left, left);
11173   TVARIABLE(Object, var_right, right);
11174   VariableList loop_variable_list({&var_left, &var_right}, zone());
11175   if (var_type_feedback != nullptr) {
11176     // Initialize the type feedback to None. The current feedback will be
11177     // combined with the previous feedback.
11178     OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kNone);
11179     loop_variable_list.push_back(var_type_feedback);
11180   }
11181   Label loop(this, loop_variable_list);
11182   Goto(&loop);
11183   BIND(&loop);
11184   {
11185     left = var_left.value();
11186     right = var_right.value();
11187 
11188     Label if_notsame(this);
11189     GotoIf(TaggedNotEqual(left, right), &if_notsame);
11190     {
11191       // {left} and {right} reference the exact same value, yet we need special
11192       // treatment for HeapNumber, as NaN is not equal to NaN.
11193       GenerateEqual_Same(left, &if_equal, &if_notequal, var_type_feedback);
11194     }
11195 
11196     BIND(&if_notsame);
11197     Label if_left_smi(this), if_left_not_smi(this);
11198     Branch(TaggedIsSmi(left), &if_left_smi, &if_left_not_smi);
11199 
11200     BIND(&if_left_smi);
11201     {
11202       Label if_right_smi(this), if_right_not_smi(this);
11203       CombineFeedback(var_type_feedback,
11204                       CompareOperationFeedback::kSignedSmall);
11205       Branch(TaggedIsSmi(right), &if_right_smi, &if_right_not_smi);
11206 
11207       BIND(&if_right_smi);
11208       {
11209         // We have already checked for {left} and {right} being the same value,
11210         // so when we get here they must be different Smis.
11211         Goto(&if_notequal);
11212       }
11213 
11214       BIND(&if_right_not_smi);
11215       {
11216         TNode<Map> right_map = LoadMap(CAST(right));
11217         Label if_right_heapnumber(this), if_right_boolean(this),
11218             if_right_oddball(this), if_right_bigint(this, Label::kDeferred),
11219             if_right_receiver(this, Label::kDeferred);
11220         GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
11221 
11222         // {left} is Smi and {right} is not HeapNumber or Smi.
11223         TNode<Uint16T> right_type = LoadMapInstanceType(right_map);
11224         GotoIf(IsStringInstanceType(right_type), &do_right_stringtonumber);
11225         GotoIf(IsOddballInstanceType(right_type), &if_right_oddball);
11226         GotoIf(IsBigIntInstanceType(right_type), &if_right_bigint);
11227         GotoIf(IsJSReceiverInstanceType(right_type), &if_right_receiver);
11228         CombineFeedback(var_type_feedback, CompareOperationFeedback::kAny);
11229         Goto(&if_notequal);
11230 
11231         BIND(&if_right_heapnumber);
11232         {
11233           CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
11234           var_left_float = SmiToFloat64(CAST(left));
11235           var_right_float = LoadHeapNumberValue(CAST(right));
11236           Goto(&do_float_comparison);
11237         }
11238 
11239         BIND(&if_right_oddball);
11240         {
11241           Label if_right_boolean(this);
11242           GotoIf(IsBooleanMap(right_map), &if_right_boolean);
11243           CombineFeedback(var_type_feedback,
11244                           CompareOperationFeedback::kOddball);
11245           Goto(&if_notequal);
11246 
11247           BIND(&if_right_boolean);
11248           {
11249             CombineFeedback(var_type_feedback,
11250                             CompareOperationFeedback::kBoolean);
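                  // Booleans compare via their cached ToNumber value (false -> 0,
                  // true -> 1), so e.g. `1 == true` loops around as `1 == 1`.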
11251             var_right = LoadObjectField(CAST(right), Oddball::kToNumberOffset);
11252             Goto(&loop);
11253           }
11254         }
11255 
11256         BIND(&if_right_bigint);
11257         {
11258           CombineFeedback(var_type_feedback, CompareOperationFeedback::kBigInt);
11259           result = CAST(CallRuntime(Runtime::kBigIntEqualToNumber,
11260                                     NoContextConstant(), right, left));
11261           Goto(&end);
11262         }
11263 
11264         BIND(&if_right_receiver);
11265         {
11266           CombineFeedback(var_type_feedback,
11267                           CompareOperationFeedback::kReceiver);
11268           Callable callable = CodeFactory::NonPrimitiveToPrimitive(isolate());
11269           var_right = CallStub(callable, context, right);
11270           Goto(&loop);
11271         }
11272       }
11273     }
11274 
11275     BIND(&if_left_not_smi);
11276     {
11277       GotoIf(TaggedIsSmi(right), &use_symmetry);
11278 
11279       Label if_left_symbol(this), if_left_number(this),
11280           if_left_string(this, Label::kDeferred),
11281           if_left_bigint(this, Label::kDeferred), if_left_oddball(this),
11282           if_left_receiver(this);
11283 
11284       TNode<Map> left_map = LoadMap(CAST(left));
11285       TNode<Map> right_map = LoadMap(CAST(right));
11286       TNode<Uint16T> left_type = LoadMapInstanceType(left_map);
11287       TNode<Uint16T> right_type = LoadMapInstanceType(right_map);
11288 
11289       GotoIf(IsStringInstanceType(left_type), &if_left_string);
11290       GotoIf(IsSymbolInstanceType(left_type), &if_left_symbol);
11291       GotoIf(IsHeapNumberInstanceType(left_type), &if_left_number);
11292       GotoIf(IsOddballInstanceType(left_type), &if_left_oddball);
11293       Branch(IsBigIntInstanceType(left_type), &if_left_bigint,
11294              &if_left_receiver);
11295 
11296       BIND(&if_left_string);
11297       {
11298         GotoIfNot(IsStringInstanceType(right_type), &use_symmetry);
11299         result =
11300             CAST(CallBuiltin(Builtins::kStringEqual, context, left, right));
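              // SmiOr merges the per-operand string feedback: the combined result
              // stays kInternalizedString only if both operands are internalized
              // strings, and widens to kString otherwise.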
11301         CombineFeedback(var_type_feedback,
11302                         SmiOr(CollectFeedbackForString(left_type),
11303                               CollectFeedbackForString(right_type)));
11304         Goto(&end);
11305       }
11306 
11307       BIND(&if_left_number);
11308       {
11309         Label if_right_not_number(this);
11310 
11311         CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
11312         GotoIf(Word32NotEqual(left_type, right_type), &if_right_not_number);
11313 
11314         var_left_float = LoadHeapNumberValue(CAST(left));
11315         var_right_float = LoadHeapNumberValue(CAST(right));
11316         Goto(&do_float_comparison);
11317 
11318         BIND(&if_right_not_number);
11319         {
11320           Label if_right_oddball(this);
11321 
11322           GotoIf(IsStringInstanceType(right_type), &do_right_stringtonumber);
11323           GotoIf(IsOddballInstanceType(right_type), &if_right_oddball);
11324           GotoIf(IsBigIntInstanceType(right_type), &use_symmetry);
11325           GotoIf(IsJSReceiverInstanceType(right_type), &use_symmetry);
11326           CombineFeedback(var_type_feedback, CompareOperationFeedback::kAny);
11327           Goto(&if_notequal);
11328 
11329           BIND(&if_right_oddball);
11330           {
11331             Label if_right_boolean(this);
11332             GotoIf(IsBooleanMap(right_map), &if_right_boolean);
11333             CombineFeedback(var_type_feedback,
11334                             CompareOperationFeedback::kOddball);
11335             Goto(&if_notequal);
11336 
11337             BIND(&if_right_boolean);
11338             {
11339               CombineFeedback(var_type_feedback,
11340                               CompareOperationFeedback::kBoolean);
11341               var_right =
11342                   LoadObjectField(CAST(right), Oddball::kToNumberOffset);
11343               Goto(&loop);
11344             }
11345           }
11346         }
11347       }
11348 
11349       BIND(&if_left_bigint);
11350       {
11351         Label if_right_heapnumber(this), if_right_bigint(this),
11352             if_right_string(this), if_right_boolean(this);
11353         CombineFeedback(var_type_feedback, CompareOperationFeedback::kBigInt);
11354 
11355         GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
11356         GotoIf(IsBigIntInstanceType(right_type), &if_right_bigint);
11357         GotoIf(IsStringInstanceType(right_type), &if_right_string);
11358         GotoIf(IsBooleanMap(right_map), &if_right_boolean);
11359         Branch(IsJSReceiverInstanceType(right_type), &use_symmetry,
11360                &if_notequal);
11361 
11362         BIND(&if_right_heapnumber);
11363         {
11364           CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
11365           result = CAST(CallRuntime(Runtime::kBigIntEqualToNumber,
11366                                     NoContextConstant(), left, right));
11367           Goto(&end);
11368         }
11369 
11370         BIND(&if_right_bigint);
11371         {
11372           // We already have BigInt feedback.
11373           result = CAST(CallRuntime(Runtime::kBigIntEqualToBigInt,
11374                                     NoContextConstant(), left, right));
11375           Goto(&end);
11376         }
11377 
11378         BIND(&if_right_string);
11379         {
11380           CombineFeedback(var_type_feedback, CompareOperationFeedback::kString);
11381           result = CAST(CallRuntime(Runtime::kBigIntEqualToString,
11382                                     NoContextConstant(), left, right));
11383           Goto(&end);
11384         }
11385 
11386         BIND(&if_right_boolean);
11387         {
11388           CombineFeedback(var_type_feedback,
11389                           CompareOperationFeedback::kBoolean);
11390           var_right = LoadObjectField(CAST(right), Oddball::kToNumberOffset);
11391           Goto(&loop);
11392         }
11393       }
11394 
11395       BIND(&if_left_oddball);
11396       {
11397         Label if_left_boolean(this), if_left_not_boolean(this);
11398         GotoIf(IsBooleanMap(left_map), &if_left_boolean);
11399         if (var_type_feedback != nullptr) {
11400           CombineFeedback(var_type_feedback,
11401                           CompareOperationFeedback::kNullOrUndefined);
11402           GotoIf(IsUndetectableMap(left_map), &if_left_not_boolean);
11403         }
11404         Goto(&if_left_not_boolean);
11405 
11406         BIND(&if_left_not_boolean);
11407         {
11408           // {left} is either Null or Undefined. Check if {right} is
11409           // undetectable (which includes Null and Undefined).
11410           Label if_right_undetectable(this), if_right_number(this),
11411               if_right_oddball(this),
11412               if_right_not_number_or_oddball_or_undetectable(this);
11413           GotoIf(IsUndetectableMap(right_map), &if_right_undetectable);
11414           GotoIf(IsHeapNumberInstanceType(right_type), &if_right_number);
11415           GotoIf(IsOddballInstanceType(right_type), &if_right_oddball);
11416           Goto(&if_right_not_number_or_oddball_or_undetectable);
11417 
11418           BIND(&if_right_undetectable);
11419           {
11420             // If {right} is undetectable, it must be either also
11421             // Null or Undefined, or a Receiver (aka document.all).
11422             CombineFeedback(
11423                 var_type_feedback,
11424                 CompareOperationFeedback::kReceiverOrNullOrUndefined);
11425             Goto(&if_equal);
11426           }
11427 
11428           BIND(&if_right_number);
11429           {
11430             CombineFeedback(var_type_feedback,
11431                             CompareOperationFeedback::kNumber);
11432             Goto(&if_notequal);
11433           }
11434 
11435           BIND(&if_right_oddball);
11436           {
11437             CombineFeedback(var_type_feedback,
11438                             CompareOperationFeedback::kOddball);
11439             Goto(&if_notequal);
11440           }
11441 
11442           BIND(&if_right_not_number_or_oddball_or_undetectable);
11443           {
11444             if (var_type_feedback != nullptr) {
11445               // Track whether {right} is Null, Undefined or Receiver.
11446               CombineFeedback(
11447                   var_type_feedback,
11448                   CompareOperationFeedback::kReceiverOrNullOrUndefined);
11449               GotoIf(IsJSReceiverInstanceType(right_type), &if_notequal);
11450               CombineFeedback(var_type_feedback,
11451                               CompareOperationFeedback::kAny);
11452             }
11453             Goto(&if_notequal);
11454           }
11455         }
11456 
11457         BIND(&if_left_boolean);
11458         {
11459           CombineFeedback(var_type_feedback,
11460                           CompareOperationFeedback::kBoolean);
11461 
11462           // If {right} is a Boolean too, it must be a different Boolean.
11463           GotoIf(TaggedEqual(right_map, left_map), &if_notequal);
11464 
11465           // Otherwise, convert {left} to number and try again.
11466           var_left = LoadObjectField(CAST(left), Oddball::kToNumberOffset);
11467           Goto(&loop);
11468         }
11469       }
11470 
11471       BIND(&if_left_symbol);
11472       {
11473         Label if_right_receiver(this);
11474         GotoIf(IsJSReceiverInstanceType(right_type), &if_right_receiver);
11475         // {right} is not a JSReceiver and also not the same Symbol as {left},
11476         // so the result is "not equal".
11477         if (var_type_feedback != nullptr) {
11478           Label if_right_symbol(this);
11479           GotoIf(IsSymbolInstanceType(right_type), &if_right_symbol);
11480           *var_type_feedback = SmiConstant(CompareOperationFeedback::kAny);
11481           Goto(&if_notequal);
11482 
11483           BIND(&if_right_symbol);
11484           {
11485             CombineFeedback(var_type_feedback,
11486                             CompareOperationFeedback::kSymbol);
11487             Goto(&if_notequal);
11488           }
11489         } else {
11490           Goto(&if_notequal);
11491         }
11492 
11493         BIND(&if_right_receiver);
11494         {
11495           // {left} is a Primitive and {right} is a JSReceiver, so swapping
11496           // the order is not observable.
11497           if (var_type_feedback != nullptr) {
11498             *var_type_feedback = SmiConstant(CompareOperationFeedback::kAny);
11499           }
11500           Goto(&use_symmetry);
11501         }
11502       }
11503 
11504       BIND(&if_left_receiver);
11505       {
11506         CSA_ASSERT(this, IsJSReceiverInstanceType(left_type));
11507         Label if_right_receiver(this), if_right_not_receiver(this);
11508         Branch(IsJSReceiverInstanceType(right_type), &if_right_receiver,
11509                &if_right_not_receiver);
11510 
11511         BIND(&if_right_receiver);
11512         {
11513           // {left} and {right} are different JSReceiver references.
11514           CombineFeedback(var_type_feedback,
11515                           CompareOperationFeedback::kReceiver);
11516           Goto(&if_notequal);
11517         }
11518 
11519         BIND(&if_right_not_receiver);
11520         {
11521           // Check if {right} is undetectable, which means it must be Null
11522           // or Undefined, since we already ruled out Receiver for {right}.
11523           Label if_right_undetectable(this),
11524               if_right_not_undetectable(this, Label::kDeferred);
11525           Branch(IsUndetectableMap(right_map), &if_right_undetectable,
11526                  &if_right_not_undetectable);
11527 
11528           BIND(&if_right_undetectable);
11529           {
11530             // When we get here, {right} must be either Null or Undefined.
11531             CSA_ASSERT(this, IsNullOrUndefined(right));
11532             if (var_type_feedback != nullptr) {
11533               *var_type_feedback = SmiConstant(
11534                   CompareOperationFeedback::kReceiverOrNullOrUndefined);
11535             }
11536             Branch(IsUndetectableMap(left_map), &if_equal, &if_notequal);
11537           }
11538 
11539           BIND(&if_right_not_undetectable);
11540           {
11541             // {right} is a Primitive, and neither Null nor Undefined;
11542             // convert {left} to Primitive too.
11543             CombineFeedback(var_type_feedback, CompareOperationFeedback::kAny);
11544             Callable callable = CodeFactory::NonPrimitiveToPrimitive(isolate());
11545             var_left = CallStub(callable, context, left);
11546             Goto(&loop);
11547           }
11548         }
11549       }
11550     }
11551 
11552     BIND(&do_right_stringtonumber);
11553     {
11554       if (var_type_feedback != nullptr) {
11555         TNode<Map> right_map = LoadMap(CAST(right));
11556         TNode<Uint16T> right_type = LoadMapInstanceType(right_map);
11557         CombineFeedback(var_type_feedback,
11558                         CollectFeedbackForString(right_type));
11559       }
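            // Per the abstract equality algorithm, a String compared with a Number
            // is converted via ToNumber first; e.g. `1 == "1"` loops around as
            // `1 == 1`.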
11560       var_right = CallBuiltin(Builtins::kStringToNumber, context, right);
11561       Goto(&loop);
11562     }
11563 
11564     BIND(&use_symmetry);
11565     {
11566       var_left = right;
11567       var_right = left;
11568       Goto(&loop);
11569     }
11570   }
11571 
11572   BIND(&do_float_comparison);
11573   {
11574     Branch(Float64Equal(var_left_float.value(), var_right_float.value()),
11575            &if_equal, &if_notequal);
11576   }
11577 
11578   BIND(&if_equal);
11579   {
11580     result = TrueConstant();
11581     Goto(&end);
11582   }
11583 
11584   BIND(&if_notequal);
11585   {
11586     result = FalseConstant();
11587     Goto(&end);
11588   }
11589 
11590   BIND(&end);
11591   return result.value();
11592 }
11593 
11594 TNode<Oddball> CodeStubAssembler::StrictEqual(
11595     SloppyTNode<Object> lhs, SloppyTNode<Object> rhs,
11596     TVariable<Smi>* var_type_feedback) {
11597   // Pseudo-code for the algorithm below:
11598   //
11599   // if (lhs == rhs) {
11600   //   if (lhs->IsHeapNumber()) return !std::isnan(HeapNumber::cast(lhs)->value());
11601   //   return true;
11602   // }
11603   // if (!lhs->IsSmi()) {
11604   //   if (lhs->IsHeapNumber()) {
11605   //     if (rhs->IsSmi()) {
11606   //       return Smi::ToInt(rhs) == HeapNumber::cast(lhs)->value();
11607   //     } else if (rhs->IsHeapNumber()) {
11608   //       return HeapNumber::cast(rhs)->value() ==
11609   //       HeapNumber::cast(lhs)->value();
11610   //     } else {
11611   //       return false;
11612   //     }
11613   //   } else {
11614   //     if (rhs->IsSmi()) {
11615   //       return false;
11616   //     } else {
11617   //       if (lhs->IsString()) {
11618   //         if (rhs->IsString()) {
11619   //           return %StringEqual(lhs, rhs);
11620   //         } else {
11621   //           return false;
11622   //         }
11623   //       } else if (lhs->IsBigInt()) {
11624   //         if (rhs->IsBigInt()) {
11625   //           return %BigIntEqualToBigInt(lhs, rhs);
11626   //         } else {
11627   //           return false;
11628   //         }
11629   //       } else {
11630   //         return false;
11631   //       }
11632   //     }
11633   //   }
11634   // } else {
11635   //   if (rhs->IsSmi()) {
11636   //     return false;
11637   //   } else {
11638   //     if (rhs->IsHeapNumber()) {
11639   //       return Smi::ToInt(lhs) == HeapNumber::cast(rhs)->value();
11640   //     } else {
11641   //       return false;
11642   //     }
11643   //   }
11644   // }
11645 
11646   Label if_equal(this), if_notequal(this), if_not_equivalent_types(this),
11647       end(this);
11648   TVARIABLE(Oddball, result);
11649 
11650   OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kNone);
11651 
11652   // Check if {lhs} and {rhs} refer to the same object.
11653   Label if_same(this), if_notsame(this);
11654   Branch(TaggedEqual(lhs, rhs), &if_same, &if_notsame);
11655 
11656   BIND(&if_same);
11657   {
11658     // The {lhs} and {rhs} reference the exact same value, yet we need special
11659     // treatment for HeapNumber, as NaN is not equal to NaN.
11660     GenerateEqual_Same(lhs, &if_equal, &if_notequal, var_type_feedback);
11661   }
11662 
11663   BIND(&if_notsame);
11664   {
11665     // The {lhs} and {rhs} reference different objects, yet for Smi, HeapNumber,
11666     // BigInt and String they can still be considered equal.
11667 
11668     // Check if {lhs} is a Smi or a HeapObject.
11669     Label if_lhsissmi(this), if_lhsisnotsmi(this);
11670     Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisnotsmi);
11671 
11672     BIND(&if_lhsisnotsmi);
11673     {
11674       // Load the map of {lhs}.
11675       TNode<Map> lhs_map = LoadMap(CAST(lhs));
11676 
11677       // Check if {lhs} is a HeapNumber.
11678       Label if_lhsisnumber(this), if_lhsisnotnumber(this);
11679       Branch(IsHeapNumberMap(lhs_map), &if_lhsisnumber, &if_lhsisnotnumber);
11680 
11681       BIND(&if_lhsisnumber);
11682       {
11683         // Check if {rhs} is a Smi or a HeapObject.
11684         Label if_rhsissmi(this), if_rhsisnotsmi(this);
11685         Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
11686 
11687         BIND(&if_rhsissmi);
11688         {
11689           // Convert {lhs} and {rhs} to floating point values.
11690           TNode<Float64T> lhs_value = LoadHeapNumberValue(CAST(lhs));
11691           TNode<Float64T> rhs_value = SmiToFloat64(CAST(rhs));
11692 
11693           CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
11694 
11695           // Perform a floating point comparison of {lhs} and {rhs}.
11696           Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
11697         }
11698 
11699         BIND(&if_rhsisnotsmi);
11700         {
11701           TNode<HeapObject> rhs_ho = CAST(rhs);
11702           // Load the map of {rhs}.
11703           TNode<Map> rhs_map = LoadMap(rhs_ho);
11704 
11705           // Check if {rhs} is also a HeapNumber.
11706           Label if_rhsisnumber(this), if_rhsisnotnumber(this);
11707           Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber, &if_rhsisnotnumber);
11708 
11709           BIND(&if_rhsisnumber);
11710           {
11711             // Convert {lhs} and {rhs} to floating point values.
11712             TNode<Float64T> lhs_value = LoadHeapNumberValue(CAST(lhs));
11713             TNode<Float64T> rhs_value = LoadHeapNumberValue(CAST(rhs));
11714 
11715             CombineFeedback(var_type_feedback,
11716                             CompareOperationFeedback::kNumber);
11717 
11718             // Perform a floating point comparison of {lhs} and {rhs}.
11719             Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
11720           }
11721 
11722           BIND(&if_rhsisnotnumber);
11723           Goto(&if_not_equivalent_types);
11724         }
11725       }
11726 
11727       BIND(&if_lhsisnotnumber);
11728       {
11729         // Check if {rhs} is a Smi or a HeapObject.
11730         Label if_rhsissmi(this), if_rhsisnotsmi(this);
11731         Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
11732 
11733         BIND(&if_rhsissmi);
11734         Goto(&if_not_equivalent_types);
11735 
11736         BIND(&if_rhsisnotsmi);
11737         {
11738           // Load the instance type of {lhs}.
11739           TNode<Uint16T> lhs_instance_type = LoadMapInstanceType(lhs_map);
11740 
11741           // Check if {lhs} is a String.
11742           Label if_lhsisstring(this, Label::kDeferred), if_lhsisnotstring(this);
11743           Branch(IsStringInstanceType(lhs_instance_type), &if_lhsisstring,
11744                  &if_lhsisnotstring);
11745 
11746           BIND(&if_lhsisstring);
11747           {
11748             // Load the instance type of {rhs}.
11749             TNode<Uint16T> rhs_instance_type = LoadInstanceType(CAST(rhs));
11750 
11751             // Check if {rhs} is also a String.
11752             Label if_rhsisstring(this, Label::kDeferred),
11753                 if_rhsisnotstring(this);
11754             Branch(IsStringInstanceType(rhs_instance_type), &if_rhsisstring,
11755                    &if_rhsisnotstring);
11756 
11757             BIND(&if_rhsisstring);
11758             {
11759               if (var_type_feedback != nullptr) {
11760                 TNode<Smi> lhs_feedback =
11761                     CollectFeedbackForString(lhs_instance_type);
11762                 TNode<Smi> rhs_feedback =
11763                     CollectFeedbackForString(rhs_instance_type);
11764                 *var_type_feedback = SmiOr(lhs_feedback, rhs_feedback);
11765               }
11766               result = CAST(CallBuiltin(Builtins::kStringEqual,
11767                                         NoContextConstant(), lhs, rhs));
11768               Goto(&end);
11769             }
11770 
11771             BIND(&if_rhsisnotstring);
11772             Goto(&if_not_equivalent_types);
11773           }
11774 
11775           BIND(&if_lhsisnotstring);
11776           {
11777             // Check if {lhs} is a BigInt.
11778             Label if_lhsisbigint(this), if_lhsisnotbigint(this);
11779             Branch(IsBigIntInstanceType(lhs_instance_type), &if_lhsisbigint,
11780                    &if_lhsisnotbigint);
11781 
11782             BIND(&if_lhsisbigint);
11783             {
11784               // Load the instance type of {rhs}.
11785               TNode<Uint16T> rhs_instance_type = LoadInstanceType(CAST(rhs));
11786 
11787               // Check if {rhs} is also a BigInt.
11788               Label if_rhsisbigint(this, Label::kDeferred),
11789                   if_rhsisnotbigint(this);
11790               Branch(IsBigIntInstanceType(rhs_instance_type), &if_rhsisbigint,
11791                      &if_rhsisnotbigint);
11792 
11793               BIND(&if_rhsisbigint);
11794               {
11795                 CombineFeedback(var_type_feedback,
11796                                 CompareOperationFeedback::kBigInt);
11797                 result = CAST(CallRuntime(Runtime::kBigIntEqualToBigInt,
11798                                           NoContextConstant(), lhs, rhs));
11799                 Goto(&end);
11800               }
11801 
11802               BIND(&if_rhsisnotbigint);
11803               Goto(&if_not_equivalent_types);
11804             }
11805 
11806             BIND(&if_lhsisnotbigint);
11807             if (var_type_feedback != nullptr) {
11808               // Load the instance type of {rhs}.
11809               TNode<Map> rhs_map = LoadMap(CAST(rhs));
11810               TNode<Uint16T> rhs_instance_type = LoadMapInstanceType(rhs_map);
11811 
11812               Label if_lhsissymbol(this), if_lhsisreceiver(this),
11813                   if_lhsisoddball(this);
11814               GotoIf(IsJSReceiverInstanceType(lhs_instance_type),
11815                      &if_lhsisreceiver);
11816               GotoIf(IsBooleanMap(lhs_map), &if_not_equivalent_types);
11817               GotoIf(IsOddballInstanceType(lhs_instance_type),
11818                      &if_lhsisoddball);
11819               Branch(IsSymbolInstanceType(lhs_instance_type), &if_lhsissymbol,
11820                      &if_not_equivalent_types);
11821 
11822               BIND(&if_lhsisreceiver);
11823               {
11824                 GotoIf(IsBooleanMap(rhs_map), &if_not_equivalent_types);
11825                 OverwriteFeedback(var_type_feedback,
11826                                   CompareOperationFeedback::kReceiver);
11827                 GotoIf(IsJSReceiverInstanceType(rhs_instance_type),
11828                        &if_notequal);
11829                 OverwriteFeedback(
11830                     var_type_feedback,
11831                     CompareOperationFeedback::kReceiverOrNullOrUndefined);
11832                 GotoIf(IsOddballInstanceType(rhs_instance_type), &if_notequal);
11833                 Goto(&if_not_equivalent_types);
11834               }
11835 
11836               BIND(&if_lhsisoddball);
11837               {
11838                 Label if_lhsisboolean(this), if_lhsisnotboolean(this);
11839                 Branch(IsBooleanMap(lhs_map), &if_lhsisboolean,
11840                        &if_lhsisnotboolean);
11841 
11842                 BIND(&if_lhsisboolean);
11843                 {
11844                   OverwriteFeedback(var_type_feedback,
11845                                     CompareOperationFeedback::kNumberOrOddball);
11846                   GotoIf(IsBooleanMap(rhs_map), &if_notequal);
11847                   Goto(&if_not_equivalent_types);
11848                 }
11849 
11850                 BIND(&if_lhsisnotboolean);
11851                 {
11852                   Label if_rhsisheapnumber(this), if_rhsisnotheapnumber(this);
11853 
11854                   STATIC_ASSERT(LAST_PRIMITIVE_HEAP_OBJECT_TYPE ==
11855                                 ODDBALL_TYPE);
11856                   GotoIf(Int32LessThan(rhs_instance_type,
11857                                        Int32Constant(ODDBALL_TYPE)),
11858                          &if_not_equivalent_types);
11859 
11860                   Branch(IsHeapNumberMap(rhs_map), &if_rhsisheapnumber,
11861                          &if_rhsisnotheapnumber);
11862 
11863                   BIND(&if_rhsisheapnumber);
11864                   {
11865                     OverwriteFeedback(
11866                         var_type_feedback,
11867                         CompareOperationFeedback::kNumberOrOddball);
11868                     Goto(&if_not_equivalent_types);
11869                   }
11870 
11871                   BIND(&if_rhsisnotheapnumber);
11872                   {
11873                     OverwriteFeedback(
11874                         var_type_feedback,
11875                         CompareOperationFeedback::kReceiverOrNullOrUndefined);
11876                     Goto(&if_notequal);
11877                   }
11878                 }
11879               }
11880 
11881               BIND(&if_lhsissymbol);
11882               {
11883                 GotoIfNot(IsSymbolInstanceType(rhs_instance_type),
11884                           &if_not_equivalent_types);
11885                 OverwriteFeedback(var_type_feedback,
11886                                   CompareOperationFeedback::kSymbol);
11887                 Goto(&if_notequal);
11888               }
11889             } else {
11890               Goto(&if_notequal);
11891             }
11892           }
11893         }
11894       }
11895     }
11896 
11897     BIND(&if_lhsissmi);
11898     {
11899       // We already know that {lhs} and {rhs} are not reference equal, and {lhs}
11900       // is a Smi; so {lhs} and {rhs} can only be strictly equal if {rhs} is a
11901       // HeapNumber with an equal floating point value.
11902 
11903       // Check if {rhs} is a Smi or a HeapObject.
11904       Label if_rhsissmi(this), if_rhsisnotsmi(this);
11905       Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
11906 
11907       BIND(&if_rhsissmi);
11908       CombineFeedback(var_type_feedback,
11909                       CompareOperationFeedback::kSignedSmall);
11910       Goto(&if_notequal);
11911 
11912       BIND(&if_rhsisnotsmi);
11913       {
11914         // Load the map of the {rhs}.
11915         TNode<Map> rhs_map = LoadMap(CAST(rhs));
11916 
11917         // The {rhs} could be a HeapNumber with the same value as {lhs}.
11918         Label if_rhsisnumber(this), if_rhsisnotnumber(this);
11919         Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber, &if_rhsisnotnumber);
11920 
11921         BIND(&if_rhsisnumber);
11922         {
11923           // Convert {lhs} and {rhs} to floating point values.
11924           TNode<Float64T> lhs_value = SmiToFloat64(CAST(lhs));
11925           TNode<Float64T> rhs_value = LoadHeapNumberValue(CAST(rhs));
11926 
11927           CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
11928 
11929           // Perform a floating point comparison of {lhs} and {rhs}.
11930           Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
11931         }
11932 
11933         BIND(&if_rhsisnotnumber);
11934         {
11935           TNode<Uint16T> rhs_instance_type = LoadMapInstanceType(rhs_map);
11936           GotoIfNot(IsOddballInstanceType(rhs_instance_type),
11937                     &if_not_equivalent_types);
11938           OverwriteFeedback(var_type_feedback,
11939                             CompareOperationFeedback::kNumberOrOddball);
11940           Goto(&if_notequal);
11941         }
11942       }
11943     }
11944   }
11945 
11946   BIND(&if_equal);
11947   {
11948     result = TrueConstant();
11949     Goto(&end);
11950   }
11951 
11952   BIND(&if_not_equivalent_types);
11953   {
11954     OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
11955     Goto(&if_notequal);
11956   }
11957 
11958   BIND(&if_notequal);
11959   {
11960     result = FalseConstant();
11961     Goto(&end);
11962   }
11963 
11964   BIND(&end);
11965   return result.value();
11966 }
11967 
11968 // ECMA#sec-samevalue
11969 // This algorithm differs from the Strict Equality Comparison Algorithm in its
11970 // treatment of signed zeroes and NaNs.
11971 void CodeStubAssembler::BranchIfSameValue(SloppyTNode<Object> lhs,
11972                                           SloppyTNode<Object> rhs,
11973                                           Label* if_true, Label* if_false,
11974                                           SameValueMode mode) {
11975   TVARIABLE(Float64T, var_lhs_value);
11976   TVARIABLE(Float64T, var_rhs_value);
11977   Label do_fcmp(this);
11978 
11979   // Immediately jump to {if_true} if {lhs} == {rhs}, because - unlike
11980   // StrictEqual - SameValue considers two NaNs to be equal.
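        // (In JS terms: Object.is(NaN, NaN) is true while NaN === NaN is false,
        // and Object.is(0, -0) is false while 0 === -0 is true.)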
11981   GotoIf(TaggedEqual(lhs, rhs), if_true);
11982 
11983   // Check if the {lhs} is a Smi.
11984   Label if_lhsissmi(this), if_lhsisheapobject(this);
11985   Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisheapobject);
11986 
11987   BIND(&if_lhsissmi);
11988   {
11989     // Since {lhs} is a Smi, the comparison can only yield true
11990     // iff the {rhs} is a HeapNumber with the same float64 value.
11991     Branch(TaggedIsSmi(rhs), if_false, [&] {
11992       GotoIfNot(IsHeapNumber(CAST(rhs)), if_false);
11993       var_lhs_value = SmiToFloat64(CAST(lhs));
11994       var_rhs_value = LoadHeapNumberValue(CAST(rhs));
11995       Goto(&do_fcmp);
11996     });
11997   }
11998 
11999   BIND(&if_lhsisheapobject);
12000   {
12001     // Check if the {rhs} is a Smi.
12002     Branch(
12003         TaggedIsSmi(rhs),
12004         [&] {
12005           // Since {rhs} is a Smi, the comparison can only yield true
12006           // iff the {lhs} is a HeapNumber with the same float64 value.
12007           GotoIfNot(IsHeapNumber(CAST(lhs)), if_false);
12008           var_lhs_value = LoadHeapNumberValue(CAST(lhs));
12009           var_rhs_value = SmiToFloat64(CAST(rhs));
12010           Goto(&do_fcmp);
12011         },
12012         [&] {
12013           // Now this can only yield true if either both {lhs} and {rhs} are
12014           // HeapNumbers with the same value, or both are Strings with the
12015           // same character sequence, or both are BigInts with the same
12016           // value.
12017           Label if_lhsisheapnumber(this), if_lhsisstring(this),
12018               if_lhsisbigint(this);
12019           const TNode<Map> lhs_map = LoadMap(CAST(lhs));
12020           GotoIf(IsHeapNumberMap(lhs_map), &if_lhsisheapnumber);
12021           if (mode != SameValueMode::kNumbersOnly) {
12022             const TNode<Uint16T> lhs_instance_type =
12023                 LoadMapInstanceType(lhs_map);
12024             GotoIf(IsStringInstanceType(lhs_instance_type), &if_lhsisstring);
12025             GotoIf(IsBigIntInstanceType(lhs_instance_type), &if_lhsisbigint);
12026           }
12027           Goto(if_false);
12028 
12029           BIND(&if_lhsisheapnumber);
12030           {
12031             GotoIfNot(IsHeapNumber(CAST(rhs)), if_false);
12032             var_lhs_value = LoadHeapNumberValue(CAST(lhs));
12033             var_rhs_value = LoadHeapNumberValue(CAST(rhs));
12034             Goto(&do_fcmp);
12035           }
12036 
12037           if (mode != SameValueMode::kNumbersOnly) {
12038             BIND(&if_lhsisstring);
12039             {
12040               // Now we can only yield true if {rhs} is also a String
12041               // with the same sequence of characters.
12042               GotoIfNot(IsString(CAST(rhs)), if_false);
12043               const TNode<Object> result = CallBuiltin(
12044                   Builtins::kStringEqual, NoContextConstant(), lhs, rhs);
12045               Branch(IsTrue(result), if_true, if_false);
12046             }
12047 
12048             BIND(&if_lhsisbigint);
12049             {
12050               GotoIfNot(IsBigInt(CAST(rhs)), if_false);
12051               const TNode<Object> result = CallRuntime(
12052                   Runtime::kBigIntEqualToBigInt, NoContextConstant(), lhs, rhs);
12053               Branch(IsTrue(result), if_true, if_false);
12054             }
12055           }
12056         });
12057   }
12058 
12059   BIND(&do_fcmp);
12060   {
12061     TNode<Float64T> lhs_value = UncheckedCast<Float64T>(var_lhs_value.value());
12062     TNode<Float64T> rhs_value = UncheckedCast<Float64T>(var_rhs_value.value());
12063     BranchIfSameNumberValue(lhs_value, rhs_value, if_true, if_false);
12064   }
12065 }
12066 
12067 void CodeStubAssembler::BranchIfSameNumberValue(TNode<Float64T> lhs_value,
12068                                                 TNode<Float64T> rhs_value,
12069                                                 Label* if_true,
12070                                                 Label* if_false) {
12071   Label if_equal(this), if_notequal(this);
12072   Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
12073 
12074   BIND(&if_equal);
12075   {
12076     // We still need to handle the case when {lhs} and {rhs} are -0.0 and
12077     // 0.0 (or vice versa). Compare the high word to
12078     // distinguish between the two.
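          // In the IEEE-754 double representation, +0.0 has a high word of
          // 0x00000000 while -0.0 has 0x80000000 (only the sign bit differs), so
          // comparing the high words is enough to tell the two apart.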
12079     const TNode<Uint32T> lhs_hi_word = Float64ExtractHighWord32(lhs_value);
12080     const TNode<Uint32T> rhs_hi_word = Float64ExtractHighWord32(rhs_value);
12081 
12082     // If x is +0 and y is -0, return false.
12083     // If x is -0 and y is +0, return false.
12084     Branch(Word32Equal(lhs_hi_word, rhs_hi_word), if_true, if_false);
12085   }
12086 
12087   BIND(&if_notequal);
12088   {
12089     // Return true iff both {rhs} and {lhs} are NaN.
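          // NaN is the only double that compares unequal to itself, so
          // Float64Equal(x, x) being false identifies x as NaN.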
12090     GotoIf(Float64Equal(lhs_value, lhs_value), if_false);
12091     Branch(Float64Equal(rhs_value, rhs_value), if_false, if_true);
12092   }
12093 }
12094 
12095 TNode<Oddball> CodeStubAssembler::HasProperty(TNode<Context> context,
12096                                               SloppyTNode<Object> object,
12097                                               SloppyTNode<Object> key,
12098                                               HasPropertyLookupMode mode) {
12099   Label call_runtime(this, Label::kDeferred), return_true(this),
12100       return_false(this), end(this), if_proxy(this, Label::kDeferred);
12101 
12102   CodeStubAssembler::LookupPropertyInHolder lookup_property_in_holder =
12103       [this, &return_true](
12104           TNode<HeapObject> receiver, TNode<HeapObject> holder,
12105           TNode<Map> holder_map, TNode<Int32T> holder_instance_type,
12106           TNode<Name> unique_name, Label* next_holder, Label* if_bailout) {
12107         TryHasOwnProperty(holder, holder_map, holder_instance_type, unique_name,
12108                           &return_true, next_holder, if_bailout);
12109       };
12110 
12111   CodeStubAssembler::LookupElementInHolder lookup_element_in_holder =
12112       [this, &return_true, &return_false](
12113           TNode<HeapObject> receiver, TNode<HeapObject> holder,
12114           TNode<Map> holder_map, TNode<Int32T> holder_instance_type,
12115           TNode<IntPtrT> index, Label* next_holder, Label* if_bailout) {
12116         TryLookupElement(holder, holder_map, holder_instance_type, index,
12117                          &return_true, &return_false, next_holder, if_bailout);
12118       };
12119 
12120   TryPrototypeChainLookup(object, object, key, lookup_property_in_holder,
12121                           lookup_element_in_holder, &return_false,
12122                           &call_runtime, &if_proxy);
12123 
12124   TVARIABLE(Oddball, result);
12125 
12126   BIND(&if_proxy);
12127   {
12128     TNode<Name> name = CAST(CallBuiltin(Builtins::kToName, context, key));
12129     switch (mode) {
12130       case kHasProperty:
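        // Private symbols are not forwarded to the proxy's "has" trap, so
        // simply report the property as absent.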
12131         GotoIf(IsPrivateSymbol(name), &return_false);
12132 
12133         result = CAST(
12134             CallBuiltin(Builtins::kProxyHasProperty, context, object, name));
12135         Goto(&end);
12136         break;
12137       case kForInHasProperty:
12138         Goto(&call_runtime);
12139         break;
12140     }
12141   }
12142 
12143   BIND(&return_true);
12144   {
12145     result = TrueConstant();
12146     Goto(&end);
12147   }
12148 
12149   BIND(&return_false);
12150   {
12151     result = FalseConstant();
12152     Goto(&end);
12153   }
12154 
12155   BIND(&call_runtime);
12156   {
12157     Runtime::FunctionId fallback_runtime_function_id;
12158     switch (mode) {
12159       case kHasProperty:
12160         fallback_runtime_function_id = Runtime::kHasProperty;
12161         break;
12162       case kForInHasProperty:
12163         fallback_runtime_function_id = Runtime::kForInHasProperty;
12164         break;
12165     }
12166 
12167     result =
12168         CAST(CallRuntime(fallback_runtime_function_id, context, object, key));
12169     Goto(&end);
12170   }
12171 
12172   BIND(&end);
12173   CSA_ASSERT(this, IsBoolean(result.value()));
12174   return result.value();
12175 }
12176 
12177 void CodeStubAssembler::ForInPrepare(TNode<HeapObject> enumerator,
12178                                      TNode<UintPtrT> slot,
12179                                      TNode<HeapObject> maybe_feedback_vector,
12180                                      TNode<FixedArray>* cache_array_out,
12181                                      TNode<Smi>* cache_length_out) {
12182   // Check if we're using an enum cache.
12183   TVARIABLE(FixedArray, cache_array);
12184   TVARIABLE(Smi, cache_length);
12185   Label if_fast(this), if_slow(this, Label::kDeferred), out(this);
12186   Branch(IsMap(enumerator), &if_fast, &if_slow);
12187 
12188   BIND(&if_fast);
12189   {
12190     // Load the enumeration length and cache from the {enumerator}.
12191     TNode<Map> map_enumerator = CAST(enumerator);
12192     TNode<WordT> enum_length = LoadMapEnumLength(map_enumerator);
12193     CSA_ASSERT(this, WordNotEqual(enum_length,
12194                                   IntPtrConstant(kInvalidEnumCacheSentinel)));
12195     TNode<DescriptorArray> descriptors = LoadMapDescriptors(map_enumerator);
12196     TNode<EnumCache> enum_cache = LoadObjectField<EnumCache>(
12197         descriptors, DescriptorArray::kEnumCacheOffset);
12198     TNode<FixedArray> enum_keys =
12199         LoadObjectField<FixedArray>(enum_cache, EnumCache::kKeysOffset);
12200 
12201     // Check if we have enum indices available.
12202     TNode<FixedArray> enum_indices =
12203         LoadObjectField<FixedArray>(enum_cache, EnumCache::kIndicesOffset);
12204     TNode<IntPtrT> enum_indices_length =
12205         LoadAndUntagFixedArrayBaseLength(enum_indices);
12206     TNode<Smi> feedback = SelectSmiConstant(
12207         IntPtrLessThanOrEqual(enum_length, enum_indices_length),
12208         static_cast<int>(ForInFeedback::kEnumCacheKeysAndIndices),
12209         static_cast<int>(ForInFeedback::kEnumCacheKeys));
12210     UpdateFeedback(feedback, maybe_feedback_vector, slot);
12211 
12212     cache_array = enum_keys;
12213     cache_length = SmiTag(Signed(enum_length));
12214     Goto(&out);
12215   }
12216 
12217   BIND(&if_slow);
12218   {
12219     // The {enumerator} is a FixedArray with all the keys to iterate.
12220     TNode<FixedArray> array_enumerator = CAST(enumerator);
12221 
12222     // Record the fact that we hit the for-in slow-path.
12223     UpdateFeedback(SmiConstant(ForInFeedback::kAny), maybe_feedback_vector,
12224                    slot);
12225 
12226     cache_array = array_enumerator;
12227     cache_length = LoadFixedArrayBaseLength(array_enumerator);
12228     Goto(&out);
12229   }
12230 
12231   BIND(&out);
12232   *cache_array_out = cache_array.value();
12233   *cache_length_out = cache_length.value();
12234 }
12235 
12236 TNode<FixedArray> CodeStubAssembler::ForInPrepareForTorque(
12237     TNode<HeapObject> enumerator, TNode<UintPtrT> slot,
12238     TNode<HeapObject> maybe_feedback_vector) {
12239   TNode<FixedArray> cache_array;
12240   TNode<Smi> cache_length;
12241   ForInPrepare(enumerator, slot, maybe_feedback_vector, &cache_array,
12242                &cache_length);
12243 
12244   TNode<FixedArray> result = AllocateUninitializedFixedArray(2);
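  // Pack the pair {cache_array, cache_length} into a temporary FixedArray so
  // the result can be handed back to Torque as a single value.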
12245   StoreFixedArrayElement(result, 0, cache_array);
12246   StoreFixedArrayElement(result, 1, cache_length);
12247 
12248   return result;
12249 }
12250 
12251 TNode<String> CodeStubAssembler::Typeof(SloppyTNode<Object> value) {
12252   TVARIABLE(String, result_var);
12253 
12254   Label return_number(this, Label::kDeferred), if_oddball(this),
12255       return_function(this), return_undefined(this), return_object(this),
12256       return_string(this), return_bigint(this), return_result(this);
12257 
12258   GotoIf(TaggedIsSmi(value), &return_number);
12259 
12260   TNode<HeapObject> value_heap_object = CAST(value);
12261   TNode<Map> map = LoadMap(value_heap_object);
12262 
12263   GotoIf(IsHeapNumberMap(map), &return_number);
12264 
12265   TNode<Uint16T> instance_type = LoadMapInstanceType(map);
12266 
12267   GotoIf(InstanceTypeEqual(instance_type, ODDBALL_TYPE), &if_oddball);
12268 
12269   TNode<Int32T> callable_or_undetectable_mask =
12270       Word32And(LoadMapBitField(map),
12271                 Int32Constant(Map::Bits1::IsCallableBit::kMask |
12272                               Map::Bits1::IsUndetectableBit::kMask));
12273 
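  // A map that is callable but not undetectable reports "function"; any map
  // with the undetectable bit set (e.g. document.all) reports "undefined".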
12274   GotoIf(Word32Equal(callable_or_undetectable_mask,
12275                      Int32Constant(Map::Bits1::IsCallableBit::kMask)),
12276          &return_function);
12277 
12278   GotoIfNot(Word32Equal(callable_or_undetectable_mask, Int32Constant(0)),
12279             &return_undefined);
12280 
12281   GotoIf(IsJSReceiverInstanceType(instance_type), &return_object);
12282 
12283   GotoIf(IsStringInstanceType(instance_type), &return_string);
12284 
12285   GotoIf(IsBigIntInstanceType(instance_type), &return_bigint);
12286 
12287   CSA_ASSERT(this, InstanceTypeEqual(instance_type, SYMBOL_TYPE));
12288   result_var = HeapConstant(isolate()->factory()->symbol_string());
12289   Goto(&return_result);
12290 
12291   BIND(&return_number);
12292   {
12293     result_var = HeapConstant(isolate()->factory()->number_string());
12294     Goto(&return_result);
12295   }
12296 
12297   BIND(&if_oddball);
12298   {
12299     TNode<String> type =
12300         CAST(LoadObjectField(value_heap_object, Oddball::kTypeOfOffset));
12301     result_var = type;
12302     Goto(&return_result);
12303   }
12304 
12305   BIND(&return_function);
12306   {
12307     result_var = HeapConstant(isolate()->factory()->function_string());
12308     Goto(&return_result);
12309   }
12310 
12311   BIND(&return_undefined);
12312   {
12313     result_var = HeapConstant(isolate()->factory()->undefined_string());
12314     Goto(&return_result);
12315   }
12316 
12317   BIND(&return_object);
12318   {
12319     result_var = HeapConstant(isolate()->factory()->object_string());
12320     Goto(&return_result);
12321   }
12322 
12323   BIND(&return_string);
12324   {
12325     result_var = HeapConstant(isolate()->factory()->string_string());
12326     Goto(&return_result);
12327   }
12328 
12329   BIND(&return_bigint);
12330   {
12331     result_var = HeapConstant(isolate()->factory()->bigint_string());
12332     Goto(&return_result);
12333   }
12334 
12335   BIND(&return_result);
12336   return result_var.value();
12337 }
12338 
12339 TNode<HeapObject> CodeStubAssembler::GetSuperConstructor(
12340     TNode<JSFunction> active_function) {
12341   TNode<Map> map = LoadMap(active_function);
12342   return LoadMapPrototype(map);
12343 }
12344 
12345 TNode<JSReceiver> CodeStubAssembler::SpeciesConstructor(
12346     TNode<Context> context, SloppyTNode<Object> object,
12347     TNode<JSReceiver> default_constructor) {
12348   Isolate* isolate = this->isolate();
12349   TVARIABLE(JSReceiver, var_result, default_constructor);
12350 
12351   // 2. Let C be ? Get(O, "constructor").
12352   TNode<Object> constructor =
12353       GetProperty(context, object, isolate->factory()->constructor_string());
12354 
12355   // 3. If C is undefined, return defaultConstructor.
12356   Label out(this);
12357   GotoIf(IsUndefined(constructor), &out);
12358 
12359   // 4. If Type(C) is not Object, throw a TypeError exception.
12360   ThrowIfNotJSReceiver(context, constructor,
12361                        MessageTemplate::kConstructorNotReceiver, "");
12362 
12363   // 5. Let S be ? Get(C, @@species).
12364   TNode<Object> species =
12365       GetProperty(context, constructor, isolate->factory()->species_symbol());
12366 
12367   // 6. If S is either undefined or null, return defaultConstructor.
12368   GotoIf(IsNullOrUndefined(species), &out);
12369 
12370   // 7. If IsConstructor(S) is true, return S.
12371   Label throw_error(this);
12372   GotoIf(TaggedIsSmi(species), &throw_error);
12373   GotoIfNot(IsConstructorMap(LoadMap(CAST(species))), &throw_error);
12374   var_result = CAST(species);
12375   Goto(&out);
12376 
12377   // 8. Throw a TypeError exception.
12378   BIND(&throw_error);
12379   ThrowTypeError(context, MessageTemplate::kSpeciesNotConstructor);
12380 
12381   BIND(&out);
12382   return var_result.value();
12383 }
12384 
12385 TNode<Oddball> CodeStubAssembler::InstanceOf(TNode<Object> object,
12386                                              TNode<Object> callable,
12387                                              TNode<Context> context) {
12388   TVARIABLE(Oddball, var_result);
12389   Label if_notcallable(this, Label::kDeferred),
12390       if_notreceiver(this, Label::kDeferred), if_otherhandler(this),
12391       if_nohandler(this, Label::kDeferred), return_true(this),
12392       return_false(this), return_result(this, &var_result);
12393 
12394   // Ensure that the {callable} is actually a JSReceiver.
12395   GotoIf(TaggedIsSmi(callable), &if_notreceiver);
12396   GotoIfNot(IsJSReceiver(CAST(callable)), &if_notreceiver);
12397 
12398   // Load the @@hasInstance property from {callable}.
12399   TNode<Object> inst_of_handler =
12400       GetProperty(context, callable, HasInstanceSymbolConstant());
12401 
12402   // Optimize for the likely case where {inst_of_handler} is the builtin
12403   // Function.prototype[@@hasInstance] method, and emit a direct call in
12404   // that case without any additional checking.
12405   TNode<NativeContext> native_context = LoadNativeContext(context);
12406   TNode<Object> function_has_instance =
12407       LoadContextElement(native_context, Context::FUNCTION_HAS_INSTANCE_INDEX);
12408   GotoIfNot(TaggedEqual(inst_of_handler, function_has_instance),
12409             &if_otherhandler);
12410   {
12411     // Call Function.prototype[@@hasInstance] directly.
12412     Callable builtin(BUILTIN_CODE(isolate(), FunctionPrototypeHasInstance),
12413                      CallTrampolineDescriptor{});
12414     var_result =
12415         CAST(CallJS(builtin, context, inst_of_handler, callable, object));
12416     Goto(&return_result);
12417   }
12418 
12419   BIND(&if_otherhandler);
12420   {
12421     // Check if there's actually an {inst_of_handler}.
12422     GotoIf(IsNull(inst_of_handler), &if_nohandler);
12423     GotoIf(IsUndefined(inst_of_handler), &if_nohandler);
12424 
12425     // Call the {inst_of_handler} for {callable} and {object}.
12426     TNode<Object> result = Call(context, inst_of_handler, callable, object);
12427 
12428     // Convert the {result} to a Boolean.
12429     BranchIfToBooleanIsTrue(result, &return_true, &return_false);
12430   }
12431 
12432   BIND(&if_nohandler);
12433   {
12434     // Ensure that the {callable} is actually Callable.
12435     GotoIfNot(IsCallable(CAST(callable)), &if_notcallable);
12436 
12437     // Use the OrdinaryHasInstance algorithm.
12438     var_result = CAST(
12439         CallBuiltin(Builtins::kOrdinaryHasInstance, context, callable, object));
12440     Goto(&return_result);
12441   }
12442 
12443   BIND(&if_notcallable);
12444   { ThrowTypeError(context, MessageTemplate::kNonCallableInInstanceOfCheck); }
12445 
12446   BIND(&if_notreceiver);
12447   { ThrowTypeError(context, MessageTemplate::kNonObjectInInstanceOfCheck); }
12448 
12449   BIND(&return_true);
12450   var_result = TrueConstant();
12451   Goto(&return_result);
12452 
12453   BIND(&return_false);
12454   var_result = FalseConstant();
12455   Goto(&return_result);
12456 
12457   BIND(&return_result);
12458   return var_result.value();
12459 }
12460 
12461 TNode<Number> CodeStubAssembler::NumberInc(TNode<Number> value) {
12462   TVARIABLE(Number, var_result);
12463   TVARIABLE(Float64T, var_finc_value);
12464   Label if_issmi(this), if_isnotsmi(this), do_finc(this), end(this);
12465   Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi);
12466 
12467   BIND(&if_issmi);
12468   {
12469     Label if_overflow(this);
12470     TNode<Smi> smi_value = CAST(value);
12471     TNode<Smi> one = SmiConstant(1);
12472     var_result = TrySmiAdd(smi_value, one, &if_overflow);
12473     Goto(&end);
12474 
12475     BIND(&if_overflow);
12476     {
12477       var_finc_value = SmiToFloat64(smi_value);
12478       Goto(&do_finc);
12479     }
12480   }
12481 
12482   BIND(&if_isnotsmi);
12483   {
12484     TNode<HeapNumber> heap_number_value = CAST(value);
12485 
12486     // Load the HeapNumber value.
12487     var_finc_value = LoadHeapNumberValue(heap_number_value);
12488     Goto(&do_finc);
12489   }
12490 
12491   BIND(&do_finc);
12492   {
12493     TNode<Float64T> finc_value = var_finc_value.value();
12494     TNode<Float64T> one = Float64Constant(1.0);
12495     TNode<Float64T> finc_result = Float64Add(finc_value, one);
12496     var_result = AllocateHeapNumberWithValue(finc_result);
12497     Goto(&end);
12498   }
12499 
12500   BIND(&end);
12501   return var_result.value();
12502 }
12503 
12504 TNode<Number> CodeStubAssembler::NumberDec(TNode<Number> value) {
12505   TVARIABLE(Number, var_result);
12506   TVARIABLE(Float64T, var_fdec_value);
12507   Label if_issmi(this), if_isnotsmi(this), do_fdec(this), end(this);
12508   Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi);
12509 
12510   BIND(&if_issmi);
12511   {
12512     TNode<Smi> smi_value = CAST(value);
12513     TNode<Smi> one = SmiConstant(1);
12514     Label if_overflow(this);
12515     var_result = TrySmiSub(smi_value, one, &if_overflow);
12516     Goto(&end);
12517 
12518     BIND(&if_overflow);
12519     {
12520       var_fdec_value = SmiToFloat64(smi_value);
12521       Goto(&do_fdec);
12522     }
12523   }
12524 
12525   BIND(&if_isnotsmi);
12526   {
12527     TNode<HeapNumber> heap_number_value = CAST(value);
12528 
12529     // Load the HeapNumber value.
12530     var_fdec_value = LoadHeapNumberValue(heap_number_value);
12531     Goto(&do_fdec);
12532   }
12533 
12534   BIND(&do_fdec);
12535   {
12536     TNode<Float64T> fdec_value = var_fdec_value.value();
12537     TNode<Float64T> minus_one = Float64Constant(-1.0);
12538     TNode<Float64T> fdec_result = Float64Add(fdec_value, minus_one);
12539     var_result = AllocateHeapNumberWithValue(fdec_result);
12540     Goto(&end);
12541   }
12542 
12543   BIND(&end);
12544   return var_result.value();
12545 }
12546 
12547 TNode<Number> CodeStubAssembler::NumberAdd(TNode<Number> a, TNode<Number> b) {
12548   TVARIABLE(Number, var_result);
12549   Label float_add(this, Label::kDeferred), end(this);
12550   GotoIf(TaggedIsNotSmi(a), &float_add);
12551   GotoIf(TaggedIsNotSmi(b), &float_add);
12552 
12553   // Try fast Smi addition first.
12554   var_result = TrySmiAdd(CAST(a), CAST(b), &float_add);
12555   Goto(&end);
12556 
12557   BIND(&float_add);
12558   {
12559     var_result = ChangeFloat64ToTagged(
12560         Float64Add(ChangeNumberToFloat64(a), ChangeNumberToFloat64(b)));
12561     Goto(&end);
12562   }
12563 
12564   BIND(&end);
12565   return var_result.value();
12566 }
12567 
12568 TNode<Number> CodeStubAssembler::NumberSub(TNode<Number> a, TNode<Number> b) {
12569   TVARIABLE(Number, var_result);
12570   Label float_sub(this, Label::kDeferred), end(this);
12571   GotoIf(TaggedIsNotSmi(a), &float_sub);
12572   GotoIf(TaggedIsNotSmi(b), &float_sub);
12573 
12574   // Try fast Smi subtraction first.
12575   var_result = TrySmiSub(CAST(a), CAST(b), &float_sub);
12576   Goto(&end);
12577 
12578   BIND(&float_sub);
12579   {
12580     var_result = ChangeFloat64ToTagged(
12581         Float64Sub(ChangeNumberToFloat64(a), ChangeNumberToFloat64(b)));
12582     Goto(&end);
12583   }
12584 
12585   BIND(&end);
12586   return var_result.value();
12587 }
12588 
12589 void CodeStubAssembler::GotoIfNotNumber(TNode<Object> input,
12590                                         Label* is_not_number) {
12591   Label is_number(this);
12592   GotoIf(TaggedIsSmi(input), &is_number);
12593   Branch(IsHeapNumber(CAST(input)), &is_number, is_not_number);
12594   BIND(&is_number);
12595 }
12596 
12597 void CodeStubAssembler::GotoIfNumber(TNode<Object> input, Label* is_number) {
12598   GotoIf(TaggedIsSmi(input), is_number);
12599   GotoIf(IsHeapNumber(CAST(input)), is_number);
12600 }
12601 
12602 TNode<Number> CodeStubAssembler::BitwiseOp(TNode<Word32T> left32,
12603                                            TNode<Word32T> right32,
12604                                            Operation bitwise_op) {
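  // JS shift operators use the shift count modulo 32; on targets whose shift
  // instructions do not mask the count themselves, mask it explicitly below.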
12605   switch (bitwise_op) {
12606     case Operation::kBitwiseAnd:
12607       return ChangeInt32ToTagged(Signed(Word32And(left32, right32)));
12608     case Operation::kBitwiseOr:
12609       return ChangeInt32ToTagged(Signed(Word32Or(left32, right32)));
12610     case Operation::kBitwiseXor:
12611       return ChangeInt32ToTagged(Signed(Word32Xor(left32, right32)));
12612     case Operation::kShiftLeft:
12613       if (!Word32ShiftIsSafe()) {
12614         right32 = Word32And(right32, Int32Constant(0x1F));
12615       }
12616       return ChangeInt32ToTagged(Signed(Word32Shl(left32, right32)));
12617     case Operation::kShiftRight:
12618       if (!Word32ShiftIsSafe()) {
12619         right32 = Word32And(right32, Int32Constant(0x1F));
12620       }
12621       return ChangeInt32ToTagged(Signed(Word32Sar(left32, right32)));
12622     case Operation::kShiftRightLogical:
12623       if (!Word32ShiftIsSafe()) {
12624         right32 = Word32And(right32, Int32Constant(0x1F));
12625       }
12626       return ChangeUint32ToTagged(Unsigned(Word32Shr(left32, right32)));
12627     default:
12628       break;
12629   }
12630   UNREACHABLE();
12631 }
12632 
12633 TNode<JSObject> CodeStubAssembler::AllocateJSIteratorResult(
12634     TNode<Context> context, SloppyTNode<Object> value,
12635     SloppyTNode<Oddball> done) {
12636   CSA_ASSERT(this, IsBoolean(done));
12637   TNode<NativeContext> native_context = LoadNativeContext(context);
12638   TNode<Map> map = CAST(
12639       LoadContextElement(native_context, Context::ITERATOR_RESULT_MAP_INDEX));
12640   TNode<HeapObject> result = Allocate(JSIteratorResult::kSize);
12641   StoreMapNoWriteBarrier(result, map);
12642   StoreObjectFieldRoot(result, JSIteratorResult::kPropertiesOrHashOffset,
12643                        RootIndex::kEmptyFixedArray);
12644   StoreObjectFieldRoot(result, JSIteratorResult::kElementsOffset,
12645                        RootIndex::kEmptyFixedArray);
12646   StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kValueOffset, value);
12647   StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kDoneOffset, done);
12648   return CAST(result);
12649 }
12650 
12651 TNode<JSObject> CodeStubAssembler::AllocateJSIteratorResultForEntry(
12652     TNode<Context> context, TNode<Object> key, SloppyTNode<Object> value) {
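  // The elements FixedArray, the [key, value] JSArray and the JSIteratorResult
  // are carved out of one contiguous allocation via InnerAllocate below.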
12653   TNode<NativeContext> native_context = LoadNativeContext(context);
12654   TNode<Smi> length = SmiConstant(2);
12655   int const elements_size = FixedArray::SizeFor(2);
12656   TNode<FixedArray> elements = UncheckedCast<FixedArray>(
12657       Allocate(elements_size + JSArray::kHeaderSize + JSIteratorResult::kSize));
12658   StoreObjectFieldRoot(elements, FixedArray::kMapOffset,
12659                        RootIndex::kFixedArrayMap);
12660   StoreObjectFieldNoWriteBarrier(elements, FixedArray::kLengthOffset, length);
12661   StoreFixedArrayElement(elements, 0, key);
12662   StoreFixedArrayElement(elements, 1, value);
12663   TNode<Map> array_map = CAST(LoadContextElement(
12664       native_context, Context::JS_ARRAY_PACKED_ELEMENTS_MAP_INDEX));
12665   TNode<HeapObject> array = InnerAllocate(elements, elements_size);
12666   StoreMapNoWriteBarrier(array, array_map);
12667   StoreObjectFieldRoot(array, JSArray::kPropertiesOrHashOffset,
12668                        RootIndex::kEmptyFixedArray);
12669   StoreObjectFieldNoWriteBarrier(array, JSArray::kElementsOffset, elements);
12670   StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
12671   TNode<Map> iterator_map = CAST(
12672       LoadContextElement(native_context, Context::ITERATOR_RESULT_MAP_INDEX));
12673   TNode<HeapObject> result = InnerAllocate(array, JSArray::kHeaderSize);
12674   StoreMapNoWriteBarrier(result, iterator_map);
12675   StoreObjectFieldRoot(result, JSIteratorResult::kPropertiesOrHashOffset,
12676                        RootIndex::kEmptyFixedArray);
12677   StoreObjectFieldRoot(result, JSIteratorResult::kElementsOffset,
12678                        RootIndex::kEmptyFixedArray);
12679   StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kValueOffset, array);
12680   StoreObjectFieldRoot(result, JSIteratorResult::kDoneOffset,
12681                        RootIndex::kFalseValue);
12682   return CAST(result);
12683 }
12684 
12685 TNode<JSReceiver> CodeStubAssembler::ArraySpeciesCreate(TNode<Context> context,
12686                                                         TNode<Object> o,
12687                                                         TNode<Number> len) {
12688   TNode<JSReceiver> constructor =
12689       CAST(CallRuntime(Runtime::kArraySpeciesConstructor, context, o));
12690   return Construct(context, constructor, len);
12691 }
12692 
12693 void CodeStubAssembler::ThrowIfArrayBufferIsDetached(
12694     TNode<Context> context, TNode<JSArrayBuffer> array_buffer,
12695     const char* method_name) {
12696   Label if_detached(this, Label::kDeferred), if_not_detached(this);
12697   Branch(IsDetachedBuffer(array_buffer), &if_detached, &if_not_detached);
12698   BIND(&if_detached);
12699   ThrowTypeError(context, MessageTemplate::kDetachedOperation, method_name);
12700   BIND(&if_not_detached);
12701 }
12702 
12703 void CodeStubAssembler::ThrowIfArrayBufferViewBufferIsDetached(
12704     TNode<Context> context, TNode<JSArrayBufferView> array_buffer_view,
12705     const char* method_name) {
12706   TNode<JSArrayBuffer> buffer = LoadJSArrayBufferViewBuffer(array_buffer_view);
12707   ThrowIfArrayBufferIsDetached(context, buffer, method_name);
12708 }
12709 
12710 TNode<RawPtrT> CodeStubAssembler::LoadJSArrayBufferBackingStorePtr(
12711     TNode<JSArrayBuffer> array_buffer) {
12712   return LoadExternalPointerFromObject(array_buffer,
12713                                        JSArrayBuffer::kBackingStoreOffset,
12714                                        kArrayBufferBackingStoreTag);
12715 }
12716 
12717 TNode<JSArrayBuffer> CodeStubAssembler::LoadJSArrayBufferViewBuffer(
12718     TNode<JSArrayBufferView> array_buffer_view) {
12719   return LoadObjectField<JSArrayBuffer>(array_buffer_view,
12720                                         JSArrayBufferView::kBufferOffset);
12721 }
12722 
12723 TNode<UintPtrT> CodeStubAssembler::LoadJSArrayBufferViewByteLength(
12724     TNode<JSArrayBufferView> array_buffer_view) {
12725   return LoadObjectField<UintPtrT>(array_buffer_view,
12726                                    JSArrayBufferView::kByteLengthOffset);
12727 }
12728 
12729 TNode<UintPtrT> CodeStubAssembler::LoadJSArrayBufferViewByteOffset(
12730     TNode<JSArrayBufferView> array_buffer_view) {
12731   return LoadObjectField<UintPtrT>(array_buffer_view,
12732                                    JSArrayBufferView::kByteOffsetOffset);
12733 }
12734 
12735 TNode<UintPtrT> CodeStubAssembler::LoadJSTypedArrayLength(
12736     TNode<JSTypedArray> typed_array) {
12737   return LoadObjectField<UintPtrT>(typed_array, JSTypedArray::kLengthOffset);
12738 }
12739 
12740 TNode<JSArrayBuffer> CodeStubAssembler::GetTypedArrayBuffer(
12741     TNode<Context> context, TNode<JSTypedArray> array) {
12742   Label call_runtime(this), done(this);
12743   TVARIABLE(Object, var_result);
12744 
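  // If the buffer is detached or its backing store pointer is still null,
  // fall back to the runtime, which can materialize a proper backing store.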
12745   TNode<JSArrayBuffer> buffer = LoadJSArrayBufferViewBuffer(array);
12746   GotoIf(IsDetachedBuffer(buffer), &call_runtime);
12747   TNode<RawPtrT> backing_store = LoadJSArrayBufferBackingStorePtr(buffer);
12748   GotoIf(WordEqual(backing_store, IntPtrConstant(0)), &call_runtime);
12749   var_result = buffer;
12750   Goto(&done);
12751 
12752   BIND(&call_runtime);
12753   {
12754     var_result = CallRuntime(Runtime::kTypedArrayGetBuffer, context, array);
12755     Goto(&done);
12756   }
12757 
12758   BIND(&done);
12759   return CAST(var_result.value());
12760 }
12761 
12762 CodeStubArguments::CodeStubArguments(CodeStubAssembler* assembler,
12763                                      TNode<IntPtrT> argc, TNode<RawPtrT> fp)
12764     : assembler_(assembler),
12765       argc_(argc),
12766       base_(),
12767       fp_(fp != nullptr ? fp : assembler_->LoadFramePointer()) {
12768   TNode<IntPtrT> offset = assembler_->IntPtrConstant(
12769       (StandardFrameConstants::kFixedSlotCountAboveFp + 1) *
12770       kSystemPointerSize);
12771   // base_ points to the first argument, not the receiver,
12772   // whether or not a receiver is present.
12773   base_ = assembler_->RawPtrAdd(fp_, offset);
12774 }
12775 
12776 TNode<Object> CodeStubArguments::GetReceiver() const {
12777   intptr_t offset = -kSystemPointerSize;
12778   return assembler_->LoadFullTagged(base_, assembler_->IntPtrConstant(offset));
12779 }
12780 
12781 void CodeStubArguments::SetReceiver(TNode<Object> object) const {
12782   intptr_t offset = -kSystemPointerSize;
12783   assembler_->StoreFullTaggedNoWriteBarrier(
12784       base_, assembler_->IntPtrConstant(offset), object);
12785 }
12786 
12787 TNode<RawPtrT> CodeStubArguments::AtIndexPtr(TNode<IntPtrT> index) const {
12788   TNode<IntPtrT> offset =
12789       assembler_->ElementOffsetFromIndex(index, SYSTEM_POINTER_ELEMENTS, 0);
12790   return assembler_->RawPtrAdd(base_, offset);
12791 }
12792 
12793 TNode<Object> CodeStubArguments::AtIndex(TNode<IntPtrT> index) const {
12794   CSA_ASSERT(assembler_, assembler_->UintPtrOrSmiLessThan(index, GetLength()));
12795   return assembler_->UncheckedCast<Object>(
12796       assembler_->LoadFullTagged(AtIndexPtr(index)));
12797 }
12798 
12799 TNode<Object> CodeStubArguments::AtIndex(int index) const {
12800   return AtIndex(assembler_->IntPtrConstant(index));
12801 }
12802 
12803 TNode<Object> CodeStubArguments::GetOptionalArgumentValue(
12804     TNode<IntPtrT> index, TNode<Object> default_value) {
12805   CodeStubAssembler::TVariable<Object> result(assembler_);
12806   CodeStubAssembler::Label argument_missing(assembler_),
12807       argument_done(assembler_, &result);
12808 
12809   assembler_->GotoIf(assembler_->UintPtrGreaterThanOrEqual(index, argc_),
12810                      &argument_missing);
12811   result = AtIndex(index);
12812   assembler_->Goto(&argument_done);
12813 
12814   assembler_->BIND(&argument_missing);
12815   result = default_value;
12816   assembler_->Goto(&argument_done);
12817 
12818   assembler_->BIND(&argument_done);
12819   return result.value();
12820 }
12821 
12822 void CodeStubArguments::ForEach(
12823     const CodeStubAssembler::VariableList& vars,
12824     const CodeStubArguments::ForEachBodyFunction& body, TNode<IntPtrT> first,
12825     TNode<IntPtrT> last) const {
12826   assembler_->Comment("CodeStubArguments::ForEach");
12827   if (first == nullptr) {
12828     first = assembler_->IntPtrConstant(0);
12829   }
12830   if (last == nullptr) {
12831     last = argc_;
12832   }
12833   TNode<RawPtrT> start = AtIndexPtr(first);
12834   TNode<RawPtrT> end = AtIndexPtr(last);
12835   const int increment = kSystemPointerSize;
12836   assembler_->BuildFastLoop<RawPtrT>(
12837       vars, start, end,
12838       [&](TNode<RawPtrT> current) {
12839         TNode<Object> arg = assembler_->LoadFullTagged(current);
12840         body(arg);
12841       },
12842       increment, CodeStubAssembler::IndexAdvanceMode::kPost);
12843 }
12844 
12845 void CodeStubArguments::PopAndReturn(TNode<Object> value) {
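  // Pop all the arguments plus the receiver (hence the +1) before returning.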
12846   TNode<IntPtrT> pop_count =
12847       assembler_->IntPtrAdd(argc_, assembler_->IntPtrConstant(1));
12848   assembler_->PopAndReturn(pop_count, value);
12849 }
12850 
12851 TNode<BoolT> CodeStubAssembler::IsFastElementsKind(
12852     TNode<Int32T> elements_kind) {
12853   STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND);
12854   return Uint32LessThanOrEqual(elements_kind,
12855                                Int32Constant(LAST_FAST_ELEMENTS_KIND));
12856 }
12857 
12858 TNode<BoolT> CodeStubAssembler::IsFastOrNonExtensibleOrSealedElementsKind(
12859     TNode<Int32T> elements_kind) {
12860   STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND);
12861   STATIC_ASSERT(LAST_FAST_ELEMENTS_KIND + 1 == PACKED_NONEXTENSIBLE_ELEMENTS);
12862   STATIC_ASSERT(PACKED_NONEXTENSIBLE_ELEMENTS + 1 ==
12863                 HOLEY_NONEXTENSIBLE_ELEMENTS);
12864   STATIC_ASSERT(HOLEY_NONEXTENSIBLE_ELEMENTS + 1 == PACKED_SEALED_ELEMENTS);
12865   STATIC_ASSERT(PACKED_SEALED_ELEMENTS + 1 == HOLEY_SEALED_ELEMENTS);
12866   return Uint32LessThanOrEqual(elements_kind,
12867                                Int32Constant(HOLEY_SEALED_ELEMENTS));
12868 }
12869 
12870 TNode<BoolT> CodeStubAssembler::IsDoubleElementsKind(
12871     TNode<Int32T> elements_kind) {
12872   STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND);
12873   STATIC_ASSERT((PACKED_DOUBLE_ELEMENTS & 1) == 0);
12874   STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS + 1 == HOLEY_DOUBLE_ELEMENTS);
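  // The two double kinds differ only in the low (holeyness) bit, so shifting
  // it out and comparing selects exactly PACKED_ and HOLEY_DOUBLE_ELEMENTS.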
12875   return Word32Equal(Word32Shr(elements_kind, Int32Constant(1)),
12876                      Int32Constant(PACKED_DOUBLE_ELEMENTS / 2));
12877 }
12878 
12879 TNode<BoolT> CodeStubAssembler::IsFastSmiOrTaggedElementsKind(
12880     TNode<Int32T> elements_kind) {
12881   STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND);
12882   STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS > TERMINAL_FAST_ELEMENTS_KIND);
12883   STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS > TERMINAL_FAST_ELEMENTS_KIND);
12884   return Uint32LessThanOrEqual(elements_kind,
12885                                Int32Constant(TERMINAL_FAST_ELEMENTS_KIND));
12886 }
12887 
12888 TNode<BoolT> CodeStubAssembler::IsFastSmiElementsKind(
12889     SloppyTNode<Int32T> elements_kind) {
12890   return Uint32LessThanOrEqual(elements_kind,
12891                                Int32Constant(HOLEY_SMI_ELEMENTS));
12892 }
12893 
12894 TNode<BoolT> CodeStubAssembler::IsHoleyFastElementsKind(
12895     TNode<Int32T> elements_kind) {
12896   CSA_ASSERT(this, IsFastElementsKind(elements_kind));
12897 
12898   STATIC_ASSERT(HOLEY_SMI_ELEMENTS == (PACKED_SMI_ELEMENTS | 1));
12899   STATIC_ASSERT(HOLEY_ELEMENTS == (PACKED_ELEMENTS | 1));
12900   STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS == (PACKED_DOUBLE_ELEMENTS | 1));
12901   return IsSetWord32(elements_kind, 1);
12902 }
12903 
12904 TNode<BoolT> CodeStubAssembler::IsHoleyFastElementsKindForRead(
12905     TNode<Int32T> elements_kind) {
12906   CSA_ASSERT(this, Uint32LessThanOrEqual(
12907                        elements_kind,
12908                        Int32Constant(LAST_ANY_NONEXTENSIBLE_ELEMENTS_KIND)));
12909 
12910   STATIC_ASSERT(HOLEY_SMI_ELEMENTS == (PACKED_SMI_ELEMENTS | 1));
12911   STATIC_ASSERT(HOLEY_ELEMENTS == (PACKED_ELEMENTS | 1));
12912   STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS == (PACKED_DOUBLE_ELEMENTS | 1));
12913   STATIC_ASSERT(HOLEY_NONEXTENSIBLE_ELEMENTS ==
12914                 (PACKED_NONEXTENSIBLE_ELEMENTS | 1));
12915   STATIC_ASSERT(HOLEY_SEALED_ELEMENTS == (PACKED_SEALED_ELEMENTS | 1));
12916   STATIC_ASSERT(HOLEY_FROZEN_ELEMENTS == (PACKED_FROZEN_ELEMENTS | 1));
12917   return IsSetWord32(elements_kind, 1);
12918 }
12919 
12920 TNode<BoolT> CodeStubAssembler::IsElementsKindGreaterThan(
12921     TNode<Int32T> target_kind, ElementsKind reference_kind) {
12922   return Int32GreaterThan(target_kind, Int32Constant(reference_kind));
12923 }
12924 
12925 TNode<BoolT> CodeStubAssembler::IsElementsKindLessThanOrEqual(
12926     TNode<Int32T> target_kind, ElementsKind reference_kind) {
12927   return Int32LessThanOrEqual(target_kind, Int32Constant(reference_kind));
12928 }
12929 
12930 TNode<BoolT> CodeStubAssembler::IsDebugActive() {
12931   TNode<Uint8T> is_debug_active = Load<Uint8T>(
12932       ExternalConstant(ExternalReference::debug_is_active_address(isolate())));
12933   return Word32NotEqual(is_debug_active, Int32Constant(0));
12934 }
12935 
12936 TNode<BoolT> CodeStubAssembler::IsPromiseHookEnabled() {
12937   const TNode<RawPtrT> promise_hook = Load<RawPtrT>(
12938       ExternalConstant(ExternalReference::promise_hook_address(isolate())));
12939   return WordNotEqual(promise_hook, IntPtrConstant(0));
12940 }
12941 
12942 TNode<BoolT> CodeStubAssembler::HasAsyncEventDelegate() {
12943   const TNode<RawPtrT> async_event_delegate = Load<RawPtrT>(ExternalConstant(
12944       ExternalReference::async_event_delegate_address(isolate())));
12945   return WordNotEqual(async_event_delegate, IntPtrConstant(0));
12946 }
12947 
12948 TNode<BoolT> CodeStubAssembler::IsPromiseHookEnabledOrHasAsyncEventDelegate() {
12949   const TNode<Uint8T> promise_hook_or_async_event_delegate =
12950       Load<Uint8T>(ExternalConstant(
12951           ExternalReference::promise_hook_or_async_event_delegate_address(
12952               isolate())));
12953   return Word32NotEqual(promise_hook_or_async_event_delegate, Int32Constant(0));
12954 }
12955 
12956 TNode<BoolT> CodeStubAssembler::
12957     IsPromiseHookEnabledOrDebugIsActiveOrHasAsyncEventDelegate() {
12958   const TNode<Uint8T> promise_hook_or_debug_is_active_or_async_event_delegate =
12959       Load<Uint8T>(ExternalConstant(
12960           ExternalReference::
12961               promise_hook_or_debug_is_active_or_async_event_delegate_address(
12962                   isolate())));
12963   return Word32NotEqual(promise_hook_or_debug_is_active_or_async_event_delegate,
12964                         Int32Constant(0));
12965 }
12966 
12967 TNode<Code> CodeStubAssembler::LoadBuiltin(TNode<Smi> builtin_id) {
12968   CSA_ASSERT(this, SmiBelow(builtin_id, SmiConstant(Builtins::builtin_count)));
12969 
12970   TNode<IntPtrT> offset =
12971       ElementOffsetFromIndex(SmiToBInt(builtin_id), SYSTEM_POINTER_ELEMENTS);
12972 
12973   return CAST(BitcastWordToTagged(
12974       Load(MachineType::Pointer(),
12975            ExternalConstant(ExternalReference::builtins_address(isolate())),
12976            offset)));
12977 }
12978 
12979 TNode<Code> CodeStubAssembler::GetSharedFunctionInfoCode(
12980     TNode<SharedFunctionInfo> shared_info, Label* if_compile_lazy) {
12981   TNode<Object> sfi_data =
12982       LoadObjectField(shared_info, SharedFunctionInfo::kFunctionDataOffset);
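  // The function_data field encodes how this function executes: a Smi builtin
  // id, a BytecodeArray, Wasm function data, uncompiled data, a
  // FunctionTemplateInfo or InterpreterData. Dispatch on it to pick the Code
  // object to run.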
12983 
12984   TVARIABLE(Code, sfi_code);
12985 
12986   Label done(this);
12987   Label check_instance_type(this);
12988 
12989   // IsSmi: Is builtin
12990   GotoIf(TaggedIsNotSmi(sfi_data), &check_instance_type);
12991   if (if_compile_lazy) {
12992     GotoIf(SmiEqual(CAST(sfi_data), SmiConstant(Builtins::kCompileLazy)),
12993            if_compile_lazy);
12994   }
12995   sfi_code = LoadBuiltin(CAST(sfi_data));
12996   Goto(&done);
12997 
12998   // Switch on data's instance type.
12999   BIND(&check_instance_type);
13000   TNode<Uint16T> data_type = LoadInstanceType(CAST(sfi_data));
13001 
13002   int32_t case_values[] = {BYTECODE_ARRAY_TYPE,
13003                            WASM_EXPORTED_FUNCTION_DATA_TYPE,
13004                            ASM_WASM_DATA_TYPE,
13005                            UNCOMPILED_DATA_WITHOUT_PREPARSE_DATA_TYPE,
13006                            UNCOMPILED_DATA_WITH_PREPARSE_DATA_TYPE,
13007                            FUNCTION_TEMPLATE_INFO_TYPE,
13008                            WASM_JS_FUNCTION_DATA_TYPE,
13009                            WASM_CAPI_FUNCTION_DATA_TYPE};
13010   Label check_is_bytecode_array(this);
13011   Label check_is_exported_function_data(this);
13012   Label check_is_asm_wasm_data(this);
13013   Label check_is_uncompiled_data_without_preparse_data(this);
13014   Label check_is_uncompiled_data_with_preparse_data(this);
13015   Label check_is_function_template_info(this);
13016   Label check_is_interpreter_data(this);
13017   Label check_is_wasm_js_function_data(this);
13018   Label check_is_wasm_capi_function_data(this);
13019   Label* case_labels[] = {&check_is_bytecode_array,
13020                           &check_is_exported_function_data,
13021                           &check_is_asm_wasm_data,
13022                           &check_is_uncompiled_data_without_preparse_data,
13023                           &check_is_uncompiled_data_with_preparse_data,
13024                           &check_is_function_template_info,
13025                           &check_is_wasm_js_function_data,
13026                           &check_is_wasm_capi_function_data};
13027   STATIC_ASSERT(arraysize(case_values) == arraysize(case_labels));
13028   Switch(data_type, &check_is_interpreter_data, case_values, case_labels,
13029          arraysize(case_labels));
13030 
13031   // IsBytecodeArray: Interpret bytecode
13032   BIND(&check_is_bytecode_array);
13033   sfi_code = HeapConstant(BUILTIN_CODE(isolate(), InterpreterEntryTrampoline));
13034   Goto(&done);
13035 
13036   // IsWasmExportedFunctionData: Use the wrapper code
13037   BIND(&check_is_exported_function_data);
13038   sfi_code = CAST(LoadObjectField(
13039       CAST(sfi_data), WasmExportedFunctionData::kWrapperCodeOffset));
13040   Goto(&done);
13041 
13042   // IsAsmWasmData: Instantiate using AsmWasmData
13043   BIND(&check_is_asm_wasm_data);
13044   sfi_code = HeapConstant(BUILTIN_CODE(isolate(), InstantiateAsmJs));
13045   Goto(&done);
13046 
13047   // IsUncompiledDataWithPreparseData | IsUncompiledDataWithoutPreparseData:
13048   // Compile lazy
13049   BIND(&check_is_uncompiled_data_with_preparse_data);
13050   Goto(&check_is_uncompiled_data_without_preparse_data);
13051   BIND(&check_is_uncompiled_data_without_preparse_data);
13052   sfi_code = HeapConstant(BUILTIN_CODE(isolate(), CompileLazy));
13053   Goto(if_compile_lazy ? if_compile_lazy : &done);
13054 
13055   // IsFunctionTemplateInfo: API call
13056   BIND(&check_is_function_template_info);
13057   sfi_code = HeapConstant(BUILTIN_CODE(isolate(), HandleApiCall));
13058   Goto(&done);
13059 
13060   // IsInterpreterData: Interpret bytecode
13061   BIND(&check_is_interpreter_data);
13062   // This is the default branch, so assert that we have the expected data type.
13063   CSA_ASSERT(this,
13064              Word32Equal(data_type, Int32Constant(INTERPRETER_DATA_TYPE)));
13065   sfi_code = CAST(LoadObjectField(
13066       CAST(sfi_data), InterpreterData::kInterpreterTrampolineOffset));
13067   Goto(&done);
13068 
13069   // IsWasmJSFunctionData: Use the wrapper code.
13070   BIND(&check_is_wasm_js_function_data);
13071   sfi_code = CAST(
13072       LoadObjectField(CAST(sfi_data), WasmJSFunctionData::kWrapperCodeOffset));
13073   Goto(&done);
13074 
13075   // IsWasmCapiFunctionData: Use the wrapper code.
13076   BIND(&check_is_wasm_capi_function_data);
13077   sfi_code = CAST(LoadObjectField(CAST(sfi_data),
13078                                   WasmCapiFunctionData::kWrapperCodeOffset));
13079   Goto(&done);
13080 
13081   BIND(&done);
13082   return sfi_code.value();
13083 }
13084 
13085 TNode<JSFunction> CodeStubAssembler::AllocateFunctionWithMapAndContext(
13086     TNode<Map> map, TNode<SharedFunctionInfo> shared_info,
13087     TNode<Context> context) {
13088   const TNode<Code> code = GetSharedFunctionInfoCode(shared_info);
13089 
13090   // TODO(ishell): All the callers of this function pass map loaded from
13091   // Context::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX. So we can remove
13092   // map parameter.
13093   CSA_ASSERT(this, Word32BinaryNot(IsConstructorMap(map)));
13094   CSA_ASSERT(this, Word32BinaryNot(IsFunctionWithPrototypeSlotMap(map)));
13095   const TNode<HeapObject> fun = Allocate(JSFunction::kSizeWithoutPrototype);
13096   STATIC_ASSERT(JSFunction::kSizeWithoutPrototype == 7 * kTaggedSize);
13097   StoreMapNoWriteBarrier(fun, map);
13098   StoreObjectFieldRoot(fun, JSObject::kPropertiesOrHashOffset,
13099                        RootIndex::kEmptyFixedArray);
13100   StoreObjectFieldRoot(fun, JSObject::kElementsOffset,
13101                        RootIndex::kEmptyFixedArray);
13102   StoreObjectFieldRoot(fun, JSFunction::kFeedbackCellOffset,
13103                        RootIndex::kManyClosuresCell);
13104   StoreObjectFieldNoWriteBarrier(fun, JSFunction::kSharedFunctionInfoOffset,
13105                                  shared_info);
13106   StoreObjectFieldNoWriteBarrier(fun, JSFunction::kContextOffset, context);
13107   StoreObjectFieldNoWriteBarrier(fun, JSFunction::kCodeOffset, code);
13108   return CAST(fun);
13109 }
13110 
13111 void CodeStubAssembler::CheckPrototypeEnumCache(TNode<JSReceiver> receiver,
13112                                                 TNode<Map> receiver_map,
13113                                                 Label* if_fast,
13114                                                 Label* if_slow) {
13115   TVARIABLE(JSReceiver, var_object, receiver);
13116   TVARIABLE(Map, object_map, receiver_map);
13117 
13118   Label loop(this, {&var_object, &object_map}), done_loop(this);
13119   Goto(&loop);
13120   BIND(&loop);
13121   {
13122     // Check that there are no elements on the current {var_object}.
13123     Label if_no_elements(this);
13124 
13125     // The following relies on the elements only aliasing with JSProxy::target,
13126     // which is a JavaScript value and hence cannot be confused with an elements
13127     // backing store.
13128     STATIC_ASSERT(static_cast<int>(JSObject::kElementsOffset) ==
13129                   static_cast<int>(JSProxy::kTargetOffset));
13130     TNode<Object> object_elements =
13131         LoadObjectField(var_object.value(), JSObject::kElementsOffset);
13132     GotoIf(IsEmptyFixedArray(object_elements), &if_no_elements);
13133     GotoIf(IsEmptySlowElementDictionary(object_elements), &if_no_elements);
13134 
13135     // It might still be an empty JSArray.
13136     GotoIfNot(IsJSArrayMap(object_map.value()), if_slow);
13137     TNode<Number> object_length = LoadJSArrayLength(CAST(var_object.value()));
13138     Branch(TaggedEqual(object_length, SmiConstant(0)), &if_no_elements,
13139            if_slow);
13140 
13141     // Continue with {var_object}'s prototype.
13142     BIND(&if_no_elements);
13143     TNode<HeapObject> object = LoadMapPrototype(object_map.value());
13144     GotoIf(IsNull(object), if_fast);
13145 
13146     // For all {object}s but the {receiver}, check that the cache is empty.
13147     var_object = CAST(object);
13148     object_map = LoadMap(object);
13149     TNode<WordT> object_enum_length = LoadMapEnumLength(object_map.value());
13150     Branch(WordEqual(object_enum_length, IntPtrConstant(0)), &loop, if_slow);
13151   }
13152 }
13153 
13154 TNode<Map> CodeStubAssembler::CheckEnumCache(TNode<JSReceiver> receiver,
13155                                              Label* if_empty,
13156                                              Label* if_runtime) {
13157   Label if_fast(this), if_cache(this), if_no_cache(this, Label::kDeferred);
13158   TNode<Map> receiver_map = LoadMap(receiver);
13159 
13160   // Check if the enum length field of the {receiver} is properly initialized,
13161   // indicating that there is an enum cache.
13162   TNode<WordT> receiver_enum_length = LoadMapEnumLength(receiver_map);
13163   Branch(WordEqual(receiver_enum_length,
13164                    IntPtrConstant(kInvalidEnumCacheSentinel)),
13165          &if_no_cache, &if_cache);
13166 
13167   BIND(&if_no_cache);
13168   {
13169     // Avoid runtime-call for empty dictionary receivers.
13170     GotoIfNot(IsDictionaryMap(receiver_map), if_runtime);
13171     TNode<HashTableBase> properties =
13172         UncheckedCast<HashTableBase>(LoadSlowProperties(receiver));
13173     CSA_ASSERT(this, Word32Or(IsNameDictionary(properties),
13174                               IsGlobalDictionary(properties)));
13175     STATIC_ASSERT(static_cast<int>(NameDictionary::kNumberOfElementsIndex) ==
13176                   static_cast<int>(GlobalDictionary::kNumberOfElementsIndex));
13177     TNode<Smi> length = GetNumberOfElements(properties);
13178     GotoIfNot(TaggedEqual(length, SmiConstant(0)), if_runtime);
13179     // Check that there are no elements on the {receiver} and its prototype
13180     // chain. Given that we do not create an EnumCache for dict-mode objects,
13181     // directly jump to {if_empty} if there are no elements and no properties
13182     // on the {receiver}.
13183     CheckPrototypeEnumCache(receiver, receiver_map, if_empty, if_runtime);
13184   }
13185 
13186   // Check that there are no elements on the fast {receiver} and its
13187   // prototype chain.
13188   BIND(&if_cache);
13189   CheckPrototypeEnumCache(receiver, receiver_map, &if_fast, if_runtime);
13190 
13191   BIND(&if_fast);
13192   return receiver_map;
13193 }
13194 
13195 TNode<Object> CodeStubAssembler::GetArgumentValue(TorqueStructArguments args,
13196                                                   TNode<IntPtrT> index) {
13197   return CodeStubArguments(this, args).GetOptionalArgumentValue(index);
13198 }
13199 
13200 TorqueStructArguments CodeStubAssembler::GetFrameArguments(
13201     TNode<RawPtrT> frame, TNode<IntPtrT> argc) {
13202   return CodeStubArguments(this, argc, frame).GetTorqueArguments();
13203 }
13204 
13205 void CodeStubAssembler::Print(const char* s) {
13206   std::string formatted(s);
13207   formatted += "\n";
13208   CallRuntime(Runtime::kGlobalPrint, NoContextConstant(),
13209               StringConstant(formatted.c_str()));
13210 }
13211 
13212 void CodeStubAssembler::Print(const char* prefix,
13213                               TNode<MaybeObject> tagged_value) {
13214   if (prefix != nullptr) {
13215     std::string formatted(prefix);
13216     formatted += ": ";
13217     Handle<String> string = isolate()->factory()->NewStringFromAsciiChecked(
13218         formatted.c_str(), AllocationType::kOld);
13219     CallRuntime(Runtime::kGlobalPrint, NoContextConstant(),
13220                 HeapConstant(string));
13221   }
13222   // CallRuntime only accepts Objects, so do an UncheckedCast to Object.
13223   // DebugPrint explicitly checks whether the tagged value is a MaybeObject.
13224   TNode<Object> arg = UncheckedCast<Object>(tagged_value);
13225   CallRuntime(Runtime::kDebugPrint, NoContextConstant(), arg);
13226 }
13227 
13228 void CodeStubAssembler::PerformStackCheck(TNode<Context> context) {
13229   Label ok(this), stack_check_interrupt(this, Label::kDeferred);
13230 
13231   TNode<UintPtrT> stack_limit = UncheckedCast<UintPtrT>(
13232       Load(MachineType::Pointer(),
13233            ExternalConstant(ExternalReference::address_of_jslimit(isolate()))));
13234   TNode<BoolT> sp_within_limit = StackPointerGreaterThan(stack_limit);
13235 
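  // The stack grows downwards, so a stack pointer above js_limit means there
  // is still headroom; otherwise the StackGuard runtime services interrupts
  // and genuine stack overflow.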
13236   Branch(sp_within_limit, &ok, &stack_check_interrupt);
13237 
13238   BIND(&stack_check_interrupt);
13239   CallRuntime(Runtime::kStackGuard, context);
13240   Goto(&ok);
13241 
13242   BIND(&ok);
13243 }
13244 
13245 TNode<Object> CodeStubAssembler::CallApiCallback(
13246     TNode<Object> context, TNode<RawPtrT> callback, TNode<IntPtrT> argc,
13247     TNode<Object> data, TNode<Object> holder, TNode<Object> receiver) {
13248   Callable callable = CodeFactory::CallApiCallback(isolate());
13249   return CallStub(callable, context, callback, argc, data, holder, receiver);
13250 }
13251 
13252 TNode<Object> CodeStubAssembler::CallApiCallback(
13253     TNode<Object> context, TNode<RawPtrT> callback, TNode<IntPtrT> argc,
13254     TNode<Object> data, TNode<Object> holder, TNode<Object> receiver,
13255     TNode<Object> value) {
13256   Callable callable = CodeFactory::CallApiCallback(isolate());
13257   return CallStub(callable, context, callback, argc, data, holder, receiver,
13258                   value);
13259 }
13260 
13261 TNode<Object> CodeStubAssembler::CallRuntimeNewArray(
13262     TNode<Context> context, TNode<Object> receiver, TNode<Object> length,
13263     TNode<Object> new_target, TNode<Object> allocation_site) {
13264   // Runtime_NewArray receives the arguments in JS order (to avoid an
13265   // unnecessary copy), except for the last two (new_target and
13266   // allocation_site), which are added on top of the stack later.
13267   return CallRuntime(Runtime::kNewArray, context, length, receiver, new_target,
13268                      allocation_site);
13269 }
13270 
13271 void CodeStubAssembler::TailCallRuntimeNewArray(TNode<Context> context,
13272                                                 TNode<Object> receiver,
13273                                                 TNode<Object> length,
13274                                                 TNode<Object> new_target,
13275                                                 TNode<Object> allocation_site) {
13276   // Runtime_NewArray receives the arguments in JS order (to avoid an
13277   // unnecessary copy), except for the last two (new_target and
13278   // allocation_site), which are added on top of the stack later.
13279   return TailCallRuntime(Runtime::kNewArray, context, length, receiver,
13280                          new_target, allocation_site);
13281 }
13282 
13283 TNode<JSArray> CodeStubAssembler::ArrayCreate(TNode<Context> context,
13284                                               TNode<Number> length) {
13285   TVARIABLE(JSArray, array);
13286   Label allocate_js_array(this);
13287 
13288   Label done(this), next(this), runtime(this, Label::kDeferred);
13289   TNode<Smi> limit = SmiConstant(JSArray::kInitialMaxFastElementArray);
13290   CSA_ASSERT_BRANCH(this, [=](Label* ok, Label* not_ok) {
13291     BranchIfNumberRelationalComparison(Operation::kGreaterThanOrEqual, length,
13292                                        SmiConstant(0), ok, not_ok);
13293   });
13294   // This check also transitively covers the case where length is too big
13295   // to be representable by a SMI and so is not usable with
13296   // AllocateJSArray.
13297   BranchIfNumberRelationalComparison(Operation::kGreaterThanOrEqual, length,
13298                                      limit, &runtime, &next);
13299 
13300   BIND(&runtime);
13301   {
13302     TNode<NativeContext> native_context = LoadNativeContext(context);
13303     TNode<JSFunction> array_function =
13304         CAST(LoadContextElement(native_context, Context::ARRAY_FUNCTION_INDEX));
13305     array = CAST(CallRuntimeNewArray(context, array_function, length,
13306                                      array_function, UndefinedConstant()));
13307     Goto(&done);
13308   }
13309 
13310   BIND(&next);
13311   TNode<Smi> length_smi = CAST(length);
13312 
13313   TNode<Map> array_map = CAST(LoadContextElement(
13314       context, Context::JS_ARRAY_PACKED_SMI_ELEMENTS_MAP_INDEX));
13315 
13316   // TODO(delphick): Consider using
13317   // AllocateUninitializedJSArrayWithElements to avoid initializing an
13318   // array and then writing over it.
13319   array = AllocateJSArray(PACKED_SMI_ELEMENTS, array_map, length_smi,
13320                           SmiConstant(0));
13321   Goto(&done);
13322 
13323   BIND(&done);
13324   return array.value();
13325 }
13326 
13327 void CodeStubAssembler::SetPropertyLength(TNode<Context> context,
13328                                           TNode<Object> array,
13329                                           TNode<Number> length) {
13330   Label fast(this), runtime(this), done(this);
13331   // There's no need to set the length, if
13332   // 1) the array is a fast JS array and
13333   // 2) the new length is equal to the old length.
13334   // as the set is not observable. Otherwise fall back to the run-time.
13335 
13336   // 1) Check that the array has fast elements.
13337   // TODO(delphick): Consider changing this since it does an an unnecessary
13338   // check for SMIs.
13339   // TODO(delphick): Also we could hoist this to after the array construction
13340   // and copy the args into array in the same way as the Array constructor.
13341   BranchIfFastJSArray(array, context, &fast, &runtime);
13342 
13343   BIND(&fast);
13344   {
13345     TNode<JSArray> fast_array = CAST(array);
13346 
13347     TNode<Smi> length_smi = CAST(length);
13348     TNode<Smi> old_length = LoadFastJSArrayLength(fast_array);
13349     CSA_ASSERT(this, TaggedIsPositiveSmi(old_length));
13350 
13351     // 2) If the created array's length matches the required length, then
13352     //    there's nothing else to do. Otherwise use the runtime to set the
13353     //    property as that will insert holes into excess elements or shrink
13354     //    the backing store as appropriate.
13355     Branch(SmiNotEqual(length_smi, old_length), &runtime, &done);
13356   }
13357 
13358   BIND(&runtime);
13359   {
13360     SetPropertyStrict(context, array, CodeStubAssembler::LengthStringConstant(),
13361                       length);
13362     Goto(&done);
13363   }
13364 
13365   BIND(&done);
13366 }
13367 
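// Note: RefillMathRandom below refills the per-context Math.random() cache by
// calling into C++ via CallCFunction; the target referenced by
// ExternalReference::refill_math_random() is passed the isolate and the native
// context and returns the new cache index as a tagged Smi, which is what the
// CAST of the call result relies on.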
TNode<Smi> CodeStubAssembler::RefillMathRandom(
    TNode<NativeContext> native_context) {
  // Cache exhausted, populate the cache. Return value is the new index.
  const TNode<ExternalReference> refill_math_random =
      ExternalConstant(ExternalReference::refill_math_random());
  const TNode<ExternalReference> isolate_ptr =
      ExternalConstant(ExternalReference::isolate_address(isolate()));
  MachineType type_tagged = MachineType::AnyTagged();
  MachineType type_ptr = MachineType::Pointer();

  return CAST(CallCFunction(refill_math_random, type_tagged,
                            std::make_pair(type_ptr, isolate_ptr),
                            std::make_pair(type_tagged, native_context)));
}

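// Note: TaggedToDirectString below returns |value| unchanged, but only after
// ToDirectStringAssembler has confirmed it can be accessed as a direct string;
// both TryToDirect and PointerToData branch to |fail| when that is not the
// case (e.g. |value| is not a string in a directly addressable form).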
TNode<String> CodeStubAssembler::TaggedToDirectString(TNode<Object> value,
                                                      Label* fail) {
  ToDirectStringAssembler to_direct(state(), CAST(value));
  to_direct.TryToDirect(fail);
  to_direct.PointerToData(fail);
  return CAST(value);
}

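// Note: the helper below simply forwards to a C++ function on the isolate; the
// MachineType::Pointer() return value of the CallCFunction is ignored, as the
// call is performed purely for its side effect on the unregister token map.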
void CodeStubAssembler::RemoveFinalizationRegistryCellFromUnregisterTokenMap(
    TNode<JSFinalizationRegistry> finalization_registry,
    TNode<WeakCell> weak_cell) {
  const TNode<ExternalReference> remove_cell = ExternalConstant(
      ExternalReference::
          js_finalization_registry_remove_cell_from_unregister_token_map());
  const TNode<ExternalReference> isolate_ptr =
      ExternalConstant(ExternalReference::isolate_address(isolate()));

  CallCFunction(remove_cell, MachineType::Pointer(),
                std::make_pair(MachineType::Pointer(), isolate_ptr),
                std::make_pair(MachineType::AnyTagged(), finalization_registry),
                std::make_pair(MachineType::AnyTagged(), weak_cell));
}

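// Note: PrototypeCheckAssembler bundles the prototype fast-path checks used by
// builtins that specialize on an unmodified prototype: a cheap map-identity
// plus property-constness check, and a slower per-property value-identity
// check, selected via the Flags passed to the constructor.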
PrototypeCheckAssembler::PrototypeCheckAssembler(
    compiler::CodeAssemblerState* state, Flags flags,
    TNode<NativeContext> native_context, TNode<Map> initial_prototype_map,
    Vector<DescriptorIndexNameValue> properties)
    : CodeStubAssembler(state),
      flags_(flags),
      native_context_(native_context),
      initial_prototype_map_(initial_prototype_map),
      properties_(properties) {}

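// Illustrative (hypothetical) call site for CheckAndBranch below; the flag
// combination, the property table and the labels are placeholders, only the
// PrototypeCheckAssembler API itself is taken from this file:
//
//   PrototypeCheckAssembler prototype_check(
//       state(),
//       PrototypeCheckAssembler::kCheckPrototypePropertyConstness |
//           PrototypeCheckAssembler::kCheckPrototypePropertyIdentity,
//       native_context, initial_prototype_map, properties);
//   prototype_check.CheckAndBranch(prototype, &if_fast, &if_slow);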
void PrototypeCheckAssembler::CheckAndBranch(TNode<HeapObject> prototype,
                                             Label* if_unmodified,
                                             Label* if_modified) {
  TNode<Map> prototype_map = LoadMap(prototype);
  TNode<DescriptorArray> descriptors = LoadMapDescriptors(prototype_map);

  // The continuation of a failed fast check: if property identity checks are
  // enabled, we continue there (since they may still classify the prototype as
  // fast), otherwise we bail out.
  Label property_identity_check(this, Label::kDeferred);
  Label* if_fast_check_failed =
      ((flags_ & kCheckPrototypePropertyIdentity) == 0)
          ? if_modified
          : &property_identity_check;

  if ((flags_ & kCheckPrototypePropertyConstness) != 0) {
    // A simple prototype map identity check. Note that map identity does not
    // guarantee unmodified properties. It does guarantee that no new
    // properties have been added and no old properties have been deleted.

    GotoIfNot(TaggedEqual(prototype_map, initial_prototype_map_),
              if_fast_check_failed);

    // We need to make sure that relevant properties in the prototype have
    // not been tampered with. We do this by checking that their slots
    // in the prototype's descriptor array are still marked as const.

    TNode<Uint32T> combined_details;
    for (int i = 0; i < properties_.length(); i++) {
      // Assert the descriptor index is in-bounds.
      int descriptor = properties_[i].descriptor_index;
      CSA_ASSERT(this, Int32LessThan(Int32Constant(descriptor),
                                     LoadNumberOfDescriptors(descriptors)));

      // Assert that the name is correct. This essentially checks that
      // the descriptor index corresponds to the insertion order in
      // the bootstrapper.
      CSA_ASSERT(
          this,
          TaggedEqual(LoadKeyByDescriptorEntry(descriptors, descriptor),
                      CodeAssembler::LoadRoot(properties_[i].name_root_index)));

      TNode<Uint32T> details =
          DescriptorArrayGetDetails(descriptors, Uint32Constant(descriptor));

      if (i == 0) {
        combined_details = details;
      } else {
        combined_details = Word32And(combined_details, details);
      }
    }

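    // Note: combining the details with Word32And means the ConstnessField
    // decoded below reads as kConst only if it was kConst for every checked
    // property, so the single comparison that follows covers all of them
    // (this relies on kConst being the all-ones encoding of that field).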
    TNode<Uint32T> constness =
        DecodeWord32<PropertyDetails::ConstnessField>(combined_details);

    Branch(
        Word32Equal(constness,
                    Int32Constant(static_cast<int>(PropertyConstness::kConst))),
        if_unmodified, if_fast_check_failed);
  }

  if ((flags_ & kCheckPrototypePropertyIdentity) != 0) {
    // The above checks have failed, for whatever reason (maybe the prototype
    // map has changed, or a property is no longer const). This block implements
    // a more thorough check that can also accept maps which 1. do not have the
    // initial map, 2. have mutable relevant properties, but 3. still match the
    // expected value for all relevant properties.

    BIND(&property_identity_check);

    int max_descriptor_index = -1;
    for (int i = 0; i < properties_.length(); i++) {
      max_descriptor_index =
          std::max(max_descriptor_index, properties_[i].descriptor_index);
    }

    // If the greatest descriptor index is out of bounds, the map cannot be
    // fast.
    GotoIfNot(Int32LessThan(Int32Constant(max_descriptor_index),
                            LoadNumberOfDescriptors(descriptors)),
              if_modified);

    // Logic below only handles maps with fast properties.
    GotoIfMapHasSlowProperties(prototype_map, if_modified);

    for (int i = 0; i < properties_.length(); i++) {
      const DescriptorIndexNameValue& p = properties_[i];
      const int descriptor = p.descriptor_index;

      // Check if the name is correct. This essentially checks that
      // the descriptor index corresponds to the insertion order in
      // the bootstrapper.
      GotoIfNot(TaggedEqual(LoadKeyByDescriptorEntry(descriptors, descriptor),
                            CodeAssembler::LoadRoot(p.name_root_index)),
                if_modified);

      // Finally, check whether the actual value equals the expected value.
      TNode<Uint32T> details =
          DescriptorArrayGetDetails(descriptors, Uint32Constant(descriptor));
      TVARIABLE(Uint32T, var_details, details);
      TVARIABLE(Object, var_value);

      const int key_index = DescriptorArray::ToKeyIndex(descriptor);
      LoadPropertyFromFastObject(prototype, prototype_map, descriptors,
                                 IntPtrConstant(key_index), &var_details,
                                 &var_value);

      TNode<Object> actual_value = var_value.value();
      TNode<Object> expected_value =
          LoadContextElement(native_context_, p.expected_value_context_index);
      GotoIfNot(TaggedEqual(actual_value, expected_value), if_modified);
    }

    Goto(if_unmodified);
  }
}

}  // namespace internal
}  // namespace v8