• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2015 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/compiler/effect-control-linearizer.h"
6 
7 #include "include/v8-fast-api-calls.h"
8 #include "src/base/bits.h"
9 #include "src/codegen/code-factory.h"
10 #include "src/codegen/machine-type.h"
11 #include "src/common/ptr-compr-inl.h"
12 #include "src/compiler/access-builder.h"
13 #include "src/compiler/compiler-source-position-table.h"
14 #include "src/compiler/feedback-source.h"
15 #include "src/compiler/graph-assembler.h"
16 #include "src/compiler/js-graph.h"
17 #include "src/compiler/js-heap-broker.h"
18 #include "src/compiler/linkage.h"
19 #include "src/compiler/node-matchers.h"
20 #include "src/compiler/node-origin-table.h"
21 #include "src/compiler/node-properties.h"
22 #include "src/compiler/node.h"
23 #include "src/compiler/schedule.h"
24 #include "src/execution/frames.h"
25 #include "src/heap/factory-inl.h"
26 #include "src/objects/heap-number.h"
27 #include "src/objects/oddball.h"
28 #include "src/objects/ordered-hash-table.h"
29 
30 namespace v8 {
31 namespace internal {
32 namespace compiler {
33 
// Walks the schedule in reverse-post-order, threads an explicit effect and
// control chain through every node, and lowers simplified-level operators to
// machine-level operators along the way (via the JSGraphAssembler).
class EffectControlLinearizer {
 public:
  EffectControlLinearizer(JSGraph* js_graph, Schedule* schedule,
                          Zone* temp_zone,
                          SourcePositionTable* source_positions,
                          NodeOriginTable* node_origins,
                          MaskArrayIndexEnable mask_array_index,
                          MaintainSchedule maintain_schedule,
                          JSHeapBroker* broker)
      : js_graph_(js_graph),
        schedule_(schedule),
        temp_zone_(temp_zone),
        mask_array_index_(mask_array_index),
        maintain_schedule_(maintain_schedule),
        source_positions_(source_positions),
        node_origins_(node_origins),
        broker_(broker),
        // The assembler is only handed the schedule when it is supposed to
        // keep it up to date while new nodes are created.
        graph_assembler_(js_graph, temp_zone, base::nullopt,
                         should_maintain_schedule() ? schedule : nullptr),
        frame_state_zapper_(nullptr),
        fast_api_call_stack_slot_(nullptr) {}

  // Performs the linearization over the whole schedule.
  void Run();

 private:
  void UpdateEffectControlForNode(Node* node);
  void ProcessNode(Node* node, Node** frame_state);

  // Dispatches |node| to the matching Lower* method below. Lowerings that
  // take a |frame_state| may emit deoptimization checks.
  bool TryWireInStateEffect(Node* node, Node* frame_state);
  Node* LowerChangeBitToTagged(Node* node);
  Node* LowerChangeInt31ToTaggedSigned(Node* node);
  Node* LowerChangeInt32ToTagged(Node* node);
  Node* LowerChangeInt64ToTagged(Node* node);
  Node* LowerChangeUint32ToTagged(Node* node);
  Node* LowerChangeUint64ToTagged(Node* node);
  Node* LowerChangeFloat64ToTagged(Node* node);
  Node* LowerChangeFloat64ToTaggedPointer(Node* node);
  Node* LowerChangeTaggedSignedToInt32(Node* node);
  Node* LowerChangeTaggedSignedToInt64(Node* node);
  Node* LowerChangeTaggedToBit(Node* node);
  Node* LowerChangeTaggedToInt32(Node* node);
  Node* LowerChangeTaggedToUint32(Node* node);
  Node* LowerChangeTaggedToInt64(Node* node);
  Node* LowerChangeTaggedToTaggedSigned(Node* node);
  Node* LowerPoisonIndex(Node* node);
  Node* LowerCheckInternalizedString(Node* node, Node* frame_state);
  void LowerCheckMaps(Node* node, Node* frame_state);
  void LowerDynamicCheckMaps(Node* node, Node* frame_state);
  Node* LowerCompareMaps(Node* node);
  Node* LowerCheckNumber(Node* node, Node* frame_state);
  Node* LowerCheckClosure(Node* node, Node* frame_state);
  Node* LowerCheckReceiver(Node* node, Node* frame_state);
  Node* LowerCheckReceiverOrNullOrUndefined(Node* node, Node* frame_state);
  Node* LowerCheckString(Node* node, Node* frame_state);
  Node* LowerCheckBigInt(Node* node, Node* frame_state);
  Node* LowerCheckSymbol(Node* node, Node* frame_state);
  void LowerCheckIf(Node* node, Node* frame_state);
  Node* LowerCheckedInt32Add(Node* node, Node* frame_state);
  Node* LowerCheckedInt32Sub(Node* node, Node* frame_state);
  Node* LowerCheckedInt32Div(Node* node, Node* frame_state);
  Node* LowerCheckedInt32Mod(Node* node, Node* frame_state);
  Node* LowerCheckedUint32Div(Node* node, Node* frame_state);
  Node* LowerCheckedUint32Mod(Node* node, Node* frame_state);
  Node* LowerCheckedInt32Mul(Node* node, Node* frame_state);
  Node* LowerCheckedInt32ToTaggedSigned(Node* node, Node* frame_state);
  Node* LowerCheckedInt64ToInt32(Node* node, Node* frame_state);
  Node* LowerCheckedInt64ToTaggedSigned(Node* node, Node* frame_state);
  Node* LowerCheckedUint32Bounds(Node* node, Node* frame_state);
  Node* LowerCheckedUint32ToInt32(Node* node, Node* frame_state);
  Node* LowerCheckedUint32ToTaggedSigned(Node* node, Node* frame_state);
  Node* LowerCheckedUint64Bounds(Node* node, Node* frame_state);
  Node* LowerCheckedUint64ToInt32(Node* node, Node* frame_state);
  Node* LowerCheckedUint64ToTaggedSigned(Node* node, Node* frame_state);
  Node* LowerCheckedFloat64ToInt32(Node* node, Node* frame_state);
  Node* LowerCheckedFloat64ToInt64(Node* node, Node* frame_state);
  Node* LowerCheckedTaggedSignedToInt32(Node* node, Node* frame_state);
  Node* LowerCheckedTaggedToArrayIndex(Node* node, Node* frame_state);
  Node* LowerCheckedTaggedToInt32(Node* node, Node* frame_state);
  Node* LowerCheckedTaggedToInt64(Node* node, Node* frame_state);
  Node* LowerCheckedTaggedToFloat64(Node* node, Node* frame_state);
  Node* LowerCheckedTaggedToTaggedSigned(Node* node, Node* frame_state);
  Node* LowerCheckedTaggedToTaggedPointer(Node* node, Node* frame_state);
  Node* LowerBigIntAsUintN(Node* node, Node* frame_state);
  Node* LowerChangeUint64ToBigInt(Node* node);
  Node* LowerTruncateBigIntToUint64(Node* node);
  Node* LowerChangeTaggedToFloat64(Node* node);
  void TruncateTaggedPointerToBit(Node* node, GraphAssemblerLabel<1>* done);
  Node* LowerTruncateTaggedToBit(Node* node);
  Node* LowerTruncateTaggedPointerToBit(Node* node);
  Node* LowerTruncateTaggedToFloat64(Node* node);
  Node* LowerTruncateTaggedToWord32(Node* node);
  Node* LowerCheckedTruncateTaggedToWord32(Node* node, Node* frame_state);
  Node* LowerAllocate(Node* node);
  Node* LowerNumberToString(Node* node);
  Node* LowerObjectIsArrayBufferView(Node* node);
  Node* LowerObjectIsBigInt(Node* node);
  Node* LowerObjectIsCallable(Node* node);
  Node* LowerObjectIsConstructor(Node* node);
  Node* LowerObjectIsDetectableCallable(Node* node);
  Node* LowerObjectIsMinusZero(Node* node);
  Node* LowerNumberIsMinusZero(Node* node);
  Node* LowerObjectIsNaN(Node* node);
  Node* LowerNumberIsNaN(Node* node);
  Node* LowerObjectIsNonCallable(Node* node);
  Node* LowerObjectIsNumber(Node* node);
  Node* LowerObjectIsReceiver(Node* node);
  Node* LowerObjectIsSmi(Node* node);
  Node* LowerObjectIsString(Node* node);
  Node* LowerObjectIsSymbol(Node* node);
  Node* LowerObjectIsUndetectable(Node* node);
  Node* LowerNumberIsFloat64Hole(Node* node);
  Node* LowerNumberIsFinite(Node* node);
  Node* LowerObjectIsFiniteNumber(Node* node);
  Node* LowerNumberIsInteger(Node* node);
  Node* LowerObjectIsInteger(Node* node);
  Node* LowerNumberIsSafeInteger(Node* node);
  Node* LowerObjectIsSafeInteger(Node* node);
  Node* LowerArgumentsFrame(Node* node);
  Node* LowerArgumentsLength(Node* node);
  Node* LowerRestLength(Node* node);
  Node* LowerNewDoubleElements(Node* node);
  Node* LowerNewSmiOrObjectElements(Node* node);
  Node* LowerNewArgumentsElements(Node* node);
  Node* LowerNewConsString(Node* node);
  Node* LowerSameValue(Node* node);
  Node* LowerSameValueNumbersOnly(Node* node);
  Node* LowerNumberSameValue(Node* node);
  Node* LowerDeadValue(Node* node);
  Node* LowerStringConcat(Node* node);
  Node* LowerStringToNumber(Node* node);
  Node* LowerStringCharCodeAt(Node* node);
  Node* LowerStringCodePointAt(Node* node);
  Node* LowerStringToLowerCaseIntl(Node* node);
  Node* LowerStringToUpperCaseIntl(Node* node);
  Node* LowerStringFromSingleCharCode(Node* node);
  Node* LowerStringFromSingleCodePoint(Node* node);
  Node* LowerStringIndexOf(Node* node);
  Node* LowerStringSubstring(Node* node);
  Node* LowerStringFromCodePointAt(Node* node);
  Node* LowerStringLength(Node* node);
  Node* LowerStringEqual(Node* node);
  Node* LowerStringLessThan(Node* node);
  Node* LowerStringLessThanOrEqual(Node* node);
  Node* LowerBigIntAdd(Node* node, Node* frame_state);
  Node* LowerBigIntSubtract(Node* node, Node* frame_state);
  Node* LowerBigIntNegate(Node* node);
  Node* LowerCheckFloat64Hole(Node* node, Node* frame_state);
  Node* LowerCheckNotTaggedHole(Node* node, Node* frame_state);
  Node* LowerConvertTaggedHoleToUndefined(Node* node);
  void LowerCheckEqualsInternalizedString(Node* node, Node* frame_state);
  void LowerCheckEqualsSymbol(Node* node, Node* frame_state);
  Node* LowerTypeOf(Node* node);
  void LowerTierUpCheck(Node* node);
  void LowerUpdateInterruptBudget(Node* node);
  Node* LowerToBoolean(Node* node);
  Node* LowerPlainPrimitiveToNumber(Node* node);
  Node* LowerPlainPrimitiveToWord32(Node* node);
  Node* LowerPlainPrimitiveToFloat64(Node* node);
  Node* LowerEnsureWritableFastElements(Node* node);
  Node* LowerMaybeGrowFastElements(Node* node, Node* frame_state);
  void LowerTransitionElementsKind(Node* node);
  Node* LowerLoadFieldByIndex(Node* node);
  Node* LowerLoadMessage(Node* node);
  Node* LowerFastApiCall(Node* node);
  Node* LowerLoadTypedElement(Node* node);
  Node* LowerLoadDataViewElement(Node* node);
  Node* LowerLoadStackArgument(Node* node);
  void LowerStoreMessage(Node* node);
  void LowerStoreTypedElement(Node* node);
  void LowerStoreDataViewElement(Node* node);
  void LowerStoreSignedSmallElement(Node* node);
  Node* LowerFindOrderedHashMapEntry(Node* node);
  Node* LowerFindOrderedHashMapEntryForInt32Key(Node* node);
  void LowerTransitionAndStoreElement(Node* node);
  void LowerTransitionAndStoreNumberElement(Node* node);
  void LowerTransitionAndStoreNonNumberElement(Node* node);
  void LowerRuntimeAbort(Node* node);
  Node* LowerAssertType(Node* node);
  Node* LowerFoldConstant(Node* node);
  Node* LowerConvertReceiver(Node* node);
  Node* LowerDateNow(Node* node);

  // Lowering of optional operators. These return Nothing when the machine
  // does not support the operation.
  Maybe<Node*> LowerFloat64RoundUp(Node* node);
  Maybe<Node*> LowerFloat64RoundDown(Node* node);
  Maybe<Node*> LowerFloat64RoundTiesEven(Node* node);
  Maybe<Node*> LowerFloat64RoundTruncate(Node* node);

  // Shared code-generation helpers used by the lowerings above.
  Node* AllocateHeapNumberWithValue(Node* node);
  Node* BuildCheckedFloat64ToInt32(CheckForMinusZeroMode mode,
                                   const FeedbackSource& feedback, Node* value,
                                   Node* frame_state);
  Node* BuildCheckedFloat64ToInt64(CheckForMinusZeroMode mode,
                                   const FeedbackSource& feedback, Node* value,
                                   Node* frame_state);
  Node* BuildCheckedFloat64ToIndex(const FeedbackSource& feedback, Node* value,
                                   Node* frame_state);
  Node* BuildCheckedHeapNumberOrOddballToFloat64(CheckTaggedInputMode mode,
                                                 const FeedbackSource& feedback,
                                                 Node* value,
                                                 Node* frame_state);
  Node* BuildReverseBytes(ExternalArrayType type, Node* value);
  Node* BuildFloat64RoundDown(Node* value);
  Node* BuildFloat64RoundTruncate(Node* input);
  template <size_t VarCount, size_t VarCount2>
  void SmiTagOrOverflow(Node* value, GraphAssemblerLabel<VarCount>* if_overflow,
                        GraphAssemblerLabel<VarCount2>* done);
  Node* SmiTagOrDeopt(Node* value, const CheckParameters& params,
                      Node* frame_state);
  Node* BuildUint32Mod(Node* lhs, Node* rhs);
  Node* ComputeUnseededHash(Node* value);
  Node* LowerStringComparison(Callable const& callable, Node* node);
  Node* IsElementsKindGreaterThan(Node* kind, ElementsKind reference_kind);

  Node* BuildTypedArrayDataPointer(Node* base, Node* external);

  template <typename... Args>
  Node* CallBuiltin(Builtins::Name builtin, Operator::Properties properties,
                    Args...);

  Node* ChangeInt32ToSmi(Node* value);
  // In pointer compression, we smi-corrupt. This means the upper bits of a Smi
  // are not important. ChangeTaggedInt32ToSmi has a known tagged int32 as input
  // and takes advantage of the smi corruption by emitting a Bitcast node
  // instead of a Change node in order to save instructions.
  // In non pointer compression, it behaves like ChangeInt32ToSmi.
  Node* ChangeTaggedInt32ToSmi(Node* value);
  Node* ChangeInt32ToIntPtr(Node* value);
  Node* ChangeInt64ToSmi(Node* value);
  Node* ChangeIntPtrToInt32(Node* value);
  Node* ChangeIntPtrToSmi(Node* value);
  Node* ChangeUint32ToUintPtr(Node* value);
  Node* ChangeUint32ToSmi(Node* value);
  Node* ChangeSmiToIntPtr(Node* value);
  Node* ChangeSmiToInt32(Node* value);
  Node* ChangeSmiToInt64(Node* value);
  Node* ObjectIsSmi(Node* value);
  Node* LoadFromSeqString(Node* receiver, Node* position, Node* is_one_byte);
  Node* TruncateWordToInt32(Node* value);
  Node* MakeWeakForComparison(Node* heap_object);
  Node* BuildIsWeakReferenceTo(Node* maybe_object, Node* value);
  Node* BuildIsClearedWeakReference(Node* maybe_object);
  Node* BuildIsStrongReference(Node* value);
  Node* BuildStrongReferenceFromWeakReference(Node* value);
  Node* SmiMaxValueConstant();
  Node* SmiShiftBitsConstant();
  void TransitionElementsTo(Node* node, Node* array, ElementsKind from,
                            ElementsKind to);

  // This function tries to migrate |value| if its map |value_map| is
  // deprecated. It deopts, if either |value_map| isn't deprecated or migration
  // fails.
  void MigrateInstanceOrDeopt(Node* value, Node* value_map, Node* frame_state,
                              FeedbackSource const& feedback_source,
                              DeoptimizeReason reason);

  // Helper functions used in LowerDynamicCheckMaps
  void BuildCallDynamicMapChecksBuiltin(Node* actual_value,
                                        Node* actual_handler,
                                        int feedback_slot_index,
                                        GraphAssemblerLabel<0>* done,
                                        Node* frame_state);

  bool should_maintain_schedule() const {
    return maintain_schedule_ == MaintainSchedule::kMaintain;
  }

  // Simple accessors; all delegate to the JSGraph passed at construction.
  Factory* factory() const { return isolate()->factory(); }
  Isolate* isolate() const { return jsgraph()->isolate(); }
  JSGraph* jsgraph() const { return js_graph_; }
  Graph* graph() const { return js_graph_->graph(); }
  Schedule* schedule() const { return schedule_; }
  Zone* temp_zone() const { return temp_zone_; }
  CommonOperatorBuilder* common() const { return js_graph_->common(); }
  SimplifiedOperatorBuilder* simplified() const {
    return js_graph_->simplified();
  }
  MachineOperatorBuilder* machine() const { return js_graph_->machine(); }
  JSGraphAssembler* gasm() { return &graph_assembler_; }
  JSHeapBroker* broker() const { return broker_; }

  JSGraph* js_graph_;
  Schedule* schedule_;
  Zone* temp_zone_;
  MaskArrayIndexEnable mask_array_index_;
  MaintainSchedule maintain_schedule_;
  RegionObservability region_observability_ = RegionObservability::kObservable;
  SourcePositionTable* source_positions_;
  NodeOriginTable* node_origins_;
  JSHeapBroker* broker_;
  JSGraphAssembler graph_assembler_;
  Node* frame_state_zapper_;  // For tracking down compiler::Node::New crashes.
  Node* fast_api_call_stack_slot_;  // For caching the stack slot allocated for
                                    // fast API calls.
};
328 
329 namespace {
330 
// Per-edge record of the effect, control and frame-state nodes that are live
// at the end of a (predecessor, successor) block transition.
struct BlockEffectControlData {
  Node* current_effect = nullptr;       // New effect.
  Node* current_control = nullptr;      // New control.
  Node* current_frame_state = nullptr;  // New frame state.
};
336 
337 class BlockEffectControlMap {
338  public:
BlockEffectControlMap(Zone * temp_zone)339   explicit BlockEffectControlMap(Zone* temp_zone) : map_(temp_zone) {}
340 
For(BasicBlock * from,BasicBlock * to)341   BlockEffectControlData& For(BasicBlock* from, BasicBlock* to) {
342     return map_[std::make_pair(from->id().ToInt(), to->id().ToInt())];
343   }
344 
For(BasicBlock * from,BasicBlock * to) const345   const BlockEffectControlData& For(BasicBlock* from, BasicBlock* to) const {
346     return map_.at(std::make_pair(from->id().ToInt(), to->id().ToInt()));
347   }
348 
349  private:
350   using Key = std::pair<int32_t, int32_t>;
351   using Map = ZoneMap<Key, BlockEffectControlData>;
352 
353   Map map_;
354 };
355 
// Effect phis that need to be updated after the first pass. Loop headers are
// recorded here because their back-edge effects are not known until the loop
// body has been processed.
struct PendingEffectPhi {
  Node* effect_phi;   // The EffectPhi whose inputs still need rewiring.
  BasicBlock* block;  // The block the effect phi belongs to.

  PendingEffectPhi(Node* effect_phi, BasicBlock* block)
      : effect_phi(effect_phi), block(block) {}
};
364 
UpdateEffectPhi(Node * node,BasicBlock * block,BlockEffectControlMap * block_effects)365 void UpdateEffectPhi(Node* node, BasicBlock* block,
366                      BlockEffectControlMap* block_effects) {
367   // Update all inputs to an effect phi with the effects from the given
368   // block->effect map.
369   DCHECK_EQ(IrOpcode::kEffectPhi, node->opcode());
370   DCHECK_EQ(static_cast<size_t>(node->op()->EffectInputCount()),
371             block->PredecessorCount());
372   for (int i = 0; i < node->op()->EffectInputCount(); i++) {
373     Node* input = node->InputAt(i);
374     BasicBlock* predecessor = block->PredecessorAt(static_cast<size_t>(i));
375     const BlockEffectControlData& block_effect =
376         block_effects->For(predecessor, block);
377     Node* effect = block_effect.current_effect;
378     if (input != effect) {
379       node->ReplaceInput(i, effect);
380     }
381   }
382 }
383 
UpdateBlockControl(BasicBlock * block,BlockEffectControlMap * block_effects)384 void UpdateBlockControl(BasicBlock* block,
385                         BlockEffectControlMap* block_effects) {
386   Node* control = block->NodeAt(0);
387   DCHECK(NodeProperties::IsControl(control));
388 
389   // Do not rewire the end node.
390   if (control->opcode() == IrOpcode::kEnd) return;
391 
392   // Update all inputs to the given control node with the correct control.
393   DCHECK(control->opcode() == IrOpcode::kMerge ||
394          static_cast<size_t>(control->op()->ControlInputCount()) ==
395              block->PredecessorCount());
396   if (static_cast<size_t>(control->op()->ControlInputCount()) !=
397       block->PredecessorCount()) {
398     return;  // We already re-wired the control inputs of this node.
399   }
400   for (int i = 0; i < control->op()->ControlInputCount(); i++) {
401     Node* input = NodeProperties::GetControlInput(control, i);
402     BasicBlock* predecessor = block->PredecessorAt(static_cast<size_t>(i));
403     const BlockEffectControlData& block_effect =
404         block_effects->For(predecessor, block);
405     if (input != block_effect.current_control) {
406       NodeProperties::ReplaceControlInput(control, block_effect.current_control,
407                                           i);
408     }
409   }
410 }
411 
RemoveRenameNode(Node * node)412 void RemoveRenameNode(Node* node) {
413   DCHECK(IrOpcode::kFinishRegion == node->opcode() ||
414          IrOpcode::kBeginRegion == node->opcode() ||
415          IrOpcode::kTypeGuard == node->opcode());
416   // Update the value/context uses to the value input of the finish node and
417   // the effect uses to the effect input.
418   for (Edge edge : node->use_edges()) {
419     DCHECK(!edge.from()->IsDead());
420     if (NodeProperties::IsEffectEdge(edge)) {
421       edge.UpdateTo(NodeProperties::GetEffectInput(node));
422     } else {
423       DCHECK(!NodeProperties::IsControlEdge(edge));
424       DCHECK(!NodeProperties::IsFrameStateEdge(edge));
425       edge.UpdateTo(node->InputAt(0));
426     }
427   }
428   node->Kill();
429 }
430 
// Attempts to clone a Branch whose condition is a Phi over a Merge into one
// Branch per Merge predecessor, eliminating the Merge (and the materialized
// condition bit). Bails out (returns without changes) when the pattern does
// not match exactly; on success it updates |block_effects| for the two
// successor edges of |block|.
void TryCloneBranch(Node* node, BasicBlock* block, Zone* temp_zone,
                    Graph* graph, CommonOperatorBuilder* common,
                    BlockEffectControlMap* block_effects,
                    SourcePositionTable* source_positions,
                    NodeOriginTable* node_origins) {
  DCHECK_EQ(IrOpcode::kBranch, node->opcode());

  // This optimization is a special case of (super)block cloning. It takes an
  // input graph as shown below and clones the Branch node for every predecessor
  // to the Merge, essentially removing the Merge completely. This avoids
  // materializing the bit for the Phi and may offer potential for further
  // branch folding optimizations (i.e. because one or more inputs to the Phi is
  // a constant). Note that there may be more Phi nodes hanging off the Merge,
  // but we can only handle a certain subset of them currently (actually only
  // Phi and EffectPhi nodes whose uses have either the IfTrue or IfFalse as
  // control input).

  //   Control1 ... ControlN
  //      ^            ^
  //      |            |   Cond1 ... CondN
  //      +----+  +----+     ^         ^
  //           |  |          |         |
  //           |  |     +----+         |
  //          Merge<--+ | +------------+
  //            ^      \|/
  //            |      Phi
  //            |       |
  //          Branch----+
  //            ^
  //            |
  //      +-----+-----+
  //      |           |
  //    IfTrue     IfFalse
  //      ^           ^
  //      |           |

  // The resulting graph (modulo the Phi and EffectPhi nodes) looks like this:

  // Control1 Cond1 ... ControlN CondN
  //    ^      ^           ^      ^
  //    \      /           \      /
  //     Branch     ...     Branch
  //       ^                  ^
  //       |                  |
  //   +---+---+          +---+----+
  //   |       |          |        |
  // IfTrue IfFalse ... IfTrue  IfFalse
  //   ^       ^          ^        ^
  //   |       |          |        |
  //   +--+ +-------------+        |
  //      | |  +--------------+ +--+
  //      | |                 | |
  //     Merge               Merge
  //       ^                   ^
  //       |                   |

  SourcePositionTable::Scope scope(source_positions,
                                   source_positions->GetSourcePosition(node));
  NodeOriginTable::Scope origin_scope(node_origins, "clone branch", node);
  Node* branch = node;
  Node* cond = NodeProperties::GetValueInput(branch, 0);
  // The condition must be a Phi used only by this Branch.
  if (!cond->OwnedBy(branch) || cond->opcode() != IrOpcode::kPhi) return;
  Node* merge = NodeProperties::GetControlInput(branch);
  // The Phi must live on the Merge directly controlling the Branch.
  if (merge->opcode() != IrOpcode::kMerge ||
      NodeProperties::GetControlInput(cond) != merge) {
    return;
  }
  // Grab the IfTrue/IfFalse projections of the Branch.
  BranchMatcher matcher(branch);
  // Check/collect other Phi/EffectPhi nodes hanging off the Merge.
  NodeVector phis(temp_zone);
  for (Node* const use : merge->uses()) {
    if (use == branch || use == cond) continue;
    // We cannot currently deal with non-Phi/EffectPhi nodes hanging off the
    // Merge. Ideally, we would just clone the nodes (and everything that
    // depends on it to some distant join point), but that requires knowledge
    // about dominance/post-dominance.
    if (!NodeProperties::IsPhi(use)) return;
    for (Edge edge : use->use_edges()) {
      // Right now we can only handle Phi/EffectPhi nodes whose uses are
      // directly control-dependent on either the IfTrue or the IfFalse
      // successor, because we know exactly how to update those uses.
      if (edge.from()->op()->ControlInputCount() != 1) return;
      Node* control = NodeProperties::GetControlInput(edge.from());
      if (NodeProperties::IsPhi(edge.from())) {
        control = NodeProperties::GetControlInput(control, edge.index());
      }
      if (control != matcher.IfTrue() && control != matcher.IfFalse()) return;
    }
    phis.push_back(use);
  }
  BranchHint const hint = BranchHintOf(branch->op());
  int const input_count = merge->op()->ControlInputCount();
  DCHECK_LE(1, input_count);
  // Scratch array: first half collects the new IfTrue projections, second
  // half the new IfFalse projections; later reused for building phi inputs.
  Node** const inputs = graph->zone()->NewArray<Node*>(2 * input_count);
  Node** const merge_true_inputs = &inputs[0];
  Node** const merge_false_inputs = &inputs[input_count];
  // Clone one Branch (with its projections) per Merge predecessor.
  for (int index = 0; index < input_count; ++index) {
    Node* cond1 = NodeProperties::GetValueInput(cond, index);
    Node* control1 = NodeProperties::GetControlInput(merge, index);
    Node* branch1 = graph->NewNode(common->Branch(hint), cond1, control1);
    merge_true_inputs[index] = graph->NewNode(common->IfTrue(), branch1);
    merge_false_inputs[index] = graph->NewNode(common->IfFalse(), branch1);
  }
  // Repurpose the old IfTrue/IfFalse projections as the new Merges.
  Node* const merge_true = matcher.IfTrue();
  Node* const merge_false = matcher.IfFalse();
  merge_true->TrimInputCount(0);
  merge_false->TrimInputCount(0);
  for (int i = 0; i < input_count; ++i) {
    merge_true->AppendInput(graph->zone(), merge_true_inputs[i]);
    merge_false->AppendInput(graph->zone(), merge_false_inputs[i]);
  }
  DCHECK_EQ(2u, block->SuccessorCount());
  NodeProperties::ChangeOp(matcher.IfTrue(), common->Merge(input_count));
  NodeProperties::ChangeOp(matcher.IfFalse(), common->Merge(input_count));
  int const true_index =
      block->SuccessorAt(0)->NodeAt(0) == matcher.IfTrue() ? 0 : 1;
  BlockEffectControlData* true_block_data =
      &block_effects->For(block, block->SuccessorAt(true_index));
  BlockEffectControlData* false_block_data =
      &block_effects->For(block, block->SuccessorAt(true_index ^ 1));
  // Split every collected Phi/EffectPhi into one copy per new Merge and
  // rewire its uses according to which side they are control-dependent on.
  for (Node* const phi : phis) {
    for (int index = 0; index < input_count; ++index) {
      inputs[index] = phi->InputAt(index);
    }
    inputs[input_count] = merge_true;
    Node* phi_true = graph->NewNode(phi->op(), input_count + 1, inputs);
    inputs[input_count] = merge_false;
    Node* phi_false = graph->NewNode(phi->op(), input_count + 1, inputs);
    if (phi->UseCount() == 0) {
      DCHECK_EQ(phi->opcode(), IrOpcode::kEffectPhi);
    } else {
      for (Edge edge : phi->use_edges()) {
        Node* control = NodeProperties::GetControlInput(edge.from());
        if (NodeProperties::IsPhi(edge.from())) {
          control = NodeProperties::GetControlInput(control, edge.index());
        }
        DCHECK(control == matcher.IfTrue() || control == matcher.IfFalse());
        edge.UpdateTo((control == matcher.IfTrue()) ? phi_true : phi_false);
      }
    }
    if (phi->opcode() == IrOpcode::kEffectPhi) {
      true_block_data->current_effect = phi_true;
      false_block_data->current_effect = phi_false;
    }
    phi->Kill();
  }
  // Fix up IfTrue and IfFalse and kill all dead nodes.
  if (branch == block->control_input()) {
    true_block_data->current_control = merge_true;
    false_block_data->current_control = merge_false;
  }
  branch->Kill();
  cond->Kill();
  merge->Kill();
}
587 
588 }  // namespace
589 
Run()590 void EffectControlLinearizer::Run() {
591   BlockEffectControlMap block_effects(temp_zone());
592   ZoneVector<PendingEffectPhi> pending_effect_phis(temp_zone());
593   ZoneVector<BasicBlock*> pending_block_controls(temp_zone());
594   NodeVector inputs_buffer(temp_zone());
595 
596   // TODO(rmcilroy) We should not depend on having rpo_order on schedule, and
597   // instead just do our own RPO walk here.
598   for (BasicBlock* block : *(schedule()->rpo_order())) {
599     if (block != schedule()->start() && block->PredecessorCount() == 0) {
600       // Block has been removed from the schedule by a preceeding unreachable
601       // node, just skip it.
602       continue;
603     }
604 
605     gasm()->Reset(block);
606 
607     BasicBlock::iterator instr = block->begin();
608     BasicBlock::iterator end_instr = block->end();
609 
610     // The control node should be the first.
611     Node* control = *instr;
612     gasm()->AddNode(control);
613 
614     DCHECK(NodeProperties::IsControl(control));
615     bool has_incoming_backedge = IrOpcode::kLoop == control->opcode();
616     // Update the control inputs.
617     if (has_incoming_backedge) {
618       // If there are back edges, we need to update later because we have not
619       // computed the control yet.
620       pending_block_controls.push_back(block);
621     } else {
622       // If there are no back edges, we can update now.
623       UpdateBlockControl(block, &block_effects);
624     }
625     instr++;
626 
627     // Iterate over the phis and update the effect phis.
628     Node* effect_phi = nullptr;
629     Node* terminate = nullptr;
630     for (; instr != end_instr; instr++) {
631       Node* node = *instr;
632       // Only go through the phis and effect phis.
633       if (node->opcode() == IrOpcode::kEffectPhi) {
634         // There should be at most one effect phi in a block.
635         DCHECK_NULL(effect_phi);
636         // IfException blocks should not have effect phis.
637         DCHECK_NE(IrOpcode::kIfException, control->opcode());
638         effect_phi = node;
639       } else if (node->opcode() == IrOpcode::kPhi) {
640         // Just skip phis.
641       } else if (node->opcode() == IrOpcode::kTerminate) {
642         DCHECK_NULL(terminate);
643         terminate = node;
644       } else {
645         break;
646       }
647       gasm()->AddNode(node);
648     }
649 
650     if (effect_phi) {
651       // Make sure we update the inputs to the incoming blocks' effects.
652       if (has_incoming_backedge) {
653         // In case of loops, we do not update the effect phi immediately
654         // because the back predecessor has not been handled yet. We just
655         // record the effect phi for later processing.
656         pending_effect_phis.push_back(PendingEffectPhi(effect_phi, block));
657       } else {
658         UpdateEffectPhi(effect_phi, block, &block_effects);
659       }
660     }
661 
662     Node* effect = effect_phi;
663     if (effect == nullptr) {
664       // There was no effect phi.
665       if (block == schedule()->start()) {
666         // Start block => effect is start.
667         DCHECK_EQ(graph()->start(), control);
668         effect = graph()->start();
669       } else if (control->opcode() == IrOpcode::kEnd) {
670         // End block is just a dummy, no effect needed.
671         DCHECK_EQ(BasicBlock::kNone, block->control());
672         DCHECK_EQ(1u, block->size());
673         effect = nullptr;
674       } else {
675         // If all the predecessors have the same effect, we can use it as our
676         // current effect.
677         for (size_t i = 0; i < block->PredecessorCount(); ++i) {
678           const BlockEffectControlData& data =
679               block_effects.For(block->PredecessorAt(i), block);
680           if (!effect) effect = data.current_effect;
681           if (data.current_effect != effect) {
682             effect = nullptr;
683             break;
684           }
685         }
686         if (effect == nullptr) {
687           DCHECK_NE(IrOpcode::kIfException, control->opcode());
688           // The input blocks do not have the same effect. We have
689           // to create an effect phi node.
690           inputs_buffer.clear();
691           inputs_buffer.resize(block->PredecessorCount(), jsgraph()->Dead());
692           inputs_buffer.push_back(control);
693           effect = graph()->NewNode(
694               common()->EffectPhi(static_cast<int>(block->PredecessorCount())),
695               static_cast<int>(inputs_buffer.size()), &(inputs_buffer.front()));
696           gasm()->AddNode(effect);
697           // For loops, we update the effect phi node later to break cycles.
698           if (control->opcode() == IrOpcode::kLoop) {
699             pending_effect_phis.push_back(PendingEffectPhi(effect, block));
700           } else {
701             UpdateEffectPhi(effect, block, &block_effects);
702           }
703         } else if (control->opcode() == IrOpcode::kIfException) {
704           // The IfException is connected into the effect chain, so we need
705           // to update the effect here.
706           NodeProperties::ReplaceEffectInput(control, effect);
707           effect = control;
708         }
709       }
710     }
711 
712     // Fixup the Terminate node.
713     if (terminate != nullptr) {
714       NodeProperties::ReplaceEffectInput(terminate, effect);
715     }
716 
717     // The frame state at block entry is determined by the frame states leaving
718     // all predecessors. In case there is no frame state dominating this block,
719     // we can rely on a checkpoint being present before the next deoptimization.
720     Node* frame_state = nullptr;
721     if (block != schedule()->start()) {
722       // If all the predecessors have the same effect, we can use it
723       // as our current effect.
724       frame_state =
725           block_effects.For(block->PredecessorAt(0), block).current_frame_state;
726       for (size_t i = 1; i < block->PredecessorCount(); i++) {
727         if (block_effects.For(block->PredecessorAt(i), block)
728                 .current_frame_state != frame_state) {
729           frame_state = nullptr;
730           frame_state_zapper_ = graph()->end();
731           break;
732         }
733       }
734     }
735 
736     gasm()->InitializeEffectControl(effect, control);
737 
738     // Process the ordinary instructions.
739     for (; instr != end_instr; instr++) {
740       Node* node = *instr;
741       ProcessNode(node, &frame_state);
742     }
743 
744     block = gasm()->FinalizeCurrentBlock(block);
745 
746     switch (block->control()) {
747       case BasicBlock::kGoto:
748       case BasicBlock::kNone:
749         break;
750       case BasicBlock::kCall:
751       case BasicBlock::kTailCall:
752       case BasicBlock::kSwitch:
753       case BasicBlock::kReturn:
754       case BasicBlock::kDeoptimize:
755       case BasicBlock::kThrow:
756       case BasicBlock::kBranch:
757         UpdateEffectControlForNode(block->control_input());
758         gasm()->UpdateEffectControlWith(block->control_input());
759         break;
760     }
761 
762     if (!should_maintain_schedule() &&
763         block->control() == BasicBlock::kBranch) {
764       TryCloneBranch(block->control_input(), block, temp_zone(), graph(),
765                      common(), &block_effects, source_positions_,
766                      node_origins_);
767     }
768 
769     // Store the effect, control and frame state for later use.
770     for (BasicBlock* successor : block->successors()) {
771       BlockEffectControlData* data = &block_effects.For(block, successor);
772       if (data->current_effect == nullptr) {
773         data->current_effect = gasm()->effect();
774       }
775       if (data->current_control == nullptr) {
776         data->current_control = gasm()->control();
777       }
778       data->current_frame_state = frame_state;
779     }
780   }
781 
782   for (BasicBlock* pending_block_control : pending_block_controls) {
783     UpdateBlockControl(pending_block_control, &block_effects);
784   }
785   // Update the incoming edges of the effect phis that could not be processed
786   // during the first pass (because they could have incoming back edges).
787   for (const PendingEffectPhi& pending_effect_phi : pending_effect_phis) {
788     UpdateEffectPhi(pending_effect_phi.effect_phi, pending_effect_phi.block,
789                     &block_effects);
790   }
791 
792   schedule_->rpo_order()->clear();
793 }
794 
UpdateEffectControlForNode(Node * node)795 void EffectControlLinearizer::UpdateEffectControlForNode(Node* node) {
796   // If the node takes an effect, replace with the current one.
797   if (node->op()->EffectInputCount() > 0) {
798     DCHECK_EQ(1, node->op()->EffectInputCount());
799     NodeProperties::ReplaceEffectInput(node, gasm()->effect());
800   } else {
801     // New effect chain is only started with a Start or ValueEffect node.
802     DCHECK(node->op()->EffectOutputCount() == 0 ||
803            node->opcode() == IrOpcode::kStart);
804   }
805 
806   // Rewire control inputs.
807   for (int i = 0; i < node->op()->ControlInputCount(); i++) {
808     NodeProperties::ReplaceControlInput(node, gasm()->control(), i);
809   }
810 }
811 
// Wires one scheduled node into the linearized effect/control chain.
// {frame_state} is an in/out parameter holding the frame state that dominates
// this point (nullptr if none); it is refreshed at Checkpoints and zapped by
// observable effects so that eager deopts cannot use a stale frame state.
void EffectControlLinearizer::ProcessNode(Node* node, Node** frame_state) {
  SourcePositionTable::Scope scope(source_positions_,
                                   source_positions_->GetSourcePosition(node));
  NodeOriginTable::Scope origin_scope(node_origins_, "process node", node);

  // If basic block is unreachable after this point, update the node's effect
  // and control inputs to mark it as dead, but don't process further.
  if (gasm()->effect() == jsgraph()->Dead()) {
    UpdateEffectControlForNode(node);
    return;
  }

  // If the node needs to be wired into the effect/control chain, do this
  // here. Pass current frame state for lowering to eager deoptimization.
  if (TryWireInStateEffect(node, *frame_state)) {
    return;
  }

  // If the node has a visible effect, then there must be a checkpoint in the
  // effect chain before we are allowed to place another eager deoptimization
  // point. We zap the frame state to ensure this invariant is maintained.
  if (region_observability_ == RegionObservability::kObservable &&
      !node->op()->HasProperty(Operator::kNoWrite)) {
    *frame_state = nullptr;
    frame_state_zapper_ = node;
  }

  // Remove the end markers of 'atomic' allocation region because the
  // region should be wired-in now.
  if (node->opcode() == IrOpcode::kFinishRegion) {
    // Reset the current region observability.
    region_observability_ = RegionObservability::kObservable;
    // Update the value uses to the value input of the finish node and
    // the effect uses to the effect input.
    return RemoveRenameNode(node);
  }
  if (node->opcode() == IrOpcode::kBeginRegion) {
    // Determine the observability for this region and use that for all
    // nodes inside the region (i.e. ignore the absence of kNoWrite on
    // StoreField and other operators).
    DCHECK_NE(RegionObservability::kNotObservable, region_observability_);
    region_observability_ = RegionObservabilityOf(node->op());
    // Update the value uses to the value input of the finish node and
    // the effect uses to the effect input.
    return RemoveRenameNode(node);
  }
  if (node->opcode() == IrOpcode::kTypeGuard) {
    // TypeGuard is a pure renaming; unlink it from the linearized graph.
    return RemoveRenameNode(node);
  }

  // Special treatment for checkpoint nodes.
  if (node->opcode() == IrOpcode::kCheckpoint) {
    // Unlink the check point; effect uses will be updated to the incoming
    // effect that is passed. The frame state is preserved for lowering.
    DCHECK_EQ(RegionObservability::kObservable, region_observability_);
    *frame_state = NodeProperties::GetFrameStateInput(node);
    return;
  }

  // The IfSuccess nodes should always start a basic block (and basic block
  // start nodes are not handled in the ProcessNode method).
  DCHECK_NE(IrOpcode::kIfSuccess, node->opcode());

  // Otherwise splice the node into the current effect/control chain as-is.
  UpdateEffectControlForNode(node);

  gasm()->AddNode(node);

  if (node->opcode() == IrOpcode::kUnreachable) {
    // Break the effect chain on {Unreachable} and reconnect to the graph end.
    // Mark the following code for deletion by connecting to the {Dead} node.
    gasm()->ConnectUnreachableToEnd();
  }
}
885 
// Dispatches on {node}'s opcode and, if a lowering exists here, replaces the
// node with its lowered form wired into the current effect/control chain.
// Returns false for unhandled opcodes (the caller keeps the node as-is).
// {frame_state} is the dominating frame state for eager deoptimization; it
// may be nullptr if it was zapped since the last Checkpoint, which is fatal
// for lowerings that require one.
bool EffectControlLinearizer::TryWireInStateEffect(Node* node,
                                                   Node* frame_state) {
  // Lowerings that produce a value assign {result}; pure-effect lowerings
  // (checks, stores) leave it nullptr, which must match ValueOutputCount.
  Node* result = nullptr;
  switch (node->opcode()) {
    case IrOpcode::kChangeBitToTagged:
      result = LowerChangeBitToTagged(node);
      break;
    case IrOpcode::kChangeInt31ToTaggedSigned:
      result = LowerChangeInt31ToTaggedSigned(node);
      break;
    case IrOpcode::kChangeInt32ToTagged:
      result = LowerChangeInt32ToTagged(node);
      break;
    case IrOpcode::kChangeInt64ToTagged:
      result = LowerChangeInt64ToTagged(node);
      break;
    case IrOpcode::kChangeUint32ToTagged:
      result = LowerChangeUint32ToTagged(node);
      break;
    case IrOpcode::kChangeUint64ToTagged:
      result = LowerChangeUint64ToTagged(node);
      break;
    case IrOpcode::kChangeFloat64ToTagged:
      result = LowerChangeFloat64ToTagged(node);
      break;
    case IrOpcode::kChangeFloat64ToTaggedPointer:
      result = LowerChangeFloat64ToTaggedPointer(node);
      break;
    case IrOpcode::kChangeTaggedSignedToInt32:
      result = LowerChangeTaggedSignedToInt32(node);
      break;
    case IrOpcode::kChangeTaggedSignedToInt64:
      result = LowerChangeTaggedSignedToInt64(node);
      break;
    case IrOpcode::kChangeTaggedToBit:
      result = LowerChangeTaggedToBit(node);
      break;
    case IrOpcode::kChangeTaggedToInt32:
      result = LowerChangeTaggedToInt32(node);
      break;
    case IrOpcode::kChangeTaggedToUint32:
      result = LowerChangeTaggedToUint32(node);
      break;
    case IrOpcode::kChangeTaggedToInt64:
      result = LowerChangeTaggedToInt64(node);
      break;
    case IrOpcode::kChangeTaggedToFloat64:
      result = LowerChangeTaggedToFloat64(node);
      break;
    case IrOpcode::kChangeTaggedToTaggedSigned:
      result = LowerChangeTaggedToTaggedSigned(node);
      break;
    case IrOpcode::kTruncateTaggedToBit:
      result = LowerTruncateTaggedToBit(node);
      break;
    case IrOpcode::kTruncateTaggedPointerToBit:
      result = LowerTruncateTaggedPointerToBit(node);
      break;
    case IrOpcode::kTruncateTaggedToFloat64:
      result = LowerTruncateTaggedToFloat64(node);
      break;
    case IrOpcode::kPoisonIndex:
      result = LowerPoisonIndex(node);
      break;
    case IrOpcode::kCheckClosure:
      result = LowerCheckClosure(node, frame_state);
      break;
    case IrOpcode::kCheckMaps:
      LowerCheckMaps(node, frame_state);
      break;
    case IrOpcode::kDynamicCheckMaps:
      LowerDynamicCheckMaps(node, frame_state);
      break;
    case IrOpcode::kCompareMaps:
      result = LowerCompareMaps(node);
      break;
    case IrOpcode::kCheckNumber:
      result = LowerCheckNumber(node, frame_state);
      break;
    case IrOpcode::kCheckReceiver:
      result = LowerCheckReceiver(node, frame_state);
      break;
    case IrOpcode::kCheckReceiverOrNullOrUndefined:
      result = LowerCheckReceiverOrNullOrUndefined(node, frame_state);
      break;
    case IrOpcode::kCheckSymbol:
      result = LowerCheckSymbol(node, frame_state);
      break;
    case IrOpcode::kCheckString:
      result = LowerCheckString(node, frame_state);
      break;
    case IrOpcode::kCheckBigInt:
      result = LowerCheckBigInt(node, frame_state);
      break;
    case IrOpcode::kCheckInternalizedString:
      result = LowerCheckInternalizedString(node, frame_state);
      break;
    case IrOpcode::kCheckIf:
      LowerCheckIf(node, frame_state);
      break;
    case IrOpcode::kCheckedInt32Add:
      result = LowerCheckedInt32Add(node, frame_state);
      break;
    case IrOpcode::kCheckedInt32Sub:
      result = LowerCheckedInt32Sub(node, frame_state);
      break;
    case IrOpcode::kCheckedInt32Div:
      result = LowerCheckedInt32Div(node, frame_state);
      break;
    case IrOpcode::kCheckedInt32Mod:
      result = LowerCheckedInt32Mod(node, frame_state);
      break;
    case IrOpcode::kCheckedUint32Div:
      result = LowerCheckedUint32Div(node, frame_state);
      break;
    case IrOpcode::kCheckedUint32Mod:
      result = LowerCheckedUint32Mod(node, frame_state);
      break;
    case IrOpcode::kCheckedInt32Mul:
      result = LowerCheckedInt32Mul(node, frame_state);
      break;
    case IrOpcode::kCheckedInt32ToTaggedSigned:
      result = LowerCheckedInt32ToTaggedSigned(node, frame_state);
      break;
    case IrOpcode::kCheckedInt64ToInt32:
      result = LowerCheckedInt64ToInt32(node, frame_state);
      break;
    case IrOpcode::kCheckedInt64ToTaggedSigned:
      result = LowerCheckedInt64ToTaggedSigned(node, frame_state);
      break;
    case IrOpcode::kCheckedUint32Bounds:
      result = LowerCheckedUint32Bounds(node, frame_state);
      break;
    case IrOpcode::kCheckedUint32ToInt32:
      result = LowerCheckedUint32ToInt32(node, frame_state);
      break;
    case IrOpcode::kCheckedUint32ToTaggedSigned:
      result = LowerCheckedUint32ToTaggedSigned(node, frame_state);
      break;
    case IrOpcode::kCheckedUint64Bounds:
      result = LowerCheckedUint64Bounds(node, frame_state);
      break;
    case IrOpcode::kCheckedUint64ToInt32:
      result = LowerCheckedUint64ToInt32(node, frame_state);
      break;
    case IrOpcode::kCheckedUint64ToTaggedSigned:
      result = LowerCheckedUint64ToTaggedSigned(node, frame_state);
      break;
    case IrOpcode::kCheckedFloat64ToInt32:
      result = LowerCheckedFloat64ToInt32(node, frame_state);
      break;
    case IrOpcode::kCheckedFloat64ToInt64:
      result = LowerCheckedFloat64ToInt64(node, frame_state);
      break;
    case IrOpcode::kCheckedTaggedSignedToInt32:
      // This lowering requires a frame state; diagnose which node zapped it
      // if the invariant is violated.
      if (frame_state == nullptr) {
        FATAL("No frame state (zapped by #%d: %s)", frame_state_zapper_->id(),
              frame_state_zapper_->op()->mnemonic());
      }
      result = LowerCheckedTaggedSignedToInt32(node, frame_state);
      break;
    case IrOpcode::kCheckedTaggedToArrayIndex:
      result = LowerCheckedTaggedToArrayIndex(node, frame_state);
      break;
    case IrOpcode::kCheckedTaggedToInt32:
      result = LowerCheckedTaggedToInt32(node, frame_state);
      break;
    case IrOpcode::kCheckedTaggedToInt64:
      result = LowerCheckedTaggedToInt64(node, frame_state);
      break;
    case IrOpcode::kCheckedTaggedToFloat64:
      result = LowerCheckedTaggedToFloat64(node, frame_state);
      break;
    case IrOpcode::kCheckedTaggedToTaggedSigned:
      result = LowerCheckedTaggedToTaggedSigned(node, frame_state);
      break;
    case IrOpcode::kCheckedTaggedToTaggedPointer:
      result = LowerCheckedTaggedToTaggedPointer(node, frame_state);
      break;
    case IrOpcode::kBigIntAsUintN:
      result = LowerBigIntAsUintN(node, frame_state);
      break;
    case IrOpcode::kChangeUint64ToBigInt:
      result = LowerChangeUint64ToBigInt(node);
      break;
    case IrOpcode::kTruncateBigIntToUint64:
      result = LowerTruncateBigIntToUint64(node);
      break;
    case IrOpcode::kTruncateTaggedToWord32:
      result = LowerTruncateTaggedToWord32(node);
      break;
    case IrOpcode::kCheckedTruncateTaggedToWord32:
      result = LowerCheckedTruncateTaggedToWord32(node, frame_state);
      break;
    case IrOpcode::kNumberToString:
      result = LowerNumberToString(node);
      break;
    case IrOpcode::kObjectIsArrayBufferView:
      result = LowerObjectIsArrayBufferView(node);
      break;
    case IrOpcode::kObjectIsBigInt:
      result = LowerObjectIsBigInt(node);
      break;
    case IrOpcode::kObjectIsCallable:
      result = LowerObjectIsCallable(node);
      break;
    case IrOpcode::kObjectIsConstructor:
      result = LowerObjectIsConstructor(node);
      break;
    case IrOpcode::kObjectIsDetectableCallable:
      result = LowerObjectIsDetectableCallable(node);
      break;
    case IrOpcode::kObjectIsMinusZero:
      result = LowerObjectIsMinusZero(node);
      break;
    case IrOpcode::kNumberIsMinusZero:
      result = LowerNumberIsMinusZero(node);
      break;
    case IrOpcode::kObjectIsNaN:
      result = LowerObjectIsNaN(node);
      break;
    case IrOpcode::kNumberIsNaN:
      result = LowerNumberIsNaN(node);
      break;
    case IrOpcode::kObjectIsNonCallable:
      result = LowerObjectIsNonCallable(node);
      break;
    case IrOpcode::kObjectIsNumber:
      result = LowerObjectIsNumber(node);
      break;
    case IrOpcode::kObjectIsReceiver:
      result = LowerObjectIsReceiver(node);
      break;
    case IrOpcode::kObjectIsSmi:
      result = LowerObjectIsSmi(node);
      break;
    case IrOpcode::kObjectIsString:
      result = LowerObjectIsString(node);
      break;
    case IrOpcode::kObjectIsSymbol:
      result = LowerObjectIsSymbol(node);
      break;
    case IrOpcode::kObjectIsUndetectable:
      result = LowerObjectIsUndetectable(node);
      break;
    case IrOpcode::kArgumentsFrame:
      result = LowerArgumentsFrame(node);
      break;
    case IrOpcode::kArgumentsLength:
      result = LowerArgumentsLength(node);
      break;
    case IrOpcode::kRestLength:
      result = LowerRestLength(node);
      break;
    case IrOpcode::kToBoolean:
      result = LowerToBoolean(node);
      break;
    case IrOpcode::kTypeOf:
      result = LowerTypeOf(node);
      break;
    case IrOpcode::kTierUpCheck:
      LowerTierUpCheck(node);
      break;
    case IrOpcode::kUpdateInterruptBudget:
      LowerUpdateInterruptBudget(node);
      break;
    case IrOpcode::kNewDoubleElements:
      result = LowerNewDoubleElements(node);
      break;
    case IrOpcode::kNewSmiOrObjectElements:
      result = LowerNewSmiOrObjectElements(node);
      break;
    case IrOpcode::kNewArgumentsElements:
      result = LowerNewArgumentsElements(node);
      break;
    case IrOpcode::kNewConsString:
      result = LowerNewConsString(node);
      break;
    case IrOpcode::kSameValue:
      result = LowerSameValue(node);
      break;
    case IrOpcode::kSameValueNumbersOnly:
      result = LowerSameValueNumbersOnly(node);
      break;
    case IrOpcode::kNumberSameValue:
      result = LowerNumberSameValue(node);
      break;
    case IrOpcode::kDeadValue:
      result = LowerDeadValue(node);
      break;
    case IrOpcode::kStringConcat:
      result = LowerStringConcat(node);
      break;
    case IrOpcode::kStringFromSingleCharCode:
      result = LowerStringFromSingleCharCode(node);
      break;
    case IrOpcode::kStringFromSingleCodePoint:
      result = LowerStringFromSingleCodePoint(node);
      break;
    case IrOpcode::kStringIndexOf:
      result = LowerStringIndexOf(node);
      break;
    case IrOpcode::kStringFromCodePointAt:
      result = LowerStringFromCodePointAt(node);
      break;
    case IrOpcode::kStringLength:
      result = LowerStringLength(node);
      break;
    case IrOpcode::kStringToNumber:
      result = LowerStringToNumber(node);
      break;
    case IrOpcode::kStringCharCodeAt:
      result = LowerStringCharCodeAt(node);
      break;
    case IrOpcode::kStringCodePointAt:
      result = LowerStringCodePointAt(node);
      break;
    case IrOpcode::kStringToLowerCaseIntl:
      result = LowerStringToLowerCaseIntl(node);
      break;
    case IrOpcode::kStringToUpperCaseIntl:
      result = LowerStringToUpperCaseIntl(node);
      break;
    case IrOpcode::kStringSubstring:
      result = LowerStringSubstring(node);
      break;
    case IrOpcode::kStringEqual:
      result = LowerStringEqual(node);
      break;
    case IrOpcode::kStringLessThan:
      result = LowerStringLessThan(node);
      break;
    case IrOpcode::kStringLessThanOrEqual:
      result = LowerStringLessThanOrEqual(node);
      break;
    case IrOpcode::kBigIntAdd:
      result = LowerBigIntAdd(node, frame_state);
      break;
    case IrOpcode::kBigIntSubtract:
      result = LowerBigIntSubtract(node, frame_state);
      break;
    case IrOpcode::kBigIntNegate:
      result = LowerBigIntNegate(node);
      break;
    case IrOpcode::kNumberIsFloat64Hole:
      result = LowerNumberIsFloat64Hole(node);
      break;
    case IrOpcode::kNumberIsFinite:
      result = LowerNumberIsFinite(node);
      break;
    case IrOpcode::kObjectIsFiniteNumber:
      result = LowerObjectIsFiniteNumber(node);
      break;
    case IrOpcode::kNumberIsInteger:
      result = LowerNumberIsInteger(node);
      break;
    case IrOpcode::kObjectIsInteger:
      result = LowerObjectIsInteger(node);
      break;
    case IrOpcode::kNumberIsSafeInteger:
      result = LowerNumberIsSafeInteger(node);
      break;
    case IrOpcode::kObjectIsSafeInteger:
      result = LowerObjectIsSafeInteger(node);
      break;
    case IrOpcode::kCheckFloat64Hole:
      result = LowerCheckFloat64Hole(node, frame_state);
      break;
    case IrOpcode::kCheckNotTaggedHole:
      result = LowerCheckNotTaggedHole(node, frame_state);
      break;
    case IrOpcode::kConvertTaggedHoleToUndefined:
      result = LowerConvertTaggedHoleToUndefined(node);
      break;
    case IrOpcode::kCheckEqualsInternalizedString:
      LowerCheckEqualsInternalizedString(node, frame_state);
      break;
    case IrOpcode::kAllocate:
      result = LowerAllocate(node);
      break;
    case IrOpcode::kCheckEqualsSymbol:
      LowerCheckEqualsSymbol(node, frame_state);
      break;
    case IrOpcode::kPlainPrimitiveToNumber:
      result = LowerPlainPrimitiveToNumber(node);
      break;
    case IrOpcode::kPlainPrimitiveToWord32:
      result = LowerPlainPrimitiveToWord32(node);
      break;
    case IrOpcode::kPlainPrimitiveToFloat64:
      result = LowerPlainPrimitiveToFloat64(node);
      break;
    case IrOpcode::kEnsureWritableFastElements:
      result = LowerEnsureWritableFastElements(node);
      break;
    case IrOpcode::kMaybeGrowFastElements:
      result = LowerMaybeGrowFastElements(node, frame_state);
      break;
    case IrOpcode::kTransitionElementsKind:
      LowerTransitionElementsKind(node);
      break;
    case IrOpcode::kLoadMessage:
      result = LowerLoadMessage(node);
      break;
    case IrOpcode::kStoreMessage:
      LowerStoreMessage(node);
      break;
    case IrOpcode::kFastApiCall:
      result = LowerFastApiCall(node);
      break;
    case IrOpcode::kLoadFieldByIndex:
      result = LowerLoadFieldByIndex(node);
      break;
    case IrOpcode::kLoadTypedElement:
      result = LowerLoadTypedElement(node);
      break;
    case IrOpcode::kLoadDataViewElement:
      result = LowerLoadDataViewElement(node);
      break;
    case IrOpcode::kLoadStackArgument:
      result = LowerLoadStackArgument(node);
      break;
    case IrOpcode::kStoreTypedElement:
      LowerStoreTypedElement(node);
      break;
    case IrOpcode::kStoreDataViewElement:
      LowerStoreDataViewElement(node);
      break;
    case IrOpcode::kStoreSignedSmallElement:
      LowerStoreSignedSmallElement(node);
      break;
    case IrOpcode::kFindOrderedHashMapEntry:
      result = LowerFindOrderedHashMapEntry(node);
      break;
    case IrOpcode::kFindOrderedHashMapEntryForInt32Key:
      result = LowerFindOrderedHashMapEntryForInt32Key(node);
      break;
    case IrOpcode::kTransitionAndStoreNumberElement:
      LowerTransitionAndStoreNumberElement(node);
      break;
    case IrOpcode::kTransitionAndStoreNonNumberElement:
      LowerTransitionAndStoreNonNumberElement(node);
      break;
    case IrOpcode::kTransitionAndStoreElement:
      LowerTransitionAndStoreElement(node);
      break;
    case IrOpcode::kRuntimeAbort:
      LowerRuntimeAbort(node);
      break;
    case IrOpcode::kAssertType:
      result = LowerAssertType(node);
      break;
    case IrOpcode::kConvertReceiver:
      result = LowerConvertReceiver(node);
      break;
    case IrOpcode::kFloat64RoundUp:
      // The Float64Round* lowerings are fallible: if the machine has no
      // suitable instruction, leave the node for later lowering.
      if (!LowerFloat64RoundUp(node).To(&result)) {
        return false;
      }
      break;
    case IrOpcode::kFloat64RoundDown:
      if (!LowerFloat64RoundDown(node).To(&result)) {
        return false;
      }
      break;
    case IrOpcode::kFloat64RoundTruncate:
      if (!LowerFloat64RoundTruncate(node).To(&result)) {
        return false;
      }
      break;
    case IrOpcode::kFloat64RoundTiesEven:
      if (!LowerFloat64RoundTiesEven(node).To(&result)) {
        return false;
      }
      break;
    case IrOpcode::kDateNow:
      result = LowerDateNow(node);
      break;
    case IrOpcode::kFoldConstant:
      result = LowerFoldConstant(node);
      break;
    default:
      return false;
  }

  // A lowering must produce exactly as many values as the original operator.
  if ((result ? 1 : 0) != node->op()->ValueOutputCount()) {
    FATAL(
        "Effect control linearizer lowering of '%s':"
        " value output count does not agree.",
        node->op()->mnemonic());
  }

  // Redirect all uses of {node} to the lowered value and the current
  // effect/control produced by the lowering.
  NodeProperties::ReplaceUses(node, result, gasm()->effect(),
                              gasm()->control());
  return true;
}
1382 
1383 #define __ gasm()->
1384 
// Converts a float64 into a tagged Number: a Smi when the value is a
// Smi-representable integer (respecting the operator's minus-zero mode),
// otherwise a freshly allocated HeapNumber.
Node* EffectControlLinearizer::LowerChangeFloat64ToTagged(Node* node) {
  CheckForMinusZeroMode mode = CheckMinusZeroModeOf(node->op());
  Node* value = node->InputAt(0);

  auto done = __ MakeLabel(MachineRepresentation::kTagged);
  auto if_heapnumber = __ MakeDeferredLabel();
  auto if_int32 = __ MakeLabel();

  // Check whether the value round-trips through int32; if not it cannot be
  // a Smi and must be boxed.
  Node* value32 = __ RoundFloat64ToInt32(value);
  __ GotoIf(__ Float64Equal(value, __ ChangeInt32ToFloat64(value32)),
            &if_int32);
  __ Goto(&if_heapnumber);

  __ Bind(&if_int32);
  {
    if (mode == CheckForMinusZeroMode::kCheckForMinusZero) {
      Node* zero = __ Int32Constant(0);
      auto if_zero = __ MakeDeferredLabel();
      auto if_smi = __ MakeLabel();

      __ GotoIf(__ Word32Equal(value32, zero), &if_zero);
      __ Goto(&if_smi);

      __ Bind(&if_zero);
      {
        // In case of 0, we need to check the high bits for the IEEE -0 pattern.
        __ GotoIf(__ Int32LessThan(__ Float64ExtractHighWord32(value), zero),
                  &if_heapnumber);
        __ Goto(&if_smi);
      }

      __ Bind(&if_smi);
    }

    if (SmiValuesAre32Bits()) {
      // 32-bit Smi payloads: every int32 fits, tag unconditionally.
      Node* value_smi = ChangeInt32ToSmi(value32);
      __ Goto(&done, value_smi);
    } else {
      // 31-bit Smi payloads: tagging may overflow; fall back to HeapNumber.
      SmiTagOrOverflow(value32, &if_heapnumber, &done);
    }
  }

  __ Bind(&if_heapnumber);
  {
    Node* value_number = AllocateHeapNumberWithValue(value);
    __ Goto(&done, value_number);
  }

  __ Bind(&done);
  return done.PhiAt(0);
}
1436 
LowerChangeFloat64ToTaggedPointer(Node * node)1437 Node* EffectControlLinearizer::LowerChangeFloat64ToTaggedPointer(Node* node) {
1438   Node* value = node->InputAt(0);
1439   return AllocateHeapNumberWithValue(value);
1440 }
1441 
LowerChangeBitToTagged(Node * node)1442 Node* EffectControlLinearizer::LowerChangeBitToTagged(Node* node) {
1443   Node* value = node->InputAt(0);
1444 
1445   auto if_true = __ MakeLabel();
1446   auto done = __ MakeLabel(MachineRepresentation::kTagged);
1447 
1448   __ GotoIf(value, &if_true);
1449   __ Goto(&done, __ FalseConstant());
1450 
1451   __ Bind(&if_true);
1452   __ Goto(&done, __ TrueConstant());
1453 
1454   __ Bind(&done);
1455   return done.PhiAt(0);
1456 }
1457 
LowerChangeInt31ToTaggedSigned(Node * node)1458 Node* EffectControlLinearizer::LowerChangeInt31ToTaggedSigned(Node* node) {
1459   Node* value = node->InputAt(0);
1460   return ChangeInt32ToSmi(value);
1461 }
1462 
// Lowers ChangeInt32ToTagged: tags the int32 input as a Smi when it fits and
// otherwise allocates a HeapNumber holding the value.
Node* EffectControlLinearizer::LowerChangeInt32ToTagged(Node* node) {
  Node* value = node->InputAt(0);

  // With 32-bit Smi payloads every int32 is representable as a Smi, so no
  // overflow check is needed.
  if (SmiValuesAre32Bits()) {
    return ChangeInt32ToSmi(value);
  }
  DCHECK(SmiValuesAre31Bits());

  auto if_overflow = __ MakeDeferredLabel();
  auto done = __ MakeLabel(MachineRepresentation::kTagged);

  // Smi-tag with an overflow check; jumps to {if_overflow} when the value
  // does not fit into a 31-bit Smi, otherwise to {done} with the Smi.
  SmiTagOrOverflow(value, &if_overflow, &done);

  __ Bind(&if_overflow);
  Node* number = AllocateHeapNumberWithValue(__ ChangeInt32ToFloat64(value));
  __ Goto(&done, number);

  __ Bind(&done);
  return done.PhiAt(0);
}
1483 
// Lowers ChangeInt64ToTagged: produces a Smi when the int64 input is in Smi
// range, and a freshly allocated HeapNumber otherwise.
Node* EffectControlLinearizer::LowerChangeInt64ToTagged(Node* node) {
  Node* value = node->InputAt(0);

  auto if_not_in_smi_range = __ MakeDeferredLabel();
  auto done = __ MakeLabel(MachineRepresentation::kTagged);

  // The value fits into 32 bits iff sign-extending its low word yields the
  // original value again.
  Node* value32 = __ TruncateInt64ToInt32(value);
  __ GotoIfNot(__ Word64Equal(__ ChangeInt32ToInt64(value32), value),
               &if_not_in_smi_range);

  if (SmiValuesAre32Bits()) {
    // Every int32 fits into a 32-bit Smi payload; tag unconditionally.
    Node* value_smi = ChangeInt64ToSmi(value);
    __ Goto(&done, value_smi);
  } else {
    // 31-bit Smis: tag the low word with an overflow check.
    SmiTagOrOverflow(value32, &if_not_in_smi_range, &done);
  }

  __ Bind(&if_not_in_smi_range);
  Node* number = AllocateHeapNumberWithValue(__ ChangeInt64ToFloat64(value));
  __ Goto(&done, number);

  __ Bind(&done);
  return done.PhiAt(0);
}
1508 
// Lowers ChangeUint32ToTagged: a Smi when the unsigned value is at most
// Smi::kMaxValue, otherwise a HeapNumber with the (exact) double value.
Node* EffectControlLinearizer::LowerChangeUint32ToTagged(Node* node) {
  Node* value = node->InputAt(0);

  auto if_not_in_smi_range = __ MakeDeferredLabel();
  auto done = __ MakeLabel(MachineRepresentation::kTagged);

  // Unsigned comparison against the max Smi value covers both the range and
  // the sign in one check.
  Node* check = __ Uint32LessThanOrEqual(value, SmiMaxValueConstant());
  __ GotoIfNot(check, &if_not_in_smi_range);
  __ Goto(&done, ChangeUint32ToSmi(value));

  __ Bind(&if_not_in_smi_range);
  Node* number = AllocateHeapNumberWithValue(__ ChangeUint32ToFloat64(value));

  __ Goto(&done, number);
  __ Bind(&done);

  return done.PhiAt(0);
}
1527 
// Lowers ChangeUint64ToTagged: a Smi when the unsigned value is at most
// Smi::kMaxValue, otherwise a HeapNumber.
Node* EffectControlLinearizer::LowerChangeUint64ToTagged(Node* node) {
  Node* value = node->InputAt(0);

  auto if_not_in_smi_range = __ MakeDeferredLabel();
  auto done = __ MakeLabel(MachineRepresentation::kTagged);

  Node* check =
      __ Uint64LessThanOrEqual(value, __ Int64Constant(Smi::kMaxValue));
  __ GotoIfNot(check, &if_not_in_smi_range);
  __ Goto(&done, ChangeInt64ToSmi(value));

  __ Bind(&if_not_in_smi_range);
  // NOTE(review): the fallback uses the *signed* int64->float64 conversion;
  // this assumes the typer guarantees the input never has the top bit set —
  // TODO confirm against the operator's input type.
  Node* number = AllocateHeapNumberWithValue(__ ChangeInt64ToFloat64(value));

  __ Goto(&done, number);
  __ Bind(&done);

  return done.PhiAt(0);
}
1547 
LowerChangeTaggedSignedToInt32(Node * node)1548 Node* EffectControlLinearizer::LowerChangeTaggedSignedToInt32(Node* node) {
1549   Node* value = node->InputAt(0);
1550   return ChangeSmiToInt32(value);
1551 }
1552 
LowerChangeTaggedSignedToInt64(Node * node)1553 Node* EffectControlLinearizer::LowerChangeTaggedSignedToInt64(Node* node) {
1554   Node* value = node->InputAt(0);
1555   return ChangeSmiToInt64(value);
1556 }
1557 
LowerChangeTaggedToBit(Node * node)1558 Node* EffectControlLinearizer::LowerChangeTaggedToBit(Node* node) {
1559   Node* value = node->InputAt(0);
1560   return __ TaggedEqual(value, __ TrueConstant());
1561 }
1562 
// Emits the heap-object part of ToBoolean truncation for {node}'s input and
// jumps to {done} with the resulting bit (0 = falsy, 1 = truthy). Callers are
// responsible for handling Smi inputs before reaching this code.
void EffectControlLinearizer::TruncateTaggedPointerToBit(
    Node* node, GraphAssemblerLabel<1>* done) {
  Node* value = node->InputAt(0);

  auto if_heapnumber = __ MakeDeferredLabel();
  auto if_bigint = __ MakeDeferredLabel();

  Node* zero = __ Int32Constant(0);
  Node* fzero = __ Float64Constant(0.0);

  // Check if {value} is false.
  __ GotoIf(__ TaggedEqual(value, __ FalseConstant()), done, zero);

  // Check if {value} is the empty string.
  __ GotoIf(__ TaggedEqual(value, __ EmptyStringConstant()), done, zero);

  // Load the map of {value}.
  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);

  // Check if the {value} is undetectable and immediately return false.
  // This includes undefined and null.
  Node* value_map_bitfield =
      __ LoadField(AccessBuilder::ForMapBitField(), value_map);
  __ GotoIfNot(
      __ Word32Equal(
          __ Word32And(value_map_bitfield,
                       __ Int32Constant(Map::Bits1::IsUndetectableBit::kMask)),
          zero),
      done, zero);

  // Check if {value} is a HeapNumber.
  __ GotoIf(__ TaggedEqual(value_map, __ HeapNumberMapConstant()),
            &if_heapnumber);

  // Check if {value} is a BigInt.
  __ GotoIf(__ TaggedEqual(value_map, __ BigIntMapConstant()), &if_bigint);

  // All other values that reach here are true.
  __ Goto(done, __ Int32Constant(1));

  __ Bind(&if_heapnumber);
  {
    // For HeapNumber {value}, just check that its value is not 0.0, -0.0 or
    // NaN. (0 < |x| is false exactly for +/-0 and NaN.)
    Node* value_value =
        __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
    __ Goto(done, __ Float64LessThan(fzero, __ Float64Abs(value_value)));
  }

  __ Bind(&if_bigint);
  {
    // A BigInt is falsy iff its length (stored in the bitfield) is zero; the
    // final Word32Equal inverts {length_is_zero} into the truthiness bit.
    Node* bitfield = __ LoadField(AccessBuilder::ForBigIntBitfield(), value);
    Node* length_is_zero = __ Word32Equal(
        __ Word32And(bitfield, __ Int32Constant(BigInt::LengthBits::kMask)),
        __ Int32Constant(0));
    __ Goto(done, __ Word32Equal(length_is_zero, zero));
  }
}
1621 
// Lowers TruncateTaggedToBit (full ToBoolean truncation): dispatches Smis to
// a simple zero check and heap objects to TruncateTaggedPointerToBit.
Node* EffectControlLinearizer::LowerTruncateTaggedToBit(Node* node) {
  auto done = __ MakeLabel(MachineRepresentation::kBit);
  auto if_smi = __ MakeDeferredLabel();

  Node* value = node->InputAt(0);
  __ GotoIf(ObjectIsSmi(value), &if_smi);

  // Heap-object cases; the helper jumps to {done} with the resulting bit.
  TruncateTaggedPointerToBit(node, &done);

  __ Bind(&if_smi);
  {
    // If {value} is a Smi, then we only need to check that it's not zero.
    __ Goto(&done, __ Word32Equal(__ TaggedEqual(value, __ SmiConstant(0)),
                                  __ Int32Constant(0)));
  }

  __ Bind(&done);
  return done.PhiAt(0);
}
1641 
// Lowers TruncateTaggedPointerToBit: like TruncateTaggedToBit but the input
// is already known to be a heap pointer, so no Smi dispatch is needed.
Node* EffectControlLinearizer::LowerTruncateTaggedPointerToBit(Node* node) {
  auto done = __ MakeLabel(MachineRepresentation::kBit);

  TruncateTaggedPointerToBit(node, &done);

  __ Bind(&done);
  return done.PhiAt(0);
}
1650 
// Lowers ChangeTaggedToInt32: untags Smis directly; for heap objects loads
// the float64 payload and converts it to int32.
Node* EffectControlLinearizer::LowerChangeTaggedToInt32(Node* node) {
  Node* value = node->InputAt(0);

  auto if_not_smi = __ MakeDeferredLabel();
  auto done = __ MakeLabel(MachineRepresentation::kWord32);

  Node* check = ObjectIsSmi(value);
  __ GotoIfNot(check, &if_not_smi);
  __ Goto(&done, ChangeSmiToInt32(value));

  __ Bind(&if_not_smi);
  // The offsets coincide, so the HeapNumber value load below also reads an
  // Oddball's to_number_raw field.
  STATIC_ASSERT_FIELD_OFFSETS_EQUAL(HeapNumber::kValueOffset,
                                    Oddball::kToNumberRawOffset);
  Node* vfalse = __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
  vfalse = __ ChangeFloat64ToInt32(vfalse);
  __ Goto(&done, vfalse);

  __ Bind(&done);
  return done.PhiAt(0);
}
1671 
// Lowers ChangeTaggedToUint32: untags Smis directly; for heap objects loads
// the float64 payload and converts it to uint32.
Node* EffectControlLinearizer::LowerChangeTaggedToUint32(Node* node) {
  Node* value = node->InputAt(0);

  auto if_not_smi = __ MakeDeferredLabel();
  auto done = __ MakeLabel(MachineRepresentation::kWord32);

  Node* check = ObjectIsSmi(value);
  __ GotoIfNot(check, &if_not_smi);
  __ Goto(&done, ChangeSmiToInt32(value));

  __ Bind(&if_not_smi);
  // The offsets coincide, so the HeapNumber value load below also reads an
  // Oddball's to_number_raw field.
  STATIC_ASSERT_FIELD_OFFSETS_EQUAL(HeapNumber::kValueOffset,
                                    Oddball::kToNumberRawOffset);
  Node* vfalse = __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
  vfalse = __ ChangeFloat64ToUint32(vfalse);
  __ Goto(&done, vfalse);

  __ Bind(&done);
  return done.PhiAt(0);
}
1692 
// Lowers ChangeTaggedToInt64: untags Smis directly; for heap objects loads
// the float64 payload and converts it to int64.
Node* EffectControlLinearizer::LowerChangeTaggedToInt64(Node* node) {
  Node* value = node->InputAt(0);

  auto if_not_smi = __ MakeDeferredLabel();
  auto done = __ MakeLabel(MachineRepresentation::kWord64);

  Node* check = ObjectIsSmi(value);
  __ GotoIfNot(check, &if_not_smi);
  __ Goto(&done, ChangeSmiToInt64(value));

  __ Bind(&if_not_smi);
  // The offsets coincide, so the HeapNumber value load below also reads an
  // Oddball's to_number_raw field.
  STATIC_ASSERT_FIELD_OFFSETS_EQUAL(HeapNumber::kValueOffset,
                                    Oddball::kToNumberRawOffset);
  Node* vfalse = __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
  vfalse = __ ChangeFloat64ToInt64(vfalse);
  __ Goto(&done, vfalse);

  __ Bind(&done);
  return done.PhiAt(0);
}
1713 
// ChangeTaggedToFloat64 is lowered exactly like TruncateTaggedToFloat64: for
// the inputs both operators accept (Smi or HeapNumber/Oddball payload), the
// change and the truncation coincide.
Node* EffectControlLinearizer::LowerChangeTaggedToFloat64(Node* node) {
  return LowerTruncateTaggedToFloat64(node);
}
1717 
// Lowers ChangeTaggedToTaggedSigned: Smis pass through unchanged; for heap
// objects the float64 payload is converted to int32 and re-tagged as a Smi.
Node* EffectControlLinearizer::LowerChangeTaggedToTaggedSigned(Node* node) {
  Node* value = node->InputAt(0);

  auto if_not_smi = __ MakeDeferredLabel();
  // NOTE(review): the phi merges Smi-tagged values, yet the label is declared
  // with kWord32 representation — looks suspicious; confirm this is intended
  // (e.g. under pointer compression) rather than kTaggedSigned.
  auto done = __ MakeLabel(MachineRepresentation::kWord32);

  Node* check = ObjectIsSmi(value);
  __ GotoIfNot(check, &if_not_smi);
  __ Goto(&done, value);

  __ Bind(&if_not_smi);
  // The offsets coincide, so the HeapNumber value load below also reads an
  // Oddball's to_number_raw field.
  STATIC_ASSERT_FIELD_OFFSETS_EQUAL(HeapNumber::kValueOffset,
                                    Oddball::kToNumberRawOffset);
  Node* vfalse = __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
  vfalse = __ ChangeFloat64ToInt32(vfalse);
  vfalse = ChangeInt32ToSmi(vfalse);
  __ Goto(&done, vfalse);

  __ Bind(&done);
  return done.PhiAt(0);
}
1739 
// Lowers TruncateTaggedToFloat64: converts an untagged Smi to float64, or
// loads the float64 payload straight out of a heap object.
Node* EffectControlLinearizer::LowerTruncateTaggedToFloat64(Node* node) {
  Node* value = node->InputAt(0);

  auto if_not_smi = __ MakeDeferredLabel();
  auto done = __ MakeLabel(MachineRepresentation::kFloat64);

  Node* check = ObjectIsSmi(value);
  __ GotoIfNot(check, &if_not_smi);
  Node* vtrue = ChangeSmiToInt32(value);
  vtrue = __ ChangeInt32ToFloat64(vtrue);
  __ Goto(&done, vtrue);

  __ Bind(&if_not_smi);
  // The offsets coincide, so the HeapNumber value load below also reads an
  // Oddball's to_number_raw field.
  STATIC_ASSERT_FIELD_OFFSETS_EQUAL(HeapNumber::kValueOffset,
                                    Oddball::kToNumberRawOffset);
  Node* vfalse = __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
  __ Goto(&done, vfalse);

  __ Bind(&done);
  return done.PhiAt(0);
}
1761 
LowerPoisonIndex(Node * node)1762 Node* EffectControlLinearizer::LowerPoisonIndex(Node* node) {
1763   Node* index = node->InputAt(0);
1764   if (mask_array_index_ == MaskArrayIndexEnable::kMaskArrayIndex) {
1765     index = __ Word32PoisonOnSpeculation(index);
1766   }
1767   return index;
1768 }
1769 
// Lowers CheckClosure: deoptimizes unless the input is a JSFunction whose
// feedback cell matches the one recorded in the operator; returns the input.
Node* EffectControlLinearizer::LowerCheckClosure(Node* node,
                                                 Node* frame_state) {
  Handle<FeedbackCell> feedback_cell = FeedbackCellOf(node->op());
  Node* value = node->InputAt(0);

  // Check that {value} is actually a JSFunction.
  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
  Node* value_instance_type =
      __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
  Node* check_instance_type =
      __ Word32Equal(value_instance_type, __ Int32Constant(JS_FUNCTION_TYPE));
  __ DeoptimizeIfNot(DeoptimizeReason::kWrongCallTarget, FeedbackSource(),
                     check_instance_type, frame_state);

  // Check that the {value}s feedback vector cell matches the one
  // we recorded before.
  Node* value_cell =
      __ LoadField(AccessBuilder::ForJSFunctionFeedbackCell(), value);
  Node* check_cell = __ WordEqual(value_cell, __ HeapConstant(feedback_cell));
  __ DeoptimizeIfNot(DeoptimizeReason::kWrongFeedbackCell, FeedbackSource(),
                     check_cell, frame_state);
  return value;
}
1793 
// Attempts an in-place map migration of {value} via Runtime::kTryMigrateInstance,
// deoptimizing with {reason} when {value_map} is not deprecated (migration
// would be pointless) and with kInstanceMigrationFailed when the runtime call
// signals failure by returning a Smi.
void EffectControlLinearizer::MigrateInstanceOrDeopt(
    Node* value, Node* value_map, Node* frame_state,
    FeedbackSource const& feedback_source, DeoptimizeReason reason) {
  // If map is not deprecated the migration attempt does not make sense.
  Node* bitfield3 = __ LoadField(AccessBuilder::ForMapBitField3(), value_map);
  Node* is_not_deprecated = __ Word32Equal(
      __ Word32And(bitfield3,
                   __ Int32Constant(Map::Bits3::IsDeprecatedBit::kMask)),
      __ Int32Constant(0));
  __ DeoptimizeIf(reason, feedback_source, is_not_deprecated, frame_state,
                  IsSafetyCheck::kCriticalSafetyCheck);
  Operator::Properties properties = Operator::kNoDeopt | Operator::kNoThrow;
  Runtime::FunctionId id = Runtime::kTryMigrateInstance;
  auto call_descriptor = Linkage::GetRuntimeCallDescriptor(
      graph()->zone(), id, 1, properties, CallDescriptor::kNoFlags);
  Node* result = __ Call(call_descriptor, __ CEntryStubConstant(1), value,
                         __ ExternalConstant(ExternalReference::Create(id)),
                         __ Int32Constant(1), __ NoContextConstant());
  // A Smi result indicates the migration attempt failed.
  Node* check = ObjectIsSmi(result);
  __ DeoptimizeIf(DeoptimizeReason::kInstanceMigrationFailed, feedback_source,
                  check, frame_state, IsSafetyCheck::kCriticalSafetyCheck);
}
1816 
// Lowers CheckMaps: compares the input's map against the recorded map set and
// deoptimizes with kWrongMap on mismatch. When kTryMigrateInstance is set, a
// failed first pass triggers a deferred instance migration followed by one
// more round of map checks before deoptimizing.
void EffectControlLinearizer::LowerCheckMaps(Node* node, Node* frame_state) {
  CheckMapsParameters const& p = CheckMapsParametersOf(node->op());
  Node* value = node->InputAt(0);

  ZoneHandleSet<Map> const& maps = p.maps();
  size_t const map_count = maps.size();

  if (p.flags() & CheckMapsFlag::kTryMigrateInstance) {
    auto done = __ MakeLabel();
    auto migrate = __ MakeDeferredLabel();

    // Load the current map of the {value}.
    Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);

    // Perform the map checks.
    for (size_t i = 0; i < map_count; ++i) {
      Node* map = __ HeapConstant(maps[i]);
      Node* check = __ TaggedEqual(value_map, map);
      if (i == map_count - 1) {
        // Last candidate: a miss falls through to the migration path.
        __ BranchWithCriticalSafetyCheck(check, &done, &migrate);
      } else {
        auto next_map = __ MakeLabel();
        __ BranchWithCriticalSafetyCheck(check, &done, &next_map);
        __ Bind(&next_map);
      }
    }

    // Perform the (deferred) instance migration.
    __ Bind(&migrate);
    MigrateInstanceOrDeopt(value, value_map, frame_state, p.feedback(),
                           DeoptimizeReason::kWrongMap);

    // Reload the current map of the {value}.
    value_map = __ LoadField(AccessBuilder::ForMap(), value);

    // Perform the map checks again.
    for (size_t i = 0; i < map_count; ++i) {
      Node* map = __ HeapConstant(maps[i]);
      Node* check = __ TaggedEqual(value_map, map);
      if (i == map_count - 1) {
        // After migration there is no further fallback: deopt on mismatch.
        __ DeoptimizeIfNot(DeoptimizeReason::kWrongMap, p.feedback(), check,
                           frame_state, IsSafetyCheck::kCriticalSafetyCheck);
      } else {
        auto next_map = __ MakeLabel();
        __ BranchWithCriticalSafetyCheck(check, &done, &next_map);
        __ Bind(&next_map);
      }
    }

    __ Goto(&done);
    __ Bind(&done);
  } else {
    auto done = __ MakeLabel();

    // Load the current map of the {value}.
    Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);

    for (size_t i = 0; i < map_count; ++i) {
      Node* map = __ HeapConstant(maps[i]);
      Node* check = __ TaggedEqual(value_map, map);

      if (i == map_count - 1) {
        // Last candidate and no migration: deopt on mismatch.
        __ DeoptimizeIfNot(DeoptimizeReason::kWrongMap, p.feedback(), check,
                           frame_state, IsSafetyCheck::kCriticalSafetyCheck);
      } else {
        auto next_map = __ MakeLabel();
        __ BranchWithCriticalSafetyCheck(check, &done, &next_map);
        __ Bind(&next_map);
      }
    }
    __ Goto(&done);
    __ Bind(&done);
  }
}
1891 
// Calls the DynamicMapChecks builtin and dispatches on its status result:
// kSuccess jumps to {done}, kBailout deoptimizes with kMissingMap (bailout
// kind), kDeopt deoptimizes with kWrongHandler; any other status is
// unreachable.
void EffectControlLinearizer::BuildCallDynamicMapChecksBuiltin(
    Node* actual_value, Node* actual_handler, int feedback_slot_index,
    GraphAssemblerLabel<0>* done, Node* frame_state) {
  Node* slot_index = __ IntPtrConstant(feedback_slot_index);
  Operator::Properties properties = Operator::kNoDeopt | Operator::kNoThrow;
  auto builtin = Builtins::kDynamicMapChecks;
  Node* result = CallBuiltin(builtin, properties, slot_index, actual_value,
                             actual_handler);
  __ GotoIf(__ WordEqual(result, __ IntPtrConstant(static_cast<int>(
                                     DynamicMapChecksStatus::kSuccess))),
            done);
  __ DeoptimizeIf(DeoptimizeKind::kBailout, DeoptimizeReason::kMissingMap,
                  FeedbackSource(),
                  __ WordEqual(result, __ IntPtrConstant(static_cast<int>(
                                           DynamicMapChecksStatus::kBailout))),
                  frame_state, IsSafetyCheck::kCriticalSafetyCheck);
  __ DeoptimizeIf(DeoptimizeReason::kWrongHandler, FeedbackSource(),
                  __ WordEqual(result, __ IntPtrConstant(static_cast<int>(
                                           DynamicMapChecksStatus::kDeopt))),
                  frame_state, IsSafetyCheck::kCriticalSafetyCheck);
  // The builtin returns one of the three statuses above, so control never
  // falls through to here.
  __ Unreachable(done);
}
1914 
// Lowers DynamicCheckMaps: fast-path compares the input's map against the
// recorded maps inline; on a miss, defers to the DynamicMapChecks builtin,
// which decides between success, bailout and deopt.
void EffectControlLinearizer::LowerDynamicCheckMaps(Node* node,
                                                    Node* frame_state) {
  DynamicCheckMapsParameters const& p =
      DynamicCheckMapsParametersOf(node->op());
  Node* actual_value = node->InputAt(0);

  FeedbackSource const& feedback = p.feedback();
  Node* actual_value_map = __ LoadField(AccessBuilder::ForMap(), actual_value);
  // The recorded handler is either a Smi or a heap object; embed it as the
  // matching constant kind.
  Node* actual_handler =
      p.handler()->IsSmi()
          ? __ SmiConstant(Smi::ToInt(*p.handler()))
          : __ HeapConstant(Handle<HeapObject>::cast(p.handler()));

  auto done = __ MakeLabel();
  auto call_builtin = __ MakeDeferredLabel();

  ZoneHandleSet<Map> maps = p.maps();
  size_t const map_count = maps.size();
  for (size_t i = 0; i < map_count; ++i) {
    Node* map = __ HeapConstant(maps[i]);
    Node* check = __ TaggedEqual(actual_value_map, map);
    if (i == map_count - 1) {
      // Last candidate: a miss goes to the (deferred) builtin call.
      __ BranchWithCriticalSafetyCheck(check, &done, &call_builtin);
    } else {
      auto next_map = __ MakeLabel();
      __ BranchWithCriticalSafetyCheck(check, &done, &next_map);
      __ Bind(&next_map);
    }
  }

  __ Bind(&call_builtin);
  {
    BuildCallDynamicMapChecksBuiltin(actual_value, actual_handler,
                                     feedback.index(), &done, frame_state);
  }

  __ Bind(&done);
}
1953 
// Lowers CompareMaps: returns a bit that is 1 iff the input's map equals any
// of the recorded maps. Unlike CheckMaps, a mismatch does not deoptimize.
Node* EffectControlLinearizer::LowerCompareMaps(Node* node) {
  ZoneHandleSet<Map> const& maps = CompareMapsParametersOf(node->op());
  size_t const map_count = maps.size();
  Node* value = node->InputAt(0);

  auto done = __ MakeLabel(MachineRepresentation::kBit);

  // Load the current map of the {value}.
  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);

  for (size_t i = 0; i < map_count; ++i) {
    Node* map = __ HeapConstant(maps[i]);
    Node* check = __ TaggedEqual(value_map, map);

    auto next_map = __ MakeLabel();
    auto passed = __ MakeLabel();
    __ BranchWithCriticalSafetyCheck(check, &passed, &next_map);

    __ Bind(&passed);
    __ Goto(&done, __ Int32Constant(1));

    __ Bind(&next_map);
  }
  // Fell through every candidate: no map matched.
  __ Goto(&done, __ Int32Constant(0));

  __ Bind(&done);
  return done.PhiAt(0);
}
1982 
// Lowers CheckNumber: deoptimizes with kNotAHeapNumber unless the input is a
// Smi or a HeapNumber; returns the input unchanged.
Node* EffectControlLinearizer::LowerCheckNumber(Node* node, Node* frame_state) {
  Node* value = node->InputAt(0);
  const CheckParameters& params = CheckParametersOf(node->op());

  auto if_not_smi = __ MakeDeferredLabel();
  auto done = __ MakeLabel();

  // Smis are numbers; nothing further to check.
  Node* check0 = ObjectIsSmi(value);
  __ GotoIfNot(check0, &if_not_smi);
  __ Goto(&done);

  __ Bind(&if_not_smi);
  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
  Node* check1 = __ TaggedEqual(value_map, __ HeapNumberMapConstant());
  __ DeoptimizeIfNot(DeoptimizeReason::kNotAHeapNumber, params.feedback(),
                     check1, frame_state);
  __ Goto(&done);

  __ Bind(&done);
  return value;
}
2004 
// Lowers CheckReceiver: deoptimizes unless the input's instance type is in
// the JSReceiver range; returns the input unchanged.
Node* EffectControlLinearizer::LowerCheckReceiver(Node* node,
                                                  Node* frame_state) {
  Node* value = node->InputAt(0);

  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
  Node* value_instance_type =
      __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);

  // JSReceiver types form the top of the instance-type range, so a single
  // lower-bound comparison suffices.
  STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
  Node* check = __ Uint32LessThanOrEqual(
      __ Uint32Constant(FIRST_JS_RECEIVER_TYPE), value_instance_type);
  __ DeoptimizeIfNot(DeoptimizeReason::kNotAJavaScriptObject, FeedbackSource(),
                     check, frame_state);
  return value;
}
2020 
// Lowers CheckReceiverOrNullOrUndefined: accepts JSReceivers plus the
// null/undefined oddballs; deoptimizes for all other primitives, including
// the boolean oddballs.
Node* EffectControlLinearizer::LowerCheckReceiverOrNullOrUndefined(
    Node* node, Node* frame_state) {
  Node* value = node->InputAt(0);

  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
  Node* value_instance_type =
      __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);

  // Rule out all primitives except oddballs (true, false, undefined, null).
  STATIC_ASSERT(LAST_PRIMITIVE_HEAP_OBJECT_TYPE == ODDBALL_TYPE);
  STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
  Node* check0 = __ Uint32LessThanOrEqual(__ Uint32Constant(ODDBALL_TYPE),
                                          value_instance_type);
  __ DeoptimizeIfNot(DeoptimizeReason::kNotAJavaScriptObjectOrNullOrUndefined,
                     FeedbackSource(), check0, frame_state);

  // Rule out booleans.
  Node* check1 = __ TaggedEqual(value_map, __ BooleanMapConstant());
  __ DeoptimizeIf(DeoptimizeReason::kNotAJavaScriptObjectOrNullOrUndefined,
                  FeedbackSource(), check1, frame_state);
  return value;
}
2043 
// Lowers CheckSymbol: deoptimizes unless the input's map is the symbol map;
// returns the input unchanged.
Node* EffectControlLinearizer::LowerCheckSymbol(Node* node, Node* frame_state) {
  Node* value = node->InputAt(0);

  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);

  Node* check =
      __ TaggedEqual(value_map, __ HeapConstant(factory()->symbol_map()));
  __ DeoptimizeIfNot(DeoptimizeReason::kNotASymbol, FeedbackSource(), check,
                     frame_state);
  return value;
}
2055 
// Lowers CheckString: deoptimizes unless the input's instance type lies in
// the string range (below FIRST_NONSTRING_TYPE); returns the input unchanged.
Node* EffectControlLinearizer::LowerCheckString(Node* node, Node* frame_state) {
  Node* value = node->InputAt(0);
  const CheckParameters& params = CheckParametersOf(node->op());

  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
  Node* value_instance_type =
      __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);

  // String instance types all sort before FIRST_NONSTRING_TYPE.
  Node* check = __ Uint32LessThan(value_instance_type,
                                  __ Uint32Constant(FIRST_NONSTRING_TYPE));
  __ DeoptimizeIfNot(DeoptimizeReason::kNotAString, params.feedback(), check,
                     frame_state);
  return value;
}
2070 
// Lowers CheckInternalizedString: deoptimizes unless the input is an
// internalized string, tested by masking the instance type with both the
// string and the internalized bits; returns the input unchanged.
Node* EffectControlLinearizer::LowerCheckInternalizedString(Node* node,
                                                            Node* frame_state) {
  Node* value = node->InputAt(0);

  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
  Node* value_instance_type =
      __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);

  // Both "is a string" and "is internalized" are encoded in instance-type
  // bits, so one masked compare covers the conjunction.
  Node* check = __ Word32Equal(
      __ Word32And(value_instance_type,
                   __ Int32Constant(kIsNotStringMask | kIsNotInternalizedMask)),
      __ Int32Constant(kInternalizedTag));
  __ DeoptimizeIfNot(DeoptimizeReason::kWrongInstanceType, FeedbackSource(),
                     check, frame_state);

  return value;
}
2088 
LowerCheckIf(Node * node,Node * frame_state)2089 void EffectControlLinearizer::LowerCheckIf(Node* node, Node* frame_state) {
2090   Node* value = node->InputAt(0);
2091   const CheckIfParameters& p = CheckIfParametersOf(node->op());
2092   __ DeoptimizeIfNot(p.reason(), p.feedback(), value, frame_state);
2093 }
2094 
// Lowers StringConcat to a call of the StringAdd stub (no argument checks)
// on inputs 1 and 2.
// NOTE(review): input 0 is not read here — presumably it carries the
// precomputed combined length, which the stub does not need; confirm against
// the StringConcat operator definition.
Node* EffectControlLinearizer::LowerStringConcat(Node* node) {
  Node* lhs = node->InputAt(1);
  Node* rhs = node->InputAt(2);

  Callable const callable =
      CodeFactory::StringAdd(isolate(), STRING_ADD_CHECK_NONE);
  auto call_descriptor = Linkage::GetStubCallDescriptor(
      graph()->zone(), callable.descriptor(),
      callable.descriptor().GetStackParameterCount(), CallDescriptor::kNoFlags,
      Operator::kNoDeopt | Operator::kNoWrite | Operator::kNoThrow);

  Node* value = __ Call(call_descriptor, __ HeapConstant(callable.code()), lhs,
                        rhs, __ NoContextConstant());

  return value;
}
2111 
LowerCheckedInt32Add(Node * node,Node * frame_state)2112 Node* EffectControlLinearizer::LowerCheckedInt32Add(Node* node,
2113                                                     Node* frame_state) {
2114   Node* lhs = node->InputAt(0);
2115   Node* rhs = node->InputAt(1);
2116 
2117   Node* value = __ Int32AddWithOverflow(lhs, rhs);
2118   Node* check = __ Projection(1, value);
2119   __ DeoptimizeIf(DeoptimizeReason::kOverflow, FeedbackSource(), check,
2120                   frame_state);
2121   return __ Projection(0, value);
2122 }
2123 
LowerCheckedInt32Sub(Node * node,Node * frame_state)2124 Node* EffectControlLinearizer::LowerCheckedInt32Sub(Node* node,
2125                                                     Node* frame_state) {
2126   Node* lhs = node->InputAt(0);
2127   Node* rhs = node->InputAt(1);
2128 
2129   Node* value = __ Int32SubWithOverflow(lhs, rhs);
2130   Node* check = __ Projection(1, value);
2131   __ DeoptimizeIf(DeoptimizeReason::kOverflow, FeedbackSource(), check,
2132                   frame_state);
2133   return __ Projection(0, value);
2134 }
2135 
// Lowers CheckedInt32Div: exact signed 32-bit division that deoptimizes on
// division by zero, overflow (kMinInt / -1), a -0 result, or a non-zero
// remainder (lost precision). Power-of-two divisors are strength-reduced to
// an arithmetic shift.
Node* EffectControlLinearizer::LowerCheckedInt32Div(Node* node,
                                                    Node* frame_state) {
  Node* lhs = node->InputAt(0);
  Node* rhs = node->InputAt(1);
  Node* zero = __ Int32Constant(0);

  // Check if the {rhs} is a known power of two.
  Int32Matcher m(rhs);
  if (m.IsPowerOf2()) {
    // Since we know that {rhs} is a power of two, we can perform a fast
    // check to see if the relevant least significant bits of the {lhs}
    // are all zero, and if so we know that we can perform a division
    // safely (and fast by doing an arithmetic - aka sign preserving -
    // right shift on {lhs}).
    int32_t divisor = m.ResolvedValue();
    Node* mask = __ Int32Constant(divisor - 1);
    Node* shift = __ Int32Constant(base::bits::WhichPowerOfTwo(divisor));
    Node* check = __ Word32Equal(__ Word32And(lhs, mask), zero);
    __ DeoptimizeIfNot(DeoptimizeReason::kLostPrecision, FeedbackSource(),
                       check, frame_state);
    return __ Word32Sar(lhs, shift);
  } else {
    auto if_rhs_positive = __ MakeLabel();
    auto if_rhs_negative = __ MakeDeferredLabel();
    auto done = __ MakeLabel(MachineRepresentation::kWord32);

    // Check if {rhs} is positive (and not zero).
    Node* check_rhs_positive = __ Int32LessThan(zero, rhs);
    __ Branch(check_rhs_positive, &if_rhs_positive, &if_rhs_negative);

    __ Bind(&if_rhs_positive);
    {
      // Fast case, no additional checking required.
      __ Goto(&done, __ Int32Div(lhs, rhs));
    }

    __ Bind(&if_rhs_negative);
    {
      auto if_lhs_minint = __ MakeDeferredLabel();
      auto if_lhs_notminint = __ MakeLabel();

      // Check if {rhs} is zero.
      Node* check_rhs_zero = __ Word32Equal(rhs, zero);
      __ DeoptimizeIf(DeoptimizeReason::kDivisionByZero, FeedbackSource(),
                      check_rhs_zero, frame_state);

      // Check if {lhs} is zero, as that would produce minus zero.
      Node* check_lhs_zero = __ Word32Equal(lhs, zero);
      __ DeoptimizeIf(DeoptimizeReason::kMinusZero, FeedbackSource(),
                      check_lhs_zero, frame_state);

      // Check if {lhs} is kMinInt and {rhs} is -1, in which case we'd have
      // to return -kMinInt, which is not representable as Word32.
      Node* check_lhs_minint = __ Word32Equal(lhs, __ Int32Constant(kMinInt));
      __ Branch(check_lhs_minint, &if_lhs_minint, &if_lhs_notminint);

      __ Bind(&if_lhs_minint);
      {
        // Check that {rhs} is not -1, otherwise result would be -kMinInt.
        Node* check_rhs_minusone = __ Word32Equal(rhs, __ Int32Constant(-1));
        __ DeoptimizeIf(DeoptimizeReason::kOverflow, FeedbackSource(),
                        check_rhs_minusone, frame_state);

        // Perform the actual integer division.
        __ Goto(&done, __ Int32Div(lhs, rhs));
      }

      __ Bind(&if_lhs_notminint);
      {
        // Perform the actual integer division.
        __ Goto(&done, __ Int32Div(lhs, rhs));
      }
    }

    __ Bind(&done);
    Node* value = done.PhiAt(0);

    // Check if the remainder is non-zero. (The division is exact iff
    // value * rhs reproduces lhs.)
    Node* check = __ Word32Equal(lhs, __ Int32Mul(value, rhs));
    __ DeoptimizeIfNot(DeoptimizeReason::kLostPrecision, FeedbackSource(),
                       check, frame_state);

    return value;
  }
}
2221 
2222 template <size_t VarCount, size_t VarCount2>
SmiTagOrOverflow(Node * value,GraphAssemblerLabel<VarCount> * if_overflow,GraphAssemblerLabel<VarCount2> * done)2223 void EffectControlLinearizer::SmiTagOrOverflow(
2224     Node* value, GraphAssemblerLabel<VarCount>* if_overflow,
2225     GraphAssemblerLabel<VarCount2>* done) {
2226   DCHECK(SmiValuesAre31Bits());
2227   // Check for overflow at the same time that we are smi tagging.
2228   // Since smi tagging shifts left by one, it's the same as adding value twice.
2229   Node* add = __ Int32AddWithOverflow(value, value);
2230   Node* ovf = __ Projection(1, add);
2231   __ GotoIf(ovf, if_overflow);
2232   Node* value_smi = __ Projection(0, add);
2233   value_smi = ChangeTaggedInt32ToSmi(value_smi);
2234   __ Goto(done, value_smi);
2235 }
2236 
SmiTagOrDeopt(Node * value,const CheckParameters & params,Node * frame_state)2237 Node* EffectControlLinearizer::SmiTagOrDeopt(Node* value,
2238                                              const CheckParameters& params,
2239                                              Node* frame_state) {
2240   DCHECK(SmiValuesAre31Bits());
2241   // Check for the lost precision at the same time that we are smi tagging.
2242   // Since smi tagging shifts left by one, it's the same as adding value twice.
2243   Node* add = __ Int32AddWithOverflow(value, value);
2244   Node* check = __ Projection(1, add);
2245   __ DeoptimizeIf(DeoptimizeReason::kLostPrecision, params.feedback(), check,
2246                   frame_state);
2247   Node* result = __ Projection(0, add);
2248   return ChangeTaggedInt32ToSmi(result);
2249 }
2250 
BuildUint32Mod(Node * lhs,Node * rhs)2251 Node* EffectControlLinearizer::BuildUint32Mod(Node* lhs, Node* rhs) {
2252   auto if_rhs_power_of_two = __ MakeLabel();
2253   auto done = __ MakeLabel(MachineRepresentation::kWord32);
2254 
2255   // Compute the mask for the {rhs}.
2256   Node* one = __ Int32Constant(1);
2257   Node* msk = __ Int32Sub(rhs, one);
2258 
2259   // Check if the {rhs} is a power of two.
2260   __ GotoIf(__ Word32Equal(__ Word32And(rhs, msk), __ Int32Constant(0)),
2261             &if_rhs_power_of_two);
2262   {
2263     // The {rhs} is not a power of two, do a generic Uint32Mod.
2264     __ Goto(&done, __ Uint32Mod(lhs, rhs));
2265   }
2266 
2267   __ Bind(&if_rhs_power_of_two);
2268   {
2269     // The {rhs} is a power of two, just do a fast bit masking.
2270     __ Goto(&done, __ Word32And(lhs, msk));
2271   }
2272 
2273   __ Bind(&done);
2274   return done.PhiAt(0);
2275 }
2276 
// Lowers CheckedInt32Mod. Deopts on a zero divisor (JS result would be NaN)
// and on a zero result with a negative {lhs} (JS result would be -0, which
// is not representable as an int32).
Node* EffectControlLinearizer::LowerCheckedInt32Mod(Node* node,
                                                    Node* frame_state) {
  // General case for signed integer modulus, with optimization for (unknown)
  // power of 2 right hand side.
  //
  //   if rhs <= 0 then
  //     rhs = -rhs
  //     deopt if rhs == 0
  //   let msk = rhs - 1 in
  //   if lhs < 0 then
  //     let lhs_abs = -lhs in
  //     let res = if rhs & msk == 0 then
  //                 lhs_abs & msk
  //               else
  //                 lhs_abs % rhs in
  //     if lhs < 0 then
  //       deopt if res == 0
  //       -res
  //     else
  //       res
  //   else
  //     if rhs & msk == 0 then
  //       lhs & msk
  //     else
  //       lhs % rhs
  //
  Node* lhs = node->InputAt(0);
  Node* rhs = node->InputAt(1);

  auto if_rhs_not_positive = __ MakeDeferredLabel();
  auto if_lhs_negative = __ MakeDeferredLabel();
  auto if_rhs_power_of_two = __ MakeLabel();
  auto rhs_checked = __ MakeLabel(MachineRepresentation::kWord32);
  auto done = __ MakeLabel(MachineRepresentation::kWord32);

  Node* zero = __ Int32Constant(0);

  // Check if {rhs} is not strictly positive.
  Node* check0 = __ Int32LessThanOrEqual(rhs, zero);
  __ GotoIf(check0, &if_rhs_not_positive);
  __ Goto(&rhs_checked, rhs);

  __ Bind(&if_rhs_not_positive);
  {
    // Negate {rhs}, might still produce a negative result in case of
    // -2^31, but that is handled safely below.
    Node* vtrue0 = __ Int32Sub(zero, rhs);

    // Ensure that {rhs} is not zero, otherwise we'd have to return NaN.
    __ DeoptimizeIf(DeoptimizeReason::kDivisionByZero, FeedbackSource(),
                    __ Word32Equal(vtrue0, zero), frame_state);
    __ Goto(&rhs_checked, vtrue0);
  }

  __ Bind(&rhs_checked);
  // From here on {rhs} is non-zero; x % -y == x % y, so the negation above
  // does not change the result.
  rhs = rhs_checked.PhiAt(0);

  __ GotoIf(__ Int32LessThan(lhs, zero), &if_lhs_negative);
  {
    // The {lhs} is a non-negative integer.
    __ Goto(&done, BuildUint32Mod(lhs, rhs));
  }

  __ Bind(&if_lhs_negative);
  {
    // The {lhs} is a negative integer. This is very unlikely and
    // we intentionally don't use the BuildUint32Mod() here, which
    // would try to figure out whether {rhs} is a power of two,
    // since this is intended to be a slow-path.
    Node* res = __ Uint32Mod(__ Int32Sub(zero, lhs), rhs);

    // Check if we would have to return -0.
    __ DeoptimizeIf(DeoptimizeReason::kMinusZero, FeedbackSource(),
                    __ Word32Equal(res, zero), frame_state);
    __ Goto(&done, __ Int32Sub(zero, res));
  }

  __ Bind(&done);
  return done.PhiAt(0);
}
2357 
// Lowers CheckedUint32Div. Deopts on a zero divisor and whenever the
// division is not exact (non-zero remainder would lose precision).
Node* EffectControlLinearizer::LowerCheckedUint32Div(Node* node,
                                                     Node* frame_state) {
  Node* lhs = node->InputAt(0);
  Node* rhs = node->InputAt(1);
  Node* zero = __ Int32Constant(0);

  // Check if the {rhs} is a known power of two.
  Uint32Matcher m(rhs);
  if (m.IsPowerOf2()) {
    // Since we know that {rhs} is a power of two, we can perform a fast
    // check to see if the relevant least significant bits of the {lhs}
    // are all zero, and if so we know that we can perform a division
    // safely (and fast by doing a logical - aka zero extending - right
    // shift on {lhs}).
    uint32_t divisor = m.ResolvedValue();
    Node* mask = __ Uint32Constant(divisor - 1);
    Node* shift = __ Uint32Constant(base::bits::WhichPowerOfTwo(divisor));
    Node* check = __ Word32Equal(__ Word32And(lhs, mask), zero);
    __ DeoptimizeIfNot(DeoptimizeReason::kLostPrecision, FeedbackSource(),
                       check, frame_state);
    return __ Word32Shr(lhs, shift);
  } else {
    // Ensure that {rhs} is not zero, otherwise we'd have to return NaN.
    Node* check = __ Word32Equal(rhs, zero);
    __ DeoptimizeIf(DeoptimizeReason::kDivisionByZero, FeedbackSource(), check,
                    frame_state);

    // Perform the actual unsigned integer division.
    Node* value = __ Uint32Div(lhs, rhs);

    // Check if the remainder is non-zero. Multiplying the quotient back and
    // comparing to {lhs} avoids a separate modulus operation.
    check = __ Word32Equal(lhs, __ Int32Mul(rhs, value));
    __ DeoptimizeIfNot(DeoptimizeReason::kLostPrecision, FeedbackSource(),
                       check, frame_state);
    return value;
  }
}
2395 
LowerCheckedUint32Mod(Node * node,Node * frame_state)2396 Node* EffectControlLinearizer::LowerCheckedUint32Mod(Node* node,
2397                                                      Node* frame_state) {
2398   Node* lhs = node->InputAt(0);
2399   Node* rhs = node->InputAt(1);
2400 
2401   Node* zero = __ Int32Constant(0);
2402 
2403   // Ensure that {rhs} is not zero, otherwise we'd have to return NaN.
2404   Node* check = __ Word32Equal(rhs, zero);
2405   __ DeoptimizeIf(DeoptimizeReason::kDivisionByZero, FeedbackSource(), check,
2406                   frame_state);
2407 
2408   // Perform the actual unsigned integer modulus.
2409   return BuildUint32Mod(lhs, rhs);
2410 }
2411 
// Lowers CheckedInt32Mul. Deopts on overflow, and (when requested by the
// operator's mode) on a result that would be -0 in JS terms.
Node* EffectControlLinearizer::LowerCheckedInt32Mul(Node* node,
                                                    Node* frame_state) {
  CheckForMinusZeroMode mode = CheckMinusZeroModeOf(node->op());
  Node* lhs = node->InputAt(0);
  Node* rhs = node->InputAt(1);

  Node* projection = __ Int32MulWithOverflow(lhs, rhs);
  Node* check = __ Projection(1, projection);
  __ DeoptimizeIf(DeoptimizeReason::kOverflow, FeedbackSource(), check,
                  frame_state);

  Node* value = __ Projection(0, projection);

  if (mode == CheckForMinusZeroMode::kCheckForMinusZero) {
    auto if_zero = __ MakeDeferredLabel();
    auto check_done = __ MakeLabel();
    Node* zero = __ Int32Constant(0);
    // Only a zero result can possibly stand for -0; skip the extra check
    // entirely for non-zero results.
    Node* check_zero = __ Word32Equal(value, zero);
    __ GotoIf(check_zero, &if_zero);
    __ Goto(&check_done);

    __ Bind(&if_zero);
    // We may need to return negative zero. A zero product comes from a
    // negative operand exactly when lhs | rhs has the sign bit set.
    Node* check_or = __ Int32LessThan(__ Word32Or(lhs, rhs), zero);
    __ DeoptimizeIf(DeoptimizeReason::kMinusZero, FeedbackSource(), check_or,
                    frame_state);
    __ Goto(&check_done);

    __ Bind(&check_done);
  }

  return value;
}
2445 
LowerCheckedInt32ToTaggedSigned(Node * node,Node * frame_state)2446 Node* EffectControlLinearizer::LowerCheckedInt32ToTaggedSigned(
2447     Node* node, Node* frame_state) {
2448   DCHECK(SmiValuesAre31Bits());
2449   Node* value = node->InputAt(0);
2450   const CheckParameters& params = CheckParametersOf(node->op());
2451   return SmiTagOrDeopt(value, params, frame_state);
2452 }
2453 
LowerCheckedInt64ToInt32(Node * node,Node * frame_state)2454 Node* EffectControlLinearizer::LowerCheckedInt64ToInt32(Node* node,
2455                                                         Node* frame_state) {
2456   Node* value = node->InputAt(0);
2457   const CheckParameters& params = CheckParametersOf(node->op());
2458 
2459   Node* value32 = __ TruncateInt64ToInt32(value);
2460   Node* check = __ Word64Equal(__ ChangeInt32ToInt64(value32), value);
2461   __ DeoptimizeIfNot(DeoptimizeReason::kLostPrecision, params.feedback(), check,
2462                      frame_state);
2463   return value32;
2464 }
2465 
LowerCheckedInt64ToTaggedSigned(Node * node,Node * frame_state)2466 Node* EffectControlLinearizer::LowerCheckedInt64ToTaggedSigned(
2467     Node* node, Node* frame_state) {
2468   Node* value = node->InputAt(0);
2469   const CheckParameters& params = CheckParametersOf(node->op());
2470 
2471   Node* value32 = __ TruncateInt64ToInt32(value);
2472   Node* check = __ Word64Equal(__ ChangeInt32ToInt64(value32), value);
2473   __ DeoptimizeIfNot(DeoptimizeReason::kLostPrecision, params.feedback(), check,
2474                      frame_state);
2475 
2476   if (SmiValuesAre32Bits()) {
2477     return ChangeInt64ToSmi(value);
2478   } else {
2479     return SmiTagOrDeopt(value32, params, frame_state);
2480   }
2481 }
2482 
// Lowers CheckedUint32Bounds: verifies index < limit (unsigned compare, so a
// negative int32 index also fails) and returns the index unchanged. In the
// normal mode a failing check deopts; with kAbortOnOutOfBounds the
// out-of-bounds path is marked unreachable instead (hard crash, no deopt).
Node* EffectControlLinearizer::LowerCheckedUint32Bounds(Node* node,
                                                        Node* frame_state) {
  Node* index = node->InputAt(0);
  Node* limit = node->InputAt(1);
  const CheckBoundsParameters& params = CheckBoundsParametersOf(node->op());

  Node* check = __ Uint32LessThan(index, limit);
  if (!(params.flags() & CheckBoundsFlag::kAbortOnOutOfBounds)) {
    // Deopting mode: bail out to the interpreter on a failing bounds check.
    __ DeoptimizeIfNot(DeoptimizeReason::kOutOfBounds,
                       params.check_parameters().feedback(), check, frame_state,
                       IsSafetyCheck::kCriticalSafetyCheck);
  } else {
    // Aborting mode: the out-of-bounds branch terminates execution.
    auto if_abort = __ MakeDeferredLabel();
    auto done = __ MakeLabel();

    __ Branch(check, &done, &if_abort);

    __ Bind(&if_abort);
    __ Unreachable(&done);

    __ Bind(&done);
  }

  return index;
}
2508 
LowerCheckedUint32ToInt32(Node * node,Node * frame_state)2509 Node* EffectControlLinearizer::LowerCheckedUint32ToInt32(Node* node,
2510                                                          Node* frame_state) {
2511   Node* value = node->InputAt(0);
2512   const CheckParameters& params = CheckParametersOf(node->op());
2513   Node* unsafe = __ Int32LessThan(value, __ Int32Constant(0));
2514   __ DeoptimizeIf(DeoptimizeReason::kLostPrecision, params.feedback(), unsafe,
2515                   frame_state);
2516   return value;
2517 }
2518 
LowerCheckedUint32ToTaggedSigned(Node * node,Node * frame_state)2519 Node* EffectControlLinearizer::LowerCheckedUint32ToTaggedSigned(
2520     Node* node, Node* frame_state) {
2521   Node* value = node->InputAt(0);
2522   const CheckParameters& params = CheckParametersOf(node->op());
2523   Node* check = __ Uint32LessThanOrEqual(value, SmiMaxValueConstant());
2524   __ DeoptimizeIfNot(DeoptimizeReason::kLostPrecision, params.feedback(), check,
2525                      frame_state);
2526   return ChangeUint32ToSmi(value);
2527 }
2528 
// Lowers CheckedUint64Bounds — the 64-bit analogue of
// LowerCheckedUint32Bounds: verifies index < limit (unsigned) and returns
// the index; either deopts or marks the failure path unreachable depending
// on the kAbortOnOutOfBounds flag.
Node* EffectControlLinearizer::LowerCheckedUint64Bounds(Node* node,
                                                        Node* frame_state) {
  Node* const index = node->InputAt(0);
  Node* const limit = node->InputAt(1);
  const CheckBoundsParameters& params = CheckBoundsParametersOf(node->op());

  Node* check = __ Uint64LessThan(index, limit);
  if (!(params.flags() & CheckBoundsFlag::kAbortOnOutOfBounds)) {
    // Deopting mode: bail out to the interpreter on a failing bounds check.
    __ DeoptimizeIfNot(DeoptimizeReason::kOutOfBounds,
                       params.check_parameters().feedback(), check, frame_state,
                       IsSafetyCheck::kCriticalSafetyCheck);
  } else {
    // Aborting mode: the out-of-bounds branch terminates execution.
    auto if_abort = __ MakeDeferredLabel();
    auto done = __ MakeLabel();

    __ Branch(check, &done, &if_abort);

    __ Bind(&if_abort);
    __ Unreachable(&done);

    __ Bind(&done);
  }
  return index;
}
2553 
LowerCheckedUint64ToInt32(Node * node,Node * frame_state)2554 Node* EffectControlLinearizer::LowerCheckedUint64ToInt32(Node* node,
2555                                                          Node* frame_state) {
2556   Node* value = node->InputAt(0);
2557   const CheckParameters& params = CheckParametersOf(node->op());
2558 
2559   Node* check = __ Uint64LessThanOrEqual(value, __ Int64Constant(kMaxInt));
2560   __ DeoptimizeIfNot(DeoptimizeReason::kLostPrecision, params.feedback(), check,
2561                      frame_state);
2562   return __ TruncateInt64ToInt32(value);
2563 }
2564 
LowerCheckedUint64ToTaggedSigned(Node * node,Node * frame_state)2565 Node* EffectControlLinearizer::LowerCheckedUint64ToTaggedSigned(
2566     Node* node, Node* frame_state) {
2567   Node* value = node->InputAt(0);
2568   const CheckParameters& params = CheckParametersOf(node->op());
2569 
2570   Node* check =
2571       __ Uint64LessThanOrEqual(value, __ Int64Constant(Smi::kMaxValue));
2572   __ DeoptimizeIfNot(DeoptimizeReason::kLostPrecision, params.feedback(), check,
2573                      frame_state);
2574   return ChangeInt64ToSmi(value);
2575 }
2576 
// Converts a float64 {value} to int32, deopting when the value is not
// exactly representable as an int32 (including NaN), and — when {mode}
// requests it — when the value is -0.
Node* EffectControlLinearizer::BuildCheckedFloat64ToInt32(
    CheckForMinusZeroMode mode, const FeedbackSource& feedback, Node* value,
    Node* frame_state) {
  // Round-trip through int32: if converting back to float64 does not
  // reproduce {value}, precision was lost (or {value} was NaN).
  Node* value32 = __ RoundFloat64ToInt32(value);
  Node* check_same = __ Float64Equal(value, __ ChangeInt32ToFloat64(value32));
  __ DeoptimizeIfNot(DeoptimizeReason::kLostPrecisionOrNaN, feedback,
                     check_same, frame_state);

  if (mode == CheckForMinusZeroMode::kCheckForMinusZero) {
    // Check if {value} is -0. Only a zero int32 result can hide a -0 input,
    // so the expensive high-word inspection is deferred to that case.
    auto if_zero = __ MakeDeferredLabel();
    auto check_done = __ MakeLabel();

    Node* check_zero = __ Word32Equal(value32, __ Int32Constant(0));
    __ GotoIf(check_zero, &if_zero);
    __ Goto(&check_done);

    __ Bind(&if_zero);
    // In case of 0, we need to check the high bits for the IEEE -0 pattern
    // (sign bit set in the upper word of the double).
    Node* check_negative = __ Int32LessThan(__ Float64ExtractHighWord32(value),
                                            __ Int32Constant(0));
    __ DeoptimizeIf(DeoptimizeReason::kMinusZero, feedback, check_negative,
                    frame_state);
    __ Goto(&check_done);

    __ Bind(&check_done);
  }
  return value32;
}
2606 
// Converts a float64 {value} to a machine-word-sized array index. On 64-bit
// targets the index is additionally range-checked against the safe-integer
// bounds; on 32-bit targets the int32 round-trip check alone suffices.
Node* EffectControlLinearizer::BuildCheckedFloat64ToIndex(
    const FeedbackSource& feedback, Node* value, Node* frame_state) {
  if (machine()->Is64()) {
    // Truncate to int64 and verify the conversion was exact (also rejects
    // NaN, since NaN never compares equal).
    Node* value64 = __ TruncateFloat64ToInt64(value);
    Node* check_same = __ Float64Equal(value, __ ChangeInt64ToFloat64(value64));
    __ DeoptimizeIfNot(DeoptimizeReason::kLostPrecisionOrNaN, feedback,
                       check_same, frame_state);
    // Require value64 to lie strictly inside (-kMaxSafeInteger,
    // kMaxSafeInteger).
    Node* check_max =
        __ IntLessThan(value64, __ Int64Constant(kMaxSafeInteger));
    __ DeoptimizeIfNot(DeoptimizeReason::kNotAnArrayIndex, feedback, check_max,
                       frame_state);
    Node* check_min =
        __ IntLessThan(__ Int64Constant(-kMaxSafeInteger), value64);
    __ DeoptimizeIfNot(DeoptimizeReason::kNotAnArrayIndex, feedback, check_min,
                       frame_state);
    return value64;
  } else {
    // 32-bit target: an exact int32 round-trip is both the precision and the
    // range check.
    Node* value32 = __ RoundFloat64ToInt32(value);
    Node* check_same = __ Float64Equal(value, __ ChangeInt32ToFloat64(value32));
    __ DeoptimizeIfNot(DeoptimizeReason::kLostPrecisionOrNaN, feedback,
                       check_same, frame_state);
    return value32;
  }
}
2631 
LowerCheckedFloat64ToInt32(Node * node,Node * frame_state)2632 Node* EffectControlLinearizer::LowerCheckedFloat64ToInt32(Node* node,
2633                                                           Node* frame_state) {
2634   const CheckMinusZeroParameters& params =
2635       CheckMinusZeroParametersOf(node->op());
2636   Node* value = node->InputAt(0);
2637   return BuildCheckedFloat64ToInt32(params.mode(), params.feedback(), value,
2638                                     frame_state);
2639 }
2640 
// Converts a float64 {value} to int64, deopting when the value is not
// exactly representable as an int64 (including NaN), and — when {mode}
// requests it — when the value is -0.
Node* EffectControlLinearizer::BuildCheckedFloat64ToInt64(
    CheckForMinusZeroMode mode, const FeedbackSource& feedback, Node* value,
    Node* frame_state) {
  // Round-trip through int64: if converting back to float64 does not
  // reproduce {value}, precision was lost (or {value} was NaN).
  Node* value64 = __ TruncateFloat64ToInt64(value);
  Node* check_same = __ Float64Equal(value, __ ChangeInt64ToFloat64(value64));
  __ DeoptimizeIfNot(DeoptimizeReason::kLostPrecisionOrNaN, feedback,
                     check_same, frame_state);

  if (mode == CheckForMinusZeroMode::kCheckForMinusZero) {
    // Check if {value} is -0. Only a zero int64 result can hide a -0 input,
    // so the high-word inspection is deferred to that case.
    auto if_zero = __ MakeDeferredLabel();
    auto check_done = __ MakeLabel();

    Node* check_zero = __ Word64Equal(value64, __ Int64Constant(0));
    __ GotoIf(check_zero, &if_zero);
    __ Goto(&check_done);

    __ Bind(&if_zero);
    // In case of 0, we need to check the high bits for the IEEE -0 pattern
    // (sign bit set in the upper word of the double).
    Node* check_negative = __ Int32LessThan(__ Float64ExtractHighWord32(value),
                                            __ Int32Constant(0));
    __ DeoptimizeIf(DeoptimizeReason::kMinusZero, feedback, check_negative,
                    frame_state);
    __ Goto(&check_done);

    __ Bind(&check_done);
  }
  return value64;
}
2670 
LowerCheckedFloat64ToInt64(Node * node,Node * frame_state)2671 Node* EffectControlLinearizer::LowerCheckedFloat64ToInt64(Node* node,
2672                                                           Node* frame_state) {
2673   const CheckMinusZeroParameters& params =
2674       CheckMinusZeroParametersOf(node->op());
2675   Node* value = node->InputAt(0);
2676   return BuildCheckedFloat64ToInt64(params.mode(), params.feedback(), value,
2677                                     frame_state);
2678 }
2679 
LowerCheckedTaggedSignedToInt32(Node * node,Node * frame_state)2680 Node* EffectControlLinearizer::LowerCheckedTaggedSignedToInt32(
2681     Node* node, Node* frame_state) {
2682   Node* value = node->InputAt(0);
2683   const CheckParameters& params = CheckParametersOf(node->op());
2684   Node* check = ObjectIsSmi(value);
2685   __ DeoptimizeIfNot(DeoptimizeReason::kNotASmi, params.feedback(), check,
2686                      frame_state);
2687   return ChangeSmiToInt32(value);
2688 }
2689 
LowerCheckedTaggedToArrayIndex(Node * node,Node * frame_state)2690 Node* EffectControlLinearizer::LowerCheckedTaggedToArrayIndex(
2691     Node* node, Node* frame_state) {
2692   CheckParameters const& params = CheckParametersOf(node->op());
2693   Node* value = node->InputAt(0);
2694 
2695   auto if_not_smi = __ MakeDeferredLabel();
2696   auto done = __ MakeLabel(MachineType::PointerRepresentation());
2697 
2698   __ GotoIfNot(ObjectIsSmi(value), &if_not_smi);
2699   // In the Smi case, just convert to intptr_t.
2700   __ Goto(&done, ChangeSmiToIntPtr(value));
2701 
2702   // In the non-Smi case, check the heap numberness, load the number and convert
2703   // to integer.
2704   __ Bind(&if_not_smi);
2705   auto if_not_heap_number = __ MakeDeferredLabel();
2706   Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
2707   Node* is_heap_number = __ TaggedEqual(value_map, __ HeapNumberMapConstant());
2708   __ GotoIfNot(is_heap_number, &if_not_heap_number);
2709 
2710   Node* number = __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
2711   number = BuildCheckedFloat64ToIndex(params.feedback(), number, frame_state);
2712   __ Goto(&done, number);
2713 
2714   __ Bind(&if_not_heap_number);
2715   auto calculate_index = __ MakeDeferredLabel();
2716   Node* value_instance_type =
2717       __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
2718   Node* is_string = __ Uint32LessThan(value_instance_type,
2719                                       __ Uint32Constant(FIRST_NONSTRING_TYPE));
2720   __ DeoptimizeIfNot(DeoptimizeReason::kNotAString, params.feedback(),
2721                      is_string, frame_state);
2722 
2723   MachineSignature::Builder builder(graph()->zone(), 1, 1);
2724   builder.AddReturn(MachineType::IntPtr());
2725   builder.AddParam(MachineType::TaggedPointer());
2726   Node* string_to_array_index_function =
2727       __ ExternalConstant(ExternalReference::string_to_array_index_function());
2728   auto call_descriptor =
2729       Linkage::GetSimplifiedCDescriptor(graph()->zone(), builder.Build());
2730   Node* index = __ Call(common()->Call(call_descriptor),
2731                         string_to_array_index_function, value);
2732 
2733   __ DeoptimizeIf(DeoptimizeReason::kNotAnArrayIndex, params.feedback(),
2734                   __ Word32Equal(index, __ Int32Constant(-1)), frame_state);
2735 
2736   __ Goto(&done, index);
2737 
2738   __ Bind(&done);
2739   return done.PhiAt(0);
2740 }
2741 
// Lowers CheckedTaggedToInt32: untags a Smi directly, or loads a
// HeapNumber's float64 payload and converts it with the checked
// float64 -> int32 builder; any other heap object deopts.
Node* EffectControlLinearizer::LowerCheckedTaggedToInt32(Node* node,
                                                         Node* frame_state) {
  const CheckMinusZeroParameters& params =
      CheckMinusZeroParametersOf(node->op());
  Node* value = node->InputAt(0);

  auto if_not_smi = __ MakeDeferredLabel();
  auto done = __ MakeLabel(MachineRepresentation::kWord32);
  Node* check = ObjectIsSmi(value);
  __ GotoIfNot(check, &if_not_smi);
  // In the Smi case, just convert to int32.
  __ Goto(&done, ChangeSmiToInt32(value));

  // In the non-Smi case, check the heap numberness, load the number and
  // convert to int32.
  __ Bind(&if_not_smi);
  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
  Node* check_map = __ TaggedEqual(value_map, __ HeapNumberMapConstant());
  __ DeoptimizeIfNot(DeoptimizeReason::kNotAHeapNumber, params.feedback(),
                     check_map, frame_state);
  Node* vfalse = __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
  vfalse = BuildCheckedFloat64ToInt32(params.mode(), params.feedback(), vfalse,
                                      frame_state);
  __ Goto(&done, vfalse);

  __ Bind(&done);
  return done.PhiAt(0);
}
2770 
// Lowers CheckedTaggedToInt64 — the 64-bit analogue of
// LowerCheckedTaggedToInt32: untags a Smi directly, or loads a HeapNumber's
// float64 payload and converts it with the checked float64 -> int64 builder;
// any other heap object deopts.
Node* EffectControlLinearizer::LowerCheckedTaggedToInt64(Node* node,
                                                         Node* frame_state) {
  const CheckMinusZeroParameters& params =
      CheckMinusZeroParametersOf(node->op());
  Node* value = node->InputAt(0);

  auto if_not_smi = __ MakeDeferredLabel();
  auto done = __ MakeLabel(MachineRepresentation::kWord64);

  Node* check = ObjectIsSmi(value);
  __ GotoIfNot(check, &if_not_smi);
  // In the Smi case, just convert to int64.
  __ Goto(&done, ChangeSmiToInt64(value));

  // In the non-Smi case, check the heap numberness, load the number and
  // convert to int64.
  __ Bind(&if_not_smi);
  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
  Node* check_map = __ TaggedEqual(value_map, __ HeapNumberMapConstant());
  __ DeoptimizeIfNot(DeoptimizeReason::kNotAHeapNumber, params.feedback(),
                     check_map, frame_state);
  Node* vfalse = __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
  vfalse = BuildCheckedFloat64ToInt64(params.mode(), params.feedback(), vfalse,
                                      frame_state);
  __ Goto(&done, vfalse);

  __ Bind(&done);
  return done.PhiAt(0);
}
2800 
// Verifies that the heap object {value} is a HeapNumber — or, depending on
// {mode}, also a Boolean or any Oddball — and loads its float64 payload.
// The static asserts below guarantee that Oddball::kToNumberRawOffset lines
// up with HeapNumber::kValueOffset, so a single HeapNumber-value load works
// for every accepted input.
Node* EffectControlLinearizer::BuildCheckedHeapNumberOrOddballToFloat64(
    CheckTaggedInputMode mode, const FeedbackSource& feedback, Node* value,
    Node* frame_state) {
  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
  Node* check_number = __ TaggedEqual(value_map, __ HeapNumberMapConstant());
  switch (mode) {
    case CheckTaggedInputMode::kNumber: {
      // Only HeapNumbers are acceptable.
      __ DeoptimizeIfNot(DeoptimizeReason::kNotAHeapNumber, feedback,
                         check_number, frame_state);
      break;
    }
    case CheckTaggedInputMode::kNumberOrBoolean: {
      auto check_done = __ MakeLabel();

      __ GotoIf(check_number, &check_done);
      // Not a HeapNumber: the only other accepted map is the Boolean map.
      __ DeoptimizeIfNot(DeoptimizeReason::kNotANumberOrBoolean, feedback,
                         __ TaggedEqual(value_map, __ BooleanMapConstant()),
                         frame_state);
      STATIC_ASSERT_FIELD_OFFSETS_EQUAL(HeapNumber::kValueOffset,
                                        Oddball::kToNumberRawOffset);
      __ Goto(&check_done);

      __ Bind(&check_done);
      break;
    }
    case CheckTaggedInputMode::kNumberOrOddball: {
      auto check_done = __ MakeLabel();

      __ GotoIf(check_number, &check_done);
      // Oddballs also contain the numeric value, so let us just check that
      // we have an oddball here.
      Node* instance_type =
          __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
      Node* check_oddball =
          __ Word32Equal(instance_type, __ Int32Constant(ODDBALL_TYPE));
      __ DeoptimizeIfNot(DeoptimizeReason::kNotANumberOrOddball, feedback,
                         check_oddball, frame_state);
      STATIC_ASSERT_FIELD_OFFSETS_EQUAL(HeapNumber::kValueOffset,
                                        Oddball::kToNumberRawOffset);
      __ Goto(&check_done);

      __ Bind(&check_done);
      break;
    }
  }
  // Safe for oddballs/booleans too, thanks to the offset equality asserted
  // above.
  return __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
}
2848 
// Lowers CheckedTaggedToFloat64: a Smi is untagged and widened to float64;
// a heap object is checked (HeapNumber, or oddball/boolean depending on the
// operator's mode) and its float64 payload loaded.
Node* EffectControlLinearizer::LowerCheckedTaggedToFloat64(Node* node,
                                                           Node* frame_state) {
  CheckTaggedInputParameters const& p =
      CheckTaggedInputParametersOf(node->op());
  Node* value = node->InputAt(0);

  auto if_smi = __ MakeLabel();
  auto done = __ MakeLabel(MachineRepresentation::kFloat64);

  Node* check = ObjectIsSmi(value);
  __ GotoIf(check, &if_smi);

  // Non-Smi case: check heap numberness (per mode) and load the number.
  Node* number = BuildCheckedHeapNumberOrOddballToFloat64(
      p.mode(), p.feedback(), value, frame_state);
  __ Goto(&done, number);

  // Smi case: untag to int32 and then widen to float64.
  __ Bind(&if_smi);
  Node* from_smi = ChangeSmiToInt32(value);
  from_smi = __ ChangeInt32ToFloat64(from_smi);
  __ Goto(&done, from_smi);

  __ Bind(&done);
  return done.PhiAt(0);
}
2875 
LowerCheckedTaggedToTaggedSigned(Node * node,Node * frame_state)2876 Node* EffectControlLinearizer::LowerCheckedTaggedToTaggedSigned(
2877     Node* node, Node* frame_state) {
2878   Node* value = node->InputAt(0);
2879   const CheckParameters& params = CheckParametersOf(node->op());
2880 
2881   Node* check = ObjectIsSmi(value);
2882   __ DeoptimizeIfNot(DeoptimizeReason::kNotASmi, params.feedback(), check,
2883                      frame_state);
2884 
2885   return value;
2886 }
2887 
LowerCheckedTaggedToTaggedPointer(Node * node,Node * frame_state)2888 Node* EffectControlLinearizer::LowerCheckedTaggedToTaggedPointer(
2889     Node* node, Node* frame_state) {
2890   Node* value = node->InputAt(0);
2891   const CheckParameters& params = CheckParametersOf(node->op());
2892 
2893   Node* check = ObjectIsSmi(value);
2894   __ DeoptimizeIf(DeoptimizeReason::kSmi, params.feedback(), check,
2895                   frame_state);
2896   return value;
2897 }
2898 
LowerCheckBigInt(Node * node,Node * frame_state)2899 Node* EffectControlLinearizer::LowerCheckBigInt(Node* node, Node* frame_state) {
2900   Node* value = node->InputAt(0);
2901   const CheckParameters& params = CheckParametersOf(node->op());
2902 
2903   // Check for Smi.
2904   Node* smi_check = ObjectIsSmi(value);
2905   __ DeoptimizeIf(DeoptimizeReason::kSmi, params.feedback(), smi_check,
2906                   frame_state);
2907 
2908   // Check for BigInt.
2909   Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
2910   Node* bi_check = __ TaggedEqual(value_map, __ BigIntMapConstant());
2911   __ DeoptimizeIfNot(DeoptimizeReason::kWrongInstanceType, params.feedback(),
2912                      bi_check, frame_state);
2913 
2914   return value;
2915 }
2916 
// Lowers BigIntAsUintN: truncates the raw 64-bit BigInt digit to the low
// {bits} bits (BigInt.asUintN semantics for values that fit in one digit).
// Only emitted on 64-bit architectures. Note that {frame_state} is unused
// here; this lowering cannot deoptimize.
Node* EffectControlLinearizer::LowerBigIntAsUintN(Node* node,
                                                  Node* frame_state) {
  DCHECK(machine()->Is64());

  const int bits = OpParameter<int>(node->op());
  DCHECK(0 <= bits && bits <= 64);

  if (bits == 64) {
    // Reduce to nop.
    return node->InputAt(0);
  } else {
    // Mask with 2^bits - 1; bits < 64 so the shift below is well-defined.
    const uint64_t msk = (1ULL << bits) - 1ULL;
    return __ Word64And(node->InputAt(0), __ Int64Constant(msk));
  }
}
2932 
// Lowers ChangeUint64ToBigInt: boxes a raw unsigned 64-bit word {value}
// into a freshly allocated BigInt. Only emitted on 64-bit architectures,
// where a single BigInt digit holds the full value.
Node* EffectControlLinearizer::LowerChangeUint64ToBigInt(Node* node) {
  DCHECK(machine()->Is64());

  Node* value = node->InputAt(0);
  Node* map = __ HeapConstant(factory()->bigint_map());
  // BigInts with value 0 must be of size 0 (canonical form).
  auto if_zerodigits = __ MakeLabel();
  auto if_onedigit = __ MakeLabel();
  auto done = __ MakeLabel(MachineRepresentation::kTagged);

  __ GotoIf(__ Word64Equal(value, __ IntPtrConstant(0)), &if_zerodigits);
  __ Goto(&if_onedigit);

  __ Bind(&if_onedigit);
  {
    // Non-zero: allocate a one-digit BigInt (sign 0, length 1) and store
    // {value} as its least-significant digit.
    Node* result = __ Allocate(AllocationType::kYoung,
                               __ IntPtrConstant(BigInt::SizeFor(1)));
    const auto bitfield = BigInt::LengthBits::update(0, 1);
    __ StoreField(AccessBuilder::ForMap(), result, map);
    __ StoreField(AccessBuilder::ForBigIntBitfield(), result,
                  __ IntPtrConstant(bitfield));
    // BigInts have no padding on 64 bit architectures with pointer compression.
    if (BigInt::HasOptionalPadding()) {
      __ StoreField(AccessBuilder::ForBigIntOptionalPadding(), result,
                    __ IntPtrConstant(0));
    }
    __ StoreField(AccessBuilder::ForBigIntLeastSignificantDigit64(), result,
                  value);
    __ Goto(&done, result);
  }

  __ Bind(&if_zerodigits);
  {
    // Zero: allocate the canonical zero BigInt (length 0, no digits).
    Node* result = __ Allocate(AllocationType::kYoung,
                               __ IntPtrConstant(BigInt::SizeFor(0)));
    const auto bitfield = BigInt::LengthBits::update(0, 0);
    __ StoreField(AccessBuilder::ForMap(), result, map);
    __ StoreField(AccessBuilder::ForBigIntBitfield(), result,
                  __ IntPtrConstant(bitfield));
    // BigInts have no padding on 64 bit architectures with pointer compression.
    if (BigInt::HasOptionalPadding()) {
      __ StoreField(AccessBuilder::ForBigIntOptionalPadding(), result,
                    __ IntPtrConstant(0));
    }
    __ Goto(&done, result);
  }

  __ Bind(&done);
  return done.PhiAt(0);
}
2983 
// Lowers TruncateBigIntToUint64: extracts the BigInt's value modulo 2^64.
// A zero bitfield (length 0) is the canonical zero BigInt; otherwise the
// least-significant 64-bit digit is read, and negated when the sign bit is
// set. Only emitted on 64-bit architectures.
Node* EffectControlLinearizer::LowerTruncateBigIntToUint64(Node* node) {
  DCHECK(machine()->Is64());

  auto done = __ MakeLabel(MachineRepresentation::kWord64);
  auto if_neg = __ MakeLabel();
  auto if_not_zero = __ MakeLabel();

  Node* value = node->InputAt(0);

  Node* bitfield = __ LoadField(AccessBuilder::ForBigIntBitfield(), value);
  __ GotoIfNot(__ Word32Equal(bitfield, __ Int32Constant(0)), &if_not_zero);
  __ Goto(&done, __ Int64Constant(0));

  __ Bind(&if_not_zero);
  {
    Node* lsd =
        __ LoadField(AccessBuilder::ForBigIntLeastSignificantDigit64(), value);
    Node* sign =
        __ Word32And(bitfield, __ Int32Constant(BigInt::SignBits::kMask));
    __ GotoIf(__ Word32Equal(sign, __ Int32Constant(1)), &if_neg);
    __ Goto(&done, lsd);

    // Negative BigInt: two's-complement negation yields the value mod 2^64.
    __ Bind(&if_neg);
    __ Goto(&done, __ Int64Sub(__ Int64Constant(0), lsd));
  }

  __ Bind(&done);
  return done.PhiAt(0);
}
3013 
// Lowers TruncateTaggedToWord32: converts a tagged number to int32. Smis
// are untagged directly; otherwise the float64 payload is loaded and
// truncated (JS ToInt32-style truncation).
Node* EffectControlLinearizer::LowerTruncateTaggedToWord32(Node* node) {
  Node* value = node->InputAt(0);

  auto if_not_smi = __ MakeDeferredLabel();
  auto done = __ MakeLabel(MachineRepresentation::kWord32);

  Node* check = ObjectIsSmi(value);
  __ GotoIfNot(check, &if_not_smi);
  __ Goto(&done, ChangeSmiToInt32(value));

  __ Bind(&if_not_smi);
  // HeapNumber::value and Oddball::to_number_raw live at the same offset,
  // so one raw load below covers both object kinds.
  STATIC_ASSERT_FIELD_OFFSETS_EQUAL(HeapNumber::kValueOffset,
                                    Oddball::kToNumberRawOffset);
  Node* vfalse = __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
  vfalse = __ TruncateFloat64ToWord32(vfalse);
  __ Goto(&done, vfalse);

  __ Bind(&done);
  return done.PhiAt(0);
}
3034 
// Lowers CheckedTruncateTaggedToWord32: like TruncateTaggedToWord32, but
// deoptimizes to {frame_state} when the non-Smi input is not a HeapNumber
// (or, depending on {params.mode()}, an Oddball).
Node* EffectControlLinearizer::LowerCheckedTruncateTaggedToWord32(
    Node* node, Node* frame_state) {
  const CheckTaggedInputParameters& params =
      CheckTaggedInputParametersOf(node->op());
  Node* value = node->InputAt(0);

  auto if_not_smi = __ MakeLabel();
  auto done = __ MakeLabel(MachineRepresentation::kWord32);

  Node* check = ObjectIsSmi(value);
  __ GotoIfNot(check, &if_not_smi);
  // In the Smi case, just convert to int32.
  __ Goto(&done, ChangeSmiToInt32(value));

  // Otherwise, check that it's a heap number or oddball and truncate the value
  // to int32.
  __ Bind(&if_not_smi);
  Node* number = BuildCheckedHeapNumberOrOddballToFloat64(
      params.mode(), params.feedback(), value, frame_state);
  number = __ TruncateFloat64ToWord32(number);
  __ Goto(&done, number);

  __ Bind(&done);
  return done.PhiAt(0);
}
3060 
LowerAllocate(Node * node)3061 Node* EffectControlLinearizer::LowerAllocate(Node* node) {
3062   Node* size = node->InputAt(0);
3063   AllocationType allocation = AllocationTypeOf(node->op());
3064   Node* new_node = __ Allocate(allocation, size);
3065   return new_node;
3066 }
3067 
// Lowers NumberToString by calling the NumberToString builtin. The call is
// marked kEliminatable since the conversion has no observable side effects
// beyond allocation.
Node* EffectControlLinearizer::LowerNumberToString(Node* node) {
  Node* argument = node->InputAt(0);

  Callable const callable =
      Builtins::CallableFor(isolate(), Builtins::kNumberToString);
  Operator::Properties properties = Operator::kEliminatable;
  CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
  auto call_descriptor = Linkage::GetStubCallDescriptor(
      graph()->zone(), callable.descriptor(),
      callable.descriptor().GetStackParameterCount(), flags, properties);
  return __ Call(call_descriptor, __ HeapConstant(callable.code()), argument,
                 __ NoContextConstant());
}
3081 
// Lowers ObjectIsArrayBufferView: returns 1 iff {value} is a heap object
// whose instance type lies in the JSArrayBufferView range. Smis take the
// deferred path and yield 0.
Node* EffectControlLinearizer::LowerObjectIsArrayBufferView(Node* node) {
  Node* value = node->InputAt(0);

  auto if_smi = __ MakeDeferredLabel();
  auto done = __ MakeLabel(MachineRepresentation::kBit);

  Node* check = ObjectIsSmi(value);
  __ GotoIf(check, &if_smi);

  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
  Node* value_instance_type =
      __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
  // Single unsigned compare implements the range check
  // FIRST_..._TYPE <= type <= LAST_..._TYPE (values below FIRST wrap to
  // large unsigned numbers after the subtraction).
  Node* vfalse = __ Uint32LessThan(
      __ Int32Sub(value_instance_type,
                  __ Int32Constant(FIRST_JS_ARRAY_BUFFER_VIEW_TYPE)),
      __ Int32Constant(LAST_JS_ARRAY_BUFFER_VIEW_TYPE -
                       FIRST_JS_ARRAY_BUFFER_VIEW_TYPE + 1));
  __ Goto(&done, vfalse);

  __ Bind(&if_smi);
  __ Goto(&done, __ Int32Constant(0));

  __ Bind(&done);
  return done.PhiAt(0);
}
3107 
// Lowers ObjectIsBigInt: returns 1 iff {value} is a heap object with the
// canonical BigInt map; Smis yield 0 via the deferred path.
Node* EffectControlLinearizer::LowerObjectIsBigInt(Node* node) {
  Node* value = node->InputAt(0);

  auto if_smi = __ MakeDeferredLabel();
  auto done = __ MakeLabel(MachineRepresentation::kBit);

  Node* check = ObjectIsSmi(value);
  __ GotoIf(check, &if_smi);
  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
  Node* vfalse = __ TaggedEqual(value_map, __ BigIntMapConstant());
  __ Goto(&done, vfalse);

  __ Bind(&if_smi);
  __ Goto(&done, __ Int32Constant(0));

  __ Bind(&done);
  return done.PhiAt(0);
}
3126 
// Lowers ObjectIsCallable: returns 1 iff {value} is a heap object whose
// map has the IsCallable bit set; Smis yield 0.
Node* EffectControlLinearizer::LowerObjectIsCallable(Node* node) {
  Node* value = node->InputAt(0);

  auto if_smi = __ MakeDeferredLabel();
  auto done = __ MakeLabel(MachineRepresentation::kBit);

  Node* check = ObjectIsSmi(value);
  __ GotoIf(check, &if_smi);

  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
  Node* value_bit_field =
      __ LoadField(AccessBuilder::ForMapBitField(), value_map);
  // (bit_field & mask) == mask, i.e. the callable bit is set.
  Node* vfalse = __ Word32Equal(
      __ Int32Constant(Map::Bits1::IsCallableBit::kMask),
      __ Word32And(value_bit_field,
                   __ Int32Constant(Map::Bits1::IsCallableBit::kMask)));
  __ Goto(&done, vfalse);

  __ Bind(&if_smi);
  __ Goto(&done, __ Int32Constant(0));

  __ Bind(&done);
  return done.PhiAt(0);
}
3151 
// Lowers ObjectIsConstructor: returns 1 iff {value} is a heap object whose
// map has the IsConstructor bit set; Smis yield 0.
Node* EffectControlLinearizer::LowerObjectIsConstructor(Node* node) {
  Node* value = node->InputAt(0);

  auto if_smi = __ MakeDeferredLabel();
  auto done = __ MakeLabel(MachineRepresentation::kBit);

  Node* check = ObjectIsSmi(value);
  __ GotoIf(check, &if_smi);

  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
  Node* value_bit_field =
      __ LoadField(AccessBuilder::ForMapBitField(), value_map);
  // (bit_field & mask) == mask, i.e. the constructor bit is set.
  Node* vfalse = __ Word32Equal(
      __ Int32Constant(Map::Bits1::IsConstructorBit::kMask),
      __ Word32And(value_bit_field,
                   __ Int32Constant(Map::Bits1::IsConstructorBit::kMask)));
  __ Goto(&done, vfalse);

  __ Bind(&if_smi);
  __ Goto(&done, __ Int32Constant(0));

  __ Bind(&done);
  return done.PhiAt(0);
}
3176 
// Lowers ObjectIsDetectableCallable: returns 1 iff {value} is a heap
// object that is callable AND not undetectable (e.g. not document.all).
// Smis yield 0.
Node* EffectControlLinearizer::LowerObjectIsDetectableCallable(Node* node) {
  Node* value = node->InputAt(0);

  auto if_smi = __ MakeDeferredLabel();
  auto done = __ MakeLabel(MachineRepresentation::kBit);

  Node* check = ObjectIsSmi(value);
  __ GotoIf(check, &if_smi);

  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
  Node* value_bit_field =
      __ LoadField(AccessBuilder::ForMapBitField(), value_map);
  // Mask out both bits at once and require exactly the callable bit:
  // callable must be set, undetectable must be clear.
  Node* vfalse = __ Word32Equal(
      __ Int32Constant(Map::Bits1::IsCallableBit::kMask),
      __ Word32And(value_bit_field,
                   __ Int32Constant((Map::Bits1::IsCallableBit::kMask) |
                                    (Map::Bits1::IsUndetectableBit::kMask))));
  __ Goto(&done, vfalse);

  __ Bind(&if_smi);
  __ Goto(&done, __ Int32Constant(0));

  __ Bind(&done);
  return done.PhiAt(0);
}
3202 
LowerNumberIsFloat64Hole(Node * node)3203 Node* EffectControlLinearizer::LowerNumberIsFloat64Hole(Node* node) {
3204   Node* value = node->InputAt(0);
3205   Node* check = __ Word32Equal(__ Float64ExtractHighWord32(value),
3206                                __ Int32Constant(kHoleNanUpper32));
3207   return check;
3208 }
3209 
LowerNumberIsFinite(Node * node)3210 Node* EffectControlLinearizer::LowerNumberIsFinite(Node* node) {
3211   Node* number = node->InputAt(0);
3212   Node* diff = __ Float64Sub(number, number);
3213   Node* check = __ Float64Equal(diff, diff);
3214   return check;
3215 }
3216 
// Lowers ObjectIsFiniteNumber: 1 for Smis (always finite), 0 for non-
// HeapNumber heap objects, and the (x - x) == (x - x) finiteness test for
// HeapNumbers (NaN/Infinity make the difference NaN).
Node* EffectControlLinearizer::LowerObjectIsFiniteNumber(Node* node) {
  Node* object = node->InputAt(0);
  Node* zero = __ Int32Constant(0);
  Node* one = __ Int32Constant(1);

  auto done = __ MakeLabel(MachineRepresentation::kBit);

  // Check if {object} is a Smi.
  __ GotoIf(ObjectIsSmi(object), &done, one);

  // Check if {object} is a HeapNumber.
  Node* value_map = __ LoadField(AccessBuilder::ForMap(), object);
  __ GotoIfNot(__ TaggedEqual(value_map, __ HeapNumberMapConstant()), &done,
               zero);

  // {object} is a HeapNumber.
  Node* value = __ LoadField(AccessBuilder::ForHeapNumberValue(), object);
  Node* diff = __ Float64Sub(value, value);
  Node* check = __ Float64Equal(diff, diff);
  __ Goto(&done, check);

  __ Bind(&done);
  return done.PhiAt(0);
}
3241 
LowerNumberIsInteger(Node * node)3242 Node* EffectControlLinearizer::LowerNumberIsInteger(Node* node) {
3243   Node* number = node->InputAt(0);
3244   Node* trunc = BuildFloat64RoundTruncate(number);
3245   Node* diff = __ Float64Sub(number, trunc);
3246   Node* check = __ Float64Equal(diff, __ Float64Constant(0));
3247   return check;
3248 }
3249 
// Lowers ObjectIsInteger: 1 for Smis, 0 for non-HeapNumber heap objects,
// and the truncate-and-compare integer test for HeapNumbers.
Node* EffectControlLinearizer::LowerObjectIsInteger(Node* node) {
  Node* object = node->InputAt(0);
  Node* zero = __ Int32Constant(0);
  Node* one = __ Int32Constant(1);

  auto done = __ MakeLabel(MachineRepresentation::kBit);

  // Check if {object} is a Smi.
  __ GotoIf(ObjectIsSmi(object), &done, one);

  // Check if {object} is a HeapNumber.
  Node* value_map = __ LoadField(AccessBuilder::ForMap(), object);
  __ GotoIfNot(__ TaggedEqual(value_map, __ HeapNumberMapConstant()), &done,
               zero);

  // {object} is a HeapNumber. It is an integer iff truncation towards zero
  // does not change its value.
  Node* value = __ LoadField(AccessBuilder::ForHeapNumberValue(), object);
  Node* trunc = BuildFloat64RoundTruncate(value);
  Node* diff = __ Float64Sub(value, trunc);
  Node* check = __ Float64Equal(diff, __ Float64Constant(0));
  __ Goto(&done, check);

  __ Bind(&done);
  return done.PhiAt(0);
}
3275 
// Lowers NumberIsSafeInteger: true iff {number} is an integer whose
// magnitude does not exceed kMaxSafeInteger (2^53 - 1).
Node* EffectControlLinearizer::LowerNumberIsSafeInteger(Node* node) {
  Node* number = node->InputAt(0);
  Node* zero = __ Int32Constant(0);
  auto done = __ MakeLabel(MachineRepresentation::kBit);

  // First require integrality: truncation must not change the value.
  Node* trunc = BuildFloat64RoundTruncate(number);
  Node* diff = __ Float64Sub(number, trunc);
  Node* check = __ Float64Equal(diff, __ Float64Constant(0));
  __ GotoIfNot(check, &done, zero);
  // Then require |number| <= kMaxSafeInteger.
  Node* in_range = __ Float64LessThanOrEqual(
      __ Float64Abs(trunc), __ Float64Constant(kMaxSafeInteger));
  __ Goto(&done, in_range);

  __ Bind(&done);
  return done.PhiAt(0);
}
3292 
// Lowers ObjectIsSafeInteger: 1 for Smis (always safe integers), 0 for
// non-HeapNumber heap objects, and the integer + |x| <= kMaxSafeInteger
// test for HeapNumbers.
Node* EffectControlLinearizer::LowerObjectIsSafeInteger(Node* node) {
  Node* object = node->InputAt(0);
  Node* zero = __ Int32Constant(0);
  Node* one = __ Int32Constant(1);

  auto done = __ MakeLabel(MachineRepresentation::kBit);

  // Check if {object} is a Smi.
  __ GotoIf(ObjectIsSmi(object), &done, one);

  // Check if {object} is a HeapNumber.
  Node* value_map = __ LoadField(AccessBuilder::ForMap(), object);
  __ GotoIfNot(__ TaggedEqual(value_map, __ HeapNumberMapConstant()), &done,
               zero);

  // {object} is a HeapNumber: require integrality, then the safe range.
  Node* value = __ LoadField(AccessBuilder::ForHeapNumberValue(), object);
  Node* trunc = BuildFloat64RoundTruncate(value);
  Node* diff = __ Float64Sub(value, trunc);
  Node* check = __ Float64Equal(diff, __ Float64Constant(0));
  __ GotoIfNot(check, &done, zero);
  Node* in_range = __ Float64LessThanOrEqual(
      __ Float64Abs(trunc), __ Float64Constant(kMaxSafeInteger));
  __ Goto(&done, in_range);

  __ Bind(&done);
  return done.PhiAt(0);
}
3321 
namespace {

// There is no (currently) available constexpr version of bit_cast, so we have
// to make do with constructing the -0.0 bits manually (by setting the sign bit
// to 1 and everything else to 0).
// TODO(leszeks): Revisit when upgrading to C++20.
// The resulting 64-bit pattern is 0x8000'0000'0000'0000.
constexpr int32_t kMinusZeroLoBits = static_cast<int32_t>(0);
constexpr int32_t kMinusZeroHiBits = static_cast<int32_t>(1) << 31;
constexpr int64_t kMinusZeroBits =
    (static_cast<uint64_t>(kMinusZeroHiBits) << 32) | kMinusZeroLoBits;

}  // namespace
3334 
// Lowers ObjectIsMinusZero: 0 for Smis (Smi zero is +0) and for non-
// HeapNumber heap objects; for HeapNumbers, a bitwise comparison against
// the -0.0 pattern (a float compare would conflate -0 and +0).
Node* EffectControlLinearizer::LowerObjectIsMinusZero(Node* node) {
  Node* value = node->InputAt(0);
  Node* zero = __ Int32Constant(0);

  auto done = __ MakeLabel(MachineRepresentation::kBit);

  // Check if {value} is a Smi.
  __ GotoIf(ObjectIsSmi(value), &done, zero);

  // Check if {value} is a HeapNumber.
  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
  __ GotoIfNot(__ TaggedEqual(value_map, __ HeapNumberMapConstant()), &done,
               zero);

  // Check if {value} contains -0.
  Node* value_value = __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
  if (machine()->Is64()) {
    // 64-bit: single bitwise compare of the whole word.
    Node* value64 = __ BitcastFloat64ToInt64(value_value);
    __ Goto(&done, __ Word64Equal(value64, __ Int64Constant(kMinusZeroBits)));
  } else {
    // 32-bit: compare low and high halves separately.
    Node* value_lo = __ Float64ExtractLowWord32(value_value);
    __ GotoIfNot(__ Word32Equal(value_lo, __ Int32Constant(kMinusZeroLoBits)),
                 &done, zero);
    Node* value_hi = __ Float64ExtractHighWord32(value_value);
    __ Goto(&done,
            __ Word32Equal(value_hi, __ Int32Constant(kMinusZeroHiBits)));
  }

  __ Bind(&done);
  return done.PhiAt(0);
}
3366 
// Lowers NumberIsMinusZero for a raw float64 input: bitwise comparison
// against the -0.0 bit pattern (a float compare would treat -0 == +0).
Node* EffectControlLinearizer::LowerNumberIsMinusZero(Node* node) {
  Node* value = node->InputAt(0);

  if (machine()->Is64()) {
    // 64-bit: single bitwise compare of the whole word.
    Node* value64 = __ BitcastFloat64ToInt64(value);
    return __ Word64Equal(value64, __ Int64Constant(kMinusZeroBits));
  } else {
    // 32-bit: compare low and high halves separately.
    auto done = __ MakeLabel(MachineRepresentation::kBit);

    Node* value_lo = __ Float64ExtractLowWord32(value);
    __ GotoIfNot(__ Word32Equal(value_lo, __ Int32Constant(kMinusZeroLoBits)),
                 &done, __ Int32Constant(0));
    Node* value_hi = __ Float64ExtractHighWord32(value);
    __ Goto(&done,
            __ Word32Equal(value_hi, __ Int32Constant(kMinusZeroHiBits)));

    __ Bind(&done);
    return done.PhiAt(0);
  }
}
3387 
// Lowers ObjectIsNaN: 0 for Smis and non-HeapNumber heap objects; for
// HeapNumbers, exploits that NaN is the only value unequal to itself.
Node* EffectControlLinearizer::LowerObjectIsNaN(Node* node) {
  Node* value = node->InputAt(0);
  Node* zero = __ Int32Constant(0);

  auto done = __ MakeLabel(MachineRepresentation::kBit);

  // Check if {value} is a Smi.
  __ GotoIf(ObjectIsSmi(value), &done, zero);

  // Check if {value} is a HeapNumber.
  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
  __ GotoIfNot(__ TaggedEqual(value_map, __ HeapNumberMapConstant()), &done,
               zero);

  // Check if {value} contains a NaN: invert the self-equality result.
  Node* value_value = __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
  __ Goto(&done,
          __ Word32Equal(__ Float64Equal(value_value, value_value), zero));

  __ Bind(&done);
  return done.PhiAt(0);
}
3410 
LowerNumberIsNaN(Node * node)3411 Node* EffectControlLinearizer::LowerNumberIsNaN(Node* node) {
3412   Node* number = node->InputAt(0);
3413   Node* diff = __ Float64Equal(number, number);
3414   Node* check = __ Word32Equal(diff, __ Int32Constant(0));
3415   return check;
3416 }
3417 
// Lowers ObjectIsNonCallable: returns 1 iff {value} is a JSReceiver whose
// map does NOT have the IsCallable bit set. Smis and primitives yield 0
// (they are not "non-callable objects", they are not objects at all).
Node* EffectControlLinearizer::LowerObjectIsNonCallable(Node* node) {
  Node* value = node->InputAt(0);

  auto if_primitive = __ MakeDeferredLabel();
  auto done = __ MakeLabel(MachineRepresentation::kBit);

  Node* check0 = ObjectIsSmi(value);
  __ GotoIf(check0, &if_primitive);

  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
  Node* value_instance_type =
      __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
  // JSReceiver types occupy the top of the instance-type range, so a single
  // lower-bound compare suffices.
  STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
  Node* check1 = __ Uint32LessThanOrEqual(
      __ Uint32Constant(FIRST_JS_RECEIVER_TYPE), value_instance_type);
  __ GotoIfNot(check1, &if_primitive);

  Node* value_bit_field =
      __ LoadField(AccessBuilder::ForMapBitField(), value_map);
  // Callable bit must be clear.
  Node* check2 = __ Word32Equal(
      __ Int32Constant(0),
      __ Word32And(value_bit_field,
                   __ Int32Constant(Map::Bits1::IsCallableBit::kMask)));
  __ Goto(&done, check2);

  __ Bind(&if_primitive);
  __ Goto(&done, __ Int32Constant(0));

  __ Bind(&done);
  return done.PhiAt(0);
}
3449 
// Lowers ObjectIsNumber: 1 for Smis, otherwise 1 iff the heap object has
// the HeapNumber map.
Node* EffectControlLinearizer::LowerObjectIsNumber(Node* node) {
  Node* value = node->InputAt(0);

  auto if_smi = __ MakeLabel();
  auto done = __ MakeLabel(MachineRepresentation::kBit);

  __ GotoIf(ObjectIsSmi(value), &if_smi);
  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
  __ Goto(&done, __ TaggedEqual(value_map, __ HeapNumberMapConstant()));

  __ Bind(&if_smi);
  __ Goto(&done, __ Int32Constant(1));

  __ Bind(&done);
  return done.PhiAt(0);
}
3466 
// Lowers ObjectIsReceiver: 1 iff {value} is a JSReceiver (object or
// callable), identified by instance type >= FIRST_JS_RECEIVER_TYPE.
// Smis yield 0.
Node* EffectControlLinearizer::LowerObjectIsReceiver(Node* node) {
  Node* value = node->InputAt(0);

  auto if_smi = __ MakeDeferredLabel();
  auto done = __ MakeLabel(MachineRepresentation::kBit);

  __ GotoIf(ObjectIsSmi(value), &if_smi);

  // JSReceiver types occupy the top of the instance-type range, so a single
  // lower-bound compare suffices.
  STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
  Node* value_instance_type =
      __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
  Node* result = __ Uint32LessThanOrEqual(
      __ Uint32Constant(FIRST_JS_RECEIVER_TYPE), value_instance_type);
  __ Goto(&done, result);

  __ Bind(&if_smi);
  __ Goto(&done, __ Int32Constant(0));

  __ Bind(&done);
  return done.PhiAt(0);
}
3489 
LowerObjectIsSmi(Node * node)3490 Node* EffectControlLinearizer::LowerObjectIsSmi(Node* node) {
3491   Node* value = node->InputAt(0);
3492   return ObjectIsSmi(value);
3493 }
3494 
// Lowers ObjectIsString: 1 iff {value} is a heap object whose instance
// type is below FIRST_NONSTRING_TYPE (string types come first in the
// instance-type enum). Smis yield 0.
Node* EffectControlLinearizer::LowerObjectIsString(Node* node) {
  Node* value = node->InputAt(0);

  auto if_smi = __ MakeDeferredLabel();
  auto done = __ MakeLabel(MachineRepresentation::kBit);

  Node* check = ObjectIsSmi(value);
  __ GotoIf(check, &if_smi);
  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
  Node* value_instance_type =
      __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
  Node* vfalse = __ Uint32LessThan(value_instance_type,
                                   __ Uint32Constant(FIRST_NONSTRING_TYPE));
  __ Goto(&done, vfalse);

  __ Bind(&if_smi);
  __ Goto(&done, __ Int32Constant(0));

  __ Bind(&done);
  return done.PhiAt(0);
}
3516 
// Lowers ObjectIsSymbol: 1 iff {value} is a heap object with instance type
// SYMBOL_TYPE; Smis yield 0.
Node* EffectControlLinearizer::LowerObjectIsSymbol(Node* node) {
  Node* value = node->InputAt(0);

  auto if_smi = __ MakeDeferredLabel();
  auto done = __ MakeLabel(MachineRepresentation::kBit);

  Node* check = ObjectIsSmi(value);
  __ GotoIf(check, &if_smi);
  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
  Node* value_instance_type =
      __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
  Node* vfalse =
      __ Word32Equal(value_instance_type, __ Uint32Constant(SYMBOL_TYPE));
  __ Goto(&done, vfalse);

  __ Bind(&if_smi);
  __ Goto(&done, __ Int32Constant(0));

  __ Bind(&done);
  return done.PhiAt(0);
}
3538 
// Lowers ObjectIsUndetectable: 1 iff {value} is a heap object whose map
// has the IsUndetectable bit set (e.g. document.all); Smis yield 0.
Node* EffectControlLinearizer::LowerObjectIsUndetectable(Node* node) {
  Node* value = node->InputAt(0);

  auto if_smi = __ MakeDeferredLabel();
  auto done = __ MakeLabel(MachineRepresentation::kBit);

  Node* check = ObjectIsSmi(value);
  __ GotoIf(check, &if_smi);

  Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
  Node* value_bit_field =
      __ LoadField(AccessBuilder::ForMapBitField(), value_map);
  // Double negation normalizes "bit set" to the kBit representation:
  // ((bit_field & mask) == 0) == 0.
  Node* vfalse = __ Word32Equal(
      __ Word32Equal(
          __ Int32Constant(0),
          __ Word32And(value_bit_field,
                       __ Int32Constant(Map::Bits1::IsUndetectableBit::kMask))),
      __ Int32Constant(0));
  __ Goto(&done, vfalse);

  __ Bind(&if_smi);
  __ Goto(&done, __ Int32Constant(0));

  __ Bind(&done);
  return done.PhiAt(0);
}
3565 
// Lowers TypeOf by calling the Typeof builtin; the call cannot allocate
// (kNoAllocate) and is eliminatable if its result is unused.
Node* EffectControlLinearizer::LowerTypeOf(Node* node) {
  Node* obj = node->InputAt(0);
  Callable const callable = Builtins::CallableFor(isolate(), Builtins::kTypeof);
  Operator::Properties const properties = Operator::kEliminatable;
  CallDescriptor::Flags const flags = CallDescriptor::kNoAllocate;
  auto call_descriptor = Linkage::GetStubCallDescriptor(
      graph()->zone(), callable.descriptor(),
      callable.descriptor().GetStackParameterCount(), flags, properties);
  return __ Call(call_descriptor, __ HeapConstant(callable.code()), obj,
                 __ NoContextConstant());
}
3577 
// Lowers TierUpCheck: inspects the feedback vector's optimization state
// and, when top-tier code or a compile-optimized marker is present, tail-
// calls the InterpreterEntryTrampoline to act on it. The common case
// (no marker) falls through with no work.
void EffectControlLinearizer::LowerTierUpCheck(Node* node) {
  TierUpCheckNode n(node);
  TNode<FeedbackVector> vector = n.feedback_vector();

  Node* optimization_state =
      __ LoadField(AccessBuilder::ForFeedbackVectorFlags(), vector);

  // TODO(jgruber): The branch introduces a sequence of spills before the
  // branch (and restores at `fallthrough`) that are completely unnecessary
  // since the IfFalse continuation ends in a tail call. Investigate how to
  // avoid these and fix it.

  auto fallthrough = __ MakeLabel();
  auto has_optimized_code_or_marker = __ MakeDeferredLabel();
  // BranchHint::kTrue: the no-marker fast path is expected.
  __ BranchWithHint(
      __ Word32Equal(
          __ Word32And(optimization_state,
                       __ Uint32Constant(
                           FeedbackVector::
                               kHasNoTopTierCodeOrCompileOptimizedMarkerMask)),
          __ Int32Constant(0)),
      &fallthrough, &has_optimized_code_or_marker, BranchHint::kTrue);

  __ Bind(&has_optimized_code_or_marker);

  // The optimization marker field contains a non-trivial value, and some
  // action has to be taken. For example, perhaps tier-up has been requested
  // and we need to kick off a compilation job; or optimized code is available
  // and should be tail-called.
  //
  // Currently we delegate these tasks to the InterpreterEntryTrampoline.
  // TODO(jgruber,v8:8888): Consider a dedicated builtin instead.

  TNode<HeapObject> code =
      __ HeapConstant(BUILTIN_CODE(isolate(), InterpreterEntryTrampoline));

  JSTrampolineDescriptor descriptor;
  CallDescriptor::Flags flags = CallDescriptor::kFixedTargetRegister |
                                CallDescriptor::kIsTailCallForTierUp;
  auto call_descriptor = Linkage::GetStubCallDescriptor(
      graph()->zone(), descriptor, descriptor.GetStackParameterCount(), flags,
      Operator::kNoProperties);
  Node* nodes[] = {code,        n.target(),  n.new_target(), n.input_count(),
                   n.context(), __ effect(), __ control()};

#ifdef DEBUG
  // Sanity-check that the argument array matches the descriptor's arity.
  static constexpr int kCodeContextEffectControl = 4;
  DCHECK_EQ(arraysize(nodes),
            descriptor.GetParameterCount() + kCodeContextEffectControl);
#endif  // DEBUG

  __ TailCall(call_descriptor, arraysize(nodes), nodes);

  __ Bind(&fallthrough);
}
3633 
// Lowers UpdateInterruptBudget: adds the statically-known {n.delta()} to
// the feedback cell's interrupt budget. For negative deltas (back edges /
// returns), additionally calls the BytecodeBudgetInterruptFromCode builtin
// once the budget drops below zero.
void EffectControlLinearizer::LowerUpdateInterruptBudget(Node* node) {
  UpdateInterruptBudgetNode n(node);
  TNode<FeedbackCell> feedback_cell = n.feedback_cell();
  TNode<Int32T> budget = __ LoadField<Int32T>(
      AccessBuilder::ForFeedbackCellInterruptBudget(), feedback_cell);
  Node* new_budget = __ Int32Add(budget, __ Int32Constant(n.delta()));
  __ StoreField(AccessBuilder::ForFeedbackCellInterruptBudget(), feedback_cell,
                new_budget);
  if (n.delta() < 0) {
    // Only negative deltas can exhaust the budget, so the check is emitted
    // conditionally at compile time.
    auto next = __ MakeLabel();
    auto if_budget_exhausted = __ MakeDeferredLabel();
    __ Branch(__ Int32LessThan(new_budget, __ Int32Constant(0)),
              &if_budget_exhausted, &next);

    __ Bind(&if_budget_exhausted);
    CallBuiltin(Builtins::kBytecodeBudgetInterruptFromCode,
                node->op()->properties(), feedback_cell);
    __ Goto(&next);

    __ Bind(&next);
  }
}
3656 
// Lowers ToBoolean by calling the ToBoolean builtin; the call cannot
// allocate (kNoAllocate) and is eliminatable if its result is unused.
Node* EffectControlLinearizer::LowerToBoolean(Node* node) {
  Node* obj = node->InputAt(0);
  Callable const callable =
      Builtins::CallableFor(isolate(), Builtins::kToBoolean);
  Operator::Properties const properties = Operator::kEliminatable;
  CallDescriptor::Flags const flags = CallDescriptor::kNoAllocate;
  auto call_descriptor = Linkage::GetStubCallDescriptor(
      graph()->zone(), callable.descriptor(),
      callable.descriptor().GetStackParameterCount(), flags, properties);
  return __ Call(call_descriptor, __ HeapConstant(callable.code()), obj,
                 __ NoContextConstant());
}
3669 
// Lowers ArgumentsLength: produces the actual argument count as a Smi.
// Without the arguments adaptor, the count is read straight from the
// frame; otherwise it comes from the adaptor frame when one is present,
// or falls back to the formal parameter count.
Node* EffectControlLinearizer::LowerArgumentsLength(Node* node) {
#ifdef V8_NO_ARGUMENTS_ADAPTOR
  return ChangeIntPtrToSmi(
      __ Load(MachineType::Pointer(), __ LoadFramePointer(),
              __ IntPtrConstant(StandardFrameConstants::kArgCOffset)));
#else
  auto done = __ MakeLabel(MachineRepresentation::kTaggedSigned);
  Node* frame = __ LoadFramePointer();

  Node* arguments_frame = NodeProperties::GetValueInput(node, 0);
  int formal_parameter_count = FormalParameterCountOf(node->op());
  DCHECK_LE(0, formal_parameter_count);

  // The ArgumentsLength node is computing the actual number of arguments.
  // We have to distinguish the case when there is an arguments adaptor frame
  // (i.e., arguments_frame != LoadFramePointer()).
  auto if_adaptor_frame = __ MakeLabel();
  __ GotoIf(__ TaggedEqual(arguments_frame, frame), &done,
            __ SmiConstant(formal_parameter_count));
  __ Goto(&if_adaptor_frame);

  __ Bind(&if_adaptor_frame);
  // The adaptor frame stores the length as an already-tagged Smi.
  Node* arguments_length = __ BitcastWordToTaggedSigned(__ Load(
      MachineType::Pointer(), arguments_frame,
      __ IntPtrConstant(ArgumentsAdaptorFrameConstants::kLengthOffset)));
  __ Goto(&done, arguments_length);
  __ Bind(&done);
  return done.PhiAt(0);
#endif
}
3700 
// Lowers RestLength: the number of rest parameters, computed as
// max(0, actual_parameter_count - formal_parameter_count), as a Smi.
Node* EffectControlLinearizer::LowerRestLength(Node* node) {
  int formal_parameter_count = FormalParameterCountOf(node->op());
  DCHECK_LE(0, formal_parameter_count);

  auto done = __ MakeLabel(MachineRepresentation::kTaggedSigned);
  Node* frame = __ LoadFramePointer();

#ifdef V8_NO_ARGUMENTS_ADAPTOR
  // Actual argument count is read straight from the current frame.
  Node* arguments_length = ChangeIntPtrToSmi(
      __ Load(MachineType::Pointer(), frame,
              __ IntPtrConstant(StandardFrameConstants::kArgCOffset)));
#else
  Node* arguments_frame = NodeProperties::GetValueInput(node, 0);

  // The RestLength node is computing the number of rest parameters,
  // which is max(0, actual_parameter_count - formal_parameter_count).
  // We have to distinguish the case, when there is an arguments adaptor frame
  // (i.e., arguments_frame != LoadFramePointer()).
  auto if_adaptor_frame = __ MakeLabel();
  __ GotoIf(__ TaggedEqual(arguments_frame, frame), &done, __ SmiConstant(0));
  __ Goto(&if_adaptor_frame);

  __ Bind(&if_adaptor_frame);
  Node* arguments_length = __ BitcastWordToTaggedSigned(__ Load(
      MachineType::Pointer(), arguments_frame,
      __ IntPtrConstant(ArgumentsAdaptorFrameConstants::kLengthOffset)));
#endif

  // Clamp the difference at zero: fewer actual than formal parameters
  // means no rest arguments.
  Node* rest_length =
      __ SmiSub(arguments_length, __ SmiConstant(formal_parameter_count));
  __ GotoIf(__ SmiLessThan(rest_length, __ SmiConstant(0)), &done,
            __ SmiConstant(0));
  __ Goto(&done, rest_length);

  __ Bind(&done);
  return done.PhiAt(0);
}
3738 
// Lowers the ArgumentsFrame operator: returns the frame pointer from which
// arguments should be read. If the caller frame is an arguments adaptor
// frame, that frame is returned; otherwise the current frame pointer is.
Node* EffectControlLinearizer::LowerArgumentsFrame(Node* node) {
  auto done = __ MakeLabel(MachineType::PointerRepresentation());

  Node* frame = __ LoadFramePointer();
  // Walk one frame up via the saved caller frame pointer.
  Node* parent_frame =
      __ Load(MachineType::Pointer(), frame,
              __ IntPtrConstant(StandardFrameConstants::kCallerFPOffset));
  // The context slot doubles as a frame-type marker for non-standard frames.
  Node* parent_frame_type = __ Load(
      MachineType::IntPtr(), parent_frame,
      __ IntPtrConstant(CommonFrameConstants::kContextOrFrameTypeOffset));

  __ GotoIf(__ IntPtrEqual(parent_frame_type,
                           __ IntPtrConstant(StackFrame::TypeToMarker(
                               StackFrame::ARGUMENTS_ADAPTOR))),
            &done, parent_frame);
  __ Goto(&done, frame);

  __ Bind(&done);
  return done.PhiAt(0);
}
3759 
// Lowers NewDoubleElements to an inline allocation of a FixedDoubleArray of
// the given length, with every element pre-initialized to the hole.
Node* EffectControlLinearizer::LowerNewDoubleElements(Node* node) {
  AllocationType const allocation = AllocationTypeOf(node->op());
  Node* length = node->InputAt(0);

  auto done = __ MakeLabel(MachineRepresentation::kTaggedPointer);
  // A zero-length backing store is represented by the canonical
  // empty_fixed_array singleton; no allocation is needed.
  Node* zero_length = __ IntPtrEqual(length, __ IntPtrConstant(0));
  __ GotoIf(zero_length, &done,
            __ HeapConstant(factory()->empty_fixed_array()));

  // Compute the effective size of the backing store.
  Node* size = __ IntAdd(__ WordShl(length, __ IntPtrConstant(kDoubleSizeLog2)),
                         __ IntPtrConstant(FixedDoubleArray::kHeaderSize));

  // Allocate the result and initialize the header.
  Node* result = __ Allocate(allocation, size);
  __ StoreField(AccessBuilder::ForMap(), result,
                __ FixedDoubleArrayMapConstant());
  __ StoreField(AccessBuilder::ForFixedArrayLength(), result,
                ChangeIntPtrToSmi(length));

  // Initialize the backing store with holes.
  // The raw hole (NaN) bit pattern is read from the TheHole oddball's number
  // payload; the static assert guarantees the two fields share an offset, so
  // the HeapNumber field accessor is valid on the oddball.
  STATIC_ASSERT_FIELD_OFFSETS_EQUAL(HeapNumber::kValueOffset,
                                    Oddball::kToNumberRawOffset);
  Node* the_hole =
      __ LoadField(AccessBuilder::ForHeapNumberValue(), __ TheHoleConstant());
  auto loop = __ MakeLoopLabel(MachineType::PointerRepresentation());
  __ Goto(&loop, __ IntPtrConstant(0));
  __ Bind(&loop);
  {
    // Check if we've initialized everything.
    Node* index = loop.PhiAt(0);
    Node* check = __ UintLessThan(index, length);
    __ GotoIfNot(check, &done, result);

    // Raw Float64 stores never need a write barrier.
    ElementAccess const access = {kTaggedBase, FixedDoubleArray::kHeaderSize,
                                  Type::NumberOrHole(), MachineType::Float64(),
                                  kNoWriteBarrier};
    __ StoreElement(access, result, index, the_hole);

    // Advance the {index}.
    index = __ IntAdd(index, __ IntPtrConstant(1));
    __ Goto(&loop, index);
  }

  __ Bind(&done);
  return done.PhiAt(0);
}
3807 
// Lowers NewSmiOrObjectElements to an inline allocation of a FixedArray of
// the given length, with every element pre-initialized to the hole.
Node* EffectControlLinearizer::LowerNewSmiOrObjectElements(Node* node) {
  AllocationType const allocation = AllocationTypeOf(node->op());
  Node* length = node->InputAt(0);

  auto done = __ MakeLabel(MachineRepresentation::kTaggedPointer);
  // A zero-length backing store is represented by the canonical
  // empty_fixed_array singleton; no allocation is needed.
  Node* zero_length = __ IntPtrEqual(length, __ IntPtrConstant(0));
  __ GotoIf(zero_length, &done,
            __ HeapConstant(factory()->empty_fixed_array()));

  // Compute the effective size of the backing store.
  Node* size = __ IntAdd(__ WordShl(length, __ IntPtrConstant(kTaggedSizeLog2)),
                         __ IntPtrConstant(FixedArray::kHeaderSize));

  // Allocate the result and initialize the header.
  Node* result = __ Allocate(allocation, size);
  __ StoreField(AccessBuilder::ForMap(), result, __ FixedArrayMapConstant());
  __ StoreField(AccessBuilder::ForFixedArrayLength(), result,
                ChangeIntPtrToSmi(length));

  // Initialize the backing store with holes.
  Node* the_hole = __ TheHoleConstant();
  auto loop = __ MakeLoopLabel(MachineType::PointerRepresentation());
  __ Goto(&loop, __ IntPtrConstant(0));
  __ Bind(&loop);
  {
    // Check if we've initialized everything.
    Node* index = loop.PhiAt(0);
    Node* check = __ UintLessThan(index, length);
    __ GotoIfNot(check, &done, result);

    // Storing "the_hole" doesn't need a write barrier.
    ElementAccess const access = {kTaggedBase, FixedArray::kHeaderSize,
                                  Type::Any(), MachineType::AnyTagged(),
                                  kNoWriteBarrier};
    __ StoreElement(access, result, index, the_hole);

    // Advance the {index}.
    index = __ IntAdd(index, __ IntPtrConstant(1));
    __ Goto(&loop, index);
  }

  __ Bind(&done);
  return done.PhiAt(0);
}
3852 
LowerNewArgumentsElements(Node * node)3853 Node* EffectControlLinearizer::LowerNewArgumentsElements(Node* node) {
3854   const NewArgumentsElementsParameters& parameters =
3855       NewArgumentsElementsParametersOf(node->op());
3856   CreateArgumentsType type = parameters.arguments_type();
3857   Operator::Properties const properties = node->op()->properties();
3858   CallDescriptor::Flags const flags = CallDescriptor::kNoFlags;
3859   Node* frame = NodeProperties::GetValueInput(node, 0);
3860   Node* arguments_count = NodeProperties::GetValueInput(node, 1);
3861   Builtins::Name builtin_name;
3862   switch (type) {
3863     case CreateArgumentsType::kMappedArguments:
3864       builtin_name = Builtins::kNewSloppyArgumentsElements;
3865       break;
3866     case CreateArgumentsType::kUnmappedArguments:
3867       builtin_name = Builtins::kNewStrictArgumentsElements;
3868       break;
3869     case CreateArgumentsType::kRestParameter:
3870       builtin_name = Builtins::kNewRestArgumentsElements;
3871       break;
3872   }
3873   Callable const callable = Builtins::CallableFor(isolate(), builtin_name);
3874   auto call_descriptor = Linkage::GetStubCallDescriptor(
3875       graph()->zone(), callable.descriptor(),
3876       callable.descriptor().GetStackParameterCount(), flags, properties);
3877   return __ Call(call_descriptor, __ HeapConstant(callable.code()), frame,
3878                  __ IntPtrConstant(parameters.formal_parameter_count()),
3879                  arguments_count);
3880 }
3881 
// Lowers NewConsString to an inline allocation of a ConsString combining
// {first} and {second} with the given {length}. Chooses the one-byte cons
// map only when both inputs are one-byte strings.
Node* EffectControlLinearizer::LowerNewConsString(Node* node) {
  Node* length = node->InputAt(0);
  Node* first = node->InputAt(1);
  Node* second = node->InputAt(2);

  // Determine the instance types of {first} and {second}.
  Node* first_map = __ LoadField(AccessBuilder::ForMap(), first);
  Node* first_instance_type =
      __ LoadField(AccessBuilder::ForMapInstanceType(), first_map);
  Node* second_map = __ LoadField(AccessBuilder::ForMap(), second);
  Node* second_instance_type =
      __ LoadField(AccessBuilder::ForMapInstanceType(), second_map);

  // Determine the proper map for the resulting ConsString.
  // If both {first} and {second} are one-byte strings, we
  // create a new ConsOneByteString, otherwise we create a
  // new ConsString instead.
  auto if_onebyte = __ MakeLabel();
  auto if_twobyte = __ MakeLabel();
  auto done = __ MakeLabel(MachineRepresentation::kTaggedPointer);
  STATIC_ASSERT(kOneByteStringTag != 0);
  STATIC_ASSERT(kTwoByteStringTag == 0);
  // AND-ing the instance types yields a nonzero encoding bit only if both
  // strings are one-byte (relies on the static asserts above).
  Node* instance_type = __ Word32And(first_instance_type, second_instance_type);
  Node* encoding =
      __ Word32And(instance_type, __ Int32Constant(kStringEncodingMask));
  __ Branch(__ Word32Equal(encoding, __ Int32Constant(kTwoByteStringTag)),
            &if_twobyte, &if_onebyte);
  __ Bind(&if_onebyte);
  __ Goto(&done, __ HeapConstant(factory()->cons_one_byte_string_map()));
  __ Bind(&if_twobyte);
  __ Goto(&done, __ HeapConstant(factory()->cons_string_map()));
  __ Bind(&done);
  Node* result_map = done.PhiAt(0);

  // Allocate the resulting ConsString.
  Node* result =
      __ Allocate(AllocationType::kYoung, __ IntPtrConstant(ConsString::kSize));
  __ StoreField(AccessBuilder::ForMap(), result, result_map);
  __ StoreField(AccessBuilder::ForNameHashField(), result,
                __ Int32Constant(Name::kEmptyHashField));
  __ StoreField(AccessBuilder::ForStringLength(), result, length);
  __ StoreField(AccessBuilder::ForConsStringFirst(), result, first);
  __ StoreField(AccessBuilder::ForConsStringSecond(), result, second);
  return result;
}
3927 
LowerSameValue(Node * node)3928 Node* EffectControlLinearizer::LowerSameValue(Node* node) {
3929   Node* lhs = node->InputAt(0);
3930   Node* rhs = node->InputAt(1);
3931 
3932   Callable const callable =
3933       Builtins::CallableFor(isolate(), Builtins::kSameValue);
3934   Operator::Properties properties = Operator::kEliminatable;
3935   CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
3936   auto call_descriptor = Linkage::GetStubCallDescriptor(
3937       graph()->zone(), callable.descriptor(),
3938       callable.descriptor().GetStackParameterCount(), flags, properties);
3939   return __ Call(call_descriptor, __ HeapConstant(callable.code()), lhs, rhs,
3940                  __ NoContextConstant());
3941 }
3942 
LowerSameValueNumbersOnly(Node * node)3943 Node* EffectControlLinearizer::LowerSameValueNumbersOnly(Node* node) {
3944   Node* lhs = node->InputAt(0);
3945   Node* rhs = node->InputAt(1);
3946 
3947   Callable const callable =
3948       Builtins::CallableFor(isolate(), Builtins::kSameValueNumbersOnly);
3949   Operator::Properties properties = Operator::kEliminatable;
3950   CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
3951   auto call_descriptor = Linkage::GetStubCallDescriptor(
3952       graph()->zone(), callable.descriptor(),
3953       callable.descriptor().GetStackParameterCount(), flags, properties);
3954   return __ Call(call_descriptor, __ HeapConstant(callable.code()), lhs, rhs,
3955                  __ NoContextConstant());
3956 }
3957 
// Lowers NumberSameValue on Float64 inputs: like Float64 equality, except
// that NaN is equal to NaN, and +0 is distinguished from -0.
Node* EffectControlLinearizer::LowerNumberSameValue(Node* node) {
  Node* lhs = node->InputAt(0);
  Node* rhs = node->InputAt(1);

  auto is_float64_equal = __ MakeLabel();
  auto done = __ MakeLabel(MachineRepresentation::kBit);

  __ GotoIf(__ Float64Equal(lhs, rhs), &is_float64_equal);

  // Return true iff both {lhs} and {rhs} are NaN.
  // (x == x is false only for NaN.)
  __ GotoIf(__ Float64Equal(lhs, lhs), &done, __ Int32Constant(0));
  __ GotoIf(__ Float64Equal(rhs, rhs), &done, __ Int32Constant(0));
  __ Goto(&done, __ Int32Constant(1));

  __ Bind(&is_float64_equal);
  // Even if the values are float64-equal, we still need to distinguish
  // zero and minus zero. Comparing the high words suffices, since +0 and -0
  // differ only in the sign bit.
  Node* lhs_hi = __ Float64ExtractHighWord32(lhs);
  Node* rhs_hi = __ Float64ExtractHighWord32(rhs);
  __ Goto(&done, __ Word32Equal(lhs_hi, rhs_hi));

  __ Bind(&done);
  return done.PhiAt(0);
}
3982 
// Lowers DeadValue: ensures its value input is an Unreachable node (inserting
// one if necessary) and re-adds the node to the current effect chain.
Node* EffectControlLinearizer::LowerDeadValue(Node* node) {
  Node* input = NodeProperties::GetValueInput(node, 0);
  if (input->opcode() != IrOpcode::kUnreachable) {
    // There is no fundamental reason not to connect to end here, except it
    // integrates into the way the graph is constructed in a simpler way at
    // this point.
    // TODO(jgruber): Connect to end here as well.
    Node* unreachable = __ UnreachableWithoutConnectToEnd();
    NodeProperties::ReplaceValueInput(node, unreachable, 0);
  }
  return gasm()->AddNode(node);
}
3995 
LowerStringToNumber(Node * node)3996 Node* EffectControlLinearizer::LowerStringToNumber(Node* node) {
3997   Node* string = node->InputAt(0);
3998 
3999   Callable const callable =
4000       Builtins::CallableFor(isolate(), Builtins::kStringToNumber);
4001   Operator::Properties properties = Operator::kEliminatable;
4002   CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
4003   auto call_descriptor = Linkage::GetStubCallDescriptor(
4004       graph()->zone(), callable.descriptor(),
4005       callable.descriptor().GetStackParameterCount(), flags, properties);
4006   return __ Call(call_descriptor, __ HeapConstant(callable.code()), string,
4007                  __ NoContextConstant());
4008 }
4009 
// Lowers StringCharCodeAt: loads the 16-bit code unit at {position} from
// {receiver}. Indirect string representations (cons, thin, sliced) are
// unwrapped iteratively; sequential and cached external strings are read
// inline; everything else falls back to the %StringCharCodeAt runtime call.
Node* EffectControlLinearizer::LowerStringCharCodeAt(Node* node) {
  Node* receiver = node->InputAt(0);
  Node* position = node->InputAt(1);

  // We need a loop here to properly deal with indirect strings
  // (SlicedString, ConsString and ThinString).
  auto loop = __ MakeLoopLabel(MachineRepresentation::kTagged,
                               MachineType::PointerRepresentation());
  auto loop_next = __ MakeLabel(MachineRepresentation::kTagged,
                                MachineType::PointerRepresentation());
  auto loop_done = __ MakeLabel(MachineRepresentation::kWord32);
  __ Goto(&loop, receiver, position);
  __ Bind(&loop);
  {
    Node* receiver = loop.PhiAt(0);
    Node* position = loop.PhiAt(1);
    Node* receiver_map = __ LoadField(AccessBuilder::ForMap(), receiver);
    Node* receiver_instance_type =
        __ LoadField(AccessBuilder::ForMapInstanceType(), receiver_map);
    Node* receiver_representation = __ Word32And(
        receiver_instance_type, __ Int32Constant(kStringRepresentationMask));

    // Dispatch on the current {receiver}s string representation.
    auto if_lessthanoreq_cons = __ MakeLabel();
    auto if_greaterthan_cons = __ MakeLabel();
    auto if_seqstring = __ MakeLabel();
    auto if_consstring = __ MakeLabel();
    auto if_thinstring = __ MakeLabel();
    auto if_externalstring = __ MakeLabel();
    auto if_slicedstring = __ MakeLabel();
    auto if_runtime = __ MakeDeferredLabel();

    // First split the tag range at kConsStringTag, then compare exactly
    // within each half, so each path does at most three comparisons.
    __ Branch(__ Int32LessThanOrEqual(receiver_representation,
                                      __ Int32Constant(kConsStringTag)),
              &if_lessthanoreq_cons, &if_greaterthan_cons);

    __ Bind(&if_lessthanoreq_cons);
    {
      __ Branch(__ Word32Equal(receiver_representation,
                               __ Int32Constant(kConsStringTag)),
                &if_consstring, &if_seqstring);
    }

    __ Bind(&if_greaterthan_cons);
    {
      __ GotoIf(__ Word32Equal(receiver_representation,
                               __ Int32Constant(kThinStringTag)),
                &if_thinstring);
      __ GotoIf(__ Word32Equal(receiver_representation,
                               __ Int32Constant(kExternalStringTag)),
                &if_externalstring);
      __ Branch(__ Word32Equal(receiver_representation,
                               __ Int32Constant(kSlicedStringTag)),
                &if_slicedstring, &if_runtime);
    }

    __ Bind(&if_seqstring);
    {
      // Sequential string: read the code unit directly, picking the
      // one-byte or two-byte element access based on the encoding bit.
      Node* receiver_is_onebyte = __ Word32Equal(
          __ Word32Equal(__ Word32And(receiver_instance_type,
                                      __ Int32Constant(kStringEncodingMask)),
                         __ Int32Constant(kTwoByteStringTag)),
          __ Int32Constant(0));
      Node* result = LoadFromSeqString(receiver, position, receiver_is_onebyte);
      __ Goto(&loop_done, result);
    }

    __ Bind(&if_consstring);
    {
      // Only flat cons strings (second part is the empty string) can be
      // unwrapped here; non-flat ones go to the runtime.
      Node* receiver_second =
          __ LoadField(AccessBuilder::ForConsStringSecond(), receiver);
      __ GotoIfNot(__ TaggedEqual(receiver_second, __ EmptyStringConstant()),
                   &if_runtime);
      Node* receiver_first =
          __ LoadField(AccessBuilder::ForConsStringFirst(), receiver);
      __ Goto(&loop_next, receiver_first, position);
    }

    __ Bind(&if_thinstring);
    {
      // Thin string: continue with the actual (internalized) string.
      Node* receiver_actual =
          __ LoadField(AccessBuilder::ForThinStringActual(), receiver);
      __ Goto(&loop_next, receiver_actual, position);
    }

    __ Bind(&if_externalstring);
    {
      // We need to bailout to the runtime for uncached external strings.
      __ GotoIf(__ Word32Equal(
                    __ Word32And(receiver_instance_type,
                                 __ Int32Constant(kUncachedExternalStringMask)),
                    __ Int32Constant(kUncachedExternalStringTag)),
                &if_runtime);

      Node* receiver_data = __ LoadField(
          AccessBuilder::ForExternalStringResourceData(), receiver);

      auto if_onebyte = __ MakeLabel();
      auto if_twobyte = __ MakeLabel();
      __ Branch(
          __ Word32Equal(__ Word32And(receiver_instance_type,
                                      __ Int32Constant(kStringEncodingMask)),
                         __ Int32Constant(kTwoByteStringTag)),
          &if_twobyte, &if_onebyte);

      __ Bind(&if_onebyte);
      {
        Node* result = __ Load(MachineType::Uint8(), receiver_data, position);
        __ Goto(&loop_done, result);
      }

      __ Bind(&if_twobyte);
      {
        // Two-byte characters: scale the index by 2 to get the byte offset.
        Node* result = __ Load(MachineType::Uint16(), receiver_data,
                               __ WordShl(position, __ IntPtrConstant(1)));
        __ Goto(&loop_done, result);
      }
    }

    __ Bind(&if_slicedstring);
    {
      // Sliced string: continue with the parent, offsetting the position.
      Node* receiver_offset =
          __ LoadField(AccessBuilder::ForSlicedStringOffset(), receiver);
      Node* receiver_parent =
          __ LoadField(AccessBuilder::ForSlicedStringParent(), receiver);
      __ Goto(&loop_next, receiver_parent,
              __ IntAdd(position, ChangeSmiToIntPtr(receiver_offset)));
    }

    __ Bind(&if_runtime);
    {
      // Slow path: call %StringCharCodeAt with a Smi-tagged position and
      // untag the Smi result.
      Operator::Properties properties = Operator::kNoDeopt | Operator::kNoThrow;
      Runtime::FunctionId id = Runtime::kStringCharCodeAt;
      auto call_descriptor = Linkage::GetRuntimeCallDescriptor(
          graph()->zone(), id, 2, properties, CallDescriptor::kNoFlags);
      Node* result = __ Call(call_descriptor, __ CEntryStubConstant(1),
                             receiver, ChangeIntPtrToSmi(position),
                             __ ExternalConstant(ExternalReference::Create(id)),
                             __ Int32Constant(2), __ NoContextConstant());
      __ Goto(&loop_done, ChangeSmiToInt32(result));
    }

    __ Bind(&loop_next);
    __ Goto(&loop, loop_next.PhiAt(0), loop_next.PhiAt(1));
  }
  __ Bind(&loop_done);
  return loop_done.PhiAt(0);
}
4158 
LowerStringCodePointAt(Node * node)4159 Node* EffectControlLinearizer::LowerStringCodePointAt(Node* node) {
4160   Node* receiver = node->InputAt(0);
4161   Node* position = node->InputAt(1);
4162 
4163   Callable const callable =
4164       Builtins::CallableFor(isolate(), Builtins::kStringCodePointAt);
4165   Operator::Properties properties = Operator::kNoThrow | Operator::kNoWrite;
4166   CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
4167   auto call_descriptor = Linkage::GetStubCallDescriptor(
4168       graph()->zone(), callable.descriptor(),
4169       callable.descriptor().GetStackParameterCount(), flags, properties);
4170   return __ Call(call_descriptor, __ HeapConstant(callable.code()), receiver,
4171                  position, __ NoContextConstant());
4172 }
4173 
// Loads the code unit at {position} from a sequential string {receiver},
// selecting the one-byte or two-byte element access at runtime based on the
// {is_one_byte} flag. Returns the code unit as a Word32.
Node* EffectControlLinearizer::LoadFromSeqString(Node* receiver, Node* position,
                                                 Node* is_one_byte) {
  auto one_byte_load = __ MakeLabel();
  auto done = __ MakeLabel(MachineRepresentation::kWord32);
  __ GotoIf(is_one_byte, &one_byte_load);
  Node* two_byte_result = __ LoadElement(
      AccessBuilder::ForSeqTwoByteStringCharacter(), receiver, position);
  __ Goto(&done, two_byte_result);

  __ Bind(&one_byte_load);
  Node* one_byte_element = __ LoadElement(
      AccessBuilder::ForSeqOneByteStringCharacter(), receiver, position);
  __ Goto(&done, one_byte_element);

  __ Bind(&done);
  return done.PhiAt(0);
}
4191 
// Lowers StringFromSingleCharCode: creates a one-character string from the
// low 16 bits of the input. One-byte characters are served from (and stored
// into) the isolate's single-character string cache; two-byte characters
// always allocate a fresh SeqTwoByteString.
Node* EffectControlLinearizer::LowerStringFromSingleCharCode(Node* node) {
  Node* value = node->InputAt(0);
  // Per spec, only the low 16 bits of the char code are used.
  Node* code = __ Word32And(value, __ Uint32Constant(0xFFFF));

  auto if_not_one_byte = __ MakeDeferredLabel();
  auto cache_miss = __ MakeDeferredLabel();
  auto done = __ MakeLabel(MachineRepresentation::kTagged);

  // Check if the {code} is a one byte character
  Node* check1 = __ Uint32LessThanOrEqual(
      code, __ Uint32Constant(String::kMaxOneByteCharCode));
  __ GotoIfNot(check1, &if_not_one_byte);
  {
    // Load the isolate wide single character string cache.
    Node* cache = __ HeapConstant(factory()->single_character_string_cache());

    // Compute the {cache} index for {code}.
    Node* index = machine()->Is32() ? code : __ ChangeUint32ToUint64(code);

    // Check if we have an entry for the {code} in the single character string
    // cache already.
    Node* entry =
        __ LoadElement(AccessBuilder::ForFixedArrayElement(), cache, index);

    // An undefined cache slot means no cached string for this code yet.
    Node* check2 = __ TaggedEqual(entry, __ UndefinedConstant());
    __ GotoIf(check2, &cache_miss);

    // Use the {entry} from the {cache}.
    __ Goto(&done, entry);

    __ Bind(&cache_miss);
    {
      // Allocate a new SeqOneByteString for {code}.
      Node* vtrue2 =
          __ Allocate(AllocationType::kYoung,
                      __ IntPtrConstant(SeqOneByteString::SizeFor(1)));
      __ StoreField(AccessBuilder::ForMap(), vtrue2,
                    __ HeapConstant(factory()->one_byte_string_map()));
      __ StoreField(AccessBuilder::ForNameHashField(), vtrue2,
                    __ Int32Constant(Name::kEmptyHashField));
      __ StoreField(AccessBuilder::ForStringLength(), vtrue2,
                    __ Int32Constant(1));
      // Write the single character into the string payload.
      __ Store(
          StoreRepresentation(MachineRepresentation::kWord8, kNoWriteBarrier),
          vtrue2,
          __ IntPtrConstant(SeqOneByteString::kHeaderSize - kHeapObjectTag),
          code);

      // Remember it in the {cache}.
      __ StoreElement(AccessBuilder::ForFixedArrayElement(), cache, index,
                      vtrue2);
      __ Goto(&done, vtrue2);
    }
  }

  __ Bind(&if_not_one_byte);
  {
    // Allocate a new SeqTwoByteString for {code}.
    Node* vfalse1 =
        __ Allocate(AllocationType::kYoung,
                    __ IntPtrConstant(SeqTwoByteString::SizeFor(1)));
    __ StoreField(AccessBuilder::ForMap(), vfalse1,
                  __ HeapConstant(factory()->string_map()));
    __ StoreField(AccessBuilder::ForNameHashField(), vfalse1,
                  __ Int32Constant(Name::kEmptyHashField));
    __ StoreField(AccessBuilder::ForStringLength(), vfalse1,
                  __ Int32Constant(1));
    // Write the single two-byte character into the string payload.
    __ Store(
        StoreRepresentation(MachineRepresentation::kWord16, kNoWriteBarrier),
        vfalse1,
        __ IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag),
        code);
    __ Goto(&done, vfalse1);
  }

  __ Bind(&done);
  return done.PhiAt(0);
}
4270 
4271 #ifdef V8_INTL_SUPPORT
4272 
LowerStringToLowerCaseIntl(Node * node)4273 Node* EffectControlLinearizer::LowerStringToLowerCaseIntl(Node* node) {
4274   Node* receiver = node->InputAt(0);
4275 
4276   Callable callable =
4277       Builtins::CallableFor(isolate(), Builtins::kStringToLowerCaseIntl);
4278   Operator::Properties properties = Operator::kNoDeopt | Operator::kNoThrow;
4279   CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
4280   auto call_descriptor = Linkage::GetStubCallDescriptor(
4281       graph()->zone(), callable.descriptor(),
4282       callable.descriptor().GetStackParameterCount(), flags, properties);
4283   return __ Call(call_descriptor, __ HeapConstant(callable.code()), receiver,
4284                  __ NoContextConstant());
4285 }
4286 
LowerStringToUpperCaseIntl(Node * node)4287 Node* EffectControlLinearizer::LowerStringToUpperCaseIntl(Node* node) {
4288   Node* receiver = node->InputAt(0);
4289   Operator::Properties properties = Operator::kNoDeopt | Operator::kNoThrow;
4290   Runtime::FunctionId id = Runtime::kStringToUpperCaseIntl;
4291   auto call_descriptor = Linkage::GetRuntimeCallDescriptor(
4292       graph()->zone(), id, 1, properties, CallDescriptor::kNoFlags);
4293   return __ Call(call_descriptor, __ CEntryStubConstant(1), receiver,
4294                  __ ExternalConstant(ExternalReference::Create(id)),
4295                  __ Int32Constant(1), __ NoContextConstant());
4296 }
4297 
4298 #else
4299 
// Stub for builds without V8_INTL_SUPPORT; this lowering must never be
// reached, since the corresponding operator is only created with intl.
Node* EffectControlLinearizer::LowerStringToLowerCaseIntl(Node* node) {
  UNREACHABLE();
  return nullptr;
}
4304 
// Stub for builds without V8_INTL_SUPPORT; this lowering must never be
// reached, since the corresponding operator is only created with intl.
Node* EffectControlLinearizer::LowerStringToUpperCaseIntl(Node* node) {
  UNREACHABLE();
  return nullptr;
}
4309 
4310 #endif  // V8_INTL_SUPPORT
4311 
LowerStringFromSingleCodePoint(Node * node)4312 Node* EffectControlLinearizer::LowerStringFromSingleCodePoint(Node* node) {
4313   Node* value = node->InputAt(0);
4314   Node* code = value;
4315 
4316   auto if_not_single_code = __ MakeDeferredLabel();
4317   auto if_not_one_byte = __ MakeDeferredLabel();
4318   auto cache_miss = __ MakeDeferredLabel();
4319   auto done = __ MakeLabel(MachineRepresentation::kTagged);
4320 
4321   // Check if the {code} is a single code unit
4322   Node* check0 = __ Uint32LessThanOrEqual(code, __ Uint32Constant(0xFFFF));
4323   __ GotoIfNot(check0, &if_not_single_code);
4324 
4325   {
4326     // Check if the {code} is a one byte character
4327     Node* check1 = __ Uint32LessThanOrEqual(
4328         code, __ Uint32Constant(String::kMaxOneByteCharCode));
4329     __ GotoIfNot(check1, &if_not_one_byte);
4330     {
4331       // Load the isolate wide single character string cache.
4332       Node* cache = __ HeapConstant(factory()->single_character_string_cache());
4333 
4334       // Compute the {cache} index for {code}.
4335       Node* index = machine()->Is32() ? code : __ ChangeUint32ToUint64(code);
4336 
4337       // Check if we have an entry for the {code} in the single character string
4338       // cache already.
4339       Node* entry =
4340           __ LoadElement(AccessBuilder::ForFixedArrayElement(), cache, index);
4341 
4342       Node* check2 = __ TaggedEqual(entry, __ UndefinedConstant());
4343       __ GotoIf(check2, &cache_miss);
4344 
4345       // Use the {entry} from the {cache}.
4346       __ Goto(&done, entry);
4347 
4348       __ Bind(&cache_miss);
4349       {
4350         // Allocate a new SeqOneByteString for {code}.
4351         Node* vtrue2 =
4352             __ Allocate(AllocationType::kYoung,
4353                         __ IntPtrConstant(SeqOneByteString::SizeFor(1)));
4354         __ StoreField(AccessBuilder::ForMap(), vtrue2,
4355                       __ HeapConstant(factory()->one_byte_string_map()));
4356         __ StoreField(AccessBuilder::ForNameHashField(), vtrue2,
4357                       __ Int32Constant(Name::kEmptyHashField));
4358         __ StoreField(AccessBuilder::ForStringLength(), vtrue2,
4359                       __ Int32Constant(1));
4360         __ Store(
4361             StoreRepresentation(MachineRepresentation::kWord8, kNoWriteBarrier),
4362             vtrue2,
4363             __ IntPtrConstant(SeqOneByteString::kHeaderSize - kHeapObjectTag),
4364             code);
4365 
4366         // Remember it in the {cache}.
4367         __ StoreElement(AccessBuilder::ForFixedArrayElement(), cache, index,
4368                         vtrue2);
4369         __ Goto(&done, vtrue2);
4370       }
4371     }
4372 
4373     __ Bind(&if_not_one_byte);
4374     {
4375       // Allocate a new SeqTwoByteString for {code}.
4376       Node* vfalse1 =
4377           __ Allocate(AllocationType::kYoung,
4378                       __ IntPtrConstant(SeqTwoByteString::SizeFor(1)));
4379       __ StoreField(AccessBuilder::ForMap(), vfalse1,
4380                     __ HeapConstant(factory()->string_map()));
4381       __ StoreField(AccessBuilder::ForNameHashField(), vfalse1,
4382                     __ IntPtrConstant(Name::kEmptyHashField));
4383       __ StoreField(AccessBuilder::ForStringLength(), vfalse1,
4384                     __ Int32Constant(1));
4385       __ Store(
4386           StoreRepresentation(MachineRepresentation::kWord16, kNoWriteBarrier),
4387           vfalse1,
4388           __ IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag),
4389           code);
4390       __ Goto(&done, vfalse1);
4391     }
4392   }
4393 
4394   __ Bind(&if_not_single_code);
4395   // Generate surrogate pair string
4396   {
4397     // Convert UTF32 to UTF16 code units, and store as a 32 bit word.
4398     Node* lead_offset = __ Int32Constant(0xD800 - (0x10000 >> 10));
4399 
4400     // lead = (codepoint >> 10) + LEAD_OFFSET
4401     Node* lead =
4402         __ Int32Add(__ Word32Shr(code, __ Int32Constant(10)), lead_offset);
4403 
4404     // trail = (codepoint & 0x3FF) + 0xDC00;
4405     Node* trail = __ Int32Add(__ Word32And(code, __ Int32Constant(0x3FF)),
4406                               __ Int32Constant(0xDC00));
4407 
4408     // codpoint = (trail << 16) | lead;
4409 #if V8_TARGET_BIG_ENDIAN
4410     code = __ Word32Or(__ Word32Shl(lead, __ Int32Constant(16)), trail);
4411 #else
4412     code = __ Word32Or(__ Word32Shl(trail, __ Int32Constant(16)), lead);
4413 #endif
4414 
4415     // Allocate a new SeqTwoByteString for {code}.
4416     Node* vfalse0 =
4417         __ Allocate(AllocationType::kYoung,
4418                     __ IntPtrConstant(SeqTwoByteString::SizeFor(2)));
4419     __ StoreField(AccessBuilder::ForMap(), vfalse0,
4420                   __ HeapConstant(factory()->string_map()));
4421     __ StoreField(AccessBuilder::ForNameHashField(), vfalse0,
4422                   __ Int32Constant(Name::kEmptyHashField));
4423     __ StoreField(AccessBuilder::ForStringLength(), vfalse0,
4424                   __ Int32Constant(2));
4425     __ Store(
4426         StoreRepresentation(MachineRepresentation::kWord32, kNoWriteBarrier),
4427         vfalse0,
4428         __ IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag),
4429         code);
4430     __ Goto(&done, vfalse0);
4431   }
4432 
4433   __ Bind(&done);
4434   return done.PhiAt(0);
4435 }
4436 
LowerStringIndexOf(Node * node)4437 Node* EffectControlLinearizer::LowerStringIndexOf(Node* node) {
4438   Node* subject = node->InputAt(0);
4439   Node* search_string = node->InputAt(1);
4440   Node* position = node->InputAt(2);
4441 
4442   Callable callable =
4443       Builtins::CallableFor(isolate(), Builtins::kStringIndexOf);
4444   Operator::Properties properties = Operator::kEliminatable;
4445   CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
4446   auto call_descriptor = Linkage::GetStubCallDescriptor(
4447       graph()->zone(), callable.descriptor(),
4448       callable.descriptor().GetStackParameterCount(), flags, properties);
4449   return __ Call(call_descriptor, __ HeapConstant(callable.code()), subject,
4450                  search_string, position, __ NoContextConstant());
4451 }
4452 
LowerStringFromCodePointAt(Node * node)4453 Node* EffectControlLinearizer::LowerStringFromCodePointAt(Node* node) {
4454   Node* string = node->InputAt(0);
4455   Node* index = node->InputAt(1);
4456 
4457   Callable callable =
4458       Builtins::CallableFor(isolate(), Builtins::kStringFromCodePointAt);
4459   Operator::Properties properties = Operator::kEliminatable;
4460   CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
4461   auto call_descriptor = Linkage::GetStubCallDescriptor(
4462       graph()->zone(), callable.descriptor(),
4463       callable.descriptor().GetStackParameterCount(), flags, properties);
4464   return __ Call(call_descriptor, __ HeapConstant(callable.code()), string,
4465                  index, __ NoContextConstant());
4466 }
4467 
LowerStringLength(Node * node)4468 Node* EffectControlLinearizer::LowerStringLength(Node* node) {
4469   Node* subject = node->InputAt(0);
4470 
4471   return __ LoadField(AccessBuilder::ForStringLength(), subject);
4472 }
4473 
LowerStringComparison(Callable const & callable,Node * node)4474 Node* EffectControlLinearizer::LowerStringComparison(Callable const& callable,
4475                                                      Node* node) {
4476   Node* lhs = node->InputAt(0);
4477   Node* rhs = node->InputAt(1);
4478 
4479   Operator::Properties properties = Operator::kEliminatable;
4480   CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
4481   auto call_descriptor = Linkage::GetStubCallDescriptor(
4482       graph()->zone(), callable.descriptor(),
4483       callable.descriptor().GetStackParameterCount(), flags, properties);
4484   return __ Call(call_descriptor, __ HeapConstant(callable.code()), lhs, rhs,
4485                  __ NoContextConstant());
4486 }
4487 
LowerStringSubstring(Node * node)4488 Node* EffectControlLinearizer::LowerStringSubstring(Node* node) {
4489   Node* receiver = node->InputAt(0);
4490   Node* start = ChangeInt32ToIntPtr(node->InputAt(1));
4491   Node* end = ChangeInt32ToIntPtr(node->InputAt(2));
4492 
4493   Callable callable =
4494       Builtins::CallableFor(isolate(), Builtins::kStringSubstring);
4495   Operator::Properties properties = Operator::kEliminatable;
4496   CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
4497   auto call_descriptor = Linkage::GetStubCallDescriptor(
4498       graph()->zone(), callable.descriptor(),
4499       callable.descriptor().GetStackParameterCount(), flags, properties);
4500   return __ Call(call_descriptor, __ HeapConstant(callable.code()), receiver,
4501                  start, end, __ NoContextConstant());
4502 }
4503 
LowerStringEqual(Node * node)4504 Node* EffectControlLinearizer::LowerStringEqual(Node* node) {
4505   return LowerStringComparison(
4506       Builtins::CallableFor(isolate(), Builtins::kStringEqual), node);
4507 }
4508 
LowerStringLessThan(Node * node)4509 Node* EffectControlLinearizer::LowerStringLessThan(Node* node) {
4510   return LowerStringComparison(
4511       Builtins::CallableFor(isolate(), Builtins::kStringLessThan), node);
4512 }
4513 
LowerStringLessThanOrEqual(Node * node)4514 Node* EffectControlLinearizer::LowerStringLessThanOrEqual(Node* node) {
4515   return LowerStringComparison(
4516       Builtins::CallableFor(isolate(), Builtins::kStringLessThanOrEqual), node);
4517 }
4518 
LowerBigIntAdd(Node * node,Node * frame_state)4519 Node* EffectControlLinearizer::LowerBigIntAdd(Node* node, Node* frame_state) {
4520   Node* lhs = node->InputAt(0);
4521   Node* rhs = node->InputAt(1);
4522 
4523   Callable const callable =
4524       Builtins::CallableFor(isolate(), Builtins::kBigIntAddNoThrow);
4525   auto call_descriptor = Linkage::GetStubCallDescriptor(
4526       graph()->zone(), callable.descriptor(),
4527       callable.descriptor().GetStackParameterCount(), CallDescriptor::kNoFlags,
4528       Operator::kFoldable | Operator::kNoThrow);
4529   Node* value = __ Call(call_descriptor, __ HeapConstant(callable.code()), lhs,
4530                         rhs, __ NoContextConstant());
4531 
4532   // Check for exception sentinel: Smi is returned to signal BigIntTooBig.
4533   __ DeoptimizeIf(DeoptimizeReason::kBigIntTooBig, FeedbackSource{},
4534                   ObjectIsSmi(value), frame_state);
4535 
4536   return value;
4537 }
4538 
LowerBigIntSubtract(Node * node,Node * frame_state)4539 Node* EffectControlLinearizer::LowerBigIntSubtract(Node* node,
4540                                                    Node* frame_state) {
4541   Node* lhs = node->InputAt(0);
4542   Node* rhs = node->InputAt(1);
4543 
4544   Callable const callable =
4545       Builtins::CallableFor(isolate(), Builtins::kBigIntSubtractNoThrow);
4546   auto call_descriptor = Linkage::GetStubCallDescriptor(
4547       graph()->zone(), callable.descriptor(),
4548       callable.descriptor().GetStackParameterCount(), CallDescriptor::kNoFlags,
4549       Operator::kFoldable | Operator::kNoThrow);
4550   Node* value = __ Call(call_descriptor, __ HeapConstant(callable.code()), lhs,
4551                         rhs, __ NoContextConstant());
4552 
4553   // Check for exception sentinel: Smi is returned to signal BigIntTooBig.
4554   __ DeoptimizeIf(DeoptimizeReason::kBigIntTooBig, FeedbackSource{},
4555                   ObjectIsSmi(value), frame_state);
4556 
4557   return value;
4558 }
4559 
LowerBigIntNegate(Node * node)4560 Node* EffectControlLinearizer::LowerBigIntNegate(Node* node) {
4561   Callable const callable =
4562       Builtins::CallableFor(isolate(), Builtins::kBigIntUnaryMinus);
4563   auto call_descriptor = Linkage::GetStubCallDescriptor(
4564       graph()->zone(), callable.descriptor(),
4565       callable.descriptor().GetStackParameterCount(), CallDescriptor::kNoFlags,
4566       Operator::kFoldable | Operator::kNoThrow);
4567   Node* value = __ Call(call_descriptor, __ HeapConstant(callable.code()),
4568                         node->InputAt(0), __ NoContextConstant());
4569 
4570   return value;
4571 }
4572 
// Lowers CheckFloat64Hole: deoptimizes with reason kHole when {value} is
// the hole NaN; otherwise passes {value} through unchanged.
Node* EffectControlLinearizer::LowerCheckFloat64Hole(Node* node,
                                                     Node* frame_state) {
  // If we reach this point w/o eliminating the {node} that's marked
  // with allow-return-hole, we cannot do anything, so just deoptimize
  // in case of the hole NaN.
  CheckFloat64HoleParameters const& params =
      CheckFloat64HoleParametersOf(node->op());
  Node* value = node->InputAt(0);

  auto if_nan = __ MakeDeferredLabel();
  auto done = __ MakeLabel();

  // First check whether {value} is a NaN at all...
  // (x == x is false only for NaN, so the common non-NaN case goes
  // straight to {done}.)
  __ Branch(__ Float64Equal(value, value), &done, &if_nan);

  __ Bind(&if_nan);
  {
    // ...and only if {value} is a NaN, perform the expensive bit
    // check. See http://crbug.com/v8/8264 for details.
    Node* check = __ Word32Equal(__ Float64ExtractHighWord32(value),
                                 __ Int32Constant(kHoleNanUpper32));
    __ DeoptimizeIf(DeoptimizeReason::kHole, params.feedback(), check,
                    frame_state);
    __ Goto(&done);
  }

  __ Bind(&done);
  return value;
}
4602 
LowerCheckNotTaggedHole(Node * node,Node * frame_state)4603 Node* EffectControlLinearizer::LowerCheckNotTaggedHole(Node* node,
4604                                                        Node* frame_state) {
4605   Node* value = node->InputAt(0);
4606   Node* check = __ TaggedEqual(value, __ TheHoleConstant());
4607   __ DeoptimizeIf(DeoptimizeReason::kHole, FeedbackSource(), check,
4608                   frame_state);
4609   return value;
4610 }
4611 
// Lowers ConvertTaggedHoleToUndefined: maps the hole sentinel to undefined
// and passes every other value through unchanged.
Node* EffectControlLinearizer::LowerConvertTaggedHoleToUndefined(Node* node) {
  Node* value = node->InputAt(0);

  auto if_is_hole = __ MakeDeferredLabel();
  auto done = __ MakeLabel(MachineRepresentation::kTagged);

  // Common case: {value} is not the hole, return it as-is.
  Node* check = __ TaggedEqual(value, __ TheHoleConstant());
  __ GotoIf(check, &if_is_hole);
  __ Goto(&done, value);

  // Deferred case: substitute undefined for the hole.
  __ Bind(&if_is_hole);
  __ Goto(&done, __ UndefinedConstant());

  __ Bind(&done);
  return done.PhiAt(0);
}
4628 
// Lowers CheckEqualsInternalizedString: verifies that {val} matches the
// internalized string {exp} recorded in feedback, deoptimizing with
// kWrongName whenever it cannot match. Handles the identical-pointer fast
// path, ThinStrings (compare the actual string), and non-internalized
// strings (look up the internalized version via a C call).
void EffectControlLinearizer::LowerCheckEqualsInternalizedString(
    Node* node, Node* frame_state) {
  Node* exp = node->InputAt(0);
  Node* val = node->InputAt(1);

  auto if_same = __ MakeLabel();
  auto if_notsame = __ MakeDeferredLabel();
  auto if_thinstring = __ MakeLabel();
  auto if_notthinstring = __ MakeLabel();

  // Check if {exp} and {val} are the same, which is the likely case.
  __ Branch(__ TaggedEqual(exp, val), &if_same, &if_notsame);

  __ Bind(&if_notsame);
  {
    // Now {val} could still be a non-internalized String that matches {exp}.
    // A Smi can never match an internalized string, though.
    __ DeoptimizeIf(DeoptimizeReason::kWrongName, FeedbackSource(),
                    ObjectIsSmi(val), frame_state);
    Node* val_map = __ LoadField(AccessBuilder::ForMap(), val);
    Node* val_instance_type =
        __ LoadField(AccessBuilder::ForMapInstanceType(), val_map);

    // Check for the common case of ThinString first (both the one-byte and
    // the generic instance type).
    __ GotoIf(__ Word32Equal(val_instance_type,
                             __ Int32Constant(THIN_ONE_BYTE_STRING_TYPE)),
              &if_thinstring);
    __ Branch(
        __ Word32Equal(val_instance_type, __ Int32Constant(THIN_STRING_TYPE)),
        &if_thinstring, &if_notthinstring);

    __ Bind(&if_notthinstring);
    {
      // Check that the {val} is a non-internalized String, if it's anything
      // else it cannot match the recorded feedback {exp} anyways.
      __ DeoptimizeIfNot(
          DeoptimizeReason::kWrongName, FeedbackSource(),
          __ Word32Equal(__ Word32And(val_instance_type,
                                      __ Int32Constant(kIsNotStringMask |
                                                       kIsNotInternalizedMask)),
                         __ Int32Constant(kStringTag | kNotInternalizedTag)),
          frame_state);

      // Try to find the {val} in the string table.
      // The C helper takes (isolate, string); its tagged result is compared
      // against {exp} below.
      MachineSignature::Builder builder(graph()->zone(), 1, 2);
      builder.AddReturn(MachineType::AnyTagged());
      builder.AddParam(MachineType::Pointer());
      builder.AddParam(MachineType::AnyTagged());
      Node* try_string_to_index_or_lookup_existing = __ ExternalConstant(
          ExternalReference::try_string_to_index_or_lookup_existing());
      Node* const isolate_ptr =
          __ ExternalConstant(ExternalReference::isolate_address(isolate()));
      auto call_descriptor =
          Linkage::GetSimplifiedCDescriptor(graph()->zone(), builder.Build());
      Node* val_internalized =
          __ Call(common()->Call(call_descriptor),
                  try_string_to_index_or_lookup_existing, isolate_ptr, val);

      // Now see if the results match.
      __ DeoptimizeIfNot(DeoptimizeReason::kWrongName, FeedbackSource(),
                         __ TaggedEqual(exp, val_internalized), frame_state);
      __ Goto(&if_same);
    }

    __ Bind(&if_thinstring);
    {
      // The {val} is a ThinString, let's check the actual value.
      Node* val_actual =
          __ LoadField(AccessBuilder::ForThinStringActual(), val);
      __ DeoptimizeIfNot(DeoptimizeReason::kWrongName, FeedbackSource(),
                         __ TaggedEqual(exp, val_actual), frame_state);
      __ Goto(&if_same);
    }
  }

  __ Bind(&if_same);
}
4705 
LowerCheckEqualsSymbol(Node * node,Node * frame_state)4706 void EffectControlLinearizer::LowerCheckEqualsSymbol(Node* node,
4707                                                      Node* frame_state) {
4708   Node* exp = node->InputAt(0);
4709   Node* val = node->InputAt(1);
4710   Node* check = __ TaggedEqual(exp, val);
4711   __ DeoptimizeIfNot(DeoptimizeReason::kWrongName, FeedbackSource(), check,
4712                      frame_state);
4713 }
4714 
AllocateHeapNumberWithValue(Node * value)4715 Node* EffectControlLinearizer::AllocateHeapNumberWithValue(Node* value) {
4716   Node* result =
4717       __ Allocate(AllocationType::kYoung, __ IntPtrConstant(HeapNumber::kSize));
4718   __ StoreField(AccessBuilder::ForMap(), result, __ HeapNumberMapConstant());
4719   __ StoreField(AccessBuilder::ForHeapNumberValue(), result, value);
4720   return result;
4721 }
4722 
ChangeIntPtrToSmi(Node * value)4723 Node* EffectControlLinearizer::ChangeIntPtrToSmi(Node* value) {
4724   // Do shift on 32bit values if Smis are stored in the lower word.
4725   if (machine()->Is64() && SmiValuesAre31Bits()) {
4726     return ChangeTaggedInt32ToSmi(__ Word32Shl(value, SmiShiftBitsConstant()));
4727   }
4728   return __ WordShl(value, SmiShiftBitsConstant());
4729 }
4730 
ChangeTaggedInt32ToSmi(Node * value)4731 Node* EffectControlLinearizer::ChangeTaggedInt32ToSmi(Node* value) {
4732   DCHECK(SmiValuesAre31Bits());
4733   // In pointer compression, we smi-corrupt. Then, the upper bits are not
4734   // important.
4735   return COMPRESS_POINTERS_BOOL ? __ BitcastWord32ToWord64(value)
4736                                 : ChangeInt32ToIntPtr(value);
4737 }
4738 
ChangeInt32ToIntPtr(Node * value)4739 Node* EffectControlLinearizer::ChangeInt32ToIntPtr(Node* value) {
4740   if (machine()->Is64()) {
4741     value = __ ChangeInt32ToInt64(value);
4742   }
4743   return value;
4744 }
4745 
ChangeIntPtrToInt32(Node * value)4746 Node* EffectControlLinearizer::ChangeIntPtrToInt32(Node* value) {
4747   if (machine()->Is64()) {
4748     value = __ TruncateInt64ToInt32(value);
4749   }
4750   return value;
4751 }
4752 
ChangeInt32ToSmi(Node * value)4753 Node* EffectControlLinearizer::ChangeInt32ToSmi(Node* value) {
4754   // Do shift on 32bit values if Smis are stored in the lower word.
4755   if (machine()->Is64() && SmiValuesAre31Bits()) {
4756     return ChangeIntPtrToSmi(value);
4757   }
4758   return ChangeIntPtrToSmi(ChangeInt32ToIntPtr(value));
4759 }
4760 
// Converts an int64 value to a Smi; only valid on 64-bit architectures,
// where the intptr conversion already operates on 64-bit words.
Node* EffectControlLinearizer::ChangeInt64ToSmi(Node* value) {
  DCHECK(machine()->Is64());
  return ChangeIntPtrToSmi(value);
}
4765 
ChangeUint32ToUintPtr(Node * value)4766 Node* EffectControlLinearizer::ChangeUint32ToUintPtr(Node* value) {
4767   if (machine()->Is64()) {
4768     value = __ ChangeUint32ToUint64(value);
4769   }
4770   return value;
4771 }
4772 
// Converts an untagged uint32 value into a Smi by shifting in the tag bits.
Node* EffectControlLinearizer::ChangeUint32ToSmi(Node* value) {
  // Do shift on 32bit values if Smis are stored in the lower word.
  if (machine()->Is64() && SmiValuesAre31Bits()) {
    Node* smi_value = __ Word32Shl(value, SmiShiftBitsConstant());
    // In pointer compression, we smi-corrupt. Then, the upper bits are not
    // important.
    return COMPRESS_POINTERS_BOOL ? __ BitcastWord32ToWord64(smi_value)
                                  : __ ChangeUint32ToUint64(smi_value);
  } else {
    // Otherwise zero-extend first and shift at full pointer width.
    return __ WordShl(ChangeUint32ToUintPtr(value), SmiShiftBitsConstant());
  }
}
4785 
// Converts a Smi to an intptr-sized integer by shifting out the tag bits
// with an arithmetic (sign-preserving) shift.
Node* EffectControlLinearizer::ChangeSmiToIntPtr(Node* value) {
  if (machine()->Is64() && SmiValuesAre31Bits()) {
    // First sign-extend the upper half, then shift away the Smi tag.
    return __ WordSarShiftOutZeros(
        __ ChangeInt32ToInt64(__ TruncateInt64ToInt32(value)),
        SmiShiftBitsConstant());
  }
  return __ WordSarShiftOutZeros(value, SmiShiftBitsConstant());
}
4795 
// Converts a Smi to an untagged int32 value.
Node* EffectControlLinearizer::ChangeSmiToInt32(Node* value) {
  // Do shift on 32bit values if Smis are stored in the lower word.
  if (machine()->Is64() && SmiValuesAre31Bits()) {
    return __ Word32SarShiftOutZeros(__ TruncateInt64ToInt32(value),
                                     SmiShiftBitsConstant());
  }
  if (machine()->Is64()) {
    // 32-bit Smis on a 64-bit target: untag at full width, then truncate.
    return __ TruncateInt64ToInt32(ChangeSmiToIntPtr(value));
  }
  return ChangeSmiToIntPtr(value);
}
4807 
// Converts a Smi to an int64 value; only valid on 64-bit architectures,
// where untagging to intptr already yields a 64-bit integer.
Node* EffectControlLinearizer::ChangeSmiToInt64(Node* value) {
  CHECK(machine()->Is64());
  return ChangeSmiToIntPtr(value);
}
4812 
ObjectIsSmi(Node * value)4813 Node* EffectControlLinearizer::ObjectIsSmi(Node* value) {
4814   return __ Word32Equal(__ Word32And(value, __ Int32Constant(kSmiTagMask)),
4815                         __ Int32Constant(kSmiTag));
4816 }
4817 
// Returns the maximum Smi value as a 32-bit constant node.
Node* EffectControlLinearizer::SmiMaxValueConstant() {
  return __ Int32Constant(Smi::kMaxValue);
}
4821 
// Returns the Smi (un)tagging shift amount: a 32-bit constant when the
// shift is performed on 32-bit values (64-bit targets with 31-bit Smis),
// an intptr-sized constant otherwise.
Node* EffectControlLinearizer::SmiShiftBitsConstant() {
  if (machine()->Is64() && SmiValuesAre31Bits()) {
    return __ Int32Constant(kSmiShiftSize + kSmiTagSize);
  }
  return __ IntPtrConstant(kSmiShiftSize + kSmiTagSize);
}
4828 
LowerPlainPrimitiveToNumber(Node * node)4829 Node* EffectControlLinearizer::LowerPlainPrimitiveToNumber(Node* node) {
4830   Node* value = node->InputAt(0);
4831   return __ PlainPrimitiveToNumber(TNode<Object>::UncheckedCast(value));
4832 }
4833 
// Lowers PlainPrimitiveToWord32: converts the input to a Number (unless it
// is already a Smi) and truncates the result to a 32-bit integer.
Node* EffectControlLinearizer::LowerPlainPrimitiveToWord32(Node* node) {
  Node* value = node->InputAt(0);

  auto if_not_smi = __ MakeDeferredLabel();
  auto if_to_number_smi = __ MakeLabel();
  auto done = __ MakeLabel(MachineRepresentation::kWord32);

  // Fast path: a Smi input only needs untagging.
  Node* check0 = ObjectIsSmi(value);
  __ GotoIfNot(check0, &if_not_smi);
  __ Goto(&done, ChangeSmiToInt32(value));

  // Slow path: convert to a Number first.
  __ Bind(&if_not_smi);
  Node* to_number =
      __ PlainPrimitiveToNumber(TNode<Object>::UncheckedCast(value));

  // The conversion result is either a Smi or a HeapNumber.
  Node* check1 = ObjectIsSmi(to_number);
  __ GotoIf(check1, &if_to_number_smi);
  Node* number = __ LoadField(AccessBuilder::ForHeapNumberValue(), to_number);
  __ Goto(&done, __ TruncateFloat64ToWord32(number));

  __ Bind(&if_to_number_smi);
  __ Goto(&done, ChangeSmiToInt32(to_number));

  __ Bind(&done);
  return done.PhiAt(0);
}
4860 
// Lowers PlainPrimitiveToFloat64: converts the input to a Number (unless
// it is already a Smi) and produces its value as a float64.
Node* EffectControlLinearizer::LowerPlainPrimitiveToFloat64(Node* node) {
  Node* value = node->InputAt(0);

  auto if_not_smi = __ MakeDeferredLabel();
  auto if_to_number_smi = __ MakeLabel();
  auto done = __ MakeLabel(MachineRepresentation::kFloat64);

  // Fast path: untag the Smi and widen to float64.
  Node* check0 = ObjectIsSmi(value);
  __ GotoIfNot(check0, &if_not_smi);
  Node* from_smi = ChangeSmiToInt32(value);
  __ Goto(&done, __ ChangeInt32ToFloat64(from_smi));

  // Slow path: convert to a Number first.
  __ Bind(&if_not_smi);
  Node* to_number =
      __ PlainPrimitiveToNumber(TNode<Object>::UncheckedCast(value));
  Node* check1 = ObjectIsSmi(to_number);
  __ GotoIf(check1, &if_to_number_smi);

  // HeapNumber result: load its float64 payload directly.
  Node* number = __ LoadField(AccessBuilder::ForHeapNumberValue(), to_number);
  __ Goto(&done, number);

  // Smi result of the conversion: untag and widen.
  __ Bind(&if_to_number_smi);
  Node* number_from_smi = ChangeSmiToInt32(to_number);
  number_from_smi = __ ChangeInt32ToFloat64(number_from_smi);
  __ Goto(&done, number_from_smi);

  __ Bind(&done);
  return done.PhiAt(0);
}
4890 
// Lowers EnsureWritableFastElements: returns {elements} unchanged when its
// map is the plain FixedArray map (i.e. writable); otherwise calls the
// CopyFastSmiOrObjectElements builtin to create a writable copy for
// {object} and returns that copy.
Node* EffectControlLinearizer::LowerEnsureWritableFastElements(Node* node) {
  Node* object = node->InputAt(0);
  Node* elements = node->InputAt(1);

  auto if_not_fixed_array = __ MakeDeferredLabel();
  auto done = __ MakeLabel(MachineRepresentation::kTagged);

  // Load the current map of {elements}.
  Node* elements_map = __ LoadField(AccessBuilder::ForMap(), elements);

  // Check if {elements} is not a copy-on-write FixedArray.
  Node* check = __ TaggedEqual(elements_map, __ FixedArrayMapConstant());
  __ GotoIfNot(check, &if_not_fixed_array);
  // Nothing to do if the {elements} are not copy-on-write.
  __ Goto(&done, elements);

  __ Bind(&if_not_fixed_array);
  // We need to take a copy of the {elements} and set them up for {object}.
  Operator::Properties properties = Operator::kEliminatable;
  Callable callable =
      Builtins::CallableFor(isolate(), Builtins::kCopyFastSmiOrObjectElements);
  CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
  auto call_descriptor = Linkage::GetStubCallDescriptor(
      graph()->zone(), callable.descriptor(),
      callable.descriptor().GetStackParameterCount(), flags, properties);
  Node* result = __ Call(call_descriptor, __ HeapConstant(callable.code()),
                         object, __ NoContextConstant());
  __ Goto(&done, result);

  __ Bind(&done);
  return done.PhiAt(0);
}
4923 
LowerMaybeGrowFastElements(Node * node,Node * frame_state)4924 Node* EffectControlLinearizer::LowerMaybeGrowFastElements(Node* node,
4925                                                           Node* frame_state) {
4926   GrowFastElementsParameters params = GrowFastElementsParametersOf(node->op());
4927   Node* object = node->InputAt(0);
4928   Node* elements = node->InputAt(1);
4929   Node* index = node->InputAt(2);
4930   Node* elements_length = node->InputAt(3);
4931 
4932   auto done = __ MakeLabel(MachineRepresentation::kTagged);
4933   auto if_grow = __ MakeDeferredLabel();
4934   auto if_not_grow = __ MakeLabel();
4935 
4936   // Check if we need to grow the {elements} backing store.
4937   Node* check = __ Uint32LessThan(index, elements_length);
4938   __ GotoIfNot(check, &if_grow);
4939   __ Goto(&done, elements);
4940 
4941   __ Bind(&if_grow);
4942   // We need to grow the {elements} for {object}.
4943   Operator::Properties properties = Operator::kEliminatable;
4944   Callable callable =
4945       (params.mode() == GrowFastElementsMode::kDoubleElements)
4946           ? Builtins::CallableFor(isolate(), Builtins::kGrowFastDoubleElements)
4947           : Builtins::CallableFor(isolate(),
4948                                   Builtins::kGrowFastSmiOrObjectElements);
4949   CallDescriptor::Flags call_flags = CallDescriptor::kNoFlags;
4950   auto call_descriptor = Linkage::GetStubCallDescriptor(
4951       graph()->zone(), callable.descriptor(),
4952       callable.descriptor().GetStackParameterCount(), call_flags, properties);
4953   Node* new_elements =
4954       __ Call(call_descriptor, __ HeapConstant(callable.code()), object,
4955               ChangeInt32ToSmi(index), __ NoContextConstant());
4956 
4957   // Ensure that we were able to grow the {elements}.
4958   __ DeoptimizeIf(DeoptimizeReason::kCouldNotGrowElements, params.feedback(),
4959                   ObjectIsSmi(new_elements), frame_state);
4960   __ Goto(&done, new_elements);
4961 
4962   __ Bind(&done);
4963   return done.PhiAt(0);
4964 }
4965 
// Lowers TransitionElementsKind: when {object} still has the transition's
// source map, installs the target map — either via a plain map store (fast
// transition) or by calling the TransitionElementsKind runtime function
// (slow transition). Objects with a different map are left untouched.
void EffectControlLinearizer::LowerTransitionElementsKind(Node* node) {
  ElementsTransition const transition = ElementsTransitionOf(node->op());
  Node* object = node->InputAt(0);

  auto if_map_same = __ MakeDeferredLabel();
  auto done = __ MakeLabel();

  Node* source_map = __ HeapConstant(transition.source());
  Node* target_map = __ HeapConstant(transition.target());

  // Load the current map of {object}.
  Node* object_map = __ LoadField(AccessBuilder::ForMap(), object);

  // Check if {object_map} is the same as {source_map}.
  Node* check = __ TaggedEqual(object_map, source_map);
  __ GotoIf(check, &if_map_same);
  __ Goto(&done);

  // Deferred: actually perform the transition.
  __ Bind(&if_map_same);
  switch (transition.mode()) {
    case ElementsTransition::kFastTransition:
      // In-place migration of {object}, just store the {target_map}.
      __ StoreField(AccessBuilder::ForMap(), object, target_map);
      break;
    case ElementsTransition::kSlowTransition: {
      // Instance migration, call out to the runtime for {object}.
      Operator::Properties properties = Operator::kNoDeopt | Operator::kNoThrow;
      Runtime::FunctionId id = Runtime::kTransitionElementsKind;
      auto call_descriptor = Linkage::GetRuntimeCallDescriptor(
          graph()->zone(), id, 2, properties, CallDescriptor::kNoFlags);
      __ Call(call_descriptor, __ CEntryStubConstant(1), object, target_map,
              __ ExternalConstant(ExternalReference::Create(id)),
              __ Int32Constant(2), __ NoContextConstant());
      break;
    }
  }
  __ Goto(&done);

  __ Bind(&done);
}
5006 
LowerLoadMessage(Node * node)5007 Node* EffectControlLinearizer::LowerLoadMessage(Node* node) {
5008   Node* offset = node->InputAt(0);
5009   Node* object_pattern =
5010       __ LoadField(AccessBuilder::ForExternalIntPtr(), offset);
5011   return __ BitcastWordToTagged(object_pattern);
5012 }
5013 
LowerStoreMessage(Node * node)5014 void EffectControlLinearizer::LowerStoreMessage(Node* node) {
5015   Node* offset = node->InputAt(0);
5016   Node* object = node->InputAt(1);
5017   Node* object_pattern = __ BitcastTaggedToWord(object);
5018   __ StoreField(AccessBuilder::ForExternalIntPtr(), offset, object_pattern);
5019 }
5020 
5021 // TODO(mslekova): Avoid code duplication with simplified lowering.
MachineTypeFor(CTypeInfo::Type type)5022 static MachineType MachineTypeFor(CTypeInfo::Type type) {
5023   switch (type) {
5024     case CTypeInfo::Type::kVoid:
5025       return MachineType::AnyTagged();
5026     case CTypeInfo::Type::kBool:
5027       return MachineType::Bool();
5028     case CTypeInfo::Type::kInt32:
5029       return MachineType::Int32();
5030     case CTypeInfo::Type::kUint32:
5031       return MachineType::Uint32();
5032     case CTypeInfo::Type::kInt64:
5033       return MachineType::Int64();
5034     case CTypeInfo::Type::kUint64:
5035       return MachineType::Uint64();
5036     case CTypeInfo::Type::kFloat32:
5037       return MachineType::Float32();
5038     case CTypeInfo::Type::kFloat64:
5039       return MachineType::Float64();
5040     case CTypeInfo::Type::kV8Value:
5041       return MachineType::AnyTagged();
5042   }
5043 }
5044 
// Lowers FastApiCall: emits the direct C call for the fast path, then
// inspects the fallback flag written through a stack slot; when the
// callback requested a fallback, performs the regular (slow) API call
// instead and returns its result.
Node* EffectControlLinearizer::LowerFastApiCall(Node* node) {
  FastApiCallNode n(node);
  FastApiCallParameters const& params = n.Parameters();
  const CFunctionInfo* c_signature = params.signature();
  const int c_arg_count = c_signature->ArgumentCount();
  CallDescriptor* js_call_descriptor = params.descriptor();
  int js_arg_count = static_cast<int>(js_call_descriptor->ParameterCount());
  const int value_input_count = node->op()->ValueInputCount();
  CHECK_EQ(FastApiCallNode::ArityForArgc(c_arg_count, js_arg_count),
           value_input_count);

  // Lazily create a single stack slot that is reused by every fast API
  // call lowered in this graph.
  if (fast_api_call_stack_slot_ == nullptr) {
    // Add the { fallback } output parameter.
    int kAlign = 4;
    int kSize = sizeof(v8::FastApiCallbackOptions);
    // If this check fails, probably you've added new fields to
    // v8::FastApiCallbackOptions, which means you'll need to write code
    // that initializes and reads from them too (see the Store and Load to
    // fast_api_call_stack_slot_ below).
    CHECK_EQ(kSize, 1);
    fast_api_call_stack_slot_ = __ StackSlot(kSize, kAlign);
  }

  // Generate the store to `fast_api_call_stack_slot_`.
  // (Clears the fallback flag before the call.)
  __ Store(StoreRepresentation(MachineRepresentation::kWord32, kNoWriteBarrier),
           fast_api_call_stack_slot_, 0, jsgraph()->ZeroConstant());

  // Build the C signature: all C arguments plus the out-parameter slot.
  MachineSignature::Builder builder(
      graph()->zone(), 1, c_arg_count + FastApiCallNode::kHasErrorInputCount);
  MachineType return_type = MachineTypeFor(c_signature->ReturnInfo().GetType());
  builder.AddReturn(return_type);
  for (int i = 0; i < c_arg_count; ++i) {
    MachineType machine_type =
        MachineTypeFor(c_signature->ArgumentInfo(i).GetType());
    builder.AddParam(machine_type);
  }
  builder.AddParam(MachineType::Pointer());  // fast_api_call_stack_slot_

  CallDescriptor* call_descriptor =
      Linkage::GetSimplifiedCDescriptor(graph()->zone(), builder.Build());

  call_descriptor->SetCFunctionInfo(c_signature);

  // Assemble the fast-call inputs: target, C arguments (float32 arguments
  // are truncated from float64), the stack slot, then effect and control.
  Node** const inputs = graph()->zone()->NewArray<Node*>(
      c_arg_count + FastApiCallNode::kFastCallExtraInputCount);
  inputs[0] = NodeProperties::GetValueInput(node, 0);  // the target
  for (int i = FastApiCallNode::kFastTargetInputCount;
       i < c_arg_count + FastApiCallNode::kFastTargetInputCount; ++i) {
    if (c_signature->ArgumentInfo(i - 1).GetType() ==
        CTypeInfo::Type::kFloat32) {
      inputs[i] =
          __ TruncateFloat64ToFloat32(NodeProperties::GetValueInput(node, i));
    } else {
      inputs[i] = NodeProperties::GetValueInput(node, i);
    }
  }
  inputs[c_arg_count + 1] = fast_api_call_stack_slot_;
  inputs[c_arg_count + 2] = __ effect();
  inputs[c_arg_count + 3] = __ control();

  __ Call(call_descriptor,
          c_arg_count + FastApiCallNode::kFastCallExtraInputCount, inputs);

  // Generate the load from `fast_api_call_stack_slot_`.
  Node* load = __ Load(MachineType::Int32(), fast_api_call_stack_slot_, 0);

  // Zero means the fast call completed; non-zero requests the slow path.
  TNode<Boolean> cond =
      TNode<Boolean>::UncheckedCast(__ Word32Equal(load, __ Int32Constant(0)));
  // Hint to true.
  auto if_success = __ MakeLabel();
  auto if_error = __ MakeDeferredLabel();
  auto merge = __ MakeLabel(MachineRepresentation::kTagged);
  __ Branch(cond, &if_success, &if_error);

  // Generate fast call.
  // NOTE(review): the fast path produces undefined as the node's result;
  // the C return value itself is not routed through here.
  __ Bind(&if_success);
  Node* then_result = [&]() { return __ UndefinedConstant(); }();
  __ Goto(&merge, then_result);

  // Generate direct slow call.
  __ Bind(&if_error);
  Node* else_result = [&]() {
    // The slow-call arguments were recorded on the node; forward them
    // together with the current effect and control.
    Node** const slow_inputs = graph()->zone()->NewArray<Node*>(
        n.SlowCallArgumentCount() +
        FastApiCallNode::kEffectAndControlInputCount);

    int fast_call_params = c_arg_count + FastApiCallNode::kFastTargetInputCount;
    CHECK_EQ(value_input_count - fast_call_params, n.SlowCallArgumentCount());
    int index = 0;
    for (; index < n.SlowCallArgumentCount(); ++index) {
      slow_inputs[index] = n.SlowCallArgument(index);
    }

    slow_inputs[index] = __ effect();
    slow_inputs[index + 1] = __ control();
    Node* slow_call = __ Call(
        params.descriptor(),
        index + FastApiCallNode::kEffectAndControlInputCount, slow_inputs);
    return slow_call;
  }();
  __ Goto(&merge, else_result);

  __ Bind(&merge);
  return merge.PhiAt(0);
}
5150 
// Lowers a LoadFieldByIndex node into raw memory accesses. The {index}
// operand is an encoded field index: bit 0 distinguishes tagged fields (0)
// from double fields (1), and -- once that bit is accounted for -- a
// negative index selects the out-of-object properties backing store while
// a non-negative index selects an in-object field. (Encoding inferred from
// the checks below; confirm against the producer of LoadFieldByIndex.)
Node* EffectControlLinearizer::LowerLoadFieldByIndex(Node* node) {
  Node* object = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* zero = __ IntPtrConstant(0);
  Node* one = __ IntPtrConstant(1);

  // Sign-extend the {index} on 64-bit architectures.
  if (machine()->Is64()) {
    index = __ ChangeInt32ToInt64(index);
  }

  auto if_double = __ MakeDeferredLabel();
  auto done = __ MakeLabel(MachineRepresentation::kTagged);

  // Check if field is a mutable double field.
  __ GotoIfNot(__ IntPtrEqual(__ WordAnd(index, one), zero), &if_double);

  // The field is a proper Tagged field on {object}. The {index} is shifted
  // to the left by one in the code below.
  {
    // Check if field is in-object or out-of-object.
    auto if_outofobject = __ MakeLabel();
    __ GotoIf(__ IntLessThan(index, zero), &if_outofobject);

    // The field is located in the {object} itself.
    {
      // {index} is pre-shifted left by one, so scale by kTaggedSizeLog2 - 1
      // to obtain a byte offset.
      Node* offset =
          __ IntAdd(__ WordShl(index, __ IntPtrConstant(kTaggedSizeLog2 - 1)),
                    __ IntPtrConstant(JSObject::kHeaderSize - kHeapObjectTag));
      Node* result = __ Load(MachineType::AnyTagged(), object, offset);
      __ Goto(&done, result);
    }

    // The field is located in the properties backing store of {object}.
    // The {index} is equal to the negated out of property index plus 1.
    __ Bind(&if_outofobject);
    {
      Node* properties = __ LoadField(
          AccessBuilder::ForJSObjectPropertiesOrHashKnownPointer(), object);
      Node* offset =
          __ IntAdd(__ WordShl(__ IntSub(zero, index),
                               __ IntPtrConstant(kTaggedSizeLog2 - 1)),
                    __ IntPtrConstant((FixedArray::kHeaderSize - kTaggedSize) -
                                      kHeapObjectTag));
      Node* result = __ Load(MachineType::AnyTagged(), properties, offset);
      __ Goto(&done, result);
    }
  }

  // The field is a Double field, either unboxed in the object on 64-bit
  // architectures, or a mutable HeapNumber.
  __ Bind(&if_double);
  {
    auto loaded_field = __ MakeLabel(MachineRepresentation::kTagged);
    auto done_double = __ MakeLabel(MachineRepresentation::kFloat64);

    // Strip the "double field" tag bit from {index}.
    index = __ WordSar(index, one);

    // Check if field is in-object or out-of-object.
    auto if_outofobject = __ MakeLabel();
    __ GotoIf(__ IntLessThan(index, zero), &if_outofobject);

    // The field is located in the {object} itself.
    {
      Node* offset =
          __ IntAdd(__ WordShl(index, __ IntPtrConstant(kTaggedSizeLog2)),
                    __ IntPtrConstant(JSObject::kHeaderSize - kHeapObjectTag));
      if (FLAG_unbox_double_fields) {
        // The double value is stored unboxed directly in the object.
        Node* result = __ Load(MachineType::Float64(), object, offset);
        __ Goto(&done_double, result);
      } else {
        // The field holds a tagged value (expected to be a HeapNumber);
        // unpack it below at {loaded_field}.
        Node* field = __ Load(MachineType::AnyTagged(), object, offset);
        __ Goto(&loaded_field, field);
      }
    }

    __ Bind(&if_outofobject);
    {
      Node* properties = __ LoadField(
          AccessBuilder::ForJSObjectPropertiesOrHashKnownPointer(), object);
      Node* offset =
          __ IntAdd(__ WordShl(__ IntSub(zero, index),
                               __ IntPtrConstant(kTaggedSizeLog2)),
                    __ IntPtrConstant((FixedArray::kHeaderSize - kTaggedSize) -
                                      kHeapObjectTag));
      Node* field = __ Load(MachineType::AnyTagged(), properties, offset);
      __ Goto(&loaded_field, field);
    }

    __ Bind(&loaded_field);
    {
      Node* field = loaded_field.PhiAt(0);
      // We may have transitioned in-place away from double, so check that
      // this is a HeapNumber -- otherwise the load is fine and we don't need
      // to copy anything anyway.
      __ GotoIf(ObjectIsSmi(field), &done, field);
      Node* field_map = __ LoadField(AccessBuilder::ForMap(), field);
      __ GotoIfNot(__ TaggedEqual(field_map, __ HeapNumberMapConstant()), &done,
                   field);

      // It is a HeapNumber: extract the raw float64 value.
      Node* value = __ LoadField(AccessBuilder::ForHeapNumberValue(), field);
      __ Goto(&done_double, value);
    }

    __ Bind(&done_double);
    {
      // Box the float64 into a fresh HeapNumber so the result is tagged.
      Node* result = AllocateHeapNumberWithValue(done_double.PhiAt(0));
      __ Goto(&done, result);
    }
  }

  __ Bind(&done);
  return done.PhiAt(0);
}
5265 
BuildReverseBytes(ExternalArrayType type,Node * value)5266 Node* EffectControlLinearizer::BuildReverseBytes(ExternalArrayType type,
5267                                                  Node* value) {
5268   switch (type) {
5269     case kExternalInt8Array:
5270     case kExternalUint8Array:
5271     case kExternalUint8ClampedArray:
5272       return value;
5273 
5274     case kExternalInt16Array: {
5275       Node* result = __ Word32ReverseBytes(value);
5276       result = __ Word32Sar(result, __ Int32Constant(16));
5277       return result;
5278     }
5279 
5280     case kExternalUint16Array: {
5281       Node* result = __ Word32ReverseBytes(value);
5282       result = __ Word32Shr(result, __ Int32Constant(16));
5283       return result;
5284     }
5285 
5286     case kExternalInt32Array:  // Fall through.
5287     case kExternalUint32Array:
5288       return __ Word32ReverseBytes(value);
5289 
5290     case kExternalFloat32Array: {
5291       Node* result = __ BitcastFloat32ToInt32(value);
5292       result = __ Word32ReverseBytes(result);
5293       result = __ BitcastInt32ToFloat32(result);
5294       return result;
5295     }
5296 
5297     case kExternalFloat64Array: {
5298       if (machine()->Is64()) {
5299         Node* result = __ BitcastFloat64ToInt64(value);
5300         result = __ Word64ReverseBytes(result);
5301         result = __ BitcastInt64ToFloat64(result);
5302         return result;
5303       } else {
5304         Node* lo = __ Word32ReverseBytes(__ Float64ExtractLowWord32(value));
5305         Node* hi = __ Word32ReverseBytes(__ Float64ExtractHighWord32(value));
5306         Node* result = __ Float64Constant(0.0);
5307         result = __ Float64InsertLowWord32(result, hi);
5308         result = __ Float64InsertHighWord32(result, lo);
5309         return result;
5310       }
5311     }
5312 
5313     case kExternalBigInt64Array:
5314     case kExternalBigUint64Array:
5315       UNREACHABLE();
5316   }
5317 }
5318 
// Lowers a LoadDataViewElement node into an unaligned raw load, followed
// by a byte swap if the requested endianness (a runtime value) differs
// from the target's endianness.
Node* EffectControlLinearizer::LowerLoadDataViewElement(Node* node) {
  ExternalArrayType element_type = ExternalArrayTypeOf(node->op());
  Node* object = node->InputAt(0);
  Node* storage = node->InputAt(1);
  Node* index = node->InputAt(2);
  Node* is_little_endian = node->InputAt(3);

  // We need to keep the {object} (either the JSArrayBuffer or the JSDataView)
  // alive so that the GC will not release the JSArrayBuffer (if there's any)
  // as long as we are still operating on it.
  __ Retain(object);

  MachineType const machine_type =
      AccessBuilder::ForTypedArrayElement(element_type, true).machine_type;

  // DataView accesses have no alignment guarantee, hence the unaligned load.
  Node* value = __ LoadUnaligned(machine_type, storage, index);
  auto big_endian = __ MakeLabel();
  auto done = __ MakeLabel(machine_type.representation());

  __ GotoIfNot(is_little_endian, &big_endian);
  {  // Little-endian load.
#if V8_TARGET_LITTLE_ENDIAN
    __ Goto(&done, value);
#else
    __ Goto(&done, BuildReverseBytes(element_type, value));
#endif  // V8_TARGET_LITTLE_ENDIAN
  }

  __ Bind(&big_endian);
  {  // Big-endian load.
#if V8_TARGET_LITTLE_ENDIAN
    __ Goto(&done, BuildReverseBytes(element_type, value));
#else
    __ Goto(&done, value);
#endif  // V8_TARGET_LITTLE_ENDIAN
  }

  // We're done, return {result}.
  __ Bind(&done);
  return done.PhiAt(0);
}
5360 
// Lowers a StoreDataViewElement node: byte-swaps {value} first if the
// requested endianness (a runtime value) differs from the target's, then
// performs an unaligned raw store.
void EffectControlLinearizer::LowerStoreDataViewElement(Node* node) {
  ExternalArrayType element_type = ExternalArrayTypeOf(node->op());
  Node* object = node->InputAt(0);
  Node* storage = node->InputAt(1);
  Node* index = node->InputAt(2);
  Node* value = node->InputAt(3);
  Node* is_little_endian = node->InputAt(4);

  // We need to keep the {object} (either the JSArrayBuffer or the JSDataView)
  // alive so that the GC will not release the JSArrayBuffer (if there's any)
  // as long as we are still operating on it.
  __ Retain(object);

  MachineType const machine_type =
      AccessBuilder::ForTypedArrayElement(element_type, true).machine_type;

  auto big_endian = __ MakeLabel();
  auto done = __ MakeLabel(machine_type.representation());

  __ GotoIfNot(is_little_endian, &big_endian);
  {  // Little-endian store.
#if V8_TARGET_LITTLE_ENDIAN
    __ Goto(&done, value);
#else
    __ Goto(&done, BuildReverseBytes(element_type, value));
#endif  // V8_TARGET_LITTLE_ENDIAN
  }

  __ Bind(&big_endian);
  {  // Big-endian store.
#if V8_TARGET_LITTLE_ENDIAN
    __ Goto(&done, BuildReverseBytes(element_type, value));
#else
    __ Goto(&done, value);
#endif  // V8_TARGET_LITTLE_ENDIAN
  }

  // DataView accesses have no alignment guarantee, hence the unaligned store.
  __ Bind(&done);
  __ StoreUnaligned(machine_type.representation(), storage, index,
                    done.PhiAt(0));
}
5402 
5403 // Compute the data pointer, handling the case where the {external} pointer
5404 // is the effective data pointer (i.e. the {base} is Smi zero).
BuildTypedArrayDataPointer(Node * base,Node * external)5405 Node* EffectControlLinearizer::BuildTypedArrayDataPointer(Node* base,
5406                                                           Node* external) {
5407   if (IntPtrMatcher(base).Is(0)) {
5408     return external;
5409   } else {
5410     if (COMPRESS_POINTERS_BOOL) {
5411       base = __ BitcastTaggedToWord(base);
5412       // Zero-extend Tagged_t to UintPtr according to current compression
5413       // scheme so that the addition with |external_pointer| (which already
5414       // contains compensated offset value) will decompress the tagged value.
5415       // See JSTypedArray::ExternalPointerCompensationForOnHeapArray() for
5416       // details.
5417       base = ChangeUint32ToUintPtr(base);
5418     }
5419     return __ UnsafePointerAdd(base, external);
5420   }
5421 }
5422 
LowerLoadTypedElement(Node * node)5423 Node* EffectControlLinearizer::LowerLoadTypedElement(Node* node) {
5424   ExternalArrayType array_type = ExternalArrayTypeOf(node->op());
5425   Node* buffer = node->InputAt(0);
5426   Node* base = node->InputAt(1);
5427   Node* external = node->InputAt(2);
5428   Node* index = node->InputAt(3);
5429 
5430   // We need to keep the {buffer} alive so that the GC will not release the
5431   // ArrayBuffer (if there's any) as long as we are still operating on it.
5432   __ Retain(buffer);
5433 
5434   Node* data_ptr = BuildTypedArrayDataPointer(base, external);
5435 
5436   // Perform the actual typed element access.
5437   return __ LoadElement(AccessBuilder::ForTypedArrayElement(
5438                             array_type, true, LoadSensitivity::kCritical),
5439                         data_ptr, index);
5440 }
5441 
LowerLoadStackArgument(Node * node)5442 Node* EffectControlLinearizer::LowerLoadStackArgument(Node* node) {
5443   Node* base = node->InputAt(0);
5444   Node* index = node->InputAt(1);
5445 
5446   Node* argument =
5447       __ LoadElement(AccessBuilder::ForStackArgument(), base, index);
5448 
5449   return __ BitcastWordToTagged(argument);
5450 }
5451 
LowerStoreTypedElement(Node * node)5452 void EffectControlLinearizer::LowerStoreTypedElement(Node* node) {
5453   ExternalArrayType array_type = ExternalArrayTypeOf(node->op());
5454   Node* buffer = node->InputAt(0);
5455   Node* base = node->InputAt(1);
5456   Node* external = node->InputAt(2);
5457   Node* index = node->InputAt(3);
5458   Node* value = node->InputAt(4);
5459 
5460   // We need to keep the {buffer} alive so that the GC will not release the
5461   // ArrayBuffer (if there's any) as long as we are still operating on it.
5462   __ Retain(buffer);
5463 
5464   Node* data_ptr = BuildTypedArrayDataPointer(base, external);
5465 
5466   // Perform the actual typed element access.
5467   __ StoreElement(AccessBuilder::ForTypedArrayElement(array_type, true),
5468                   data_ptr, index, value);
5469 }
5470 
// Transitions {array}'s elements kind from {from} to {to}. Simple map
// changes are lowered to a plain map store; anything else calls the
// Runtime::kTransitionElementsKind runtime function.
void EffectControlLinearizer::TransitionElementsTo(Node* node, Node* array,
                                                   ElementsKind from,
                                                   ElementsKind to) {
  DCHECK(IsMoreGeneralElementsKindTransition(from, to));
  DCHECK(to == HOLEY_ELEMENTS || to == HOLEY_DOUBLE_ELEMENTS);

  // The target map is an operator parameter; which one depends on {to}.
  Handle<Map> target(to == HOLEY_ELEMENTS ? FastMapParameterOf(node->op())
                                          : DoubleMapParameterOf(node->op()));
  Node* target_map = __ HeapConstant(target);

  if (IsSimpleMapChangeTransition(from, to)) {
    // No backing-store change needed: just swap in the new map.
    __ StoreField(AccessBuilder::ForMap(), array, target_map);
  } else {
    // Instance migration, call out to the runtime for {array}.
    Operator::Properties properties = Operator::kNoDeopt | Operator::kNoThrow;
    Runtime::FunctionId id = Runtime::kTransitionElementsKind;
    auto call_descriptor = Linkage::GetRuntimeCallDescriptor(
        graph()->zone(), id, 2, properties, CallDescriptor::kNoFlags);
    __ Call(call_descriptor, __ CEntryStubConstant(1), array, target_map,
            __ ExternalConstant(ExternalReference::Create(id)),
            __ Int32Constant(2), __ NoContextConstant());
  }
}
5494 
IsElementsKindGreaterThan(Node * kind,ElementsKind reference_kind)5495 Node* EffectControlLinearizer::IsElementsKindGreaterThan(
5496     Node* kind, ElementsKind reference_kind) {
5497   Node* ref_kind = __ Int32Constant(reference_kind);
5498   Node* ret = __ Int32LessThan(ref_kind, kind);
5499   return ret;
5500 }
5501 
// Lowers a TransitionAndStoreElement node: possibly transitions {array}'s
// elements kind based on the incoming {value}, then stores {value} into
// the (possibly new) backing store. The full decision tree is sketched in
// the pseudo-code comment below.
void EffectControlLinearizer::LowerTransitionAndStoreElement(Node* node) {
  Node* array = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);

  // Possibly transition array based on input and store.
  //
  //   -- TRANSITION PHASE -----------------
  //   kind = ElementsKind(array)
  //   if value is not smi {
  //     if kind == HOLEY_SMI_ELEMENTS {
  //       if value is heap number {
  //         Transition array to HOLEY_DOUBLE_ELEMENTS
  //         kind = HOLEY_DOUBLE_ELEMENTS
  //       } else {
  //         Transition array to HOLEY_ELEMENTS
  //         kind = HOLEY_ELEMENTS
  //       }
  //     } else if kind == HOLEY_DOUBLE_ELEMENTS {
  //       if value is not heap number {
  //         Transition array to HOLEY_ELEMENTS
  //         kind = HOLEY_ELEMENTS
  //       }
  //     }
  //   }
  //
  //   -- STORE PHASE ----------------------
  //   [make sure {kind} is up-to-date]
  //   if kind == HOLEY_DOUBLE_ELEMENTS {
  //     if value is smi {
  //       float_value = convert smi to float
  //       Store array[index] = float_value
  //     } else {
  //       float_value = value
  //       Store array[index] = float_value
  //     }
  //   } else {
  //     // kind is HOLEY_SMI_ELEMENTS or HOLEY_ELEMENTS
  //     Store array[index] = value
  //   }
  //
  Node* map = __ LoadField(AccessBuilder::ForMap(), array);
  Node* kind;
  {
    // Decode the elements kind from the map's bit_field2.
    Node* bit_field2 = __ LoadField(AccessBuilder::ForMapBitField2(), map);
    Node* mask = __ Int32Constant(Map::Bits2::ElementsKindBits::kMask);
    Node* andit = __ Word32And(bit_field2, mask);
    Node* shift = __ Int32Constant(Map::Bits2::ElementsKindBits::kShift);
    kind = __ Word32Shr(andit, shift);
  }

  auto do_store = __ MakeLabel(MachineRepresentation::kWord32);
  // We can store a smi anywhere.
  __ GotoIf(ObjectIsSmi(value), &do_store, kind);

  // {value} is a HeapObject.
  auto transition_smi_array = __ MakeDeferredLabel();
  auto transition_double_to_fast = __ MakeDeferredLabel();
  {
    __ GotoIfNot(IsElementsKindGreaterThan(kind, HOLEY_SMI_ELEMENTS),
                 &transition_smi_array);
    __ GotoIfNot(IsElementsKindGreaterThan(kind, HOLEY_ELEMENTS), &do_store,
                 kind);

    // We have double elements kind. Only a HeapNumber can be stored
    // without effecting a transition.
    Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
    Node* heap_number_map = __ HeapNumberMapConstant();
    Node* check = __ TaggedEqual(value_map, heap_number_map);
    __ GotoIfNot(check, &transition_double_to_fast);
    __ Goto(&do_store, kind);
  }

  __ Bind(&transition_smi_array);  // deferred code.
  {
    // Transition {array} from HOLEY_SMI_ELEMENTS to HOLEY_DOUBLE_ELEMENTS or
    // to HOLEY_ELEMENTS.
    auto if_value_not_heap_number = __ MakeLabel();
    Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
    Node* heap_number_map = __ HeapNumberMapConstant();
    Node* check = __ TaggedEqual(value_map, heap_number_map);
    __ GotoIfNot(check, &if_value_not_heap_number);
    {
      // {value} is a HeapNumber.
      TransitionElementsTo(node, array, HOLEY_SMI_ELEMENTS,
                           HOLEY_DOUBLE_ELEMENTS);
      __ Goto(&do_store, __ Int32Constant(HOLEY_DOUBLE_ELEMENTS));
    }
    __ Bind(&if_value_not_heap_number);
    {
      TransitionElementsTo(node, array, HOLEY_SMI_ELEMENTS, HOLEY_ELEMENTS);
      __ Goto(&do_store, __ Int32Constant(HOLEY_ELEMENTS));
    }
  }

  __ Bind(&transition_double_to_fast);  // deferred code.
  {
    TransitionElementsTo(node, array, HOLEY_DOUBLE_ELEMENTS, HOLEY_ELEMENTS);
    __ Goto(&do_store, __ Int32Constant(HOLEY_ELEMENTS));
  }

  // Make sure kind is up-to-date.
  __ Bind(&do_store);
  kind = do_store.PhiAt(0);

  Node* elements = __ LoadField(AccessBuilder::ForJSObjectElements(), array);
  auto if_kind_is_double = __ MakeLabel();
  auto done = __ MakeLabel();
  __ GotoIf(IsElementsKindGreaterThan(kind, HOLEY_ELEMENTS),
            &if_kind_is_double);
  {
    // Our ElementsKind is HOLEY_SMI_ELEMENTS or HOLEY_ELEMENTS.
    __ StoreElement(AccessBuilder::ForFixedArrayElement(HOLEY_ELEMENTS),
                    elements, index, value);
    __ Goto(&done);
  }
  __ Bind(&if_kind_is_double);
  {
    // Our ElementsKind is HOLEY_DOUBLE_ELEMENTS.
    auto do_double_store = __ MakeLabel();
    __ GotoIfNot(ObjectIsSmi(value), &do_double_store);
    {
      // Smi value: unpack and widen it to float64 before storing.
      Node* int_value = ChangeSmiToInt32(value);
      Node* float_value = __ ChangeInt32ToFloat64(int_value);
      __ StoreElement(AccessBuilder::ForFixedDoubleArrayElement(), elements,
                      index, float_value);
      __ Goto(&done);
    }
    __ Bind(&do_double_store);
    {
      // HeapNumber value: store its raw float64, silencing NaNs so no
      // signaling-NaN bit pattern ends up in the double array.
      Node* float_value =
          __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
      __ StoreElement(AccessBuilder::ForFixedDoubleArrayElement(), elements,
                      index, __ Float64SilenceNaN(float_value));
      __ Goto(&done);
    }
  }

  __ Bind(&done);
}
5642 
// Lowers a TransitionAndStoreNumberElement node, where {value} is already
// an untagged float64: transitions the array to HOLEY_DOUBLE_ELEMENTS if
// necessary, then stores the float.
void EffectControlLinearizer::LowerTransitionAndStoreNumberElement(Node* node) {
  Node* array = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);  // This is a Float64, not tagged.

  // Possibly transition array based on input and store.
  //
  //   -- TRANSITION PHASE -----------------
  //   kind = ElementsKind(array)
  //   if kind == HOLEY_SMI_ELEMENTS {
  //     Transition array to HOLEY_DOUBLE_ELEMENTS
  //   } else if kind != HOLEY_DOUBLE_ELEMENTS {
  //     This is UNREACHABLE, execute a debug break.
  //   }
  //
  //   -- STORE PHASE ----------------------
  //   Store array[index] = value (it's a float)
  //
  Node* map = __ LoadField(AccessBuilder::ForMap(), array);
  Node* kind;
  {
    // Decode the elements kind from the map's bit_field2.
    Node* bit_field2 = __ LoadField(AccessBuilder::ForMapBitField2(), map);
    Node* mask = __ Int32Constant(Map::Bits2::ElementsKindBits::kMask);
    Node* andit = __ Word32And(bit_field2, mask);
    Node* shift = __ Int32Constant(Map::Bits2::ElementsKindBits::kShift);
    kind = __ Word32Shr(andit, shift);
  }

  auto do_store = __ MakeLabel();

  // {value} is a float64.
  auto transition_smi_array = __ MakeDeferredLabel();
  {
    __ GotoIfNot(IsElementsKindGreaterThan(kind, HOLEY_SMI_ELEMENTS),
                 &transition_smi_array);
    // We expect that our input array started at HOLEY_SMI_ELEMENTS, and
    // climbs the lattice up to HOLEY_DOUBLE_ELEMENTS. Force a debug break
    // if this assumption is broken. It also would be the case that
    // loop peeling can break this assumption.
    __ GotoIf(__ Word32Equal(kind, __ Int32Constant(HOLEY_DOUBLE_ELEMENTS)),
              &do_store);
    __ Unreachable(&do_store);
  }

  __ Bind(&transition_smi_array);  // deferred code.
  {
    // Transition {array} from HOLEY_SMI_ELEMENTS to HOLEY_DOUBLE_ELEMENTS.
    TransitionElementsTo(node, array, HOLEY_SMI_ELEMENTS,
                         HOLEY_DOUBLE_ELEMENTS);
    __ Goto(&do_store);
  }

  __ Bind(&do_store);

  // Store the float64, silencing NaNs so no signaling-NaN bit pattern ends
  // up in the double array.
  Node* elements = __ LoadField(AccessBuilder::ForJSObjectElements(), array);
  __ StoreElement(AccessBuilder::ForFixedDoubleArrayElement(), elements, index,
                  __ Float64SilenceNaN(value));
}
5701 
// Lowers a TransitionAndStoreNonNumberElement node, where {value} is a
// tagged non-number: transitions the array to HOLEY_ELEMENTS from either
// HOLEY_SMI_ELEMENTS or HOLEY_DOUBLE_ELEMENTS if necessary, then stores.
void EffectControlLinearizer::LowerTransitionAndStoreNonNumberElement(
    Node* node) {
  Node* array = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);

  // Possibly transition array based on input and store.
  //
  //   -- TRANSITION PHASE -----------------
  //   kind = ElementsKind(array)
  //   if kind == HOLEY_SMI_ELEMENTS {
  //     Transition array to HOLEY_ELEMENTS
  //   } else if kind == HOLEY_DOUBLE_ELEMENTS {
  //     Transition array to HOLEY_ELEMENTS
  //   }
  //
  //   -- STORE PHASE ----------------------
  //   // kind is HOLEY_ELEMENTS
  //   Store array[index] = value
  //
  Node* map = __ LoadField(AccessBuilder::ForMap(), array);
  Node* kind;
  {
    // Decode the elements kind from the map's bit_field2.
    Node* bit_field2 = __ LoadField(AccessBuilder::ForMapBitField2(), map);
    Node* mask = __ Int32Constant(Map::Bits2::ElementsKindBits::kMask);
    Node* andit = __ Word32And(bit_field2, mask);
    Node* shift = __ Int32Constant(Map::Bits2::ElementsKindBits::kShift);
    kind = __ Word32Shr(andit, shift);
  }

  auto do_store = __ MakeLabel();

  auto transition_smi_array = __ MakeDeferredLabel();
  auto transition_double_to_fast = __ MakeDeferredLabel();
  {
    __ GotoIfNot(IsElementsKindGreaterThan(kind, HOLEY_SMI_ELEMENTS),
                 &transition_smi_array);
    __ GotoIf(IsElementsKindGreaterThan(kind, HOLEY_ELEMENTS),
              &transition_double_to_fast);
    __ Goto(&do_store);
  }

  __ Bind(&transition_smi_array);  // deferred code.
  {
    // Transition {array} from HOLEY_SMI_ELEMENTS to HOLEY_ELEMENTS.
    TransitionElementsTo(node, array, HOLEY_SMI_ELEMENTS, HOLEY_ELEMENTS);
    __ Goto(&do_store);
  }

  __ Bind(&transition_double_to_fast);  // deferred code.
  {
    TransitionElementsTo(node, array, HOLEY_DOUBLE_ELEMENTS, HOLEY_ELEMENTS);
    __ Goto(&do_store);
  }

  __ Bind(&do_store);

  Node* elements = __ LoadField(AccessBuilder::ForJSObjectElements(), array);
  // Our ElementsKind is HOLEY_ELEMENTS.
  ElementAccess access = AccessBuilder::ForFixedArrayElement(HOLEY_ELEMENTS);
  Type value_type = ValueTypeParameterOf(node->op());
  if (value_type.Is(Type::BooleanOrNullOrUndefined())) {
    // Booleans/null/undefined are immortal immovable objects, so the write
    // barrier can be skipped -- presumably; confirm against the write
    // barrier requirements for these values.
    access.type = value_type;
    access.write_barrier_kind = kNoWriteBarrier;
  }
  __ StoreElement(access, elements, index, value);
}
5769 
// Lowers a StoreSignedSmallElement node, where {value} is an untagged
// int32 known to be in Smi range: stores it as a float into a
// HOLEY_DOUBLE_ELEMENTS backing store, or as a Smi otherwise. No
// transition is ever needed for a signed small value.
void EffectControlLinearizer::LowerStoreSignedSmallElement(Node* node) {
  Node* array = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);  // int32

  // Store a signed small in an output array.
  //
  //   kind = ElementsKind(array)
  //
  //   -- STORE PHASE ----------------------
  //   if kind == HOLEY_DOUBLE_ELEMENTS {
  //     float_value = convert int32 to float
  //     Store array[index] = float_value
  //   } else {
  //     // kind is HOLEY_SMI_ELEMENTS or HOLEY_ELEMENTS
  //     smi_value = convert int32 to smi
  //     Store array[index] = smi_value
  //   }
  //
  Node* map = __ LoadField(AccessBuilder::ForMap(), array);
  Node* kind;
  {
    // Decode the elements kind from the map's bit_field2.
    Node* bit_field2 = __ LoadField(AccessBuilder::ForMapBitField2(), map);
    Node* mask = __ Int32Constant(Map::Bits2::ElementsKindBits::kMask);
    Node* andit = __ Word32And(bit_field2, mask);
    Node* shift = __ Int32Constant(Map::Bits2::ElementsKindBits::kShift);
    kind = __ Word32Shr(andit, shift);
  }

  Node* elements = __ LoadField(AccessBuilder::ForJSObjectElements(), array);
  auto if_kind_is_double = __ MakeLabel();
  auto done = __ MakeLabel();
  __ GotoIf(IsElementsKindGreaterThan(kind, HOLEY_ELEMENTS),
            &if_kind_is_double);
  {
    // Our ElementsKind is HOLEY_SMI_ELEMENTS or HOLEY_ELEMENTS.
    // In this case, we know our value is a signed small, and we can optimize
    // the ElementAccess information.
    ElementAccess access = AccessBuilder::ForFixedArrayElement();
    access.type = Type::SignedSmall();
    access.machine_type = MachineType::TaggedSigned();
    // Smis never require a write barrier.
    access.write_barrier_kind = kNoWriteBarrier;
    Node* smi_value = ChangeInt32ToSmi(value);
    __ StoreElement(access, elements, index, smi_value);
    __ Goto(&done);
  }
  __ Bind(&if_kind_is_double);
  {
    // Our ElementsKind is HOLEY_DOUBLE_ELEMENTS.
    Node* float_value = __ ChangeInt32ToFloat64(value);
    __ StoreElement(AccessBuilder::ForFixedDoubleArrayElement(), elements,
                    index, float_value);
    __ Goto(&done);
  }

  __ Bind(&done);
}
5827 
// Lowers a RuntimeAbort node into a call to Runtime::kAbort via the CEntry
// stub, passing the abort reason as a Smi argument.
void EffectControlLinearizer::LowerRuntimeAbort(Node* node) {
  AbortReason reason = AbortReasonOf(node->op());
  Operator::Properties properties = Operator::kNoDeopt | Operator::kNoThrow;
  Runtime::FunctionId id = Runtime::kAbort;
  // One JS-visible argument (the reason), no special call flags.
  auto call_descriptor = Linkage::GetRuntimeCallDescriptor(
      graph()->zone(), id, 1, properties, CallDescriptor::kNoFlags);
  __ Call(call_descriptor, __ CEntryStubConstant(1),
          __ SmiConstant(static_cast<int>(reason)),
          __ ExternalConstant(ExternalReference::Create(id)),
          __ Int32Constant(1), __ NoContextConstant());
}
5839 
5840 template <typename... Args>
CallBuiltin(Builtins::Name builtin,Operator::Properties properties,Args...args)5841 Node* EffectControlLinearizer::CallBuiltin(Builtins::Name builtin,
5842                                            Operator::Properties properties,
5843                                            Args... args) {
5844   Callable const callable = Builtins::CallableFor(isolate(), builtin);
5845   auto call_descriptor = Linkage::GetStubCallDescriptor(
5846       graph()->zone(), callable.descriptor(),
5847       callable.descriptor().GetStackParameterCount(), CallDescriptor::kNoFlags,
5848       properties);
5849   return __ Call(call_descriptor, __ HeapConstant(callable.code()), args...,
5850                  __ NoContextConstant());
5851 }
5852 
LowerAssertType(Node * node)5853 Node* EffectControlLinearizer::LowerAssertType(Node* node) {
5854   DCHECK_EQ(node->opcode(), IrOpcode::kAssertType);
5855   Type type = OpParameter<Type>(node->op());
5856   DCHECK(type.IsRange());
5857   auto range = type.AsRange();
5858   Node* const input = node->InputAt(0);
5859   Node* const min = __ NumberConstant(range->Min());
5860   Node* const max = __ NumberConstant(range->Max());
5861   CallBuiltin(Builtins::kCheckNumberInRange, node->op()->properties(), input,
5862               min, max, __ SmiConstant(node->id()));
5863   return input;
5864 }
5865 
LowerFoldConstant(Node * node)5866 Node* EffectControlLinearizer::LowerFoldConstant(Node* node) {
5867   DCHECK_EQ(node->opcode(), IrOpcode::kFoldConstant);
5868   Node* original = node->InputAt(0);
5869   Node* constant = node->InputAt(1);
5870   CallBuiltin(Builtins::kCheckSameObject, node->op()->properties(), original,
5871               constant);
5872   return constant;
5873 }
5874 
// Lowers ConvertReceiver: coerces the call receiver {value} into a
// JSReceiver according to the ConvertReceiverMode carried by the operator.
// Primitives are boxed via the ToObject builtin; null/undefined (where they
// can occur) are replaced by the global proxy.
Node* EffectControlLinearizer::LowerConvertReceiver(Node* node) {
  ConvertReceiverMode const mode = ConvertReceiverModeOf(node->op());
  Node* value = node->InputAt(0);
  Node* global_proxy = node->InputAt(1);

  switch (mode) {
    case ConvertReceiverMode::kNullOrUndefined: {
      // Receiver is statically known to be null or undefined, so the
      // converted receiver is always the global proxy.
      return global_proxy;
    }
    case ConvertReceiverMode::kNotNullOrUndefined: {
      auto convert_to_object = __ MakeDeferredLabel();
      auto done_convert = __ MakeLabel(MachineRepresentation::kTagged);

      // Check if {value} is already a JSReceiver.
      __ GotoIf(ObjectIsSmi(value), &convert_to_object);
      // JSReceiver instance types form the trailing range of the instance
      // type enum, so a single unsigned comparison suffices.
      STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
      Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
      Node* value_instance_type =
          __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
      Node* check = __ Uint32LessThan(
          value_instance_type, __ Uint32Constant(FIRST_JS_RECEIVER_TYPE));
      __ GotoIf(check, &convert_to_object);
      __ Goto(&done_convert, value);

      // Wrap the primitive {value} into a JSPrimitiveWrapper.
      __ Bind(&convert_to_object);
      Operator::Properties properties = Operator::kEliminatable;
      Callable callable = Builtins::CallableFor(isolate(), Builtins::kToObject);
      CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
      auto call_descriptor = Linkage::GetStubCallDescriptor(
          graph()->zone(), callable.descriptor(),
          callable.descriptor().GetStackParameterCount(), flags, properties);
      // ToObject needs the native context, which is loaded off the global
      // proxy rather than threaded through the node.
      Node* native_context = __ LoadField(
          AccessBuilder::ForJSGlobalProxyNativeContext(), global_proxy);
      Node* result = __ Call(call_descriptor, __ HeapConstant(callable.code()),
                             value, native_context);
      __ Goto(&done_convert, result);

      __ Bind(&done_convert);
      return done_convert.PhiAt(0);
    }
    case ConvertReceiverMode::kAny: {
      auto convert_to_object = __ MakeDeferredLabel();
      auto convert_global_proxy = __ MakeDeferredLabel();
      auto done_convert = __ MakeLabel(MachineRepresentation::kTagged);

      // Check if {value} is already a JSReceiver, or null/undefined.
      __ GotoIf(ObjectIsSmi(value), &convert_to_object);
      STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
      Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
      Node* value_instance_type =
          __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
      Node* check = __ Uint32LessThan(
          value_instance_type, __ Uint32Constant(FIRST_JS_RECEIVER_TYPE));
      __ GotoIf(check, &convert_to_object);
      __ Goto(&done_convert, value);

      // Wrap the primitive {value} into a JSPrimitiveWrapper.
      __ Bind(&convert_to_object);
      // Unlike kNotNullOrUndefined, null and undefined are possible here and
      // must be redirected to the global proxy instead of ToObject.
      __ GotoIf(__ TaggedEqual(value, __ UndefinedConstant()),
                &convert_global_proxy);
      __ GotoIf(__ TaggedEqual(value, __ NullConstant()),
                &convert_global_proxy);
      Operator::Properties properties = Operator::kEliminatable;
      Callable callable = Builtins::CallableFor(isolate(), Builtins::kToObject);
      CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
      auto call_descriptor = Linkage::GetStubCallDescriptor(
          graph()->zone(), callable.descriptor(),
          callable.descriptor().GetStackParameterCount(), flags, properties);
      Node* native_context = __ LoadField(
          AccessBuilder::ForJSGlobalProxyNativeContext(), global_proxy);
      Node* result = __ Call(call_descriptor, __ HeapConstant(callable.code()),
                             value, native_context);
      __ Goto(&done_convert, result);

      // Replace the {value} with the {global_proxy}.
      __ Bind(&convert_global_proxy);
      __ Goto(&done_convert, global_proxy);

      __ Bind(&done_convert);
      return done_convert.PhiAt(0);
    }
  }

  // All ConvertReceiverMode cases return above.
  UNREACHABLE();
  return nullptr;
}
5962 
// Software lowering of Float64RoundUp (ceil). Returns Nothing when the
// target has a hardware rounding instruction, in which case the node is left
// for the instruction selector.
Maybe<Node*> EffectControlLinearizer::LowerFloat64RoundUp(Node* node) {
  // Nothing to be done if a fast hardware instruction is available.
  if (machine()->Float64RoundUp().IsSupported()) {
    return Nothing<Node*>();
  }

  Node* const input = node->InputAt(0);

  // General case for ceil.
  //
  //   if 0.0 < input then
  //     if 2^52 <= input then
  //       input
  //     else
  //       let temp1 = (2^52 + input) - 2^52 in
  //       if temp1 < input then
  //         temp1 + 1
  //       else
  //         temp1
  //   else
  //     if input == 0 then
  //       input
  //     else
  //       if input <= -2^52 then
  //         input
  //       else
  //         let temp1 = -0 - input in
  //         let temp2 = (2^52 + temp1) - 2^52 in
  //         let temp3 = (if temp1 < temp2 then temp2 - 1 else temp2) in
  //         -0 - temp3
  //
  // The (2^52 + x) - 2^52 trick rounds x to an integer: doubles at or above
  // 2^52 in magnitude are already integral.

  auto if_not_positive = __ MakeDeferredLabel();
  auto if_greater_than_two_52 = __ MakeDeferredLabel();
  auto if_less_than_minus_two_52 = __ MakeDeferredLabel();
  auto if_zero = __ MakeDeferredLabel();
  auto done_temp3 = __ MakeLabel(MachineRepresentation::kFloat64);
  auto done = __ MakeLabel(MachineRepresentation::kFloat64);

  Node* const zero = __ Float64Constant(0.0);
  Node* const two_52 = __ Float64Constant(4503599627370496.0E0);
  Node* const one = __ Float64Constant(1.0);

  Node* check0 = __ Float64LessThan(zero, input);
  __ GotoIfNot(check0, &if_not_positive);
  {
    Node* check1 = __ Float64LessThanOrEqual(two_52, input);
    __ GotoIf(check1, &if_greater_than_two_52);
    {
      // Rounding may have gone down; bump by one to get the ceiling.
      Node* temp1 = __ Float64Sub(__ Float64Add(two_52, input), two_52);
      __ GotoIfNot(__ Float64LessThan(temp1, input), &done, temp1);
      __ Goto(&done, __ Float64Add(temp1, one));
    }

    __ Bind(&if_greater_than_two_52);
    __ Goto(&done, input);
  }

  __ Bind(&if_not_positive);
  {
    // Return ±0 unchanged so the sign of -0 is preserved.
    Node* check1 = __ Float64Equal(input, zero);
    __ GotoIf(check1, &if_zero);

    Node* const minus_two_52 = __ Float64Constant(-4503599627370496.0E0);
    Node* check2 = __ Float64LessThanOrEqual(input, minus_two_52);
    __ GotoIf(check2, &if_less_than_minus_two_52);

    {
      // Negative case: ceil(input) == -floor(-input).
      Node* const minus_zero = __ Float64Constant(-0.0);
      Node* temp1 = __ Float64Sub(minus_zero, input);
      Node* temp2 = __ Float64Sub(__ Float64Add(two_52, temp1), two_52);
      Node* check3 = __ Float64LessThan(temp1, temp2);
      __ GotoIfNot(check3, &done_temp3, temp2);
      __ Goto(&done_temp3, __ Float64Sub(temp2, one));

      __ Bind(&done_temp3);
      Node* temp3 = done_temp3.PhiAt(0);
      // Subtracting from -0 (rather than 0) keeps the result -0 when temp3
      // is 0.
      __ Goto(&done, __ Float64Sub(minus_zero, temp3));
    }
    __ Bind(&if_less_than_minus_two_52);
    __ Goto(&done, input);

    __ Bind(&if_zero);
    __ Goto(&done, input);
  }
  __ Bind(&done);
  return Just(done.PhiAt(0));
}
6050 
// Emits graph code computing floor(value). Uses the hardware instruction when
// available; otherwise falls back to a branchy software implementation shared
// by the RoundDown and RoundTiesEven lowerings.
Node* EffectControlLinearizer::BuildFloat64RoundDown(Node* value) {
  if (machine()->Float64RoundDown().IsSupported()) {
    return __ Float64RoundDown(value);
  }

  Node* const input = value;

  // General case for floor.
  //
  //   if 0.0 < input then
  //     if 2^52 <= input then
  //       input
  //     else
  //       let temp1 = (2^52 + input) - 2^52 in
  //       if input < temp1 then
  //         temp1 - 1
  //       else
  //         temp1
  //   else
  //     if input == 0 then
  //       input
  //     else
  //       if input <= -2^52 then
  //         input
  //       else
  //         let temp1 = -0 - input in
  //         let temp2 = (2^52 + temp1) - 2^52 in
  //         if temp2 < temp1 then
  //           -1 - temp2
  //         else
  //           -0 - temp2
  //
  // The (2^52 + x) - 2^52 trick rounds x to an integer: doubles at or above
  // 2^52 in magnitude are already integral.

  auto if_not_positive = __ MakeDeferredLabel();
  auto if_greater_than_two_52 = __ MakeDeferredLabel();
  auto if_less_than_minus_two_52 = __ MakeDeferredLabel();
  auto if_temp2_lt_temp1 = __ MakeLabel();
  auto if_zero = __ MakeDeferredLabel();
  auto done = __ MakeLabel(MachineRepresentation::kFloat64);

  Node* const zero = __ Float64Constant(0.0);
  Node* const two_52 = __ Float64Constant(4503599627370496.0E0);

  Node* check0 = __ Float64LessThan(zero, input);
  __ GotoIfNot(check0, &if_not_positive);
  {
    Node* check1 = __ Float64LessThanOrEqual(two_52, input);
    __ GotoIf(check1, &if_greater_than_two_52);
    {
      // Rounding may have gone up; subtract one to get the floor.
      Node* const one = __ Float64Constant(1.0);
      Node* temp1 = __ Float64Sub(__ Float64Add(two_52, input), two_52);
      __ GotoIfNot(__ Float64LessThan(input, temp1), &done, temp1);
      __ Goto(&done, __ Float64Sub(temp1, one));
    }

    __ Bind(&if_greater_than_two_52);
    __ Goto(&done, input);
  }

  __ Bind(&if_not_positive);
  {
    // Return ±0 unchanged so the sign of -0 is preserved.
    Node* check1 = __ Float64Equal(input, zero);
    __ GotoIf(check1, &if_zero);

    Node* const minus_two_52 = __ Float64Constant(-4503599627370496.0E0);
    Node* check2 = __ Float64LessThanOrEqual(input, minus_two_52);
    __ GotoIf(check2, &if_less_than_minus_two_52);

    {
      // Negative case: floor(input) == -ceil(-input).
      Node* const minus_zero = __ Float64Constant(-0.0);
      Node* temp1 = __ Float64Sub(minus_zero, input);
      Node* temp2 = __ Float64Sub(__ Float64Add(two_52, temp1), two_52);
      Node* check3 = __ Float64LessThan(temp2, temp1);
      __ GotoIf(check3, &if_temp2_lt_temp1);
      __ Goto(&done, __ Float64Sub(minus_zero, temp2));

      __ Bind(&if_temp2_lt_temp1);
      // -1 - temp2 == -(temp2 + 1), i.e. bump the rounded magnitude by one.
      __ Goto(&done, __ Float64Sub(__ Float64Constant(-1.0), temp2));
    }
    __ Bind(&if_less_than_minus_two_52);
    __ Goto(&done, input);

    __ Bind(&if_zero);
    __ Goto(&done, input);
  }
  __ Bind(&done);
  return done.PhiAt(0);
}
6138 
LowerFloat64RoundDown(Node * node)6139 Maybe<Node*> EffectControlLinearizer::LowerFloat64RoundDown(Node* node) {
6140   // Nothing to be done if a fast hardware instruction is available.
6141   if (machine()->Float64RoundDown().IsSupported()) {
6142     return Nothing<Node*>();
6143   }
6144 
6145   Node* const input = node->InputAt(0);
6146   return Just(BuildFloat64RoundDown(input));
6147 }
6148 
// Software lowering of Float64RoundTiesEven (round-half-to-even, a.k.a.
// banker's rounding). Returns Nothing when the target has a hardware
// instruction. Built on top of BuildFloat64RoundDown.
Maybe<Node*> EffectControlLinearizer::LowerFloat64RoundTiesEven(Node* node) {
  // Nothing to be done if a fast hardware instruction is available.
  if (machine()->Float64RoundTiesEven().IsSupported()) {
    return Nothing<Node*>();
  }

  Node* const input = node->InputAt(0);

  // Generate case for round ties to even:
  //
  //   let value = floor(input) in
  //   let temp1 = input - value in
  //   if temp1 < 0.5 then
  //     value
  //   else if 0.5 < temp1 then
  //     value + 1.0
  //   else
  //     let temp2 = value % 2.0 in
  //     if temp2 == 0.0 then
  //       value
  //     else
  //       value + 1.0

  auto if_is_half = __ MakeLabel();
  auto done = __ MakeLabel(MachineRepresentation::kFloat64);

  // {temp1} is the fractional part of {input}, in [0, 1).
  Node* value = BuildFloat64RoundDown(input);
  Node* temp1 = __ Float64Sub(input, value);

  Node* const half = __ Float64Constant(0.5);
  Node* check0 = __ Float64LessThan(temp1, half);
  __ GotoIf(check0, &done, value);

  Node* const one = __ Float64Constant(1.0);
  Node* check1 = __ Float64LessThan(half, temp1);
  __ GotoIfNot(check1, &if_is_half);
  __ Goto(&done, __ Float64Add(value, one));

  // Exactly halfway: round to the even neighbor, decided by the parity of
  // the floored value.
  __ Bind(&if_is_half);
  Node* temp2 = __ Float64Mod(value, __ Float64Constant(2.0));
  Node* check2 = __ Float64Equal(temp2, __ Float64Constant(0.0));
  __ GotoIf(check2, &done, value);
  __ Goto(&done, __ Float64Add(value, one));

  __ Bind(&done);
  return Just(done.PhiAt(0));
}
6196 
// Emits graph code computing trunc(input) (round toward zero). Uses the
// hardware instruction when available; otherwise a branchy software fallback.
Node* EffectControlLinearizer::BuildFloat64RoundTruncate(Node* input) {
  if (machine()->Float64RoundTruncate().IsSupported()) {
    return __ Float64RoundTruncate(input);
  }
  // General case for trunc.
  //
  //   if 0.0 < input then
  //     if 2^52 <= input then
  //       input
  //     else
  //       let temp1 = (2^52 + input) - 2^52 in
  //       if input < temp1 then
  //         temp1 - 1
  //       else
  //         temp1
  //   else
  //     if input == 0 then
  //       input
  //     else
  //       if input <= -2^52 then
  //         input
  //       else
  //         let temp1 = -0 - input in
  //         let temp2 = (2^52 + temp1) - 2^52 in
  //         let temp3 = (if temp1 < temp2 then temp2 - 1 else temp2) in
  //         -0 - temp3
  //
  // Note: We do not use the Diamond helper class here, because it really hurts
  // readability with nested diamonds.
  //
  // The (2^52 + x) - 2^52 trick rounds x to an integer; doubles at or above
  // 2^52 in magnitude are already integral. Truncation is floor for positive
  // inputs and -floor(-input) for negative inputs.

  auto if_not_positive = __ MakeDeferredLabel();
  auto if_greater_than_two_52 = __ MakeDeferredLabel();
  auto if_less_than_minus_two_52 = __ MakeDeferredLabel();
  auto if_zero = __ MakeDeferredLabel();
  auto done_temp3 = __ MakeLabel(MachineRepresentation::kFloat64);
  auto done = __ MakeLabel(MachineRepresentation::kFloat64);

  Node* const zero = __ Float64Constant(0.0);
  Node* const two_52 = __ Float64Constant(4503599627370496.0E0);
  Node* const one = __ Float64Constant(1.0);

  Node* check0 = __ Float64LessThan(zero, input);
  __ GotoIfNot(check0, &if_not_positive);
  {
    Node* check1 = __ Float64LessThanOrEqual(two_52, input);
    __ GotoIf(check1, &if_greater_than_two_52);
    {
      // Rounding may have gone up; subtract one to truncate toward zero.
      Node* temp1 = __ Float64Sub(__ Float64Add(two_52, input), two_52);
      __ GotoIfNot(__ Float64LessThan(input, temp1), &done, temp1);
      __ Goto(&done, __ Float64Sub(temp1, one));
    }

    __ Bind(&if_greater_than_two_52);
    __ Goto(&done, input);
  }

  __ Bind(&if_not_positive);
  {
    // Return ±0 unchanged so the sign of -0 is preserved.
    Node* check1 = __ Float64Equal(input, zero);
    __ GotoIf(check1, &if_zero);

    Node* const minus_two_52 = __ Float64Constant(-4503599627370496.0E0);
    Node* check2 = __ Float64LessThanOrEqual(input, minus_two_52);
    __ GotoIf(check2, &if_less_than_minus_two_52);

    {
      Node* const minus_zero = __ Float64Constant(-0.0);
      Node* temp1 = __ Float64Sub(minus_zero, input);
      Node* temp2 = __ Float64Sub(__ Float64Add(two_52, temp1), two_52);
      Node* check3 = __ Float64LessThan(temp1, temp2);
      __ GotoIfNot(check3, &done_temp3, temp2);
      __ Goto(&done_temp3, __ Float64Sub(temp2, one));

      __ Bind(&done_temp3);
      Node* temp3 = done_temp3.PhiAt(0);
      // Subtracting from -0 (rather than 0) keeps the result -0 when temp3
      // is 0.
      __ Goto(&done, __ Float64Sub(minus_zero, temp3));
    }
    __ Bind(&if_less_than_minus_two_52);
    __ Goto(&done, input);

    __ Bind(&if_zero);
    __ Goto(&done, input);
  }
  __ Bind(&done);
  return done.PhiAt(0);
}
6283 
LowerFloat64RoundTruncate(Node * node)6284 Maybe<Node*> EffectControlLinearizer::LowerFloat64RoundTruncate(Node* node) {
6285   // Nothing to be done if a fast hardware instruction is available.
6286   if (machine()->Float64RoundTruncate().IsSupported()) {
6287     return Nothing<Node*>();
6288   }
6289 
6290   Node* const input = node->InputAt(0);
6291   return Just(BuildFloat64RoundTruncate(input));
6292 }
6293 
LowerFindOrderedHashMapEntry(Node * node)6294 Node* EffectControlLinearizer::LowerFindOrderedHashMapEntry(Node* node) {
6295   Node* table = NodeProperties::GetValueInput(node, 0);
6296   Node* key = NodeProperties::GetValueInput(node, 1);
6297 
6298   {
6299     Callable const callable =
6300         Builtins::CallableFor(isolate(), Builtins::kFindOrderedHashMapEntry);
6301     Operator::Properties const properties = node->op()->properties();
6302     CallDescriptor::Flags const flags = CallDescriptor::kNoFlags;
6303     auto call_descriptor = Linkage::GetStubCallDescriptor(
6304         graph()->zone(), callable.descriptor(),
6305         callable.descriptor().GetStackParameterCount(), flags, properties);
6306     return __ Call(call_descriptor, __ HeapConstant(callable.code()), table,
6307                    key, __ NoContextConstant());
6308   }
6309 }
6310 
ComputeUnseededHash(Node * value)6311 Node* EffectControlLinearizer::ComputeUnseededHash(Node* value) {
6312   // See v8::internal::ComputeUnseededHash()
6313   value = __ Int32Add(__ Word32Xor(value, __ Int32Constant(0xFFFFFFFF)),
6314                       __ Word32Shl(value, __ Int32Constant(15)));
6315   value = __ Word32Xor(value, __ Word32Shr(value, __ Int32Constant(12)));
6316   value = __ Int32Add(value, __ Word32Shl(value, __ Int32Constant(2)));
6317   value = __ Word32Xor(value, __ Word32Shr(value, __ Int32Constant(4)));
6318   value = __ Int32Mul(value, __ Int32Constant(2057));
6319   value = __ Word32Xor(value, __ Word32Shr(value, __ Int32Constant(16)));
6320   value = __ Word32And(value, __ Int32Constant(0x3FFFFFFF));
6321   return value;
6322 }
6323 
// Inline lowering of FindOrderedHashMapEntry for an int32 key: hashes the
// key, picks the bucket, and walks the bucket's chain comparing each
// candidate key (as Smi or HeapNumber) against the int32 key. Returns the
// entry index, or OrderedHashMap::kNotFound.
Node* EffectControlLinearizer::LowerFindOrderedHashMapEntryForInt32Key(
    Node* node) {
  Node* table = NodeProperties::GetValueInput(node, 0);
  Node* key = NodeProperties::GetValueInput(node, 1);

  // Compute the integer hash code.
  Node* hash = ChangeUint32ToUintPtr(ComputeUnseededHash(key));

  // Reduce the hash modulo the bucket count via a mask — assumes the bucket
  // count is a power of two (TODO: confirm against OrderedHashMap).
  Node* number_of_buckets = ChangeSmiToIntPtr(__ LoadField(
      AccessBuilder::ForOrderedHashMapOrSetNumberOfBuckets(), table));
  hash = __ WordAnd(hash, __ IntSub(number_of_buckets, __ IntPtrConstant(1)));
  // Load the head of the bucket's entry chain from the hash-table area.
  Node* first_entry = ChangeSmiToIntPtr(__ Load(
      MachineType::TaggedSigned(), table,
      __ IntAdd(__ WordShl(hash, __ IntPtrConstant(kTaggedSizeLog2)),
                __ IntPtrConstant(OrderedHashMap::HashTableStartOffset() -
                                  kHeapObjectTag))));

  auto loop = __ MakeLoopLabel(MachineType::PointerRepresentation());
  auto done = __ MakeLabel(MachineType::PointerRepresentation());
  __ Goto(&loop, first_entry);
  __ Bind(&loop);
  {
    Node* entry = loop.PhiAt(0);
    // Chain terminator: kNotFound doubles as the "miss" return value.
    Node* check =
        __ IntPtrEqual(entry, __ IntPtrConstant(OrderedHashMap::kNotFound));
    __ GotoIf(check, &done, entry);
    // Translate the entry index into a field index: entries are kEntrySize
    // wide and stored after the buckets.
    entry = __ IntAdd(
        __ IntMul(entry, __ IntPtrConstant(OrderedHashMap::kEntrySize)),
        number_of_buckets);

    Node* candidate_key = __ Load(
        MachineType::AnyTagged(), table,
        __ IntAdd(__ WordShl(entry, __ IntPtrConstant(kTaggedSizeLog2)),
                  __ IntPtrConstant(OrderedHashMap::HashTableStartOffset() -
                                    kHeapObjectTag)));

    auto if_match = __ MakeLabel();
    auto if_notmatch = __ MakeLabel();
    auto if_notsmi = __ MakeDeferredLabel();
    // Fast path: candidate key is a Smi, compare as int32.
    __ GotoIfNot(ObjectIsSmi(candidate_key), &if_notsmi);
    __ Branch(__ Word32Equal(ChangeSmiToInt32(candidate_key), key), &if_match,
              &if_notmatch);

    // Slow path: the candidate key may be a HeapNumber holding the same
    // numeric value; any other heap object cannot match an int32 key.
    __ Bind(&if_notsmi);
    __ GotoIfNot(
        __ TaggedEqual(__ LoadField(AccessBuilder::ForMap(), candidate_key),
                       __ HeapNumberMapConstant()),
        &if_notmatch);
    __ Branch(__ Float64Equal(__ LoadField(AccessBuilder::ForHeapNumberValue(),
                                           candidate_key),
                              __ ChangeInt32ToFloat64(key)),
              &if_match, &if_notmatch);

    __ Bind(&if_match);
    __ Goto(&done, entry);

    __ Bind(&if_notmatch);
    {
      // Follow the per-entry chain link to the next entry in this bucket.
      Node* next_entry = ChangeSmiToIntPtr(__ Load(
          MachineType::TaggedSigned(), table,
          __ IntAdd(
              __ WordShl(entry, __ IntPtrConstant(kTaggedSizeLog2)),
              __ IntPtrConstant(OrderedHashMap::HashTableStartOffset() +
                                OrderedHashMap::kChainOffset * kTaggedSize -
                                kHeapObjectTag))));
      __ Goto(&loop, next_entry);
    }
  }

  __ Bind(&done);
  return done.PhiAt(0);
}
6396 
// Lowers DateNow to a zero-argument runtime call to Runtime::kDateCurrentTime.
// Marked kNoDeopt|kNoThrow since fetching the current time cannot deopt or
// throw.
Node* EffectControlLinearizer::LowerDateNow(Node* node) {
  Operator::Properties properties = Operator::kNoDeopt | Operator::kNoThrow;
  Runtime::FunctionId id = Runtime::kDateCurrentTime;
  auto call_descriptor = Linkage::GetRuntimeCallDescriptor(
      graph()->zone(), id, 0, properties, CallDescriptor::kNoFlags);
  // Runtime calls go through the CEntry stub; the function reference and the
  // argument count (0) are passed explicitly, followed by the context.
  return __ Call(call_descriptor, __ CEntryStubConstant(1),
                 __ ExternalConstant(ExternalReference::Create(id)),
                 __ Int32Constant(0), __ NoContextConstant());
}
6406 
TruncateWordToInt32(Node * value)6407 Node* EffectControlLinearizer::TruncateWordToInt32(Node* value) {
6408   if (machine()->Is64()) {
6409     return __ TruncateInt64ToInt32(value);
6410   }
6411   return value;
6412 }
6413 
BuildIsStrongReference(Node * value)6414 Node* EffectControlLinearizer::BuildIsStrongReference(Node* value) {
6415   return __ Word32Equal(
6416       __ Word32And(
6417           TruncateWordToInt32(__ BitcastTaggedToWordForTagAndSmiBits(value)),
6418           __ Int32Constant(kHeapObjectTagMask)),
6419       __ Int32Constant(kHeapObjectTag));
6420 }
6421 
// Sets the weak-reference tag bit on {heap_object}, producing the value a
// weak slot would hold for it — useful for direct comparison against weak
// references.
Node* EffectControlLinearizer::MakeWeakForComparison(Node* heap_object) {
  // TODO(gsathya): Specialize this for pointer compression.
  return __ BitcastWordToTagged(
      __ WordOr(__ BitcastTaggedToWord(heap_object),
                __ IntPtrConstant(kWeakHeapObjectTag)));
}
6428 
// Clears the weak tag bits of {maybe_object}, recovering the plain (strong)
// heap-object reference it points to.
Node* EffectControlLinearizer::BuildStrongReferenceFromWeakReference(
    Node* maybe_object) {
  return __ BitcastWordToTagged(
      __ WordAnd(__ BitcastMaybeObjectToWord(maybe_object),
                 __ IntPtrConstant(~kWeakHeapObjectMask)));
}
6435 
// Tests whether {maybe_object}, with its weak tag bits masked off, refers to
// {value}.
Node* EffectControlLinearizer::BuildIsWeakReferenceTo(Node* maybe_object,
                                                      Node* value) {
  if (COMPRESS_POINTERS_BOOL) {
    // With pointer compression, comparing the truncated 32-bit words is
    // sufficient.
    return __ Word32Equal(
        __ Word32And(
            TruncateWordToInt32(__ BitcastMaybeObjectToWord(maybe_object)),
            __ Uint32Constant(~static_cast<uint32_t>(kWeakHeapObjectMask))),
        TruncateWordToInt32(__ BitcastTaggedToWord(value)));
  } else {
    // Otherwise compare full machine words.
    return __ WordEqual(__ WordAnd(__ BitcastMaybeObjectToWord(maybe_object),
                                   __ IntPtrConstant(~kWeakHeapObjectMask)),
                        __ BitcastTaggedToWord(value));
  }
}
6450 
// Tests whether {maybe_object} is a cleared weak reference by comparing its
// lower 32 bits against the cleared-weak-reference sentinel (sufficient on
// all configurations).
Node* EffectControlLinearizer::BuildIsClearedWeakReference(Node* maybe_object) {
  return __ Word32Equal(
      TruncateWordToInt32(__ BitcastMaybeObjectToWord(maybe_object)),
      __ Int32Constant(kClearedWeakHeapObjectLower32));
}
6456 
6457 #undef __
6458 
LinearizeEffectControl(JSGraph * graph,Schedule * schedule,Zone * temp_zone,SourcePositionTable * source_positions,NodeOriginTable * node_origins,MaskArrayIndexEnable mask_array_index,MaintainSchedule maintain_schedule,JSHeapBroker * broker)6459 void LinearizeEffectControl(JSGraph* graph, Schedule* schedule, Zone* temp_zone,
6460                             SourcePositionTable* source_positions,
6461                             NodeOriginTable* node_origins,
6462                             MaskArrayIndexEnable mask_array_index,
6463                             MaintainSchedule maintain_schedule,
6464                             JSHeapBroker* broker) {
6465   EffectControlLinearizer linearizer(
6466       graph, schedule, temp_zone, source_positions, node_origins,
6467       mask_array_index, maintain_schedule, broker);
6468   linearizer.Run();
6469 }
6470 
6471 }  // namespace compiler
6472 }  // namespace internal
6473 }  // namespace v8
6474