1 // Copyright 2015 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/compiler/effect-control-linearizer.h"
6
7 #include "src/code-factory.h"
8 #include "src/compiler/access-builder.h"
9 #include "src/compiler/compiler-source-position-table.h"
10 #include "src/compiler/js-graph.h"
11 #include "src/compiler/linkage.h"
12 #include "src/compiler/node-matchers.h"
13 #include "src/compiler/node-origin-table.h"
14 #include "src/compiler/node-properties.h"
15 #include "src/compiler/node.h"
16 #include "src/compiler/schedule.h"
17 #include "src/heap/factory-inl.h"
18
19 namespace v8 {
20 namespace internal {
21 namespace compiler {
22
23 EffectControlLinearizer::EffectControlLinearizer(
24 JSGraph* js_graph, Schedule* schedule, Zone* temp_zone,
25 SourcePositionTable* source_positions, NodeOriginTable* node_origins,
26 MaskArrayIndexEnable mask_array_index)
27 : js_graph_(js_graph),
28 schedule_(schedule),
29 temp_zone_(temp_zone),
30 mask_array_index_(mask_array_index),
31 source_positions_(source_positions),
32 node_origins_(node_origins),
33 graph_assembler_(js_graph, nullptr, nullptr, temp_zone),
34 frame_state_zapper_(nullptr) {}
35
36 Graph* EffectControlLinearizer::graph() const { return js_graph_->graph(); }
37 CommonOperatorBuilder* EffectControlLinearizer::common() const {
38 return js_graph_->common();
39 }
40 SimplifiedOperatorBuilder* EffectControlLinearizer::simplified() const {
41 return js_graph_->simplified();
42 }
43 MachineOperatorBuilder* EffectControlLinearizer::machine() const {
44 return js_graph_->machine();
45 }
46
47 namespace {
48
49 struct BlockEffectControlData {
50 Node* current_effect = nullptr; // New effect.
51 Node* current_control = nullptr; // New control.
52 Node* current_frame_state = nullptr; // New frame state.
53 };
54
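// Effect and control are tracked per (predecessor, successor) edge rather
// than per block, because a block may hand different effect/control nodes
// to different successors (e.g. after a Branch has been cloned below).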
55 class BlockEffectControlMap {
56 public:
57 explicit BlockEffectControlMap(Zone* temp_zone) : map_(temp_zone) {}
58
59 BlockEffectControlData& For(BasicBlock* from, BasicBlock* to) {
60 return map_[std::make_pair(from->rpo_number(), to->rpo_number())];
61 }
62
63 const BlockEffectControlData& For(BasicBlock* from, BasicBlock* to) const {
64 return map_.at(std::make_pair(from->rpo_number(), to->rpo_number()));
65 }
66
67 private:
68 typedef std::pair<int32_t, int32_t> Key;
69 typedef ZoneMap<Key, BlockEffectControlData> Map;
70
71 Map map_;
72 };
73
74 // Effect phis that need to be updated after the first pass.
75 struct PendingEffectPhi {
76 Node* effect_phi;
77 BasicBlock* block;
78
79 PendingEffectPhi(Node* effect_phi, BasicBlock* block)
80 : effect_phi(effect_phi), block(block) {}
81 };
82
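// Terminates an unreachable effect/control chain: unless the chain is
// already dead, the effect is capped with an Unreachable node (if it is not
// one already) and a Throw is merged into the graph's End node.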
83 void ConnectUnreachableToEnd(Node* effect, Node* control, JSGraph* jsgraph) {
84 Graph* graph = jsgraph->graph();
85 CommonOperatorBuilder* common = jsgraph->common();
86 if (effect->opcode() == IrOpcode::kDead) return;
87 if (effect->opcode() != IrOpcode::kUnreachable) {
88 effect = graph->NewNode(common->Unreachable(), effect, control);
89 }
90 Node* throw_node = graph->NewNode(common->Throw(), effect, control);
91 NodeProperties::MergeControlToEnd(graph, common, throw_node);
92 }
93
94 void UpdateEffectPhi(Node* node, BasicBlock* block,
95 BlockEffectControlMap* block_effects, JSGraph* jsgraph) {
96 // Update all inputs to an effect phi with the effects from the given
97 // block->effect map.
98 DCHECK_EQ(IrOpcode::kEffectPhi, node->opcode());
99 DCHECK_EQ(static_cast<size_t>(node->op()->EffectInputCount()),
100 block->PredecessorCount());
101 for (int i = 0; i < node->op()->EffectInputCount(); i++) {
102 Node* input = node->InputAt(i);
103 BasicBlock* predecessor = block->PredecessorAt(static_cast<size_t>(i));
104 const BlockEffectControlData& block_effect =
105 block_effects->For(predecessor, block);
106 Node* effect = block_effect.current_effect;
107 if (input != effect) {
108 node->ReplaceInput(i, effect);
109 }
110 }
111 }
112
113 void UpdateBlockControl(BasicBlock* block,
114 BlockEffectControlMap* block_effects) {
115 Node* control = block->NodeAt(0);
116 DCHECK(NodeProperties::IsControl(control));
117
118 // Do not rewire the end node.
119 if (control->opcode() == IrOpcode::kEnd) return;
120
121 // Update all inputs to the given control node with the correct control.
122 DCHECK(control->opcode() == IrOpcode::kMerge ||
123 static_cast<size_t>(control->op()->ControlInputCount()) ==
124 block->PredecessorCount());
125 if (static_cast<size_t>(control->op()->ControlInputCount()) !=
126 block->PredecessorCount()) {
127 return; // We already re-wired the control inputs of this node.
128 }
129 for (int i = 0; i < control->op()->ControlInputCount(); i++) {
130 Node* input = NodeProperties::GetControlInput(control, i);
131 BasicBlock* predecessor = block->PredecessorAt(static_cast<size_t>(i));
132 const BlockEffectControlData& block_effect =
133 block_effects->For(predecessor, block);
134 if (input != block_effect.current_control) {
135 NodeProperties::ReplaceControlInput(control, block_effect.current_control,
136 i);
137 }
138 }
139 }
140
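// In a reverse-post-order numbering every forward edge goes from a lower to
// a higher rpo number, so a predecessor numbered at or after {block} can
// only reach it through a back edge (i.e. {block} is a loop header).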
141 bool HasIncomingBackEdges(BasicBlock* block) {
142 for (BasicBlock* pred : block->predecessors()) {
143 if (pred->rpo_number() >= block->rpo_number()) {
144 return true;
145 }
146 }
147 return false;
148 }
149
150 void RemoveRenameNode(Node* node) {
151 DCHECK(IrOpcode::kFinishRegion == node->opcode() ||
152 IrOpcode::kBeginRegion == node->opcode() ||
153 IrOpcode::kTypeGuard == node->opcode());
154 // Update the value/context uses to the value input of the finish node and
155 // the effect uses to the effect input.
156 for (Edge edge : node->use_edges()) {
157 DCHECK(!edge.from()->IsDead());
158 if (NodeProperties::IsEffectEdge(edge)) {
159 edge.UpdateTo(NodeProperties::GetEffectInput(node));
160 } else {
161 DCHECK(!NodeProperties::IsControlEdge(edge));
162 DCHECK(!NodeProperties::IsFrameStateEdge(edge));
163 edge.UpdateTo(node->InputAt(0));
164 }
165 }
166 node->Kill();
167 }
168
169 void TryCloneBranch(Node* node, BasicBlock* block, Zone* temp_zone,
170 Graph* graph, CommonOperatorBuilder* common,
171 BlockEffectControlMap* block_effects,
172 SourcePositionTable* source_positions,
173 NodeOriginTable* node_origins) {
174 DCHECK_EQ(IrOpcode::kBranch, node->opcode());
175
176 // This optimization is a special case of (super)block cloning. It takes an
177 // input graph as shown below and clones the Branch node for every predecessor
178 // to the Merge, essentially removing the Merge completely. This avoids
179 // materializing the bit for the Phi and may offer potential for further
180 // branch folding optimizations (i.e. because one or more inputs to the Phi is
181 // a constant). Note that there may be more Phi nodes hanging off the Merge,
182 // but we can currently only handle a certain subset of them (actually only Phi and
183 // EffectPhi nodes whose uses have either the IfTrue or IfFalse as control
184 // input).
185
186 // Control1 ... ControlN
187 // ^ ^
188 // | | Cond1 ... CondN
189 // +----+ +----+ ^ ^
190 // | | | |
191 // | | +----+ |
192 // Merge<--+ | +------------+
193 // ^ \|/
194 // | Phi
195 // | |
196 // Branch----+
197 // ^
198 // |
199 // +-----+-----+
200 // | |
201 // IfTrue IfFalse
202 // ^ ^
203 // | |
204
205 // The resulting graph (modulo the Phi and EffectPhi nodes) looks like this:
206
207 // Control1 Cond1 ... ControlN CondN
208 // ^ ^ ^ ^
209 // \ / \ /
210 // Branch ... Branch
211 // ^ ^
212 // | |
213 // +---+---+ +---+----+
214 // | | | |
215 // IfTrue IfFalse ... IfTrue IfFalse
216 // ^ ^ ^ ^
217 // | | | |
218 // +--+ +-------------+ |
219 // | | +--------------+ +--+
220 // | | | |
221 // Merge Merge
222 // ^ ^
223 // | |
224
225 SourcePositionTable::Scope scope(source_positions,
226 source_positions->GetSourcePosition(node));
227 NodeOriginTable::Scope origin_scope(node_origins, "clone branch", node);
228 Node* branch = node;
229 Node* cond = NodeProperties::GetValueInput(branch, 0);
230 if (!cond->OwnedBy(branch) || cond->opcode() != IrOpcode::kPhi) return;
231 Node* merge = NodeProperties::GetControlInput(branch);
232 if (merge->opcode() != IrOpcode::kMerge ||
233 NodeProperties::GetControlInput(cond) != merge) {
234 return;
235 }
236 // Grab the IfTrue/IfFalse projections of the Branch.
237 BranchMatcher matcher(branch);
238 // Check/collect other Phi/EffectPhi nodes hanging off the Merge.
239 NodeVector phis(temp_zone);
240 for (Node* const use : merge->uses()) {
241 if (use == branch || use == cond) continue;
242 // We cannot currently deal with non-Phi/EffectPhi nodes hanging off the
243 // Merge. Ideally, we would just clone the nodes (and everything that
244 // depends on them up to some distant join point), but that requires knowledge
245 // about dominance/post-dominance.
246 if (!NodeProperties::IsPhi(use)) return;
247 for (Edge edge : use->use_edges()) {
248 // Right now we can only handle Phi/EffectPhi nodes whose uses are
249 // directly control-dependent on either the IfTrue or the IfFalse
250 // successor, because we know exactly how to update those uses.
251 if (edge.from()->op()->ControlInputCount() != 1) return;
252 Node* control = NodeProperties::GetControlInput(edge.from());
253 if (NodeProperties::IsPhi(edge.from())) {
254 control = NodeProperties::GetControlInput(control, edge.index());
255 }
256 if (control != matcher.IfTrue() && control != matcher.IfFalse()) return;
257 }
258 phis.push_back(use);
259 }
260 BranchHint const hint = BranchHintOf(branch->op());
261 int const input_count = merge->op()->ControlInputCount();
262 DCHECK_LE(1, input_count);
263 Node** const inputs = graph->zone()->NewArray<Node*>(2 * input_count);
264 Node** const merge_true_inputs = &inputs[0];
265 Node** const merge_false_inputs = &inputs[input_count];
266 for (int index = 0; index < input_count; ++index) {
267 Node* cond1 = NodeProperties::GetValueInput(cond, index);
268 Node* control1 = NodeProperties::GetControlInput(merge, index);
269 Node* branch1 = graph->NewNode(common->Branch(hint), cond1, control1);
270 merge_true_inputs[index] = graph->NewNode(common->IfTrue(), branch1);
271 merge_false_inputs[index] = graph->NewNode(common->IfFalse(), branch1);
272 }
273 Node* const merge_true = matcher.IfTrue();
274 Node* const merge_false = matcher.IfFalse();
275 merge_true->TrimInputCount(0);
276 merge_false->TrimInputCount(0);
277 for (int i = 0; i < input_count; ++i) {
278 merge_true->AppendInput(graph->zone(), merge_true_inputs[i]);
279 merge_false->AppendInput(graph->zone(), merge_false_inputs[i]);
280 }
281 DCHECK_EQ(2u, block->SuccessorCount());
282 NodeProperties::ChangeOp(matcher.IfTrue(), common->Merge(input_count));
283 NodeProperties::ChangeOp(matcher.IfFalse(), common->Merge(input_count));
284 int const true_index =
285 block->SuccessorAt(0)->NodeAt(0) == matcher.IfTrue() ? 0 : 1;
286 BlockEffectControlData* true_block_data =
287 &block_effects->For(block, block->SuccessorAt(true_index));
288 BlockEffectControlData* false_block_data =
289 &block_effects->For(block, block->SuccessorAt(true_index ^ 1));
290 for (Node* const phi : phis) {
291 for (int index = 0; index < input_count; ++index) {
292 inputs[index] = phi->InputAt(index);
293 }
294 inputs[input_count] = merge_true;
295 Node* phi_true = graph->NewNode(phi->op(), input_count + 1, inputs);
296 inputs[input_count] = merge_false;
297 Node* phi_false = graph->NewNode(phi->op(), input_count + 1, inputs);
298 if (phi->UseCount() == 0) {
299 DCHECK_EQ(phi->opcode(), IrOpcode::kEffectPhi);
300 } else {
301 for (Edge edge : phi->use_edges()) {
302 Node* control = NodeProperties::GetControlInput(edge.from());
303 if (NodeProperties::IsPhi(edge.from())) {
304 control = NodeProperties::GetControlInput(control, edge.index());
305 }
306 DCHECK(control == matcher.IfTrue() || control == matcher.IfFalse());
307 edge.UpdateTo((control == matcher.IfTrue()) ? phi_true : phi_false);
308 }
309 }
310 if (phi->opcode() == IrOpcode::kEffectPhi) {
311 true_block_data->current_effect = phi_true;
312 false_block_data->current_effect = phi_false;
313 }
314 phi->Kill();
315 }
316 // Fix up IfTrue and IfFalse and kill all dead nodes.
317 if (branch == block->control_input()) {
318 true_block_data->current_control = merge_true;
319 false_block_data->current_control = merge_false;
320 }
321 branch->Kill();
322 cond->Kill();
323 merge->Kill();
324 }
325
326 } // namespace
327
328 void EffectControlLinearizer::Run() {
329 BlockEffectControlMap block_effects(temp_zone());
330 ZoneVector<PendingEffectPhi> pending_effect_phis(temp_zone());
331 ZoneVector<BasicBlock*> pending_block_controls(temp_zone());
332 NodeVector inputs_buffer(temp_zone());
333
334 for (BasicBlock* block : *(schedule()->rpo_order())) {
335 size_t instr = 0;
336
337 // The control node should be the first.
338 Node* control = block->NodeAt(instr);
339 DCHECK(NodeProperties::IsControl(control));
340 // Update the control inputs.
341 if (HasIncomingBackEdges(block)) {
342 // If there are back edges, we need to update later because we have not
343 // computed the control yet. This should only happen for loops.
344 DCHECK_EQ(IrOpcode::kLoop, control->opcode());
345 pending_block_controls.push_back(block);
346 } else {
347 // If there are no back edges, we can update now.
348 UpdateBlockControl(block, &block_effects);
349 }
350 instr++;
351
352 // Iterate over the phis and update the effect phis.
353 Node* effect_phi = nullptr;
354 Node* terminate = nullptr;
355 for (; instr < block->NodeCount(); instr++) {
356 Node* node = block->NodeAt(instr);
357 // Only go through the phis and effect phis.
358 if (node->opcode() == IrOpcode::kEffectPhi) {
359 // There should be at most one effect phi in a block.
360 DCHECK_NULL(effect_phi);
361 // IfException blocks should not have effect phis.
362 DCHECK_NE(IrOpcode::kIfException, control->opcode());
363 effect_phi = node;
364 } else if (node->opcode() == IrOpcode::kPhi) {
365 // Just skip phis.
366 } else if (node->opcode() == IrOpcode::kTerminate) {
367 DCHECK_NULL(terminate);
368 terminate = node;
369 } else {
370 break;
371 }
372 }
373
374 if (effect_phi) {
375 // Make sure we update the inputs to the incoming blocks' effects.
376 if (HasIncomingBackEdges(block)) {
377 // In case of loops, we do not update the effect phi immediately
378 // because the back predecessor has not been handled yet. We just
379 // record the effect phi for later processing.
380 pending_effect_phis.push_back(PendingEffectPhi(effect_phi, block));
381 } else {
382 UpdateEffectPhi(effect_phi, block, &block_effects, jsgraph());
383 }
384 }
385
386 Node* effect = effect_phi;
387 if (effect == nullptr) {
388 // There was no effect phi.
389
390 // Since a loop should have at least a StackCheck, only loops in
391 // unreachable code can have no effect phi.
392 DCHECK_IMPLIES(
393 HasIncomingBackEdges(block),
394 block_effects.For(block->PredecessorAt(0), block)
395 .current_effect->opcode() == IrOpcode::kUnreachable);
396 if (block == schedule()->start()) {
397 // Start block => effect is start.
398 DCHECK_EQ(graph()->start(), control);
399 effect = graph()->start();
400 } else if (control->opcode() == IrOpcode::kEnd) {
401 // End block is just a dummy, no effect needed.
402 DCHECK_EQ(BasicBlock::kNone, block->control());
403 DCHECK_EQ(1u, block->size());
404 effect = nullptr;
405 } else {
406 // If all the predecessors have the same effect, we can use it as our
407 // current effect.
408 for (size_t i = 0; i < block->PredecessorCount(); ++i) {
409 const BlockEffectControlData& data =
410 block_effects.For(block->PredecessorAt(i), block);
411 if (!effect) effect = data.current_effect;
412 if (data.current_effect != effect) {
413 effect = nullptr;
414 break;
415 }
416 }
417 if (effect == nullptr) {
418 DCHECK_NE(IrOpcode::kIfException, control->opcode());
419 // The input blocks do not have the same effect. We have
420 // to create an effect phi node.
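// Its inputs start out as {Dead} placeholders and are replaced with the
// predecessors' final effects by UpdateEffectPhi (immediately below, or
// after the first pass for loop headers).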
421 inputs_buffer.clear();
422 inputs_buffer.resize(block->PredecessorCount(), jsgraph()->Dead());
423 inputs_buffer.push_back(control);
424 effect = graph()->NewNode(
425 common()->EffectPhi(static_cast<int>(block->PredecessorCount())),
426 static_cast<int>(inputs_buffer.size()), &(inputs_buffer.front()));
427 // For loops, we update the effect phi node later to break cycles.
428 if (control->opcode() == IrOpcode::kLoop) {
429 pending_effect_phis.push_back(PendingEffectPhi(effect, block));
430 } else {
431 UpdateEffectPhi(effect, block, &block_effects, jsgraph());
432 }
433 } else if (control->opcode() == IrOpcode::kIfException) {
434 // The IfException is connected into the effect chain, so we need
435 // to update the effect here.
436 NodeProperties::ReplaceEffectInput(control, effect);
437 effect = control;
438 }
439 }
440 }
441
442 // Fixup the Terminate node.
443 if (terminate != nullptr) {
444 NodeProperties::ReplaceEffectInput(terminate, effect);
445 }
446
447 // The frame state at block entry is determined by the frame states leaving
448 // all predecessors. In case there is no frame state dominating this block,
449 // we can rely on a checkpoint being present before the next deoptimization.
450 // TODO(mstarzinger): Eventually we will need to go hunt for a frame state
451 // once deoptimizing nodes roam freely through the schedule.
452 Node* frame_state = nullptr;
453 if (block != schedule()->start()) {
454 // If all the predecessors have the same effect, we can use it
455 // as our current effect.
456 frame_state =
457 block_effects.For(block->PredecessorAt(0), block).current_frame_state;
458 for (size_t i = 1; i < block->PredecessorCount(); i++) {
459 if (block_effects.For(block->PredecessorAt(i), block)
460 .current_frame_state != frame_state) {
461 frame_state = nullptr;
462 frame_state_zapper_ = graph()->end();
463 break;
464 }
465 }
466 }
467
468 // Process the ordinary instructions.
469 for (; instr < block->NodeCount(); instr++) {
470 Node* node = block->NodeAt(instr);
471 ProcessNode(node, &frame_state, &effect, &control);
472 }
473
474 switch (block->control()) {
475 case BasicBlock::kGoto:
476 case BasicBlock::kNone:
477 break;
478
479 case BasicBlock::kCall:
480 case BasicBlock::kTailCall:
481 case BasicBlock::kSwitch:
482 case BasicBlock::kReturn:
483 case BasicBlock::kDeoptimize:
484 case BasicBlock::kThrow:
485 ProcessNode(block->control_input(), &frame_state, &effect, &control);
486 break;
487
488 case BasicBlock::kBranch:
489 ProcessNode(block->control_input(), &frame_state, &effect, &control);
490 TryCloneBranch(block->control_input(), block, temp_zone(), graph(),
491 common(), &block_effects, source_positions_,
492 node_origins_);
493 break;
494 }
495
496 // Store the effect, control and frame state for later use.
497 for (BasicBlock* successor : block->successors()) {
498 BlockEffectControlData* data = &block_effects.For(block, successor);
499 if (data->current_effect == nullptr) {
500 data->current_effect = effect;
501 }
502 if (data->current_control == nullptr) {
503 data->current_control = control;
504 }
505 data->current_frame_state = frame_state;
506 }
507 }
508
509 for (BasicBlock* pending_block_control : pending_block_controls) {
510 UpdateBlockControl(pending_block_control, &block_effects);
511 }
512 // Update the incoming edges of the effect phis that could not be processed
513 // during the first pass (because they could have incoming back edges).
514 for (const PendingEffectPhi& pending_effect_phi : pending_effect_phis) {
515 UpdateEffectPhi(pending_effect_phi.effect_phi, pending_effect_phi.block,
516 &block_effects, jsgraph());
517 }
518 }
519
520 void EffectControlLinearizer::ProcessNode(Node* node, Node** frame_state,
521 Node** effect, Node** control) {
522 SourcePositionTable::Scope scope(source_positions_,
523 source_positions_->GetSourcePosition(node));
524 NodeOriginTable::Scope origin_scope(node_origins_, "process node", node);
525
526 // If the node needs to be wired into the effect/control chain, do this
527 // here. Pass current frame state for lowering to eager deoptimization.
528 if (TryWireInStateEffect(node, *frame_state, effect, control)) {
529 return;
530 }
531
532 // If the node has a visible effect, then there must be a checkpoint in the
533 // effect chain before we are allowed to place another eager deoptimization
534 // point. We zap the frame state to ensure this invariant is maintained.
535 if (region_observability_ == RegionObservability::kObservable &&
536 !node->op()->HasProperty(Operator::kNoWrite)) {
537 *frame_state = nullptr;
538 frame_state_zapper_ = node;
539 }
540
541 // Remove the end markers of an 'atomic' allocation region because the
542 // region should be wired-in now.
543 if (node->opcode() == IrOpcode::kFinishRegion) {
544 // Reset the current region observability.
545 region_observability_ = RegionObservability::kObservable;
546 // Update the value uses to the value input of the finish node and
547 // the effect uses to the effect input.
548 return RemoveRenameNode(node);
549 }
550 if (node->opcode() == IrOpcode::kBeginRegion) {
551 // Determine the observability for this region and use that for all
552 // nodes inside the region (i.e. ignore the absence of kNoWrite on
553 // StoreField and other operators).
554 DCHECK_NE(RegionObservability::kNotObservable, region_observability_);
555 region_observability_ = RegionObservabilityOf(node->op());
556 // Update the value uses to the value input of the finish node and
557 // the effect uses to the effect input.
558 return RemoveRenameNode(node);
559 }
560 if (node->opcode() == IrOpcode::kTypeGuard) {
561 return RemoveRenameNode(node);
562 }
563
564 // Special treatment for checkpoint nodes.
565 if (node->opcode() == IrOpcode::kCheckpoint) {
566 // Unlink the checkpoint; effect uses will be updated to the incoming
567 // effect that is passed. The frame state is preserved for lowering.
568 DCHECK_EQ(RegionObservability::kObservable, region_observability_);
569 *frame_state = NodeProperties::GetFrameStateInput(node);
570 return;
571 }
572
573 // The IfSuccess nodes should always start a basic block (and basic block
574 // start nodes are not handled in the ProcessNode method).
575 DCHECK_NE(IrOpcode::kIfSuccess, node->opcode());
576
577 // If the node takes an effect, replace with the current one.
578 if (node->op()->EffectInputCount() > 0) {
579 DCHECK_EQ(1, node->op()->EffectInputCount());
580 Node* input_effect = NodeProperties::GetEffectInput(node);
581
582 if (input_effect != *effect) {
583 NodeProperties::ReplaceEffectInput(node, *effect);
584 }
585
586 // If the node produces an effect, update our current effect. (However,
587 // ignore new effect chains started with ValueEffect.)
588 if (node->op()->EffectOutputCount() > 0) {
589 DCHECK_EQ(1, node->op()->EffectOutputCount());
590 *effect = node;
591 }
592 } else {
593 // New effect chain is only started with a Start or ValueEffect node.
594 DCHECK(node->op()->EffectOutputCount() == 0 ||
595 node->opcode() == IrOpcode::kStart);
596 }
597
598 // Rewire control inputs.
599 for (int i = 0; i < node->op()->ControlInputCount(); i++) {
600 NodeProperties::ReplaceControlInput(node, *control, i);
601 }
602 // Update the current control.
603 if (node->op()->ControlOutputCount() > 0) {
604 *control = node;
605 }
606
607 // Break the effect chain on {Unreachable} and reconnect to the graph end.
608 // Mark the following code for deletion by connecting to the {Dead} node.
609 if (node->opcode() == IrOpcode::kUnreachable) {
610 ConnectUnreachableToEnd(*effect, *control, jsgraph());
611 *effect = *control = jsgraph()->Dead();
612 }
613 }
614
615 bool EffectControlLinearizer::TryWireInStateEffect(Node* node,
616 Node* frame_state,
617 Node** effect,
618 Node** control) {
619 gasm()->Reset(*effect, *control);
620 Node* result = nullptr;
621 switch (node->opcode()) {
622 case IrOpcode::kChangeBitToTagged:
623 result = LowerChangeBitToTagged(node);
624 break;
625 case IrOpcode::kChangeInt31ToTaggedSigned:
626 result = LowerChangeInt31ToTaggedSigned(node);
627 break;
628 case IrOpcode::kChangeInt32ToTagged:
629 result = LowerChangeInt32ToTagged(node);
630 break;
631 case IrOpcode::kChangeUint32ToTagged:
632 result = LowerChangeUint32ToTagged(node);
633 break;
634 case IrOpcode::kChangeFloat64ToTagged:
635 result = LowerChangeFloat64ToTagged(node);
636 break;
637 case IrOpcode::kChangeFloat64ToTaggedPointer:
638 result = LowerChangeFloat64ToTaggedPointer(node);
639 break;
640 case IrOpcode::kChangeTaggedSignedToInt32:
641 result = LowerChangeTaggedSignedToInt32(node);
642 break;
643 case IrOpcode::kChangeTaggedToBit:
644 result = LowerChangeTaggedToBit(node);
645 break;
646 case IrOpcode::kChangeTaggedToInt32:
647 result = LowerChangeTaggedToInt32(node);
648 break;
649 case IrOpcode::kChangeTaggedToUint32:
650 result = LowerChangeTaggedToUint32(node);
651 break;
652 case IrOpcode::kChangeTaggedToFloat64:
653 result = LowerChangeTaggedToFloat64(node);
654 break;
655 case IrOpcode::kChangeTaggedToTaggedSigned:
656 result = LowerChangeTaggedToTaggedSigned(node);
657 break;
658 case IrOpcode::kTruncateTaggedToBit:
659 result = LowerTruncateTaggedToBit(node);
660 break;
661 case IrOpcode::kTruncateTaggedPointerToBit:
662 result = LowerTruncateTaggedPointerToBit(node);
663 break;
664 case IrOpcode::kTruncateTaggedToFloat64:
665 result = LowerTruncateTaggedToFloat64(node);
666 break;
667 case IrOpcode::kCheckBounds:
668 result = LowerCheckBounds(node, frame_state);
669 break;
670 case IrOpcode::kPoisonIndex:
671 result = LowerPoisonIndex(node);
672 break;
673 case IrOpcode::kCheckMaps:
674 LowerCheckMaps(node, frame_state);
675 break;
676 case IrOpcode::kCompareMaps:
677 result = LowerCompareMaps(node);
678 break;
679 case IrOpcode::kCheckNumber:
680 result = LowerCheckNumber(node, frame_state);
681 break;
682 case IrOpcode::kCheckReceiver:
683 result = LowerCheckReceiver(node, frame_state);
684 break;
685 case IrOpcode::kCheckSymbol:
686 result = LowerCheckSymbol(node, frame_state);
687 break;
688 case IrOpcode::kCheckString:
689 result = LowerCheckString(node, frame_state);
690 break;
691 case IrOpcode::kCheckInternalizedString:
692 result = LowerCheckInternalizedString(node, frame_state);
693 break;
694 case IrOpcode::kCheckIf:
695 LowerCheckIf(node, frame_state);
696 break;
697 case IrOpcode::kCheckedInt32Add:
698 result = LowerCheckedInt32Add(node, frame_state);
699 break;
700 case IrOpcode::kCheckedInt32Sub:
701 result = LowerCheckedInt32Sub(node, frame_state);
702 break;
703 case IrOpcode::kCheckedInt32Div:
704 result = LowerCheckedInt32Div(node, frame_state);
705 break;
706 case IrOpcode::kCheckedInt32Mod:
707 result = LowerCheckedInt32Mod(node, frame_state);
708 break;
709 case IrOpcode::kCheckedUint32Div:
710 result = LowerCheckedUint32Div(node, frame_state);
711 break;
712 case IrOpcode::kCheckedUint32Mod:
713 result = LowerCheckedUint32Mod(node, frame_state);
714 break;
715 case IrOpcode::kCheckedInt32Mul:
716 result = LowerCheckedInt32Mul(node, frame_state);
717 break;
718 case IrOpcode::kCheckedInt32ToTaggedSigned:
719 result = LowerCheckedInt32ToTaggedSigned(node, frame_state);
720 break;
721 case IrOpcode::kCheckedUint32ToInt32:
722 result = LowerCheckedUint32ToInt32(node, frame_state);
723 break;
724 case IrOpcode::kCheckedUint32ToTaggedSigned:
725 result = LowerCheckedUint32ToTaggedSigned(node, frame_state);
726 break;
727 case IrOpcode::kCheckedFloat64ToInt32:
728 result = LowerCheckedFloat64ToInt32(node, frame_state);
729 break;
730 case IrOpcode::kCheckedTaggedSignedToInt32:
731 if (frame_state == nullptr) {
732 FATAL("No frame state (zapped by #%d: %s)", frame_state_zapper_->id(),
733 frame_state_zapper_->op()->mnemonic());
734 }
735 result = LowerCheckedTaggedSignedToInt32(node, frame_state);
736 break;
737 case IrOpcode::kCheckedTaggedToInt32:
738 result = LowerCheckedTaggedToInt32(node, frame_state);
739 break;
740 case IrOpcode::kCheckedTaggedToFloat64:
741 result = LowerCheckedTaggedToFloat64(node, frame_state);
742 break;
743 case IrOpcode::kCheckedTaggedToTaggedSigned:
744 result = LowerCheckedTaggedToTaggedSigned(node, frame_state);
745 break;
746 case IrOpcode::kCheckedTaggedToTaggedPointer:
747 result = LowerCheckedTaggedToTaggedPointer(node, frame_state);
748 break;
749 case IrOpcode::kTruncateTaggedToWord32:
750 result = LowerTruncateTaggedToWord32(node);
751 break;
752 case IrOpcode::kCheckedTruncateTaggedToWord32:
753 result = LowerCheckedTruncateTaggedToWord32(node, frame_state);
754 break;
755 case IrOpcode::kNumberToString:
756 result = LowerNumberToString(node);
757 break;
758 case IrOpcode::kObjectIsArrayBufferView:
759 result = LowerObjectIsArrayBufferView(node);
760 break;
761 case IrOpcode::kObjectIsBigInt:
762 result = LowerObjectIsBigInt(node);
763 break;
764 case IrOpcode::kObjectIsCallable:
765 result = LowerObjectIsCallable(node);
766 break;
767 case IrOpcode::kObjectIsConstructor:
768 result = LowerObjectIsConstructor(node);
769 break;
770 case IrOpcode::kObjectIsDetectableCallable:
771 result = LowerObjectIsDetectableCallable(node);
772 break;
773 case IrOpcode::kObjectIsMinusZero:
774 result = LowerObjectIsMinusZero(node);
775 break;
776 case IrOpcode::kObjectIsNaN:
777 result = LowerObjectIsNaN(node);
778 break;
779 case IrOpcode::kNumberIsNaN:
780 result = LowerNumberIsNaN(node);
781 break;
782 case IrOpcode::kObjectIsNonCallable:
783 result = LowerObjectIsNonCallable(node);
784 break;
785 case IrOpcode::kObjectIsNumber:
786 result = LowerObjectIsNumber(node);
787 break;
788 case IrOpcode::kObjectIsReceiver:
789 result = LowerObjectIsReceiver(node);
790 break;
791 case IrOpcode::kObjectIsSmi:
792 result = LowerObjectIsSmi(node);
793 break;
794 case IrOpcode::kObjectIsString:
795 result = LowerObjectIsString(node);
796 break;
797 case IrOpcode::kObjectIsSymbol:
798 result = LowerObjectIsSymbol(node);
799 break;
800 case IrOpcode::kObjectIsUndetectable:
801 result = LowerObjectIsUndetectable(node);
802 break;
803 case IrOpcode::kArgumentsFrame:
804 result = LowerArgumentsFrame(node);
805 break;
806 case IrOpcode::kArgumentsLength:
807 result = LowerArgumentsLength(node);
808 break;
809 case IrOpcode::kToBoolean:
810 result = LowerToBoolean(node);
811 break;
812 case IrOpcode::kTypeOf:
813 result = LowerTypeOf(node);
814 break;
815 case IrOpcode::kNewDoubleElements:
816 result = LowerNewDoubleElements(node);
817 break;
818 case IrOpcode::kNewSmiOrObjectElements:
819 result = LowerNewSmiOrObjectElements(node);
820 break;
821 case IrOpcode::kNewArgumentsElements:
822 result = LowerNewArgumentsElements(node);
823 break;
824 case IrOpcode::kNewConsString:
825 result = LowerNewConsString(node);
826 break;
827 case IrOpcode::kArrayBufferWasNeutered:
828 result = LowerArrayBufferWasNeutered(node);
829 break;
830 case IrOpcode::kSameValue:
831 result = LowerSameValue(node);
832 break;
833 case IrOpcode::kDeadValue:
834 result = LowerDeadValue(node);
835 break;
836 case IrOpcode::kStringFromSingleCharCode:
837 result = LowerStringFromSingleCharCode(node);
838 break;
839 case IrOpcode::kStringFromSingleCodePoint:
840 result = LowerStringFromSingleCodePoint(node);
841 break;
842 case IrOpcode::kStringIndexOf:
843 result = LowerStringIndexOf(node);
844 break;
845 case IrOpcode::kStringLength:
846 result = LowerStringLength(node);
847 break;
848 case IrOpcode::kStringToNumber:
849 result = LowerStringToNumber(node);
850 break;
851 case IrOpcode::kStringCharCodeAt:
852 result = LowerStringCharCodeAt(node);
853 break;
854 case IrOpcode::kStringCodePointAt:
855 result = LowerStringCodePointAt(node, UnicodeEncodingOf(node->op()));
856 break;
857 case IrOpcode::kStringToLowerCaseIntl:
858 result = LowerStringToLowerCaseIntl(node);
859 break;
860 case IrOpcode::kStringToUpperCaseIntl:
861 result = LowerStringToUpperCaseIntl(node);
862 break;
863 case IrOpcode::kStringSubstring:
864 result = LowerStringSubstring(node);
865 break;
866 case IrOpcode::kStringEqual:
867 result = LowerStringEqual(node);
868 break;
869 case IrOpcode::kStringLessThan:
870 result = LowerStringLessThan(node);
871 break;
872 case IrOpcode::kStringLessThanOrEqual:
873 result = LowerStringLessThanOrEqual(node);
874 break;
875 case IrOpcode::kNumberIsFloat64Hole:
876 result = LowerNumberIsFloat64Hole(node);
877 break;
878 case IrOpcode::kNumberIsFinite:
879 result = LowerNumberIsFinite(node);
880 break;
881 case IrOpcode::kObjectIsFiniteNumber:
882 result = LowerObjectIsFiniteNumber(node);
883 break;
884 case IrOpcode::kNumberIsInteger:
885 result = LowerNumberIsInteger(node);
886 break;
887 case IrOpcode::kObjectIsInteger:
888 result = LowerObjectIsInteger(node);
889 break;
890 case IrOpcode::kNumberIsSafeInteger:
891 result = LowerNumberIsSafeInteger(node);
892 break;
893 case IrOpcode::kObjectIsSafeInteger:
894 result = LowerObjectIsSafeInteger(node);
895 break;
896 case IrOpcode::kCheckFloat64Hole:
897 result = LowerCheckFloat64Hole(node, frame_state);
898 break;
899 case IrOpcode::kCheckNotTaggedHole:
900 result = LowerCheckNotTaggedHole(node, frame_state);
901 break;
902 case IrOpcode::kConvertTaggedHoleToUndefined:
903 result = LowerConvertTaggedHoleToUndefined(node);
904 break;
905 case IrOpcode::kCheckEqualsInternalizedString:
906 LowerCheckEqualsInternalizedString(node, frame_state);
907 break;
908 case IrOpcode::kAllocate:
909 result = LowerAllocate(node);
910 break;
911 case IrOpcode::kCheckEqualsSymbol:
912 LowerCheckEqualsSymbol(node, frame_state);
913 break;
914 case IrOpcode::kPlainPrimitiveToNumber:
915 result = LowerPlainPrimitiveToNumber(node);
916 break;
917 case IrOpcode::kPlainPrimitiveToWord32:
918 result = LowerPlainPrimitiveToWord32(node);
919 break;
920 case IrOpcode::kPlainPrimitiveToFloat64:
921 result = LowerPlainPrimitiveToFloat64(node);
922 break;
923 case IrOpcode::kEnsureWritableFastElements:
924 result = LowerEnsureWritableFastElements(node);
925 break;
926 case IrOpcode::kMaybeGrowFastElements:
927 result = LowerMaybeGrowFastElements(node, frame_state);
928 break;
929 case IrOpcode::kTransitionElementsKind:
930 LowerTransitionElementsKind(node);
931 break;
932 case IrOpcode::kLoadFieldByIndex:
933 result = LowerLoadFieldByIndex(node);
934 break;
935 case IrOpcode::kLoadTypedElement:
936 result = LowerLoadTypedElement(node);
937 break;
938 case IrOpcode::kLoadDataViewElement:
939 result = LowerLoadDataViewElement(node);
940 break;
941 case IrOpcode::kStoreTypedElement:
942 LowerStoreTypedElement(node);
943 break;
944 case IrOpcode::kStoreDataViewElement:
945 LowerStoreDataViewElement(node);
946 break;
947 case IrOpcode::kStoreSignedSmallElement:
948 LowerStoreSignedSmallElement(node);
949 break;
950 case IrOpcode::kFindOrderedHashMapEntry:
951 result = LowerFindOrderedHashMapEntry(node);
952 break;
953 case IrOpcode::kFindOrderedHashMapEntryForInt32Key:
954 result = LowerFindOrderedHashMapEntryForInt32Key(node);
955 break;
956 case IrOpcode::kTransitionAndStoreNumberElement:
957 LowerTransitionAndStoreNumberElement(node);
958 break;
959 case IrOpcode::kTransitionAndStoreNonNumberElement:
960 LowerTransitionAndStoreNonNumberElement(node);
961 break;
962 case IrOpcode::kTransitionAndStoreElement:
963 LowerTransitionAndStoreElement(node);
964 break;
965 case IrOpcode::kRuntimeAbort:
966 LowerRuntimeAbort(node);
967 break;
968 case IrOpcode::kConvertReceiver:
969 result = LowerConvertReceiver(node);
970 break;
971 case IrOpcode::kFloat64RoundUp:
972 if (!LowerFloat64RoundUp(node).To(&result)) {
973 return false;
974 }
975 break;
976 case IrOpcode::kFloat64RoundDown:
977 if (!LowerFloat64RoundDown(node).To(&result)) {
978 return false;
979 }
980 break;
981 case IrOpcode::kFloat64RoundTruncate:
982 if (!LowerFloat64RoundTruncate(node).To(&result)) {
983 return false;
984 }
985 break;
986 case IrOpcode::kFloat64RoundTiesEven:
987 if (!LowerFloat64RoundTiesEven(node).To(&result)) {
988 return false;
989 }
990 break;
991 case IrOpcode::kDateNow:
992 result = LowerDateNow(node);
993 break;
994 default:
995 return false;
996 }
997
998 if ((result ? 1 : 0) != node->op()->ValueOutputCount()) {
999 FATAL(
1000 "Effect control linearizer lowering of '%s':"
1001 " value output count does not agree.",
1002 node->op()->mnemonic());
1003 }
1004
1005 *effect = gasm()->ExtractCurrentEffect();
1006 *control = gasm()->ExtractCurrentControl();
1007 NodeProperties::ReplaceUses(node, result, *effect, *control);
1008 return true;
1009 }
1010
1011 #define __ gasm()->
1012
1013 Node* EffectControlLinearizer::LowerChangeFloat64ToTagged(Node* node) {
1014 CheckForMinusZeroMode mode = CheckMinusZeroModeOf(node->op());
1015 Node* value = node->InputAt(0);
1016
1017 auto done = __ MakeLabel(MachineRepresentation::kTagged);
1018 auto if_heapnumber = __ MakeDeferredLabel();
1019 auto if_int32 = __ MakeLabel();
1020
1021 Node* value32 = __ RoundFloat64ToInt32(value);
1022 __ GotoIf(__ Float64Equal(value, __ ChangeInt32ToFloat64(value32)),
1023 &if_int32);
1024 __ Goto(&if_heapnumber);
1025
1026 __ Bind(&if_int32);
1027 {
1028 if (mode == CheckForMinusZeroMode::kCheckForMinusZero) {
1029 Node* zero = __ Int32Constant(0);
1030 auto if_zero = __ MakeDeferredLabel();
1031 auto if_smi = __ MakeLabel();
1032
1033 __ GotoIf(__ Word32Equal(value32, zero), &if_zero);
1034 __ Goto(&if_smi);
1035
1036 __ Bind(&if_zero);
1037 {
1038 // In case of 0, we need to check the high bits for the IEEE -0 pattern.
1039 __ GotoIf(__ Int32LessThan(__ Float64ExtractHighWord32(value), zero),
1040 &if_heapnumber);
1041 __ Goto(&if_smi);
1042 }
1043
1044 __ Bind(&if_smi);
1045 }
1046
1047 if (SmiValuesAre32Bits()) {
1048 Node* value_smi = ChangeInt32ToSmi(value32);
1049 __ Goto(&done, value_smi);
1050 } else {
1051 DCHECK(SmiValuesAre31Bits());
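// With 31-bit Smis the tagged form is the 32-bit value shifted left by one
// (the low tag bit is zero), so value32 + value32 produces the Smi and the
// overflow bit tells us whether the value fits into 31 bits.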
1052 Node* add = __ Int32AddWithOverflow(value32, value32);
1053 Node* ovf = __ Projection(1, add);
1054 __ GotoIf(ovf, &if_heapnumber);
1055 Node* value_smi = __ Projection(0, add);
1056 value_smi = ChangeInt32ToIntPtr(value_smi);
1057 __ Goto(&done, value_smi);
1058 }
1059 }
1060
1061 __ Bind(&if_heapnumber);
1062 {
1063 Node* value_number = AllocateHeapNumberWithValue(value);
1064 __ Goto(&done, value_number);
1065 }
1066
1067 __ Bind(&done);
1068 return done.PhiAt(0);
1069 }
1070
1071 Node* EffectControlLinearizer::LowerChangeFloat64ToTaggedPointer(Node* node) {
1072 Node* value = node->InputAt(0);
1073 return AllocateHeapNumberWithValue(value);
1074 }
1075
1076 Node* EffectControlLinearizer::LowerChangeBitToTagged(Node* node) {
1077 Node* value = node->InputAt(0);
1078
1079 auto if_true = __ MakeLabel();
1080 auto done = __ MakeLabel(MachineRepresentation::kTagged);
1081
1082 __ GotoIf(value, &if_true);
1083 __ Goto(&done, __ FalseConstant());
1084
1085 __ Bind(&if_true);
1086 __ Goto(&done, __ TrueConstant());
1087
1088 __ Bind(&done);
1089 return done.PhiAt(0);
1090 }
1091
1092 Node* EffectControlLinearizer::LowerChangeInt31ToTaggedSigned(Node* node) {
1093 Node* value = node->InputAt(0);
1094 return ChangeInt32ToSmi(value);
1095 }
1096
1097 Node* EffectControlLinearizer::LowerChangeInt32ToTagged(Node* node) {
1098 Node* value = node->InputAt(0);
1099
1100 if (SmiValuesAre32Bits()) {
1101 return ChangeInt32ToSmi(value);
1102 }
1103 DCHECK(SmiValuesAre31Bits());
1104
1105 auto if_overflow = __ MakeDeferredLabel();
1106 auto done = __ MakeLabel(MachineRepresentation::kTagged);
1107
1108 Node* add = __ Int32AddWithOverflow(value, value);
1109 Node* ovf = __ Projection(1, add);
1110 __ GotoIf(ovf, &if_overflow);
1111 Node* value_smi = __ Projection(0, add);
1112 value_smi = ChangeInt32ToIntPtr(value_smi);
1113 __ Goto(&done, value_smi);
1114
1115 __ Bind(&if_overflow);
1116 Node* number = AllocateHeapNumberWithValue(__ ChangeInt32ToFloat64(value));
1117 __ Goto(&done, number);
1118
1119 __ Bind(&done);
1120 return done.PhiAt(0);
1121 }
1122
1123 Node* EffectControlLinearizer::LowerChangeUint32ToTagged(Node* node) {
1124 Node* value = node->InputAt(0);
1125
1126 auto if_not_in_smi_range = __ MakeDeferredLabel();
1127 auto done = __ MakeLabel(MachineRepresentation::kTagged);
1128
1129 Node* check = __ Uint32LessThanOrEqual(value, SmiMaxValueConstant());
1130 __ GotoIfNot(check, &if_not_in_smi_range);
1131 __ Goto(&done, ChangeUint32ToSmi(value));
1132
1133 __ Bind(&if_not_in_smi_range);
1134 Node* number = AllocateHeapNumberWithValue(__ ChangeUint32ToFloat64(value));
1135
1136 __ Goto(&done, number);
1137 __ Bind(&done);
1138
1139 return done.PhiAt(0);
1140 }
1141
1142 Node* EffectControlLinearizer::LowerChangeTaggedSignedToInt32(Node* node) {
1143 Node* value = node->InputAt(0);
1144 return ChangeSmiToInt32(value);
1145 }
1146
1147 Node* EffectControlLinearizer::LowerChangeTaggedToBit(Node* node) {
1148 Node* value = node->InputAt(0);
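// The input is already known to be a Boolean here, so comparing against the
// true value is sufficient.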
1149 return __ WordEqual(value, __ TrueConstant());
1150 }
1151
1152 void EffectControlLinearizer::TruncateTaggedPointerToBit(
1153 Node* node, GraphAssemblerLabel<1>* done) {
1154 Node* value = node->InputAt(0);
1155
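// ToBoolean for heap objects: false, the empty string, undetectable maps
// (which cover undefined and null), HeapNumbers with value 0.0, -0.0 or NaN,
// and BigInts of length zero map to false; everything else maps to true.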
1156 auto if_heapnumber = __ MakeDeferredLabel();
1157 auto if_bigint = __ MakeDeferredLabel();
1158
1159 Node* zero = __ Int32Constant(0);
1160 Node* fzero = __ Float64Constant(0.0);
1161
1162 // Check if {value} is false.
1163 __ GotoIf(__ WordEqual(value, __ FalseConstant()), done, zero);
1164
1165 // Check if {value} is the empty string.
1166 __ GotoIf(__ WordEqual(value, __ EmptyStringConstant()), done, zero);
1167
1168 // Load the map of {value}.
1169 Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
1170
1171 // Check if the {value} is undetectable and immediately return false.
1172 // This includes undefined and null.
1173 Node* value_map_bitfield =
1174 __ LoadField(AccessBuilder::ForMapBitField(), value_map);
1175 __ GotoIfNot(
1176 __ Word32Equal(
1177 __ Word32And(value_map_bitfield,
1178 __ Int32Constant(Map::IsUndetectableBit::kMask)),
1179 zero),
1180 done, zero);
1181
1182 // Check if {value} is a HeapNumber.
1183 __ GotoIf(__ WordEqual(value_map, __ HeapNumberMapConstant()),
1184 &if_heapnumber);
1185
1186 // Check if {value} is a BigInt.
1187 Node* value_instance_type =
1188 __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
1189 __ GotoIf(__ Word32Equal(value_instance_type, __ Int32Constant(BIGINT_TYPE)),
1190 &if_bigint);
1191
1192 // All other values that reach here are true.
1193 __ Goto(done, __ Int32Constant(1));
1194
1195 __ Bind(&if_heapnumber);
1196 {
1197 // For HeapNumber {value}, just check that its value is not 0.0, -0.0 or
1198 // NaN.
1199 Node* value_value =
1200 __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
1201 __ Goto(done, __ Float64LessThan(fzero, __ Float64Abs(value_value)));
1202 }
1203
1204 __ Bind(&if_bigint);
1205 {
1206 Node* bitfield = __ LoadField(AccessBuilder::ForBigIntBitfield(), value);
1207 Node* length_is_zero = __ WordEqual(
1208 __ WordAnd(bitfield, __ IntPtrConstant(BigInt::LengthBits::kMask)),
1209 __ IntPtrConstant(0));
1210 __ Goto(done, __ Word32Equal(length_is_zero, zero));
1211 }
1212 }
1213
1214 Node* EffectControlLinearizer::LowerTruncateTaggedToBit(Node* node) {
1215 auto done = __ MakeLabel(MachineRepresentation::kBit);
1216 auto if_smi = __ MakeDeferredLabel();
1217
1218 Node* value = node->InputAt(0);
1219 __ GotoIf(ObjectIsSmi(value), &if_smi);
1220
1221 TruncateTaggedPointerToBit(node, &done);
1222
1223 __ Bind(&if_smi);
1224 {
1225 // If {value} is a Smi, then we only need to check that it's not zero.
1226 __ Goto(&done, __ Word32Equal(__ WordEqual(value, __ IntPtrConstant(0)),
1227 __ Int32Constant(0)));
1228 }
1229
1230 __ Bind(&done);
1231 return done.PhiAt(0);
1232 }
1233
1234 Node* EffectControlLinearizer::LowerTruncateTaggedPointerToBit(Node* node) {
1235 auto done = __ MakeLabel(MachineRepresentation::kBit);
1236
1237 TruncateTaggedPointerToBit(node, &done);
1238
1239 __ Bind(&done);
1240 return done.PhiAt(0);
1241 }
1242
1243 Node* EffectControlLinearizer::LowerChangeTaggedToInt32(Node* node) {
1244 Node* value = node->InputAt(0);
1245
1246 auto if_not_smi = __ MakeDeferredLabel();
1247 auto done = __ MakeLabel(MachineRepresentation::kWord32);
1248
1249 Node* check = ObjectIsSmi(value);
1250 __ GotoIfNot(check, &if_not_smi);
1251 __ Goto(&done, ChangeSmiToInt32(value));
1252
1253 __ Bind(&if_not_smi);
1254 STATIC_ASSERT(HeapNumber::kValueOffset == Oddball::kToNumberRawOffset);
1255 Node* vfalse = __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
1256 vfalse = __ ChangeFloat64ToInt32(vfalse);
1257 __ Goto(&done, vfalse);
1258
1259 __ Bind(&done);
1260 return done.PhiAt(0);
1261 }
1262
1263 Node* EffectControlLinearizer::LowerChangeTaggedToUint32(Node* node) {
1264 Node* value = node->InputAt(0);
1265
1266 auto if_not_smi = __ MakeDeferredLabel();
1267 auto done = __ MakeLabel(MachineRepresentation::kWord32);
1268
1269 Node* check = ObjectIsSmi(value);
1270 __ GotoIfNot(check, &if_not_smi);
1271 __ Goto(&done, ChangeSmiToInt32(value));
1272
1273 __ Bind(&if_not_smi);
1274 STATIC_ASSERT(HeapNumber::kValueOffset == Oddball::kToNumberRawOffset);
1275 Node* vfalse = __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
1276 vfalse = __ ChangeFloat64ToUint32(vfalse);
1277 __ Goto(&done, vfalse);
1278
1279 __ Bind(&done);
1280 return done.PhiAt(0);
1281 }
1282
1283 Node* EffectControlLinearizer::LowerChangeTaggedToFloat64(Node* node) {
1284 return LowerTruncateTaggedToFloat64(node);
1285 }
1286
1287 Node* EffectControlLinearizer::LowerChangeTaggedToTaggedSigned(Node* node) {
1288 Node* value = node->InputAt(0);
1289
1290 auto if_not_smi = __ MakeDeferredLabel();
1291 auto done = __ MakeLabel(MachineRepresentation::kWord32);
1292
1293 Node* check = ObjectIsSmi(value);
1294 __ GotoIfNot(check, &if_not_smi);
1295 __ Goto(&done, value);
1296
1297 __ Bind(&if_not_smi);
1298 STATIC_ASSERT(HeapNumber::kValueOffset == Oddball::kToNumberRawOffset);
1299 Node* vfalse = __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
1300 vfalse = __ ChangeFloat64ToInt32(vfalse);
1301 vfalse = ChangeInt32ToSmi(vfalse);
1302 __ Goto(&done, vfalse);
1303
1304 __ Bind(&done);
1305 return done.PhiAt(0);
1306 }
1307
1308 Node* EffectControlLinearizer::LowerTruncateTaggedToFloat64(Node* node) {
1309 Node* value = node->InputAt(0);
1310
1311 auto if_not_smi = __ MakeDeferredLabel();
1312 auto done = __ MakeLabel(MachineRepresentation::kFloat64);
1313
1314 Node* check = ObjectIsSmi(value);
1315 __ GotoIfNot(check, &if_not_smi);
1316 Node* vtrue = ChangeSmiToInt32(value);
1317 vtrue = __ ChangeInt32ToFloat64(vtrue);
1318 __ Goto(&done, vtrue);
1319
1320 __ Bind(&if_not_smi);
1321 STATIC_ASSERT(HeapNumber::kValueOffset == Oddball::kToNumberRawOffset);
1322 Node* vfalse = __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
1323 __ Goto(&done, vfalse);
1324
1325 __ Bind(&done);
1326 return done.PhiAt(0);
1327 }
1328
1329 Node* EffectControlLinearizer::LowerCheckBounds(Node* node, Node* frame_state) {
1330 Node* index = node->InputAt(0);
1331 Node* limit = node->InputAt(1);
1332 const CheckParameters& params = CheckParametersOf(node->op());
1333
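// A single unsigned comparison covers both ends of the range: a negative
// {index} wraps around to a large unsigned value and fails the check too.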
1334 Node* check = __ Uint32LessThan(index, limit);
1335 __ DeoptimizeIfNot(DeoptimizeReason::kOutOfBounds, params.feedback(), check,
1336 frame_state, IsSafetyCheck::kCriticalSafetyCheck);
1337 return index;
1338 }
1339
1340 Node* EffectControlLinearizer::LowerPoisonIndex(Node* node) {
1341 Node* index = node->InputAt(0);
1342 if (mask_array_index_ == kMaskArrayIndex) {
1343 index = __ Word32PoisonOnSpeculation(index);
1344 }
1345 return index;
1346 }
1347
1348 void EffectControlLinearizer::LowerCheckMaps(Node* node, Node* frame_state) {
1349 CheckMapsParameters const& p = CheckMapsParametersOf(node->op());
1350 Node* value = node->InputAt(0);
1351
1352 ZoneHandleSet<Map> const& maps = p.maps();
1353 size_t const map_count = maps.size();
1354
1355 if (p.flags() & CheckMapsFlag::kTryMigrateInstance) {
1356 auto done = __ MakeDeferredLabel();
1357 auto migrate = __ MakeDeferredLabel();
1358
1359 // Load the current map of the {value}.
1360 Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
1361
1362 // Perform the map checks.
1363 for (size_t i = 0; i < map_count; ++i) {
1364 Node* map = __ HeapConstant(maps[i]);
1365 Node* check = __ WordEqual(value_map, map);
1366 if (i == map_count - 1) {
1367 __ GotoIfNot(check, &migrate);
1368 __ Goto(&done);
1369 } else {
1370 __ GotoIf(check, &done);
1371 }
1372 }
1373
1374 // Perform the (deferred) instance migration.
1375 __ Bind(&migrate);
1376 {
1377 // If the map is not deprecated, the migration attempt does not make sense.
1378 Node* bitfield3 =
1379 __ LoadField(AccessBuilder::ForMapBitField3(), value_map);
1380 Node* if_not_deprecated = __ WordEqual(
1381 __ Word32And(bitfield3,
1382 __ Int32Constant(Map::IsDeprecatedBit::kMask)),
1383 __ Int32Constant(0));
1384 __ DeoptimizeIf(DeoptimizeReason::kWrongMap, p.feedback(),
1385 if_not_deprecated, frame_state);
1386
1387 Operator::Properties properties = Operator::kNoDeopt | Operator::kNoThrow;
1388 Runtime::FunctionId id = Runtime::kTryMigrateInstance;
1389 auto call_descriptor = Linkage::GetRuntimeCallDescriptor(
1390 graph()->zone(), id, 1, properties, CallDescriptor::kNoFlags);
1391 Node* result = __ Call(call_descriptor, __ CEntryStubConstant(1), value,
1392 __ ExternalConstant(ExternalReference::Create(id)),
1393 __ Int32Constant(1), __ NoContextConstant());
1394 Node* check = ObjectIsSmi(result);
1395 __ DeoptimizeIf(DeoptimizeReason::kInstanceMigrationFailed, p.feedback(),
1396 check, frame_state);
1397 }
1398
1399 // Reload the current map of the {value}.
1400 value_map = __ LoadField(AccessBuilder::ForMap(), value);
1401
1402 // Perform the map checks again.
1403 for (size_t i = 0; i < map_count; ++i) {
1404 Node* map = __ HeapConstant(maps[i]);
1405 Node* check = __ WordEqual(value_map, map);
1406 if (i == map_count - 1) {
1407 __ DeoptimizeIfNot(DeoptimizeReason::kWrongMap, p.feedback(), check,
1408 frame_state);
1409 } else {
1410 __ GotoIf(check, &done);
1411 }
1412 }
1413
1414 __ Goto(&done);
1415 __ Bind(&done);
1416 } else {
1417 auto done = __ MakeLabel();
1418
1419 // Load the current map of the {value}.
1420 Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
1421
1422 for (size_t i = 0; i < map_count; ++i) {
1423 Node* map = __ HeapConstant(maps[i]);
1424 Node* check = __ WordEqual(value_map, map);
1425 if (i == map_count - 1) {
1426 __ DeoptimizeIfNot(DeoptimizeReason::kWrongMap, p.feedback(), check,
1427 frame_state);
1428 } else {
1429 __ GotoIf(check, &done);
1430 }
1431 }
1432 __ Goto(&done);
1433 __ Bind(&done);
1434 }
1435 }
1436
1437 Node* EffectControlLinearizer::LowerCompareMaps(Node* node) {
1438 ZoneHandleSet<Map> const& maps = CompareMapsParametersOf(node->op()).maps();
1439 size_t const map_count = maps.size();
1440 Node* value = node->InputAt(0);
1441
1442 auto done = __ MakeLabel(MachineRepresentation::kBit);
1443
1444 // Load the current map of the {value}.
1445 Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
1446
1447 for (size_t i = 0; i < map_count; ++i) {
1448 Node* map = __ HeapConstant(maps[i]);
1449 Node* check = __ WordEqual(value_map, map);
1450 __ GotoIf(check, &done, __ Int32Constant(1));
1451 }
1452 __ Goto(&done, __ Int32Constant(0));
1453
1454 __ Bind(&done);
1455 return done.PhiAt(0);
1456 }
1457
1458 Node* EffectControlLinearizer::LowerCheckNumber(Node* node, Node* frame_state) {
1459 Node* value = node->InputAt(0);
1460 const CheckParameters& params = CheckParametersOf(node->op());
1461
1462 auto if_not_smi = __ MakeDeferredLabel();
1463 auto done = __ MakeLabel();
1464
1465 Node* check0 = ObjectIsSmi(value);
1466 __ GotoIfNot(check0, &if_not_smi);
1467 __ Goto(&done);
1468
1469 __ Bind(&if_not_smi);
1470 Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
1471 Node* check1 = __ WordEqual(value_map, __ HeapNumberMapConstant());
1472 __ DeoptimizeIfNot(DeoptimizeReason::kNotAHeapNumber, params.feedback(),
1473 check1, frame_state);
1474 __ Goto(&done);
1475
1476 __ Bind(&done);
1477 return value;
1478 }
1479
1480 Node* EffectControlLinearizer::LowerCheckReceiver(Node* node,
1481 Node* frame_state) {
1482 Node* value = node->InputAt(0);
1483
1484 Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
1485 Node* value_instance_type =
1486 __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
1487
1488 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
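// JSReceiver instance types form the last range of instance types, so a
// single lower-bound comparison is sufficient here.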
1489 Node* check = __ Uint32LessThanOrEqual(
1490 __ Uint32Constant(FIRST_JS_RECEIVER_TYPE), value_instance_type);
1491 __ DeoptimizeIfNot(DeoptimizeReason::kNotAJavaScriptObject, VectorSlotPair(),
1492 check, frame_state);
1493 return value;
1494 }
1495
1496 Node* EffectControlLinearizer::LowerCheckSymbol(Node* node, Node* frame_state) {
1497 Node* value = node->InputAt(0);
1498
1499 Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
1500
1501 Node* check =
1502 __ WordEqual(value_map, __ HeapConstant(factory()->symbol_map()));
1503 __ DeoptimizeIfNot(DeoptimizeReason::kNotASymbol, VectorSlotPair(), check,
1504 frame_state);
1505 return value;
1506 }
1507
1508 Node* EffectControlLinearizer::LowerCheckString(Node* node, Node* frame_state) {
1509 Node* value = node->InputAt(0);
1510 const CheckParameters& params = CheckParametersOf(node->op());
1511
1512 Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
1513 Node* value_instance_type =
1514 __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
1515
1516 Node* check = __ Uint32LessThan(value_instance_type,
1517 __ Uint32Constant(FIRST_NONSTRING_TYPE));
1518 __ DeoptimizeIfNot(DeoptimizeReason::kNotAString, params.feedback(), check,
1519 frame_state);
1520 return value;
1521 }
1522
1523 Node* EffectControlLinearizer::LowerCheckInternalizedString(Node* node,
1524 Node* frame_state) {
1525 Node* value = node->InputAt(0);
1526
1527 Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
1528 Node* value_instance_type =
1529 __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
1530
1531 Node* check = __ Word32Equal(
1532 __ Word32And(value_instance_type,
1533 __ Int32Constant(kIsNotStringMask | kIsNotInternalizedMask)),
1534 __ Int32Constant(kInternalizedTag));
1535 __ DeoptimizeIfNot(DeoptimizeReason::kWrongInstanceType, VectorSlotPair(),
1536 check, frame_state);
1537
1538 return value;
1539 }
1540
1541 void EffectControlLinearizer::LowerCheckIf(Node* node, Node* frame_state) {
1542 Node* value = node->InputAt(0);
1543 const CheckIfParameters& p = CheckIfParametersOf(node->op());
1544 __ DeoptimizeIfNot(p.reason(), p.feedback(), value, frame_state);
1545 }
1546
1547 Node* EffectControlLinearizer::LowerCheckedInt32Add(Node* node,
1548 Node* frame_state) {
1549 Node* lhs = node->InputAt(0);
1550 Node* rhs = node->InputAt(1);
1551
1552 Node* value = __ Int32AddWithOverflow(lhs, rhs);
1553 Node* check = __ Projection(1, value);
1554 __ DeoptimizeIf(DeoptimizeReason::kOverflow, VectorSlotPair(), check,
1555 frame_state);
1556 return __ Projection(0, value);
1557 }
1558
1559 Node* EffectControlLinearizer::LowerCheckedInt32Sub(Node* node,
1560 Node* frame_state) {
1561 Node* lhs = node->InputAt(0);
1562 Node* rhs = node->InputAt(1);
1563
1564 Node* value = __ Int32SubWithOverflow(lhs, rhs);
1565 Node* check = __ Projection(1, value);
1566 __ DeoptimizeIf(DeoptimizeReason::kOverflow, VectorSlotPair(), check,
1567 frame_state);
1568 return __ Projection(0, value);
1569 }
1570
1571 Node* EffectControlLinearizer::LowerCheckedInt32Div(Node* node,
1572 Node* frame_state) {
1573 Node* lhs = node->InputAt(0);
1574 Node* rhs = node->InputAt(1);
1575
1576 auto if_not_positive = __ MakeDeferredLabel();
1577 auto if_is_minint = __ MakeDeferredLabel();
1578 auto done = __ MakeLabel(MachineRepresentation::kWord32);
1579 auto minint_check_done = __ MakeLabel();
1580
1581 Node* zero = __ Int32Constant(0);
1582
1583 // Check if {rhs} is positive (and not zero).
1584 Node* check0 = __ Int32LessThan(zero, rhs);
1585 __ GotoIfNot(check0, &if_not_positive);
1586
1587 // Fast case, no additional checking required.
1588 __ Goto(&done, __ Int32Div(lhs, rhs));
1589
1590 {
1591 __ Bind(&if_not_positive);
1592
1593 // Check if {rhs} is zero.
1594 Node* check = __ Word32Equal(rhs, zero);
1595 __ DeoptimizeIf(DeoptimizeReason::kDivisionByZero, VectorSlotPair(), check,
1596 frame_state);
1597
1598 // Check if {lhs} is zero, as that would produce minus zero.
1599 check = __ Word32Equal(lhs, zero);
1600 __ DeoptimizeIf(DeoptimizeReason::kMinusZero, VectorSlotPair(), check,
1601 frame_state);
1602
1603 // Check if {lhs} is kMinInt and {rhs} is -1, in which case we'd have
1604 // to return -kMinInt, which is not representable.
1605 Node* minint = __ Int32Constant(std::numeric_limits<int32_t>::min());
1606 Node* check1 = graph()->NewNode(machine()->Word32Equal(), lhs, minint);
1607 __ GotoIf(check1, &if_is_minint);
1608 __ Goto(&minint_check_done);
1609
1610 __ Bind(&if_is_minint);
1611 // Check if {rhs} is -1.
1612 Node* minusone = __ Int32Constant(-1);
1613 Node* is_minus_one = __ Word32Equal(rhs, minusone);
1614 __ DeoptimizeIf(DeoptimizeReason::kOverflow, VectorSlotPair(), is_minus_one,
1615 frame_state);
1616 __ Goto(&minint_check_done);
1617
1618 __ Bind(&minint_check_done);
1619 // Perform the actual integer division.
1620 __ Goto(&done, __ Int32Div(lhs, rhs));
1621 }
1622
1623 __ Bind(&done);
1624 Node* value = done.PhiAt(0);
1625
1626 // Check if the remainder is non-zero.
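// (Int32Div truncates, so if rhs * value does not round-trip to lhs the
// division had a remainder, e.g. 7 / 2, and we have to deoptimize.)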
1627 Node* check = __ Word32Equal(lhs, __ Int32Mul(rhs, value));
1628 __ DeoptimizeIfNot(DeoptimizeReason::kLostPrecision, VectorSlotPair(), check,
1629 frame_state);
1630
1631 return value;
1632 }
1633
1634 Node* EffectControlLinearizer::BuildUint32Mod(Node* lhs, Node* rhs) {
1635 auto if_rhs_power_of_two = __ MakeLabel();
1636 auto done = __ MakeLabel(MachineRepresentation::kWord32);
1637
1638 // Compute the mask for the {rhs}.
1639 Node* one = __ Int32Constant(1);
1640 Node* msk = __ Int32Sub(rhs, one);
1641
1642 // Check if the {rhs} is a power of two.
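// For the non-zero {rhs} values that reach here, rhs & (rhs - 1) == 0 exactly
// when {rhs} is a power of two (e.g. 8 & 7 == 0, while 6 & 5 == 4), since a
// power of two has only a single bit set.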
1643 __ GotoIf(__ Word32Equal(__ Word32And(rhs, msk), __ Int32Constant(0)),
1644 &if_rhs_power_of_two);
1645 {
1646 // The {rhs} is not a power of two, do a generic Uint32Mod.
1647 __ Goto(&done, __ Uint32Mod(lhs, rhs));
1648 }
1649
1650 __ Bind(&if_rhs_power_of_two);
1651 {
1652 // The {rhs} is a power of two, just do a fast bit masking.
1653 __ Goto(&done, __ Word32And(lhs, msk));
1654 }
1655
1656 __ Bind(&done);
1657 return done.PhiAt(0);
1658 }
1659
1660 Node* EffectControlLinearizer::LowerCheckedInt32Mod(Node* node,
1661 Node* frame_state) {
1662 // General case for signed integer modulus, with optimization for (unknown)
1663 // power of 2 right hand side.
1664 //
1665 // if rhs <= 0 then
1666 // rhs = -rhs
1667 // deopt if rhs == 0
1668 // let msk = rhs - 1 in
1669 // if lhs < 0 then
1670 //     let lhs_abs = -lhs in
1671 // let res = if rhs & msk == 0 then
1672 // lhs_abs & msk
1673 // else
1674 // lhs_abs % rhs in
1675 // if lhs < 0 then
1676 // deopt if res == 0
1677 // -res
1678 // else
1679 // res
1680 // else
1681 // if rhs & msk == 0 then
1682 // lhs & msk
1683 // else
1684 // lhs % rhs
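//
// For example, lhs = -7, rhs = 3 takes the negative-lhs path: we compute
// 7 % 3 == 1 and return -1. A zero result there (e.g. for lhs = -6) would
// mean the JavaScript result is -0, which Word32 cannot represent, so we
// deoptimize instead.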
1685 //
1686 Node* lhs = node->InputAt(0);
1687 Node* rhs = node->InputAt(1);
1688
1689 auto if_rhs_not_positive = __ MakeDeferredLabel();
1690 auto if_lhs_negative = __ MakeDeferredLabel();
1691 auto if_rhs_power_of_two = __ MakeLabel();
1692 auto rhs_checked = __ MakeLabel(MachineRepresentation::kWord32);
1693 auto done = __ MakeLabel(MachineRepresentation::kWord32);
1694
1695 Node* zero = __ Int32Constant(0);
1696
1697 // Check if {rhs} is not strictly positive.
1698 Node* check0 = __ Int32LessThanOrEqual(rhs, zero);
1699 __ GotoIf(check0, &if_rhs_not_positive);
1700 __ Goto(&rhs_checked, rhs);
1701
1702 __ Bind(&if_rhs_not_positive);
1703 {
1704 // Negate {rhs}; this might still produce a negative result in the case of
1705 // -2^31, but that is handled safely below.
1706 Node* vtrue0 = __ Int32Sub(zero, rhs);
1707
1708 // Ensure that {rhs} is not zero, otherwise we'd have to return NaN.
1709 __ DeoptimizeIf(DeoptimizeReason::kDivisionByZero, VectorSlotPair(),
1710 __ Word32Equal(vtrue0, zero), frame_state);
1711 __ Goto(&rhs_checked, vtrue0);
1712 }
1713
1714 __ Bind(&rhs_checked);
1715 rhs = rhs_checked.PhiAt(0);
1716
1717 __ GotoIf(__ Int32LessThan(lhs, zero), &if_lhs_negative);
1718 {
1719 // The {lhs} is a non-negative integer.
1720 __ Goto(&done, BuildUint32Mod(lhs, rhs));
1721 }
1722
1723 __ Bind(&if_lhs_negative);
1724 {
1725 // The {lhs} is a negative integer.
1726 Node* res = BuildUint32Mod(__ Int32Sub(zero, lhs), rhs);
1727
1728 // Check if we would have to return -0.
1729 __ DeoptimizeIf(DeoptimizeReason::kMinusZero, VectorSlotPair(),
1730 __ Word32Equal(res, zero), frame_state);
1731 __ Goto(&done, __ Int32Sub(zero, res));
1732 }
1733
1734 __ Bind(&done);
1735 return done.PhiAt(0);
1736 }
1737
1738 Node* EffectControlLinearizer::LowerCheckedUint32Div(Node* node,
1739 Node* frame_state) {
1740 Node* lhs = node->InputAt(0);
1741 Node* rhs = node->InputAt(1);
1742
1743 Node* zero = __ Int32Constant(0);
1744
1745 // Ensure that {rhs} is not zero, otherwise we'd have to return NaN.
1746 Node* check = __ Word32Equal(rhs, zero);
1747 __ DeoptimizeIf(DeoptimizeReason::kDivisionByZero, VectorSlotPair(), check,
1748 frame_state);
1749
1750 // Perform the actual unsigned integer division.
1751 Node* value = __ Uint32Div(lhs, rhs);
1752
1753 // Check if the remainder is non-zero.
1754 check = __ Word32Equal(lhs, __ Int32Mul(rhs, value));
1755 __ DeoptimizeIfNot(DeoptimizeReason::kLostPrecision, VectorSlotPair(), check,
1756 frame_state);
1757 return value;
1758 }
1759
1760 Node* EffectControlLinearizer::LowerCheckedUint32Mod(Node* node,
1761 Node* frame_state) {
1762 Node* lhs = node->InputAt(0);
1763 Node* rhs = node->InputAt(1);
1764
1765 Node* zero = __ Int32Constant(0);
1766
1767 // Ensure that {rhs} is not zero, otherwise we'd have to return NaN.
1768 Node* check = __ Word32Equal(rhs, zero);
1769 __ DeoptimizeIf(DeoptimizeReason::kDivisionByZero, VectorSlotPair(), check,
1770 frame_state);
1771
1772 // Perform the actual unsigned integer modulus.
1773 return BuildUint32Mod(lhs, rhs);
1774 }
1775
1776 Node* EffectControlLinearizer::LowerCheckedInt32Mul(Node* node,
1777 Node* frame_state) {
1778 CheckForMinusZeroMode mode = CheckMinusZeroModeOf(node->op());
1779 Node* lhs = node->InputAt(0);
1780 Node* rhs = node->InputAt(1);
1781
1782 Node* projection = __ Int32MulWithOverflow(lhs, rhs);
1783 Node* check = __ Projection(1, projection);
1784 __ DeoptimizeIf(DeoptimizeReason::kOverflow, VectorSlotPair(), check,
1785 frame_state);
1786
1787 Node* value = __ Projection(0, projection);
1788
1789 if (mode == CheckForMinusZeroMode::kCheckForMinusZero) {
1790 auto if_zero = __ MakeDeferredLabel();
1791 auto check_done = __ MakeLabel();
1792 Node* zero = __ Int32Constant(0);
1793 Node* check_zero = __ Word32Equal(value, zero);
1794 __ GotoIf(check_zero, &if_zero);
1795 __ Goto(&check_done);
1796
1797 __ Bind(&if_zero);
1798 // We may need to return negative zero.
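// A zero product stands for -0 if either input was negative, which is the
// case exactly when the sign bit of (lhs | rhs) is set.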
1799 Node* check_or = __ Int32LessThan(__ Word32Or(lhs, rhs), zero);
1800 __ DeoptimizeIf(DeoptimizeReason::kMinusZero, VectorSlotPair(), check_or,
1801 frame_state);
1802 __ Goto(&check_done);
1803
1804 __ Bind(&check_done);
1805 }
1806
1807 return value;
1808 }
1809
1810 Node* EffectControlLinearizer::LowerCheckedInt32ToTaggedSigned(
1811 Node* node, Node* frame_state) {
1812 DCHECK(SmiValuesAre31Bits());
1813 Node* value = node->InputAt(0);
1814 const CheckParameters& params = CheckParametersOf(node->op());
1815
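// With 31-bit Smis, tagging a value is a left shift by one; adding {value}
// to itself performs that shift, and the overflow flag tells us whether the
// value is outside the Smi range.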
1816 Node* add = __ Int32AddWithOverflow(value, value);
1817 Node* check = __ Projection(1, add);
1818 __ DeoptimizeIf(DeoptimizeReason::kOverflow, params.feedback(), check,
1819 frame_state);
1820 Node* result = __ Projection(0, add);
1821 result = ChangeInt32ToIntPtr(result);
1822 return result;
1823 }
1824
1825 Node* EffectControlLinearizer::LowerCheckedUint32ToInt32(Node* node,
1826 Node* frame_state) {
1827 Node* value = node->InputAt(0);
1828 const CheckParameters& params = CheckParametersOf(node->op());
1829 Node* unsafe = __ Int32LessThan(value, __ Int32Constant(0));
1830 __ DeoptimizeIf(DeoptimizeReason::kLostPrecision, params.feedback(), unsafe,
1831 frame_state);
1832 return value;
1833 }
1834
1835 Node* EffectControlLinearizer::LowerCheckedUint32ToTaggedSigned(
1836 Node* node, Node* frame_state) {
1837 Node* value = node->InputAt(0);
1838 const CheckParameters& params = CheckParametersOf(node->op());
1839 Node* check = __ Uint32LessThanOrEqual(value, SmiMaxValueConstant());
1840 __ DeoptimizeIfNot(DeoptimizeReason::kLostPrecision, params.feedback(), check,
1841 frame_state);
1842 return ChangeUint32ToSmi(value);
1843 }
1844
1845 Node* EffectControlLinearizer::BuildCheckedFloat64ToInt32(
1846 CheckForMinusZeroMode mode, const VectorSlotPair& feedback, Node* value,
1847 Node* frame_state) {
1848 Node* value32 = __ RoundFloat64ToInt32(value);
1849 Node* check_same = __ Float64Equal(value, __ ChangeInt32ToFloat64(value32));
1850 __ DeoptimizeIfNot(DeoptimizeReason::kLostPrecisionOrNaN, feedback,
1851 check_same, frame_state);
1852
1853 if (mode == CheckForMinusZeroMode::kCheckForMinusZero) {
1854 // Check if {value} is -0.
1855 auto if_zero = __ MakeDeferredLabel();
1856 auto check_done = __ MakeLabel();
1857
1858 Node* check_zero = __ Word32Equal(value32, __ Int32Constant(0));
1859 __ GotoIf(check_zero, &if_zero);
1860 __ Goto(&check_done);
1861
1862 __ Bind(&if_zero);
1863 // In case of 0, we need to check the high bits for the IEEE -0 pattern.
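// (-0.0 has only the sign bit set, i.e. a high word of 0x80000000, which is
// negative as an Int32; the high word of +0.0 is zero.)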
1864 Node* check_negative = __ Int32LessThan(__ Float64ExtractHighWord32(value),
1865 __ Int32Constant(0));
1866 __ DeoptimizeIf(DeoptimizeReason::kMinusZero, feedback, check_negative,
1867 frame_state);
1868 __ Goto(&check_done);
1869
1870 __ Bind(&check_done);
1871 }
1872 return value32;
1873 }
1874
1875 Node* EffectControlLinearizer::LowerCheckedFloat64ToInt32(Node* node,
1876 Node* frame_state) {
1877 const CheckMinusZeroParameters& params =
1878 CheckMinusZeroParametersOf(node->op());
1879 Node* value = node->InputAt(0);
1880 return BuildCheckedFloat64ToInt32(params.mode(), params.feedback(), value,
1881 frame_state);
1882 }
1883
1884 Node* EffectControlLinearizer::LowerCheckedTaggedSignedToInt32(
1885 Node* node, Node* frame_state) {
1886 Node* value = node->InputAt(0);
1887 const CheckParameters& params = CheckParametersOf(node->op());
1888 Node* check = ObjectIsSmi(value);
1889 __ DeoptimizeIfNot(DeoptimizeReason::kNotASmi, params.feedback(), check,
1890 frame_state);
1891 return ChangeSmiToInt32(value);
1892 }
1893
1894 Node* EffectControlLinearizer::LowerCheckedTaggedToInt32(Node* node,
1895 Node* frame_state) {
1896 const CheckMinusZeroParameters& params =
1897 CheckMinusZeroParametersOf(node->op());
1898 Node* value = node->InputAt(0);
1899
1900 auto if_not_smi = __ MakeDeferredLabel();
1901 auto done = __ MakeLabel(MachineRepresentation::kWord32);
1902
1903 Node* check = ObjectIsSmi(value);
1904 __ GotoIfNot(check, &if_not_smi);
1905 // In the Smi case, just convert to int32.
1906 __ Goto(&done, ChangeSmiToInt32(value));
1907
1908 // In the non-Smi case, check that {value} is a HeapNumber, then load its
1909 // value and convert it to int32.
1910 __ Bind(&if_not_smi);
1911 Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
1912 Node* check_map = __ WordEqual(value_map, __ HeapNumberMapConstant());
1913 __ DeoptimizeIfNot(DeoptimizeReason::kNotAHeapNumber, params.feedback(),
1914 check_map, frame_state);
1915 Node* vfalse = __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
1916 vfalse = BuildCheckedFloat64ToInt32(params.mode(), params.feedback(), vfalse,
1917 frame_state);
1918 __ Goto(&done, vfalse);
1919
1920 __ Bind(&done);
1921 return done.PhiAt(0);
1922 }
1923
1924 Node* EffectControlLinearizer::BuildCheckedHeapNumberOrOddballToFloat64(
1925 CheckTaggedInputMode mode, const VectorSlotPair& feedback, Node* value,
1926 Node* frame_state) {
1927 Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
1928 Node* check_number = __ WordEqual(value_map, __ HeapNumberMapConstant());
1929 switch (mode) {
1930 case CheckTaggedInputMode::kNumber: {
1931 __ DeoptimizeIfNot(DeoptimizeReason::kNotAHeapNumber, feedback,
1932 check_number, frame_state);
1933 break;
1934 }
1935 case CheckTaggedInputMode::kNumberOrOddball: {
1936 auto check_done = __ MakeLabel();
1937
1938 __ GotoIf(check_number, &check_done);
1939 // Oddballs also contain the numeric value, so let us just check that
1940 // we have an oddball here.
1941 Node* instance_type =
1942 __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
1943 Node* check_oddball =
1944 __ Word32Equal(instance_type, __ Int32Constant(ODDBALL_TYPE));
1945 __ DeoptimizeIfNot(DeoptimizeReason::kNotANumberOrOddball, feedback,
1946 check_oddball, frame_state);
1947 STATIC_ASSERT(HeapNumber::kValueOffset == Oddball::kToNumberRawOffset);
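// Since the offsets match, the ForHeapNumberValue load below also reads the
// oddball's cached to-number value.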
1948 __ Goto(&check_done);
1949
1950 __ Bind(&check_done);
1951 break;
1952 }
1953 }
1954 return __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
1955 }
1956
1957 Node* EffectControlLinearizer::LowerCheckedTaggedToFloat64(Node* node,
1958 Node* frame_state) {
1959 CheckTaggedInputParameters const& p =
1960 CheckTaggedInputParametersOf(node->op());
1961 Node* value = node->InputAt(0);
1962
1963 auto if_smi = __ MakeLabel();
1964 auto done = __ MakeLabel(MachineRepresentation::kFloat64);
1965
1966 Node* check = ObjectIsSmi(value);
1967 __ GotoIf(check, &if_smi);
1968
1969 // In the Smi case, just convert to int32 and then float64.
1970 // Otherwise, check heap numberness and load the number.
1971 Node* number = BuildCheckedHeapNumberOrOddballToFloat64(
1972 p.mode(), p.feedback(), value, frame_state);
1973 __ Goto(&done, number);
1974
1975 __ Bind(&if_smi);
1976 Node* from_smi = ChangeSmiToInt32(value);
1977 from_smi = __ ChangeInt32ToFloat64(from_smi);
1978 __ Goto(&done, from_smi);
1979
1980 __ Bind(&done);
1981 return done.PhiAt(0);
1982 }
1983
1984 Node* EffectControlLinearizer::LowerCheckedTaggedToTaggedSigned(
1985 Node* node, Node* frame_state) {
1986 Node* value = node->InputAt(0);
1987 const CheckParameters& params = CheckParametersOf(node->op());
1988
1989 Node* check = ObjectIsSmi(value);
1990 __ DeoptimizeIfNot(DeoptimizeReason::kNotASmi, params.feedback(), check,
1991 frame_state);
1992
1993 return value;
1994 }
1995
1996 Node* EffectControlLinearizer::LowerCheckedTaggedToTaggedPointer(
1997 Node* node, Node* frame_state) {
1998 Node* value = node->InputAt(0);
1999 const CheckParameters& params = CheckParametersOf(node->op());
2000
2001 Node* check = ObjectIsSmi(value);
2002 __ DeoptimizeIf(DeoptimizeReason::kSmi, params.feedback(), check,
2003 frame_state);
2004 return value;
2005 }
2006
2007 Node* EffectControlLinearizer::LowerTruncateTaggedToWord32(Node* node) {
2008 Node* value = node->InputAt(0);
2009
2010 auto if_not_smi = __ MakeDeferredLabel();
2011 auto done = __ MakeLabel(MachineRepresentation::kWord32);
2012
2013 Node* check = ObjectIsSmi(value);
2014 __ GotoIfNot(check, &if_not_smi);
2015 __ Goto(&done, ChangeSmiToInt32(value));
2016
2017 __ Bind(&if_not_smi);
2018 STATIC_ASSERT(HeapNumber::kValueOffset == Oddball::kToNumberRawOffset);
2019 Node* vfalse = __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
2020 vfalse = __ TruncateFloat64ToWord32(vfalse);
2021 __ Goto(&done, vfalse);
2022
2023 __ Bind(&done);
2024 return done.PhiAt(0);
2025 }
2026
2027 Node* EffectControlLinearizer::LowerCheckedTruncateTaggedToWord32(
2028 Node* node, Node* frame_state) {
2029 const CheckTaggedInputParameters& params =
2030 CheckTaggedInputParametersOf(node->op());
2031 Node* value = node->InputAt(0);
2032
2033 auto if_not_smi = __ MakeLabel();
2034 auto done = __ MakeLabel(MachineRepresentation::kWord32);
2035
2036 Node* check = ObjectIsSmi(value);
2037 __ GotoIfNot(check, &if_not_smi);
2038 // In the Smi case, just convert to int32.
2039 __ Goto(&done, ChangeSmiToInt32(value));
2040
2041 // Otherwise, check that it's a heap number or oddball and truncate the value
2042 // to int32.
2043 __ Bind(&if_not_smi);
2044 Node* number = BuildCheckedHeapNumberOrOddballToFloat64(
2045 params.mode(), params.feedback(), value, frame_state);
2046 number = __ TruncateFloat64ToWord32(number);
2047 __ Goto(&done, number);
2048
2049 __ Bind(&done);
2050 return done.PhiAt(0);
2051 }
2052
2053 Node* EffectControlLinearizer::LowerAllocate(Node* node) {
2054 Node* size = node->InputAt(0);
2055 PretenureFlag pretenure = PretenureFlagOf(node->op());
2056 Node* new_node = __ Allocate(pretenure, size);
2057 return new_node;
2058 }
2059
2060 Node* EffectControlLinearizer::LowerNumberToString(Node* node) {
2061 Node* argument = node->InputAt(0);
2062
2063 Callable const callable =
2064 Builtins::CallableFor(isolate(), Builtins::kNumberToString);
2065 Operator::Properties properties = Operator::kEliminatable;
2066 CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
2067 auto call_descriptor = Linkage::GetStubCallDescriptor(
2068 graph()->zone(), callable.descriptor(), 0, flags, properties);
2069 return __ Call(call_descriptor, __ HeapConstant(callable.code()), argument,
2070 __ NoContextConstant());
2071 }
2072
2073 Node* EffectControlLinearizer::LowerObjectIsArrayBufferView(Node* node) {
2074 Node* value = node->InputAt(0);
2075
2076 auto if_smi = __ MakeDeferredLabel();
2077 auto done = __ MakeLabel(MachineRepresentation::kBit);
2078
2079 Node* check = ObjectIsSmi(value);
2080 __ GotoIf(check, &if_smi);
2081
2082 Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
2083 Node* value_instance_type =
2084 __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
2085 STATIC_ASSERT(JS_TYPED_ARRAY_TYPE + 1 == JS_DATA_VIEW_TYPE);
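// Since the two instance types are adjacent, a single unsigned range check
// of (type - JS_TYPED_ARRAY_TYPE) < 2 covers both JSTypedArray and
// JSDataView.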
2086 Node* vfalse = __ Uint32LessThan(
2087 __ Int32Sub(value_instance_type, __ Int32Constant(JS_TYPED_ARRAY_TYPE)),
2088 __ Int32Constant(2));
2089 __ Goto(&done, vfalse);
2090
2091 __ Bind(&if_smi);
2092 __ Goto(&done, __ Int32Constant(0));
2093
2094 __ Bind(&done);
2095 return done.PhiAt(0);
2096 }
2097
2098 Node* EffectControlLinearizer::LowerObjectIsBigInt(Node* node) {
2099 Node* value = node->InputAt(0);
2100
2101 auto if_smi = __ MakeDeferredLabel();
2102 auto done = __ MakeLabel(MachineRepresentation::kBit);
2103
2104 Node* check = ObjectIsSmi(value);
2105 __ GotoIf(check, &if_smi);
2106 Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
2107 Node* value_instance_type =
2108 __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
2109 Node* vfalse =
2110 __ Word32Equal(value_instance_type, __ Uint32Constant(BIGINT_TYPE));
2111 __ Goto(&done, vfalse);
2112
2113 __ Bind(&if_smi);
2114 __ Goto(&done, __ Int32Constant(0));
2115
2116 __ Bind(&done);
2117 return done.PhiAt(0);
2118 }
2119
2120 Node* EffectControlLinearizer::LowerObjectIsCallable(Node* node) {
2121 Node* value = node->InputAt(0);
2122
2123 auto if_smi = __ MakeDeferredLabel();
2124 auto done = __ MakeLabel(MachineRepresentation::kBit);
2125
2126 Node* check = ObjectIsSmi(value);
2127 __ GotoIf(check, &if_smi);
2128
2129 Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
2130 Node* value_bit_field =
2131 __ LoadField(AccessBuilder::ForMapBitField(), value_map);
2132 Node* vfalse =
2133 __ Word32Equal(__ Int32Constant(Map::IsCallableBit::kMask),
2134 __ Word32And(value_bit_field,
2135 __ Int32Constant(Map::IsCallableBit::kMask)));
2136 __ Goto(&done, vfalse);
2137
2138 __ Bind(&if_smi);
2139 __ Goto(&done, __ Int32Constant(0));
2140
2141 __ Bind(&done);
2142 return done.PhiAt(0);
2143 }
2144
2145 Node* EffectControlLinearizer::LowerObjectIsConstructor(Node* node) {
2146 Node* value = node->InputAt(0);
2147
2148 auto if_smi = __ MakeDeferredLabel();
2149 auto done = __ MakeLabel(MachineRepresentation::kBit);
2150
2151 Node* check = ObjectIsSmi(value);
2152 __ GotoIf(check, &if_smi);
2153
2154 Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
2155 Node* value_bit_field =
2156 __ LoadField(AccessBuilder::ForMapBitField(), value_map);
2157 Node* vfalse = __ Word32Equal(
2158 __ Int32Constant(Map::IsConstructorBit::kMask),
2159 __ Word32And(value_bit_field,
2160 __ Int32Constant(Map::IsConstructorBit::kMask)));
2161 __ Goto(&done, vfalse);
2162
2163 __ Bind(&if_smi);
2164 __ Goto(&done, __ Int32Constant(0));
2165
2166 __ Bind(&done);
2167 return done.PhiAt(0);
2168 }
2169
2170 Node* EffectControlLinearizer::LowerObjectIsDetectableCallable(Node* node) {
2171 Node* value = node->InputAt(0);
2172
2173 auto if_smi = __ MakeDeferredLabel();
2174 auto done = __ MakeLabel(MachineRepresentation::kBit);
2175
2176 Node* check = ObjectIsSmi(value);
2177 __ GotoIf(check, &if_smi);
2178
2179 Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
2180 Node* value_bit_field =
2181 __ LoadField(AccessBuilder::ForMapBitField(), value_map);
2182 Node* vfalse = __ Word32Equal(
2183 __ Int32Constant(Map::IsCallableBit::kMask),
2184 __ Word32And(value_bit_field,
2185 __ Int32Constant((Map::IsCallableBit::kMask) |
2186 (Map::IsUndetectableBit::kMask))));
2187 __ Goto(&done, vfalse);
2188
2189 __ Bind(&if_smi);
2190 __ Goto(&done, __ Int32Constant(0));
2191
2192 __ Bind(&done);
2193 return done.PhiAt(0);
2194 }
2195
2196 Node* EffectControlLinearizer::LowerNumberIsFloat64Hole(Node* node) {
2197 Node* value = node->InputAt(0);
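// The hole is encoded as a NaN with a distinguished upper word
// (kHoleNanUpper32), so comparing the high word of {value} suffices.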
2198 Node* check = __ Word32Equal(__ Float64ExtractHighWord32(value),
2199 __ Int32Constant(kHoleNanUpper32));
2200 return check;
2201 }
2202
2203 Node* EffectControlLinearizer::LowerNumberIsFinite(Node* node) {
2204 Node* number = node->InputAt(0);
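// For a finite {number}, number - number is 0.0; for NaN and +/-Infinity it
// is NaN, which fails the self-comparison below.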
2205 Node* diff = __ Float64Sub(number, number);
2206 Node* check = __ Float64Equal(diff, diff);
2207 return check;
2208 }
2209
2210 Node* EffectControlLinearizer::LowerObjectIsFiniteNumber(Node* node) {
2211 Node* object = node->InputAt(0);
2212 Node* zero = __ Int32Constant(0);
2213 Node* one = __ Int32Constant(1);
2214
2215 auto done = __ MakeLabel(MachineRepresentation::kBit);
2216
2217 // Check if {object} is a Smi.
2218 __ GotoIf(ObjectIsSmi(object), &done, one);
2219
2220 // Check if {object} is a HeapNumber.
2221 Node* value_map = __ LoadField(AccessBuilder::ForMap(), object);
2222 __ GotoIfNot(__ WordEqual(value_map, __ HeapNumberMapConstant()), &done,
2223 zero);
2224
2225 // {object} is a HeapNumber.
2226 Node* value = __ LoadField(AccessBuilder::ForHeapNumberValue(), object);
2227 Node* diff = __ Float64Sub(value, value);
2228 Node* check = __ Float64Equal(diff, diff);
2229 __ Goto(&done, check);
2230
2231 __ Bind(&done);
2232 return done.PhiAt(0);
2233 }
2234
2235 Node* EffectControlLinearizer::LowerNumberIsInteger(Node* node) {
2236 Node* number = node->InputAt(0);
2237 Node* trunc = BuildFloat64RoundTruncate(number);
2238 Node* diff = __ Float64Sub(number, trunc);
2239 Node* check = __ Float64Equal(diff, __ Float64Constant(0));
2240 return check;
2241 }
2242
2243 Node* EffectControlLinearizer::LowerObjectIsInteger(Node* node) {
2244 Node* object = node->InputAt(0);
2245 Node* zero = __ Int32Constant(0);
2246 Node* one = __ Int32Constant(1);
2247
2248 auto done = __ MakeLabel(MachineRepresentation::kBit);
2249
2250 // Check if {object} is a Smi.
2251 __ GotoIf(ObjectIsSmi(object), &done, one);
2252
2253 // Check if {object} is a HeapNumber.
2254 Node* value_map = __ LoadField(AccessBuilder::ForMap(), object);
2255 __ GotoIfNot(__ WordEqual(value_map, __ HeapNumberMapConstant()), &done,
2256 zero);
2257
2258 // {object} is a HeapNumber.
2259 Node* value = __ LoadField(AccessBuilder::ForHeapNumberValue(), object);
2260 Node* trunc = BuildFloat64RoundTruncate(value);
2261 Node* diff = __ Float64Sub(value, trunc);
2262 Node* check = __ Float64Equal(diff, __ Float64Constant(0));
2263 __ Goto(&done, check);
2264
2265 __ Bind(&done);
2266 return done.PhiAt(0);
2267 }
2268
2269 Node* EffectControlLinearizer::LowerNumberIsSafeInteger(Node* node) {
2270 Node* number = node->InputAt(0);
2271 Node* zero = __ Int32Constant(0);
2272 auto done = __ MakeLabel(MachineRepresentation::kBit);
2273
2274 Node* trunc = BuildFloat64RoundTruncate(number);
2275 Node* diff = __ Float64Sub(number, trunc);
2276 Node* check = __ Float64Equal(diff, __ Float64Constant(0));
2277 __ GotoIfNot(check, &done, zero);
2278 Node* in_range = __ Float64LessThanOrEqual(
2279 __ Float64Abs(trunc), __ Float64Constant(kMaxSafeInteger));
2280 __ Goto(&done, in_range);
2281
2282 __ Bind(&done);
2283 return done.PhiAt(0);
2284 }
2285
2286 Node* EffectControlLinearizer::LowerObjectIsSafeInteger(Node* node) {
2287 Node* object = node->InputAt(0);
2288 Node* zero = __ Int32Constant(0);
2289 Node* one = __ Int32Constant(1);
2290
2291 auto done = __ MakeLabel(MachineRepresentation::kBit);
2292
2293 // Check if {object} is a Smi.
2294 __ GotoIf(ObjectIsSmi(object), &done, one);
2295
2296 // Check if {object} is a HeapNumber.
2297 Node* value_map = __ LoadField(AccessBuilder::ForMap(), object);
2298 __ GotoIfNot(__ WordEqual(value_map, __ HeapNumberMapConstant()), &done,
2299 zero);
2300
2301 // {object} is a HeapNumber.
2302 Node* value = __ LoadField(AccessBuilder::ForHeapNumberValue(), object);
2303 Node* trunc = BuildFloat64RoundTruncate(value);
2304 Node* diff = __ Float64Sub(value, trunc);
2305 Node* check = __ Float64Equal(diff, __ Float64Constant(0));
2306 __ GotoIfNot(check, &done, zero);
2307 Node* in_range = __ Float64LessThanOrEqual(
2308 __ Float64Abs(trunc), __ Float64Constant(kMaxSafeInteger));
2309 __ Goto(&done, in_range);
2310
2311 __ Bind(&done);
2312 return done.PhiAt(0);
2313 }
2314
2315 Node* EffectControlLinearizer::LowerObjectIsMinusZero(Node* node) {
2316 Node* value = node->InputAt(0);
2317 Node* zero = __ Int32Constant(0);
2318
2319 auto done = __ MakeLabel(MachineRepresentation::kBit);
2320
2321 // Check if {value} is a Smi.
2322 __ GotoIf(ObjectIsSmi(value), &done, zero);
2323
2324 // Check if {value} is a HeapNumber.
2325 Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
2326 __ GotoIfNot(__ WordEqual(value_map, __ HeapNumberMapConstant()), &done,
2327 zero);
2328
2329 // Check if {value} contains -0.
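// Dividing 1.0 by the value distinguishes -0 from +0: 1.0 / -0.0 is
// -Infinity, while 1.0 / +0.0 is +Infinity.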
2330 Node* value_value = __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
2331 __ Goto(&done,
2332 __ Float64Equal(
2333 __ Float64Div(__ Float64Constant(1.0), value_value),
2334 __ Float64Constant(-std::numeric_limits<double>::infinity())));
2335
2336 __ Bind(&done);
2337 return done.PhiAt(0);
2338 }
2339
2340 Node* EffectControlLinearizer::LowerObjectIsNaN(Node* node) {
2341 Node* value = node->InputAt(0);
2342 Node* zero = __ Int32Constant(0);
2343
2344 auto done = __ MakeLabel(MachineRepresentation::kBit);
2345
2346 // Check if {value} is a Smi.
2347 __ GotoIf(ObjectIsSmi(value), &done, zero);
2348
2349 // Check if {value} is a HeapNumber.
2350 Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
2351 __ GotoIfNot(__ WordEqual(value_map, __ HeapNumberMapConstant()), &done,
2352 zero);
2353
2354 // Check if {value} contains a NaN.
2355 Node* value_value = __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
2356 __ Goto(&done,
2357 __ Word32Equal(__ Float64Equal(value_value, value_value), zero));
2358
2359 __ Bind(&done);
2360 return done.PhiAt(0);
2361 }
2362
2363 Node* EffectControlLinearizer::LowerNumberIsNaN(Node* node) {
2364 Node* number = node->InputAt(0);
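// NaN is the only float64 value that does not compare equal to itself, so
// the self-comparison below yields 0 exactly when {number} is NaN.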
2365 Node* diff = __ Float64Equal(number, number);
2366 Node* check = __ Word32Equal(diff, __ Int32Constant(0));
2367 return check;
2368 }
2369
2370 Node* EffectControlLinearizer::LowerObjectIsNonCallable(Node* node) {
2371 Node* value = node->InputAt(0);
2372
2373 auto if_primitive = __ MakeDeferredLabel();
2374 auto done = __ MakeLabel(MachineRepresentation::kBit);
2375
2376 Node* check0 = ObjectIsSmi(value);
2377 __ GotoIf(check0, &if_primitive);
2378
2379 Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
2380 Node* value_instance_type =
2381 __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
2382 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
2383 Node* check1 = __ Uint32LessThanOrEqual(
2384 __ Uint32Constant(FIRST_JS_RECEIVER_TYPE), value_instance_type);
2385 __ GotoIfNot(check1, &if_primitive);
2386
2387 Node* value_bit_field =
2388 __ LoadField(AccessBuilder::ForMapBitField(), value_map);
2389 Node* check2 =
2390 __ Word32Equal(__ Int32Constant(0),
2391 __ Word32And(value_bit_field,
2392 __ Int32Constant(Map::IsCallableBit::kMask)));
2393 __ Goto(&done, check2);
2394
2395 __ Bind(&if_primitive);
2396 __ Goto(&done, __ Int32Constant(0));
2397
2398 __ Bind(&done);
2399 return done.PhiAt(0);
2400 }
2401
2402 Node* EffectControlLinearizer::LowerObjectIsNumber(Node* node) {
2403 Node* value = node->InputAt(0);
2404
2405 auto if_smi = __ MakeLabel();
2406 auto done = __ MakeLabel(MachineRepresentation::kBit);
2407
2408 __ GotoIf(ObjectIsSmi(value), &if_smi);
2409 Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
2410 __ Goto(&done, __ WordEqual(value_map, __ HeapNumberMapConstant()));
2411
2412 __ Bind(&if_smi);
2413 __ Goto(&done, __ Int32Constant(1));
2414
2415 __ Bind(&done);
2416 return done.PhiAt(0);
2417 }
2418
2419 Node* EffectControlLinearizer::LowerObjectIsReceiver(Node* node) {
2420 Node* value = node->InputAt(0);
2421
2422 auto if_smi = __ MakeDeferredLabel();
2423 auto done = __ MakeLabel(MachineRepresentation::kBit);
2424
2425 __ GotoIf(ObjectIsSmi(value), &if_smi);
2426
2427 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
2428 Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
2429 Node* value_instance_type =
2430 __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
2431 Node* result = __ Uint32LessThanOrEqual(
2432 __ Uint32Constant(FIRST_JS_RECEIVER_TYPE), value_instance_type);
2433 __ Goto(&done, result);
2434
2435 __ Bind(&if_smi);
2436 __ Goto(&done, __ Int32Constant(0));
2437
2438 __ Bind(&done);
2439 return done.PhiAt(0);
2440 }
2441
2442 Node* EffectControlLinearizer::LowerObjectIsSmi(Node* node) {
2443 Node* value = node->InputAt(0);
2444 return ObjectIsSmi(value);
2445 }
2446
2447 Node* EffectControlLinearizer::LowerObjectIsString(Node* node) {
2448 Node* value = node->InputAt(0);
2449
2450 auto if_smi = __ MakeDeferredLabel();
2451 auto done = __ MakeLabel(MachineRepresentation::kBit);
2452
2453 Node* check = ObjectIsSmi(value);
2454 __ GotoIf(check, &if_smi);
2455 Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
2456 Node* value_instance_type =
2457 __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
2458 Node* vfalse = __ Uint32LessThan(value_instance_type,
2459 __ Uint32Constant(FIRST_NONSTRING_TYPE));
2460 __ Goto(&done, vfalse);
2461
2462 __ Bind(&if_smi);
2463 __ Goto(&done, __ Int32Constant(0));
2464
2465 __ Bind(&done);
2466 return done.PhiAt(0);
2467 }
2468
2469 Node* EffectControlLinearizer::LowerObjectIsSymbol(Node* node) {
2470 Node* value = node->InputAt(0);
2471
2472 auto if_smi = __ MakeDeferredLabel();
2473 auto done = __ MakeLabel(MachineRepresentation::kBit);
2474
2475 Node* check = ObjectIsSmi(value);
2476 __ GotoIf(check, &if_smi);
2477 Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
2478 Node* value_instance_type =
2479 __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
2480 Node* vfalse =
2481 __ Word32Equal(value_instance_type, __ Uint32Constant(SYMBOL_TYPE));
2482 __ Goto(&done, vfalse);
2483
2484 __ Bind(&if_smi);
2485 __ Goto(&done, __ Int32Constant(0));
2486
2487 __ Bind(&done);
2488 return done.PhiAt(0);
2489 }
2490
2491 Node* EffectControlLinearizer::LowerObjectIsUndetectable(Node* node) {
2492 Node* value = node->InputAt(0);
2493
2494 auto if_smi = __ MakeDeferredLabel();
2495 auto done = __ MakeLabel(MachineRepresentation::kBit);
2496
2497 Node* check = ObjectIsSmi(value);
2498 __ GotoIf(check, &if_smi);
2499
2500 Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
2501 Node* value_bit_field =
2502 __ LoadField(AccessBuilder::ForMapBitField(), value_map);
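// The two Word32Equal operations below normalize the masked bit field to a
// 0/1 bit value that is 1 iff the IsUndetectableBit is set.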
2503 Node* vfalse = __ Word32Equal(
2504 __ Word32Equal(
2505 __ Int32Constant(0),
2506 __ Word32And(value_bit_field,
2507 __ Int32Constant(Map::IsUndetectableBit::kMask))),
2508 __ Int32Constant(0));
2509 __ Goto(&done, vfalse);
2510
2511 __ Bind(&if_smi);
2512 __ Goto(&done, __ Int32Constant(0));
2513
2514 __ Bind(&done);
2515 return done.PhiAt(0);
2516 }
2517
2518 Node* EffectControlLinearizer::LowerTypeOf(Node* node) {
2519 Node* obj = node->InputAt(0);
2520 Callable const callable = Builtins::CallableFor(isolate(), Builtins::kTypeof);
2521 Operator::Properties const properties = Operator::kEliminatable;
2522 CallDescriptor::Flags const flags = CallDescriptor::kNoAllocate;
2523 auto call_descriptor = Linkage::GetStubCallDescriptor(
2524 graph()->zone(), callable.descriptor(), 0, flags, properties);
2525 return __ Call(call_descriptor, __ HeapConstant(callable.code()), obj,
2526 __ NoContextConstant());
2527 }
2528
2529 Node* EffectControlLinearizer::LowerToBoolean(Node* node) {
2530 Node* obj = node->InputAt(0);
2531 Callable const callable =
2532 Builtins::CallableFor(isolate(), Builtins::kToBoolean);
2533 Operator::Properties const properties = Operator::kEliminatable;
2534 CallDescriptor::Flags const flags = CallDescriptor::kNoAllocate;
2535 auto call_descriptor = Linkage::GetStubCallDescriptor(
2536 graph()->zone(), callable.descriptor(), 0, flags, properties);
2537 return __ Call(call_descriptor, __ HeapConstant(callable.code()), obj,
2538 __ NoContextConstant());
2539 }
2540
2541 Node* EffectControlLinearizer::LowerArgumentsLength(Node* node) {
2542 Node* arguments_frame = NodeProperties::GetValueInput(node, 0);
2543 int formal_parameter_count = FormalParameterCountOf(node->op());
2544 bool is_rest_length = IsRestLengthOf(node->op());
2545 DCHECK_LE(0, formal_parameter_count);
2546
2547 if (is_rest_length) {
2548 // The ArgumentsLength node is computing the number of rest parameters,
2549 // which is max(0, actual_parameter_count - formal_parameter_count).
2550 // We have to distinguish the case when there is an arguments adaptor frame
2551 // (i.e., arguments_frame != LoadFramePointer()).
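// For example, with 5 actual arguments and 3 formal parameters the rest
// length is 2; without an adaptor frame actual equals formal, so it is 0.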
2552 auto if_adaptor_frame = __ MakeLabel();
2553 auto done = __ MakeLabel(MachineRepresentation::kTaggedSigned);
2554
2555 Node* frame = __ LoadFramePointer();
2556 __ GotoIf(__ WordEqual(arguments_frame, frame), &done, __ SmiConstant(0));
2557 __ Goto(&if_adaptor_frame);
2558
2559 __ Bind(&if_adaptor_frame);
2560 Node* arguments_length = __ Load(
2561 MachineType::TaggedSigned(), arguments_frame,
2562 __ IntPtrConstant(ArgumentsAdaptorFrameConstants::kLengthOffset));
2563
2564 Node* rest_length =
2565 __ IntSub(arguments_length, __ SmiConstant(formal_parameter_count));
2566 __ GotoIf(__ IntLessThan(rest_length, __ SmiConstant(0)), &done,
2567 __ SmiConstant(0));
2568 __ Goto(&done, rest_length);
2569
2570 __ Bind(&done);
2571 return done.PhiAt(0);
2572 } else {
2573 // The ArgumentsLength node is computing the actual number of arguments.
2574 // We have to distinguish the case when there is an arguments adaptor frame
2575 // (i.e., arguments_frame != LoadFramePointer()).
2576 auto if_adaptor_frame = __ MakeLabel();
2577 auto done = __ MakeLabel(MachineRepresentation::kTaggedSigned);
2578
2579 Node* frame = __ LoadFramePointer();
2580 __ GotoIf(__ WordEqual(arguments_frame, frame), &done,
2581 __ SmiConstant(formal_parameter_count));
2582 __ Goto(&if_adaptor_frame);
2583
2584 __ Bind(&if_adaptor_frame);
2585 Node* arguments_length = __ Load(
2586 MachineType::TaggedSigned(), arguments_frame,
2587 __ IntPtrConstant(ArgumentsAdaptorFrameConstants::kLengthOffset));
2588 __ Goto(&done, arguments_length);
2589
2590 __ Bind(&done);
2591 return done.PhiAt(0);
2592 }
2593 }
2594
2595 Node* EffectControlLinearizer::LowerArgumentsFrame(Node* node) {
2596 auto done = __ MakeLabel(MachineType::PointerRepresentation());
2597
2598 Node* frame = __ LoadFramePointer();
2599 Node* parent_frame =
2600 __ Load(MachineType::AnyTagged(), frame,
2601 __ IntPtrConstant(StandardFrameConstants::kCallerFPOffset));
2602 Node* parent_frame_type = __ Load(
2603 MachineType::AnyTagged(), parent_frame,
2604 __ IntPtrConstant(CommonFrameConstants::kContextOrFrameTypeOffset));
2605 __ GotoIf(__ WordEqual(parent_frame_type,
2606 __ IntPtrConstant(StackFrame::TypeToMarker(
2607 StackFrame::ARGUMENTS_ADAPTOR))),
2608 &done, parent_frame);
2609 __ Goto(&done, frame);
2610
2611 __ Bind(&done);
2612 return done.PhiAt(0);
2613 }
2614
2615 Node* EffectControlLinearizer::LowerNewDoubleElements(Node* node) {
2616 PretenureFlag const pretenure = PretenureFlagOf(node->op());
2617 Node* length = node->InputAt(0);
2618
2619 auto done = __ MakeLabel(MachineRepresentation::kTaggedPointer);
2620 Node* zero_length = __ Word32Equal(length, __ Int32Constant(0));
2621 __ GotoIf(zero_length, &done,
2622 jsgraph()->HeapConstant(factory()->empty_fixed_array()));
2623
2624 // Compute the effective size of the backing store.
2625 Node* size =
2626 __ Int32Add(__ Word32Shl(length, __ Int32Constant(kDoubleSizeLog2)),
2627 __ Int32Constant(FixedDoubleArray::kHeaderSize));
2628
2629 // Allocate the result and initialize the header.
2630 Node* result = __ Allocate(pretenure, size);
2631 __ StoreField(AccessBuilder::ForMap(), result,
2632 __ FixedDoubleArrayMapConstant());
2633 __ StoreField(AccessBuilder::ForFixedArrayLength(), result,
2634 ChangeInt32ToSmi(length));
2635
2636 // Initialize the backing store with holes.
2637 STATIC_ASSERT(HeapNumber::kValueOffset == Oddball::kToNumberRawOffset);
2638 Node* limit = ChangeUint32ToUintPtr(length);
2639 Node* the_hole =
2640 __ LoadField(AccessBuilder::ForHeapNumberValue(), __ TheHoleConstant());
2641 auto loop = __ MakeLoopLabel(MachineType::PointerRepresentation());
2642 __ Goto(&loop, __ IntPtrConstant(0));
2643 __ Bind(&loop);
2644 {
2645 // Check if we've initialized everything.
2646 Node* index = loop.PhiAt(0);
2647 Node* check = __ UintLessThan(index, limit);
2648 __ GotoIfNot(check, &done, result);
2649
2650 // Storing "the_hole" doesn't need a write barrier.
2651 StoreRepresentation rep(MachineRepresentation::kFloat64, kNoWriteBarrier);
2652 Node* offset = __ IntAdd(
2653 __ WordShl(index, __ IntPtrConstant(kDoubleSizeLog2)),
2654 __ IntPtrConstant(FixedDoubleArray::kHeaderSize - kHeapObjectTag));
2655 __ Store(rep, result, offset, the_hole);
2656
2657 // Advance the {index}.
2658 index = __ IntAdd(index, __ IntPtrConstant(1));
2659 __ Goto(&loop, index);
2660 }
2661
2662 __ Bind(&done);
2663 return done.PhiAt(0);
2664 }
2665
2666 Node* EffectControlLinearizer::LowerNewSmiOrObjectElements(Node* node) {
2667 PretenureFlag const pretenure = PretenureFlagOf(node->op());
2668 Node* length = node->InputAt(0);
2669
2670 auto done = __ MakeLabel(MachineRepresentation::kTaggedPointer);
2671 Node* zero_length = __ Word32Equal(length, __ Int32Constant(0));
2672 __ GotoIf(zero_length, &done,
2673 jsgraph()->HeapConstant(factory()->empty_fixed_array()));
2674
2675 // Compute the effective size of the backing store.
2676 Node* size =
2677 __ Int32Add(__ Word32Shl(length, __ Int32Constant(kPointerSizeLog2)),
2678 __ Int32Constant(FixedArray::kHeaderSize));
2679
2680 // Allocate the result and initialize the header.
2681 Node* result = __ Allocate(pretenure, size);
2682 __ StoreField(AccessBuilder::ForMap(), result, __ FixedArrayMapConstant());
2683 __ StoreField(AccessBuilder::ForFixedArrayLength(), result,
2684 ChangeInt32ToSmi(length));
2685
2686 // Initialize the backing store with holes.
2687 Node* limit = ChangeUint32ToUintPtr(length);
2688 Node* the_hole = __ TheHoleConstant();
2689 auto loop = __ MakeLoopLabel(MachineType::PointerRepresentation());
2690 __ Goto(&loop, __ IntPtrConstant(0));
2691 __ Bind(&loop);
2692 {
2693 // Check if we've initialized everything.
2694 Node* index = loop.PhiAt(0);
2695 Node* check = __ UintLessThan(index, limit);
2696 __ GotoIfNot(check, &done, result);
2697
2698 // Storing "the_hole" doesn't need a write barrier.
2699 StoreRepresentation rep(MachineRepresentation::kTagged, kNoWriteBarrier);
2700 Node* offset =
2701 __ IntAdd(__ WordShl(index, __ IntPtrConstant(kPointerSizeLog2)),
2702 __ IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag));
2703 __ Store(rep, result, offset, the_hole);
2704
2705 // Advance the {index}.
2706 index = __ IntAdd(index, __ IntPtrConstant(1));
2707 __ Goto(&loop, index);
2708 }
2709
2710 __ Bind(&done);
2711 return done.PhiAt(0);
2712 }
2713
2714 Node* EffectControlLinearizer::LowerNewArgumentsElements(Node* node) {
2715 Node* frame = NodeProperties::GetValueInput(node, 0);
2716 Node* length = NodeProperties::GetValueInput(node, 1);
2717 int mapped_count = NewArgumentsElementsMappedCountOf(node->op());
2718
2719 Callable const callable =
2720 Builtins::CallableFor(isolate(), Builtins::kNewArgumentsElements);
2721 Operator::Properties const properties = node->op()->properties();
2722 CallDescriptor::Flags const flags = CallDescriptor::kNoFlags;
2723 auto call_descriptor = Linkage::GetStubCallDescriptor(
2724 graph()->zone(), callable.descriptor(), 0, flags, properties);
2725 return __ Call(call_descriptor, __ HeapConstant(callable.code()), frame,
2726 length, __ SmiConstant(mapped_count), __ NoContextConstant());
2727 }
2728
2729 Node* EffectControlLinearizer::LowerNewConsString(Node* node) {
2730 Node* length = node->InputAt(0);
2731 Node* first = node->InputAt(1);
2732 Node* second = node->InputAt(2);
2733
2734 // Determine the instance types of {first} and {second}.
2735 Node* first_map = __ LoadField(AccessBuilder::ForMap(), first);
2736 Node* first_instance_type =
2737 __ LoadField(AccessBuilder::ForMapInstanceType(), first_map);
2738 Node* second_map = __ LoadField(AccessBuilder::ForMap(), second);
2739 Node* second_instance_type =
2740 __ LoadField(AccessBuilder::ForMapInstanceType(), second_map);
2741
2742 // Determine the proper map for the resulting ConsString.
2743 // If both {first} and {second} are one-byte strings, we
2744 // create a new ConsOneByteString; otherwise we create a
2745 // new ConsString instead.
2746 auto if_onebyte = __ MakeLabel();
2747 auto if_twobyte = __ MakeLabel();
2748 auto done = __ MakeLabel(MachineRepresentation::kTaggedPointer);
2749 STATIC_ASSERT(kOneByteStringTag != 0);
2750 STATIC_ASSERT(kTwoByteStringTag == 0);
2751 Node* instance_type = __ Word32And(first_instance_type, second_instance_type);
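// Since kOneByteStringTag is a single bit, ANDing the instance types keeps
// the one-byte encoding bit set only if both inputs are one-byte strings.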
2752 Node* encoding =
2753 __ Word32And(instance_type, __ Int32Constant(kStringEncodingMask));
2754 __ Branch(__ Word32Equal(encoding, __ Int32Constant(kTwoByteStringTag)),
2755 &if_twobyte, &if_onebyte);
2756 __ Bind(&if_onebyte);
2757 __ Goto(&done,
2758 jsgraph()->HeapConstant(factory()->cons_one_byte_string_map()));
2759 __ Bind(&if_twobyte);
2760 __ Goto(&done, jsgraph()->HeapConstant(factory()->cons_string_map()));
2761 __ Bind(&done);
2762 Node* result_map = done.PhiAt(0);
2763
2764 // Allocate the resulting ConsString.
2765 Node* result = __ Allocate(NOT_TENURED, __ Int32Constant(ConsString::kSize));
2766 __ StoreField(AccessBuilder::ForMap(), result, result_map);
2767 __ StoreField(AccessBuilder::ForNameHashField(), result,
2768 jsgraph()->Int32Constant(Name::kEmptyHashField));
2769 __ StoreField(AccessBuilder::ForStringLength(), result, length);
2770 __ StoreField(AccessBuilder::ForConsStringFirst(), result, first);
2771 __ StoreField(AccessBuilder::ForConsStringSecond(), result, second);
2772 return result;
2773 }
2774
2775 Node* EffectControlLinearizer::LowerArrayBufferWasNeutered(Node* node) {
2776 Node* value = node->InputAt(0);
2777
2778 Node* value_bit_field =
2779 __ LoadField(AccessBuilder::ForJSArrayBufferBitField(), value);
2780 return __ Word32Equal(
2781 __ Word32Equal(
2782 __ Word32And(value_bit_field,
2783 __ Int32Constant(JSArrayBuffer::WasNeutered::kMask)),
2784 __ Int32Constant(0)),
2785 __ Int32Constant(0));
2786 }
2787
2788 Node* EffectControlLinearizer::LowerSameValue(Node* node) {
2789 Node* lhs = node->InputAt(0);
2790 Node* rhs = node->InputAt(1);
2791
2792 Callable const callable =
2793 Builtins::CallableFor(isolate(), Builtins::kSameValue);
2794 Operator::Properties properties = Operator::kEliminatable;
2795 CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
2796 auto call_descriptor = Linkage::GetStubCallDescriptor(
2797 graph()->zone(), callable.descriptor(), 0, flags, properties);
2798 return __ Call(call_descriptor, __ HeapConstant(callable.code()), lhs, rhs,
2799 __ NoContextConstant());
2800 }
2801
2802 Node* EffectControlLinearizer::LowerDeadValue(Node* node) {
2803 Node* input = NodeProperties::GetValueInput(node, 0);
2804 if (input->opcode() != IrOpcode::kUnreachable) {
2805 Node* unreachable = __ Unreachable();
2806 NodeProperties::ReplaceValueInput(node, unreachable, 0);
2807 }
2808 return node;
2809 }
2810
2811 Node* EffectControlLinearizer::LowerStringToNumber(Node* node) {
2812 Node* string = node->InputAt(0);
2813
2814 Callable const callable =
2815 Builtins::CallableFor(isolate(), Builtins::kStringToNumber);
2816 Operator::Properties properties = Operator::kEliminatable;
2817 CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
2818 auto call_descriptor = Linkage::GetStubCallDescriptor(
2819 graph()->zone(), callable.descriptor(), 0, flags, properties);
2820 return __ Call(call_descriptor, __ HeapConstant(callable.code()), string,
2821 __ NoContextConstant());
2822 }
2823
2824 Node* EffectControlLinearizer::LowerStringCharCodeAt(Node* node) {
2825 Node* receiver = node->InputAt(0);
2826 Node* position = node->InputAt(1);
2827
2828 // We need a loop here to properly deal with indirect strings
2829 // (SlicedString, ConsString and ThinString).
2830 auto loop = __ MakeLoopLabel(MachineRepresentation::kTagged,
2831 MachineRepresentation::kWord32);
2832 auto loop_next = __ MakeLabel(MachineRepresentation::kTagged,
2833 MachineRepresentation::kWord32);
2834 auto loop_done = __ MakeLabel(MachineRepresentation::kWord32);
2835 __ Goto(&loop, receiver, position);
2836 __ Bind(&loop);
2837 {
2838 Node* receiver = loop.PhiAt(0);
2839 Node* position = loop.PhiAt(1);
2840 Node* receiver_map = __ LoadField(AccessBuilder::ForMap(), receiver);
2841 Node* receiver_instance_type =
2842 __ LoadField(AccessBuilder::ForMapInstanceType(), receiver_map);
2843 Node* receiver_representation = __ Word32And(
2844 receiver_instance_type, __ Int32Constant(kStringRepresentationMask));
2845
2846 // Dispatch on the current {receiver}'s string representation.
2847 auto if_seqstring = __ MakeLabel();
2848 auto if_consstring = __ MakeLabel();
2849 auto if_thinstring = __ MakeLabel();
2850 auto if_externalstring = __ MakeLabel();
2851 auto if_slicedstring = __ MakeLabel();
2852 auto if_runtime = __ MakeDeferredLabel();
2853 __ GotoIf(__ Word32Equal(receiver_representation,
2854 __ Int32Constant(kSeqStringTag)),
2855 &if_seqstring);
2856 __ GotoIf(__ Word32Equal(receiver_representation,
2857 __ Int32Constant(kConsStringTag)),
2858 &if_consstring);
2859 __ GotoIf(__ Word32Equal(receiver_representation,
2860 __ Int32Constant(kThinStringTag)),
2861 &if_thinstring);
2862 __ GotoIf(__ Word32Equal(receiver_representation,
2863 __ Int32Constant(kExternalStringTag)),
2864 &if_externalstring);
2865 __ Branch(__ Word32Equal(receiver_representation,
2866 __ Int32Constant(kSlicedStringTag)),
2867 &if_slicedstring, &if_runtime);
2868
2869 __ Bind(&if_seqstring);
2870 {
2871 Node* receiver_is_onebyte = __ Word32Equal(
2872 __ Word32Equal(__ Word32And(receiver_instance_type,
2873 __ Int32Constant(kStringEncodingMask)),
2874 __ Int32Constant(kTwoByteStringTag)),
2875 __ Int32Constant(0));
2876 Node* result = LoadFromSeqString(receiver, position, receiver_is_onebyte);
2877 __ Goto(&loop_done, result);
2878 }
2879
2880 __ Bind(&if_thinstring);
2881 {
2882 Node* receiver_actual =
2883 __ LoadField(AccessBuilder::ForThinStringActual(), receiver);
2884 __ Goto(&loop_next, receiver_actual, position);
2885 }
2886
2887 __ Bind(&if_consstring);
2888 {
2889 Node* receiver_second =
2890 __ LoadField(AccessBuilder::ForConsStringSecond(), receiver);
2891 __ GotoIfNot(__ WordEqual(receiver_second, __ EmptyStringConstant()),
2892 &if_runtime);
2893 Node* receiver_first =
2894 __ LoadField(AccessBuilder::ForConsStringFirst(), receiver);
2895 __ Goto(&loop_next, receiver_first, position);
2896 }
2897
2898 __ Bind(&if_externalstring);
2899 {
2900 // We need to bail out to the runtime for short external strings.
2901 __ GotoIf(__ Word32Equal(
2902 __ Word32And(receiver_instance_type,
2903 __ Int32Constant(kShortExternalStringMask)),
2904 __ Int32Constant(kShortExternalStringTag)),
2905 &if_runtime);
2906
2907 Node* receiver_data = __ LoadField(
2908 AccessBuilder::ForExternalStringResourceData(), receiver);
2909
2910 auto if_onebyte = __ MakeLabel();
2911 auto if_twobyte = __ MakeLabel();
2912 __ Branch(
2913 __ Word32Equal(__ Word32And(receiver_instance_type,
2914 __ Int32Constant(kStringEncodingMask)),
2915 __ Int32Constant(kTwoByteStringTag)),
2916 &if_twobyte, &if_onebyte);
2917
2918 __ Bind(&if_onebyte);
2919 {
2920 Node* result = __ Load(MachineType::Uint8(), receiver_data,
2921 ChangeInt32ToIntPtr(position));
2922 __ Goto(&loop_done, result);
2923 }
2924
2925 __ Bind(&if_twobyte);
2926 {
2927 Node* result = __ Load(
2928 MachineType::Uint16(), receiver_data,
2929 __ Word32Shl(ChangeInt32ToIntPtr(position), __ Int32Constant(1)));
2930 __ Goto(&loop_done, result);
2931 }
2932 }
2933
2934 __ Bind(&if_slicedstring);
2935 {
2936 Node* receiver_offset =
2937 __ LoadField(AccessBuilder::ForSlicedStringOffset(), receiver);
2938 Node* receiver_parent =
2939 __ LoadField(AccessBuilder::ForSlicedStringParent(), receiver);
2940 __ Goto(&loop_next, receiver_parent,
2941 __ Int32Add(position, ChangeSmiToInt32(receiver_offset)));
2942 }
2943
2944 __ Bind(&if_runtime);
2945 {
2946 Operator::Properties properties = Operator::kNoDeopt | Operator::kNoThrow;
2947 Runtime::FunctionId id = Runtime::kStringCharCodeAt;
2948 auto call_descriptor = Linkage::GetRuntimeCallDescriptor(
2949 graph()->zone(), id, 2, properties, CallDescriptor::kNoFlags);
2950 Node* result = __ Call(call_descriptor, __ CEntryStubConstant(1),
2951 receiver, ChangeInt32ToSmi(position),
2952 __ ExternalConstant(ExternalReference::Create(id)),
2953 __ Int32Constant(2), __ NoContextConstant());
2954 __ Goto(&loop_done, ChangeSmiToInt32(result));
2955 }
2956
2957 __ Bind(&loop_next);
2958 __ Goto(&loop, loop_next.PhiAt(0), loop_next.PhiAt(1));
2959 }
2960 __ Bind(&loop_done);
2961 return loop_done.PhiAt(0);
2962 }
2963
2964 Node* EffectControlLinearizer::LowerStringCodePointAt(
2965 Node* node, UnicodeEncoding encoding) {
2966 Node* receiver = node->InputAt(0);
2967 Node* position = node->InputAt(1);
2968
2969 Builtins::Name builtin = encoding == UnicodeEncoding::UTF16
2970 ? Builtins::kStringCodePointAtUTF16
2971 : Builtins::kStringCodePointAtUTF32;
2972
2973 Callable const callable = Builtins::CallableFor(isolate(), builtin);
2974 Operator::Properties properties = Operator::kNoThrow | Operator::kNoWrite;
2975 CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
2976 auto call_descriptor = Linkage::GetStubCallDescriptor(
2977 graph()->zone(), callable.descriptor(), 0, flags, properties);
2978 return __ Call(call_descriptor, __ HeapConstant(callable.code()), receiver,
2979 position, __ NoContextConstant());
2980 }
2981
2982 Node* EffectControlLinearizer::LoadFromSeqString(Node* receiver, Node* position,
2983 Node* is_one_byte) {
2984 auto one_byte_load = __ MakeLabel();
2985 auto done = __ MakeLabel(MachineRepresentation::kWord32);
2986 __ GotoIf(is_one_byte, &one_byte_load);
2987 Node* two_byte_result = __ LoadElement(
2988 AccessBuilder::ForSeqTwoByteStringCharacter(), receiver, position);
2989 __ Goto(&done, two_byte_result);
2990
2991 __ Bind(&one_byte_load);
2992 Node* one_byte_element = __ LoadElement(
2993 AccessBuilder::ForSeqOneByteStringCharacter(), receiver, position);
2994 __ Goto(&done, one_byte_element);
2995
2996 __ Bind(&done);
2997 return done.PhiAt(0);
2998 }
2999
3000 Node* EffectControlLinearizer::LowerStringFromSingleCharCode(Node* node) {
3001 Node* value = node->InputAt(0);
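// Only the low 16 bits of {value} are used as the character code.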
3002 Node* code = __ Word32And(value, __ Uint32Constant(0xFFFF));
3003
3004 auto if_not_one_byte = __ MakeDeferredLabel();
3005 auto cache_miss = __ MakeDeferredLabel();
3006 auto done = __ MakeLabel(MachineRepresentation::kTagged);
3007
3008 // Check if the {code} is a one-byte character.
3009 Node* check1 = __ Uint32LessThanOrEqual(
3010 code, __ Uint32Constant(String::kMaxOneByteCharCode));
3011 __ GotoIfNot(check1, &if_not_one_byte);
3012 {
3013 // Load the isolate wide single character string cache.
3014 Node* cache = __ HeapConstant(factory()->single_character_string_cache());
3015
3016 // Compute the {cache} index for {code}.
3017 Node* index = machine()->Is32() ? code : __ ChangeUint32ToUint64(code);
3018
3019 // Check if we have an entry for the {code} in the single character string
3020 // cache already.
3021 Node* entry =
3022 __ LoadElement(AccessBuilder::ForFixedArrayElement(), cache, index);
3023
3024 Node* check2 = __ WordEqual(entry, __ UndefinedConstant());
3025 __ GotoIf(check2, &cache_miss);
3026
3027 // Use the {entry} from the {cache}.
3028 __ Goto(&done, entry);
3029
3030 __ Bind(&cache_miss);
3031 {
3032 // Allocate a new SeqOneByteString for {code}.
3033 Node* vtrue2 = __ Allocate(
3034 NOT_TENURED, __ Int32Constant(SeqOneByteString::SizeFor(1)));
3035 __ StoreField(AccessBuilder::ForMap(), vtrue2,
3036 __ HeapConstant(factory()->one_byte_string_map()));
3037 __ StoreField(AccessBuilder::ForNameHashField(), vtrue2,
3038 __ IntPtrConstant(Name::kEmptyHashField));
3039 __ StoreField(AccessBuilder::ForStringLength(), vtrue2,
3040 __ SmiConstant(1));
3041 __ Store(
3042 StoreRepresentation(MachineRepresentation::kWord8, kNoWriteBarrier),
3043 vtrue2,
3044 __ IntPtrConstant(SeqOneByteString::kHeaderSize - kHeapObjectTag),
3045 code);
3046
3047 // Remember it in the {cache}.
3048 __ StoreElement(AccessBuilder::ForFixedArrayElement(), cache, index,
3049 vtrue2);
3050 __ Goto(&done, vtrue2);
3051 }
3052 }
3053
3054 __ Bind(&if_not_one_byte);
3055 {
3056 // Allocate a new SeqTwoByteString for {code}.
3057 Node* vfalse1 = __ Allocate(NOT_TENURED,
3058 __ Int32Constant(SeqTwoByteString::SizeFor(1)));
3059 __ StoreField(AccessBuilder::ForMap(), vfalse1,
3060 __ HeapConstant(factory()->string_map()));
3061 __ StoreField(AccessBuilder::ForNameHashField(), vfalse1,
3062 __ IntPtrConstant(Name::kEmptyHashField));
3063 __ StoreField(AccessBuilder::ForStringLength(), vfalse1, __ SmiConstant(1));
3064 __ Store(
3065 StoreRepresentation(MachineRepresentation::kWord16, kNoWriteBarrier),
3066 vfalse1,
3067 __ IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag),
3068 code);
3069 __ Goto(&done, vfalse1);
3070 }
3071
3072 __ Bind(&done);
3073 return done.PhiAt(0);
3074 }
3075
3076 #ifdef V8_INTL_SUPPORT
3077
3078 Node* EffectControlLinearizer::LowerStringToLowerCaseIntl(Node* node) {
3079 Node* receiver = node->InputAt(0);
3080
3081 Callable callable =
3082 Builtins::CallableFor(isolate(), Builtins::kStringToLowerCaseIntl);
3083 Operator::Properties properties = Operator::kNoDeopt | Operator::kNoThrow;
3084 CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
3085 auto call_descriptor = Linkage::GetStubCallDescriptor(
3086 graph()->zone(), callable.descriptor(), 0, flags, properties);
3087 return __ Call(call_descriptor, __ HeapConstant(callable.code()), receiver,
3088 __ NoContextConstant());
3089 }
3090
3091 Node* EffectControlLinearizer::LowerStringToUpperCaseIntl(Node* node) {
3092 Node* receiver = node->InputAt(0);
3093 Operator::Properties properties = Operator::kNoDeopt | Operator::kNoThrow;
3094 Runtime::FunctionId id = Runtime::kStringToUpperCaseIntl;
3095 auto call_descriptor = Linkage::GetRuntimeCallDescriptor(
3096 graph()->zone(), id, 1, properties, CallDescriptor::kNoFlags);
3097 return __ Call(call_descriptor, __ CEntryStubConstant(1), receiver,
3098 __ ExternalConstant(ExternalReference::Create(id)),
3099 __ Int32Constant(1), __ NoContextConstant());
3100 }
3101
3102 #else
3103
3104 Node* EffectControlLinearizer::LowerStringToLowerCaseIntl(Node* node) {
3105 UNREACHABLE();
3106 return nullptr;
3107 }
3108
3109 Node* EffectControlLinearizer::LowerStringToUpperCaseIntl(Node* node) {
3110 UNREACHABLE();
3111 return nullptr;
3112 }
3113
3114 #endif // V8_INTL_SUPPORT
3115
3116 Node* EffectControlLinearizer::LowerStringFromSingleCodePoint(Node* node) {
3117 Node* value = node->InputAt(0);
3118 Node* code = value;
3119
3120 auto if_not_single_code = __ MakeDeferredLabel();
3121 auto if_not_one_byte = __ MakeDeferredLabel();
3122 auto cache_miss = __ MakeDeferredLabel();
3123 auto done = __ MakeLabel(MachineRepresentation::kTagged);
3124
3125   // Check if the {code} is a single UTF-16 code unit.
3126 Node* check0 = __ Uint32LessThanOrEqual(code, __ Uint32Constant(0xFFFF));
3127 __ GotoIfNot(check0, &if_not_single_code);
3128
3129 {
3130     // Check if the {code} is a one-byte character.
3131 Node* check1 = __ Uint32LessThanOrEqual(
3132 code, __ Uint32Constant(String::kMaxOneByteCharCode));
3133 __ GotoIfNot(check1, &if_not_one_byte);
3134 {
3135 // Load the isolate wide single character string cache.
3136 Node* cache = __ HeapConstant(factory()->single_character_string_cache());
3137
3138 // Compute the {cache} index for {code}.
3139 Node* index = machine()->Is32() ? code : __ ChangeUint32ToUint64(code);
3140
3141 // Check if we have an entry for the {code} in the single character string
3142 // cache already.
3143 Node* entry =
3144 __ LoadElement(AccessBuilder::ForFixedArrayElement(), cache, index);
3145
3146 Node* check2 = __ WordEqual(entry, __ UndefinedConstant());
3147 __ GotoIf(check2, &cache_miss);
3148
3149 // Use the {entry} from the {cache}.
3150 __ Goto(&done, entry);
3151
3152 __ Bind(&cache_miss);
3153 {
3154 // Allocate a new SeqOneByteString for {code}.
3155 Node* vtrue2 = __ Allocate(
3156 NOT_TENURED, __ Int32Constant(SeqOneByteString::SizeFor(1)));
3157 __ StoreField(AccessBuilder::ForMap(), vtrue2,
3158 __ HeapConstant(factory()->one_byte_string_map()));
3159 __ StoreField(AccessBuilder::ForNameHashField(), vtrue2,
3160 __ IntPtrConstant(Name::kEmptyHashField));
3161 __ StoreField(AccessBuilder::ForStringLength(), vtrue2,
3162 __ SmiConstant(1));
3163 __ Store(
3164 StoreRepresentation(MachineRepresentation::kWord8, kNoWriteBarrier),
3165 vtrue2,
3166 __ IntPtrConstant(SeqOneByteString::kHeaderSize - kHeapObjectTag),
3167 code);
3168
3169 // Remember it in the {cache}.
3170 __ StoreElement(AccessBuilder::ForFixedArrayElement(), cache, index,
3171 vtrue2);
3172 __ Goto(&done, vtrue2);
3173 }
3174 }
3175
3176 __ Bind(&if_not_one_byte);
3177 {
3178 // Allocate a new SeqTwoByteString for {code}.
3179 Node* vfalse1 = __ Allocate(
3180 NOT_TENURED, __ Int32Constant(SeqTwoByteString::SizeFor(1)));
3181 __ StoreField(AccessBuilder::ForMap(), vfalse1,
3182 __ HeapConstant(factory()->string_map()));
3183 __ StoreField(AccessBuilder::ForNameHashField(), vfalse1,
3184 __ IntPtrConstant(Name::kEmptyHashField));
3185 __ StoreField(AccessBuilder::ForStringLength(), vfalse1,
3186 __ SmiConstant(1));
3187 __ Store(
3188 StoreRepresentation(MachineRepresentation::kWord16, kNoWriteBarrier),
3189 vfalse1,
3190 __ IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag),
3191 code);
3192 __ Goto(&done, vfalse1);
3193 }
3194 }
3195
3196 __ Bind(&if_not_single_code);
3197   // Generate a surrogate pair string.
3198 {
3199 switch (UnicodeEncodingOf(node->op())) {
3200 case UnicodeEncoding::UTF16:
3201 break;
3202
3203 case UnicodeEncoding::UTF32: {
3204         // Convert UTF32 to UTF16 code units and store them as a 32-bit word.
3205 Node* lead_offset = __ Int32Constant(0xD800 - (0x10000 >> 10));
3206
3207 // lead = (codepoint >> 10) + LEAD_OFFSET
3208 Node* lead =
3209 __ Int32Add(__ Word32Shr(code, __ Int32Constant(10)), lead_offset);
3210
3211 // trail = (codepoint & 0x3FF) + 0xDC00;
3212 Node* trail = __ Int32Add(__ Word32And(code, __ Int32Constant(0x3FF)),
3213 __ Int32Constant(0xDC00));
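        // Illustrative example: for code point U+1F600, lead is
        // (0x1F600 >> 10) + 0xD7C0 = 0xD83D and trail is
        // (0x1F600 & 0x3FF) + 0xDC00 = 0xDE00, i.e. the pair D83D/DE00.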
3214
3215         // codepoint = (trail << 16) | lead;
3216 #if V8_TARGET_BIG_ENDIAN
3217 code = __ Word32Or(__ Word32Shl(lead, __ Int32Constant(16)), trail);
3218 #else
3219 code = __ Word32Or(__ Word32Shl(trail, __ Int32Constant(16)), lead);
3220 #endif
3221 break;
3222 }
3223 }
3224
3225 // Allocate a new SeqTwoByteString for {code}.
3226 Node* vfalse0 = __ Allocate(NOT_TENURED,
3227 __ Int32Constant(SeqTwoByteString::SizeFor(2)));
3228 __ StoreField(AccessBuilder::ForMap(), vfalse0,
3229 __ HeapConstant(factory()->string_map()));
3230 __ StoreField(AccessBuilder::ForNameHashField(), vfalse0,
3231 __ IntPtrConstant(Name::kEmptyHashField));
3232 __ StoreField(AccessBuilder::ForStringLength(), vfalse0, __ SmiConstant(2));
3233 __ Store(
3234 StoreRepresentation(MachineRepresentation::kWord32, kNoWriteBarrier),
3235 vfalse0,
3236 __ IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag),
3237 code);
3238 __ Goto(&done, vfalse0);
3239 }
3240
3241 __ Bind(&done);
3242 return done.PhiAt(0);
3243 }
3244
3245 Node* EffectControlLinearizer::LowerStringIndexOf(Node* node) {
3246 Node* subject = node->InputAt(0);
3247 Node* search_string = node->InputAt(1);
3248 Node* position = node->InputAt(2);
3249
3250 Callable callable =
3251 Builtins::CallableFor(isolate(), Builtins::kStringIndexOf);
3252 Operator::Properties properties = Operator::kEliminatable;
3253 CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
3254 auto call_descriptor = Linkage::GetStubCallDescriptor(
3255 graph()->zone(), callable.descriptor(), 0, flags, properties);
3256 return __ Call(call_descriptor, __ HeapConstant(callable.code()), subject,
3257 search_string, position, __ NoContextConstant());
3258 }
3259
3260 Node* EffectControlLinearizer::LowerStringLength(Node* node) {
3261 Node* subject = node->InputAt(0);
3262
3263 return __ LoadField(AccessBuilder::ForStringLength(), subject);
3264 }
3265
3266 Node* EffectControlLinearizer::LowerStringComparison(Callable const& callable,
3267 Node* node) {
3268 Node* lhs = node->InputAt(0);
3269 Node* rhs = node->InputAt(1);
3270
3271 Operator::Properties properties = Operator::kEliminatable;
3272 CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
3273 auto call_descriptor = Linkage::GetStubCallDescriptor(
3274 graph()->zone(), callable.descriptor(), 0, flags, properties);
3275 return __ Call(call_descriptor, __ HeapConstant(callable.code()), lhs, rhs,
3276 __ NoContextConstant());
3277 }
3278
3279 Node* EffectControlLinearizer::LowerStringSubstring(Node* node) {
3280 Node* receiver = node->InputAt(0);
3281 Node* start = ChangeInt32ToIntPtr(node->InputAt(1));
3282 Node* end = ChangeInt32ToIntPtr(node->InputAt(2));
3283
3284 Callable callable =
3285 Builtins::CallableFor(isolate(), Builtins::kStringSubstring);
3286 Operator::Properties properties = Operator::kEliminatable;
3287 CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
3288 auto call_descriptor = Linkage::GetStubCallDescriptor(
3289 graph()->zone(), callable.descriptor(), 0, flags, properties);
3290 return __ Call(call_descriptor, __ HeapConstant(callable.code()), receiver,
3291 start, end, __ NoContextConstant());
3292 }
3293
3294 Node* EffectControlLinearizer::LowerStringEqual(Node* node) {
3295 return LowerStringComparison(
3296 Builtins::CallableFor(isolate(), Builtins::kStringEqual), node);
3297 }
3298
3299 Node* EffectControlLinearizer::LowerStringLessThan(Node* node) {
3300 return LowerStringComparison(
3301 Builtins::CallableFor(isolate(), Builtins::kStringLessThan), node);
3302 }
3303
3304 Node* EffectControlLinearizer::LowerStringLessThanOrEqual(Node* node) {
3305 return LowerStringComparison(
3306 Builtins::CallableFor(isolate(), Builtins::kStringLessThanOrEqual), node);
3307 }
3308
3309 Node* EffectControlLinearizer::LowerCheckFloat64Hole(Node* node,
3310 Node* frame_state) {
3311 // If we reach this point w/o eliminating the {node} that's marked
3312 // with allow-return-hole, we cannot do anything, so just deoptimize
3313 // in case of the hole NaN.
3314 CheckFloat64HoleParameters const& params =
3315 CheckFloat64HoleParametersOf(node->op());
3316 Node* value = node->InputAt(0);
3317 Node* check = __ Word32Equal(__ Float64ExtractHighWord32(value),
3318 __ Int32Constant(kHoleNanUpper32));
3319 __ DeoptimizeIf(DeoptimizeReason::kHole, params.feedback(), check,
3320 frame_state);
3321 return value;
3322 }
3323
3324 Node* EffectControlLinearizer::LowerCheckNotTaggedHole(Node* node,
3325 Node* frame_state) {
3326 Node* value = node->InputAt(0);
3327 Node* check = __ WordEqual(value, __ TheHoleConstant());
3328 __ DeoptimizeIf(DeoptimizeReason::kHole, VectorSlotPair(), check,
3329 frame_state);
3330 return value;
3331 }
3332
3333 Node* EffectControlLinearizer::LowerConvertTaggedHoleToUndefined(Node* node) {
3334 Node* value = node->InputAt(0);
3335
3336 auto if_is_hole = __ MakeDeferredLabel();
3337 auto done = __ MakeLabel(MachineRepresentation::kTagged);
3338
3339 Node* check = __ WordEqual(value, __ TheHoleConstant());
3340 __ GotoIf(check, &if_is_hole);
3341 __ Goto(&done, value);
3342
3343 __ Bind(&if_is_hole);
3344 __ Goto(&done, __ UndefinedConstant());
3345
3346 __ Bind(&done);
3347 return done.PhiAt(0);
3348 }
3349
3350 void EffectControlLinearizer::LowerCheckEqualsInternalizedString(
3351 Node* node, Node* frame_state) {
3352 Node* exp = node->InputAt(0);
3353 Node* val = node->InputAt(1);
3354
3355 auto if_same = __ MakeLabel();
3356 auto if_notsame = __ MakeDeferredLabel();
3357 auto if_thinstring = __ MakeLabel();
3358 auto if_notthinstring = __ MakeLabel();
3359
3360 // Check if {exp} and {val} are the same, which is the likely case.
3361 __ Branch(__ WordEqual(exp, val), &if_same, &if_notsame);
3362
3363 __ Bind(&if_notsame);
3364 {
3365 // Now {val} could still be a non-internalized String that matches {exp}.
3366 __ DeoptimizeIf(DeoptimizeReason::kWrongName, VectorSlotPair(),
3367 ObjectIsSmi(val), frame_state);
3368 Node* val_map = __ LoadField(AccessBuilder::ForMap(), val);
3369 Node* val_instance_type =
3370 __ LoadField(AccessBuilder::ForMapInstanceType(), val_map);
3371
3372 // Check for the common case of ThinString first.
3373 __ GotoIf(__ Word32Equal(val_instance_type,
3374 __ Int32Constant(THIN_ONE_BYTE_STRING_TYPE)),
3375 &if_thinstring);
3376 __ Branch(
3377 __ Word32Equal(val_instance_type, __ Int32Constant(THIN_STRING_TYPE)),
3378 &if_thinstring, &if_notthinstring);
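      // (A ThinString is a forwarding wrapper created when a string gets
      // internalized after the fact; its "actual" field points at the
      // internalized string, which is compared against {exp} below.)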
3379
3380 __ Bind(&if_notthinstring);
3381 {
3382       // Check that {val} is a non-internalized String; if it's anything
3383       // else, it cannot match the recorded feedback {exp} anyway.
3384 __ DeoptimizeIfNot(
3385 DeoptimizeReason::kWrongName, VectorSlotPair(),
3386 __ Word32Equal(__ Word32And(val_instance_type,
3387 __ Int32Constant(kIsNotStringMask |
3388 kIsNotInternalizedMask)),
3389 __ Int32Constant(kStringTag | kNotInternalizedTag)),
3390 frame_state);
3391
3392 // Try to find the {val} in the string table.
3393 MachineSignature::Builder builder(graph()->zone(), 1, 2);
3394 builder.AddReturn(MachineType::AnyTagged());
3395 builder.AddParam(MachineType::Pointer());
3396 builder.AddParam(MachineType::AnyTagged());
3397 Node* try_internalize_string_function = __ ExternalConstant(
3398 ExternalReference::try_internalize_string_function());
3399 Node* const isolate_ptr =
3400 __ ExternalConstant(ExternalReference::isolate_address(isolate()));
3401 auto call_descriptor =
3402 Linkage::GetSimplifiedCDescriptor(graph()->zone(), builder.Build());
3403 Node* val_internalized =
3404 __ Call(common()->Call(call_descriptor),
3405 try_internalize_string_function, isolate_ptr, val);
3406
3407 // Now see if the results match.
3408 __ DeoptimizeIfNot(DeoptimizeReason::kWrongName, VectorSlotPair(),
3409 __ WordEqual(exp, val_internalized), frame_state);
3410 __ Goto(&if_same);
3411 }
3412
3413 __ Bind(&if_thinstring);
3414 {
3415 // The {val} is a ThinString, let's check the actual value.
3416 Node* val_actual =
3417 __ LoadField(AccessBuilder::ForThinStringActual(), val);
3418 __ DeoptimizeIfNot(DeoptimizeReason::kWrongName, VectorSlotPair(),
3419 __ WordEqual(exp, val_actual), frame_state);
3420 __ Goto(&if_same);
3421 }
3422 }
3423
3424 __ Bind(&if_same);
3425 }
3426
3427 void EffectControlLinearizer::LowerCheckEqualsSymbol(Node* node,
3428 Node* frame_state) {
3429 Node* exp = node->InputAt(0);
3430 Node* val = node->InputAt(1);
3431 Node* check = __ WordEqual(exp, val);
3432 __ DeoptimizeIfNot(DeoptimizeReason::kWrongName, VectorSlotPair(), check,
3433 frame_state);
3434 }
3435
3436 Node* EffectControlLinearizer::AllocateHeapNumberWithValue(Node* value) {
3437 Node* result = __ Allocate(NOT_TENURED, __ Int32Constant(HeapNumber::kSize));
3438 __ StoreField(AccessBuilder::ForMap(), result, __ HeapNumberMapConstant());
3439 __ StoreField(AccessBuilder::ForHeapNumberValue(), result, value);
3440 return result;
3441 }
3442
3443 Node* EffectControlLinearizer::ChangeInt32ToSmi(Node* value) {
3444 return __ WordShl(ChangeInt32ToIntPtr(value), SmiShiftBitsConstant());
3445 }
3446
3447 Node* EffectControlLinearizer::ChangeInt32ToIntPtr(Node* value) {
3448 if (machine()->Is64()) {
3449 value = __ ChangeInt32ToInt64(value);
3450 }
3451 return value;
3452 }
3453
3454 Node* EffectControlLinearizer::ChangeIntPtrToInt32(Node* value) {
3455 if (machine()->Is64()) {
3456 value = __ TruncateInt64ToInt32(value);
3457 }
3458 return value;
3459 }
3460
3461 Node* EffectControlLinearizer::ChangeUint32ToUintPtr(Node* value) {
3462 if (machine()->Is64()) {
3463 value = __ ChangeUint32ToUint64(value);
3464 }
3465 return value;
3466 }
3467
3468 Node* EffectControlLinearizer::ChangeUint32ToSmi(Node* value) {
3469 value = ChangeUint32ToUintPtr(value);
3470 return __ WordShl(value, SmiShiftBitsConstant());
3471 }
3472
3473 Node* EffectControlLinearizer::ChangeSmiToIntPtr(Node* value) {
3474 return __ WordSar(value, SmiShiftBitsConstant());
3475 }
3476
3477 Node* EffectControlLinearizer::ChangeSmiToInt32(Node* value) {
3478 value = ChangeSmiToIntPtr(value);
3479 if (machine()->Is64()) {
3480 value = __ TruncateInt64ToInt32(value);
3481 }
3482 return value;
3483 }
3484
3485 Node* EffectControlLinearizer::ObjectIsSmi(Node* value) {
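  // A Smi is identified purely by its tag bits: Smis carry kSmiTag (0) in the
  // low bit, while heap object pointers carry kHeapObjectTag, so masking with
  // kSmiTagMask is sufficient.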
3486 return __ WordEqual(__ WordAnd(value, __ IntPtrConstant(kSmiTagMask)),
3487 __ IntPtrConstant(kSmiTag));
3488 }
3489
3490 Node* EffectControlLinearizer::SmiMaxValueConstant() {
3491 return __ Int32Constant(Smi::kMaxValue);
3492 }
3493
3494 Node* EffectControlLinearizer::SmiShiftBitsConstant() {
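  // Illustrative note (assuming the classic Smi layout of this code base):
  // kSmiTagSize is 1 and kSmiShiftSize is 31 on 64-bit targets (0 on 32-bit
  // targets), so ChangeInt32ToSmi(5) yields 5 << 32 on x64 and 5 << 1 on ia32.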
3495 return __ IntPtrConstant(kSmiShiftSize + kSmiTagSize);
3496 }
3497
3498 Node* EffectControlLinearizer::LowerPlainPrimitiveToNumber(Node* node) {
3499 Node* value = node->InputAt(0);
3500 return __ ToNumber(value);
3501 }
3502
3503 Node* EffectControlLinearizer::LowerPlainPrimitiveToWord32(Node* node) {
3504 Node* value = node->InputAt(0);
3505
3506 auto if_not_smi = __ MakeDeferredLabel();
3507 auto if_to_number_smi = __ MakeLabel();
3508 auto done = __ MakeLabel(MachineRepresentation::kWord32);
3509
3510 Node* check0 = ObjectIsSmi(value);
3511 __ GotoIfNot(check0, &if_not_smi);
3512 __ Goto(&done, ChangeSmiToInt32(value));
3513
3514 __ Bind(&if_not_smi);
3515 Node* to_number = __ ToNumber(value);
3516
3517 Node* check1 = ObjectIsSmi(to_number);
3518 __ GotoIf(check1, &if_to_number_smi);
3519 Node* number = __ LoadField(AccessBuilder::ForHeapNumberValue(), to_number);
3520 __ Goto(&done, __ TruncateFloat64ToWord32(number));
3521
3522 __ Bind(&if_to_number_smi);
3523 __ Goto(&done, ChangeSmiToInt32(to_number));
3524
3525 __ Bind(&done);
3526 return done.PhiAt(0);
3527 }
3528
3529 Node* EffectControlLinearizer::LowerPlainPrimitiveToFloat64(Node* node) {
3530 Node* value = node->InputAt(0);
3531
3532 auto if_not_smi = __ MakeDeferredLabel();
3533 auto if_to_number_smi = __ MakeLabel();
3534 auto done = __ MakeLabel(MachineRepresentation::kFloat64);
3535
3536 Node* check0 = ObjectIsSmi(value);
3537 __ GotoIfNot(check0, &if_not_smi);
3538 Node* from_smi = ChangeSmiToInt32(value);
3539 __ Goto(&done, __ ChangeInt32ToFloat64(from_smi));
3540
3541 __ Bind(&if_not_smi);
3542 Node* to_number = __ ToNumber(value);
3543 Node* check1 = ObjectIsSmi(to_number);
3544 __ GotoIf(check1, &if_to_number_smi);
3545
3546 Node* number = __ LoadField(AccessBuilder::ForHeapNumberValue(), to_number);
3547 __ Goto(&done, number);
3548
3549 __ Bind(&if_to_number_smi);
3550 Node* number_from_smi = ChangeSmiToInt32(to_number);
3551 number_from_smi = __ ChangeInt32ToFloat64(number_from_smi);
3552 __ Goto(&done, number_from_smi);
3553
3554 __ Bind(&done);
3555 return done.PhiAt(0);
3556 }
3557
3558 Node* EffectControlLinearizer::LowerEnsureWritableFastElements(Node* node) {
3559 Node* object = node->InputAt(0);
3560 Node* elements = node->InputAt(1);
3561
3562 auto if_not_fixed_array = __ MakeDeferredLabel();
3563 auto done = __ MakeLabel(MachineRepresentation::kTagged);
3564
3565 // Load the current map of {elements}.
3566 Node* elements_map = __ LoadField(AccessBuilder::ForMap(), elements);
3567
3568 // Check if {elements} is not a copy-on-write FixedArray.
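  // (Copy-on-write backing stores use the distinct fixed_cow_array_map, so
  // seeing the plain FixedArray map means the elements are already writable.)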
3569 Node* check = __ WordEqual(elements_map, __ FixedArrayMapConstant());
3570 __ GotoIfNot(check, &if_not_fixed_array);
3571 // Nothing to do if the {elements} are not copy-on-write.
3572 __ Goto(&done, elements);
3573
3574 __ Bind(&if_not_fixed_array);
3575 // We need to take a copy of the {elements} and set them up for {object}.
3576 Operator::Properties properties = Operator::kEliminatable;
3577 Callable callable =
3578 Builtins::CallableFor(isolate(), Builtins::kCopyFastSmiOrObjectElements);
3579 CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
3580 auto call_descriptor = Linkage::GetStubCallDescriptor(
3581 graph()->zone(), callable.descriptor(), 0, flags, properties);
3582 Node* result = __ Call(call_descriptor, __ HeapConstant(callable.code()),
3583 object, __ NoContextConstant());
3584 __ Goto(&done, result);
3585
3586 __ Bind(&done);
3587 return done.PhiAt(0);
3588 }
3589
3590 Node* EffectControlLinearizer::LowerMaybeGrowFastElements(Node* node,
3591 Node* frame_state) {
3592 GrowFastElementsParameters params = GrowFastElementsParametersOf(node->op());
3593 Node* object = node->InputAt(0);
3594 Node* elements = node->InputAt(1);
3595 Node* index = node->InputAt(2);
3596 Node* elements_length = node->InputAt(3);
3597
3598 auto done = __ MakeLabel(MachineRepresentation::kTagged);
3599 auto if_grow = __ MakeDeferredLabel();
3600 auto if_not_grow = __ MakeLabel();
3601
3602 // Check if we need to grow the {elements} backing store.
3603 Node* check = __ Uint32LessThan(index, elements_length);
3604 __ GotoIfNot(check, &if_grow);
3605 __ Goto(&done, elements);
3606
3607 __ Bind(&if_grow);
3608 // We need to grow the {elements} for {object}.
3609 Operator::Properties properties = Operator::kEliminatable;
3610 Callable callable =
3611 (params.mode() == GrowFastElementsMode::kDoubleElements)
3612 ? Builtins::CallableFor(isolate(), Builtins::kGrowFastDoubleElements)
3613 : Builtins::CallableFor(isolate(),
3614 Builtins::kGrowFastSmiOrObjectElements);
3615 CallDescriptor::Flags call_flags = CallDescriptor::kNoFlags;
3616 auto call_descriptor = Linkage::GetStubCallDescriptor(
3617 graph()->zone(), callable.descriptor(), 0, call_flags, properties);
3618 Node* new_elements =
3619 __ Call(call_descriptor, __ HeapConstant(callable.code()), object,
3620 ChangeInt32ToSmi(index), __ NoContextConstant());
3621
3622 // Ensure that we were able to grow the {elements}.
3623 __ DeoptimizeIf(DeoptimizeReason::kCouldNotGrowElements, params.feedback(),
3624 ObjectIsSmi(new_elements), frame_state);
3625 __ Goto(&done, new_elements);
3626
3627 __ Bind(&done);
3628 return done.PhiAt(0);
3629 }
3630
3631 void EffectControlLinearizer::LowerTransitionElementsKind(Node* node) {
3632 ElementsTransition const transition = ElementsTransitionOf(node->op());
3633 Node* object = node->InputAt(0);
3634
3635 auto if_map_same = __ MakeDeferredLabel();
3636 auto done = __ MakeLabel();
3637
3638 Node* source_map = __ HeapConstant(transition.source());
3639 Node* target_map = __ HeapConstant(transition.target());
3640
3641 // Load the current map of {object}.
3642 Node* object_map = __ LoadField(AccessBuilder::ForMap(), object);
3643
3644 // Check if {object_map} is the same as {source_map}.
3645 Node* check = __ WordEqual(object_map, source_map);
3646 __ GotoIf(check, &if_map_same);
3647 __ Goto(&done);
3648
3649 __ Bind(&if_map_same);
3650 switch (transition.mode()) {
3651 case ElementsTransition::kFastTransition:
3652 // In-place migration of {object}, just store the {target_map}.
3653 __ StoreField(AccessBuilder::ForMap(), object, target_map);
3654 break;
3655 case ElementsTransition::kSlowTransition: {
3656 // Instance migration, call out to the runtime for {object}.
3657 Operator::Properties properties = Operator::kNoDeopt | Operator::kNoThrow;
3658 Runtime::FunctionId id = Runtime::kTransitionElementsKind;
3659 auto call_descriptor = Linkage::GetRuntimeCallDescriptor(
3660 graph()->zone(), id, 2, properties, CallDescriptor::kNoFlags);
3661 __ Call(call_descriptor, __ CEntryStubConstant(1), object, target_map,
3662 __ ExternalConstant(ExternalReference::Create(id)),
3663 __ Int32Constant(2), __ NoContextConstant());
3664 break;
3665 }
3666 }
3667 __ Goto(&done);
3668
3669 __ Bind(&done);
3670 }
3671
3672 Node* EffectControlLinearizer::LowerLoadFieldByIndex(Node* node) {
3673 Node* object = node->InputAt(0);
3674 Node* index = node->InputAt(1);
3675 Node* zero = __ IntPtrConstant(0);
3676 Node* one = __ IntPtrConstant(1);
3677
3678 // Sign-extend the {index} on 64-bit architectures.
3679 if (machine()->Is64()) {
3680 index = __ ChangeInt32ToInt64(index);
3681 }
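  // The {index} is expected to use the LoadFieldByIndex encoding: bit 0 says
  // whether the field holds a double, the remaining bits give the field index,
  // and a negative value selects the out-of-object property backing store, as
  // the checks below assume.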
3682
3683 auto if_double = __ MakeDeferredLabel();
3684 auto done = __ MakeLabel(MachineRepresentation::kTagged);
3685
3686 // Check if field is a mutable double field.
3687 __ GotoIfNot(__ WordEqual(__ WordAnd(index, one), zero), &if_double);
3688
3689 // The field is a proper Tagged field on {object}. The {index} is shifted
3690 // to the left by one in the code below.
3691 {
3692 // Check if field is in-object or out-of-object.
3693 auto if_outofobject = __ MakeLabel();
3694 __ GotoIf(__ IntLessThan(index, zero), &if_outofobject);
3695
3696 // The field is located in the {object} itself.
3697 {
3698 Node* offset =
3699 __ IntAdd(__ WordShl(index, __ IntPtrConstant(kPointerSizeLog2 - 1)),
3700 __ IntPtrConstant(JSObject::kHeaderSize - kHeapObjectTag));
3701 Node* result = __ Load(MachineType::AnyTagged(), object, offset);
3702 __ Goto(&done, result);
3703 }
3704
3705 // The field is located in the properties backing store of {object}.
3706     // The {index} is equal to the negated out-of-object property index plus 1.
3707 __ Bind(&if_outofobject);
3708 {
3709 Node* properties =
3710 __ LoadField(AccessBuilder::ForJSObjectPropertiesOrHash(), object);
3711 Node* offset =
3712 __ IntAdd(__ WordShl(__ IntSub(zero, index),
3713 __ IntPtrConstant(kPointerSizeLog2 - 1)),
3714 __ IntPtrConstant((FixedArray::kHeaderSize - kPointerSize) -
3715 kHeapObjectTag));
3716 Node* result = __ Load(MachineType::AnyTagged(), properties, offset);
3717 __ Goto(&done, result);
3718 }
3719 }
3720
3721 // The field is a Double field, either unboxed in the object on 64-bit
3722   // architectures, or boxed as a MutableHeapNumber.
3723 __ Bind(&if_double);
3724 {
3725 auto done_double = __ MakeLabel(MachineRepresentation::kFloat64);
3726
3727 index = __ WordSar(index, one);
3728
3729 // Check if field is in-object or out-of-object.
3730 auto if_outofobject = __ MakeLabel();
3731 __ GotoIf(__ IntLessThan(index, zero), &if_outofobject);
3732
3733 // The field is located in the {object} itself.
3734 {
3735 Node* offset =
3736 __ IntAdd(__ WordShl(index, __ IntPtrConstant(kPointerSizeLog2)),
3737 __ IntPtrConstant(JSObject::kHeaderSize - kHeapObjectTag));
3738 if (FLAG_unbox_double_fields) {
3739 Node* result = __ Load(MachineType::Float64(), object, offset);
3740 __ Goto(&done_double, result);
3741 } else {
3742 Node* result = __ Load(MachineType::AnyTagged(), object, offset);
3743 result = __ LoadField(AccessBuilder::ForHeapNumberValue(), result);
3744 __ Goto(&done_double, result);
3745 }
3746 }
3747
3748 __ Bind(&if_outofobject);
3749 {
3750 Node* properties =
3751 __ LoadField(AccessBuilder::ForJSObjectPropertiesOrHash(), object);
3752 Node* offset =
3753 __ IntAdd(__ WordShl(__ IntSub(zero, index),
3754 __ IntPtrConstant(kPointerSizeLog2)),
3755 __ IntPtrConstant((FixedArray::kHeaderSize - kPointerSize) -
3756 kHeapObjectTag));
3757 Node* result = __ Load(MachineType::AnyTagged(), properties, offset);
3758 result = __ LoadField(AccessBuilder::ForHeapNumberValue(), result);
3759 __ Goto(&done_double, result);
3760 }
3761
3762 __ Bind(&done_double);
3763 {
3764 Node* result = AllocateHeapNumberWithValue(done_double.PhiAt(0));
3765 __ Goto(&done, result);
3766 }
3767 }
3768
3769 __ Bind(&done);
3770 return done.PhiAt(0);
3771 }
3772
3773 Node* EffectControlLinearizer::BuildReverseBytes(ExternalArrayType type,
3774 Node* value) {
3775 switch (type) {
3776 case kExternalInt8Array:
3777 case kExternalUint8Array:
3778 case kExternalUint8ClampedArray:
3779 return value;
3780
3781 case kExternalInt16Array: {
3782 Node* result = __ Word32ReverseBytes(value);
3783 result = __ Word32Sar(result, __ Int32Constant(16));
3784 return result;
3785 }
3786
3787 case kExternalUint16Array: {
3788 Node* result = __ Word32ReverseBytes(value);
3789 result = __ Word32Shr(result, __ Int32Constant(16));
3790 return result;
3791 }
3792
3793 case kExternalInt32Array: // Fall through.
3794 case kExternalUint32Array:
3795 return __ Word32ReverseBytes(value);
3796
3797 case kExternalFloat32Array: {
3798 Node* result = __ BitcastFloat32ToInt32(value);
3799 result = __ Word32ReverseBytes(result);
3800 result = __ BitcastInt32ToFloat32(result);
3801 return result;
3802 }
3803
3804 case kExternalFloat64Array: {
3805 if (machine()->Is64()) {
3806 Node* result = __ BitcastFloat64ToInt64(value);
3807 result = __ Word64ReverseBytes(result);
3808 result = __ BitcastInt64ToFloat64(result);
3809 return result;
3810 } else {
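        // There is no 64-bit byte reversal on 32-bit targets, so reverse each
        // 32-bit half individually and swap the halves when reassembling.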
3811 Node* lo = __ Word32ReverseBytes(__ Float64ExtractLowWord32(value));
3812 Node* hi = __ Word32ReverseBytes(__ Float64ExtractHighWord32(value));
3813 Node* result = __ Float64Constant(0.0);
3814 result = __ Float64InsertLowWord32(result, hi);
3815 result = __ Float64InsertHighWord32(result, lo);
3816 return result;
3817 }
3818 }
3819
3820 case kExternalBigInt64Array:
3821 case kExternalBigUint64Array:
3822 UNREACHABLE();
3823 }
3824 }
3825
3826 Node* EffectControlLinearizer::LowerLoadDataViewElement(Node* node) {
3827 ExternalArrayType element_type = ExternalArrayTypeOf(node->op());
3828 Node* buffer = node->InputAt(0);
3829 Node* storage = node->InputAt(1);
3830 Node* index = node->InputAt(2);
3831 Node* is_little_endian = node->InputAt(3);
3832
3833 // On 64-bit platforms, we need to feed a Word64 index to the Load and
3834 // Store operators.
3835 if (machine()->Is64()) {
3836 index = __ ChangeUint32ToUint64(index);
3837 }
3838
3839 // We need to keep the {buffer} alive so that the GC will not release the
3840 // ArrayBuffer (if there's any) as long as we are still operating on it.
3841 __ Retain(buffer);
3842
3843 MachineType const machine_type =
3844 AccessBuilder::ForTypedArrayElement(element_type, true).machine_type;
3845
3846 Node* value = __ LoadUnaligned(machine_type, storage, index);
3847 auto big_endian = __ MakeLabel();
3848 auto done = __ MakeLabel(machine_type.representation());
3849
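  // {value} was loaded in the target's native byte order; the branches below
  // byte-swap it only when the requested endianness ({is_little_endian})
  // differs from the target's endianness.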
3850 __ GotoIfNot(is_little_endian, &big_endian);
3851 { // Little-endian load.
3852 #if V8_TARGET_LITTLE_ENDIAN
3853 __ Goto(&done, value);
3854 #else
3855 __ Goto(&done, BuildReverseBytes(element_type, value));
3856 #endif // V8_TARGET_LITTLE_ENDIAN
3857 }
3858
3859 __ Bind(&big_endian);
3860 { // Big-endian load.
3861 #if V8_TARGET_LITTLE_ENDIAN
3862 __ Goto(&done, BuildReverseBytes(element_type, value));
3863 #else
3864 __ Goto(&done, value);
3865 #endif // V8_TARGET_LITTLE_ENDIAN
3866 }
3867
3868   // We're done, return the loaded (and possibly byte-swapped) value.
3869 __ Bind(&done);
3870 return done.PhiAt(0);
3871 }
3872
3873 void EffectControlLinearizer::LowerStoreDataViewElement(Node* node) {
3874 ExternalArrayType element_type = ExternalArrayTypeOf(node->op());
3875 Node* buffer = node->InputAt(0);
3876 Node* storage = node->InputAt(1);
3877 Node* index = node->InputAt(2);
3878 Node* value = node->InputAt(3);
3879 Node* is_little_endian = node->InputAt(4);
3880
3881 // On 64-bit platforms, we need to feed a Word64 index to the Load and
3882 // Store operators.
3883 if (machine()->Is64()) {
3884 index = __ ChangeUint32ToUint64(index);
3885 }
3886
3887 // We need to keep the {buffer} alive so that the GC will not release the
3888 // ArrayBuffer (if there's any) as long as we are still operating on it.
3889 __ Retain(buffer);
3890
3891 MachineType const machine_type =
3892 AccessBuilder::ForTypedArrayElement(element_type, true).machine_type;
3893
3894 auto big_endian = __ MakeLabel();
3895 auto done = __ MakeLabel(machine_type.representation());
3896
3897 __ GotoIfNot(is_little_endian, &big_endian);
3898 { // Little-endian store.
3899 #if V8_TARGET_LITTLE_ENDIAN
3900 __ Goto(&done, value);
3901 #else
3902 __ Goto(&done, BuildReverseBytes(element_type, value));
3903 #endif // V8_TARGET_LITTLE_ENDIAN
3904 }
3905
3906 __ Bind(&big_endian);
3907 { // Big-endian store.
3908 #if V8_TARGET_LITTLE_ENDIAN
3909 __ Goto(&done, BuildReverseBytes(element_type, value));
3910 #else
3911 __ Goto(&done, value);
3912 #endif // V8_TARGET_LITTLE_ENDIAN
3913 }
3914
3915 __ Bind(&done);
3916 __ StoreUnaligned(machine_type.representation(), storage, index,
3917 done.PhiAt(0));
3918 }
3919
3920 Node* EffectControlLinearizer::LowerLoadTypedElement(Node* node) {
3921 ExternalArrayType array_type = ExternalArrayTypeOf(node->op());
3922 Node* buffer = node->InputAt(0);
3923 Node* base = node->InputAt(1);
3924 Node* external = node->InputAt(2);
3925 Node* index = node->InputAt(3);
3926
3927 // We need to keep the {buffer} alive so that the GC will not release the
3928 // ArrayBuffer (if there's any) as long as we are still operating on it.
3929 __ Retain(buffer);
3930
3931 // Compute the effective storage pointer, handling the case where the
3932 // {external} pointer is the effective storage pointer (i.e. the {base}
3933 // is Smi zero).
3934 Node* storage = IntPtrMatcher(base).Is(0)
3935 ? external
3936 : __ UnsafePointerAdd(base, external);
3937
3938 // Perform the actual typed element access.
3939 return __ LoadElement(AccessBuilder::ForTypedArrayElement(
3940 array_type, true, LoadSensitivity::kCritical),
3941 storage, index);
3942 }
3943
3944 void EffectControlLinearizer::LowerStoreTypedElement(Node* node) {
3945 ExternalArrayType array_type = ExternalArrayTypeOf(node->op());
3946 Node* buffer = node->InputAt(0);
3947 Node* base = node->InputAt(1);
3948 Node* external = node->InputAt(2);
3949 Node* index = node->InputAt(3);
3950 Node* value = node->InputAt(4);
3951
3952 // We need to keep the {buffer} alive so that the GC will not release the
3953 // ArrayBuffer (if there's any) as long as we are still operating on it.
3954 __ Retain(buffer);
3955
3956 // Compute the effective storage pointer, handling the case where the
3957 // {external} pointer is the effective storage pointer (i.e. the {base}
3958 // is Smi zero).
3959 Node* storage = IntPtrMatcher(base).Is(0)
3960 ? external
3961 : __ UnsafePointerAdd(base, external);
3962
3963 // Perform the actual typed element access.
3964 __ StoreElement(AccessBuilder::ForTypedArrayElement(array_type, true),
3965 storage, index, value);
3966 }
3967
3968 void EffectControlLinearizer::TransitionElementsTo(Node* node, Node* array,
3969 ElementsKind from,
3970 ElementsKind to) {
3971 DCHECK(IsMoreGeneralElementsKindTransition(from, to));
3972 DCHECK(to == HOLEY_ELEMENTS || to == HOLEY_DOUBLE_ELEMENTS);
3973
3974 Handle<Map> target(to == HOLEY_ELEMENTS ? FastMapParameterOf(node->op())
3975 : DoubleMapParameterOf(node->op()));
3976 Node* target_map = __ HeapConstant(target);
3977
3978 if (IsSimpleMapChangeTransition(from, to)) {
3979 __ StoreField(AccessBuilder::ForMap(), array, target_map);
3980 } else {
3981 // Instance migration, call out to the runtime for {array}.
3982 Operator::Properties properties = Operator::kNoDeopt | Operator::kNoThrow;
3983 Runtime::FunctionId id = Runtime::kTransitionElementsKind;
3984 auto call_descriptor = Linkage::GetRuntimeCallDescriptor(
3985 graph()->zone(), id, 2, properties, CallDescriptor::kNoFlags);
3986 __ Call(call_descriptor, __ CEntryStubConstant(1), array, target_map,
3987 __ ExternalConstant(ExternalReference::Create(id)),
3988 __ Int32Constant(2), __ NoContextConstant());
3989 }
3990 }
3991
3992 Node* EffectControlLinearizer::IsElementsKindGreaterThan(
3993 Node* kind, ElementsKind reference_kind) {
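  // This relies on the numeric ordering of the ElementsKind enum, e.g.
  // HOLEY_SMI_ELEMENTS < HOLEY_ELEMENTS < HOLEY_DOUBLE_ELEMENTS.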
3994 Node* ref_kind = __ Int32Constant(reference_kind);
3995 Node* ret = __ Int32LessThan(ref_kind, kind);
3996 return ret;
3997 }
3998
3999 void EffectControlLinearizer::LowerTransitionAndStoreElement(Node* node) {
4000 Node* array = node->InputAt(0);
4001 Node* index = node->InputAt(1);
4002 Node* value = node->InputAt(2);
4003
4004 // Possibly transition array based on input and store.
4005 //
4006 // -- TRANSITION PHASE -----------------
4007 // kind = ElementsKind(array)
4008 // if value is not smi {
4009 // if kind == HOLEY_SMI_ELEMENTS {
4010 // if value is heap number {
4011 // Transition array to HOLEY_DOUBLE_ELEMENTS
4012 // kind = HOLEY_DOUBLE_ELEMENTS
4013 // } else {
4014 // Transition array to HOLEY_ELEMENTS
4015 // kind = HOLEY_ELEMENTS
4016 // }
4017 // } else if kind == HOLEY_DOUBLE_ELEMENTS {
4018 // if value is not heap number {
4019 // Transition array to HOLEY_ELEMENTS
4020 // kind = HOLEY_ELEMENTS
4021 // }
4022 // }
4023 // }
4024 //
4025 // -- STORE PHASE ----------------------
4026 // [make sure {kind} is up-to-date]
4027 // if kind == HOLEY_DOUBLE_ELEMENTS {
4028 // if value is smi {
4029 // float_value = convert smi to float
4030 // Store array[index] = float_value
4031 // } else {
4032 // float_value = value
4033 // Store array[index] = float_value
4034 // }
4035 // } else {
4036 // // kind is HOLEY_SMI_ELEMENTS or HOLEY_ELEMENTS
4037 // Store array[index] = value
4038 // }
4039 //
4040 Node* map = __ LoadField(AccessBuilder::ForMap(), array);
4041 Node* kind;
4042 {
4043 Node* bit_field2 = __ LoadField(AccessBuilder::ForMapBitField2(), map);
4044 Node* mask = __ Int32Constant(Map::ElementsKindBits::kMask);
4045 Node* andit = __ Word32And(bit_field2, mask);
4046 Node* shift = __ Int32Constant(Map::ElementsKindBits::kShift);
4047 kind = __ Word32Shr(andit, shift);
4048 }
4049
4050 auto do_store = __ MakeLabel(MachineRepresentation::kWord32);
4051 // We can store a smi anywhere.
4052 __ GotoIf(ObjectIsSmi(value), &do_store, kind);
4053
4054 // {value} is a HeapObject.
4055 auto transition_smi_array = __ MakeDeferredLabel();
4056 auto transition_double_to_fast = __ MakeDeferredLabel();
4057 {
4058 __ GotoIfNot(IsElementsKindGreaterThan(kind, HOLEY_SMI_ELEMENTS),
4059 &transition_smi_array);
4060 __ GotoIfNot(IsElementsKindGreaterThan(kind, HOLEY_ELEMENTS), &do_store,
4061 kind);
4062
4063     // We have a double elements kind. Only a HeapNumber can be stored
4064     // without triggering a transition.
4065 Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
4066 Node* heap_number_map = __ HeapNumberMapConstant();
4067 Node* check = __ WordEqual(value_map, heap_number_map);
4068 __ GotoIfNot(check, &transition_double_to_fast);
4069 __ Goto(&do_store, kind);
4070 }
4071
4072 __ Bind(&transition_smi_array); // deferred code.
4073 {
4074 // Transition {array} from HOLEY_SMI_ELEMENTS to HOLEY_DOUBLE_ELEMENTS or
4075 // to HOLEY_ELEMENTS.
4076 auto if_value_not_heap_number = __ MakeLabel();
4077 Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
4078 Node* heap_number_map = __ HeapNumberMapConstant();
4079 Node* check = __ WordEqual(value_map, heap_number_map);
4080 __ GotoIfNot(check, &if_value_not_heap_number);
4081 {
4082 // {value} is a HeapNumber.
4083 TransitionElementsTo(node, array, HOLEY_SMI_ELEMENTS,
4084 HOLEY_DOUBLE_ELEMENTS);
4085 __ Goto(&do_store, __ Int32Constant(HOLEY_DOUBLE_ELEMENTS));
4086 }
4087 __ Bind(&if_value_not_heap_number);
4088 {
4089 TransitionElementsTo(node, array, HOLEY_SMI_ELEMENTS, HOLEY_ELEMENTS);
4090 __ Goto(&do_store, __ Int32Constant(HOLEY_ELEMENTS));
4091 }
4092 }
4093
4094 __ Bind(&transition_double_to_fast); // deferred code.
4095 {
4096 TransitionElementsTo(node, array, HOLEY_DOUBLE_ELEMENTS, HOLEY_ELEMENTS);
4097 __ Goto(&do_store, __ Int32Constant(HOLEY_ELEMENTS));
4098 }
4099
4100 // Make sure kind is up-to-date.
4101 __ Bind(&do_store);
4102 kind = do_store.PhiAt(0);
4103
4104 Node* elements = __ LoadField(AccessBuilder::ForJSObjectElements(), array);
4105 auto if_kind_is_double = __ MakeLabel();
4106 auto done = __ MakeLabel();
4107 __ GotoIf(IsElementsKindGreaterThan(kind, HOLEY_ELEMENTS),
4108 &if_kind_is_double);
4109 {
4110 // Our ElementsKind is HOLEY_SMI_ELEMENTS or HOLEY_ELEMENTS.
4111 __ StoreElement(AccessBuilder::ForFixedArrayElement(HOLEY_ELEMENTS),
4112 elements, index, value);
4113 __ Goto(&done);
4114 }
4115 __ Bind(&if_kind_is_double);
4116 {
4117 // Our ElementsKind is HOLEY_DOUBLE_ELEMENTS.
4118 auto do_double_store = __ MakeLabel();
4119 __ GotoIfNot(ObjectIsSmi(value), &do_double_store);
4120 {
4121 Node* int_value = ChangeSmiToInt32(value);
4122 Node* float_value = __ ChangeInt32ToFloat64(int_value);
4123 __ StoreElement(AccessBuilder::ForFixedDoubleArrayElement(), elements,
4124 index, float_value);
4125 __ Goto(&done);
4126 }
4127 __ Bind(&do_double_store);
4128 {
4129 Node* float_value =
4130 __ LoadField(AccessBuilder::ForHeapNumberValue(), value);
4131 __ StoreElement(AccessBuilder::ForFixedDoubleArrayElement(), elements,
4132 index, float_value);
4133 __ Goto(&done);
4134 }
4135 }
4136
4137 __ Bind(&done);
4138 }
4139
4140 void EffectControlLinearizer::LowerTransitionAndStoreNumberElement(Node* node) {
4141 Node* array = node->InputAt(0);
4142 Node* index = node->InputAt(1);
4143 Node* value = node->InputAt(2); // This is a Float64, not tagged.
4144
4145 // Possibly transition array based on input and store.
4146 //
4147 // -- TRANSITION PHASE -----------------
4148 // kind = ElementsKind(array)
4149 // if kind == HOLEY_SMI_ELEMENTS {
4150 // Transition array to HOLEY_DOUBLE_ELEMENTS
4151 // } else if kind != HOLEY_DOUBLE_ELEMENTS {
4152   //   This is UNREACHABLE; execute a debug break.
4153 // }
4154 //
4155 // -- STORE PHASE ----------------------
4156 // Store array[index] = value (it's a float)
4157 //
4158 Node* map = __ LoadField(AccessBuilder::ForMap(), array);
4159 Node* kind;
4160 {
4161 Node* bit_field2 = __ LoadField(AccessBuilder::ForMapBitField2(), map);
4162 Node* mask = __ Int32Constant(Map::ElementsKindBits::kMask);
4163 Node* andit = __ Word32And(bit_field2, mask);
4164 Node* shift = __ Int32Constant(Map::ElementsKindBits::kShift);
4165 kind = __ Word32Shr(andit, shift);
4166 }
4167
4168 auto do_store = __ MakeLabel();
4169
4170 // {value} is a float64.
4171 auto transition_smi_array = __ MakeDeferredLabel();
4172 {
4173 __ GotoIfNot(IsElementsKindGreaterThan(kind, HOLEY_SMI_ELEMENTS),
4174 &transition_smi_array);
4175     // We expect that our input array starts at HOLEY_SMI_ELEMENTS and
4176     // climbs the lattice up to HOLEY_DOUBLE_ELEMENTS. Force a debug break
4177     // if this assumption is broken. Note that loop peeling can also break
4178     // this assumption.
4179 __ GotoIf(__ Word32Equal(kind, __ Int32Constant(HOLEY_DOUBLE_ELEMENTS)),
4180 &do_store);
4181 // TODO(turbofan): It would be good to have an "Unreachable()" node type.
4182 __ DebugBreak();
4183 __ Goto(&do_store);
4184 }
4185
4186 __ Bind(&transition_smi_array); // deferred code.
4187 {
4188 // Transition {array} from HOLEY_SMI_ELEMENTS to HOLEY_DOUBLE_ELEMENTS.
4189 TransitionElementsTo(node, array, HOLEY_SMI_ELEMENTS,
4190 HOLEY_DOUBLE_ELEMENTS);
4191 __ Goto(&do_store);
4192 }
4193
4194 __ Bind(&do_store);
4195
4196 Node* elements = __ LoadField(AccessBuilder::ForJSObjectElements(), array);
4197 __ StoreElement(AccessBuilder::ForFixedDoubleArrayElement(), elements, index,
4198 value);
4199 }
4200
4201 void EffectControlLinearizer::LowerTransitionAndStoreNonNumberElement(
4202 Node* node) {
4203 Node* array = node->InputAt(0);
4204 Node* index = node->InputAt(1);
4205 Node* value = node->InputAt(2);
4206
4207 // Possibly transition array based on input and store.
4208 //
4209 // -- TRANSITION PHASE -----------------
4210 // kind = ElementsKind(array)
4211 // if kind == HOLEY_SMI_ELEMENTS {
4212 // Transition array to HOLEY_ELEMENTS
4213 // } else if kind == HOLEY_DOUBLE_ELEMENTS {
4214 // Transition array to HOLEY_ELEMENTS
4215 // }
4216 //
4217 // -- STORE PHASE ----------------------
4218 // // kind is HOLEY_ELEMENTS
4219 // Store array[index] = value
4220 //
4221 Node* map = __ LoadField(AccessBuilder::ForMap(), array);
4222 Node* kind;
4223 {
4224 Node* bit_field2 = __ LoadField(AccessBuilder::ForMapBitField2(), map);
4225 Node* mask = __ Int32Constant(Map::ElementsKindBits::kMask);
4226 Node* andit = __ Word32And(bit_field2, mask);
4227 Node* shift = __ Int32Constant(Map::ElementsKindBits::kShift);
4228 kind = __ Word32Shr(andit, shift);
4229 }
4230
4231 auto do_store = __ MakeLabel();
4232
4233 auto transition_smi_array = __ MakeDeferredLabel();
4234 auto transition_double_to_fast = __ MakeDeferredLabel();
4235 {
4236 __ GotoIfNot(IsElementsKindGreaterThan(kind, HOLEY_SMI_ELEMENTS),
4237 &transition_smi_array);
4238 __ GotoIf(IsElementsKindGreaterThan(kind, HOLEY_ELEMENTS),
4239 &transition_double_to_fast);
4240 __ Goto(&do_store);
4241 }
4242
4243 __ Bind(&transition_smi_array); // deferred code.
4244 {
4245 // Transition {array} from HOLEY_SMI_ELEMENTS to HOLEY_ELEMENTS.
4246 TransitionElementsTo(node, array, HOLEY_SMI_ELEMENTS, HOLEY_ELEMENTS);
4247 __ Goto(&do_store);
4248 }
4249
4250 __ Bind(&transition_double_to_fast); // deferred code.
4251 {
4252 TransitionElementsTo(node, array, HOLEY_DOUBLE_ELEMENTS, HOLEY_ELEMENTS);
4253 __ Goto(&do_store);
4254 }
4255
4256 __ Bind(&do_store);
4257
4258 Node* elements = __ LoadField(AccessBuilder::ForJSObjectElements(), array);
4259 // Our ElementsKind is HOLEY_ELEMENTS.
4260 ElementAccess access = AccessBuilder::ForFixedArrayElement(HOLEY_ELEMENTS);
4261 Type value_type = ValueTypeParameterOf(node->op());
4262 if (value_type.Is(Type::BooleanOrNullOrUndefined())) {
4263 access.type = value_type;
4264 access.write_barrier_kind = kNoWriteBarrier;
4265 }
4266 __ StoreElement(access, elements, index, value);
4267 }
4268
4269 void EffectControlLinearizer::LowerStoreSignedSmallElement(Node* node) {
4270 Node* array = node->InputAt(0);
4271 Node* index = node->InputAt(1);
4272 Node* value = node->InputAt(2); // int32
4273
4274 // Store a signed small in an output array.
4275 //
4276 // kind = ElementsKind(array)
4277 //
4278 // -- STORE PHASE ----------------------
4279 // if kind == HOLEY_DOUBLE_ELEMENTS {
4280 // float_value = convert int32 to float
4281 // Store array[index] = float_value
4282 // } else {
4283 // // kind is HOLEY_SMI_ELEMENTS or HOLEY_ELEMENTS
4284 // smi_value = convert int32 to smi
4285 // Store array[index] = smi_value
4286 // }
4287 //
4288 Node* map = __ LoadField(AccessBuilder::ForMap(), array);
4289 Node* kind;
4290 {
4291 Node* bit_field2 = __ LoadField(AccessBuilder::ForMapBitField2(), map);
4292 Node* mask = __ Int32Constant(Map::ElementsKindBits::kMask);
4293 Node* andit = __ Word32And(bit_field2, mask);
4294 Node* shift = __ Int32Constant(Map::ElementsKindBits::kShift);
4295 kind = __ Word32Shr(andit, shift);
4296 }
4297
4298 Node* elements = __ LoadField(AccessBuilder::ForJSObjectElements(), array);
4299 auto if_kind_is_double = __ MakeLabel();
4300 auto done = __ MakeLabel();
4301 __ GotoIf(IsElementsKindGreaterThan(kind, HOLEY_ELEMENTS),
4302 &if_kind_is_double);
4303 {
4304 // Our ElementsKind is HOLEY_SMI_ELEMENTS or HOLEY_ELEMENTS.
4305 // In this case, we know our value is a signed small, and we can optimize
4306 // the ElementAccess information.
4307 ElementAccess access = AccessBuilder::ForFixedArrayElement();
4308 access.type = Type::SignedSmall();
4309 access.machine_type = MachineType::TaggedSigned();
4310 access.write_barrier_kind = kNoWriteBarrier;
4311 Node* smi_value = ChangeInt32ToSmi(value);
4312 __ StoreElement(access, elements, index, smi_value);
4313 __ Goto(&done);
4314 }
4315 __ Bind(&if_kind_is_double);
4316 {
4317 // Our ElementsKind is HOLEY_DOUBLE_ELEMENTS.
4318 Node* float_value = __ ChangeInt32ToFloat64(value);
4319 __ StoreElement(AccessBuilder::ForFixedDoubleArrayElement(), elements,
4320 index, float_value);
4321 __ Goto(&done);
4322 }
4323
4324 __ Bind(&done);
4325 }
4326
4327 void EffectControlLinearizer::LowerRuntimeAbort(Node* node) {
4328 AbortReason reason = AbortReasonOf(node->op());
4329 Operator::Properties properties = Operator::kNoDeopt | Operator::kNoThrow;
4330 Runtime::FunctionId id = Runtime::kAbort;
4331 auto call_descriptor = Linkage::GetRuntimeCallDescriptor(
4332 graph()->zone(), id, 1, properties, CallDescriptor::kNoFlags);
4333 __ Call(call_descriptor, __ CEntryStubConstant(1),
4334 jsgraph()->SmiConstant(static_cast<int>(reason)),
4335 __ ExternalConstant(ExternalReference::Create(id)),
4336 __ Int32Constant(1), __ NoContextConstant());
4337 }
4338
4339 Node* EffectControlLinearizer::LowerConvertReceiver(Node* node) {
4340 ConvertReceiverMode const mode = ConvertReceiverModeOf(node->op());
4341 Node* value = node->InputAt(0);
4342 Node* global_proxy = node->InputAt(1);
4343
4344 switch (mode) {
4345 case ConvertReceiverMode::kNullOrUndefined: {
4346 return global_proxy;
4347 }
4348 case ConvertReceiverMode::kNotNullOrUndefined: {
4349 auto convert_to_object = __ MakeDeferredLabel();
4350 auto done_convert = __ MakeLabel(MachineRepresentation::kTagged);
4351
4352 // Check if {value} is already a JSReceiver.
4353 __ GotoIf(ObjectIsSmi(value), &convert_to_object);
4354 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
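      // All JSReceiver instance types sit at the end of the instance type
      // range (see the STATIC_ASSERT above), so a single unsigned comparison
      // against FIRST_JS_RECEIVER_TYPE suffices to detect primitives.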
4355 Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
4356 Node* value_instance_type =
4357 __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
4358 Node* check = __ Uint32LessThan(
4359 value_instance_type, __ Uint32Constant(FIRST_JS_RECEIVER_TYPE));
4360 __ GotoIf(check, &convert_to_object);
4361 __ Goto(&done_convert, value);
4362
4363 // Wrap the primitive {value} into a JSValue.
4364 __ Bind(&convert_to_object);
4365 Operator::Properties properties = Operator::kEliminatable;
4366 Callable callable = Builtins::CallableFor(isolate(), Builtins::kToObject);
4367 CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
4368 auto call_descriptor = Linkage::GetStubCallDescriptor(
4369 graph()->zone(), callable.descriptor(), 0, flags, properties);
4370 Node* native_context = __ LoadField(
4371 AccessBuilder::ForJSGlobalProxyNativeContext(), global_proxy);
4372 Node* result = __ Call(call_descriptor, __ HeapConstant(callable.code()),
4373 value, native_context);
4374 __ Goto(&done_convert, result);
4375
4376 __ Bind(&done_convert);
4377 return done_convert.PhiAt(0);
4378 }
4379 case ConvertReceiverMode::kAny: {
4380 auto convert_to_object = __ MakeDeferredLabel();
4381 auto convert_global_proxy = __ MakeDeferredLabel();
4382 auto done_convert = __ MakeLabel(MachineRepresentation::kTagged);
4383
4384 // Check if {value} is already a JSReceiver, or null/undefined.
4385 __ GotoIf(ObjectIsSmi(value), &convert_to_object);
4386 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
4387 Node* value_map = __ LoadField(AccessBuilder::ForMap(), value);
4388 Node* value_instance_type =
4389 __ LoadField(AccessBuilder::ForMapInstanceType(), value_map);
4390 Node* check = __ Uint32LessThan(
4391 value_instance_type, __ Uint32Constant(FIRST_JS_RECEIVER_TYPE));
4392 __ GotoIf(check, &convert_to_object);
4393 __ Goto(&done_convert, value);
4394
4395 // Wrap the primitive {value} into a JSValue.
4396 __ Bind(&convert_to_object);
4397 __ GotoIf(__ WordEqual(value, __ UndefinedConstant()),
4398 &convert_global_proxy);
4399 __ GotoIf(__ WordEqual(value, __ NullConstant()), &convert_global_proxy);
4400 Operator::Properties properties = Operator::kEliminatable;
4401 Callable callable = Builtins::CallableFor(isolate(), Builtins::kToObject);
4402 CallDescriptor::Flags flags = CallDescriptor::kNoFlags;
4403 auto call_descriptor = Linkage::GetStubCallDescriptor(
4404 graph()->zone(), callable.descriptor(), 0, flags, properties);
4405 Node* native_context = __ LoadField(
4406 AccessBuilder::ForJSGlobalProxyNativeContext(), global_proxy);
4407 Node* result = __ Call(call_descriptor, __ HeapConstant(callable.code()),
4408 value, native_context);
4409 __ Goto(&done_convert, result);
4410
4411 // Replace the {value} with the {global_proxy}.
4412 __ Bind(&convert_global_proxy);
4413 __ Goto(&done_convert, global_proxy);
4414
4415 __ Bind(&done_convert);
4416 return done_convert.PhiAt(0);
4417 }
4418 }
4419
4420 UNREACHABLE();
4421 return nullptr;
4422 }
4423
4424 Maybe<Node*> EffectControlLinearizer::LowerFloat64RoundUp(Node* node) {
4425 // Nothing to be done if a fast hardware instruction is available.
4426 if (machine()->Float64RoundUp().IsSupported()) {
4427 return Nothing<Node*>();
4428 }
4429
4430 Node* const input = node->InputAt(0);
4431
4432 // General case for ceil.
4433 //
4434 // if 0.0 < input then
4435 // if 2^52 <= input then
4436 // input
4437 // else
4438 // let temp1 = (2^52 + input) - 2^52 in
4439 // if temp1 < input then
4440 // temp1 + 1
4441 // else
4442 // temp1
4443 // else
4444 // if input == 0 then
4445 // input
4446 // else
4447 // if input <= -2^52 then
4448 // input
4449 // else
4450 // let temp1 = -0 - input in
4451 // let temp2 = (2^52 + temp1) - 2^52 in
4452 // let temp3 = (if temp1 < temp2 then temp2 - 1 else temp2) in
4453 // -0 - temp3
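  //
  // Illustrative example: for input 3.4, temp1 = (2^52 + 3.4) - 2^52 = 3.0,
  // because doubles of that magnitude are spaced 1.0 apart; since
  // temp1 < input, the result is temp1 + 1 = 4.0 == ceil(3.4).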
4454
4455 auto if_not_positive = __ MakeDeferredLabel();
4456 auto if_greater_than_two_52 = __ MakeDeferredLabel();
4457 auto if_less_than_minus_two_52 = __ MakeDeferredLabel();
4458 auto if_zero = __ MakeDeferredLabel();
4459 auto done_temp3 = __ MakeLabel(MachineRepresentation::kFloat64);
4460 auto done = __ MakeLabel(MachineRepresentation::kFloat64);
4461
4462 Node* const zero = __ Float64Constant(0.0);
4463 Node* const two_52 = __ Float64Constant(4503599627370496.0E0);
4464 Node* const one = __ Float64Constant(1.0);
4465
4466 Node* check0 = __ Float64LessThan(zero, input);
4467 __ GotoIfNot(check0, &if_not_positive);
4468 {
4469 Node* check1 = __ Float64LessThanOrEqual(two_52, input);
4470 __ GotoIf(check1, &if_greater_than_two_52);
4471 {
4472 Node* temp1 = __ Float64Sub(__ Float64Add(two_52, input), two_52);
4473 __ GotoIfNot(__ Float64LessThan(temp1, input), &done, temp1);
4474 __ Goto(&done, __ Float64Add(temp1, one));
4475 }
4476
4477 __ Bind(&if_greater_than_two_52);
4478 __ Goto(&done, input);
4479 }
4480
4481 __ Bind(&if_not_positive);
4482 {
4483 Node* check1 = __ Float64Equal(input, zero);
4484 __ GotoIf(check1, &if_zero);
4485
4486 Node* const minus_two_52 = __ Float64Constant(-4503599627370496.0E0);
4487 Node* check2 = __ Float64LessThanOrEqual(input, minus_two_52);
4488 __ GotoIf(check2, &if_less_than_minus_two_52);
4489
4490 {
4491 Node* const minus_zero = __ Float64Constant(-0.0);
4492 Node* temp1 = __ Float64Sub(minus_zero, input);
4493 Node* temp2 = __ Float64Sub(__ Float64Add(two_52, temp1), two_52);
4494 Node* check3 = __ Float64LessThan(temp1, temp2);
4495 __ GotoIfNot(check3, &done_temp3, temp2);
4496 __ Goto(&done_temp3, __ Float64Sub(temp2, one));
4497
4498 __ Bind(&done_temp3);
4499 Node* temp3 = done_temp3.PhiAt(0);
4500 __ Goto(&done, __ Float64Sub(minus_zero, temp3));
4501 }
4502 __ Bind(&if_less_than_minus_two_52);
4503 __ Goto(&done, input);
4504
4505 __ Bind(&if_zero);
4506 __ Goto(&done, input);
4507 }
4508 __ Bind(&done);
4509 return Just(done.PhiAt(0));
4510 }
4511
4512 Node* EffectControlLinearizer::BuildFloat64RoundDown(Node* value) {
4513 if (machine()->Float64RoundDown().IsSupported()) {
4514 return __ Float64RoundDown(value);
4515 }
4516
4517 Node* const input = value;
4518
4519 // General case for floor.
4520 //
4521 // if 0.0 < input then
4522 //   if 2^52 <= input then
4523 //     input
4524 //   else
4525 //     let temp1 = (2^52 + input) - 2^52 in
4526 //     if input < temp1 then
4527 //       temp1 - 1
4528 //     else
4529 //       temp1
4530 // else
4531 //   if input == 0 then
4532 //     input
4533 //   else
4534 //     if input <= -2^52 then
4535 //       input
4536 //     else
4537 //       let temp1 = -0 - input in
4538 //       let temp2 = (2^52 + temp1) - 2^52 in
4539 //       if temp2 < temp1 then
4540 //         -1 - temp2
4541 //       else
4542 //         -0 - temp2
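//
// Worked example for input = -3.2: temp1 = -0 - input = 3.2,
// temp2 = (2^52 + 3.2) - 2^52 = 3.0, and since temp2 < temp1 the result
// is -1 - temp2 = -4.0, i.e. floor(-3.2).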
4543
4544 auto if_not_positive = __ MakeDeferredLabel();
4545 auto if_greater_than_two_52 = __ MakeDeferredLabel();
4546 auto if_less_than_minus_two_52 = __ MakeDeferredLabel();
4547 auto if_temp2_lt_temp1 = __ MakeLabel();
4548 auto if_zero = __ MakeDeferredLabel();
4549 auto done = __ MakeLabel(MachineRepresentation::kFloat64);
4550
4551 Node* const zero = __ Float64Constant(0.0);
4552 Node* const two_52 = __ Float64Constant(4503599627370496.0E0);
4553
4554 Node* check0 = __ Float64LessThan(zero, input);
4555 __ GotoIfNot(check0, &if_not_positive);
4556 {
4557 Node* check1 = __ Float64LessThanOrEqual(two_52, input);
4558 __ GotoIf(check1, &if_greater_than_two_52);
4559 {
4560 Node* const one = __ Float64Constant(1.0);
4561 Node* temp1 = __ Float64Sub(__ Float64Add(two_52, input), two_52);
4562 __ GotoIfNot(__ Float64LessThan(input, temp1), &done, temp1);
4563 __ Goto(&done, __ Float64Sub(temp1, one));
4564 }
4565
4566 __ Bind(&if_greater_than_two_52);
4567 __ Goto(&done, input);
4568 }
4569
4570 __ Bind(&if_not_positive);
4571 {
4572 Node* check1 = __ Float64Equal(input, zero);
4573 __ GotoIf(check1, &if_zero);
4574
4575 Node* const minus_two_52 = __ Float64Constant(-4503599627370496.0E0);
4576 Node* check2 = __ Float64LessThanOrEqual(input, minus_two_52);
4577 __ GotoIf(check2, &if_less_than_minus_two_52);
4578
4579 {
4580 Node* const minus_zero = __ Float64Constant(-0.0);
4581 Node* temp1 = __ Float64Sub(minus_zero, input);
4582 Node* temp2 = __ Float64Sub(__ Float64Add(two_52, temp1), two_52);
4583 Node* check3 = __ Float64LessThan(temp2, temp1);
4584 __ GotoIf(check3, &if_temp2_lt_temp1);
4585 __ Goto(&done, __ Float64Sub(minus_zero, temp2));
4586
4587 __ Bind(&if_temp2_lt_temp1);
4588 __ Goto(&done, __ Float64Sub(__ Float64Constant(-1.0), temp2));
4589 }
4590 __ Bind(&if_less_than_minus_two_52);
4591 __ Goto(&done, input);
4592
4593 __ Bind(&if_zero);
4594 __ Goto(&done, input);
4595 }
4596 __ Bind(&done);
4597 return done.PhiAt(0);
4598 }
4599
4600 Maybe<Node*> EffectControlLinearizer::LowerFloat64RoundDown(Node* node) {
4601 // Nothing to be done if a fast hardware instruction is available.
4602 if (machine()->Float64RoundDown().IsSupported()) {
4603 return Nothing<Node*>();
4604 }
4605
4606 Node* const input = node->InputAt(0);
4607 return Just(BuildFloat64RoundDown(input));
4608 }
4609
4610 Maybe<Node*> EffectControlLinearizer::LowerFloat64RoundTiesEven(Node* node) {
4611 // Nothing to be done if a fast hardware instruction is available.
4612 if (machine()->Float64RoundTiesEven().IsSupported()) {
4613 return Nothing<Node*>();
4614 }
4615
4616 Node* const input = node->InputAt(0);
4617
4618 // General case for rounding ties to even.
4619 //
4620 // let value = floor(input) in
4621 // let temp1 = input - value in
4622 // if temp1 < 0.5 then
4623 //   value
4624 // else if 0.5 < temp1 then
4625 //   value + 1.0
4626 // else
4627 //   let temp2 = value % 2.0 in
4628 //   if temp2 == 0.0 then
4629 //     value
4630 //   else
4631 //     value + 1.0
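//
// Worked example: for input = 2.5, value = 2.0 and temp1 = 0.5; the tie is
// broken by value % 2.0 == 0.0, so the result is 2.0. For input = 3.5 the
// remainder is 1.0, so the result is value + 1.0 = 4.0.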
4632
4633 auto if_is_half = __ MakeLabel();
4634 auto done = __ MakeLabel(MachineRepresentation::kFloat64);
4635
4636 Node* value = BuildFloat64RoundDown(input);
4637 Node* temp1 = __ Float64Sub(input, value);
4638
4639 Node* const half = __ Float64Constant(0.5);
4640 Node* check0 = __ Float64LessThan(temp1, half);
4641 __ GotoIf(check0, &done, value);
4642
4643 Node* const one = __ Float64Constant(1.0);
4644 Node* check1 = __ Float64LessThan(half, temp1);
4645 __ GotoIfNot(check1, &if_is_half);
4646 __ Goto(&done, __ Float64Add(value, one));
4647
4648 __ Bind(&if_is_half);
4649 Node* temp2 = __ Float64Mod(value, __ Float64Constant(2.0));
4650 Node* check2 = __ Float64Equal(temp2, __ Float64Constant(0.0));
4651 __ GotoIf(check2, &done, value);
4652 __ Goto(&done, __ Float64Add(value, one));
4653
4654 __ Bind(&done);
4655 return Just(done.PhiAt(0));
4656 }
4657
4658 Node* EffectControlLinearizer::BuildFloat64RoundTruncate(Node* input) {
4659 if (machine()->Float64RoundTruncate().IsSupported()) {
4660 return __ Float64RoundTruncate(input);
4661 }
4662 // General case for trunc.
4663 //
4664 // if 0.0 < input then
4665 //   if 2^52 <= input then
4666 //     input
4667 //   else
4668 //     let temp1 = (2^52 + input) - 2^52 in
4669 //     if input < temp1 then
4670 //       temp1 - 1
4671 //     else
4672 //       temp1
4673 // else
4674 //   if input == 0 then
4675 //     input
4676 //   else
4677 //     if input <= -2^52 then
4678 //       input
4679 //     else
4680 //       let temp1 = -0 - input in
4681 //       let temp2 = (2^52 + temp1) - 2^52 in
4682 //       let temp3 = (if temp1 < temp2 then temp2 - 1 else temp2) in
4683 //       -0 - temp3
4684 //
4685 // Note: We do not use the Diamond helper class here, because it really hurts
4686 // readability with nested diamonds.
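//
// Worked example for input = -3.2: temp1 = 3.2, temp2 = 3.0; temp1 is not
// less than temp2, so temp3 = 3.0 and the result is -0 - 3.0 = -3.0, i.e.
// trunc(-3.2).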
4687
4688 auto if_not_positive = __ MakeDeferredLabel();
4689 auto if_greater_than_two_52 = __ MakeDeferredLabel();
4690 auto if_less_than_minus_two_52 = __ MakeDeferredLabel();
4691 auto if_zero = __ MakeDeferredLabel();
4692 auto done_temp3 = __ MakeLabel(MachineRepresentation::kFloat64);
4693 auto done = __ MakeLabel(MachineRepresentation::kFloat64);
4694
4695 Node* const zero = __ Float64Constant(0.0);
4696 Node* const two_52 = __ Float64Constant(4503599627370496.0E0);
4697 Node* const one = __ Float64Constant(1.0);
4698
4699 Node* check0 = __ Float64LessThan(zero, input);
4700 __ GotoIfNot(check0, &if_not_positive);
4701 {
4702 Node* check1 = __ Float64LessThanOrEqual(two_52, input);
4703 __ GotoIf(check1, &if_greater_than_two_52);
4704 {
4705 Node* temp1 = __ Float64Sub(__ Float64Add(two_52, input), two_52);
4706 __ GotoIfNot(__ Float64LessThan(input, temp1), &done, temp1);
4707 __ Goto(&done, __ Float64Sub(temp1, one));
4708 }
4709
4710 __ Bind(&if_greater_than_two_52);
4711 __ Goto(&done, input);
4712 }
4713
4714 __ Bind(&if_not_positive);
4715 {
4716 Node* check1 = __ Float64Equal(input, zero);
4717 __ GotoIf(check1, &if_zero);
4718
4719 Node* const minus_two_52 = __ Float64Constant(-4503599627370496.0E0);
4720 Node* check2 = __ Float64LessThanOrEqual(input, minus_two_52);
4721 __ GotoIf(check2, &if_less_than_minus_two_52);
4722
4723 {
4724 Node* const minus_zero = __ Float64Constant(-0.0);
4725 Node* temp1 = __ Float64Sub(minus_zero, input);
4726 Node* temp2 = __ Float64Sub(__ Float64Add(two_52, temp1), two_52);
4727 Node* check3 = __ Float64LessThan(temp1, temp2);
4728 __ GotoIfNot(check3, &done_temp3, temp2);
4729 __ Goto(&done_temp3, __ Float64Sub(temp2, one));
4730
4731 __ Bind(&done_temp3);
4732 Node* temp3 = done_temp3.PhiAt(0);
4733 __ Goto(&done, __ Float64Sub(minus_zero, temp3));
4734 }
4735 __ Bind(&if_less_than_minus_two_52);
4736 __ Goto(&done, input);
4737
4738 __ Bind(&if_zero);
4739 __ Goto(&done, input);
4740 }
4741 __ Bind(&done);
4742 return done.PhiAt(0);
4743 }
4744
4745 Maybe<Node*> EffectControlLinearizer::LowerFloat64RoundTruncate(Node* node) {
4746 // Nothing to be done if a fast hardware instruction is available.
4747 if (machine()->Float64RoundTruncate().IsSupported()) {
4748 return Nothing<Node*>();
4749 }
4750
4751 Node* const input = node->InputAt(0);
4752 return Just(BuildFloat64RoundTruncate(input));
4753 }
4754
4755 Node* EffectControlLinearizer::LowerFindOrderedHashMapEntry(Node* node) {
4756 Node* table = NodeProperties::GetValueInput(node, 0);
4757 Node* key = NodeProperties::GetValueInput(node, 1);
4758
4759 {
4760 Callable const callable =
4761 Builtins::CallableFor(isolate(), Builtins::kFindOrderedHashMapEntry);
4762 Operator::Properties const properties = node->op()->properties();
4763 CallDescriptor::Flags const flags = CallDescriptor::kNoFlags;
4764 auto call_descriptor = Linkage::GetStubCallDescriptor(
4765 graph()->zone(), callable.descriptor(), 0, flags, properties);
4766 return __ Call(call_descriptor, __ HeapConstant(callable.code()), table,
4767 key, __ NoContextConstant());
4768 }
4769 }
4770
4771 Node* EffectControlLinearizer::ComputeIntegerHash(Node* value) {
4772 // See v8::internal::ComputeIntegerHash()
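// The shifts, adds, xors and the multiply below mix the bits of {value};
// the final mask with 0x3FFFFFFF keeps the hash to 30 bits so that it
// always fits in a Smi.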
4773 value = __ Int32Add(__ Word32Xor(value, __ Int32Constant(0xFFFFFFFF)),
4774 __ Word32Shl(value, __ Int32Constant(15)));
4775 value = __ Word32Xor(value, __ Word32Shr(value, __ Int32Constant(12)));
4776 value = __ Int32Add(value, __ Word32Shl(value, __ Int32Constant(2)));
4777 value = __ Word32Xor(value, __ Word32Shr(value, __ Int32Constant(4)));
4778 value = __ Int32Mul(value, __ Int32Constant(2057));
4779 value = __ Word32Xor(value, __ Word32Shr(value, __ Int32Constant(16)));
4780 value = __ Word32And(value, __ Int32Constant(0x3FFFFFFF));
4781 return value;
4782 }
4783
4784 Node* EffectControlLinearizer::LowerFindOrderedHashMapEntryForInt32Key(
4785 Node* node) {
4786 Node* table = NodeProperties::GetValueInput(node, 0);
4787 Node* key = NodeProperties::GetValueInput(node, 1);
4788
4789 // Compute the integer hash code.
4790 Node* hash = ChangeUint32ToUintPtr(ComputeIntegerHash(key));
4791
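// Reduce the hash to a bucket index by masking with the bucket count minus
// one (the number of buckets is a power of two); the bucket slot holds the
// index of the first entry in its chain, or kNotFound if the bucket is
// empty.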
4792 Node* number_of_buckets = ChangeSmiToIntPtr(__ LoadField(
4793 AccessBuilder::ForOrderedHashTableBaseNumberOfBuckets(), table));
4794 hash = __ WordAnd(hash, __ IntSub(number_of_buckets, __ IntPtrConstant(1)));
4795 Node* first_entry = ChangeSmiToIntPtr(__ Load(
4796 MachineType::TaggedSigned(), table,
4797 __ IntAdd(__ WordShl(hash, __ IntPtrConstant(kPointerSizeLog2)),
4798 __ IntPtrConstant(OrderedHashMap::kHashTableStartOffset -
4799 kHeapObjectTag))));
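// Entry indices are relative to the start of the entry table, which
// directly follows the buckets, so below an entry index is converted into
// a word index within the table by scaling with kEntrySize and adding
// {number_of_buckets}.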
4800
4801 auto loop = __ MakeLoopLabel(MachineType::PointerRepresentation());
4802 auto done = __ MakeLabel(MachineRepresentation::kWord32);
4803 __ Goto(&loop, first_entry);
4804 __ Bind(&loop);
4805 {
4806 Node* entry = loop.PhiAt(0);
4807 Node* check =
4808 __ WordEqual(entry, __ IntPtrConstant(OrderedHashMap::kNotFound));
4809 __ GotoIf(check, &done, __ Int32Constant(-1));
4810 entry = __ IntAdd(
4811 __ IntMul(entry, __ IntPtrConstant(OrderedHashMap::kEntrySize)),
4812 number_of_buckets);
4813
4814 Node* candidate_key = __ Load(
4815 MachineType::AnyTagged(), table,
4816 __ IntAdd(__ WordShl(entry, __ IntPtrConstant(kPointerSizeLog2)),
4817 __ IntPtrConstant(OrderedHashMap::kHashTableStartOffset -
4818 kHeapObjectTag)));
4819
4820 auto if_match = __ MakeLabel();
4821 auto if_notmatch = __ MakeLabel();
4822 auto if_notsmi = __ MakeDeferredLabel();
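// A candidate key matches if it is a Smi equal to {key}, or a HeapNumber
// whose value equals the int32 {key}; any other kind of key cannot match.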
4823 __ GotoIfNot(ObjectIsSmi(candidate_key), &if_notsmi);
4824 __ Branch(__ Word32Equal(ChangeSmiToInt32(candidate_key), key), &if_match,
4825 &if_notmatch);
4826
4827 __ Bind(&if_notsmi);
4828 __ GotoIfNot(
4829 __ WordEqual(__ LoadField(AccessBuilder::ForMap(), candidate_key),
4830 __ HeapNumberMapConstant()),
4831 &if_notmatch);
4832 __ Branch(__ Float64Equal(__ LoadField(AccessBuilder::ForHeapNumberValue(),
4833 candidate_key),
4834 __ ChangeInt32ToFloat64(key)),
4835 &if_match, &if_notmatch);
4836
4837 __ Bind(&if_match);
4838 {
4839 Node* index = ChangeIntPtrToInt32(entry);
4840 __ Goto(&done, index);
4841 }
4842
4843 __ Bind(&if_notmatch);
4844 {
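// Follow the bucket's chain: the index of the next entry is stored
// kChainOffset words after the entry's key field.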
4845 Node* next_entry = ChangeSmiToIntPtr(__ Load(
4846 MachineType::TaggedSigned(), table,
4847 __ IntAdd(
4848 __ WordShl(entry, __ IntPtrConstant(kPointerSizeLog2)),
4849 __ IntPtrConstant(OrderedHashMap::kHashTableStartOffset +
4850 OrderedHashMap::kChainOffset * kPointerSize -
4851 kHeapObjectTag))));
4852 __ Goto(&loop, next_entry);
4853 }
4854 }
4855
4856 __ Bind(&done);
4857 return done.PhiAt(0);
4858 }
4859
4860 Node* EffectControlLinearizer::LowerDateNow(Node* node) {
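// Date.now() is lowered to a direct call of the Runtime::kDateCurrentTime
// function; the call is marked kNoDeopt | kNoThrow, so it carries no frame
// state.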
4861 Operator::Properties properties = Operator::kNoDeopt | Operator::kNoThrow;
4862 Runtime::FunctionId id = Runtime::kDateCurrentTime;
4863 auto call_descriptor = Linkage::GetRuntimeCallDescriptor(
4864 graph()->zone(), id, 0, properties, CallDescriptor::kNoFlags);
4865 return __ Call(call_descriptor, __ CEntryStubConstant(1),
4866 __ ExternalConstant(ExternalReference::Create(id)),
4867 __ Int32Constant(0), __ NoContextConstant());
4868 }
4869
4870 #undef __
4871
4872 Factory* EffectControlLinearizer::factory() const {
4873 return isolate()->factory();
4874 }
4875
4876 Isolate* EffectControlLinearizer::isolate() const {
4877 return jsgraph()->isolate();
4878 }
4879
4880 } // namespace compiler
4881 } // namespace internal
4882 } // namespace v8
4883