// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/js-generic-lowering.h"

#include "src/ast/ast.h"
#include "src/builtins/builtins-constructor.h"
#include "src/codegen/code-factory.h"
#include "src/compiler/access-builder.h"
#include "src/compiler/common-operator.h"
#include "src/compiler/js-graph.h"
#include "src/compiler/js-heap-broker.h"
#include "src/compiler/machine-operator.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/node-properties.h"
#include "src/compiler/operator-properties.h"
#include "src/compiler/processed-feedback.h"
#include "src/compiler/simplified-operator.h"
#include "src/objects/feedback-cell.h"
#include "src/objects/feedback-vector.h"
#include "src/objects/scope-info.h"
#include "src/objects/template-objects-inl.h"

namespace v8 {
namespace internal {
namespace compiler {

namespace {

CallDescriptor::Flags FrameStateFlagForCall(Node* node) {
  return OperatorProperties::HasFrameStateInput(node->op())
             ? CallDescriptor::kNeedsFrameState
             : CallDescriptor::kNoFlags;
}

}  // namespace

JSGenericLowering::JSGenericLowering(JSGraph* jsgraph, Editor* editor,
                                     JSHeapBroker* broker)
    : AdvancedReducer(editor), jsgraph_(jsgraph), broker_(broker) {}

JSGenericLowering::~JSGenericLowering() = default;


Reduction JSGenericLowering::Reduce(Node* node) {
  switch (node->opcode()) {
#define DECLARE_CASE(x, ...) \
  case IrOpcode::k##x:       \
    Lower##x(node);          \
    break;
    JS_OP_LIST(DECLARE_CASE)
#undef DECLARE_CASE
    default:
      // Nothing to see.
      return NoChange();
  }
  return Changed(node);
}

#define REPLACE_STUB_CALL(Name)                       \
  void JSGenericLowering::LowerJS##Name(Node* node) { \
    ReplaceWithBuiltinCall(node, Builtins::k##Name);  \
  }
REPLACE_STUB_CALL(ToLength)
REPLACE_STUB_CALL(ToNumber)
REPLACE_STUB_CALL(ToNumberConvertBigInt)
REPLACE_STUB_CALL(ToNumeric)
REPLACE_STUB_CALL(ToName)
REPLACE_STUB_CALL(ToObject)
REPLACE_STUB_CALL(ToString)
REPLACE_STUB_CALL(ForInEnumerate)
REPLACE_STUB_CALL(AsyncFunctionEnter)
REPLACE_STUB_CALL(AsyncFunctionReject)
REPLACE_STUB_CALL(AsyncFunctionResolve)
REPLACE_STUB_CALL(FulfillPromise)
REPLACE_STUB_CALL(PerformPromiseThen)
REPLACE_STUB_CALL(PromiseResolve)
REPLACE_STUB_CALL(RejectPromise)
REPLACE_STUB_CALL(ResolvePromise)
#undef REPLACE_STUB_CALL

void JSGenericLowering::ReplaceWithBuiltinCall(Node* node,
                                               Builtins::Name builtin) {
  CallDescriptor::Flags flags = FrameStateFlagForCall(node);
  Callable callable = Builtins::CallableFor(isolate(), builtin);
  ReplaceWithBuiltinCall(node, callable, flags);
}

void JSGenericLowering::ReplaceWithBuiltinCall(Node* node, Callable callable,
                                               CallDescriptor::Flags flags) {
  ReplaceWithBuiltinCall(node, callable, flags, node->op()->properties());
}

void JSGenericLowering::ReplaceWithBuiltinCall(
    Node* node, Callable callable, CallDescriptor::Flags flags,
    Operator::Properties properties) {
  const CallInterfaceDescriptor& descriptor = callable.descriptor();
  auto call_descriptor = Linkage::GetStubCallDescriptor(
      zone(), descriptor, descriptor.GetStackParameterCount(), flags,
      properties);
  Node* stub_code = jsgraph()->HeapConstant(callable.code());
  node->InsertInput(zone(), 0, stub_code);
  NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
}

void JSGenericLowering::ReplaceWithRuntimeCall(Node* node,
                                               Runtime::FunctionId f,
                                               int nargs_override) {
  CallDescriptor::Flags flags = FrameStateFlagForCall(node);
  Operator::Properties properties = node->op()->properties();
  const Runtime::Function* fun = Runtime::FunctionForId(f);
  int nargs = (nargs_override < 0) ? fun->nargs : nargs_override;
  auto call_descriptor =
      Linkage::GetRuntimeCallDescriptor(zone(), f, nargs, properties, flags);
  Node* ref = jsgraph()->ExternalConstant(ExternalReference::Create(f));
  Node* arity = jsgraph()->Int32Constant(nargs);
  node->InsertInput(zone(), 0, jsgraph()->CEntryStubConstant(fun->result_size));
  node->InsertInput(zone(), nargs + 1, ref);
  node->InsertInput(zone(), nargs + 2, arity);
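  // Value inputs are now {centry, ...args, ref, arity}, matching the runtime
  // call descriptor built above.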
  NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
}

void JSGenericLowering::ReplaceUnaryOpWithBuiltinCall(
    Node* node, Builtins::Name builtin_without_feedback,
    Builtins::Name builtin_with_feedback) {
  DCHECK(JSOperator::IsUnaryWithFeedback(node->opcode()));
  const FeedbackParameter& p = FeedbackParameterOf(node->op());
  if (CollectFeedbackInGenericLowering() && p.feedback().IsValid()) {
    Callable callable = Builtins::CallableFor(isolate(), builtin_with_feedback);
    Node* slot = jsgraph()->UintPtrConstant(p.feedback().slot.ToInt());
    const CallInterfaceDescriptor& descriptor = callable.descriptor();
    CallDescriptor::Flags flags = FrameStateFlagForCall(node);
    auto call_descriptor = Linkage::GetStubCallDescriptor(
        zone(), descriptor, descriptor.GetStackParameterCount(), flags,
        node->op()->properties());
    Node* stub_code = jsgraph()->HeapConstant(callable.code());
    STATIC_ASSERT(JSUnaryOpNode::ValueIndex() == 0);
    STATIC_ASSERT(JSUnaryOpNode::FeedbackVectorIndex() == 1);
    DCHECK_EQ(node->op()->ValueInputCount(), 2);
    node->InsertInput(zone(), 0, stub_code);
    node->InsertInput(zone(), 2, slot);
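    // After: {code, value, slot, vector}.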
    NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
  } else {
    node->RemoveInput(JSUnaryOpNode::FeedbackVectorIndex());
    ReplaceWithBuiltinCall(node, builtin_without_feedback);
  }
}

#define DEF_UNARY_LOWERING(Name)                                     \
  void JSGenericLowering::LowerJS##Name(Node* node) {                \
    ReplaceUnaryOpWithBuiltinCall(node, Builtins::k##Name,           \
                                  Builtins::k##Name##_WithFeedback); \
  }
DEF_UNARY_LOWERING(BitwiseNot)
DEF_UNARY_LOWERING(Decrement)
DEF_UNARY_LOWERING(Increment)
DEF_UNARY_LOWERING(Negate)
#undef DEF_UNARY_LOWERING

void JSGenericLowering::ReplaceBinaryOpWithBuiltinCall(
    Node* node, Builtins::Name builtin_without_feedback,
    Builtins::Name builtin_with_feedback) {
  DCHECK(JSOperator::IsBinaryWithFeedback(node->opcode()));
  Builtins::Name builtin_id;
  const FeedbackParameter& p = FeedbackParameterOf(node->op());
  if (CollectFeedbackInGenericLowering() && p.feedback().IsValid()) {
    Node* slot = jsgraph()->UintPtrConstant(p.feedback().slot.ToInt());
    STATIC_ASSERT(JSBinaryOpNode::LeftIndex() == 0);
    STATIC_ASSERT(JSBinaryOpNode::RightIndex() == 1);
    STATIC_ASSERT(JSBinaryOpNode::FeedbackVectorIndex() == 2);
    DCHECK_EQ(node->op()->ValueInputCount(), 3);
    node->InsertInput(zone(), 2, slot);
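    // After: {lhs, rhs, slot, vector}; ReplaceWithBuiltinCall below prepends
    // the code target.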
    builtin_id = builtin_with_feedback;
  } else {
    node->RemoveInput(JSBinaryOpNode::FeedbackVectorIndex());
    builtin_id = builtin_without_feedback;
  }

  ReplaceWithBuiltinCall(node, builtin_id);
}

#define DEF_BINARY_LOWERING(Name)                                     \
  void JSGenericLowering::LowerJS##Name(Node* node) {                 \
    ReplaceBinaryOpWithBuiltinCall(node, Builtins::k##Name,           \
                                   Builtins::k##Name##_WithFeedback); \
  }
// Binary ops.
DEF_BINARY_LOWERING(Add)
DEF_BINARY_LOWERING(BitwiseAnd)
DEF_BINARY_LOWERING(BitwiseOr)
DEF_BINARY_LOWERING(BitwiseXor)
DEF_BINARY_LOWERING(Divide)
DEF_BINARY_LOWERING(Exponentiate)
DEF_BINARY_LOWERING(Modulus)
DEF_BINARY_LOWERING(Multiply)
DEF_BINARY_LOWERING(ShiftLeft)
DEF_BINARY_LOWERING(ShiftRight)
DEF_BINARY_LOWERING(ShiftRightLogical)
DEF_BINARY_LOWERING(Subtract)
// Compare ops.
DEF_BINARY_LOWERING(Equal)
DEF_BINARY_LOWERING(GreaterThan)
DEF_BINARY_LOWERING(GreaterThanOrEqual)
DEF_BINARY_LOWERING(InstanceOf)
DEF_BINARY_LOWERING(LessThan)
DEF_BINARY_LOWERING(LessThanOrEqual)
#undef DEF_BINARY_LOWERING

void JSGenericLowering::LowerJSStrictEqual(Node* node) {
  // The === operator doesn't need the current context.
  NodeProperties::ReplaceContextInput(node, jsgraph()->NoContextConstant());
  DCHECK_EQ(node->op()->ControlInputCount(), 1);
  node->RemoveInput(NodeProperties::FirstControlIndex(node));

  Builtins::Name builtin_id;
  const FeedbackParameter& p = FeedbackParameterOf(node->op());
  if (CollectFeedbackInGenericLowering() && p.feedback().IsValid()) {
    Node* slot = jsgraph()->UintPtrConstant(p.feedback().slot.ToInt());
    STATIC_ASSERT(JSStrictEqualNode::LeftIndex() == 0);
    STATIC_ASSERT(JSStrictEqualNode::RightIndex() == 1);
    STATIC_ASSERT(JSStrictEqualNode::FeedbackVectorIndex() == 2);
    DCHECK_EQ(node->op()->ValueInputCount(), 3);
    node->InsertInput(zone(), 2, slot);
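    // After: {lhs, rhs, slot, vector}.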
    builtin_id = Builtins::kStrictEqual_WithFeedback;
  } else {
    node->RemoveInput(JSStrictEqualNode::FeedbackVectorIndex());
    builtin_id = Builtins::kStrictEqual;
  }

  Callable callable = Builtins::CallableFor(isolate(), builtin_id);
  ReplaceWithBuiltinCall(node, callable, CallDescriptor::kNoFlags,
                         Operator::kEliminatable);
}

namespace {
bool ShouldUseMegamorphicLoadBuiltin(FeedbackSource const& source,
                                     JSHeapBroker* broker) {
  ProcessedFeedback const& feedback = broker->GetFeedback(source);

  if (feedback.kind() == ProcessedFeedback::kElementAccess) {
    return feedback.AsElementAccess().transition_groups().empty();
  } else if (feedback.kind() == ProcessedFeedback::kNamedAccess) {
    return feedback.AsNamedAccess().maps().empty();
  } else if (feedback.kind() == ProcessedFeedback::kInsufficient) {
    return false;
  }
  UNREACHABLE();
}
}  // namespace

void JSGenericLowering::LowerJSHasProperty(Node* node) {
  JSHasPropertyNode n(node);
  const PropertyAccess& p = n.Parameters();
  if (!p.feedback().IsValid()) {
    node->RemoveInput(JSHasPropertyNode::FeedbackVectorIndex());
    ReplaceWithBuiltinCall(node, Builtins::kHasProperty);
  } else {
    STATIC_ASSERT(n.FeedbackVectorIndex() == 2);
    n->InsertInput(zone(), 2,
                   jsgraph()->TaggedIndexConstant(p.feedback().index()));
    ReplaceWithBuiltinCall(node, Builtins::kKeyedHasIC);
  }
}

void JSGenericLowering::LowerJSLoadProperty(Node* node) {
  JSLoadPropertyNode n(node);
  const PropertyAccess& p = n.Parameters();
  FrameState frame_state = n.frame_state();
  FrameState outer_state = frame_state.outer_frame_state();
  STATIC_ASSERT(n.FeedbackVectorIndex() == 2);
  if (outer_state->opcode() != IrOpcode::kFrameState) {
    n->RemoveInput(n.FeedbackVectorIndex());
    n->InsertInput(zone(), 2,
                   jsgraph()->TaggedIndexConstant(p.feedback().index()));
    ReplaceWithBuiltinCall(
        node, ShouldUseMegamorphicLoadBuiltin(p.feedback(), broker())
                  ? Builtins::kKeyedLoadICTrampoline_Megamorphic
                  : Builtins::kKeyedLoadICTrampoline);
  } else {
    n->InsertInput(zone(), 2,
                   jsgraph()->TaggedIndexConstant(p.feedback().index()));
    ReplaceWithBuiltinCall(
        node, ShouldUseMegamorphicLoadBuiltin(p.feedback(), broker())
                  ? Builtins::kKeyedLoadIC_Megamorphic
                  : Builtins::kKeyedLoadIC);
  }
}

void JSGenericLowering::LowerJSLoadNamed(Node* node) {
  JSLoadNamedNode n(node);
  NamedAccess const& p = n.Parameters();
  FrameState frame_state = n.frame_state();
  FrameState outer_state = frame_state.outer_frame_state();
  STATIC_ASSERT(n.FeedbackVectorIndex() == 1);
  if (!p.feedback().IsValid()) {
    n->RemoveInput(n.FeedbackVectorIndex());
    node->InsertInput(zone(), 1, jsgraph()->HeapConstant(p.name()));
    ReplaceWithBuiltinCall(node, Builtins::kGetProperty);
  } else if (outer_state->opcode() != IrOpcode::kFrameState) {
    n->RemoveInput(n.FeedbackVectorIndex());
    node->InsertInput(zone(), 1, jsgraph()->HeapConstant(p.name()));
    node->InsertInput(zone(), 2,
                      jsgraph()->TaggedIndexConstant(p.feedback().index()));
    ReplaceWithBuiltinCall(
        node, ShouldUseMegamorphicLoadBuiltin(p.feedback(), broker())
                  ? Builtins::kLoadICTrampoline_Megamorphic
                  : Builtins::kLoadICTrampoline);
  } else {
    node->InsertInput(zone(), 1, jsgraph()->HeapConstant(p.name()));
    node->InsertInput(zone(), 2,
                      jsgraph()->TaggedIndexConstant(p.feedback().index()));
    ReplaceWithBuiltinCall(
        node, ShouldUseMegamorphicLoadBuiltin(p.feedback(), broker())
                  ? Builtins::kLoadIC_Megamorphic
                  : Builtins::kLoadIC);
  }
}

void JSGenericLowering::LowerJSLoadNamedFromSuper(Node* node) {
  // TODO(marja, v8:9237): Call a builtin which collects feedback.
  JSLoadNamedFromSuperNode n(node);
  NamedAccess const& p = n.Parameters();
  node->RemoveInput(2);  // Feedback vector
  node->InsertInput(zone(), 2, jsgraph()->HeapConstant(p.name()));
  ReplaceWithRuntimeCall(node, Runtime::kLoadFromSuper);
}

void JSGenericLowering::LowerJSLoadGlobal(Node* node) {
  JSLoadGlobalNode n(node);
  const LoadGlobalParameters& p = n.Parameters();
  CallDescriptor::Flags flags = FrameStateFlagForCall(node);
  FrameState frame_state = n.frame_state();
  FrameState outer_state = frame_state.outer_frame_state();
  STATIC_ASSERT(n.FeedbackVectorIndex() == 0);
  if (outer_state->opcode() != IrOpcode::kFrameState) {
    n->RemoveInput(n.FeedbackVectorIndex());
    node->InsertInput(zone(), 0, jsgraph()->HeapConstant(p.name()));
    node->InsertInput(zone(), 1,
                      jsgraph()->TaggedIndexConstant(p.feedback().index()));
    Callable callable = CodeFactory::LoadGlobalIC(isolate(), p.typeof_mode());
    ReplaceWithBuiltinCall(node, callable, flags);
  } else {
    node->InsertInput(zone(), 0, jsgraph()->HeapConstant(p.name()));
    node->InsertInput(zone(), 1,
                      jsgraph()->TaggedIndexConstant(p.feedback().index()));
    Callable callable =
        CodeFactory::LoadGlobalICInOptimizedCode(isolate(), p.typeof_mode());
    ReplaceWithBuiltinCall(node, callable, flags);
  }
}

void JSGenericLowering::LowerJSGetIterator(Node* node) {
  // TODO(v8:9625): Currently, the GetIterator operator is desugared in the
  // native context specialization phase. Thus, the following generic lowering
  // is not reachable unless that phase is disabled (e.g. for
  // native-context-independent code).
  // We can add a check in native context specialization to avoid desugaring
  // the GetIterator operator when feedback is megamorphic. This would reduce
  // the size of the compiled code as it would insert 1 call to the builtin
  // instead of 2 calls resulting from the generic lowering of the LoadNamed
  // and Call operators.

  JSGetIteratorNode n(node);
  GetIteratorParameters const& p = n.Parameters();
  Node* load_slot =
      jsgraph()->TaggedIndexConstant(p.loadFeedback().slot.ToInt());
  Node* call_slot =
      jsgraph()->TaggedIndexConstant(p.callFeedback().slot.ToInt());
  STATIC_ASSERT(n.FeedbackVectorIndex() == 1);
  node->InsertInput(zone(), 1, load_slot);
  node->InsertInput(zone(), 2, call_slot);
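  // After: {receiver, load_slot, call_slot, vector}; the code target is
  // prepended by ReplaceWithBuiltinCall below.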

  ReplaceWithBuiltinCall(node, Builtins::kGetIteratorWithFeedback);
}

void JSGenericLowering::LowerJSStoreProperty(Node* node) {
  JSStorePropertyNode n(node);
  const PropertyAccess& p = n.Parameters();
  FrameState frame_state = n.frame_state();
  FrameState outer_state = frame_state.outer_frame_state();
  STATIC_ASSERT(n.FeedbackVectorIndex() == 3);
  if (outer_state->opcode() != IrOpcode::kFrameState) {
    n->RemoveInput(n.FeedbackVectorIndex());
    node->InsertInput(zone(), 3,
                      jsgraph()->TaggedIndexConstant(p.feedback().index()));
    ReplaceWithBuiltinCall(node, Builtins::kKeyedStoreICTrampoline);
  } else {
    node->InsertInput(zone(), 3,
                      jsgraph()->TaggedIndexConstant(p.feedback().index()));
    ReplaceWithBuiltinCall(node, Builtins::kKeyedStoreIC);
  }
}

void JSGenericLowering::LowerJSStoreNamed(Node* node) {
  JSStoreNamedNode n(node);
  NamedAccess const& p = n.Parameters();
  FrameState frame_state = n.frame_state();
  FrameState outer_state = frame_state.outer_frame_state();
  STATIC_ASSERT(n.FeedbackVectorIndex() == 2);
  if (!p.feedback().IsValid()) {
    n->RemoveInput(n.FeedbackVectorIndex());
    node->InsertInput(zone(), 1, jsgraph()->HeapConstant(p.name()));
    ReplaceWithRuntimeCall(node, Runtime::kSetNamedProperty);
  } else if (outer_state->opcode() != IrOpcode::kFrameState) {
    n->RemoveInput(n.FeedbackVectorIndex());
    node->InsertInput(zone(), 1, jsgraph()->HeapConstant(p.name()));
    node->InsertInput(zone(), 3,
                      jsgraph()->TaggedIndexConstant(p.feedback().index()));
    ReplaceWithBuiltinCall(node, Builtins::kStoreICTrampoline);
  } else {
    node->InsertInput(zone(), 1, jsgraph()->HeapConstant(p.name()));
    node->InsertInput(zone(), 3,
                      jsgraph()->TaggedIndexConstant(p.feedback().index()));
    ReplaceWithBuiltinCall(node, Builtins::kStoreIC);
  }
}

void JSGenericLowering::LowerJSStoreNamedOwn(Node* node) {
  JSStoreNamedOwnNode n(node);
  CallDescriptor::Flags flags = FrameStateFlagForCall(node);
  StoreNamedOwnParameters const& p = n.Parameters();
  FrameState frame_state = n.frame_state();
  FrameState outer_state = frame_state.outer_frame_state();
  STATIC_ASSERT(n.FeedbackVectorIndex() == 2);
  if (outer_state->opcode() != IrOpcode::kFrameState) {
    n->RemoveInput(n.FeedbackVectorIndex());
    node->InsertInput(zone(), 1, jsgraph()->HeapConstant(p.name()));
    node->InsertInput(zone(), 3,
                      jsgraph()->TaggedIndexConstant(p.feedback().index()));
    Callable callable = CodeFactory::StoreOwnIC(isolate());
    ReplaceWithBuiltinCall(node, callable, flags);
  } else {
    node->InsertInput(zone(), 1, jsgraph()->HeapConstant(p.name()));
    node->InsertInput(zone(), 3,
                      jsgraph()->TaggedIndexConstant(p.feedback().index()));
    Callable callable = CodeFactory::StoreOwnICInOptimizedCode(isolate());
    ReplaceWithBuiltinCall(node, callable, flags);
  }
}

void JSGenericLowering::LowerJSStoreGlobal(Node* node) {
  JSStoreGlobalNode n(node);
  const StoreGlobalParameters& p = n.Parameters();
  FrameState frame_state = n.frame_state();
  FrameState outer_state = frame_state.outer_frame_state();
  STATIC_ASSERT(n.FeedbackVectorIndex() == 1);
  if (outer_state->opcode() != IrOpcode::kFrameState) {
    n->RemoveInput(n.FeedbackVectorIndex());
    node->InsertInput(zone(), 0, jsgraph()->HeapConstant(p.name()));
    node->InsertInput(zone(), 2,
                      jsgraph()->TaggedIndexConstant(p.feedback().index()));
    ReplaceWithBuiltinCall(node, Builtins::kStoreGlobalICTrampoline);
  } else {
    node->InsertInput(zone(), 0, jsgraph()->HeapConstant(p.name()));
    node->InsertInput(zone(), 2,
                      jsgraph()->TaggedIndexConstant(p.feedback().index()));
    ReplaceWithBuiltinCall(node, Builtins::kStoreGlobalIC);
  }
}

void JSGenericLowering::LowerJSStoreDataPropertyInLiteral(Node* node) {
  JSStoreDataPropertyInLiteralNode n(node);
  FeedbackParameter const& p = n.Parameters();
  STATIC_ASSERT(n.FeedbackVectorIndex() == 4);
  RelaxControls(node);
  node->InsertInput(zone(), 5,
                    jsgraph()->TaggedIndexConstant(p.feedback().index()));
  ReplaceWithRuntimeCall(node, Runtime::kDefineDataPropertyInLiteral);
}

void JSGenericLowering::LowerJSStoreInArrayLiteral(Node* node) {
  JSStoreInArrayLiteralNode n(node);
  FeedbackParameter const& p = n.Parameters();
  STATIC_ASSERT(n.FeedbackVectorIndex() == 3);
  RelaxControls(node);
  node->InsertInput(zone(), 3,
                    jsgraph()->TaggedIndexConstant(p.feedback().index()));
  ReplaceWithBuiltinCall(node, Builtins::kStoreInArrayLiteralIC);
}

void JSGenericLowering::LowerJSDeleteProperty(Node* node) {
  ReplaceWithBuiltinCall(node, Builtins::kDeleteProperty);
}

void JSGenericLowering::LowerJSGetSuperConstructor(Node* node) {
  Node* active_function = NodeProperties::GetValueInput(node, 0);
  Node* effect = NodeProperties::GetEffectInput(node);
  Node* control = NodeProperties::GetControlInput(node);

  Node* function_map = effect = graph()->NewNode(
      jsgraph()->simplified()->LoadField(AccessBuilder::ForMap()),
      active_function, effect, control);

  RelaxControls(node);
  node->ReplaceInput(0, function_map);
  node->ReplaceInput(1, effect);
  node->ReplaceInput(2, control);
  node->TrimInputCount(3);
  NodeProperties::ChangeOp(node, jsgraph()->simplified()->LoadField(
                                     AccessBuilder::ForMapPrototype()));
}

void JSGenericLowering::LowerJSHasInPrototypeChain(Node* node) {
  ReplaceWithRuntimeCall(node, Runtime::kHasInPrototypeChain);
}

void JSGenericLowering::LowerJSOrdinaryHasInstance(Node* node) {
  ReplaceWithBuiltinCall(node, Builtins::kOrdinaryHasInstance);
}

void JSGenericLowering::LowerJSHasContextExtension(Node* node) {
  UNREACHABLE();  // Eliminated in typed lowering.
}

void JSGenericLowering::LowerJSLoadContext(Node* node) {
  UNREACHABLE();  // Eliminated in typed lowering.
}


void JSGenericLowering::LowerJSStoreContext(Node* node) {
  UNREACHABLE();  // Eliminated in typed lowering.
}


void JSGenericLowering::LowerJSCreate(Node* node) {
  ReplaceWithBuiltinCall(node, Builtins::kFastNewObject);
}


void JSGenericLowering::LowerJSCreateArguments(Node* node) {
  CreateArgumentsType const type = CreateArgumentsTypeOf(node->op());
  switch (type) {
    case CreateArgumentsType::kMappedArguments:
      ReplaceWithRuntimeCall(node, Runtime::kNewSloppyArguments);
      break;
    case CreateArgumentsType::kUnmappedArguments:
      ReplaceWithRuntimeCall(node, Runtime::kNewStrictArguments);
      break;
    case CreateArgumentsType::kRestParameter:
      ReplaceWithRuntimeCall(node, Runtime::kNewRestParameter);
      break;
  }
}


void JSGenericLowering::LowerJSCreateArray(Node* node) {
  CreateArrayParameters const& p = CreateArrayParametersOf(node->op());
  int const arity = static_cast<int>(p.arity());
  auto interface_descriptor = ArrayConstructorDescriptor{};
  auto call_descriptor = Linkage::GetStubCallDescriptor(
      zone(), interface_descriptor, arity + 1, CallDescriptor::kNeedsFrameState,
      node->op()->properties());
  // If this fails, we might need to update the parameter reordering code
  // to ensure that the additional arguments passed via stack are pushed
  // between top of stack and JS arguments.
  DCHECK_EQ(interface_descriptor.GetStackParameterCount(), 0);
  Node* stub_code = jsgraph()->ArrayConstructorStubConstant();
  Node* stub_arity = jsgraph()->Int32Constant(arity);
  MaybeHandle<AllocationSite> const maybe_site = p.site();
  Handle<AllocationSite> site;
  DCHECK_IMPLIES(broker()->is_native_context_independent(),
                 maybe_site.is_null());
  Node* type_info = maybe_site.ToHandle(&site) ? jsgraph()->HeapConstant(site)
                                               : jsgraph()->UndefinedConstant();
  Node* receiver = jsgraph()->UndefinedConstant();
  node->InsertInput(zone(), 0, stub_code);
  node->InsertInput(zone(), 3, stub_arity);
  node->InsertInput(zone(), 4, type_info);
  node->InsertInput(zone(), 5, receiver);
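  // After: {code, target, new_target, arity, type_info, receiver, ...args}.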
  NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
}

void JSGenericLowering::LowerJSCreateArrayIterator(Node* node) {
  UNREACHABLE();  // Eliminated in typed lowering.
}

void JSGenericLowering::LowerJSCreateAsyncFunctionObject(Node* node) {
  UNREACHABLE();  // Eliminated in typed lowering.
}

void JSGenericLowering::LowerJSCreateCollectionIterator(Node* node) {
  UNREACHABLE();  // Eliminated in typed lowering.
}

void JSGenericLowering::LowerJSCreateBoundFunction(Node* node) {
  UNREACHABLE();  // Eliminated in typed lowering.
}

void JSGenericLowering::LowerJSObjectIsArray(Node* node) {
  UNREACHABLE();  // Eliminated in typed lowering.
}

void JSGenericLowering::LowerJSCreateObject(Node* node) {
  ReplaceWithBuiltinCall(node, Builtins::kCreateObjectWithoutProperties);
}

void JSGenericLowering::LowerJSParseInt(Node* node) {
  ReplaceWithBuiltinCall(node, Builtins::kParseInt);
}

void JSGenericLowering::LowerJSRegExpTest(Node* node) {
  ReplaceWithBuiltinCall(node, Builtins::kRegExpPrototypeTestFast);
}

void JSGenericLowering::LowerJSCreateClosure(Node* node) {
  JSCreateClosureNode n(node);
  CreateClosureParameters const& p = n.Parameters();
  Handle<SharedFunctionInfo> const shared_info = p.shared_info();
  STATIC_ASSERT(n.FeedbackCellIndex() == 0);
  node->InsertInput(zone(), 0, jsgraph()->HeapConstant(shared_info));
  node->RemoveInput(4);  // control

  // Use the FastNewClosure builtin only for functions allocated in new space.
  if (p.allocation() == AllocationType::kYoung) {
    ReplaceWithBuiltinCall(node, Builtins::kFastNewClosure);
  } else {
    ReplaceWithRuntimeCall(node, Runtime::kNewClosure_Tenured);
  }
}

void JSGenericLowering::LowerJSCreateFunctionContext(Node* node) {
  const CreateFunctionContextParameters& parameters =
      CreateFunctionContextParametersOf(node->op());
  Handle<ScopeInfo> scope_info = parameters.scope_info();
  int slot_count = parameters.slot_count();
  ScopeType scope_type = parameters.scope_type();
  CallDescriptor::Flags flags = FrameStateFlagForCall(node);

  if (slot_count <= ConstructorBuiltins::MaximumFunctionContextSlots()) {
    Callable callable =
        CodeFactory::FastNewFunctionContext(isolate(), scope_type);
    node->InsertInput(zone(), 0, jsgraph()->HeapConstant(scope_info));
    node->InsertInput(zone(), 1, jsgraph()->Int32Constant(slot_count));
    ReplaceWithBuiltinCall(node, callable, flags);
  } else {
    node->InsertInput(zone(), 0, jsgraph()->HeapConstant(scope_info));
    ReplaceWithRuntimeCall(node, Runtime::kNewFunctionContext);
  }
}

void JSGenericLowering::LowerJSCreateGeneratorObject(Node* node) {
  node->RemoveInput(4);  // control
  ReplaceWithBuiltinCall(node, Builtins::kCreateGeneratorObject);
}

void JSGenericLowering::LowerJSCreateIterResultObject(Node* node) {
  ReplaceWithBuiltinCall(node, Builtins::kCreateIterResultObject);
}

void JSGenericLowering::LowerJSCreateStringIterator(Node* node) {
  UNREACHABLE();  // Eliminated in typed lowering.
}

void JSGenericLowering::LowerJSCreateKeyValueArray(Node* node) {
  UNREACHABLE();  // Eliminated in typed lowering.
}

void JSGenericLowering::LowerJSCreatePromise(Node* node) {
  UNREACHABLE();  // Eliminated in typed lowering.
}

void JSGenericLowering::LowerJSCreateTypedArray(Node* node) {
  ReplaceWithBuiltinCall(node, Builtins::kCreateTypedArray);
}

void JSGenericLowering::LowerJSCreateLiteralArray(Node* node) {
  JSCreateLiteralArrayNode n(node);
  CreateLiteralParameters const& p = n.Parameters();
  STATIC_ASSERT(n.FeedbackVectorIndex() == 0);
  node->InsertInput(zone(), 1,
                    jsgraph()->TaggedIndexConstant(p.feedback().index()));
  node->InsertInput(zone(), 2, jsgraph()->HeapConstant(p.constant()));

  // Use the CreateShallowArrayLiteral builtin only for shallow boilerplates
  // without properties up to the number of elements that the stubs can handle.
  if ((p.flags() & AggregateLiteral::kIsShallow) != 0 &&
      p.length() < ConstructorBuiltins::kMaximumClonedShallowArrayElements) {
    ReplaceWithBuiltinCall(node, Builtins::kCreateShallowArrayLiteral);
  } else {
    node->InsertInput(zone(), 3, jsgraph()->SmiConstant(p.flags()));
    ReplaceWithRuntimeCall(node, Runtime::kCreateArrayLiteral);
  }
}

void JSGenericLowering::LowerJSGetTemplateObject(Node* node) {
  JSGetTemplateObjectNode n(node);
  GetTemplateObjectParameters const& p = n.Parameters();
  SharedFunctionInfoRef shared(broker(), p.shared());
  TemplateObjectDescriptionRef description(broker(), p.description());

  DCHECK_EQ(node->op()->ControlInputCount(), 1);
  node->RemoveInput(NodeProperties::FirstControlIndex(node));

  STATIC_ASSERT(JSGetTemplateObjectNode::FeedbackVectorIndex() == 0);
  node->InsertInput(zone(), 0, jsgraph()->Constant(shared));
  node->InsertInput(zone(), 1, jsgraph()->Constant(description));
  node->InsertInput(zone(), 2,
                    jsgraph()->UintPtrConstant(p.feedback().index()));

  ReplaceWithBuiltinCall(node, Builtins::kGetTemplateObject);
}

void JSGenericLowering::LowerJSCreateEmptyLiteralArray(Node* node) {
  JSCreateEmptyLiteralArrayNode n(node);
  FeedbackParameter const& p = n.Parameters();
  STATIC_ASSERT(n.FeedbackVectorIndex() == 0);
  node->InsertInput(zone(), 1,
                    jsgraph()->TaggedIndexConstant(p.feedback().index()));
  node->RemoveInput(4);  // control
  ReplaceWithBuiltinCall(node, Builtins::kCreateEmptyArrayLiteral);
}

void JSGenericLowering::LowerJSCreateArrayFromIterable(Node* node) {
  ReplaceWithBuiltinCall(node, Builtins::kIterableToListWithSymbolLookup);
}

void JSGenericLowering::LowerJSCreateLiteralObject(Node* node) {
  JSCreateLiteralObjectNode n(node);
  CreateLiteralParameters const& p = n.Parameters();
  STATIC_ASSERT(n.FeedbackVectorIndex() == 0);
  node->InsertInput(zone(), 1,
                    jsgraph()->TaggedIndexConstant(p.feedback().index()));
  node->InsertInput(zone(), 2, jsgraph()->HeapConstant(p.constant()));
  node->InsertInput(zone(), 3, jsgraph()->SmiConstant(p.flags()));

  // Use the CreateShallowObjectLiteral builtin only for shallow boilerplates
  // without elements up to the number of properties that the stubs can handle.
  if ((p.flags() & AggregateLiteral::kIsShallow) != 0 &&
      p.length() <=
          ConstructorBuiltins::kMaximumClonedShallowObjectProperties) {
    ReplaceWithBuiltinCall(node, Builtins::kCreateShallowObjectLiteral);
  } else {
    ReplaceWithRuntimeCall(node, Runtime::kCreateObjectLiteral);
  }
}

void JSGenericLowering::LowerJSCloneObject(Node* node) {
  JSCloneObjectNode n(node);
  CloneObjectParameters const& p = n.Parameters();
  STATIC_ASSERT(n.FeedbackVectorIndex() == 1);
  node->InsertInput(zone(), 1, jsgraph()->SmiConstant(p.flags()));
  node->InsertInput(zone(), 2,
                    jsgraph()->TaggedIndexConstant(p.feedback().index()));
  ReplaceWithBuiltinCall(node, Builtins::kCloneObjectIC);
}

void JSGenericLowering::LowerJSCreateEmptyLiteralObject(Node* node) {
  ReplaceWithBuiltinCall(node, Builtins::kCreateEmptyLiteralObject);
}

void JSGenericLowering::LowerJSCreateLiteralRegExp(Node* node) {
  JSCreateLiteralRegExpNode n(node);
  CreateLiteralParameters const& p = n.Parameters();
  STATIC_ASSERT(n.FeedbackVectorIndex() == 0);
  node->InsertInput(zone(), 1,
                    jsgraph()->TaggedIndexConstant(p.feedback().index()));
  node->InsertInput(zone(), 2, jsgraph()->HeapConstant(p.constant()));
  node->InsertInput(zone(), 3, jsgraph()->SmiConstant(p.flags()));
  ReplaceWithBuiltinCall(node, Builtins::kCreateRegExpLiteral);
}


void JSGenericLowering::LowerJSCreateCatchContext(Node* node) {
  Handle<ScopeInfo> scope_info = ScopeInfoOf(node->op());
  node->InsertInput(zone(), 1, jsgraph()->HeapConstant(scope_info));
  ReplaceWithRuntimeCall(node, Runtime::kPushCatchContext);
}

void JSGenericLowering::LowerJSCreateWithContext(Node* node) {
  Handle<ScopeInfo> scope_info = ScopeInfoOf(node->op());
  node->InsertInput(zone(), 1, jsgraph()->HeapConstant(scope_info));
  ReplaceWithRuntimeCall(node, Runtime::kPushWithContext);
}

void JSGenericLowering::LowerJSCreateBlockContext(Node* node) {
  Handle<ScopeInfo> scope_info = ScopeInfoOf(node->op());
  node->InsertInput(zone(), 0, jsgraph()->HeapConstant(scope_info));
  ReplaceWithRuntimeCall(node, Runtime::kPushBlockContext);
}

namespace {

bool CollectCallAndConstructFeedback(JSHeapBroker* broker) {
  // Call and construct feedback is a special case. Besides shape feedback, we
  // also increment the call count, which is later used to make inlining
  // decisions. The call count is only comparable/reliable if it is incremented
  // for all calls inside a function. This is not the case in default turbofan
  // mode, in which many calls may be inlined and will thus never reach generic
  // lowering (where we insert the feedback-collecting builtin call).
  // Therefore it should only be collected in native context independent code,
  // where we 1. know every call will reach generic lowering, and 2. we must
  // collect full feedback to properly tier up later.
  return broker->is_native_context_independent();
}

}  // namespace

// TODO(jgruber,v8:8888): Should this collect feedback?
void JSGenericLowering::LowerJSConstructForwardVarargs(Node* node) {
  ConstructForwardVarargsParameters p =
      ConstructForwardVarargsParametersOf(node->op());
  int const arg_count = static_cast<int>(p.arity() - 2);
  CallDescriptor::Flags flags = FrameStateFlagForCall(node);
  Callable callable = CodeFactory::ConstructForwardVarargs(isolate());
  // If this fails, we might need to update the parameter reordering code
  // to ensure that the additional arguments passed via stack are pushed
  // between top of stack and JS arguments.
  DCHECK_EQ(callable.descriptor().GetStackParameterCount(), 0);
  auto call_descriptor = Linkage::GetStubCallDescriptor(
      zone(), callable.descriptor(), arg_count + 1, flags);
  Node* stub_code = jsgraph()->HeapConstant(callable.code());
  Node* stub_arity = jsgraph()->Int32Constant(arg_count);
  Node* start_index = jsgraph()->Uint32Constant(p.start_index());
  Node* receiver = jsgraph()->UndefinedConstant();
  node->InsertInput(zone(), 0, stub_code);
  node->InsertInput(zone(), 3, stub_arity);
  node->InsertInput(zone(), 4, start_index);
  node->InsertInput(zone(), 5, receiver);
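  // After: {code, target, new_target, arity, start_index, receiver, ...args}.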
  NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
}

void JSGenericLowering::LowerJSConstruct(Node* node) {
  JSConstructNode n(node);
  ConstructParameters const& p = n.Parameters();
  int const arg_count = p.arity_without_implicit_args();
  CallDescriptor::Flags flags = FrameStateFlagForCall(node);

  static constexpr int kReceiver = 1;
  static constexpr int kMaybeFeedbackVector = 1;

  if (CollectFeedbackInGenericLowering() &&
      CollectCallAndConstructFeedback(broker()) && p.feedback().IsValid()) {
    const int stack_argument_count =
        arg_count + kReceiver + kMaybeFeedbackVector;
    Callable callable =
        Builtins::CallableFor(isolate(), Builtins::kConstruct_WithFeedback);
    // If this fails, we might need to update the parameter reordering code
    // to ensure that the additional arguments passed via stack are pushed
    // between top of stack and JS arguments.
    DCHECK_EQ(callable.descriptor().GetStackParameterCount(),
              kMaybeFeedbackVector);
    auto call_descriptor = Linkage::GetStubCallDescriptor(
        zone(), callable.descriptor(), stack_argument_count, flags);
    Node* stub_code = jsgraph()->HeapConstant(callable.code());
    Node* stub_arity = jsgraph()->Int32Constant(arg_count);
    Node* slot = jsgraph()->Int32Constant(p.feedback().index());
    Node* receiver = jsgraph()->UndefinedConstant();
    Node* feedback_vector = node->RemoveInput(n.FeedbackVectorIndex());
    // Register argument inputs are followed by stack argument inputs (such as
    // feedback_vector). Both are listed in ascending order. Note that
    // the receiver is implicitly placed on the stack and is thus inserted
    // between explicitly-specified register and stack arguments.
    // TODO(jgruber): Implement a simpler way to specify these mutations.
    node->InsertInput(zone(), 0, stub_code);
    node->InsertInput(zone(), 3, stub_arity);
    node->InsertInput(zone(), 4, slot);
    node->InsertInput(zone(), 5, feedback_vector);
    node->InsertInput(zone(), 6, receiver);
    // After: {code, target, new_target, arity, slot, vector, receiver,
    // ...args}.

    NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
  } else {
    const int stack_argument_count = arg_count + kReceiver;
    Callable callable = Builtins::CallableFor(isolate(), Builtins::kConstruct);
    auto call_descriptor = Linkage::GetStubCallDescriptor(
        zone(), callable.descriptor(), stack_argument_count, flags);
    Node* stub_code = jsgraph()->HeapConstant(callable.code());
    Node* stub_arity = jsgraph()->Int32Constant(arg_count);
    Node* receiver = jsgraph()->UndefinedConstant();
    node->RemoveInput(n.FeedbackVectorIndex());
    node->InsertInput(zone(), 0, stub_code);
    node->InsertInput(zone(), 3, stub_arity);
    node->InsertInput(zone(), 4, receiver);

    // After: {code, target, new_target, arity, receiver, ...args}.

    NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
  }
}

void JSGenericLowering::LowerJSConstructWithArrayLike(Node* node) {
  JSConstructWithArrayLikeNode n(node);
  ConstructParameters const& p = n.Parameters();
  CallDescriptor::Flags flags = FrameStateFlagForCall(node);
  const int arg_count = p.arity_without_implicit_args();
  DCHECK_EQ(arg_count, 1);

  static constexpr int kReceiver = 1;
  static constexpr int kArgumentList = 1;
  static constexpr int kMaybeFeedbackVector = 1;

  if (CollectFeedbackInGenericLowering() &&
      CollectCallAndConstructFeedback(broker()) && p.feedback().IsValid()) {
    const int stack_argument_count =
        arg_count - kArgumentList + kReceiver + kMaybeFeedbackVector;
    Callable callable = Builtins::CallableFor(
        isolate(), Builtins::kConstructWithArrayLike_WithFeedback);
    // If this fails, we might need to update the parameter reordering code
    // to ensure that the additional arguments passed via stack are pushed
    // between top of stack and JS arguments.
    DCHECK_EQ(callable.descriptor().GetStackParameterCount(),
              kMaybeFeedbackVector);
    auto call_descriptor = Linkage::GetStubCallDescriptor(
        zone(), callable.descriptor(), stack_argument_count, flags);
    Node* stub_code = jsgraph()->HeapConstant(callable.code());
    Node* receiver = jsgraph()->UndefinedConstant();
    Node* slot = jsgraph()->Int32Constant(p.feedback().index());
    Node* feedback_vector = node->RemoveInput(n.FeedbackVectorIndex());
    // Register argument inputs are followed by stack argument inputs (such as
    // feedback_vector). Both are listed in ascending order. Note that
    // the receiver is implicitly placed on the stack and is thus inserted
    // between explicitly-specified register and stack arguments.
    // TODO(jgruber): Implement a simpler way to specify these mutations.
    node->InsertInput(zone(), 0, stub_code);
    node->InsertInput(zone(), 4, slot);
    node->InsertInput(zone(), 5, feedback_vector);
    node->InsertInput(zone(), 6, receiver);
    // After: {code, target, new_target, arguments_list, slot, vector,
    // receiver}.

    NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
  } else {
    const int stack_argument_count = arg_count - kArgumentList + kReceiver;
    Callable callable =
        Builtins::CallableFor(isolate(), Builtins::kConstructWithArrayLike);
    // If this fails, we might need to update the parameter reordering code
    // to ensure that the additional arguments passed via stack are pushed
    // between top of stack and JS arguments.
    DCHECK_EQ(callable.descriptor().GetStackParameterCount(), 0);
    auto call_descriptor = Linkage::GetStubCallDescriptor(
        zone(), callable.descriptor(), stack_argument_count, flags);
    Node* stub_code = jsgraph()->HeapConstant(callable.code());
    Node* receiver = jsgraph()->UndefinedConstant();
    node->RemoveInput(n.FeedbackVectorIndex());
    node->InsertInput(zone(), 0, stub_code);
    node->InsertInput(zone(), 4, receiver);

    // After: {code, target, new_target, arguments_list, receiver}.

    NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
  }
}

void JSGenericLowering::LowerJSConstructWithSpread(Node* node) {
  JSConstructWithSpreadNode n(node);
  ConstructParameters const& p = n.Parameters();
  int const arg_count = p.arity_without_implicit_args();
  DCHECK_GE(arg_count, 1);
  CallDescriptor::Flags flags = FrameStateFlagForCall(node);

  static constexpr int kReceiver = 1;
  static constexpr int kTheSpread = 1;  // Included in `arg_count`.
  static constexpr int kMaybeFeedbackVector = 1;

  if (CollectFeedbackInGenericLowering() &&
      CollectCallAndConstructFeedback(broker()) && p.feedback().IsValid()) {
    const int stack_argument_count =
        arg_count + kReceiver + kMaybeFeedbackVector;
    Callable callable = Builtins::CallableFor(
        isolate(), Builtins::kConstructWithSpread_WithFeedback);
    // If this fails, we might need to update the parameter reordering code
    // to ensure that the additional arguments passed via stack are pushed
    // between top of stack and JS arguments.
    DCHECK_EQ(callable.descriptor().GetStackParameterCount(),
              kTheSpread + kMaybeFeedbackVector);
    auto call_descriptor = Linkage::GetStubCallDescriptor(
        zone(), callable.descriptor(), stack_argument_count, flags);
    Node* stub_code = jsgraph()->HeapConstant(callable.code());
    Node* slot = jsgraph()->Int32Constant(p.feedback().index());

    // The single available register is needed for `slot`, thus `spread`
    // remains on the stack here.
    Node* stub_arity = jsgraph()->Int32Constant(arg_count - kTheSpread);
    Node* receiver = jsgraph()->UndefinedConstant();
    Node* feedback_vector = node->RemoveInput(n.FeedbackVectorIndex());
    Node* spread = node->RemoveInput(n.LastArgumentIndex());

    // Register argument inputs are followed by stack argument inputs (such as
    // feedback_vector). Both are listed in ascending order. Note that
    // the receiver is implicitly placed on the stack and is thus inserted
    // between explicitly-specified register and stack arguments.
    // TODO(jgruber): Implement a simpler way to specify these mutations.
    node->InsertInput(zone(), 0, stub_code);
    node->InsertInput(zone(), 3, stub_arity);
    node->InsertInput(zone(), 4, slot);
    node->InsertInput(zone(), 5, spread);
    node->InsertInput(zone(), 6, feedback_vector);
    node->InsertInput(zone(), 7, receiver);
    // After: {code, target, new_target, arity, slot, spread, vector, receiver,
    // ...args}.

    NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
  } else {
    const int stack_argument_count = arg_count + kReceiver - kTheSpread;
    Callable callable = CodeFactory::ConstructWithSpread(isolate());
    // If this fails, we might need to update the parameter reordering code
    // to ensure that the additional arguments passed via stack are pushed
    // between top of stack and JS arguments.
    DCHECK_EQ(callable.descriptor().GetStackParameterCount(), 0);
    auto call_descriptor = Linkage::GetStubCallDescriptor(
        zone(), callable.descriptor(), stack_argument_count, flags);
    Node* stub_code = jsgraph()->HeapConstant(callable.code());

    // We pass the spread in a register, not on the stack.
    Node* stub_arity = jsgraph()->Int32Constant(arg_count - kTheSpread);
    Node* receiver = jsgraph()->UndefinedConstant();
    DCHECK(n.FeedbackVectorIndex() > n.LastArgumentIndex());
    node->RemoveInput(n.FeedbackVectorIndex());
    Node* spread = node->RemoveInput(n.LastArgumentIndex());

    node->InsertInput(zone(), 0, stub_code);
    node->InsertInput(zone(), 3, stub_arity);
    node->InsertInput(zone(), 4, spread);
    node->InsertInput(zone(), 5, receiver);

    // After: {code, target, new_target, arity, spread, receiver, ...args}.

    NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
  }
}

// TODO(jgruber,v8:8888): Should this collect feedback?
void JSGenericLowering::LowerJSCallForwardVarargs(Node* node) {
  CallForwardVarargsParameters p = CallForwardVarargsParametersOf(node->op());
  int const arg_count = static_cast<int>(p.arity() - 2);
  CallDescriptor::Flags flags = FrameStateFlagForCall(node);
  Callable callable = CodeFactory::CallForwardVarargs(isolate());
  auto call_descriptor = Linkage::GetStubCallDescriptor(
      zone(), callable.descriptor(), arg_count + 1, flags);
  Node* stub_code = jsgraph()->HeapConstant(callable.code());
  Node* stub_arity = jsgraph()->Int32Constant(arg_count);
  Node* start_index = jsgraph()->Uint32Constant(p.start_index());
  node->InsertInput(zone(), 0, stub_code);
  node->InsertInput(zone(), 2, stub_arity);
  node->InsertInput(zone(), 3, start_index);
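  // After: {code, target, arity, start_index, receiver, ...args}.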
  NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
}

void JSGenericLowering::LowerJSCall(Node* node) {
  JSCallNode n(node);
  CallParameters const& p = n.Parameters();
  int const arg_count = p.arity_without_implicit_args();
  ConvertReceiverMode const mode = p.convert_mode();

  Node* feedback_vector = n.feedback_vector();
  node->RemoveInput(n.FeedbackVectorIndex());

  if (CollectFeedbackInGenericLowering() &&
      CollectCallAndConstructFeedback(broker()) && p.feedback().IsValid()) {
    Callable callable = CodeFactory::Call_WithFeedback(isolate(), mode);
    CallDescriptor::Flags flags = FrameStateFlagForCall(node);
    auto call_descriptor = Linkage::GetStubCallDescriptor(
        zone(), callable.descriptor(), arg_count + 1, flags);
    Node* stub_code = jsgraph()->HeapConstant(callable.code());
    Node* stub_arity = jsgraph()->Int32Constant(arg_count);
    Node* slot = jsgraph()->Int32Constant(p.feedback().index());
    node->InsertInput(zone(), 0, stub_code);
    node->InsertInput(zone(), 2, stub_arity);
    node->InsertInput(zone(), 3, slot);
    node->InsertInput(zone(), 4, feedback_vector);
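    // After: {code, target, arity, slot, vector, receiver, ...args}.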
    NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
  } else {
    Callable callable = CodeFactory::Call(isolate(), mode);
    CallDescriptor::Flags flags = FrameStateFlagForCall(node);
    auto call_descriptor = Linkage::GetStubCallDescriptor(
        zone(), callable.descriptor(), arg_count + 1, flags);
    Node* stub_code = jsgraph()->HeapConstant(callable.code());
    Node* stub_arity = jsgraph()->Int32Constant(arg_count);
    node->InsertInput(zone(), 0, stub_code);
    node->InsertInput(zone(), 2, stub_arity);
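    // After: {code, target, arity, receiver, ...args}.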
1073 NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
1074 }
1075 }
1076
LowerJSCallWithArrayLike(Node * node)1077 void JSGenericLowering::LowerJSCallWithArrayLike(Node* node) {
1078 JSCallWithArrayLikeNode n(node);
1079 CallParameters const& p = n.Parameters();
1080 const int arg_count = p.arity_without_implicit_args();
1081 DCHECK_EQ(arg_count, 1); // The arraylike object.
1082 CallDescriptor::Flags flags = FrameStateFlagForCall(node);
1083
1084 static constexpr int kArgumentsList = 1;
1085 static constexpr int kReceiver = 1;
1086
1087 if (CollectFeedbackInGenericLowering() &&
1088 CollectCallAndConstructFeedback(broker()) && p.feedback().IsValid()) {
1089 const int stack_argument_count = arg_count - kArgumentsList + kReceiver;
1090 Callable callable = Builtins::CallableFor(
1091 isolate(), Builtins::kCallWithArrayLike_WithFeedback);
1092 auto call_descriptor = Linkage::GetStubCallDescriptor(
1093 zone(), callable.descriptor(), stack_argument_count, flags);
1094 Node* stub_code = jsgraph()->HeapConstant(callable.code());
1095 Node* receiver = n.receiver();
1096 Node* arguments_list = n.Argument(0);
1097 Node* feedback_vector = n.feedback_vector();
1098 Node* slot = jsgraph()->Int32Constant(p.feedback().index());
1099
1100 // Shuffling inputs.
1101 // Before: {target, receiver, arguments_list, vector}.
1102
1103 node->ReplaceInput(1, arguments_list);
1104 node->ReplaceInput(2, feedback_vector);
1105 node->ReplaceInput(3, receiver);
1106
1107 // Now: {target, arguments_list, vector, receiver}.
1108
1109 node->InsertInput(zone(), 0, stub_code);
1110 node->InsertInput(zone(), 3, slot);
1111
1112 // After: {code, target, arguments_list, slot, vector, receiver}.
1113
1114 NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
1115 } else {
1116 const int stack_argument_count = arg_count - kArgumentsList + kReceiver;
1117 Callable callable = CodeFactory::CallWithArrayLike(isolate());
1118 auto call_descriptor = Linkage::GetStubCallDescriptor(
1119 zone(), callable.descriptor(), stack_argument_count, flags);
1120 Node* stub_code = jsgraph()->HeapConstant(callable.code());
1121 Node* receiver = n.receiver();
1122 Node* arguments_list = n.Argument(0);
1123
1124 // Shuffling inputs.
1125 // Before: {target, receiver, arguments_list, vector}.
1126
1127 node->RemoveInput(n.FeedbackVectorIndex());
1128 node->InsertInput(zone(), 0, stub_code);
1129 node->ReplaceInput(2, arguments_list);
1130 node->ReplaceInput(3, receiver);
1131
1132 // After: {code, target, arguments_list, receiver}.
1133
1134 NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
1135 }
1136 }
1137
LowerJSCallWithSpread(Node * node)1138 void JSGenericLowering::LowerJSCallWithSpread(Node* node) {
1139 JSCallWithSpreadNode n(node);
1140 CallParameters const& p = n.Parameters();
1141 int const arg_count = p.arity_without_implicit_args();
1142 DCHECK_GE(arg_count, 1); // At least the spread.
1143 CallDescriptor::Flags flags = FrameStateFlagForCall(node);
1144
1145 static constexpr int kReceiver = 1;
1146 static constexpr int kTheSpread = 1;
1147 static constexpr int kMaybeFeedbackVector = 1;
1148
1149 if (CollectFeedbackInGenericLowering() &&
1150 CollectCallAndConstructFeedback(broker()) && p.feedback().IsValid()) {
1151 const int stack_argument_count =
1152 arg_count - kTheSpread + kReceiver + kMaybeFeedbackVector;
1153 Callable callable = Builtins::CallableFor(
1154 isolate(), Builtins::kCallWithSpread_WithFeedback);
1155 // If this fails, we might need to update the parameter reordering code
1156 // to ensure that the additional arguments passed via stack are pushed
1157 // between top of stack and JS arguments.
1158 DCHECK_EQ(callable.descriptor().GetStackParameterCount(),
1159 kMaybeFeedbackVector);
1160 auto call_descriptor = Linkage::GetStubCallDescriptor(
1161 zone(), callable.descriptor(), stack_argument_count, flags);
1162 Node* stub_code = jsgraph()->HeapConstant(callable.code());
1163 Node* slot = jsgraph()->Int32Constant(p.feedback().index());
1164
1165 // We pass the spread in a register, not on the stack.
1166 Node* stub_arity = jsgraph()->Int32Constant(arg_count - kTheSpread);
1167
1168 // Register argument inputs are followed by stack argument inputs (such as
1169 // feedback_vector). Both are listed in ascending order. Note that
1170 // the receiver is implicitly placed on the stack and is thus inserted
1171 // between explicitly-specified register and stack arguments.
1172 // TODO(jgruber): Implement a simpler way to specify these mutations.
1173
1174 // Shuffling inputs.
1175 // Before: {target, receiver, ...args, spread, vector}.
1176 Node* feedback_vector = node->RemoveInput(n.FeedbackVectorIndex());
1177 Node* spread = node->RemoveInput(n.LastArgumentIndex());
1178 node->InsertInput(zone(), 0, stub_code);
1179 node->InsertInput(zone(), 2, stub_arity);
1180 node->InsertInput(zone(), 3, spread);
1181 node->InsertInput(zone(), 4, slot);
1182 node->InsertInput(zone(), 5, feedback_vector);
1183 // After: {code, target, arity, spread, slot, vector, receiver, ...args}.
1184
1185 NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
1186 } else {
1187 const int stack_argument_count = arg_count - kTheSpread + kReceiver;
1188 Callable callable = CodeFactory::CallWithSpread(isolate());
1189 // If this fails, we might need to update the parameter reordering code
1190 // to ensure that the additional arguments passed via stack are pushed
1191 // between top of stack and JS arguments.
1192 DCHECK_EQ(callable.descriptor().GetStackParameterCount(), 0);
1193 auto call_descriptor = Linkage::GetStubCallDescriptor(
1194 zone(), callable.descriptor(), stack_argument_count, flags);
1195 Node* stub_code = jsgraph()->HeapConstant(callable.code());
1196
1197 // We pass the spread in a register, not on the stack.
1198 Node* stub_arity = jsgraph()->Int32Constant(arg_count - kTheSpread);
1199
1200 // Shuffling inputs.
1201 // Before: {target, receiver, ...args, spread, vector}.
1202
1203 node->RemoveInput(n.FeedbackVectorIndex());
1204 Node* spread = node->RemoveInput(n.LastArgumentIndex());
1205
1206 node->InsertInput(zone(), 0, stub_code);
1207 node->InsertInput(zone(), 2, stub_arity);
1208 node->InsertInput(zone(), 3, spread);
1209
1210 // After: {code, target, arity, spread, receiver, ...args}.
1211
1212 NodeProperties::ChangeOp(node, common()->Call(call_descriptor));
1213 }
1214 }
1215
LowerJSCallRuntime(Node * node)1216 void JSGenericLowering::LowerJSCallRuntime(Node* node) {
1217 const CallRuntimeParameters& p = CallRuntimeParametersOf(node->op());
1218 ReplaceWithRuntimeCall(node, p.id(), static_cast<int>(p.arity()));
1219 }
1220
LowerJSForInPrepare(Node * node)1221 void JSGenericLowering::LowerJSForInPrepare(Node* node) {
1222 JSForInPrepareNode n(node);
1223 Effect effect(node); // {node} is kept in the effect chain.
1224 Control control = n.control(); // .. but not in the control chain.
1225 Node* enumerator = n.enumerator();
1226 Node* slot =
1227 jsgraph()->UintPtrConstant(n.Parameters().feedback().slot.ToInt());
1228
  std::vector<Edge> use_edges;
  for (Edge edge : node->use_edges()) use_edges.push_back(edge);

  // {node} will be changed to a builtin call (see below). The returned value
  // is a fixed array containing {cache_array} and {cache_length}.
  // TODO(jgruber): This is awkward; what we really want is two return values,
  // the {cache_array} and {cache_length}, or better yet three return values
  // so that we can avoid the graph rewrites below. Builtin support for
  // multiple return values is unclear though.

  Node* result_fixed_array = node;
  Node* cache_type = enumerator;  // Just to clarify the rename.
  Node* cache_array;
  Node* cache_length;

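  // Read {cache_array} from element 0 and {cache_length} from element 1 of
  // the FixedArray returned by the ForInPrepare builtin call below.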
  cache_array = effect = graph()->NewNode(
      machine()->Load(MachineType::AnyTagged()), result_fixed_array,
      jsgraph()->IntPtrConstant(FixedArray::OffsetOfElementAt(0) -
                                kHeapObjectTag),
      effect, control);
  cache_length = effect = graph()->NewNode(
      machine()->Load(MachineType::AnyTagged()), result_fixed_array,
      jsgraph()->IntPtrConstant(FixedArray::OffsetOfElementAt(1) -
                                kHeapObjectTag),
      effect, control);

  // Update the uses of {node}.
  for (Edge edge : use_edges) {
    Node* const user = edge.from();
    if (NodeProperties::IsEffectEdge(edge)) {
      edge.UpdateTo(effect);
    } else if (NodeProperties::IsControlEdge(edge)) {
      edge.UpdateTo(control);
    } else {
      DCHECK(NodeProperties::IsValueEdge(edge));
      switch (ProjectionIndexOf(user->op())) {
        case 0:
          Replace(user, cache_type);
          break;
        case 1:
          Replace(user, cache_array);
          break;
        case 2:
          Replace(user, cache_length);
          break;
        default:
          UNREACHABLE();
      }
    }
  }

  // Finally, change the original node into a builtin call. This happens here,
  // after graph rewrites, since the Call does not have a control output and
  // thus must not have any control uses. Any previously existing control
  // outputs have been replaced by the graph rewrite above.
  node->InsertInput(zone(), n.FeedbackVectorIndex(), slot);
  ReplaceWithBuiltinCall(node, Builtins::kForInPrepare);
}

void JSGenericLowering::LowerJSForInNext(Node* node) {
  JSForInNextNode n(node);
  node->InsertInput(
      zone(), 0,
      jsgraph()->UintPtrConstant(n.Parameters().feedback().slot.ToInt()));
  ReplaceWithBuiltinCall(node, Builtins::kForInNext);
}

void JSGenericLowering::LowerJSLoadMessage(Node* node) {
  UNREACHABLE();  // Eliminated in typed lowering.
}


void JSGenericLowering::LowerJSStoreMessage(Node* node) {
  UNREACHABLE();  // Eliminated in typed lowering.
}

void JSGenericLowering::LowerJSLoadModule(Node* node) {
  UNREACHABLE();  // Eliminated in typed lowering.
}

void JSGenericLowering::LowerJSStoreModule(Node* node) {
  UNREACHABLE();  // Eliminated in typed lowering.
}

void JSGenericLowering::LowerJSGetImportMeta(Node* node) {
  ReplaceWithRuntimeCall(node, Runtime::kGetImportMetaObject);
}

void JSGenericLowering::LowerJSGeneratorStore(Node* node) {
  UNREACHABLE();  // Eliminated in typed lowering.
}

void JSGenericLowering::LowerJSGeneratorRestoreContinuation(Node* node) {
  UNREACHABLE();  // Eliminated in typed lowering.
}

void JSGenericLowering::LowerJSGeneratorRestoreContext(Node* node) {
  UNREACHABLE();  // Eliminated in typed lowering.
}

void JSGenericLowering::LowerJSGeneratorRestoreInputOrDebugPos(Node* node) {
  UNREACHABLE();  // Eliminated in typed lowering.
}

void JSGenericLowering::LowerJSGeneratorRestoreRegister(Node* node) {
  UNREACHABLE();  // Eliminated in typed lowering.
}

namespace {

StackCheckKind StackCheckKindOfJSStackCheck(const Operator* op) {
  DCHECK(op->opcode() == IrOpcode::kJSStackCheck);
  return OpParameter<StackCheckKind>(op);
}

}  // namespace

void JSGenericLowering::LowerJSStackCheck(Node* node) {
  Node* effect = NodeProperties::GetEffectInput(node);
  Node* control = NodeProperties::GetControlInput(node);

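  // Load the current stack limit from the isolate's jslimit address.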
  Node* limit = effect =
      graph()->NewNode(machine()->Load(MachineType::Pointer()),
                       jsgraph()->ExternalConstant(
                           ExternalReference::address_of_jslimit(isolate())),
                       jsgraph()->IntPtrConstant(0), effect, control);

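  // Fast-path check: the stack pointer must still be above the limit.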
  StackCheckKind stack_check_kind = StackCheckKindOfJSStackCheck(node->op());
  Node* check = effect = graph()->NewNode(
      machine()->StackPointerGreaterThan(stack_check_kind), limit, effect);
  Node* branch =
      graph()->NewNode(common()->Branch(BranchHint::kTrue), check, control);

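  // On the true (fast) path nothing happens and the effect chain is left as
  // is. On the false (slow) path, {node} itself becomes the stack-guard
  // runtime call (see below) and thus serves as both the effect and control
  // output of that path.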
  Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
  Node* etrue = effect;

  Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
  NodeProperties::ReplaceControlInput(node, if_false);
  NodeProperties::ReplaceEffectInput(node, effect);
  Node* efalse = if_false = node;

  Node* merge = graph()->NewNode(common()->Merge(2), if_true, if_false);
  Node* ephi = graph()->NewNode(common()->EffectPhi(2), etrue, efalse, merge);

  // Wire the new diamond into the graph; {node} can still throw.
  NodeProperties::ReplaceUses(node, node, ephi, merge, merge);
  NodeProperties::ReplaceControlInput(merge, if_false, 1);
  NodeProperties::ReplaceEffectInput(ephi, efalse, 1);

  // This iteration cuts out potential {IfSuccess} or {IfException} projection
  // uses of the original node and places them inside the diamond, so that we
  // can change the original {node} into the slow-path runtime call.
  for (Edge edge : merge->use_edges()) {
    if (!NodeProperties::IsControlEdge(edge)) continue;
    if (edge.from()->opcode() == IrOpcode::kIfSuccess) {
      NodeProperties::ReplaceUses(edge.from(), nullptr, nullptr, merge);
      NodeProperties::ReplaceControlInput(merge, edge.from(), 1);
      edge.UpdateTo(node);
    }
    if (edge.from()->opcode() == IrOpcode::kIfException) {
      NodeProperties::ReplaceEffectInput(edge.from(), node);
      edge.UpdateTo(node);
    }
  }

  // Turn the stack check into a runtime call. At function entry, the runtime
  // function takes an offset argument which is subtracted from the stack
  // pointer prior to the stack check (i.e. the check is `sp - offset >=
  // limit`).
  if (stack_check_kind == StackCheckKind::kJSFunctionEntry) {
    node->InsertInput(zone(), 0,
                      graph()->NewNode(machine()->LoadStackCheckOffset()));
    ReplaceWithRuntimeCall(node, Runtime::kStackGuardWithGap);
  } else {
    ReplaceWithRuntimeCall(node, Runtime::kStackGuard);
  }
}

void JSGenericLowering::LowerJSDebugger(Node* node) {
  ReplaceWithBuiltinCall(node, Builtins::kHandleDebuggerStatement);
}

Zone* JSGenericLowering::zone() const { return graph()->zone(); }


Isolate* JSGenericLowering::isolate() const { return jsgraph()->isolate(); }


Graph* JSGenericLowering::graph() const { return jsgraph()->graph(); }


CommonOperatorBuilder* JSGenericLowering::common() const {
  return jsgraph()->common();
}


MachineOperatorBuilder* JSGenericLowering::machine() const {
  return jsgraph()->machine();
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8