// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/maglev/maglev-graph-builder.h"

#include "src/compiler/compilation-dependencies.h"
#include "src/compiler/feedback-source.h"
#include "src/compiler/heap-refs.h"
#include "src/compiler/processed-feedback.h"
#include "src/handles/maybe-handles-inl.h"
#include "src/ic/handler-configuration-inl.h"
#include "src/maglev/maglev-ir.h"
#include "src/objects/feedback-vector.h"
#include "src/objects/name-inl.h"
#include "src/objects/property-cell.h"
#include "src/objects/slots-inl.h"

namespace v8 {
namespace internal {

namespace maglev {

namespace {

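// Builds the Smi-encoded LoadHandler configuration for a simple field load,
// mirroring the encoding the IC system uses for such handlers. LoadField
// nodes consume this encoding directly.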
int LoadSimpleFieldHandler(FieldIndex field_index) {
  int config = LoadHandler::KindBits::encode(LoadHandler::Kind::kField) |
               LoadHandler::IsInobjectBits::encode(field_index.is_inobject()) |
               LoadHandler::IsDoubleBits::encode(field_index.is_double()) |
               LoadHandler::FieldIndexBits::encode(field_index.index());
  return config;
}

}  // namespace

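// Sets up the per-bytecode data structures (jump targets and merge states),
// then seeds the initial interpreter frame: parameters, context, closure,
// new.target and the plain registers all receive their starting values
// before the first bytecode is visited.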
MaglevGraphBuilder::MaglevGraphBuilder(LocalIsolate* local_isolate,
                                       MaglevCompilationUnit* compilation_unit)
    : local_isolate_(local_isolate),
      compilation_unit_(compilation_unit),
      iterator_(bytecode().object()),
      jump_targets_(zone()->NewArray<BasicBlockRef>(bytecode().length())),
      // Overallocate merge_states_ by one to allow always looking up the
      // next offset.
      merge_states_(zone()->NewArray<MergePointInterpreterFrameState*>(
          bytecode().length() + 1)),
      graph_(Graph::New(zone())),
      current_interpreter_frame_(*compilation_unit_) {
  memset(merge_states_, 0,
         bytecode().length() * sizeof(MergePointInterpreterFrameState*));
  // Default construct basic block refs.
  // TODO(leszeks): This could be a memset of nullptr to ..._jump_targets_.
  for (int i = 0; i < bytecode().length(); ++i) {
    new (&jump_targets_[i]) BasicBlockRef();
  }

  CalculatePredecessorCounts();

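  // Pre-create merge states for loop headers, so that back-edges have
  // somewhere to merge into even though the loop body hasn't been visited
  // yet. Non-loop merge points are created lazily in MergeIntoFrameState.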
  for (auto& offset_and_info : bytecode_analysis().GetLoopInfos()) {
    int offset = offset_and_info.first;
    const compiler::LoopInfo& loop_info = offset_and_info.second;

    const compiler::BytecodeLivenessState* liveness =
        bytecode_analysis().GetInLivenessFor(offset);

    merge_states_[offset] = zone()->New<MergePointInterpreterFrameState>(
        *compilation_unit_, offset, NumPredecessors(offset), liveness,
        &loop_info);
  }

  current_block_ = zone()->New<BasicBlock>(nullptr);
  block_offset_ = -1;

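  // Parameters (including the receiver) start out as InitialValue nodes,
  // which represent the values already present in the frame on entry.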
  for (int i = 0; i < parameter_count(); i++) {
    interpreter::Register reg = interpreter::Register::FromParameterIndex(i);
    current_interpreter_frame_.set(reg, AddNewNode<InitialValue>({}, reg));
  }

  // TODO(leszeks): Extract out separate "incoming context/closure" nodes, so
  // that they can be read from the machine register but also use the
  // frame-spilled slot.
  interpreter::Register regs[] = {interpreter::Register::current_context(),
                                  interpreter::Register::function_closure()};
  for (interpreter::Register& reg : regs) {
    current_interpreter_frame_.set(reg, AddNewNode<InitialValue>({}, reg));
  }

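  // All remaining interpreter registers start out undefined, except for the
  // new.target/generator register (when present), which receives the value
  // of the incoming new.target machine register.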
  interpreter::Register new_target_or_generator_register =
      bytecode().incoming_new_target_or_generator_register();

  int register_index = 0;
  // TODO(leszeks): Don't emit if not needed.
  ValueNode* undefined_value =
      AddNewNode<RootConstant>({}, RootIndex::kUndefinedValue);
  if (new_target_or_generator_register.is_valid()) {
    int new_target_index = new_target_or_generator_register.index();
    for (; register_index < new_target_index; register_index++) {
      StoreRegister(interpreter::Register(register_index), undefined_value);
    }
    StoreRegister(
        new_target_or_generator_register,
        // TODO(leszeks): Expose in Graph.
        AddNewNode<RegisterInput>({}, kJavaScriptCallNewTargetRegister));
    register_index++;
  }
  for (; register_index < register_count(); register_index++) {
    StoreRegister(interpreter::Register(register_index), undefined_value);
  }

  BasicBlock* first_block = CreateBlock<Jump>({}, &jump_targets_[0]);
  MergeIntoFrameState(first_block, 0);
}

// TODO(v8:7700): Clean up after all bytecodes are supported.
#define MAGLEV_UNIMPLEMENTED(BytecodeName)                              \
  do {                                                                  \
    std::cerr << "Maglev: Can't compile, bytecode " #BytecodeName       \
                 " is not supported\n";                                 \
    found_unsupported_bytecode_ = true;                                 \
    this_field_will_be_unused_once_all_bytecodes_are_supported_ = true; \
  } while (false)

#define MAGLEV_UNIMPLEMENTED_BYTECODE(Name) \
  void MaglevGraphBuilder::Visit##Name() { MAGLEV_UNIMPLEMENTED(Name); }

namespace {
template <Operation kOperation>
struct NodeForOperationHelper;

#define NODE_FOR_OPERATION_HELPER(Name)               \
  template <>                                         \
  struct NodeForOperationHelper<Operation::k##Name> { \
    using generic_type = Generic##Name;               \
  };
OPERATION_LIST(NODE_FOR_OPERATION_HELPER)
#undef NODE_FOR_OPERATION_HELPER

template <Operation kOperation>
using GenericNodeForOperation =
    typename NodeForOperationHelper<kOperation>::generic_type;
}  // namespace

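// The BuildGeneric* helpers emit the fully generic node for an operation
// (e.g. GenericAdd), which performs the operation via the corresponding
// builtin, using the given feedback slot.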
template <Operation kOperation>
void MaglevGraphBuilder::BuildGenericUnaryOperationNode() {
  FeedbackSlot slot_index = GetSlotOperand(0);
  ValueNode* value = GetAccumulatorTaggedValue();
  SetAccumulator(AddNewNode<GenericNodeForOperation<kOperation>>(
      {value}, compiler::FeedbackSource{feedback(), slot_index}));
}

template <Operation kOperation>
void MaglevGraphBuilder::BuildGenericBinaryOperationNode() {
  ValueNode* left = LoadRegisterTaggedValue(0);
  ValueNode* right = GetAccumulatorTaggedValue();
  FeedbackSlot slot_index = GetSlotOperand(1);
  SetAccumulator(AddNewNode<GenericNodeForOperation<kOperation>>(
      {left, right}, compiler::FeedbackSource{feedback(), slot_index}));
}

template <Operation kOperation>
void MaglevGraphBuilder::BuildGenericBinarySmiOperationNode() {
  ValueNode* left = GetAccumulatorTaggedValue();
  Smi constant = Smi::FromInt(iterator_.GetImmediateOperand(0));
  ValueNode* right = AddNewNode<SmiConstant>({}, constant);
  FeedbackSlot slot_index = GetSlotOperand(1);
  SetAccumulator(AddNewNode<GenericNodeForOperation<kOperation>>(
      {left, right}, compiler::FeedbackSource{feedback(), slot_index}));
}

template <Operation kOperation>
void MaglevGraphBuilder::VisitUnaryOperation() {
  // TODO(victorgomes): Use feedback info and create optimized versions.
  BuildGenericUnaryOperationNode<kOperation>();
}

template <Operation kOperation>
void MaglevGraphBuilder::VisitBinaryOperation() {
  FeedbackNexus nexus = FeedbackNexusForOperand(1);

  if (nexus.ic_state() == InlineCacheState::MONOMORPHIC) {
    if (nexus.kind() == FeedbackSlotKind::kBinaryOp) {
      BinaryOperationHint hint = nexus.GetBinaryOperationFeedback();

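      // Smi feedback: untag both inputs and, for addition, emit an
      // overflow-checking Int32 add instead of the generic node. Both the
      // Smi untagging and the overflow check deopt if their assumption
      // fails.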
      if (hint == BinaryOperationHint::kSignedSmall) {
        ValueNode *left, *right;
        if (IsRegisterEqualToAccumulator(0)) {
          left = right = LoadRegisterSmiUntaggedValue(0);
        } else {
          left = LoadRegisterSmiUntaggedValue(0);
          right = GetAccumulatorSmiUntaggedValue();
        }

        if (kOperation == Operation::kAdd) {
          SetAccumulator(AddNewNode<Int32AddWithOverflow>({left, right}));
          return;
        }
      }
    }
  }

  // TODO(victorgomes): Use feedback info and create optimized versions.
  BuildGenericBinaryOperationNode<kOperation>();
}

template <Operation kOperation>
void MaglevGraphBuilder::VisitBinarySmiOperation() {
  FeedbackNexus nexus = FeedbackNexusForOperand(1);

  if (nexus.ic_state() == InlineCacheState::MONOMORPHIC) {
    if (nexus.kind() == FeedbackSlotKind::kBinaryOp) {
      BinaryOperationHint hint = nexus.GetBinaryOperationFeedback();

      if (hint == BinaryOperationHint::kSignedSmall) {
        ValueNode* left = GetAccumulatorSmiUntaggedValue();
        int32_t constant = iterator_.GetImmediateOperand(0);

        if (kOperation == Operation::kAdd) {
          if (constant == 0) {
            // Adding zero is a no-op: once the accumulator has passed the
            // Smi check it already holds the right value, so just return.
            return;
          }
          // TODO(victorgomes): We could create an Int32Add node that receives
          // a constant and avoid a register move.
          ValueNode* right = AddNewNode<Int32Constant>({}, constant);
          SetAccumulator(AddNewNode<Int32AddWithOverflow>({left, right}));
          return;
        }
      }
    }
  }

  // TODO(victorgomes): Use feedback info and create optimized versions.
  BuildGenericBinarySmiOperationNode<kOperation>();
}

void MaglevGraphBuilder::VisitLdar() {
  MoveNodeBetweenRegisters(iterator_.GetRegisterOperand(0),
                           interpreter::Register::virtual_accumulator());
}

void MaglevGraphBuilder::VisitLdaZero() {
  SetAccumulator(AddNewNode<SmiConstant>({}, Smi::zero()));
}
void MaglevGraphBuilder::VisitLdaSmi() {
  Smi constant = Smi::FromInt(iterator_.GetImmediateOperand(0));
  SetAccumulator(AddNewNode<SmiConstant>({}, constant));
}
void MaglevGraphBuilder::VisitLdaUndefined() {
  SetAccumulator(AddNewNode<RootConstant>({}, RootIndex::kUndefinedValue));
}
void MaglevGraphBuilder::VisitLdaNull() {
  SetAccumulator(AddNewNode<RootConstant>({}, RootIndex::kNullValue));
}
void MaglevGraphBuilder::VisitLdaTheHole() {
  SetAccumulator(AddNewNode<RootConstant>({}, RootIndex::kTheHoleValue));
}
void MaglevGraphBuilder::VisitLdaTrue() {
  SetAccumulator(AddNewNode<RootConstant>({}, RootIndex::kTrueValue));
}
void MaglevGraphBuilder::VisitLdaFalse() {
  SetAccumulator(AddNewNode<RootConstant>({}, RootIndex::kFalseValue));
}
void MaglevGraphBuilder::VisitLdaConstant() {
  SetAccumulator(GetConstant(GetRefOperand<HeapObject>(0)));
}
MAGLEV_UNIMPLEMENTED_BYTECODE(LdaContextSlot)
MAGLEV_UNIMPLEMENTED_BYTECODE(LdaImmutableContextSlot)
void MaglevGraphBuilder::VisitLdaCurrentContextSlot() {
  ValueNode* context = GetContext();
  int slot_index = iterator_.GetIndexOperand(0);

  // TODO(leszeks): Passing a LoadHandler to LoadField here is a bit of
  // a hack, maybe we should have a LoadRawOffset or similar.
  SetAccumulator(AddNewNode<LoadField>(
      {context},
      LoadSimpleFieldHandler(FieldIndex::ForInObjectOffset(
          Context::OffsetOfElementAt(slot_index), FieldIndex::kTagged))));
}
void MaglevGraphBuilder::VisitLdaImmutableCurrentContextSlot() {
  // TODO(leszeks): Consider context specialising.
  VisitLdaCurrentContextSlot();
}
void MaglevGraphBuilder::VisitStar() {
  MoveNodeBetweenRegisters(interpreter::Register::virtual_accumulator(),
                           iterator_.GetRegisterOperand(0));
}
#define SHORT_STAR_VISITOR(Name, ...)                                          \
  void MaglevGraphBuilder::Visit##Name() {                                     \
    MoveNodeBetweenRegisters(                                                  \
        interpreter::Register::virtual_accumulator(),                          \
        interpreter::Register::FromShortStar(interpreter::Bytecode::k##Name)); \
  }
SHORT_STAR_BYTECODE_LIST(SHORT_STAR_VISITOR)
#undef SHORT_STAR_VISITOR

void MaglevGraphBuilder::VisitMov() {
  MoveNodeBetweenRegisters(iterator_.GetRegisterOperand(0),
                           iterator_.GetRegisterOperand(1));
}
MAGLEV_UNIMPLEMENTED_BYTECODE(PushContext)
MAGLEV_UNIMPLEMENTED_BYTECODE(PopContext)
MAGLEV_UNIMPLEMENTED_BYTECODE(TestReferenceEqual)
MAGLEV_UNIMPLEMENTED_BYTECODE(TestUndetectable)
MAGLEV_UNIMPLEMENTED_BYTECODE(TestNull)
MAGLEV_UNIMPLEMENTED_BYTECODE(TestUndefined)
MAGLEV_UNIMPLEMENTED_BYTECODE(TestTypeOf)

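// Lowers a global load that resolved to a property cell. Depending on the
// cell type this either constant-folds the value (taking a code dependency
// on the cell where needed) or emits a LoadField from the cell's value slot.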
void MaglevGraphBuilder::BuildPropertyCellAccess(
    const compiler::PropertyCellRef& property_cell) {
  // TODO(leszeks): A bunch of this is copied from
  // js-native-context-specialization.cc -- I wonder if we can unify it
  // somehow.
  bool was_cached = property_cell.Cache();
  CHECK(was_cached);

  compiler::ObjectRef property_cell_value = property_cell.value();
  if (property_cell_value.IsTheHole()) {
    // The property cell is no longer valid.
    EmitUnconditionalDeopt();
    return;
  }

  PropertyDetails property_details = property_cell.property_details();
  PropertyCellType property_cell_type = property_details.cell_type();
  DCHECK_EQ(PropertyKind::kData, property_details.kind());

  if (!property_details.IsConfigurable() && property_details.IsReadOnly()) {
    SetAccumulator(GetConstant(property_cell_value));
    return;
  }

  // Record a code dependency on the cell if we can benefit from the
  // additional feedback, or the global property is configurable (i.e.
  // can be deleted or reconfigured to an accessor property).
  if (property_cell_type != PropertyCellType::kMutable ||
      property_details.IsConfigurable()) {
    broker()->dependencies()->DependOnGlobalProperty(property_cell);
  }

  // Loads from constant/undefined global properties can be constant-folded.
  if (property_cell_type == PropertyCellType::kConstant ||
      property_cell_type == PropertyCellType::kUndefined) {
    SetAccumulator(GetConstant(property_cell_value));
    return;
  }

  ValueNode* property_cell_node =
      AddNewNode<Constant>({}, property_cell.AsHeapObject());
  // TODO(leszeks): Passing a LoadHandler to LoadField here is a bit of
  // a hack, maybe we should have a LoadRawOffset or similar.
  SetAccumulator(AddNewNode<LoadField>(
      {property_cell_node},
      LoadSimpleFieldHandler(FieldIndex::ForInObjectOffset(
          PropertyCell::kValueOffset, FieldIndex::kTagged))));
}

void MaglevGraphBuilder::VisitLdaGlobal() {
  // LdaGlobal <name_index> <slot>

  static const int kNameOperandIndex = 0;
  static const int kSlotOperandIndex = 1;

  compiler::NameRef name = GetRefOperand<Name>(kNameOperandIndex);
  const compiler::ProcessedFeedback& access_feedback =
      broker()->GetFeedbackForGlobalAccess(compiler::FeedbackSource(
          feedback(), GetSlotOperand(kSlotOperandIndex)));

  if (access_feedback.IsInsufficient()) {
    EmitUnconditionalDeopt();
    return;
  }

  const compiler::GlobalAccessFeedback& global_access_feedback =
      access_feedback.AsGlobalAccess();

  if (global_access_feedback.IsPropertyCell()) {
    BuildPropertyCellAccess(global_access_feedback.property_cell());
  } else {
    // TODO(leszeks): Handle the IsScriptContextSlot case.

    ValueNode* context = GetContext();
    SetAccumulator(AddNewNode<LoadGlobal>({context}, name));
  }
}
MAGLEV_UNIMPLEMENTED_BYTECODE(LdaGlobalInsideTypeof)
MAGLEV_UNIMPLEMENTED_BYTECODE(StaGlobal)
MAGLEV_UNIMPLEMENTED_BYTECODE(StaContextSlot)
MAGLEV_UNIMPLEMENTED_BYTECODE(StaCurrentContextSlot)
MAGLEV_UNIMPLEMENTED_BYTECODE(LdaLookupSlot)
MAGLEV_UNIMPLEMENTED_BYTECODE(LdaLookupContextSlot)
MAGLEV_UNIMPLEMENTED_BYTECODE(LdaLookupGlobalSlot)
MAGLEV_UNIMPLEMENTED_BYTECODE(LdaLookupSlotInsideTypeof)
MAGLEV_UNIMPLEMENTED_BYTECODE(LdaLookupContextSlotInsideTypeof)
MAGLEV_UNIMPLEMENTED_BYTECODE(LdaLookupGlobalSlotInsideTypeof)
MAGLEV_UNIMPLEMENTED_BYTECODE(StaLookupSlot)
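// Lowers a named load. With monomorphic map feedback and a simple Smi field
// handler this becomes a CheckMaps plus a LoadField; everything else falls
// back to the generic LoadNamedGeneric node.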
void MaglevGraphBuilder::VisitGetNamedProperty() {
  // GetNamedProperty <object> <name_index> <slot>
  ValueNode* object = LoadRegisterTaggedValue(0);
  compiler::NameRef name = GetRefOperand<Name>(1);
  FeedbackSlot slot = GetSlotOperand(2);
  compiler::FeedbackSource feedback_source{feedback(), slot};

  const compiler::ProcessedFeedback& processed_feedback =
      broker()->GetFeedbackForPropertyAccess(feedback_source,
                                             compiler::AccessMode::kLoad, name);

  switch (processed_feedback.kind()) {
    case compiler::ProcessedFeedback::kInsufficient:
      EmitUnconditionalDeopt();
      return;

    case compiler::ProcessedFeedback::kNamedAccess: {
      const compiler::NamedAccessFeedback& named_feedback =
          processed_feedback.AsNamedAccess();
      if (named_feedback.maps().size() == 1) {
        // Monomorphic load, check the handler.
        // TODO(leszeks): Make GetFeedbackForPropertyAccess read the handler.
        MaybeObjectHandle handler =
            FeedbackNexusForSlot(slot).FindHandlerForMap(
                named_feedback.maps()[0].object());
        if (!handler.is_null() && handler->IsSmi()) {
          // Smi handler, emit a map check and LoadField.
          int smi_handler = handler->ToSmi().value();
          LoadHandler::Kind kind = LoadHandler::KindBits::decode(smi_handler);
          if (kind == LoadHandler::Kind::kField &&
              !LoadHandler::IsWasmStructBits::decode(smi_handler)) {
            AddNewNode<CheckMaps>({object}, named_feedback.maps()[0]);
            SetAccumulator(AddNewNode<LoadField>({object}, smi_handler));
            return;
          }
        }
      }
    } break;

    default:
      break;
  }

  // Create a generic load in the fallthrough.
  ValueNode* context = GetContext();
  SetAccumulator(
      AddNewNode<LoadNamedGeneric>({context, object}, name, feedback_source));
}

MAGLEV_UNIMPLEMENTED_BYTECODE(GetNamedPropertyFromSuper)
MAGLEV_UNIMPLEMENTED_BYTECODE(GetKeyedProperty)
MAGLEV_UNIMPLEMENTED_BYTECODE(LdaModuleVariable)
MAGLEV_UNIMPLEMENTED_BYTECODE(StaModuleVariable)

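// Lowers a named store, mirroring the load path above: monomorphic map
// feedback with a Smi field handler becomes a CheckMaps plus a StoreField.
// There is no generic store fallback yet, so anything else bails out.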
void MaglevGraphBuilder::VisitSetNamedProperty() {
  // SetNamedProperty <object> <name_index> <slot>
  ValueNode* object = LoadRegisterTaggedValue(0);
  compiler::NameRef name = GetRefOperand<Name>(1);
  FeedbackSlot slot = GetSlotOperand(2);
  compiler::FeedbackSource feedback_source{feedback(), slot};

  const compiler::ProcessedFeedback& processed_feedback =
      broker()->GetFeedbackForPropertyAccess(
          feedback_source, compiler::AccessMode::kStore, name);

  switch (processed_feedback.kind()) {
    case compiler::ProcessedFeedback::kInsufficient:
      EmitUnconditionalDeopt();
      return;

    case compiler::ProcessedFeedback::kNamedAccess: {
      const compiler::NamedAccessFeedback& named_feedback =
          processed_feedback.AsNamedAccess();
      if (named_feedback.maps().size() == 1) {
        // Monomorphic store, check the handler.
        // TODO(leszeks): Make GetFeedbackForPropertyAccess read the handler.
        MaybeObjectHandle handler =
            FeedbackNexusForSlot(slot).FindHandlerForMap(
                named_feedback.maps()[0].object());
        if (!handler.is_null() && handler->IsSmi()) {
          int smi_handler = handler->ToSmi().value();
          StoreHandler::Kind kind =
              StoreHandler::KindBits::decode(smi_handler);
          if (kind == StoreHandler::Kind::kField) {
            AddNewNode<CheckMaps>({object}, named_feedback.maps()[0]);
            ValueNode* value = GetAccumulatorTaggedValue();
            AddNewNode<StoreField>({object, value}, smi_handler);
            return;
          }
        }
      }
    } break;

    default:
      break;
  }

  // TODO(victorgomes): Generic store.
  MAGLEV_UNIMPLEMENTED(VisitSetNamedProperty);
}

MAGLEV_UNIMPLEMENTED_BYTECODE(DefineNamedOwnProperty)
MAGLEV_UNIMPLEMENTED_BYTECODE(SetKeyedProperty)
MAGLEV_UNIMPLEMENTED_BYTECODE(DefineKeyedOwnProperty)
MAGLEV_UNIMPLEMENTED_BYTECODE(StaInArrayLiteral)
MAGLEV_UNIMPLEMENTED_BYTECODE(DefineKeyedOwnPropertyInLiteral)
MAGLEV_UNIMPLEMENTED_BYTECODE(CollectTypeProfile)

void MaglevGraphBuilder::VisitAdd() { VisitBinaryOperation<Operation::kAdd>(); }
void MaglevGraphBuilder::VisitSub() {
  VisitBinaryOperation<Operation::kSubtract>();
}
void MaglevGraphBuilder::VisitMul() {
  VisitBinaryOperation<Operation::kMultiply>();
}
void MaglevGraphBuilder::VisitDiv() {
  VisitBinaryOperation<Operation::kDivide>();
}
void MaglevGraphBuilder::VisitMod() {
  VisitBinaryOperation<Operation::kModulus>();
}
void MaglevGraphBuilder::VisitExp() {
  VisitBinaryOperation<Operation::kExponentiate>();
}
void MaglevGraphBuilder::VisitBitwiseOr() {
  VisitBinaryOperation<Operation::kBitwiseOr>();
}
void MaglevGraphBuilder::VisitBitwiseXor() {
  VisitBinaryOperation<Operation::kBitwiseXor>();
}
void MaglevGraphBuilder::VisitBitwiseAnd() {
  VisitBinaryOperation<Operation::kBitwiseAnd>();
}
void MaglevGraphBuilder::VisitShiftLeft() {
  VisitBinaryOperation<Operation::kShiftLeft>();
}
void MaglevGraphBuilder::VisitShiftRight() {
  VisitBinaryOperation<Operation::kShiftRight>();
}
void MaglevGraphBuilder::VisitShiftRightLogical() {
  VisitBinaryOperation<Operation::kShiftRightLogical>();
}

void MaglevGraphBuilder::VisitAddSmi() {
  VisitBinarySmiOperation<Operation::kAdd>();
}
void MaglevGraphBuilder::VisitSubSmi() {
  VisitBinarySmiOperation<Operation::kSubtract>();
}
void MaglevGraphBuilder::VisitMulSmi() {
  VisitBinarySmiOperation<Operation::kMultiply>();
}
void MaglevGraphBuilder::VisitDivSmi() {
  VisitBinarySmiOperation<Operation::kDivide>();
}
void MaglevGraphBuilder::VisitModSmi() {
  VisitBinarySmiOperation<Operation::kModulus>();
}
void MaglevGraphBuilder::VisitExpSmi() {
  VisitBinarySmiOperation<Operation::kExponentiate>();
}
void MaglevGraphBuilder::VisitBitwiseOrSmi() {
  VisitBinarySmiOperation<Operation::kBitwiseOr>();
}
void MaglevGraphBuilder::VisitBitwiseXorSmi() {
  VisitBinarySmiOperation<Operation::kBitwiseXor>();
}
void MaglevGraphBuilder::VisitBitwiseAndSmi() {
  VisitBinarySmiOperation<Operation::kBitwiseAnd>();
}
void MaglevGraphBuilder::VisitShiftLeftSmi() {
  VisitBinarySmiOperation<Operation::kShiftLeft>();
}
void MaglevGraphBuilder::VisitShiftRightSmi() {
  VisitBinarySmiOperation<Operation::kShiftRight>();
}
void MaglevGraphBuilder::VisitShiftRightLogicalSmi() {
  VisitBinarySmiOperation<Operation::kShiftRightLogical>();
}

void MaglevGraphBuilder::VisitInc() {
  VisitUnaryOperation<Operation::kIncrement>();
}
void MaglevGraphBuilder::VisitDec() {
  VisitUnaryOperation<Operation::kDecrement>();
}
void MaglevGraphBuilder::VisitNegate() {
  VisitUnaryOperation<Operation::kNegate>();
}
void MaglevGraphBuilder::VisitBitwiseNot() {
  VisitUnaryOperation<Operation::kBitwiseNot>();
}

MAGLEV_UNIMPLEMENTED_BYTECODE(ToBooleanLogicalNot)
MAGLEV_UNIMPLEMENTED_BYTECODE(LogicalNot)
MAGLEV_UNIMPLEMENTED_BYTECODE(TypeOf)
MAGLEV_UNIMPLEMENTED_BYTECODE(DeletePropertyStrict)
MAGLEV_UNIMPLEMENTED_BYTECODE(DeletePropertySloppy)
MAGLEV_UNIMPLEMENTED_BYTECODE(GetSuperConstructor)

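// The Call node's fixed inputs are the callee and the context; arguments
// (including the receiver) follow. For kNullOrUndefined receiver mode no
// receiver register is passed, so an undefined constant is prepended as the
// receiver argument.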
// TODO(v8:7700): Read feedback and implement inlining.
void MaglevGraphBuilder::BuildCallFromRegisterList(
    ConvertReceiverMode receiver_mode) {
  ValueNode* function = LoadRegisterTaggedValue(0);

  interpreter::RegisterList args = iterator_.GetRegisterListOperand(1);
  ValueNode* context = GetContext();

  size_t input_count = args.register_count() + Call::kFixedInputCount;

  RootConstant* undefined_constant = nullptr;
  if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
    // The undefined constant node has to be created before the call node.
    undefined_constant =
        AddNewNode<RootConstant>({}, RootIndex::kUndefinedValue);
    input_count++;
  }

  Call* call = AddNewNode<Call>(input_count, receiver_mode, function, context);
  int arg_index = 0;
  if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
    call->set_arg(arg_index++, undefined_constant);
  }
  for (int i = 0; i < args.register_count(); ++i) {
    call->set_arg(arg_index++, current_interpreter_frame_.get(args[i]));
  }

  SetAccumulator(call);
}

void MaglevGraphBuilder::BuildCallFromRegisters(
    int argc_count, ConvertReceiverMode receiver_mode) {
  DCHECK_LE(argc_count, 2);
  ValueNode* function = LoadRegisterTaggedValue(0);
  ValueNode* context = GetContext();

  int argc_count_with_recv = argc_count + 1;
  size_t input_count = argc_count_with_recv + Call::kFixedInputCount;

  // The undefined constant node has to be created before the call node.
  RootConstant* undefined_constant = nullptr;
  if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
    undefined_constant =
        AddNewNode<RootConstant>({}, RootIndex::kUndefinedValue);
  }

  Call* call = AddNewNode<Call>(input_count, receiver_mode, function, context);
  int arg_index = 0;
  int reg_count = argc_count_with_recv;
  if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
    reg_count = argc_count;
    call->set_arg(arg_index++, undefined_constant);
  }
  for (int i = 0; i < reg_count; i++) {
    call->set_arg(arg_index++, LoadRegisterTaggedValue(i + 1));
  }

  SetAccumulator(call);
}

void MaglevGraphBuilder::VisitCallAnyReceiver() {
  BuildCallFromRegisterList(ConvertReceiverMode::kAny);
}
void MaglevGraphBuilder::VisitCallProperty() {
  BuildCallFromRegisterList(ConvertReceiverMode::kNotNullOrUndefined);
}
void MaglevGraphBuilder::VisitCallProperty0() {
  BuildCallFromRegisters(0, ConvertReceiverMode::kNotNullOrUndefined);
}
void MaglevGraphBuilder::VisitCallProperty1() {
  BuildCallFromRegisters(1, ConvertReceiverMode::kNotNullOrUndefined);
}
void MaglevGraphBuilder::VisitCallProperty2() {
  BuildCallFromRegisters(2, ConvertReceiverMode::kNotNullOrUndefined);
}
void MaglevGraphBuilder::VisitCallUndefinedReceiver() {
  BuildCallFromRegisterList(ConvertReceiverMode::kNullOrUndefined);
}
void MaglevGraphBuilder::VisitCallUndefinedReceiver0() {
  BuildCallFromRegisters(0, ConvertReceiverMode::kNullOrUndefined);
}
void MaglevGraphBuilder::VisitCallUndefinedReceiver1() {
  BuildCallFromRegisters(1, ConvertReceiverMode::kNullOrUndefined);
}
void MaglevGraphBuilder::VisitCallUndefinedReceiver2() {
  BuildCallFromRegisters(2, ConvertReceiverMode::kNullOrUndefined);
}

MAGLEV_UNIMPLEMENTED_BYTECODE(CallWithSpread)
MAGLEV_UNIMPLEMENTED_BYTECODE(CallRuntime)
MAGLEV_UNIMPLEMENTED_BYTECODE(CallRuntimeForPair)
MAGLEV_UNIMPLEMENTED_BYTECODE(CallJSRuntime)
MAGLEV_UNIMPLEMENTED_BYTECODE(InvokeIntrinsic)
MAGLEV_UNIMPLEMENTED_BYTECODE(Construct)
MAGLEV_UNIMPLEMENTED_BYTECODE(ConstructWithSpread)

void MaglevGraphBuilder::VisitTestEqual() {
  VisitBinaryOperation<Operation::kEqual>();
}
void MaglevGraphBuilder::VisitTestEqualStrict() {
  VisitBinaryOperation<Operation::kStrictEqual>();
}
void MaglevGraphBuilder::VisitTestLessThan() {
  VisitBinaryOperation<Operation::kLessThan>();
}
void MaglevGraphBuilder::VisitTestLessThanOrEqual() {
  VisitBinaryOperation<Operation::kLessThanOrEqual>();
}
void MaglevGraphBuilder::VisitTestGreaterThan() {
  VisitBinaryOperation<Operation::kGreaterThan>();
}
void MaglevGraphBuilder::VisitTestGreaterThanOrEqual() {
  VisitBinaryOperation<Operation::kGreaterThanOrEqual>();
}

MAGLEV_UNIMPLEMENTED_BYTECODE(TestInstanceOf)
MAGLEV_UNIMPLEMENTED_BYTECODE(TestIn)
MAGLEV_UNIMPLEMENTED_BYTECODE(ToName)
MAGLEV_UNIMPLEMENTED_BYTECODE(ToNumber)
MAGLEV_UNIMPLEMENTED_BYTECODE(ToNumeric)
MAGLEV_UNIMPLEMENTED_BYTECODE(ToObject)
MAGLEV_UNIMPLEMENTED_BYTECODE(ToString)
MAGLEV_UNIMPLEMENTED_BYTECODE(CreateRegExpLiteral)
MAGLEV_UNIMPLEMENTED_BYTECODE(CreateArrayLiteral)
MAGLEV_UNIMPLEMENTED_BYTECODE(CreateArrayFromIterable)
MAGLEV_UNIMPLEMENTED_BYTECODE(CreateEmptyArrayLiteral)
MAGLEV_UNIMPLEMENTED_BYTECODE(CreateObjectLiteral)
MAGLEV_UNIMPLEMENTED_BYTECODE(CreateEmptyObjectLiteral)
MAGLEV_UNIMPLEMENTED_BYTECODE(CloneObject)
MAGLEV_UNIMPLEMENTED_BYTECODE(GetTemplateObject)
MAGLEV_UNIMPLEMENTED_BYTECODE(CreateClosure)
MAGLEV_UNIMPLEMENTED_BYTECODE(CreateBlockContext)
MAGLEV_UNIMPLEMENTED_BYTECODE(CreateCatchContext)
MAGLEV_UNIMPLEMENTED_BYTECODE(CreateFunctionContext)
MAGLEV_UNIMPLEMENTED_BYTECODE(CreateEvalContext)
MAGLEV_UNIMPLEMENTED_BYTECODE(CreateWithContext)
MAGLEV_UNIMPLEMENTED_BYTECODE(CreateMappedArguments)
MAGLEV_UNIMPLEMENTED_BYTECODE(CreateUnmappedArguments)
MAGLEV_UNIMPLEMENTED_BYTECODE(CreateRestParameter)

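// The loop header's merge state was pre-created in the constructor, so the
// back-edge emitted here only needs to merge into it; the back-edge block is
// assigned predecessor id 0 of the loop header.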
void MaglevGraphBuilder::VisitJumpLoop() {
  int target = iterator_.GetJumpTargetOffset();
  BasicBlock* block =
      target == iterator_.current_offset()
          ? FinishBlock<JumpLoop>(next_offset(), {}, &jump_targets_[target])
          : FinishBlock<JumpLoop>(next_offset(), {},
                                  jump_targets_[target].block_ptr());

  merge_states_[target]->MergeLoop(*compilation_unit_,
                                   current_interpreter_frame_, block, target);
  block->set_predecessor_id(0);
}
void MaglevGraphBuilder::VisitJump() {
  BasicBlock* block = FinishBlock<Jump>(
      next_offset(), {}, &jump_targets_[iterator_.GetJumpTargetOffset()]);
  MergeIntoFrameState(block, iterator_.GetJumpTargetOffset());
  DCHECK_LT(next_offset(), bytecode().length());
}
MAGLEV_UNIMPLEMENTED_BYTECODE(JumpConstant)
void MaglevGraphBuilder::VisitJumpIfNullConstant() { VisitJumpIfNull(); }
void MaglevGraphBuilder::VisitJumpIfNotNullConstant() { VisitJumpIfNotNull(); }
void MaglevGraphBuilder::VisitJumpIfUndefinedConstant() {
  VisitJumpIfUndefined();
}
void MaglevGraphBuilder::VisitJumpIfNotUndefinedConstant() {
  VisitJumpIfNotUndefined();
}
void MaglevGraphBuilder::VisitJumpIfUndefinedOrNullConstant() {
  VisitJumpIfUndefinedOrNull();
}
void MaglevGraphBuilder::VisitJumpIfTrueConstant() { VisitJumpIfTrue(); }
void MaglevGraphBuilder::VisitJumpIfFalseConstant() { VisitJumpIfFalse(); }
void MaglevGraphBuilder::VisitJumpIfJSReceiverConstant() {
  VisitJumpIfJSReceiver();
}
void MaglevGraphBuilder::VisitJumpIfToBooleanTrueConstant() {
  VisitJumpIfToBooleanTrue();
}
void MaglevGraphBuilder::VisitJumpIfToBooleanFalseConstant() {
  VisitJumpIfToBooleanFalse();
}

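// Records `predecessor` as an incoming edge of the frame state at bytecode
// offset `target`, creating the merge state on first use (non-loop targets
// only; loop headers were pre-created in the constructor).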
void MaglevGraphBuilder::MergeIntoFrameState(BasicBlock* predecessor,
                                             int target) {
  if (merge_states_[target] == nullptr) {
    DCHECK(!bytecode_analysis().IsLoopHeader(target));
    const compiler::BytecodeLivenessState* liveness =
        bytecode_analysis().GetInLivenessFor(target);
    // If there's no target frame state, allocate a new one.
    merge_states_[target] = zone()->New<MergePointInterpreterFrameState>(
        *compilation_unit_, current_interpreter_frame_, target,
        NumPredecessors(target), predecessor, liveness);
  } else {
    // If there already is a frame state, merge.
    merge_states_[target]->Merge(*compilation_unit_,
                                 current_interpreter_frame_, predecessor,
                                 target);
  }
}

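// Helpers that terminate the current block with a two-way branch and merge
// the current frame state into the taken jump target; the fallthrough
// successor is picked up when graph building continues at the next offset.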
void MaglevGraphBuilder::BuildBranchIfTrue(ValueNode* node, int true_target,
                                           int false_target) {
  BasicBlock* block = FinishBlock<BranchIfTrue>(next_offset(), {node},
                                                &jump_targets_[true_target],
                                                &jump_targets_[false_target]);
  MergeIntoFrameState(block, iterator_.GetJumpTargetOffset());
}
void MaglevGraphBuilder::BuildBranchIfToBooleanTrue(ValueNode* node,
                                                    int true_target,
                                                    int false_target) {
  BasicBlock* block = FinishBlock<BranchIfToBooleanTrue>(
      next_offset(), {node}, &jump_targets_[true_target],
      &jump_targets_[false_target]);
  MergeIntoFrameState(block, iterator_.GetJumpTargetOffset());
}
void MaglevGraphBuilder::VisitJumpIfToBooleanTrue() {
  BuildBranchIfToBooleanTrue(GetAccumulatorTaggedValue(),
                             iterator_.GetJumpTargetOffset(), next_offset());
}
void MaglevGraphBuilder::VisitJumpIfToBooleanFalse() {
  BuildBranchIfToBooleanTrue(GetAccumulatorTaggedValue(), next_offset(),
                             iterator_.GetJumpTargetOffset());
}
void MaglevGraphBuilder::VisitJumpIfTrue() {
  BuildBranchIfTrue(GetAccumulatorTaggedValue(),
                    iterator_.GetJumpTargetOffset(), next_offset());
}
void MaglevGraphBuilder::VisitJumpIfFalse() {
  BuildBranchIfTrue(GetAccumulatorTaggedValue(), next_offset(),
                    iterator_.GetJumpTargetOffset());
}
MAGLEV_UNIMPLEMENTED_BYTECODE(JumpIfNull)
MAGLEV_UNIMPLEMENTED_BYTECODE(JumpIfNotNull)
MAGLEV_UNIMPLEMENTED_BYTECODE(JumpIfUndefined)
MAGLEV_UNIMPLEMENTED_BYTECODE(JumpIfNotUndefined)
MAGLEV_UNIMPLEMENTED_BYTECODE(JumpIfUndefinedOrNull)
MAGLEV_UNIMPLEMENTED_BYTECODE(JumpIfJSReceiver)
MAGLEV_UNIMPLEMENTED_BYTECODE(SwitchOnSmiNoFeedback)
MAGLEV_UNIMPLEMENTED_BYTECODE(ForInEnumerate)
MAGLEV_UNIMPLEMENTED_BYTECODE(ForInPrepare)
MAGLEV_UNIMPLEMENTED_BYTECODE(ForInContinue)
MAGLEV_UNIMPLEMENTED_BYTECODE(ForInNext)
MAGLEV_UNIMPLEMENTED_BYTECODE(ForInStep)
MAGLEV_UNIMPLEMENTED_BYTECODE(SetPendingMessage)
MAGLEV_UNIMPLEMENTED_BYTECODE(Throw)
MAGLEV_UNIMPLEMENTED_BYTECODE(ReThrow)
void MaglevGraphBuilder::VisitReturn() {
  FinishBlock<Return>(next_offset(), {GetAccumulatorTaggedValue()});
}
MAGLEV_UNIMPLEMENTED_BYTECODE(ThrowReferenceErrorIfHole)
MAGLEV_UNIMPLEMENTED_BYTECODE(ThrowSuperNotCalledIfHole)
MAGLEV_UNIMPLEMENTED_BYTECODE(ThrowSuperAlreadyCalledIfNotHole)
MAGLEV_UNIMPLEMENTED_BYTECODE(ThrowIfNotSuperConstructor)
MAGLEV_UNIMPLEMENTED_BYTECODE(SwitchOnGeneratorState)
MAGLEV_UNIMPLEMENTED_BYTECODE(SuspendGenerator)
MAGLEV_UNIMPLEMENTED_BYTECODE(ResumeGenerator)
MAGLEV_UNIMPLEMENTED_BYTECODE(GetIterator)
MAGLEV_UNIMPLEMENTED_BYTECODE(Debugger)
MAGLEV_UNIMPLEMENTED_BYTECODE(IncBlockCounter)
MAGLEV_UNIMPLEMENTED_BYTECODE(Abort)

void MaglevGraphBuilder::VisitWide() { UNREACHABLE(); }
void MaglevGraphBuilder::VisitExtraWide() { UNREACHABLE(); }
#define DEBUG_BREAK(Name, ...) \
  void MaglevGraphBuilder::Visit##Name() { UNREACHABLE(); }
DEBUG_BREAK_BYTECODE_LIST(DEBUG_BREAK)
#undef DEBUG_BREAK
void MaglevGraphBuilder::VisitIllegal() { UNREACHABLE(); }

}  // namespace maglev
}  // namespace internal
}  // namespace v8