1 // Copyright 2016 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #ifndef V8_COMPILER_GRAPH_ASSEMBLER_H_
6 #define V8_COMPILER_GRAPH_ASSEMBLER_H_
7
8 #include "src/codegen/tnode.h"
9 #include "src/compiler/feedback-source.h"
10 #include "src/compiler/js-graph.h"
11 #include "src/compiler/node.h"
12 #include "src/compiler/simplified-operator.h"
13
14 namespace v8 {
15 namespace internal {
16
17 class JSGraph;
18 class Graph;
19 class Oddball;
20
21 // TODO(jgruber): Currently this is too permissive, but at least it lets us
22 // document which functions expect JS booleans. If a real Boolean type becomes
23 // possible in the future, use that instead.
24 using Boolean = Oddball;
25
26 namespace compiler {
27
28 class Reducer;
29
// Pure (no effect or control inputs) machine-level unary operators exposed on
// the assembler. Each entry expands (via PURE_UNOP_DECL below) to a
// `Node* Name(Node* input)` method on GraphAssembler.
#define PURE_ASSEMBLER_MACH_UNOP_LIST(V) \
  V(BitcastFloat32ToInt32)               \
  V(BitcastFloat64ToInt64)               \
  V(BitcastInt32ToFloat32)               \
  V(BitcastWord32ToWord64)               \
  V(BitcastInt64ToFloat64)               \
  V(ChangeFloat32ToFloat64)              \
  V(ChangeFloat64ToInt32)                \
  V(ChangeFloat64ToInt64)                \
  V(ChangeFloat64ToUint32)               \
  V(ChangeInt32ToFloat64)                \
  V(ChangeInt32ToInt64)                  \
  V(ChangeInt64ToFloat64)                \
  V(ChangeUint32ToFloat64)               \
  V(ChangeUint32ToUint64)                \
  V(Float64Abs)                          \
  V(Float64ExtractHighWord32)            \
  V(Float64ExtractLowWord32)             \
  V(Float64SilenceNaN)                   \
  V(RoundFloat64ToInt32)                 \
  V(RoundInt32ToFloat32)                 \
  V(TruncateFloat64ToFloat32)            \
  V(TruncateFloat64ToWord32)             \
  V(TruncateInt64ToInt32)                \
  V(Word32ReverseBytes)                  \
  V(Word64ReverseBytes)
56
// Pure machine-level binary operators. Each entry expands (via BINOP_DECL
// below) to a `Node* Name(Node* left, Node* right)` method on GraphAssembler.
#define PURE_ASSEMBLER_MACH_BINOP_LIST(V) \
  V(Float64Add)                           \
  V(Float64Div)                           \
  V(Float64Equal)                         \
  V(Float64InsertHighWord32)              \
  V(Float64InsertLowWord32)               \
  V(Float64LessThan)                      \
  V(Float64LessThanOrEqual)               \
  V(Float64Mod)                           \
  V(Float64Sub)                           \
  V(Int32Add)                             \
  V(Int32LessThan)                        \
  V(Int32LessThanOrEqual)                 \
  V(Int32Mul)                             \
  V(Int32Sub)                             \
  V(Int64Sub)                             \
  V(IntAdd)                               \
  V(IntLessThan)                          \
  V(IntMul)                               \
  V(IntSub)                               \
  V(Uint32LessThan)                       \
  V(Uint32LessThanOrEqual)                \
  V(Uint64LessThan)                       \
  V(Uint64LessThanOrEqual)                \
  V(UintLessThan)                         \
  V(Word32And)                            \
  V(Word32Equal)                          \
  V(Word32Or)                             \
  V(Word32Sar)                            \
  V(Word32SarShiftOutZeros)               \
  V(Word32Shl)                            \
  V(Word32Shr)                            \
  V(Word32Xor)                            \
  V(Word64And)                            \
  V(Word64Equal)                          \
  V(Word64Or)                             \
  V(Word64Sar)                            \
  V(Word64SarShiftOutZeros)               \
  V(Word64Shl)                            \
  V(Word64Shr)                            \
  V(Word64Xor)                            \
  V(WordAnd)                              \
  V(WordEqual)                            \
  V(WordOr)                               \
  V(WordSar)                              \
  V(WordSarShiftOutZeros)                 \
  V(WordShl)                              \
  V(WordShr)                              \
  V(WordXor)
106
// Machine-level binary operators that are not pure (they take/produce effect
// and control, e.g. because they can trap or have an overflow projection).
// Expanded via BINOP_DECL alongside the pure list above.
#define CHECKED_ASSEMBLER_MACH_BINOP_LIST(V) \
  V(Int32AddWithOverflow)                    \
  V(Int32Div)                                \
  V(Int32Mod)                                \
  V(Int32MulWithOverflow)                    \
  V(Int32SubWithOverflow)                    \
  V(Int64Div)                                \
  V(Int64Mod)                                \
  V(Uint32Div)                               \
  V(Uint32Mod)                               \
  V(Uint64Div)                               \
  V(Uint64Mod)
119
// Canonical singleton constants cached on the JSGraph. Each (Name, Type)
// entry expands to a `TNode<Type> Name##Constant()` accessor and a
// `TNode<Boolean> Is##Name(TNode<Object>)` test on JSGraphAssembler.
#define JSGRAPH_SINGLETON_CONSTANT_LIST(V)      \
  V(AllocateInOldGenerationStub, Code)          \
  V(AllocateInYoungGenerationStub, Code)        \
  V(AllocateRegularInOldGenerationStub, Code)   \
  V(AllocateRegularInYoungGenerationStub, Code) \
  V(BigIntMap, Map)                             \
  V(BooleanMap, Map)                            \
  V(EmptyString, String)                        \
  V(False, Boolean)                             \
  V(FixedArrayMap, Map)                         \
  V(FixedDoubleArrayMap, Map)                   \
  V(WeakFixedArrayMap, Map)                     \
  V(HeapNumberMap, Map)                         \
  V(MinusOne, Number)                           \
  V(NaN, Number)                                \
  V(NoContext, Object)                          \
  V(Null, Oddball)                              \
  V(One, Number)                                \
  V(TheHole, Oddball)                           \
  V(ToNumberBuiltin, Code)                      \
  V(PlainPrimitiveToNumberBuiltin, Code)        \
  V(True, Boolean)                              \
  V(Undefined, Oddball)                         \
  V(Zero, Number)
144
class GraphAssembler;

// Classifies labels: kDeferred marks targets expected to be cold (used to
// infer branch hints), kLoop marks loop headers (bound before all incoming
// back edges are merged), kNonDeferred is the default.
enum class GraphAssemblerLabelType { kDeferred, kNonDeferred, kLoop };
148
149 // Label with statically known count of incoming branches and phis.
150 template <size_t VarCount>
151 class GraphAssemblerLabel {
152 public:
153 Node* PhiAt(size_t index);
154
155 template <typename T>
PhiAt(size_t index)156 TNode<T> PhiAt(size_t index) {
157 // TODO(jgruber): Investigate issues on ptr compression bots and enable.
158 // DCHECK(IsMachineRepresentationOf<T>(representations_[index]));
159 return TNode<T>::UncheckedCast(PhiAt(index));
160 }
161
GraphAssemblerLabel(GraphAssemblerLabelType type,int loop_nesting_level,const std::array<MachineRepresentation,VarCount> & reps)162 GraphAssemblerLabel(GraphAssemblerLabelType type, int loop_nesting_level,
163 const std::array<MachineRepresentation, VarCount>& reps)
164 : type_(type),
165 loop_nesting_level_(loop_nesting_level),
166 representations_(reps) {}
167
~GraphAssemblerLabel()168 ~GraphAssemblerLabel() { DCHECK(IsBound() || merged_count_ == 0); }
169
170 private:
171 friend class GraphAssembler;
172
SetBound()173 void SetBound() {
174 DCHECK(!IsBound());
175 is_bound_ = true;
176 }
IsBound()177 bool IsBound() const { return is_bound_; }
IsDeferred()178 bool IsDeferred() const {
179 return type_ == GraphAssemblerLabelType::kDeferred;
180 }
IsLoop()181 bool IsLoop() const { return type_ == GraphAssemblerLabelType::kLoop; }
182
183 bool is_bound_ = false;
184 const GraphAssemblerLabelType type_;
185 const int loop_nesting_level_;
186 size_t merged_count_ = 0;
187 Node* effect_;
188 Node* control_;
189 std::array<Node*, VarCount> bindings_;
190 const std::array<MachineRepresentation, VarCount> representations_;
191 };
192
using NodeChangedCallback = std::function<void(Node*)>;
// Helper for building effect- and control-linked subgraphs. The assembler
// tracks a current effect and current control node; nodes added through it
// are threaded onto that chain, and labels (GraphAssemblerLabel) express
// control flow between basic blocks.
class V8_EXPORT_PRIVATE GraphAssembler {
 public:
  // Constructs a GraphAssembler. If {node_changed_callback} is provided, it
  // is invoked whenever a node outside the assembler-created subgraph is
  // changed. {mark_loop_exits} enables emission of LoopExit nodes (required
  // for loop peeling; see LoopScope below).
  GraphAssembler(
      MachineGraph* jsgraph, Zone* zone,
      base::Optional<NodeChangedCallback> node_changed_callback = base::nullopt,
      bool mark_loop_exits = false);
  virtual ~GraphAssembler();

  void Reset();
  void InitializeEffectControl(Node* effect, Node* control);

  // Create a label of the given type with one machine representation per phi
  // variable.
  template <typename... Reps>
  GraphAssemblerLabel<sizeof...(Reps)> MakeLabelFor(
      GraphAssemblerLabelType type, Reps... reps) {
    std::array<MachineRepresentation, sizeof...(Reps)> reps_array = {reps...};
    return MakeLabel<sizeof...(Reps)>(reps_array, type);
  }

  // As above, but with an std::array of machine representations.
  template <int VarCount>
  GraphAssemblerLabel<VarCount> MakeLabel(
      std::array<MachineRepresentation, VarCount> reps_array,
      GraphAssemblerLabelType type) {
    return GraphAssemblerLabel<VarCount>(type, loop_nesting_level_, reps_array);
  }

  // Convenience wrapper for creating non-deferred labels.
  template <typename... Reps>
  GraphAssemblerLabel<sizeof...(Reps)> MakeLabel(Reps... reps) {
    return MakeLabelFor(GraphAssemblerLabelType::kNonDeferred, reps...);
  }

  // Convenience wrapper for creating loop labels.
  template <typename... Reps>
  GraphAssemblerLabel<sizeof...(Reps)> MakeLoopLabel(Reps... reps) {
    return MakeLabelFor(GraphAssemblerLabelType::kLoop, reps...);
  }

  // Convenience wrapper for creating deferred labels.
  template <typename... Reps>
  GraphAssemblerLabel<sizeof...(Reps)> MakeDeferredLabel(Reps... reps) {
    return MakeLabelFor(GraphAssemblerLabelType::kDeferred, reps...);
  }

  // Value creation.
  Node* IntPtrConstant(intptr_t value);
  Node* UintPtrConstant(uintptr_t value);
  Node* Int32Constant(int32_t value);
  Node* Uint32Constant(uint32_t value);
  Node* Int64Constant(int64_t value);
  Node* Uint64Constant(uint64_t value);
  Node* UniqueIntPtrConstant(intptr_t value);
  Node* Float64Constant(double value);
  Node* Projection(int index, Node* value);
  Node* ExternalConstant(ExternalReference ref);

  Node* Parameter(int index);

  Node* LoadFramePointer();

  Node* LoadHeapNumberValue(Node* heap_number);

  // Machine operations; declarations are generated from the operator lists
  // defined at the top of this file.
#define PURE_UNOP_DECL(Name) Node* Name(Node* input);
  PURE_ASSEMBLER_MACH_UNOP_LIST(PURE_UNOP_DECL)
#undef PURE_UNOP_DECL

#define BINOP_DECL(Name) Node* Name(Node* left, Node* right);
  PURE_ASSEMBLER_MACH_BINOP_LIST(BINOP_DECL)
  CHECKED_ASSEMBLER_MACH_BINOP_LIST(BINOP_DECL)
#undef BINOP_DECL

#ifdef V8_MAP_PACKING
  Node* PackMapWord(TNode<Map> map);
  TNode<Map> UnpackMapWord(Node* map_word);
#endif
  TNode<Map> LoadMap(Node* object);

  Node* DebugBreak();

  // Unreachable nodes are similar to Goto in that they reset effect/control to
  // nullptr and it's thus not possible to append other nodes without first
  // binding a new label.
  // The block_updater_successor label is a crutch to work around block updater
  // weaknesses (see the related comment in ConnectUnreachableToEnd); if the
  // block updater exists, we cannot connect unreachable to end, instead we
  // must preserve the Goto pattern.
  Node* Unreachable(GraphAssemblerLabel<0u>* block_updater_successor = nullptr);
  // This special variant doesn't connect the Unreachable node to end, and does
  // not reset current effect/control. Intended only for special use-cases like
  // lowering DeadValue.
  Node* UnreachableWithoutConnectToEnd();

  Node* IntPtrEqual(Node* left, Node* right);
  Node* TaggedEqual(Node* left, Node* right);

  Node* SmiSub(Node* left, Node* right);
  Node* SmiLessThan(Node* left, Node* right);

  Node* Float64RoundDown(Node* value);
  Node* Float64RoundTruncate(Node* value);
  Node* TruncateFloat64ToInt64(Node* value, TruncateKind kind);

  Node* BitcastWordToTagged(Node* value);
  Node* BitcastWordToTaggedSigned(Node* value);
  Node* BitcastTaggedToWord(Node* value);
  Node* BitcastTaggedToWordForTagAndSmiBits(Node* value);
  Node* BitcastMaybeObjectToWord(Node* value);

  Node* TypeGuard(Type type, Node* value);
  Node* Checkpoint(FrameState frame_state);

  TNode<RawPtrT> StackSlot(int size, int alignment);

  Node* Store(StoreRepresentation rep, Node* object, Node* offset, Node* value);
  Node* Store(StoreRepresentation rep, Node* object, int offset, Node* value);
  Node* Load(MachineType type, Node* object, Node* offset);
  Node* Load(MachineType type, Node* object, int offset);

  Node* StoreUnaligned(MachineRepresentation rep, Node* object, Node* offset,
                       Node* value);
  Node* LoadUnaligned(MachineType type, Node* object, Node* offset);

  Node* ProtectedStore(MachineRepresentation rep, Node* object, Node* offset,
                       Node* value);
  Node* ProtectedLoad(MachineType type, Node* object, Node* offset);

  Node* Retain(Node* buffer);
  Node* UnsafePointerAdd(Node* base, Node* external);

  Node* DeoptimizeIf(DeoptimizeReason reason, FeedbackSource const& feedback,
                     Node* condition, Node* frame_state);
  Node* DeoptimizeIfNot(DeoptimizeReason reason, FeedbackSource const& feedback,
                        Node* condition, Node* frame_state);
  TNode<Object> Call(const CallDescriptor* call_descriptor, int inputs_size,
                     Node** inputs);
  TNode<Object> Call(const Operator* op, int inputs_size, Node** inputs);
  template <typename... Args>
  TNode<Object> Call(const CallDescriptor* call_descriptor, Node* first_arg,
                     Args... args);
  template <typename... Args>
  TNode<Object> Call(const Operator* op, Node* first_arg, Args... args);
  void TailCall(const CallDescriptor* call_descriptor, int inputs_size,
                Node** inputs);

  // Basic control operations.
  template <size_t VarCount>
  void Bind(GraphAssemblerLabel<VarCount>* label);

  template <typename... Vars>
  void Goto(GraphAssemblerLabel<sizeof...(Vars)>* label, Vars...);

  // Branch hints are inferred from if_true/if_false deferred states.
  void BranchWithCriticalSafetyCheck(Node* condition,
                                     GraphAssemblerLabel<0u>* if_true,
                                     GraphAssemblerLabel<0u>* if_false);

  // Branch hints are inferred from if_true/if_false deferred states.
  template <typename... Vars>
  void Branch(Node* condition, GraphAssemblerLabel<sizeof...(Vars)>* if_true,
              GraphAssemblerLabel<sizeof...(Vars)>* if_false, Vars...);

  template <typename... Vars>
  void BranchWithHint(Node* condition,
                      GraphAssemblerLabel<sizeof...(Vars)>* if_true,
                      GraphAssemblerLabel<sizeof...(Vars)>* if_false,
                      BranchHint hint, Vars...);

  // Control helpers.

  // {GotoIf(c, l, h)} is equivalent to {BranchWithHint(c, l, templ, h);
  // Bind(templ)}.
  template <typename... Vars>
  void GotoIf(Node* condition, GraphAssemblerLabel<sizeof...(Vars)>* label,
              BranchHint hint, Vars...);

  // {GotoIfNot(c, l, h)} is equivalent to {BranchWithHint(c, templ, l, h);
  // Bind(templ)}.
  // The branch hint refers to the expected outcome of the provided condition,
  // so {GotoIfNot(..., BranchHint::kTrue)} means "optimize for the case where
  // the branch is *not* taken".
  template <typename... Vars>
  void GotoIfNot(Node* condition, GraphAssemblerLabel<sizeof...(Vars)>* label,
                 BranchHint hint, Vars...);

  // {GotoIf(c, l)} is equivalent to {Branch(c, l, templ);Bind(templ)}.
  template <typename... Vars>
  void GotoIf(Node* condition, GraphAssemblerLabel<sizeof...(Vars)>* label,
              Vars...);

  // {GotoIfNot(c, l)} is equivalent to {Branch(c, templ, l);Bind(templ)}.
  template <typename... Vars>
  void GotoIfNot(Node* condition, GraphAssemblerLabel<sizeof...(Vars)>* label,
                 Vars...);

  bool HasActiveBlock() const {
    // This is false if the current block has been terminated (e.g. by a Goto
    // or Unreachable). In that case, a new label must be bound before we can
    // continue emitting nodes.
    return control() != nullptr;
  }

  // Updates current effect and control based on outputs of {node}.
  V8_INLINE void UpdateEffectControlWith(Node* node) {
    if (node->op()->EffectOutputCount() > 0) {
      effect_ = node;
    }
    if (node->op()->ControlOutputCount() > 0) {
      control_ = node;
    }
  }

  // Adds {node} to the current position and updates assembler's current effect
  // and control.
  Node* AddNode(Node* node);

  template <typename T>
  TNode<T> AddNode(Node* node) {
    return TNode<T>::UncheckedCast(AddNode(node));
  }

  void ConnectUnreachableToEnd();

  // Adds an inline reducer such that nodes added to the graph will be run
  // through the reducers and possibly further lowered. Each reducer should
  // operate on independent node types since once a reducer changes a node we
  // no longer run any other reducers on that node. The reducers should also
  // only generate new nodes that wouldn't be further reduced, as new nodes
  // generated by a reducer won't be passed through the reducers again.
  void AddInlineReducer(Reducer* reducer) {
    inline_reducers_.push_back(reducer);
  }

  Control control() const { return Control(control_); }
  Effect effect() const { return Effect(effect_); }

 protected:
  template <typename... Vars>
  void MergeState(GraphAssemblerLabel<sizeof...(Vars)>* label, Vars... vars);

  V8_INLINE Node* AddClonedNode(Node* node);

  MachineGraph* mcgraph() const { return mcgraph_; }
  Graph* graph() const { return mcgraph_->graph(); }
  Zone* temp_zone() const { return temp_zone_; }
  CommonOperatorBuilder* common() const { return mcgraph()->common(); }
  MachineOperatorBuilder* machine() const { return mcgraph()->machine(); }

  // Updates machinery for creating {LoopExit,LoopExitEffect,LoopExitValue}
  // nodes on loop exits (which are necessary for loop peeling).
  //
  // All labels created while a LoopScope is live are considered to be inside
  // the loop.
  template <MachineRepresentation... Reps>
  class V8_NODISCARD LoopScope final {
   private:
    // The internal scope is only here to increment the graph assembler's
    // nesting level prior to `loop_header_label` creation below.
    class V8_NODISCARD LoopScopeInternal {
     public:
      explicit LoopScopeInternal(GraphAssembler* gasm)
          : previous_loop_nesting_level_(gasm->loop_nesting_level_),
            gasm_(gasm) {
        gasm->loop_nesting_level_++;
      }

      ~LoopScopeInternal() {
        gasm_->loop_nesting_level_--;
        DCHECK_EQ(gasm_->loop_nesting_level_, previous_loop_nesting_level_);
      }

     private:
      const int previous_loop_nesting_level_;
      GraphAssembler* const gasm_;
    };

   public:
    explicit LoopScope(GraphAssembler* gasm)
        : internal_scope_(gasm),
          gasm_(gasm),
          loop_header_label_(gasm->MakeLoopLabel(Reps...)) {
      // This feature may only be used if it has been enabled.
      DCHECK(gasm_->mark_loop_exits_);
      gasm->loop_headers_.push_back(&loop_header_label_.control_);
      DCHECK_EQ(static_cast<int>(gasm_->loop_headers_.size()),
                gasm_->loop_nesting_level_);
    }

    ~LoopScope() {
      DCHECK_EQ(static_cast<int>(gasm_->loop_headers_.size()),
                gasm_->loop_nesting_level_);
      gasm_->loop_headers_.pop_back();
    }

    GraphAssemblerLabel<sizeof...(Reps)>* loop_header_label() {
      return &loop_header_label_;
    }

   private:
    // Note: member order matters; internal_scope_ must be initialized first
    // so that the nesting level is already incremented when the loop header
    // label is created.
    const LoopScopeInternal internal_scope_;
    GraphAssembler* const gasm_;
    GraphAssemblerLabel<sizeof...(Reps)> loop_header_label_;
  };

  // Upon destruction, restores effect and control to the state at
  // construction.
  class V8_NODISCARD RestoreEffectControlScope {
   public:
    explicit RestoreEffectControlScope(GraphAssembler* gasm)
        : gasm_(gasm), effect_(gasm->effect()), control_(gasm->control()) {}

    ~RestoreEffectControlScope() {
      gasm_->effect_ = effect_;
      gasm_->control_ = control_;
    }

   private:
    GraphAssembler* const gasm_;
    const Effect effect_;
    const Control control_;
  };

 private:
  class BlockInlineReduction;

  template <typename... Vars>
  void BranchImpl(Node* condition,
                  GraphAssemblerLabel<sizeof...(Vars)>* if_true,
                  GraphAssemblerLabel<sizeof...(Vars)>* if_false,
                  BranchHint hint, Vars...);

  Zone* temp_zone_;
  MachineGraph* mcgraph_;
  Node* effect_;
  Node* control_;
  // {node_changed_callback_} should be called when a node outside the
  // subgraph created by the graph assembler changes.
  base::Optional<NodeChangedCallback> node_changed_callback_;

  // Inline reducers enable reductions to be performed to nodes as they are
  // added to the graph with the graph assembler.
  ZoneVector<Reducer*> inline_reducers_;
  bool inline_reductions_blocked_;

  // Track loop information in order to properly mark loop exits with
  // {LoopExit,LoopExitEffect,LoopExitValue} nodes. The outermost level has
  // a nesting level of 0. See also GraphAssembler::LoopScope.
  int loop_nesting_level_ = 0;
  ZoneVector<Node**> loop_headers_;

  // Feature configuration. As more features are added, this should be turned
  // into a bitfield.
  const bool mark_loop_exits_;
};
549
550 template <size_t VarCount>
PhiAt(size_t index)551 Node* GraphAssemblerLabel<VarCount>::PhiAt(size_t index) {
552 DCHECK(IsBound());
553 DCHECK_LT(index, VarCount);
554 return bindings_[index];
555 }
556
// Merges the current effect/control state and the given variables into
// {label}. For loop labels the first merge creates the two-input Loop node
// and phis and the second merge patches in the back edge; for regular labels
// a Merge with value/effect phis is built once more than one predecessor
// exists.
template <typename... Vars>
void GraphAssembler::MergeState(GraphAssemblerLabel<sizeof...(Vars)>* label,
                                Vars... vars) {
  // Effect/control are restored on scope exit; the caller decides whether
  // the current block continues or terminates.
  RestoreEffectControlScope restore_effect_control_scope(this);

  const int merged_count = static_cast<int>(label->merged_count_);
  static constexpr int kVarCount = sizeof...(vars);
  std::array<Node*, kVarCount> var_array = {vars...};

  // A label created at a shallower nesting level than the current one marks
  // a loop exit.
  const bool is_loop_exit = label->loop_nesting_level_ != loop_nesting_level_;
  if (is_loop_exit) {
    // This feature may only be used if it has been enabled.
    USE(mark_loop_exits_);
    DCHECK(mark_loop_exits_);
    // Jumping from loops to loops not supported.
    DCHECK(!label->IsLoop());
    // Currently only the simple case of jumping one level is supported.
    DCHECK_EQ(label->loop_nesting_level_, loop_nesting_level_ - 1);
    DCHECK(!loop_headers_.empty());
    DCHECK_NOT_NULL(*loop_headers_.back());

    // Mark this exit to enable loop peeling.
    AddNode(graph()->NewNode(common()->LoopExit(), control(),
                             *loop_headers_.back()));
    AddNode(graph()->NewNode(common()->LoopExitEffect(), effect(), control()));
    // Route each merged value through a LoopExitValue node as well.
    for (size_t i = 0; i < kVarCount; i++) {
      var_array[i] = AddNode(graph()->NewNode(
          common()->LoopExitValue(MachineRepresentation::kTagged), var_array[i],
          control()));
    }
  }

  if (label->IsLoop()) {
    if (merged_count == 0) {
      // First (forward) edge into the loop: create the Loop node and phis
      // with both inputs set to the current state; input 1 is the back edge
      // placeholder, patched on the second merge.
      DCHECK(!label->IsBound());
      label->control_ =
          graph()->NewNode(common()->Loop(2), control(), control());
      label->effect_ = graph()->NewNode(common()->EffectPhi(2), effect(),
                                        effect(), label->control_);
      // Terminate keeps potentially non-terminating loops reachable from End.
      Node* terminate = graph()->NewNode(common()->Terminate(), label->effect_,
                                         label->control_);
      NodeProperties::MergeControlToEnd(graph(), common(), terminate);
      for (size_t i = 0; i < kVarCount; i++) {
        label->bindings_[i] =
            graph()->NewNode(common()->Phi(label->representations_[i], 2),
                             var_array[i], var_array[i], label->control_);
      }
    } else {
      // Back edge: patch input 1 of the Loop node and all phis.
      DCHECK(label->IsBound());
      DCHECK_EQ(1, merged_count);
      label->control_->ReplaceInput(1, control());
      label->effect_->ReplaceInput(1, effect());
      for (size_t i = 0; i < kVarCount; i++) {
        label->bindings_[i]->ReplaceInput(1, var_array[i]);
        CHECK(!NodeProperties::IsTyped(var_array[i]));  // Unsupported.
      }
    }
  } else {
    DCHECK(!label->IsLoop());
    DCHECK(!label->IsBound());
    if (merged_count == 0) {
      // Just set the control, effect and variables directly.
      label->control_ = control();
      label->effect_ = effect();
      for (size_t i = 0; i < kVarCount; i++) {
        label->bindings_[i] = var_array[i];
      }
    } else if (merged_count == 1) {
      // Create merge, effect phi and a phi for each variable.
      label->control_ =
          graph()->NewNode(common()->Merge(2), label->control_, control());
      label->effect_ = graph()->NewNode(common()->EffectPhi(2), label->effect_,
                                        effect(), label->control_);
      for (size_t i = 0; i < kVarCount; i++) {
        label->bindings_[i] = graph()->NewNode(
            common()->Phi(label->representations_[i], 2), label->bindings_[i],
            var_array[i], label->control_);
      }
    } else {
      // Append to the merge, effect phi and phis. Note that phis keep their
      // control input as the last input, hence the ReplaceInput/AppendInput
      // dance below.
      DCHECK_EQ(IrOpcode::kMerge, label->control_->opcode());
      label->control_->AppendInput(graph()->zone(), control());
      NodeProperties::ChangeOp(label->control_,
                               common()->Merge(merged_count + 1));

      DCHECK_EQ(IrOpcode::kEffectPhi, label->effect_->opcode());
      label->effect_->ReplaceInput(merged_count, effect());
      label->effect_->AppendInput(graph()->zone(), label->control_);
      NodeProperties::ChangeOp(label->effect_,
                               common()->EffectPhi(merged_count + 1));

      for (size_t i = 0; i < kVarCount; i++) {
        DCHECK_EQ(IrOpcode::kPhi, label->bindings_[i]->opcode());
        label->bindings_[i]->ReplaceInput(merged_count, var_array[i]);
        label->bindings_[i]->AppendInput(graph()->zone(), label->control_);
        NodeProperties::ChangeOp(
            label->bindings_[i],
            common()->Phi(label->representations_[i], merged_count + 1));
        if (NodeProperties::IsTyped(label->bindings_[i])) {
          // Widen the phi's type to also cover the newly merged value.
          CHECK(NodeProperties::IsTyped(var_array[i]));
          Type old_type = NodeProperties::GetType(label->bindings_[i]);
          Type new_type = Type::Union(
              old_type, NodeProperties::GetType(var_array[i]), graph()->zone());
          NodeProperties::SetType(label->bindings_[i], new_type);
        }
      }
    }
  }
  label->merged_count_++;
}
667
// Binds {label}: makes its merged effect/control the assembler's current
// state and exposes its phi values via PhiAt. Requires that the current block
// has been terminated and the label has at least one incoming branch.
template <size_t VarCount>
void GraphAssembler::Bind(GraphAssemblerLabel<VarCount>* label) {
  DCHECK_NULL(control());
  DCHECK_NULL(effect());
  DCHECK_LT(0, label->merged_count_);
  DCHECK_EQ(label->loop_nesting_level_, loop_nesting_level_);

  control_ = label->control_;
  effect_ = label->effect_;

  label->SetBound();

  if (label->merged_count_ > 1 || label->IsLoop()) {
    // Multiple predecessors (or a loop): the label's Merge/Loop node and phis
    // are real nodes that must be added to the current position.
    AddNode(label->control_);
    AddNode(label->effect_);
    for (size_t i = 0; i < VarCount; i++) {
      AddNode(label->bindings_[i]);
    }
  } else {
    // If the basic block does not have a control node, insert a dummy
    // Merge node, so that other passes have a control node to start from.
    control_ = AddNode(graph()->NewNode(common()->Merge(1), control()));
  }
}
692
693 template <typename... Vars>
Branch(Node * condition,GraphAssemblerLabel<sizeof...(Vars)> * if_true,GraphAssemblerLabel<sizeof...(Vars)> * if_false,Vars...vars)694 void GraphAssembler::Branch(Node* condition,
695 GraphAssemblerLabel<sizeof...(Vars)>* if_true,
696 GraphAssemblerLabel<sizeof...(Vars)>* if_false,
697 Vars... vars) {
698 BranchHint hint = BranchHint::kNone;
699 if (if_true->IsDeferred() != if_false->IsDeferred()) {
700 hint = if_false->IsDeferred() ? BranchHint::kTrue : BranchHint::kFalse;
701 }
702
703 BranchImpl(condition, if_true, if_false, hint, vars...);
704 }
705
// As Branch, but with an explicitly provided branch hint instead of one
// inferred from the targets' deferred states.
template <typename... Vars>
void GraphAssembler::BranchWithHint(
    Node* condition, GraphAssemblerLabel<sizeof...(Vars)>* if_true,
    GraphAssemblerLabel<sizeof...(Vars)>* if_false, BranchHint hint,
    Vars... vars) {
  BranchImpl(condition, if_true, if_false, hint, vars...);
}
713
// Emits the Branch node and merges the current state (plus {vars}) into both
// targets. Terminates the current block: effect/control become nullptr, so a
// label must be bound before emitting further nodes.
template <typename... Vars>
void GraphAssembler::BranchImpl(Node* condition,
                                GraphAssemblerLabel<sizeof...(Vars)>* if_true,
                                GraphAssemblerLabel<sizeof...(Vars)>* if_false,
                                BranchHint hint, Vars... vars) {
  DCHECK_NOT_NULL(control());

  Node* branch = graph()->NewNode(common()->Branch(hint), condition, control());

  control_ = graph()->NewNode(common()->IfTrue(), branch);
  MergeState(if_true, vars...);

  control_ = graph()->NewNode(common()->IfFalse(), branch);
  MergeState(if_false, vars...);

  control_ = nullptr;
  effect_ = nullptr;
}
732
// Unconditionally jumps to {label}, merging the given variables. Terminates
// the current block (effect/control become nullptr).
template <typename... Vars>
void GraphAssembler::Goto(GraphAssemblerLabel<sizeof...(Vars)>* label,
                          Vars... vars) {
  DCHECK_NOT_NULL(control());
  DCHECK_NOT_NULL(effect());
  MergeState(label, vars...);

  control_ = nullptr;
  effect_ = nullptr;
}
743
// Jumps to {label} (merging {vars}) when {condition} is true; otherwise
// execution falls through on the IfFalse projection, which becomes the new
// current control.
template <typename... Vars>
void GraphAssembler::GotoIf(Node* condition,
                            GraphAssemblerLabel<sizeof...(Vars)>* label,
                            BranchHint hint, Vars... vars) {
  Node* branch = graph()->NewNode(common()->Branch(hint), condition, control());

  control_ = graph()->NewNode(common()->IfTrue(), branch);
  MergeState(label, vars...);

  control_ = AddNode(graph()->NewNode(common()->IfFalse(), branch));
}
755
// Jumps to {label} (merging {vars}) when {condition} is false; otherwise
// execution falls through on the IfTrue projection, which becomes the new
// current control.
template <typename... Vars>
void GraphAssembler::GotoIfNot(Node* condition,
                               GraphAssemblerLabel<sizeof...(Vars)>* label,
                               BranchHint hint, Vars... vars) {
  Node* branch = graph()->NewNode(common()->Branch(hint), condition, control());

  control_ = graph()->NewNode(common()->IfFalse(), branch);
  MergeState(label, vars...);

  control_ = AddNode(graph()->NewNode(common()->IfTrue(), branch));
}
767
768 template <typename... Vars>
GotoIf(Node * condition,GraphAssemblerLabel<sizeof...(Vars)> * label,Vars...vars)769 void GraphAssembler::GotoIf(Node* condition,
770 GraphAssemblerLabel<sizeof...(Vars)>* label,
771 Vars... vars) {
772 BranchHint hint =
773 label->IsDeferred() ? BranchHint::kFalse : BranchHint::kNone;
774 return GotoIf(condition, label, hint, vars...);
775 }
776
777 template <typename... Vars>
GotoIfNot(Node * condition,GraphAssemblerLabel<sizeof...(Vars)> * label,Vars...vars)778 void GraphAssembler::GotoIfNot(Node* condition,
779 GraphAssemblerLabel<sizeof...(Vars)>* label,
780 Vars... vars) {
781 BranchHint hint = label->IsDeferred() ? BranchHint::kTrue : BranchHint::kNone;
782 return GotoIfNot(condition, label, hint, vars...);
783 }
784
785 template <typename... Args>
Call(const CallDescriptor * call_descriptor,Node * first_arg,Args...args)786 TNode<Object> GraphAssembler::Call(const CallDescriptor* call_descriptor,
787 Node* first_arg, Args... args) {
788 const Operator* op = common()->Call(call_descriptor);
789 return Call(op, first_arg, args...);
790 }
791
792 template <typename... Args>
Call(const Operator * op,Node * first_arg,Args...args)793 TNode<Object> GraphAssembler::Call(const Operator* op, Node* first_arg,
794 Args... args) {
795 Node* args_array[] = {first_arg, args..., effect(), control()};
796 int size = static_cast<int>(1 + sizeof...(args)) + op->EffectInputCount() +
797 op->ControlInputCount();
798 return Call(op, size, args_array);
799 }
800
// GraphAssembler variant that additionally provides JS-level constants and
// simplified operators via a JSGraph.
class V8_EXPORT_PRIVATE JSGraphAssembler : public GraphAssembler {
 public:
  // Constructs a JSGraphAssembler; the arguments are forwarded to the
  // GraphAssembler base (see there for their meaning).
  JSGraphAssembler(
      JSGraph* jsgraph, Zone* zone,
      base::Optional<NodeChangedCallback> node_changed_callback = base::nullopt,
      bool mark_loop_exits = false)
      : GraphAssembler(jsgraph, zone, node_changed_callback, mark_loop_exits),
        jsgraph_(jsgraph) {}

  Node* SmiConstant(int32_t value);
  TNode<HeapObject> HeapConstant(Handle<HeapObject> object);
  TNode<Object> Constant(const ObjectRef& ref);
  TNode<Number> NumberConstant(double value);
  Node* CEntryStubConstant(int result_size);

  // Singleton constant accessors, e.g. TrueConstant(), UndefinedConstant().
#define SINGLETON_CONST_DECL(Name, Type) TNode<Type> Name##Constant();
  JSGRAPH_SINGLETON_CONSTANT_LIST(SINGLETON_CONST_DECL)
#undef SINGLETON_CONST_DECL

  // Singleton equality tests, e.g. IsTrue(value), IsUndefined(value).
#define SINGLETON_CONST_TEST_DECL(Name, ...) \
  TNode<Boolean> Is##Name(TNode<Object> value);
  JSGRAPH_SINGLETON_CONSTANT_LIST(SINGLETON_CONST_TEST_DECL)
#undef SINGLETON_CONST_TEST_DECL

  Node* Allocate(AllocationType allocation, Node* size);
  Node* LoadField(FieldAccess const&, Node* object);
  template <typename T>
  TNode<T> LoadField(FieldAccess const& access, TNode<HeapObject> object) {
    // TODO(jgruber): Investigate issues on ptr compression bots and enable.
    // DCHECK(IsMachineRepresentationOf<T>(
    //     access.machine_type.representation()));
    return TNode<T>::UncheckedCast(LoadField(access, object));
  }
  Node* LoadElement(ElementAccess const&, Node* object, Node* index);
  template <typename T>
  TNode<T> LoadElement(ElementAccess const& access, TNode<HeapObject> object,
                       TNode<Number> index) {
    // TODO(jgruber): Investigate issues on ptr compression bots and enable.
    // DCHECK(IsMachineRepresentationOf<T>(
    //     access.machine_type.representation()));
    return TNode<T>::UncheckedCast(LoadElement(access, object, index));
  }
  Node* StoreField(FieldAccess const&, Node* object, Node* value);
  Node* StoreElement(ElementAccess const&, Node* object, Node* index,
                     Node* value);
  void TransitionAndStoreElement(MapRef double_map, MapRef fast_map,
                                 TNode<HeapObject> object, TNode<Number> index,
                                 TNode<Object> value);
  TNode<Number> StringLength(TNode<String> string);
  TNode<Boolean> ReferenceEqual(TNode<Object> lhs, TNode<Object> rhs);
  TNode<Number> PlainPrimitiveToNumber(TNode<Object> value);
  TNode<Number> NumberMin(TNode<Number> lhs, TNode<Number> rhs);
  TNode<Number> NumberMax(TNode<Number> lhs, TNode<Number> rhs);
  TNode<Boolean> NumberEqual(TNode<Number> lhs, TNode<Number> rhs);
  TNode<Boolean> NumberLessThan(TNode<Number> lhs, TNode<Number> rhs);
  TNode<Boolean> NumberLessThanOrEqual(TNode<Number> lhs, TNode<Number> rhs);
  TNode<Number> NumberAdd(TNode<Number> lhs, TNode<Number> rhs);
  TNode<Number> NumberSubtract(TNode<Number> lhs, TNode<Number> rhs);
  TNode<String> StringSubstring(TNode<String> string, TNode<Number> from,
                                TNode<Number> to);
  TNode<Boolean> ObjectIsCallable(TNode<Object> value);
  TNode<Boolean> ObjectIsUndetectable(TNode<Object> value);
  Node* CheckIf(Node* cond, DeoptimizeReason reason);
  TNode<Boolean> NumberIsFloat64Hole(TNode<Number> value);
  TNode<Boolean> ToBoolean(TNode<Object> value);
  TNode<Object> ConvertTaggedHoleToUndefined(TNode<Object> value);
  TNode<FixedArrayBase> MaybeGrowFastElements(ElementsKind kind,
                                              const FeedbackSource& feedback,
                                              TNode<JSArray> array,
                                              TNode<FixedArrayBase> elements,
                                              TNode<Number> new_length,
                                              TNode<Number> old_length);
  Node* StringCharCodeAt(TNode<String> string, TNode<Number> position);

  JSGraph* jsgraph() const { return jsgraph_; }
  Isolate* isolate() const { return jsgraph()->isolate(); }
  SimplifiedOperatorBuilder* simplified() const {
    return jsgraph()->simplified();
  }

 protected:
  // Lazily-created operator backing PlainPrimitiveToNumber (cached in
  // to_number_operator_).
  Operator const* PlainPrimitiveToNumberOperator();

 private:
  JSGraph* jsgraph_;
  SetOncePointer<Operator const> to_number_operator_;
};
890
891 } // namespace compiler
892 } // namespace internal
893 } // namespace v8
894
895 #endif // V8_COMPILER_GRAPH_ASSEMBLER_H_
896