1 // Copyright 2017 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/compiler/escape-analysis.h"
6
7 #include "src/codegen/tick-counter.h"
8 #include "src/compiler/linkage.h"
9 #include "src/compiler/node-matchers.h"
10 #include "src/compiler/operator-properties.h"
11 #include "src/compiler/simplified-operator.h"
12 #include "src/handles/handles-inl.h"
13 #include "src/init/bootstrapper.h"
14 #include "src/objects/map-inl.h"
15
#ifdef DEBUG
// Prints trace output when --trace-turbo-escape is enabled; compiled out of
// release builds entirely.
#define TRACE(...)                                    \
  do {                                                \
    if (FLAG_trace_turbo_escape) PrintF(__VA_ARGS__); \
  } while (false)
#else
#define TRACE(...)
#endif
24
25 namespace v8 {
26 namespace internal {
27 namespace compiler {
28
29 template <class T>
30 class Sidetable {
31 public:
Sidetable(Zone * zone)32 explicit Sidetable(Zone* zone) : map_(zone) {}
operator [](const Node * node)33 T& operator[](const Node* node) {
34 NodeId id = node->id();
35 if (id >= map_.size()) {
36 map_.resize(id + 1);
37 }
38 return map_[id];
39 }
40
41 private:
42 ZoneVector<T> map_;
43 };
44
45 template <class T>
46 class SparseSidetable {
47 public:
SparseSidetable(Zone * zone,T def_value=T ())48 explicit SparseSidetable(Zone* zone, T def_value = T())
49 : def_value_(std::move(def_value)), map_(zone) {}
Set(const Node * node,T value)50 void Set(const Node* node, T value) {
51 auto iter = map_.find(node->id());
52 if (iter != map_.end()) {
53 iter->second = std::move(value);
54 } else if (value != def_value_) {
55 map_.insert(iter, std::make_pair(node->id(), std::move(value)));
56 }
57 }
Get(const Node * node) const58 const T& Get(const Node* node) const {
59 auto iter = map_.find(node->id());
60 return iter != map_.end() ? iter->second : def_value_;
61 }
62
63 private:
64 T def_value_;
65 ZoneUnorderedMap<NodeId, T> map_;
66 };
67
68 // Keeps track of the changes to the current node during reduction.
69 // Encapsulates the current state of the IR graph and the reducer state like
70 // side-tables. All access to the IR and the reducer state should happen through
71 // a ReduceScope to ensure that changes and dependencies are tracked and all
72 // necessary node revisitations happen.
73 class ReduceScope {
74 public:
75 using Reduction = EffectGraphReducer::Reduction;
ReduceScope(Node * node,Reduction * reduction)76 explicit ReduceScope(Node* node, Reduction* reduction)
77 : current_node_(node), reduction_(reduction) {}
78
79 protected:
current_node() const80 Node* current_node() const { return current_node_; }
reduction()81 Reduction* reduction() { return reduction_; }
82
83 private:
84 Node* current_node_;
85 Reduction* reduction_;
86 };
87
88 // A VariableTracker object keeps track of the values of variables at all points
89 // of the effect chain and introduces new phi nodes when necessary.
90 // Initially and by default, variables are mapped to nullptr, which means that
91 // the variable allocation point does not dominate the current point on the
92 // effect chain. We map variables that represent uninitialized memory to the
93 // Dead node to ensure it is not read.
94 // Unmapped values are impossible by construction, it is indistinguishable if a
95 // PersistentMap does not contain an element or maps it to the default element.
96 class VariableTracker {
97 private:
98 // The state of all variables at one point in the effect chain.
99 class State {
100 public:
101 using Map = PersistentMap<Variable, Node*>;
102
State(Zone * zone)103 explicit State(Zone* zone) : map_(zone) {}
Get(Variable var) const104 Node* Get(Variable var) const {
105 CHECK(var != Variable::Invalid());
106 return map_.Get(var);
107 }
Set(Variable var,Node * node)108 void Set(Variable var, Node* node) {
109 CHECK(var != Variable::Invalid());
110 return map_.Set(var, node);
111 }
begin() const112 Map::iterator begin() const { return map_.begin(); }
end() const113 Map::iterator end() const { return map_.end(); }
operator !=(const State & other) const114 bool operator!=(const State& other) const { return map_ != other.map_; }
115
116 private:
117 Map map_;
118 };
119
120 public:
121 VariableTracker(JSGraph* graph, EffectGraphReducer* reducer, Zone* zone);
122 VariableTracker(const VariableTracker&) = delete;
123 VariableTracker& operator=(const VariableTracker&) = delete;
124
NewVariable()125 Variable NewVariable() { return Variable(next_variable_++); }
Get(Variable var,Node * effect)126 Node* Get(Variable var, Node* effect) { return table_.Get(effect).Get(var); }
zone()127 Zone* zone() { return zone_; }
128
129 class Scope : public ReduceScope {
130 public:
131 Scope(VariableTracker* tracker, Node* node, Reduction* reduction);
132 ~Scope();
Get(Variable var)133 Maybe<Node*> Get(Variable var) {
134 Node* node = current_state_.Get(var);
135 if (node && node->opcode() == IrOpcode::kDead) {
136 // TODO(tebbi): We use {Dead} as a sentinel for uninitialized memory.
137 // Reading uninitialized memory can only happen in unreachable code. In
138 // this case, we have to mark the object as escaping to avoid dead nodes
139 // in the graph. This is a workaround that should be removed once we can
140 // handle dead nodes everywhere.
141 return Nothing<Node*>();
142 }
143 return Just(node);
144 }
Set(Variable var,Node * node)145 void Set(Variable var, Node* node) { current_state_.Set(var, node); }
146
147 private:
148 VariableTracker* states_;
149 State current_state_;
150 };
151
152 private:
153 State MergeInputs(Node* effect_phi);
154 Zone* zone_;
155 JSGraph* graph_;
156 SparseSidetable<State> table_;
157 ZoneVector<Node*> buffer_;
158 EffectGraphReducer* reducer_;
159 int next_variable_ = 0;
160 TickCounter* const tick_counter_;
161 };
162
163 // Encapsulates the current state of the escape analysis reducer to preserve
164 // invariants regarding changes and re-visitation.
165 class EscapeAnalysisTracker : public ZoneObject {
166 public:
EscapeAnalysisTracker(JSGraph * jsgraph,EffectGraphReducer * reducer,Zone * zone)167 EscapeAnalysisTracker(JSGraph* jsgraph, EffectGraphReducer* reducer,
168 Zone* zone)
169 : virtual_objects_(zone),
170 replacements_(zone),
171 variable_states_(jsgraph, reducer, zone),
172 jsgraph_(jsgraph),
173 zone_(zone) {}
174 EscapeAnalysisTracker(const EscapeAnalysisTracker&) = delete;
175 EscapeAnalysisTracker& operator=(const EscapeAnalysisTracker&) = delete;
176
177 class Scope : public VariableTracker::Scope {
178 public:
Scope(EffectGraphReducer * reducer,EscapeAnalysisTracker * tracker,Node * node,Reduction * reduction)179 Scope(EffectGraphReducer* reducer, EscapeAnalysisTracker* tracker,
180 Node* node, Reduction* reduction)
181 : VariableTracker::Scope(&tracker->variable_states_, node, reduction),
182 tracker_(tracker),
183 reducer_(reducer) {}
GetVirtualObject(Node * node)184 const VirtualObject* GetVirtualObject(Node* node) {
185 VirtualObject* vobject = tracker_->virtual_objects_.Get(node);
186 if (vobject) vobject->AddDependency(current_node());
187 return vobject;
188 }
189 // Create or retrieve a virtual object for the current node.
InitVirtualObject(int size)190 const VirtualObject* InitVirtualObject(int size) {
191 DCHECK_EQ(IrOpcode::kAllocate, current_node()->opcode());
192 VirtualObject* vobject = tracker_->virtual_objects_.Get(current_node());
193 if (vobject) {
194 CHECK(vobject->size() == size);
195 } else {
196 vobject = tracker_->NewVirtualObject(size);
197 }
198 if (vobject) vobject->AddDependency(current_node());
199 vobject_ = vobject;
200 return vobject;
201 }
202
SetVirtualObject(Node * object)203 void SetVirtualObject(Node* object) {
204 vobject_ = tracker_->virtual_objects_.Get(object);
205 }
206
SetEscaped(Node * node)207 void SetEscaped(Node* node) {
208 if (VirtualObject* object = tracker_->virtual_objects_.Get(node)) {
209 if (object->HasEscaped()) return;
210 TRACE("Setting %s#%d to escaped because of use by %s#%d\n",
211 node->op()->mnemonic(), node->id(),
212 current_node()->op()->mnemonic(), current_node()->id());
213 object->SetEscaped();
214 object->RevisitDependants(reducer_);
215 }
216 }
217 // The inputs of the current node have to be accessed through the scope to
218 // ensure that they respect the node replacements.
ValueInput(int i)219 Node* ValueInput(int i) {
220 return tracker_->ResolveReplacement(
221 NodeProperties::GetValueInput(current_node(), i));
222 }
ContextInput()223 Node* ContextInput() {
224 return tracker_->ResolveReplacement(
225 NodeProperties::GetContextInput(current_node()));
226 }
227
SetReplacement(Node * replacement)228 void SetReplacement(Node* replacement) {
229 replacement_ = replacement;
230 vobject_ =
231 replacement ? tracker_->virtual_objects_.Get(replacement) : nullptr;
232 if (replacement) {
233 TRACE("Set %s#%d as replacement.\n", replacement->op()->mnemonic(),
234 replacement->id());
235 } else {
236 TRACE("Set nullptr as replacement.\n");
237 }
238 }
239
MarkForDeletion()240 void MarkForDeletion() { SetReplacement(tracker_->jsgraph_->Dead()); }
241
~Scope()242 ~Scope() {
243 if (replacement_ != tracker_->replacements_[current_node()] ||
244 vobject_ != tracker_->virtual_objects_.Get(current_node())) {
245 reduction()->set_value_changed();
246 }
247 tracker_->replacements_[current_node()] = replacement_;
248 tracker_->virtual_objects_.Set(current_node(), vobject_);
249 }
250
251 private:
252 EscapeAnalysisTracker* tracker_;
253 EffectGraphReducer* reducer_;
254 VirtualObject* vobject_ = nullptr;
255 Node* replacement_ = nullptr;
256 };
257
GetReplacementOf(Node * node)258 Node* GetReplacementOf(Node* node) { return replacements_[node]; }
ResolveReplacement(Node * node)259 Node* ResolveReplacement(Node* node) {
260 if (Node* replacement = GetReplacementOf(node)) {
261 return replacement;
262 }
263 return node;
264 }
265
266 private:
267 friend class EscapeAnalysisResult;
268 static const size_t kMaxTrackedObjects = 100;
269
NewVirtualObject(int size)270 VirtualObject* NewVirtualObject(int size) {
271 if (next_object_id_ >= kMaxTrackedObjects) return nullptr;
272 return zone_->New<VirtualObject>(&variable_states_, next_object_id_++,
273 size);
274 }
275
276 SparseSidetable<VirtualObject*> virtual_objects_;
277 Sidetable<Node*> replacements_;
278 VariableTracker variable_states_;
279 VirtualObject::Id next_object_id_ = 0;
280 JSGraph* const jsgraph_;
281 Zone* const zone_;
282 };
283
EffectGraphReducer(Graph * graph,std::function<void (Node *,Reduction *)> reduce,TickCounter * tick_counter,Zone * zone)284 EffectGraphReducer::EffectGraphReducer(
285 Graph* graph, std::function<void(Node*, Reduction*)> reduce,
286 TickCounter* tick_counter, Zone* zone)
287 : graph_(graph),
288 state_(graph, kNumStates),
289 revisit_(zone),
290 stack_(zone),
291 reduce_(std::move(reduce)),
292 tick_counter_(tick_counter) {}
293
ReduceFrom(Node * node)294 void EffectGraphReducer::ReduceFrom(Node* node) {
295 // Perform DFS and eagerly trigger revisitation as soon as possible.
296 // A stack element {node, i} indicates that input i of node should be visited
297 // next.
298 DCHECK(stack_.empty());
299 stack_.push({node, 0});
300 while (!stack_.empty()) {
301 tick_counter_->TickAndMaybeEnterSafepoint();
302 Node* current = stack_.top().node;
303 int& input_index = stack_.top().input_index;
304 if (input_index < current->InputCount()) {
305 Node* input = current->InputAt(input_index);
306 input_index++;
307 switch (state_.Get(input)) {
308 case State::kVisited:
309 // The input is already reduced.
310 break;
311 case State::kOnStack:
312 // The input is on the DFS stack right now, so it will be revisited
313 // later anyway.
314 break;
315 case State::kUnvisited:
316 case State::kRevisit: {
317 state_.Set(input, State::kOnStack);
318 stack_.push({input, 0});
319 break;
320 }
321 }
322 } else {
323 stack_.pop();
324 Reduction reduction;
325 reduce_(current, &reduction);
326 for (Edge edge : current->use_edges()) {
327 // Mark uses for revisitation.
328 Node* use = edge.from();
329 if (NodeProperties::IsEffectEdge(edge)) {
330 if (reduction.effect_changed()) Revisit(use);
331 } else {
332 if (reduction.value_changed()) Revisit(use);
333 }
334 }
335 state_.Set(current, State::kVisited);
336 // Process the revisitation buffer immediately. This improves performance
337 // of escape analysis. Using a stack for {revisit_} reverses the order in
338 // which the revisitation happens. This also seems to improve performance.
339 while (!revisit_.empty()) {
340 Node* revisit = revisit_.top();
341 if (state_.Get(revisit) == State::kRevisit) {
342 state_.Set(revisit, State::kOnStack);
343 stack_.push({revisit, 0});
344 }
345 revisit_.pop();
346 }
347 }
348 }
349 }
350
Revisit(Node * node)351 void EffectGraphReducer::Revisit(Node* node) {
352 if (state_.Get(node) == State::kVisited) {
353 TRACE(" Queueing for revisit: %s#%d\n", node->op()->mnemonic(),
354 node->id());
355 state_.Set(node, State::kRevisit);
356 revisit_.push(node);
357 }
358 }
359
VariableTracker(JSGraph * graph,EffectGraphReducer * reducer,Zone * zone)360 VariableTracker::VariableTracker(JSGraph* graph, EffectGraphReducer* reducer,
361 Zone* zone)
362 : zone_(zone),
363 graph_(graph),
364 table_(zone, State(zone)),
365 buffer_(zone),
366 reducer_(reducer),
367 tick_counter_(reducer->tick_counter()) {}
368
Scope(VariableTracker * states,Node * node,Reduction * reduction)369 VariableTracker::Scope::Scope(VariableTracker* states, Node* node,
370 Reduction* reduction)
371 : ReduceScope(node, reduction),
372 states_(states),
373 current_state_(states->zone_) {
374 switch (node->opcode()) {
375 case IrOpcode::kEffectPhi:
376 current_state_ = states_->MergeInputs(node);
377 break;
378 default:
379 int effect_inputs = node->op()->EffectInputCount();
380 if (effect_inputs == 1) {
381 current_state_ =
382 states_->table_.Get(NodeProperties::GetEffectInput(node, 0));
383 } else {
384 DCHECK_EQ(0, effect_inputs);
385 }
386 }
387 }
388
~Scope()389 VariableTracker::Scope::~Scope() {
390 if (!reduction()->effect_changed() &&
391 states_->table_.Get(current_node()) != current_state_) {
392 reduction()->set_effect_changed();
393 }
394 states_->table_.Set(current_node(), current_state_);
395 }
396
MergeInputs(Node * effect_phi)397 VariableTracker::State VariableTracker::MergeInputs(Node* effect_phi) {
398 // A variable that is mapped to [nullptr] was not assigned a value on every
399 // execution path to the current effect phi. Relying on the invariant that
400 // every variable is initialized (at least with a sentinel like the Dead
401 // node), this means that the variable initialization does not dominate the
402 // current point. So for loop effect phis, we can keep nullptr for a variable
403 // as long as the first input of the loop has nullptr for this variable. For
404 // non-loop effect phis, we can even keep it nullptr as long as any input has
405 // nullptr.
406 DCHECK_EQ(IrOpcode::kEffectPhi, effect_phi->opcode());
407 int arity = effect_phi->op()->EffectInputCount();
408 Node* control = NodeProperties::GetControlInput(effect_phi, 0);
409 TRACE("control: %s#%d\n", control->op()->mnemonic(), control->id());
410 bool is_loop = control->opcode() == IrOpcode::kLoop;
411 buffer_.reserve(arity + 1);
412
413 State first_input = table_.Get(NodeProperties::GetEffectInput(effect_phi, 0));
414 State result = first_input;
415 for (std::pair<Variable, Node*> var_value : first_input) {
416 tick_counter_->TickAndMaybeEnterSafepoint();
417 if (Node* value = var_value.second) {
418 Variable var = var_value.first;
419 TRACE("var %i:\n", var.id_);
420 buffer_.clear();
421 buffer_.push_back(value);
422 bool identical_inputs = true;
423 int num_defined_inputs = 1;
424 TRACE(" input 0: %s#%d\n", value->op()->mnemonic(), value->id());
425 for (int i = 1; i < arity; ++i) {
426 Node* next_value =
427 table_.Get(NodeProperties::GetEffectInput(effect_phi, i)).Get(var);
428 if (next_value != value) identical_inputs = false;
429 if (next_value != nullptr) {
430 num_defined_inputs++;
431 TRACE(" input %i: %s#%d\n", i, next_value->op()->mnemonic(),
432 next_value->id());
433 } else {
434 TRACE(" input %i: nullptr\n", i);
435 }
436 buffer_.push_back(next_value);
437 }
438
439 Node* old_value = table_.Get(effect_phi).Get(var);
440 if (old_value) {
441 TRACE(" old: %s#%d\n", old_value->op()->mnemonic(), old_value->id());
442 } else {
443 TRACE(" old: nullptr\n");
444 }
445 // Reuse a previously created phi node if possible.
446 if (old_value && old_value->opcode() == IrOpcode::kPhi &&
447 NodeProperties::GetControlInput(old_value, 0) == control) {
448 // Since a phi node can never dominate its control node,
449 // [old_value] cannot originate from the inputs. Thus [old_value]
450 // must have been created by a previous reduction of this [effect_phi].
451 for (int i = 0; i < arity; ++i) {
452 Node* old_input = NodeProperties::GetValueInput(old_value, i);
453 Node* new_input = buffer_[i] ? buffer_[i] : graph_->Dead();
454 if (old_input != new_input) {
455 NodeProperties::ReplaceValueInput(old_value, new_input, i);
456 reducer_->Revisit(old_value);
457 }
458 }
459 result.Set(var, old_value);
460 } else {
461 if (num_defined_inputs == 1 && is_loop) {
462 // For loop effect phis, the variable initialization dominates iff it
463 // dominates the first input.
464 DCHECK_EQ(2, arity);
465 DCHECK_EQ(value, buffer_[0]);
466 result.Set(var, value);
467 } else if (num_defined_inputs < arity) {
468 // If the variable is undefined on some input of this non-loop effect
469 // phi, then its initialization does not dominate this point.
470 result.Set(var, nullptr);
471 } else {
472 DCHECK_EQ(num_defined_inputs, arity);
473 // We only create a phi if the values are different.
474 if (identical_inputs) {
475 result.Set(var, value);
476 } else {
477 TRACE("Creating new phi\n");
478 buffer_.push_back(control);
479 Node* phi = graph_->graph()->NewNode(
480 graph_->common()->Phi(MachineRepresentation::kTagged, arity),
481 arity + 1, &buffer_.front());
482 // TODO(tebbi): Computing precise types here is tricky, because of
483 // the necessary revisitations. If we really need this, we should
484 // probably do it afterwards.
485 NodeProperties::SetType(phi, Type::Any());
486 reducer_->AddRoot(phi);
487 result.Set(var, phi);
488 }
489 }
490 }
491 #ifdef DEBUG
492 if (Node* result_node = result.Get(var)) {
493 TRACE(" result: %s#%d\n", result_node->op()->mnemonic(),
494 result_node->id());
495 } else {
496 TRACE(" result: nullptr\n");
497 }
498 #endif
499 }
500 }
501 return result;
502 }
503
504 namespace {
505
OffsetOfFieldAccess(const Operator * op)506 int OffsetOfFieldAccess(const Operator* op) {
507 DCHECK(op->opcode() == IrOpcode::kLoadField ||
508 op->opcode() == IrOpcode::kStoreField);
509 FieldAccess access = FieldAccessOf(op);
510 return access.offset;
511 }
512
OffsetOfElementAt(ElementAccess const & access,int index)513 int OffsetOfElementAt(ElementAccess const& access, int index) {
514 DCHECK_GE(index, 0);
515 DCHECK_GE(ElementSizeLog2Of(access.machine_type.representation()),
516 kTaggedSizeLog2);
517 return access.header_size +
518 (index << ElementSizeLog2Of(access.machine_type.representation()));
519 }
520
OffsetOfElementsAccess(const Operator * op,Node * index_node)521 Maybe<int> OffsetOfElementsAccess(const Operator* op, Node* index_node) {
522 DCHECK(op->opcode() == IrOpcode::kLoadElement ||
523 op->opcode() == IrOpcode::kStoreElement);
524 Type index_type = NodeProperties::GetType(index_node);
525 if (!index_type.Is(Type::OrderedNumber())) return Nothing<int>();
526 double max = index_type.Max();
527 double min = index_type.Min();
528 int index = static_cast<int>(min);
529 if (index < 0 || index != min || index != max) return Nothing<int>();
530 return Just(OffsetOfElementAt(ElementAccessOf(op), index));
531 }
532
LowerCompareMapsWithoutLoad(Node * checked_map,ZoneHandleSet<Map> const & checked_against,JSGraph * jsgraph)533 Node* LowerCompareMapsWithoutLoad(Node* checked_map,
534 ZoneHandleSet<Map> const& checked_against,
535 JSGraph* jsgraph) {
536 Node* true_node = jsgraph->TrueConstant();
537 Node* false_node = jsgraph->FalseConstant();
538 Node* replacement = false_node;
539 for (Handle<Map> map : checked_against) {
540 Node* map_node = jsgraph->HeapConstant(map);
541 // We cannot create a HeapConstant type here as we are off-thread.
542 NodeProperties::SetType(map_node, Type::Internal());
543 Node* comparison = jsgraph->graph()->NewNode(
544 jsgraph->simplified()->ReferenceEqual(), checked_map, map_node);
545 NodeProperties::SetType(comparison, Type::Boolean());
546 if (replacement == false_node) {
547 replacement = comparison;
548 } else {
549 replacement = jsgraph->graph()->NewNode(
550 jsgraph->common()->Select(MachineRepresentation::kTaggedPointer),
551 comparison, true_node, replacement);
552 NodeProperties::SetType(replacement, Type::Boolean());
553 }
554 }
555 return replacement;
556 }
557
ReduceNode(const Operator * op,EscapeAnalysisTracker::Scope * current,JSGraph * jsgraph)558 void ReduceNode(const Operator* op, EscapeAnalysisTracker::Scope* current,
559 JSGraph* jsgraph) {
560 switch (op->opcode()) {
561 case IrOpcode::kAllocate: {
562 NumberMatcher size(current->ValueInput(0));
563 if (!size.HasResolvedValue()) break;
564 int size_int = static_cast<int>(size.ResolvedValue());
565 if (size_int != size.ResolvedValue()) break;
566 if (const VirtualObject* vobject = current->InitVirtualObject(size_int)) {
567 // Initialize with dead nodes as a sentinel for uninitialized memory.
568 for (Variable field : *vobject) {
569 current->Set(field, jsgraph->Dead());
570 }
571 }
572 break;
573 }
574 case IrOpcode::kFinishRegion:
575 current->SetVirtualObject(current->ValueInput(0));
576 break;
577 case IrOpcode::kStoreField: {
578 Node* object = current->ValueInput(0);
579 Node* value = current->ValueInput(1);
580 const VirtualObject* vobject = current->GetVirtualObject(object);
581 Variable var;
582 if (vobject && !vobject->HasEscaped() &&
583 vobject->FieldAt(OffsetOfFieldAccess(op)).To(&var)) {
584 current->Set(var, value);
585 current->MarkForDeletion();
586 } else {
587 current->SetEscaped(object);
588 current->SetEscaped(value);
589 }
590 break;
591 }
592 case IrOpcode::kStoreElement: {
593 Node* object = current->ValueInput(0);
594 Node* index = current->ValueInput(1);
595 Node* value = current->ValueInput(2);
596 const VirtualObject* vobject = current->GetVirtualObject(object);
597 int offset;
598 Variable var;
599 if (vobject && !vobject->HasEscaped() &&
600 OffsetOfElementsAccess(op, index).To(&offset) &&
601 vobject->FieldAt(offset).To(&var)) {
602 current->Set(var, value);
603 current->MarkForDeletion();
604 } else {
605 current->SetEscaped(value);
606 current->SetEscaped(object);
607 }
608 break;
609 }
610 case IrOpcode::kLoadField: {
611 Node* object = current->ValueInput(0);
612 const VirtualObject* vobject = current->GetVirtualObject(object);
613 Variable var;
614 Node* value;
615 if (vobject && !vobject->HasEscaped() &&
616 vobject->FieldAt(OffsetOfFieldAccess(op)).To(&var) &&
617 current->Get(var).To(&value)) {
618 current->SetReplacement(value);
619 } else {
620 current->SetEscaped(object);
621 }
622 break;
623 }
624 case IrOpcode::kLoadElement: {
625 Node* object = current->ValueInput(0);
626 Node* index = current->ValueInput(1);
627 const VirtualObject* vobject = current->GetVirtualObject(object);
628 int offset;
629 Variable var;
630 Node* value;
631 if (vobject && !vobject->HasEscaped() &&
632 OffsetOfElementsAccess(op, index).To(&offset) &&
633 vobject->FieldAt(offset).To(&var) && current->Get(var).To(&value)) {
634 current->SetReplacement(value);
635 break;
636 } else if (vobject && !vobject->HasEscaped()) {
637 // Compute the known length (aka the number of elements) of {object}
638 // based on the virtual object information.
639 ElementAccess const& access = ElementAccessOf(op);
640 int const length =
641 (vobject->size() - access.header_size) >>
642 ElementSizeLog2Of(access.machine_type.representation());
643 Variable var0, var1;
644 Node* value0;
645 Node* value1;
646 if (length == 1 &&
647 vobject->FieldAt(OffsetOfElementAt(access, 0)).To(&var) &&
648 current->Get(var).To(&value) &&
649 (value == nullptr ||
650 NodeProperties::GetType(value).Is(access.type))) {
651 // The {object} has no elements, and we know that the LoadElement
652 // {index} must be within bounds, thus it must always yield this
653 // one element of {object}.
654 current->SetReplacement(value);
655 break;
656 } else if (length == 2 &&
657 vobject->FieldAt(OffsetOfElementAt(access, 0)).To(&var0) &&
658 current->Get(var0).To(&value0) &&
659 (value0 == nullptr ||
660 NodeProperties::GetType(value0).Is(access.type)) &&
661 vobject->FieldAt(OffsetOfElementAt(access, 1)).To(&var1) &&
662 current->Get(var1).To(&value1) &&
663 (value1 == nullptr ||
664 NodeProperties::GetType(value1).Is(access.type))) {
665 if (value0 && value1) {
666 // The {object} has exactly two elements, so the LoadElement
667 // must return one of them (i.e. either the element at index
668 // 0 or the one at index 1). So we can turn the LoadElement
669 // into a Select operation instead (still allowing the {object}
670 // to be scalar replaced). We must however mark the elements
671 // of the {object} itself as escaping.
672 Node* check =
673 jsgraph->graph()->NewNode(jsgraph->simplified()->NumberEqual(),
674 index, jsgraph->ZeroConstant());
675 NodeProperties::SetType(check, Type::Boolean());
676 Node* select = jsgraph->graph()->NewNode(
677 jsgraph->common()->Select(access.machine_type.representation()),
678 check, value0, value1);
679 NodeProperties::SetType(select, access.type);
680 current->SetReplacement(select);
681 current->SetEscaped(value0);
682 current->SetEscaped(value1);
683 break;
684 } else {
685 // If the variables have no values, we have
686 // not reached the fixed-point yet.
687 break;
688 }
689 }
690 }
691 current->SetEscaped(object);
692 break;
693 }
694 case IrOpcode::kTypeGuard: {
695 current->SetVirtualObject(current->ValueInput(0));
696 break;
697 }
698 case IrOpcode::kReferenceEqual: {
699 Node* left = current->ValueInput(0);
700 Node* right = current->ValueInput(1);
701 const VirtualObject* left_object = current->GetVirtualObject(left);
702 const VirtualObject* right_object = current->GetVirtualObject(right);
703 Node* replacement = nullptr;
704 if (left_object && !left_object->HasEscaped()) {
705 if (right_object && !right_object->HasEscaped() &&
706 left_object->id() == right_object->id()) {
707 replacement = jsgraph->TrueConstant();
708 } else {
709 replacement = jsgraph->FalseConstant();
710 }
711 } else if (right_object && !right_object->HasEscaped()) {
712 replacement = jsgraph->FalseConstant();
713 }
714 // TODO(tebbi) This is a workaround for uninhabited types. If we
715 // replaced a value of uninhabited type with a constant, we would
716 // widen the type of the node. This could produce inconsistent
717 // types (which might confuse representation selection). We get
718 // around this by refusing to constant-fold and escape-analyze
719 // if the type is not inhabited.
720 if (replacement && !NodeProperties::GetType(left).IsNone() &&
721 !NodeProperties::GetType(right).IsNone()) {
722 current->SetReplacement(replacement);
723 break;
724 }
725 current->SetEscaped(left);
726 current->SetEscaped(right);
727 break;
728 }
729 case IrOpcode::kCheckMaps: {
730 CheckMapsParameters params = CheckMapsParametersOf(op);
731 Node* checked = current->ValueInput(0);
732 const VirtualObject* vobject = current->GetVirtualObject(checked);
733 Variable map_field;
734 Node* map;
735 if (vobject && !vobject->HasEscaped() &&
736 vobject->FieldAt(HeapObject::kMapOffset).To(&map_field) &&
737 current->Get(map_field).To(&map)) {
738 if (map) {
739 Type const map_type = NodeProperties::GetType(map);
740 if (map_type.IsHeapConstant() &&
741 params.maps().contains(
742 map_type.AsHeapConstant()->Ref().AsMap().object())) {
743 current->MarkForDeletion();
744 break;
745 }
746 } else {
747 // If the variable has no value, we have not reached the fixed-point
748 // yet.
749 break;
750 }
751 }
752 current->SetEscaped(checked);
753 break;
754 }
755 case IrOpcode::kCompareMaps: {
756 Node* object = current->ValueInput(0);
757 const VirtualObject* vobject = current->GetVirtualObject(object);
758 Variable map_field;
759 Node* object_map;
760 if (vobject && !vobject->HasEscaped() &&
761 vobject->FieldAt(HeapObject::kMapOffset).To(&map_field) &&
762 current->Get(map_field).To(&object_map)) {
763 if (object_map) {
764 current->SetReplacement(LowerCompareMapsWithoutLoad(
765 object_map, CompareMapsParametersOf(op), jsgraph));
766 break;
767 } else {
768 // If the variable has no value, we have not reached the fixed-point
769 // yet.
770 break;
771 }
772 }
773 current->SetEscaped(object);
774 break;
775 }
776 case IrOpcode::kCheckHeapObject: {
777 Node* checked = current->ValueInput(0);
778 switch (checked->opcode()) {
779 case IrOpcode::kAllocate:
780 case IrOpcode::kFinishRegion:
781 case IrOpcode::kHeapConstant:
782 current->SetReplacement(checked);
783 break;
784 default:
785 current->SetEscaped(checked);
786 break;
787 }
788 break;
789 }
790 case IrOpcode::kMapGuard: {
791 Node* object = current->ValueInput(0);
792 const VirtualObject* vobject = current->GetVirtualObject(object);
793 if (vobject && !vobject->HasEscaped()) {
794 current->MarkForDeletion();
795 }
796 break;
797 }
798 case IrOpcode::kStateValues:
799 case IrOpcode::kFrameState:
800 // These uses are always safe.
801 break;
802 default: {
803 // For unknown nodes, treat all value inputs as escaping.
804 int value_input_count = op->ValueInputCount();
805 for (int i = 0; i < value_input_count; ++i) {
806 Node* input = current->ValueInput(i);
807 current->SetEscaped(input);
808 }
809 if (OperatorProperties::HasContextInput(op)) {
810 current->SetEscaped(current->ContextInput());
811 }
812 break;
813 }
814 }
815 }
816
817 } // namespace
818
Reduce(Node * node,Reduction * reduction)819 void EscapeAnalysis::Reduce(Node* node, Reduction* reduction) {
820 const Operator* op = node->op();
821 TRACE("Reducing %s#%d\n", op->mnemonic(), node->id());
822
823 EscapeAnalysisTracker::Scope current(this, tracker_, node, reduction);
824 ReduceNode(op, ¤t, jsgraph());
825 }
826
EscapeAnalysis(JSGraph * jsgraph,TickCounter * tick_counter,Zone * zone)827 EscapeAnalysis::EscapeAnalysis(JSGraph* jsgraph, TickCounter* tick_counter,
828 Zone* zone)
829 : EffectGraphReducer(
830 jsgraph->graph(),
831 [this](Node* node, Reduction* reduction) { Reduce(node, reduction); },
832 tick_counter, zone),
833 tracker_(zone->New<EscapeAnalysisTracker>(jsgraph, this, zone)),
834 jsgraph_(jsgraph) {}
835
GetReplacementOf(Node * node)836 Node* EscapeAnalysisResult::GetReplacementOf(Node* node) {
837 Node* replacement = tracker_->GetReplacementOf(node);
838 // Replacements cannot have replacements. This is important to ensure
839 // re-visitation: If a replacement is replaced, then all nodes accessing
840 // the replacement have to be updated.
841 if (replacement) DCHECK_NULL(tracker_->GetReplacementOf(replacement));
842 return replacement;
843 }
844
GetVirtualObjectField(const VirtualObject * vobject,int field,Node * effect)845 Node* EscapeAnalysisResult::GetVirtualObjectField(const VirtualObject* vobject,
846 int field, Node* effect) {
847 return tracker_->variable_states_.Get(vobject->FieldAt(field).FromJust(),
848 effect);
849 }
850
GetVirtualObject(Node * node)851 const VirtualObject* EscapeAnalysisResult::GetVirtualObject(Node* node) {
852 return tracker_->virtual_objects_.Get(node);
853 }
854
VirtualObject(VariableTracker * var_states,VirtualObject::Id id,int size)855 VirtualObject::VirtualObject(VariableTracker* var_states, VirtualObject::Id id,
856 int size)
857 : Dependable(var_states->zone()), id_(id), fields_(var_states->zone()) {
858 DCHECK(IsAligned(size, kTaggedSize));
859 TRACE("Creating VirtualObject id:%d size:%d\n", id, size);
860 int num_fields = size / kTaggedSize;
861 fields_.reserve(num_fields);
862 for (int i = 0; i < num_fields; ++i) {
863 fields_.push_back(var_states->NewVariable());
864 }
865 }
866
867 #undef TRACE
868
869 } // namespace compiler
870 } // namespace internal
871 } // namespace v8
872