// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/escape-analysis.h"

#include "src/bootstrapper.h"
#include "src/compiler/linkage.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/operator-properties.h"
#include "src/compiler/simplified-operator.h"

#ifdef DEBUG
#define TRACE(...)                                    \
  do {                                                \
    if (FLAG_trace_turbo_escape) PrintF(__VA_ARGS__); \
  } while (false)
#else
#define TRACE(...)
#endif

namespace v8 {
namespace internal {
namespace compiler {

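// A dense sidetable, mapping a node's id to a value of type {T}. The backing
// vector grows on demand, so untouched entries default to {T()}.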
template <class T>
class Sidetable {
 public:
  explicit Sidetable(Zone* zone) : map_(zone) {}
  T& operator[](const Node* node) {
    NodeId id = node->id();
    if (id >= map_.size()) {
      map_.resize(id + 1);
    }
    return map_[id];
  }

 private:
  ZoneVector<T> map_;
};

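// A sparse sidetable that only materializes entries whose value differs from
// the default; lookups of untracked nodes return {def_value_}.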
template <class T>
class SparseSidetable {
 public:
  explicit SparseSidetable(Zone* zone, T def_value = T())
      : def_value_(std::move(def_value)), map_(zone) {}
  void Set(const Node* node, T value) {
    auto iter = map_.find(node->id());
    if (iter != map_.end()) {
      iter->second = std::move(value);
    } else if (value != def_value_) {
      map_.insert(iter, std::make_pair(node->id(), std::move(value)));
    }
  }
  const T& Get(const Node* node) const {
    auto iter = map_.find(node->id());
    return iter != map_.end() ? iter->second : def_value_;
  }

 private:
  T def_value_;
  ZoneUnorderedMap<NodeId, T> map_;
};

// Keeps track of the changes to the current node during reduction.
// Encapsulates the current state of the IR graph and the reducer state like
// side-tables. All access to the IR and the reducer state should happen
// through a ReduceScope to ensure that changes and dependencies are tracked
// and all necessary node revisitations happen.
class ReduceScope {
 public:
  typedef EffectGraphReducer::Reduction Reduction;
  explicit ReduceScope(Node* node, Reduction* reduction)
      : current_node_(node), reduction_(reduction) {}

 protected:
  Node* current_node() const { return current_node_; }
  Reduction* reduction() { return reduction_; }

 private:
  Node* current_node_;
  Reduction* reduction_;
};

// A VariableTracker object keeps track of the values of variables at all
// points of the effect chain and introduces new phi nodes when necessary.
// Initially and by default, variables are mapped to nullptr, which means that
// the variable allocation point does not dominate the current point on the
// effect chain. We map variables that represent uninitialized memory to the
// Dead node to ensure it is not read.
// Unmapped values are impossible by construction: it is indistinguishable
// whether a PersistentMap does not contain an element or maps it to the
// default element.
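// Typical use (sketch): each node reduction opens a {VariableTracker::Scope},
// reads variables with {Get} and writes them with {Set}; the scope's
// destructor commits the resulting state for the node's position in the
// effect chain and flags an effect change if the state differs.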
class VariableTracker {
 private:
  // The state of all variables at one point in the effect chain.
  class State {
    typedef PersistentMap<Variable, Node*> Map;

   public:
    explicit State(Zone* zone) : map_(zone) {}
    Node* Get(Variable var) const {
      CHECK(var != Variable::Invalid());
      return map_.Get(var);
    }
    void Set(Variable var, Node* node) {
      CHECK(var != Variable::Invalid());
      return map_.Set(var, node);
    }
    Map::iterator begin() const { return map_.begin(); }
    Map::iterator end() const { return map_.end(); }
    bool operator!=(const State& other) const { return map_ != other.map_; }

   private:
    Map map_;
  };

 public:
  VariableTracker(JSGraph* graph, EffectGraphReducer* reducer, Zone* zone);
  Variable NewVariable() { return Variable(next_variable_++); }
  Node* Get(Variable var, Node* effect) { return table_.Get(effect).Get(var); }
  Zone* zone() { return zone_; }

  class Scope : public ReduceScope {
   public:
    Scope(VariableTracker* tracker, Node* node, Reduction* reduction);
    ~Scope();
    Maybe<Node*> Get(Variable var) {
      Node* node = current_state_.Get(var);
      if (node && node->opcode() == IrOpcode::kDead) {
        // TODO(tebbi): We use {Dead} as a sentinel for uninitialized memory.
        // Reading uninitialized memory can only happen in unreachable code. In
        // this case, we have to mark the object as escaping to avoid dead
        // nodes in the graph. This is a workaround that should be removed once
        // we can handle dead nodes everywhere.
        return Nothing<Node*>();
      }
      return Just(node);
    }
    void Set(Variable var, Node* node) { current_state_.Set(var, node); }

   private:
    VariableTracker* states_;
    State current_state_;
  };

 private:
  State MergeInputs(Node* effect_phi);
  Zone* zone_;
  JSGraph* graph_;
  SparseSidetable<State> table_;
  ZoneVector<Node*> buffer_;
  EffectGraphReducer* reducer_;
  int next_variable_ = 0;

  DISALLOW_COPY_AND_ASSIGN(VariableTracker);
};

// Encapsulates the current state of the escape analysis reducer to preserve
// invariants regarding changes and re-visitation.
class EscapeAnalysisTracker : public ZoneObject {
 public:
  EscapeAnalysisTracker(JSGraph* jsgraph, EffectGraphReducer* reducer,
                        Zone* zone)
      : virtual_objects_(zone),
        replacements_(zone),
        variable_states_(jsgraph, reducer, zone),
        jsgraph_(jsgraph),
        zone_(zone) {}

  class Scope : public VariableTracker::Scope {
   public:
    Scope(EffectGraphReducer* reducer, EscapeAnalysisTracker* tracker,
          Node* node, Reduction* reduction)
        : VariableTracker::Scope(&tracker->variable_states_, node, reduction),
          tracker_(tracker),
          reducer_(reducer) {}
    const VirtualObject* GetVirtualObject(Node* node) {
      VirtualObject* vobject = tracker_->virtual_objects_.Get(node);
      if (vobject) vobject->AddDependency(current_node());
      return vobject;
    }
    // Create or retrieve a virtual object for the current node.
    const VirtualObject* InitVirtualObject(int size) {
      DCHECK_EQ(IrOpcode::kAllocate, current_node()->opcode());
      VirtualObject* vobject = tracker_->virtual_objects_.Get(current_node());
      if (vobject) {
        CHECK(vobject->size() == size);
      } else {
        vobject = tracker_->NewVirtualObject(size);
      }
      if (vobject) vobject->AddDependency(current_node());
      vobject_ = vobject;
      return vobject;
    }

    void SetVirtualObject(Node* object) {
      vobject_ = tracker_->virtual_objects_.Get(object);
    }

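    // Marks the tracked object allocated at {node}, if any, as escaping and
    // revisits all nodes that recorded a dependency on it.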
    void SetEscaped(Node* node) {
      if (VirtualObject* object = tracker_->virtual_objects_.Get(node)) {
        if (object->HasEscaped()) return;
        TRACE("Setting %s#%d to escaped because of use by %s#%d\n",
              node->op()->mnemonic(), node->id(),
              current_node()->op()->mnemonic(), current_node()->id());
        object->SetEscaped();
        object->RevisitDependants(reducer_);
      }
    }
    // The inputs of the current node have to be accessed through the scope to
    // ensure that they respect the node replacements.
    Node* ValueInput(int i) {
      return tracker_->ResolveReplacement(
          NodeProperties::GetValueInput(current_node(), i));
    }
    Node* ContextInput() {
      return tracker_->ResolveReplacement(
          NodeProperties::GetContextInput(current_node()));
    }

    void SetReplacement(Node* replacement) {
      replacement_ = replacement;
      vobject_ =
          replacement ? tracker_->virtual_objects_.Get(replacement) : nullptr;
      if (replacement) {
        TRACE("Set %s#%d as replacement.\n", replacement->op()->mnemonic(),
              replacement->id());
      } else {
        TRACE("Set nullptr as replacement.\n");
      }
    }

    void MarkForDeletion() { SetReplacement(tracker_->jsgraph_->Dead()); }

    ~Scope() {
      if (replacement_ != tracker_->replacements_[current_node()] ||
          vobject_ != tracker_->virtual_objects_.Get(current_node())) {
        reduction()->set_value_changed();
      }
      tracker_->replacements_[current_node()] = replacement_;
      tracker_->virtual_objects_.Set(current_node(), vobject_);
    }

   private:
    EscapeAnalysisTracker* tracker_;
    EffectGraphReducer* reducer_;
    VirtualObject* vobject_ = nullptr;
    Node* replacement_ = nullptr;
  };

  Node* GetReplacementOf(Node* node) { return replacements_[node]; }
  Node* ResolveReplacement(Node* node) {
    if (Node* replacement = GetReplacementOf(node)) {
      return replacement;
    }
    return node;
  }

 private:
  friend class EscapeAnalysisResult;
  static const size_t kMaxTrackedObjects = 100;

  VirtualObject* NewVirtualObject(int size) {
    if (next_object_id_ >= kMaxTrackedObjects) return nullptr;
    return new (zone_)
        VirtualObject(&variable_states_, next_object_id_++, size);
  }

  SparseSidetable<VirtualObject*> virtual_objects_;
  Sidetable<Node*> replacements_;
  VariableTracker variable_states_;
  VirtualObject::Id next_object_id_ = 0;
  JSGraph* const jsgraph_;
  Zone* const zone_;

  DISALLOW_COPY_AND_ASSIGN(EscapeAnalysisTracker);
};

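// The {reduce} callback is run for every node reached by the DFS in
// {ReduceFrom}; whenever it reports a changed value or effect output, the
// affected uses are queued for revisitation until a fixed point is reached.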
EffectGraphReducer::EffectGraphReducer(
    Graph* graph, std::function<void(Node*, Reduction*)> reduce, Zone* zone)
    : graph_(graph),
      state_(graph, kNumStates),
      revisit_(zone),
      stack_(zone),
      reduce_(reduce) {}

void EffectGraphReducer::ReduceFrom(Node* node) {
  // Perform DFS and eagerly trigger revisitation as soon as possible.
  // A stack element {node, i} indicates that input i of node should be visited
  // next.
  DCHECK(stack_.empty());
  stack_.push({node, 0});
  while (!stack_.empty()) {
    Node* current = stack_.top().node;
    int& input_index = stack_.top().input_index;
    if (input_index < current->InputCount()) {
      Node* input = current->InputAt(input_index);
      input_index++;
      switch (state_.Get(input)) {
        case State::kVisited:
          // The input is already reduced.
          break;
        case State::kOnStack:
          // The input is on the DFS stack right now, so it will be revisited
          // later anyway.
          break;
        case State::kUnvisited:
        case State::kRevisit: {
          state_.Set(input, State::kOnStack);
          stack_.push({input, 0});
          break;
        }
      }
    } else {
      stack_.pop();
      Reduction reduction;
      reduce_(current, &reduction);
      for (Edge edge : current->use_edges()) {
        // Mark uses for revisitation.
        Node* use = edge.from();
        if (NodeProperties::IsEffectEdge(edge)) {
          if (reduction.effect_changed()) Revisit(use);
        } else {
          if (reduction.value_changed()) Revisit(use);
        }
      }
      state_.Set(current, State::kVisited);
      // Process the revisitation buffer immediately. This improves performance
      // of escape analysis. Using a stack for {revisit_} reverses the order in
      // which the revisitation happens. This also seems to improve performance.
      while (!revisit_.empty()) {
        Node* revisit = revisit_.top();
        if (state_.Get(revisit) == State::kRevisit) {
          state_.Set(revisit, State::kOnStack);
          stack_.push({revisit, 0});
        }
        revisit_.pop();
      }
    }
  }
}

void EffectGraphReducer::Revisit(Node* node) {
  if (state_.Get(node) == State::kVisited) {
    TRACE(" Queueing for revisit: %s#%d\n", node->op()->mnemonic(),
          node->id());
    state_.Set(node, State::kRevisit);
    revisit_.push(node);
  }
}

VariableTracker::VariableTracker(JSGraph* graph, EffectGraphReducer* reducer,
                                 Zone* zone)
    : zone_(zone),
      graph_(graph),
      table_(zone, State(zone)),
      buffer_(zone),
      reducer_(reducer) {}

VariableTracker::Scope::Scope(VariableTracker* states, Node* node,
                              Reduction* reduction)
    : ReduceScope(node, reduction),
      states_(states),
      current_state_(states->zone_) {
  switch (node->opcode()) {
    case IrOpcode::kEffectPhi:
      current_state_ = states_->MergeInputs(node);
      break;
    default:
      int effect_inputs = node->op()->EffectInputCount();
      if (effect_inputs == 1) {
        current_state_ =
            states_->table_.Get(NodeProperties::GetEffectInput(node, 0));
      } else {
        DCHECK_EQ(0, effect_inputs);
      }
  }
}

VariableTracker::Scope::~Scope() {
  if (!reduction()->effect_changed() &&
      states_->table_.Get(current_node()) != current_state_) {
    reduction()->set_effect_changed();
  }
  states_->table_.Set(current_node(), current_state_);
}

VariableTracker::State VariableTracker::MergeInputs(Node* effect_phi) {
  // A variable that is mapped to [nullptr] was not assigned a value on every
  // execution path to the current effect phi. Relying on the invariant that
  // every variable is initialized (at least with a sentinel like the Dead
  // node), this means that the variable initialization does not dominate the
  // current point. So for loop effect phis, we can keep nullptr for a variable
  // as long as the first input of the loop has nullptr for this variable. For
  // non-loop effect phis, we can even keep it nullptr as long as any input has
  // nullptr.
  DCHECK_EQ(IrOpcode::kEffectPhi, effect_phi->opcode());
  int arity = effect_phi->op()->EffectInputCount();
  Node* control = NodeProperties::GetControlInput(effect_phi, 0);
  TRACE("control: %s#%d\n", control->op()->mnemonic(), control->id());
  bool is_loop = control->opcode() == IrOpcode::kLoop;
  buffer_.reserve(arity + 1);

  State first_input = table_.Get(NodeProperties::GetEffectInput(effect_phi, 0));
  State result = first_input;
  for (std::pair<Variable, Node*> var_value : first_input) {
    if (Node* value = var_value.second) {
      Variable var = var_value.first;
      TRACE("var %i:\n", var.id_);
      buffer_.clear();
      buffer_.push_back(value);
      bool identical_inputs = true;
      int num_defined_inputs = 1;
      TRACE(" input 0: %s#%d\n", value->op()->mnemonic(), value->id());
      for (int i = 1; i < arity; ++i) {
        Node* next_value =
            table_.Get(NodeProperties::GetEffectInput(effect_phi, i)).Get(var);
        if (next_value != value) identical_inputs = false;
        if (next_value != nullptr) {
          num_defined_inputs++;
          TRACE(" input %i: %s#%d\n", i, next_value->op()->mnemonic(),
                next_value->id());
        } else {
          TRACE(" input %i: nullptr\n", i);
        }
        buffer_.push_back(next_value);
      }

      Node* old_value = table_.Get(effect_phi).Get(var);
      if (old_value) {
        TRACE(" old: %s#%d\n", old_value->op()->mnemonic(), old_value->id());
      } else {
        TRACE(" old: nullptr\n");
      }
      // Reuse a previously created phi node if possible.
      if (old_value && old_value->opcode() == IrOpcode::kPhi &&
          NodeProperties::GetControlInput(old_value, 0) == control) {
        // Since a phi node can never dominate its control node,
        // [old_value] cannot originate from the inputs. Thus [old_value]
        // must have been created by a previous reduction of this [effect_phi].
        for (int i = 0; i < arity; ++i) {
          NodeProperties::ReplaceValueInput(
              old_value, buffer_[i] ? buffer_[i] : graph_->Dead(), i);
          // This change cannot affect the rest of the reducer, so there is no
          // need to trigger additional revisitations.
        }
        result.Set(var, old_value);
      } else {
        if (num_defined_inputs == 1 && is_loop) {
          // For loop effect phis, the variable initialization dominates iff it
          // dominates the first input.
          DCHECK_EQ(2, arity);
          DCHECK_EQ(value, buffer_[0]);
          result.Set(var, value);
        } else if (num_defined_inputs < arity) {
          // If the variable is undefined on some input of this non-loop effect
          // phi, then its initialization does not dominate this point.
          result.Set(var, nullptr);
        } else {
          DCHECK_EQ(num_defined_inputs, arity);
          // We only create a phi if the values are different.
          if (identical_inputs) {
            result.Set(var, value);
          } else {
            TRACE("Creating new phi\n");
            buffer_.push_back(control);
            Node* phi = graph_->graph()->NewNode(
                graph_->common()->Phi(MachineRepresentation::kTagged, arity),
                arity + 1, &buffer_.front());
            // TODO(tebbi): Computing precise types here is tricky, because of
            // the necessary revisitations. If we really need this, we should
            // probably do it afterwards.
            NodeProperties::SetType(phi, Type::Any());
            reducer_->AddRoot(phi);
            result.Set(var, phi);
          }
        }
      }
#ifdef DEBUG
      if (Node* result_node = result.Get(var)) {
        TRACE(" result: %s#%d\n", result_node->op()->mnemonic(),
              result_node->id());
      } else {
        TRACE(" result: nullptr\n");
      }
#endif
    }
  }
  return result;
}

namespace {

int OffsetOfFieldAccess(const Operator* op) {
  DCHECK(op->opcode() == IrOpcode::kLoadField ||
         op->opcode() == IrOpcode::kStoreField);
  FieldAccess access = FieldAccessOf(op);
  return access.offset;
}

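// An element access can only be mapped to a field of a virtual object if the
// index is a compile-time constant; otherwise the offset is unknown and
// Nothing<int>() is returned.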
Maybe<int> OffsetOfElementsAccess(const Operator* op, Node* index_node) {
  DCHECK(op->opcode() == IrOpcode::kLoadElement ||
         op->opcode() == IrOpcode::kStoreElement);
  Type index_type = NodeProperties::GetType(index_node);
  if (!index_type.Is(Type::OrderedNumber())) return Nothing<int>();
  double max = index_type.Max();
  double min = index_type.Min();
  int index = static_cast<int>(min);
  if (!(index == min && index == max)) return Nothing<int>();
  ElementAccess access = ElementAccessOf(op);
  DCHECK_GE(ElementSizeLog2Of(access.machine_type.representation()),
            kPointerSizeLog2);
  return Just(access.header_size + (index << ElementSizeLog2Of(
                                        access.machine_type.representation())));
}

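// Builds the value equivalent of a map check on an already known map value:
// a chain of {ReferenceEqual}/{Select} nodes over the candidate maps. This
// avoids re-loading the map from an object that may be eliminated.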
Node* LowerCompareMapsWithoutLoad(Node* checked_map,
                                  ZoneHandleSet<Map> const& checked_against,
                                  JSGraph* jsgraph) {
  Node* true_node = jsgraph->TrueConstant();
  Node* false_node = jsgraph->FalseConstant();
  Node* replacement = false_node;
  for (Handle<Map> map : checked_against) {
    Node* map_node = jsgraph->HeapConstant(map);
    // We cannot create a HeapConstant type here as we are off-thread.
    NodeProperties::SetType(map_node, Type::Internal());
    Node* comparison = jsgraph->graph()->NewNode(
        jsgraph->simplified()->ReferenceEqual(), checked_map, map_node);
    NodeProperties::SetType(comparison, Type::Boolean());
    if (replacement == false_node) {
      replacement = comparison;
    } else {
      replacement = jsgraph->graph()->NewNode(
          jsgraph->common()->Select(MachineRepresentation::kTaggedPointer),
          comparison, true_node, replacement);
      NodeProperties::SetType(replacement, Type::Boolean());
    }
  }
  return replacement;
}

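// The central dispatch of the escape analysis: performs the reduction for a
// single node. All inputs, virtual objects and replacements are accessed
// through {current} so that the tracker's invariants are maintained.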
void ReduceNode(const Operator* op, EscapeAnalysisTracker::Scope* current,
                JSGraph* jsgraph) {
  switch (op->opcode()) {
    case IrOpcode::kAllocate: {
      NumberMatcher size(current->ValueInput(0));
      if (!size.HasValue()) break;
      int size_int = static_cast<int>(size.Value());
      if (size_int != size.Value()) break;
      if (const VirtualObject* vobject = current->InitVirtualObject(size_int)) {
        // Initialize with dead nodes as a sentinel for uninitialized memory.
        for (Variable field : *vobject) {
          current->Set(field, jsgraph->Dead());
        }
      }
      break;
    }
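    // A {FinishRegion} node forwards its value input, so it shares that
    // input's virtual object, if one is tracked.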
    case IrOpcode::kFinishRegion:
      current->SetVirtualObject(current->ValueInput(0));
      break;
    case IrOpcode::kStoreField: {
      Node* object = current->ValueInput(0);
      Node* value = current->ValueInput(1);
      const VirtualObject* vobject = current->GetVirtualObject(object);
      Variable var;
      if (vobject && !vobject->HasEscaped() &&
          vobject->FieldAt(OffsetOfFieldAccess(op)).To(&var)) {
        current->Set(var, value);
        current->MarkForDeletion();
      } else {
        current->SetEscaped(object);
        current->SetEscaped(value);
      }
      break;
    }
    case IrOpcode::kStoreElement: {
      Node* object = current->ValueInput(0);
      Node* index = current->ValueInput(1);
      Node* value = current->ValueInput(2);
      const VirtualObject* vobject = current->GetVirtualObject(object);
      int offset;
      Variable var;
      if (vobject && !vobject->HasEscaped() &&
          OffsetOfElementsAccess(op, index).To(&offset) &&
          vobject->FieldAt(offset).To(&var)) {
        current->Set(var, value);
        current->MarkForDeletion();
      } else {
        current->SetEscaped(value);
        current->SetEscaped(object);
      }
      break;
    }
    case IrOpcode::kLoadField: {
      Node* object = current->ValueInput(0);
      const VirtualObject* vobject = current->GetVirtualObject(object);
      Variable var;
      Node* value;
      if (vobject && !vobject->HasEscaped() &&
          vobject->FieldAt(OffsetOfFieldAccess(op)).To(&var) &&
          current->Get(var).To(&value)) {
        current->SetReplacement(value);
      } else {
        current->SetEscaped(object);
      }
      break;
    }
    case IrOpcode::kLoadElement: {
      Node* object = current->ValueInput(0);
      Node* index = current->ValueInput(1);
      const VirtualObject* vobject = current->GetVirtualObject(object);
      int offset;
      Variable var;
      Node* value;
      if (vobject && !vobject->HasEscaped() &&
          OffsetOfElementsAccess(op, index).To(&offset) &&
          vobject->FieldAt(offset).To(&var) && current->Get(var).To(&value)) {
        current->SetReplacement(value);
      } else {
        current->SetEscaped(object);
      }
      break;
    }
    case IrOpcode::kTypeGuard: {
      current->SetVirtualObject(current->ValueInput(0));
      break;
    }
    case IrOpcode::kReferenceEqual: {
      Node* left = current->ValueInput(0);
      Node* right = current->ValueInput(1);
      const VirtualObject* left_object = current->GetVirtualObject(left);
      const VirtualObject* right_object = current->GetVirtualObject(right);
      Node* replacement = nullptr;
      if (left_object && !left_object->HasEscaped()) {
        if (right_object && !right_object->HasEscaped() &&
            left_object->id() == right_object->id()) {
          replacement = jsgraph->TrueConstant();
        } else {
          replacement = jsgraph->FalseConstant();
        }
      } else if (right_object && !right_object->HasEscaped()) {
        replacement = jsgraph->FalseConstant();
      }
      if (replacement) {
        // TODO(tebbi) This is a workaround for uninhabited types. If we
        // replaced a value of uninhabited type with a constant, we would
        // widen the type of the node. This could produce inconsistent
        // types (which might confuse representation selection). We get
        // around this by refusing to constant-fold and escape-analyze
        // if the type is not inhabited.
        if (!NodeProperties::GetType(left).IsNone() &&
            !NodeProperties::GetType(right).IsNone()) {
          current->SetReplacement(replacement);
        } else {
          current->SetEscaped(left);
          current->SetEscaped(right);
        }
      }
      break;
    }
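    // A {CheckMaps} can be dropped if the tracked map of the virtual object is
    // a known constant contained in the set of checked maps.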
    case IrOpcode::kCheckMaps: {
      CheckMapsParameters params = CheckMapsParametersOf(op);
      Node* checked = current->ValueInput(0);
      const VirtualObject* vobject = current->GetVirtualObject(checked);
      Variable map_field;
      Node* map;
      if (vobject && !vobject->HasEscaped() &&
          vobject->FieldAt(HeapObject::kMapOffset).To(&map_field) &&
          current->Get(map_field).To(&map)) {
        if (map) {
          Type const map_type = NodeProperties::GetType(map);
          if (map_type.IsHeapConstant() &&
              params.maps().contains(
                  bit_cast<Handle<Map>>(map_type.AsHeapConstant()->Value()))) {
            current->MarkForDeletion();
            break;
          }
        } else {
          // If the variable has no value, we have not reached the fixed-point
          // yet.
          break;
        }
      }
      current->SetEscaped(checked);
      break;
    }
    case IrOpcode::kCompareMaps: {
      Node* object = current->ValueInput(0);
      const VirtualObject* vobject = current->GetVirtualObject(object);
      Variable map_field;
      Node* object_map;
      if (vobject && !vobject->HasEscaped() &&
          vobject->FieldAt(HeapObject::kMapOffset).To(&map_field) &&
          current->Get(map_field).To(&object_map)) {
        if (object_map) {
          current->SetReplacement(LowerCompareMapsWithoutLoad(
              object_map, CompareMapsParametersOf(op).maps(), jsgraph));
          break;
        } else {
          // If the variable has no value, we have not reached the fixed-point
          // yet.
          break;
        }
      }
      current->SetEscaped(object);
      break;
    }
    case IrOpcode::kCheckHeapObject: {
      Node* checked = current->ValueInput(0);
      switch (checked->opcode()) {
        case IrOpcode::kAllocate:
        case IrOpcode::kFinishRegion:
        case IrOpcode::kHeapConstant:
          current->SetReplacement(checked);
          break;
        default:
          current->SetEscaped(checked);
          break;
      }
      break;
    }
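    // A {MapGuard} only conveys map information to later passes; for a tracked
    // object that has not escaped, it can simply be deleted.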
    case IrOpcode::kMapGuard: {
      Node* object = current->ValueInput(0);
      const VirtualObject* vobject = current->GetVirtualObject(object);
      if (vobject && !vobject->HasEscaped()) {
        current->MarkForDeletion();
      }
      break;
    }
    case IrOpcode::kStateValues:
    case IrOpcode::kFrameState:
      // These uses are always safe.
      break;
    default: {
      // For unknown nodes, treat all value inputs as escaping.
      int value_input_count = op->ValueInputCount();
      for (int i = 0; i < value_input_count; ++i) {
        Node* input = current->ValueInput(i);
        current->SetEscaped(input);
      }
      if (OperatorProperties::HasContextInput(op)) {
        current->SetEscaped(current->ContextInput());
      }
      break;
    }
  }
}

}  // namespace

void EscapeAnalysis::Reduce(Node* node, Reduction* reduction) {
  const Operator* op = node->op();
  TRACE("Reducing %s#%d\n", op->mnemonic(), node->id());

  EscapeAnalysisTracker::Scope current(this, tracker_, node, reduction);
  ReduceNode(op, &current, jsgraph());
}

EscapeAnalysis::EscapeAnalysis(JSGraph* jsgraph, Zone* zone)
    : EffectGraphReducer(
          jsgraph->graph(),
          [this](Node* node, Reduction* reduction) { Reduce(node, reduction); },
          zone),
      tracker_(new (zone) EscapeAnalysisTracker(jsgraph, this, zone)),
      jsgraph_(jsgraph) {}

Node* EscapeAnalysisResult::GetReplacementOf(Node* node) {
  Node* replacement = tracker_->GetReplacementOf(node);
  // Replacements cannot have replacements. This is important to ensure
  // re-visitation: If a replacement is replaced, then all nodes accessing
  // the replacement have to be updated.
  if (replacement) DCHECK_NULL(tracker_->GetReplacementOf(replacement));
  return replacement;
}

Node* EscapeAnalysisResult::GetVirtualObjectField(const VirtualObject* vobject,
                                                  int field, Node* effect) {
  return tracker_->variable_states_.Get(vobject->FieldAt(field).FromJust(),
                                        effect);
}

const VirtualObject* EscapeAnalysisResult::GetVirtualObject(Node* node) {
  return tracker_->virtual_objects_.Get(node);
}

VirtualObject::VirtualObject(VariableTracker* var_states, VirtualObject::Id id,
                             int size)
    : Dependable(var_states->zone()), id_(id), fields_(var_states->zone()) {
  DCHECK_EQ(0, size % kPointerSize);
  TRACE("Creating VirtualObject id:%d size:%d\n", id, size);
  int num_fields = size / kPointerSize;
  fields_.reserve(num_fields);
  for (int i = 0; i < num_fields; ++i) {
    fields_.push_back(var_states->NewVariable());
  }
}

#undef TRACE

}  // namespace compiler
}  // namespace internal
}  // namespace v8
803