// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/escape-analysis.h"

#include <limits>

#include "src/base/flags.h"
#include "src/bootstrapper.h"
#include "src/compilation-dependencies.h"
#include "src/compiler/common-operator.h"
#include "src/compiler/graph-reducer.h"
#include "src/compiler/js-operator.h"
#include "src/compiler/node.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/node-properties.h"
#include "src/compiler/operator-properties.h"
#include "src/compiler/simplified-operator.h"
#include "src/objects-inl.h"
#include "src/type-cache.h"

namespace v8 {
namespace internal {
namespace compiler {

typedef NodeId Alias;

#ifdef DEBUG
#define TRACE(...)                                    \
  do {                                                \
    if (FLAG_trace_turbo_escape) PrintF(__VA_ARGS__); \
  } while (false)
#else
#define TRACE(...)
#endif

// EscapeStatusAnalysis determines for each allocation whether it escapes.
class EscapeStatusAnalysis : public ZoneObject {
 public:
  enum Status {
    kUnknown = 0u,
    kTracked = 1u << 0,
    kEscaped = 1u << 1,
    kOnStack = 1u << 2,
    kVisited = 1u << 3,
    // A node is dangling if it is a load of some kind and does not have
    // an effect successor.
    kDanglingComputed = 1u << 4,
    kDangling = 1u << 5,
    // A node is an effect branch point if it has more than one non-dangling
    // effect successor.
    kBranchPointComputed = 1u << 6,
    kBranchPoint = 1u << 7,
    kInQueue = 1u << 8
  };
  typedef base::Flags<Status, uint16_t> StatusFlags;

  void RunStatusAnalysis();

  bool IsVirtual(Node* node);
  bool IsEscaped(Node* node);
  bool IsAllocation(Node* node);

  bool IsInQueue(NodeId id);
  void SetInQueue(NodeId id, bool on_stack);

  void DebugPrint();

  EscapeStatusAnalysis(EscapeAnalysis* object_analysis, Graph* graph,
                       Zone* zone);
  void EnqueueForStatusAnalysis(Node* node);
  bool SetEscaped(Node* node);
  bool IsEffectBranchPoint(Node* node);
  bool IsDanglingEffectNode(Node* node);
  void ResizeStatusVector();
  size_t GetStatusVectorSize();
  bool IsVirtual(NodeId id);

  Graph* graph() const { return graph_; }
  void AssignAliases();
  Alias GetAlias(NodeId id) const { return aliases_[id]; }
  const ZoneVector<Alias>& GetAliasMap() const { return aliases_; }
  Alias AliasCount() const { return next_free_alias_; }
  static const Alias kNotReachable;
  static const Alias kUntrackable;

  bool IsNotReachable(Node* node);

 private:
  void Process(Node* node);
  void ProcessAllocate(Node* node);
  void ProcessFinishRegion(Node* node);
  void ProcessStoreField(Node* node);
  void ProcessStoreElement(Node* node);
  bool CheckUsesForEscape(Node* node, bool phi_escaping = false) {
    return CheckUsesForEscape(node, node, phi_escaping);
  }
  bool CheckUsesForEscape(Node* node, Node* rep, bool phi_escaping = false);
  void RevisitUses(Node* node);
  void RevisitInputs(Node* node);

  Alias NextAlias() { return next_free_alias_++; }

  bool HasEntry(Node* node);

  bool IsAllocationPhi(Node* node);

  ZoneVector<Node*> stack_;
  EscapeAnalysis* object_analysis_;
  Graph* const graph_;
  ZoneVector<StatusFlags> status_;
  Alias next_free_alias_;
  ZoneVector<Node*> status_stack_;
  ZoneVector<Alias> aliases_;

  DISALLOW_COPY_AND_ASSIGN(EscapeStatusAnalysis);
};

DEFINE_OPERATORS_FOR_FLAGS(EscapeStatusAnalysis::StatusFlags)

const Alias EscapeStatusAnalysis::kNotReachable =
    std::numeric_limits<Alias>::max();
const Alias EscapeStatusAnalysis::kUntrackable =
    std::numeric_limits<Alias>::max() - 1;

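// A VirtualObject tracks the state of a single (not yet escaped) allocation:
// for every pointer-sized field it records the node whose value was last
// stored there (nullptr if unknown), and whether that node is a phi created
// by this analysis.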
class VirtualObject : public ZoneObject {
 public:
  enum Status {
    kInitial = 0,
    kTracked = 1u << 0,
    kInitialized = 1u << 1,
    kCopyRequired = 1u << 2,
  };
  typedef base::Flags<Status, unsigned char> StatusFlags;

  VirtualObject(NodeId id, VirtualState* owner, Zone* zone)
      : id_(id),
        status_(kInitial),
        fields_(zone),
        phi_(zone),
        object_state_(nullptr),
        owner_(owner) {}

  VirtualObject(VirtualState* owner, const VirtualObject& other)
      : id_(other.id_),
        status_(other.status_ & ~kCopyRequired),
        fields_(other.fields_),
        phi_(other.phi_),
        object_state_(other.object_state_),
        owner_(owner) {}

  VirtualObject(NodeId id, VirtualState* owner, Zone* zone,
                size_t field_number, bool initialized)
      : id_(id),
        status_(kTracked | (initialized ? kInitialized : kInitial)),
        fields_(zone),
        phi_(zone),
        object_state_(nullptr),
        owner_(owner) {
    fields_.resize(field_number);
    phi_.resize(field_number, false);
  }

  Node* GetField(size_t offset) { return fields_[offset]; }

  bool IsCreatedPhi(size_t offset) { return phi_[offset]; }

  void SetField(size_t offset, Node* node, bool created_phi = false) {
    fields_[offset] = node;
    phi_[offset] = created_phi;
  }
  bool IsTracked() const { return status_ & kTracked; }
  bool IsInitialized() const { return status_ & kInitialized; }
  bool SetInitialized() { return status_ |= kInitialized; }
  VirtualState* owner() const { return owner_; }

  Node** fields_array() { return &fields_.front(); }
  size_t field_count() { return fields_.size(); }
  bool ResizeFields(size_t field_count) {
    if (field_count > fields_.size()) {
      fields_.resize(field_count);
      phi_.resize(field_count);
      return true;
    }
    return false;
  }
  void ClearAllFields() {
    for (size_t i = 0; i < fields_.size(); ++i) {
      fields_[i] = nullptr;
      phi_[i] = false;
    }
  }
  bool AllFieldsClear() {
    for (size_t i = 0; i < fields_.size(); ++i) {
      if (fields_[i] != nullptr) {
        return false;
      }
    }
    return true;
  }
  bool UpdateFrom(const VirtualObject& other);
  bool MergeFrom(MergeCache* cache, Node* at, Graph* graph,
                 CommonOperatorBuilder* common);
  void SetObjectState(Node* node) { object_state_ = node; }
  Node* GetObjectState() const { return object_state_; }
  bool IsCopyRequired() const { return status_ & kCopyRequired; }
  void SetCopyRequired() { status_ |= kCopyRequired; }
  bool NeedCopyForModification() {
    if (!IsCopyRequired() || !IsInitialized()) {
      return false;
    }
    return true;
  }

  NodeId id() const { return id_; }
  void id(NodeId id) { id_ = id; }

 private:
  bool MergeFields(size_t i, Node* at, MergeCache* cache, Graph* graph,
                   CommonOperatorBuilder* common);

  NodeId id_;
  StatusFlags status_;
  ZoneVector<Node*> fields_;
  ZoneVector<bool> phi_;
  Node* object_state_;
  VirtualState* owner_;

  DISALLOW_COPY_AND_ASSIGN(VirtualObject);
};

DEFINE_OPERATORS_FOR_FLAGS(VirtualObject::StatusFlags)

bool VirtualObject::UpdateFrom(const VirtualObject& other) {
  bool changed = status_ != other.status_;
  status_ = other.status_;
  phi_ = other.phi_;
  if (fields_.size() != other.fields_.size()) {
    fields_ = other.fields_;
    return true;
  }
  for (size_t i = 0; i < fields_.size(); ++i) {
    if (fields_[i] != other.fields_[i]) {
      changed = true;
      fields_[i] = other.fields_[i];
    }
  }
  return changed;
}

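// A VirtualState maps aliases to the virtual objects live at a particular
// effect node. States can be shared along linear effect chains and are
// copied lazily (copy-on-write) when they need to be modified.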
class VirtualState : public ZoneObject {
 public:
  VirtualState(Node* owner, Zone* zone, size_t size)
      : info_(size, nullptr, zone), owner_(owner) {}

  VirtualState(Node* owner, const VirtualState& state)
      : info_(state.info_.size(), nullptr, state.info_.get_allocator().zone()),
        owner_(owner) {
    for (size_t i = 0; i < info_.size(); ++i) {
      if (state.info_[i]) {
        info_[i] = state.info_[i];
      }
    }
  }

  VirtualObject* VirtualObjectFromAlias(size_t alias);
  void SetVirtualObject(Alias alias, VirtualObject* state);
  bool UpdateFrom(VirtualState* state, Zone* zone);
  bool MergeFrom(MergeCache* cache, Zone* zone, Graph* graph,
                 CommonOperatorBuilder* common, Node* at);
  size_t size() const { return info_.size(); }
  Node* owner() const { return owner_; }
  VirtualObject* Copy(VirtualObject* obj, Alias alias);
  void SetCopyRequired() {
    for (VirtualObject* obj : info_) {
      if (obj) obj->SetCopyRequired();
    }
  }

 private:
  ZoneVector<VirtualObject*> info_;
  Node* owner_;

  DISALLOW_COPY_AND_ASSIGN(VirtualState);
};

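// MergeCache is scratch storage used while merging states at (effect) phis:
// it holds the incoming states, the virtual objects found for one alias,
// and the field values collected from those objects.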
class MergeCache : public ZoneObject {
 public:
  explicit MergeCache(Zone* zone)
      : states_(zone), objects_(zone), fields_(zone) {
    states_.reserve(5);
    objects_.reserve(5);
    fields_.reserve(5);
  }
  ZoneVector<VirtualState*>& states() { return states_; }
  ZoneVector<VirtualObject*>& objects() { return objects_; }
  ZoneVector<Node*>& fields() { return fields_; }
  void Clear() {
    states_.clear();
    objects_.clear();
    fields_.clear();
  }
  size_t LoadVirtualObjectsFromStatesFor(Alias alias);
  void LoadVirtualObjectsForFieldsFrom(VirtualState* state,
                                       const ZoneVector<Alias>& aliases);
  Node* GetFields(size_t pos);

 private:
  ZoneVector<VirtualState*> states_;
  ZoneVector<VirtualObject*> objects_;
  ZoneVector<Node*> fields_;

  DISALLOW_COPY_AND_ASSIGN(MergeCache);
};

size_t MergeCache::LoadVirtualObjectsFromStatesFor(Alias alias) {
  objects_.clear();
  DCHECK_GT(states_.size(), 0u);
  size_t min = std::numeric_limits<size_t>::max();
  for (VirtualState* state : states_) {
    if (VirtualObject* obj = state->VirtualObjectFromAlias(alias)) {
      objects_.push_back(obj);
      min = std::min(obj->field_count(), min);
    }
  }
  return min;
}

void MergeCache::LoadVirtualObjectsForFieldsFrom(
    VirtualState* state, const ZoneVector<Alias>& aliases) {
  objects_.clear();
  size_t max_alias = state->size();
  for (Node* field : fields_) {
    Alias alias = aliases[field->id()];
    if (alias >= max_alias) continue;
    if (VirtualObject* obj = state->VirtualObjectFromAlias(alias)) {
      objects_.push_back(obj);
    }
  }
}

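// Collects into fields() the value of field {pos} from every cached object
// that has such a field, and returns the common value if all objects agree,
// nullptr otherwise.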
Node* MergeCache::GetFields(size_t pos) {
  fields_.clear();
  Node* rep = pos >= objects_.front()->field_count()
                  ? nullptr
                  : objects_.front()->GetField(pos);
  for (VirtualObject* obj : objects_) {
    if (pos >= obj->field_count()) continue;
    Node* field = obj->GetField(pos);
    if (field) {
      fields_.push_back(field);
    }
    if (field != rep) {
      rep = nullptr;
    }
  }
  return rep;
}

VirtualObject* VirtualState::Copy(VirtualObject* obj, Alias alias) {
  if (obj->owner() == this) return obj;
  VirtualObject* new_obj =
      new (info_.get_allocator().zone()) VirtualObject(this, *obj);
  TRACE("At state %p, alias @%d (#%d), copying virtual object from %p to %p\n",
        static_cast<void*>(this), alias, obj->id(), static_cast<void*>(obj),
        static_cast<void*>(new_obj));
  info_[alias] = new_obj;
  return new_obj;
}

VirtualObject* VirtualState::VirtualObjectFromAlias(size_t alias) {
  return info_[alias];
}

void VirtualState::SetVirtualObject(Alias alias, VirtualObject* obj) {
  info_[alias] = obj;
}

bool VirtualState::UpdateFrom(VirtualState* from, Zone* zone) {
  if (from == this) return false;
  bool changed = false;
  for (Alias alias = 0; alias < size(); ++alias) {
    VirtualObject* ls = VirtualObjectFromAlias(alias);
    VirtualObject* rs = from->VirtualObjectFromAlias(alias);

    if (ls == rs || rs == nullptr) continue;

    if (ls == nullptr) {
      ls = new (zone) VirtualObject(this, *rs);
      SetVirtualObject(alias, ls);
      changed = true;
      continue;
    }

    TRACE("  Updating fields of @%d\n", alias);

    changed = ls->UpdateFrom(*rs) || changed;
  }
  return changed;
}

namespace {

bool IsEquivalentPhi(Node* node1, Node* node2) {
  if (node1 == node2) return true;
  if (node1->opcode() != IrOpcode::kPhi || node2->opcode() != IrOpcode::kPhi ||
      node1->op()->ValueInputCount() != node2->op()->ValueInputCount()) {
    return false;
  }
  for (int i = 0; i < node1->op()->ValueInputCount(); ++i) {
    Node* input1 = NodeProperties::GetValueInput(node1, i);
    Node* input2 = NodeProperties::GetValueInput(node2, i);
    if (!IsEquivalentPhi(input1, input2)) {
      return false;
    }
  }
  return true;
}

bool IsEquivalentPhi(Node* phi, ZoneVector<Node*>& inputs) {
  if (phi->opcode() != IrOpcode::kPhi) return false;
  if (static_cast<size_t>(phi->op()->ValueInputCount()) != inputs.size()) {
    return false;
  }
  for (size_t i = 0; i < inputs.size(); ++i) {
    Node* input = NodeProperties::GetValueInput(phi, static_cast<int>(i));
    if (!IsEquivalentPhi(input, inputs[i])) {
      return false;
    }
  }
  return true;
}

}  // namespace

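// Merges the field values collected in cache->fields() into field {i} by
// creating a phi at the control input of {at}, or by updating the inputs of
// a phi this analysis created earlier.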
bool VirtualObject::MergeFields(size_t i, Node* at, MergeCache* cache,
                                Graph* graph, CommonOperatorBuilder* common) {
  bool changed = false;
  int value_input_count = static_cast<int>(cache->fields().size());
  Node* rep = GetField(i);
  if (!rep || !IsCreatedPhi(i)) {
    Node* control = NodeProperties::GetControlInput(at);
    cache->fields().push_back(control);
    Node* phi = graph->NewNode(
        common->Phi(MachineRepresentation::kTagged, value_input_count),
        value_input_count + 1, &cache->fields().front());
    SetField(i, phi, true);
#ifdef DEBUG
    if (FLAG_trace_turbo_escape) {
      PrintF("    Creating Phi #%d as merge of", phi->id());
      for (int i = 0; i < value_input_count; i++) {
        PrintF(" #%d (%s)", cache->fields()[i]->id(),
               cache->fields()[i]->op()->mnemonic());
      }
      PrintF("\n");
    }
#endif
    changed = true;
  } else {
    DCHECK(rep->opcode() == IrOpcode::kPhi);
    for (int n = 0; n < value_input_count; ++n) {
      Node* old = NodeProperties::GetValueInput(rep, n);
      if (old != cache->fields()[n]) {
        changed = true;
        NodeProperties::ReplaceValueInput(rep, cache->fields()[n], n);
      }
    }
  }
  return changed;
}

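// Merges the objects collected in {cache} into this object at merge point
// {at}: fields on which all inputs agree are kept, disagreeing fields get a
// phi (if possible) or are cleared.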
bool VirtualObject::MergeFrom(MergeCache* cache, Node* at, Graph* graph,
                              CommonOperatorBuilder* common) {
  DCHECK(at->opcode() == IrOpcode::kEffectPhi ||
         at->opcode() == IrOpcode::kPhi);
  bool changed = false;
  for (size_t i = 0; i < field_count(); ++i) {
    if (Node* field = cache->GetFields(i)) {
      changed = changed || GetField(i) != field;
      SetField(i, field);
      TRACE("    Field %zu agrees on rep #%d\n", i, field->id());
    } else {
      int arity = at->opcode() == IrOpcode::kEffectPhi
                      ? at->op()->EffectInputCount()
                      : at->op()->ValueInputCount();
      if (cache->fields().size() == static_cast<size_t>(arity)) {
        changed = MergeFields(i, at, cache, graph, common) || changed;
      } else {
        if (GetField(i) != nullptr) {
          TRACE("    Field %zu cleared\n", i);
          changed = true;
        }
        SetField(i, nullptr);
      }
    }
  }
  return changed;
}

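// Merges the incoming states in cache->states() into this state at {at}. An
// alias keeps its virtual object only if the object is present in every
// incoming state; otherwise the object is dropped from this state.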
bool VirtualState::MergeFrom(MergeCache* cache, Zone* zone, Graph* graph,
                             CommonOperatorBuilder* common, Node* at) {
  DCHECK_GT(cache->states().size(), 0u);
  bool changed = false;
  for (Alias alias = 0; alias < size(); ++alias) {
    cache->objects().clear();
    VirtualObject* mergeObject = VirtualObjectFromAlias(alias);
    bool copy_merge_object = false;
    size_t fields = std::numeric_limits<size_t>::max();
    for (VirtualState* state : cache->states()) {
      if (VirtualObject* obj = state->VirtualObjectFromAlias(alias)) {
        cache->objects().push_back(obj);
        if (mergeObject == obj) {
          copy_merge_object = true;
        }
        fields = std::min(obj->field_count(), fields);
      }
    }
    if (cache->objects().size() == cache->states().size()) {
      if (!mergeObject) {
        VirtualObject* obj = new (zone)
            VirtualObject(cache->objects().front()->id(), this, zone, fields,
                          cache->objects().front()->IsInitialized());
        SetVirtualObject(alias, obj);
        mergeObject = obj;
        changed = true;
      } else if (copy_merge_object) {
        VirtualObject* obj = new (zone) VirtualObject(this, *mergeObject);
        SetVirtualObject(alias, obj);
        mergeObject = obj;
        changed = true;
      } else {
        changed = mergeObject->ResizeFields(fields) || changed;
      }
#ifdef DEBUG
      if (FLAG_trace_turbo_escape) {
        PrintF("  Alias @%d, merging into %p virtual objects", alias,
               static_cast<void*>(mergeObject));
        for (size_t i = 0; i < cache->objects().size(); i++) {
          PrintF(" %p", static_cast<void*>(cache->objects()[i]));
        }
        PrintF("\n");
      }
#endif  // DEBUG
      changed = mergeObject->MergeFrom(cache, at, graph, common) || changed;
    } else {
      if (mergeObject) {
        TRACE("  Alias %d, virtual object removed\n", alias);
        changed = true;
      }
      SetVirtualObject(alias, nullptr);
    }
  }
  return changed;
}

EscapeStatusAnalysis::EscapeStatusAnalysis(EscapeAnalysis* object_analysis,
                                           Graph* graph, Zone* zone)
    : stack_(zone),
      object_analysis_(object_analysis),
      graph_(graph),
      status_(zone),
      next_free_alias_(0),
      status_stack_(zone),
      aliases_(zone) {}

bool EscapeStatusAnalysis::HasEntry(Node* node) {
  return status_[node->id()] & (kTracked | kEscaped);
}

bool EscapeStatusAnalysis::IsVirtual(Node* node) {
  return IsVirtual(node->id());
}

bool EscapeStatusAnalysis::IsVirtual(NodeId id) {
  return (status_[id] & kTracked) && !(status_[id] & kEscaped);
}

bool EscapeStatusAnalysis::IsEscaped(Node* node) {
  return status_[node->id()] & kEscaped;
}

bool EscapeStatusAnalysis::IsAllocation(Node* node) {
  return node->opcode() == IrOpcode::kAllocate ||
         node->opcode() == IrOpcode::kFinishRegion;
}

bool EscapeStatusAnalysis::SetEscaped(Node* node) {
  bool changed = !(status_[node->id()] & kEscaped);
  status_[node->id()] |= kEscaped | kTracked;
  return changed;
}

bool EscapeStatusAnalysis::IsInQueue(NodeId id) {
  return status_[id] & kInQueue;
}

void EscapeStatusAnalysis::SetInQueue(NodeId id, bool on_stack) {
  if (on_stack) {
    status_[id] |= kInQueue;
  } else {
    status_[id] &= ~kInQueue;
  }
}

void EscapeStatusAnalysis::ResizeStatusVector() {
  if (status_.size() <= graph()->NodeCount()) {
    status_.resize(graph()->NodeCount() * 1.1, kUnknown);
  }
}

size_t EscapeStatusAnalysis::GetStatusVectorSize() { return status_.size(); }

void EscapeStatusAnalysis::RunStatusAnalysis() {
  ResizeStatusVector();
  while (!status_stack_.empty()) {
    Node* node = status_stack_.back();
    status_stack_.pop_back();
    status_[node->id()] &= ~kOnStack;
    Process(node);
    status_[node->id()] |= kVisited;
  }
}

void EscapeStatusAnalysis::EnqueueForStatusAnalysis(Node* node) {
  DCHECK_NOT_NULL(node);
  if (!(status_[node->id()] & kOnStack)) {
    status_stack_.push_back(node);
    status_[node->id()] |= kOnStack;
  }
}

void EscapeStatusAnalysis::RevisitInputs(Node* node) {
  for (Edge edge : node->input_edges()) {
    Node* input = edge.to();
    if (!(status_[input->id()] & kOnStack)) {
      status_stack_.push_back(input);
      status_[input->id()] |= kOnStack;
    }
  }
}

void EscapeStatusAnalysis::RevisitUses(Node* node) {
  for (Edge edge : node->use_edges()) {
    Node* use = edge.from();
    if (!(status_[use->id()] & kOnStack) && !IsNotReachable(use)) {
      status_stack_.push_back(use);
      status_[use->id()] |= kOnStack;
    }
  }
}

void EscapeStatusAnalysis::Process(Node* node) {
  switch (node->opcode()) {
    case IrOpcode::kAllocate:
      ProcessAllocate(node);
      break;
    case IrOpcode::kFinishRegion:
      ProcessFinishRegion(node);
      break;
    case IrOpcode::kStoreField:
      ProcessStoreField(node);
      break;
    case IrOpcode::kStoreElement:
      ProcessStoreElement(node);
      break;
    case IrOpcode::kLoadField:
    case IrOpcode::kLoadElement: {
      if (Node* rep = object_analysis_->GetReplacement(node)) {
        if (IsAllocation(rep) && CheckUsesForEscape(node, rep)) {
          RevisitInputs(rep);
          RevisitUses(rep);
        }
      }
      RevisitUses(node);
      break;
    }
    case IrOpcode::kPhi:
      if (!HasEntry(node)) {
        status_[node->id()] |= kTracked;
        RevisitUses(node);
      }
      if (!IsAllocationPhi(node) && SetEscaped(node)) {
        RevisitInputs(node);
        RevisitUses(node);
      }
      CheckUsesForEscape(node);
      break;
    default:
      break;
  }
}

bool EscapeStatusAnalysis::IsAllocationPhi(Node* node) {
  for (Edge edge : node->input_edges()) {
    Node* input = edge.to();
    if (input->opcode() == IrOpcode::kPhi && !IsEscaped(input)) continue;
    if (IsAllocation(input)) continue;
    return false;
  }
  return true;
}

void EscapeStatusAnalysis::ProcessStoreField(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kStoreField);
  Node* to = NodeProperties::GetValueInput(node, 0);
  Node* val = NodeProperties::GetValueInput(node, 1);
  if ((IsEscaped(to) || !IsAllocation(to)) && SetEscaped(val)) {
    RevisitUses(val);
    RevisitInputs(val);
    TRACE("Setting #%d (%s) to escaped because of store to field of #%d\n",
          val->id(), val->op()->mnemonic(), to->id());
  }
}

void EscapeStatusAnalysis::ProcessStoreElement(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kStoreElement);
  Node* to = NodeProperties::GetValueInput(node, 0);
  Node* val = NodeProperties::GetValueInput(node, 2);
  if ((IsEscaped(to) || !IsAllocation(to)) && SetEscaped(val)) {
    RevisitUses(val);
    RevisitInputs(val);
    TRACE("Setting #%d (%s) to escaped because of store to element of #%d\n",
          val->id(), val->op()->mnemonic(), to->id());
  }
}

void EscapeStatusAnalysis::ProcessAllocate(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kAllocate);
  if (!HasEntry(node)) {
    status_[node->id()] |= kTracked;
    TRACE("Created status entry for node #%d (%s)\n", node->id(),
          node->op()->mnemonic());
    NumberMatcher size(node->InputAt(0));
    DCHECK(node->InputAt(0)->opcode() != IrOpcode::kInt32Constant &&
           node->InputAt(0)->opcode() != IrOpcode::kInt64Constant &&
           node->InputAt(0)->opcode() != IrOpcode::kFloat32Constant &&
           node->InputAt(0)->opcode() != IrOpcode::kFloat64Constant);
    RevisitUses(node);
    if (!size.HasValue() && SetEscaped(node)) {
      TRACE("Setting #%d to escaped because of non-const alloc\n", node->id());
      // This node is already known to escape, uses do not have to be checked
      // for escape.
      return;
    }
  }
  if (CheckUsesForEscape(node, true)) {
    RevisitUses(node);
  }
}

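// Checks all uses of {uses} (value and context inputs only) and marks {rep}
// as escaping where a use can leak the object, e.g. an unknown operator or
// a use by an already escaping node. Returns true if the escape status of
// {rep} changed.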
bool EscapeStatusAnalysis::CheckUsesForEscape(Node* uses, Node* rep,
                                              bool phi_escaping) {
  for (Edge edge : uses->use_edges()) {
    Node* use = edge.from();
    if (IsNotReachable(use)) continue;
    if (edge.index() >= use->op()->ValueInputCount() +
                            OperatorProperties::GetContextInputCount(use->op()))
      continue;
    switch (use->opcode()) {
      case IrOpcode::kPhi:
        if (phi_escaping && SetEscaped(rep)) {
          TRACE(
              "Setting #%d (%s) to escaped because of use by phi node "
              "#%d (%s)\n",
              rep->id(), rep->op()->mnemonic(), use->id(),
              use->op()->mnemonic());
          return true;
        }
      // Fallthrough.
      case IrOpcode::kStoreField:
      case IrOpcode::kLoadField:
      case IrOpcode::kStoreElement:
      case IrOpcode::kLoadElement:
      case IrOpcode::kFrameState:
      case IrOpcode::kStateValues:
      case IrOpcode::kReferenceEqual:
      case IrOpcode::kFinishRegion:
        if (IsEscaped(use) && SetEscaped(rep)) {
          TRACE(
              "Setting #%d (%s) to escaped because of use by escaping node "
              "#%d (%s)\n",
              rep->id(), rep->op()->mnemonic(), use->id(),
              use->op()->mnemonic());
          return true;
        }
        break;
      case IrOpcode::kObjectIsSmi:
        if (!IsAllocation(rep) && SetEscaped(rep)) {
          TRACE("Setting #%d (%s) to escaped because of use by #%d (%s)\n",
                rep->id(), rep->op()->mnemonic(), use->id(),
                use->op()->mnemonic());
          return true;
        }
        break;
      case IrOpcode::kSelect:
      case IrOpcode::kTypeGuard:
      // TODO(mstarzinger): The following list of operators will eventually be
      // handled by the EscapeAnalysisReducer (similar to ObjectIsSmi).
      case IrOpcode::kObjectIsCallable:
      case IrOpcode::kObjectIsNumber:
      case IrOpcode::kObjectIsString:
      case IrOpcode::kObjectIsUndetectable:
        if (SetEscaped(rep)) {
          TRACE("Setting #%d (%s) to escaped because of use by #%d (%s)\n",
                rep->id(), rep->op()->mnemonic(), use->id(),
                use->op()->mnemonic());
          return true;
        }
        break;
      default:
        if (use->op()->EffectInputCount() == 0 &&
            uses->op()->EffectInputCount() > 0 &&
            !IrOpcode::IsJsOpcode(use->opcode())) {
          TRACE("Encountered unaccounted use by #%d (%s)\n", use->id(),
                use->op()->mnemonic());
          UNREACHABLE();
        }
        if (SetEscaped(rep)) {
          TRACE("Setting #%d (%s) to escaped because of use by #%d (%s)\n",
                rep->id(), rep->op()->mnemonic(), use->id(),
                use->op()->mnemonic());
          return true;
        }
    }
  }
  return false;
}

void EscapeStatusAnalysis::ProcessFinishRegion(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kFinishRegion);
  if (!HasEntry(node)) {
    status_[node->id()] |= kTracked;
    RevisitUses(node);
  }
  if (CheckUsesForEscape(node, true)) {
    RevisitInputs(node);
  }
}

void EscapeStatusAnalysis::DebugPrint() {
  for (NodeId id = 0; id < status_.size(); id++) {
    if (status_[id] & kTracked) {
      PrintF("Node #%d is %s\n", id,
             (status_[id] & kEscaped) ? "escaping" : "virtual");
    }
  }
}

EscapeAnalysis::EscapeAnalysis(Graph* graph, CommonOperatorBuilder* common,
                               Zone* zone)
    : zone_(zone),
      slot_not_analyzed_(graph->NewNode(common->NumberConstant(0x1c0debad))),
      common_(common),
      status_analysis_(new (zone) EscapeStatusAnalysis(this, graph, zone)),
      virtual_states_(zone),
      replacements_(zone),
      cache_(nullptr) {}

EscapeAnalysis::~EscapeAnalysis() {}

void EscapeAnalysis::Run() {
  replacements_.resize(graph()->NodeCount());
  status_analysis_->AssignAliases();
  if (status_analysis_->AliasCount() > 0) {
    cache_ = new (zone()) MergeCache(zone());
    replacements_.resize(graph()->NodeCount());
    status_analysis_->ResizeStatusVector();
    RunObjectAnalysis();
    status_analysis_->RunStatusAnalysis();
  }
}

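// Walks the graph backwards from end, assigning a fresh alias to every
// reachable allocation (a FinishRegion shares the alias of its allocation)
// and marking all other reachable nodes as untrackable.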
void EscapeStatusAnalysis::AssignAliases() {
  size_t max_size = 1024;
  size_t min_size = 32;
  size_t stack_size =
      std::min(std::max(graph()->NodeCount() / 5, min_size), max_size);
  stack_.reserve(stack_size);
  ResizeStatusVector();
  stack_.push_back(graph()->end());
  CHECK_LT(graph()->NodeCount(), kUntrackable);
  aliases_.resize(graph()->NodeCount(), kNotReachable);
  aliases_[graph()->end()->id()] = kUntrackable;
  status_stack_.reserve(8);
  TRACE("Discovering trackable nodes");
  while (!stack_.empty()) {
    Node* node = stack_.back();
    stack_.pop_back();
    switch (node->opcode()) {
      case IrOpcode::kAllocate:
        if (aliases_[node->id()] >= kUntrackable) {
          aliases_[node->id()] = NextAlias();
          TRACE(" @%d:%s#%u", aliases_[node->id()], node->op()->mnemonic(),
                node->id());
          EnqueueForStatusAnalysis(node);
        }
        break;
      case IrOpcode::kFinishRegion: {
        Node* allocate = NodeProperties::GetValueInput(node, 0);
        DCHECK_NOT_NULL(allocate);
        if (allocate->opcode() == IrOpcode::kAllocate) {
          if (aliases_[allocate->id()] >= kUntrackable) {
            if (aliases_[allocate->id()] == kNotReachable) {
              stack_.push_back(allocate);
            }
            aliases_[allocate->id()] = NextAlias();
            TRACE(" @%d:%s#%u", aliases_[allocate->id()],
                  allocate->op()->mnemonic(), allocate->id());
            EnqueueForStatusAnalysis(allocate);
          }
          aliases_[node->id()] = aliases_[allocate->id()];
          TRACE(" @%d:%s#%u", aliases_[node->id()], node->op()->mnemonic(),
                node->id());
        }
        break;
      }
      default:
        DCHECK_EQ(aliases_[node->id()], kUntrackable);
        break;
    }
    for (Edge edge : node->input_edges()) {
      Node* input = edge.to();
      if (aliases_[input->id()] == kNotReachable) {
        stack_.push_back(input);
        aliases_[input->id()] = kUntrackable;
      }
    }
  }
  TRACE("\n");
}

bool EscapeStatusAnalysis::IsNotReachable(Node* node) {
  if (node->id() >= aliases_.size()) {
    return false;
  }
  return aliases_[node->id()] == kNotReachable;
}

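// Forward pass over the effect chains, starting at the start node:
// propagates virtual states and interprets loads and stores against the
// tracked virtual objects.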
void EscapeAnalysis::RunObjectAnalysis() {
  virtual_states_.resize(graph()->NodeCount());
  ZoneDeque<Node*> queue(zone());
  queue.push_back(graph()->start());
  ZoneVector<Node*> danglers(zone());
  while (!queue.empty()) {
    Node* node = queue.back();
    queue.pop_back();
    status_analysis_->SetInQueue(node->id(), false);
    if (Process(node)) {
      for (Edge edge : node->use_edges()) {
        Node* use = edge.from();
        if (status_analysis_->IsNotReachable(use)) {
          continue;
        }
        if (NodeProperties::IsEffectEdge(edge)) {
          // Iteration order: depth first, but delay phis.
          // We need DFS to avoid some duplication of VirtualStates and
          // VirtualObjects, and we want to delay phis to improve performance.
          if (use->opcode() == IrOpcode::kEffectPhi) {
            if (!status_analysis_->IsInQueue(use->id())) {
              queue.push_front(use);
            }
          } else if ((use->opcode() != IrOpcode::kLoadField &&
                      use->opcode() != IrOpcode::kLoadElement) ||
                     !status_analysis_->IsDanglingEffectNode(use)) {
            if (!status_analysis_->IsInQueue(use->id())) {
              status_analysis_->SetInQueue(use->id(), true);
              queue.push_back(use);
            }
          } else {
            danglers.push_back(use);
          }
        }
      }
      // Danglers need to be processed immediately, even if they are
      // on the stack. Since they do not have effect outputs,
      // we don't have to track whether they are on the stack.
      queue.insert(queue.end(), danglers.begin(), danglers.end());
      danglers.clear();
    }
  }
#ifdef DEBUG
  if (FLAG_trace_turbo_escape) {
    DebugPrint();
  }
#endif
}

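// A node is dangling if it has effect inputs and outputs but no (reachable)
// effect successor; such loads are delayed by RunObjectAnalysis and
// processed right after their effect input.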
bool EscapeStatusAnalysis::IsDanglingEffectNode(Node* node) {
  if (status_[node->id()] & kDanglingComputed) {
    return status_[node->id()] & kDangling;
  }
  if (node->op()->EffectInputCount() == 0 ||
      node->op()->EffectOutputCount() == 0 ||
      (node->op()->EffectInputCount() == 1 &&
       NodeProperties::GetEffectInput(node)->opcode() == IrOpcode::kStart)) {
    // The start node is used as sentinel for nodes that are in general
    // effectful, but of which an analysis has determined that they do not
    // produce effects in this instance. We don't consider these nodes
    // dangling.
    status_[node->id()] |= kDanglingComputed;
    return false;
  }
  for (Edge edge : node->use_edges()) {
    Node* use = edge.from();
    if (aliases_[use->id()] == kNotReachable) continue;
    if (NodeProperties::IsEffectEdge(edge)) {
      status_[node->id()] |= kDanglingComputed;
      return false;
    }
  }
  status_[node->id()] |= kDanglingComputed | kDangling;
  return true;
}

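// A node is an effect branch point if more than one non-dangling effect
// successor depends on it; virtual states flowing across such a node must
// not be mutated in place (see ForwardVirtualState).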
bool EscapeStatusAnalysis::IsEffectBranchPoint(Node* node) {
  if (status_[node->id()] & kBranchPointComputed) {
    return status_[node->id()] & kBranchPoint;
  }
  int count = 0;
  for (Edge edge : node->use_edges()) {
    Node* use = edge.from();
    if (aliases_[use->id()] == kNotReachable) continue;
    if (NodeProperties::IsEffectEdge(edge)) {
      if ((use->opcode() == IrOpcode::kLoadField ||
           use->opcode() == IrOpcode::kLoadElement ||
           use->opcode() == IrOpcode::kLoad) &&
          IsDanglingEffectNode(use))
        continue;
      if (++count > 1) {
        status_[node->id()] |= kBranchPointComputed | kBranchPoint;
        return true;
      }
    }
  }
  status_[node->id()] |= kBranchPointComputed;
  return false;
}

bool EscapeAnalysis::Process(Node* node) {
  switch (node->opcode()) {
    case IrOpcode::kAllocate:
      ProcessAllocation(node);
      break;
    case IrOpcode::kBeginRegion:
      ForwardVirtualState(node);
      break;
    case IrOpcode::kFinishRegion:
      ProcessFinishRegion(node);
      break;
    case IrOpcode::kStoreField:
      ProcessStoreField(node);
      break;
    case IrOpcode::kLoadField:
      ProcessLoadField(node);
      break;
    case IrOpcode::kStoreElement:
      ProcessStoreElement(node);
      break;
    case IrOpcode::kLoadElement:
      ProcessLoadElement(node);
      break;
    case IrOpcode::kStart:
      ProcessStart(node);
      break;
    case IrOpcode::kEffectPhi:
      return ProcessEffectPhi(node);
    default:
      if (node->op()->EffectInputCount() > 0) {
        ForwardVirtualState(node);
      }
      ProcessAllocationUsers(node);
      break;
  }
  return true;
}

void EscapeAnalysis::ProcessAllocationUsers(Node* node) {
  for (Edge edge : node->input_edges()) {
    Node* input = edge.to();
    Node* use = edge.from();
    if (edge.index() >= use->op()->ValueInputCount() +
                            OperatorProperties::GetContextInputCount(use->op()))
      continue;
    switch (node->opcode()) {
      case IrOpcode::kStoreField:
      case IrOpcode::kLoadField:
      case IrOpcode::kStoreElement:
      case IrOpcode::kLoadElement:
      case IrOpcode::kFrameState:
      case IrOpcode::kStateValues:
      case IrOpcode::kReferenceEqual:
      case IrOpcode::kFinishRegion:
      case IrOpcode::kObjectIsSmi:
        break;
      default:
        VirtualState* state = virtual_states_[node->id()];
        if (VirtualObject* obj =
                GetVirtualObject(state, ResolveReplacement(input))) {
          if (!obj->AllFieldsClear()) {
            obj = CopyForModificationAt(obj, state, node);
            obj->ClearAllFields();
            TRACE("Cleared all fields of @%d:#%d\n",
                  status_analysis_->GetAlias(obj->id()), obj->id());
          }
        }
        break;
    }
  }
}

VirtualState* EscapeAnalysis::CopyForModificationAt(VirtualState* state,
                                                    Node* node) {
  if (state->owner() != node) {
    VirtualState* new_state = new (zone()) VirtualState(node, *state);
    virtual_states_[node->id()] = new_state;
    TRACE("Copying virtual state %p to new state %p at node %s#%d\n",
          static_cast<void*>(state), static_cast<void*>(new_state),
          node->op()->mnemonic(), node->id());
    return new_state;
  }
  return state;
}

VirtualObject* EscapeAnalysis::CopyForModificationAt(VirtualObject* obj,
                                                     VirtualState* state,
                                                     Node* node) {
  if (obj->NeedCopyForModification()) {
    state = CopyForModificationAt(state, node);
    return state->Copy(obj, status_analysis_->GetAlias(obj->id()));
  }
  return obj;
}

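// Propagates the virtual state from the effect input to {node}. When the
// effect input is a branch point, or {node} takes a frame state, the
// forwarded state is marked copy-on-write instead of being copied eagerly.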
void EscapeAnalysis::ForwardVirtualState(Node* node) {
  DCHECK_EQ(node->op()->EffectInputCount(), 1);
#ifdef DEBUG
  if (node->opcode() != IrOpcode::kLoadField &&
      node->opcode() != IrOpcode::kLoadElement &&
      node->opcode() != IrOpcode::kLoad &&
      status_analysis_->IsDanglingEffectNode(node)) {
    PrintF("Dangling effect node: #%d (%s)\n", node->id(),
           node->op()->mnemonic());
    UNREACHABLE();
  }
#endif  // DEBUG
  Node* effect = NodeProperties::GetEffectInput(node);
  DCHECK_NOT_NULL(virtual_states_[effect->id()]);
  if (virtual_states_[node->id()]) {
    virtual_states_[node->id()]->UpdateFrom(virtual_states_[effect->id()],
                                            zone());
  } else {
    virtual_states_[node->id()] = virtual_states_[effect->id()];
    TRACE("Forwarding object state %p from %s#%d to %s#%d",
          static_cast<void*>(virtual_states_[effect->id()]),
          effect->op()->mnemonic(), effect->id(), node->op()->mnemonic(),
          node->id());
    if (status_analysis_->IsEffectBranchPoint(effect) ||
        OperatorProperties::GetFrameStateInputCount(node->op()) > 0) {
      virtual_states_[node->id()]->SetCopyRequired();
      TRACE(", effect input %s#%d is branch point", effect->op()->mnemonic(),
            effect->id());
    }
    TRACE("\n");
  }
}

void EscapeAnalysis::ProcessStart(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kStart);
  virtual_states_[node->id()] =
      new (zone()) VirtualState(node, zone(), status_analysis_->AliasCount());
}

bool EscapeAnalysis::ProcessEffectPhi(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kEffectPhi);
  bool changed = false;

  VirtualState* mergeState = virtual_states_[node->id()];
  if (!mergeState) {
    mergeState =
        new (zone()) VirtualState(node, zone(), status_analysis_->AliasCount());
    virtual_states_[node->id()] = mergeState;
    changed = true;
    TRACE("Effect Phi #%d got new virtual state %p.\n", node->id(),
          static_cast<void*>(mergeState));
  }

  cache_->Clear();

  TRACE("At Effect Phi #%d, merging states into %p:", node->id(),
        static_cast<void*>(mergeState));

  for (int i = 0; i < node->op()->EffectInputCount(); ++i) {
    Node* input = NodeProperties::GetEffectInput(node, i);
    VirtualState* state = virtual_states_[input->id()];
    if (state) {
      cache_->states().push_back(state);
      if (state == mergeState) {
        mergeState = new (zone())
            VirtualState(node, zone(), status_analysis_->AliasCount());
        virtual_states_[node->id()] = mergeState;
        changed = true;
      }
    }
    TRACE(" %p (from %d %s)", static_cast<void*>(state), input->id(),
          input->op()->mnemonic());
  }
  TRACE("\n");

  if (cache_->states().size() == 0) {
    return changed;
  }

  changed =
      mergeState->MergeFrom(cache_, zone(), graph(), common(), node) || changed;

  TRACE("Merge %s the node.\n", changed ? "changed" : "did not change");

  if (changed) {
    status_analysis_->ResizeStatusVector();
  }
  return changed;
}

void EscapeAnalysis::ProcessAllocation(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kAllocate);
  ForwardVirtualState(node);
  VirtualState* state = virtual_states_[node->id()];
  Alias alias = status_analysis_->GetAlias(node->id());

  // Check if we have already processed this node.
  if (state->VirtualObjectFromAlias(alias)) {
    return;
  }

  if (state->owner()->opcode() == IrOpcode::kEffectPhi) {
    state = CopyForModificationAt(state, node);
  }

  NumberMatcher size(node->InputAt(0));
  DCHECK(node->InputAt(0)->opcode() != IrOpcode::kInt32Constant &&
         node->InputAt(0)->opcode() != IrOpcode::kInt64Constant &&
         node->InputAt(0)->opcode() != IrOpcode::kFloat32Constant &&
         node->InputAt(0)->opcode() != IrOpcode::kFloat64Constant);
  if (size.HasValue()) {
    VirtualObject* obj = new (zone()) VirtualObject(
        node->id(), state, zone(), size.Value() / kPointerSize, false);
    state->SetVirtualObject(alias, obj);
  } else {
    state->SetVirtualObject(
        alias, new (zone()) VirtualObject(node->id(), state, zone()));
  }
}

void EscapeAnalysis::ProcessFinishRegion(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kFinishRegion);
  ForwardVirtualState(node);
  Node* allocation = NodeProperties::GetValueInput(node, 0);
  if (allocation->opcode() == IrOpcode::kAllocate) {
    VirtualState* state = virtual_states_[node->id()];
    VirtualObject* obj =
        state->VirtualObjectFromAlias(status_analysis_->GetAlias(node->id()));
    DCHECK_NOT_NULL(obj);
    obj->SetInitialized();
  }
}

Node* EscapeAnalysis::replacement(Node* node) {
  if (node->id() >= replacements_.size()) return nullptr;
  return replacements_[node->id()];
}

bool EscapeAnalysis::SetReplacement(Node* node, Node* rep) {
  bool changed = replacements_[node->id()] != rep;
  replacements_[node->id()] = rep;
  return changed;
}

bool EscapeAnalysis::UpdateReplacement(VirtualState* state, Node* node,
                                       Node* rep) {
  if (SetReplacement(node, rep)) {
    if (rep) {
      TRACE("Replacement of #%d is #%d (%s)\n", node->id(), rep->id(),
            rep->op()->mnemonic());
    } else {
      TRACE("Replacement of #%d cleared\n", node->id());
    }
    return true;
  }
  return false;
}

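// Follows the replacement chain to its end: ResolveReplacement returns the
// final representative of {node} (or {node} itself), while GetReplacement
// returns nullptr if there is no replacement at all.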
Node* EscapeAnalysis::ResolveReplacement(Node* node) {
  while (replacement(node)) {
    node = replacement(node);
  }
  return node;
}

Node* EscapeAnalysis::GetReplacement(Node* node) {
  Node* result = nullptr;
  while (replacement(node)) {
    node = result = replacement(node);
  }
  return result;
}

bool EscapeAnalysis::IsVirtual(Node* node) {
  if (node->id() >= status_analysis_->GetStatusVectorSize()) {
    return false;
  }
  return status_analysis_->IsVirtual(node);
}

bool EscapeAnalysis::IsEscaped(Node* node) {
  if (node->id() >= status_analysis_->GetStatusVectorSize()) {
    return false;
  }
  return status_analysis_->IsEscaped(node);
}

bool EscapeAnalysis::CompareVirtualObjects(Node* left, Node* right) {
  DCHECK(IsVirtual(left) && IsVirtual(right));
  left = ResolveReplacement(left);
  right = ResolveReplacement(right);
  if (IsEquivalentPhi(left, right)) {
    return true;
  }
  return false;
}

namespace {

int OffsetForFieldAccess(Node* node) {
  FieldAccess access = FieldAccessOf(node->op());
  DCHECK_EQ(access.offset % kPointerSize, 0);
  return access.offset / kPointerSize;
}

int OffsetForElementAccess(Node* node, int index) {
  ElementAccess access = ElementAccessOf(node->op());
  DCHECK_GE(ElementSizeLog2Of(access.machine_type.representation()),
            kPointerSizeLog2);
  DCHECK_EQ(access.header_size % kPointerSize, 0);
  return access.header_size / kPointerSize + index;
}

}  // namespace

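// Tries to replace a load from a phi of tracked objects by a phi over the
// corresponding field values; this only succeeds if every input of the phi
// maps to a virtual object that provides a value for the field.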
void EscapeAnalysis::ProcessLoadFromPhi(int offset, Node* from, Node* load,
                                        VirtualState* state) {
  TRACE("Load #%d from phi #%d", load->id(), from->id());

  cache_->fields().clear();
  for (int i = 0; i < load->op()->ValueInputCount(); ++i) {
    Node* input = NodeProperties::GetValueInput(load, i);
    cache_->fields().push_back(input);
  }

  cache_->LoadVirtualObjectsForFieldsFrom(state,
                                          status_analysis_->GetAliasMap());
  if (cache_->objects().size() == cache_->fields().size()) {
    cache_->GetFields(offset);
    if (cache_->fields().size() == cache_->objects().size()) {
      Node* rep = replacement(load);
      if (!rep || !IsEquivalentPhi(rep, cache_->fields())) {
        int value_input_count = static_cast<int>(cache_->fields().size());
        cache_->fields().push_back(NodeProperties::GetControlInput(from));
        Node* phi = graph()->NewNode(
            common()->Phi(MachineRepresentation::kTagged, value_input_count),
            value_input_count + 1, &cache_->fields().front());
        status_analysis_->ResizeStatusVector();
        SetReplacement(load, phi);
        TRACE(" got phi created.\n");
      } else {
        TRACE(" has already phi #%d.\n", rep->id());
      }
    } else {
      TRACE(" has incomplete field info.\n");
    }
  } else {
    TRACE(" has incomplete virtual object info.\n");
  }
}

void EscapeAnalysis::ProcessLoadField(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kLoadField);
  ForwardVirtualState(node);
  Node* from = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
  VirtualState* state = virtual_states_[node->id()];
  if (VirtualObject* object = GetVirtualObject(state, from)) {
    if (!object->IsTracked()) return;
    int offset = OffsetForFieldAccess(node);
    if (static_cast<size_t>(offset) >= object->field_count()) return;
    Node* value = object->GetField(offset);
    if (value) {
      value = ResolveReplacement(value);
    }
    // Record that the load has this alias.
    UpdateReplacement(state, node, value);
  } else if (from->opcode() == IrOpcode::kPhi &&
             FieldAccessOf(node->op()).offset % kPointerSize == 0) {
    int offset = OffsetForFieldAccess(node);
    // Only binary phis are supported for now.
    ProcessLoadFromPhi(offset, from, node, state);
  } else {
    UpdateReplacement(state, node, nullptr);
  }
}

void EscapeAnalysis::ProcessLoadElement(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kLoadElement);
  ForwardVirtualState(node);
  Node* from = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
  VirtualState* state = virtual_states_[node->id()];
  Node* index_node = node->InputAt(1);
  NumberMatcher index(index_node);
  DCHECK(index_node->opcode() != IrOpcode::kInt32Constant &&
         index_node->opcode() != IrOpcode::kInt64Constant &&
         index_node->opcode() != IrOpcode::kFloat32Constant &&
         index_node->opcode() != IrOpcode::kFloat64Constant);
  if (index.HasValue()) {
    if (VirtualObject* object = GetVirtualObject(state, from)) {
      if (!object->IsTracked()) return;
      int offset = OffsetForElementAccess(node, index.Value());
      if (static_cast<size_t>(offset) >= object->field_count()) return;
      Node* value = object->GetField(offset);
      if (value) {
        value = ResolveReplacement(value);
      }
      // Record that the load has this alias.
      UpdateReplacement(state, node, value);
    } else if (from->opcode() == IrOpcode::kPhi) {
      int offset = OffsetForElementAccess(node, index.Value());
      ProcessLoadFromPhi(offset, from, node, state);
    } else {
      UpdateReplacement(state, node, nullptr);
    }
  } else {
    // We have a load from a non-const index, cannot eliminate object.
    if (status_analysis_->SetEscaped(from)) {
      TRACE(
          "Setting #%d (%s) to escaped because load element #%d from "
          "non-const index #%d (%s)\n",
          from->id(), from->op()->mnemonic(), node->id(), index_node->id(),
          index_node->op()->mnemonic());
    }
  }
}

void EscapeAnalysis::ProcessStoreField(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kStoreField);
  ForwardVirtualState(node);
  Node* to = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
  VirtualState* state = virtual_states_[node->id()];
  if (VirtualObject* object = GetVirtualObject(state, to)) {
    if (!object->IsTracked()) return;
    int offset = OffsetForFieldAccess(node);
    if (static_cast<size_t>(offset) >= object->field_count()) return;
    Node* val = ResolveReplacement(NodeProperties::GetValueInput(node, 1));
    // TODO(mstarzinger): The following is a workaround to not track the code
    // entry field in virtual JSFunction objects. We only ever store the inner
    // pointer into the compile lazy stub in this field and the deoptimizer has
    // this assumption hard-coded in {TranslatedState::MaterializeAt} as well.
    if (val->opcode() == IrOpcode::kInt32Constant ||
        val->opcode() == IrOpcode::kInt64Constant) {
      DCHECK_EQ(JSFunction::kCodeEntryOffset, FieldAccessOf(node->op()).offset);
      val = slot_not_analyzed_;
    }
    if (object->GetField(offset) != val) {
      object = CopyForModificationAt(object, state, node);
      object->SetField(offset, val);
    }
  }
}

void EscapeAnalysis::ProcessStoreElement(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kStoreElement);
  ForwardVirtualState(node);
  Node* to = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
  Node* index_node = node->InputAt(1);
  NumberMatcher index(index_node);
  DCHECK(index_node->opcode() != IrOpcode::kInt32Constant &&
         index_node->opcode() != IrOpcode::kInt64Constant &&
         index_node->opcode() != IrOpcode::kFloat32Constant &&
         index_node->opcode() != IrOpcode::kFloat64Constant);
  VirtualState* state = virtual_states_[node->id()];
  if (index.HasValue()) {
    if (VirtualObject* object = GetVirtualObject(state, to)) {
      if (!object->IsTracked()) return;
      int offset = OffsetForElementAccess(node, index.Value());
      if (static_cast<size_t>(offset) >= object->field_count()) return;
      Node* val = ResolveReplacement(NodeProperties::GetValueInput(node, 2));
      if (object->GetField(offset) != val) {
        object = CopyForModificationAt(object, state, node);
        object->SetField(offset, val);
      }
    }
  } else {
    // We have a store to a non-const index, cannot eliminate object.
    if (status_analysis_->SetEscaped(to)) {
      TRACE(
          "Setting #%d (%s) to escaped because store element #%d to non-const "
          "index #%d (%s)\n",
          to->id(), to->op()->mnemonic(), node->id(), index_node->id(),
          index_node->op()->mnemonic());
    }
    if (VirtualObject* object = GetVirtualObject(state, to)) {
      if (!object->IsTracked()) return;
      if (!object->AllFieldsClear()) {
        object = CopyForModificationAt(object, state, node);
        object->ClearAllFields();
        TRACE("Cleared all fields of @%d:#%d\n",
              status_analysis_->GetAlias(object->id()), object->id());
      }
    }
  }
}

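// Returns the ObjectState node describing the virtual object {node} at
// {effect}, creating it on demand and recursively wiring in object states
// for fields that are themselves virtual allocations.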
Node* EscapeAnalysis::GetOrCreateObjectState(Node* effect, Node* node) {
  if ((node->opcode() == IrOpcode::kFinishRegion ||
       node->opcode() == IrOpcode::kAllocate) &&
      IsVirtual(node)) {
    if (VirtualObject* vobj = GetVirtualObject(virtual_states_[effect->id()],
                                               ResolveReplacement(node))) {
      if (Node* object_state = vobj->GetObjectState()) {
        return object_state;
      } else {
        cache_->fields().clear();
        for (size_t i = 0; i < vobj->field_count(); ++i) {
          if (Node* field = vobj->GetField(i)) {
            cache_->fields().push_back(field);
          }
        }
        int input_count = static_cast<int>(cache_->fields().size());
        Node* new_object_state =
            graph()->NewNode(common()->ObjectState(input_count, vobj->id()),
                             input_count, &cache_->fields().front());
        vobj->SetObjectState(new_object_state);
        TRACE(
            "Creating object state #%d for vobj %p (from node #%d) at effect "
            "#%d\n",
            new_object_state->id(), static_cast<void*>(vobj), node->id(),
            effect->id());
        // Now fix uses of other objects.
        for (size_t i = 0; i < vobj->field_count(); ++i) {
          if (Node* field = vobj->GetField(i)) {
            if (Node* field_object_state =
                    GetOrCreateObjectState(effect, field)) {
              NodeProperties::ReplaceValueInput(
                  new_object_state, field_object_state, static_cast<int>(i));
            }
          }
        }
        return new_object_state;
      }
    }
  }
  return nullptr;
}

void EscapeAnalysis::DebugPrintState(VirtualState* state) {
  PrintF("Dumping virtual state %p\n", static_cast<void*>(state));
  for (Alias alias = 0; alias < status_analysis_->AliasCount(); ++alias) {
    if (VirtualObject* object = state->VirtualObjectFromAlias(alias)) {
      PrintF("  Alias @%d: Object #%d with %zu fields\n", alias, object->id(),
             object->field_count());
      for (size_t i = 0; i < object->field_count(); ++i) {
        if (Node* f = object->GetField(i)) {
          PrintF("    Field %zu = #%d (%s)\n", i, f->id(), f->op()->mnemonic());
        }
      }
    }
  }
}

void EscapeAnalysis::DebugPrint() {
  ZoneVector<VirtualState*> object_states(zone());
  for (NodeId id = 0; id < virtual_states_.size(); id++) {
    if (VirtualState* states = virtual_states_[id]) {
      if (std::find(object_states.begin(), object_states.end(), states) ==
          object_states.end()) {
        object_states.push_back(states);
      }
    }
  }
  for (size_t n = 0; n < object_states.size(); n++) {
    DebugPrintState(object_states[n]);
  }
}

VirtualObject* EscapeAnalysis::GetVirtualObject(VirtualState* state,
                                                Node* node) {
  if (node->id() >= status_analysis_->GetAliasMap().size()) return nullptr;
  Alias alias = status_analysis_->GetAlias(node->id());
  if (alias >= state->size()) return nullptr;
  return state->VirtualObjectFromAlias(alias);
}

bool EscapeAnalysis::ExistsVirtualAllocate() {
  for (size_t id = 0; id < status_analysis_->GetAliasMap().size(); ++id) {
    Alias alias = status_analysis_->GetAlias(static_cast<NodeId>(id));
    if (alias < EscapeStatusAnalysis::kUntrackable) {
      if (status_analysis_->IsVirtual(static_cast<int>(id))) {
        return true;
      }
    }
  }
  return false;
}

Graph* EscapeAnalysis::graph() const { return status_analysis_->graph(); }

}  // namespace compiler
}  // namespace internal
}  // namespace v8