1 // Copyright 2015 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/compiler/escape-analysis.h"
6
7 #include <limits>
8
9 #include "src/base/flags.h"
10 #include "src/bootstrapper.h"
11 #include "src/compilation-dependencies.h"
12 #include "src/compiler/common-operator.h"
13 #include "src/compiler/graph-reducer.h"
14 #include "src/compiler/js-operator.h"
15 #include "src/compiler/linkage.h"
16 #include "src/compiler/node-matchers.h"
17 #include "src/compiler/node-properties.h"
18 #include "src/compiler/node.h"
19 #include "src/compiler/operator-properties.h"
20 #include "src/compiler/simplified-operator.h"
21 #include "src/compiler/type-cache.h"
22 #include "src/objects-inl.h"
23
24 namespace v8 {
25 namespace internal {
26 namespace compiler {
27
28 typedef NodeId Alias;
29
30 #ifdef DEBUG
31 #define TRACE(...) \
32 do { \
33 if (FLAG_trace_turbo_escape) PrintF(__VA_ARGS__); \
34 } while (false)
35 #else
36 #define TRACE(...)
37 #endif
38
// EscapeStatusAnalysis determines for each allocation whether it escapes.
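// It runs a worklist-driven fixpoint over a per-node StatusFlags vector:
// Process() propagates the kTracked/kEscaped bits along uses and inputs
// until nothing changes. As an illustrative (hypothetical) example, for
// JavaScript like `function f() { var o = {x: 1}; return o.x; }` the
// object's allocation has no escaping use, stays virtual, and can later be
// scalar-replaced by the EscapeAnalysisReducer.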
40 class EscapeStatusAnalysis : public ZoneObject {
41 public:
42 enum Status {
43 kUnknown = 0u,
44 kTracked = 1u << 0,
45 kEscaped = 1u << 1,
46 kOnStack = 1u << 2,
47 kVisited = 1u << 3,
    // A node is dangling if it is a load of some kind and does not have
    // an effect successor.
    kDanglingComputed = 1u << 4,
    kDangling = 1u << 5,
    // A node is an effect branch point if it has more than one non-dangling
    // effect successor.
    kBranchPointComputed = 1u << 6,
    kBranchPoint = 1u << 7,
56 kInQueue = 1u << 8
57 };
58 typedef base::Flags<Status, uint16_t> StatusFlags;
59
60 void RunStatusAnalysis();
61
62 bool IsVirtual(Node* node);
63 bool IsEscaped(Node* node);
64 bool IsAllocation(Node* node);
65
66 bool IsInQueue(NodeId id);
67 void SetInQueue(NodeId id, bool on_stack);
68
69 void DebugPrint();
70
71 EscapeStatusAnalysis(EscapeAnalysis* object_analysis, Graph* graph,
72 Zone* zone);
73 void EnqueueForStatusAnalysis(Node* node);
74 bool SetEscaped(Node* node);
75 bool IsEffectBranchPoint(Node* node);
76 bool IsDanglingEffectNode(Node* node);
77 void ResizeStatusVector();
78 size_t GetStatusVectorSize();
79 bool IsVirtual(NodeId id);
80
  Graph* graph() const { return graph_; }
  void AssignAliases();
  Alias GetAlias(NodeId id) const { return aliases_[id]; }
  const ZoneVector<Alias>& GetAliasMap() const { return aliases_; }
  Alias AliasCount() const { return next_free_alias_; }
86 static const Alias kNotReachable;
87 static const Alias kUntrackable;
88
89 bool IsNotReachable(Node* node);
90
91 private:
92 void Process(Node* node);
93 void ProcessAllocate(Node* node);
94 void ProcessFinishRegion(Node* node);
95 void ProcessStoreField(Node* node);
96 void ProcessStoreElement(Node* node);
  bool CheckUsesForEscape(Node* node, bool phi_escaping = false) {
    return CheckUsesForEscape(node, node, phi_escaping);
  }
100 bool CheckUsesForEscape(Node* node, Node* rep, bool phi_escaping = false);
101 void RevisitUses(Node* node);
102 void RevisitInputs(Node* node);
103
  Alias NextAlias() { return next_free_alias_++; }
105
106 bool HasEntry(Node* node);
107
108 bool IsAllocationPhi(Node* node);
109
110 ZoneVector<Node*> stack_;
111 EscapeAnalysis* object_analysis_;
112 Graph* const graph_;
113 ZoneVector<StatusFlags> status_;
114 Alias next_free_alias_;
115 ZoneVector<Node*> status_stack_;
116 ZoneVector<Alias> aliases_;
117
118 DISALLOW_COPY_AND_ASSIGN(EscapeStatusAnalysis);
119 };
120
121 DEFINE_OPERATORS_FOR_FLAGS(EscapeStatusAnalysis::StatusFlags)
122
123 const Alias EscapeStatusAnalysis::kNotReachable =
124 std::numeric_limits<Alias>::max();
125 const Alias EscapeStatusAnalysis::kUntrackable =
126 std::numeric_limits<Alias>::max() - 1;
127
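// A VirtualObject tracks the state of a single (non-escaping) allocation:
// one slot per pointer-sized field plus a flag recording whether the current
// field value is a Phi created by the analysis itself. The owner_ pointer
// names the VirtualState that may modify this object in place; other states
// must copy it first (copy-on-write via kCopyRequired).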
128 class VirtualObject : public ZoneObject {
129 public:
130 enum Status {
131 kInitial = 0,
132 kTracked = 1u << 0,
133 kInitialized = 1u << 1,
134 kCopyRequired = 1u << 2,
135 };
136 typedef base::Flags<Status, unsigned char> StatusFlags;
137
  VirtualObject(NodeId id, VirtualState* owner, Zone* zone)
      : id_(id),
        status_(kInitial),
        fields_(zone),
        phi_(zone),
        object_state_(nullptr),
        owner_(owner) {}

  VirtualObject(VirtualState* owner, const VirtualObject& other)
      : id_(other.id_),
        status_(other.status_ & ~kCopyRequired),
        fields_(other.fields_),
        phi_(other.phi_),
        object_state_(other.object_state_),
        owner_(owner) {}

  VirtualObject(NodeId id, VirtualState* owner, Zone* zone, size_t field_number,
                bool initialized)
      : id_(id),
        status_(kTracked | (initialized ? kInitialized : kInitial)),
        fields_(zone),
        phi_(zone),
        object_state_(nullptr),
        owner_(owner) {
    fields_.resize(field_number);
    phi_.resize(field_number, false);
  }
165
  Node* GetField(size_t offset) { return fields_[offset]; }

  bool IsCreatedPhi(size_t offset) { return phi_[offset]; }

  void SetField(size_t offset, Node* node, bool created_phi = false) {
    fields_[offset] = node;
    phi_[offset] = created_phi;
  }
  bool IsTracked() const { return status_ & kTracked; }
  bool IsInitialized() const { return status_ & kInitialized; }
  bool SetInitialized() { return status_ |= kInitialized; }
  VirtualState* owner() const { return owner_; }

  Node** fields_array() { return &fields_.front(); }
  size_t field_count() { return fields_.size(); }
  bool ResizeFields(size_t field_count) {
182 if (field_count > fields_.size()) {
183 fields_.resize(field_count);
184 phi_.resize(field_count);
185 return true;
186 }
187 return false;
188 }
  void ClearAllFields() {
190 for (size_t i = 0; i < fields_.size(); ++i) {
191 fields_[i] = nullptr;
192 phi_[i] = false;
193 }
194 }
  bool AllFieldsClear() {
196 for (size_t i = 0; i < fields_.size(); ++i) {
197 if (fields_[i] != nullptr) {
198 return false;
199 }
200 }
201 return true;
202 }
203 bool UpdateFrom(const VirtualObject& other);
204 bool MergeFrom(MergeCache* cache, Node* at, Graph* graph,
205 CommonOperatorBuilder* common, bool initialMerge);
  void SetObjectState(Node* node) { object_state_ = node; }
  Node* GetObjectState() const { return object_state_; }
  bool IsCopyRequired() const { return status_ & kCopyRequired; }
  void SetCopyRequired() { status_ |= kCopyRequired; }
  bool NeedCopyForModification() {
    if (!IsCopyRequired() || !IsInitialized()) {
      return false;
    }
    return true;
  }

  NodeId id() const { return id_; }
  void id(NodeId id) { id_ = id; }
219
220 private:
221 bool MergeFields(size_t i, Node* at, MergeCache* cache, Graph* graph,
222 CommonOperatorBuilder* common);
223
224 NodeId id_;
225 StatusFlags status_;
226 ZoneVector<Node*> fields_;
227 ZoneVector<bool> phi_;
228 Node* object_state_;
229 VirtualState* owner_;
230
231 DISALLOW_COPY_AND_ASSIGN(VirtualObject);
232 };
233
DEFINE_OPERATORS_FOR_FLAGS(VirtualObject::StatusFlags)
235
236 bool VirtualObject::UpdateFrom(const VirtualObject& other) {
237 bool changed = status_ != other.status_;
238 status_ = other.status_;
239 phi_ = other.phi_;
240 if (fields_.size() != other.fields_.size()) {
241 fields_ = other.fields_;
242 return true;
243 }
244 for (size_t i = 0; i < fields_.size(); ++i) {
245 if (fields_[i] != other.fields_[i]) {
246 changed = true;
247 fields_[i] = other.fields_[i];
248 }
249 }
250 return changed;
251 }
252
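// A VirtualState is the set of tracked objects known at a particular effect
// node: it maps each alias to the VirtualObject live there. States are
// shared along straight-line effect chains and copied lazily at effect
// branch points and merges.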
253 class VirtualState : public ZoneObject {
254 public:
  VirtualState(Node* owner, Zone* zone, size_t size)
      : info_(size, nullptr, zone),
        initialized_(static_cast<int>(size), zone),
        owner_(owner) {}

  VirtualState(Node* owner, const VirtualState& state)
261 : info_(state.info_.size(), nullptr, state.info_.get_allocator().zone()),
262 initialized_(state.initialized_.length(),
263 state.info_.get_allocator().zone()),
264 owner_(owner) {
265 for (size_t i = 0; i < info_.size(); ++i) {
266 if (state.info_[i]) {
267 info_[i] = state.info_[i];
268 }
269 }
270 }
271
272 VirtualObject* VirtualObjectFromAlias(size_t alias);
273 void SetVirtualObject(Alias alias, VirtualObject* state);
274 bool UpdateFrom(VirtualState* state, Zone* zone);
275 bool MergeFrom(MergeCache* cache, Zone* zone, Graph* graph,
276 CommonOperatorBuilder* common, Node* at);
  size_t size() const { return info_.size(); }
  Node* owner() const { return owner_; }
  VirtualObject* Copy(VirtualObject* obj, Alias alias);
  void SetCopyRequired() {
281 for (VirtualObject* obj : info_) {
282 if (obj) obj->SetCopyRequired();
283 }
284 }
285
286 private:
287 ZoneVector<VirtualObject*> info_;
288 BitVector initialized_;
289 Node* owner_;
290
291 DISALLOW_COPY_AND_ASSIGN(VirtualState);
292 };
293
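// MergeCache bundles scratch vectors that are reused while merging states at
// (effect) phis, avoiding repeated allocation of temporary buffers.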
294 class MergeCache : public ZoneObject {
295 public:
  explicit MergeCache(Zone* zone)
      : states_(zone), objects_(zone), fields_(zone) {
298 states_.reserve(5);
299 objects_.reserve(5);
300 fields_.reserve(5);
301 }
  ZoneVector<VirtualState*>& states() { return states_; }
  ZoneVector<VirtualObject*>& objects() { return objects_; }
  ZoneVector<Node*>& fields() { return fields_; }
  void Clear() {
306 states_.clear();
307 objects_.clear();
308 fields_.clear();
309 }
310 size_t LoadVirtualObjectsFromStatesFor(Alias alias);
311 void LoadVirtualObjectsForFieldsFrom(VirtualState* state,
312 const ZoneVector<Alias>& aliases);
313 Node* GetFields(size_t pos);
314
315 private:
316 ZoneVector<VirtualState*> states_;
317 ZoneVector<VirtualObject*> objects_;
318 ZoneVector<Node*> fields_;
319
320 DISALLOW_COPY_AND_ASSIGN(MergeCache);
321 };
322
size_t MergeCache::LoadVirtualObjectsFromStatesFor(Alias alias) {
324 objects_.clear();
325 DCHECK_GT(states_.size(), 0u);
326 size_t min = std::numeric_limits<size_t>::max();
327 for (VirtualState* state : states_) {
328 if (VirtualObject* obj = state->VirtualObjectFromAlias(alias)) {
329 objects_.push_back(obj);
330 min = std::min(obj->field_count(), min);
331 }
332 }
333 return min;
334 }
335
void MergeCache::LoadVirtualObjectsForFieldsFrom(
    VirtualState* state, const ZoneVector<Alias>& aliases) {
338 objects_.clear();
339 size_t max_alias = state->size();
340 for (Node* field : fields_) {
341 Alias alias = aliases[field->id()];
342 if (alias >= max_alias) continue;
343 if (VirtualObject* obj = state->VirtualObjectFromAlias(alias)) {
344 objects_.push_back(obj);
345 }
346 }
347 }
348
Node* MergeCache::GetFields(size_t pos) {
350 fields_.clear();
351 Node* rep = pos >= objects_.front()->field_count()
352 ? nullptr
353 : objects_.front()->GetField(pos);
354 for (VirtualObject* obj : objects_) {
355 if (pos >= obj->field_count()) continue;
356 Node* field = obj->GetField(pos);
357 if (field) {
358 fields_.push_back(field);
359 }
360 if (field != rep) {
361 rep = nullptr;
362 }
363 }
364 return rep;
365 }
366
VirtualObject* VirtualState::Copy(VirtualObject* obj, Alias alias) {
368 if (obj->owner() == this) return obj;
369 VirtualObject* new_obj =
370 new (info_.get_allocator().zone()) VirtualObject(this, *obj);
371 TRACE("At state %p, alias @%d (#%d), copying virtual object from %p to %p\n",
372 static_cast<void*>(this), alias, obj->id(), static_cast<void*>(obj),
373 static_cast<void*>(new_obj));
374 info_[alias] = new_obj;
375 return new_obj;
376 }
377
VirtualObject* VirtualState::VirtualObjectFromAlias(size_t alias) {
379 return info_[alias];
380 }
381
void VirtualState::SetVirtualObject(Alias alias, VirtualObject* obj) {
383 info_[alias] = obj;
384 if (obj) initialized_.Add(alias);
385 }
386
bool VirtualState::UpdateFrom(VirtualState* from, Zone* zone) {
388 if (from == this) return false;
389 bool changed = false;
390 for (Alias alias = 0; alias < size(); ++alias) {
391 VirtualObject* ls = VirtualObjectFromAlias(alias);
392 VirtualObject* rs = from->VirtualObjectFromAlias(alias);
393
394 if (ls == rs || rs == nullptr) continue;
395
396 if (ls == nullptr) {
397 ls = new (zone) VirtualObject(this, *rs);
398 SetVirtualObject(alias, ls);
399 changed = true;
400 continue;
401 }
402
403 TRACE(" Updating fields of @%d\n", alias);
404
405 changed = ls->UpdateFrom(*rs) || changed;
406 }
407 return false;
408 }
409
410 namespace {
411
bool IsEquivalentPhi(Node* node1, Node* node2) {
413 if (node1 == node2) return true;
414 if (node1->opcode() != IrOpcode::kPhi || node2->opcode() != IrOpcode::kPhi ||
415 node1->op()->ValueInputCount() != node2->op()->ValueInputCount()) {
416 return false;
417 }
418 for (int i = 0; i < node1->op()->ValueInputCount(); ++i) {
419 Node* input1 = NodeProperties::GetValueInput(node1, i);
420 Node* input2 = NodeProperties::GetValueInput(node2, i);
421 if (!IsEquivalentPhi(input1, input2)) {
422 return false;
423 }
424 }
425 return true;
426 }
427
bool IsEquivalentPhi(Node* phi, ZoneVector<Node*>& inputs) {
429 if (phi->opcode() != IrOpcode::kPhi) return false;
430 if (static_cast<size_t>(phi->op()->ValueInputCount()) != inputs.size()) {
431 return false;
432 }
433 for (size_t i = 0; i < inputs.size(); ++i) {
434 Node* input = NodeProperties::GetValueInput(phi, static_cast<int>(i));
435 if (!IsEquivalentPhi(input, inputs[i])) {
436 return false;
437 }
438 }
439 return true;
440 }
441 } // namespace
442
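// Merges the values collected in cache->fields() into field {i}: if no
// analysis-created Phi exists for the field yet, a new Phi node is built
// (typed as the union of its input types); otherwise the existing Phi's
// value inputs are updated in place.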
bool VirtualObject::MergeFields(size_t i, Node* at, MergeCache* cache,
                                Graph* graph, CommonOperatorBuilder* common) {
445 bool changed = false;
446 int value_input_count = static_cast<int>(cache->fields().size());
447 Node* rep = GetField(i);
448 if (!rep || !IsCreatedPhi(i)) {
449 Type* phi_type = Type::None();
450 for (Node* input : cache->fields()) {
451 CHECK_NOT_NULL(input);
452 CHECK(!input->IsDead());
453 Type* input_type = NodeProperties::GetType(input);
454 phi_type = Type::Union(phi_type, input_type, graph->zone());
455 }
456 Node* control = NodeProperties::GetControlInput(at);
457 cache->fields().push_back(control);
458 Node* phi = graph->NewNode(
459 common->Phi(MachineRepresentation::kTagged, value_input_count),
460 value_input_count + 1, &cache->fields().front());
461 NodeProperties::SetType(phi, phi_type);
462 SetField(i, phi, true);
463
464 #ifdef DEBUG
465 if (FLAG_trace_turbo_escape) {
466 PrintF(" Creating Phi #%d as merge of", phi->id());
467 for (int i = 0; i < value_input_count; i++) {
468 PrintF(" #%d (%s)", cache->fields()[i]->id(),
469 cache->fields()[i]->op()->mnemonic());
470 }
471 PrintF("\n");
472 }
473 #endif
474 changed = true;
475 } else {
476 DCHECK(rep->opcode() == IrOpcode::kPhi);
477 for (int n = 0; n < value_input_count; ++n) {
478 Node* old = NodeProperties::GetValueInput(rep, n);
479 if (old != cache->fields()[n]) {
480 changed = true;
481 NodeProperties::ReplaceValueInput(rep, cache->fields()[n], n);
482 }
483 }
484 }
485 return changed;
486 }
487
bool VirtualObject::MergeFrom(MergeCache* cache, Node* at, Graph* graph,
                              CommonOperatorBuilder* common,
                              bool initialMerge) {
491 DCHECK(at->opcode() == IrOpcode::kEffectPhi ||
492 at->opcode() == IrOpcode::kPhi);
493 bool changed = false;
494 for (size_t i = 0; i < field_count(); ++i) {
495 if (!initialMerge && GetField(i) == nullptr) continue;
496 Node* field = cache->GetFields(i);
497 if (field && !IsCreatedPhi(i)) {
498 changed = changed || GetField(i) != field;
499 SetField(i, field);
500 TRACE(" Field %zu agree on rep #%d\n", i, field->id());
501 } else {
502 size_t arity = at->opcode() == IrOpcode::kEffectPhi
503 ? at->op()->EffectInputCount()
504 : at->op()->ValueInputCount();
505 if (cache->fields().size() == arity) {
506 changed = MergeFields(i, at, cache, graph, common) || changed;
507 } else {
508 if (GetField(i) != nullptr) {
509 TRACE(" Field %zu cleared\n", i);
510 changed = true;
511 }
512 SetField(i, nullptr);
513 }
514 }
515 }
516 return changed;
517 }
518
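// Merges the incoming states in cache->states() into this state, alias by
// alias. A virtual object survives the merge only if every incoming state
// has an object for that alias; otherwise the alias is cleared.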
bool VirtualState::MergeFrom(MergeCache* cache, Zone* zone, Graph* graph,
                             CommonOperatorBuilder* common, Node* at) {
521 DCHECK_GT(cache->states().size(), 0u);
522 bool changed = false;
523 for (Alias alias = 0; alias < size(); ++alias) {
524 cache->objects().clear();
525 VirtualObject* mergeObject = VirtualObjectFromAlias(alias);
526 bool copy_merge_object = false;
527 size_t fields = std::numeric_limits<size_t>::max();
528 for (VirtualState* state : cache->states()) {
529 if (VirtualObject* obj = state->VirtualObjectFromAlias(alias)) {
530 cache->objects().push_back(obj);
531 if (mergeObject == obj) {
532 copy_merge_object = true;
533 }
534 fields = std::min(obj->field_count(), fields);
535 }
536 }
537 if (cache->objects().size() == cache->states().size() &&
538 (mergeObject || !initialized_.Contains(alias))) {
539 bool initialMerge = false;
540 if (!mergeObject) {
541 initialMerge = true;
542 VirtualObject* obj = new (zone)
543 VirtualObject(cache->objects().front()->id(), this, zone, fields,
544 cache->objects().front()->IsInitialized());
545 SetVirtualObject(alias, obj);
546 mergeObject = obj;
547 changed = true;
548 } else if (copy_merge_object) {
549 VirtualObject* obj = new (zone) VirtualObject(this, *mergeObject);
550 SetVirtualObject(alias, obj);
551 mergeObject = obj;
552 changed = true;
553 } else {
554 changed = mergeObject->ResizeFields(fields) || changed;
555 }
556 #ifdef DEBUG
557 if (FLAG_trace_turbo_escape) {
558 PrintF(" Alias @%d, merging into %p virtual objects", alias,
559 static_cast<void*>(mergeObject));
560 for (size_t i = 0; i < cache->objects().size(); i++) {
561 PrintF(" %p", static_cast<void*>(cache->objects()[i]));
562 }
563 PrintF("\n");
564 }
565 #endif // DEBUG
566 changed =
567 mergeObject->MergeFrom(cache, at, graph, common, initialMerge) ||
568 changed;
569 } else {
570 if (mergeObject) {
571 TRACE(" Alias %d, virtual object removed\n", alias);
572 changed = true;
573 }
574 SetVirtualObject(alias, nullptr);
575 }
576 }
577 return changed;
578 }
579
EscapeStatusAnalysis::EscapeStatusAnalysis(EscapeAnalysis* object_analysis,
                                           Graph* graph, Zone* zone)
582 : stack_(zone),
583 object_analysis_(object_analysis),
584 graph_(graph),
585 status_(zone),
586 next_free_alias_(0),
587 status_stack_(zone),
588 aliases_(zone) {}
589
bool EscapeStatusAnalysis::HasEntry(Node* node) {
591 return status_[node->id()] & (kTracked | kEscaped);
592 }
593
bool EscapeStatusAnalysis::IsVirtual(Node* node) {
595 return IsVirtual(node->id());
596 }
597
bool EscapeStatusAnalysis::IsVirtual(NodeId id) {
599 return (status_[id] & kTracked) && !(status_[id] & kEscaped);
600 }
601
bool EscapeStatusAnalysis::IsEscaped(Node* node) {
603 return status_[node->id()] & kEscaped;
604 }
605
bool EscapeStatusAnalysis::IsAllocation(Node* node) {
607 return node->opcode() == IrOpcode::kAllocate ||
608 node->opcode() == IrOpcode::kFinishRegion;
609 }
610
bool EscapeStatusAnalysis::SetEscaped(Node* node) {
612 bool changed = !(status_[node->id()] & kEscaped);
613 status_[node->id()] |= kEscaped | kTracked;
614 return changed;
615 }
616
bool EscapeStatusAnalysis::IsInQueue(NodeId id) {
618 return status_[id] & kInQueue;
619 }
620
void EscapeStatusAnalysis::SetInQueue(NodeId id, bool on_stack) {
622 if (on_stack) {
623 status_[id] |= kInQueue;
624 } else {
625 status_[id] &= ~kInQueue;
626 }
627 }
628
void EscapeStatusAnalysis::ResizeStatusVector() {
630 if (status_.size() <= graph()->NodeCount()) {
631 status_.resize(graph()->NodeCount() * 1.1, kUnknown);
632 }
633 }
634
size_t EscapeStatusAnalysis::GetStatusVectorSize() { return status_.size(); }
636
void EscapeStatusAnalysis::RunStatusAnalysis() {
638 ResizeStatusVector();
639 while (!status_stack_.empty()) {
640 Node* node = status_stack_.back();
641 status_stack_.pop_back();
642 status_[node->id()] &= ~kOnStack;
643 Process(node);
644 status_[node->id()] |= kVisited;
645 }
646 }
647
void EscapeStatusAnalysis::EnqueueForStatusAnalysis(Node* node) {
649 DCHECK_NOT_NULL(node);
650 if (!(status_[node->id()] & kOnStack)) {
651 status_stack_.push_back(node);
652 status_[node->id()] |= kOnStack;
653 }
654 }
655
void EscapeStatusAnalysis::RevisitInputs(Node* node) {
657 for (Edge edge : node->input_edges()) {
658 Node* input = edge.to();
659 if (!(status_[input->id()] & kOnStack)) {
660 status_stack_.push_back(input);
661 status_[input->id()] |= kOnStack;
662 }
663 }
664 }
665
void EscapeStatusAnalysis::RevisitUses(Node* node) {
667 for (Edge edge : node->use_edges()) {
668 Node* use = edge.from();
669 if (!(status_[use->id()] & kOnStack) && !IsNotReachable(use)) {
670 status_stack_.push_back(use);
671 status_[use->id()] |= kOnStack;
672 }
673 }
674 }
675
void EscapeStatusAnalysis::Process(Node* node) {
677 switch (node->opcode()) {
678 case IrOpcode::kAllocate:
679 ProcessAllocate(node);
680 break;
681 case IrOpcode::kFinishRegion:
682 ProcessFinishRegion(node);
683 break;
684 case IrOpcode::kStoreField:
685 ProcessStoreField(node);
686 break;
687 case IrOpcode::kStoreElement:
688 ProcessStoreElement(node);
689 break;
690 case IrOpcode::kLoadField:
691 case IrOpcode::kLoadElement: {
692 if (Node* rep = object_analysis_->GetReplacement(node)) {
693 if (IsAllocation(rep) && CheckUsesForEscape(node, rep)) {
694 RevisitInputs(rep);
695 RevisitUses(rep);
696 }
697 } else {
698 Node* from = NodeProperties::GetValueInput(node, 0);
699 from = object_analysis_->ResolveReplacement(from);
700 if (SetEscaped(from)) {
701 TRACE("Setting #%d (%s) to escaped because of unresolved load #%i\n",
702 from->id(), from->op()->mnemonic(), node->id());
703 RevisitInputs(from);
704 RevisitUses(from);
705 }
706 }
707 RevisitUses(node);
708 break;
709 }
710 case IrOpcode::kPhi:
711 if (!HasEntry(node)) {
712 status_[node->id()] |= kTracked;
713 RevisitUses(node);
714 }
715 if (!IsAllocationPhi(node) && SetEscaped(node)) {
716 RevisitInputs(node);
717 RevisitUses(node);
718 }
719 CheckUsesForEscape(node);
720 default:
721 break;
722 }
723 }
724
bool EscapeStatusAnalysis::IsAllocationPhi(Node* node) {
726 for (Edge edge : node->input_edges()) {
727 Node* input = edge.to();
728 if (input->opcode() == IrOpcode::kPhi && !IsEscaped(input)) continue;
729 if (IsAllocation(input)) continue;
730 return false;
731 }
732 return true;
733 }
734
void EscapeStatusAnalysis::ProcessStoreField(Node* node) {
736 DCHECK_EQ(node->opcode(), IrOpcode::kStoreField);
737 Node* to = NodeProperties::GetValueInput(node, 0);
738 Node* val = NodeProperties::GetValueInput(node, 1);
739 if ((IsEscaped(to) || !IsAllocation(to)) && SetEscaped(val)) {
740 RevisitUses(val);
741 RevisitInputs(val);
742 TRACE("Setting #%d (%s) to escaped because of store to field of #%d\n",
743 val->id(), val->op()->mnemonic(), to->id());
744 }
745 }
746
void EscapeStatusAnalysis::ProcessStoreElement(Node* node) {
748 DCHECK_EQ(node->opcode(), IrOpcode::kStoreElement);
749 Node* to = NodeProperties::GetValueInput(node, 0);
750 Node* val = NodeProperties::GetValueInput(node, 2);
751 if ((IsEscaped(to) || !IsAllocation(to)) && SetEscaped(val)) {
752 RevisitUses(val);
753 RevisitInputs(val);
    TRACE("Setting #%d (%s) to escaped because of store to element of #%d\n",
          val->id(), val->op()->mnemonic(), to->id());
756 }
757 }
758
void EscapeStatusAnalysis::ProcessAllocate(Node* node) {
760 DCHECK_EQ(node->opcode(), IrOpcode::kAllocate);
761 if (!HasEntry(node)) {
762 status_[node->id()] |= kTracked;
763 TRACE("Created status entry for node #%d (%s)\n", node->id(),
764 node->op()->mnemonic());
765 NumberMatcher size(node->InputAt(0));
766 DCHECK(node->InputAt(0)->opcode() != IrOpcode::kInt32Constant &&
767 node->InputAt(0)->opcode() != IrOpcode::kInt64Constant &&
768 node->InputAt(0)->opcode() != IrOpcode::kFloat32Constant &&
769 node->InputAt(0)->opcode() != IrOpcode::kFloat64Constant);
770 RevisitUses(node);
771 if (!size.HasValue() && SetEscaped(node)) {
772 TRACE("Setting #%d to escaped because of non-const alloc\n", node->id());
773 // This node is already known to escape, uses do not have to be checked
774 // for escape.
775 return;
776 }
777 }
778 if (CheckUsesForEscape(node, true)) {
779 RevisitUses(node);
780 }
781 }
782
bool EscapeStatusAnalysis::CheckUsesForEscape(Node* uses, Node* rep,
                                              bool phi_escaping) {
785 for (Edge edge : uses->use_edges()) {
786 Node* use = edge.from();
787 if (IsNotReachable(use)) continue;
788 if (edge.index() >= use->op()->ValueInputCount() +
789 OperatorProperties::GetContextInputCount(use->op()))
790 continue;
791 switch (use->opcode()) {
792 case IrOpcode::kPhi:
793 if (phi_escaping && SetEscaped(rep)) {
794 TRACE(
795 "Setting #%d (%s) to escaped because of use by phi node "
796 "#%d (%s)\n",
797 rep->id(), rep->op()->mnemonic(), use->id(),
798 use->op()->mnemonic());
799 return true;
800 }
801 // Fallthrough.
802 case IrOpcode::kStoreField:
803 case IrOpcode::kLoadField:
804 case IrOpcode::kStoreElement:
805 case IrOpcode::kLoadElement:
806 case IrOpcode::kFrameState:
807 case IrOpcode::kStateValues:
808 case IrOpcode::kReferenceEqual:
809 case IrOpcode::kFinishRegion:
810 if (IsEscaped(use) && SetEscaped(rep)) {
811 TRACE(
812 "Setting #%d (%s) to escaped because of use by escaping node "
813 "#%d (%s)\n",
814 rep->id(), rep->op()->mnemonic(), use->id(),
815 use->op()->mnemonic());
816 return true;
817 }
818 break;
819 case IrOpcode::kObjectIsSmi:
820 if (!IsAllocation(rep) && SetEscaped(rep)) {
821 TRACE("Setting #%d (%s) to escaped because of use by #%d (%s)\n",
822 rep->id(), rep->op()->mnemonic(), use->id(),
823 use->op()->mnemonic());
824 return true;
825 }
826 break;
827 case IrOpcode::kSelect:
828 // TODO(mstarzinger): The following list of operators will eventually be
829 // handled by the EscapeAnalysisReducer (similar to ObjectIsSmi).
830 case IrOpcode::kConvertTaggedHoleToUndefined:
831 case IrOpcode::kStringEqual:
832 case IrOpcode::kStringLessThan:
833 case IrOpcode::kStringLessThanOrEqual:
834 case IrOpcode::kTypeGuard:
835 case IrOpcode::kPlainPrimitiveToNumber:
836 case IrOpcode::kPlainPrimitiveToWord32:
837 case IrOpcode::kPlainPrimitiveToFloat64:
838 case IrOpcode::kStringCharAt:
839 case IrOpcode::kStringCharCodeAt:
840 case IrOpcode::kStringIndexOf:
841 case IrOpcode::kObjectIsDetectableCallable:
842 case IrOpcode::kObjectIsNonCallable:
843 case IrOpcode::kObjectIsNumber:
844 case IrOpcode::kObjectIsReceiver:
845 case IrOpcode::kObjectIsString:
846 case IrOpcode::kObjectIsUndetectable:
847 if (SetEscaped(rep)) {
848 TRACE("Setting #%d (%s) to escaped because of use by #%d (%s)\n",
849 rep->id(), rep->op()->mnemonic(), use->id(),
850 use->op()->mnemonic());
851 return true;
852 }
853 break;
854 default:
855 if (use->op()->EffectInputCount() == 0 &&
856 uses->op()->EffectInputCount() > 0 &&
857 !IrOpcode::IsJsOpcode(use->opcode())) {
858 V8_Fatal(__FILE__, __LINE__,
859 "Encountered unaccounted use by #%d (%s)\n", use->id(),
860 use->op()->mnemonic());
861 }
862 if (SetEscaped(rep)) {
863 TRACE("Setting #%d (%s) to escaped because of use by #%d (%s)\n",
864 rep->id(), rep->op()->mnemonic(), use->id(),
865 use->op()->mnemonic());
866 return true;
867 }
868 }
869 }
870 return false;
871 }
872
void EscapeStatusAnalysis::ProcessFinishRegion(Node* node) {
874 DCHECK_EQ(node->opcode(), IrOpcode::kFinishRegion);
875 if (!HasEntry(node)) {
876 status_[node->id()] |= kTracked;
877 RevisitUses(node);
878 }
879 if (CheckUsesForEscape(node, true)) {
880 RevisitInputs(node);
881 RevisitUses(node);
882 }
883 }
884
void EscapeStatusAnalysis::DebugPrint() {
886 for (NodeId id = 0; id < status_.size(); id++) {
887 if (status_[id] & kTracked) {
888 PrintF("Node #%d is %s\n", id,
889 (status_[id] & kEscaped) ? "escaping" : "virtual");
890 }
891 }
892 }
893
EscapeAnalysis::EscapeAnalysis(Graph* graph, CommonOperatorBuilder* common,
                               Zone* zone)
896 : zone_(zone),
897 slot_not_analyzed_(graph->NewNode(common->NumberConstant(0x1c0debad))),
898 common_(common),
899 status_analysis_(new (zone) EscapeStatusAnalysis(this, graph, zone)),
900 virtual_states_(zone),
901 replacements_(zone),
902 cycle_detection_(zone),
903 cache_(nullptr) {
904 // Type slot_not_analyzed_ manually.
905 double v = OpParameter<double>(slot_not_analyzed_);
906 NodeProperties::SetType(slot_not_analyzed_, Type::Range(v, v, zone));
907 }
908
EscapeAnalysis::~EscapeAnalysis() {}
910
bool EscapeAnalysis::Run() {
912 replacements_.resize(graph()->NodeCount());
913 status_analysis_->AssignAliases();
914 if (status_analysis_->AliasCount() > 0) {
915 cache_ = new (zone()) MergeCache(zone());
916 replacements_.resize(graph()->NodeCount());
917 status_analysis_->ResizeStatusVector();
918 RunObjectAnalysis();
919 status_analysis_->RunStatusAnalysis();
920 return true;
921 } else {
922 return false;
923 }
924 }
925
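// Walks the graph backwards from end and assigns a dense alias index to
// every reachable allocation (and the FinishRegion wrapping it). All other
// reachable nodes become kUntrackable; unreachable nodes keep kNotReachable.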
void EscapeStatusAnalysis::AssignAliases() {
927 size_t max_size = 1024;
928 size_t min_size = 32;
929 size_t stack_size =
930 std::min(std::max(graph()->NodeCount() / 5, min_size), max_size);
931 stack_.reserve(stack_size);
932 ResizeStatusVector();
933 stack_.push_back(graph()->end());
934 CHECK_LT(graph()->NodeCount(), kUntrackable);
935 aliases_.resize(graph()->NodeCount(), kNotReachable);
936 aliases_[graph()->end()->id()] = kUntrackable;
937 status_stack_.reserve(8);
938 TRACE("Discovering trackable nodes");
939 while (!stack_.empty()) {
940 Node* node = stack_.back();
941 stack_.pop_back();
942 switch (node->opcode()) {
943 case IrOpcode::kAllocate:
944 if (aliases_[node->id()] >= kUntrackable) {
945 aliases_[node->id()] = NextAlias();
946 TRACE(" @%d:%s#%u", aliases_[node->id()], node->op()->mnemonic(),
947 node->id());
948 EnqueueForStatusAnalysis(node);
949 }
950 break;
951 case IrOpcode::kFinishRegion: {
952 Node* allocate = NodeProperties::GetValueInput(node, 0);
953 DCHECK_NOT_NULL(allocate);
954 if (allocate->opcode() == IrOpcode::kAllocate) {
955 if (aliases_[allocate->id()] >= kUntrackable) {
956 if (aliases_[allocate->id()] == kNotReachable) {
957 stack_.push_back(allocate);
958 }
959 aliases_[allocate->id()] = NextAlias();
960 TRACE(" @%d:%s#%u", aliases_[allocate->id()],
961 allocate->op()->mnemonic(), allocate->id());
962 EnqueueForStatusAnalysis(allocate);
963 }
964 aliases_[node->id()] = aliases_[allocate->id()];
965 TRACE(" @%d:%s#%u", aliases_[node->id()], node->op()->mnemonic(),
966 node->id());
967 }
968 break;
969 }
970 default:
971 DCHECK_EQ(aliases_[node->id()], kUntrackable);
972 break;
973 }
974 for (Edge edge : node->input_edges()) {
975 Node* input = edge.to();
976 if (aliases_[input->id()] == kNotReachable) {
977 stack_.push_back(input);
978 aliases_[input->id()] = kUntrackable;
979 }
980 }
981 }
982 TRACE("\n");
983 }
984
bool EscapeStatusAnalysis::IsNotReachable(Node* node) {
986 if (node->id() >= aliases_.size()) {
987 return false;
988 }
989 return aliases_[node->id()] == kNotReachable;
990 }
991
void EscapeAnalysis::RunObjectAnalysis() {
993 virtual_states_.resize(graph()->NodeCount());
994 ZoneDeque<Node*> queue(zone());
995 queue.push_back(graph()->start());
996 ZoneVector<Node*> danglers(zone());
997 while (!queue.empty()) {
998 Node* node = queue.back();
999 queue.pop_back();
1000 status_analysis_->SetInQueue(node->id(), false);
1001 if (Process(node)) {
1002 for (Edge edge : node->use_edges()) {
1003 Node* use = edge.from();
1004 if (status_analysis_->IsNotReachable(use)) {
1005 continue;
1006 }
1007 if (NodeProperties::IsEffectEdge(edge)) {
          // Iteration order: depth first, but delay phis.
          // We need DFS to avoid some duplication of VirtualStates and
          // VirtualObjects, and we want to delay phis to improve performance.
1011 if (use->opcode() == IrOpcode::kEffectPhi) {
1012 if (!status_analysis_->IsInQueue(use->id())) {
1013 status_analysis_->SetInQueue(use->id(), true);
1014 queue.push_front(use);
1015 }
1016 } else if ((use->opcode() != IrOpcode::kLoadField &&
1017 use->opcode() != IrOpcode::kLoadElement) ||
1018 !status_analysis_->IsDanglingEffectNode(use)) {
1019 if (!status_analysis_->IsInQueue(use->id())) {
1020 status_analysis_->SetInQueue(use->id(), true);
1021 queue.push_back(use);
1022 }
1023 } else {
1024 danglers.push_back(use);
1025 }
1026 }
1027 }
1028 // Danglers need to be processed immediately, even if they are
1029 // on the stack. Since they do not have effect outputs,
1030 // we don't have to track whether they are on the stack.
1031 queue.insert(queue.end(), danglers.begin(), danglers.end());
1032 danglers.clear();
1033 }
1034 }
1035 #ifdef DEBUG
1036 if (FLAG_trace_turbo_escape) {
1037 DebugPrint();
1038 }
1039 #endif
1040 }
1041
bool EscapeStatusAnalysis::IsDanglingEffectNode(Node* node) {
1043 if (status_[node->id()] & kDanglingComputed) {
1044 return status_[node->id()] & kDangling;
1045 }
1046 if (node->op()->EffectInputCount() == 0 ||
1047 node->op()->EffectOutputCount() == 0 ||
1048 (node->op()->EffectInputCount() == 1 &&
1049 NodeProperties::GetEffectInput(node)->opcode() == IrOpcode::kStart)) {
1050 // The start node is used as sentinel for nodes that are in general
1051 // effectful, but of which an analysis has determined that they do not
1052 // produce effects in this instance. We don't consider these nodes dangling.
1053 status_[node->id()] |= kDanglingComputed;
1054 return false;
1055 }
1056 for (Edge edge : node->use_edges()) {
1057 Node* use = edge.from();
1058 if (aliases_[use->id()] == kNotReachable) continue;
1059 if (NodeProperties::IsEffectEdge(edge)) {
1060 status_[node->id()] |= kDanglingComputed;
1061 return false;
1062 }
1063 }
1064 status_[node->id()] |= kDanglingComputed | kDangling;
1065 return true;
1066 }
1067
bool EscapeStatusAnalysis::IsEffectBranchPoint(Node* node) {
1069 if (status_[node->id()] & kBranchPointComputed) {
1070 return status_[node->id()] & kBranchPoint;
1071 }
1072 int count = 0;
1073 for (Edge edge : node->use_edges()) {
1074 Node* use = edge.from();
1075 if (aliases_[use->id()] == kNotReachable) continue;
1076 if (NodeProperties::IsEffectEdge(edge)) {
1077 if ((use->opcode() == IrOpcode::kLoadField ||
1078 use->opcode() == IrOpcode::kLoadElement ||
1079 use->opcode() == IrOpcode::kLoad) &&
1080 IsDanglingEffectNode(use))
1081 continue;
1082 if (++count > 1) {
1083 status_[node->id()] |= kBranchPointComputed | kBranchPoint;
1084 return true;
1085 }
1086 }
1087 }
1088 status_[node->id()] |= kBranchPointComputed;
1089 return false;
1090 }
1091
1092 namespace {
1093
bool HasFrameStateInput(const Operator* op) {
1095 if (op->opcode() == IrOpcode::kCall || op->opcode() == IrOpcode::kTailCall) {
1096 const CallDescriptor* d = CallDescriptorOf(op);
1097 return d->NeedsFrameState();
1098 } else {
1099 return OperatorProperties::HasFrameStateInput(op);
1100 }
1101 }
1102
1103 } // namespace
1104
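// Dispatches on the opcode and updates the virtual state for this node.
// Nodes that take a frame state additionally mark all tracked objects as
// copy-required, since deoptimization may observe their current contents.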
bool EscapeAnalysis::Process(Node* node) {
1106 switch (node->opcode()) {
1107 case IrOpcode::kAllocate:
1108 ProcessAllocation(node);
1109 break;
1110 case IrOpcode::kBeginRegion:
1111 ForwardVirtualState(node);
1112 break;
1113 case IrOpcode::kFinishRegion:
1114 ProcessFinishRegion(node);
1115 break;
1116 case IrOpcode::kStoreField:
1117 ProcessStoreField(node);
1118 break;
1119 case IrOpcode::kLoadField:
1120 ProcessLoadField(node);
1121 break;
1122 case IrOpcode::kStoreElement:
1123 ProcessStoreElement(node);
1124 break;
1125 case IrOpcode::kLoadElement:
1126 ProcessLoadElement(node);
1127 break;
1128 case IrOpcode::kStart:
1129 ProcessStart(node);
1130 break;
1131 case IrOpcode::kEffectPhi:
1132 return ProcessEffectPhi(node);
1133 break;
1134 default:
1135 if (node->op()->EffectInputCount() > 0) {
1136 ForwardVirtualState(node);
1137 }
1138 ProcessAllocationUsers(node);
1139 break;
1140 }
1141 if (HasFrameStateInput(node->op())) {
1142 virtual_states_[node->id()]->SetCopyRequired();
1143 }
1144 return true;
1145 }
1146
void EscapeAnalysis::ProcessAllocationUsers(Node* node) {
1148 for (Edge edge : node->input_edges()) {
1149 Node* input = edge.to();
1150 Node* use = edge.from();
1151 if (edge.index() >= use->op()->ValueInputCount() +
1152 OperatorProperties::GetContextInputCount(use->op()))
1153 continue;
1154 switch (node->opcode()) {
1155 case IrOpcode::kStoreField:
1156 case IrOpcode::kLoadField:
1157 case IrOpcode::kStoreElement:
1158 case IrOpcode::kLoadElement:
1159 case IrOpcode::kFrameState:
1160 case IrOpcode::kStateValues:
1161 case IrOpcode::kReferenceEqual:
1162 case IrOpcode::kFinishRegion:
1163 case IrOpcode::kObjectIsSmi:
1164 break;
1165 default:
1166 VirtualState* state = virtual_states_[node->id()];
1167 if (VirtualObject* obj =
1168 GetVirtualObject(state, ResolveReplacement(input))) {
1169 if (!obj->AllFieldsClear()) {
1170 obj = CopyForModificationAt(obj, state, node);
1171 obj->ClearAllFields();
1172 TRACE("Cleared all fields of @%d:#%d\n",
1173 status_analysis_->GetAlias(obj->id()), obj->id());
1174 }
1175 }
1176 break;
1177 }
1178 }
1179 }
1180
VirtualState* EscapeAnalysis::CopyForModificationAt(VirtualState* state,
                                                    Node* node) {
1183 if (state->owner() != node) {
1184 VirtualState* new_state = new (zone()) VirtualState(node, *state);
1185 virtual_states_[node->id()] = new_state;
1186 TRACE("Copying virtual state %p to new state %p at node %s#%d\n",
1187 static_cast<void*>(state), static_cast<void*>(new_state),
1188 node->op()->mnemonic(), node->id());
1189 return new_state;
1190 }
1191 return state;
1192 }
1193
VirtualObject* EscapeAnalysis::CopyForModificationAt(VirtualObject* obj,
                                                     VirtualState* state,
                                                     Node* node) {
1197 if (obj->NeedCopyForModification()) {
1198 state = CopyForModificationAt(state, node);
1199 // TODO(tebbi): this copies the complete virtual state. Replace with a more
1200 // precise analysis of which objects are actually affected by the change.
1201 Alias changed_alias = status_analysis_->GetAlias(obj->id());
1202 for (Alias alias = 0; alias < state->size(); ++alias) {
1203 if (VirtualObject* next_obj = state->VirtualObjectFromAlias(alias)) {
1204 if (alias != changed_alias && next_obj->NeedCopyForModification()) {
1205 state->Copy(next_obj, alias);
1206 }
1207 }
1208 }
1209 return state->Copy(obj, changed_alias);
1210 }
1211 return obj;
1212 }
1213
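// Propagates the virtual state along the effect input of {node}. States are
// shared where possible; at effect branch points the state is merely marked
// copy-required, and actual copies happen lazily on first modification.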
void EscapeAnalysis::ForwardVirtualState(Node* node) {
1215 DCHECK_EQ(node->op()->EffectInputCount(), 1);
1216 #ifdef DEBUG
1217 if (node->opcode() != IrOpcode::kLoadField &&
1218 node->opcode() != IrOpcode::kLoadElement &&
1219 node->opcode() != IrOpcode::kLoad &&
1220 status_analysis_->IsDanglingEffectNode(node)) {
    PrintF("Dangling effect node: #%d (%s)\n", node->id(),
           node->op()->mnemonic());
1223 UNREACHABLE();
1224 }
1225 #endif // DEBUG
1226 Node* effect = NodeProperties::GetEffectInput(node);
1227 DCHECK_NOT_NULL(virtual_states_[effect->id()]);
1228 if (virtual_states_[node->id()]) {
1229 virtual_states_[node->id()]->UpdateFrom(virtual_states_[effect->id()],
1230 zone());
1231 } else {
1232 virtual_states_[node->id()] = virtual_states_[effect->id()];
1233 TRACE("Forwarding object state %p from %s#%d to %s#%d",
1234 static_cast<void*>(virtual_states_[effect->id()]),
1235 effect->op()->mnemonic(), effect->id(), node->op()->mnemonic(),
1236 node->id());
1237 if (status_analysis_->IsEffectBranchPoint(effect)) {
1238 virtual_states_[node->id()]->SetCopyRequired();
1239 TRACE(", effect input %s#%d is branch point", effect->op()->mnemonic(),
1240 effect->id());
1241 }
1242 TRACE("\n");
1243 }
1244 }
1245
void EscapeAnalysis::ProcessStart(Node* node) {
1247 DCHECK_EQ(node->opcode(), IrOpcode::kStart);
1248 virtual_states_[node->id()] =
1249 new (zone()) VirtualState(node, zone(), status_analysis_->AliasCount());
1250 }
1251
bool EscapeAnalysis::ProcessEffectPhi(Node* node) {
1253 DCHECK_EQ(node->opcode(), IrOpcode::kEffectPhi);
1254 bool changed = false;
1255
1256 VirtualState* mergeState = virtual_states_[node->id()];
1257 if (!mergeState) {
1258 mergeState =
1259 new (zone()) VirtualState(node, zone(), status_analysis_->AliasCount());
1260 virtual_states_[node->id()] = mergeState;
1261 changed = true;
1262 TRACE("Effect Phi #%d got new virtual state %p.\n", node->id(),
1263 static_cast<void*>(mergeState));
1264 }
1265
1266 cache_->Clear();
1267
1268 TRACE("At Effect Phi #%d, merging states into %p:", node->id(),
1269 static_cast<void*>(mergeState));
1270
1271 for (int i = 0; i < node->op()->EffectInputCount(); ++i) {
1272 Node* input = NodeProperties::GetEffectInput(node, i);
1273 VirtualState* state = virtual_states_[input->id()];
1274 if (state) {
1275 cache_->states().push_back(state);
1276 if (state == mergeState) {
1277 mergeState = new (zone())
1278 VirtualState(node, zone(), status_analysis_->AliasCount());
1279 virtual_states_[node->id()] = mergeState;
1280 changed = true;
1281 }
1282 }
1283 TRACE(" %p (from %d %s)", static_cast<void*>(state), input->id(),
1284 input->op()->mnemonic());
1285 }
1286 TRACE("\n");
1287
1288 if (cache_->states().size() == 0) {
1289 return changed;
1290 }
1291
1292 changed =
1293 mergeState->MergeFrom(cache_, zone(), graph(), common(), node) || changed;
1294
1295 TRACE("Merge %s the node.\n", changed ? "changed" : "did not change");
1296
1297 if (changed) {
1298 status_analysis_->ResizeStatusVector();
1299 }
1300 return changed;
1301 }
1302
void EscapeAnalysis::ProcessAllocation(Node* node) {
1304 DCHECK_EQ(node->opcode(), IrOpcode::kAllocate);
1305 ForwardVirtualState(node);
1306 VirtualState* state = virtual_states_[node->id()];
1307 Alias alias = status_analysis_->GetAlias(node->id());
1308
1309 // Check if we have already processed this node.
1310 if (state->VirtualObjectFromAlias(alias)) {
1311 return;
1312 }
1313
1314 if (state->owner()->opcode() == IrOpcode::kEffectPhi) {
1315 state = CopyForModificationAt(state, node);
1316 }
1317
1318 NumberMatcher size(node->InputAt(0));
1319 DCHECK(node->InputAt(0)->opcode() != IrOpcode::kInt32Constant &&
1320 node->InputAt(0)->opcode() != IrOpcode::kInt64Constant &&
1321 node->InputAt(0)->opcode() != IrOpcode::kFloat32Constant &&
1322 node->InputAt(0)->opcode() != IrOpcode::kFloat64Constant);
1323 if (size.HasValue()) {
1324 VirtualObject* obj = new (zone()) VirtualObject(
1325 node->id(), state, zone(), size.Value() / kPointerSize, false);
1326 state->SetVirtualObject(alias, obj);
1327 } else {
1328 state->SetVirtualObject(
1329 alias, new (zone()) VirtualObject(node->id(), state, zone()));
1330 }
1331 }
1332
void EscapeAnalysis::ProcessFinishRegion(Node* node) {
1334 DCHECK_EQ(node->opcode(), IrOpcode::kFinishRegion);
1335 ForwardVirtualState(node);
1336 Node* allocation = NodeProperties::GetValueInput(node, 0);
1337 if (allocation->opcode() == IrOpcode::kAllocate) {
1338 VirtualState* state = virtual_states_[node->id()];
1339 VirtualObject* obj =
1340 state->VirtualObjectFromAlias(status_analysis_->GetAlias(node->id()));
1341 DCHECK_NOT_NULL(obj);
1342 obj->SetInitialized();
1343 }
1344 }
1345
Node* EscapeAnalysis::replacement(Node* node) {
1347 if (node->id() >= replacements_.size()) return nullptr;
1348 return replacements_[node->id()];
1349 }
1350
bool EscapeAnalysis::SetReplacement(Node* node, Node* rep) {
1352 bool changed = replacements_[node->id()] != rep;
1353 replacements_[node->id()] = rep;
1354 return changed;
1355 }
1356
bool EscapeAnalysis::UpdateReplacement(VirtualState* state, Node* node,
                                       Node* rep) {
1359 if (SetReplacement(node, rep)) {
1360 if (rep) {
1361 TRACE("Replacement of #%d is #%d (%s)\n", node->id(), rep->id(),
1362 rep->op()->mnemonic());
1363 } else {
1364 TRACE("Replacement of #%d cleared\n", node->id());
1365 }
1366 return true;
1367 }
1368 return false;
1369 }
1370
Node* EscapeAnalysis::ResolveReplacement(Node* node) {
1372 while (replacement(node)) {
1373 node = replacement(node);
1374 }
1375 return node;
1376 }
1377
Node* EscapeAnalysis::GetReplacement(Node* node) {
1379 Node* result = nullptr;
1380 while (replacement(node)) {
1381 node = result = replacement(node);
1382 }
1383 return result;
1384 }
1385
bool EscapeAnalysis::IsVirtual(Node* node) {
1387 if (node->id() >= status_analysis_->GetStatusVectorSize()) {
1388 return false;
1389 }
1390 return status_analysis_->IsVirtual(node);
1391 }
1392
bool EscapeAnalysis::IsEscaped(Node* node) {
1394 if (node->id() >= status_analysis_->GetStatusVectorSize()) {
1395 return false;
1396 }
1397 return status_analysis_->IsEscaped(node);
1398 }
1399
bool EscapeAnalysis::CompareVirtualObjects(Node* left, Node* right) {
1401 DCHECK(IsVirtual(left) && IsVirtual(right));
1402 left = ResolveReplacement(left);
1403 right = ResolveReplacement(right);
1404 if (IsEquivalentPhi(left, right)) {
1405 return true;
1406 }
1407 return false;
1408 }
1409
1410 namespace {
1411
bool IsOffsetForFieldAccessCorrect(const FieldAccess& access) {
1413 #if V8_TARGET_LITTLE_ENDIAN
1414 return (access.offset % kPointerSize) == 0;
1415 #else
1416 return ((access.offset +
1417 (1 << ElementSizeLog2Of(access.machine_type.representation()))) %
1418 kPointerSize) == 0;
1419 #endif
1420 }
1421
int OffsetForFieldAccess(Node* node) {
1423 FieldAccess access = FieldAccessOf(node->op());
1424 DCHECK(IsOffsetForFieldAccessCorrect(access));
1425 return access.offset / kPointerSize;
1426 }
1427
int OffsetForElementAccess(Node* node, int index) {
1429 ElementAccess access = ElementAccessOf(node->op());
1430 DCHECK_GE(ElementSizeLog2Of(access.machine_type.representation()),
1431 kPointerSizeLog2);
1432 DCHECK_EQ(access.header_size % kPointerSize, 0);
1433 return access.header_size / kPointerSize + index;
1434 }
1435
1436 } // namespace
1437
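// Handles a load whose object input is a Phi of tracked allocations: if the
// field is known in every incoming object, the load is replaced by a Phi of
// those field values (creating the Phi if necessary).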
void EscapeAnalysis::ProcessLoadFromPhi(int offset, Node* from, Node* load,
                                        VirtualState* state) {
1440 TRACE("Load #%d from phi #%d", load->id(), from->id());
1441
1442 cache_->fields().clear();
1443 for (int i = 0; i < load->op()->ValueInputCount(); ++i) {
1444 Node* input = NodeProperties::GetValueInput(load, i);
1445 cache_->fields().push_back(input);
1446 }
1447
1448 cache_->LoadVirtualObjectsForFieldsFrom(state,
1449 status_analysis_->GetAliasMap());
1450 if (cache_->objects().size() == cache_->fields().size()) {
1451 cache_->GetFields(offset);
1452 if (cache_->fields().size() == cache_->objects().size()) {
1453 Node* rep = replacement(load);
1454 if (!rep || !IsEquivalentPhi(rep, cache_->fields())) {
1455 int value_input_count = static_cast<int>(cache_->fields().size());
1456 Type* phi_type = Type::None();
1457 for (Node* input : cache_->fields()) {
1458 Type* input_type = NodeProperties::GetType(input);
1459 phi_type = Type::Union(phi_type, input_type, graph()->zone());
1460 }
1461 cache_->fields().push_back(NodeProperties::GetControlInput(from));
1462 Node* phi = graph()->NewNode(
1463 common()->Phi(MachineRepresentation::kTagged, value_input_count),
1464 value_input_count + 1, &cache_->fields().front());
1465 NodeProperties::SetType(phi, phi_type);
1466 status_analysis_->ResizeStatusVector();
1467 SetReplacement(load, phi);
1468 TRACE(" got phi created.\n");
1469 } else {
1470 TRACE(" has already phi #%d.\n", rep->id());
1471 }
1472 } else {
1473 TRACE(" has incomplete field info.\n");
1474 }
1475 } else {
1476 TRACE(" has incomplete virtual object info.\n");
1477 }
1478 }
1479
void EscapeAnalysis::ProcessLoadField(Node* node) {
1481 DCHECK_EQ(node->opcode(), IrOpcode::kLoadField);
1482 ForwardVirtualState(node);
1483 Node* from = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
1484 VirtualState* state = virtual_states_[node->id()];
1485 if (VirtualObject* object = GetVirtualObject(state, from)) {
1486 if (!object->IsTracked()) return;
1487 int offset = OffsetForFieldAccess(node);
1488 if (static_cast<size_t>(offset) >= object->field_count()) {
1489 // We have a load from a field that is not inside the {object}. This
1490 // can only happen with conflicting type feedback and for dead {node}s.
1491 // For now, we just mark the {object} as escaping.
1492 // TODO(turbofan): Consider introducing an Undefined or None operator
1493 // that we can replace this load with, since we know it's dead code.
1494 if (status_analysis_->SetEscaped(from)) {
1495 TRACE(
1496 "Setting #%d (%s) to escaped because load field #%d from "
1497 "offset %d outside of object\n",
1498 from->id(), from->op()->mnemonic(), node->id(), offset);
1499 }
1500 return;
1501 }
1502 Node* value = object->GetField(offset);
1503 if (value) {
1504 value = ResolveReplacement(value);
1505 }
1506 // Record that the load has this alias.
1507 UpdateReplacement(state, node, value);
1508 } else if (from->opcode() == IrOpcode::kPhi &&
1509 IsOffsetForFieldAccessCorrect(FieldAccessOf(node->op()))) {
1510 int offset = OffsetForFieldAccess(node);
1511 // Only binary phis are supported for now.
1512 ProcessLoadFromPhi(offset, from, node, state);
1513 } else {
1514 UpdateReplacement(state, node, nullptr);
1515 }
1516 }
1517
void EscapeAnalysis::ProcessLoadElement(Node* node) {
1519 DCHECK_EQ(node->opcode(), IrOpcode::kLoadElement);
1520 ForwardVirtualState(node);
1521 Node* from = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
1522 VirtualState* state = virtual_states_[node->id()];
1523 Node* index_node = node->InputAt(1);
1524 NumberMatcher index(index_node);
1525 DCHECK(index_node->opcode() != IrOpcode::kInt32Constant &&
1526 index_node->opcode() != IrOpcode::kInt64Constant &&
1527 index_node->opcode() != IrOpcode::kFloat32Constant &&
1528 index_node->opcode() != IrOpcode::kFloat64Constant);
1529 if (index.HasValue()) {
1530 if (VirtualObject* object = GetVirtualObject(state, from)) {
1531 if (!object->IsTracked()) return;
1532 int offset = OffsetForElementAccess(node, index.Value());
1533 if (static_cast<size_t>(offset) >= object->field_count()) return;
1534 Node* value = object->GetField(offset);
1535 if (value) {
1536 value = ResolveReplacement(value);
1537 }
1538 // Record that the load has this alias.
1539 UpdateReplacement(state, node, value);
1540 } else if (from->opcode() == IrOpcode::kPhi) {
1541 int offset = OffsetForElementAccess(node, index.Value());
1542 ProcessLoadFromPhi(offset, from, node, state);
1543 } else {
1544 UpdateReplacement(state, node, nullptr);
1545 }
1546 } else {
1547 // We have a load from a non-const index, cannot eliminate object.
1548 if (status_analysis_->SetEscaped(from)) {
1549 TRACE(
1550 "Setting #%d (%s) to escaped because load element #%d from non-const "
1551 "index #%d (%s)\n",
1552 from->id(), from->op()->mnemonic(), node->id(), index_node->id(),
1553 index_node->op()->mnemonic());
1554 }
1555 }
1556 }
1557
void EscapeAnalysis::ProcessStoreField(Node* node) {
1559 DCHECK_EQ(node->opcode(), IrOpcode::kStoreField);
1560 ForwardVirtualState(node);
1561 Node* to = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
1562 VirtualState* state = virtual_states_[node->id()];
1563 if (VirtualObject* object = GetVirtualObject(state, to)) {
1564 if (!object->IsTracked()) return;
1565 int offset = OffsetForFieldAccess(node);
1566 if (static_cast<size_t>(offset) >= object->field_count()) {
1567 // We have a store to a field that is not inside the {object}. This
1568 // can only happen with conflicting type feedback and for dead {node}s.
1569 // For now, we just mark the {object} as escaping.
1570 // TODO(turbofan): Consider just eliminating the store in the reducer
1571 // pass, as it's dead code anyways.
1572 if (status_analysis_->SetEscaped(to)) {
1573 TRACE(
1574 "Setting #%d (%s) to escaped because store field #%d to "
1575 "offset %d outside of object\n",
1576 to->id(), to->op()->mnemonic(), node->id(), offset);
1577 }
1578 return;
1579 }
1580 Node* val = ResolveReplacement(NodeProperties::GetValueInput(node, 1));
1581 // TODO(mstarzinger): The following is a workaround to not track some well
1582 // known raw fields. We only ever store default initial values into these
1583 // fields which are hard-coded in {TranslatedState::MaterializeAt} as well.
1584 if (val->opcode() == IrOpcode::kInt32Constant ||
1585 val->opcode() == IrOpcode::kInt64Constant) {
1586 DCHECK(FieldAccessOf(node->op()).offset == JSFunction::kCodeEntryOffset ||
1587 FieldAccessOf(node->op()).offset == Name::kHashFieldOffset);
1588 val = slot_not_analyzed_;
1589 }
1590 if (object->GetField(offset) != val) {
1591 object = CopyForModificationAt(object, state, node);
1592 object->SetField(offset, val);
1593 }
1594 }
1595 }
1596
void EscapeAnalysis::ProcessStoreElement(Node* node) {
1598 DCHECK_EQ(node->opcode(), IrOpcode::kStoreElement);
1599 ForwardVirtualState(node);
1600 Node* to = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
1601 Node* index_node = node->InputAt(1);
1602 NumberMatcher index(index_node);
1603 DCHECK(index_node->opcode() != IrOpcode::kInt32Constant &&
1604 index_node->opcode() != IrOpcode::kInt64Constant &&
1605 index_node->opcode() != IrOpcode::kFloat32Constant &&
1606 index_node->opcode() != IrOpcode::kFloat64Constant);
1607 VirtualState* state = virtual_states_[node->id()];
1608 if (index.HasValue()) {
1609 if (VirtualObject* object = GetVirtualObject(state, to)) {
1610 if (!object->IsTracked()) return;
1611 int offset = OffsetForElementAccess(node, index.Value());
1612 if (static_cast<size_t>(offset) >= object->field_count()) return;
1613 Node* val = ResolveReplacement(NodeProperties::GetValueInput(node, 2));
1614 if (object->GetField(offset) != val) {
1615 object = CopyForModificationAt(object, state, node);
1616 object->SetField(offset, val);
1617 }
1618 }
1619 } else {
1620 // We have a store to a non-const index, cannot eliminate object.
1621 if (status_analysis_->SetEscaped(to)) {
1622 TRACE(
1623 "Setting #%d (%s) to escaped because store element #%d to non-const "
1624 "index #%d (%s)\n",
1625 to->id(), to->op()->mnemonic(), node->id(), index_node->id(),
1626 index_node->op()->mnemonic());
1627 }
1628 if (VirtualObject* object = GetVirtualObject(state, to)) {
1629 if (!object->IsTracked()) return;
1630 if (!object->AllFieldsClear()) {
1631 object = CopyForModificationAt(object, state, node);
1632 object->ClearAllFields();
1633 TRACE("Cleared all fields of @%d:#%d\n",
1634 status_analysis_->GetAlias(object->id()), object->id());
1635 }
1636 }
1637 }
1638 }
1639
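// Builds (or reuses) an ObjectState node capturing the current field values
// of a virtual allocation, so that deoptimization can rematerialize the
// object from its scalars instead of keeping the allocation alive.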
Node* EscapeAnalysis::GetOrCreateObjectState(Node* effect, Node* node) {
1641 if ((node->opcode() == IrOpcode::kFinishRegion ||
1642 node->opcode() == IrOpcode::kAllocate) &&
1643 IsVirtual(node)) {
1644 if (VirtualObject* vobj = GetVirtualObject(virtual_states_[effect->id()],
1645 ResolveReplacement(node))) {
1646 if (Node* object_state = vobj->GetObjectState()) {
1647 return object_state;
1648 } else {
1649 cache_->fields().clear();
1650 for (size_t i = 0; i < vobj->field_count(); ++i) {
1651 if (Node* field = vobj->GetField(i)) {
1652 cache_->fields().push_back(ResolveReplacement(field));
1653 }
1654 }
1655 int input_count = static_cast<int>(cache_->fields().size());
1656 Node* new_object_state =
1657 graph()->NewNode(common()->ObjectState(input_count), input_count,
1658 &cache_->fields().front());
1659 NodeProperties::SetType(new_object_state, Type::OtherInternal());
1660 vobj->SetObjectState(new_object_state);
1661 TRACE(
1662 "Creating object state #%d for vobj %p (from node #%d) at effect "
1663 "#%d\n",
1664 new_object_state->id(), static_cast<void*>(vobj), node->id(),
1665 effect->id());
1666 // Now fix uses of other objects.
1667 for (size_t i = 0; i < vobj->field_count(); ++i) {
1668 if (Node* field = vobj->GetField(i)) {
1669 if (Node* field_object_state =
1670 GetOrCreateObjectState(effect, field)) {
1671 NodeProperties::ReplaceValueInput(
1672 new_object_state, field_object_state, static_cast<int>(i));
1673 }
1674 }
1675 }
1676 return new_object_state;
1677 }
1678 }
1679 }
1680 return nullptr;
1681 }
1682
bool EscapeAnalysis::IsCyclicObjectState(Node* effect, Node* node) {
1684 if ((node->opcode() == IrOpcode::kFinishRegion ||
1685 node->opcode() == IrOpcode::kAllocate) &&
1686 IsVirtual(node)) {
1687 if (VirtualObject* vobj = GetVirtualObject(virtual_states_[effect->id()],
1688 ResolveReplacement(node))) {
1689 if (cycle_detection_.find(vobj) != cycle_detection_.end()) return true;
1690 cycle_detection_.insert(vobj);
1691 bool cycle_detected = false;
1692 for (size_t i = 0; i < vobj->field_count(); ++i) {
1693 if (Node* field = vobj->GetField(i)) {
1694 if (IsCyclicObjectState(effect, field)) cycle_detected = true;
1695 }
1696 }
1697 cycle_detection_.erase(vobj);
1698 return cycle_detected;
1699 }
1700 }
1701 return false;
1702 }
1703
void EscapeAnalysis::DebugPrintState(VirtualState* state) {
1705 PrintF("Dumping virtual state %p\n", static_cast<void*>(state));
1706 for (Alias alias = 0; alias < status_analysis_->AliasCount(); ++alias) {
1707 if (VirtualObject* object = state->VirtualObjectFromAlias(alias)) {
1708 PrintF(" Alias @%d: Object #%d with %zu fields\n", alias, object->id(),
1709 object->field_count());
1710 for (size_t i = 0; i < object->field_count(); ++i) {
1711 if (Node* f = object->GetField(i)) {
1712 PrintF(" Field %zu = #%d (%s)\n", i, f->id(), f->op()->mnemonic());
1713 }
1714 }
1715 }
1716 }
1717 }
1718
void EscapeAnalysis::DebugPrint() {
1720 ZoneVector<VirtualState*> object_states(zone());
1721 for (NodeId id = 0; id < virtual_states_.size(); id++) {
1722 if (VirtualState* states = virtual_states_[id]) {
1723 if (std::find(object_states.begin(), object_states.end(), states) ==
1724 object_states.end()) {
1725 object_states.push_back(states);
1726 }
1727 }
1728 }
1729 for (size_t n = 0; n < object_states.size(); n++) {
1730 DebugPrintState(object_states[n]);
1731 }
1732 }
1733
VirtualObject* EscapeAnalysis::GetVirtualObject(VirtualState* state,
                                                Node* node) {
1736 if (node->id() >= status_analysis_->GetAliasMap().size()) return nullptr;
1737 Alias alias = status_analysis_->GetAlias(node->id());
1738 if (alias >= state->size()) return nullptr;
1739 return state->VirtualObjectFromAlias(alias);
1740 }
1741
bool EscapeAnalysis::ExistsVirtualAllocate() {
1743 for (size_t id = 0; id < status_analysis_->GetAliasMap().size(); ++id) {
1744 Alias alias = status_analysis_->GetAlias(static_cast<NodeId>(id));
1745 if (alias < EscapeStatusAnalysis::kUntrackable) {
1746 if (status_analysis_->IsVirtual(static_cast<int>(id))) {
1747 return true;
1748 }
1749 }
1750 }
1751 return false;
1752 }
1753
Graph* EscapeAnalysis::graph() const { return status_analysis_->graph(); }
1755
1756 } // namespace compiler
1757 } // namespace internal
1758 } // namespace v8
1759