// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/escape-analysis-reducer.h"

#include "src/compiler/all-nodes.h"
#include "src/compiler/simplified-operator.h"
#include "src/compiler/type-cache.h"
#include "src/frame-constants.h"

namespace v8 {
namespace internal {
namespace compiler {

#ifdef DEBUG
#define TRACE(...)                                    \
  do {                                                \
    if (FLAG_trace_turbo_escape) PrintF(__VA_ARGS__); \
  } while (false)
#else
#define TRACE(...)
#endif  // DEBUG

EscapeAnalysisReducer::EscapeAnalysisReducer(
    Editor* editor, JSGraph* jsgraph, EscapeAnalysisResult analysis_result,
    Zone* zone)
    : AdvancedReducer(editor),
      jsgraph_(jsgraph),
      analysis_result_(analysis_result),
      object_id_cache_(zone),
      node_cache_(jsgraph->graph(), zone),
      arguments_elements_(zone),
      zone_(zone) {}

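// Replaces {original} with {replacement}. If the replacement is dead, refers
// to a non-escaping virtual object, or does not widen the original's type,
// the replacement is used directly; otherwise the original node is turned
// into a TypeGuard on the replacement to preserve the narrower type.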
Reduction EscapeAnalysisReducer::ReplaceNode(Node* original,
                                             Node* replacement) {
  const VirtualObject* vobject =
      analysis_result().GetVirtualObject(replacement);
  if (replacement->opcode() == IrOpcode::kDead ||
      (vobject && !vobject->HasEscaped())) {
    RelaxEffectsAndControls(original);
    return Replace(replacement);
  }
  Type const replacement_type = NodeProperties::GetType(replacement);
  Type const original_type = NodeProperties::GetType(original);
  if (replacement_type.Is(original_type)) {
    RelaxEffectsAndControls(original);
    return Replace(replacement);
  }

  // We need to guard the replacement if we would widen the type otherwise.
  DCHECK_EQ(1, original->op()->EffectOutputCount());
  DCHECK_EQ(1, original->op()->EffectInputCount());
  DCHECK_EQ(1, original->op()->ControlInputCount());
  Node* effect = NodeProperties::GetEffectInput(original);
  Node* control = NodeProperties::GetControlInput(original);
  original->TrimInputCount(0);
  original->AppendInput(jsgraph()->zone(), replacement);
  original->AppendInput(jsgraph()->zone(), effect);
  original->AppendInput(jsgraph()->zone(), control);
  NodeProperties::SetType(
      original,
      Type::Intersect(original_type, replacement_type, jsgraph()->zone()));
  NodeProperties::ChangeOp(original,
                           jsgraph()->common()->TypeGuard(original_type));
  ReplaceWithValue(original, original, original, control);
  return NoChange();
}

namespace {

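// Looks through any chain of TypeGuard nodes to the underlying value.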
Node* SkipTypeGuards(Node* node) {
  while (node->opcode() == IrOpcode::kTypeGuard) {
    node = NodeProperties::GetValueInput(node, 0);
  }
  return node;
}

}  // namespace

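// Returns the canonical ObjectId node for {vobject}, creating and caching it
// on first request so that all deopt states share one node per object id.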
Node* EscapeAnalysisReducer::ObjectIdNode(const VirtualObject* vobject) {
  VirtualObject::Id id = vobject->id();
  if (id >= object_id_cache_.size()) object_id_cache_.resize(id + 1);
  if (!object_id_cache_[id]) {
    Node* node = jsgraph()->graph()->NewNode(jsgraph()->common()->ObjectId(id));
    NodeProperties::SetType(node, Type::Object());
    object_id_cache_[id] = node;
  }
  return object_id_cache_[id];
}

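// Main entry point: rewires nodes to their escape-analysis replacements,
// disconnects allocations, type guards and regions for non-escaping objects
// from the effect and control chains, and records NewArgumentsElements nodes
// for the Finalize() post-pass.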
Reduction EscapeAnalysisReducer::Reduce(Node* node) {
  if (Node* replacement = analysis_result().GetReplacementOf(node)) {
    DCHECK(node->opcode() != IrOpcode::kAllocate &&
           node->opcode() != IrOpcode::kFinishRegion);
    DCHECK_NE(replacement, node);
    return ReplaceNode(node, replacement);
  }

  switch (node->opcode()) {
    case IrOpcode::kAllocate:
    case IrOpcode::kTypeGuard: {
      const VirtualObject* vobject = analysis_result().GetVirtualObject(node);
      if (vobject && !vobject->HasEscaped()) {
        RelaxEffectsAndControls(node);
      }
      return NoChange();
    }
    case IrOpcode::kFinishRegion: {
      Node* effect = NodeProperties::GetEffectInput(node, 0);
      if (effect->opcode() == IrOpcode::kBeginRegion) {
        RelaxEffectsAndControls(effect);
        RelaxEffectsAndControls(node);
      }
      return NoChange();
    }
    case IrOpcode::kNewArgumentsElements:
      arguments_elements_.insert(node);
      return NoChange();
    default: {
      // TODO(sigurds): Change this to GetFrameStateInputCount once
      // it is working. For now we use EffectInputCount > 0 to determine
      // whether a node might have a frame state input.
      if (node->op()->EffectInputCount() > 0) {
        ReduceFrameStateInputs(node);
      }
      return NoChange();
    }
  }
}

// While doing DFS on the FrameState tree, we have to recognize duplicate
// occurrences of virtual objects.
class Deduplicator {
 public:
  explicit Deduplicator(Zone* zone) : is_duplicate_(zone) {}
  bool SeenBefore(const VirtualObject* vobject) {
    VirtualObject::Id id = vobject->id();
    if (id >= is_duplicate_.size()) {
      is_duplicate_.resize(id + 1);
    }
    bool is_duplicate = is_duplicate_[id];
    is_duplicate_[id] = true;
    return is_duplicate;
  }

 private:
  ZoneVector<bool> is_duplicate_;
};

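// Rewrites every FrameState input of {node}. A fresh Deduplicator per frame
// state ensures that repeated virtual objects collapse to ObjectId nodes.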
void EscapeAnalysisReducer::ReduceFrameStateInputs(Node* node) {
  DCHECK_GE(node->op()->EffectInputCount(), 1);
  for (int i = 0; i < node->InputCount(); ++i) {
    Node* input = node->InputAt(i);
    if (input->opcode() == IrOpcode::kFrameState) {
      Deduplicator deduplicator(zone());
      if (Node* ret = ReduceDeoptState(input, node, &deduplicator)) {
        node->ReplaceInput(i, ret);
      }
    }
  }
}

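// Recursively rebuilds a deopt state through the node cache: FrameState and
// StateValues nodes are reconstructed input by input, and non-escaping
// virtual objects are materialized as ObjectState nodes, or as ObjectId
// nodes on repeated occurrences.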
Node* EscapeAnalysisReducer::ReduceDeoptState(Node* node, Node* effect,
                                              Deduplicator* deduplicator) {
  if (node->opcode() == IrOpcode::kFrameState) {
    NodeHashCache::Constructor new_node(&node_cache_, node);
    // This input order is important to match the DFS traversal used in the
    // instruction selector. Otherwise, the instruction selector might find a
    // duplicate node before the original one.
    for (int input_id : {kFrameStateOuterStateInput, kFrameStateFunctionInput,
                         kFrameStateParametersInput, kFrameStateContextInput,
                         kFrameStateLocalsInput, kFrameStateStackInput}) {
      Node* input = node->InputAt(input_id);
      new_node.ReplaceInput(ReduceDeoptState(input, effect, deduplicator),
                            input_id);
    }
    return new_node.Get();
  } else if (node->opcode() == IrOpcode::kStateValues) {
    NodeHashCache::Constructor new_node(&node_cache_, node);
    for (int i = 0; i < node->op()->ValueInputCount(); ++i) {
      Node* input = NodeProperties::GetValueInput(node, i);
      new_node.ReplaceValueInput(ReduceDeoptState(input, effect, deduplicator),
                                 i);
    }
    return new_node.Get();
  } else if (const VirtualObject* vobject =
                 analysis_result().GetVirtualObject(SkipTypeGuards(node))) {
    if (vobject->HasEscaped()) return node;
    if (deduplicator->SeenBefore(vobject)) {
      return ObjectIdNode(vobject);
    } else {
      std::vector<Node*> inputs;
      for (int offset = 0; offset < vobject->size(); offset += kPointerSize) {
        Node* field =
            analysis_result().GetVirtualObjectField(vobject, offset, effect);
        CHECK_NOT_NULL(field);
        if (field != jsgraph()->Dead()) {
          inputs.push_back(ReduceDeoptState(field, effect, deduplicator));
        }
      }
      int num_inputs = static_cast<int>(inputs.size());
      NodeHashCache::Constructor new_node(
          &node_cache_,
          jsgraph()->common()->ObjectState(vobject->id(), num_inputs),
          num_inputs, &inputs.front(), NodeProperties::GetType(node));
      return new_node.Get();
    }
  } else {
    return node;
  }
}

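// Sanity check: after reduction, no reachable Allocate node may still
// correspond to a non-escaping virtual object; such nodes should have been
// removed.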
void EscapeAnalysisReducer::VerifyReplacement() const {
  AllNodes all(zone(), jsgraph()->graph());
  for (Node* node : all.reachable) {
    if (node->opcode() == IrOpcode::kAllocate) {
      if (const VirtualObject* vobject =
              analysis_result().GetVirtualObject(node)) {
        if (!vobject->HasEscaped()) {
          FATAL("Escape analysis failed to remove node %s#%d\n",
                node->op()->mnemonic(), node->id());
        }
      }
    }
  }
}

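// Post-pass over the recorded NewArgumentsElements nodes: when the allocated
// backing store is only used by deopt states and by element or length loads,
// the allocation is replaced by an ArgumentsElementsState and the loads turn
// into direct reads from the stack frame.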
void EscapeAnalysisReducer::Finalize() {
  for (Node* node : arguments_elements_) {
    int mapped_count = NewArgumentsElementsMappedCountOf(node->op());

    Node* arguments_frame = NodeProperties::GetValueInput(node, 0);
    if (arguments_frame->opcode() != IrOpcode::kArgumentsFrame) continue;
    Node* arguments_length = NodeProperties::GetValueInput(node, 1);
    if (arguments_length->opcode() != IrOpcode::kArgumentsLength) continue;

    // If mapped arguments are specified, then their number is always equal to
    // the number of formal parameters. This allows us to use just the
    // three-value {ArgumentsStateType} enum because the deoptimizer can
    // reconstruct the value of {mapped_count} from the number of formal
    // parameters.
    DCHECK_IMPLIES(
        mapped_count != 0,
        mapped_count == FormalParameterCountOf(arguments_length->op()));
    ArgumentsStateType type = IsRestLengthOf(arguments_length->op())
                                  ? ArgumentsStateType::kRestParameter
                                  : (mapped_count == 0)
                                        ? ArgumentsStateType::kUnmappedArguments
                                        : ArgumentsStateType::kMappedArguments;

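    // Reroute all deopt-state uses of the length to a singleton
    // ArgumentsLengthState node, created lazily on first use.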
    Node* arguments_length_state = nullptr;
    for (Edge edge : arguments_length->use_edges()) {
      Node* use = edge.from();
      switch (use->opcode()) {
        case IrOpcode::kObjectState:
        case IrOpcode::kTypedObjectState:
        case IrOpcode::kStateValues:
        case IrOpcode::kTypedStateValues:
          if (!arguments_length_state) {
            arguments_length_state = jsgraph()->graph()->NewNode(
                jsgraph()->common()->ArgumentsLengthState(type));
            NodeProperties::SetType(arguments_length_state,
                                    Type::OtherInternal());
          }
          edge.UpdateTo(arguments_length_state);
          break;
        default:
          break;
      }
    }

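    // Classify the value uses of the elements allocation: deopt states are
    // harmless, rewritable element and length loads are collected, and any
    // other use makes the allocation escape.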
    bool escaping_use = false;
    ZoneVector<Node*> loads(zone());
    for (Edge edge : node->use_edges()) {
      Node* use = edge.from();
      if (!NodeProperties::IsValueEdge(edge)) continue;
      if (use->use_edges().empty()) {
        // A node without uses is dead, so we don't have to care about it.
        continue;
      }
      switch (use->opcode()) {
        case IrOpcode::kStateValues:
        case IrOpcode::kTypedStateValues:
        case IrOpcode::kObjectState:
        case IrOpcode::kTypedObjectState:
          break;
        case IrOpcode::kLoadElement:
          if (mapped_count == 0) {
            loads.push_back(use);
          } else {
            escaping_use = true;
          }
          break;
        case IrOpcode::kLoadField:
          if (FieldAccessOf(use->op()).offset == FixedArray::kLengthOffset) {
            loads.push_back(use);
          } else {
            escaping_use = true;
          }
          break;
        default:
          // If the arguments elements node is used by an unhandled node,
          // then we cannot remove this allocation.
          escaping_use = true;
          break;
      }
      if (escaping_use) break;
    }
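    // No escaping use: replace the allocation by an ArgumentsElementsState
    // and rewrite the collected loads to read directly from the stack.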
    if (!escaping_use) {
      Node* arguments_elements_state = jsgraph()->graph()->NewNode(
          jsgraph()->common()->ArgumentsElementsState(type));
      NodeProperties::SetType(arguments_elements_state, Type::OtherInternal());
      ReplaceWithValue(node, arguments_elements_state);

      ElementAccess stack_access;
      stack_access.base_is_tagged = BaseTaggedness::kUntaggedBase;
      // Reduce base address by {kPointerSize} such that (length - index)
      // resolves to the right position.
      stack_access.header_size =
          CommonFrameConstants::kFixedFrameSizeAboveFp - kPointerSize;
      stack_access.type = Type::NonInternal();
      stack_access.machine_type = MachineType::AnyTagged();
      stack_access.write_barrier_kind = WriteBarrierKind::kNoWriteBarrier;
      const Operator* load_stack_op =
          jsgraph()->simplified()->LoadElement(stack_access);

      for (Node* load : loads) {
        switch (load->opcode()) {
          case IrOpcode::kLoadElement: {
            Node* index = NodeProperties::GetValueInput(load, 1);
            // {offset} is a reversed index starting from 1. The base address
            // is adapted to allow offsets starting from 1.
            Node* offset = jsgraph()->graph()->NewNode(
                jsgraph()->simplified()->NumberSubtract(), arguments_length,
                index);
            NodeProperties::SetType(offset,
                                    TypeCache::Get().kArgumentsLengthType);
            NodeProperties::ReplaceValueInput(load, arguments_frame, 0);
            NodeProperties::ReplaceValueInput(load, offset, 1);
            NodeProperties::ChangeOp(load, load_stack_op);
            break;
          }
          case IrOpcode::kLoadField: {
            DCHECK_EQ(FieldAccessOf(load->op()).offset,
                      FixedArray::kLengthOffset);
            Node* length = NodeProperties::GetValueInput(node, 1);
            ReplaceWithValue(load, length);
            break;
          }
          default:
            UNREACHABLE();
        }
      }
    }
  }
}

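// Returns the cached node equivalent to {node}, or nullptr if there is none.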
Node* NodeHashCache::Query(Node* node) {
  auto it = cache_.find(node);
  if (it != cache_.end()) {
    return *it;
  } else {
    return nullptr;
  }
}

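// Builds a scratch node with the given operator, inputs and type, reusing a
// node from the temp pool when one is available; Get() later deduplicates it
// against the cache.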
NodeHashCache::Constructor::Constructor(NodeHashCache* cache,
                                        const Operator* op, int input_count,
                                        Node** inputs, Type type)
    : node_cache_(cache), from_(nullptr) {
  if (node_cache_->temp_nodes_.size() > 0) {
    tmp_ = node_cache_->temp_nodes_.back();
    node_cache_->temp_nodes_.pop_back();
    int tmp_input_count = tmp_->InputCount();
    if (input_count <= tmp_input_count) {
      tmp_->TrimInputCount(input_count);
    }
    for (int i = 0; i < input_count; ++i) {
      if (i < tmp_input_count) {
        tmp_->ReplaceInput(i, inputs[i]);
      } else {
        tmp_->AppendInput(node_cache_->graph_->zone(), inputs[i]);
      }
    }
    NodeProperties::ChangeOp(tmp_, op);
  } else {
    tmp_ = node_cache_->graph_->NewNode(op, input_count, inputs);
  }
  NodeProperties::SetType(tmp_, type);
}

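// Finishes construction: returns the cached equivalent of the built node if
// one exists (recycling the scratch node back into the temp pool), otherwise
// inserts the new node into the cache and returns it.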
Node* NodeHashCache::Constructor::Get() {
  DCHECK(tmp_ || from_);
  Node* node;
  if (!tmp_) {
    node = node_cache_->Query(from_);
    if (!node) node = from_;
  } else {
    node = node_cache_->Query(tmp_);
    if (node) {
      node_cache_->temp_nodes_.push_back(tmp_);
    } else {
      node = tmp_;
      node_cache_->Insert(node);
    }
  }
  tmp_ = from_ = nullptr;
  return node;
}

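// Returns a node that may be mutated in place: the scratch node if one was
// already made, otherwise a copy of {from_}, cloned or rebuilt from the
// temp pool.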
Node* NodeHashCache::Constructor::MutableNode() {
  DCHECK(tmp_ || from_);
  if (!tmp_) {
    if (node_cache_->temp_nodes_.empty()) {
      tmp_ = node_cache_->graph_->CloneNode(from_);
    } else {
      tmp_ = node_cache_->temp_nodes_.back();
      node_cache_->temp_nodes_.pop_back();
      int from_input_count = from_->InputCount();
      int tmp_input_count = tmp_->InputCount();
      if (from_input_count <= tmp_input_count) {
        tmp_->TrimInputCount(from_input_count);
      }
      for (int i = 0; i < from_input_count; ++i) {
        if (i < tmp_input_count) {
          tmp_->ReplaceInput(i, from_->InputAt(i));
        } else {
          tmp_->AppendInput(node_cache_->graph_->zone(), from_->InputAt(i));
        }
      }
      NodeProperties::SetType(tmp_, NodeProperties::GetType(from_));
      NodeProperties::ChangeOp(tmp_, from_->op());
    }
  }
  return tmp_;
}

#undef TRACE

}  // namespace compiler
}  // namespace internal
}  // namespace v8