// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/hydrogen-alias-analysis.h"
#include "src/hydrogen-flow-engine.h"
#include "src/hydrogen-instructions.h"
#include "src/hydrogen-load-elimination.h"

namespace v8 {
namespace internal {

#define GLOBAL true
#define TRACE(x) if (FLAG_trace_load_elimination) PrintF x

static const int kMaxTrackedFields = 16;
static const int kMaxTrackedObjects = 5;

// An element in the field approximation list.
class HFieldApproximation : public ZoneObject {
 public:  // Just a data blob.
  HValue* object_;
  HValue* last_value_;
  HFieldApproximation* next_;

  // Recursively copy the entire linked list of field approximations.
  HFieldApproximation* Copy(Zone* zone) {
    HFieldApproximation* copy = new(zone) HFieldApproximation();
    copy->object_ = this->object_;
    copy->last_value_ = this->last_value_;
    copy->next_ = this->next_ == NULL ? NULL : this->next_->Copy(zone);
    return copy;
  }
};


// The main data structure used during load/store elimination. Each in-object
// field is tracked separately. For each field, store a list of known field
// values for known objects.
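//
// Illustrative sketch (hypothetical names x, y, a, b): assuming a 64-bit
// build where kPointerSize == 8 and the alias analyzer proves x and y cannot
// alias, after
//
//   x.f = a;   // f at in-object offset 8  =>  field index 1
//   y.f = b;
//
// the per-field list for index 1 holds the newest entry first:
//
//   fields_[1]:  [object_: y, last_value_: b] -> [object_: x, last_value_: a]
//
// Each bracketed node is one HFieldApproximation.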
class HLoadEliminationTable : public ZoneObject {
 public:
  HLoadEliminationTable(Zone* zone, HAliasAnalyzer* aliasing)
      : zone_(zone), fields_(kMaxTrackedFields, zone), aliasing_(aliasing) { }

  // The main processing of instructions.
  HLoadEliminationTable* Process(HInstruction* instr, Zone* zone) {
    switch (instr->opcode()) {
      case HValue::kLoadNamedField: {
        HLoadNamedField* l = HLoadNamedField::cast(instr);
        TRACE((" process L%d field %d (o%d)\n",
               instr->id(),
               FieldOf(l->access()),
               l->object()->ActualValue()->id()));
        HValue* result = load(l);
        if (result != instr && l->CanBeReplacedWith(result)) {
          // The load can be replaced with a previous load or a value.
          TRACE((" replace L%d -> v%d\n", instr->id(), result->id()));
          instr->DeleteAndReplaceWith(result);
        }
        break;
      }
      case HValue::kStoreNamedField: {
        HStoreNamedField* s = HStoreNamedField::cast(instr);
        TRACE((" process S%d field %d (o%d) = v%d\n",
               instr->id(),
               FieldOf(s->access()),
               s->object()->ActualValue()->id(),
               s->value()->id()));
        HValue* result = store(s);
        if (result == NULL) {
          // The store is redundant. Remove it.
          TRACE((" remove S%d\n", instr->id()));
          instr->DeleteAndReplaceWith(NULL);
        }
        break;
      }
      case HValue::kTransitionElementsKind: {
        HTransitionElementsKind* t = HTransitionElementsKind::cast(instr);
        HValue* object = t->object()->ActualValue();
        KillFieldInternal(object, FieldOf(JSArray::kElementsOffset), NULL);
        KillFieldInternal(object, FieldOf(JSObject::kMapOffset), NULL);
        break;
      }
      default: {
        if (instr->CheckChangesFlag(kInobjectFields)) {
          TRACE((" kill-all i%d\n", instr->id()));
          Kill();
          break;
        }
        if (instr->CheckChangesFlag(kMaps)) {
          TRACE((" kill-maps i%d\n", instr->id()));
          KillOffset(JSObject::kMapOffset);
        }
        if (instr->CheckChangesFlag(kElementsKind)) {
          TRACE((" kill-elements-kind i%d\n", instr->id()));
          KillOffset(JSObject::kMapOffset);
          KillOffset(JSObject::kElementsOffset);
        }
        if (instr->CheckChangesFlag(kElementsPointer)) {
          TRACE((" kill-elements i%d\n", instr->id()));
          KillOffset(JSObject::kElementsOffset);
        }
        if (instr->CheckChangesFlag(kOsrEntries)) {
          TRACE((" kill-osr i%d\n", instr->id()));
          Kill();
        }
      }
      // Improvements possible:
      // - learn from HCheckMaps for field 0
      // - remove unobservable stores (write-after-write)
      // - track cells
      // - track globals
      // - track roots
    }
    return this;
  }

  // Support for global analysis with HFlowEngine: Merge given state with
  // the other incoming state.
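  // Merging is effectively an intersection of the two incoming states: an
  // entry survives only if the other state has an entry for the same field
  // whose object must-aliases this one and whose last value is Equal();
  // everything the predecessors disagree on is dropped. For example
  // (illustrative), if one predecessor knows o.f == v1 and the other knows
  // o.f == v2, the entry for o.f is killed at the join.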
  static HLoadEliminationTable* Merge(HLoadEliminationTable* succ_state,
                                      HBasicBlock* succ_block,
                                      HLoadEliminationTable* pred_state,
                                      HBasicBlock* pred_block,
                                      Zone* zone) {
    DCHECK(pred_state != NULL);
    if (succ_state == NULL) {
      return pred_state->Copy(succ_block, pred_block, zone);
    } else {
      return succ_state->Merge(succ_block, pred_state, pred_block, zone);
    }
  }

  // Support for global analysis with HFlowEngine: Given state merged with all
  // the other incoming states, prepare it for use.
  static HLoadEliminationTable* Finish(HLoadEliminationTable* state,
                                       HBasicBlock* block,
                                       Zone* zone) {
    DCHECK(state != NULL);
    return state;
  }

 private:
  // Copy state to successor block.
  HLoadEliminationTable* Copy(HBasicBlock* succ, HBasicBlock* from_block,
                              Zone* zone) {
    HLoadEliminationTable* copy =
        new(zone) HLoadEliminationTable(zone, aliasing_);
    copy->EnsureFields(fields_.length());
    for (int i = 0; i < fields_.length(); i++) {
      copy->fields_[i] = fields_[i] == NULL ? NULL : fields_[i]->Copy(zone);
    }
    if (FLAG_trace_load_elimination) {
      TRACE((" copy-to B%d\n", succ->block_id()));
      copy->Print();
    }
    return copy;
  }

  // Merge this state with the other incoming state.
  HLoadEliminationTable* Merge(HBasicBlock* succ, HLoadEliminationTable* that,
                               HBasicBlock* that_block, Zone* zone) {
    if (that->fields_.length() < fields_.length()) {
      // Drop fields not in the other table.
      fields_.Rewind(that->fields_.length());
    }
    for (int i = 0; i < fields_.length(); i++) {
      // Merge the field approximations for like fields.
      HFieldApproximation* approx = fields_[i];
      HFieldApproximation* prev = NULL;
      while (approx != NULL) {
        // TODO(titzer): Merging is O(N * M); sort?
        HFieldApproximation* other = that->Find(approx->object_, i);
        if (other == NULL || !Equal(approx->last_value_, other->last_value_)) {
          // Kill an entry that doesn't agree with the other value.
          if (prev != NULL) {
            prev->next_ = approx->next_;
          } else {
            fields_[i] = approx->next_;
          }
          approx = approx->next_;
          continue;
        }
        prev = approx;
        approx = approx->next_;
      }
    }
    if (FLAG_trace_load_elimination) {
      TRACE((" merge-to B%d\n", succ->block_id()));
      Print();
    }
    return this;
  }

  friend class HLoadEliminationEffects;  // Calls Kill() and others.
  friend class HLoadEliminationPhase;

 private:
  // Process a load instruction, updating internal table state. If a previous
  // load or store for this object and field exists, return the new value with
  // which the load should be replaced. Otherwise, return {instr}.
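  //
  // For example (illustrative, with hypothetical values t1/t2):
  //
  //   t1 = o.f;
  //   ...          // no intervening instruction that could change o.f
  //   t2 = o.f;
  //
  // The second load finds the entry recorded by the first; since the first
  // load's block dominates the second, load() returns t1 and Process()
  // replaces the second load with it.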
  HValue* load(HLoadNamedField* instr) {
    // There must be no loads from non-observable in-object properties.
    DCHECK(!instr->access().IsInobject() ||
           instr->access().existing_inobject_property());

    int field = FieldOf(instr->access());
    if (field < 0) return instr;

    HValue* object = instr->object()->ActualValue();
    HFieldApproximation* approx = FindOrCreate(object, field);

    if (approx->last_value_ == NULL) {
      // Load is not redundant. Fill out a new entry.
      approx->last_value_ = instr;
      return instr;
    } else if (approx->last_value_->block()->EqualToOrDominates(
        instr->block())) {
      // Eliminate the load. Reuse previously stored value or load instruction.
      return approx->last_value_;
    } else {
      return instr;
    }
  }

  // Process a store instruction, updating internal table state. If a previous
  // store to the same object and field makes this store redundant (e.g.
  // because the stored values are the same), return NULL indicating that this
  // store instruction is redundant. Otherwise, return {instr}.
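  //
  // For example (illustrative): in
  //
  //   o.f = v;
  //   o.f = v;   // same value, nothing in between clobbers o.f
  //
  // the table already records last_value_ == v for o.f, so the second store
  // returns NULL and Process() removes it.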
  HValue* store(HStoreNamedField* instr) {
    if (instr->access().IsInobject() &&
        !instr->access().existing_inobject_property()) {
      TRACE((" skipping non existing property initialization store\n"));
      return instr;
    }

    int field = FieldOf(instr->access());
    if (field < 0) return KillIfMisaligned(instr);

    HValue* object = instr->object()->ActualValue();
    HValue* value = instr->value();

    if (instr->has_transition()) {
      // A transition introduces a new field and alters the map of the object.
      // Since the field in the object is new, it cannot alias existing entries.
      // TODO(titzer): introduce a constant for the new map and remember it.
      KillFieldInternal(object, FieldOf(JSObject::kMapOffset), NULL);
    } else {
      // Kill non-equivalent may-alias entries.
      KillFieldInternal(object, field, value);
    }
    HFieldApproximation* approx = FindOrCreate(object, field);

    if (Equal(approx->last_value_, value)) {
      // The store is redundant because the field already has this value.
      return NULL;
    } else {
      // The store is not redundant. Update the entry.
      approx->last_value_ = value;
      return instr;
    }
  }

  // Kill everything in this table.
  void Kill() {
    fields_.Rewind(0);
  }

  // Kill all entries matching the given offset.
  void KillOffset(int offset) {
    int field = FieldOf(offset);
    if (field >= 0 && field < fields_.length()) {
      fields_[field] = NULL;
    }
  }

  // Kill all entries aliasing the given store.
  void KillStore(HStoreNamedField* s) {
    int field = FieldOf(s->access());
    if (field >= 0) {
      KillFieldInternal(s->object()->ActualValue(), field, s->value());
    } else {
      KillIfMisaligned(s);
    }
  }

  // Kill multiple entries in the case of a misaligned store.
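  // For example (illustrative, assuming kPointerSize == 4): a 4-byte store at
  // in-object offset 6 overlaps words 1 and 2, so the entries for field 1
  // (6 / 4) and field 2 ((6 + 4 - 1) / 4) are both killed.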
  HValue* KillIfMisaligned(HStoreNamedField* instr) {
    HObjectAccess access = instr->access();
    if (access.IsInobject()) {
      int offset = access.offset();
      if ((offset % kPointerSize) != 0) {
        // Kill the field containing the first word of the access.
        HValue* object = instr->object()->ActualValue();
        int field = offset / kPointerSize;
        KillFieldInternal(object, field, NULL);

        // Kill the next field in case of overlap.
        int size = access.representation().size();
        int next_field = (offset + size - 1) / kPointerSize;
        if (next_field != field) KillFieldInternal(object, next_field, NULL);
      }
    }
    return instr;
  }

  // Find an entry for the given object and field pair.
  HFieldApproximation* Find(HValue* object, int field) {
    // Search for a field approximation for this object.
    HFieldApproximation* approx = fields_[field];
    while (approx != NULL) {
      if (aliasing_->MustAlias(object, approx->object_)) return approx;
      approx = approx->next_;
    }
    return NULL;
  }

  // Find or create an entry for the given object and field pair.
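  // New entries are inserted at the head of the per-field list, so the list
  // is ordered newest-first; once it already holds kMaxTrackedObjects
  // entries, the tail (the oldest approximation) is evicted and reused for
  // the new object (see ReuseLastApproximation() below).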
  HFieldApproximation* FindOrCreate(HValue* object, int field) {
    EnsureFields(field + 1);

    // Search for a field approximation for this object.
    HFieldApproximation* approx = fields_[field];
    int count = 0;
    while (approx != NULL) {
      if (aliasing_->MustAlias(object, approx->object_)) return approx;
      count++;
      approx = approx->next_;
    }

    if (count >= kMaxTrackedObjects) {
      // Pull the last entry off the end and repurpose it for this object.
      approx = ReuseLastApproximation(field);
    } else {
      // Allocate a new entry.
      approx = new(zone_) HFieldApproximation();
    }

    // Insert the entry at the head of the list.
    approx->object_ = object;
    approx->last_value_ = NULL;
    approx->next_ = fields_[field];
    fields_[field] = approx;

    return approx;
  }

  // Kill all entries for a given field that _may_ alias the given object
  // and do _not_ have the given value.
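  //
  // For example (illustrative): after "p.f = 1", a store "q.f = 2" where p
  // and q may alias removes the entry recording p.f == 1; had the second
  // store also written 1, the entry would be kept, since the recorded fact
  // still holds either way.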
  void KillFieldInternal(HValue* object, int field, HValue* value) {
    if (field >= fields_.length()) return;  // Nothing to do.

    HFieldApproximation* approx = fields_[field];
    HFieldApproximation* prev = NULL;
    while (approx != NULL) {
      if (aliasing_->MayAlias(object, approx->object_)) {
        if (!Equal(approx->last_value_, value)) {
          // Kill an aliasing entry that doesn't agree on the value.
          if (prev != NULL) {
            prev->next_ = approx->next_;
          } else {
            fields_[field] = approx->next_;
          }
          approx = approx->next_;
          continue;
        }
      }
      prev = approx;
      approx = approx->next_;
    }
  }

  bool Equal(HValue* a, HValue* b) {
    if (a == b) return true;
    if (a != NULL && b != NULL && a->CheckFlag(HValue::kUseGVN)) {
      return a->Equals(b);
    }
    return false;
  }

  // Remove the last approximation for a field so that it can be reused.
  // We reuse the last entry because it was the first inserted and is thus
  // farthest away from the current instruction.
  HFieldApproximation* ReuseLastApproximation(int field) {
    HFieldApproximation* approx = fields_[field];
    DCHECK(approx != NULL);

    HFieldApproximation* prev = NULL;
    while (approx->next_ != NULL) {
      prev = approx;
      approx = approx->next_;
    }
    if (prev != NULL) prev->next_ = NULL;
    return approx;
  }

  // Compute the field index for the given object access; -1 if not tracked.
  int FieldOf(HObjectAccess access) {
    return access.IsInobject() ? FieldOf(access.offset()) : -1;
  }

  // Compute the field index for the given in-object offset; -1 if not tracked.
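  //
  // For example (illustrative, assuming kPointerSize == 8): offset 16 maps to
  // field index 2; offset 12 is misaligned and yields -1; and any offset at
  // or beyond kMaxTrackedFields * 8 == 128 is out of range and also yields -1.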
  int FieldOf(int offset) {
    if (offset >= kMaxTrackedFields * kPointerSize) return -1;
    // TODO(titzer): track misaligned loads in a separate list?
    if ((offset % kPointerSize) != 0) return -1;  // Ignore misaligned accesses.
    return offset / kPointerSize;
  }

  // Ensure internal storage for the given number of fields.
  void EnsureFields(int num_fields) {
    if (fields_.length() < num_fields) {
      fields_.AddBlock(NULL, num_fields - fields_.length(), zone_);
    }
  }

  // Print this table to stdout.
  void Print() {
    for (int i = 0; i < fields_.length(); i++) {
      PrintF(" field %d: ", i);
      for (HFieldApproximation* a = fields_[i]; a != NULL; a = a->next_) {
        PrintF("[o%d =", a->object_->id());
        if (a->last_value_ != NULL) PrintF(" v%d", a->last_value_->id());
        PrintF("] ");
      }
      PrintF("\n");
    }
  }

  Zone* zone_;
  ZoneList<HFieldApproximation*> fields_;
  HAliasAnalyzer* aliasing_;
};


// Support for HFlowEngine: collect store effects within loops.
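//
// An effects object summarizes what a region (e.g. a loop body) can do to a
// load elimination table: the GVN change flags it may raise plus the
// individual named-field stores it contains. The flow engine unions these
// summaries and Apply()s them to a state instead of reprocessing the region's
// instructions one by one.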
class HLoadEliminationEffects : public ZoneObject {
 public:
  explicit HLoadEliminationEffects(Zone* zone)
      : zone_(zone), stores_(5, zone) { }

  inline bool Disabled() {
    return false;  // Effects are _not_ disabled.
  }

  // Process a possibly side-effecting instruction.
  void Process(HInstruction* instr, Zone* zone) {
    if (instr->IsStoreNamedField()) {
      stores_.Add(HStoreNamedField::cast(instr), zone_);
    } else {
      flags_.Add(instr->ChangesFlags());
    }
  }

  // Apply these effects to the given load elimination table.
  void Apply(HLoadEliminationTable* table) {
    // Loads must not be hoisted past the OSR entry; therefore we kill
    // everything if we see an OSR entry.
    if (flags_.Contains(kInobjectFields) || flags_.Contains(kOsrEntries)) {
      table->Kill();
      return;
    }
    if (flags_.Contains(kElementsKind) || flags_.Contains(kMaps)) {
      table->KillOffset(JSObject::kMapOffset);
    }
    if (flags_.Contains(kElementsKind) || flags_.Contains(kElementsPointer)) {
      table->KillOffset(JSObject::kElementsOffset);
    }

    // Kill non-agreeing fields for each store contained in these effects.
    for (int i = 0; i < stores_.length(); i++) {
      table->KillStore(stores_[i]);
    }
  }

  // Union these effects with the other effects.
  void Union(HLoadEliminationEffects* that, Zone* zone) {
    flags_.Add(that->flags_);
    for (int i = 0; i < that->stores_.length(); i++) {
      stores_.Add(that->stores_[i], zone);
    }
  }

 private:
  Zone* zone_;
  GVNFlagSet flags_;
  ZoneList<HStoreNamedField*> stores_;
};


// The main routine of the analysis phase. Use the HFlowEngine for either a
// local or a global analysis.
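// With GLOBAL set, the flow engine propagates table state across dominated
// blocks; otherwise each block is analyzed in isolation, with the table reset
// via Kill() before every block.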
void HLoadEliminationPhase::Run() {
  HFlowEngine<HLoadEliminationTable, HLoadEliminationEffects>
      engine(graph(), zone());
  HAliasAnalyzer aliasing;
  HLoadEliminationTable* table =
      new(zone()) HLoadEliminationTable(zone(), &aliasing);

  if (GLOBAL) {
    // Perform a global analysis.
    engine.AnalyzeDominatedBlocks(graph()->blocks()->at(0), table);
  } else {
    // Perform only local analysis.
    for (int i = 0; i < graph()->blocks()->length(); i++) {
      table->Kill();
      engine.AnalyzeOneBlock(graph()->blocks()->at(i), table);
    }
  }
}

} }  // namespace v8::internal