1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/hydrogen.h"
6 #include "src/hydrogen-gvn.h"
7 #include "src/v8.h"
8
9 namespace v8 {
10 namespace internal {
11
// Hash map from HInstruction* to an equivalent, previously-seen
// HInstruction*, used by GVN to detect redundant computations. Keys hash via
// HInstruction::Hashcode() and compare via HInstruction::Equals(). The table
// is open-hashed: array_ holds the first entry of each bucket, collisions are
// chained through lists_ using array indices (kNil terminates a chain), and
// unused lists_ slots are threaded on an internal free list. All storage is
// zone-allocated; nothing is ever freed individually.
class HInstructionMap V8_FINAL : public ZoneObject {
 public:
  HInstructionMap(Zone* zone, SideEffectsTracker* side_effects_tracker)
      : array_size_(0),
        lists_size_(0),
        count_(0),
        array_(NULL),
        lists_(NULL),
        free_list_head_(kNil),
        side_effects_tracker_(side_effects_tracker) {
    ResizeLists(kInitialSize, zone);
    Resize(kInitialSize, zone);
  }

  // Removes every stored instruction whose depends-on set intersects
  // side_effects.
  void Kill(SideEffects side_effects);

  // Inserts instr and folds its depends-on set into present_depends_on_,
  // which lets Kill() skip a full scan when nothing stored is affected.
  void Add(HInstruction* instr, Zone* zone) {
    present_depends_on_.Add(side_effects_tracker_->ComputeDependsOn(instr));
    Insert(instr, zone);
  }

  // Returns a previously added instruction equal to instr, or NULL.
  HInstruction* Lookup(HInstruction* instr) const;

  // Deep copy, used when the dominator-tree traversal branches.
  HInstructionMap* Copy(Zone* zone) const {
    return new(zone) HInstructionMap(zone, this);
  }

  bool IsEmpty() const { return count_ == 0; }

 private:
  // A linked list of HInstruction* values. Stored in arrays.
  struct HInstructionMapListElement {
    HInstruction* instr;
    int next;  // Index in the array of the next list element.
  };
  static const int kNil = -1;  // The end of a linked list

  // Must be a power of 2.
  static const int kInitialSize = 16;

  // Private copy constructor backing Copy().
  HInstructionMap(Zone* zone, const HInstructionMap* other);

  void Resize(int new_size, Zone* zone);
  void ResizeLists(int new_size, Zone* zone);
  void Insert(HInstruction* instr, Zone* zone);
  // Maps a hash value to a bucket index; relies on array_size_ being a
  // power of 2.
  uint32_t Bound(uint32_t value) const { return value & (array_size_ - 1); }

  int array_size_;
  int lists_size_;
  int count_;  // The number of values stored in the HInstructionMap.
  SideEffects present_depends_on_;  // Union of depends-on sets of entries.
  HInstructionMapListElement* array_;
  // Primary store - contains the first value
  // with a given hash. Colliding elements are stored in linked lists.
  HInstructionMapListElement* lists_;
  // The linked lists containing hash collisions.
  int free_list_head_;  // Unused elements in lists_ are on the free list.
  SideEffectsTracker* side_effects_tracker_;
};
71
72
// Fixed-size map from tracked side-effect kind (GVNFlag index in
// [0, kNumberOfTrackedSideEffects)) to the most recent dominating
// instruction that produced that side effect. count_ caches the number of
// non-NULL entries so IsEmpty() is O(1).
class HSideEffectMap V8_FINAL BASE_EMBEDDED {
 public:
  HSideEffectMap();
  explicit HSideEffectMap(HSideEffectMap* other);
  HSideEffectMap& operator= (const HSideEffectMap& other);

  // Clears the entry for every side effect contained in side_effects.
  void Kill(SideEffects side_effects);

  // Records instr as the dominating instruction for every side effect
  // contained in side_effects.
  void Store(SideEffects side_effects, HInstruction* instr);

  bool IsEmpty() const { return count_ == 0; }

  inline HInstruction* operator[](int i) const {
    ASSERT(0 <= i);
    ASSERT(i < kNumberOfTrackedSideEffects);
    return data_[i];
  }
  inline HInstruction* at(int i) const { return operator[](i); }

 private:
  int count_;  // Number of non-NULL entries in data_.
  HInstruction* data_[kNumberOfTrackedSideEffects];
};
96
97
// Unconditionally prints a printf-style message to stdout. Callers are
// expected to guard on FLAG_trace_gvn via the TRACE_GVN_* macros below so
// the arguments are not even evaluated when tracing is off.
void TraceGVN(const char* msg, ...) {
  va_list arguments;
  va_start(arguments, msg);
  OS::VPrint(msg, arguments);
  va_end(arguments);
}
104
105
// Wrap TraceGVN in macros to avoid the expense of evaluating its arguments
// when --trace-gvn is off. One macro per arity since this predates variadic
// macro use in this code base.
// NOTE(review): each macro expands to a bare `if` with no `else`, which is
// subject to the dangling-else hazard when used as the body of an outer
// if/else. A do { ... } while (false) wrapper would be safer, but at least
// one call site (GvnBasicBlockState::pop) invokes TRACE_GVN_2 without a
// trailing semicolon, so that change needs a coordinated fix.
#define TRACE_GVN_1(msg, a1) \
  if (FLAG_trace_gvn) { \
    TraceGVN(msg, a1); \
  }

#define TRACE_GVN_2(msg, a1, a2) \
  if (FLAG_trace_gvn) { \
    TraceGVN(msg, a1, a2); \
  }

#define TRACE_GVN_3(msg, a1, a2, a3) \
  if (FLAG_trace_gvn) { \
    TraceGVN(msg, a1, a2, a3); \
  }

#define TRACE_GVN_4(msg, a1, a2, a3, a4) \
  if (FLAG_trace_gvn) { \
    TraceGVN(msg, a1, a2, a3, a4); \
  }

#define TRACE_GVN_5(msg, a1, a2, a3, a4, a5) \
  if (FLAG_trace_gvn) { \
    TraceGVN(msg, a1, a2, a3, a4, a5); \
  }
132
133
// Private copy constructor backing Copy(): clones the full table state,
// including collision chains and the free list, into freshly zone-allocated
// arrays. Raw MemCopy is safe because the elements are POD (pointer + int).
HInstructionMap::HInstructionMap(Zone* zone, const HInstructionMap* other)
    : array_size_(other->array_size_),
      lists_size_(other->lists_size_),
      count_(other->count_),
      present_depends_on_(other->present_depends_on_),
      array_(zone->NewArray<HInstructionMapListElement>(other->array_size_)),
      lists_(zone->NewArray<HInstructionMapListElement>(other->lists_size_)),
      free_list_head_(other->free_list_head_),
      side_effects_tracker_(other->side_effects_tracker_) {
  MemCopy(array_, other->array_,
          array_size_ * sizeof(HInstructionMapListElement));
  MemCopy(lists_, other->lists_,
          lists_size_ * sizeof(HInstructionMapListElement));
}
148
149
// Removes every stored instruction whose depends-on set intersects
// `changes`. Fast path: if nothing currently in the map depends on any of
// the changed effects, the whole scan is skipped. Otherwise
// present_depends_on_ is rebuilt from the surviving entries.
void HInstructionMap::Kill(SideEffects changes) {
  if (!present_depends_on_.ContainsAnyOf(changes)) return;
  present_depends_on_.RemoveAll();
  for (int i = 0; i < array_size_; ++i) {
    HInstruction* instr = array_[i].instr;
    if (instr != NULL) {
      // Clear list of collisions first, so we know if it becomes empty.
      int kept = kNil;  // List of kept elements.
      int next;
      for (int current = array_[i].next; current != kNil; current = next) {
        next = lists_[current].next;
        // Note: intentionally shadows the outer `instr` (the bucket's own
        // entry), which is handled separately after this loop.
        HInstruction* instr = lists_[current].instr;
        SideEffects depends_on = side_effects_tracker_->ComputeDependsOn(instr);
        if (depends_on.ContainsAnyOf(changes)) {
          // Drop it: return the chain node to the free list.
          count_--;
          lists_[current].next = free_list_head_;
          free_list_head_ = current;
        } else {
          // Keep it: prepend to the `kept` chain (this reverses the chain
          // order, which is fine for a hash bucket).
          lists_[current].next = kept;
          kept = current;
          present_depends_on_.Add(depends_on);
        }
      }
      array_[i].next = kept;

      // Now possibly drop directly indexed element.
      instr = array_[i].instr;  // Reload the bucket's own entry.
      SideEffects depends_on = side_effects_tracker_->ComputeDependsOn(instr);
      if (depends_on.ContainsAnyOf(changes)) {  // Drop it.
        count_--;
        int head = array_[i].next;
        if (head == kNil) {
          array_[i].instr = NULL;
        } else {
          // Promote the first surviving chain element into the bucket slot
          // and return its chain node to the free list.
          array_[i].instr = lists_[head].instr;
          array_[i].next = lists_[head].next;
          lists_[head].next = free_list_head_;
          free_list_head_ = head;
        }
      } else {
        present_depends_on_.Add(depends_on);  // Keep it.
      }
    }
  }
}
197
198
Lookup(HInstruction * instr) const199 HInstruction* HInstructionMap::Lookup(HInstruction* instr) const {
200 uint32_t hash = static_cast<uint32_t>(instr->Hashcode());
201 uint32_t pos = Bound(hash);
202 if (array_[pos].instr != NULL) {
203 if (array_[pos].instr->Equals(instr)) return array_[pos].instr;
204 int next = array_[pos].next;
205 while (next != kNil) {
206 if (lists_[next].instr->Equals(instr)) return lists_[next].instr;
207 next = lists_[next].next;
208 }
209 }
210 return NULL;
211 }
212
213
// Grows the bucket array to new_size (a power of 2) and rehashes every
// stored instruction into it. The collision-list storage (lists_) is reused
// in place: each old chain node is released to the free list right after its
// instruction is re-inserted.
void HInstructionMap::Resize(int new_size, Zone* zone) {
  ASSERT(new_size > count_);
  // Hashing the values into the new array has no more collisions than in the
  // old hash map, so we can use the existing lists_ array, if we are careful.

  // Make sure we have at least one free element.
  if (free_list_head_ == kNil) {
    ResizeLists(lists_size_ << 1, zone);
  }

  HInstructionMapListElement* new_array =
      zone->NewArray<HInstructionMapListElement>(new_size);
  memset(new_array, 0, sizeof(HInstructionMapListElement) * new_size);

  HInstructionMapListElement* old_array = array_;
  int old_size = array_size_;

  // Insert() below increments count_ for each re-inserted element, so reset
  // it and remember the old value for the consistency check at the end.
  int old_count = count_;
  count_ = 0;
  // Do not modify present_depends_on_. It is currently correct.
  array_size_ = new_size;
  array_ = new_array;

  if (old_array != NULL) {
    // Iterate over all the elements in lists, rehashing them.
    for (int i = 0; i < old_size; ++i) {
      if (old_array[i].instr != NULL) {
        int current = old_array[i].next;
        while (current != kNil) {
          Insert(lists_[current].instr, zone);
          int next = lists_[current].next;
          lists_[current].next = free_list_head_;
          free_list_head_ = current;
          current = next;
        }
        // Rehash the directly stored instruction.
        Insert(old_array[i].instr, zone);
      }
    }
  }
  USE(old_count);
  ASSERT(count_ == old_count);  // Rehashing must not lose or gain entries.
}
257
258
// Grows the collision-list storage to new_size, preserving existing chain
// nodes (indices stay valid since they are array offsets) and pushing every
// newly added slot onto the free list.
void HInstructionMap::ResizeLists(int new_size, Zone* zone) {
  ASSERT(new_size > lists_size_);

  HInstructionMapListElement* new_lists =
      zone->NewArray<HInstructionMapListElement>(new_size);
  memset(new_lists, 0, sizeof(HInstructionMapListElement) * new_size);

  HInstructionMapListElement* old_lists = lists_;
  int old_size = lists_size_;

  lists_size_ = new_size;
  lists_ = new_lists;

  if (old_lists != NULL) {
    MemCopy(lists_, old_lists, old_size * sizeof(HInstructionMapListElement));
  }
  // Thread each new slot onto the front of the free list.
  for (int i = old_size; i < lists_size_; ++i) {
    lists_[i].next = free_list_head_;
    free_list_head_ = i;
  }
}
280
281
// Inserts instr into the table (internal helper for Add() and Resize();
// does not touch present_depends_on_). Grows the bucket array once it is
// half full. On collision the new entry is linked at the head of the
// bucket's chain, taking a node from the free list and growing lists_ first
// if the free list is empty.
void HInstructionMap::Insert(HInstruction* instr, Zone* zone) {
  ASSERT(instr != NULL);
  // Resizing when half of the hashtable is filled up.
  if (count_ >= array_size_ >> 1) Resize(array_size_ << 1, zone);
  ASSERT(count_ < array_size_);
  count_++;
  uint32_t pos = Bound(static_cast<uint32_t>(instr->Hashcode()));
  if (array_[pos].instr == NULL) {
    array_[pos].instr = instr;
    array_[pos].next = kNil;
  } else {
    if (free_list_head_ == kNil) {
      ResizeLists(lists_size_ << 1, zone);
    }
    int new_element_pos = free_list_head_;
    ASSERT(new_element_pos != kNil);
    free_list_head_ = lists_[free_list_head_].next;
    lists_[new_element_pos].instr = instr;
    lists_[new_element_pos].next = array_[pos].next;
    ASSERT(array_[pos].next == kNil || lists_[array_[pos].next].instr != NULL);
    array_[pos].next = new_element_pos;
  }
}
305
306
HSideEffectMap()307 HSideEffectMap::HSideEffectMap() : count_(0) {
308 memset(data_, 0, kNumberOfTrackedSideEffects * kPointerSize);
309 }
310
311
// Copy constructor. count_ is copied in the initializer list because the
// assignment operator below only copies data_.
HSideEffectMap::HSideEffectMap(HSideEffectMap* other) : count_(other->count_) {
  *this = *other;  // Calls operator=.
}
315
316
// Assignment copies the dominator table. It must also copy count_ so that
// IsEmpty() stays consistent with data_: previously only data_ was copied,
// leaving count_ stale when assigning between maps with different numbers
// of populated entries (e.g. in GvnBasicBlockState::Initialize, which
// assigns into a reused dominators_ member); subsequent Kill()/Store()
// updates then drift count_ away from the real number of non-NULL slots.
HSideEffectMap& HSideEffectMap::operator=(const HSideEffectMap& other) {
  if (this != &other) {
    MemCopy(data_, other.data_, kNumberOfTrackedSideEffects * kPointerSize);
    count_ = other.count_;
  }
  return *this;
}
323
324
Kill(SideEffects side_effects)325 void HSideEffectMap::Kill(SideEffects side_effects) {
326 for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
327 if (side_effects.ContainsFlag(GVNFlagFromInt(i))) {
328 if (data_[i] != NULL) count_--;
329 data_[i] = NULL;
330 }
331 }
332 }
333
334
Store(SideEffects side_effects,HInstruction * instr)335 void HSideEffectMap::Store(SideEffects side_effects, HInstruction* instr) {
336 for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
337 if (side_effects.ContainsFlag(GVNFlagFromInt(i))) {
338 if (data_[i] == NULL) count_++;
339 data_[i] = instr;
340 }
341 }
342 }
343
344
// Computes the set of side effects produced by instr. The coarse
// kGlobalVars and kInobjectFields flags are refined to per-variable /
// per-field special bits when the exact store target is known and a
// tracking slot is available; otherwise every special bit of that category
// is set conservatively.
SideEffects SideEffectsTracker::ComputeChanges(HInstruction* instr) {
  int index;
  SideEffects result(instr->ChangesFlags());
  if (result.ContainsFlag(kGlobalVars)) {
    if (instr->IsStoreGlobalCell() &&
        ComputeGlobalVar(HStoreGlobalCell::cast(instr)->cell(), &index)) {
      result.RemoveFlag(kGlobalVars);
      result.AddSpecial(GlobalVar(index));
    } else {
      // Unknown or untracked cell: assume it may change any tracked var.
      for (index = 0; index < kNumberOfGlobalVars; ++index) {
        result.AddSpecial(GlobalVar(index));
      }
    }
  }
  if (result.ContainsFlag(kInobjectFields)) {
    if (instr->IsStoreNamedField() &&
        ComputeInobjectField(HStoreNamedField::cast(instr)->access(), &index)) {
      result.RemoveFlag(kInobjectFields);
      result.AddSpecial(InobjectField(index));
    } else {
      // Unknown or untracked access: assume it may change any tracked field.
      for (index = 0; index < kNumberOfInobjectFields; ++index) {
        result.AddSpecial(InobjectField(index));
      }
    }
  }
  return result;
}
372
373
// Mirror of ComputeChanges() for the depends-on direction: computes the set
// of side effects instr is sensitive to, refining kGlobalVars /
// kInobjectFields to per-variable / per-field special bits for loads with a
// known, tracked source; otherwise sets the whole category conservatively.
SideEffects SideEffectsTracker::ComputeDependsOn(HInstruction* instr) {
  int index;
  SideEffects result(instr->DependsOnFlags());
  if (result.ContainsFlag(kGlobalVars)) {
    if (instr->IsLoadGlobalCell() &&
        ComputeGlobalVar(HLoadGlobalCell::cast(instr)->cell(), &index)) {
      result.RemoveFlag(kGlobalVars);
      result.AddSpecial(GlobalVar(index));
    } else {
      // Unknown or untracked cell: depend on every tracked global var.
      for (index = 0; index < kNumberOfGlobalVars; ++index) {
        result.AddSpecial(GlobalVar(index));
      }
    }
  }
  if (result.ContainsFlag(kInobjectFields)) {
    if (instr->IsLoadNamedField() &&
        ComputeInobjectField(HLoadNamedField::cast(instr)->access(), &index)) {
      result.RemoveFlag(kInobjectFields);
      result.AddSpecial(InobjectField(index));
    } else {
      // Unknown or untracked access: depend on every tracked field.
      for (index = 0; index < kNumberOfInobjectFields; ++index) {
        result.AddSpecial(InobjectField(index));
      }
    }
  }
  return result;
}
401
402
// Pretty-prints side_effects for --trace-gvn output as a comma-separated
// list in brackets: named GVN flags first, then tracked global-var cells
// (by cell address) and tracked in-object field accesses.
void SideEffectsTracker::PrintSideEffectsTo(StringStream* stream,
                                            SideEffects side_effects) const {
  const char* separator = "";
  stream->Add("[");
  for (int bit = 0; bit < kNumberOfFlags; ++bit) {
    GVNFlag flag = GVNFlagFromInt(bit);
    if (side_effects.ContainsFlag(flag)) {
      stream->Add(separator);
      separator = ", ";
      switch (flag) {
// Expands to one case per flag that prints the flag's name.
#define DECLARE_FLAG(Type)      \
        case k##Type:           \
          stream->Add(#Type);   \
          break;
GVN_TRACKED_FLAG_LIST(DECLARE_FLAG)
GVN_UNTRACKED_FLAG_LIST(DECLARE_FLAG)
#undef DECLARE_FLAG
        default:
            break;
      }
    }
  }
  for (int index = 0; index < num_global_vars_; ++index) {
    if (side_effects.ContainsSpecial(GlobalVar(index))) {
      stream->Add(separator);
      separator = ", ";
      stream->Add("[%p]", *global_vars_[index].handle());
    }
  }
  for (int index = 0; index < num_inobject_fields_; ++index) {
    if (side_effects.ContainsSpecial(InobjectField(index))) {
      stream->Add(separator);
      separator = ", ";
      inobject_fields_[index].PrintTo(stream);
    }
  }
  stream->Add("]");
}
441
442
ComputeGlobalVar(Unique<Cell> cell,int * index)443 bool SideEffectsTracker::ComputeGlobalVar(Unique<Cell> cell, int* index) {
444 for (int i = 0; i < num_global_vars_; ++i) {
445 if (cell == global_vars_[i]) {
446 *index = i;
447 return true;
448 }
449 }
450 if (num_global_vars_ < kNumberOfGlobalVars) {
451 if (FLAG_trace_gvn) {
452 HeapStringAllocator allocator;
453 StringStream stream(&allocator);
454 stream.Add("Tracking global var [%p] (mapped to index %d)\n",
455 *cell.handle(), num_global_vars_);
456 stream.OutputToStdOut();
457 }
458 *index = num_global_vars_;
459 global_vars_[num_global_vars_++] = cell;
460 return true;
461 }
462 return false;
463 }
464
465
ComputeInobjectField(HObjectAccess access,int * index)466 bool SideEffectsTracker::ComputeInobjectField(HObjectAccess access,
467 int* index) {
468 for (int i = 0; i < num_inobject_fields_; ++i) {
469 if (access.Equals(inobject_fields_[i])) {
470 *index = i;
471 return true;
472 }
473 }
474 if (num_inobject_fields_ < kNumberOfInobjectFields) {
475 if (FLAG_trace_gvn) {
476 HeapStringAllocator allocator;
477 StringStream stream(&allocator);
478 stream.Add("Tracking inobject field access ");
479 access.PrintTo(&stream);
480 stream.Add(" (mapped to index %d)\n", num_inobject_fields_);
481 stream.OutputToStdOut();
482 }
483 *index = num_inobject_fields_;
484 inobject_fields_[num_inobject_fields_++] = access;
485 return true;
486 }
487 return false;
488 }
489
490
// Sets up the GVN phase: pre-sizes the per-block change sets
// (block_side_effects_), per-loop change sets (loop_side_effects_) and the
// path-traversal bitmap (visited_on_paths_) to one entry per graph block.
HGlobalValueNumberingPhase::HGlobalValueNumberingPhase(HGraph* graph)
    : HPhase("H_Global value numbering", graph),
      removed_side_effects_(false),
      block_side_effects_(graph->blocks()->length(), zone()),
      loop_side_effects_(graph->blocks()->length(), zone()),
      visited_on_paths_(graph->blocks()->length(), zone()) {
  // GVN must not allocate handles.
  ASSERT(!AllowHandleAllocation::IsAllowed());
  // Populate both lists with empty side-effect sets, one per block.
  block_side_effects_.AddBlock(
      SideEffects(), graph->blocks()->length(), zone());
  loop_side_effects_.AddBlock(
      SideEffects(), graph->blocks()->length(), zone());
}
503
504
// Runs up to FLAG_gvn_iterations rounds of side-effect computation,
// optional loop-invariant code motion and value numbering. Iteration stops
// early as soon as a round removes no side-effecting instruction (removing
// one can expose further redundancies, hence the loop).
void HGlobalValueNumberingPhase::Run() {
  ASSERT(!removed_side_effects_);
  for (int i = FLAG_gvn_iterations; i > 0; --i) {
    // Compute the side effects.
    ComputeBlockSideEffects();

    // Perform loop invariant code motion if requested.
    if (FLAG_loop_invariant_code_motion) LoopInvariantCodeMotion();

    // Perform the actual value numbering.
    AnalyzeGraph();

    // Continue GVN if we removed any side effects.
    if (!removed_side_effects_) break;
    removed_side_effects_ = false;

    // Clear all side effects so the next round recomputes them from the
    // (now smaller) graph.
    ASSERT_EQ(block_side_effects_.length(), graph()->blocks()->length());
    ASSERT_EQ(loop_side_effects_.length(), graph()->blocks()->length());
    for (int i = 0; i < graph()->blocks()->length(); ++i) {
      block_side_effects_[i].RemoveAll();
      loop_side_effects_[i].RemoveAll();
    }
    visited_on_paths_.Clear();
  }
}
531
532
// Computes, per block, the union of all side effects its instructions
// produce, and accumulates those into the enclosing loops' side-effect
// sets. Iterates blocks in reverse id order so inner-loop effects are
// already known when outer loops are reached. Unreachable and deoptimizing
// blocks are skipped.
void HGlobalValueNumberingPhase::ComputeBlockSideEffects() {
  for (int i = graph()->blocks()->length() - 1; i >= 0; --i) {
    // Compute side effects for the block.
    HBasicBlock* block = graph()->blocks()->at(i);
    SideEffects side_effects;
    if (block->IsReachable() && !block->IsDeoptimizing()) {
      int id = block->block_id();
      for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
        HInstruction* instr = it.Current();
        side_effects.Add(side_effects_tracker_.ComputeChanges(instr));
      }
      block_side_effects_[id].Add(side_effects);

      // Loop headers are part of their loop.
      if (block->IsLoopHeader()) {
        loop_side_effects_[id].Add(side_effects);
      }

      // Propagate loop side effects upwards.
      if (block->HasParentLoopHeader()) {
        HBasicBlock* with_parent = block;
        // For a loop header, propagate the whole inner loop's effects, not
        // just this block's own.
        if (block->IsLoopHeader()) side_effects = loop_side_effects_[id];
        do {
          HBasicBlock* parent_block = with_parent->parent_loop_header();
          loop_side_effects_[parent_block->block_id()].Add(side_effects);
          with_parent = parent_block;
        } while (with_parent->HasParentLoopHeader());
      }
    }
  }
}
564
565
// For every loop (identified by its header block), tries to hoist
// invariant instructions out of each block in the loop's body, using the
// loop's accumulated side effects as the kill set. Loop bodies are the id
// range [header, last back edge].
void HGlobalValueNumberingPhase::LoopInvariantCodeMotion() {
  TRACE_GVN_1("Using optimistic loop invariant code motion: %s\n",
              graph()->use_optimistic_licm() ? "yes" : "no");
  for (int i = graph()->blocks()->length() - 1; i >= 0; --i) {
    HBasicBlock* block = graph()->blocks()->at(i);
    if (block->IsLoopHeader()) {
      SideEffects side_effects = loop_side_effects_[block->block_id()];
      if (FLAG_trace_gvn) {
        HeapStringAllocator allocator;
        StringStream stream(&allocator);
        stream.Add("Try loop invariant motion for block B%d changes ",
                   block->block_id());
        side_effects_tracker_.PrintSideEffectsTo(&stream, side_effects);
        stream.Add("\n");
        stream.OutputToStdOut();
      }
      HBasicBlock* last = block->loop_information()->GetLastBackEdge();
      for (int j = block->block_id(); j <= last->block_id(); ++j) {
        ProcessLoopBlock(graph()->blocks()->at(j), block, side_effects);
      }
    }
  }
}
589
590
// Hoists GVN-able instructions of `block` (a block inside the loop rooted
// at loop_header) to the loop's pre-header when (a) the instruction does
// not depend on anything the loop changes (loop_kills), (b) — unless
// optimistic LICM is enabled — the block dominates all loop exits, and
// (c) all of its operands are defined before the pre-header's end.
void HGlobalValueNumberingPhase::ProcessLoopBlock(
    HBasicBlock* block,
    HBasicBlock* loop_header,
    SideEffects loop_kills) {
  HBasicBlock* pre_header = loop_header->predecessors()->at(0);
  if (FLAG_trace_gvn) {
    HeapStringAllocator allocator;
    StringStream stream(&allocator);
    stream.Add("Loop invariant code motion for B%d depends on ",
               block->block_id());
    side_effects_tracker_.PrintSideEffectsTo(&stream, loop_kills);
    stream.Add("\n");
    stream.OutputToStdOut();
  }
  HInstruction* instr = block->first();
  while (instr != NULL) {
    // Capture the successor first: hoisting unlinks instr from this block.
    HInstruction* next = instr->next();
    if (instr->CheckFlag(HValue::kUseGVN)) {
      SideEffects changes = side_effects_tracker_.ComputeChanges(instr);
      SideEffects depends_on = side_effects_tracker_.ComputeDependsOn(instr);
      if (FLAG_trace_gvn) {
        HeapStringAllocator allocator;
        StringStream stream(&allocator);
        stream.Add("Checking instruction i%d (%s) changes ",
                   instr->id(), instr->Mnemonic());
        side_effects_tracker_.PrintSideEffectsTo(&stream, changes);
        stream.Add(", depends on ");
        side_effects_tracker_.PrintSideEffectsTo(&stream, depends_on);
        stream.Add(". Loop changes ");
        side_effects_tracker_.PrintSideEffectsTo(&stream, loop_kills);
        stream.Add("\n");
        stream.OutputToStdOut();
      }
      bool can_hoist = !depends_on.ContainsAnyOf(loop_kills);
      if (can_hoist && !graph()->use_optimistic_licm()) {
        can_hoist = block->IsLoopSuccessorDominator();
      }

      if (can_hoist) {
        // All operands must already be available at the pre-header.
        bool inputs_loop_invariant = true;
        for (int i = 0; i < instr->OperandCount(); ++i) {
          if (instr->OperandAt(i)->IsDefinedAfter(pre_header)) {
            inputs_loop_invariant = false;
          }
        }

        if (inputs_loop_invariant && ShouldMove(instr, loop_header)) {
          TRACE_GVN_2("Hoisting loop invariant instruction i%d to block B%d\n",
                      instr->id(), pre_header->block_id());
          // Move the instruction out of the loop.
          instr->Unlink();
          instr->InsertBefore(pre_header->end());
          if (instr->HasSideEffects()) removed_side_effects_ = true;
        }
      }
    }
    instr = next;
  }
}
650
651
AllowCodeMotion()652 bool HGlobalValueNumberingPhase::AllowCodeMotion() {
653 return info()->IsStub() || info()->opt_count() + 1 < FLAG_max_opt_count;
654 }
655
656
ShouldMove(HInstruction * instr,HBasicBlock * loop_header)657 bool HGlobalValueNumberingPhase::ShouldMove(HInstruction* instr,
658 HBasicBlock* loop_header) {
659 // If we've disabled code motion or we're in a block that unconditionally
660 // deoptimizes, don't move any instructions.
661 return AllowCodeMotion() && !instr->block()->IsDeoptimizing() &&
662 instr->block()->IsReachable();
663 }
664
665
// Collects the union of side effects of every block that can lie on a path
// from `dominator` to `dominated` (exclusive of both), by recursing
// backwards through `dominated`'s predecessors. Blocks are filtered by the
// id range (dominator_id, dominated_id) and deduplicated via
// visited_on_paths_, which the caller must clear before the outermost call.
SideEffects
HGlobalValueNumberingPhase::CollectSideEffectsOnPathsToDominatedBlock(
    HBasicBlock* dominator, HBasicBlock* dominated) {
  SideEffects side_effects;
  for (int i = 0; i < dominated->predecessors()->length(); ++i) {
    HBasicBlock* block = dominated->predecessors()->at(i);
    if (dominator->block_id() < block->block_id() &&
        block->block_id() < dominated->block_id() &&
        !visited_on_paths_.Contains(block->block_id())) {
      visited_on_paths_.Add(block->block_id());
      side_effects.Add(block_side_effects_[block->block_id()]);
      // A loop header on the path implies the whole loop may execute.
      if (block->IsLoopHeader()) {
        side_effects.Add(loop_side_effects_[block->block_id()]);
      }
      side_effects.Add(CollectSideEffectsOnPathsToDominatedBlock(
          dominator, block));
    }
  }
  return side_effects;
}
686
687
// Each instance of this class is like a "stack frame" for the recursive
// traversal of the dominator tree done during GVN (the stack is handled
// as a double linked list).
// We reuse frames when possible so the list length is limited by the depth
// of the dominator tree but this forces us to initialize each frame calling
// an explicit "Initialize" method instead of a using constructor.
class GvnBasicBlockState: public ZoneObject {
 public:
  // Creates the root frame for the traversal, owning entry_map directly
  // (the private constructor copies the map for all non-root frames).
  static GvnBasicBlockState* CreateEntry(Zone* zone,
                                         HBasicBlock* entry_block,
                                         HInstructionMap* entry_map) {
    return new(zone)
        GvnBasicBlockState(NULL, entry_block, entry_map, NULL, zone);
  }

  HBasicBlock* block() { return block_; }
  HInstructionMap* map() { return map_; }
  HSideEffectMap* dominators() { return &dominators_; }

  // Advances to the next frame of the dominator-tree traversal, setting
  // *dominator to the block that dominates the returned frame's block.
  // Returns NULL when the traversal is complete.
  GvnBasicBlockState* next_in_dominator_tree_traversal(
      Zone* zone,
      HBasicBlock** dominator) {
    // This assignment needs to happen before calling next_dominated() because
    // that call can reuse "this" if we are at the last dominated block.
    *dominator = block();
    GvnBasicBlockState* result = next_dominated(zone);
    if (result == NULL) {
      GvnBasicBlockState* dominator_state = pop();
      if (dominator_state != NULL) {
        // This branch is guaranteed not to return NULL because pop() never
        // returns a state where "is_done() == true".
        *dominator = dominator_state->block();
        result = dominator_state->next_dominated(zone);
      } else {
        // Unnecessary (we are returning NULL) but done for cleanness.
        *dominator = NULL;
      }
    }
    return result;
  }

 private:
  // (Re)initializes this frame for `block`. When copy_map is true the
  // instruction map is cloned so sibling subtrees cannot pollute each
  // other; the last child of a frame reuses the parent's map directly.
  void Initialize(HBasicBlock* block,
                  HInstructionMap* map,
                  HSideEffectMap* dominators,
                  bool copy_map,
                  Zone* zone) {
    block_ = block;
    map_ = copy_map ? map->Copy(zone) : map;
    dominated_index_ = -1;
    length_ = block->dominated_blocks()->length();
    if (dominators != NULL) {
      dominators_ = *dominators;
    }
  }
  // True once every dominated child of this frame's block has been visited.
  bool is_done() { return dominated_index_ >= length_; }

  GvnBasicBlockState(GvnBasicBlockState* previous,
                     HBasicBlock* block,
                     HInstructionMap* map,
                     HSideEffectMap* dominators,
                     Zone* zone)
      : previous_(previous), next_(NULL) {
    Initialize(block, map, dominators, true, zone);
  }

  // Returns the frame for the next dominated child, reusing "this" for the
  // last child (so the map need not be copied), or NULL when exhausted.
  GvnBasicBlockState* next_dominated(Zone* zone) {
    dominated_index_++;
    if (dominated_index_ == length_ - 1) {
      // No need to copy the map for the last child in the dominator tree.
      Initialize(block_->dominated_blocks()->at(dominated_index_),
                 map(),
                 dominators(),
                 false,
                 zone);
      return this;
    } else if (dominated_index_ < length_) {
      return push(zone, block_->dominated_blocks()->at(dominated_index_));
    } else {
      return NULL;
    }
  }

  // Pushes a child frame, reusing a previously allocated successor frame
  // when one exists.
  GvnBasicBlockState* push(Zone* zone, HBasicBlock* block) {
    if (next_ == NULL) {
      next_ =
          new(zone) GvnBasicBlockState(this, block, map(), dominators(), zone);
    } else {
      next_->Initialize(block, map(), dominators(), true, zone);
    }
    return next_;
  }
  // Walks back up the stack to the nearest ancestor frame that still has
  // unvisited children, or NULL at the end of the traversal.
  // NOTE(review): the trace message always prints this frame's block and
  // previous_->block() even as `result` walks further up, so multi-level
  // backtracks repeat the same pair — trace-only output; verify intent.
  GvnBasicBlockState* pop() {
    GvnBasicBlockState* result = previous_;
    while (result != NULL && result->is_done()) {
      TRACE_GVN_2("Backtracking from block B%d to block b%d\n",
                  block()->block_id(),
                  previous_->block()->block_id())
      result = result->previous_;
    }
    return result;
  }

  GvnBasicBlockState* previous_;  // Toward the dominator-tree root.
  GvnBasicBlockState* next_;      // Reusable child frame, if any.
  HBasicBlock* block_;
  HInstructionMap* map_;
  HSideEffectMap dominators_;
  int dominated_index_;  // Index of the child currently being visited.
  int length_;           // Number of dominated children of block_.
};
799
800
// This is a recursive traversal of the dominator tree but it has been turned
// into a loop to avoid stack overflows.
// The logical "stack frames" of the recursion are kept in a list of
// GvnBasicBlockState instances.
//
// For each block (in dominator-tree order) this: kills map entries
// invalidated by loop side effects at loop headers, lets instructions react
// to their dominating side-effect producers, kills entries invalidated by
// each instruction's own changes, and finally replaces any instruction
// already present in the map with the earlier equivalent.
void HGlobalValueNumberingPhase::AnalyzeGraph() {
  HBasicBlock* entry_block = graph()->entry_block();
  HInstructionMap* entry_map =
      new(zone()) HInstructionMap(zone(), &side_effects_tracker_);
  GvnBasicBlockState* current =
      GvnBasicBlockState::CreateEntry(zone(), entry_block, entry_map);

  while (current != NULL) {
    HBasicBlock* block = current->block();
    HInstructionMap* map = current->map();
    HSideEffectMap* dominators = current->dominators();

    TRACE_GVN_2("Analyzing block B%d%s\n",
                block->block_id(),
                block->IsLoopHeader() ? " (loop header)" : "");

    // If this is a loop header kill everything killed by the loop.
    if (block->IsLoopHeader()) {
      map->Kill(loop_side_effects_[block->block_id()]);
      dominators->Kill(loop_side_effects_[block->block_id()]);
    }

    // Go through all instructions of the current block.
    for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
      HInstruction* instr = it.Current();
      if (instr->CheckFlag(HValue::kTrackSideEffectDominators)) {
        // Inform the instruction of the closest dominating producer of
        // each side effect it depends on (e.g. to elide redundant checks).
        for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
          HValue* other = dominators->at(i);
          GVNFlag flag = GVNFlagFromInt(i);
          if (instr->DependsOnFlags().Contains(flag) && other != NULL) {
            TRACE_GVN_5("Side-effect #%d in %d (%s) is dominated by %d (%s)\n",
                        i,
                        instr->id(),
                        instr->Mnemonic(),
                        other->id(),
                        other->Mnemonic());
            if (instr->HandleSideEffectDominator(flag, other)) {
              removed_side_effects_ = true;
            }
          }
        }
      }
      // Instruction was unlinked during graph traversal.
      if (!instr->IsLinked()) continue;

      SideEffects changes = side_effects_tracker_.ComputeChanges(instr);
      if (!changes.IsEmpty()) {
        // Clear all instructions in the map that are affected by side effects.
        // Store instruction as the dominating one for tracked side effects.
        map->Kill(changes);
        dominators->Store(changes, instr);
        if (FLAG_trace_gvn) {
          HeapStringAllocator allocator;
          StringStream stream(&allocator);
          stream.Add("Instruction i%d changes ", instr->id());
          side_effects_tracker_.PrintSideEffectsTo(&stream, changes);
          stream.Add("\n");
          stream.OutputToStdOut();
        }
      }
      if (instr->CheckFlag(HValue::kUseGVN) &&
          !instr->CheckFlag(HValue::kCantBeReplaced)) {
        ASSERT(!instr->HasObservableSideEffects());
        HInstruction* other = map->Lookup(instr);
        if (other != NULL) {
          // An equivalent instruction dominates this one: reuse its value.
          ASSERT(instr->Equals(other) && other->Equals(instr));
          TRACE_GVN_4("Replacing instruction i%d (%s) with i%d (%s)\n",
                      instr->id(),
                      instr->Mnemonic(),
                      other->id(),
                      other->Mnemonic());
          if (instr->HasSideEffects()) removed_side_effects_ = true;
          instr->DeleteAndReplaceWith(other);
        } else {
          map->Add(instr, zone());
        }
      }
    }

    HBasicBlock* dominator_block;
    GvnBasicBlockState* next =
        current->next_in_dominator_tree_traversal(zone(),
                                                  &dominator_block);

    if (next != NULL) {
      HBasicBlock* dominated = next->block();
      HInstructionMap* successor_map = next->map();
      HSideEffectMap* successor_dominators = next->dominators();

      // Kill everything killed on any path between this block and the
      // dominated block. We don't have to traverse these paths if the
      // value map and the dominators list is already empty. If the range
      // of block ids (block_id, dominated_id) is empty there are no such
      // paths.
      if ((!successor_map->IsEmpty() || !successor_dominators->IsEmpty()) &&
          dominator_block->block_id() + 1 < dominated->block_id()) {
        visited_on_paths_.Clear();
        SideEffects side_effects_on_all_paths =
            CollectSideEffectsOnPathsToDominatedBlock(dominator_block,
                                                      dominated);
        successor_map->Kill(side_effects_on_all_paths);
        successor_dominators->Kill(side_effects_on_all_paths);
      }
    }
    current = next;
  }
}
912
913 } } // namespace v8::internal
914