• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #ifndef ART_COMPILER_OPTIMIZING_SSA_LIVENESS_ANALYSIS_H_
18 #define ART_COMPILER_OPTIMIZING_SSA_LIVENESS_ANALYSIS_H_
19 
20 #include <iostream>
21 
22 #include "base/intrusive_forward_list.h"
23 #include "base/iteration_range.h"
24 #include "base/macros.h"
25 #include "base/scoped_arena_allocator.h"
26 #include "base/scoped_arena_containers.h"
27 #include "nodes.h"
28 
29 namespace art HIDDEN {
30 
31 class CodeGenerator;
32 class SsaLivenessAnalysis;
33 
34 static constexpr int kNoRegister = -1;
35 
// Per-basic-block liveness state used by SsaLivenessAnalysis: the classic
// dataflow bit vectors (live-in, live-out, kill), one bit per SSA value.
class BlockInfo : public ArenaObject<kArenaAllocSsaLiveness> {
 public:
  BlockInfo(ScopedArenaAllocator* allocator, const HBasicBlock& block, size_t number_of_ssa_values)
      : block_(block),
        live_in_(allocator, number_of_ssa_values, false, kArenaAllocSsaLiveness),
        live_out_(allocator, number_of_ssa_values, false, kArenaAllocSsaLiveness),
        kill_(allocator, number_of_ssa_values, false, kArenaAllocSsaLiveness) {
    // `block_` is only read by the friend class SsaLivenessAnalysis.
    UNUSED(block_);
    live_in_.ClearAllBits();
    live_out_.ClearAllBits();
    kill_.ClearAllBits();
  }

 private:
  const HBasicBlock& block_;
  ArenaBitVector live_in_;   // SSA values live on entry to the block.
  ArenaBitVector live_out_;  // SSA values live on exit from the block.
  ArenaBitVector kill_;      // SSA values defined in the block.

  friend class SsaLivenessAnalysis;

  DISALLOW_COPY_AND_ASSIGN(BlockInfo);
};
59 
60 /**
61  * A live range contains the start and end of a range where an instruction or a temporary
62  * is live.
63  */
/**
 * A live range contains the start and end of a range where an instruction or a temporary
 * is live. Ranges are half-open intervals [start, end). Ranges of the same live
 * interval form a singly-linked list sorted by position, with no overlap and no
 * adjacency (enforced by the constructor DCHECKs).
 */
class LiveRange final : public ArenaObject<kArenaAllocSsaLiveness> {
 public:
  LiveRange(size_t start, size_t end, LiveRange* next) : start_(start), end_(end), next_(next) {
    DCHECK_LT(start, end);
    DCHECK(next_ == nullptr || next_->GetStart() > GetEnd());
  }

  size_t GetStart() const { return start_; }
  size_t GetEnd() const { return end_; }
  LiveRange* GetNext() const { return next_; }

  // Whether [start_, end_) overlaps [other.start_, other.end_).
  bool IntersectsWith(const LiveRange& other) const {
    return (start_ >= other.start_ && start_ < other.end_)
        || (other.start_ >= start_ && other.start_ < end_);
  }

  // Whether this range ends at or before `other` starts.
  bool IsBefore(const LiveRange& other) const {
    return end_ <= other.start_;
  }

  void Dump(std::ostream& stream) const {
    stream << "[" << start_ << "," << end_ << ")";
  }

  // Deep-copies this range and every following range, allocating from `allocator`.
  LiveRange* Dup(ScopedArenaAllocator* allocator) const {
    return new (allocator) LiveRange(
        start_, end_, next_ == nullptr ? nullptr : next_->Dup(allocator));
  }

  // Returns the last range in the linked list (recursive walk).
  LiveRange* GetLastRange() {
    return next_ == nullptr ? this : next_->GetLastRange();
  }

 private:
  size_t start_;
  size_t end_;
  LiveRange* next_;

  // LiveInterval mutates start_/end_/next_ directly when splitting/merging ranges.
  friend class LiveInterval;

  DISALLOW_COPY_AND_ASSIGN(LiveRange);
};
106 
107 /**
108  * A use position represents a live interval use at a given position.
109  */
/**
 * A use position represents a live interval use at a given position.
 * A use either belongs to a real user instruction (with an input index) or is
 * "synthesized" (no user) to keep an interval alive, e.g. across back edges.
 */
class UsePosition : public ArenaObject<kArenaAllocSsaLiveness>,
                    public IntrusiveForwardListNode<UsePosition> {
 public:
  UsePosition(HInstruction* user, size_t input_index, size_t position)
      : user_(user),
        input_index_(input_index),
        position_(position) {
  }

  // Synthesized use: no user instruction, no input index.
  // NOTE(review): this ctor range-checks `position` via dchecked_integral_cast
  // while the other ctor does not — presumably intentional, but confirm.
  explicit UsePosition(size_t position)
      : user_(nullptr),
        input_index_(kNoInput),
        position_(dchecked_integral_cast<uint32_t>(position)) {
  }

  size_t GetPosition() const { return position_; }

  HInstruction* GetUser() const { return user_; }

  // A synthesized use has no associated user instruction.
  bool IsSynthesized() const { return user_ == nullptr; }

  size_t GetInputIndex() const { return input_index_; }

  void Dump(std::ostream& stream) const {
    stream << position_;
  }

  HLoopInformation* GetLoopInformation() const {
    return user_->GetBlock()->GetLoopInformation();
  }

  UsePosition* Clone(ScopedArenaAllocator* allocator) const {
    return new (allocator) UsePosition(user_, input_index_, position_);
  }

  // Whether this use forces the value into a register: the user's location
  // for this input is unallocated and requires a register kind.
  bool RequiresRegister() const {
    if (IsSynthesized()) return false;
    Location location = GetUser()->GetLocations()->InAt(GetInputIndex());
    return location.IsUnallocated() && location.RequiresRegisterKind();
  }

 private:
  static constexpr uint32_t kNoInput = static_cast<uint32_t>(-1);

  HInstruction* const user_;
  const size_t input_index_;
  const size_t position_;

  DISALLOW_COPY_AND_ASSIGN(UsePosition);
};
using UsePositionList = IntrusiveForwardList<UsePosition>;
160 using UsePositionList = IntrusiveForwardList<UsePosition>;
161 
162 /**
163  * An environment use position represents a live interval for environment use at a given position.
164  */
/**
 * An environment use position represents a live interval for environment use at a given position.
 * Unlike UsePosition, the user is an HEnvironment and is never null.
 */
class EnvUsePosition : public ArenaObject<kArenaAllocSsaLiveness>,
                       public IntrusiveForwardListNode<EnvUsePosition> {
 public:
  EnvUsePosition(HEnvironment* environment,
                 size_t input_index,
                 size_t position)
      : environment_(environment),
        input_index_(input_index),
        position_(position) {
    DCHECK(environment != nullptr);
  }

  size_t GetPosition() const { return position_; }

  HEnvironment* GetEnvironment() const { return environment_; }
  size_t GetInputIndex() const { return input_index_; }

  void Dump(std::ostream& stream) const {
    stream << position_;
  }

  EnvUsePosition* Clone(ScopedArenaAllocator* allocator) const {
    return new (allocator) EnvUsePosition(environment_, input_index_, position_);
  }

 private:
  HEnvironment* const environment_;
  const size_t input_index_;
  const size_t position_;

  DISALLOW_COPY_AND_ASSIGN(EnvUsePosition);
};
using EnvUsePositionList = IntrusiveForwardList<EnvUsePosition>;
197 using EnvUsePositionList = IntrusiveForwardList<EnvUsePosition>;
198 
// Returns the first use in [first, last) whose position is >= `position`.
// Only valid for (Env)UsePosition iterators; the processed prefix must be
// sorted by position (checked up to the returned iterator only, to keep
// callers at O(n) instead of O(n^2)).
template <typename Iterator>
inline Iterator FindUseAtOrAfterPosition(Iterator first, Iterator last, size_t position) {
  using value_type = const typename Iterator::value_type;
  static_assert(std::is_same<value_type, const UsePosition>::value ||
                    std::is_same<value_type, const EnvUsePosition>::value,
                "Expecting value type UsePosition or EnvUsePosition.");
  Iterator ret = std::find_if(
      first, last, [position](const value_type& use) { return use.GetPosition() >= position; });
  // Check that the processed range is sorted. Do not check the rest of the range to avoid
  // increasing the complexity of callers from O(n) to O(n^2).
  DCHECK(std::is_sorted(
      first,
      ret,
      [](const value_type& lhs, const value_type& rhs) {
          return lhs.GetPosition() < rhs.GetPosition();
      }));
  return ret;
}
217 
// Returns the sub-range of uses whose positions fall in
// [position_begin, position_end), as an iteration range.
template <typename Iterator>
inline IterationRange<Iterator> FindMatchingUseRange(Iterator first,
                                                     Iterator last,
                                                     size_t position_begin,
                                                     size_t position_end) {
  Iterator begin = FindUseAtOrAfterPosition(first, last, position_begin);
  // Start the second search from `begin`: positions are sorted, so uses before
  // `position_begin` cannot match `position_end` either.
  Iterator end = FindUseAtOrAfterPosition(begin, last, position_end);
  return MakeIterationRange(begin, end);
}
227 
// A node in a singly-linked list of safepoints covered by a live interval.
// Wraps the safepoint instruction and derives its lifetime position on demand.
class SafepointPosition : public ArenaObject<kArenaAllocSsaLiveness> {
 public:
  explicit SafepointPosition(HInstruction* instruction)
      : instruction_(instruction),
        next_(nullptr) {}

  // Returns the lifetime position at which `instruction`'s safepoint occurs.
  static size_t ComputePosition(HInstruction* instruction) {
    // We special case instructions emitted at use site, as their
    // safepoint position needs to be at their use.
    if (instruction->IsEmittedAtUseSite()) {
      // Currently only applies to implicit null checks, which are emitted
      // at the next instruction.
      DCHECK(instruction->IsNullCheck()) << instruction->DebugName();
      return instruction->GetLifetimePosition() + 2;
    } else {
      return instruction->GetLifetimePosition();
    }
  }

  void SetNext(SafepointPosition* next) {
    next_ = next;
  }

  size_t GetPosition() const {
    return ComputePosition(instruction_);
  }

  SafepointPosition* GetNext() const {
    return next_;
  }

  LocationSummary* GetLocations() const {
    return instruction_->GetLocations();
  }

  HInstruction* GetInstruction() const {
    return instruction_;
  }

 private:
  HInstruction* const instruction_;
  SafepointPosition* next_;

  DISALLOW_COPY_AND_ASSIGN(SafepointPosition);
};
273 
274 /**
275  * An interval is a list of disjoint live ranges where an instruction is live.
276  * Each instruction that has uses gets an interval.
277  */
278 class LiveInterval : public ArenaObject<kArenaAllocSsaLiveness> {
279  public:
  // Creates a regular interval, optionally attached to the defining `instruction`.
  static LiveInterval* MakeInterval(ScopedArenaAllocator* allocator,
                                    DataType::Type type,
                                    HInstruction* instruction = nullptr) {
    return new (allocator) LiveInterval(allocator, type, instruction);
  }

  // Creates an interval pinned to the physical register `reg` (IsFixed() == true).
  static LiveInterval* MakeFixedInterval(ScopedArenaAllocator* allocator,
                                         int reg,
                                         DataType::Type type) {
    return new (allocator) LiveInterval(allocator, type, nullptr, true, reg, false);
  }

  // Creates an interval for a temporary location (IsTemp() == true).
  static LiveInterval* MakeTempInterval(ScopedArenaAllocator* allocator, DataType::Type type) {
    return new (allocator) LiveInterval(allocator, type, nullptr, false, kNoRegister, true);
  }
295 
  // Whether this interval is pinned to a specific physical register.
  bool IsFixed() const { return is_fixed_; }
  // Whether this interval represents a temporary location.
  bool IsTemp() const { return is_temp_; }
  // This interval is the result of a split.
  bool IsSplit() const { return parent_ != this; }
300 
  // Records the single use of a temp interval and gives it a one-position
  // range [position, position + 1) at the user's lifetime position.
  void AddTempUse(HInstruction* instruction, size_t temp_index) {
    DCHECK(IsTemp());
    DCHECK(GetUses().empty()) << "A temporary can only have one user";
    DCHECK(GetEnvironmentUses().empty()) << "A temporary cannot have environment user";
    size_t position = instruction->GetLifetimePosition();
    UsePosition* new_use = new (allocator_) UsePosition(instruction, temp_index, position);
    uses_.push_front(*new_use);
    AddRange(position, position + 1);
  }
310 
  // Record use of an input. The use will be recorded as an environment use if
  // `environment` is not null and as register use otherwise. If `actual_user`
  // is specified, the use will be recorded at `actual_user`'s lifetime position.
  void AddUse(HInstruction* instruction,
              HEnvironment* environment,
              size_t input_index,
              HInstruction* actual_user = nullptr) {
    bool is_environment = (environment != nullptr);
    LocationSummary* locations = instruction->GetLocations();
    if (actual_user == nullptr) {
      actual_user = instruction;
    }

    // Set the use within the instruction.
    // By default the value must stay live until just after the user (+1).
    size_t position = actual_user->GetLifetimePosition() + 1;
    if (!is_environment) {
      if (locations->IsFixedInput(input_index) || locations->OutputUsesSameAs(input_index)) {
        // For fixed inputs and output same as input, the register allocator
        // requires to have inputs die at the instruction, so that input moves use the
        // location of the input just before that instruction (and not potential moves due
        // to splitting).
        DCHECK_EQ(instruction, actual_user);
        position = actual_user->GetLifetimePosition();
      } else if (!locations->InAt(input_index).IsValid()) {
        // The input has no location: nothing to record.
        return;
      }
    }

    if (!is_environment && instruction->IsInLoop()) {
      // Keep the value live across the loop's back edges.
      AddBackEdgeUses(*instruction->GetBlock());
    }

    if ((!uses_.empty()) &&
        (uses_.front().GetUser() == actual_user) &&
        (uses_.front().GetPosition() < position)) {
      // The user uses the instruction multiple times, and one use dies before the other.
      // We update the use list so that the latter is first.
      DCHECK(!is_environment);
      DCHECK(uses_.front().GetPosition() + 1 == position);
      // Find the insertion point so that positions stay in increasing order.
      UsePositionList::iterator next_pos = uses_.begin();
      UsePositionList::iterator insert_pos;
      do {
        insert_pos = next_pos;
        ++next_pos;
      } while (next_pos != uses_.end() && next_pos->GetPosition() < position);
      UsePosition* new_use = new (allocator_) UsePosition(instruction, input_index, position);
      uses_.insert_after(insert_pos, *new_use);
      // Extend the first range if the interval previously died at the old front use.
      if (first_range_->GetEnd() == uses_.front().GetPosition()) {
        first_range_->end_ = position;
      }
      return;
    }

    if (is_environment) {
      DCHECK(env_uses_.empty() || position <= env_uses_.front().GetPosition());
      EnvUsePosition* new_env_use =
          new (allocator_) EnvUsePosition(environment, input_index, position);
      env_uses_.push_front(*new_env_use);
    } else {
      DCHECK(uses_.empty() || position <= uses_.front().GetPosition());
      UsePosition* new_use = new (allocator_) UsePosition(instruction, input_index, position);
      uses_.push_front(*new_use);
    }

    size_t start_block_position = instruction->GetBlock()->GetLifetimeStart();
    if (first_range_ == nullptr) {
      // First time we see a use of that interval.
      first_range_ = last_range_ = range_search_start_ =
          new (allocator_) LiveRange(start_block_position, position, nullptr);
    } else if (first_range_->GetStart() == start_block_position) {
      // There is a use later in the same block or in a following block.
      // Note that in such a case, `AddRange` for the whole blocks has been called
      // before arriving in this method, and this is the reason the start of
      // `first_range_` is before the given `position`.
      DCHECK_LE(position, first_range_->GetEnd());
    } else {
      DCHECK(first_range_->GetStart() > position);
      // There is a hole in the interval. Create a new range.
      // Note that the start of `first_range_` can be equal to `end`: two blocks
      // having adjacent lifetime positions are not necessarily
      // predecessor/successor. When two blocks are predecessor/successor, the
      // liveness algorithm has called `AddRange` before arriving in this method,
      // and the `first_range_->GetStart() == start_block_position` case above
      // would have applied.
      first_range_ = range_search_start_ =
          new (allocator_) LiveRange(start_block_position, position, first_range_);
    }
  }
398 
  // Records a use by phi `instruction` of its `input_index`-th input, positioned
  // at the end of the predecessor `block` the value flows out of.
  void AddPhiUse(HInstruction* instruction, size_t input_index, HBasicBlock* block) {
    DCHECK(instruction->IsPhi());
    if (block->IsInLoop()) {
      // Keep the value live across the loop's back edges.
      AddBackEdgeUses(*block);
    }
    UsePosition* new_use =
        new (allocator_) UsePosition(instruction, input_index, block->GetLifetimeEnd());
    uses_.push_front(*new_use);
  }
408 
  // Prepends the range [start, end) to the interval, merging with the current
  // first range when adjacent. The DCHECKs imply ranges are added in reverse
  // lifetime order (liveness is computed backwards over the blocks).
  ALWAYS_INLINE void AddRange(size_t start, size_t end) {
    if (first_range_ == nullptr) {
      first_range_ = last_range_ = range_search_start_ =
          new (allocator_) LiveRange(start, end, first_range_);
    } else if (first_range_->GetStart() == end) {
      // There is a use in the following block.
      first_range_->start_ = start;
    } else if (first_range_->GetStart() == start && first_range_->GetEnd() == end) {
      // Identical range already recorded; only fixed intervals may hit this.
      DCHECK(is_fixed_);
    } else {
      DCHECK_GT(first_range_->GetStart(), end);
      // There is a hole in the interval. Create a new range.
      first_range_ = range_search_start_ = new (allocator_) LiveRange(start, end, first_range_);
    }
  }
424 
  // Covers the whole loop body [start, end) with one range, subsuming any
  // existing ranges that fall inside the loop.
  void AddLoopRange(size_t start, size_t end) {
    DCHECK(first_range_ != nullptr);
    DCHECK_LE(start, first_range_->GetStart());
    // Find the range that covers the positions after the loop.
    LiveRange* after_loop = first_range_;
    LiveRange* last_in_loop = nullptr;
    while (after_loop != nullptr && after_loop->GetEnd() < end) {
      DCHECK_LE(start, after_loop->GetStart());
      last_in_loop = after_loop;
      after_loop = after_loop->GetNext();
    }
    if (after_loop == nullptr) {
      // Uses are only in the loop.
      first_range_ = last_range_ = range_search_start_ =
          new (allocator_) LiveRange(start, end, nullptr);
    } else if (after_loop->GetStart() <= end) {
      first_range_ = range_search_start_ = after_loop;
      // There are uses after the loop.
      first_range_->start_ = start;
    } else {
      // The use after the loop is after a lifetime hole.
      DCHECK(last_in_loop != nullptr);
      first_range_ = range_search_start_ = last_in_loop;
      first_range_->start_ = start;
      first_range_->end_ = end;
    }
  }
452 
  // Whether a stack slot has been assigned for spilling this interval.
  bool HasSpillSlot() const { return spill_slot_ != kNoSpillSlot; }
  void SetSpillSlot(int slot) {
    // Fixed and temp intervals are never spilled.
    DCHECK(!is_fixed_);
    DCHECK(!is_temp_);
    spill_slot_ = slot;
  }
  int GetSpillSlot() const { return spill_slot_; }
460 
  // Sets the start of the interval to `from`. An instruction without uses gets
  // a minimal two-position range covering just its definition.
  void SetFrom(size_t from) {
    if (first_range_ != nullptr) {
      first_range_->start_ = from;
    } else {
      // Instruction without uses.
      DCHECK(uses_.empty());
      DCHECK(from == defined_by_->GetLifetimePosition());
      first_range_ = last_range_ = range_search_start_ =
          new (allocator_) LiveRange(from, from + 2, nullptr);
    }
  }
472 
  LiveInterval* GetParent() const { return parent_; }

  // Returns whether this interval is the parent interval, that is, the interval
  // that starts where the HInstruction is defined.
  bool IsParent() const { return parent_ == this; }

  LiveRange* GetFirstRange() const { return first_range_; }
  LiveRange* GetLastRange() const { return last_range_; }

  // Register currently assigned to this interval, or kNoRegister.
  int GetRegister() const { return register_; }
  void SetRegister(int reg) { register_ = reg; }
  void ClearRegister() { register_ = kNoRegister; }
  bool HasRegister() const { return register_ != kNoRegister; }
486 
  // Whether the interval has ended at or before `position`.
  bool IsDeadAt(size_t position) const {
    return GetEnd() <= position;
  }

  // Whether `position` lies within [GetStart(), GetEnd()); ignores lifetime
  // holes (use Covers/CoversSlow for hole-aware queries).
  bool IsDefinedAt(size_t position) const {
    return GetStart() <= position && !IsDeadAt(position);
  }
494 
  // Returns true if the interval contains a LiveRange covering `position`.
  // The range at or immediately after the current position of linear scan
  // is cached for better performance. If `position` can be smaller than
  // that, CoversSlow should be used instead.
  bool Covers(size_t position) {
    LiveRange* candidate = FindRangeAtOrAfter(position, range_search_start_);
    // Cache the hit for the next (monotonically increasing) query.
    range_search_start_ = candidate;
    return (candidate != nullptr && candidate->GetStart() <= position);
  }

  // Same as Covers but always tests all ranges (starts from `first_range_`
  // and does not touch the cache).
  bool CoversSlow(size_t position) const {
    LiveRange* candidate = FindRangeAtOrAfter(position, first_range_);
    return candidate != nullptr && candidate->GetStart() <= position;
  }
510 
  // Returns the first intersection of this interval with `current`, which
  // must be the interval currently being allocated by linear scan.
  // Returns kNoLifetime when the intervals never overlap.
  size_t FirstIntersectionWith(LiveInterval* current) const {
    // Find the first range after the start of `current`. We use the search
    // cache to improve performance.
    DCHECK(GetStart() <= current->GetStart() || IsFixed());
    LiveRange* other_range = current->first_range_;
    LiveRange* my_range = FindRangeAtOrAfter(other_range->GetStart(), range_search_start_);
    if (my_range == nullptr) {
      return kNoLifetime;
    }

    // Advance both intervals and find the first matching range start in
    // this interval.
    do {
      if (my_range->IsBefore(*other_range)) {
        my_range = my_range->GetNext();
        if (my_range == nullptr) {
          return kNoLifetime;
        }
      } else if (other_range->IsBefore(*my_range)) {
        other_range = other_range->GetNext();
        if (other_range == nullptr) {
          return kNoLifetime;
        }
      } else {
        // Neither range is fully before the other: they overlap.
        DCHECK(my_range->IntersectsWith(*other_range));
        return std::max(my_range->GetStart(), other_range->GetStart());
      }
    } while (true);
  }
542 
  // Start position of the first range.
  size_t GetStart() const {
    return first_range_->GetStart();
  }

  // End position of the last range.
  size_t GetEnd() const {
    return last_range_->GetEnd();
  }

  // Total span including any lifetime holes.
  size_t GetLength() const {
    return GetEnd() - GetStart();
  }
554 
  // Returns the position of the first use strictly after `position` that
  // requires a register (including a register-requiring definition at
  // `position` itself), or kNoLifetime if there is none.
  size_t FirstRegisterUseAfter(size_t position) const {
    if (is_temp_) {
      // A temp is only "used" at its single defining position.
      return position == GetStart() ? position : kNoLifetime;
    }

    if (IsDefiningPosition(position) && DefinitionRequiresRegister()) {
      return position;
    }

    size_t end = GetEnd();
    for (const UsePosition& use : GetUses()) {
      size_t use_position = use.GetPosition();
      if (use_position > end) {
        // Uses past the end belong to sibling intervals.
        break;
      }
      if (use_position > position) {
        if (use.RequiresRegister()) {
          return use_position;
        }
      }
    }
    return kNoLifetime;
  }
578 
  // Returns the location of the first register use for this live interval,
  // including a register definition if applicable.
  size_t FirstRegisterUse() const {
    return FirstRegisterUseAfter(GetStart());
  }

  // Whether the interval requires a register rather than a stack location.
  // If needed for performance, this could be cached.
  bool RequiresRegister() const {
    return !HasRegister() && FirstRegisterUse() != kNoLifetime;
  }
590 
  // Returns the position of the first use (register-requiring or not) strictly
  // after `position`, or `position` itself when it is the defining position.
  // Returns kNoLifetime if there is no such use within this interval.
  size_t FirstUseAfter(size_t position) const {
    if (is_temp_) {
      // A temp is only "used" at its single defining position.
      return position == GetStart() ? position : kNoLifetime;
    }

    if (IsDefiningPosition(position)) {
      DCHECK(defined_by_->GetLocations()->Out().IsValid());
      return position;
    }

    size_t end = GetEnd();
    for (const UsePosition& use : GetUses()) {
      size_t use_position = use.GetPosition();
      if (use_position > end) {
        // Uses past the end belong to sibling intervals.
        break;
      }
      if (use_position > position) {
        return use_position;
      }
    }
    return kNoLifetime;
  }
613 
  // Use lists are shared by all siblings and stored on the parent interval.
  const UsePositionList& GetUses() const {
    return parent_->uses_;
  }

  const EnvUsePositionList& GetEnvironmentUses() const {
    return parent_->env_uses_;
  }

  DataType::Type GetType() const {
    return type_;
  }

  // The defining instruction, if any (null for fixed/temp intervals).
  HInstruction* GetDefinedBy() const {
    return defined_by_;
  }
629 
  // Whether any safepoint covered by this interval has locations that
  // report WillCall().
  bool HasWillCallSafepoint() const {
    for (SafepointPosition* safepoint = first_safepoint_;
         safepoint != nullptr;
         safepoint = safepoint->GetNext()) {
      if (safepoint->GetLocations()->WillCall()) return true;
    }
    return false;
  }
638 
  // Returns the last safepoint with a position strictly below `position`, or
  // null when the very first safepoint is already at or past `position`.
  // Falls through to `last_safepoint_` when all safepoints are below `position`.
  SafepointPosition* FindSafepointJustBefore(size_t position) const {
    for (SafepointPosition* safepoint = first_safepoint_, *previous = nullptr;
         safepoint != nullptr;
         previous = safepoint, safepoint = safepoint->GetNext()) {
      if (safepoint->GetPosition() >= position) return previous;
    }
    return last_safepoint_;
  }
647 
  /**
   * Split this interval at `position`. This interval is changed to:
   * [start ... position).
   *
   * The new interval covers:
   * [position ... end)
   *
   * Safepoints, sibling links and the range list are partitioned between the
   * two intervals. Returns the new interval, or null when this interval
   * already ends at or before `position`.
   */
  LiveInterval* SplitAt(size_t position) {
    DCHECK(!is_temp_);
    DCHECK(!is_fixed_);
    DCHECK_GT(position, GetStart());

    if (GetEnd() <= position) {
      // This range dies before `position`, no need to split.
      return nullptr;
    }

    LiveInterval* new_interval = new (allocator_) LiveInterval(allocator_, type_);
    // Partition the safepoint list at `position`.
    SafepointPosition* new_last_safepoint = FindSafepointJustBefore(position);
    if (new_last_safepoint == nullptr) {
      // No safepoint below `position`: all safepoints move to the new interval.
      new_interval->first_safepoint_ = first_safepoint_;
      new_interval->last_safepoint_ = last_safepoint_;
      first_safepoint_ = last_safepoint_ = nullptr;
    } else if (last_safepoint_ != new_last_safepoint) {
      // Safepoints after `new_last_safepoint` move to the new interval.
      new_interval->last_safepoint_ = last_safepoint_;
      new_interval->first_safepoint_ = new_last_safepoint->GetNext();
      DCHECK(new_interval->first_safepoint_ != nullptr);
      last_safepoint_ = new_last_safepoint;
      last_safepoint_->SetNext(nullptr);
    }

    // Link the new interval into the sibling chain right after this one.
    new_interval->next_sibling_ = next_sibling_;
    next_sibling_ = new_interval;
    new_interval->parent_ = parent_;

    LiveRange* current = first_range_;
    LiveRange* previous = nullptr;
    // Iterate over the ranges, and either find a range that covers this position, or
    // two ranges in between this position (that is, the position is in a lifetime hole).
    do {
      if (position >= current->GetEnd()) {
        // Move to next range.
        previous = current;
        current = current->next_;
      } else if (position <= current->GetStart()) {
        // If the previous range did not cover this position, we know position is in
        // a lifetime hole. We can just break the first_range_ and last_range_ links
        // and return the new interval.
        DCHECK(previous != nullptr);
        DCHECK(current != first_range_);
        new_interval->last_range_ = last_range_;
        last_range_ = previous;
        previous->next_ = nullptr;
        new_interval->first_range_ = current;
        if (range_search_start_ != nullptr && range_search_start_->GetEnd() >= current->GetEnd()) {
          // Search start point is inside `new_interval`. Change it to null
          // (i.e. the end of the interval) in the original interval.
          range_search_start_ = nullptr;
        }
        new_interval->range_search_start_ = new_interval->first_range_;
        return new_interval;
      } else {
        // This range covers position. We create a new last_range_ for this interval
        // that covers last_range_->Start() and position. We also shorten the current
        // range and make it the first range of the new interval.
        DCHECK(position < current->GetEnd() && position > current->GetStart());
        new_interval->last_range_ = last_range_;
        last_range_ = new (allocator_) LiveRange(current->start_, position, nullptr);
        if (previous != nullptr) {
          previous->next_ = last_range_;
        } else {
          first_range_ = last_range_;
        }
        new_interval->first_range_ = current;
        current->start_ = position;
        if (range_search_start_ != nullptr && range_search_start_->GetEnd() >= current->GetEnd()) {
          // Search start point is inside `new_interval`. Change it to `last_range`
          // in the original interval. This is conservative but always correct.
          range_search_start_ = last_range_;
        }
        new_interval->range_search_start_ = new_interval->first_range_;
        return new_interval;
      }
    } while (current != nullptr);

    LOG(FATAL) << "Unreachable";
    return nullptr;
  }
736 
StartsBeforeOrAt(LiveInterval * other)737   bool StartsBeforeOrAt(LiveInterval* other) const {
738     return GetStart() <= other->GetStart();
739   }
740 
StartsAfter(LiveInterval * other)741   bool StartsAfter(LiveInterval* other) const {
742     return GetStart() > other->GetStart();
743   }
744 
Dump(std::ostream & stream)745   void Dump(std::ostream& stream) const {
746     stream << "ranges: { ";
747     LiveRange* current = first_range_;
748     while (current != nullptr) {
749       current->Dump(stream);
750       stream << " ";
751       current = current->GetNext();
752     }
753     stream << "}, uses: { ";
754     for (const UsePosition& use : GetUses()) {
755       use.Dump(stream);
756       stream << " ";
757     }
758     stream << "}, { ";
759     for (const EnvUsePosition& env_use : GetEnvironmentUses()) {
760       env_use.Dump(stream);
761       stream << " ";
762     }
763     stream << "}";
764     stream << " is_fixed: " << is_fixed_ << ", is_split: " << IsSplit();
765     stream << " is_low: " << IsLowInterval();
766     stream << " is_high: " << IsHighInterval();
767   }
768 
  // Same as Dump, but adds context such as the instruction defining this interval, and
  // the register currently assigned to this interval.
  void DumpWithContext(std::ostream& stream, const CodeGenerator& codegen) const;

  // Returns the next split interval in the sibling chain, or null if this is the last one.
  LiveInterval* GetNextSibling() const { return next_sibling_; }
GetLastSibling()774   LiveInterval* GetLastSibling() {
775     LiveInterval* result = this;
776     while (result->next_sibling_ != nullptr) {
777       result = result->next_sibling_;
778     }
779     return result;
780   }
781 
  // Returns the first register hint that is at least free before
  // the value contained in `free_until`. If none is found, returns
  // `kNoRegister`.
  int FindFirstRegisterHint(size_t* free_until, const SsaLivenessAnalysis& liveness) const;

  // If there is enough information at the definition site to find a register (for example
  // it uses the same input as the first input), returns the register as a hint.
  // Returns kNoRegister otherwise.
  int FindHintAtDefinition() const;

  // Returns the number of required spilling slots (measured as a multiple of the
  // Dex virtual register size `kVRegSize`).
  size_t NumberOfSpillSlotsNeeded() const;
795 
  // Returns whether this interval holds a floating-point value.
  bool IsFloatingPoint() const {
    return type_ == DataType::Type::kFloat32 || type_ == DataType::Type::kFloat64;
  }

  // Converts the location of the interval to a `Location` object.
  Location ToLocation() const;

  // Returns the location of the interval following its siblings at `position`.
  Location GetLocationAt(size_t position);

  // Finds the sibling that is defined at `position`.
  LiveInterval* GetSiblingAt(size_t position);

  // Returns whether `other` and `this` share the same kind of register.
  bool SameRegisterKind(Location other) const;
  // Two intervals share a register kind when both are core or both are floating-point.
  bool SameRegisterKind(const LiveInterval& other) const {
    return IsFloatingPoint() == other.IsFloatingPoint();
  }
814 
  // An interval has a high sibling exactly when it is itself the low half of a
  // register pair (see AddHighInterval).
  bool HasHighInterval() const {
    return IsLowInterval();
  }

  // Symmetrically, only the synthesized high half has a low sibling.
  bool HasLowInterval() const {
    return IsHighInterval();
  }

  // Returns the low half of this register pair; only valid on the high half.
  LiveInterval* GetLowInterval() const {
    DCHECK(HasLowInterval());
    return high_or_low_interval_;
  }

  // Returns the high half of this register pair; only valid on the low half.
  LiveInterval* GetHighInterval() const {
    DCHECK(HasHighInterval());
    return high_or_low_interval_;
  }

  // Whether this interval is the synthesized high half of a register pair.
  bool IsHighInterval() const {
    return GetParent()->is_high_interval_;
  }

  // Whether this interval is the low half of a register pair, i.e. it is
  // paired with a high interval but is not itself marked high.
  bool IsLowInterval() const {
    return !IsHighInterval() && (GetParent()->high_or_low_interval_ != nullptr);
  }

  void SetLowInterval(LiveInterval* low) {
    DCHECK(IsHighInterval());
    high_or_low_interval_ = low;
  }

  void SetHighInterval(LiveInterval* high) {
    DCHECK(IsLowInterval());
    high_or_low_interval_ = high;
  }
850 
851   void AddHighInterval(bool is_temp = false) {
852     DCHECK(IsParent());
853     DCHECK(!HasHighInterval());
854     DCHECK(!HasLowInterval());
855     high_or_low_interval_ = new (allocator_) LiveInterval(
856         allocator_, type_, defined_by_, false, kNoRegister, is_temp, true);
857     high_or_low_interval_->high_or_low_interval_ = this;
858     if (first_range_ != nullptr) {
859       high_or_low_interval_->first_range_ = first_range_->Dup(allocator_);
860       high_or_low_interval_->last_range_ = high_or_low_interval_->first_range_->GetLastRange();
861       high_or_low_interval_->range_search_start_ = high_or_low_interval_->first_range_;
862     }
863     auto pos = high_or_low_interval_->uses_.before_begin();
864     for (const UsePosition& use : uses_) {
865       UsePosition* new_use = use.Clone(allocator_);
866       pos = high_or_low_interval_->uses_.insert_after(pos, *new_use);
867     }
868 
869     auto env_pos = high_or_low_interval_->env_uses_.before_begin();
870     for (const EnvUsePosition& env_use : env_uses_) {
871       EnvUsePosition* new_env_use = env_use.Clone(allocator_);
872       env_pos = high_or_low_interval_->env_uses_.insert_after(env_pos, *new_env_use);
873     }
874   }
875 
876   // Returns whether an interval, when it is non-split, is using
877   // the same register of one of its input. This function should
878   // be used only for DCHECKs.
IsUsingInputRegister()879   bool IsUsingInputRegister() const {
880     if (defined_by_ != nullptr && !IsSplit()) {
881       for (const HInstruction* input : defined_by_->GetInputs()) {
882         LiveInterval* interval = input->GetLiveInterval();
883 
884         // Find the interval that covers `defined_by`_. Calls to this function
885         // are made outside the linear scan, hence we need to use CoversSlow.
886         while (interval != nullptr && !interval->CoversSlow(defined_by_->GetLifetimePosition())) {
887           interval = interval->GetNextSibling();
888         }
889 
890         // Check if both intervals have the same register of the same kind.
891         if (interval != nullptr
892             && interval->SameRegisterKind(*this)
893             && interval->GetRegister() == GetRegister()) {
894           return true;
895         }
896       }
897     }
898     return false;
899   }
900 
901   // Returns whether an interval, when it is non-split, can safely use
902   // the same register of one of its input. Note that this method requires
903   // IsUsingInputRegister() to be true. This function should be used only
904   // for DCHECKs.
CanUseInputRegister()905   bool CanUseInputRegister() const {
906     DCHECK(IsUsingInputRegister());
907     if (defined_by_ != nullptr && !IsSplit()) {
908       LocationSummary* locations = defined_by_->GetLocations();
909       if (locations->OutputCanOverlapWithInputs()) {
910         return false;
911       }
912       for (const HInstruction* input : defined_by_->GetInputs()) {
913         LiveInterval* interval = input->GetLiveInterval();
914 
915         // Find the interval that covers `defined_by`_. Calls to this function
916         // are made outside the linear scan, hence we need to use CoversSlow.
917         while (interval != nullptr && !interval->CoversSlow(defined_by_->GetLifetimePosition())) {
918           interval = interval->GetNextSibling();
919         }
920 
921         if (interval != nullptr
922             && interval->SameRegisterKind(*this)
923             && interval->GetRegister() == GetRegister()) {
924           // We found the input that has the same register. Check if it is live after
925           // `defined_by`_.
926           return !interval->CoversSlow(defined_by_->GetLifetimePosition() + 1);
927         }
928       }
929     }
930     LOG(FATAL) << "Unreachable";
931     UNREACHABLE();
932   }
933 
AddSafepoint(HInstruction * instruction)934   void AddSafepoint(HInstruction* instruction) {
935     SafepointPosition* safepoint = new (allocator_) SafepointPosition(instruction);
936     if (first_safepoint_ == nullptr) {
937       first_safepoint_ = last_safepoint_ = safepoint;
938     } else {
939       DCHECK_LE(last_safepoint_->GetPosition(), safepoint->GetPosition());
940       last_safepoint_->SetNext(safepoint);
941       last_safepoint_ = safepoint;
942     }
943   }
944 
  // Returns the head of the ordered list of safepoints where this interval is live.
  SafepointPosition* GetFirstSafepoint() const {
    return first_safepoint_;
  }

  // Resets the starting point for range-searching queries to the first range.
  // Intervals must be reset prior to starting a new linear scan over them.
  void ResetSearchCache() {
    range_search_start_ = first_range_;
  }
954 
DefinitionRequiresRegister()955   bool DefinitionRequiresRegister() const {
956     DCHECK(IsParent());
957     LocationSummary* locations = defined_by_->GetLocations();
958     Location location = locations->Out();
959     // This interval is the first interval of the instruction. If the output
960     // of the instruction requires a register, we return the position of that instruction
961     // as the first register use.
962     if (location.IsUnallocated()) {
963       if ((location.GetPolicy() == Location::kRequiresRegister)
964            || (location.GetPolicy() == Location::kSameAsFirstInput
965                && (locations->InAt(0).IsRegister()
966                    || locations->InAt(0).IsRegisterPair()
967                    || locations->InAt(0).GetPolicy() == Location::kRequiresRegister))) {
968         return true;
969       } else if ((location.GetPolicy() == Location::kRequiresFpuRegister)
970                  || (location.GetPolicy() == Location::kSameAsFirstInput
971                      && (locations->InAt(0).IsFpuRegister()
972                          || locations->InAt(0).IsFpuRegisterPair()
973                          || locations->InAt(0).GetPolicy() == Location::kRequiresFpuRegister))) {
974         return true;
975       }
976     } else if (location.IsRegister() || location.IsRegisterPair()) {
977       return true;
978     }
979     return false;
980   }
981 
 private:
  // Private: intervals are only created from within this class (e.g. by
  // AddHighInterval) or by befriended code; see the factory usage elsewhere
  // in the class.
  LiveInterval(ScopedArenaAllocator* allocator,
               DataType::Type type,
               HInstruction* defined_by = nullptr,
               bool is_fixed = false,
               int reg = kNoRegister,
               bool is_temp = false,
               bool is_high_interval = false)
      : allocator_(allocator),
        first_range_(nullptr),
        last_range_(nullptr),
        range_search_start_(nullptr),
        first_safepoint_(nullptr),
        last_safepoint_(nullptr),
        uses_(),
        env_uses_(),
        type_(type),
        next_sibling_(nullptr),
        parent_(this),  // A fresh interval is its own parent until it is split.
        register_(reg),
        spill_slot_(kNoSpillSlot),
        is_fixed_(is_fixed),
        is_temp_(is_temp),
        is_high_interval_(is_high_interval),
        high_or_low_interval_(nullptr),
        defined_by_(defined_by) {}
1008 
1009   // Searches for a LiveRange that either covers the given position or is the
1010   // first next LiveRange. Returns null if no such LiveRange exists. Ranges
1011   // known to end before `position` can be skipped with `search_start`.
FindRangeAtOrAfter(size_t position,LiveRange * search_start)1012   LiveRange* FindRangeAtOrAfter(size_t position, LiveRange* search_start) const {
1013     if (kIsDebugBuild) {
1014       if (search_start != first_range_) {
1015         // If we are not searching the entire list of ranges, make sure we do
1016         // not skip the range we are searching for.
1017         if (search_start == nullptr) {
1018           DCHECK(IsDeadAt(position));
1019         } else if (search_start->GetStart() > position) {
1020           DCHECK_EQ(search_start, FindRangeAtOrAfter(position, first_range_));
1021         }
1022       }
1023     }
1024 
1025     LiveRange* range;
1026     for (range = search_start;
1027          range != nullptr && range->GetEnd() <= position;
1028          range = range->GetNext()) {
1029       continue;
1030     }
1031     return range;
1032   }
1033 
  // Whether `position` is exactly the definition point of this interval, i.e.
  // this is the parent (unsplit) interval and `position` is its start.
  bool IsDefiningPosition(size_t position) const {
    return IsParent() && (position == GetStart());
  }
1037 
HasSynthesizeUseAt(size_t position)1038   bool HasSynthesizeUseAt(size_t position) const {
1039     for (const UsePosition& use : GetUses()) {
1040       size_t use_position = use.GetPosition();
1041       if ((use_position == position) && use.IsSynthesized()) {
1042         return true;
1043       }
1044       if (use_position > position) break;
1045     }
1046     return false;
1047   }
1048 
  // Adds a synthesized use at the end (last back edge) of each loop containing
  // `block_at_use` in which this interval is live but not defined, walking the
  // loops from inner-most to outer-most.
  void AddBackEdgeUses(const HBasicBlock& block_at_use) {
    DCHECK(block_at_use.IsInLoop());
    if (block_at_use.GetGraph()->HasIrreducibleLoops()) {
      // Linear order may not be well formed when irreducible loops are present,
      // i.e. loop blocks may not be adjacent and a back edge may not be last,
      // which violates assumptions made in this method.
      return;
    }

    // Add synthesized uses at the back edge of loops to help the register allocator.
    // Note that this method is called in decreasing liveness order, to facilitate adding
    // uses at the head of the `uses_` list. Because below
    // we iterate from inner-most to outer-most, which is in increasing liveness order,
    // we need to add subsequent entries after the last inserted entry.
    const UsePositionList::iterator old_begin = uses_.begin();
    UsePositionList::iterator insert_pos = uses_.before_begin();
    for (HLoopInformationOutwardIterator it(block_at_use);
         !it.Done();
         it.Advance()) {
      HLoopInformation* current = it.Current();
      if (GetDefinedBy()->GetLifetimePosition() >= current->GetHeader()->GetLifetimeStart()) {
        // This interval is defined in the loop. We can stop going outward.
        break;
      }

      // We're only adding a synthesized use at the last back edge. Adding synthesized uses on
      // all back edges is not necessary: anything used in the loop will have its use at the
      // last back edge. If we want branches in a loop to have better register allocation than
      // another branch, then it is the linear order we should change.
      size_t back_edge_use_position = current->GetLifetimeEnd();
      if ((old_begin != uses_.end()) && (old_begin->GetPosition() <= back_edge_use_position)) {
        // There was a use already seen in this loop. Therefore the previous call to `AddUse`
        // already inserted the backedge use. We can stop going outward.
        DCHECK(HasSynthesizeUseAt(back_edge_use_position));
        break;
      }

      // Either this is the first insertion (and we are in the inner-most loop
      // of `block_at_use`), or positions must keep strictly increasing.
      DCHECK(insert_pos != uses_.before_begin()
             ? back_edge_use_position > insert_pos->GetPosition()
             : current == block_at_use.GetLoopInformation())
          << std::distance(uses_.before_begin(), insert_pos);

      UsePosition* new_use = new (allocator_) UsePosition(back_edge_use_position);
      insert_pos = uses_.insert_after(insert_pos, *new_use);
    }
  }
1095 
  // Arena used for all allocations made on behalf of this interval.
  ScopedArenaAllocator* const allocator_;

  // Ranges of this interval. We need a quick access to the last range to test
  // for liveness (see `IsDeadAt`).
  LiveRange* first_range_;
  LiveRange* last_range_;

  // The first range at or after the current position of a linear scan. It is
  // used to optimize range-searching queries.
  LiveRange* range_search_start_;

  // Safepoints where this interval is live.
  SafepointPosition* first_safepoint_;
  SafepointPosition* last_safepoint_;

  // Uses of this interval. Only the parent interval keeps these lists.
  UsePositionList uses_;
  EnvUsePositionList env_uses_;

  // The instruction type this interval corresponds to.
  const DataType::Type type_;

  // Live interval that is the result of a split.
  LiveInterval* next_sibling_;

  // The first interval from which split intervals come from.
  LiveInterval* parent_;

  // The register allocated to this interval.
  int register_;

  // The spill slot allocated to this interval.
  int spill_slot_;

  // Whether the interval is for a fixed register.
  const bool is_fixed_;

  // Whether the interval is for a temporary.
  const bool is_temp_;

  // Whether this interval is a synthesized interval for register pair.
  const bool is_high_interval_;

  // If this interval needs a register pair, the high or low equivalent.
  // `is_high_interval_` tells whether this holds the low or the high.
  LiveInterval* high_or_low_interval_;

  // The instruction represented by this interval.
  HInstruction* const defined_by_;

  static constexpr int kNoRegister = -1;
  static constexpr int kNoSpillSlot = -1;

  ART_FRIEND_TEST(RegisterAllocatorTest, SpillInactive);

  DISALLOW_COPY_AND_ASSIGN(LiveInterval);
};
1153 
1154 /**
1155  * Analysis that computes the liveness of instructions:
1156  *
1157  * (a) Non-environment uses of an instruction always make
1158  *     the instruction live.
1159  * (b) Environment uses of an instruction whose type is object (that is, non-primitive), make the
1160  *     instruction live, unless the class has an @DeadReferenceSafe annotation.
1161  *     This avoids unexpected premature reference enqueuing or finalization, which could
1162  *     result in premature deletion of native objects.  In the presence of @DeadReferenceSafe,
1163  *     object references are treated like primitive types.
1164  * (c) When the graph has the debuggable property, environment uses
1165  *     of an instruction that has a primitive type make the instruction live.
1166  *     If the graph does not have the debuggable property, the environment
1167  *     use has no effect, and may get a 'none' value after register allocation.
1168  * (d) When compiling in OSR mode, all loops in the compiled method may be entered
1169  *     from the interpreter via SuspendCheck; such use in SuspendCheck makes the instruction
1170  *     live.
1171  *
1172  * (b), (c) and (d) are implemented through SsaLivenessAnalysis::ShouldBeLiveForEnvironment.
1173  */
1174 class SsaLivenessAnalysis : public ValueObject {
1175  public:
SsaLivenessAnalysis(HGraph * graph,CodeGenerator * codegen,ScopedArenaAllocator * allocator)1176   SsaLivenessAnalysis(HGraph* graph, CodeGenerator* codegen, ScopedArenaAllocator* allocator)
1177       : graph_(graph),
1178         codegen_(codegen),
1179         allocator_(allocator),
1180         block_infos_(graph->GetBlocks().size(),
1181                      nullptr,
1182                      allocator_->Adapter(kArenaAllocSsaLiveness)),
1183         instructions_from_ssa_index_(allocator_->Adapter(kArenaAllocSsaLiveness)),
1184         instructions_from_lifetime_position_(allocator_->Adapter(kArenaAllocSsaLiveness)),
1185         number_of_ssa_values_(0) {
1186   }
1187 
1188   void Analyze();
1189 
GetLiveInSet(const HBasicBlock & block)1190   BitVector* GetLiveInSet(const HBasicBlock& block) const {
1191     return &block_infos_[block.GetBlockId()]->live_in_;
1192   }
1193 
GetLiveOutSet(const HBasicBlock & block)1194   BitVector* GetLiveOutSet(const HBasicBlock& block) const {
1195     return &block_infos_[block.GetBlockId()]->live_out_;
1196   }
1197 
GetKillSet(const HBasicBlock & block)1198   BitVector* GetKillSet(const HBasicBlock& block) const {
1199     return &block_infos_[block.GetBlockId()]->kill_;
1200   }
1201 
GetInstructionFromSsaIndex(size_t index)1202   HInstruction* GetInstructionFromSsaIndex(size_t index) const {
1203     return instructions_from_ssa_index_[index];
1204   }
1205 
GetInstructionFromPosition(size_t index)1206   HInstruction* GetInstructionFromPosition(size_t index) const {
1207     return instructions_from_lifetime_position_[index];
1208   }
1209 
GetBlockFromPosition(size_t index)1210   HBasicBlock* GetBlockFromPosition(size_t index) const {
1211     HInstruction* instruction = GetInstructionFromPosition(index);
1212     if (instruction == nullptr) {
1213       // If we are at a block boundary, get the block following.
1214       instruction = GetInstructionFromPosition(index + 1);
1215     }
1216     return instruction->GetBlock();
1217   }
1218 
IsAtBlockBoundary(size_t index)1219   bool IsAtBlockBoundary(size_t index) const {
1220     return GetInstructionFromPosition(index) == nullptr;
1221   }
1222 
GetTempUser(LiveInterval * temp)1223   HInstruction* GetTempUser(LiveInterval* temp) const {
1224     // A temporary shares the same lifetime start as the instruction that requires it.
1225     DCHECK(temp->IsTemp());
1226     HInstruction* user = GetInstructionFromPosition(temp->GetStart() / 2);
1227     DCHECK_EQ(user, temp->GetUses().front().GetUser());
1228     return user;
1229   }
1230 
GetTempIndex(LiveInterval * temp)1231   size_t GetTempIndex(LiveInterval* temp) const {
1232     // We use the input index to store the index of the temporary in the user's temporary list.
1233     DCHECK(temp->IsTemp());
1234     return temp->GetUses().front().GetInputIndex();
1235   }
1236 
GetMaxLifetimePosition()1237   size_t GetMaxLifetimePosition() const {
1238     return instructions_from_lifetime_position_.size() * 2 - 1;
1239   }
1240 
GetNumberOfSsaValues()1241   size_t GetNumberOfSsaValues() const {
1242     return number_of_ssa_values_;
1243   }
1244 
1245   static constexpr const char* kLivenessPassName = "liveness";
1246 
1247  private:
1248   // Give an SSA number to each instruction that defines a value used by another instruction,
1249   // and setup the lifetime information of each instruction and block.
1250   void NumberInstructions();
1251 
1252   // Compute live ranges of instructions, as well as live_in, live_out and kill sets.
1253   void ComputeLiveness();
1254 
1255   // Compute the live ranges of instructions, as well as the initial live_in, live_out and
1256   // kill sets, that do not take into account backward branches.
1257   void ComputeLiveRanges();
1258 
1259   // After computing the initial sets, this method does a fixed point
1260   // calculation over the live_in and live_out set to take into account
1261   // backwards branches.
1262   void ComputeLiveInAndLiveOutSets();
1263 
1264   // Update the live_in set of the block and returns whether it has changed.
1265   bool UpdateLiveIn(const HBasicBlock& block);
1266 
1267   // Update the live_out set of the block and returns whether it has changed.
1268   bool UpdateLiveOut(const HBasicBlock& block);
1269 
1270   static void ProcessEnvironment(HInstruction* instruction,
1271                                  HInstruction* actual_user,
1272                                  BitVector* live_in);
1273   static void RecursivelyProcessInputs(HInstruction* instruction,
1274                                        HInstruction* actual_user,
1275                                        BitVector* live_in);
1276 
1277   // Returns whether `instruction` in an HEnvironment held by `env_holder`
1278   // should be kept live by the HEnvironment.
ShouldBeLiveForEnvironment(HInstruction * env_holder,HInstruction * instruction)1279   static bool ShouldBeLiveForEnvironment(HInstruction* env_holder, HInstruction* instruction) {
1280     DCHECK(instruction != nullptr);
1281     // A value that's not live in compiled code may still be needed in interpreter,
1282     // due to code motion, etc.
1283     if (env_holder->IsDeoptimize()) return true;
1284     // A value live at a throwing instruction in a try block may be copied by
1285     // the exception handler to its location at the top of the catch block.
1286     if (env_holder->CanThrowIntoCatchBlock()) return true;
1287     HGraph* graph = instruction->GetBlock()->GetGraph();
1288     if (graph->IsDebuggable()) return true;
1289     // When compiling in OSR mode, all loops in the compiled method may be entered
1290     // from the interpreter via SuspendCheck; thus we need to preserve the environment.
1291     if (env_holder->IsSuspendCheck() && graph->IsCompilingOsr()) return true;
1292     if (graph -> IsDeadReferenceSafe()) return false;
1293     return instruction->GetType() == DataType::Type::kReference;
1294   }
1295 
CheckNoLiveInIrreducibleLoop(const HBasicBlock & block)1296   void CheckNoLiveInIrreducibleLoop(const HBasicBlock& block) const {
1297     if (!block.IsLoopHeader() || !block.GetLoopInformation()->IsIrreducible()) {
1298       return;
1299     }
1300     BitVector* live_in = GetLiveInSet(block);
1301     // To satisfy our liveness algorithm, we need to ensure loop headers of
1302     // irreducible loops do not have any live-in instructions, except constants
1303     // and the current method, which can be trivially re-materialized.
1304     for (uint32_t idx : live_in->Indexes()) {
1305       HInstruction* instruction = GetInstructionFromSsaIndex(idx);
1306       DCHECK(instruction->GetBlock()->IsEntryBlock()) << instruction->DebugName();
1307       DCHECK(!instruction->IsParameterValue());
1308       DCHECK(instruction->IsCurrentMethod() || instruction->IsConstant())
1309           << instruction->DebugName();
1310     }
1311   }
1312 
1313   HGraph* const graph_;
1314   CodeGenerator* const codegen_;
1315 
1316   // Use a local ScopedArenaAllocator for allocating memory.
1317   // This allocator must remain alive while doing register allocation.
1318   ScopedArenaAllocator* const allocator_;
1319 
1320   ScopedArenaVector<BlockInfo*> block_infos_;
1321 
1322   // Temporary array used when computing live_in, live_out, and kill sets.
1323   ScopedArenaVector<HInstruction*> instructions_from_ssa_index_;
1324 
1325   // Temporary array used when inserting moves in the graph.
1326   ScopedArenaVector<HInstruction*> instructions_from_lifetime_position_;
1327   size_t number_of_ssa_values_;
1328 
1329   ART_FRIEND_TEST(RegisterAllocatorTest, SpillInactive);
1330   ART_FRIEND_TEST(RegisterAllocatorTest, FreeUntil);
1331 
1332   DISALLOW_COPY_AND_ASSIGN(SsaLivenessAnalysis);
1333 };
1334 
1335 }  // namespace art
1336 
1337 #endif  // ART_COMPILER_OPTIMIZING_SSA_LIVENESS_ANALYSIS_H_
1338