/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_SSA_LIVENESS_ANALYSIS_H_
#define ART_COMPILER_OPTIMIZING_SSA_LIVENESS_ANALYSIS_H_

#include <iostream>

#include "base/arena_bit_vector.h"
#include "base/bit_vector.h"
#include "base/intrusive_forward_list.h"
#include "base/iteration_range.h"
#include "base/macros.h"
#include "base/scoped_arena_allocator.h"
#include "base/scoped_arena_containers.h"
#include "nodes.h"
namespace art HIDDEN {

class CodeGenerator;
class SsaLivenessAnalysis;

static constexpr int kNoRegister = -1;

class BlockInfo : public ArenaObject<kArenaAllocSsaLiveness> {
 public:
  BlockInfo(ScopedArenaAllocator* allocator, const HBasicBlock& block, size_t number_of_ssa_values)
      : block_(block),
        live_in_(ArenaBitVector::CreateFixedSize(
            allocator, number_of_ssa_values, kArenaAllocSsaLiveness)),
        live_out_(ArenaBitVector::CreateFixedSize(
            allocator, number_of_ssa_values, kArenaAllocSsaLiveness)),
        kill_(ArenaBitVector::CreateFixedSize(
            allocator, number_of_ssa_values, kArenaAllocSsaLiveness)) {
    UNUSED(block_);
  }

 private:
  const HBasicBlock& block_;
  BitVectorView<size_t> live_in_;
  BitVectorView<size_t> live_out_;
  BitVectorView<size_t> kill_;

  friend class SsaLivenessAnalysis;

  DISALLOW_COPY_AND_ASSIGN(BlockInfo);
};

/**
 * A live range contains the start and end of a range where an instruction or a temporary
 * is live.
 */
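 *
 * As an illustrative sketch (positions chosen for the example, not taken from
 * a real compilation), a value live over lifetime positions [4,10) and again
 * over [14,20) after a lifetime hole is represented as two chained ranges:
 *
 *   LiveRange* tail = new (allocator) LiveRange(14, 20, nullptr);  // [14,20)
 *   LiveRange* head = new (allocator) LiveRange(4, 10, tail);      // [4,10) then [14,20)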
class LiveRange final : public ArenaObject<kArenaAllocSsaLiveness> {
 public:
  LiveRange(size_t start, size_t end, LiveRange* next) : start_(start), end_(end), next_(next) {
    DCHECK_LT(start, end);
    DCHECK(next_ == nullptr || next_->GetStart() > GetEnd());
  }

  size_t GetStart() const { return start_; }
  size_t GetEnd() const { return end_; }
  LiveRange* GetNext() const { return next_; }

  bool IntersectsWith(const LiveRange& other) const {
    return (start_ >= other.start_ && start_ < other.end_)
        || (other.start_ >= start_ && other.start_ < end_);
  }

  bool IsBefore(const LiveRange& other) const {
    return end_ <= other.start_;
  }

  void Dump(std::ostream& stream) const {
    stream << "[" << start_ << "," << end_ << ")";
  }

  LiveRange* Dup(ScopedArenaAllocator* allocator) const {
    return new (allocator) LiveRange(
        start_, end_, next_ == nullptr ? nullptr : next_->Dup(allocator));
  }

  LiveRange* GetLastRange() {
    return next_ == nullptr ? this : next_->GetLastRange();
  }

 private:
  size_t start_;
  size_t end_;
  LiveRange* next_;

  friend class LiveInterval;

  DISALLOW_COPY_AND_ASSIGN(LiveRange);
};

/**
 * A use position represents a live interval use at a given position.
 */
class UsePosition : public ArenaObject<kArenaAllocSsaLiveness>,
                    public IntrusiveForwardListNode<UsePosition> {
 public:
  UsePosition(HInstruction* user, size_t input_index, size_t position)
      : user_(user),
        input_index_(input_index),
        position_(position) {
  }

  explicit UsePosition(size_t position)
      : user_(nullptr),
        input_index_(kNoInput),
        position_(dchecked_integral_cast<uint32_t>(position)) {
  }

  size_t GetPosition() const { return position_; }

  HInstruction* GetUser() const { return user_; }

  bool IsSynthesized() const { return user_ == nullptr; }

  size_t GetInputIndex() const { return input_index_; }

  void Dump(std::ostream& stream) const {
    stream << position_;
  }

  HLoopInformation* GetLoopInformation() const {
    return user_->GetBlock()->GetLoopInformation();
  }

  UsePosition* Clone(ScopedArenaAllocator* allocator) const {
    return new (allocator) UsePosition(user_, input_index_, position_);
  }

  bool RequiresRegister() const {
    if (IsSynthesized()) return false;
    Location location = GetUser()->GetLocations()->InAt(GetInputIndex());
    return location.IsUnallocated() && location.RequiresRegisterKind();
  }

 private:
  static constexpr uint32_t kNoInput = static_cast<uint32_t>(-1);

  HInstruction* const user_;
  const size_t input_index_;
  const size_t position_;

  DISALLOW_COPY_AND_ASSIGN(UsePosition);
};
using UsePositionList = IntrusiveForwardList<UsePosition>;

/**
 * An environment use position represents a use of a live interval by an
 * environment at a given position.
 */
class EnvUsePosition : public ArenaObject<kArenaAllocSsaLiveness>,
                       public IntrusiveForwardListNode<EnvUsePosition> {
 public:
  EnvUsePosition(HEnvironment* environment,
                 size_t input_index,
                 size_t position)
      : environment_(environment),
        input_index_(input_index),
        position_(position) {
    DCHECK(environment != nullptr);
  }

  size_t GetPosition() const { return position_; }

  HEnvironment* GetEnvironment() const { return environment_; }
  size_t GetInputIndex() const { return input_index_; }

  void Dump(std::ostream& stream) const {
    stream << position_;
  }

  EnvUsePosition* Clone(ScopedArenaAllocator* allocator) const {
    return new (allocator) EnvUsePosition(environment_, input_index_, position_);
  }

 private:
  HEnvironment* const environment_;
  const size_t input_index_;
  const size_t position_;

  DISALLOW_COPY_AND_ASSIGN(EnvUsePosition);
};
using EnvUsePositionList = IntrusiveForwardList<EnvUsePosition>;

template <typename Iterator>
inline Iterator FindUseAtOrAfterPosition(Iterator first, Iterator last, size_t position) {
  using value_type = const typename Iterator::value_type;
  static_assert(std::is_same<value_type, const UsePosition>::value ||
                std::is_same<value_type, const EnvUsePosition>::value,
                "Expecting value type UsePosition or EnvUsePosition.");
  Iterator ret = std::find_if(
      first, last, [position](const value_type& use) { return use.GetPosition() >= position; });
  // Check that the processed range is sorted. Do not check the rest of the range to avoid
  // increasing the complexity of callers from O(n) to O(n^2).
  DCHECK(std::is_sorted(
      first,
      ret,
      [](const value_type& lhs, const value_type& rhs) {
        return lhs.GetPosition() < rhs.GetPosition();
      }));
  return ret;
}

template <typename Iterator>
inline IterationRange<Iterator> FindMatchingUseRange(Iterator first,
                                                     Iterator last,
                                                     size_t position_begin,
                                                     size_t position_end) {
  Iterator begin = FindUseAtOrAfterPosition(first, last, position_begin);
  Iterator end = FindUseAtOrAfterPosition(begin, last, position_end);
  return MakeIterationRange(begin, end);
}
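
// A usage sketch (the names `interval` and `range` are assumed for
// illustration): iterate the uses that fall within one of an interval's
// live ranges.
//
//   auto uses_in_range = FindMatchingUseRange(interval->GetUses().begin(),
//                                             interval->GetUses().end(),
//                                             range->GetStart(),
//                                             range->GetEnd());
//   for (const UsePosition& use : uses_in_range) {
//     // use.GetPosition() is in [range->GetStart(), range->GetEnd()).
//   }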

class SafepointPosition : public ArenaObject<kArenaAllocSsaLiveness> {
 public:
  explicit SafepointPosition(HInstruction* instruction)
      : instruction_(instruction),
        next_(nullptr) {}

  static size_t ComputePosition(HInstruction* instruction) {
    // We special case instructions emitted at use site, as their
    // safepoint position needs to be at their use.
    if (instruction->IsEmittedAtUseSite()) {
      // Currently only applies to implicit null checks, which are emitted
      // at the next instruction (lifetime positions are spaced two apart,
      // hence the `+ 2`).
      DCHECK(instruction->IsNullCheck()) << instruction->DebugName();
      return instruction->GetLifetimePosition() + 2;
    } else {
      return instruction->GetLifetimePosition();
    }
  }

  void SetNext(SafepointPosition* next) {
    next_ = next;
  }

  size_t GetPosition() const {
    return ComputePosition(instruction_);
  }

  SafepointPosition* GetNext() const {
    return next_;
  }

  LocationSummary* GetLocations() const {
    return instruction_->GetLocations();
  }

  HInstruction* GetInstruction() const {
    return instruction_;
  }

 private:
  HInstruction* const instruction_;
  SafepointPosition* next_;

  DISALLOW_COPY_AND_ASSIGN(SafepointPosition);
};

/**
 * An interval is a list of disjoint live ranges where an instruction is live.
 * Each instruction that has uses gets an interval.
 */
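 *
 * A hedged construction sketch (the `instruction`, `block` and `allocator`
 * names are assumed for illustration): create an interval for an instruction
 * and make it live across a whole block.
 *
 *   LiveInterval* interval =
 *       LiveInterval::MakeInterval(allocator, DataType::Type::kInt32, instruction);
 *   interval->AddRange(block->GetLifetimeStart(), block->GetLifetimeEnd());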
 */
class LiveInterval : public ArenaObject<kArenaAllocSsaLiveness> {
 public:
  static LiveInterval* MakeInterval(ScopedArenaAllocator* allocator,
                                    DataType::Type type,
                                    HInstruction* instruction = nullptr) {
    return new (allocator) LiveInterval(allocator, type, instruction);
  }

  static LiveInterval* MakeFixedInterval(ScopedArenaAllocator* allocator,
                                         int reg,
                                         DataType::Type type) {
    return new (allocator) LiveInterval(allocator, type, nullptr, true, reg, false);
  }

  static LiveInterval* MakeTempInterval(ScopedArenaAllocator* allocator, DataType::Type type) {
    return new (allocator) LiveInterval(allocator, type, nullptr, false, kNoRegister, true);
  }

  bool IsFixed() const { return is_fixed_; }
  bool IsTemp() const { return is_temp_; }
  // This interval is the result of a split.
  bool IsSplit() const { return parent_ != this; }

  void AddTempUse(HInstruction* instruction, size_t temp_index) {
    DCHECK(IsTemp());
    DCHECK(GetUses().empty()) << "A temporary can only have one user";
    DCHECK(GetEnvironmentUses().empty()) << "A temporary cannot have an environment user";
    size_t position = instruction->GetLifetimePosition();
    UsePosition* new_use = new (allocator_) UsePosition(instruction, temp_index, position);
    uses_.push_front(*new_use);
    AddRange(position, position + 1);
  }

  // Record use of an input. The use will be recorded as an environment use if
  // `environment` is not null and as a register use otherwise. If `actual_user`
  // is specified, the use will be recorded at `actual_user`'s lifetime position.
  void AddUse(HInstruction* instruction,
              HEnvironment* environment,
              size_t input_index,
              HInstruction* actual_user = nullptr) {
    bool is_environment = (environment != nullptr);
    LocationSummary* locations = instruction->GetLocations();
    if (actual_user == nullptr) {
      actual_user = instruction;
    }

    // Set the use within the instruction.
    size_t position = actual_user->GetLifetimePosition() + 1;
    if (!is_environment) {
      if (locations->IsFixedInput(input_index) || locations->OutputUsesSameAs(input_index)) {
        // For fixed inputs and output same as input, the register allocator
        // requires the inputs to die at the instruction, so that input moves use the
        // location of the input just before that instruction (and not potential moves due
        // to splitting).
        DCHECK_EQ(instruction, actual_user);
        position = actual_user->GetLifetimePosition();
      } else if (!locations->InAt(input_index).IsValid()) {
        return;
      }
    }

    if (!is_environment && instruction->IsInLoop()) {
      AddBackEdgeUses(*instruction->GetBlock());
    }

    if ((!uses_.empty()) &&
        (uses_.front().GetUser() == actual_user) &&
        (uses_.front().GetPosition() < position)) {
      // The user uses the instruction multiple times, and one use dies before the other.
      // We update the use list so that the latter is first.
      DCHECK(!is_environment);
      DCHECK(uses_.front().GetPosition() + 1 == position);
      UsePositionList::iterator next_pos = uses_.begin();
      UsePositionList::iterator insert_pos;
      do {
        insert_pos = next_pos;
        ++next_pos;
      } while (next_pos != uses_.end() && next_pos->GetPosition() < position);
      UsePosition* new_use = new (allocator_) UsePosition(instruction, input_index, position);
      uses_.insert_after(insert_pos, *new_use);
      if (first_range_->GetEnd() == uses_.front().GetPosition()) {
        first_range_->end_ = position;
      }
      return;
    }

    if (is_environment) {
      DCHECK(env_uses_.empty() || position <= env_uses_.front().GetPosition());
      EnvUsePosition* new_env_use =
          new (allocator_) EnvUsePosition(environment, input_index, position);
      env_uses_.push_front(*new_env_use);
    } else {
      DCHECK(uses_.empty() || position <= uses_.front().GetPosition());
      UsePosition* new_use = new (allocator_) UsePosition(instruction, input_index, position);
      uses_.push_front(*new_use);
    }

    size_t start_block_position = instruction->GetBlock()->GetLifetimeStart();
    if (first_range_ == nullptr) {
      // First time we see a use of that interval.
      first_range_ = last_range_ = range_search_start_ =
          new (allocator_) LiveRange(start_block_position, position, nullptr);
    } else if (first_range_->GetStart() == start_block_position) {
      // There is a use later in the same block or in a following block.
      // Note that in such a case, `AddRange` for the whole block has been called
      // before arriving in this method, and this is the reason the start of
      // `first_range_` is before the given `position`.
      DCHECK_LE(position, first_range_->GetEnd());
    } else {
      DCHECK(first_range_->GetStart() > position);
      // There is a hole in the interval. Create a new range.
      // Note that the start of `first_range_` can be equal to `end`: two blocks
      // having adjacent lifetime positions are not necessarily
      // predecessor/successor. When two blocks are predecessor/successor, the
      // liveness algorithm has called `AddRange` before arriving in this method,
      // and the `DCHECK_LE(position, first_range_->GetEnd())` in the previous
      // case would succeed.
      first_range_ = range_search_start_ =
          new (allocator_) LiveRange(start_block_position, position, first_range_);
    }
  }

  void AddPhiUse(HInstruction* instruction, size_t input_index, HBasicBlock* block) {
    DCHECK(instruction->IsPhi());
    if (block->IsInLoop()) {
      AddBackEdgeUses(*block);
    }
    UsePosition* new_use =
        new (allocator_) UsePosition(instruction, input_index, block->GetLifetimeEnd());
    uses_.push_front(*new_use);
  }

  ALWAYS_INLINE void AddRange(size_t start, size_t end) {
    if (first_range_ == nullptr) {
      first_range_ = last_range_ = range_search_start_ =
          new (allocator_) LiveRange(start, end, first_range_);
    } else if (first_range_->GetStart() == end) {
      // There is a use in the following block.
      first_range_->start_ = start;
    } else if (first_range_->GetStart() == start && first_range_->GetEnd() == end) {
      DCHECK(is_fixed_);
    } else {
      DCHECK_GT(first_range_->GetStart(), end);
      // There is a hole in the interval. Create a new range.
      first_range_ = range_search_start_ = new (allocator_) LiveRange(start, end, first_range_);
    }
  }

  void AddLoopRange(size_t start, size_t end) {
    DCHECK(first_range_ != nullptr);
    DCHECK_LE(start, first_range_->GetStart());
    // Find the range that covers the positions after the loop.
    LiveRange* after_loop = first_range_;
    LiveRange* last_in_loop = nullptr;
    while (after_loop != nullptr && after_loop->GetEnd() < end) {
      DCHECK_LE(start, after_loop->GetStart());
      last_in_loop = after_loop;
      after_loop = after_loop->GetNext();
    }
    if (after_loop == nullptr) {
      // Uses are only in the loop.
      first_range_ = last_range_ = range_search_start_ =
          new (allocator_) LiveRange(start, end, nullptr);
    } else if (after_loop->GetStart() <= end) {
      first_range_ = range_search_start_ = after_loop;
      // There are uses after the loop.
      first_range_->start_ = start;
    } else {
      // The use after the loop is after a lifetime hole.
      DCHECK(last_in_loop != nullptr);
      first_range_ = range_search_start_ = last_in_loop;
      first_range_->start_ = start;
      first_range_->end_ = end;
    }
  }

  bool HasSpillSlot() const { return spill_slot_ != kNoSpillSlot; }
  void SetSpillSlot(int slot) {
    DCHECK(!is_fixed_);
    DCHECK(!is_temp_);
    spill_slot_ = slot;
  }
  int GetSpillSlot() const { return spill_slot_; }

  void SetFrom(size_t from) {
    if (first_range_ != nullptr) {
      first_range_->start_ = from;
    } else {
      // Instruction without uses.
      DCHECK(uses_.empty());
      DCHECK(from == defined_by_->GetLifetimePosition());
      first_range_ = last_range_ = range_search_start_ =
          new (allocator_) LiveRange(from, from + 2, nullptr);
    }
  }

  LiveInterval* GetParent() const { return parent_; }

  // Returns whether this interval is the parent interval, that is, the interval
  // that starts where the HInstruction is defined.
  bool IsParent() const { return parent_ == this; }

  LiveRange* GetFirstRange() const { return first_range_; }
  LiveRange* GetLastRange() const { return last_range_; }

  int GetRegister() const { return register_; }
  void SetRegister(int reg) { register_ = reg; }
  void ClearRegister() { register_ = kNoRegister; }
  bool HasRegister() const { return register_ != kNoRegister; }

  bool IsDeadAt(size_t position) const {
    return GetEnd() <= position;
  }

  bool IsDefinedAt(size_t position) const {
    return GetStart() <= position && !IsDeadAt(position);
  }

  // Returns true if the interval contains a LiveRange covering `position`.
  // The range at or immediately after the current position of linear scan
  // is cached for better performance. If `position` can be smaller than
  // that, CoversSlow should be used instead.
  bool Covers(size_t position) {
    LiveRange* candidate = FindRangeAtOrAfter(position, range_search_start_);
    range_search_start_ = candidate;
    return (candidate != nullptr && candidate->GetStart() <= position);
  }

  // Same as Covers but always tests all ranges.
  bool CoversSlow(size_t position) const {
    LiveRange* candidate = FindRangeAtOrAfter(position, first_range_);
    return candidate != nullptr && candidate->GetStart() <= position;
  }

  // Returns the first intersection of this interval with `current`, which
  // must be the interval currently being allocated by linear scan.
  size_t FirstIntersectionWith(LiveInterval* current) const {
    // Find the first range after the start of `current`. We use the search
    // cache to improve performance.
    DCHECK(GetStart() <= current->GetStart() || IsFixed());
    LiveRange* other_range = current->first_range_;
    LiveRange* my_range = FindRangeAtOrAfter(other_range->GetStart(), range_search_start_);
    if (my_range == nullptr) {
      return kNoLifetime;
    }

    // Advance both intervals and find the first matching range start in
    // this interval.
    do {
      if (my_range->IsBefore(*other_range)) {
        my_range = my_range->GetNext();
        if (my_range == nullptr) {
          return kNoLifetime;
        }
      } else if (other_range->IsBefore(*my_range)) {
        other_range = other_range->GetNext();
        if (other_range == nullptr) {
          return kNoLifetime;
        }
      } else {
        DCHECK(my_range->IntersectsWith(*other_range));
        return std::max(my_range->GetStart(), other_range->GetStart());
      }
    } while (true);
  }

  size_t GetStart() const {
    return first_range_->GetStart();
  }

  size_t GetEnd() const {
    return last_range_->GetEnd();
  }

  size_t GetLength() const {
    return GetEnd() - GetStart();
  }

  size_t FirstRegisterUseAfter(size_t position) const {
    if (is_temp_) {
      return position == GetStart() ? position : kNoLifetime;
    }

    if (IsDefiningPosition(position) && DefinitionRequiresRegister()) {
      return position;
    }

    size_t end = GetEnd();
    for (const UsePosition& use : GetUses()) {
      size_t use_position = use.GetPosition();
      if (use_position > end) {
        break;
      }
      if (use_position > position) {
        if (use.RequiresRegister()) {
          return use_position;
        }
      }
    }
    return kNoLifetime;
  }

  // Returns the position of the first register use for this live interval,
  // including a register definition if applicable.
  size_t FirstRegisterUse() const {
    return FirstRegisterUseAfter(GetStart());
  }

  // Whether the interval requires a register rather than a stack location.
  // If needed for performance, this could be cached.
  bool RequiresRegister() const {
    return !HasRegister() && FirstRegisterUse() != kNoLifetime;
  }

  size_t FirstUseAfter(size_t position) const {
    if (is_temp_) {
      return position == GetStart() ? position : kNoLifetime;
    }

    if (IsDefiningPosition(position)) {
      DCHECK(defined_by_->GetLocations()->Out().IsValid());
      return position;
    }

    size_t end = GetEnd();
    for (const UsePosition& use : GetUses()) {
      size_t use_position = use.GetPosition();
      if (use_position > end) {
        break;
      }
      if (use_position > position) {
        return use_position;
      }
    }
    return kNoLifetime;
  }

  const UsePositionList& GetUses() const {
    return parent_->uses_;
  }

  const EnvUsePositionList& GetEnvironmentUses() const {
    return parent_->env_uses_;
  }

  DataType::Type GetType() const {
    return type_;
  }

  HInstruction* GetDefinedBy() const {
    return defined_by_;
  }

  bool HasWillCallSafepoint() const {
    for (SafepointPosition* safepoint = first_safepoint_;
         safepoint != nullptr;
         safepoint = safepoint->GetNext()) {
      if (safepoint->GetLocations()->WillCall()) return true;
    }
    return false;
  }

  SafepointPosition* FindSafepointJustBefore(size_t position) const {
    for (SafepointPosition* safepoint = first_safepoint_, *previous = nullptr;
         safepoint != nullptr;
         previous = safepoint, safepoint = safepoint->GetNext()) {
      if (safepoint->GetPosition() >= position) return previous;
    }
    return last_safepoint_;
  }

  /**
   * Split this interval at `position`. This interval is changed to:
   * [start ... position).
   *
   * The new interval covers:
   * [position ... end)
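   *
   * For example (an illustrative sketch, not tied to any particular graph),
   * splitting an interval covering [2,6) and [8,12) at position 10 leaves
   * this interval covering [2,6) and [8,10), and returns a new sibling
   * interval covering [10,12).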
   */
  LiveInterval* SplitAt(size_t position) {
    DCHECK(!is_temp_);
    DCHECK(!is_fixed_);
    DCHECK_GT(position, GetStart());

    if (GetEnd() <= position) {
      // This range dies before `position`, no need to split.
      return nullptr;
    }

    LiveInterval* new_interval = new (allocator_) LiveInterval(allocator_, type_);
    SafepointPosition* new_last_safepoint = FindSafepointJustBefore(position);
    if (new_last_safepoint == nullptr) {
      new_interval->first_safepoint_ = first_safepoint_;
      new_interval->last_safepoint_ = last_safepoint_;
      first_safepoint_ = last_safepoint_ = nullptr;
    } else if (last_safepoint_ != new_last_safepoint) {
      new_interval->last_safepoint_ = last_safepoint_;
      new_interval->first_safepoint_ = new_last_safepoint->GetNext();
      DCHECK(new_interval->first_safepoint_ != nullptr);
      last_safepoint_ = new_last_safepoint;
      last_safepoint_->SetNext(nullptr);
    }

    new_interval->next_sibling_ = next_sibling_;
    next_sibling_ = new_interval;
    new_interval->parent_ = parent_;

    LiveRange* current = first_range_;
    LiveRange* previous = nullptr;
    // Iterate over the ranges, and either find a range that covers this position,
    // or two consecutive ranges surrounding it (that is, the position is in a
    // lifetime hole).
    do {
      if (position >= current->GetEnd()) {
        // Move to next range.
        previous = current;
        current = current->next_;
      } else if (position <= current->GetStart()) {
        // If the previous range did not cover this position, we know position is in
        // a lifetime hole. We can just break the first_range_ and last_range_ links
        // and return the new interval.
        DCHECK(previous != nullptr);
        DCHECK(current != first_range_);
        new_interval->last_range_ = last_range_;
        last_range_ = previous;
        previous->next_ = nullptr;
        new_interval->first_range_ = current;
        if (range_search_start_ != nullptr && range_search_start_->GetEnd() >= current->GetEnd()) {
          // Search start point is inside `new_interval`. Change it to null
          // (i.e. the end of the interval) in the original interval.
          range_search_start_ = nullptr;
        }
        new_interval->range_search_start_ = new_interval->first_range_;
        return new_interval;
      } else {
        // This range covers position. We create a new last_range_ for this interval
        // that covers the positions from current->GetStart() to position. We also
        // shorten the current range and make it the first range of the new interval.
        DCHECK(position < current->GetEnd() && position > current->GetStart());
        new_interval->last_range_ = last_range_;
        last_range_ = new (allocator_) LiveRange(current->start_, position, nullptr);
        if (previous != nullptr) {
          previous->next_ = last_range_;
        } else {
          first_range_ = last_range_;
        }
        new_interval->first_range_ = current;
        current->start_ = position;
        if (range_search_start_ != nullptr && range_search_start_->GetEnd() >= current->GetEnd()) {
          // Search start point is inside `new_interval`. Change it to `last_range_`
          // in the original interval. This is conservative but always correct.
          range_search_start_ = last_range_;
        }
        new_interval->range_search_start_ = new_interval->first_range_;
        return new_interval;
      }
    } while (current != nullptr);

    LOG(FATAL) << "Unreachable";
    return nullptr;
  }

  bool StartsBeforeOrAt(LiveInterval* other) const {
    return GetStart() <= other->GetStart();
  }

  bool StartsAfter(LiveInterval* other) const {
    return GetStart() > other->GetStart();
  }

  void Dump(std::ostream& stream) const {
    stream << "ranges: { ";
    LiveRange* current = first_range_;
    while (current != nullptr) {
      current->Dump(stream);
      stream << " ";
      current = current->GetNext();
    }
    stream << "}, uses: { ";
    for (const UsePosition& use : GetUses()) {
      use.Dump(stream);
      stream << " ";
    }
    stream << "}, { ";
    for (const EnvUsePosition& env_use : GetEnvironmentUses()) {
      env_use.Dump(stream);
      stream << " ";
    }
    stream << "}";
    stream << " is_fixed: " << is_fixed_ << ", is_split: " << IsSplit();
    stream << " is_low: " << IsLowInterval();
    stream << " is_high: " << IsHighInterval();
  }

  // Same as Dump, but adds context such as the instruction defining this interval, and
  // the register currently assigned to this interval.
  void DumpWithContext(std::ostream& stream, const CodeGenerator& codegen) const;

  LiveInterval* GetNextSibling() const { return next_sibling_; }
  LiveInterval* GetLastSibling() {
    LiveInterval* result = this;
    while (result->next_sibling_ != nullptr) {
      result = result->next_sibling_;
    }
    return result;
  }

  // Returns the first register hint that is free at least up to the value
  // contained in `free_until`. If none is found, returns `kNoRegister`.
  int FindFirstRegisterHint(size_t* free_until, const SsaLivenessAnalysis& liveness) const;

  // If there is enough information at the definition site to pick a register
  // (for example, the output must use the same register as the first input),
  // returns that register as a hint. Returns kNoRegister otherwise.
  int FindHintAtDefinition() const;

  // Returns the number of required spilling slots (measured as a multiple of the
  // Dex virtual register size `kVRegSize`).
  size_t NumberOfSpillSlotsNeeded() const;

  bool IsFloatingPoint() const {
    return type_ == DataType::Type::kFloat32 || type_ == DataType::Type::kFloat64;
  }

  // Converts the location of the interval to a `Location` object.
  Location ToLocation() const;

  // Returns the location of the interval following its siblings at `position`.
  Location GetLocationAt(size_t position);

  // Finds the sibling that is defined at `position`.
  LiveInterval* GetSiblingAt(size_t position);

  // Returns whether `other` and `this` share the same kind of register.
  bool SameRegisterKind(Location other) const;
  bool SameRegisterKind(const LiveInterval& other) const {
    return IsFloatingPoint() == other.IsFloatingPoint();
  }

  bool HasHighInterval() const {
    return IsLowInterval();
  }

  bool HasLowInterval() const {
    return IsHighInterval();
  }

  LiveInterval* GetLowInterval() const {
    DCHECK(HasLowInterval());
    return high_or_low_interval_;
  }

  LiveInterval* GetHighInterval() const {
    DCHECK(HasHighInterval());
    return high_or_low_interval_;
  }

  bool IsHighInterval() const {
    return GetParent()->is_high_interval_;
  }

  bool IsLowInterval() const {
    return !IsHighInterval() && (GetParent()->high_or_low_interval_ != nullptr);
  }

  void SetLowInterval(LiveInterval* low) {
    DCHECK(IsHighInterval());
    high_or_low_interval_ = low;
  }

  void SetHighInterval(LiveInterval* high) {
    DCHECK(IsLowInterval());
    high_or_low_interval_ = high;
  }

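  // Creates the synthesized sibling interval used as the second half of a
  // register pair (see `is_high_interval_` below; e.g. a 64-bit value on a
  // 32-bit architecture), duplicating this interval's ranges and cloning its
  // uses into it.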
  void AddHighInterval(bool is_temp = false) {
    DCHECK(IsParent());
    DCHECK(!HasHighInterval());
    DCHECK(!HasLowInterval());
    high_or_low_interval_ = new (allocator_) LiveInterval(
        allocator_, type_, defined_by_, false, kNoRegister, is_temp, true);
    high_or_low_interval_->high_or_low_interval_ = this;
    if (first_range_ != nullptr) {
      high_or_low_interval_->first_range_ = first_range_->Dup(allocator_);
      high_or_low_interval_->last_range_ = high_or_low_interval_->first_range_->GetLastRange();
      high_or_low_interval_->range_search_start_ = high_or_low_interval_->first_range_;
    }
    auto pos = high_or_low_interval_->uses_.before_begin();
    for (const UsePosition& use : uses_) {
      UsePosition* new_use = use.Clone(allocator_);
      pos = high_or_low_interval_->uses_.insert_after(pos, *new_use);
    }

    auto env_pos = high_or_low_interval_->env_uses_.before_begin();
    for (const EnvUsePosition& env_use : env_uses_) {
      EnvUsePosition* new_env_use = env_use.Clone(allocator_);
      env_pos = high_or_low_interval_->env_uses_.insert_after(env_pos, *new_env_use);
    }
  }

  // Returns whether an interval, when it is non-split, is using
  // the same register as one of its inputs. This function should
  // be used only for DCHECKs.
  bool IsUsingInputRegister() const {
    if (defined_by_ != nullptr && !IsSplit()) {
      for (const HInstruction* input : defined_by_->GetInputs()) {
        LiveInterval* interval = input->GetLiveInterval();

        // Find the interval that covers `defined_by_`. Calls to this function
        // are made outside the linear scan, hence we need to use CoversSlow.
        while (interval != nullptr && !interval->CoversSlow(defined_by_->GetLifetimePosition())) {
          interval = interval->GetNextSibling();
        }

        // Check if both intervals have the same register of the same kind.
        if (interval != nullptr
            && interval->SameRegisterKind(*this)
            && interval->GetRegister() == GetRegister()) {
          return true;
        }
      }
    }
    return false;
  }

  // Returns whether an interval, when it is non-split, can safely use
  // the same register as one of its inputs. Note that this method requires
  // IsUsingInputRegister() to be true. This function should be used only
  // for DCHECKs.
  bool CanUseInputRegister() const {
    DCHECK(IsUsingInputRegister());
    if (defined_by_ != nullptr && !IsSplit()) {
      LocationSummary* locations = defined_by_->GetLocations();
      if (locations->OutputCanOverlapWithInputs()) {
        return false;
      }
      for (const HInstruction* input : defined_by_->GetInputs()) {
        LiveInterval* interval = input->GetLiveInterval();

        // Find the interval that covers `defined_by_`. Calls to this function
        // are made outside the linear scan, hence we need to use CoversSlow.
        while (interval != nullptr && !interval->CoversSlow(defined_by_->GetLifetimePosition())) {
          interval = interval->GetNextSibling();
        }

        if (interval != nullptr
            && interval->SameRegisterKind(*this)
            && interval->GetRegister() == GetRegister()) {
          // We found the input that has the same register. Check if it is live after
          // `defined_by_`.
          return !interval->CoversSlow(defined_by_->GetLifetimePosition() + 1);
        }
      }
    }
    LOG(FATAL) << "Unreachable";
    UNREACHABLE();
  }

  void AddSafepoint(HInstruction* instruction) {
    SafepointPosition* safepoint = new (allocator_) SafepointPosition(instruction);
    if (first_safepoint_ == nullptr) {
      first_safepoint_ = last_safepoint_ = safepoint;
    } else {
      DCHECK_LE(last_safepoint_->GetPosition(), safepoint->GetPosition());
      last_safepoint_->SetNext(safepoint);
      last_safepoint_ = safepoint;
    }
  }

  SafepointPosition* GetFirstSafepoint() const {
    return first_safepoint_;
  }

  // Resets the starting point for range-searching queries to the first range.
  // Intervals must be reset prior to starting a new linear scan over them.
  void ResetSearchCache() {
    range_search_start_ = first_range_;
  }

  bool DefinitionRequiresRegister() const {
    DCHECK(IsParent());
    LocationSummary* locations = defined_by_->GetLocations();
    Location location = locations->Out();
    // This interval is the first interval of the instruction. If the output
    // of the instruction requires a register, we return the position of that instruction
    // as the first register use.
    if (location.IsUnallocated()) {
      if ((location.GetPolicy() == Location::kRequiresRegister)
          || (location.GetPolicy() == Location::kSameAsFirstInput
              && (locations->InAt(0).IsRegister()
                  || locations->InAt(0).IsRegisterPair()
                  || locations->InAt(0).GetPolicy() == Location::kRequiresRegister))) {
        return true;
      } else if ((location.GetPolicy() == Location::kRequiresFpuRegister)
                 || (location.GetPolicy() == Location::kSameAsFirstInput
                     && (locations->InAt(0).IsFpuRegister()
                         || locations->InAt(0).IsFpuRegisterPair()
                         || locations->InAt(0).GetPolicy() == Location::kRequiresFpuRegister))) {
        return true;
      }
    } else if (location.IsRegister() || location.IsRegisterPair()) {
      return true;
    }
    return false;
  }

 private:
  LiveInterval(ScopedArenaAllocator* allocator,
               DataType::Type type,
               HInstruction* defined_by = nullptr,
               bool is_fixed = false,
               int reg = kNoRegister,
               bool is_temp = false,
               bool is_high_interval = false)
      : allocator_(allocator),
        first_range_(nullptr),
        last_range_(nullptr),
        range_search_start_(nullptr),
        first_safepoint_(nullptr),
        last_safepoint_(nullptr),
        uses_(),
        env_uses_(),
        type_(type),
        next_sibling_(nullptr),
        parent_(this),
        register_(reg),
        spill_slot_(kNoSpillSlot),
        is_fixed_(is_fixed),
        is_temp_(is_temp),
        is_high_interval_(is_high_interval),
        high_or_low_interval_(nullptr),
        defined_by_(defined_by) {}

  // Searches for a LiveRange that either covers the given position or is the
  // first LiveRange that starts after it. Returns null if no such LiveRange
  // exists. Ranges known to end before `position` can be skipped with
  // `search_start`.
  LiveRange* FindRangeAtOrAfter(size_t position, LiveRange* search_start) const {
    if (kIsDebugBuild) {
      if (search_start != first_range_) {
        // If we are not searching the entire list of ranges, make sure we do
        // not skip the range we are searching for.
        if (search_start == nullptr) {
          DCHECK(IsDeadAt(position));
        } else if (search_start->GetStart() > position) {
          DCHECK_EQ(search_start, FindRangeAtOrAfter(position, first_range_));
        }
      }
    }

    LiveRange* range;
    for (range = search_start;
         range != nullptr && range->GetEnd() <= position;
         range = range->GetNext()) {
      continue;
    }
    return range;
  }

  bool IsDefiningPosition(size_t position) const {
    return IsParent() && (position == GetStart());
  }

  bool HasSynthesizeUseAt(size_t position) const {
    for (const UsePosition& use : GetUses()) {
      size_t use_position = use.GetPosition();
      if ((use_position == position) && use.IsSynthesized()) {
        return true;
      }
      if (use_position > position) break;
    }
    return false;
  }

  void AddBackEdgeUses(const HBasicBlock& block_at_use) {
    DCHECK(block_at_use.IsInLoop());
    if (block_at_use.GetGraph()->HasIrreducibleLoops()) {
      // Linear order may not be well formed when irreducible loops are present,
      // i.e. loop blocks may not be adjacent and a back edge may not be last,
      // which violates assumptions made in this method.
      return;
    }

    // Add synthesized uses at the back edge of loops to help the register allocator.
    // Note that this method is called in decreasing liveness order, to facilitate
    // adding uses at the head of the `uses_` list. Because below we iterate from
    // inner-most to outer-most, which is in increasing liveness order, we need to
    // add subsequent entries after the last inserted entry.
    const UsePositionList::iterator old_begin = uses_.begin();
    UsePositionList::iterator insert_pos = uses_.before_begin();
    for (HLoopInformationOutwardIterator it(block_at_use);
         !it.Done();
         it.Advance()) {
      HLoopInformation* current = it.Current();
      if (GetDefinedBy()->GetLifetimePosition() >= current->GetHeader()->GetLifetimeStart()) {
        // This interval is defined in the loop. We can stop going outward.
        break;
      }

      // We're only adding a synthesized use at the last back edge. Adding synthesized uses on
      // all back edges is not necessary: anything used in the loop will have its use at the
      // last back edge. If we want branches in a loop to have better register allocation than
      // another branch, then it is the linear order we should change.
      size_t back_edge_use_position = current->GetLifetimeEnd();
      if ((old_begin != uses_.end()) && (old_begin->GetPosition() <= back_edge_use_position)) {
        // There was a use already seen in this loop. Therefore the previous call to `AddUse`
        // already inserted the back edge use. We can stop going outward.
        DCHECK(HasSynthesizeUseAt(back_edge_use_position));
        break;
      }

      DCHECK(insert_pos != uses_.before_begin()
                 ? back_edge_use_position > insert_pos->GetPosition()
                 : current == block_at_use.GetLoopInformation())
          << std::distance(uses_.before_begin(), insert_pos);

      UsePosition* new_use = new (allocator_) UsePosition(back_edge_use_position);
      insert_pos = uses_.insert_after(insert_pos, *new_use);
    }
  }

  ScopedArenaAllocator* const allocator_;

  // Ranges of this interval. We need quick access to the last range to test
  // for liveness (see `IsDeadAt`).
  LiveRange* first_range_;
  LiveRange* last_range_;

  // The first range at or after the current position of a linear scan. It is
  // used to optimize range-searching queries.
  LiveRange* range_search_start_;

  // Safepoints where this interval is live.
  SafepointPosition* first_safepoint_;
  SafepointPosition* last_safepoint_;

  // Uses of this interval. Only the parent interval keeps these lists.
  UsePositionList uses_;
  EnvUsePositionList env_uses_;

  // The instruction type this interval corresponds to.
  const DataType::Type type_;

  // Live interval that is the result of a split.
  LiveInterval* next_sibling_;

  // The first interval, from which all split intervals come.
  LiveInterval* parent_;

  // The register allocated to this interval.
  int register_;

  // The spill slot allocated to this interval.
  int spill_slot_;

  // Whether the interval is for a fixed register.
  const bool is_fixed_;

  // Whether the interval is for a temporary.
  const bool is_temp_;

  // Whether this interval is a synthesized interval for a register pair.
  const bool is_high_interval_;

  // If this interval needs a register pair, the high or low equivalent.
  // `is_high_interval_` tells whether this holds the low or the high.
  LiveInterval* high_or_low_interval_;

  // The instruction represented by this interval.
  HInstruction* const defined_by_;

  static constexpr int kNoRegister = -1;
  static constexpr int kNoSpillSlot = -1;

  ART_FRIEND_TEST(RegisterAllocatorTest, SpillInactive);

  DISALLOW_COPY_AND_ASSIGN(LiveInterval);
};

/**
 * Analysis that computes the liveness of instructions:
 *
 * (a) Non-environment uses of an instruction always make
 *     the instruction live.
 * (b) Environment uses of an instruction whose type is object (that is, non-primitive)
 *     make the instruction live, unless the class has an @DeadReferenceSafe annotation.
 *     This avoids unexpected premature reference enqueuing or finalization, which could
 *     result in premature deletion of native objects. In the presence of @DeadReferenceSafe,
 *     object references are treated like primitive types.
 * (c) When the graph has the debuggable property, environment uses
 *     of an instruction that has a primitive type make the instruction live.
 *     If the graph does not have the debuggable property, the environment
 *     use has no effect, and may get a 'none' value after register allocation.
 * (d) When compiling in OSR mode, all loops in the compiled method may be entered
 *     from the interpreter via SuspendCheck; such a use in SuspendCheck makes the
 *     instruction live.
 *
 * (b), (c) and (d) are implemented through SsaLivenessAnalysis::ShouldBeLiveForEnvironment.
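 *
 * A hedged usage sketch (driver names assumed; the allocator must outlive
 * register allocation, as noted on `allocator_` below):
 *
 *   ScopedArenaAllocator allocator(graph->GetArenaStack());
 *   SsaLivenessAnalysis liveness(graph, codegen, &allocator);
 *   liveness.Analyze();
 *   // Query the results, e.g. liveness.GetLiveInSet(*block) or
 *   // liveness.GetInstructionFromSsaIndex(i)->GetLiveInterval().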
 */
class SsaLivenessAnalysis : public ValueObject {
 public:
  SsaLivenessAnalysis(HGraph* graph, CodeGenerator* codegen, ScopedArenaAllocator* allocator)
      : graph_(graph),
        codegen_(codegen),
        allocator_(allocator),
        block_infos_(graph->GetBlocks().size(),
                     nullptr,
                     allocator_->Adapter(kArenaAllocSsaLiveness)),
        instructions_from_ssa_index_(allocator_->Adapter(kArenaAllocSsaLiveness)),
        instructions_from_lifetime_position_(allocator_->Adapter(kArenaAllocSsaLiveness)),
        number_of_ssa_values_(0) {
  }

  void Analyze();

  BitVectorView<size_t> GetLiveInSet(const HBasicBlock& block) const {
    return block_infos_[block.GetBlockId()]->live_in_;
  }

  BitVectorView<size_t> GetLiveOutSet(const HBasicBlock& block) const {
    return block_infos_[block.GetBlockId()]->live_out_;
  }

  BitVectorView<size_t> GetKillSet(const HBasicBlock& block) const {
    return block_infos_[block.GetBlockId()]->kill_;
  }

  HInstruction* GetInstructionFromSsaIndex(size_t index) const {
    return instructions_from_ssa_index_[index];
  }

  HInstruction* GetInstructionFromPosition(size_t index) const {
    return instructions_from_lifetime_position_[index];
  }

  HBasicBlock* GetBlockFromPosition(size_t index) const {
    HInstruction* instruction = GetInstructionFromPosition(index);
    if (instruction == nullptr) {
      // If we are at a block boundary, get the following block.
      instruction = GetInstructionFromPosition(index + 1);
    }
    return instruction->GetBlock();
  }

  bool IsAtBlockBoundary(size_t index) const {
    return GetInstructionFromPosition(index) == nullptr;
  }

  HInstruction* GetTempUser(LiveInterval* temp) const {
    // A temporary shares the same lifetime start as the instruction that requires it.
    DCHECK(temp->IsTemp());
    HInstruction* user = GetInstructionFromPosition(temp->GetStart() / 2);
    DCHECK_EQ(user, temp->GetUses().front().GetUser());
    return user;
  }

  size_t GetTempIndex(LiveInterval* temp) const {
    // We use the input index to store the index of the temporary in the user's temporary list.
    DCHECK(temp->IsTemp());
    return temp->GetUses().front().GetInputIndex();
  }

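  // Each instruction is allotted two lifetime positions (its uses are recorded
  // at the odd position that follows it, see `AddUse`), hence the factor of 2.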
  size_t GetMaxLifetimePosition() const {
    return instructions_from_lifetime_position_.size() * 2 - 1;
  }

  size_t GetNumberOfSsaValues() const {
    return number_of_ssa_values_;
  }

  static constexpr const char* kLivenessPassName = "liveness";

 private:
  // Give an SSA number to each instruction that defines a value used by another instruction,
  // and set up the lifetime information of each instruction and block.
  void NumberInstructions();

  // Compute live ranges of instructions, as well as live_in, live_out and kill sets.
  void ComputeLiveness();

  // Compute the live ranges of instructions, as well as the initial live_in, live_out and
  // kill sets, that do not take into account backward branches.
  void ComputeLiveRanges();

  // After computing the initial sets, this method does a fixed point
  // calculation over the live_in and live_out sets to take into account
  // backward branches.
  void ComputeLiveInAndLiveOutSets();

  // Updates the live_in set of the block and returns whether it has changed.
  bool UpdateLiveIn(const HBasicBlock& block);

  // Updates the live_out set of the block and returns whether it has changed.
  bool UpdateLiveOut(const HBasicBlock& block);

  static void ProcessEnvironment(HInstruction* instruction,
                                 HInstruction* actual_user,
                                 BitVectorView<size_t> live_in);
  static void RecursivelyProcessInputs(HInstruction* instruction,
                                       HInstruction* actual_user,
                                       BitVectorView<size_t> live_in);

  // Returns whether `instruction` in an HEnvironment held by `env_holder`
  // should be kept live by the HEnvironment.
  static bool ShouldBeLiveForEnvironment(HInstruction* env_holder, HInstruction* instruction) {
    DCHECK(instruction != nullptr);
    // A value that's not live in compiled code may still be needed in the interpreter,
    // due to code motion, etc.
    if (env_holder->IsDeoptimize()) return true;
    // A value live at a throwing instruction in a try block may be copied by
    // the exception handler to its location at the top of the catch block.
    if (env_holder->CanThrowIntoCatchBlock()) return true;
    HGraph* graph = instruction->GetBlock()->GetGraph();
    if (graph->IsDebuggable()) return true;
    // When compiling in OSR mode, all loops in the compiled method may be entered
    // from the interpreter via SuspendCheck; thus we need to preserve the environment.
    if (env_holder->IsSuspendCheck() && graph->IsCompilingOsr()) return true;
    if (graph->IsDeadReferenceSafe()) return false;
    return instruction->GetType() == DataType::Type::kReference;
  }

  void CheckNoLiveInIrreducibleLoop(const HBasicBlock& block) const {
    if (!block.IsLoopHeader() || !block.GetLoopInformation()->IsIrreducible()) {
      return;
    }
    DoCheckNoLiveInIrreducibleLoop(block);
  }

  void DoCheckNoLiveInIrreducibleLoop(const HBasicBlock& block) const;

  HGraph* const graph_;
  CodeGenerator* const codegen_;

  // Use a local ScopedArenaAllocator for allocating memory.
  // This allocator must remain alive while doing register allocation.
  ScopedArenaAllocator* const allocator_;

  ScopedArenaVector<BlockInfo*> block_infos_;

  // Temporary array used when computing live_in, live_out, and kill sets.
  ScopedArenaVector<HInstruction*> instructions_from_ssa_index_;

  // Temporary array used when inserting moves in the graph.
  ScopedArenaVector<HInstruction*> instructions_from_lifetime_position_;
  size_t number_of_ssa_values_;

  ART_FRIEND_TEST(RegisterAllocatorTest, SpillInactive);
  ART_FRIEND_TEST(RegisterAllocatorTest, FreeUntil);

  DISALLOW_COPY_AND_ASSIGN(SsaLivenessAnalysis);
};

}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_SSA_LIVENESS_ANALYSIS_H_