/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef BERBERIS_BACKEND_COMMON_LIFETIME_H_
#define BERBERIS_BACKEND_COMMON_LIFETIME_H_

#include <string>

#include "berberis/base/arena_alloc.h"
#include "berberis/base/arena_list.h"
#include "berberis/base/logging.h"
#include "berberis/base/stringprintf.h"

#include "berberis/backend/common/machine_ir.h"

namespace berberis {

// One use of virtual register.
// TODO(b/232598137): should probably go inside machine IR?
// Or make it internal in VRegLifetime?
class VRegUse {
 public:
  // 'pos' locates the insn that references the register, 'index' is the
  // operand index within that insn, and [begin, end) is the position range
  // used for interference tests.
  VRegUse(const MachineInsnListPosition& pos, int index, int begin, int end)
      : pos_(pos), index_(index), begin_(begin), end_(end) {}

  // Virtual register currently held by this operand.
  MachineReg GetVReg() const { return pos_.insn()->RegAt(index_); }

  // Rewrite this operand to hard register 'reg'.
  // If 'slot' is not -1, the virtual register's value lives in that spill
  // slot: insert a reload before a use and/or a spill after a def.
  // When the insn is itself a copy, rewrite the copy's other operand to the
  // spilled location instead - unless that would produce a memory-to-memory
  // copy (i.e. the other operand is already a spilled reg).
  void RewriteVReg(MachineIR* machine_ir, MachineReg reg, int slot) {
    pos_.insn()->SetRegAt(index_, reg);
    if (slot != -1) {
      int offset = machine_ir->SpillSlotOffset(slot);
      MachineReg spill = MachineReg::CreateSpilledRegFromIndex(offset);
      int size = GetRegClass()->RegSize();
      if (IsUse()) {
        if (pos_.insn()->is_copy() && !pos_.insn()->RegAt(0).IsSpilledReg()) {
          // Rewrite the src of the copy itself, unless the result is mem-to-mem copy.
          CHECK_EQ(1, index_);
          pos_.insn()->SetRegAt(1, spill);
        } else {
          // Reload the spilled value into 'reg' just before the insn.
          pos_.InsertBefore(machine_ir->NewInsn<PseudoCopy>(reg, spill, size));
        }
      }
      if (IsDef()) {
        if (pos_.insn()->is_copy() && !pos_.insn()->RegAt(1).IsSpilledReg()) {
          // Rewrite the dst of the copy itself, unless the result is mem-to-mem copy.
          CHECK_EQ(0, index_);
          pos_.insn()->SetRegAt(0, spill);
        } else {
          // Store 'reg' back to the spill slot right after the insn.
          pos_.InsertAfter(machine_ir->NewInsn<PseudoCopy>(spill, reg, size));
        }
      }
    }
  }

  const MachineRegClass* GetRegClass() const { return pos_.insn()->RegKindAt(index_).RegClass(); }

  int begin() const { return begin_; }

  int end() const { return end_; }

  // One line.
  std::string GetDebugString() const {
    return StringPrintf("[%d, %d) %s", begin(), end(), GetMachineRegDebugString(GetVReg()).c_str());
  }

  // One line.
  std::string GetInsnDebugString() const { return pos_.insn()->GetDebugString(); }

  bool IsUse() const { return pos_.insn()->RegKindAt(index_).IsUse(); }

  bool IsDef() const { return pos_.insn()->RegKindAt(index_).IsDef(); }

 private:
  // Insn to rewrite and spill/reload insert position.
  MachineInsnListPosition pos_;
  // Index of register operand.
  int index_;
  // Range for interference test.
  int begin_;
  int end_;
};

using VRegUseList = ArenaList<VRegUse>;

// Continuous live range of virtual register.
class VRegLiveRange {
 public:
  // Empty range starting (and ending) at 'begin'; no uses yet.
  VRegLiveRange(Arena* arena, int begin) : begin_(begin), end_(begin), use_list_(arena) {}

  // Range covering exactly one use.
  VRegLiveRange(Arena* arena, const VRegUse& use)
      : begin_(use.begin()), end_(use.end()), use_list_(1, use, arena) {}

  int begin() const { return begin_; }

  // Modify begin only if there are no uses yet.
  // Resets end as well, so the range stays empty at the new position.
  void set_begin(int begin) {
    DCHECK_LE(begin_, begin);
    DCHECK_LE(end_, begin);
    DCHECK(use_list_.empty());
    begin_ = begin;
    end_ = begin;
  }

  int end() const { return end_; }

  void set_end(int end) {
    DCHECK_LE(end_, end);
    end_ = end;
  }

  const VRegUseList& use_list() const { return use_list_; }

  // Try to kill this!
  VRegUseList& use_list() { return use_list_; }

  // Append a use; uses must arrive ordered by begin().
  // Extends end() if the new use ends later.
  void AppendUse(const VRegUse& use) {
    DCHECK_LE(begin_, use.begin());
    // It can happen that use overlaps previous use.
    // For example, if an insn 'FOO use_def, use' appears as 'FOO x, x',
    // then 'x' uses will come (ordered by begin) as [0, 2), [0, 1).
    // We record each use separately so we can rewrite them.
    // But because of this lame case we have to do extra check for set_end.
    // TODO(b/232598137): another option is to order uses as 'use, use_def, def'
    // in lifetime_analysis::AddInsn, so that uses with equal begin are sorted
    // by end.
    use_list_.push_back(use);
    if (end_ < use.end()) {
      end_ = use.end();
    }
  }

  // Multiline
  std::string GetDebugString() const {
    std::string out(StringPrintf("[%d, %d) {\n", begin(), end()));
    for (const auto& use : use_list_) {
      out += "  ";
      out += use.GetDebugString();
      out += "\n";
    }
    out += "}\n";
    return out;
  }

 private:
  // Actual live range, might start before first use and end after last use.
  int begin_;
  int end_;
  // Use list might be empty if register is live but not used :)
  VRegUseList use_list_;
};

using VRegLiveRangeList = ArenaList<VRegLiveRange>;

// We might consider spilling 'lifetime' to free its hard register 'reg'
// after 'begin' position to be used by 'new_lifetime'.
//
// We assume all lifetimes that start before 'begin' are already allocated.
// Here is what we do:
// - We assign spill slot to 'lifetime', virtual register of this lifetime
//   now lives in that spill slot;
// - If an instruction needs virtual register to be in hard register, we create
//   a 'tiny' lifetime that only describes use of that virtual register in
//   that instruction. Such tiny lifetime can't be spilled;
// - Tiny lifetimes that start before 'begin' are allocated to 'reg'. This
//   doesn't create any conflicts with other lifetimes that start before
//   'begin';
// - Remaining tiny lifetimes start at or after 'begin', so will be allocated
//   in order, after 'new_lifetime';
// - 'reg' is now free at 'begin', so 'new_lifetime' can use it;
//
// If some tiny lifetime starts before but ends after 'begin', spilling is
// impossible. As that tiny lifetime starts before 'begin', it has to be
// allocated to 'reg', otherwise there might be conflicts (and we don't
// backtrack to resolve them), so 'reg' is not free at 'begin'.
//
// If some tiny lifetime starts right at 'begin', it means 'lifetime' and
// 'new_lifetime' virtual registers are used in the same instruction.
// Spilling is still possible, as that tiny lifetime will be allocated after
// 'new_lifetime', so 'new_lifetime' will use 'reg', and tiny lifetime will
// get some other hard register. However, that makes sense only if tiny
// lifetime will not compete with 'new_lifetime' for the same hard register,
// so we mark this case explicitly.

enum SplitKind { SPLIT_IMPOSSIBLE = 0, SPLIT_CONFLICT, SPLIT_OK };

// Position within a lifetime at which it can be split: the live range and
// the first use to transfer into tiny lifetimes.
struct SplitPos {
  VRegLiveRangeList::iterator range_it;
  VRegUseList::iterator use_it;
};

// Lifetime of virtual register.
// Basically, list of live ranges.
class VRegLifetime {
 public:
  using List = ArenaList<VRegLifetime>;

  // Lifetime with one empty live range starting at 'begin'.
  // Reg class is unknown (nullptr) until the first use arrives.
  VRegLifetime(Arena* arena, int begin)
      : arena_(arena),
        range_list_(1, VRegLiveRange(arena, begin), arena),
        reg_class_(nullptr),
        hard_reg_(0),
        spill_slot_(-1),
        spill_weight_(0),
        move_hint_(nullptr) {}

  // Lifetime made of a single use (e.g. a 'tiny' lifetime produced by Split).
  VRegLifetime(Arena* arena, const VRegUse& use)
      : arena_(arena),
        range_list_(1, VRegLiveRange(arena, use), arena),
        reg_class_(use.GetRegClass()),
        hard_reg_(0),
        spill_slot_(-1),
        spill_weight_(1),
        move_hint_(nullptr) {}

  // Open a new live range at 'begin', which must be at or after current end.
  void StartLiveRange(int begin) {
    DCHECK_LE(end(), begin);
    range_list_.push_back(VRegLiveRange(arena_, begin));
  }

  // Add the next use (uses arrive ordered by begin).
  // Narrows reg class to fit all uses and bumps the spill weight.
  void AppendUse(const VRegUse& use) {
    if (use.IsDef() && !use.IsUse() && end() < use.begin()) {
      // This is write-only use and there is a gap between it and previous use.
      // Can insert lifetime hole.
      if (range_list_.back().use_list().empty()) {
        // If current live range is still empty, this might be live-in
        // register that gets overwritten, so remove live-in.
        range_list_.back().set_begin(use.begin());
      } else {
        range_list_.push_back(VRegLiveRange(arena_, use.begin()));
      }
    }
    range_list_.back().AppendUse(use);
    // We assume reg classes are either nested or unrelated (so have no
    // common registers).
    if (reg_class_) {
      reg_class_ = reg_class_->GetIntersection(use.GetRegClass());
      CHECK(reg_class_);
    } else {
      reg_class_ = use.GetRegClass();
    }
    ++spill_weight_;
  }

  void set_hard_reg(MachineReg reg) { hard_reg_ = reg; }

  MachineReg hard_reg() const { return hard_reg_; }

  // Assigned spill slot, or -1 if not spilled.
  int GetSpill() const { return spill_slot_; }

  // Assign spill slot; may be done only once.
  void SetSpill(int slot) {
    DCHECK_EQ(spill_slot_, -1);
    spill_slot_ = slot;
  }

  int spill_weight() const { return spill_weight_; }

  // If lifetimes are connected with reg to reg move, try allocating both on the
  // same register.
  // Implement move hints as disjoint set of lifetimes, with representative that
  // is allocated first (so no union by rank, only path compression).

  // Find the representative of this lifetime's move-hint set,
  // compressing the path along the way.
  VRegLifetime* FindMoveHint() {
    if (move_hint_) {
      move_hint_ = move_hint_->FindMoveHint();
      return move_hint_;
    }
    return this;
  }

  // Union this lifetime's hint set with 'other's; the lifetime that begins
  // first becomes the representative.
  void SetMoveHint(VRegLifetime* other) {
    VRegLifetime* hint = FindMoveHint();
    VRegLifetime* other_hint = other->FindMoveHint();
    // Select lifetime that begins first.
    if (hint->begin() > other_hint->begin()) {
      hint->move_hint_ = other_hint;
    } else if (other_hint != hint) {
      other_hint->move_hint_ = hint;
    }
  }

  int begin() const {
    DCHECK(!range_list_.empty());
    return range_list_.front().begin();
  }

  int LastLiveRangeBegin() const {
    DCHECK(!range_list_.empty());
    return range_list_.back().begin();
  }

  int end() const {
    DCHECK(!range_list_.empty());
    return range_list_.back().end();
  }

  void set_end(int end) {
    DCHECK(!range_list_.empty());
    range_list_.back().set_end(end);
  }

  // Multiline.
  std::string GetDebugString() const {
    std::string out("lifetime {\n");
    for (const auto& range : range_list_) {
      out += range.GetDebugString();
    }
    out += "}\n";
    return out;
  }

  const MachineRegClass* GetRegClass() const {
    DCHECK(reg_class_);
    return reg_class_;
  }

  // Return true if lifetimes interfere.
  // Linear merge over both (ordered, disjoint) range lists.
  bool TestInterference(const VRegLifetime& other) const {
    VRegLiveRangeList::const_iterator j = other.range_list_.begin();
    for (VRegLiveRangeList::const_iterator i = range_list_.begin();
         i != range_list_.end() && j != other.range_list_.end();) {
      if (i->end() <= j->begin()) {
        ++i;
      } else if (j->end() <= i->begin()) {
        ++j;
      } else {
        return true;
      }
    }
    return false;
  }

  // Consider splitting into tiny lifetimes after 'begin'.
  // On SPLIT_OK/SPLIT_CONFLICT fills '*pos' with the first use to transfer;
  // see the SplitKind comment above for when each value is returned.
  // Non-const, as we return non-const iterator?!
  SplitKind FindSplitPos(int begin, SplitPos* pos) {
    for (auto range_it = range_list_.begin(); range_it != range_list_.end(); ++range_it) {
      if (range_it->end() <= begin) {
        continue;
      }

      for (auto use_it = range_it->use_list().begin(); use_it != range_it->use_list().end();
           ++use_it) {
        if (use_it->end() <= begin) {
          // Future tiny lifetime ends before 'begin'.
          continue;
        }

        if (use_it->begin() < begin) {
          // Future tiny lifetime starts before but ends after 'begin'.
          // Problematic case we don't allow.
          return SPLIT_IMPOSSIBLE;
        }

        // Future tiny lifetime starts at or after 'begin'.
        pos->range_it = range_it;
        pos->use_it = use_it;
        return use_it->begin() == begin ? SPLIT_CONFLICT : SPLIT_OK;
      }
    }

    // If we got here, lifetime spans after begin but has no uses there.
    // It can happen with live-out virtual registers.
    pos->range_it = range_list_.end();
    return SPLIT_OK;
  }

  // Split this (spilled) lifetime at 'split_pos': each use at or after the
  // position becomes a separate tiny lifetime appended to 'out', inheriting
  // this lifetime's spill slot. Transferred uses are erased from this
  // lifetime so they are rewritten exactly once.
  void Split(const SplitPos& split_pos, ArenaList<VRegLifetime>* out) {
    VRegLiveRangeList::iterator range_it = split_pos.range_it;
    if (range_it == range_list_.end()) {
      return;
    }

    // Create tiny lifetime from each use after split pos.
    VRegUseList::iterator use_it = split_pos.use_it;
    for (;;) {
      for (; use_it != range_it->use_list().end(); ++use_it) {
        out->push_back(VRegLifetime(arena_, *use_it));
        out->back().SetSpill(GetSpill());
      }
      if (++range_it == range_list_.end()) {
        break;
      }
      use_it = range_it->use_list().begin();
    }

    // Erase transferred uses (so they are not rewritten twice).
    VRegLiveRangeList::iterator first_range_to_erase = split_pos.range_it;
    if (split_pos.use_it != first_range_to_erase->use_list().begin()) {
      // Erase only tail of the first range.
      split_pos.range_it->use_list().erase(split_pos.use_it, split_pos.range_it->use_list().end());
      ++first_range_to_erase;
    }
    range_list_.erase(first_range_to_erase, range_list_.end());
  }

  // Walk reg uses and replace vreg with assigned hard reg.
  void Rewrite(MachineIR* machine_ir) {
    for (auto& range : range_list_) {
      for (auto& use : range.use_list()) {
        use.RewriteVReg(machine_ir, hard_reg_, spill_slot_);
      }
    }
  }

 private:
  // Arena for allocations.
  Arena* arena_;
  // List of live ranges, must be non-empty after lifetime is populated!
  VRegLiveRangeList range_list_;
  // Register class that fits all uses.
  const MachineRegClass* reg_class_;
  // Assigned hard register.
  MachineReg hard_reg_;
  // Where to spill previous value of assigned hard register.
  int spill_slot_;
  // Spill weight, roughly the number of spill/reload insns to add.
  int spill_weight_;
  // Lifetime that starts before and is connected by move with this one.
  VRegLifetime* move_hint_;
};

using VRegLifetimeList = VRegLifetime::List;

}  // namespace berberis

#endif  // BERBERIS_BACKEND_COMMON_LIFETIME_H_