//===-- AArch64StackTaggingPreRA.cpp --- Stack Tagging for AArch64 -----===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
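//
// This pass looks for loads and stores whose address is produced by a
// TAGPstack instruction (a tagged pointer into the current stack frame) and,
// when mayUseUncheckedLoadStore() says it is safe, rewrites their address
// operand to reference the frame index directly, marked with the MO_TAGGED
// flag. Such accesses can then be lowered as SP-relative ("unchecked")
// accesses, so the tagged pointer does not have to be kept live across them.
//
//===----------------------------------------------------------------------===//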

#include "AArch64.h"
#include "AArch64InstrInfo.h"
#include "AArch64MachineFunctionInfo.h"
#include "llvm/ADT/DepthFirstIterator.h"
#include "llvm/ADT/MapVector.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/MachineBranchProbabilityInfo.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineLoopInfo.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/MachineTraceMetrics.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

#define DEBUG_TYPE "aarch64-stack-tagging-pre-ra"

enum UncheckedLdStMode { UncheckedNever, UncheckedSafe, UncheckedAlways };

cl::opt<UncheckedLdStMode> ClUncheckedLdSt(
    "stack-tagging-unchecked-ld-st", cl::Hidden,
    cl::init(UncheckedSafe),
    cl::desc(
        "Unconditionally apply unchecked-ld-st optimization (even for large "
        "stack frames, or in the presence of variable sized allocas)."),
    cl::values(
        clEnumValN(UncheckedNever, "never", "never apply unchecked-ld-st"),
        clEnumValN(
            UncheckedSafe, "safe",
            "apply unchecked-ld-st when the target is definitely within range"),
        clEnumValN(UncheckedAlways, "always", "always apply unchecked-ld-st")));

namespace {

class AArch64StackTaggingPreRA : public MachineFunctionPass {
  MachineFunction *MF;
  AArch64FunctionInfo *AFI;
  MachineFrameInfo *MFI;
  MachineRegisterInfo *MRI;
  const AArch64RegisterInfo *TRI;
  const AArch64InstrInfo *TII;

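  // TAGPstack instructions found in the function; collected by
  // runOnMachineFunction() and consumed by uncheckLoadsAndStores().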
  SmallVector<MachineInstr*, 16> ReTags;

public:
  static char ID;
  AArch64StackTaggingPreRA() : MachineFunctionPass(ID) {
    initializeAArch64StackTaggingPreRAPass(*PassRegistry::getPassRegistry());
  }

  bool mayUseUncheckedLoadStore();
  void uncheckUsesOf(unsigned TaggedReg, int FI);
  void uncheckLoadsAndStores();

  bool runOnMachineFunction(MachineFunction &Func) override;
  StringRef getPassName() const override {
    return "AArch64 Stack Tagging PreRA";
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesCFG();
    MachineFunctionPass::getAnalysisUsage(AU);
  }
};
} // end anonymous namespace

char AArch64StackTaggingPreRA::ID = 0;

INITIALIZE_PASS_BEGIN(AArch64StackTaggingPreRA, "aarch64-stack-tagging-pre-ra",
                      "AArch64 Stack Tagging PreRA Pass", false, false)
INITIALIZE_PASS_END(AArch64StackTaggingPreRA, "aarch64-stack-tagging-pre-ra",
                    "AArch64 Stack Tagging PreRA Pass", false, false)

FunctionPass *llvm::createAArch64StackTaggingPreRAPass() {
  return new AArch64StackTaggingPreRA();
}

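// Returns true for the unsigned scaled-immediate and register-pair load/store
// opcodes whose base operand this pass may rewrite into a frame index, i.e.
// the forms that can become unchecked SP-plus-immediate accesses.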
static bool isUncheckedLoadOrStoreOpcode(unsigned Opcode) {
  switch (Opcode) {
  case AArch64::LDRBBui:
  case AArch64::LDRHHui:
  case AArch64::LDRWui:
  case AArch64::LDRXui:

  case AArch64::LDRBui:
  case AArch64::LDRHui:
  case AArch64::LDRSui:
  case AArch64::LDRDui:
  case AArch64::LDRQui:

  case AArch64::LDRSHWui:
  case AArch64::LDRSHXui:

  case AArch64::LDRSBWui:
  case AArch64::LDRSBXui:

  case AArch64::LDRSWui:

  case AArch64::STRBBui:
  case AArch64::STRHHui:
  case AArch64::STRWui:
  case AArch64::STRXui:

  case AArch64::STRBui:
  case AArch64::STRHui:
  case AArch64::STRSui:
  case AArch64::STRDui:
  case AArch64::STRQui:

  case AArch64::LDPWi:
  case AArch64::LDPXi:
  case AArch64::LDPSi:
  case AArch64::LDPDi:
  case AArch64::LDPQi:

  case AArch64::LDPSWi:

  case AArch64::STPWi:
  case AArch64::STPXi:
  case AArch64::STPSi:
  case AArch64::STPDi:
  case AArch64::STPQi:
    return true;
  default:
    return false;
  }
}

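// Decides whether the unchecked load/store rewrite may be applied in this
// function, based on the command-line mode and a conservative estimate of the
// stack frame size.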
bool AArch64StackTaggingPreRA::mayUseUncheckedLoadStore() {
  if (ClUncheckedLdSt == UncheckedNever)
    return false;
  if (ClUncheckedLdSt == UncheckedAlways)
    return true;

  // This estimate could be improved if we had harder guarantees about stack
  // frame layout. With LocalStackAllocation we can estimate the SP offset to
  // any preallocated slot. AArch64FrameLowering::orderFrameObjects could put
  // tagged objects ahead of non-tagged ones, but that's not always desirable.
  //
  // Underestimating the SP offset here may require the use of LDG to
  // materialize the tagged address of the stack slot, along with a scratch
  // register allocation (post-regalloc!).
  //
  // For now we do the safe thing and require that the entire stack frame is
  // within range of the shortest-range unchecked instruction.
  unsigned FrameSize = 0;
  for (unsigned i = 0, e = MFI->getObjectIndexEnd(); i != e; ++i)
    FrameSize += MFI->getObjectSize(i);
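  // 0xf00 (3840 bytes) is a conservative bound tied to the immediate range of
  // the unchecked forms (see the comment above); for example, the byte-sized
  // unsigned-immediate loads/stores reach at most 4095 bytes from the base.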
  bool EntireFrameReachableFromSP = FrameSize < 0xf00;
  return !MFI->hasVarSizedObjects() && EntireFrameReachableFromSP;
}

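// Rewrites every eligible load/store that uses TaggedReg as its base register
// so that it addresses frame index FI directly (with the MO_TAGGED target
// flag), and follows virtual-register COPYs of TaggedReg recursively.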
void AArch64StackTaggingPreRA::uncheckUsesOf(unsigned TaggedReg, int FI) {
  for (auto UI = MRI->use_instr_begin(TaggedReg), E = MRI->use_instr_end();
       UI != E;) {
    MachineInstr *UseI = &*(UI++);
    if (isUncheckedLoadOrStoreOpcode(UseI->getOpcode())) {
      // FI operand is always the one before the immediate offset.
      unsigned OpIdx = TII->getLoadStoreImmIdx(UseI->getOpcode()) - 1;
      if (UseI->getOperand(OpIdx).isReg() &&
          UseI->getOperand(OpIdx).getReg() == TaggedReg) {
        UseI->getOperand(OpIdx).ChangeToFrameIndex(FI);
        UseI->getOperand(OpIdx).setTargetFlags(AArch64II::MO_TAGGED);
      }
    } else if (UseI->isCopy() &&
               Register::isVirtualRegister(UseI->getOperand(0).getReg())) {
      uncheckUsesOf(UseI->getOperand(0).getReg(), FI);
    }
  }
}

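// Applies the rewrite to the users of every collected TAGPstack result.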
void AArch64StackTaggingPreRA::uncheckLoadsAndStores() {
  for (auto *I : ReTags) {
    unsigned TaggedReg = I->getOperand(0).getReg();
    int FI = I->getOperand(1).getIndex();
    uncheckUsesOf(TaggedReg, FI);
  }
}

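// Collects all TAGPstack instructions in the function and, when the unchecked
// load/store optimization is considered safe, rewrites their users.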
bool AArch64StackTaggingPreRA::runOnMachineFunction(MachineFunction &Func) {
  MF = &Func;
  MRI = &MF->getRegInfo();
  AFI = MF->getInfo<AArch64FunctionInfo>();
  TII = static_cast<const AArch64InstrInfo *>(
      MF->getSubtarget().getInstrInfo());
  TRI = static_cast<const AArch64RegisterInfo *>(
      MF->getSubtarget().getRegisterInfo());
  MFI = &MF->getFrameInfo();
  ReTags.clear();

  assert(MRI->isSSA());

  LLVM_DEBUG(dbgs() << "********** AArch64 Stack Tagging PreRA **********\n"
                    << "********** Function: " << MF->getName() << '\n');

  SmallSetVector<int, 8> TaggedSlots;
  for (auto &BB : *MF) {
    for (auto &I : BB) {
      if (I.getOpcode() == AArch64::TAGPstack) {
        ReTags.push_back(&I);
        int FI = I.getOperand(1).getIndex();
        TaggedSlots.insert(FI);
        // There should be no offsets in TAGP yet.
        assert(I.getOperand(2).getImm() == 0);
      }
    }
  }

  if (ReTags.empty())
    return false;

  if (mayUseUncheckedLoadStore())
    uncheckLoadsAndStores();

  return true;
}