• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 //===-- SparcInstrInfo.cpp - Sparc Instruction Information ----------------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This file contains the Sparc implementation of the TargetInstrInfo class.
10 //
11 //===----------------------------------------------------------------------===//
12 
13 #include "SparcInstrInfo.h"
14 #include "Sparc.h"
15 #include "SparcMachineFunctionInfo.h"
16 #include "SparcSubtarget.h"
17 #include "llvm/ADT/STLExtras.h"
18 #include "llvm/ADT/SmallVector.h"
19 #include "llvm/CodeGen/MachineFrameInfo.h"
20 #include "llvm/CodeGen/MachineInstrBuilder.h"
21 #include "llvm/CodeGen/MachineMemOperand.h"
22 #include "llvm/CodeGen/MachineRegisterInfo.h"
23 #include "llvm/MC/TargetRegistry.h"
24 #include "llvm/Support/ErrorHandling.h"
25 
26 using namespace llvm;
27 
28 #define GET_INSTRINFO_CTOR_DTOR
29 #include "SparcGenInstrInfo.inc"
30 
// Pin the vtable to this file.
// (Defining one virtual method out-of-line ensures the vtable and type info
// are emitted in exactly this translation unit instead of every user.)
void SparcInstrInfo::anchor() {}
33 
// Construct the Sparc instruction info, telling the TableGen'erated base
// class which opcodes delimit call-frame setup/teardown, and keeping a
// reference to the subtarget for feature queries (V9, 64-bit, hard quad).
SparcInstrInfo::SparcInstrInfo(SparcSubtarget &ST)
    : SparcGenInstrInfo(SP::ADJCALLSTACKDOWN, SP::ADJCALLSTACKUP), RI(),
      Subtarget(ST) {}
37 
38 /// isLoadFromStackSlot - If the specified machine instruction is a direct
39 /// load from a stack slot, return the virtual or physical register number of
40 /// the destination along with the FrameIndex of the loaded stack slot.  If
41 /// not, return 0.  This predicate must return 0 if the instruction has
42 /// any side effects other than loading from the stack slot.
isLoadFromStackSlot(const MachineInstr & MI,int & FrameIndex) const43 unsigned SparcInstrInfo::isLoadFromStackSlot(const MachineInstr &MI,
44                                              int &FrameIndex) const {
45   if (MI.getOpcode() == SP::LDri || MI.getOpcode() == SP::LDXri ||
46       MI.getOpcode() == SP::LDFri || MI.getOpcode() == SP::LDDFri ||
47       MI.getOpcode() == SP::LDQFri) {
48     if (MI.getOperand(1).isFI() && MI.getOperand(2).isImm() &&
49         MI.getOperand(2).getImm() == 0) {
50       FrameIndex = MI.getOperand(1).getIndex();
51       return MI.getOperand(0).getReg();
52     }
53   }
54   return 0;
55 }
56 
57 /// isStoreToStackSlot - If the specified machine instruction is a direct
58 /// store to a stack slot, return the virtual or physical register number of
59 /// the source reg along with the FrameIndex of the loaded stack slot.  If
60 /// not, return 0.  This predicate must return 0 if the instruction has
61 /// any side effects other than storing to the stack slot.
isStoreToStackSlot(const MachineInstr & MI,int & FrameIndex) const62 unsigned SparcInstrInfo::isStoreToStackSlot(const MachineInstr &MI,
63                                             int &FrameIndex) const {
64   if (MI.getOpcode() == SP::STri || MI.getOpcode() == SP::STXri ||
65       MI.getOpcode() == SP::STFri || MI.getOpcode() == SP::STDFri ||
66       MI.getOpcode() == SP::STQFri) {
67     if (MI.getOperand(0).isFI() && MI.getOperand(1).isImm() &&
68         MI.getOperand(1).getImm() == 0) {
69       FrameIndex = MI.getOperand(0).getIndex();
70       return MI.getOperand(2).getReg();
71     }
72   }
73   return 0;
74 }
75 
IsIntegerCC(unsigned CC)76 static bool IsIntegerCC(unsigned CC)
77 {
78   return  (CC <= SPCC::ICC_VC);
79 }
80 
/// Return the condition code that is the logical inverse of \p CC, i.e. the
/// code that is satisfied exactly when \p CC is not.  Handles integer
/// (ICC_*), floating-point (FCC_*), and register (REG_*) condition codes.
/// Co-processor codes cannot be inverted because their meaning is opaque to
/// the compiler, and REG_BEGIN is a reserved sentinel.
static SPCC::CondCodes GetOppositeBranchCondition(SPCC::CondCodes CC)
{
  switch(CC) {
  case SPCC::ICC_A:    return SPCC::ICC_N;
  case SPCC::ICC_N:    return SPCC::ICC_A;
  case SPCC::ICC_NE:   return SPCC::ICC_E;
  case SPCC::ICC_E:    return SPCC::ICC_NE;
  case SPCC::ICC_G:    return SPCC::ICC_LE;
  case SPCC::ICC_LE:   return SPCC::ICC_G;
  case SPCC::ICC_GE:   return SPCC::ICC_L;
  case SPCC::ICC_L:    return SPCC::ICC_GE;
  case SPCC::ICC_GU:   return SPCC::ICC_LEU;
  case SPCC::ICC_LEU:  return SPCC::ICC_GU;
  case SPCC::ICC_CC:   return SPCC::ICC_CS;
  case SPCC::ICC_CS:   return SPCC::ICC_CC;
  case SPCC::ICC_POS:  return SPCC::ICC_NEG;
  case SPCC::ICC_NEG:  return SPCC::ICC_POS;
  case SPCC::ICC_VC:   return SPCC::ICC_VS;
  case SPCC::ICC_VS:   return SPCC::ICC_VC;

  // FP inversions must account for unordered operands: e.g. the inverse of
  // "greater" is "unordered or less-or-equal", not plain "less-or-equal".
  case SPCC::FCC_A:    return SPCC::FCC_N;
  case SPCC::FCC_N:    return SPCC::FCC_A;
  case SPCC::FCC_U:    return SPCC::FCC_O;
  case SPCC::FCC_O:    return SPCC::FCC_U;
  case SPCC::FCC_G:    return SPCC::FCC_ULE;
  case SPCC::FCC_LE:   return SPCC::FCC_UG;
  case SPCC::FCC_UG:   return SPCC::FCC_LE;
  case SPCC::FCC_ULE:  return SPCC::FCC_G;
  case SPCC::FCC_L:    return SPCC::FCC_UGE;
  case SPCC::FCC_GE:   return SPCC::FCC_UL;
  case SPCC::FCC_UL:   return SPCC::FCC_GE;
  case SPCC::FCC_UGE:  return SPCC::FCC_L;
  case SPCC::FCC_LG:   return SPCC::FCC_UE;
  case SPCC::FCC_UE:   return SPCC::FCC_LG;
  case SPCC::FCC_NE:   return SPCC::FCC_E;
  case SPCC::FCC_E:    return SPCC::FCC_NE;

  case SPCC::CPCC_A:   return SPCC::CPCC_N;
  case SPCC::CPCC_N:   return SPCC::CPCC_A;
  case SPCC::CPCC_3:   [[fallthrough]];
  case SPCC::CPCC_2:   [[fallthrough]];
  case SPCC::CPCC_23:  [[fallthrough]];
  case SPCC::CPCC_1:   [[fallthrough]];
  case SPCC::CPCC_13:  [[fallthrough]];
  case SPCC::CPCC_12:  [[fallthrough]];
  case SPCC::CPCC_123: [[fallthrough]];
  case SPCC::CPCC_0:   [[fallthrough]];
  case SPCC::CPCC_03:  [[fallthrough]];
  case SPCC::CPCC_02:  [[fallthrough]];
  case SPCC::CPCC_023: [[fallthrough]];
  case SPCC::CPCC_01:  [[fallthrough]];
  case SPCC::CPCC_013: [[fallthrough]];
  case SPCC::CPCC_012:
      // "Opposite" code is not meaningful, as we don't know
      // what the CoProc condition means here. The cond-code will
      // only be used in inline assembler, so this code should
      // not be reached in a normal compilation pass.
      llvm_unreachable("Meaningless inversion of co-processor cond code");

  case SPCC::REG_BEGIN:
      llvm_unreachable("Use of reserved cond code");
  case SPCC::REG_Z:
      return SPCC::REG_NZ;
  case SPCC::REG_LEZ:
      return SPCC::REG_GZ;
  case SPCC::REG_LZ:
      return SPCC::REG_GEZ;
  case SPCC::REG_NZ:
      return SPCC::REG_Z;
  case SPCC::REG_GZ:
      return SPCC::REG_LEZ;
  case SPCC::REG_GEZ:
      return SPCC::REG_LZ;
  }
  llvm_unreachable("Invalid cond code");
}
157 
isUncondBranchOpcode(int Opc)158 static bool isUncondBranchOpcode(int Opc) {
159   return Opc == SP::BA || Opc == SP::BPA;
160 }
161 
isI32CondBranchOpcode(int Opc)162 static bool isI32CondBranchOpcode(int Opc) {
163   return Opc == SP::BCOND || Opc == SP::BPICC || Opc == SP::BPICCA ||
164          Opc == SP::BPICCNT || Opc == SP::BPICCANT;
165 }
166 
isI64CondBranchOpcode(int Opc)167 static bool isI64CondBranchOpcode(int Opc) {
168   return Opc == SP::BPXCC || Opc == SP::BPXCCA || Opc == SP::BPXCCNT ||
169          Opc == SP::BPXCCANT;
170 }
171 
isFCondBranchOpcode(int Opc)172 static bool isFCondBranchOpcode(int Opc) { return Opc == SP::FBCOND; }
173 
isCondBranchOpcode(int Opc)174 static bool isCondBranchOpcode(int Opc) {
175   return isI32CondBranchOpcode(Opc) || isI64CondBranchOpcode(Opc) ||
176          isFCondBranchOpcode(Opc);
177 }
178 
isIndirectBranchOpcode(int Opc)179 static bool isIndirectBranchOpcode(int Opc) {
180   return Opc == SP::BINDrr || Opc == SP::BINDri;
181 }
182 
parseCondBranch(MachineInstr * LastInst,MachineBasicBlock * & Target,SmallVectorImpl<MachineOperand> & Cond)183 static void parseCondBranch(MachineInstr *LastInst, MachineBasicBlock *&Target,
184                             SmallVectorImpl<MachineOperand> &Cond) {
185   unsigned Opc = LastInst->getOpcode();
186   int64_t CC = LastInst->getOperand(1).getImm();
187 
188   // Push the branch opcode into Cond too so later in insertBranch
189   // it can use the information to emit the correct SPARC branch opcode.
190   Cond.push_back(MachineOperand::CreateImm(Opc));
191   Cond.push_back(MachineOperand::CreateImm(CC));
192 
193   Target = LastInst->getOperand(0).getMBB();
194 }
195 
/// analyzeBranch - Inspect the terminators at the end of \p MBB and, when
/// they form a pattern this target understands, fill in \p TBB (taken
/// block), \p FBB (fall-through/false block), and \p Cond (the opcode +
/// condition-code pair produced by parseCondBranch).  Returns false on
/// success and true when the block cannot be analyzed (indirect branch,
/// three or more terminators, or anything unrecognized).  When
/// \p AllowModify is true, dead trailing branches may be deleted.
bool SparcInstrInfo::analyzeBranch(MachineBasicBlock &MBB,
                                   MachineBasicBlock *&TBB,
                                   MachineBasicBlock *&FBB,
                                   SmallVectorImpl<MachineOperand> &Cond,
                                   bool AllowModify) const {
  MachineBasicBlock::iterator I = MBB.getLastNonDebugInstr();
  if (I == MBB.end())
    return false;

  // A non-terminator last instruction means the block falls through.
  if (!isUnpredicatedTerminator(*I))
    return false;

  // Get the last instruction in the block.
  MachineInstr *LastInst = &*I;
  unsigned LastOpc = LastInst->getOpcode();

  // If there is only one terminator instruction, process it.
  if (I == MBB.begin() || !isUnpredicatedTerminator(*--I)) {
    if (isUncondBranchOpcode(LastOpc)) {
      TBB = LastInst->getOperand(0).getMBB();
      return false;
    }
    if (isCondBranchOpcode(LastOpc)) {
      // Block ends with fall-through condbranch.
      parseCondBranch(LastInst, TBB, Cond);
      return false;
    }
    return true; // Can't handle indirect branch.
  }

  // Get the instruction before it if it is a terminator.
  MachineInstr *SecondLastInst = &*I;
  unsigned SecondLastOpc = SecondLastInst->getOpcode();

  // If AllowModify is true and the block ends with two or more unconditional
  // branches, delete all but the first unconditional branch.
  if (AllowModify && isUncondBranchOpcode(LastOpc)) {
    while (isUncondBranchOpcode(SecondLastOpc)) {
      LastInst->eraseFromParent();
      LastInst = SecondLastInst;
      LastOpc = LastInst->getOpcode();
      if (I == MBB.begin() || !isUnpredicatedTerminator(*--I)) {
        // Return now the only terminator is an unconditional branch.
        TBB = LastInst->getOperand(0).getMBB();
        return false;
      } else {
        SecondLastInst = &*I;
        SecondLastOpc = SecondLastInst->getOpcode();
      }
    }
  }

  // If there are three terminators, we don't know what sort of block this is.
  if (SecondLastInst && I != MBB.begin() && isUnpredicatedTerminator(*--I))
    return true;

  // If the block ends with a B and a Bcc, handle it.
  if (isCondBranchOpcode(SecondLastOpc) && isUncondBranchOpcode(LastOpc)) {
    parseCondBranch(SecondLastInst, TBB, Cond);
    FBB = LastInst->getOperand(0).getMBB();
    return false;
  }

  // If the block ends with two unconditional branches, handle it.  The second
  // one is not executed.
  if (isUncondBranchOpcode(SecondLastOpc) && isUncondBranchOpcode(LastOpc)) {
    TBB = SecondLastInst->getOperand(0).getMBB();
    return false;
  }

  // ...likewise if it ends with an indirect branch followed by an unconditional
  // branch.
  if (isIndirectBranchOpcode(SecondLastOpc) && isUncondBranchOpcode(LastOpc)) {
    // The trailing unconditional branch is unreachable; delete it if allowed,
    // but still report the block as unanalyzable (indirect branch).
    I = LastInst;
    if (AllowModify)
      I->eraseFromParent();
    return true;
  }

  // Otherwise, can't handle this.
  return true;
}
278 
/// insertBranch - Insert branch code at the end of \p MBB.  \p Cond is
/// either empty (emit an unconditional branch to \p TBB) or the pair built
/// by parseCondBranch: Cond[0] holds the original SPARC branch opcode and
/// Cond[1] the condition code.  Returns the number of instructions added
/// (1 or 2).  Byte-size accounting is unsupported, so \p BytesAdded must
/// be null.
unsigned SparcInstrInfo::insertBranch(MachineBasicBlock &MBB,
                                      MachineBasicBlock *TBB,
                                      MachineBasicBlock *FBB,
                                      ArrayRef<MachineOperand> Cond,
                                      const DebugLoc &DL,
                                      int *BytesAdded) const {
  assert(TBB && "insertBranch must not be told to insert a fallthrough");
  assert((Cond.size() <= 2) &&
         "Sparc branch conditions should have at most two components!");
  assert(!BytesAdded && "code size not handled");

  if (Cond.empty()) {
    assert(!FBB && "Unconditional branch with multiple successors!");
    // Prefer the V9 predicted form (BPA) when available.
    BuildMI(&MBB, DL, get(Subtarget.isV9() ? SP::BPA : SP::BA)).addMBB(TBB);
    return 1;
  }

  // Conditional branch
  unsigned Opc = Cond[0].getImm();
  unsigned CC = Cond[1].getImm();

  // Integer conditions re-use the exact opcode recorded by parseCondBranch;
  // floating-point conditions are always emitted as FBCOND.
  if (IsIntegerCC(CC)) {
    BuildMI(&MBB, DL, get(Opc)).addMBB(TBB).addImm(CC);
  } else {
    BuildMI(&MBB, DL, get(SP::FBCOND)).addMBB(TBB).addImm(CC);
  }
  if (!FBB)
    return 1;

  // Two-way branch: add the unconditional branch to the false block.
  BuildMI(&MBB, DL, get(Subtarget.isV9() ? SP::BPA : SP::BA)).addMBB(FBB);
  return 2;
}
311 
/// removeBranch - Delete all branch terminators (conditional and
/// unconditional) from the end of \p MBB, skipping over debug instructions.
/// Returns the number of branches removed.  Byte-size accounting is
/// unsupported, so \p BytesRemoved must be null.
unsigned SparcInstrInfo::removeBranch(MachineBasicBlock &MBB,
                                      int *BytesRemoved) const {
  assert(!BytesRemoved && "code size not handled");

  MachineBasicBlock::iterator I = MBB.end();
  unsigned Count = 0;
  while (I != MBB.begin()) {
    --I;

    if (I->isDebugInstr())
      continue;

    if (!isCondBranchOpcode(I->getOpcode()) &&
        !isUncondBranchOpcode(I->getOpcode()))
      break; // Not a branch

    I->eraseFromParent();
    // Erasing invalidated I; restart the scan from the end of the block.
    I = MBB.end();
    ++Count;
  }
  return Count;
}
334 
reverseBranchCondition(SmallVectorImpl<MachineOperand> & Cond) const335 bool SparcInstrInfo::reverseBranchCondition(
336     SmallVectorImpl<MachineOperand> &Cond) const {
337   assert(Cond.size() <= 2);
338   SPCC::CondCodes CC = static_cast<SPCC::CondCodes>(Cond[1].getImm());
339   Cond[1].setImm(GetOppositeBranchCondition(CC));
340   return false;
341 }
342 
/// copyPhysReg - Emit instructions copying \p SrcReg into \p DestReg.
/// Single registers are copied with one instruction (OR with %g0 for
/// integers, FMOVS/FMOVD/FMOVQ for FP where the subtarget supports them);
/// register pairs/quads without a native move are decomposed into
/// per-subregister moves, with implicit super-register operands added to
/// the last move so liveness information stays correct.
void SparcInstrInfo::copyPhysReg(MachineBasicBlock &MBB,
                                 MachineBasicBlock::iterator I,
                                 const DebugLoc &DL, MCRegister DestReg,
                                 MCRegister SrcReg, bool KillSrc) const {
  // When non-zero/non-null after the dispatch below, the copy is expanded
  // into numSubRegs moves of opcode movOpc over the listed sub-indices.
  unsigned numSubRegs = 0;
  unsigned movOpc     = 0;
  const unsigned *subRegIdx = nullptr;
  bool ExtraG0 = false; // ORrr needs %g0 as its first source operand.

  const unsigned DW_SubRegsIdx[]  = { SP::sub_even, SP::sub_odd };
  const unsigned DFP_FP_SubRegsIdx[]  = { SP::sub_even, SP::sub_odd };
  const unsigned QFP_DFP_SubRegsIdx[] = { SP::sub_even64, SP::sub_odd64 };
  const unsigned QFP_FP_SubRegsIdx[]  = { SP::sub_even, SP::sub_odd,
                                          SP::sub_odd64_then_sub_even,
                                          SP::sub_odd64_then_sub_odd };

  if (SP::IntRegsRegClass.contains(DestReg, SrcReg))
    // Integer move: DestReg = %g0 | SrcReg.
    BuildMI(MBB, I, DL, get(SP::ORrr), DestReg).addReg(SP::G0)
      .addReg(SrcReg, getKillRegState(KillSrc));
  else if (SP::IntPairRegClass.contains(DestReg, SrcReg)) {
    // 64-bit integer pair: two OR-with-%g0 moves, one per half.
    subRegIdx  = DW_SubRegsIdx;
    numSubRegs = 2;
    movOpc     = SP::ORrr;
    ExtraG0 = true;
  } else if (SP::FPRegsRegClass.contains(DestReg, SrcReg))
    BuildMI(MBB, I, DL, get(SP::FMOVS), DestReg)
      .addReg(SrcReg, getKillRegState(KillSrc));
  else if (SP::DFPRegsRegClass.contains(DestReg, SrcReg)) {
    if (Subtarget.isV9()) {
      // V9 has a native double-precision move.
      BuildMI(MBB, I, DL, get(SP::FMOVD), DestReg)
        .addReg(SrcReg, getKillRegState(KillSrc));
    } else {
      // Use two FMOVS instructions.
      subRegIdx  = DFP_FP_SubRegsIdx;
      numSubRegs = 2;
      movOpc     = SP::FMOVS;
    }
  } else if (SP::QFPRegsRegClass.contains(DestReg, SrcReg)) {
    if (Subtarget.isV9()) {
      if (Subtarget.hasHardQuad()) {
        // Native quad-precision move.
        BuildMI(MBB, I, DL, get(SP::FMOVQ), DestReg)
          .addReg(SrcReg, getKillRegState(KillSrc));
      } else {
        // Use two FMOVD instructions.
        subRegIdx  = QFP_DFP_SubRegsIdx;
        numSubRegs = 2;
        movOpc     = SP::FMOVD;
      }
    } else {
      // Use four FMOVS instructions.
      subRegIdx  = QFP_FP_SubRegsIdx;
      numSubRegs = 4;
      movOpc     = SP::FMOVS;
    }
  } else if (SP::ASRRegsRegClass.contains(DestReg) &&
             SP::IntRegsRegClass.contains(SrcReg)) {
    // Write an ancillary state register from an integer register.
    BuildMI(MBB, I, DL, get(SP::WRASRrr), DestReg)
        .addReg(SP::G0)
        .addReg(SrcReg, getKillRegState(KillSrc));
  } else if (SP::IntRegsRegClass.contains(DestReg) &&
             SP::ASRRegsRegClass.contains(SrcReg)) {
    // Read an ancillary state register into an integer register.
    BuildMI(MBB, I, DL, get(SP::RDASR), DestReg)
        .addReg(SrcReg, getKillRegState(KillSrc));
  } else
    llvm_unreachable("Impossible reg-to-reg copy");

  // Done unless the dispatch above requested a sub-register expansion.
  if (numSubRegs == 0 || subRegIdx == nullptr || movOpc == 0)
    return;

  const TargetRegisterInfo *TRI = &getRegisterInfo();
  MachineInstr *MovMI = nullptr;

  for (unsigned i = 0; i != numSubRegs; ++i) {
    Register Dst = TRI->getSubReg(DestReg, subRegIdx[i]);
    Register Src = TRI->getSubReg(SrcReg, subRegIdx[i]);
    assert(Dst && Src && "Bad sub-register");

    MachineInstrBuilder MIB = BuildMI(MBB, I, DL, get(movOpc), Dst);
    if (ExtraG0)
      MIB.addReg(SP::G0);
    MIB.addReg(Src);
    MovMI = MIB.getInstr();
  }
  // Add implicit super-register defs and kills to the last MovMI.
  MovMI->addRegisterDefined(DestReg, TRI);
  if (KillSrc)
    MovMI->addRegisterKilled(SrcReg, TRI);
}
431 
/// storeRegToStackSlot - Emit the store that spills \p SrcReg to stack slot
/// \p FI, choosing the store opcode from the register class (STXri for
/// 64-bit ints, STri for 32-bit ints, STDri for pairs, STFri/STDFri/STQFri
/// for FP single/double/quad).  A MachineMemOperand is attached so later
/// passes know the access size and alignment.
void SparcInstrInfo::storeRegToStackSlot(MachineBasicBlock &MBB,
                                         MachineBasicBlock::iterator I,
                                         Register SrcReg, bool isKill, int FI,
                                         const TargetRegisterClass *RC,
                                         const TargetRegisterInfo *TRI,
                                         Register VReg) const {
  // Borrow the debug location from the insertion point when one exists.
  DebugLoc DL;
  if (I != MBB.end()) DL = I->getDebugLoc();

  MachineFunction *MF = MBB.getParent();
  const MachineFrameInfo &MFI = MF->getFrameInfo();
  MachineMemOperand *MMO = MF->getMachineMemOperand(
      MachinePointerInfo::getFixedStack(*MF, FI), MachineMemOperand::MOStore,
      MFI.getObjectSize(FI), MFI.getObjectAlign(FI));

  // On the order of operands here: think "[FrameIdx + 0] = SrcReg".
  if (RC == &SP::I64RegsRegClass)
    BuildMI(MBB, I, DL, get(SP::STXri)).addFrameIndex(FI).addImm(0)
      .addReg(SrcReg, getKillRegState(isKill)).addMemOperand(MMO);
  else if (RC == &SP::IntRegsRegClass)
    BuildMI(MBB, I, DL, get(SP::STri)).addFrameIndex(FI).addImm(0)
      .addReg(SrcReg, getKillRegState(isKill)).addMemOperand(MMO);
  else if (RC == &SP::IntPairRegClass)
    BuildMI(MBB, I, DL, get(SP::STDri)).addFrameIndex(FI).addImm(0)
      .addReg(SrcReg, getKillRegState(isKill)).addMemOperand(MMO);
  else if (RC == &SP::FPRegsRegClass)
    BuildMI(MBB, I, DL, get(SP::STFri)).addFrameIndex(FI).addImm(0)
      .addReg(SrcReg,  getKillRegState(isKill)).addMemOperand(MMO);
  else if (SP::DFPRegsRegClass.hasSubClassEq(RC))
    BuildMI(MBB, I, DL, get(SP::STDFri)).addFrameIndex(FI).addImm(0)
      .addReg(SrcReg,  getKillRegState(isKill)).addMemOperand(MMO);
  else if (SP::QFPRegsRegClass.hasSubClassEq(RC))
    // Use STQFri irrespective of its legality. If STQ is not legal, it will be
    // lowered into two STDs in eliminateFrameIndex.
    BuildMI(MBB, I, DL, get(SP::STQFri)).addFrameIndex(FI).addImm(0)
      .addReg(SrcReg,  getKillRegState(isKill)).addMemOperand(MMO);
  else
    llvm_unreachable("Can't store this register to stack slot");
}
471 
/// loadRegFromStackSlot - Emit the load that reloads \p DestReg from stack
/// slot \p FI, choosing the opcode from the register class (LDXri/LDri/
/// LDDri for integers, LDFri/LDDFri/LDQFri for FP).  A MachineMemOperand is
/// attached so later passes know the access size and alignment.  This is
/// the mirror of storeRegToStackSlot.
void SparcInstrInfo::loadRegFromStackSlot(MachineBasicBlock &MBB,
                                          MachineBasicBlock::iterator I,
                                          Register DestReg, int FI,
                                          const TargetRegisterClass *RC,
                                          const TargetRegisterInfo *TRI,
                                          Register VReg) const {
  // Borrow the debug location from the insertion point when one exists.
  DebugLoc DL;
  if (I != MBB.end()) DL = I->getDebugLoc();

  MachineFunction *MF = MBB.getParent();
  const MachineFrameInfo &MFI = MF->getFrameInfo();
  MachineMemOperand *MMO = MF->getMachineMemOperand(
      MachinePointerInfo::getFixedStack(*MF, FI), MachineMemOperand::MOLoad,
      MFI.getObjectSize(FI), MFI.getObjectAlign(FI));

  if (RC == &SP::I64RegsRegClass)
    BuildMI(MBB, I, DL, get(SP::LDXri), DestReg).addFrameIndex(FI).addImm(0)
      .addMemOperand(MMO);
  else if (RC == &SP::IntRegsRegClass)
    BuildMI(MBB, I, DL, get(SP::LDri), DestReg).addFrameIndex(FI).addImm(0)
      .addMemOperand(MMO);
  else if (RC == &SP::IntPairRegClass)
    BuildMI(MBB, I, DL, get(SP::LDDri), DestReg).addFrameIndex(FI).addImm(0)
      .addMemOperand(MMO);
  else if (RC == &SP::FPRegsRegClass)
    BuildMI(MBB, I, DL, get(SP::LDFri), DestReg).addFrameIndex(FI).addImm(0)
      .addMemOperand(MMO);
  else if (SP::DFPRegsRegClass.hasSubClassEq(RC))
    BuildMI(MBB, I, DL, get(SP::LDDFri), DestReg).addFrameIndex(FI).addImm(0)
      .addMemOperand(MMO);
  else if (SP::QFPRegsRegClass.hasSubClassEq(RC))
    // Use LDQFri irrespective of its legality. If LDQ is not legal, it will be
    // lowered into two LDDs in eliminateFrameIndex.
    BuildMI(MBB, I, DL, get(SP::LDQFri), DestReg).addFrameIndex(FI).addImm(0)
      .addMemOperand(MMO);
  else
    llvm_unreachable("Can't load this register from stack slot");
}
510 
/// getGlobalBaseReg - Return the virtual register holding the PIC global
/// base address for \p MF, creating and caching it on first use.  The
/// GETPCX pseudo that materializes it is inserted at the top of the entry
/// block, and the register is remembered in SparcMachineFunctionInfo so
/// subsequent calls reuse it.
Register SparcInstrInfo::getGlobalBaseReg(MachineFunction *MF) const {
  SparcMachineFunctionInfo *SparcFI = MF->getInfo<SparcMachineFunctionInfo>();
  Register GlobalBaseReg = SparcFI->getGlobalBaseReg();
  if (GlobalBaseReg)
    return GlobalBaseReg;

  // Insert the set of GlobalBaseReg into the first MBB of the function
  MachineBasicBlock &FirstMBB = MF->front();
  MachineBasicBlock::iterator MBBI = FirstMBB.begin();
  MachineRegisterInfo &RegInfo = MF->getRegInfo();

  // Pointer-sized register class: 64-bit regs in 64-bit mode, else 32-bit.
  const TargetRegisterClass *PtrRC =
    Subtarget.is64Bit() ? &SP::I64RegsRegClass : &SP::IntRegsRegClass;
  GlobalBaseReg = RegInfo.createVirtualRegister(PtrRC);

  DebugLoc dl;

  BuildMI(FirstMBB, MBBI, dl, get(SP::GETPCX), GlobalBaseReg);
  SparcFI->setGlobalBaseReg(GlobalBaseReg);
  return GlobalBaseReg;
}
532 
expandPostRAPseudo(MachineInstr & MI) const533 bool SparcInstrInfo::expandPostRAPseudo(MachineInstr &MI) const {
534   switch (MI.getOpcode()) {
535   case TargetOpcode::LOAD_STACK_GUARD: {
536     assert(Subtarget.isTargetLinux() &&
537            "Only Linux target is expected to contain LOAD_STACK_GUARD");
538     // offsetof(tcbhead_t, stack_guard) from sysdeps/sparc/nptl/tls.h in glibc.
539     const int64_t Offset = Subtarget.is64Bit() ? 0x28 : 0x14;
540     MI.setDesc(get(Subtarget.is64Bit() ? SP::LDXri : SP::LDri));
541     MachineInstrBuilder(*MI.getParent()->getParent(), MI)
542         .addReg(SP::G7)
543         .addImm(Offset);
544     return true;
545   }
546   }
547   return false;
548 }
549