//===-- MVETailPredUtils.h - Tail predication utility functions -*- C++-*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains utility functions for low overhead and tail predicated
// loops, shared between the ARMLowOverheadLoops pass and anywhere else that
// needs them.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_LIB_TARGET_ARM_MVETAILPREDUTILS_H
#define LLVM_LIB_TARGET_ARM_MVETAILPREDUTILS_H

#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/TargetInstrInfo.h"

namespace llvm {

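// Map a VCTP opcode to the equivalent tail-predicated loop start opcode of
// the same element size: DLSTP for do-loops, WLSTP for while-loops.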
static inline unsigned VCTPOpcodeToLSTP(unsigned Opcode, bool IsDoLoop) {
  switch (Opcode) {
  default:
    llvm_unreachable("unhandled vctp opcode");
    break;
  case ARM::MVE_VCTP8:
    return IsDoLoop ? ARM::MVE_DLSTP_8 : ARM::MVE_WLSTP_8;
  case ARM::MVE_VCTP16:
    return IsDoLoop ? ARM::MVE_DLSTP_16 : ARM::MVE_WLSTP_16;
  case ARM::MVE_VCTP32:
    return IsDoLoop ? ARM::MVE_DLSTP_32 : ARM::MVE_WLSTP_32;
  case ARM::MVE_VCTP64:
    return IsDoLoop ? ARM::MVE_DLSTP_64 : ARM::MVE_WLSTP_64;
  }
  return 0;
}

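// Return the number of elements processed per iteration for the given VCTP
// opcode, i.e. the number of lanes of that element size in a 128-bit MVE
// vector.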
static inline unsigned getTailPredVectorWidth(unsigned Opcode) {
  switch (Opcode) {
  default:
    llvm_unreachable("unhandled vctp opcode");
  case ARM::MVE_VCTP8:
    return 16;
  case ARM::MVE_VCTP16:
    return 8;
  case ARM::MVE_VCTP32:
    return 4;
  case ARM::MVE_VCTP64:
    return 2;
  }
  return 0;
}

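// Return true if MI is one of the MVE VCTP instructions, which set a vector
// predicate from the number of elements that remain to be processed.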
static inline bool isVCTP(const MachineInstr *MI) {
  switch (MI->getOpcode()) {
  default:
    break;
  case ARM::MVE_VCTP8:
  case ARM::MVE_VCTP16:
  case ARM::MVE_VCTP32:
  case ARM::MVE_VCTP64:
    return true;
  }
  return false;
}

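// Return true if MI is one of the low-overhead loop start pseudo instructions.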
static inline bool isLoopStart(MachineInstr &MI) {
  return MI.getOpcode() == ARM::t2DoLoopStart ||
         MI.getOpcode() == ARM::t2DoLoopStartTP ||
         MI.getOpcode() == ARM::t2WhileLoopStart;
}

// WhileLoopStart holds the exit block, so produce a cmp lr, 0 and then a
// beq that branches to the exit block.
inline void RevertWhileLoopStart(MachineInstr *MI, const TargetInstrInfo *TII,
                                 unsigned BrOpc = ARM::t2Bcc) {
  MachineBasicBlock *MBB = MI->getParent();

  // Cmp
  MachineInstrBuilder MIB =
      BuildMI(*MBB, MI, MI->getDebugLoc(), TII->get(ARM::t2CMPri));
  MIB.add(MI->getOperand(0));
  MIB.addImm(0);
  MIB.addImm(ARMCC::AL);
  MIB.addReg(ARM::NoRegister);

  // Branch
  MIB = BuildMI(*MBB, MI, MI->getDebugLoc(), TII->get(BrOpc));
  MIB.add(MI->getOperand(1)); // branch target
  MIB.addImm(ARMCC::EQ);      // condition code
  MIB.addReg(ARM::CPSR);

  MI->eraseFromParent();
}

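// Revert a DoLoopStart pseudo to a plain tMOVr that copies the loop count
// into the destination register.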
inline void RevertDoLoopStart(MachineInstr *MI, const TargetInstrInfo *TII) {
  MachineBasicBlock *MBB = MI->getParent();
  BuildMI(*MBB, MI, MI->getDebugLoc(), TII->get(ARM::tMOVr))
      .add(MI->getOperand(0))
      .add(MI->getOperand(1))
      .add(predOps(ARMCC::AL));

  MI->eraseFromParent();
}

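// Revert a LoopDec pseudo to a t2SUBri. When SetFlags is true the CPSR
// operand is marked as a def, so the subtract also sets the flags (a subs).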
inline void RevertLoopDec(MachineInstr *MI, const TargetInstrInfo *TII,
                          bool SetFlags = false) {
  MachineBasicBlock *MBB = MI->getParent();

  MachineInstrBuilder MIB =
      BuildMI(*MBB, MI, MI->getDebugLoc(), TII->get(ARM::t2SUBri));
  MIB.add(MI->getOperand(0));
  MIB.add(MI->getOperand(1));
  MIB.add(MI->getOperand(2));
  MIB.addImm(ARMCC::AL);
  MIB.addReg(0);

  if (SetFlags) {
    MIB.addReg(ARM::CPSR);
    MIB->getOperand(5).setIsDef(true);
  } else
    MIB.addReg(0);

  MI->eraseFromParent();
}

// Generate a subs, or sub and cmp, and a branch instead of an LE.
inline void RevertLoopEnd(MachineInstr *MI, const TargetInstrInfo *TII,
                          unsigned BrOpc = ARM::t2Bcc, bool SkipCmp = false) {
  MachineBasicBlock *MBB = MI->getParent();

  // Create cmp
  if (!SkipCmp) {
    MachineInstrBuilder MIB =
        BuildMI(*MBB, MI, MI->getDebugLoc(), TII->get(ARM::t2CMPri));
    MIB.add(MI->getOperand(0));
    MIB.addImm(0);
    MIB.addImm(ARMCC::AL);
    MIB.addReg(ARM::NoRegister);
  }

  // Create bne
  MachineInstrBuilder MIB =
      BuildMI(*MBB, MI, MI->getDebugLoc(), TII->get(BrOpc));
  MIB.add(MI->getOperand(1)); // branch target
  MIB.addImm(ARMCC::NE);      // condition code
  MIB.addReg(ARM::CPSR);
  MI->eraseFromParent();
}

} // end namespace llvm

#endif // LLVM_LIB_TARGET_ARM_MVETAILPREDUTILS_H