/external/llvm/lib/Target/ARM/ |
D | Thumb2SizeReduction.cpp |
      215  if (*Regs == ARM::CPSR)  in HasImplicitCPSRDef()
      263  if (Reg == 0 || Reg == ARM::CPSR)  in canAddPseudoFlagDep()
      345  if (Reg == 0 || Reg == ARM::CPSR)  in VerifyLowRegs()
      551  MI->getOperand(MCID.getNumOperands()-1).getReg() == ARM::CPSR)  in ReduceSpecial()
      701  HasCC = (MI->getOperand(NumOps-1).getReg() == ARM::CPSR);  in ReduceTo2Addr()
      769  if (!Reg || Reg == ARM::CPSR)  in ReduceToNarrow()
      797  HasCC = (MI->getOperand(NumOps-1).getReg() == ARM::CPSR);  in ReduceToNarrow()
      838  if (MO.isReg() && MO.isImplicit() && MO.getReg() == ARM::CPSR)  in ReduceToNarrow()
      862  if (MO.getReg() != ARM::CPSR)  in UpdateCPSRDef()
      877  if (MO.getReg() != ARM::CPSR)  in UpdateCPSRUse()
      [all …]
|
D | Thumb2ITBlockPass.cpp |
      88   if (Reg == ARM::CPSR)  in TrackDefUses()
      144  MI->getOperand(MCID.getNumOperands() - 1).getReg() == ARM::CPSR)  in MoveCopyOutOfITBlock()
|
D | ARMMCInstLower.cpp | 74 if (MO.isImplicit() && MO.getReg() != ARM::CPSR) in lowerOperand()
|
D | ARMInstrInfo.td |
      82    // SDTBinaryArithWithFlagsInOut - RES1, CPSR = op LHS, RHS, CPSR
      1349  /// AdjustInstrPostInstrSelection after giving them an optional CPSR operand.
      1350  let hasPostISelHook = 1, Defs = [CPSR] in {
      1356  [(set GPR:$Rd, CPSR, (opnode GPR:$Rn, so_imm:$imm))]>,
      1361  [(set GPR:$Rd, CPSR, (opnode GPR:$Rn, GPR:$Rm))]>,
      1368  [(set GPR:$Rd, CPSR, (opnode GPR:$Rn,
      1375  [(set GPR:$Rd, CPSR, (opnode GPR:$Rn,
      1383  let hasPostISelHook = 1, Defs = [CPSR] in {
      1389  [(set GPR:$Rd, CPSR, (opnode so_imm:$imm, GPR:$Rn))]>,
      1395  [(set GPR:$Rd, CPSR, (opnode so_reg_imm:$shift,
      [all …]
|
D | ARMFastISel.cpp |
      216   bool DefinesOptionalPredicate(MachineInstr *MI, bool *CPSR);
      230   bool ARMFastISel::DefinesOptionalPredicate(MachineInstr *MI, bool *CPSR) {  in DefinesOptionalPredicate() argument
      238   if (MO.getReg() == ARM::CPSR)  in DefinesOptionalPredicate()
      239   *CPSR = true;  in DefinesOptionalPredicate()
      276   bool CPSR = false;  in AddOptionalDefs() local
      277   if (DefinesOptionalPredicate(MI, &CPSR)) {  in AddOptionalDefs()
      278   if (CPSR)  in AddOptionalDefs()
      1278  .addMBB(TBB).addImm(ARMPred).addReg(ARM::CPSR);  in SelectBranch()
      1302  .addMBB(TBB).addImm(CCMode).addReg(ARM::CPSR);  in SelectBranch()
      1341  .addMBB(TBB).addImm(CCMode).addReg(ARM::CPSR);  in SelectBranch()
      [all …]
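
The ARMFastISel hits all implement the same check: after fast-isel picks an instruction, it scans the operands to see whether the optional cc_out operand was bound to ARM::CPSR (the flag-setting 'S' form) or left as register 0. Below is a minimal C++ sketch of that check, in the spirit of DefinesOptionalPredicate() rather than a copy of it; it assumes the usual in-tree ARM backend includes, where the generated ARM::CPSR enum is visible.

    // Sketch only, assuming it lives inside lib/Target/ARM so the generated
    // register enum is available.
    #include "MCTargetDesc/ARMMCTargetDesc.h"   // pulls in the ARM::* register enum
    #include "llvm/CodeGen/MachineInstr.h"
    #include "llvm/CodeGen/MachineOperand.h"

    using namespace llvm;

    // Returns true if any register operand of MI defines CPSR, i.e. the
    // instruction was selected in its flag-setting ('S') form.
    static bool definesCPSR(const MachineInstr &MI) {
      for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
        const MachineOperand &MO = MI.getOperand(i);
        if (MO.isReg() && MO.isDef() && MO.getReg() == ARM::CPSR)
          return true;
      }
      return false;
    }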
|
D | ARMInstrThumb2.td |
      574  /// changed to modify CPSR.
      700  /// instruction modifies the CPSR register.
      703  /// AdjustInstrPostInstrSelection after giving then an optional CPSR operand.
      704  let hasPostISelHook = 1, Defs = [CPSR] in {
      712  [(set rGPR:$Rd, CPSR, (opnode GPRnopc:$Rn,
      718  [(set rGPR:$Rd, CPSR, (opnode GPRnopc:$Rn,
      727  [(set rGPR:$Rd, CPSR, (opnode GPRnopc:$Rn,
      735  let hasPostISelHook = 1, Defs = [CPSR] in {
      741  [(set rGPR:$Rd, CPSR, (opnode t2_so_imm:$imm,
      748  [(set rGPR:$Rd, CPSR, (opnode t2_so_reg:$ShiftedRm,
      [all …]
|
D | ARMInstrThumb.td |
      851   let isCommutable = 1, Uses = [CPSR] in
      930   let isCompare = 1, Defs = [CPSR] in {
      945   } // isCompare = 1, Defs = [CPSR]
      948   let isCompare = 1, Defs = [CPSR] in {
      977   } // isCompare = 1, Defs = [CPSR]
      1039  (tMOVi8 tGPR:$Rdn, CPSR, imm0_255:$imm, 14, 0)>;
      1055  let Defs = [CPSR] in
      1136  let Uses = [CPSR] in
      1189  let isCompare = 1, isCommutable = 1, Defs = [CPSR] in
      1259  let isCall = 1, Defs = [R0, R12, LR, CPSR], Uses = [SP] in
      [all …]
|
D | ARM.td |
      102  /// Some instructions update CPSR partially, which can add false dependency for
      104  /// mapped to a separate physical register. Avoid partial CPSR update for these
      108  "Avoid CPSR partial update for OOO execution">;
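
These FeatureAvoidPartialCPSR lines describe a tuning flag: on out-of-order cores that rename the flag bits separately, an instruction that writes only part of CPSR creates a false dependency on the previous flags producer, so the backend prefers encodings that set all flags or none. A hedged C++ sketch of how a pass might consult such a flag; the avoidCPSRPartialUpdate() accessor name is an assumption, not something shown in this listing.

    // Sketch, not in-tree code. Assumes an ARMSubtarget-style query exposing the
    // "Avoid CPSR partial update for OOO execution" feature shown above.
    static bool shouldKeepWideEncoding(const ARMSubtarget &ST,
                                       bool NarrowFormSetsFlags) {
      // The narrow 16-bit Thumb form is forced to set flags; if the target wants
      // to avoid partial CPSR updates, stay with the 32-bit encoding instead.
      return NarrowFormSetsFlags && ST.avoidCPSRPartialUpdate();
    }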
|
D | ARMBaseInstrInfo.cpp |
      511   if ((MO.isRegMask() && MO.clobbersPhysReg(ARM::CPSR)) ||  in DefinesPredicate()
      512   (MO.isReg() && MO.isDef() && MO.getReg() == ARM::CPSR)) {  in DefinesPredicate()
      548   if (MO.getReg() != ARM::CPSR)  in IsCPSRDead()
      1667  if (CC == ARMCC::AL || PredReg != ARM::CPSR)  in commuteInstruction()
      2314  if (Instr.modifiesRegister(ARM::CPSR, TRI) ||  in optimizeCompareInstr()
      2315  Instr.readsRegister(ARM::CPSR, TRI))  in optimizeCompareInstr()
      2393  if (MO.isRegMask() && MO.clobbersPhysReg(ARM::CPSR)) {  in optimizeCompareInstr()
      2397  if (!MO.isReg() || MO.getReg() != ARM::CPSR)  in optimizeCompareInstr()
      2468  if ((*SI)->isLiveIn(ARM::CPSR))  in optimizeCompareInstr()
      2473  MI->getOperand(5).setReg(ARM::CPSR);  in optimizeCompareInstr()
      [all …]
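
The optimizeCompareInstr() matches show the safety condition for folding a compare into an earlier flag-setting instruction: no instruction between the two may read or clobber CPSR, and CPSR must not be live into a successor block. A simplified C++ sketch of the in-between scan, assuming both instructions sit in the same basic block; the real pass additionally handles register masks, calls, and the live-out check seen at line 2468.

    // Sketch of the "no CPSR traffic in between" walk; not the full
    // optimizeCompareInstr() logic.
    static bool cpsrUntouchedBetween(MachineBasicBlock::iterator Def,
                                     MachineBasicBlock::iterator Cmp,
                                     const TargetRegisterInfo *TRI) {
      MachineBasicBlock::iterator I = Def;
      for (++I; I != Cmp; ++I) {
        const MachineInstr &Instr = *I;
        if (Instr.modifiesRegister(ARM::CPSR, TRI) ||
            Instr.readsRegister(ARM::CPSR, TRI))
          return false;   // something else produces or consumes the flags
      }
      return true;
    }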
|
D | ARMBaseInstrInfo.h | 339 return MIB.addReg(ARM::CPSR, getDefRegState(true) | getDeadRegState(isDead));
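
Line 339 is the helper that appends the optional CPSR-def operand when an instruction is built in its flag-setting form, marking it dead when nothing reads the flags so the scheduler and register allocator are not over-constrained. Below is a C++ sketch of how such a helper is typically used; the t2SUBri opcode and every local name are illustrative choices, not taken from the listing.

    // Sketch: emit a flag-setting subtract and attach the CPSR def, in the style
    // of the helper at line 339 above.
    static void emitFlagSettingSub(MachineBasicBlock &MBB,
                                   MachineBasicBlock::iterator InsertPt,
                                   DebugLoc DL, const TargetInstrInfo *TII,
                                   unsigned DestReg, unsigned LHSReg, int Imm,
                                   bool FlagsAreDead) {
      MachineInstrBuilder MIB =
          BuildMI(MBB, InsertPt, DL, TII->get(ARM::t2SUBri), DestReg)
              .addReg(LHSReg)
              .addImm(Imm)
              .addImm(ARMCC::AL)   // predicate: always execute
              .addReg(0);          // no predicate register
      // The optional cc_out operand: define CPSR, dead if nothing reads the flags.
      MIB.addReg(ARM::CPSR, getDefRegState(true) | getDeadRegState(FlagsAreDead));
    }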
|
D | ARMISelLowering.cpp |
      3283  SDValue CCR = DAG.getRegister(ARM::CPSR, MVT::i32);  in LowerXALUO()
      3313  SDValue CCR = DAG.getRegister(ARM::CPSR, MVT::i32);  in LowerSELECT()
      3453  SDValue CCR = DAG.getRegister(ARM::CPSR, MVT::i32);  in LowerSELECT_CC()
      3494  SDValue CCR = DAG.getRegister(ARM::CPSR, MVT::i32);  in LowerSELECT_CC()
      3606  SDValue CCR = DAG.getRegister(ARM::CPSR, MVT::i32);  in OptimizeVFPBrcond()
      3638  SDValue CCR = DAG.getRegister(ARM::CPSR, MVT::i32);  in LowerBR_CC()
      3658  SDValue CCR = DAG.getRegister(ARM::CPSR, MVT::i32);  in LowerBR_CC()
      4025  SDValue CCR = DAG.getRegister(ARM::CPSR, MVT::i32);  in LowerShiftRightParts()
      4059  SDValue CCR = DAG.getRegister(ARM::CPSR, MVT::i32);  in LowerShiftLeftParts()
      6372  .addReg(ARM::CPSR, RegState::Define)  in SetupEntryBlockForSjLj()
      [all …]
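
Every LowerSELECT*/LowerBR_CC hit builds the same triple: an ARM condition-code constant, a CPSR register operand obtained with DAG.getRegister(), and the compare node that actually produces the flags, which are then combined into a conditional node such as ARMISD::CMOV. A hedged C++ sketch of that pattern as it would sit inside ARMISelLowering.cpp; getARMCmp() is the file-local helper those functions use, and the exact CMOV operand order should be treated as illustrative.

    // Sketch of the shared lowering idiom; would only compile inside
    // ARMISelLowering.cpp where getARMCmp() and ARMISD::CMOV are visible.
    static SDValue lowerSelectLike(SelectionDAG &DAG, SDLoc dl, EVT VT,
                                   SDValue LHS, SDValue RHS, ISD::CondCode CC,
                                   SDValue TrueVal, SDValue FalseVal) {
      SDValue ARMcc;                                      // filled in by getARMCmp()
      SDValue CCR = DAG.getRegister(ARM::CPSR, MVT::i32); // the flags register
      SDValue Cmp = getARMCmp(LHS, RHS, CC, ARMcc, DAG, dl);
      return DAG.getNode(ARMISD::CMOV, dl, VT, FalseVal, TrueVal, ARMcc, CCR, Cmp);
    }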
|
D | ARMAsmPrinter.cpp |
      1626  .addReg(ARM::CPSR)  in EmitInstruction()
      1645  .addReg(ARM::CPSR)  in EmitInstruction()
      1660  .addReg(ARM::CPSR)  in EmitInstruction()
|
D | README-Thumb.txt | 226 to toggle the 's' bit since they do not set CPSR when they are inside IT blocks.
|
D | ARMRegisterInfo.td |
      163  def CPSR : ARMReg<0, "cpsr">;
      263  def CCR : RegisterClass<"ARM", [i32], 32, (add CPSR)> {
|
/external/chromium_org/v8/src/arm/ |
D | constants-arm.h |
      222  CPSR = 0 << 22,  enumerator
      245  CPSR_c = CPSR | 1 << 16,
      246  CPSR_x = CPSR | 1 << 17,
      247  CPSR_s = CPSR | 1 << 18,
      248  CPSR_f = CPSR | 1 << 19,
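
The v8 hits use the name differently: here CPSR is the "status register selector" bit of the MSR field-mask encoding (0 selects CPSR, 1 would select SPSR), and CPSR_c/x/s/f set bits 16 through 19 to pick the control, extension, status, and flags fields. A small self-contained C++ sketch that reproduces those constants, folded into one enum for brevity, with an illustrative combination the way an msr field mask would use them.

    // Values copied from constants-arm.h as listed above; the main() usage is
    // illustrative only.
    #include <cstdio>

    enum SRegisterFieldMask {
      CPSR   = 0 << 22,          // operate on CPSR (a set bit 22 would mean SPSR)
      CPSR_c = CPSR | 1 << 16,   // control field (mode, I/F/T bits)
      CPSR_x = CPSR | 1 << 17,   // extension field
      CPSR_s = CPSR | 1 << 18,   // status field
      CPSR_f = CPSR | 1 << 19    // flags field (N, Z, C, V, Q)
    };

    int main() {
      // "msr cpsr_f, ..." selects only the flags field;
      // "msr cpsr_fs, ..." selects flags and status together.
      unsigned only_flags   = CPSR_f;
      unsigned flags_status = CPSR_f | CPSR_s;
      std::printf("cpsr_f = %#x, cpsr_fs = %#x\n", only_flags, flags_status);
      return 0;
    }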
|
/external/llvm/test/CodeGen/ARM/ |
D | avoid-cpsr-rmw.ll |
      3   ; Avoid some 's' 16-bit instruction which partially update CPSR (and add false
      4   ; dependency) when it isn't dependent on last CPSR defining instruction.
      19  ; Avoid partial CPSR dependency via loop backedge.
      53  ; Allow partial CPSR dependency when code size is the priority.
|
D | cse-call.ll | 5 ; Don't CSE a cmp across a call that clobbers CPSR.
|
D | crash-O0.ll | 7 ; This function would crash RegAllocFast because it tried to spill %CPSR.
|
D | sub-cmp-peephole.ll | 50 ; If CPSR is live-out, we can't remove cmp if there exists
|
D | interrupt-attr.ll | 13 ; Also need special function return setting pc and CPSR simultaneously.
|
/external/llvm/test/CodeGen/AArch64/ |
D | arm64-2011-04-21-CPSRBug.ll | 3 ; CPSR is not allocatable so fast allocatable wouldn't mark them killed.
|
D | arm64-2011-03-09-CPSRSpill.ll | 3 ; Can't copy or spill / restore CPSR.
|
D | arm64-csel.ll | 77 ; If CPSR is used multiple times and V flag is used, we don't remove cmp.
|
/external/llvm/test/CodeGen/Thumb2/ |
D | v8_IT_6.ll | 3 ; Narrow tORR cannot be predicated and set CPSR at the same time!
|
/external/llvm/lib/Target/ARM/AsmParser/ |
D | ARMAsmParser.cpp |
      1706  unsigned RegNum = getCondCode() == ARMCC::AL ? 0: ARM::CPSR;  in addCondCodeOperands()
      5401  Operands.push_back(ARMOperand::CreateCCOut(CarrySetting ? ARM::CPSR : 0,  in ParseInstruction()
      7321  Inst.getOperand(5).getReg() == (inITBlock() ? 0 : ARM::CPSR) &&  in processInstruction()
      7372  Inst.getOpcode() == ARM::t2MOVSsr ? ARM::CPSR : 0));  in processInstruction()
      7379  Inst.getOpcode() == ARM::t2MOVSsr ? ARM::CPSR : 0));  in processInstruction()
      7409  Inst.getOpcode() == ARM::t2MOVSsi ? ARM::CPSR : 0));  in processInstruction()
      7417  Inst.getOpcode() == ARM::t2MOVSsi ? ARM::CPSR : 0));  in processInstruction()
      7604  ((!inITBlock() && Inst.getOperand(5).getReg() != ARM::CPSR) ||  in processInstruction()
      7749  Inst.getOperand(4).getReg() == ARM::CPSR) ||  in processInstruction()
      7772  Inst.getOperand(4).getReg() == ARM::CPSR &&  in processInstruction()
      [all …]
|