/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/ARM/ |
D | MLxExpansionPass.cpp |
    94  MachineInstr *DefMI = MRI->getVRegDef(Reg);  in getAccDefMI() local
    96  if (DefMI->getParent() != MBB)  in getAccDefMI()
    98  if (DefMI->isCopyLike()) {  in getAccDefMI()
    99  Reg = DefMI->getOperand(1).getReg();  in getAccDefMI()
    101  DefMI = MRI->getVRegDef(Reg);  in getAccDefMI()
    104  } else if (DefMI->isInsertSubreg()) {  in getAccDefMI()
    105  Reg = DefMI->getOperand(2).getReg();  in getAccDefMI()
    107  DefMI = MRI->getVRegDef(Reg);  in getAccDefMI()
    113  return DefMI;  in getAccDefMI()
    146  MachineInstr *DefMI = MRI->getVRegDef(Reg);  in hasLoopHazard() local
    [all …]
|
D | ARMHazardRecognizer.cpp |
    18  static bool hasRAWHazard(MachineInstr *DefMI, MachineInstr *MI,  in hasRAWHazard() argument
    29  return MI->readsRegister(DefMI->getOperand(0).getReg(), &TRI);  in hasRAWHazard()
    44  MachineInstr *DefMI = LastMI;  in getHazardType() local
    57  DefMI = &*I;  in getHazardType()
    61  if (TII.isFpMLxInstruction(DefMI->getOpcode()) &&  in getHazardType()
    63  hasRAWHazard(DefMI, MI, TII.getRegisterInfo()))) {  in getHazardType()
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/CodeGen/ |
D | TargetSchedule.cpp |
    185  const MachineInstr *DefMI, unsigned DefOperIdx,  in computeOperandLatency() argument
    189  return TII->defaultDefLatency(SchedModel, *DefMI);  in computeOperandLatency()
    194  OperLatency = TII->getOperandLatency(&InstrItins, *DefMI, DefOperIdx,  in computeOperandLatency()
    198  unsigned DefClass = DefMI->getDesc().getSchedClass();  in computeOperandLatency()
    205  unsigned InstrLatency = TII->getInstrLatency(&InstrItins, *DefMI);  in computeOperandLatency()
    213  std::max(InstrLatency, TII->defaultDefLatency(SchedModel, *DefMI));  in computeOperandLatency()
    217  const MCSchedClassDesc *SCDesc = resolveSchedClass(DefMI);  in computeOperandLatency()
    218  unsigned DefIdx = findDefIdx(DefMI, DefOperIdx);  in computeOperandLatency()
    241  if (SCDesc->isValid() && !DefMI->getOperand(DefOperIdx).isImplicit()  in computeOperandLatency()
    242  && !DefMI->getDesc().OpInfo[DefOperIdx].isOptionalDef()  in computeOperandLatency()
    [all …]
|
D | LiveRangeEdit.cpp |
    71  const MachineInstr *DefMI,  in checkRematerializable() argument
    73  assert(DefMI && "Missing instruction");  in checkRematerializable()
    75  if (!TII.isTriviallyReMaterializable(*DefMI, aa))  in checkRematerializable()
    90  MachineInstr *DefMI = LIS.getInstructionFromIndex(OrigVNI->def);  in scanRemattable() local
    91  if (!DefMI)  in scanRemattable()
    93  checkRematerializable(OrigVNI, DefMI, aa);  in scanRemattable()
    187  MachineInstr *DefMI = nullptr, *UseMI = nullptr;  in foldAsLoad() local
    193  if (DefMI && DefMI != MI)  in foldAsLoad()
    197  DefMI = MI;  in foldAsLoad()
    207  if (!DefMI || !UseMI)  in foldAsLoad()
    [all …]
|
D | MachineTraceMetrics.cpp |
    628  const MachineInstr *DefMI;  member
    632  DataDep(const MachineInstr *DefMI, unsigned DefOp, unsigned UseOp)  in DataDep()
    633  : DefMI(DefMI), DefOp(DefOp), UseOp(UseOp) {}  in DataDep()
    641  DefMI = DefI->getParent();  in DataDep()
    771  const MachineInstr *DefMI = MTM.MRI->getVRegDef(LIR.Reg);  in computeCrossBlockCriticalPath() local
    773  const TraceBlockInfo &DefTBI = BlockInfo[DefMI->getParent()->getNumber()];  in computeCrossBlockCriticalPath()
    776  unsigned Len = LIR.Height + Cycles[DefMI].Depth;  in computeCrossBlockCriticalPath()
    796  BlockInfo[Dep.DefMI->getParent()->getNumber()];  in updateDepth()
    801  unsigned DepCycle = Cycles.lookup(Dep.DefMI).Depth;  in updateDepth()
    803  if (!Dep.DefMI->isTransient())  in updateDepth()
    [all …]
|
D | RegisterCoalescer.cpp |
    807  MachineInstr *DefMI = LIS->getInstructionFromIndex(AValNo->def);  in removeCopyByCommutingDef() local
    808  if (!DefMI)  in removeCopyByCommutingDef()
    810  if (!DefMI->isCommutable())  in removeCopyByCommutingDef()
    814  int DefIdx = DefMI->findRegisterDefOperandIdx(IntA.reg);  in removeCopyByCommutingDef()
    817  if (!DefMI->isRegTiedToUseOperand(DefIdx, &UseOpIdx))  in removeCopyByCommutingDef()
    830  if (!TII->findCommutedOpIndices(*DefMI, UseOpIdx, NewDstIdx))  in removeCopyByCommutingDef()
    833  MachineOperand &NewDstMO = DefMI->getOperand(NewDstIdx);  in removeCopyByCommutingDef()
    858  << *DefMI);  in removeCopyByCommutingDef()
    862  MachineBasicBlock *MBB = DefMI->getParent();  in removeCopyByCommutingDef()
    864  TII->commuteInstruction(*DefMI, false, UseOpIdx, NewDstIdx);  in removeCopyByCommutingDef()
    [all …]
|
D | PHIElimination.cpp |
    172  for (MachineInstr *DefMI : ImpDefs) {  in runOnMachineFunction()
    173  Register DefReg = DefMI->getOperand(0).getReg();  in runOnMachineFunction()
    176  LIS->RemoveMachineInstrFromMaps(*DefMI);  in runOnMachineFunction()
    177  DefMI->eraseFromParent();  in runOnMachineFunction()
    410  if (MachineInstr *DefMI = MRI->getVRegDef(SrcReg))  in LowerPHINode() local
    411  if (DefMI->isImplicitDef())  in LowerPHINode()
    412  ImpDefs.insert(DefMI);  in LowerPHINode()
|
D | MachineCSE.cpp |
    175  MachineInstr *DefMI = MRI->getVRegDef(Reg);  in INITIALIZE_PASS_DEPENDENCY() local
    176  if (!DefMI->isCopy())  in INITIALIZE_PASS_DEPENDENCY()
    178  Register SrcReg = DefMI->getOperand(1).getReg();  in INITIALIZE_PASS_DEPENDENCY()
    181  if (DefMI->getOperand(0).getSubReg())  in INITIALIZE_PASS_DEPENDENCY()
    195  if (DefMI->getOperand(1).getSubReg())  in INITIALIZE_PASS_DEPENDENCY()
    199  LLVM_DEBUG(dbgs() << "Coalescing: " << *DefMI);  in INITIALIZE_PASS_DEPENDENCY()
    210  DefMI->changeDebugValuesDefReg(SrcReg);  in INITIALIZE_PASS_DEPENDENCY()
    212  DefMI->eraseFromParent();  in INITIALIZE_PASS_DEPENDENCY()
|
D | TwoAddressInstructionPass.cpp |
    346  for (MachineInstr &DefMI : MRI->def_instructions(Reg)) {  in getSingleDef()
    347  if (DefMI.getParent() != BB || DefMI.isDebugValue())  in getSingleDef()
    350  Ret = &DefMI;  in getSingleDef()
    351  else if (Ret != &DefMI)  in getSingleDef()
    474  MachineInstr *DefMI = &MI;  in isKilled() local
    480  if (!isPlainlyKilled(DefMI, Reg, LIS))  in isKilled()
    489  DefMI = Begin->getParent();  in isKilled()
    494  if (!isCopyToReg(*DefMI, TII, SrcReg, DstReg, IsSrcPhys, IsDstPhys))  in isKilled()
    1026  for (MachineInstr &DefMI : MRI->def_instructions(Reg)) {  in isDefTooClose()
    1027  if (DefMI.getParent() != MBB || DefMI.isCopy() || DefMI.isCopyLike())  in isDefTooClose()
    [all …]
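
The getSingleDef() hits above show a common scan: walk every definition of a virtual register and accept it only if exactly one non-debug def sits in the block of interest. A minimal standalone sketch of that pattern, written as a hypothetical helper rather than the pass's own code, could look like this:

    #include "llvm/CodeGen/MachineBasicBlock.h"
    #include "llvm/CodeGen/MachineInstr.h"
    #include "llvm/CodeGen/MachineRegisterInfo.h"

    using namespace llvm;

    // Return the unique non-debug instruction in BB that defines Reg, or
    // nullptr if BB contains no such def or more than one.
    static MachineInstr *getSingleDefInBlock(Register Reg,
                                             const MachineBasicBlock *BB,
                                             const MachineRegisterInfo &MRI) {
      MachineInstr *Ret = nullptr;
      for (MachineInstr &DefMI : MRI.def_instructions(Reg)) {
        if (DefMI.getParent() != BB || DefMI.isDebugValue())
          continue; // defs in other blocks and debug values don't count
        if (!Ret)
          Ret = &DefMI;
        else if (Ret != &DefMI)
          return nullptr; // a second def in this block: not a single def
      }
      return Ret;
    }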
|
D | DetectDeadLanes.cpp |
    359  const MachineInstr &DefMI = *Def.getParent();  in determineInitialDefinedLanes() local
    360  if (lowersToCopies(DefMI)) {  in determineInitialDefinedLanes()
    377  for (const MachineOperand &MO : DefMI.uses()) {  in determineInitialDefinedLanes()
    387  } else if (isCrossCopy(*MRI, DefMI, DefRC, MO)) {  in determineInitialDefinedLanes()
    404  unsigned OpNum = DefMI.getOperandNo(&MO);  in determineInitialDefinedLanes()
    409  if (DefMI.isImplicitDef() || Def.isDead())  in determineInitialDefinedLanes()
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/AArch64/ |
D | AArch64CondBrTuning.cpp |
    66  bool tryToTuneBranch(MachineInstr &MI, MachineInstr &DefMI);
    143  MachineInstr &DefMI) {  in tryToTuneBranch() argument
    145  if (MI.getParent() != DefMI.getParent())  in tryToTuneBranch()
    151  switch (DefMI.getOpcode()) {  in tryToTuneBranch()
    197  MachineBasicBlock::iterator I(DefMI), E(MI);  in tryToTuneBranch()
    204  LLVM_DEBUG(DefMI.print(dbgs()));  in tryToTuneBranch()
    208  NewCmp = convertToFlagSetting(DefMI, IsFlagSetting);  in tryToTuneBranch()
    256  MachineBasicBlock::iterator I(DefMI), E(MI);  in tryToTuneBranch()
    263  LLVM_DEBUG(DefMI.print(dbgs()));  in tryToTuneBranch()
    267  NewCmp = convertToFlagSetting(DefMI, IsFlagSetting);  in tryToTuneBranch()
    [all …]
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/CodeGen/GlobalISel/ |
D | Utils.cpp |
    305  auto *DefMI = MRI.getVRegDef(Reg);  in getDefIgnoringCopies() local
    306  auto DstTy = MRI.getType(DefMI->getOperand(0).getReg());  in getDefIgnoringCopies()
    309  while (DefMI->getOpcode() == TargetOpcode::COPY) {  in getDefIgnoringCopies()
    310  Register SrcReg = DefMI->getOperand(1).getReg();  in getDefIgnoringCopies()
    314  DefMI = MRI.getVRegDef(SrcReg);  in getDefIgnoringCopies()
    316  return DefMI;  in getDefIgnoringCopies()
    321  MachineInstr *DefMI = getDefIgnoringCopies(Reg, MRI);  in getOpcodeDef() local
    322  return DefMI && DefMI->getOpcode() == Opcode ? DefMI : nullptr;  in getOpcodeDef()
    391  const MachineInstr *DefMI = MRI.getVRegDef(Val);  in isKnownNeverNaN() local
    392  if (!DefMI)  in isKnownNeverNaN()
    [all …]
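
The getDefIgnoringCopies() hits above walk a virtual register's def chain, looking through plain COPY instructions until a "real" defining instruction is reached. A simplified sketch of that idea follows; it is a hypothetical helper, not the GlobalISel utility itself, and it skips the LLT type checks visible in the real code, simply stopping when the chain leaves virtual registers:

    #include <cassert>

    #include "llvm/CodeGen/MachineInstr.h"
    #include "llvm/CodeGen/MachineRegisterInfo.h"
    #include "llvm/CodeGen/TargetOpcodes.h"

    using namespace llvm;

    // Walk the def chain of a virtual register, skipping COPY instructions,
    // and return the first non-copy defining instruction (or nullptr if the
    // chain reaches a copy from a physical register).
    static MachineInstr *skipCopies(Register Reg, const MachineRegisterInfo &MRI) {
      assert(Reg.isVirtual() && "expected a virtual register");
      MachineInstr *DefMI = MRI.getVRegDef(Reg);
      while (DefMI && DefMI->getOpcode() == TargetOpcode::COPY) {
        Register SrcReg = DefMI->getOperand(1).getReg();
        if (!SrcReg.isVirtual())
          return nullptr; // physical-register copy ends the walk
        DefMI = MRI.getVRegDef(SrcReg);
      }
      return DefMI;
    }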
|
D | CombinerHelper.cpp |
    350  MachineIRBuilder &Builder, MachineInstr &DefMI, MachineOperand &UseMO,  in InsertInsnsWithoutSideEffectsBeforeUse() argument
    366  if (InsertBB == DefMI.getParent()) {  in InsertInsnsWithoutSideEffectsBeforeUse()
    367  MachineBasicBlock::iterator InsertPt = &DefMI;  in InsertInsnsWithoutSideEffectsBeforeUse()
    562  bool CombinerHelper::isPredecessor(MachineInstr &DefMI, MachineInstr &UseMI) {  in isPredecessor() argument
    563  assert(DefMI.getParent() == UseMI.getParent());  in isPredecessor()
    564  if (&DefMI == &UseMI)  in isPredecessor()
    568  MachineBasicBlock::const_iterator I = DefMI.getParent()->begin();  in isPredecessor()
    569  for (; &*I != &DefMI && &*I != &UseMI; ++I)  in isPredecessor()
    570  return &*I == &DefMI;  in isPredecessor()
    575  bool CombinerHelper::dominates(MachineInstr &DefMI, MachineInstr &UseMI) {  in dominates() argument
    [all …]
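
The isPredecessor() hits above answer a simple ordering question for two instructions in the same basic block: scanning from the block's start, which one appears first? A minimal sketch of that check, under the same same-block assumption and written as a hypothetical free function rather than CombinerHelper's own member:

    #include <cassert>

    #include "llvm/CodeGen/MachineBasicBlock.h"
    #include "llvm/CodeGen/MachineInstr.h"
    #include "llvm/Support/ErrorHandling.h"

    using namespace llvm;

    // True if A appears before B in their (shared) parent block.
    static bool comesBefore(const MachineInstr &A, const MachineInstr &B) {
      assert(A.getParent() == B.getParent() && "expected a single block");
      if (&A == &B)
        return false;
      for (const MachineInstr &MI : *A.getParent()) {
        if (&MI == &A)
          return true;  // A reached first, so it precedes B
        if (&MI == &B)
          return false; // B reached first
      }
      llvm_unreachable("instructions not found in their parent block");
    }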
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/X86/ |
D | X86OptimizeLEAs.cpp |
    352  for (auto DefMI : List) {  in chooseBestLEA() local
    354  int64_t AddrDispShiftTemp = getAddrDispShift(MI, MemOpNo, *DefMI, 1);  in chooseBestLEA()
    366  MRI->getRegClass(DefMI->getOperand(0).getReg()))  in chooseBestLEA()
    373  int DistTemp = calcInstrDist(*DefMI, MI);  in chooseBestLEA()
    383  BestLEA = DefMI;  in chooseBestLEA()
    529  MachineInstr *DefMI;  in removeRedundantAddrCalc() local
    532  if (!chooseBestLEA(Insns->second, MI, DefMI, AddrDispShift, Dist))  in removeRedundantAddrCalc()
    542  DefMI->removeFromParent();  in removeRedundantAddrCalc()
    543  MBB->insert(MachineBasicBlock::iterator(&MI), DefMI);  in removeRedundantAddrCalc()
    544  InstrPos[DefMI] = InstrPos[&MI] - 1;  in removeRedundantAddrCalc()
    [all …]
|
D | X86CallFrameOptimization.cpp |
    618  MachineInstr &DefMI = *MRI->getVRegDef(Reg);  in canFoldIntoRegPush() local
    622  if ((DefMI.getOpcode() != X86::MOV32rm &&  in canFoldIntoRegPush()
    623  DefMI.getOpcode() != X86::MOV64rm) ||  in canFoldIntoRegPush()
    624  DefMI.getParent() != FrameSetup->getParent())  in canFoldIntoRegPush()
    629  for (MachineBasicBlock::iterator I = DefMI; I != FrameSetup; ++I)  in canFoldIntoRegPush()
    633  return &DefMI;  in canFoldIntoRegPush()
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/Mips/ |
D | MipsOptimizePICCall.cpp |
    281  MachineInstr *DefMI = MRI.getVRegDef(Reg);  in isCallViaRegister() local
    283  assert(DefMI);  in isCallViaRegister()
    287  if (!DefMI->mayLoad() || DefMI->getNumOperands() < 3)  in isCallViaRegister()
    290  unsigned Flags = DefMI->getOperand(2).getTargetFlags();  in isCallViaRegister()
    296  assert(DefMI->hasOneMemOperand());  in isCallViaRegister()
    297  Val = (*DefMI->memoperands_begin())->getValue();  in isCallViaRegister()
    299  Val = (*DefMI->memoperands_begin())->getPseudoValue();  in isCallViaRegister()
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/BPF/ |
D | BPFMIPeephole.cpp |
    415  MachineInstr *DefMI;  in eliminateTruncSeq() local
    439  DefMI = MRI->getVRegDef(SrcReg);  in eliminateTruncSeq()
    440  if (DefMI)  in eliminateTruncSeq()
    446  DefMI = MRI->getVRegDef(SrcReg);  in eliminateTruncSeq()
    448  if (!DefMI)  in eliminateTruncSeq()
    462  if (DefMI->isPHI()) {  in eliminateTruncSeq()
    465  for (unsigned i = 1, e = DefMI->getNumOperands(); i < e; i += 2) {  in eliminateTruncSeq()
    466  MachineOperand &opnd = DefMI->getOperand(i);  in eliminateTruncSeq()
    482  } else if (!TruncSizeCompatible(TruncSize, DefMI->getOpcode())) {  in eliminateTruncSeq()
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/PowerPC/ |
D | PPCInstrInfo.h |
    127  unsigned ConstantOpNo, MachineInstr &DefMI,
    132  unsigned ConstantOpNo, MachineInstr &DefMI,
    147  bool isDefMIElgibleForForwarding(MachineInstr &DefMI,
    152  const MachineInstr &DefMI,
    156  const MachineInstr &DefMI,
    213  const MachineInstr &DefMI, unsigned DefIdx,
    224  const MachineInstr &DefMI,  in hasLowDefLatency() argument
    307  bool FoldImmediate(MachineInstr &UseMI, MachineInstr &DefMI, unsigned Reg,
|
D | PPCMIPeephole.cpp |
    353  MachineInstr *DefMI = MRI->getVRegDef(TrueReg1);  in simplifyCode() local
    355  if (!DefMI)  in simplifyCode()
    358  unsigned DefOpc = DefMI->getOpcode();  in simplifyCode()
    368  TRI->lookThruCopyLike(DefMI->getOperand(1).getReg(), MRI);  in simplifyCode()
    391  unsigned DefReg1 = DefMI->getOperand(1).getReg();  in simplifyCode()
    392  unsigned DefReg2 = DefMI->getOperand(2).getReg();  in simplifyCode()
    393  unsigned DefImmed = DefMI->getOperand(3).getImm();  in simplifyCode()
    437  .add(DefMI->getOperand(1));  in simplifyCode()
    442  (DefMI->getOperand(2).getImm() == 0 ||  in simplifyCode()
    443  DefMI->getOperand(2).getImm() == 3)) {  in simplifyCode()
    [all …]
|
D | PPCVSXSwapRemoval.cpp |
    617  MachineInstr* DefMI = MRI->getVRegDef(Reg);  in formWebs() local
    618  assert(SwapMap.find(DefMI) != SwapMap.end() &&  in formWebs()
    620  int DefIdx = SwapMap[DefMI];  in formWebs()
    628  LLVM_DEBUG(DefMI->dump());  in formWebs()
    699  MachineInstr *DefMI = MRI->getVRegDef(UseReg);  in recordUnoptimizableWebs() local
    700  Register DefReg = DefMI->getOperand(0).getReg();  in recordUnoptimizableWebs()
    701  int DefIdx = SwapMap[DefMI];  in recordUnoptimizableWebs()
    711  LLVM_DEBUG(DefMI->dump());  in recordUnoptimizableWebs()
    730  LLVM_DEBUG(DefMI->dump());  in recordUnoptimizableWebs()
    776  MachineInstr *DefMI = MRI->getVRegDef(UseReg);  in markSwapsForRemoval() local
    [all …]
|
D | PPCInstrInfo.cpp |
    176  const MachineInstr &DefMI, unsigned DefIdx,  in getOperandLatency() argument
    179  int Latency = PPCGenInstrInfo::getOperandLatency(ItinData, DefMI, DefIdx,  in getOperandLatency()
    182  if (!DefMI.getParent())  in getOperandLatency()
    185  const MachineOperand &DefMO = DefMI.getOperand(DefIdx);  in getOperandLatency()
    191  &DefMI.getParent()->getParent()->getRegInfo();  in getOperandLatency()
    201  Latency = getInstrLatency(ItinData, DefMI);  in getOperandLatency()
    1324  bool PPCInstrInfo::FoldImmediate(MachineInstr &UseMI, MachineInstr &DefMI,  in FoldImmediate() argument
    1328  unsigned DefOpc = DefMI.getOpcode();  in FoldImmediate()
    1331  if (!DefMI.getOperand(1).isImm())  in FoldImmediate()
    1333  if (DefMI.getOperand(1).getImm() != 0)  in FoldImmediate()
    [all …]
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/lib/Target/Lanai/ |
D | LanaiInstrInfo.cpp |
    498  MachineInstr *DefMI = canFoldIntoSelect(MI.getOperand(1).getReg(), MRI);  in optimizeSelect() local
    499  bool Invert = !DefMI;  in optimizeSelect()
    500  if (!DefMI)  in optimizeSelect()
    501  DefMI = canFoldIntoSelect(MI.getOperand(2).getReg(), MRI);  in optimizeSelect()
    502  if (!DefMI)  in optimizeSelect()
    514  BuildMI(*MI.getParent(), MI, MI.getDebugLoc(), DefMI->getDesc(), DestReg);  in optimizeSelect()
    517  const MCInstrDesc &DefDesc = DefMI->getDesc();  in optimizeSelect()
    520  NewMI.add(DefMI->getOperand(i));  in optimizeSelect()
    538  SeenMIs.erase(DefMI);  in optimizeSelect()
    544  if (DefMI->getParent() != MI.getParent())  in optimizeSelect()
    [all …]
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/include/llvm/CodeGen/GlobalISel/ |
D | LegalizationArtifactCombiner.h |
    203  if (MachineInstr *DefMI = getOpcodeDef(TargetOpcode::G_IMPLICIT_DEF,  in tryFoldImplicitDef() local
    226  markInstAndDefDead(MI, *DefMI, DeadInsts);  in tryFoldImplicitDef()
    575  void markInstAndDefDead(MachineInstr &MI, MachineInstr &DefMI,  in markInstAndDefDead() argument
    589  while (PrevMI != &DefMI) {  in markInstAndDefDead()
    594  if (TmpDef != &DefMI) {  in markInstAndDefDead()
    605  if (PrevMI == &DefMI && MRI.hasOneUse(DefMI.getOperand(0).getReg()))  in markInstAndDefDead()
    606  DeadInsts.push_back(&DefMI);  in markInstAndDefDead()
|
D | CombinerHelper.h |
    80  bool isPredecessor(MachineInstr &DefMI, MachineInstr &UseMI);
    88  bool dominates(MachineInstr &DefMI, MachineInstr &UseMI);
|
/third_party/skia/third_party/externals/swiftshader/third_party/llvm-10.0/llvm/include/llvm/CodeGen/ |
D | TargetSchedule.h |
    174  unsigned computeOperandLatency(const MachineInstr *DefMI, unsigned DefOperIdx,
    198  unsigned computeOutputLatency(const MachineInstr *DefMI, unsigned DefOperIdx,
|