1 //===-- llvm/CodeGen/GlobalISel/MachineIRBuilder.cpp - MIBuilder--*- C++ -*-==//
2 //
3 // The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 /// \file
/// This file implements the MachineIRBuilder class.
11 //===----------------------------------------------------------------------===//
12 #include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"
13
14 #include "llvm/CodeGen/MachineFunction.h"
15 #include "llvm/CodeGen/MachineInstr.h"
16 #include "llvm/CodeGen/MachineInstrBuilder.h"
17 #include "llvm/CodeGen/MachineRegisterInfo.h"
18 #include "llvm/CodeGen/TargetInstrInfo.h"
19 #include "llvm/CodeGen/TargetOpcodes.h"
20 #include "llvm/CodeGen/TargetSubtargetInfo.h"
21 #include "llvm/IR/DebugInfo.h"
22
23 using namespace llvm;
24
setMF(MachineFunction & MF)25 void MachineIRBuilderBase::setMF(MachineFunction &MF) {
26 State.MF = &MF;
27 State.MBB = nullptr;
28 State.MRI = &MF.getRegInfo();
29 State.TII = MF.getSubtarget().getInstrInfo();
30 State.DL = DebugLoc();
31 State.II = MachineBasicBlock::iterator();
32 State.InsertedInstr = nullptr;
33 }
34
setMBB(MachineBasicBlock & MBB)35 void MachineIRBuilderBase::setMBB(MachineBasicBlock &MBB) {
36 State.MBB = &MBB;
37 State.II = MBB.end();
38 assert(&getMF() == MBB.getParent() &&
39 "Basic block is in a different function");
40 }
41
setInstr(MachineInstr & MI)42 void MachineIRBuilderBase::setInstr(MachineInstr &MI) {
43 assert(MI.getParent() && "Instruction is not part of a basic block");
44 setMBB(*MI.getParent());
45 State.II = MI.getIterator();
46 }
47
setInsertPt(MachineBasicBlock & MBB,MachineBasicBlock::iterator II)48 void MachineIRBuilderBase::setInsertPt(MachineBasicBlock &MBB,
49 MachineBasicBlock::iterator II) {
50 assert(MBB.getParent() == &getMF() &&
51 "Basic block is in a different function");
52 State.MBB = &MBB;
53 State.II = II;
54 }
55
recordInsertion(MachineInstr * InsertedInstr) const56 void MachineIRBuilderBase::recordInsertion(MachineInstr *InsertedInstr) const {
57 if (State.InsertedInstr)
58 State.InsertedInstr(InsertedInstr);
59 }
60
recordInsertions(std::function<void (MachineInstr *)> Inserted)61 void MachineIRBuilderBase::recordInsertions(
62 std::function<void(MachineInstr *)> Inserted) {
63 State.InsertedInstr = std::move(Inserted);
64 }
65
stopRecordingInsertions()66 void MachineIRBuilderBase::stopRecordingInsertions() {
67 State.InsertedInstr = nullptr;
68 }
69
70 //------------------------------------------------------------------------------
71 // Build instruction variants.
72 //------------------------------------------------------------------------------
73
buildInstr(unsigned Opcode)74 MachineInstrBuilder MachineIRBuilderBase::buildInstr(unsigned Opcode) {
75 return insertInstr(buildInstrNoInsert(Opcode));
76 }
77
buildInstrNoInsert(unsigned Opcode)78 MachineInstrBuilder MachineIRBuilderBase::buildInstrNoInsert(unsigned Opcode) {
79 MachineInstrBuilder MIB = BuildMI(getMF(), getDL(), getTII().get(Opcode));
80 return MIB;
81 }
82
insertInstr(MachineInstrBuilder MIB)83 MachineInstrBuilder MachineIRBuilderBase::insertInstr(MachineInstrBuilder MIB) {
84 getMBB().insert(getInsertPt(), MIB);
85 recordInsertion(MIB);
86 return MIB;
87 }
88
89 MachineInstrBuilder
buildDirectDbgValue(unsigned Reg,const MDNode * Variable,const MDNode * Expr)90 MachineIRBuilderBase::buildDirectDbgValue(unsigned Reg, const MDNode *Variable,
91 const MDNode *Expr) {
92 assert(isa<DILocalVariable>(Variable) && "not a variable");
93 assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
94 assert(
95 cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
96 "Expected inlined-at fields to agree");
97 return insertInstr(BuildMI(getMF(), getDL(),
98 getTII().get(TargetOpcode::DBG_VALUE),
99 /*IsIndirect*/ false, Reg, Variable, Expr));
100 }
101
buildIndirectDbgValue(unsigned Reg,const MDNode * Variable,const MDNode * Expr)102 MachineInstrBuilder MachineIRBuilderBase::buildIndirectDbgValue(
103 unsigned Reg, const MDNode *Variable, const MDNode *Expr) {
104 assert(isa<DILocalVariable>(Variable) && "not a variable");
105 assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
106 assert(
107 cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
108 "Expected inlined-at fields to agree");
109 return insertInstr(BuildMI(getMF(), getDL(),
110 getTII().get(TargetOpcode::DBG_VALUE),
111 /*IsIndirect*/ true, Reg, Variable, Expr));
112 }
113
114 MachineInstrBuilder
buildFIDbgValue(int FI,const MDNode * Variable,const MDNode * Expr)115 MachineIRBuilderBase::buildFIDbgValue(int FI, const MDNode *Variable,
116 const MDNode *Expr) {
117 assert(isa<DILocalVariable>(Variable) && "not a variable");
118 assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
119 assert(
120 cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
121 "Expected inlined-at fields to agree");
122 return buildInstr(TargetOpcode::DBG_VALUE)
123 .addFrameIndex(FI)
124 .addImm(0)
125 .addMetadata(Variable)
126 .addMetadata(Expr);
127 }
128
buildConstDbgValue(const Constant & C,const MDNode * Variable,const MDNode * Expr)129 MachineInstrBuilder MachineIRBuilderBase::buildConstDbgValue(
130 const Constant &C, const MDNode *Variable, const MDNode *Expr) {
131 assert(isa<DILocalVariable>(Variable) && "not a variable");
132 assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
133 assert(
134 cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
135 "Expected inlined-at fields to agree");
136 auto MIB = buildInstr(TargetOpcode::DBG_VALUE);
137 if (auto *CI = dyn_cast<ConstantInt>(&C)) {
138 if (CI->getBitWidth() > 64)
139 MIB.addCImm(CI);
140 else
141 MIB.addImm(CI->getZExtValue());
142 } else if (auto *CFP = dyn_cast<ConstantFP>(&C)) {
143 MIB.addFPImm(CFP);
144 } else {
145 // Insert %noreg if we didn't find a usable constant and had to drop it.
146 MIB.addReg(0U);
147 }
148
149 return MIB.addImm(0).addMetadata(Variable).addMetadata(Expr);
150 }
151
buildFrameIndex(unsigned Res,int Idx)152 MachineInstrBuilder MachineIRBuilderBase::buildFrameIndex(unsigned Res,
153 int Idx) {
154 assert(getMRI()->getType(Res).isPointer() && "invalid operand type");
155 return buildInstr(TargetOpcode::G_FRAME_INDEX)
156 .addDef(Res)
157 .addFrameIndex(Idx);
158 }
159
160 MachineInstrBuilder
buildGlobalValue(unsigned Res,const GlobalValue * GV)161 MachineIRBuilderBase::buildGlobalValue(unsigned Res, const GlobalValue *GV) {
162 assert(getMRI()->getType(Res).isPointer() && "invalid operand type");
163 assert(getMRI()->getType(Res).getAddressSpace() ==
164 GV->getType()->getAddressSpace() &&
165 "address space mismatch");
166
167 return buildInstr(TargetOpcode::G_GLOBAL_VALUE)
168 .addDef(Res)
169 .addGlobalAddress(GV);
170 }
171
validateBinaryOp(unsigned Res,unsigned Op0,unsigned Op1)172 void MachineIRBuilderBase::validateBinaryOp(unsigned Res, unsigned Op0,
173 unsigned Op1) {
174 assert((getMRI()->getType(Res).isScalar() ||
175 getMRI()->getType(Res).isVector()) &&
176 "invalid operand type");
177 assert(getMRI()->getType(Res) == getMRI()->getType(Op0) &&
178 getMRI()->getType(Res) == getMRI()->getType(Op1) && "type mismatch");
179 }
180
buildGEP(unsigned Res,unsigned Op0,unsigned Op1)181 MachineInstrBuilder MachineIRBuilderBase::buildGEP(unsigned Res, unsigned Op0,
182 unsigned Op1) {
183 assert(getMRI()->getType(Res).isPointer() &&
184 getMRI()->getType(Res) == getMRI()->getType(Op0) && "type mismatch");
185 assert(getMRI()->getType(Op1).isScalar() && "invalid offset type");
186
187 return buildInstr(TargetOpcode::G_GEP)
188 .addDef(Res)
189 .addUse(Op0)
190 .addUse(Op1);
191 }
192
193 Optional<MachineInstrBuilder>
materializeGEP(unsigned & Res,unsigned Op0,const LLT & ValueTy,uint64_t Value)194 MachineIRBuilderBase::materializeGEP(unsigned &Res, unsigned Op0,
195 const LLT &ValueTy, uint64_t Value) {
196 assert(Res == 0 && "Res is a result argument");
197 assert(ValueTy.isScalar() && "invalid offset type");
198
199 if (Value == 0) {
200 Res = Op0;
201 return None;
202 }
203
204 Res = getMRI()->createGenericVirtualRegister(getMRI()->getType(Op0));
205 unsigned TmpReg = getMRI()->createGenericVirtualRegister(ValueTy);
206
207 buildConstant(TmpReg, Value);
208 return buildGEP(Res, Op0, TmpReg);
209 }
210
buildPtrMask(unsigned Res,unsigned Op0,uint32_t NumBits)211 MachineInstrBuilder MachineIRBuilderBase::buildPtrMask(unsigned Res,
212 unsigned Op0,
213 uint32_t NumBits) {
214 assert(getMRI()->getType(Res).isPointer() &&
215 getMRI()->getType(Res) == getMRI()->getType(Op0) && "type mismatch");
216
217 return buildInstr(TargetOpcode::G_PTR_MASK)
218 .addDef(Res)
219 .addUse(Op0)
220 .addImm(NumBits);
221 }
222
buildBr(MachineBasicBlock & Dest)223 MachineInstrBuilder MachineIRBuilderBase::buildBr(MachineBasicBlock &Dest) {
224 return buildInstr(TargetOpcode::G_BR).addMBB(&Dest);
225 }
226
buildBrIndirect(unsigned Tgt)227 MachineInstrBuilder MachineIRBuilderBase::buildBrIndirect(unsigned Tgt) {
228 assert(getMRI()->getType(Tgt).isPointer() && "invalid branch destination");
229 return buildInstr(TargetOpcode::G_BRINDIRECT).addUse(Tgt);
230 }
231
buildCopy(unsigned Res,unsigned Op)232 MachineInstrBuilder MachineIRBuilderBase::buildCopy(unsigned Res, unsigned Op) {
233 assert(getMRI()->getType(Res) == LLT() || getMRI()->getType(Op) == LLT() ||
234 getMRI()->getType(Res) == getMRI()->getType(Op));
235 return buildInstr(TargetOpcode::COPY).addDef(Res).addUse(Op);
236 }
237
238 MachineInstrBuilder
buildConstant(unsigned Res,const ConstantInt & Val)239 MachineIRBuilderBase::buildConstant(unsigned Res, const ConstantInt &Val) {
240 LLT Ty = getMRI()->getType(Res);
241
242 assert((Ty.isScalar() || Ty.isPointer()) && "invalid operand type");
243
244 const ConstantInt *NewVal = &Val;
245 if (Ty.getSizeInBits() != Val.getBitWidth())
246 NewVal = ConstantInt::get(getMF().getFunction().getContext(),
247 Val.getValue().sextOrTrunc(Ty.getSizeInBits()));
248
249 return buildInstr(TargetOpcode::G_CONSTANT).addDef(Res).addCImm(NewVal);
250 }
251
buildConstant(unsigned Res,int64_t Val)252 MachineInstrBuilder MachineIRBuilderBase::buildConstant(unsigned Res,
253 int64_t Val) {
254 auto IntN = IntegerType::get(getMF().getFunction().getContext(),
255 getMRI()->getType(Res).getSizeInBits());
256 ConstantInt *CI = ConstantInt::get(IntN, Val, true);
257 return buildConstant(Res, *CI);
258 }
259
260 MachineInstrBuilder
buildFConstant(unsigned Res,const ConstantFP & Val)261 MachineIRBuilderBase::buildFConstant(unsigned Res, const ConstantFP &Val) {
262 assert(getMRI()->getType(Res).isScalar() && "invalid operand type");
263
264 return buildInstr(TargetOpcode::G_FCONSTANT).addDef(Res).addFPImm(&Val);
265 }
266
buildFConstant(unsigned Res,double Val)267 MachineInstrBuilder MachineIRBuilderBase::buildFConstant(unsigned Res,
268 double Val) {
269 LLT DstTy = getMRI()->getType(Res);
270 auto &Ctx = getMF().getFunction().getContext();
271 auto *CFP =
272 ConstantFP::get(Ctx, getAPFloatFromSize(Val, DstTy.getSizeInBits()));
273 return buildFConstant(Res, *CFP);
274 }
275
buildBrCond(unsigned Tst,MachineBasicBlock & Dest)276 MachineInstrBuilder MachineIRBuilderBase::buildBrCond(unsigned Tst,
277 MachineBasicBlock &Dest) {
278 assert(getMRI()->getType(Tst).isScalar() && "invalid operand type");
279
280 return buildInstr(TargetOpcode::G_BRCOND).addUse(Tst).addMBB(&Dest);
281 }
282
buildLoad(unsigned Res,unsigned Addr,MachineMemOperand & MMO)283 MachineInstrBuilder MachineIRBuilderBase::buildLoad(unsigned Res, unsigned Addr,
284 MachineMemOperand &MMO) {
285 return buildLoadInstr(TargetOpcode::G_LOAD, Res, Addr, MMO);
286 }
287
288 MachineInstrBuilder
buildLoadInstr(unsigned Opcode,unsigned Res,unsigned Addr,MachineMemOperand & MMO)289 MachineIRBuilderBase::buildLoadInstr(unsigned Opcode, unsigned Res,
290 unsigned Addr, MachineMemOperand &MMO) {
291 assert(getMRI()->getType(Res).isValid() && "invalid operand type");
292 assert(getMRI()->getType(Addr).isPointer() && "invalid operand type");
293
294 return buildInstr(Opcode)
295 .addDef(Res)
296 .addUse(Addr)
297 .addMemOperand(&MMO);
298 }
299
buildStore(unsigned Val,unsigned Addr,MachineMemOperand & MMO)300 MachineInstrBuilder MachineIRBuilderBase::buildStore(unsigned Val,
301 unsigned Addr,
302 MachineMemOperand &MMO) {
303 assert(getMRI()->getType(Val).isValid() && "invalid operand type");
304 assert(getMRI()->getType(Addr).isPointer() && "invalid operand type");
305
306 return buildInstr(TargetOpcode::G_STORE)
307 .addUse(Val)
308 .addUse(Addr)
309 .addMemOperand(&MMO);
310 }
311
buildUAdde(unsigned Res,unsigned CarryOut,unsigned Op0,unsigned Op1,unsigned CarryIn)312 MachineInstrBuilder MachineIRBuilderBase::buildUAdde(unsigned Res,
313 unsigned CarryOut,
314 unsigned Op0, unsigned Op1,
315 unsigned CarryIn) {
316 assert(getMRI()->getType(Res).isScalar() && "invalid operand type");
317 assert(getMRI()->getType(Res) == getMRI()->getType(Op0) &&
318 getMRI()->getType(Res) == getMRI()->getType(Op1) && "type mismatch");
319 assert(getMRI()->getType(CarryOut).isScalar() && "invalid operand type");
320 assert(getMRI()->getType(CarryOut) == getMRI()->getType(CarryIn) &&
321 "type mismatch");
322
323 return buildInstr(TargetOpcode::G_UADDE)
324 .addDef(Res)
325 .addDef(CarryOut)
326 .addUse(Op0)
327 .addUse(Op1)
328 .addUse(CarryIn);
329 }
330
buildAnyExt(unsigned Res,unsigned Op)331 MachineInstrBuilder MachineIRBuilderBase::buildAnyExt(unsigned Res,
332 unsigned Op) {
333 validateTruncExt(Res, Op, true);
334 return buildInstr(TargetOpcode::G_ANYEXT).addDef(Res).addUse(Op);
335 }
336
buildSExt(unsigned Res,unsigned Op)337 MachineInstrBuilder MachineIRBuilderBase::buildSExt(unsigned Res, unsigned Op) {
338 validateTruncExt(Res, Op, true);
339 return buildInstr(TargetOpcode::G_SEXT).addDef(Res).addUse(Op);
340 }
341
buildZExt(unsigned Res,unsigned Op)342 MachineInstrBuilder MachineIRBuilderBase::buildZExt(unsigned Res, unsigned Op) {
343 validateTruncExt(Res, Op, true);
344 return buildInstr(TargetOpcode::G_ZEXT).addDef(Res).addUse(Op);
345 }
346
buildExtOrTrunc(unsigned ExtOpc,unsigned Res,unsigned Op)347 MachineInstrBuilder MachineIRBuilderBase::buildExtOrTrunc(unsigned ExtOpc,
348 unsigned Res,
349 unsigned Op) {
350 assert((TargetOpcode::G_ANYEXT == ExtOpc || TargetOpcode::G_ZEXT == ExtOpc ||
351 TargetOpcode::G_SEXT == ExtOpc) &&
352 "Expecting Extending Opc");
353 assert(getMRI()->getType(Res).isScalar() ||
354 getMRI()->getType(Res).isVector());
355 assert(getMRI()->getType(Res).isScalar() == getMRI()->getType(Op).isScalar());
356
357 unsigned Opcode = TargetOpcode::COPY;
358 if (getMRI()->getType(Res).getSizeInBits() >
359 getMRI()->getType(Op).getSizeInBits())
360 Opcode = ExtOpc;
361 else if (getMRI()->getType(Res).getSizeInBits() <
362 getMRI()->getType(Op).getSizeInBits())
363 Opcode = TargetOpcode::G_TRUNC;
364 else
365 assert(getMRI()->getType(Res) == getMRI()->getType(Op));
366
367 return buildInstr(Opcode).addDef(Res).addUse(Op);
368 }
369
buildSExtOrTrunc(unsigned Res,unsigned Op)370 MachineInstrBuilder MachineIRBuilderBase::buildSExtOrTrunc(unsigned Res,
371 unsigned Op) {
372 return buildExtOrTrunc(TargetOpcode::G_SEXT, Res, Op);
373 }
374
buildZExtOrTrunc(unsigned Res,unsigned Op)375 MachineInstrBuilder MachineIRBuilderBase::buildZExtOrTrunc(unsigned Res,
376 unsigned Op) {
377 return buildExtOrTrunc(TargetOpcode::G_ZEXT, Res, Op);
378 }
379
buildAnyExtOrTrunc(unsigned Res,unsigned Op)380 MachineInstrBuilder MachineIRBuilderBase::buildAnyExtOrTrunc(unsigned Res,
381 unsigned Op) {
382 return buildExtOrTrunc(TargetOpcode::G_ANYEXT, Res, Op);
383 }
384
buildCast(unsigned Dst,unsigned Src)385 MachineInstrBuilder MachineIRBuilderBase::buildCast(unsigned Dst,
386 unsigned Src) {
387 LLT SrcTy = getMRI()->getType(Src);
388 LLT DstTy = getMRI()->getType(Dst);
389 if (SrcTy == DstTy)
390 return buildCopy(Dst, Src);
391
392 unsigned Opcode;
393 if (SrcTy.isPointer() && DstTy.isScalar())
394 Opcode = TargetOpcode::G_PTRTOINT;
395 else if (DstTy.isPointer() && SrcTy.isScalar())
396 Opcode = TargetOpcode::G_INTTOPTR;
397 else {
398 assert(!SrcTy.isPointer() && !DstTy.isPointer() && "n G_ADDRCAST yet");
399 Opcode = TargetOpcode::G_BITCAST;
400 }
401
402 return buildInstr(Opcode).addDef(Dst).addUse(Src);
403 }
404
405 MachineInstrBuilder
buildExtract(unsigned Res,unsigned Src,uint64_t Index)406 MachineIRBuilderBase::buildExtract(unsigned Res, unsigned Src, uint64_t Index) {
407 #ifndef NDEBUG
408 assert(getMRI()->getType(Src).isValid() && "invalid operand type");
409 assert(getMRI()->getType(Res).isValid() && "invalid operand type");
410 assert(Index + getMRI()->getType(Res).getSizeInBits() <=
411 getMRI()->getType(Src).getSizeInBits() &&
412 "extracting off end of register");
413 #endif
414
415 if (getMRI()->getType(Res).getSizeInBits() ==
416 getMRI()->getType(Src).getSizeInBits()) {
417 assert(Index == 0 && "insertion past the end of a register");
418 return buildCast(Res, Src);
419 }
420
421 return buildInstr(TargetOpcode::G_EXTRACT)
422 .addDef(Res)
423 .addUse(Src)
424 .addImm(Index);
425 }
426
buildSequence(unsigned Res,ArrayRef<unsigned> Ops,ArrayRef<uint64_t> Indices)427 void MachineIRBuilderBase::buildSequence(unsigned Res, ArrayRef<unsigned> Ops,
428 ArrayRef<uint64_t> Indices) {
429 #ifndef NDEBUG
430 assert(Ops.size() == Indices.size() && "incompatible args");
431 assert(!Ops.empty() && "invalid trivial sequence");
432 assert(std::is_sorted(Indices.begin(), Indices.end()) &&
433 "sequence offsets must be in ascending order");
434
435 assert(getMRI()->getType(Res).isValid() && "invalid operand type");
436 for (auto Op : Ops)
437 assert(getMRI()->getType(Op).isValid() && "invalid operand type");
438 #endif
439
440 LLT ResTy = getMRI()->getType(Res);
441 LLT OpTy = getMRI()->getType(Ops[0]);
442 unsigned OpSize = OpTy.getSizeInBits();
443 bool MaybeMerge = true;
444 for (unsigned i = 0; i < Ops.size(); ++i) {
445 if (getMRI()->getType(Ops[i]) != OpTy || Indices[i] != i * OpSize) {
446 MaybeMerge = false;
447 break;
448 }
449 }
450
451 if (MaybeMerge && Ops.size() * OpSize == ResTy.getSizeInBits()) {
452 buildMerge(Res, Ops);
453 return;
454 }
455
456 unsigned ResIn = getMRI()->createGenericVirtualRegister(ResTy);
457 buildUndef(ResIn);
458
459 for (unsigned i = 0; i < Ops.size(); ++i) {
460 unsigned ResOut = i + 1 == Ops.size()
461 ? Res
462 : getMRI()->createGenericVirtualRegister(ResTy);
463 buildInsert(ResOut, ResIn, Ops[i], Indices[i]);
464 ResIn = ResOut;
465 }
466 }
467
buildUndef(unsigned Res)468 MachineInstrBuilder MachineIRBuilderBase::buildUndef(unsigned Res) {
469 return buildInstr(TargetOpcode::G_IMPLICIT_DEF).addDef(Res);
470 }
471
buildMerge(unsigned Res,ArrayRef<unsigned> Ops)472 MachineInstrBuilder MachineIRBuilderBase::buildMerge(unsigned Res,
473 ArrayRef<unsigned> Ops) {
474
475 #ifndef NDEBUG
476 assert(!Ops.empty() && "invalid trivial sequence");
477 LLT Ty = getMRI()->getType(Ops[0]);
478 for (auto Reg : Ops)
479 assert(getMRI()->getType(Reg) == Ty && "type mismatch in input list");
480 assert(Ops.size() * getMRI()->getType(Ops[0]).getSizeInBits() ==
481 getMRI()->getType(Res).getSizeInBits() &&
482 "input operands do not cover output register");
483 #endif
484
485 if (Ops.size() == 1)
486 return buildCast(Res, Ops[0]);
487
488 MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_MERGE_VALUES);
489 MIB.addDef(Res);
490 for (unsigned i = 0; i < Ops.size(); ++i)
491 MIB.addUse(Ops[i]);
492 return MIB;
493 }
494
buildUnmerge(ArrayRef<unsigned> Res,unsigned Op)495 MachineInstrBuilder MachineIRBuilderBase::buildUnmerge(ArrayRef<unsigned> Res,
496 unsigned Op) {
497
498 #ifndef NDEBUG
499 assert(!Res.empty() && "invalid trivial sequence");
500 LLT Ty = getMRI()->getType(Res[0]);
501 for (auto Reg : Res)
502 assert(getMRI()->getType(Reg) == Ty && "type mismatch in input list");
503 assert(Res.size() * getMRI()->getType(Res[0]).getSizeInBits() ==
504 getMRI()->getType(Op).getSizeInBits() &&
505 "input operands do not cover output register");
506 #endif
507
508 MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_UNMERGE_VALUES);
509 for (unsigned i = 0; i < Res.size(); ++i)
510 MIB.addDef(Res[i]);
511 MIB.addUse(Op);
512 return MIB;
513 }
514
buildInsert(unsigned Res,unsigned Src,unsigned Op,unsigned Index)515 MachineInstrBuilder MachineIRBuilderBase::buildInsert(unsigned Res,
516 unsigned Src, unsigned Op,
517 unsigned Index) {
518 assert(Index + getMRI()->getType(Op).getSizeInBits() <=
519 getMRI()->getType(Res).getSizeInBits() &&
520 "insertion past the end of a register");
521
522 if (getMRI()->getType(Res).getSizeInBits() ==
523 getMRI()->getType(Op).getSizeInBits()) {
524 return buildCast(Res, Op);
525 }
526
527 return buildInstr(TargetOpcode::G_INSERT)
528 .addDef(Res)
529 .addUse(Src)
530 .addUse(Op)
531 .addImm(Index);
532 }
533
buildIntrinsic(Intrinsic::ID ID,unsigned Res,bool HasSideEffects)534 MachineInstrBuilder MachineIRBuilderBase::buildIntrinsic(Intrinsic::ID ID,
535 unsigned Res,
536 bool HasSideEffects) {
537 auto MIB =
538 buildInstr(HasSideEffects ? TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS
539 : TargetOpcode::G_INTRINSIC);
540 if (Res)
541 MIB.addDef(Res);
542 MIB.addIntrinsicID(ID);
543 return MIB;
544 }
545
buildTrunc(unsigned Res,unsigned Op)546 MachineInstrBuilder MachineIRBuilderBase::buildTrunc(unsigned Res,
547 unsigned Op) {
548 validateTruncExt(Res, Op, false);
549 return buildInstr(TargetOpcode::G_TRUNC).addDef(Res).addUse(Op);
550 }
551
buildFPTrunc(unsigned Res,unsigned Op)552 MachineInstrBuilder MachineIRBuilderBase::buildFPTrunc(unsigned Res,
553 unsigned Op) {
554 validateTruncExt(Res, Op, false);
555 return buildInstr(TargetOpcode::G_FPTRUNC).addDef(Res).addUse(Op);
556 }
557
buildICmp(CmpInst::Predicate Pred,unsigned Res,unsigned Op0,unsigned Op1)558 MachineInstrBuilder MachineIRBuilderBase::buildICmp(CmpInst::Predicate Pred,
559 unsigned Res, unsigned Op0,
560 unsigned Op1) {
561 #ifndef NDEBUG
562 assert(getMRI()->getType(Op0) == getMRI()->getType(Op0) && "type mismatch");
563 assert(CmpInst::isIntPredicate(Pred) && "invalid predicate");
564 if (getMRI()->getType(Op0).isScalar() || getMRI()->getType(Op0).isPointer())
565 assert(getMRI()->getType(Res).isScalar() && "type mismatch");
566 else
567 assert(getMRI()->getType(Res).isVector() &&
568 getMRI()->getType(Res).getNumElements() ==
569 getMRI()->getType(Op0).getNumElements() &&
570 "type mismatch");
571 #endif
572
573 return buildInstr(TargetOpcode::G_ICMP)
574 .addDef(Res)
575 .addPredicate(Pred)
576 .addUse(Op0)
577 .addUse(Op1);
578 }
579
buildFCmp(CmpInst::Predicate Pred,unsigned Res,unsigned Op0,unsigned Op1)580 MachineInstrBuilder MachineIRBuilderBase::buildFCmp(CmpInst::Predicate Pred,
581 unsigned Res, unsigned Op0,
582 unsigned Op1) {
583 #ifndef NDEBUG
584 assert((getMRI()->getType(Op0).isScalar() ||
585 getMRI()->getType(Op0).isVector()) &&
586 "invalid operand type");
587 assert(getMRI()->getType(Op0) == getMRI()->getType(Op1) && "type mismatch");
588 assert(CmpInst::isFPPredicate(Pred) && "invalid predicate");
589 if (getMRI()->getType(Op0).isScalar())
590 assert(getMRI()->getType(Res).isScalar() && "type mismatch");
591 else
592 assert(getMRI()->getType(Res).isVector() &&
593 getMRI()->getType(Res).getNumElements() ==
594 getMRI()->getType(Op0).getNumElements() &&
595 "type mismatch");
596 #endif
597
598 return buildInstr(TargetOpcode::G_FCMP)
599 .addDef(Res)
600 .addPredicate(Pred)
601 .addUse(Op0)
602 .addUse(Op1);
603 }
604
buildSelect(unsigned Res,unsigned Tst,unsigned Op0,unsigned Op1)605 MachineInstrBuilder MachineIRBuilderBase::buildSelect(unsigned Res,
606 unsigned Tst,
607 unsigned Op0,
608 unsigned Op1) {
609 #ifndef NDEBUG
610 LLT ResTy = getMRI()->getType(Res);
611 assert((ResTy.isScalar() || ResTy.isVector() || ResTy.isPointer()) &&
612 "invalid operand type");
613 assert(ResTy == getMRI()->getType(Op0) && ResTy == getMRI()->getType(Op1) &&
614 "type mismatch");
615 if (ResTy.isScalar() || ResTy.isPointer())
616 assert(getMRI()->getType(Tst).isScalar() && "type mismatch");
617 else
618 assert((getMRI()->getType(Tst).isScalar() ||
619 (getMRI()->getType(Tst).isVector() &&
620 getMRI()->getType(Tst).getNumElements() ==
621 getMRI()->getType(Op0).getNumElements())) &&
622 "type mismatch");
623 #endif
624
625 return buildInstr(TargetOpcode::G_SELECT)
626 .addDef(Res)
627 .addUse(Tst)
628 .addUse(Op0)
629 .addUse(Op1);
630 }
631
632 MachineInstrBuilder
buildInsertVectorElement(unsigned Res,unsigned Val,unsigned Elt,unsigned Idx)633 MachineIRBuilderBase::buildInsertVectorElement(unsigned Res, unsigned Val,
634 unsigned Elt, unsigned Idx) {
635 #ifndef NDEBUG
636 LLT ResTy = getMRI()->getType(Res);
637 LLT ValTy = getMRI()->getType(Val);
638 LLT EltTy = getMRI()->getType(Elt);
639 LLT IdxTy = getMRI()->getType(Idx);
640 assert(ResTy.isVector() && ValTy.isVector() && "invalid operand type");
641 assert(IdxTy.isScalar() && "invalid operand type");
642 assert(ResTy.getNumElements() == ValTy.getNumElements() && "type mismatch");
643 assert(ResTy.getElementType() == EltTy && "type mismatch");
644 #endif
645
646 return buildInstr(TargetOpcode::G_INSERT_VECTOR_ELT)
647 .addDef(Res)
648 .addUse(Val)
649 .addUse(Elt)
650 .addUse(Idx);
651 }
652
653 MachineInstrBuilder
buildExtractVectorElement(unsigned Res,unsigned Val,unsigned Idx)654 MachineIRBuilderBase::buildExtractVectorElement(unsigned Res, unsigned Val,
655 unsigned Idx) {
656 #ifndef NDEBUG
657 LLT ResTy = getMRI()->getType(Res);
658 LLT ValTy = getMRI()->getType(Val);
659 LLT IdxTy = getMRI()->getType(Idx);
660 assert(ValTy.isVector() && "invalid operand type");
661 assert((ResTy.isScalar() || ResTy.isPointer()) && "invalid operand type");
662 assert(IdxTy.isScalar() && "invalid operand type");
663 assert(ValTy.getElementType() == ResTy && "type mismatch");
664 #endif
665
666 return buildInstr(TargetOpcode::G_EXTRACT_VECTOR_ELT)
667 .addDef(Res)
668 .addUse(Val)
669 .addUse(Idx);
670 }
671
buildAtomicCmpXchgWithSuccess(unsigned OldValRes,unsigned SuccessRes,unsigned Addr,unsigned CmpVal,unsigned NewVal,MachineMemOperand & MMO)672 MachineInstrBuilder MachineIRBuilderBase::buildAtomicCmpXchgWithSuccess(
673 unsigned OldValRes, unsigned SuccessRes, unsigned Addr, unsigned CmpVal,
674 unsigned NewVal, MachineMemOperand &MMO) {
675 #ifndef NDEBUG
676 LLT OldValResTy = getMRI()->getType(OldValRes);
677 LLT SuccessResTy = getMRI()->getType(SuccessRes);
678 LLT AddrTy = getMRI()->getType(Addr);
679 LLT CmpValTy = getMRI()->getType(CmpVal);
680 LLT NewValTy = getMRI()->getType(NewVal);
681 assert(OldValResTy.isScalar() && "invalid operand type");
682 assert(SuccessResTy.isScalar() && "invalid operand type");
683 assert(AddrTy.isPointer() && "invalid operand type");
684 assert(CmpValTy.isValid() && "invalid operand type");
685 assert(NewValTy.isValid() && "invalid operand type");
686 assert(OldValResTy == CmpValTy && "type mismatch");
687 assert(OldValResTy == NewValTy && "type mismatch");
688 #endif
689
690 return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG_WITH_SUCCESS)
691 .addDef(OldValRes)
692 .addDef(SuccessRes)
693 .addUse(Addr)
694 .addUse(CmpVal)
695 .addUse(NewVal)
696 .addMemOperand(&MMO);
697 }
698
699 MachineInstrBuilder
buildAtomicCmpXchg(unsigned OldValRes,unsigned Addr,unsigned CmpVal,unsigned NewVal,MachineMemOperand & MMO)700 MachineIRBuilderBase::buildAtomicCmpXchg(unsigned OldValRes, unsigned Addr,
701 unsigned CmpVal, unsigned NewVal,
702 MachineMemOperand &MMO) {
703 #ifndef NDEBUG
704 LLT OldValResTy = getMRI()->getType(OldValRes);
705 LLT AddrTy = getMRI()->getType(Addr);
706 LLT CmpValTy = getMRI()->getType(CmpVal);
707 LLT NewValTy = getMRI()->getType(NewVal);
708 assert(OldValResTy.isScalar() && "invalid operand type");
709 assert(AddrTy.isPointer() && "invalid operand type");
710 assert(CmpValTy.isValid() && "invalid operand type");
711 assert(NewValTy.isValid() && "invalid operand type");
712 assert(OldValResTy == CmpValTy && "type mismatch");
713 assert(OldValResTy == NewValTy && "type mismatch");
714 #endif
715
716 return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG)
717 .addDef(OldValRes)
718 .addUse(Addr)
719 .addUse(CmpVal)
720 .addUse(NewVal)
721 .addMemOperand(&MMO);
722 }
723
724 MachineInstrBuilder
buildAtomicRMW(unsigned Opcode,unsigned OldValRes,unsigned Addr,unsigned Val,MachineMemOperand & MMO)725 MachineIRBuilderBase::buildAtomicRMW(unsigned Opcode, unsigned OldValRes,
726 unsigned Addr, unsigned Val,
727 MachineMemOperand &MMO) {
728 #ifndef NDEBUG
729 LLT OldValResTy = getMRI()->getType(OldValRes);
730 LLT AddrTy = getMRI()->getType(Addr);
731 LLT ValTy = getMRI()->getType(Val);
732 assert(OldValResTy.isScalar() && "invalid operand type");
733 assert(AddrTy.isPointer() && "invalid operand type");
734 assert(ValTy.isValid() && "invalid operand type");
735 assert(OldValResTy == ValTy && "type mismatch");
736 #endif
737
738 return buildInstr(Opcode)
739 .addDef(OldValRes)
740 .addUse(Addr)
741 .addUse(Val)
742 .addMemOperand(&MMO);
743 }
744
745 MachineInstrBuilder
buildAtomicRMWXchg(unsigned OldValRes,unsigned Addr,unsigned Val,MachineMemOperand & MMO)746 MachineIRBuilderBase::buildAtomicRMWXchg(unsigned OldValRes, unsigned Addr,
747 unsigned Val, MachineMemOperand &MMO) {
748 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XCHG, OldValRes, Addr, Val,
749 MMO);
750 }
751 MachineInstrBuilder
buildAtomicRMWAdd(unsigned OldValRes,unsigned Addr,unsigned Val,MachineMemOperand & MMO)752 MachineIRBuilderBase::buildAtomicRMWAdd(unsigned OldValRes, unsigned Addr,
753 unsigned Val, MachineMemOperand &MMO) {
754 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_ADD, OldValRes, Addr, Val,
755 MMO);
756 }
757 MachineInstrBuilder
buildAtomicRMWSub(unsigned OldValRes,unsigned Addr,unsigned Val,MachineMemOperand & MMO)758 MachineIRBuilderBase::buildAtomicRMWSub(unsigned OldValRes, unsigned Addr,
759 unsigned Val, MachineMemOperand &MMO) {
760 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_SUB, OldValRes, Addr, Val,
761 MMO);
762 }
763 MachineInstrBuilder
buildAtomicRMWAnd(unsigned OldValRes,unsigned Addr,unsigned Val,MachineMemOperand & MMO)764 MachineIRBuilderBase::buildAtomicRMWAnd(unsigned OldValRes, unsigned Addr,
765 unsigned Val, MachineMemOperand &MMO) {
766 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_AND, OldValRes, Addr, Val,
767 MMO);
768 }
769 MachineInstrBuilder
buildAtomicRMWNand(unsigned OldValRes,unsigned Addr,unsigned Val,MachineMemOperand & MMO)770 MachineIRBuilderBase::buildAtomicRMWNand(unsigned OldValRes, unsigned Addr,
771 unsigned Val, MachineMemOperand &MMO) {
772 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_NAND, OldValRes, Addr, Val,
773 MMO);
774 }
775 MachineInstrBuilder
buildAtomicRMWOr(unsigned OldValRes,unsigned Addr,unsigned Val,MachineMemOperand & MMO)776 MachineIRBuilderBase::buildAtomicRMWOr(unsigned OldValRes, unsigned Addr,
777 unsigned Val, MachineMemOperand &MMO) {
778 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_OR, OldValRes, Addr, Val,
779 MMO);
780 }
781 MachineInstrBuilder
buildAtomicRMWXor(unsigned OldValRes,unsigned Addr,unsigned Val,MachineMemOperand & MMO)782 MachineIRBuilderBase::buildAtomicRMWXor(unsigned OldValRes, unsigned Addr,
783 unsigned Val, MachineMemOperand &MMO) {
784 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XOR, OldValRes, Addr, Val,
785 MMO);
786 }
787 MachineInstrBuilder
buildAtomicRMWMax(unsigned OldValRes,unsigned Addr,unsigned Val,MachineMemOperand & MMO)788 MachineIRBuilderBase::buildAtomicRMWMax(unsigned OldValRes, unsigned Addr,
789 unsigned Val, MachineMemOperand &MMO) {
790 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MAX, OldValRes, Addr, Val,
791 MMO);
792 }
793 MachineInstrBuilder
buildAtomicRMWMin(unsigned OldValRes,unsigned Addr,unsigned Val,MachineMemOperand & MMO)794 MachineIRBuilderBase::buildAtomicRMWMin(unsigned OldValRes, unsigned Addr,
795 unsigned Val, MachineMemOperand &MMO) {
796 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MIN, OldValRes, Addr, Val,
797 MMO);
798 }
799 MachineInstrBuilder
buildAtomicRMWUmax(unsigned OldValRes,unsigned Addr,unsigned Val,MachineMemOperand & MMO)800 MachineIRBuilderBase::buildAtomicRMWUmax(unsigned OldValRes, unsigned Addr,
801 unsigned Val, MachineMemOperand &MMO) {
802 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMAX, OldValRes, Addr, Val,
803 MMO);
804 }
805 MachineInstrBuilder
buildAtomicRMWUmin(unsigned OldValRes,unsigned Addr,unsigned Val,MachineMemOperand & MMO)806 MachineIRBuilderBase::buildAtomicRMWUmin(unsigned OldValRes, unsigned Addr,
807 unsigned Val, MachineMemOperand &MMO) {
808 return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMIN, OldValRes, Addr, Val,
809 MMO);
810 }
811
812 MachineInstrBuilder
buildBlockAddress(unsigned Res,const BlockAddress * BA)813 MachineIRBuilderBase::buildBlockAddress(unsigned Res, const BlockAddress *BA) {
814 #ifndef NDEBUG
815 assert(getMRI()->getType(Res).isPointer() && "invalid res type");
816 #endif
817
818 return buildInstr(TargetOpcode::G_BLOCK_ADDR).addDef(Res).addBlockAddress(BA);
819 }
820
validateTruncExt(unsigned Dst,unsigned Src,bool IsExtend)821 void MachineIRBuilderBase::validateTruncExt(unsigned Dst, unsigned Src,
822 bool IsExtend) {
823 #ifndef NDEBUG
824 LLT SrcTy = getMRI()->getType(Src);
825 LLT DstTy = getMRI()->getType(Dst);
826
827 if (DstTy.isVector()) {
828 assert(SrcTy.isVector() && "mismatched cast between vector and non-vector");
829 assert(SrcTy.getNumElements() == DstTy.getNumElements() &&
830 "different number of elements in a trunc/ext");
831 } else
832 assert(DstTy.isScalar() && SrcTy.isScalar() && "invalid extend/trunc");
833
834 if (IsExtend)
835 assert(DstTy.getSizeInBits() > SrcTy.getSizeInBits() &&
836 "invalid narrowing extend");
837 else
838 assert(DstTy.getSizeInBits() < SrcTy.getSizeInBits() &&
839 "invalid widening trunc");
840 #endif
841 }
842