//===-- llvm/CodeGen/GlobalISel/MachineIRBuilder.cpp - MIBuilder--*- C++ -*-==//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
/// \file
/// This file implements the MachineIRBuilder class.
//===----------------------------------------------------------------------===//
#include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetLowering.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/DebugInfoMetadata.h"

using namespace llvm;

void MachineIRBuilder::setMF(MachineFunction &MF) {
  State.MF = &MF;
  State.MBB = nullptr;
  State.MRI = &MF.getRegInfo();
  State.TII = MF.getSubtarget().getInstrInfo();
  State.DL = DebugLoc();
  State.II = MachineBasicBlock::iterator();
  State.Observer = nullptr;
}
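
// Illustrative usage (a sketch, not part of this file): a client binds the
// builder to a function and then picks an insertion point before emitting
// instructions; setInsertPt is assumed to come from MachineIRBuilder.h.
//   MachineIRBuilder B;
//   B.setMF(MF);                      // resets MBB, DL, insert point, observer
//   B.setInsertPt(MBB, MBB.begin());  // subsequent build*() calls insert here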

//------------------------------------------------------------------------------
// Build instruction variants.
//------------------------------------------------------------------------------

MachineInstrBuilder MachineIRBuilder::buildInstrNoInsert(unsigned Opcode) {
  MachineInstrBuilder MIB = BuildMI(getMF(), getDL(), getTII().get(Opcode));
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::insertInstr(MachineInstrBuilder MIB) {
  getMBB().insert(getInsertPt(), MIB);
  recordInsertion(MIB);
  return MIB;
}

MachineInstrBuilder
MachineIRBuilder::buildDirectDbgValue(Register Reg, const MDNode *Variable,
                                      const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return insertInstr(BuildMI(getMF(), getDL(),
                             getTII().get(TargetOpcode::DBG_VALUE),
                             /*IsIndirect*/ false, Reg, Variable, Expr));
}

MachineInstrBuilder
MachineIRBuilder::buildIndirectDbgValue(Register Reg, const MDNode *Variable,
                                        const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return insertInstr(BuildMI(getMF(), getDL(),
                             getTII().get(TargetOpcode::DBG_VALUE),
                             /*IsIndirect*/ true, Reg, Variable, Expr));
}

MachineInstrBuilder MachineIRBuilder::buildFIDbgValue(int FI,
                                                      const MDNode *Variable,
                                                      const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return buildInstr(TargetOpcode::DBG_VALUE)
      .addFrameIndex(FI)
      .addImm(0)
      .addMetadata(Variable)
      .addMetadata(Expr);
}

MachineInstrBuilder MachineIRBuilder::buildConstDbgValue(const Constant &C,
                                                         const MDNode *Variable,
                                                         const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  auto MIB = buildInstrNoInsert(TargetOpcode::DBG_VALUE);
  if (auto *CI = dyn_cast<ConstantInt>(&C)) {
    if (CI->getBitWidth() > 64)
      MIB.addCImm(CI);
    else
      MIB.addImm(CI->getZExtValue());
  } else if (auto *CFP = dyn_cast<ConstantFP>(&C)) {
    MIB.addFPImm(CFP);
  } else {
    // Insert $noreg if we didn't find a usable constant and had to drop it.
    MIB.addReg(Register());
  }

  MIB.addImm(0).addMetadata(Variable).addMetadata(Expr);
  return insertInstr(MIB);
}

MachineInstrBuilder MachineIRBuilder::buildDbgLabel(const MDNode *Label) {
  assert(isa<DILabel>(Label) && "not a label");
  assert(cast<DILabel>(Label)->isValidLocationForIntrinsic(State.DL) &&
         "Expected inlined-at fields to agree");
  auto MIB = buildInstr(TargetOpcode::DBG_LABEL);

  return MIB.addMetadata(Label);
}

MachineInstrBuilder MachineIRBuilder::buildDynStackAlloc(const DstOp &Res,
                                                         const SrcOp &Size,
                                                         Align Alignment) {
  assert(Res.getLLTTy(*getMRI()).isPointer() && "expected ptr dst type");
  auto MIB = buildInstr(TargetOpcode::G_DYN_STACKALLOC);
  Res.addDefToMIB(*getMRI(), MIB);
  Size.addSrcToMIB(MIB);
  MIB.addImm(Alignment.value());
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildFrameIndex(const DstOp &Res,
                                                      int Idx) {
  assert(Res.getLLTTy(*getMRI()).isPointer() && "invalid operand type");
  auto MIB = buildInstr(TargetOpcode::G_FRAME_INDEX);
  Res.addDefToMIB(*getMRI(), MIB);
  MIB.addFrameIndex(Idx);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildGlobalValue(const DstOp &Res,
                                                       const GlobalValue *GV) {
  assert(Res.getLLTTy(*getMRI()).isPointer() && "invalid operand type");
  assert(Res.getLLTTy(*getMRI()).getAddressSpace() ==
             GV->getType()->getAddressSpace() &&
         "address space mismatch");

  auto MIB = buildInstr(TargetOpcode::G_GLOBAL_VALUE);
  Res.addDefToMIB(*getMRI(), MIB);
  MIB.addGlobalAddress(GV);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildJumpTable(const LLT PtrTy,
                                                     unsigned JTI) {
  return buildInstr(TargetOpcode::G_JUMP_TABLE, {PtrTy}, {})
      .addJumpTableIndex(JTI);
}

void MachineIRBuilder::validateUnaryOp(const LLT Res, const LLT Op0) {
  assert((Res.isScalar() || Res.isVector()) && "invalid operand type");
  assert((Res == Op0) && "type mismatch");
}

void MachineIRBuilder::validateBinaryOp(const LLT Res, const LLT Op0,
                                        const LLT Op1) {
  assert((Res.isScalar() || Res.isVector()) && "invalid operand type");
  assert((Res == Op0 && Res == Op1) && "type mismatch");
}

void MachineIRBuilder::validateShiftOp(const LLT Res, const LLT Op0,
                                       const LLT Op1) {
  assert((Res.isScalar() || Res.isVector()) && "invalid operand type");
  assert((Res == Op0) && "type mismatch");
}

MachineInstrBuilder MachineIRBuilder::buildPtrAdd(const DstOp &Res,
                                                  const SrcOp &Op0,
                                                  const SrcOp &Op1) {
  assert(Res.getLLTTy(*getMRI()).getScalarType().isPointer() &&
         Res.getLLTTy(*getMRI()) == Op0.getLLTTy(*getMRI()) && "type mismatch");
  assert(Op1.getLLTTy(*getMRI()).getScalarType().isScalar() &&
         "invalid offset type");

  return buildInstr(TargetOpcode::G_PTR_ADD, {Res}, {Op0, Op1});
}

Optional<MachineInstrBuilder>
MachineIRBuilder::materializePtrAdd(Register &Res, Register Op0,
                                    const LLT ValueTy, uint64_t Value) {
  assert(Res == 0 && "Res is a result argument");
  assert(ValueTy.isScalar() && "invalid offset type");

  if (Value == 0) {
    Res = Op0;
    return None;
  }

  Res = getMRI()->createGenericVirtualRegister(getMRI()->getType(Op0));
  auto Cst = buildConstant(ValueTy, Value);
  return buildPtrAdd(Res, Op0, Cst.getReg(0));
}
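
// Illustrative call pattern (sketch): a zero offset emits no instruction and
// aliases Res to Op0, so callers must check the returned Optional before using
// the new instruction; updateUsers below is a hypothetical follow-up.
//   Register NewPtr;
//   if (auto PtrAdd = B.materializePtrAdd(NewPtr, Base, LLT::scalar(64), Off))
//     updateUsers(*PtrAdd);  // only reached when a G_PTR_ADD was created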

MachineInstrBuilder MachineIRBuilder::buildMaskLowPtrBits(const DstOp &Res,
                                                          const SrcOp &Op0,
                                                          uint32_t NumBits) {
  LLT PtrTy = Res.getLLTTy(*getMRI());
  LLT MaskTy = LLT::scalar(PtrTy.getSizeInBits());
  Register MaskReg = getMRI()->createGenericVirtualRegister(MaskTy);
  buildConstant(MaskReg, maskTrailingZeros<uint64_t>(NumBits));
  return buildPtrMask(Res, Op0, MaskReg);
}
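
// Note: maskTrailingZeros<uint64_t>(NumBits) yields a mask whose low NumBits
// bits are 0 and whose remaining bits are 1 (e.g. NumBits == 4 gives
// 0xFFFFFFFFFFFFFFF0), so the G_PTRMASK built above clears the low NumBits
// bits of the pointer -- the usual pointer-alignment idiom.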

MachineInstrBuilder
MachineIRBuilder::buildPadVectorWithUndefElements(const DstOp &Res,
                                                  const SrcOp &Op0) {
  LLT ResTy = Res.getLLTTy(*getMRI());
  LLT Op0Ty = Op0.getLLTTy(*getMRI());

  assert((ResTy.isVector() && Op0Ty.isVector()) && "Non vector type");
  assert((ResTy.getElementType() == Op0Ty.getElementType()) &&
         "Different vector element types");
  assert((ResTy.getNumElements() > Op0Ty.getNumElements()) &&
         "Op0 has more elements");

  auto Unmerge = buildUnmerge(Op0Ty.getElementType(), Op0);
  SmallVector<Register, 8> Regs;
  for (auto Op : Unmerge.getInstr()->defs())
    Regs.push_back(Op.getReg());
  Register Undef = buildUndef(Op0Ty.getElementType()).getReg(0);
  unsigned NumberOfPadElts = ResTy.getNumElements() - Regs.size();
  for (unsigned i = 0; i < NumberOfPadElts; ++i)
    Regs.push_back(Undef);
  return buildMerge(Res, Regs);
}
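
// For example (sketch): widening a <2 x s32> value to <4 x s32> unmerges the
// two s32 elements, appends two undef s32 values from one G_IMPLICIT_DEF, and
// re-merges all four into the destination vector.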

MachineInstrBuilder
MachineIRBuilder::buildDeleteTrailingVectorElements(const DstOp &Res,
                                                    const SrcOp &Op0) {
  LLT ResTy = Res.getLLTTy(*getMRI());
  LLT Op0Ty = Op0.getLLTTy(*getMRI());

  assert((ResTy.isVector() && Op0Ty.isVector()) && "Non vector type");
  assert((ResTy.getElementType() == Op0Ty.getElementType()) &&
         "Different vector element types");
  assert((ResTy.getNumElements() < Op0Ty.getNumElements()) &&
         "Op0 has fewer elements");

  SmallVector<Register, 8> Regs;
  auto Unmerge = buildUnmerge(Op0Ty.getElementType(), Op0);
  for (unsigned i = 0; i < ResTy.getNumElements(); ++i)
    Regs.push_back(Unmerge.getReg(i));
  return buildMerge(Res, Regs);
}
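
// For example (sketch): narrowing a <4 x s32> value to <2 x s32> unmerges all
// four s32 elements and re-merges only the first two, dropping the trailing
// pair.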

MachineInstrBuilder MachineIRBuilder::buildBr(MachineBasicBlock &Dest) {
  return buildInstr(TargetOpcode::G_BR).addMBB(&Dest);
}

MachineInstrBuilder MachineIRBuilder::buildBrIndirect(Register Tgt) {
  assert(getMRI()->getType(Tgt).isPointer() && "invalid branch destination");
  return buildInstr(TargetOpcode::G_BRINDIRECT).addUse(Tgt);
}

MachineInstrBuilder MachineIRBuilder::buildBrJT(Register TablePtr,
                                                unsigned JTI,
                                                Register IndexReg) {
  assert(getMRI()->getType(TablePtr).isPointer() &&
         "Table reg must be a pointer");
  return buildInstr(TargetOpcode::G_BRJT)
      .addUse(TablePtr)
      .addJumpTableIndex(JTI)
      .addUse(IndexReg);
}

MachineInstrBuilder MachineIRBuilder::buildCopy(const DstOp &Res,
                                                const SrcOp &Op) {
  return buildInstr(TargetOpcode::COPY, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
                                                    const ConstantInt &Val) {
  LLT Ty = Res.getLLTTy(*getMRI());
  LLT EltTy = Ty.getScalarType();
  assert(EltTy.getScalarSizeInBits() == Val.getBitWidth() &&
         "creating constant with the wrong size");

  if (Ty.isVector()) {
    auto Const = buildInstr(TargetOpcode::G_CONSTANT)
                     .addDef(getMRI()->createGenericVirtualRegister(EltTy))
                     .addCImm(&Val);
    return buildSplatVector(Res, Const);
  }

  auto Const = buildInstr(TargetOpcode::G_CONSTANT);
  Const->setDebugLoc(DebugLoc());
  Res.addDefToMIB(*getMRI(), Const);
  Const.addCImm(&Val);
  return Const;
}
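
// For example (sketch): a vector destination splats the scalar constant, so
// the call below emits one s32 G_CONSTANT feeding a G_BUILD_VECTOR with four
// identical uses; a plain s32 destination would get a single G_CONSTANT.
//   auto Splat = B.buildConstant(LLT::fixed_vector(4, 32), 42);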

MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
                                                    int64_t Val) {
  auto IntN = IntegerType::get(getMF().getFunction().getContext(),
                               Res.getLLTTy(*getMRI()).getScalarSizeInBits());
  ConstantInt *CI = ConstantInt::get(IntN, Val, true);
  return buildConstant(Res, *CI);
}

MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
                                                     const ConstantFP &Val) {
  LLT Ty = Res.getLLTTy(*getMRI());
  LLT EltTy = Ty.getScalarType();

  assert(APFloat::getSizeInBits(Val.getValueAPF().getSemantics())
         == EltTy.getSizeInBits() &&
         "creating fconstant with the wrong size");

  assert(!Ty.isPointer() && "invalid operand type");

  if (Ty.isVector()) {
    auto Const = buildInstr(TargetOpcode::G_FCONSTANT)
                     .addDef(getMRI()->createGenericVirtualRegister(EltTy))
                     .addFPImm(&Val);

    return buildSplatVector(Res, Const);
  }

  auto Const = buildInstr(TargetOpcode::G_FCONSTANT);
  Const->setDebugLoc(DebugLoc());
  Res.addDefToMIB(*getMRI(), Const);
  Const.addFPImm(&Val);
  return Const;
}

MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
                                                    const APInt &Val) {
  ConstantInt *CI = ConstantInt::get(getMF().getFunction().getContext(), Val);
  return buildConstant(Res, *CI);
}

MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
                                                     double Val) {
  LLT DstTy = Res.getLLTTy(*getMRI());
  auto &Ctx = getMF().getFunction().getContext();
  auto *CFP = ConstantFP::get(
      Ctx, getAPFloatFromSize(Val, DstTy.getScalarSizeInBits()));
  return buildFConstant(Res, *CFP);
}

MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
                                                     const APFloat &Val) {
  auto &Ctx = getMF().getFunction().getContext();
  auto *CFP = ConstantFP::get(Ctx, Val);
  return buildFConstant(Res, *CFP);
}

MachineInstrBuilder MachineIRBuilder::buildBrCond(const SrcOp &Tst,
                                                  MachineBasicBlock &Dest) {
  assert(Tst.getLLTTy(*getMRI()).isScalar() && "invalid operand type");

  auto MIB = buildInstr(TargetOpcode::G_BRCOND);
  Tst.addSrcToMIB(MIB);
  MIB.addMBB(&Dest);
  return MIB;
}

MachineInstrBuilder
MachineIRBuilder::buildLoad(const DstOp &Dst, const SrcOp &Addr,
                            MachinePointerInfo PtrInfo, Align Alignment,
                            MachineMemOperand::Flags MMOFlags,
                            const AAMDNodes &AAInfo) {
  MMOFlags |= MachineMemOperand::MOLoad;
  assert((MMOFlags & MachineMemOperand::MOStore) == 0);

  LLT Ty = Dst.getLLTTy(*getMRI());
  MachineMemOperand *MMO =
      getMF().getMachineMemOperand(PtrInfo, MMOFlags, Ty, Alignment, AAInfo);
  return buildLoad(Dst, Addr, *MMO);
}

MachineInstrBuilder MachineIRBuilder::buildLoadInstr(unsigned Opcode,
                                                     const DstOp &Res,
                                                     const SrcOp &Addr,
                                                     MachineMemOperand &MMO) {
  assert(Res.getLLTTy(*getMRI()).isValid() && "invalid operand type");
  assert(Addr.getLLTTy(*getMRI()).isPointer() && "invalid operand type");

  auto MIB = buildInstr(Opcode);
  Res.addDefToMIB(*getMRI(), MIB);
  Addr.addSrcToMIB(MIB);
  MIB.addMemOperand(&MMO);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildLoadFromOffset(
    const DstOp &Dst, const SrcOp &BasePtr, MachineMemOperand &BaseMMO,
    int64_t Offset) {
  LLT LoadTy = Dst.getLLTTy(*getMRI());
  MachineMemOperand *OffsetMMO =
      getMF().getMachineMemOperand(&BaseMMO, Offset, LoadTy);

  if (Offset == 0) // This may be a size or type changing load.
    return buildLoad(Dst, BasePtr, *OffsetMMO);

  LLT PtrTy = BasePtr.getLLTTy(*getMRI());
  LLT OffsetTy = LLT::scalar(PtrTy.getSizeInBits());
  auto ConstOffset = buildConstant(OffsetTy, Offset);
  auto Ptr = buildPtrAdd(PtrTy, BasePtr, ConstOffset);
  return buildLoad(Dst, Ptr, *OffsetMMO);
}
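
// For example (sketch): loading at Offset 8 from a p0 BasePtr emits an s64
// G_CONSTANT 8, a G_PTR_ADD, and the final G_LOAD, with the MMO rebased by 8;
// a zero offset reuses BasePtr directly.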

MachineInstrBuilder MachineIRBuilder::buildStore(const SrcOp &Val,
                                                 const SrcOp &Addr,
                                                 MachineMemOperand &MMO) {
  assert(Val.getLLTTy(*getMRI()).isValid() && "invalid operand type");
  assert(Addr.getLLTTy(*getMRI()).isPointer() && "invalid operand type");

  auto MIB = buildInstr(TargetOpcode::G_STORE);
  Val.addSrcToMIB(MIB);
  Addr.addSrcToMIB(MIB);
  MIB.addMemOperand(&MMO);
  return MIB;
}

MachineInstrBuilder
MachineIRBuilder::buildStore(const SrcOp &Val, const SrcOp &Addr,
                             MachinePointerInfo PtrInfo, Align Alignment,
                             MachineMemOperand::Flags MMOFlags,
                             const AAMDNodes &AAInfo) {
  MMOFlags |= MachineMemOperand::MOStore;
  assert((MMOFlags & MachineMemOperand::MOLoad) == 0);

  LLT Ty = Val.getLLTTy(*getMRI());
  MachineMemOperand *MMO =
      getMF().getMachineMemOperand(PtrInfo, MMOFlags, Ty, Alignment, AAInfo);
  return buildStore(Val, Addr, *MMO);
}

MachineInstrBuilder MachineIRBuilder::buildAnyExt(const DstOp &Res,
                                                  const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_ANYEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildSExt(const DstOp &Res,
                                                const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_SEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildZExt(const DstOp &Res,
                                                const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_ZEXT, Res, Op);
}

unsigned MachineIRBuilder::getBoolExtOp(bool IsVec, bool IsFP) const {
  const auto *TLI = getMF().getSubtarget().getTargetLowering();
  switch (TLI->getBooleanContents(IsVec, IsFP)) {
  case TargetLoweringBase::ZeroOrNegativeOneBooleanContent:
    return TargetOpcode::G_SEXT;
  case TargetLoweringBase::ZeroOrOneBooleanContent:
    return TargetOpcode::G_ZEXT;
  default:
    return TargetOpcode::G_ANYEXT;
  }
}
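
// For example (sketch): a target whose getBooleanContents() reports
// ZeroOrNegativeOneBooleanContent for vectors makes buildBoolExt emit G_SEXT
// for vector booleans, so a "true" lane becomes all ones.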

MachineInstrBuilder MachineIRBuilder::buildBoolExt(const DstOp &Res,
                                                   const SrcOp &Op,
                                                   bool IsFP) {
  unsigned ExtOp =
      getBoolExtOp(getMRI()->getType(Op.getReg()).isVector(), IsFP);
  return buildInstr(ExtOp, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildBoolExtInReg(const DstOp &Res,
                                                        const SrcOp &Op,
                                                        bool IsVector,
                                                        bool IsFP) {
  const auto *TLI = getMF().getSubtarget().getTargetLowering();
  switch (TLI->getBooleanContents(IsVector, IsFP)) {
  case TargetLoweringBase::ZeroOrNegativeOneBooleanContent:
    return buildSExtInReg(Res, Op, 1);
  case TargetLoweringBase::ZeroOrOneBooleanContent:
    return buildZExtInReg(Res, Op, 1);
  case TargetLoweringBase::UndefinedBooleanContent:
    return buildCopy(Res, Op);
  }

  llvm_unreachable("unexpected BooleanContent");
}

MachineInstrBuilder MachineIRBuilder::buildExtOrTrunc(unsigned ExtOpc,
                                                      const DstOp &Res,
                                                      const SrcOp &Op) {
  assert((TargetOpcode::G_ANYEXT == ExtOpc || TargetOpcode::G_ZEXT == ExtOpc ||
          TargetOpcode::G_SEXT == ExtOpc) &&
         "Expecting Extending Opc");
  assert(Res.getLLTTy(*getMRI()).isScalar() ||
         Res.getLLTTy(*getMRI()).isVector());
  assert(Res.getLLTTy(*getMRI()).isScalar() ==
         Op.getLLTTy(*getMRI()).isScalar());

  unsigned Opcode = TargetOpcode::COPY;
  if (Res.getLLTTy(*getMRI()).getSizeInBits() >
      Op.getLLTTy(*getMRI()).getSizeInBits())
    Opcode = ExtOpc;
  else if (Res.getLLTTy(*getMRI()).getSizeInBits() <
           Op.getLLTTy(*getMRI()).getSizeInBits())
    Opcode = TargetOpcode::G_TRUNC;
  else
    assert(Res.getLLTTy(*getMRI()) == Op.getLLTTy(*getMRI()));

  return buildInstr(Opcode, Res, Op);
}
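
// For example (sketch): with ExtOpc == G_ZEXT, an s32 -> s64 request emits
// G_ZEXT, an s64 -> s32 request emits G_TRUNC, and matching types fold to a
// plain COPY.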

MachineInstrBuilder MachineIRBuilder::buildSExtOrTrunc(const DstOp &Res,
                                                       const SrcOp &Op) {
  return buildExtOrTrunc(TargetOpcode::G_SEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildZExtOrTrunc(const DstOp &Res,
                                                       const SrcOp &Op) {
  return buildExtOrTrunc(TargetOpcode::G_ZEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildAnyExtOrTrunc(const DstOp &Res,
                                                         const SrcOp &Op) {
  return buildExtOrTrunc(TargetOpcode::G_ANYEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildZExtInReg(const DstOp &Res,
                                                     const SrcOp &Op,
                                                     int64_t ImmOp) {
  LLT ResTy = Res.getLLTTy(*getMRI());
  auto Mask = buildConstant(
      ResTy, APInt::getLowBitsSet(ResTy.getScalarSizeInBits(), ImmOp));
  return buildAnd(Res, Op, Mask);
}

MachineInstrBuilder MachineIRBuilder::buildCast(const DstOp &Dst,
                                                const SrcOp &Src) {
  LLT SrcTy = Src.getLLTTy(*getMRI());
  LLT DstTy = Dst.getLLTTy(*getMRI());
  if (SrcTy == DstTy)
    return buildCopy(Dst, Src);

  unsigned Opcode;
  if (SrcTy.isPointer() && DstTy.isScalar())
    Opcode = TargetOpcode::G_PTRTOINT;
  else if (DstTy.isPointer() && SrcTy.isScalar())
    Opcode = TargetOpcode::G_INTTOPTR;
  else {
    assert(!SrcTy.isPointer() && !DstTy.isPointer() &&
           "no G_ADDRSPACE_CAST yet");
    Opcode = TargetOpcode::G_BITCAST;
  }

  return buildInstr(Opcode, Dst, Src);
}
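
// For example (sketch): p0 -> s64 selects G_PTRTOINT, s64 -> p0 selects
// G_INTTOPTR, and a same-size non-pointer pair such as s64 -> <2 x s32> falls
// through to G_BITCAST.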

MachineInstrBuilder MachineIRBuilder::buildExtract(const DstOp &Dst,
                                                   const SrcOp &Src,
                                                   uint64_t Index) {
  LLT SrcTy = Src.getLLTTy(*getMRI());
  LLT DstTy = Dst.getLLTTy(*getMRI());

#ifndef NDEBUG
  assert(SrcTy.isValid() && "invalid operand type");
  assert(DstTy.isValid() && "invalid operand type");
  assert(Index + DstTy.getSizeInBits() <= SrcTy.getSizeInBits() &&
         "extracting off end of register");
#endif

  if (DstTy.getSizeInBits() == SrcTy.getSizeInBits()) {
    assert(Index == 0 && "extraction past the end of a register");
    return buildCast(Dst, Src);
  }

  auto Extract = buildInstr(TargetOpcode::G_EXTRACT);
  Dst.addDefToMIB(*getMRI(), Extract);
  Src.addSrcToMIB(Extract);
  Extract.addImm(Index);
  return Extract;
}

MachineInstrBuilder MachineIRBuilder::buildUndef(const DstOp &Res) {
  return buildInstr(TargetOpcode::G_IMPLICIT_DEF, {Res}, {});
}

MachineInstrBuilder MachineIRBuilder::buildMerge(const DstOp &Res,
                                                 ArrayRef<Register> Ops) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  assert(TmpVec.size() > 1);
  return buildInstr(TargetOpcode::G_MERGE_VALUES, Res, TmpVec);
}

MachineInstrBuilder
MachineIRBuilder::buildMerge(const DstOp &Res,
                             std::initializer_list<SrcOp> Ops) {
  assert(Ops.size() > 1);
  return buildInstr(TargetOpcode::G_MERGE_VALUES, Res, Ops);
}

MachineInstrBuilder MachineIRBuilder::buildUnmerge(ArrayRef<LLT> Res,
                                                   const SrcOp &Op) {
  // Unfortunately to convert from ArrayRef<LLT> to ArrayRef<DstOp>,
  // we need some temporary storage for the DstOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<DstOp, 8> TmpVec(Res.begin(), Res.end());
  assert(TmpVec.size() > 1);
  return buildInstr(TargetOpcode::G_UNMERGE_VALUES, TmpVec, Op);
}

MachineInstrBuilder MachineIRBuilder::buildUnmerge(LLT Res,
                                                   const SrcOp &Op) {
  unsigned NumReg =
      Op.getLLTTy(*getMRI()).getSizeInBits() / Res.getSizeInBits();
  SmallVector<DstOp, 8> TmpVec(NumReg, Res);
  return buildInstr(TargetOpcode::G_UNMERGE_VALUES, TmpVec, Op);
}
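
// For example (sketch): unmerging an s64 source with Res == s32 computes
// NumReg == 2 and defines two s32 registers covering the low and high halves.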

MachineInstrBuilder MachineIRBuilder::buildUnmerge(ArrayRef<Register> Res,
                                                   const SrcOp &Op) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<DstOp>,
  // we need some temporary storage for the DstOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<DstOp, 8> TmpVec(Res.begin(), Res.end());
  assert(TmpVec.size() > 1);
  return buildInstr(TargetOpcode::G_UNMERGE_VALUES, TmpVec, Op);
}

MachineInstrBuilder MachineIRBuilder::buildBuildVector(const DstOp &Res,
                                                       ArrayRef<Register> Ops) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  return buildInstr(TargetOpcode::G_BUILD_VECTOR, Res, TmpVec);
}

MachineInstrBuilder
MachineIRBuilder::buildBuildVectorConstant(const DstOp &Res,
                                           ArrayRef<APInt> Ops) {
  SmallVector<SrcOp> TmpVec;
  TmpVec.reserve(Ops.size());
  LLT EltTy = Res.getLLTTy(*getMRI()).getElementType();
  for (const auto &Op : Ops)
    TmpVec.push_back(buildConstant(EltTy, Op));
  return buildInstr(TargetOpcode::G_BUILD_VECTOR, Res, TmpVec);
}

MachineInstrBuilder MachineIRBuilder::buildSplatVector(const DstOp &Res,
                                                       const SrcOp &Src) {
  SmallVector<SrcOp, 8> TmpVec(Res.getLLTTy(*getMRI()).getNumElements(), Src);
  return buildInstr(TargetOpcode::G_BUILD_VECTOR, Res, TmpVec);
}

MachineInstrBuilder
MachineIRBuilder::buildBuildVectorTrunc(const DstOp &Res,
                                        ArrayRef<Register> Ops) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  return buildInstr(TargetOpcode::G_BUILD_VECTOR_TRUNC, Res, TmpVec);
}

MachineInstrBuilder MachineIRBuilder::buildShuffleSplat(const DstOp &Res,
                                                        const SrcOp &Src) {
  LLT DstTy = Res.getLLTTy(*getMRI());
  assert(Src.getLLTTy(*getMRI()) == DstTy.getElementType() &&
         "Expected Src to match Dst elt ty");
  auto UndefVec = buildUndef(DstTy);
  auto Zero = buildConstant(LLT::scalar(64), 0);
  auto InsElt = buildInsertVectorElement(DstTy, UndefVec, Src, Zero);
  SmallVector<int, 16> ZeroMask(DstTy.getNumElements());
  return buildShuffleVector(DstTy, InsElt, UndefVec, ZeroMask);
}
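
// For example (sketch): splatting an s32 source into <4 x s32> inserts the
// scalar into lane 0 of an undef vector, then shuffles with an all-zero mask,
// broadcasting lane 0 to every destination lane.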

MachineInstrBuilder MachineIRBuilder::buildShuffleVector(const DstOp &Res,
                                                         const SrcOp &Src1,
                                                         const SrcOp &Src2,
                                                         ArrayRef<int> Mask) {
  LLT DstTy = Res.getLLTTy(*getMRI());
  LLT Src1Ty = Src1.getLLTTy(*getMRI());
  LLT Src2Ty = Src2.getLLTTy(*getMRI());
  assert((size_t)(Src1Ty.getNumElements() + Src2Ty.getNumElements()) >=
         Mask.size());
  assert(DstTy.getElementType() == Src1Ty.getElementType() &&
         DstTy.getElementType() == Src2Ty.getElementType());
  (void)DstTy;
  (void)Src1Ty;
  (void)Src2Ty;
  ArrayRef<int> MaskAlloc = getMF().allocateShuffleMask(Mask);
  return buildInstr(TargetOpcode::G_SHUFFLE_VECTOR, {Res}, {Src1, Src2})
      .addShuffleMask(MaskAlloc);
}

MachineInstrBuilder
MachineIRBuilder::buildConcatVectors(const DstOp &Res, ArrayRef<Register> Ops) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  return buildInstr(TargetOpcode::G_CONCAT_VECTORS, Res, TmpVec);
}

MachineInstrBuilder MachineIRBuilder::buildInsert(const DstOp &Res,
                                                  const SrcOp &Src,
                                                  const SrcOp &Op,
                                                  unsigned Index) {
  assert(Index + Op.getLLTTy(*getMRI()).getSizeInBits() <=
             Res.getLLTTy(*getMRI()).getSizeInBits() &&
         "insertion past the end of a register");

  if (Res.getLLTTy(*getMRI()).getSizeInBits() ==
      Op.getLLTTy(*getMRI()).getSizeInBits()) {
    return buildCast(Res, Op);
  }

  return buildInstr(TargetOpcode::G_INSERT, Res, {Src, Op, uint64_t(Index)});
}

MachineInstrBuilder
MachineIRBuilder::buildIntrinsic(Intrinsic::ID ID,
                                 ArrayRef<Register> ResultRegs,
                                 bool HasSideEffects) {
  auto MIB =
      buildInstr(HasSideEffects ? TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS
                                : TargetOpcode::G_INTRINSIC);
  for (unsigned ResultReg : ResultRegs)
    MIB.addDef(ResultReg);
  MIB.addIntrinsicID(ID);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildIntrinsic(Intrinsic::ID ID,
                                                     ArrayRef<DstOp> Results,
                                                     bool HasSideEffects) {
  auto MIB =
      buildInstr(HasSideEffects ? TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS
                                : TargetOpcode::G_INTRINSIC);
  for (DstOp Result : Results)
    Result.addDefToMIB(*getMRI(), MIB);
  MIB.addIntrinsicID(ID);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildTrunc(const DstOp &Res,
                                                 const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_TRUNC, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildFPTrunc(const DstOp &Res,
                                                   const SrcOp &Op,
                                                   Optional<unsigned> Flags) {
  return buildInstr(TargetOpcode::G_FPTRUNC, Res, Op, Flags);
}

MachineInstrBuilder MachineIRBuilder::buildICmp(CmpInst::Predicate Pred,
                                                const DstOp &Res,
                                                const SrcOp &Op0,
                                                const SrcOp &Op1) {
  return buildInstr(TargetOpcode::G_ICMP, Res, {Pred, Op0, Op1});
}

MachineInstrBuilder MachineIRBuilder::buildFCmp(CmpInst::Predicate Pred,
                                                const DstOp &Res,
                                                const SrcOp &Op0,
                                                const SrcOp &Op1,
                                                Optional<unsigned> Flags) {
  return buildInstr(TargetOpcode::G_FCMP, Res, {Pred, Op0, Op1}, Flags);
}

MachineInstrBuilder MachineIRBuilder::buildSelect(const DstOp &Res,
                                                  const SrcOp &Tst,
                                                  const SrcOp &Op0,
                                                  const SrcOp &Op1,
                                                  Optional<unsigned> Flags) {
  return buildInstr(TargetOpcode::G_SELECT, {Res}, {Tst, Op0, Op1}, Flags);
}

MachineInstrBuilder
MachineIRBuilder::buildInsertVectorElement(const DstOp &Res, const SrcOp &Val,
                                           const SrcOp &Elt, const SrcOp &Idx) {
  return buildInstr(TargetOpcode::G_INSERT_VECTOR_ELT, Res, {Val, Elt, Idx});
}

MachineInstrBuilder
MachineIRBuilder::buildExtractVectorElement(const DstOp &Res, const SrcOp &Val,
                                            const SrcOp &Idx) {
  return buildInstr(TargetOpcode::G_EXTRACT_VECTOR_ELT, Res, {Val, Idx});
}

MachineInstrBuilder MachineIRBuilder::buildAtomicCmpXchgWithSuccess(
    Register OldValRes, Register SuccessRes, Register Addr, Register CmpVal,
    Register NewVal, MachineMemOperand &MMO) {
#ifndef NDEBUG
  LLT OldValResTy = getMRI()->getType(OldValRes);
  LLT SuccessResTy = getMRI()->getType(SuccessRes);
  LLT AddrTy = getMRI()->getType(Addr);
  LLT CmpValTy = getMRI()->getType(CmpVal);
  LLT NewValTy = getMRI()->getType(NewVal);
  assert(OldValResTy.isScalar() && "invalid operand type");
  assert(SuccessResTy.isScalar() && "invalid operand type");
  assert(AddrTy.isPointer() && "invalid operand type");
  assert(CmpValTy.isValid() && "invalid operand type");
  assert(NewValTy.isValid() && "invalid operand type");
  assert(OldValResTy == CmpValTy && "type mismatch");
  assert(OldValResTy == NewValTy && "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG_WITH_SUCCESS)
      .addDef(OldValRes)
      .addDef(SuccessRes)
      .addUse(Addr)
      .addUse(CmpVal)
      .addUse(NewVal)
      .addMemOperand(&MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicCmpXchg(Register OldValRes, Register Addr,
                                     Register CmpVal, Register NewVal,
                                     MachineMemOperand &MMO) {
#ifndef NDEBUG
  LLT OldValResTy = getMRI()->getType(OldValRes);
  LLT AddrTy = getMRI()->getType(Addr);
  LLT CmpValTy = getMRI()->getType(CmpVal);
  LLT NewValTy = getMRI()->getType(NewVal);
  assert(OldValResTy.isScalar() && "invalid operand type");
  assert(AddrTy.isPointer() && "invalid operand type");
  assert(CmpValTy.isValid() && "invalid operand type");
  assert(NewValTy.isValid() && "invalid operand type");
  assert(OldValResTy == CmpValTy && "type mismatch");
  assert(OldValResTy == NewValTy && "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG)
      .addDef(OldValRes)
      .addUse(Addr)
      .addUse(CmpVal)
      .addUse(NewVal)
      .addMemOperand(&MMO);
}

MachineInstrBuilder MachineIRBuilder::buildAtomicRMW(unsigned Opcode,
                                                     const DstOp &OldValRes,
                                                     const SrcOp &Addr,
                                                     const SrcOp &Val,
                                                     MachineMemOperand &MMO) {
#ifndef NDEBUG
  LLT OldValResTy = OldValRes.getLLTTy(*getMRI());
  LLT AddrTy = Addr.getLLTTy(*getMRI());
  LLT ValTy = Val.getLLTTy(*getMRI());
  assert(OldValResTy.isScalar() && "invalid operand type");
  assert(AddrTy.isPointer() && "invalid operand type");
  assert(ValTy.isValid() && "invalid operand type");
  assert(OldValResTy == ValTy && "type mismatch");
  assert(MMO.isAtomic() && "not atomic mem operand");
#endif

  auto MIB = buildInstr(Opcode);
  OldValRes.addDefToMIB(*getMRI(), MIB);
  Addr.addSrcToMIB(MIB);
  Val.addSrcToMIB(MIB);
  MIB.addMemOperand(&MMO);
  return MIB;
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWXchg(Register OldValRes, Register Addr,
                                     Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XCHG, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWAdd(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_ADD, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWSub(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_SUB, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWAnd(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_AND, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWNand(Register OldValRes, Register Addr,
                                     Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_NAND, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder MachineIRBuilder::buildAtomicRMWOr(Register OldValRes,
                                                       Register Addr,
                                                       Register Val,
                                                       MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_OR, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWXor(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XOR, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWMax(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MAX, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWMin(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MIN, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWUmax(Register OldValRes, Register Addr,
                                     Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMAX, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWUmin(Register OldValRes, Register Addr,
                                     Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMIN, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWFAdd(const DstOp &OldValRes, const SrcOp &Addr,
                                     const SrcOp &Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_FADD, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWFSub(const DstOp &OldValRes, const SrcOp &Addr,
                                     const SrcOp &Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_FSUB, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWFMax(const DstOp &OldValRes, const SrcOp &Addr,
                                     const SrcOp &Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_FMAX, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWFMin(const DstOp &OldValRes, const SrcOp &Addr,
                                     const SrcOp &Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_FMIN, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildFence(unsigned Ordering, unsigned Scope) {
  return buildInstr(TargetOpcode::G_FENCE)
      .addImm(Ordering)
      .addImm(Scope);
}

MachineInstrBuilder
MachineIRBuilder::buildBlockAddress(Register Res, const BlockAddress *BA) {
#ifndef NDEBUG
  assert(getMRI()->getType(Res).isPointer() && "invalid res type");
#endif

  return buildInstr(TargetOpcode::G_BLOCK_ADDR).addDef(Res).addBlockAddress(BA);
}

void MachineIRBuilder::validateTruncExt(const LLT DstTy, const LLT SrcTy,
                                        bool IsExtend) {
#ifndef NDEBUG
  if (DstTy.isVector()) {
    assert(SrcTy.isVector() && "mismatched cast between vector and non-vector");
    assert(SrcTy.getNumElements() == DstTy.getNumElements() &&
           "different number of elements in a trunc/ext");
  } else
    assert(DstTy.isScalar() && SrcTy.isScalar() && "invalid extend/trunc");

  if (IsExtend)
    assert(DstTy.getSizeInBits() > SrcTy.getSizeInBits() &&
           "invalid narrowing extend");
  else
    assert(DstTy.getSizeInBits() < SrcTy.getSizeInBits() &&
           "invalid widening trunc");
#endif
}

void MachineIRBuilder::validateSelectOp(const LLT ResTy, const LLT TstTy,
                                        const LLT Op0Ty, const LLT Op1Ty) {
#ifndef NDEBUG
  assert((ResTy.isScalar() || ResTy.isVector() || ResTy.isPointer()) &&
         "invalid operand type");
  assert((ResTy == Op0Ty && ResTy == Op1Ty) && "type mismatch");
  if (ResTy.isScalar() || ResTy.isPointer())
    assert(TstTy.isScalar() && "type mismatch");
  else
    assert((TstTy.isScalar() ||
            (TstTy.isVector() &&
             TstTy.getNumElements() == Op0Ty.getNumElements())) &&
           "type mismatch");
#endif
}

MachineInstrBuilder MachineIRBuilder::buildInstr(unsigned Opc,
                                                 ArrayRef<DstOp> DstOps,
                                                 ArrayRef<SrcOp> SrcOps,
                                                 Optional<unsigned> Flags) {
  switch (Opc) {
  default:
    break;
  case TargetOpcode::G_SELECT: {
    assert(DstOps.size() == 1 && "Invalid select");
    assert(SrcOps.size() == 3 && "Invalid select");
    validateSelectOp(
        DstOps[0].getLLTTy(*getMRI()), SrcOps[0].getLLTTy(*getMRI()),
        SrcOps[1].getLLTTy(*getMRI()), SrcOps[2].getLLTTy(*getMRI()));
    break;
  }
  case TargetOpcode::G_FNEG:
  case TargetOpcode::G_ABS:
    // All these are unary ops.
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 1 && "Invalid Srcs");
    validateUnaryOp(DstOps[0].getLLTTy(*getMRI()),
                    SrcOps[0].getLLTTy(*getMRI()));
    break;
  case TargetOpcode::G_ADD:
  case TargetOpcode::G_AND:
  case TargetOpcode::G_MUL:
  case TargetOpcode::G_OR:
  case TargetOpcode::G_SUB:
  case TargetOpcode::G_XOR:
  case TargetOpcode::G_UDIV:
  case TargetOpcode::G_SDIV:
  case TargetOpcode::G_UREM:
  case TargetOpcode::G_SREM:
  case TargetOpcode::G_SMIN:
  case TargetOpcode::G_SMAX:
  case TargetOpcode::G_UMIN:
  case TargetOpcode::G_UMAX:
  case TargetOpcode::G_UADDSAT:
  case TargetOpcode::G_SADDSAT:
  case TargetOpcode::G_USUBSAT:
  case TargetOpcode::G_SSUBSAT: {
    // All these are binary ops.
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 2 && "Invalid Srcs");
    validateBinaryOp(DstOps[0].getLLTTy(*getMRI()),
                     SrcOps[0].getLLTTy(*getMRI()),
                     SrcOps[1].getLLTTy(*getMRI()));
    break;
  }
  case TargetOpcode::G_SHL:
  case TargetOpcode::G_ASHR:
  case TargetOpcode::G_LSHR:
  case TargetOpcode::G_USHLSAT:
  case TargetOpcode::G_SSHLSAT: {
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 2 && "Invalid Srcs");
    validateShiftOp(DstOps[0].getLLTTy(*getMRI()),
                    SrcOps[0].getLLTTy(*getMRI()),
                    SrcOps[1].getLLTTy(*getMRI()));
    break;
  }
  case TargetOpcode::G_SEXT:
  case TargetOpcode::G_ZEXT:
  case TargetOpcode::G_ANYEXT:
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 1 && "Invalid Srcs");
    validateTruncExt(DstOps[0].getLLTTy(*getMRI()),
                     SrcOps[0].getLLTTy(*getMRI()), true);
    break;
  case TargetOpcode::G_TRUNC:
  case TargetOpcode::G_FPTRUNC: {
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 1 && "Invalid Srcs");
    validateTruncExt(DstOps[0].getLLTTy(*getMRI()),
                     SrcOps[0].getLLTTy(*getMRI()), false);
    break;
  }
  case TargetOpcode::G_BITCAST: {
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 1 && "Invalid Srcs");
    assert(DstOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
           SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() && "invalid bitcast");
    break;
  }
  case TargetOpcode::COPY:
    assert(DstOps.size() == 1 && "Invalid Dst");
    // If the caller wants to add a subreg source it has to be done separately
    // so we may not have any SrcOps at this point yet.
    break;
  case TargetOpcode::G_FCMP:
  case TargetOpcode::G_ICMP: {
    assert(DstOps.size() == 1 && "Invalid Dst Operands");
    assert(SrcOps.size() == 3 && "Invalid Src Operands");
    // For F/ICMP, the first src operand is the predicate, followed by
    // the two comparands.
    assert(SrcOps[0].getSrcOpKind() == SrcOp::SrcType::Ty_Predicate &&
           "Expecting predicate");
    assert([&]() -> bool {
      CmpInst::Predicate Pred = SrcOps[0].getPredicate();
      return Opc == TargetOpcode::G_ICMP ? CmpInst::isIntPredicate(Pred)
                                         : CmpInst::isFPPredicate(Pred);
    }() && "Invalid predicate");
    assert(SrcOps[1].getLLTTy(*getMRI()) == SrcOps[2].getLLTTy(*getMRI()) &&
           "Type mismatch");
    assert([&]() -> bool {
      LLT Op0Ty = SrcOps[1].getLLTTy(*getMRI());
      LLT DstTy = DstOps[0].getLLTTy(*getMRI());
      if (Op0Ty.isScalar() || Op0Ty.isPointer())
        return DstTy.isScalar();
      else
        return DstTy.isVector() &&
               DstTy.getNumElements() == Op0Ty.getNumElements();
    }() && "Type Mismatch");
    break;
  }
  case TargetOpcode::G_UNMERGE_VALUES: {
    assert(!DstOps.empty() && "Invalid trivial sequence");
    assert(SrcOps.size() == 1 && "Invalid src for Unmerge");
    assert(llvm::all_of(DstOps,
                        [&, this](const DstOp &Op) {
                          return Op.getLLTTy(*getMRI()) ==
                                 DstOps[0].getLLTTy(*getMRI());
                        }) &&
           "type mismatch in output list");
    assert((TypeSize::ScalarTy)DstOps.size() *
                   DstOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "input operands do not cover output register");
    break;
  }
  case TargetOpcode::G_MERGE_VALUES: {
    assert(!SrcOps.empty() && "invalid trivial sequence");
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(llvm::all_of(SrcOps,
                        [&, this](const SrcOp &Op) {
                          return Op.getLLTTy(*getMRI()) ==
                                 SrcOps[0].getLLTTy(*getMRI());
                        }) &&
           "type mismatch in input list");
    assert((TypeSize::ScalarTy)SrcOps.size() *
                   SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "input operands do not cover output register");
    if (SrcOps.size() == 1)
      return buildCast(DstOps[0], SrcOps[0]);
    if (DstOps[0].getLLTTy(*getMRI()).isVector()) {
      if (SrcOps[0].getLLTTy(*getMRI()).isVector())
        return buildInstr(TargetOpcode::G_CONCAT_VECTORS, DstOps, SrcOps);
      return buildInstr(TargetOpcode::G_BUILD_VECTOR, DstOps, SrcOps);
    }
    break;
  }
  case TargetOpcode::G_EXTRACT_VECTOR_ELT: {
    assert(DstOps.size() == 1 && "Invalid Dst size");
    assert(SrcOps.size() == 2 && "Invalid Src size");
    assert(SrcOps[0].getLLTTy(*getMRI()).isVector() && "Invalid operand type");
    assert((DstOps[0].getLLTTy(*getMRI()).isScalar() ||
            DstOps[0].getLLTTy(*getMRI()).isPointer()) &&
           "Invalid operand type");
    assert(SrcOps[1].getLLTTy(*getMRI()).isScalar() && "Invalid operand type");
    assert(SrcOps[0].getLLTTy(*getMRI()).getElementType() ==
               DstOps[0].getLLTTy(*getMRI()) &&
           "Type mismatch");
    break;
  }
  case TargetOpcode::G_INSERT_VECTOR_ELT: {
    assert(DstOps.size() == 1 && "Invalid dst size");
    assert(SrcOps.size() == 3 && "Invalid src size");
    assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
           SrcOps[0].getLLTTy(*getMRI()).isVector() && "Invalid operand type");
    assert(DstOps[0].getLLTTy(*getMRI()).getElementType() ==
               SrcOps[1].getLLTTy(*getMRI()) &&
           "Type mismatch");
    assert(SrcOps[2].getLLTTy(*getMRI()).isScalar() && "Invalid index");
    assert(DstOps[0].getLLTTy(*getMRI()).getNumElements() ==
               SrcOps[0].getLLTTy(*getMRI()).getNumElements() &&
           "Type mismatch");
    break;
  }
  case TargetOpcode::G_BUILD_VECTOR: {
    assert(SrcOps.size() >= 2 && "Must have at least 2 operands");
    assert(DstOps.size() == 1 && "Invalid DstOps");
    assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
           "Res type must be a vector");
    assert(llvm::all_of(SrcOps,
                        [&, this](const SrcOp &Op) {
                          return Op.getLLTTy(*getMRI()) ==
                                 SrcOps[0].getLLTTy(*getMRI());
                        }) &&
           "type mismatch in input list");
    assert((TypeSize::ScalarTy)SrcOps.size() *
                   SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "input scalars do not exactly cover the output vector register");
    break;
  }
  case TargetOpcode::G_BUILD_VECTOR_TRUNC: {
    assert(SrcOps.size() >= 2 && "Must have at least 2 operands");
    assert(DstOps.size() == 1 && "Invalid DstOps");
    assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
           "Res type must be a vector");
    assert(llvm::all_of(SrcOps,
                        [&, this](const SrcOp &Op) {
                          return Op.getLLTTy(*getMRI()) ==
                                 SrcOps[0].getLLTTy(*getMRI());
                        }) &&
           "type mismatch in input list");
    if (SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
        DstOps[0].getLLTTy(*getMRI()).getElementType().getSizeInBits())
      return buildInstr(TargetOpcode::G_BUILD_VECTOR, DstOps, SrcOps);
    break;
  }
  case TargetOpcode::G_CONCAT_VECTORS: {
    assert(DstOps.size() == 1 && "Invalid DstOps");
    assert(SrcOps.size() >= 2 && "Must have at least 2 operands");
    assert(llvm::all_of(SrcOps,
                        [&, this](const SrcOp &Op) {
                          return (Op.getLLTTy(*getMRI()).isVector() &&
                                  Op.getLLTTy(*getMRI()) ==
                                      SrcOps[0].getLLTTy(*getMRI()));
                        }) &&
           "type mismatch in input list");
    assert((TypeSize::ScalarTy)SrcOps.size() *
                   SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "input vectors do not exactly cover the output vector register");
    break;
  }
  case TargetOpcode::G_UADDE: {
    assert(DstOps.size() == 2 && "Invalid no of dst operands");
    assert(SrcOps.size() == 3 && "Invalid no of src operands");
    assert(DstOps[0].getLLTTy(*getMRI()).isScalar() && "Invalid operand");
    assert((DstOps[0].getLLTTy(*getMRI()) == SrcOps[0].getLLTTy(*getMRI())) &&
           (DstOps[0].getLLTTy(*getMRI()) == SrcOps[1].getLLTTy(*getMRI())) &&
           "Invalid operand");
    assert(DstOps[1].getLLTTy(*getMRI()).isScalar() && "Invalid operand");
    assert(DstOps[1].getLLTTy(*getMRI()) == SrcOps[2].getLLTTy(*getMRI()) &&
           "type mismatch");
    break;
  }
  }

  auto MIB = buildInstr(Opc);
  for (const DstOp &Op : DstOps)
    Op.addDefToMIB(*getMRI(), MIB);
  for (const SrcOp &Op : SrcOps)
    Op.addSrcToMIB(MIB);
  if (Flags)
    MIB->setFlags(*Flags);
  return MIB;
}