| /freebsd/contrib/llvm-project/lldb/source/Plugins/Instruction/RISCV/ |
| RISCVCInstructions.h |
|  166  return ADDI{rd, Rs{0}, uint32_t(imm)};  in DecodeC_LI()
|  167  return ADDI{rd, Rs{0}, uint32_t(int32_t(int8_t(imm | 0xc0)))};  in DecodeC_LI()
|  183  return ADDI{Rd{gpr_sp_riscv}, Rs{gpr_sp_riscv}, uint32_t(nzimm)};  in DecodeC_LUI_ADDI16SP()
|  184  return ADDI{Rd{gpr_sp_riscv}, Rs{gpr_sp_riscv},  in DecodeC_LUI_ADDI16SP()
|  200  return ADDI{rd, rd, uint32_t(imm)};  in DecodeC_ADDI()
|  201  return ADDI{rd, rd, uint32_t(int32_t(int8_t(imm | 0xc0)))};  in DecodeC_ADDI()
|  225  return ADDI{rd, Rs{gpr_sp_riscv}, uint32_t(nzuimm)};  in DecodeC_ADDI4SPN()
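
The int8_t(imm | 0xc0) casts in the DecodeC_LI()/DecodeC_ADDI() hits above sign-extend the 6-bit compressed immediate before it is handed to ADDI; the paired return statements at 166/167 and 200/201 are the non-negative and negative branches. A minimal standalone sketch of that idiom, assuming bit 5 of the raw field is the sign bit; SignExtend6 and the test program are illustrative, not part of the lldb plugin:

    #include <cstdint>
    #include <cstdio>

    // Sign-extend a raw 6-bit immediate the way the decoder snippets do: when
    // bit 5 is set, OR-ing in 0xc0 also fills bits 6..7, so the int8_t cast
    // sees the negative two's-complement value, which then widens to 32 bits.
    static uint32_t SignExtend6(uint8_t imm) {
      if (imm & 0x20)
        return uint32_t(int32_t(int8_t(imm | 0xc0)));
      return uint32_t(imm);
    }

    int main() {
      std::printf("%d\n", int32_t(SignExtend6(0x3f))); // prints -1
      std::printf("%d\n", int32_t(SignExtend6(0x1f))); // prints 31
      return 0;
    }
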
|
| RISCVInstructions.h |
|  121  I_TYPE_INST(ADDI);
|  276  LUI, AUIPC, JAL, JALR, B, LB, LH, LW, LBU, LHU, SB, SH, SW, ADDI, SLTI,
|
| /freebsd/contrib/llvm-project/llvm/lib/Target/RISCV/MCTargetDesc/ |
| RISCVMatInt.cpp |
|  29   case RISCV::ADDI:  in getInstSeqCost()
|  75   unsigned AddiOpc = (IsRV64 && Hi20) ? RISCV::ADDIW : RISCV::ADDI;  in generateInstSeqImpl()
|  155  Res.emplace_back(RISCV::ADDI, Lo12);  in generateInstSeqImpl()
|  274  TmpSeq.emplace_back(RISCV::ADDI, Imm12);  in generateInstSeq()
|  405  TmpSeq.emplace_back(RISCV::ADDI, Lo12);  in generateInstSeq()
|  420  TmpSeq.emplace_back(RISCV::ADDI, NegImm12);  in generateInstSeq()
|  534  case RISCV::ADDI:  in getOpndKind()
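
The Lo12/Hi20 values fed to RISCV::ADDI here belong to LLVM's LUI+ADDI constant materialization (with ADDIW replacing ADDI on RV64 when Hi20 is non-zero, as line 75 shows). A rough self-contained sketch of the split for a 32-bit constant; splitImm32 is an illustrative helper, not the RISCVMatInt API:

    #include <cassert>
    #include <cstdint>

    struct LuiAddi {
      int64_t Hi20; // immediate for LUI
      int64_t Lo12; // immediate for the trailing ADDI
    };

    // Split Val so that (Hi20 << 12) + Lo12 == Val with Lo12 a signed 12-bit
    // value. Because ADDI sign-extends its immediate, Hi20 is rounded up by
    // +0x800 whenever the low 12 bits will be interpreted as negative.
    static LuiAddi splitImm32(int32_t Val) {
      int64_t Lo12 = Val & 0xfff;
      if (Lo12 >= 0x800)
        Lo12 -= 0x1000;
      int64_t Hi20 = ((int64_t(Val) + 0x800) >> 12) & 0xfffff;
      return {Hi20, Lo12};
    }

    int main() {
      LuiAddi S = splitImm32(0x12345fff);
      assert(int32_t((S.Hi20 << 12) + S.Lo12) == 0x12345fff); // lui + addi round-trips
      return 0;
    }
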
|
| /freebsd/contrib/llvm-project/llvm/lib/Target/RISCV/ |
| RISCVMergeBaseOffset.cpp |
|  111  if (Lo->getOpcode() != RISCV::ADDI)  in INITIALIZE_PASS()
|  195  if (OffsetTail.getOpcode() == RISCV::ADDI ||  in foldLargeOffset()
|  276  if (OffsetTail.getOpcode() != RISCV::ADDI)  in foldShiftedOffset()
|  319  case RISCV::ADDI: {  in detectAndFoldOffset()
|  327  if (TailTail.getOpcode() == RISCV::ADDI) {  in detectAndFoldOffset()
|
| RISCVOptWInstrs.cpp |
|  297  case RISCV::ADDI:  in hasAllNBitUsers()
|  367  case RISCV::ADDI:  in isSignExtendingOpW()
|  591  case RISCV::ADDI:  in isSignExtendedW()
|  612  case RISCV::ADDI:  in getWOp()
|  694  case RISCV::ADDIW: Opc = RISCV::ADDI; break;  in stripWSuffixes()
|  724  case RISCV::ADDI:  in appendWSuffixes()
|
| RISCVRegisterInfo.cpp |
|  235  BuildMI(MBB, II, DL, TII->get(RISCV::ADDI), DestReg)  in adjustReg()
|  253  BuildMI(MBB, II, DL, TII->get(RISCV::ADDI), DestReg)  in adjustReg()
|  257  BuildMI(MBB, II, DL, TII->get(RISCV::ADDI), DestReg)  in adjustReg()
|  493  if (Opc == RISCV::ADDI && !isInt<12>(Val)) {  in eliminateFrameIndex()
|  524  if (MI.getOpcode() == RISCV::ADDI)  in eliminateFrameIndex()
|  540  if (MI.getOpcode() == RISCV::ADDI &&  in eliminateFrameIndex()
|  677  BuildMI(*MBB, MBBI, DL, TII->get(RISCV::ADDI), BaseReg)  in materializeFrameBaseRegister()
|  864  case RISCV::ADDI:  in getRegAllocationHints()
|
| RISCVAsmPrinter.cpp |
|  560  EmitToStreamer(*OutStreamer, MCInstBuilder(RISCV::ADDI)  in LowerKCFI_CHECK()
|  595  : RISCV::ADDI)  in LowerKCFI_CHECK()
|  696  OutStreamer->emitInstruction(MCInstBuilder(RISCV::ADDI)  in EmitHwasanMemaccessSymbols()
|  714  OutStreamer->emitInstruction(MCInstBuilder(RISCV::ADDI)  in EmitHwasanMemaccessSymbols()
|  776  OutStreamer->emitInstruction(MCInstBuilder(RISCV::ADDI)  in EmitHwasanMemaccessSymbols()
|  806  OutStreamer->emitInstruction(MCInstBuilder(RISCV::ADDI)  in EmitHwasanMemaccessSymbols()
|  812  MCInstBuilder(RISCV::ADDI)  in EmitHwasanMemaccessSymbols()
|
| RISCVExpandPseudoInsts.cpp |
|  201  BuildMI(TrueBB, DL, TII->get(RISCV::ADDI), DestReg)  in expandCCOp()
|  217  case RISCV::PseudoCCADDI: NewOpc = RISCV::ADDI; break;  in expandCCOp()
|  551  RISCV::ADDI);  in expandLoadLocalAddress()
|  574  RISCV::ADDI);  in expandLoadTLSGDAddress()
|  605  BuildMI(MBB, MBBI, DL, TII->get(RISCV::ADDI), RISCV::X10)  in expandLoadTLSDescAddress()
|
| RISCVMacroFusion.cpp |
| RISCVInstrInfo.td |
|  429   // Check if (add r, imm) can be optimized to (ADDI (ADDI r, imm0), imm1),
|  647   // ADDI isn't always rematerializable, but isReMaterializable will be used as
|  650   def ADDI : ALU_ri<0b000, "addi">;
|  859   def : InstAlias<"nop", (ADDI X0, X0, 0)>;
|  886   def : InstAlias<"li $rd, $imm", (ADDI GPR:$rd, X0, simm12:$imm)>;
|  887   def : InstAlias<"mv $rd, $rs", (ADDI GPR:$rd, GPR:$rs, 0)>;
|  1022  (ADDI GPR:$rd, GPR:$rs1, simm12:$imm12)>;
|  1258  def : PatGprSimm12<add, ADDI>;
|  1270  // Select 'or' as ADDI if the immediate bits are known to be 0 in $rs1. This
|  1279  def : PatGprSimm12<or_is_add, ADDI>;
|  [all …]
|
| RISCVPostRAExpandPseudoInsts.cpp |
|  114  BuildMI(MBB, MBBI, DL, TII->get(RISCV::ADDI))  in expandMovAddr()
|
| RISCVInstrInfoC.td |
|  860   def : CompressPat<(ADDI GPRC:$rd, SP:$rs1, uimm10_lsb00nonzero:$imm),
|  906   def : CompressPat<(ADDI X0, X0, 0), (C_NOP)>;
|  907   def : CompressPat<(ADDI GPRNoX0:$rs1, GPRNoX0:$rs1, simm6nonzero:$imm),
|  922   def : CompressPat<(ADDI GPRNoX0:$rd, X0, simm6:$imm),
|  924   def : CompressPat<(ADDI X2, X2, simm10_lsb0000nonzero:$imm),
|  1016  def : CompressPat<(ADDI GPRNoX0:$rs1, GPRNoX0:$rs2, 0),
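
These CompressPats spell out when a full-width ADDI can be re-emitted as a 16-bit C instruction. A rough classifier over the register/immediate constraints visible above, covering only the non-stack-pointer patterns (C.ADDI4SPN and C.ADDI16SP from lines 860 and 924 add alignment and range checks omitted here); compressedFormOfAddi is an illustrative helper, not LLVM's compression logic:

    #include <cstdint>
    #include <cstdio>

    // Registers are plain indices: 0 is x0/zero. Mirrors the operand classes
    // above: GPRNoX0 means "any register except x0", simm6 is [-32, 31].
    static const char *compressedFormOfAddi(unsigned Rd, unsigned Rs1, int64_t Imm) {
      const bool IsSImm6 = Imm >= -32 && Imm <= 31;
      if (Rd == 0 && Rs1 == 0 && Imm == 0)
        return "c.nop";                       // ADDI X0, X0, 0
      if (Rd != 0 && Rs1 == 0 && IsSImm6)
        return "c.li";                        // ADDI rd, X0, simm6
      if (Rd != 0 && Rd == Rs1 && Imm != 0 && IsSImm6)
        return "c.addi";                      // ADDI rs1, rs1, simm6nonzero
      if (Rd != 0 && Rs1 != 0 && Imm == 0)
        return "c.mv";                        // ADDI rd, rs2, 0
      return "not covered by these patterns";
    }

    int main() {
      std::printf("%s\n", compressedFormOfAddi(10, 0, 5));  // c.li
      std::printf("%s\n", compressedFormOfAddi(10, 10, 4)); // c.addi
      std::printf("%s\n", compressedFormOfAddi(10, 11, 0)); // c.mv
      return 0;
    }
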
|
| RISCVInstrInfo.cpp |
|  84    return MCInstBuilder(RISCV::ADDI)  in getNop()
|  447   BuildMI(MBB, MBBI, DL, get(RISCV::ADDI), DstReg)  in copyPhysReg()
|  454   // Emit an ADDI for both parts of GPRPair.  in copyPhysReg()
|  455   BuildMI(MBB, MBBI, DL, get(RISCV::ADDI),  in copyPhysReg()
|  460   BuildMI(MBB, MBBI, DL, get(RISCV::ADDI),  in copyPhysReg()
|  1177  // Right now we only care about LI (i.e. ADDI x0, imm)  in optimizeCondBranch()
|  1179  if (MI->getOpcode() == RISCV::ADDI && MI->getOperand(1).isReg() &&  in optimizeCondBranch()
|  1305  case RISCV::ADDI: return RISCV::PseudoCCADDI; break;  in getPredicatedOpcode()
|  1348  if (MI->getOpcode() == RISCV::ADDI && MI->getOperand(1).isReg() &&  in canFoldAsPredicatedOp()
|  1539  case RISCV::ADDI  in isAsCheapAsAMove()
|  [all...]
| RISCVFrameLowering.cpp |
|  88    BuildMI(MBB, MI, DL, TII->get(RISCV::ADDI))  in emitSCSPrologue()
|  153   BuildMI(MBB, MI, DL, TII->get(RISCV::ADDI))  in emitSCSEpilogue()
|  749   BuildMI(MBB, MBBI, DL, TII->get(RISCV::ADDI), BPReg)  in emitPrologue()
|  1150  } else if (MI.getOpcode() == RISCV::ADDI && IsScalableVectorID) {  in getScavSlotsNumForRVV()
|
| RISCVMakeCompressible.cpp |
|  410  BuildMI(MBB, MI, MI.getDebugLoc(), TII.get(RISCV::ADDI), NewReg)  in runOnMachineFunction()
|
| RISCVExpandAtomicPseudoInsts.cpp |
|  352  BuildMI(LoopMBB, DL, TII->get(RISCV::ADDI), ScratchReg)  in doMaskedAtomicBinOpExpansion()
|  492  BuildMI(LoopHeadMBB, DL, TII->get(RISCV::ADDI), Scratch1Reg)  in expandAtomicMinMaxOp()
|
| /freebsd/contrib/llvm-project/llvm/lib/Target/PowerPC/ |
| PPCMacroFusion.def |
|  35   FUSION_OP_SET(ADDI, ADDI8, ADDItocL, ADDItocL8), \
|  138  FUSION_OP_SET(ADDI, ADDI8, ADDItocL8, ADDItocL))
|  142  FUSION_OP_SET(ADDI, ADDI8, ADDItocL8, ADDItocL),
|
| PPCBack2BackFusion.def |
|  21   ADDI,
|  510  ADDI,
|
| PPCMachineScheduler.cpp |
|  25  return Cand.SU->getInstr()->getOpcode() == PPC::ADDI ||  in isADDIInstr()
|
| PPCCTRLoops.cpp |
|  253  unsigned ADDIOpcode = Is64Bit ? PPC::ADDI8 : PPC::ADDI;  in expandNormalLoops()
|
| PPCRegisterInfo.cpp |
|  110   ImmToIdxMap[PPC::ADDI] = PPC::ADD4;  in PPCRegisterInfo()
|  779   BuildMI(MBB, II, dl, TII.get(PPC::ADDI), MI.getOperand(0).getReg())  in lowerDynamicAlloc()
|  832   BuildMI(MBB, II, dl, TII.get(PPC::ADDI), FramePointer)  in prepareDynamicAlloca()
|  1872  if ((OpC == PPC::ADDI || OpC == PPC::ADDI8) &&  in needsFrameBaseReg()
|  1902  unsigned ADDriOpc = TM.isPPC64() ? PPC::ADDI8 : PPC::ADDI;  in materializeFrameBaseRegister()
|
| /freebsd/contrib/llvm-project/llvm/lib/Target/LoongArch/AsmParser/ |
| LoongArchAsmParser.cpp |
|  942   unsigned ADDI = is64Bit() ? LoongArch::ADDI_D : LoongArch::ADDI_W;  in emitLoadAddressPcrel()  local
|  947   LoongArchAsmParser::Inst(ADDI, LoongArchMCExpr::VK_LoongArch_PCALA_LO12));  in emitLoadAddressPcrel()
|  1098  unsigned ADDI = is64Bit() ? LoongArch::ADDI_D : LoongArch::ADDI_W;  in emitLoadAddressTLSLD()  local
|  1103  ADDI, LoongArchMCExpr::VK_LoongArch_GOT_PC_LO12));  in emitLoadAddressTLSLD()
|  1144  unsigned ADDI = is64Bit() ? LoongArch::ADDI_D : LoongArch::ADDI_W;  in emitLoadAddressTLSGD()  local
|  1149  ADDI, LoongArchMCExpr::VK_LoongArch_GOT_PC_LO12));  in emitLoadAddressTLSGD()
|  1234  unsigned ADDI = is64Bit() ? LoongArch::ADDI_D : LoongArch::ADDI_W;  in emitLoadAddressTLSDescPcrel()  local
|  1241  ADDI, LoongArchMCExpr::VK_LoongArch_TLS_DESC_PC_LO12));  in emitLoadAddressTLSDescPcrel()
|
| /freebsd/contrib/llvm-project/lld/ELF/Arch/ |
| RISCV.cpp |
|  62   ADDI = 0x13,  enumerator
|  238  write32le(buf + 12, itype(ADDI, X_T1, X_T1, -target->pltHeaderSize - 12));  in writePltHeader()
|  239  write32le(buf + 16, itype(ADDI, X_T0, X_T2, lo12(offset)));  in writePltHeader()
|  255  write32le(buf + 12, itype(ADDI, 0, 0, 0));  in writePlt()
|  584  write32le(loc, itype(ADDI, X_A0, 0, val)); // addi a0,zero,<lo12>  in tlsdescToLe()
|  586  write32le(loc, itype(ADDI, X_A0, X_A0, lo12(val))); // addi a0,a0,<lo12>  in tlsdescToLe()
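
The itype(ADDI, ...) calls above assemble raw 32-bit instruction words for the PLT and the TLSDESC relaxations; the enumerator ADDI = 0x13 is the bare 7-bit opcode (ADDI's funct3 is 0). A minimal sketch of the standard I-type layout, with encodeIType as an illustrative name rather than lld's itype():

    #include <cstdint>
    #include <cstdio>

    // I-type layout: imm[11:0] in bits 31:20, rs1 in 19:15, funct3 in 14:12,
    // rd in 11:7, opcode in 6:0.
    static uint32_t encodeIType(uint32_t opcode, uint32_t funct3, uint32_t rd,
                                uint32_t rs1, int32_t imm) {
      return (uint32_t(imm & 0xfff) << 20) | (rs1 << 15) | (funct3 << 12) |
             (rd << 7) | opcode;
    }

    int main() {
      // addi x0, x0, 0 is the canonical nop used as PLT padding: 0x00000013.
      std::printf("0x%08x\n", encodeIType(0x13, 0, 0, 0, 0));
      // addi a0, zero, -1 (a0 is x10): 0xfff00513.
      std::printf("0x%08x\n", encodeIType(0x13, 0, 10, 0, -1));
      return 0;
    }
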
|
| /freebsd/contrib/llvm-project/llvm/lib/Target/LoongArch/ |
| LoongArchInstrInfo.cpp |
|  448  MachineInstr &ADDI =  in insertIndirectBranch()  local
|  476  ADDI.getOperand(2).setMBB(&RestoreBB);  in insertIndirectBranch()
|
| /freebsd/contrib/llvm-project/llvm/lib/Target/Xtensa/ |
| XtensaInstrInfo.cpp |
|  91  BuildMI(MBB, I, DL, get(Xtensa::ADDI), Reg).addReg(SP).addImm(Amount);  in adjustStackPtr()
|