Searched refs:PtrAdd (Results 1 – 9 of 9) sorted by relevance
/freebsd/contrib/llvm-project/llvm/lib/CodeGen/GlobalISel/
CombinerHelper.cpp
  1196  auto *PtrAdd = dyn_cast<GPtrAdd>(&Use);  in findPostIndexCandidate() (local)
  1199  if (!PtrAdd || MRI.use_nodbg_empty(PtrAdd->getReg(0)))  in findPostIndexCandidate()
  1207  Offset = PtrAdd->getOffsetReg();  in findPostIndexCandidate()
  1209  !TLI.isIndexingLegal(LdSt, PtrAdd->getBaseReg(), Offset,  in findPostIndexCandidate()
  1224  for (auto &BasePtrUse : MRI.use_nodbg_instructions(PtrAdd->getBaseReg())) {  in findPostIndexCandidate()
  1255  Addr = PtrAdd->getReg(0);  in findPostIndexCandidate()
  1256  Base = PtrAdd->getBaseReg();  in findPostIndexCandidate()
  2470  auto PtrAdd = Builder.buildPtrAdd(PtrTy, LHS, RHS);  in applyCombineAddP2IToPtrAdd() (local)
  2471  Builder.buildPtrToInt(Dst, PtrAdd);  in applyCombineAddP2IToPtrAdd()
  2477  auto &PtrAdd = cast<GPtrAdd>(MI);  in matchCombineConstPtrAddToI2P() (local)
  [all …]
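For orientation, a minimal sketch of the pattern behind these CombinerHelper hits: a GlobalISel match routine that recognizes a G_PTR_ADD through the typed GPtrAdd wrapper and reads its base and offset registers. The function and variable names are hypothetical; this is not the combine quoted above.

    // Minimal sketch, not CombinerHelper itself: dyn_cast to GPtrAdd
    // succeeds only when MI's opcode is G_PTR_ADD.
    #include "llvm/CodeGen/GlobalISel/GenericMachineInstrs.h"
    #include "llvm/CodeGen/MachineRegisterInfo.h"

    bool matchPtrAddExample(llvm::MachineInstr &MI,
                            llvm::MachineRegisterInfo &MRI) {
      auto *PtrAdd = llvm::dyn_cast<llvm::GPtrAdd>(&MI);
      if (!PtrAdd || MRI.use_nodbg_empty(PtrAdd->getReg(0)))
        return false;  // not a ptr add, or its result is dead
      llvm::Register Base = PtrAdd->getBaseReg();      // operand 1
      llvm::Register Offset = PtrAdd->getOffsetReg();  // operand 2
      return Base.isValid() && Offset.isValid();
    }
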
/freebsd/contrib/llvm-project/llvm/lib/Transforms/Vectorize/
VPlanRecipes.cpp
  146  case VPInstruction::PtrAdd:  in mayHaveSideEffects()
  350  return Opcode == VPInstruction::PtrAdd && !vputils::onlyFirstLaneUsed(this);  in doesGeneratePerAllLanes()
  364  case VPInstruction::PtrAdd:  in canGenerateScalarForFirstLane()
  376  assert(getOpcode() == VPInstruction::PtrAdd &&  in generatePerLane()
  649  case VPInstruction::PtrAdd: {  in generatePerPart()
  756  case VPInstruction::PtrAdd:  in onlyFirstLaneUsed()
  848  case VPInstruction::PtrAdd:  in print()
VPlanAnalysis.cpp
  72  case VPInstruction::PtrAdd:  in inferScalarTypeForRecipe()
VPlanTransforms.cpp
  598  auto *Recipe = new VPInstruction(VPInstruction::PtrAdd,  in legalizeAndOptimizeInductions()
VPlan.h
  1264  PtrAdd,  (enumerator)
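For reference, in IR terms a VPInstruction::PtrAdd boils down to a byte-addressed GEP. A minimal sketch of that final step, assuming IRBuilder's CreatePtrAdd helper; the VPlan recipe and per-lane plumbing from VPlanRecipes.cpp are deliberately omitted here.

    // Sketch only: CreatePtrAdd emits `getelementptr i8, ptr %Ptr, %Off`,
    // the flavor of address arithmetic the PtrAdd opcode models.
    #include "llvm/IR/IRBuilder.h"

    llvm::Value *emitPtrAdd(llvm::IRBuilder<> &B, llvm::Value *Ptr,
                            llvm::Value *Off) {
      return B.CreatePtrAdd(Ptr, Off, "ptradd");
    }
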
/freebsd/contrib/llvm-project/llvm/lib/Target/AArch64/GISel/ |
AArch64PostLegalizerCombiner.cpp
  731  GPtrAdd *PtrAdd = cast<GPtrAdd>(MRI.getVRegDef(PtrReg));  in optimizeConsecutiveMemOpAddressing() (local)
  732  StoreInfo New = {St, PtrAdd, Offset.getSExtValue(), StoredValTy};  in optimizeConsecutiveMemOpAddressing()
AArch64InstructionSelector.cpp
  7201  MachineInstr *PtrAdd =  in selectAddrModeShiftedExtendXReg() (local)
  7203  if (!PtrAdd || !isWorthFoldingIntoExtendedReg(*PtrAdd, MRI, true))  in selectAddrModeShiftedExtendXReg()
  7209  getDefIgnoringCopies(PtrAdd->getOperand(2).getReg(), MRI);  in selectAddrModeShiftedExtendXReg()
  7210  return selectExtendedSHL(Root, PtrAdd->getOperand(1),  in selectAddrModeShiftedExtendXReg()
  7262  MachineInstr *PtrAdd =  in selectAddrModeXRO() (local)
  7264  if (!PtrAdd)  in selectAddrModeXRO()
  7281  getIConstantVRegValWithLookThrough(PtrAdd->getOperand(2).getReg(), MRI);  in selectAddrModeXRO()
  7335  MachineInstr *PtrAdd =  in selectAddrModeWRO() (local)
  7337  if (!PtrAdd || !isWorthFoldingIntoExtendedReg(*PtrAdd, MRI, true))  in selectAddrModeWRO()
  7340  MachineOperand &LHS = PtrAdd->getOperand(1);  in selectAddrModeWRO()
  [all …]
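As a rough illustration of what these selectAddrMode* routines do, here is a hedged sketch that walks from a root register to a G_PTR_ADD (looking through copies) and extracts a constant offset for folding into an addressing mode. getFoldableOffset is a hypothetical helper, not AArch64 selector code.

    // Sketch only: find the G_PTR_ADD feeding Root and fold its offset
    // operand when that offset is a known constant.
    #include "llvm/CodeGen/GlobalISel/Utils.h"
    #include "llvm/CodeGen/TargetOpcodes.h"
    #include <optional>

    std::optional<int64_t> getFoldableOffset(llvm::Register Root,
                                             llvm::MachineRegisterInfo &MRI) {
      // getOpcodeDef skips intervening COPYs on the way to the def.
      llvm::MachineInstr *PtrAdd =
          llvm::getOpcodeDef(llvm::TargetOpcode::G_PTR_ADD, Root, MRI);
      if (!PtrAdd)
        return std::nullopt;
      // Operand 2 of G_PTR_ADD is the offset register.
      auto Cst = llvm::getIConstantVRegValWithLookThrough(
          PtrAdd->getOperand(2).getReg(), MRI);
      if (!Cst)
        return std::nullopt;
      return Cst->Value.getSExtValue();
    }
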
/freebsd/contrib/llvm-project/llvm/include/llvm/CodeGen/GlobalISel/ |
CombinerHelper.h
  935  bool reassociationCanBreakAddressingModePattern(MachineInstr &PtrAdd);
/freebsd/contrib/llvm-project/llvm/lib/Target/AMDGPU/ |
AMDGPURegisterBankInfo.cpp
  1205  auto PtrAdd = B.buildPtrAdd(PtrTy, SPCopy, ScaledSize);  in applyMappingDynStackAlloc() (local)
  1206  B.buildMaskLowPtrBits(Dst, PtrAdd,  in applyMappingDynStackAlloc()
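Finally, a small sketch of the MachineIRBuilder call the AMDGPU hit uses: buildPtrAdd emits a G_PTR_ADD of a base pointer and a byte offset. The bumpPointer wrapper and its argument names are made up for illustration, and B must already have an insertion point set.

    // Sketch only: buildPtrAdd creates %res:_(PtrTy) = G_PTR_ADD %Base, %Size
    #include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"

    llvm::Register bumpPointer(llvm::MachineIRBuilder &B, llvm::LLT PtrTy,
                               llvm::Register Base, llvm::Register Size) {
      auto PtrAdd = B.buildPtrAdd(PtrTy, Base, Size);
      return PtrAdd.getReg(0);  // virtual register holding the new pointer
    }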