Lines Matching refs:isPPC64 (PPCISelLowering.cpp)
179 bool isPPC64 = Subtarget.isPPC64(); in PPCTargetLowering() local
180 setMinStackArgumentAlignment(isPPC64 ? Align(8) : Align(4)); in PPCTargetLowering()
262 if (isPPC64 || Subtarget.hasFPCVT()) { in PPCTargetLowering()
265 isPPC64 ? MVT::i64 : MVT::i32); in PPCTargetLowering()
268 isPPC64 ? MVT::i64 : MVT::i32); in PPCTargetLowering()
272 isPPC64 ? MVT::i64 : MVT::i32); in PPCTargetLowering()
275 isPPC64 ? MVT::i64 : MVT::i32); in PPCTargetLowering()
279 isPPC64 ? MVT::i64 : MVT::i32); in PPCTargetLowering()
282 isPPC64 ? MVT::i64 : MVT::i32); in PPCTargetLowering()
286 isPPC64 ? MVT::i64 : MVT::i32); in PPCTargetLowering()
289 isPPC64 ? MVT::i64 : MVT::i32); in PPCTargetLowering()
480 (Subtarget.hasP9Vector() && Subtarget.isPPC64()) ? Custom : Expand); in PPCTargetLowering()
560 if (Subtarget.hasDirectMove() && isPPC64) { in PPCTargetLowering()
705 if (Subtarget.hasLFIWAX() || Subtarget.isPPC64()) { in PPCTargetLowering()
1007 if (Subtarget.hasDirectMove() && isPPC64) { in PPCTargetLowering()
1307 setOperationAction(ISD::SELECT_CC, MVT::i64, isPPC64 ? Custom : Expand); in PPCTargetLowering()
1360 setOperationAction(ISD::READCYCLECOUNTER, MVT::i64, isPPC64 ? Legal : Custom); in PPCTargetLowering()
1362 if (!isPPC64) { in PPCTargetLowering()
1382 else if (isPPC64) in PPCTargetLowering()
1387 setStackPointerRegisterToSaveRestore(isPPC64 ? PPC::X1 : PPC::R1); in PPCTargetLowering()
1435 setLibcallName(RTLIB::MEMCPY, isPPC64 ? "___memmove64" : "___memmove"); in PPCTargetLowering()
1436 setLibcallName(RTLIB::MEMMOVE, isPPC64 ? "___memmove64" : "___memmove"); in PPCTargetLowering()
1437 setLibcallName(RTLIB::MEMSET, isPPC64 ? "___memset64" : "___memset"); in PPCTargetLowering()
1438 setLibcallName(RTLIB::BZERO, isPPC64 ? "___bzero64" : "___bzero"); in PPCTargetLowering()
1632 Align Alignment = Subtarget.isPPC64() ? Align(8) : Align(4); in getByValTypeAlignment()
1652 if (!Subtarget.isPPC64() || !Subtarget.hasVSX()) in shallExtractConstSplatVectorElementToStore()
2867 Base = DAG.getRegister(Subtarget.isPPC64() ? PPC::ZERO8 : PPC::ZERO, in SelectAddressRegImm()
2974 Base = DAG.getRegister(Subtarget.isPPC64() ? PPC::ZERO8 : PPC::ZERO, in SelectAddressRegRegOnly()
3178 const bool Is64Bit = Subtarget.isPPC64(); in getTOCEntry()
3242 if (Subtarget.isPPC64() || Subtarget.isAIXABI()) in isJumpTableRelative()
3249 if (!Subtarget.isPPC64() || Subtarget.isAIXABI()) in getPICJumpTableRelocBase()
3266 if (!Subtarget.isPPC64() || Subtarget.isAIXABI()) in getPICJumpTableRelocBaseExpr()
3419 bool Is64Bit = Subtarget.isPPC64(); in LowerGlobalTLSAddressAIX()
3562 bool is64bit = Subtarget.isPPC64(); in LowerGlobalTLSAddressLinux()
3811 assert(!Subtarget.isPPC64() && "LowerVAARG is PPC32 only"); in LowerVAARG()
3902 assert(!Subtarget.isPPC64() && "LowerVACOPY is PPC32 only"); in LowerVACOPY()
3983 bool isPPC64 = (PtrVT == MVT::i64); in LowerINIT_TRAMPOLINE() local
3993 Entry.Node = DAG.getConstant(isPPC64 ? 48 : 40, dl, in LowerINIT_TRAMPOLINE()
3994 isPPC64 ? MVT::i64 : MVT::i32); in LowerINIT_TRAMPOLINE()
4017 if (Subtarget.isPPC64() || Subtarget.isAIXABI()) { in LowerVASTART()
5222 bool isPPC64 = Subtarget.isPPC64(); in EmitTailCallStoreFPAndRetAddr() local
5223 int SlotSize = isPPC64 ? 8 : 4; in EmitTailCallStoreFPAndRetAddr()
5227 EVT VT = isPPC64 ? MVT::i64 : MVT::i32; in EmitTailCallStoreFPAndRetAddr()
5238 CalculateTailCallArgDest(SelectionDAG &DAG, MachineFunction &MF, bool isPPC64, in CalculateTailCallArgDest() argument
5244 EVT VT = isPPC64 ? MVT::i64 : MVT::i32; in CalculateTailCallArgDest()
5261 EVT VT = Subtarget.isPPC64() ? MVT::i64 : MVT::i32; in EmitTailCallLoadFPAndRetAddr()
5288 SDValue PtrOff, int SPDiff, unsigned ArgOffset, bool isPPC64, in LowerMemOpCallTo() argument
5295 if (isPPC64) in LowerMemOpCallTo()
5305 } else CalculateTailCallArgDest(DAG, MF, isPPC64, Arg, SPDiff, ArgOffset, in LowerMemOpCallTo()
5657 const MVT RegVT = Subtarget.isPPC64() ? MVT::i64 : MVT::i32; in prepareDescriptorIndirectCall()
5658 const Align Alignment = Subtarget.isPPC64() ? Align(8) : Align(4); in prepareDescriptorIndirectCall()
5707 const bool IsPPC64 = Subtarget.isPPC64(); in buildCallOperands()
5883 if (Subtarget.isSVR4ABI() && Subtarget.isPPC64()) in isEligibleForTCO()
5963 if (Subtarget.isPPC64()) in LowerCall()
6848 const bool IsPPC64 = Subtarget.isPPC64(); in CC_AIX()
7204 const bool IsPPC64 = Subtarget.isPPC64(); in LowerFormalArguments_AIX()
7516 const bool IsPPC64 = Subtarget.isPPC64(); in LowerCall_AIX()
7780 const MVT PtrVT = Subtarget.isPPC64() ? MVT::i64 : MVT::i32; in LowerCall_AIX()
7911 bool isPPC64 = Subtarget.isPPC64(); in LowerSTACKRESTORE() local
7912 unsigned SP = isPPC64 ? PPC::X1 : PPC::R1; in LowerSTACKRESTORE()
7932 bool isPPC64 = Subtarget.isPPC64(); in getReturnAddrFrameIndex() local
7945 RASI = MF.getFrameInfo().CreateFixedObject(isPPC64? 8 : 4, LROffset, false); in getReturnAddrFrameIndex()
7955 bool isPPC64 = Subtarget.isPPC64(); in getFramePointerFrameIndex() local
7968 FPSI = MF.getFrameInfo().CreateFixedObject(isPPC64? 8 : 4, FPOffset, true); in getFramePointerFrameIndex()
8001 bool isPPC64 = Subtarget.isPPC64(); in LowerEH_DWARF_CFA() local
8004 int FI = MF.getFrameInfo().CreateFixedObject(isPPC64 ? 8 : 4, 0, false); in LowerEH_DWARF_CFA()
8345 DestTy = Subtarget.isPPC64() ? MVT::i64 : MVT::i32; in convertFPToInt()
8518 if (Subtarget.hasDirectMove() && Subtarget.isPPC64()) in LowerFP_TO_INT()
8810 Subtarget.isPPC64() && Subtarget.hasFPCVT()) in LowerINT_TO_FP()
8991 assert(Subtarget.isPPC64() && in LowerINT_TO_FP()
9341 if (!Subtarget.isPPC64() || (Op0.getOpcode() != ISD::BUILD_PAIR) || in LowerBITCAST()
9588 if (Subtarget.hasVSX() && Subtarget.isPPC64() && in LowerBUILD_VECTOR()
10449 bool isPPC64 = Subtarget.isPPC64(); in LowerVPERM() local
10536 if (isPPC64 && (V1HasXXSWAPD || V2HasXXSWAPD)) { in LowerVPERM()
10869 if (Subtarget.isPPC64()) in LowerINTRINSIC_WO_CHAIN()
10874 assert(Subtarget.isPPC64() && "rldimi is only available in 64-bit!"); in LowerINTRINSIC_WO_CHAIN()
11186 unsigned Opcode = Subtarget.isPPC64() ? PPC::CFENCE8 : PPC::CFENCE; in LowerINTRINSIC_VOID()
11187 EVT FTy = Subtarget.isPPC64() ? MVT::i64 : MVT::i32; in LowerINTRINSIC_VOID()
11203 if (!Subtarget.isPPC64()) in LowerBSWAP()
11389 if (Subtarget.isPPC64()) { in getDataClassTest()
11507 if ((VT == MVT::v2i64 || VT == MVT::v2f64) && !Subtarget.isPPC64()) in LowerINSERT_VECTOR_ELT()
11941 if (!Subtarget.isSVR4ABI() || Subtarget.isPPC64()) in ReplaceNodeResults()
12240 bool is64bit = Subtarget.isPPC64(); in EmitPartwordAtomicBinary()
12507 BaseReg = Subtarget.isPPC64() ? PPC::X1 : PPC::R1; in emitEHSjLjSetJmp()
12509 BaseReg = Subtarget.isPPC64() ? PPC::BP8 : PPC::BP; in emitEHSjLjSetJmp()
12512 TII->get(Subtarget.isPPC64() ? PPC::STD : PPC::STW)) in emitEHSjLjSetJmp()
12535 TII->get(Subtarget.isPPC64() ? PPC::MFLR8 : PPC::MFLR), LabelReg); in emitEHSjLjSetJmp()
12538 if (Subtarget.isPPC64()) { in emitEHSjLjSetJmp()
12698 const bool isPPC64 = Subtarget.isPPC64(); in emitProbedAlloca() local
12739 Register SPReg = isPPC64 ? PPC::X1 : PPC::R1; in emitProbedAlloca()
12740 Register FinalStackPtr = MRI.createVirtualRegister(isPPC64 ? G8RC : GPRC); in emitProbedAlloca()
12741 Register FramePointer = MRI.createVirtualRegister(isPPC64 ? G8RC : GPRC); in emitProbedAlloca()
12742 Register ActualNegSizeReg = MRI.createVirtualRegister(isPPC64 ? G8RC : GPRC); in emitProbedAlloca()
12750 isPPC64 ? PPC::PREPARE_PROBED_ALLOCA_64 : PPC::PREPARE_PROBED_ALLOCA_32; in emitProbedAlloca()
12756 ProbeOpc = isPPC64 ? PPC::PREPARE_PROBED_ALLOCA_NEGSIZE_SAME_REG_64 in emitProbedAlloca()
12765 BuildMI(*MBB, {MI}, DL, TII->get(isPPC64 ? PPC::ADD8 : PPC::ADD4), in emitProbedAlloca()
12773 Register ScratchReg = MRI.createVirtualRegister(isPPC64 ? G8RC : GPRC); in emitProbedAlloca()
12775 Register TempReg = MRI.createVirtualRegister(isPPC64 ? G8RC : GPRC); in emitProbedAlloca()
12776 BuildMI(*MBB, {MI}, DL, TII->get(isPPC64 ? PPC::LIS8 : PPC::LIS), TempReg) in emitProbedAlloca()
12778 BuildMI(*MBB, {MI}, DL, TII->get(isPPC64 ? PPC::ORI8 : PPC::ORI), in emitProbedAlloca()
12783 BuildMI(*MBB, {MI}, DL, TII->get(isPPC64 ? PPC::LI8 : PPC::LI), ScratchReg) in emitProbedAlloca()
12788 Register Div = MRI.createVirtualRegister(isPPC64 ? G8RC : GPRC); in emitProbedAlloca()
12789 BuildMI(*MBB, {MI}, DL, TII->get(isPPC64 ? PPC::DIVD : PPC::DIVW), Div) in emitProbedAlloca()
12792 Register Mul = MRI.createVirtualRegister(isPPC64 ? G8RC : GPRC); in emitProbedAlloca()
12793 BuildMI(*MBB, {MI}, DL, TII->get(isPPC64 ? PPC::MULLD : PPC::MULLW), Mul) in emitProbedAlloca()
12796 Register NegMod = MRI.createVirtualRegister(isPPC64 ? G8RC : GPRC); in emitProbedAlloca()
12797 BuildMI(*MBB, {MI}, DL, TII->get(isPPC64 ? PPC::SUBF8 : PPC::SUBF), NegMod) in emitProbedAlloca()
12800 BuildMI(*MBB, {MI}, DL, TII->get(isPPC64 ? PPC::STDUX : PPC::STWUX), SPReg) in emitProbedAlloca()
12809 BuildMI(TestMBB, DL, TII->get(isPPC64 ? PPC::CMPD : PPC::CMPW), CmpResult) in emitProbedAlloca()
12823 BuildMI(BlockMBB, DL, TII->get(isPPC64 ? PPC::STDUX : PPC::STWUX), SPReg) in emitProbedAlloca()
12834 MRI.createVirtualRegister(isPPC64 ? G8RC : GPRC); in emitProbedAlloca()
12836 TII->get(isPPC64 ? PPC::DYNAREAOFFSET8 : PPC::DYNAREAOFFSET), in emitProbedAlloca()
12840 BuildMI(TailMBB, DL, TII->get(isPPC64 ? PPC::ADD8 : PPC::ADD4), DstReg) in emitProbedAlloca()
13253 bool is64bit = Subtarget.isPPC64(); in EmitInstrWithCustomInserter()
14370 (N->getOperand(0).getValueType() == MVT::i32 && Subtarget.isPPC64()))) in DAGCombineExtBoolTrunc()
15376 (Op1VT == MVT::i32 || (Op1VT == MVT::i64 && Subtarget.isPPC64()) || in combineStoreFPToInt()
15879 (Subtarget.hasLDBRX() && Subtarget.isPPC64() && Op1VT == MVT::i64))) { in PerformDAGCombine()
15914 if (Subtarget.isPPC64() && !DCI.isBeforeLegalize() && in PerformDAGCombine()
16326 Subtarget.isPPC64() && N->getValueType(0) == MVT::i64; in PerformDAGCombine()
16577 if (VT == MVT::i64 && !Subtarget.isPPC64()) in BuildSDIVPow2()
16809 if (VT == MVT::i64 && Subtarget.isPPC64()) in getRegForInlineAsmConstraint()
16813 if (VT == MVT::i64 && Subtarget.isPPC64()) in getRegForInlineAsmConstraint()
16909 if (R.first && VT == MVT::i64 && Subtarget.isPPC64() && in getRegForInlineAsmConstraint()
17087 bool isPPC64 = Subtarget.isPPC64(); in LowerRETURNADDR() local
17099 isPPC64 ? MVT::i64 : MVT::i32); in LowerRETURNADDR()
17121 bool isPPC64 = PtrVT == MVT::i64; in LowerFRAMEADDR() local
17127 FrameReg = isPPC64 ? PPC::X1 : PPC::R1; in LowerFRAMEADDR()
17129 FrameReg = isPPC64 ? PPC::FP8 : PPC::FP; in LowerFRAMEADDR()
17143 bool isPPC64 = Subtarget.isPPC64(); in getRegisterByName() local
17145 bool is64Bit = isPPC64 && VT == LLT::scalar(64); in getRegisterByName()
17151 .Case("r2", isPPC64 ? Register() : PPC::R2) in getRegisterByName()
17370 if (Subtarget.isPPC64()) { in getOptimalMemOpType()
17409 (Subtarget.isPPC64() && MemVT == MVT::i32)) && in isZExtFree()
17593 return Subtarget.isPPC64() ? PPC::X3 : PPC::R3; in getExceptionPointerRegister()
17598 return Subtarget.isPPC64() ? PPC::X4 : PPC::R4; in getExceptionSelectorRegister()
17801 if (!Subtarget.isISA3_0() || !Subtarget.isPPC64() || in combineSHL()
17845 if (!Subtarget.isPPC64()) in combineADDToADDZE()
18465 Disp = DAG.getRegister(Subtarget.isPPC64() ? PPC::ZERO8 : PPC::ZERO, in SelectForceXFormMode()
18686 Base = DAG.getRegister(Subtarget.isPPC64() ? PPC::ZERO8 : PPC::ZERO, in SelectOptimalAddrMode()
18742 Disp = FI ? DAG.getRegister(Subtarget.isPPC64() ? PPC::ZERO8 : PPC::ZERO, in SelectOptimalAddrMode()
18763 return Subtarget.isPPC64() && Subtarget.hasQuadwordAtomics(); in shouldInlineQuadwordAtomics()
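
Nearly every hit above follows the same idiom: a single Subtarget.isPPC64() query selects between the 64-bit and 32-bit flavor of a type (MVT::i64 vs. MVT::i32), a register (PPC::X1 vs. PPC::R1, PPC::ZERO8 vs. PPC::ZERO), an opcode (PPC::ADD8 vs. PPC::ADD4, PPC::STDUX vs. PPC::STWUX), a slot size (8 vs. 4 bytes), or a libcall name ("___memset64" vs. "___memset"). The standalone sketch below only models the shape of that idiom; Subtarget, LoweringConfig, and configureFor are illustrative stand-ins and not the actual classes or functions in PPCISelLowering.cpp.

// Minimal, standalone model of the isPPC64 selection idiom shown above.
// None of these types are LLVM's; they only mirror the shape of the calls.
#include <iostream>
#include <string>

struct Subtarget {
  bool PPC64;
  bool isPPC64() const { return PPC64; } // mirrors the Subtarget.isPPC64() query
};

struct LoweringConfig {
  unsigned PointerBits;    // stands in for MVT::i64 vs. MVT::i32
  std::string StackPtr;    // stands in for PPC::X1 vs. PPC::R1
  unsigned SlotSize;       // stands in for the 8-byte vs. 4-byte stack slot
  std::string MemsetName;  // stands in for "___memset64" vs. "___memset"
};

// One subtarget query drives every width-dependent choice, the same shape as
// the PPCTargetLowering() constructor hits listed above.
LoweringConfig configureFor(const Subtarget &ST) {
  const bool isPPC64 = ST.isPPC64();
  return LoweringConfig{
      isPPC64 ? 64u : 32u,
      isPPC64 ? "X1" : "R1",
      isPPC64 ? 8u : 4u,
      isPPC64 ? "___memset64" : "___memset",
  };
}

int main() {
  for (bool is64 : {false, true}) {
    const LoweringConfig C = configureFor(Subtarget{is64});
    std::cout << C.PointerBits << "-bit: sp=" << C.StackPtr
              << " slot=" << C.SlotSize << " memset=" << C.MemsetName << '\n';
  }
}

The same shape recurs at the MachineInstr level (for example isPPC64 ? PPC::STDUX : PPC::STWUX in emitProbedAlloca()), where the query picks the doubleword or word form of an instruction rather than a type or register.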