Lines matching refs:CallingConv in llvm/lib/Target/X86/X86ISelLowering.cpp
51 static bool shouldDisableRetRegFromCSR(CallingConv::ID CC) { in shouldDisableRetRegFromCSR()
55 case CallingConv::X86_RegCall: in shouldDisableRetRegFromCSR()
56 case CallingConv::PreserveMost: in shouldDisableRetRegFromCSR()
57 case CallingConv::PreserveAll: in shouldDisableRetRegFromCSR()
65 static bool shouldDisableArgRegFromCSR(CallingConv::ID CC) { in shouldDisableArgRegFromCSR()
66 return CC == CallingConv::X86_RegCall; in shouldDisableArgRegFromCSR()
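The two CSR helpers at lines 51-66 are matched almost line for line; only the switch scaffolding is elided. A minimal reassembly, compilable against the public llvm/IR/CallingConv.h header, with the default arm filled in as an assumption (it is not among the matches):

    #include "llvm/IR/CallingConv.h"
    using namespace llvm;

    // Conventions that keep their return registers out of the callee-saved
    // set: RegCall plus the preserve_most/preserve_all family.
    static bool shouldDisableRetRegFromCSR(CallingConv::ID CC) {
      switch (CC) {
      default:
        return false; // assumed default arm
      case CallingConv::X86_RegCall:
      case CallingConv::PreserveMost:
      case CallingConv::PreserveAll:
        return true;
      }
    }

    // Only RegCall also drops its argument registers from the CSR set.
    static bool shouldDisableArgRegFromCSR(CallingConv::ID CC) {
      return CC == CallingConv::X86_RegCall;
    }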
70 handleMaskRegisterForCallingConv(unsigned NumElts, CallingConv::ID CC, in handleMaskRegisterForCallingConv()
78 if (NumElts == 8 && CC != CallingConv::X86_RegCall && in handleMaskRegisterForCallingConv()
79 CC != CallingConv::Intel_OCL_BI) in handleMaskRegisterForCallingConv()
81 if (NumElts == 16 && CC != CallingConv::X86_RegCall && in handleMaskRegisterForCallingConv()
82 CC != CallingConv::Intel_OCL_BI) in handleMaskRegisterForCallingConv()
86 if (NumElts == 32 && (!Subtarget.hasBWI() || CC != CallingConv::X86_RegCall)) in handleMaskRegisterForCallingConv()
89 if (NumElts == 64 && Subtarget.hasBWI() && CC != CallingConv::X86_RegCall) { in handleMaskRegisterForCallingConv()
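Lines 70-89 show the branch structure of handleMaskRegisterForCallingConv without its return values: small i1 mask vectors are widened to ordinary integer vectors unless the convention (X86_RegCall, Intel_OCL_BI) passes masks in AVX-512 k-registers, and the 32- and 64-element cases additionally depend on BWI. A simplified standalone sketch of that decision table; the names (classifyMask, MaskClass) and the exact widened types are illustrative assumptions, only the guard conditions come from the matches:

    enum class Conv { C, X86_RegCall, Intel_OCL_BI };
    enum class MaskClass { WidenToV8i16, WidenToV16i8, WidenToV32i8,
                           WidenToV64i8, KRegister };

    // Mirrors the guards matched above: widen unless the convention uses
    // k-registers; v32i1/v64i1 also depend on AVX-512 BW (BWI).
    MaskClass classifyMask(unsigned NumElts, Conv CC, bool HasBWI) {
      bool UsesKRegs = CC == Conv::X86_RegCall || CC == Conv::Intel_OCL_BI;
      if (NumElts == 8 && !UsesKRegs)
        return MaskClass::WidenToV8i16;   // assumed widened type
      if (NumElts == 16 && !UsesKRegs)
        return MaskClass::WidenToV16i8;   // assumed widened type
      if (NumElts == 32 && (!HasBWI || CC != Conv::X86_RegCall))
        return MaskClass::WidenToV32i8;   // assumed widened type
      if (NumElts == 64 && HasBWI && CC != Conv::X86_RegCall)
        return MaskClass::WidenToV64i8;   // may split; see the braced body
      return MaskClass::KRegister;
    }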
104 CallingConv::ID CC, in getRegisterTypeForCallingConv()
138 CallingConv::ID CC, in getNumRegistersForCallingConv()
173 LLVMContext &Context, CallingConv::ID CC, EVT VT, EVT &IntermediateVT, in getVectorTypeBreakdownForCallingConv()
189 CC != CallingConv::X86_RegCall) { in getVectorTypeBreakdownForCallingConv()
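getRegisterTypeForCallingConv, getNumRegistersForCallingConv, and getVectorTypeBreakdownForCallingConv are the TargetLowering hooks that make register assignment convention-dependent: the same IR type can map to different register types and counts under X86_RegCall than under the C convention. From source code the convention is selected with a clang attribute; a small illustrative declaration (the function name is hypothetical):

    // regcall passes as many arguments as possible in registers and, per
    // shouldDisableArgRegFromCSR above, removes those argument registers
    // from the callee-saved set.
    __attribute__((regcall)) int sum3(int a, int b, int c) {
      return a + b + c;
    }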
439 if (CC != CallingConv::C && CC != CallingConv::X86_StdCall) in markLibCallAttributes()
603 F->setCallingConv(CallingConv::X86_FastCall); in insertSSPDeclarations()
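Line 603 belongs to the stack-protector lowering: on 32-bit Windows the cookie-check helper expects its argument in ECX, so insertSSPDeclarations pins the created declaration to X86_FastCall. The MSVC runtime prototype it is matching is, to the best of my knowledge:

    #include <cstdint>

    // MSVC CRT helper invoked in /GS epilogues; __fastcall places
    // StackCookie in ECX on x86-32.
    extern "C" void __fastcall __security_check_cookie(uintptr_t StackCookie);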
662 CallingConv::ID CallConv, MachineFunction &MF, bool isVarArg, in CanLowerReturn()
669 const MCPhysReg *X86TargetLowering::getScratchRegisters(CallingConv::ID) const { in getScratchRegisters()
735 X86TargetLowering::LowerReturn(SDValue Chain, CallingConv::ID CallConv, in LowerReturn()
750 if (CallConv == CallingConv::X86_INTR && !Outs.empty()) in LowerReturn()
914 CallConv != CallingConv::PreserveAll && in LowerReturn()
915 CallConv != CallingConv::PreserveMost) in LowerReturn()
938 if (CallConv == CallingConv::X86_INTR) in LowerReturn()
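LowerReturn treats several conventions specially: line 750 rejects any return value under X86_INTR (interrupt handlers return with IRET, line 938, and cannot carry results), and lines 914-915 exempt preserve_most/preserve_all from whatever CSR adjustment that condition guards. Both preserve conventions are reachable from C and C++; illustrative declarations (names hypothetical):

    // preserve_most/preserve_all make the callee responsible for almost
    // every register, which changes how return registers interact with
    // the callee-saved set in LowerReturn.
    __attribute__((preserve_most)) void logSlowPath(const char *Msg);
    __attribute__((preserve_all)) void probeHook(void *Ctx);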
1094 SDValue Chain, SDValue InGlue, CallingConv::ID CallConv, bool isVarArg, in LowerCallResult()
1247 static bool canGuaranteeTCO(CallingConv::ID CC) { in canGuaranteeTCO()
1248 return (CC == CallingConv::Fast || CC == CallingConv::GHC || in canGuaranteeTCO()
1249 CC == CallingConv::X86_RegCall || CC == CallingConv::HiPE || in canGuaranteeTCO()
1250 CC == CallingConv::Tail || CC == CallingConv::SwiftTail); in canGuaranteeTCO()
1254 static bool mayTailCallThisCC(CallingConv::ID CC) { in mayTailCallThisCC()
1257 case CallingConv::C: in mayTailCallThisCC()
1258 case CallingConv::Win64: in mayTailCallThisCC()
1259 case CallingConv::X86_64_SysV: in mayTailCallThisCC()
1260 case CallingConv::PreserveNone: in mayTailCallThisCC()
1262 case CallingConv::X86_ThisCall: in mayTailCallThisCC()
1263 case CallingConv::X86_StdCall: in mayTailCallThisCC()
1264 case CallingConv::X86_VectorCall: in mayTailCallThisCC()
1265 case CallingConv::X86_FastCall: in mayTailCallThisCC()
1267 case CallingConv::Swift: in mayTailCallThisCC()
1276 static bool shouldGuaranteeTCO(CallingConv::ID CC, bool GuaranteedTailCallOpt) { in shouldGuaranteeTCO()
1278 CC == CallingConv::Tail || CC == CallingConv::SwiftTail; in shouldGuaranteeTCO()
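Lines 1247-1278 contain the whole tail-call policy, and the matches cover nearly every line of it. Reassembled into compilable form; the default arm of mayTailCallThisCC and the first clause of shouldGuaranteeTCO are not among the matches and are filled in as assumptions:

    #include "llvm/IR/CallingConv.h"
    using namespace llvm;

    // Conventions under which a tail call can be *guaranteed*.
    static bool canGuaranteeTCO(CallingConv::ID CC) {
      return (CC == CallingConv::Fast || CC == CallingConv::GHC ||
              CC == CallingConv::X86_RegCall || CC == CallingConv::HiPE ||
              CC == CallingConv::Tail || CC == CallingConv::SwiftTail);
    }

    // Conventions under which a tail call *may* be emitted as one.
    static bool mayTailCallThisCC(CallingConv::ID CC) {
      switch (CC) {
      // C-family conventions:
      case CallingConv::C:
      case CallingConv::Win64:
      case CallingConv::X86_64_SysV:
      case CallingConv::PreserveNone:
      // Callee-pop conventions:
      case CallingConv::X86_ThisCall:
      case CallingConv::X86_StdCall:
      case CallingConv::X86_VectorCall:
      case CallingConv::X86_FastCall:
      // Swift:
      case CallingConv::Swift:
        return true;
      default:
        return canGuaranteeTCO(CC);   // assumed fallback
      }
    }

    // TCO is mandatory for Tail/SwiftTail, and for the TCO-capable set
    // when -tailcallopt is in effect (first clause assumed).
    static bool shouldGuaranteeTCO(CallingConv::ID CC,
                                   bool GuaranteedTailCallOpt) {
      return (GuaranteedTailCallOpt && canGuaranteeTCO(CC)) ||
             CC == CallingConv::Tail || CC == CallingConv::SwiftTail;
    }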
1285 CallingConv::ID CalleeCC = CI->getCallingConv(); in mayBeEmittedAsTailCall()
1293 X86TargetLowering::LowerMemArgument(SDValue Chain, CallingConv::ID CallConv, in LowerMemArgument()
1411 static ArrayRef<MCPhysReg> get64BitArgumentGPRs(CallingConv::ID CallConv, in get64BitArgumentGPRs()
1430 CallingConv::ID CallConv, in get64BitArgumentXMMs()
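get64BitArgumentGPRs and get64BitArgumentXMMs feed the vararg register-save area: they report which registers may still hold incoming arguments, and the GPR list differs between Win64 and SysV. A standalone sketch of the GPR half with illustrative names (the register sets themselves are the documented ABIs):

    #include <array>
    #include <span>
    #include <string_view>

    // Win64 passes integer arguments in four GPRs, SysV x86-64 in six.
    std::span<const std::string_view> argumentGprs(bool IsWin64) {
      static constexpr std::array<std::string_view, 4> Win64Regs{
          "rcx", "rdx", "r8", "r9"};
      static constexpr std::array<std::string_view, 6> SysVRegs{
          "rdi", "rsi", "rdx", "rcx", "r8", "r9"};
      if (IsWin64)
        return Win64Regs;
      return SysVRegs;
    }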
1469 CallingConv::ID CallConv, CCState &CCInfo) in VarArgsLoweringHelper()
1498 CallingConv::ID CallConv;
1508 if (is64Bit() || (CallConv != CallingConv::X86_FastCall && in createVarArgAreaAndStoreRegisters()
1509 CallConv != CallingConv::X86_ThisCall)) { in createVarArgAreaAndStoreRegisters()
1620 (is64Bit() || (CallConv == CallingConv::X86_VectorCall || in forwardMustTailParameters()
1621 CallConv == CallingConv::Intel_OCL_BI))) in forwardMustTailParameters()
1671 SDValue Chain, CallingConv::ID CallConv, bool IsVarArg, in LowerFormalArguments()
1702 if (CallingConv::X86_VectorCall == CallConv) { in LowerFormalArguments()
1834 if (CallConv == CallingConv::Swift || CallConv == CallingConv::SwiftTail) in LowerFormalArguments()
1868 } else if (CallConv == CallingConv::X86_INTR && Ins.size() == 2) { in LowerFormalArguments()
1910 if (CallingConv::PreserveNone == CallConv) in LowerFormalArguments()
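Two of the LowerFormalArguments matches deserve a note: line 1834 special-cases Swift/SwiftTail, and line 1868's X86_INTR check for exactly two incoming values corresponds to interrupt handlers that receive a CPU-pushed error code. In clang that second shape is written as follows (handler names are illustrative):

    // x86 interrupt handlers: one parameter for plain interrupts, two
    // when the CPU pushes an error code (the error-code type must be
    // pointer-sized, e.g. unsigned long on x86-64 Linux).
    struct interrupt_frame;

    __attribute__((interrupt))
    void onTimer(struct interrupt_frame *Frame);

    __attribute__((interrupt))
    void onPageFault(struct interrupt_frame *Frame, unsigned long ErrorCode);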
2001 CallingConv::ID CallConv = CLI.CallConv; in LowerCall()
2011 CallConv == CallingConv::Tail || CallConv == CallingConv::SwiftTail; in LowerCall()
2023 if (CallConv == CallingConv::X86_INTR) in LowerCall()
2038 if (CallingConv::X86_VectorCall == CallConv) { in LowerCall()
2262 if (CallConv != CallingConv::X86_RegCall) in LowerCall()
2337 assert((CallConv == CallingConv::X86_RegCall) && in LowerCall()
2444 AdaptedCC = (CallingConv::ID)CallingConv::X86_INTR; in LowerCall()
2448 AdaptedCC = (CallingConv::ID)CallingConv::GHC; in LowerCall()
2570 if (CallingConv::PreserveNone == CallConv) in LowerCall()
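The casts at lines 2444-2448 are deliberate: LowerCall is not changing the callee's convention, it appears to be borrowing another convention's register mask for the call site. X86_INTR supplies a save-everything mask for callees marked no_caller_saved_registers, and GHC supplies an empty callee-saved list for the opposite no_callee_saved_registers case, which reaches the backend as a string function attribute. The first trigger is writable directly in C/C++ (function name illustrative):

    // tracePoint preserves every register it touches, so its callers need
    // not spill the usual caller-saved set; the backend models this with
    // the X86_INTR (everything-preserved) mask at the call site.
    __attribute__((no_caller_saved_registers)) void tracePoint(void);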
2738 CallingConv::ID CalleeCC = CLI.CallConv; in IsEligibleForTailCallOptimization()
2754 CallingConv::ID CallerCC = CallerF.getCallingConv(); in IsEligibleForTailCallOptimization()
2759 CalleeCC == CallingConv::Tail || CalleeCC == CallingConv::SwiftTail; in IsEligibleForTailCallOptimization()
2920 bool X86::isCalleePop(CallingConv::ID CallingConv, in isCalleePop() argument
2924 if (!IsVarArg && shouldGuaranteeTCO(CallingConv, GuaranteeTCO)) in isCalleePop()
2927 switch (CallingConv) { in isCalleePop()
2930 case CallingConv::X86_StdCall: in isCalleePop()
2931 case CallingConv::X86_FastCall: in isCalleePop()
2932 case CallingConv::X86_ThisCall: in isCalleePop()
2933 case CallingConv::X86_VectorCall: in isCalleePop()
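X86::isCalleePop ties the policies together: callee-pop can be forced to make guaranteed TCO work (line 2924), and otherwise applies only to the four 32-bit callee-pop conventions, the same four that mayTailCallThisCC groups together above. A sketch reusing shouldGuaranteeTCO from the earlier block; the default arm and the final return are assumptions filled in around the matches:

    #include "llvm/IR/CallingConv.h"
    using namespace llvm;

    // Sketch of X86::isCalleePop (written as a free function here).
    static bool isCalleePopSketch(CallingConv::ID CC, bool Is64Bit,
                                  bool IsVarArg, bool GuaranteeTCO) {
      // Forcing callee-pop lets the callee clean up the argument area,
      // which guaranteed tail calls rely on.
      if (!IsVarArg && shouldGuaranteeTCO(CC, GuaranteeTCO))
        return true;

      switch (CC) {
      default:
        return false;                    // assumed default arm
      case CallingConv::X86_StdCall:
      case CallingConv::X86_FastCall:
      case CallingConv::X86_ThisCall:
      case CallingConv::X86_VectorCall:
        return !Is64Bit;                 // assumed: callee-pop is 32-bit only
      }
    }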