Lines Matching defs:CallingConv
51 static bool shouldDisableRetRegFromCSR(CallingConv::ID CC) {
55 case CallingConv::X86_RegCall:
56 case CallingConv::PreserveMost:
57 case CallingConv::PreserveAll:
65 static bool shouldDisableArgRegFromCSR(CallingConv::ID CC) {
66 return CC == CallingConv::X86_RegCall;
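
The first two matches (lines 51-66) are small predicates deciding whether a calling convention may drop return or argument registers from the callee-saved register (CSR) set. The matched cases pin down their behavior; a hedged reconstruction, assuming the usual default-first switch layout and that CallingConv::ID comes from llvm/IR/CallingConv.h:

// Reconstruction sketch, not verbatim from the file.
#include "llvm/IR/CallingConv.h"
using namespace llvm;

// Return registers may be dropped from the CSR set for RegCall,
// PreserveMost and PreserveAll.
static bool shouldDisableRetRegFromCSR(CallingConv::ID CC) {
  switch (CC) {
  default:
    return false;
  case CallingConv::X86_RegCall:
  case CallingConv::PreserveMost:
  case CallingConv::PreserveAll:
    return true;
  }
}

// Argument registers are only dropped for RegCall.
static bool shouldDisableArgRegFromCSR(CallingConv::ID CC) {
  return CC == CallingConv::X86_RegCall;
}
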
70 handleMaskRegisterForCallingConv(unsigned NumElts, CallingConv::ID CC,
78 if (NumElts == 8 && CC != CallingConv::X86_RegCall &&
79 CC != CallingConv::Intel_OCL_BI)
81 if (NumElts == 16 && CC != CallingConv::X86_RegCall &&
82 CC != CallingConv::Intel_OCL_BI)
86 if (NumElts == 32 && (!Subtarget.hasBWI() || CC != CallingConv::X86_RegCall))
89 if (NumElts == 64 && Subtarget.hasBWI() && CC != CallingConv::X86_RegCall) {
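
The matches at lines 70-89 are conditions inside handleMaskRegisterForCallingConv, which chooses how an AVX-512 mask vector (vNi1) is passed for a given convention and subtarget: X86_RegCall and Intel_OCL_BI keep masks in k-registers, other conventions promote them to ordinary vector types, and the 64-element case additionally requires BWI. A sketch of how the matched conditions likely fit together; the return type, the concrete MVT choices, and the trailing fallback are assumptions, not visible in the matches:

// Sketch only: maps a vNi1 mask to (register value type, number of registers).
static std::pair<MVT, unsigned>
handleMaskRegisterForCallingConv(unsigned NumElts, CallingConv::ID CC,
                                 const X86Subtarget &Subtarget) {
  // Small masks are promoted to vector types unless the convention passes
  // them in k-registers (RegCall, Intel_OCL_BI).
  if (NumElts == 8 && CC != CallingConv::X86_RegCall &&
      CC != CallingConv::Intel_OCL_BI)
    return {MVT::v8i16, 1};
  if (NumElts == 16 && CC != CallingConv::X86_RegCall &&
      CC != CallingConv::Intel_OCL_BI)
    return {MVT::v16i8, 1};
  // v32i1 is promoted unless BWI is available and the convention is RegCall.
  if (NumElts == 32 && (!Subtarget.hasBWI() || CC != CallingConv::X86_RegCall))
    return {MVT::v32i8, 1};
  // v64i1 needs BWI to be widened to a byte vector; the split into two
  // halves without 512-bit registers is an assumption.
  if (NumElts == 64 && Subtarget.hasBWI() && CC != CallingConv::X86_RegCall) {
    if (Subtarget.useAVX512Regs())
      return {MVT::v64i8, 1};
    return {MVT::v32i8, 2};
  }
  // Remaining widths (v2i1, v4i1, non-power-of-two, >64 elements) are handled
  // by code not captured in the matches; placeholder fallback only.
  return {MVT::i8, NumElts};
}
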
104 CallingConv::ID CC,
138 CallingConv::ID CC,
173 LLVMContext &Context, CallingConv::ID CC, EVT VT, EVT &IntermediateVT,
189 CC != CallingConv::X86_RegCall) {
439 if (CC != CallingConv::C && CC != CallingConv::X86_StdCall)
603 F->setCallingConv(CallingConv::X86_FastCall);
662 CallingConv::ID CallConv, MachineFunction &MF, bool isVarArg,
669 const MCPhysReg *X86TargetLowering::getScratchRegisters(CallingConv::ID) const {
735 X86TargetLowering::LowerReturn(SDValue Chain, CallingConv::ID CallConv,
750 if (CallConv == CallingConv::X86_INTR && !Outs.empty())
914 CallConv != CallingConv::PreserveAll &&
915 CallConv != CallingConv::PreserveMost)
938 if (CallConv == CallingConv::X86_INTR)
1094 SDValue Chain, SDValue InGlue, CallingConv::ID CallConv, bool isVarArg,
1247 static bool canGuaranteeTCO(CallingConv::ID CC) {
1248 return (CC == CallingConv::Fast || CC == CallingConv::GHC ||
1249 CC == CallingConv::X86_RegCall || CC == CallingConv::HiPE ||
1250 CC == CallingConv::Tail || CC == CallingConv::SwiftTail);
1254 static bool mayTailCallThisCC(CallingConv::ID CC) {
1257 case CallingConv::C:
1258 case CallingConv::Win64:
1259 case CallingConv::X86_64_SysV:
1260 case CallingConv::PreserveNone:
1262 case CallingConv::X86_ThisCall:
1263 case CallingConv::X86_StdCall:
1264 case CallingConv::X86_VectorCall:
1265 case CallingConv::X86_FastCall:
1267 case CallingConv::Swift:
1276 static bool shouldGuaranteeTCO(CallingConv::ID CC, bool GuaranteedTailCallOpt) {
1278 CC == CallingConv::Tail || CC == CallingConv::SwiftTail;
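
Lines 1247-1278 cover the three tail-call predicates almost completely, so the following reconstruction is well grounded; only the placement of the default case and its fall-back to canGuaranteeTCO are assumed, since they are not in the matched lines. canGuaranteeTCO lists the conventions for which tail-call optimization can always be guaranteed, mayTailCallThisCC adds the C-like and callee-pop conventions that may be sibling-called, and shouldGuaranteeTCO combines the GuaranteedTailCallOpt option with the conventions that demand guaranteed TCO:

static bool canGuaranteeTCO(CallingConv::ID CC) {
  return (CC == CallingConv::Fast || CC == CallingConv::GHC ||
          CC == CallingConv::X86_RegCall || CC == CallingConv::HiPE ||
          CC == CallingConv::Tail || CC == CallingConv::SwiftTail);
}

// Conventions under which a (non-guaranteed) tail call may be emitted.
static bool mayTailCallThisCC(CallingConv::ID CC) {
  switch (CC) {
  // C-like conventions:
  case CallingConv::C:
  case CallingConv::Win64:
  case CallingConv::X86_64_SysV:
  case CallingConv::PreserveNone:
  // Callee-pop conventions:
  case CallingConv::X86_ThisCall:
  case CallingConv::X86_StdCall:
  case CallingConv::X86_VectorCall:
  case CallingConv::X86_FastCall:
  // Swift:
  case CallingConv::Swift:
    return true;
  default:
    return canGuaranteeTCO(CC);
  }
}

// TCO is guaranteed when the option is enabled or the convention itself
// requires it (tailcc / swifttailcc).
static bool shouldGuaranteeTCO(CallingConv::ID CC, bool GuaranteedTailCallOpt) {
  return GuaranteedTailCallOpt || CC == CallingConv::Tail ||
         CC == CallingConv::SwiftTail;
}

Note that mayTailCallThisCC falls through to canGuaranteeTCO, so fastcc, GHC, HiPE, RegCall, tailcc and swifttailcc remain tail-callable even though they are not listed in the switch.
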
1285 CallingConv::ID CalleeCC = CI->getCallingConv();
1293 X86TargetLowering::LowerMemArgument(SDValue Chain, CallingConv::ID CallConv,
1411 static ArrayRef<MCPhysReg> get64BitArgumentGPRs(CallingConv::ID CallConv,
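
Line 1411 starts get64BitArgumentGPRs, which supplies the 64-bit integer argument registers used when setting up the vararg register save area. The register lists below follow the standard ABIs (Win64: RCX, RDX, R8, R9; SysV: RDI, RSI, RDX, RCX, R8, R9); treating isCallingConvWin64 as the dispatch is an assumption:

// Sketch; register lists are the standard Win64 and SysV argument GPRs.
static ArrayRef<MCPhysReg> get64BitArgumentGPRs(CallingConv::ID CallConv,
                                                const X86Subtarget &Subtarget) {
  assert(Subtarget.is64Bit());
  if (Subtarget.isCallingConvWin64(CallConv)) {
    static const MCPhysReg GPR64ArgRegsWin64[] = {X86::RCX, X86::RDX, X86::R8,
                                                  X86::R9};
    return GPR64ArgRegsWin64;
  }
  static const MCPhysReg GPR64ArgRegs64Bit[] = {X86::RDI, X86::RSI, X86::RDX,
                                                X86::RCX, X86::R8,  X86::R9};
  return GPR64ArgRegs64Bit;
}
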
1430 CallingConv::ID CallConv,
1469 CallingConv::ID CallConv, CCState &CCInfo)
1498 CallingConv::ID CallConv;
1508 if (is64Bit() || (CallConv != CallingConv::X86_FastCall &&
1509 CallConv != CallingConv::X86_ThisCall)) {
1620 (is64Bit() || (CallConv == CallingConv::X86_VectorCall ||
1621 CallConv == CallingConv::Intel_OCL_BI)))
1671 SDValue Chain, CallingConv::ID CallConv, bool IsVarArg,
1702 if (CallingConv::X86_VectorCall == CallConv) {
1834 if (CallConv == CallingConv::Swift || CallConv == CallingConv::SwiftTail)
1868 } else if (CallConv == CallingConv::X86_INTR && Ins.size() == 2) {
1910 if (CallingConv::PreserveNone == CallConv)
2001 CallingConv::ID CallConv = CLI.CallConv;
2011 CallConv == CallingConv::Tail || CallConv == CallingConv::SwiftTail;
2023 if (CallConv == CallingConv::X86_INTR)
2038 if (CallingConv::X86_VectorCall == CallConv) {
2262 if (CallConv != CallingConv::X86_RegCall)
2337 assert((CallConv == CallingConv::X86_RegCall) &&
2444 AdaptedCC = (CallingConv::ID)CallingConv::X86_INTR;
2448 AdaptedCC = (CallingConv::ID)CallingConv::GHC;
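
Lines 2444-2448 show the calling convention being swapped before the call-preserved register mask is computed: X86_INTR stands in for callees with no_caller_saved_registers (every register preserved) and GHC for no_callee_saved_registers (no register preserved). A hedged sketch of that selection, written as a standalone helper; the helper name, the HasNCSR flag, the CallBase pointer and the attribute string are assumptions based on common LLVM usage, not visible in the matches:

// Hypothetical helper; in the file this logic sits inline in LowerCall.
static const uint32_t *
selectCallPreservedMask(const MachineFunction &MF,
                        const X86RegisterInfo *RegInfo,
                        CallingConv::ID CallConv, bool HasNCSR,
                        const CallBase *CB) {
  CallingConv::ID AdaptedCC = CallConv;
  // no_caller_saved_registers: treat the call like an interrupt handler so
  // every register counts as preserved.
  if (HasNCSR)
    AdaptedCC = (CallingConv::ID)CallingConv::X86_INTR;
  // no_callee_saved_registers: treat it like GHC, which preserves nothing.
  if (CB && CB->hasFnAttr("no_callee_saved_registers"))
    AdaptedCC = (CallingConv::ID)CallingConv::GHC;
  return RegInfo->getCallPreservedMask(MF, AdaptedCC);
}
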
2570 if (CallingConv::PreserveNone == CallConv)
2738 CallingConv::ID CalleeCC = CLI.CallConv;
2754 CallingConv::ID CallerCC = CallerF.getCallingConv();
2759 CalleeCC == CallingConv::Tail || CalleeCC == CallingConv::SwiftTail;
2920 bool X86::isCalleePop(CallingConv::ID CallingConv,
2924 if (!IsVarArg && shouldGuaranteeTCO(CallingConv, GuaranteeTCO))
2927 switch (CallingConv) {
2930 case CallingConv::X86_StdCall:
2931 case CallingConv::X86_FastCall:
2932 case CallingConv::X86_ThisCall:
2933 case CallingConv::X86_VectorCall:
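
The final cluster (lines 2920-2933) is X86::isCalleePop. From the matches: guaranteed-TCO calls are forced to be callee-pop, and otherwise only stdcall, fastcall, thiscall and vectorcall pop their own arguments, which they do only in 32-bit mode. A sketch; the !is64Bit result and the exact parameter order are assumptions:

bool X86::isCalleePop(CallingConv::ID CallingConv, bool is64Bit, bool IsVarArg,
                      bool GuaranteeTCO) {
  // Calls whose TCO must be guaranteed are treated as callee-pop.
  if (!IsVarArg && shouldGuaranteeTCO(CallingConv, GuaranteeTCO))
    return true;

  switch (CallingConv) {
  default:
    return false;
  case CallingConv::X86_StdCall:
  case CallingConv::X86_FastCall:
  case CallingConv::X86_ThisCall:
  case CallingConv::X86_VectorCall:
    return !is64Bit; // these conventions are callee-pop only on 32-bit x86
  }
}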