Lines Matching defs:CallConv

662 CallingConv::ID CallConv, MachineFunction &MF, bool isVarArg,
665 CCState CCInfo(CallConv, isVarArg, MF, RVLocs, Context);
735 X86TargetLowering::LowerReturn(SDValue Chain, CallingConv::ID CallConv,
747 shouldDisableRetRegFromCSR(CallConv) ||
750 if (CallConv == CallingConv::X86_INTR && !Outs.empty())
754 CCState CCInfo(CallConv, isVarArg, MF, RVLocs, *DAG.getContext());
914 CallConv != CallingConv::PreserveAll &&
915 CallConv != CallingConv::PreserveMost)
938 if (CallConv == CallingConv::X86_INTR)
1094 SDValue Chain, SDValue InGlue, CallingConv::ID CallConv, bool isVarArg,
1102 CCState CCInfo(CallConv, isVarArg, DAG.getMachineFunction(), RVLocs,
1293 X86TargetLowering::LowerMemArgument(SDValue Chain, CallingConv::ID CallConv,
1301 CallConv, DAG.getTarget().Options.GuaranteedTailCallOpt);
1411 static ArrayRef<MCPhysReg> get64BitArgumentGPRs(CallingConv::ID CallConv,
1415 if (Subtarget.isCallingConvWin64(CallConv)) {
1430 CallingConv::ID CallConv,
1433 if (Subtarget.isCallingConvWin64(CallConv)) {
1469 CallingConv::ID CallConv, CCState &CCInfo)
1475 TargLowering(DAG.getTargetLoweringInfo()), CallConv(CallConv),
1487 bool isWin64() const { return Subtarget.isCallingConvWin64(CallConv); }
1498 CallingConv::ID CallConv;
1508 if (is64Bit() || (CallConv != CallingConv::X86_FastCall &&
1509 CallConv != CallingConv::X86_ThisCall)) {
1518 ArrayRef<MCPhysReg> ArgGPRs = get64BitArgumentGPRs(CallConv, Subtarget);
1520 get64BitArgumentXMMs(TheMachineFunction, CallConv, Subtarget);
1620 (is64Bit() || (CallConv == CallingConv::X86_VectorCall ||
1621 CallConv == CallingConv::Intel_OCL_BI)))
1671 SDValue Chain, CallingConv::ID CallConv, bool IsVarArg,
1684 bool IsWin64 = Subtarget.isCallingConvWin64(CallConv);
1687 !(IsVarArg && canGuaranteeTCO(CallConv)) &&
1692 CCState CCInfo(CallConv, IsVarArg, MF, ArgLocs, *DAG.getContext());
1702 if (CallingConv::X86_VectorCall == CallConv) {
1801 LowerMemArgument(Chain, CallConv, Ins, dl, DAG, VA, MFI, InsIndex);
1834 if (CallConv == CallingConv::Swift || CallConv == CallingConv::SwiftTail)
1856 if (shouldGuaranteeTCO(CallConv,
1861 VarArgsLoweringHelper(FuncInfo, dl, DAG, Subtarget, CallConv, CCInfo)
1865 if (X86::isCalleePop(CallConv, Is64Bit, IsVarArg,
1868 } else if (CallConv == CallingConv::X86_INTR && Ins.size() == 2) {
1875 if (!canGuaranteeTCO(CallConv) && hasCalleePopSRet(Ins, Subtarget))
1903 if (shouldDisableArgRegFromCSR(CallConv) ||
1910 if (CallingConv::PreserveNone == CallConv)
2001 CallingConv::ID CallConv = CLI.CallConv;
2008 bool IsWin64 = Subtarget.isCallingConvWin64(CallConv);
2011 CallConv == CallingConv::Tail || CallConv == CallingConv::SwiftTail;
2023 if (CallConv == CallingConv::X86_INTR)
2028 CCState CCInfo(CallConv, isVarArg, MF, ArgLocs, *DAG.getContext());
2038 if (CallingConv::X86_VectorCall == CallConv) {
2073 assert(!(isVarArg && canGuaranteeTCO(CallConv)) &&
2082 else if (IsGuaranteeTCO && canGuaranteeTCO(CallConv))
2087 shouldGuaranteeTCO(CallConv,
2262 if (CallConv != CallingConv::X86_RegCall)
2337 assert((CallConv == CallingConv::X86_RegCall) &&
2439 auto AdaptedCC = CallConv;
2475 bool ShouldDisableArgRegs = shouldDisableArgRegFromCSR(CallConv) || HasNCSR;
2476 if (ShouldDisableArgRegs || shouldDisableRetRegFromCSR(CallConv)) {
2555 if (X86::isCalleePop(CallConv, Is64Bit, isVarArg,
2558 else if (!canGuaranteeTCO(CallConv) && IsCalleePopSRet)
2570 if (CallingConv::PreserveNone == CallConv)
2582 return LowerCallResult(Chain, InGlue, CallConv, isVarArg, Ins, dl, DAG,
2738 CallingConv::ID CalleeCC = CLI.CallConv;
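
The matches above cluster around one recurring pattern: a CCState is seeded with the function's or call's CallingConv::ID and then used to assign argument or return-value locations. Below is a minimal sketch of that pattern, assuming only the generic CCState/CCAssignFn API from llvm/CodeGen/CallingConvLower.h; the helper name and its parameters are illustrative and do not appear in the file searched above.

    // Illustrative sketch only: mirrors the CCState constructions in the
    // matches above, not an excerpt from X86 lowering itself.
    #include "llvm/ADT/SmallVector.h"
    #include "llvm/CodeGen/CallingConvLower.h"
    #include "llvm/CodeGen/MachineFunction.h"
    #include "llvm/CodeGen/SelectionDAG.h"
    #include "llvm/IR/CallingConv.h"

    using namespace llvm;

    // Hypothetical helper: seed a CCState with the calling convention, then
    // let the convention's assignment function decide where each formal
    // argument lives (register vs. stack slot).
    static void analyzeFormalArgsSketch(CallingConv::ID CallConv, bool IsVarArg,
                                        SelectionDAG &DAG,
                                        const SmallVectorImpl<ISD::InputArg> &Ins,
                                        CCAssignFn *AssignFn) {
      MachineFunction &MF = DAG.getMachineFunction();
      SmallVector<CCValAssign, 16> ArgLocs;
      // Same shape as the CCState constructions in the matched lines.
      CCState CCInfo(CallConv, IsVarArg, MF, ArgLocs, *DAG.getContext());
      // Populate ArgLocs; each CCValAssign records a register or stack offset.
      CCInfo.AnalyzeFormalArguments(Ins, AssignFn);
    }

The same CCState object is what downstream checks interrogate; convention-specific branches (e.g. the Win64, X86_VectorCall, and X86_INTR tests in the matches) key off the CallingConv::ID before or after this analysis step.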