Lines Matching full:callconv

237 Type *Ty, CallingConv::ID CallConv, bool isVarArg,
669 CallingConv::ID CallConv, MachineFunction &MF, bool isVarArg,
673 CCState CCInfo(CallConv, isVarArg, MF, RVLocs, Context);
743 X86TargetLowering::LowerReturn(SDValue Chain, CallingConv::ID CallConv,
755 shouldDisableRetRegFromCSR(CallConv) ||
758 if (CallConv == CallingConv::X86_INTR && !Outs.empty())
762 CCState CCInfo(CallConv, isVarArg, MF, RVLocs, *DAG.getContext());
922 CallConv != CallingConv::PreserveAll &&
923 CallConv != CallingConv::PreserveMost)
946 if (CallConv == CallingConv::X86_INTR)
1102 SDValue Chain, SDValue InGlue, CallingConv::ID CallConv, bool isVarArg,
1110 CCState CCInfo(CallConv, isVarArg, DAG.getMachineFunction(), RVLocs,
1301 X86TargetLowering::LowerMemArgument(SDValue Chain, CallingConv::ID CallConv,
1309 CallConv, DAG.getTarget().Options.GuaranteedTailCallOpt);
1419 static ArrayRef<MCPhysReg> get64BitArgumentGPRs(CallingConv::ID CallConv,
1423 if (Subtarget.isCallingConvWin64(CallConv)) {
1438 CallingConv::ID CallConv,
1441 if (Subtarget.isCallingConvWin64(CallConv)) {
1477 CallingConv::ID CallConv, CCState &CCInfo)
1483 TargLowering(DAG.getTargetLoweringInfo()), CallConv(CallConv),
1495 bool isWin64() const { return Subtarget.isCallingConvWin64(CallConv); }
1506 CallingConv::ID CallConv;
1516 if (is64Bit() || (CallConv != CallingConv::X86_FastCall &&
1517 CallConv != CallingConv::X86_ThisCall)) {
1526 ArrayRef<MCPhysReg> ArgGPRs = get64BitArgumentGPRs(CallConv, Subtarget);
1528 get64BitArgumentXMMs(TheMachineFunction, CallConv, Subtarget);
1628 (is64Bit() || (CallConv == CallingConv::X86_VectorCall ||
1629 CallConv == CallingConv::Intel_OCL_BI)))
1679 SDValue Chain, CallingConv::ID CallConv, bool IsVarArg,
1692 bool IsWin64 = Subtarget.isCallingConvWin64(CallConv);
1695 !(IsVarArg && canGuaranteeTCO(CallConv)) &&
1700 CCState CCInfo(CallConv, IsVarArg, MF, ArgLocs, *DAG.getContext());
1710 if (CallingConv::X86_VectorCall == CallConv) {
1809 LowerMemArgument(Chain, CallConv, Ins, dl, DAG, VA, MFI, InsIndex);
1842 if (CallConv == CallingConv::Swift || CallConv == CallingConv::SwiftTail)
1864 if (shouldGuaranteeTCO(CallConv,
1869 VarArgsLoweringHelper(FuncInfo, dl, DAG, Subtarget, CallConv, CCInfo)
1873 if (X86::isCalleePop(CallConv, Is64Bit, IsVarArg,
1876 } else if (CallConv == CallingConv::X86_INTR && Ins.size() == 2) {
1883 if (!canGuaranteeTCO(CallConv) && hasCalleePopSRet(Ins, Subtarget))
1911 if (shouldDisableArgRegFromCSR(CallConv) ||
1918 if (CallingConv::PreserveNone == CallConv)
2009 CallingConv::ID CallConv = CLI.CallConv;
2016 bool IsWin64 = Subtarget.isCallingConvWin64(CallConv);
2019 CallConv == CallingConv::Tail || CallConv == CallingConv::SwiftTail;
2031 if (CallConv == CallingConv::X86_INTR)
2036 CCState CCInfo(CallConv, isVarArg, MF, ArgLocs, *DAG.getContext());
2046 if (CallingConv::X86_VectorCall == CallConv) {
2081 assert(!(isVarArg && canGuaranteeTCO(CallConv)) &&
2090 else if (IsGuaranteeTCO && canGuaranteeTCO(CallConv))
2095 shouldGuaranteeTCO(CallConv,
2270 if (CallConv != CallingConv::X86_RegCall)
2345 assert((CallConv == CallingConv::X86_RegCall) &&
2445 auto AdaptedCC = CallConv;
2492 bool ShouldDisableArgRegs = shouldDisableArgRegFromCSR(CallConv) || HasNCSR;
2493 if (ShouldDisableArgRegs || shouldDisableRetRegFromCSR(CallConv)) {
2574 if (X86::isCalleePop(CallConv, Is64Bit, isVarArg,
2577 else if (!canGuaranteeTCO(CallConv) && IsCalleePopSRet)
2589 if (CallingConv::PreserveNone == CallConv)
2601 return LowerCallResult(Chain, InGlue, CallConv, isVarArg, Ins, dl, DAG,
2757 CallingConv::ID CalleeCC = CLI.CallConv;
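
The matches above all orbit one pattern: the function's CallingConv::ID is threaded into a CCState, which drives argument and return-value location assignment, and is then compared against specific conventions (X86_INTR, X86_VectorCall, Win64, PreserveNone, ...) for special-case handling. The sketch below distills that shape; it is a minimal illustration assuming LLVM's CodeGen headers, not code quoted from the file, and the helper name analyzeFormalArgsForCC is hypothetical.

    #include "llvm/ADT/SmallVector.h"
    #include "llvm/CodeGen/CallingConvLower.h"
    #include "llvm/CodeGen/MachineFunction.h"
    #include "llvm/CodeGen/TargetCallingConv.h"
    #include "llvm/IR/CallingConv.h"
    #include "llvm/IR/Function.h"

    using namespace llvm;

    // Hypothetical helper mirroring the recurring shape of the matches:
    // build a CCState from the call's convention (cf. the
    // `CCState CCInfo(CallConv, ...)` matches at source lines 673, 762,
    // 1700, and 2036), then let the calling-convention assignment
    // function place each formal argument.
    static void analyzeFormalArgsForCC(CallingConv::ID CallConv, bool IsVarArg,
                                       MachineFunction &MF,
                                       const SmallVectorImpl<ISD::InputArg> &Ins,
                                       CCAssignFn AssignFn,
                                       SmallVectorImpl<CCValAssign> &ArgLocs) {
      // CCState couples the convention, variadic-ness, and function context;
      // every `CCState CCInfo(CallConv, ...)` match above follows this shape.
      CCState CCInfo(CallConv, IsVarArg, MF, ArgLocs,
                     MF.getFunction().getContext());
      // Convention-specific special cases sit right next to the CCState in
      // the listed code, e.g. `if (CallingConv::X86_VectorCall == CallConv)`.
      CCInfo.AnalyzeFormalArguments(Ins, AssignFn);
    }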