//===- llvm/CodeGen/GlobalISel/CallLowering.h - Call lowering ---*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
///
/// \file
/// This file describes how to lower LLVM calls to machine code calls.
///
//===----------------------------------------------------------------------===//

#ifndef LLVM_CODEGEN_GLOBALISEL_CALLLOWERING_H
#define LLVM_CODEGEN_GLOBALISEL_CALLLOWERING_H

#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/CodeGen/CallingConvLower.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/TargetCallingConv.h"
#include "llvm/CodeGenTypes/LowLevelType.h"
#include "llvm/CodeGenTypes/MachineValueType.h"
#include "llvm/IR/CallingConv.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Value.h"
#include "llvm/Support/ErrorHandling.h"
#include <cstdint>
#include <functional>

namespace llvm {

class AttributeList;
class CallBase;
class DataLayout;
class Function;
class FunctionLoweringInfo;
class MachineIRBuilder;
class MachineFunction;
struct MachinePointerInfo;
class MachineRegisterInfo;
class TargetLowering;

class CallLowering {
  const TargetLowering *TLI;

  virtual void anchor();

public:
  struct BaseArgInfo {
    Type *Ty;
    SmallVector<ISD::ArgFlagsTy, 4> Flags;
    bool IsFixed;

    BaseArgInfo(Type *Ty,
                ArrayRef<ISD::ArgFlagsTy> Flags = ArrayRef<ISD::ArgFlagsTy>(),
                bool IsFixed = true)
        : Ty(Ty), Flags(Flags), IsFixed(IsFixed) {}

    BaseArgInfo() : Ty(nullptr), IsFixed(false) {}
  };

  struct ArgInfo : public BaseArgInfo {
    SmallVector<Register, 4> Regs;
    // If the argument had to be split into multiple parts according to the
    // target calling convention, then this contains the original vregs
    // if the argument was an incoming arg.
    SmallVector<Register, 2> OrigRegs;

    /// Optionally track the original IR value for the argument. This may not
    /// be meaningful in all contexts. It should only be used to forward
    /// aliasing information into the MachinePointerInfo of memory arguments.
    const Value *OrigValue = nullptr;

    /// Index of the original Function's argument.
    unsigned OrigArgIndex;

    /// Sentinel value for implicit machine-level input arguments.
    static const unsigned NoArgIndex = UINT_MAX;

    ArgInfo(ArrayRef<Register> Regs, Type *Ty, unsigned OrigIndex,
            ArrayRef<ISD::ArgFlagsTy> Flags = ArrayRef<ISD::ArgFlagsTy>(),
            bool IsFixed = true, const Value *OrigValue = nullptr)
        : BaseArgInfo(Ty, Flags, IsFixed), Regs(Regs), OrigValue(OrigValue),
          OrigArgIndex(OrigIndex) {
      if (!Regs.empty() && Flags.empty())
        this->Flags.push_back(ISD::ArgFlagsTy());
      // FIXME: We should have just one way of saying "no register".
      assert(((Ty->isVoidTy() || Ty->isEmptyTy()) ==
              (Regs.empty() || Regs[0] == 0)) &&
             "only void types should have no register");
    }

    ArgInfo(ArrayRef<Register> Regs, const Value &OrigValue, unsigned OrigIndex,
            ArrayRef<ISD::ArgFlagsTy> Flags = ArrayRef<ISD::ArgFlagsTy>(),
            bool IsFixed = true)
        : ArgInfo(Regs, OrigValue.getType(), OrigIndex, Flags, IsFixed,
                  &OrigValue) {}

    ArgInfo() = default;
  };
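  // Example (illustrative sketch, not part of the interface): the two common
  // ways of building an ArgInfo. CB, ArgRegs, TmpReg, Ctx and the loop index I
  // are assumptions made for this example only.
  //
  //   // From an IR value; OrigValue is kept for aliasing info on memory args.
  //   const Value *Op = CB.getArgOperand(I);
  //   ArgInfo Arg(ArgRegs[I], *Op, I);
  //
  //   // For a synthetic machine-level input with no corresponding IR
  //   // argument, use the sentinel index.
  //   ArgInfo Implicit({TmpReg}, Type::getInt64Ty(Ctx), ArgInfo::NoArgIndex);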
  struct PtrAuthInfo {
    uint64_t Key;
    Register Discriminator;
  };

  struct CallLoweringInfo {
    /// Calling convention to be used for the call.
    CallingConv::ID CallConv = CallingConv::C;

    /// Destination of the call. It should be either a register, globaladdress,
    /// or externalsymbol.
    MachineOperand Callee = MachineOperand::CreateImm(0);

    /// Descriptor for the return type of the function.
    ArgInfo OrigRet;

    /// List of descriptors of the arguments passed to the function.
    SmallVector<ArgInfo, 32> OrigArgs;

    /// Valid if the call has a swifterror inout parameter, and contains the
    /// vreg that the swifterror should be copied into after the call.
    Register SwiftErrorVReg;

    /// Valid if the call is a controlled convergent operation.
    Register ConvergenceCtrlToken;

    /// Original IR callsite corresponding to this call, if available.
    const CallBase *CB = nullptr;

    MDNode *KnownCallees = nullptr;

    /// The auth-call information in the "ptrauth" bundle, if present.
    std::optional<PtrAuthInfo> PAI;

    /// True if the call must be tail call optimized.
    bool IsMustTailCall = false;

    /// True if the call passes all target-independent checks for tail call
    /// optimization.
    bool IsTailCall = false;

    /// True if the call was lowered as a tail call. This is consumed by the
    /// legalizer. This allows the legalizer to lower libcalls as tail calls.
    bool LoweredTailCall = false;

    /// True if the call is to a vararg function.
    bool IsVarArg = false;

    /// True if the function's return value can be lowered to registers.
    bool CanLowerReturn = true;

    /// VReg to hold the hidden sret parameter.
    Register DemoteRegister;

    /// The stack index for sret demotion.
    int DemoteStackIndex;

    /// Expected type identifier for indirect calls with a CFI check.
    const ConstantInt *CFIType = nullptr;

    /// True if this call results in convergent operations.
    bool IsConvergent = true;
  };
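  // Example (illustrative sketch): how a caller of the target's lowerCall hook
  // might describe a direct call. CB (the IR callsite), Callee, RetTy, RetRegs,
  // ArgTy and ArgRegs are assumptions for this example; IRTranslator is the
  // real driver that fills in this structure.
  //
  //   CallLoweringInfo Info;
  //   Info.CallConv = CB.getCallingConv();
  //   Info.Callee = MachineOperand::CreateGA(&Callee, /*Offset=*/0);
  //   Info.OrigRet = ArgInfo(RetRegs, RetTy, ArgInfo::NoArgIndex);
  //   Info.OrigArgs.push_back(ArgInfo(ArgRegs, ArgTy, /*OrigIndex=*/0));
  //   Info.CB = &CB;
  //   Info.IsVarArg = CB.getFunctionType()->isVarArg();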
  /// Argument handling is mostly uniform between the four places that
  /// make these decisions: function formal arguments, call
  /// instruction args, call instruction returns and function
  /// returns. However, once a decision has been made on where an
  /// argument should go, exactly what happens can vary slightly. This
  /// class abstracts the differences.
  ///
  /// ValueAssigner should not depend on any specific function state, and
  /// only determine the types and locations for arguments.
  struct ValueAssigner {
    ValueAssigner(bool IsIncoming, CCAssignFn *AssignFn_,
                  CCAssignFn *AssignFnVarArg_ = nullptr)
        : AssignFn(AssignFn_), AssignFnVarArg(AssignFnVarArg_),
          IsIncomingArgumentHandler(IsIncoming) {
      // Some targets change the handler depending on whether the call is
      // varargs or not. If no separate vararg assignment function was
      // provided, fall back to the common one.
      if (!AssignFnVarArg)
        AssignFnVarArg = AssignFn;
    }

    virtual ~ValueAssigner() = default;

    /// Returns true if the handler is dealing with incoming arguments,
    /// i.e. those that move values from some physical location to vregs.
    bool isIncomingArgumentHandler() const {
      return IsIncomingArgumentHandler;
    }

    /// Wrap a call to the (typically tablegen-generated) CCAssignFn. This may
    /// be overridden to track additional state information as arguments are
    /// assigned, or to apply target specific hacks around the legacy
    /// infrastructure.
    virtual bool assignArg(unsigned ValNo, EVT OrigVT, MVT ValVT, MVT LocVT,
                           CCValAssign::LocInfo LocInfo, const ArgInfo &Info,
                           ISD::ArgFlagsTy Flags, CCState &State) {
      if (getAssignFn(State.isVarArg())(ValNo, ValVT, LocVT, LocInfo, Flags,
                                        State))
        return true;
      StackSize = State.getStackSize();
      return false;
    }

    /// Assignment function to use for a general call.
    CCAssignFn *AssignFn;

    /// Assignment function to use for a variadic call. This is usually the
    /// same as AssignFn on most targets.
    CCAssignFn *AssignFnVarArg;

    /// The size of the currently allocated portion of the stack.
    uint64_t StackSize = 0;

    /// Select the appropriate assignment function depending on whether this is
    /// a variadic call.
    CCAssignFn *getAssignFn(bool IsVarArg) const {
      return IsVarArg ? AssignFnVarArg : AssignFn;
    }

  private:
    const bool IsIncomingArgumentHandler;
    virtual void anchor();
  };

  struct IncomingValueAssigner : public ValueAssigner {
    IncomingValueAssigner(CCAssignFn *AssignFn_,
                          CCAssignFn *AssignFnVarArg_ = nullptr)
        : ValueAssigner(true, AssignFn_, AssignFnVarArg_) {}
  };

  struct OutgoingValueAssigner : public ValueAssigner {
    OutgoingValueAssigner(CCAssignFn *AssignFn_,
                          CCAssignFn *AssignFnVarArg_ = nullptr)
        : ValueAssigner(false, AssignFn_, AssignFnVarArg_) {}
  };
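  // Example (illustrative sketch): targets typically wrap their
  // tablegen-generated calling-convention functions in these assigners.
  // CC_MyTarget, CC_MyTarget_VarArg and RetCC_MyTarget are hypothetical names.
  //
  //   OutgoingValueAssigner ArgAssigner(CC_MyTarget, CC_MyTarget_VarArg);
  //   IncomingValueAssigner RetAssigner(RetCC_MyTarget);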
  struct ValueHandler {
    MachineIRBuilder &MIRBuilder;
    MachineRegisterInfo &MRI;
    const bool IsIncomingArgumentHandler;

    ValueHandler(bool IsIncoming, MachineIRBuilder &MIRBuilder,
                 MachineRegisterInfo &MRI)
        : MIRBuilder(MIRBuilder), MRI(MRI),
          IsIncomingArgumentHandler(IsIncoming) {}

    virtual ~ValueHandler() = default;

    /// Returns true if the handler is dealing with incoming arguments,
    /// i.e. those that move values from some physical location to vregs.
    bool isIncomingArgumentHandler() const {
      return IsIncomingArgumentHandler;
    }

    /// Materialize a VReg containing the address of the specified
    /// stack-based object. This is either based on a FrameIndex or
    /// direct SP manipulation, depending on the context. \p MPO
    /// should be initialized to an appropriate description of the
    /// address created.
    virtual Register getStackAddress(uint64_t MemSize, int64_t Offset,
                                     MachinePointerInfo &MPO,
                                     ISD::ArgFlagsTy Flags) = 0;

    /// Return the in-memory size to write for the argument at \p VA. This may
    /// be smaller than the allocated stack slot size.
    ///
    /// This is overridable primarily for targets to maintain compatibility
    /// with hacks around the existing DAG call lowering infrastructure.
    virtual LLT getStackValueStoreType(const DataLayout &DL,
                                       const CCValAssign &VA,
                                       ISD::ArgFlagsTy Flags) const;

    /// The specified value has been assigned to a physical register; handle
    /// the appropriate COPY (either to or from) and mark any relevant
    /// uses/defines as needed.
    virtual void assignValueToReg(Register ValVReg, Register PhysReg,
                                  const CCValAssign &VA) = 0;

    /// The specified value has been assigned to a stack
    /// location. Load or store it there, with appropriate extension
    /// if necessary.
    virtual void assignValueToAddress(Register ValVReg, Register Addr,
                                      LLT MemTy, const MachinePointerInfo &MPO,
                                      const CCValAssign &VA) = 0;

    /// An overload which takes an ArgInfo if additional information about the
    /// arg is needed. \p ValRegIndex is the index in \p Arg.Regs for the value
    /// to store.
    virtual void assignValueToAddress(const ArgInfo &Arg, unsigned ValRegIndex,
                                      Register Addr, LLT MemTy,
                                      const MachinePointerInfo &MPO,
                                      const CCValAssign &VA) {
      assignValueToAddress(Arg.Regs[ValRegIndex], Addr, MemTy, MPO, VA);
    }

    /// Handle custom values, which may be passed into one or more of \p VAs.
    /// If the handler wants the assignments to be delayed until after the
    /// memory location assignments, it sets \p Thunk to the function that
    /// performs them.
    /// \return The number of \p VAs that have been assigned including the
    ///         first one, and which should therefore be skipped from further
    ///         processing.
    virtual unsigned assignCustomValue(ArgInfo &Arg, ArrayRef<CCValAssign> VAs,
                                       std::function<void()> *Thunk = nullptr) {
      // This is not a pure virtual method because not all targets need to
      // worry about custom values.
      llvm_unreachable("Custom values not supported");
    }

    /// Do a memory copy of \p MemSize bytes from \p SrcPtr to \p DstPtr. This
    /// is necessary for outgoing stack-passed byval arguments.
    void
    copyArgumentMemory(const ArgInfo &Arg, Register DstPtr, Register SrcPtr,
                       const MachinePointerInfo &DstPtrInfo, Align DstAlign,
                       const MachinePointerInfo &SrcPtrInfo, Align SrcAlign,
                       uint64_t MemSize, CCValAssign &VA) const;

    /// Extend a register to the location type given in \p VA, capped at
    /// extending to at most \p MaxSizeBits bits. If \p MaxSizeBits is 0 then
    /// no maximum is set.
    Register extendRegister(Register ValReg, const CCValAssign &VA,
                            unsigned MaxSizeBits = 0);
  };
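  // Example (illustrative sketch): a minimal handler for outgoing values on a
  // hypothetical target that only passes arguments in registers. The name
  // MyOutgoingHandler and the MachineInstrBuilder for the call instruction are
  // assumptions for this example; real targets usually derive from the
  // OutgoingValueHandler convenience base declared below.
  //
  //   struct MyOutgoingHandler : public CallLowering::ValueHandler {
  //     MyOutgoingHandler(MachineIRBuilder &B, MachineRegisterInfo &MRI,
  //                       MachineInstrBuilder MIB)
  //         : ValueHandler(/*IsIncoming=*/false, B, MRI), MIB(MIB) {}
  //
  //     void assignValueToReg(Register ValVReg, Register PhysReg,
  //                           const CCValAssign &VA) override {
  //       // Extend to the location type, copy into the physical register and
  //       // mark the register as an implicit use of the call.
  //       Register ExtReg = extendRegister(ValVReg, VA);
  //       MIRBuilder.buildCopy(PhysReg, ExtReg);
  //       MIB.addUse(PhysReg, RegState::Implicit);
  //     }
  //
  //     Register getStackAddress(uint64_t MemSize, int64_t Offset,
  //                              MachinePointerInfo &MPO,
  //                              ISD::ArgFlagsTy Flags) override {
  //       llvm_unreachable("stack-passed arguments not supported");
  //     }
  //
  //     void assignValueToAddress(Register ValVReg, Register Addr, LLT MemTy,
  //                               const MachinePointerInfo &MPO,
  //                               const CCValAssign &VA) override {
  //       llvm_unreachable("stack-passed arguments not supported");
  //     }
  //
  //     MachineInstrBuilder MIB;
  //   };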
  /// Base class for ValueHandlers used for arguments coming into the current
  /// function, or for return values received from a call.
  struct IncomingValueHandler : public ValueHandler {
    IncomingValueHandler(MachineIRBuilder &MIRBuilder, MachineRegisterInfo &MRI)
        : ValueHandler(/*IsIncoming*/ true, MIRBuilder, MRI) {}

    /// Insert G_ASSERT_ZEXT/G_ASSERT_SEXT or other hint instruction based on
    /// \p VA, returning the new register if a hint was inserted.
    Register buildExtensionHint(const CCValAssign &VA, Register SrcReg,
                                LLT NarrowTy);

    /// Provides a default implementation for argument handling.
    void assignValueToReg(Register ValVReg, Register PhysReg,
                          const CCValAssign &VA) override;
  };

  /// Base class for ValueHandlers used for arguments passed to a function
  /// call, or for return values.
  struct OutgoingValueHandler : public ValueHandler {
    OutgoingValueHandler(MachineIRBuilder &MIRBuilder, MachineRegisterInfo &MRI)
        : ValueHandler(/*IsIncoming*/ false, MIRBuilder, MRI) {}
  };

protected:
  /// Getter for generic TargetLowering class.
  const TargetLowering *getTLI() const {
    return TLI;
  }

  /// Getter for target specific TargetLowering class.
  template <class XXXTargetLowering>
  const XXXTargetLowering *getTLI() const {
    return static_cast<const XXXTargetLowering *>(TLI);
  }
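  // Example (illustrative sketch): a target's CallLowering implementation can
  // recover its own TargetLowering subclass, e.g. for a hypothetical
  // MyTargetLowering:
  //
  //   const MyTargetLowering &TLI = *getTLI<MyTargetLowering>();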
  /// \returns Flags corresponding to the attributes on the \p ArgIdx-th
  /// parameter of \p Call.
  ISD::ArgFlagsTy getAttributesForArgIdx(const CallBase &Call,
                                         unsigned ArgIdx) const;

  /// \returns Flags corresponding to the attributes on the return from
  /// \p Call.
  ISD::ArgFlagsTy getAttributesForReturn(const CallBase &Call) const;

  /// Adds flags to \p Flags based on the attributes in \p Attrs.
  /// \p OpIdx is the index in \p Attrs to add flags from.
  void addArgFlagsFromAttributes(ISD::ArgFlagsTy &Flags,
                                 const AttributeList &Attrs,
                                 unsigned OpIdx) const;

  template <typename FuncInfoTy>
  void setArgFlags(ArgInfo &Arg, unsigned OpIdx, const DataLayout &DL,
                   const FuncInfoTy &FuncInfo) const;

  /// Break \p OrigArgInfo into one or more pieces the calling convention can
  /// process, returned in \p SplitArgs. For example, this should break structs
  /// down into individual fields.
  ///
  /// If \p Offsets is non-null, it points to a vector to be filled in
  /// with the in-memory offsets of each of the individual values.
  void splitToValueTypes(const ArgInfo &OrigArgInfo,
                         SmallVectorImpl<ArgInfo> &SplitArgs,
                         const DataLayout &DL, CallingConv::ID CallConv,
                         SmallVectorImpl<uint64_t> *Offsets = nullptr) const;

  /// Analyze the argument list in \p Args, using \p Assigner to populate \p
  /// CCInfo. This will determine the types and locations to use for passed or
  /// returned values. This may resize fields in \p Args if the value is split
  /// across multiple registers or stack slots.
  ///
  /// This is independent of the function state and can be used to determine
  /// how a call would pass arguments without needing to change the function.
  /// This can be used to check if arguments are suitable for tail call
  /// lowering.
  ///
  /// \return True if everything has succeeded, false otherwise.
  bool determineAssignments(ValueAssigner &Assigner,
                            SmallVectorImpl<ArgInfo> &Args,
                            CCState &CCInfo) const;

  /// Invoke ValueAssigner::assignArg on each of the given \p Args and then use
  /// \p Handler to move them to the assigned locations.
  ///
  /// \return True if everything has succeeded, false otherwise.
  bool
  determineAndHandleAssignments(ValueHandler &Handler, ValueAssigner &Assigner,
                                SmallVectorImpl<ArgInfo> &Args,
                                MachineIRBuilder &MIRBuilder,
                                CallingConv::ID CallConv, bool IsVarArg,
                                ArrayRef<Register> ThisReturnRegs = {}) const;

  /// Use \p Handler to insert code to handle the argument/return values
  /// represented by \p Args. It is expected that determineAssignments has
  /// previously been called on these arguments to populate \p CCState and
  /// \p ArgLocs.
  bool handleAssignments(ValueHandler &Handler, SmallVectorImpl<ArgInfo> &Args,
                         CCState &CCState,
                         SmallVectorImpl<CCValAssign> &ArgLocs,
                         MachineIRBuilder &MIRBuilder,
                         ArrayRef<Register> ThisReturnRegs = {}) const;
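  // Example (illustrative sketch): the usual flow of the helpers above in a
  // target's lowerFormalArguments. MyIncomingHandler and CC_MyTarget stand in
  // for target-provided pieces; Idx, DL, MRI and VRegs are assumptions for
  // this example.
  //
  //   SmallVector<ArgInfo, 8> SplitArgs;
  //   unsigned Idx = 0;
  //   for (const auto &Arg : F.args()) {
  //     ArgInfo OrigArg(VRegs[Idx], Arg.getType(), Idx);
  //     setArgFlags(OrigArg, Idx + AttributeList::FirstArgIndex, DL, F);
  //     splitToValueTypes(OrigArg, SplitArgs, DL, F.getCallingConv());
  //     ++Idx;
  //   }
  //
  //   IncomingValueAssigner Assigner(CC_MyTarget);
  //   MyIncomingHandler Handler(MIRBuilder, MRI);
  //   return determineAndHandleAssignments(Handler, Assigner, SplitArgs,
  //                                        MIRBuilder, F.getCallingConv(),
  //                                        F.isVarArg());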
  /// Check whether parameters to a call that are passed in callee saved
  /// registers are the same as from the calling function. This needs to be
  /// checked for tail call eligibility.
  bool parametersInCSRMatch(const MachineRegisterInfo &MRI,
                            const uint32_t *CallerPreservedMask,
                            const SmallVectorImpl<CCValAssign> &ArgLocs,
                            const SmallVectorImpl<ArgInfo> &OutVals) const;

  /// \returns True if the calling conventions of the callee and the caller
  /// pass results in the same way. Typically used for tail call eligibility
  /// checks.
  ///
  /// \p Info is the CallLoweringInfo for the call.
  /// \p MF is the MachineFunction for the caller.
  /// \p InArgs contains the results of the call.
  /// \p CalleeAssigner specifies the target's handling of the argument types
  /// for the callee.
  /// \p CallerAssigner specifies the target's handling of the argument types
  /// for the caller.
  bool resultsCompatible(CallLoweringInfo &Info, MachineFunction &MF,
                         SmallVectorImpl<ArgInfo> &InArgs,
                         ValueAssigner &CalleeAssigner,
                         ValueAssigner &CallerAssigner) const;

public:
  CallLowering(const TargetLowering *TLI) : TLI(TLI) {}
  virtual ~CallLowering() = default;

  /// \return true if the target is capable of handling swifterror values that
  /// have been promoted to a specified register. The extended versions of
  /// lowerReturn and lowerCall should be implemented.
  virtual bool supportSwiftError() const {
    return false;
  }

  /// Load the returned value from the stack into virtual registers in
  /// \p VRegs. It uses the frame index \p FI and the start offset from
  /// \p DemoteReg. The loaded data size will be determined from \p RetTy.
  void insertSRetLoads(MachineIRBuilder &MIRBuilder, Type *RetTy,
                       ArrayRef<Register> VRegs, Register DemoteReg,
                       int FI) const;

  /// Store the return value given by \p VRegs into the stack starting at the
  /// offset specified in \p DemoteReg.
  void insertSRetStores(MachineIRBuilder &MIRBuilder, Type *RetTy,
                        ArrayRef<Register> VRegs, Register DemoteReg) const;

  /// Insert the hidden sret ArgInfo at the beginning of \p SplitArgs.
  /// This function should be called from the target specific
  /// lowerFormalArguments when \p F requires the sret demotion.
  void insertSRetIncomingArgument(const Function &F,
                                  SmallVectorImpl<ArgInfo> &SplitArgs,
                                  Register &DemoteReg, MachineRegisterInfo &MRI,
                                  const DataLayout &DL) const;

  /// For the call-base described by \p CB, insert the hidden sret ArgInfo into
  /// the OrigArgs field of \p Info.
  void insertSRetOutgoingArgument(MachineIRBuilder &MIRBuilder,
                                  const CallBase &CB,
                                  CallLoweringInfo &Info) const;

  /// \return True if the return type described by \p Outs can be returned
  /// without performing sret demotion.
  bool checkReturn(CCState &CCInfo, SmallVectorImpl<BaseArgInfo> &Outs,
                   CCAssignFn *Fn) const;

  /// Get the type and the ArgFlags for the split components of \p RetTy as
  /// returned by \c ComputeValueVTs.
  void getReturnInfo(CallingConv::ID CallConv, Type *RetTy, AttributeList Attrs,
                     SmallVectorImpl<BaseArgInfo> &Outs,
                     const DataLayout &DL) const;

  /// Top-level function to check the return type based on the target calling
  /// convention. \return True if the return value of \p MF can be returned
  /// without performing sret demotion.
  bool checkReturnTypeForCallConv(MachineFunction &MF) const;

  /// This hook must be implemented to check whether the return values
  /// described by \p Outs can fit into the return registers. If false
  /// is returned, an sret-demotion is performed.
  virtual bool canLowerReturn(MachineFunction &MF, CallingConv::ID CallConv,
                              SmallVectorImpl<BaseArgInfo> &Outs,
                              bool IsVarArg) const {
    return true;
  }

  /// This hook must be implemented to lower outgoing return values, described
  /// by \p Val, into the specified virtual registers \p VRegs.
  /// This hook is used by GlobalISel.
  ///
  /// \p FLI is required for sret demotion.
  ///
  /// \p SwiftErrorVReg is non-zero if the function has a swifterror parameter
  /// that needs to be implicitly returned.
  ///
  /// \return True if the lowering succeeds, false otherwise.
  virtual bool lowerReturn(MachineIRBuilder &MIRBuilder, const Value *Val,
                           ArrayRef<Register> VRegs, FunctionLoweringInfo &FLI,
                           Register SwiftErrorVReg) const {
    if (!supportSwiftError()) {
      assert(SwiftErrorVReg == 0 && "attempt to use unsupported swifterror");
      return lowerReturn(MIRBuilder, Val, VRegs, FLI);
    }
    return false;
  }

  /// This hook behaves as the extended lowerReturn function, but for targets
  /// that do not support swifterror value promotion.
  virtual bool lowerReturn(MachineIRBuilder &MIRBuilder, const Value *Val,
                           ArrayRef<Register> VRegs,
                           FunctionLoweringInfo &FLI) const {
    return false;
  }

  virtual bool fallBackToDAGISel(const MachineFunction &MF) const {
    return false;
  }

  /// This hook must be implemented to lower the incoming (formal)
  /// arguments, described by \p VRegs, for GlobalISel. Each argument
  /// must end up in the related virtual registers described by \p VRegs.
  /// In other words, the first argument should end up in \c VRegs[0],
  /// the second in \c VRegs[1], and so on. For each argument, there will be
  /// one register for each non-aggregate type, as returned by
  /// \c computeValueLLTs. \p MIRBuilder is set to the proper insertion point
  /// for the argument lowering. \p FLI is required for sret demotion.
  ///
  /// \return True if the lowering succeeded, false otherwise.
  virtual bool lowerFormalArguments(MachineIRBuilder &MIRBuilder,
                                    const Function &F,
                                    ArrayRef<ArrayRef<Register>> VRegs,
                                    FunctionLoweringInfo &FLI) const {
    return false;
  }

  /// This hook must be implemented to lower the given call instruction,
  /// including argument and return value marshalling.
  ///
  /// \return true if the lowering succeeded, false otherwise.
  virtual bool lowerCall(MachineIRBuilder &MIRBuilder,
                         CallLoweringInfo &Info) const {
    return false;
  }
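  // Example (illustrative sketch): the overall shape of a target's subclass.
  // MyTargetCallLowering and MyTargetLowering are hypothetical names; see the
  // in-tree targets (e.g. AArch64CallLowering) for real implementations.
  //
  //   class MyTargetCallLowering : public CallLowering {
  //   public:
  //     MyTargetCallLowering(const MyTargetLowering &TLI)
  //         : CallLowering(&TLI) {}
  //
  //     bool lowerReturn(MachineIRBuilder &MIRBuilder, const Value *Val,
  //                      ArrayRef<Register> VRegs,
  //                      FunctionLoweringInfo &FLI) const override;
  //     bool lowerFormalArguments(MachineIRBuilder &MIRBuilder,
  //                               const Function &F,
  //                               ArrayRef<ArrayRef<Register>> VRegs,
  //                               FunctionLoweringInfo &FLI) const override;
  //     bool lowerCall(MachineIRBuilder &MIRBuilder,
  //                    CallLoweringInfo &Info) const override;
  //   };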
  /// Lower the given call instruction, including argument and return value
  /// marshalling.
  ///
  /// \p Call is the call/invoke instruction.
  ///
  /// \p ResRegs are the registers where the call's return value should be
  /// stored (or 0 if there is no return value). There will be one register for
  /// each non-aggregate type, as returned by \c computeValueLLTs.
  ///
  /// \p ArgRegs is a list of lists of virtual registers containing each
  /// argument that needs to be passed (argument \c i should be placed in \c
  /// ArgRegs[i]). For each argument, there will be one register for each
  /// non-aggregate type, as returned by \c computeValueLLTs.
  ///
  /// \p SwiftErrorVReg is non-zero if the call has a swifterror inout
  /// parameter, and contains the vreg that the swifterror should be copied
  /// into after the call.
  ///
  /// \p GetCalleeReg is a callback to materialize a register for the callee if
  /// the target determines it cannot jump to the destination based purely on
  /// \p Call. This might be because \p Call is indirect, or because of the
  /// limited range of an immediate jump.
  ///
  /// \return true if the lowering succeeded, false otherwise.
  bool lowerCall(MachineIRBuilder &MIRBuilder, const CallBase &Call,
                 ArrayRef<Register> ResRegs,
                 ArrayRef<ArrayRef<Register>> ArgRegs, Register SwiftErrorVReg,
                 std::optional<PtrAuthInfo> PAI, Register ConvergenceCtrlToken,
                 std::function<unsigned()> GetCalleeReg) const;

  /// Targets that want to use big-endian lowering can enable it by overriding
  /// this hook.
  virtual bool enableBigEndian() const { return false; }

  /// For targets which support the "returned" parameter attribute, returns
  /// true if the given type is a valid one to use with "returned".
  virtual bool isTypeIsValidForThisReturn(EVT Ty) const { return false; }
};

} // end namespace llvm

#endif // LLVM_CODEGEN_GLOBALISEL_CALLLOWERING_H