//===----- CGCall.h - Encapsulate calling convention details ----*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// These classes wrap the information about a call or function
// definition used to handle ABI compliancy.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_CLANG_LIB_CODEGEN_CGCALL_H
#define LLVM_CLANG_LIB_CODEGEN_CGCALL_H

#include "CGPointerAuthInfo.h"
#include "CGValue.h"
#include "EHScopeStack.h"
#include "clang/AST/ASTFwd.h"
#include "clang/AST/CanonicalType.h"
#include "clang/AST/GlobalDecl.h"
#include "clang/AST/Type.h"
#include "llvm/ADT/STLForwardCompat.h"
#include "llvm/IR/Value.h"

namespace llvm {
class Type;
class Value;
} // namespace llvm

namespace clang {
class Decl;
class FunctionDecl;
class TargetOptions;
class VarDecl;

namespace CodeGen {

/// Abstract information about a function or function prototype.
class CGCalleeInfo {
  /// The function prototype of the callee.
  const FunctionProtoType *CalleeProtoTy;
  /// The function declaration of the callee.
  GlobalDecl CalleeDecl;

public:
  explicit CGCalleeInfo() : CalleeProtoTy(nullptr) {}
  CGCalleeInfo(const FunctionProtoType *calleeProtoTy, GlobalDecl calleeDecl)
      : CalleeProtoTy(calleeProtoTy), CalleeDecl(calleeDecl) {}
  CGCalleeInfo(const FunctionProtoType *calleeProtoTy)
      : CalleeProtoTy(calleeProtoTy) {}
  CGCalleeInfo(GlobalDecl calleeDecl)
      : CalleeProtoTy(nullptr), CalleeDecl(calleeDecl) {}

  const FunctionProtoType *getCalleeFunctionProtoType() const {
    return CalleeProtoTy;
  }
  const GlobalDecl getCalleeDecl() const { return CalleeDecl; }
};
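
// Illustrative sketch of how a CGCalleeInfo is typically built. `FD` (a
// FunctionDecl *) and `FPT` (a FunctionProtoType *) are hypothetical locals;
// only the constructors above come from this header.
//
//   // Direct call with a known declaration: prototype plus GlobalDecl.
//   CGCalleeInfo Info(FD->getType()->getAs<FunctionProtoType>(),
//                     GlobalDecl(FD));
//   // Indirect call through a function pointer: only the prototype is known.
//   CGCalleeInfo IndirectInfo(FPT);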

/// All available information about a concrete callee.
class CGCallee {
  enum class SpecialKind : uintptr_t {
    Invalid,
    Builtin,
    PseudoDestructor,
    Virtual,

    Last = Virtual
  };

  struct OrdinaryInfoStorage {
    CGCalleeInfo AbstractInfo;
    CGPointerAuthInfo PointerAuthInfo;
  };
  struct BuiltinInfoStorage {
    const FunctionDecl *Decl;
    unsigned ID;
  };
  struct PseudoDestructorInfoStorage {
    const CXXPseudoDestructorExpr *Expr;
  };
  struct VirtualInfoStorage {
    const CallExpr *CE;
    GlobalDecl MD;
    Address Addr;
    llvm::FunctionType *FTy;
  };

  SpecialKind KindOrFunctionPointer;
  union {
    OrdinaryInfoStorage OrdinaryInfo;
    BuiltinInfoStorage BuiltinInfo;
    PseudoDestructorInfoStorage PseudoDestructorInfo;
    VirtualInfoStorage VirtualInfo;
  };

  explicit CGCallee(SpecialKind kind) : KindOrFunctionPointer(kind) {}

  CGCallee(const FunctionDecl *builtinDecl, unsigned builtinID)
      : KindOrFunctionPointer(SpecialKind::Builtin) {
    BuiltinInfo.Decl = builtinDecl;
    BuiltinInfo.ID = builtinID;
  }

public:
  CGCallee() : KindOrFunctionPointer(SpecialKind::Invalid) {}

  /// Construct a callee. Call this constructor directly when this
  /// isn't a direct call.
  CGCallee(const CGCalleeInfo &abstractInfo, llvm::Value *functionPtr,
           /* FIXME: make parameter pointerAuthInfo mandatory */
           const CGPointerAuthInfo &pointerAuthInfo = CGPointerAuthInfo())
      : KindOrFunctionPointer(
            SpecialKind(reinterpret_cast<uintptr_t>(functionPtr))) {
    OrdinaryInfo.AbstractInfo = abstractInfo;
    OrdinaryInfo.PointerAuthInfo = pointerAuthInfo;
    assert(functionPtr && "configuring callee without function pointer");
    assert(functionPtr->getType()->isPointerTy());
  }

  static CGCallee forBuiltin(unsigned builtinID,
                             const FunctionDecl *builtinDecl) {
    CGCallee result(SpecialKind::Builtin);
    result.BuiltinInfo.Decl = builtinDecl;
    result.BuiltinInfo.ID = builtinID;
    return result;
  }

  static CGCallee forPseudoDestructor(const CXXPseudoDestructorExpr *E) {
    CGCallee result(SpecialKind::PseudoDestructor);
    result.PseudoDestructorInfo.Expr = E;
    return result;
  }

  static CGCallee forDirect(llvm::Constant *functionPtr,
                            const CGCalleeInfo &abstractInfo = CGCalleeInfo()) {
    return CGCallee(abstractInfo, functionPtr);
  }

  static CGCallee forDirect(llvm::FunctionCallee functionPtr,
                            const CGCalleeInfo &abstractInfo = CGCalleeInfo()) {
    return CGCallee(abstractInfo, functionPtr.getCallee());
  }

  static CGCallee forVirtual(const CallExpr *CE, GlobalDecl MD, Address Addr,
                             llvm::FunctionType *FTy) {
    CGCallee result(SpecialKind::Virtual);
    result.VirtualInfo.CE = CE;
    result.VirtualInfo.MD = MD;
    result.VirtualInfo.Addr = Addr;
    result.VirtualInfo.FTy = FTy;
    return result;
  }

  bool isBuiltin() const {
    return KindOrFunctionPointer == SpecialKind::Builtin;
  }
  const FunctionDecl *getBuiltinDecl() const {
    assert(isBuiltin());
    return BuiltinInfo.Decl;
  }
  unsigned getBuiltinID() const {
    assert(isBuiltin());
    return BuiltinInfo.ID;
  }

  bool isPseudoDestructor() const {
    return KindOrFunctionPointer == SpecialKind::PseudoDestructor;
  }
  const CXXPseudoDestructorExpr *getPseudoDestructorExpr() const {
    assert(isPseudoDestructor());
    return PseudoDestructorInfo.Expr;
  }

  bool isOrdinary() const {
    return uintptr_t(KindOrFunctionPointer) > uintptr_t(SpecialKind::Last);
  }
  CGCalleeInfo getAbstractInfo() const {
    if (isVirtual())
      return VirtualInfo.MD;
    assert(isOrdinary());
    return OrdinaryInfo.AbstractInfo;
  }
  const CGPointerAuthInfo &getPointerAuthInfo() const {
    assert(isOrdinary());
    return OrdinaryInfo.PointerAuthInfo;
  }
  llvm::Value *getFunctionPointer() const {
    assert(isOrdinary());
    return reinterpret_cast<llvm::Value *>(uintptr_t(KindOrFunctionPointer));
  }
  void setFunctionPointer(llvm::Value *functionPtr) {
    assert(isOrdinary());
    KindOrFunctionPointer =
        SpecialKind(reinterpret_cast<uintptr_t>(functionPtr));
  }
  void setPointerAuthInfo(CGPointerAuthInfo PointerAuth) {
    assert(isOrdinary());
    OrdinaryInfo.PointerAuthInfo = PointerAuth;
  }

  bool isVirtual() const {
    return KindOrFunctionPointer == SpecialKind::Virtual;
  }
  const CallExpr *getVirtualCallExpr() const {
    assert(isVirtual());
    return VirtualInfo.CE;
  }
  GlobalDecl getVirtualMethodDecl() const {
    assert(isVirtual());
    return VirtualInfo.MD;
  }
  Address getThisAddress() const {
    assert(isVirtual());
    return VirtualInfo.Addr;
  }
  llvm::FunctionType *getVirtualFunctionType() const {
    assert(isVirtual());
    return VirtualInfo.FTy;
  }

  /// If this is a delayed callee computation of some sort, prepare
  /// a concrete callee.
  CGCallee prepareConcreteCallee(CodeGenFunction &CGF) const;
};
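
// Illustrative sketch of the usual ways a CGCallee is produced. The locals
// `Fn` (an llvm::FunctionCallee), `FD` (a FunctionDecl *), `CE`, `MD` (a
// GlobalDecl), `ThisAddr`, `FnTy`, and `CGF` are hypothetical; the factory
// functions and prepareConcreteCallee are the ones declared above.
//
//   CGCallee Direct = CGCallee::forDirect(Fn, CGCalleeInfo(GlobalDecl(FD)));
//   CGCallee Virt = CGCallee::forVirtual(CE, MD, ThisAddr, FnTy);
//   // A delayed (e.g. virtual) callee is made concrete before emission:
//   CGCallee Concrete = Virt.prepareConcreteCallee(CGF);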

struct CallArg {
private:
  union {
    RValue RV;
    LValue LV; /// The argument is semantically a load from this l-value.
  };
  bool HasLV;

  /// A data-flow flag to make sure getRValue and/or copyInto are not
  /// called twice, which would duplicate IR emission.
  mutable bool IsUsed;

public:
  QualType Ty;
  CallArg(RValue rv, QualType ty)
      : RV(rv), HasLV(false), IsUsed(false), Ty(ty) {}
  CallArg(LValue lv, QualType ty)
      : LV(lv), HasLV(true), IsUsed(false), Ty(ty) {}
  bool hasLValue() const { return HasLV; }
  QualType getType() const { return Ty; }

  /// \returns an independent RValue. If the CallArg contains an LValue,
  /// a temporary copy is returned.
  RValue getRValue(CodeGenFunction &CGF) const;

  LValue getKnownLValue() const {
    assert(HasLV && !IsUsed);
    return LV;
  }
  RValue getKnownRValue() const {
    assert(!HasLV && !IsUsed);
    return RV;
  }
  void setRValue(RValue _RV) {
    assert(!HasLV);
    RV = _RV;
  }

  bool isAggregate() const { return HasLV || RV.isAggregate(); }

  void copyInto(CodeGenFunction &CGF, Address A) const;
};
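
// Illustrative sketch of how a consumer might lower one CallArg. `CGF` (a
// CodeGenFunction &), `Arg`, and `Slot` (an Address for aggregate arguments)
// are hypothetical; the member functions used are the ones declared above.
//
//   if (Arg.isAggregate()) {
//     Arg.copyInto(CGF, Slot);   // handles both the RValue and LValue forms
//   } else {
//     // Scalar case; complex values would use getComplexVal() instead.
//     llvm::Value *V = Arg.getKnownRValue().getScalarVal();
//   }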

/// CallArgList - Type for representing both the value and type of
/// arguments in a call.
class CallArgList : public SmallVector<CallArg, 8> {
public:
  CallArgList() = default;

  struct Writeback {
    /// The original argument. Note that the argument l-value
    /// is potentially null.
    LValue Source;

    /// The temporary alloca.
    Address Temporary;

    /// A value to "use" after the writeback, or null.
    llvm::Value *ToUse;

    /// An Expression (optional) that performs the writeback with any required
    /// casting.
    const Expr *WritebackExpr;

    // Size for optional lifetime end on the temporary.
    llvm::Value *LifetimeSz;
  };

  struct CallArgCleanup {
    EHScopeStack::stable_iterator Cleanup;

    /// The "is active" insertion point. This instruction is temporary and
    /// will be removed after insertion.
    llvm::Instruction *IsActiveIP;
  };

  void add(RValue rvalue, QualType type) { push_back(CallArg(rvalue, type)); }

  void addUncopiedAggregate(LValue LV, QualType type) {
    push_back(CallArg(LV, type));
  }

  /// Add all the arguments from another CallArgList to this one. After doing
  /// this, the old CallArgList retains its list of arguments, but must not
  /// be used to emit a call.
  void addFrom(const CallArgList &other) {
    insert(end(), other.begin(), other.end());
    Writebacks.insert(Writebacks.end(), other.Writebacks.begin(),
                      other.Writebacks.end());
    CleanupsToDeactivate.insert(CleanupsToDeactivate.end(),
                                other.CleanupsToDeactivate.begin(),
                                other.CleanupsToDeactivate.end());
    assert(!(StackBase && other.StackBase) && "can't merge stackbases");
    if (!StackBase)
      StackBase = other.StackBase;
  }

  void addWriteback(LValue srcLV, Address temporary, llvm::Value *toUse,
                    const Expr *writebackExpr = nullptr,
                    llvm::Value *lifetimeSz = nullptr) {
    Writeback writeback = {srcLV, temporary, toUse, writebackExpr, lifetimeSz};
    Writebacks.push_back(writeback);
  }

  bool hasWritebacks() const { return !Writebacks.empty(); }

  typedef llvm::iterator_range<SmallVectorImpl<Writeback>::const_iterator>
      writeback_const_range;

  writeback_const_range writebacks() const {
    return writeback_const_range(Writebacks.begin(), Writebacks.end());
  }

  void addArgCleanupDeactivation(EHScopeStack::stable_iterator Cleanup,
                                 llvm::Instruction *IsActiveIP) {
    CallArgCleanup ArgCleanup;
    ArgCleanup.Cleanup = Cleanup;
    ArgCleanup.IsActiveIP = IsActiveIP;
    CleanupsToDeactivate.push_back(ArgCleanup);
  }

  ArrayRef<CallArgCleanup> getCleanupsToDeactivate() const {
    return CleanupsToDeactivate;
  }

  void allocateArgumentMemory(CodeGenFunction &CGF);
  llvm::Instruction *getStackBase() const { return StackBase; }
  void freeArgumentMemory(CodeGenFunction &CGF) const;

  /// Returns whether we're using an inalloca struct to pass arguments in
  /// memory.
  bool isUsingInAlloca() const { return StackBase; }

  // Support reversing writebacks for MSVC ABI.
  void reverseWritebacks() {
    std::reverse(Writebacks.begin(), Writebacks.end());
  }

private:
  SmallVector<Writeback, 1> Writebacks;

  /// Deactivate these cleanups immediately before making the call. This
  /// is used to clean up objects that are owned by the callee once the call
  /// occurs.
  SmallVector<CallArgCleanup, 1> CleanupsToDeactivate;

  /// The stacksave call. It dominates all of the argument evaluation.
  llvm::CallInst *StackBase = nullptr;
};
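
// Illustrative sketch of assembling a CallArgList. `Ctx` (an ASTContext &),
// `SomeScalar` (an llvm::Value *), `SomeLValue`, and `RecordTy` are
// hypothetical; the CallArgList operations are the ones declared above.
//
//   CallArgList Args;
//   Args.add(RValue::get(SomeScalar), Ctx.IntTy);      // by-value scalar
//   Args.addUncopiedAggregate(SomeLValue, RecordTy);   // aggregate, no copy
//   if (Args.isUsingInAlloca()) {
//     // Arguments are passed in memory via an inalloca struct; the
//     // stacksave call is available through getStackBase().
//   }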

/// FunctionArgList - Type for representing both the decl and type
/// of parameters to a function. The decl must be either a
/// ParmVarDecl or ImplicitParamDecl.
class FunctionArgList : public SmallVector<const VarDecl *, 16> {};

/// ReturnValueSlot - Contains the address where the return value of a
/// function can be stored, and whether the address is volatile or not.
class ReturnValueSlot {
  Address Addr = Address::invalid();

  // Return value slot flags
  LLVM_PREFERRED_TYPE(bool)
  unsigned IsVolatile : 1;
  LLVM_PREFERRED_TYPE(bool)
  unsigned IsUnused : 1;
  LLVM_PREFERRED_TYPE(bool)
  unsigned IsExternallyDestructed : 1;

public:
  ReturnValueSlot()
      : IsVolatile(false), IsUnused(false), IsExternallyDestructed(false) {}
  ReturnValueSlot(Address Addr, bool IsVolatile, bool IsUnused = false,
                  bool IsExternallyDestructed = false)
      : Addr(Addr), IsVolatile(IsVolatile), IsUnused(IsUnused),
        IsExternallyDestructed(IsExternallyDestructed) {}

  bool isNull() const { return !Addr.isValid(); }
  bool isVolatile() const { return IsVolatile; }
  Address getValue() const { return Addr; }
  bool isUnused() const { return IsUnused; }
  bool isExternallyDestructed() const { return IsExternallyDestructed; }
  Address getAddress() const { return Addr; }
};

/// Adds attributes to \p F according to our \p CodeGenOpts and \p LangOpts, as
/// though we had emitted it ourselves. We remove any attributes on F that
/// conflict with the attributes we add here.
///
/// This is useful for adding attrs to bitcode modules that you want to link
/// with but don't control, such as CUDA's libdevice. When linking with such
/// a bitcode library, you might want to set e.g. its functions'
/// "unsafe-fp-math" attribute to match the attr of the functions you're
/// codegen'ing. Otherwise, LLVM will interpret the bitcode module's lack of
/// unsafe-fp-math attrs as tantamount to unsafe-fp-math=false, and then LLVM
/// will propagate unsafe-fp-math=false up to every transitive caller of a
/// function in the bitcode library!
///
/// With the exception of fast-math attrs, this will only make the attributes
/// on the function more conservative. But it's unsafe to call this on a
/// function which relies on particular fast-math attributes for correctness.
/// It's up to you to ensure that this is safe.
void mergeDefaultFunctionDefinitionAttributes(llvm::Function &F,
                                              const CodeGenOptions &CodeGenOpts,
                                              const LangOptions &LangOpts,
                                              const TargetOptions &TargetOpts,
                                              bool WillInternalize);

enum class FnInfoOpts {
  None = 0,
  IsInstanceMethod = 1 << 0,
  IsChainCall = 1 << 1,
  IsDelegateCall = 1 << 2,
};

inline FnInfoOpts operator|(FnInfoOpts A, FnInfoOpts B) {
  return static_cast<FnInfoOpts>(llvm::to_underlying(A) |
                                 llvm::to_underlying(B));
}

inline FnInfoOpts operator&(FnInfoOpts A, FnInfoOpts B) {
  return static_cast<FnInfoOpts>(llvm::to_underlying(A) &
                                 llvm::to_underlying(B));
}

inline FnInfoOpts &operator|=(FnInfoOpts &A, FnInfoOpts B) {
  A = A | B;
  return A;
}

inline FnInfoOpts &operator&=(FnInfoOpts &A, FnInfoOpts B) {
  A = A & B;
  return A;
}

} // end namespace CodeGen
} // end namespace clang

#endif