Lines matching refs: virtual (the overridable hooks of TargetCodeGenInfo and TargetOpenCLBlockHelper; a minimal override sketch follows the listing)
57 virtual ~TargetCodeGenInfo();
70 virtual void setTargetAttributes(const Decl *D, llvm::GlobalValue *GV,
75 virtual void emitTargetMetadata(
81 virtual void checkFunctionCallABI(CodeGenModule &CGM, SourceLocation CallLoc,
94 virtual unsigned getSizeOfUnwindException() const;
103 virtual bool extendPointerWithSExt() const { return false; }
109 virtual int getDwarfEHStackPointer(CodeGen::CodeGenModule &M) const {
117 virtual bool initDwarfEHRegSizeTable(CodeGen::CodeGenFunction &CGF,
127 virtual llvm::Value *decodeReturnAddress(CodeGen::CodeGenFunction &CGF,
137 virtual llvm::Value *encodeReturnAddress(CodeGen::CodeGenFunction &CGF,
145 virtual llvm::Value *
157 virtual llvm::Type *adjustInlineAsmType(CodeGen::CodeGenFunction &CGF,
165 virtual bool isScalarizableAsmOperand(CodeGen::CodeGenFunction &CGF,
171 virtual void addReturnRegisterOutputs(
180 virtual bool doesReturnSlotInterfereWithArgs() const { return true; }
192 virtual StringRef getARCRetainAutoreleasedReturnValueMarker() const {
198 virtual bool markARCOptimizedReturnCallsAsNoTail() const { return false; }
202 virtual llvm::Constant *
249 virtual bool isNoProtoCallVariadic(const CodeGen::CallArgList &args,
254 virtual void getDependentLibraryOption(llvm::StringRef Lib,
259 virtual void getDetectMismatchOption(llvm::StringRef Name,
264 virtual unsigned getOpenCLKernelCallingConv() const;
271 virtual llvm::Constant *getNullPointer(const CodeGen::CodeGenModule &CGM,
278 virtual LangAS getGlobalVarAddressSpace(CodeGenModule &CGM,
282 virtual LangAS getASTAllocaAddressSpace() const { return LangAS::Default; }
290 virtual llvm::Value *performAddrSpaceCast(CodeGen::CodeGenFunction &CGF,
300 virtual llvm::Constant *performAddrSpaceCast(CodeGenModule &CGM,
306 virtual LangAS getAddrSpaceOfCxaAtexitPtrParam() const {
311 virtual llvm::SyncScope::ID getLLVMSyncScopeID(const LangOptions &LangOpts,
321 virtual ~TargetOpenCLBlockHelper() {}
323 virtual llvm::SmallVector<llvm::Type *, 1> getCustomFieldTypes() = 0;
325 virtual llvm::SmallVector<ValueTy, 1>
327 virtual bool areAllCustomFieldValuesConstant(const CGBlockInfo &Info) = 0;
330 virtual llvm::SmallVector<llvm::Constant *, 1>
333 virtual TargetOpenCLBlockHelper *getTargetOpenCLBlockHelper() const {
342 virtual llvm::Function *
350 virtual bool shouldEmitStaticExternCAliases() const { return true; }
352 virtual void setCUDAKernelCallingConvention(const FunctionType *&FT) const {}
355 virtual llvm::Type *getCUDADeviceBuiltinSurfaceDeviceType() const {
360 virtual llvm::Type *getCUDADeviceBuiltinTextureDeviceType() const {
366 virtual bool emitCUDADeviceBuiltinSurfaceDeviceCopy(CodeGenFunction &CGF,
373 virtual bool emitCUDADeviceBuiltinTextureDeviceCopy(CodeGenFunction &CGF,
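Read together, these matches enumerate the per-target customization points: a backend adjusts IR generation by deriving from TargetCodeGenInfo and overriding only the hooks it needs, leaving the rest at their defaults. The sketch below is illustrative, not code from the listing: the hypothetical MyTargetCodeGenInfo class, the DefaultABIInfo wiring, the in-tree header paths, and the untruncated parameter list of setTargetAttributes are assumptions based on how in-tree targets are commonly written, and exact signatures shift between clang versions.

// Sketch only: a hypothetical target overriding a few of the hooks listed
// above. Header paths follow the in-tree clang/lib/CodeGen layout (assumed).
#include "ABIInfo.h"        // ABIInfo (DefaultABIInfo may live in ABIInfoImpl.h, depending on version)
#include "CodeGenModule.h"
#include "TargetInfo.h"
#include "clang/AST/Decl.h"
#include "llvm/IR/Function.h"

using namespace clang;
using namespace clang::CodeGen;

namespace {
class MyTargetCodeGenInfo : public TargetCodeGenInfo {
public:
  // Constructor wiring (std::unique_ptr<ABIInfo>) matches recent clang;
  // older releases pass a raw ABIInfo pointer instead.
  explicit MyTargetCodeGenInfo(CodeGenTypes &CGT)
      : TargetCodeGenInfo(std::make_unique<DefaultABIInfo>(CGT)) {}

  // Line 70: attach target-specific IR attributes as globals are emitted.
  // The full parameter list is truncated in the listing, so this signature is
  // an assumption, and "my-target-feature" is a placeholder attribute name.
  void setTargetAttributes(const Decl *D, llvm::GlobalValue *GV,
                           CodeGenModule &M) const override {
    if (auto *Fn = llvm::dyn_cast<llvm::Function>(GV))
      Fn->addFnAttr("my-target-feature");
  }

  // Line 94: size in bytes of the target's unwind exception object; 32 is
  // only an illustrative value for this hypothetical target.
  unsigned getSizeOfUnwindException() const override { return 32; }

  // Line 180: report that the sret return slot cannot alias the arguments,
  // letting call lowering skip a defensive copy.
  bool doesReturnSlotInterfereWithArgs() const override { return false; }
};
} // namespace

In-tree targets typically expose such a class through a factory in CodeGenModule that selects the TargetCodeGenInfo implementation from the triple; the overrides above then take effect wherever the listed virtuals are consulted during IR emission.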