//===- MemoryBuiltins.cpp - Identify calls to memory builtins -------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This family of functions identifies calls to builtin functions that allocate
// or free memory.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/MemoryBuiltins.h"
#include "llvm/ADT/APInt.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/TargetFolder.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/Analysis/Utils/Local.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/Argument.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Value.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include <cassert>
#include <cstdint>
#include <iterator>
#include <numeric>
#include <optional>
#include <type_traits>
#include <utility>

using namespace llvm;

#define DEBUG_TYPE "memory-builtins"

static cl::opt<unsigned> ObjectSizeOffsetVisitorMaxVisitInstructions(
    "object-size-offset-visitor-max-visit-instructions",
    cl::desc("Maximum number of instructions for ObjectSizeOffsetVisitor to "
             "look at"),
    cl::init(100));

enum AllocType : uint8_t {
  OpNewLike = 1<<0,  // allocates; never returns null
  MallocLike = 1<<1, // allocates; may return null
  StrDupLike = 1<<2,
  MallocOrOpNewLike = MallocLike | OpNewLike,
  AllocLike = MallocOrOpNewLike | StrDupLike,
  AnyAlloc = AllocLike
};

enum class MallocFamily {
  Malloc,
  CPPNew,             // new(unsigned int)
  CPPNewAligned,      // new(unsigned int, align_val_t)
  CPPNewArray,        // new[](unsigned int)
  CPPNewArrayAligned, // new[](unsigned long, align_val_t)
  MSVCNew,            // new(unsigned int)
  MSVCArrayNew,       // new[](unsigned int)
  VecMalloc,
  KmpcAllocShared,
};

StringRef mangledNameForMallocFamily(const MallocFamily &Family) {
  switch (Family) {
  case MallocFamily::Malloc:
    return "malloc";
  case MallocFamily::CPPNew:
    return "_Znwm";
  case MallocFamily::CPPNewAligned:
    return "_ZnwmSt11align_val_t";
  case MallocFamily::CPPNewArray:
    return "_Znam";
  case MallocFamily::CPPNewArrayAligned:
    return "_ZnamSt11align_val_t";
  case MallocFamily::MSVCNew:
    return "??2@YAPAXI@Z";
  case MallocFamily::MSVCArrayNew:
    return "??_U@YAPAXI@Z";
  case MallocFamily::VecMalloc:
    return "vec_malloc";
  case MallocFamily::KmpcAllocShared:
    return "__kmpc_alloc_shared";
  }
  llvm_unreachable("missing an alloc family");
}

struct AllocFnsTy {
  AllocType AllocTy;
  unsigned NumParams;
  // First and Second size parameters (or -1 if unused)
  int FstParam, SndParam;
  // Alignment parameter for aligned_alloc and aligned new
  int AlignParam;
  // Name of default allocator function to group malloc/free calls by family
  MallocFamily Family;
};

// clang-format off
// FIXME: certain users need more information. E.g., SimplifyLibCalls needs to
// know which functions are nounwind or noalias and which parameters are
// nocapture, etc.
static const std::pair<LibFunc, AllocFnsTy> AllocationFnData[] = {
    {LibFunc_Znwj, {OpNewLike, 1, 0, -1, -1, MallocFamily::CPPNew}}, // new(unsigned int)
    {LibFunc_ZnwjRKSt9nothrow_t, {MallocLike, 2, 0, -1, -1, MallocFamily::CPPNew}}, // new(unsigned int, nothrow)
    {LibFunc_ZnwjSt11align_val_t, {OpNewLike, 2, 0, -1, 1, MallocFamily::CPPNewAligned}}, // new(unsigned int, align_val_t)
    {LibFunc_ZnwjSt11align_val_tRKSt9nothrow_t, {MallocLike, 3, 0, -1, 1, MallocFamily::CPPNewAligned}}, // new(unsigned int, align_val_t, nothrow)
    {LibFunc_Znwm, {OpNewLike, 1, 0, -1, -1, MallocFamily::CPPNew}}, // new(unsigned long)
    {LibFunc_Znwm12__hot_cold_t, {OpNewLike, 2, 0, -1, -1, MallocFamily::CPPNew}}, // new(unsigned long, __hot_cold_t)
    {LibFunc_ZnwmRKSt9nothrow_t, {MallocLike, 2, 0, -1, -1, MallocFamily::CPPNew}}, // new(unsigned long, nothrow)
    {LibFunc_ZnwmRKSt9nothrow_t12__hot_cold_t, {MallocLike, 3, 0, -1, -1, MallocFamily::CPPNew}}, // new(unsigned long, nothrow, __hot_cold_t)
    {LibFunc_ZnwmSt11align_val_t, {OpNewLike, 2, 0, -1, 1, MallocFamily::CPPNewAligned}}, // new(unsigned long, align_val_t)
    {LibFunc_ZnwmSt11align_val_t12__hot_cold_t, {OpNewLike, 3, 0, -1, 1, MallocFamily::CPPNewAligned}}, // new(unsigned long, align_val_t, __hot_cold_t)
    {LibFunc_ZnwmSt11align_val_tRKSt9nothrow_t, {MallocLike, 3, 0, -1, 1, MallocFamily::CPPNewAligned}}, // new(unsigned long, align_val_t, nothrow)
    {LibFunc_ZnwmSt11align_val_tRKSt9nothrow_t12__hot_cold_t, {MallocLike, 4, 0, -1, 1, MallocFamily::CPPNewAligned}}, // new(unsigned long, align_val_t, nothrow, __hot_cold_t)
    {LibFunc_Znaj, {OpNewLike, 1, 0, -1, -1, MallocFamily::CPPNewArray}}, // new[](unsigned int)
    {LibFunc_ZnajRKSt9nothrow_t, {MallocLike, 2, 0, -1, -1, MallocFamily::CPPNewArray}}, // new[](unsigned int, nothrow)
    {LibFunc_ZnajSt11align_val_t, {OpNewLike, 2, 0, -1, 1, MallocFamily::CPPNewArrayAligned}}, // new[](unsigned int, align_val_t)
    {LibFunc_ZnajSt11align_val_tRKSt9nothrow_t, {MallocLike, 3, 0, -1, 1, MallocFamily::CPPNewArrayAligned}}, // new[](unsigned int, align_val_t, nothrow)
    {LibFunc_Znam, {OpNewLike, 1, 0, -1, -1, MallocFamily::CPPNewArray}}, // new[](unsigned long)
    {LibFunc_Znam12__hot_cold_t, {OpNewLike, 2, 0, -1, -1, MallocFamily::CPPNewArray}}, // new[](unsigned long, __hot_cold_t)
    {LibFunc_ZnamRKSt9nothrow_t, {MallocLike, 2, 0, -1, -1, MallocFamily::CPPNewArray}}, // new[](unsigned long, nothrow)
    {LibFunc_ZnamRKSt9nothrow_t12__hot_cold_t, {MallocLike, 3, 0, -1, -1, MallocFamily::CPPNewArray}}, // new[](unsigned long, nothrow, __hot_cold_t)
    {LibFunc_ZnamSt11align_val_t, {OpNewLike, 2, 0, -1, 1, MallocFamily::CPPNewArrayAligned}}, // new[](unsigned long, align_val_t)
    {LibFunc_ZnamSt11align_val_t12__hot_cold_t, {OpNewLike, 3, 0, -1, 1, MallocFamily::CPPNewArrayAligned}}, // new[](unsigned long, align_val_t, __hot_cold_t)
    {LibFunc_ZnamSt11align_val_tRKSt9nothrow_t, {MallocLike, 3, 0, -1, 1, MallocFamily::CPPNewArrayAligned}}, // new[](unsigned long, align_val_t, nothrow)
    {LibFunc_ZnamSt11align_val_tRKSt9nothrow_t12__hot_cold_t, {MallocLike, 4, 0, -1, 1, MallocFamily::CPPNewArrayAligned}}, // new[](unsigned long, align_val_t, nothrow, __hot_cold_t)
    {LibFunc_msvc_new_int, {OpNewLike, 1, 0, -1, -1, MallocFamily::MSVCNew}}, // new(unsigned int)
    {LibFunc_msvc_new_int_nothrow, {MallocLike, 2, 0, -1, -1, MallocFamily::MSVCNew}}, // new(unsigned int, nothrow)
    {LibFunc_msvc_new_longlong, {OpNewLike, 1, 0, -1, -1, MallocFamily::MSVCNew}}, // new(unsigned long long)
    {LibFunc_msvc_new_longlong_nothrow, {MallocLike, 2, 0, -1, -1, MallocFamily::MSVCNew}}, // new(unsigned long long, nothrow)
    {LibFunc_msvc_new_array_int, {OpNewLike, 1, 0, -1, -1, MallocFamily::MSVCArrayNew}}, // new[](unsigned int)
    {LibFunc_msvc_new_array_int_nothrow, {MallocLike, 2, 0, -1, -1, MallocFamily::MSVCArrayNew}}, // new[](unsigned int, nothrow)
    {LibFunc_msvc_new_array_longlong, {OpNewLike, 1, 0, -1, -1, MallocFamily::MSVCArrayNew}}, // new[](unsigned long long)
    {LibFunc_msvc_new_array_longlong_nothrow, {MallocLike, 2, 0, -1, -1, MallocFamily::MSVCArrayNew}}, // new[](unsigned long long, nothrow)
    {LibFunc_strdup, {StrDupLike, 1, -1, -1, -1, MallocFamily::Malloc}},
    {LibFunc_dunder_strdup, {StrDupLike, 1, -1, -1, -1, MallocFamily::Malloc}},
    {LibFunc_strndup, {StrDupLike, 2, 1, -1, -1, MallocFamily::Malloc}},
    {LibFunc_dunder_strndup, {StrDupLike, 2, 1, -1, -1, MallocFamily::Malloc}},
    {LibFunc___kmpc_alloc_shared, {MallocLike, 1, 0, -1, -1, MallocFamily::KmpcAllocShared}},
};
// clang-format on

static const Function *getCalledFunction(const Value *V) {
  // Don't care about intrinsics in this case.
  if (isa<IntrinsicInst>(V))
    return nullptr;

  const auto *CB = dyn_cast<CallBase>(V);
  if (!CB)
    return nullptr;

  if (CB->isNoBuiltin())
    return nullptr;

  return CB->getCalledFunction();
}

/// Returns the allocation data for the given value if it's a call to a known
/// allocation function.
static std::optional<AllocFnsTy>
getAllocationDataForFunction(const Function *Callee, AllocType AllocTy,
                             const TargetLibraryInfo *TLI) {
  // Don't perform a slow TLI lookup, if this function doesn't return a pointer
  // and thus can't be an allocation function.
  if (!Callee->getReturnType()->isPointerTy())
    return std::nullopt;

  // Make sure that the function is available.
  LibFunc TLIFn;
  if (!TLI || !TLI->getLibFunc(*Callee, TLIFn) || !TLI->has(TLIFn))
    return std::nullopt;

  const auto *Iter = find_if(
      AllocationFnData, [TLIFn](const std::pair<LibFunc, AllocFnsTy> &P) {
        return P.first == TLIFn;
      });

  if (Iter == std::end(AllocationFnData))
    return std::nullopt;

  const AllocFnsTy *FnData = &Iter->second;
  if ((FnData->AllocTy & AllocTy) != FnData->AllocTy)
    return std::nullopt;

  // Check function prototype.
  int FstParam = FnData->FstParam;
  int SndParam = FnData->SndParam;
  FunctionType *FTy = Callee->getFunctionType();

  if (FTy->getReturnType()->isPointerTy() &&
      FTy->getNumParams() == FnData->NumParams &&
      (FstParam < 0 || (FTy->getParamType(FstParam)->isIntegerTy(32) ||
                        FTy->getParamType(FstParam)->isIntegerTy(64))) &&
      (SndParam < 0 || FTy->getParamType(SndParam)->isIntegerTy(32) ||
       FTy->getParamType(SndParam)->isIntegerTy(64)))
    return *FnData;
  return std::nullopt;
}

static std::optional<AllocFnsTy>
getAllocationData(const Value *V, AllocType AllocTy,
                  const TargetLibraryInfo *TLI) {
  if (const Function *Callee = getCalledFunction(V))
    return getAllocationDataForFunction(Callee, AllocTy, TLI);
  return std::nullopt;
}

static std::optional<AllocFnsTy>
getAllocationData(const Value *V, AllocType AllocTy,
                  function_ref<const TargetLibraryInfo &(Function &)> GetTLI) {
  if (const Function *Callee = getCalledFunction(V))
    return getAllocationDataForFunction(
        Callee, AllocTy, &GetTLI(const_cast<Function &>(*Callee)));
  return std::nullopt;
}

static std::optional<AllocFnsTy>
getAllocationSize(const CallBase *CB, const TargetLibraryInfo *TLI) {
  if (const Function *Callee = getCalledFunction(CB)) {
    // Prefer to use existing information over allocsize. This will give us an
    // accurate AllocTy.
    if (std::optional<AllocFnsTy> Data =
            getAllocationDataForFunction(Callee, AnyAlloc, TLI))
      return Data;
  }

  Attribute Attr = CB->getFnAttr(Attribute::AllocSize);
  if (Attr == Attribute())
    return std::nullopt;

  std::pair<unsigned, std::optional<unsigned>> Args = Attr.getAllocSizeArgs();

  AllocFnsTy Result;
  // Because allocsize only tells us how many bytes are allocated, we're not
  // really allowed to assume anything, so we use MallocLike.
  Result.AllocTy = MallocLike;
  Result.NumParams = CB->arg_size();
  Result.FstParam = Args.first;
  Result.SndParam = Args.second.value_or(-1);
  // Allocsize has no way to specify an alignment argument
  Result.AlignParam = -1;
  return Result;
}

static AllocFnKind getAllocFnKind(const Value *V) {
  if (const auto *CB = dyn_cast<CallBase>(V)) {
    Attribute Attr = CB->getFnAttr(Attribute::AllocKind);
    if (Attr.isValid())
      return AllocFnKind(Attr.getValueAsInt());
  }
  return AllocFnKind::Unknown;
}

static AllocFnKind getAllocFnKind(const Function *F) {
  return F->getAttributes().getAllocKind();
}

static bool checkFnAllocKind(const Value *V, AllocFnKind Wanted) {
  return (getAllocFnKind(V) & Wanted) != AllocFnKind::Unknown;
}

static bool checkFnAllocKind(const Function *F, AllocFnKind Wanted) {
  return (getAllocFnKind(F) & Wanted) != AllocFnKind::Unknown;
}

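// Example (illustrative sketch, not upstream code): a call carrying explicit
// allocator attributes is classified by checkFnAllocKind() even when it is
// not in the AllocationFnData table. Assuming a hypothetical declaration such
// as
//
//   declare ptr @my_alloc(i64) allockind("alloc,uninitialized")
//                              "alloc-family"="my_alloc"
//
// checkFnAllocKind(Call, AllocFnKind::Alloc) returns true for calls to it,
// while the table-driven getAllocationData() path returns std::nullopt.
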
/// Tests if a value is a call or invoke to a library function that
/// allocates or reallocates memory (either malloc, calloc, realloc, or strdup
/// like).
bool llvm::isAllocationFn(const Value *V, const TargetLibraryInfo *TLI) {
  return getAllocationData(V, AnyAlloc, TLI).has_value() ||
         checkFnAllocKind(V, AllocFnKind::Alloc | AllocFnKind::Realloc);
}
bool llvm::isAllocationFn(
    const Value *V,
    function_ref<const TargetLibraryInfo &(Function &)> GetTLI) {
  return getAllocationData(V, AnyAlloc, GetTLI).has_value() ||
         checkFnAllocKind(V, AllocFnKind::Alloc | AllocFnKind::Realloc);
}

/// Tests if a value is a call or invoke to a library function that
/// allocates memory via new.
bool llvm::isNewLikeFn(const Value *V, const TargetLibraryInfo *TLI) {
  return getAllocationData(V, OpNewLike, TLI).has_value();
}

/// Tests if a value is a call or invoke to a library function that
/// allocates memory similar to malloc or calloc.
bool llvm::isMallocOrCallocLikeFn(const Value *V,
                                  const TargetLibraryInfo *TLI) {
  // TODO: Function behavior does not match name.
  return getAllocationData(V, MallocOrOpNewLike, TLI).has_value();
}

/// Tests if a value is a call or invoke to a library function that
/// allocates memory (either malloc, calloc, or strdup like).
bool llvm::isAllocLikeFn(const Value *V, const TargetLibraryInfo *TLI) {
  return getAllocationData(V, AllocLike, TLI).has_value() ||
         checkFnAllocKind(V, AllocFnKind::Alloc);
}

/// Tests if a function is a call or invoke to a library function that
/// reallocates memory (e.g., realloc).
bool llvm::isReallocLikeFn(const Function *F) {
  return checkFnAllocKind(F, AllocFnKind::Realloc);
}

Value *llvm::getReallocatedOperand(const CallBase *CB) {
  if (checkFnAllocKind(CB, AllocFnKind::Realloc))
    return CB->getArgOperandWithAttribute(Attribute::AllocatedPointer);
  return nullptr;
}

bool llvm::isRemovableAlloc(const CallBase *CB, const TargetLibraryInfo *TLI) {
  // Note: Removability is highly dependent on the source language. For
  // example, recent C++ requires direct calls to the global allocation
  // [basic.stc.dynamic.allocation] to be observable unless part of a new
  // expression [expr.new paragraph 13].

  // Historically we've treated the C family allocation routines and operator
  // new as removable.
  return isAllocLikeFn(CB, TLI);
}

Value *llvm::getAllocAlignment(const CallBase *V,
                               const TargetLibraryInfo *TLI) {
  const std::optional<AllocFnsTy> FnData = getAllocationData(V, AnyAlloc, TLI);
  if (FnData && FnData->AlignParam >= 0) {
    return V->getOperand(FnData->AlignParam);
  }
  return V->getArgOperandWithAttribute(Attribute::AllocAlign);
}

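// Example (illustrative sketch): given a CallBase *CB and a TargetLibraryInfo
// &TLI from the enclosing pass (both assumed here), the predicates above can
// be combined to inspect an allocation site:
//
//   if (isAllocationFn(CB, &TLI)) {
//     // Alignment requested via an alignment parameter or the allocalign
//     // attribute, or null if none is known.
//     Value *RequestedAlign = getAllocAlignment(CB, &TLI);
//     bool Removable = isRemovableAlloc(CB, &TLI);
//     (void)RequestedAlign; (void)Removable;
//   }
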
/// When we're compiling N-bit code, and the user uses parameters that are
/// greater than N bits (e.g. uint64_t on a 32-bit build), we can run into
/// trouble with APInt size issues. This function handles resizing + overflow
/// checks for us. Check and zext or trunc \p I depending on IntTyBits and
/// I's value.
static bool CheckedZextOrTrunc(APInt &I, unsigned IntTyBits) {
  // More bits than we can handle. Checking the bit width isn't necessary, but
  // it's faster than checking active bits, and should give `false` in the
  // vast majority of cases.
  if (I.getBitWidth() > IntTyBits && I.getActiveBits() > IntTyBits)
    return false;
  if (I.getBitWidth() != IntTyBits)
    I = I.zextOrTrunc(IntTyBits);
  return true;
}

std::optional<APInt>
llvm::getAllocSize(const CallBase *CB, const TargetLibraryInfo *TLI,
                   function_ref<const Value *(const Value *)> Mapper) {
  // Note: This handles both explicitly listed allocation functions and
  // allocsize. The code structure could stand to be cleaned up a bit.
  std::optional<AllocFnsTy> FnData = getAllocationSize(CB, TLI);
  if (!FnData)
    return std::nullopt;

  // Get the index type for this address space, results and intermediate
  // computations are performed at that width.
  auto &DL = CB->getDataLayout();
  const unsigned IntTyBits = DL.getIndexTypeSizeInBits(CB->getType());

  // Handle strdup-like functions separately.
  if (FnData->AllocTy == StrDupLike) {
    APInt Size(IntTyBits, GetStringLength(Mapper(CB->getArgOperand(0))));
    if (!Size)
      return std::nullopt;

    // Strndup limits strlen.
    if (FnData->FstParam > 0) {
      const ConstantInt *Arg =
          dyn_cast<ConstantInt>(Mapper(CB->getArgOperand(FnData->FstParam)));
      if (!Arg)
        return std::nullopt;

      APInt MaxSize = Arg->getValue().zext(IntTyBits);
      if (Size.ugt(MaxSize))
        Size = MaxSize + 1;
    }
    return Size;
  }

  const ConstantInt *Arg =
      dyn_cast<ConstantInt>(Mapper(CB->getArgOperand(FnData->FstParam)));
  if (!Arg)
    return std::nullopt;

  APInt Size = Arg->getValue();
  if (!CheckedZextOrTrunc(Size, IntTyBits))
    return std::nullopt;

  // Size is determined by just 1 parameter.
  if (FnData->SndParam < 0)
    return Size;

  Arg = dyn_cast<ConstantInt>(Mapper(CB->getArgOperand(FnData->SndParam)));
  if (!Arg)
    return std::nullopt;

  APInt NumElems = Arg->getValue();
  if (!CheckedZextOrTrunc(NumElems, IntTyBits))
    return std::nullopt;

  bool Overflow;
  Size = Size.umul_ov(NumElems, Overflow);
  if (Overflow)
    return std::nullopt;
  return Size;
}

Constant *llvm::getInitialValueOfAllocation(const Value *V,
                                            const TargetLibraryInfo *TLI,
                                            Type *Ty) {
  auto *Alloc = dyn_cast<CallBase>(V);
  if (!Alloc)
    return nullptr;

  // malloc'd memory is uninitialized (undef).
  if (getAllocationData(Alloc, MallocOrOpNewLike, TLI).has_value())
    return UndefValue::get(Ty);

  AllocFnKind AK = getAllocFnKind(Alloc);
  if ((AK & AllocFnKind::Uninitialized) != AllocFnKind::Unknown)
    return UndefValue::get(Ty);
  if ((AK & AllocFnKind::Zeroed) != AllocFnKind::Unknown)
    return Constant::getNullValue(Ty);

  return nullptr;
}

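// Example (illustrative sketch): for an allocation call whose size arguments
// are compile-time constants, the helpers above fold the allocated size and
// the initial memory contents. `CB`, `TLI`, and `Int8Ty` are assumed to exist
// in the caller:
//
//   if (std::optional<APInt> Bytes =
//           getAllocSize(CB, TLI, [](const Value *V) { return V; })) {
//     // Bytes is expressed in the pointer index width; two-argument
//     // allocators are folded as FstParam * SndParam with overflow checked.
//   }
//   Constant *Init = getInitialValueOfAllocation(CB, TLI, Int8Ty);
//   // Init is undef for malloc-like calls, a zero value for zeroed
//   // allocators, and null when nothing is known.
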
struct FreeFnsTy {
  unsigned NumParams;
  // Name of default allocator function to group malloc/free calls by family
  MallocFamily Family;
};

// clang-format off
static const std::pair<LibFunc, FreeFnsTy> FreeFnData[] = {
    {LibFunc_ZdlPv, {1, MallocFamily::CPPNew}}, // operator delete(void*)
    {LibFunc_ZdaPv, {1, MallocFamily::CPPNewArray}}, // operator delete[](void*)
    {LibFunc_msvc_delete_ptr32, {1, MallocFamily::MSVCNew}}, // operator delete(void*)
    {LibFunc_msvc_delete_ptr64, {1, MallocFamily::MSVCNew}}, // operator delete(void*)
    {LibFunc_msvc_delete_array_ptr32, {1, MallocFamily::MSVCArrayNew}}, // operator delete[](void*)
    {LibFunc_msvc_delete_array_ptr64, {1, MallocFamily::MSVCArrayNew}}, // operator delete[](void*)
    {LibFunc_ZdlPvj, {2, MallocFamily::CPPNew}}, // delete(void*, uint)
    {LibFunc_ZdlPvm, {2, MallocFamily::CPPNew}}, // delete(void*, ulong)
    {LibFunc_ZdlPvRKSt9nothrow_t, {2, MallocFamily::CPPNew}}, // delete(void*, nothrow)
    {LibFunc_ZdlPvSt11align_val_t, {2, MallocFamily::CPPNewAligned}}, // delete(void*, align_val_t)
    {LibFunc_ZdaPvj, {2, MallocFamily::CPPNewArray}}, // delete[](void*, uint)
    {LibFunc_ZdaPvm, {2, MallocFamily::CPPNewArray}}, // delete[](void*, ulong)
    {LibFunc_ZdaPvRKSt9nothrow_t, {2, MallocFamily::CPPNewArray}}, // delete[](void*, nothrow)
    {LibFunc_ZdaPvSt11align_val_t, {2, MallocFamily::CPPNewArrayAligned}}, // delete[](void*, align_val_t)
    {LibFunc_msvc_delete_ptr32_int, {2, MallocFamily::MSVCNew}}, // delete(void*, uint)
    {LibFunc_msvc_delete_ptr64_longlong, {2, MallocFamily::MSVCNew}}, // delete(void*, ulonglong)
    {LibFunc_msvc_delete_ptr32_nothrow, {2, MallocFamily::MSVCNew}}, // delete(void*, nothrow)
    {LibFunc_msvc_delete_ptr64_nothrow, {2, MallocFamily::MSVCNew}}, // delete(void*, nothrow)
    {LibFunc_msvc_delete_array_ptr32_int, {2, MallocFamily::MSVCArrayNew}}, // delete[](void*, uint)
    {LibFunc_msvc_delete_array_ptr64_longlong, {2, MallocFamily::MSVCArrayNew}}, // delete[](void*, ulonglong)
    {LibFunc_msvc_delete_array_ptr32_nothrow, {2, MallocFamily::MSVCArrayNew}}, // delete[](void*, nothrow)
    {LibFunc_msvc_delete_array_ptr64_nothrow, {2, MallocFamily::MSVCArrayNew}}, // delete[](void*, nothrow)
    {LibFunc___kmpc_free_shared, {2, MallocFamily::KmpcAllocShared}}, // OpenMP Offloading RTL free
    {LibFunc_ZdlPvSt11align_val_tRKSt9nothrow_t, {3, MallocFamily::CPPNewAligned}}, // delete(void*, align_val_t, nothrow)
    {LibFunc_ZdaPvSt11align_val_tRKSt9nothrow_t, {3, MallocFamily::CPPNewArrayAligned}}, // delete[](void*, align_val_t, nothrow)
    {LibFunc_ZdlPvjSt11align_val_t, {3, MallocFamily::CPPNewAligned}}, // delete(void*, unsigned int, align_val_t)
    {LibFunc_ZdlPvmSt11align_val_t, {3, MallocFamily::CPPNewAligned}}, // delete(void*, unsigned long, align_val_t)
    {LibFunc_ZdaPvjSt11align_val_t, {3, MallocFamily::CPPNewArrayAligned}}, // delete[](void*, unsigned int, align_val_t)
    {LibFunc_ZdaPvmSt11align_val_t, {3, MallocFamily::CPPNewArrayAligned}}, // delete[](void*, unsigned long, align_val_t)
};
// clang-format on

std::optional<FreeFnsTy> getFreeFunctionDataForFunction(const Function *Callee,
                                                        const LibFunc TLIFn) {
  const auto *Iter =
      find_if(FreeFnData, [TLIFn](const std::pair<LibFunc, FreeFnsTy> &P) {
        return P.first == TLIFn;
      });
  if (Iter == std::end(FreeFnData))
    return std::nullopt;
  return Iter->second;
}

std::optional<StringRef>
llvm::getAllocationFamily(const Value *I, const TargetLibraryInfo *TLI) {
  if (const Function *Callee = getCalledFunction(I)) {
    LibFunc TLIFn;
    if (TLI && TLI->getLibFunc(*Callee, TLIFn) && TLI->has(TLIFn)) {
      // Callee is some known library function.
      const auto AllocData =
          getAllocationDataForFunction(Callee, AnyAlloc, TLI);
      if (AllocData)
        return mangledNameForMallocFamily(AllocData->Family);
      const auto FreeData = getFreeFunctionDataForFunction(Callee, TLIFn);
      if (FreeData)
        return mangledNameForMallocFamily(FreeData->Family);
    }
  }

  // Callee isn't a known library function, still check attributes.
  if (checkFnAllocKind(I, AllocFnKind::Free | AllocFnKind::Alloc |
                              AllocFnKind::Realloc)) {
    Attribute Attr = cast<CallBase>(I)->getFnAttr("alloc-family");
    if (Attr.isValid())
      return Attr.getValueAsString();
  }
  return std::nullopt;
}

/// isLibFreeFunction - Returns true if the function is a builtin free()
bool llvm::isLibFreeFunction(const Function *F, const LibFunc TLIFn) {
  std::optional<FreeFnsTy> FnData = getFreeFunctionDataForFunction(F, TLIFn);
  if (!FnData)
    return checkFnAllocKind(F, AllocFnKind::Free);

  // Check free prototype.
  // FIXME: workaround for PR5130, this will be obsolete when a nobuiltin
  // attribute exists.
  FunctionType *FTy = F->getFunctionType();
  if (!FTy->getReturnType()->isVoidTy())
    return false;
  if (FTy->getNumParams() != FnData->NumParams)
    return false;
  if (!FTy->getParamType(0)->isPointerTy())
    return false;

  return true;
}

Value *llvm::getFreedOperand(const CallBase *CB, const TargetLibraryInfo *TLI) {
  if (const Function *Callee = getCalledFunction(CB)) {
    LibFunc TLIFn;
    if (TLI && TLI->getLibFunc(*Callee, TLIFn) && TLI->has(TLIFn) &&
        isLibFreeFunction(Callee, TLIFn)) {
      // All currently supported free functions free the first argument.
      return CB->getArgOperand(0);
    }
  }

  if (checkFnAllocKind(CB, AllocFnKind::Free))
    return CB->getArgOperandWithAttribute(Attribute::AllocatedPointer);

  return nullptr;
}

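// Example (illustrative sketch): matching a deallocation against the
// allocation it frees. `FreeCB`, `AllocCB`, and `TLI` are assumed to come
// from the caller:
//
//   if (Value *Freed = getFreedOperand(FreeCB, &TLI)) {
//     std::optional<StringRef> FreeFamily = getAllocationFamily(FreeCB, &TLI);
//     std::optional<StringRef> AllocFamily =
//         getAllocationFamily(AllocCB, &TLI);
//     // Only treat the pair as matched when both families are known and
//     // equal, e.g. "_Znwm" for operator new / operator delete.
//     bool Matched = FreeFamily && AllocFamily && *FreeFamily == *AllocFamily;
//     (void)Freed; (void)Matched;
//   }
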
//===----------------------------------------------------------------------===//
// Utility functions to compute size of objects.
//
static APInt getSizeWithOverflow(const SizeOffsetAPInt &Data) {
  APInt Size = Data.Size;
  APInt Offset = Data.Offset;
  if (Offset.isNegative() || Size.ult(Offset))
    return APInt(Size.getBitWidth(), 0);
  return Size - Offset;
}

/// Compute the size of the object pointed to by Ptr. Returns true and the
/// object size in Size if successful, and false otherwise.
/// If RoundToAlign is true, then Size is rounded up to the alignment of
/// allocas, byval arguments, and global variables.
bool llvm::getObjectSize(const Value *Ptr, uint64_t &Size, const DataLayout &DL,
                         const TargetLibraryInfo *TLI, ObjectSizeOpts Opts) {
  ObjectSizeOffsetVisitor Visitor(DL, TLI, Ptr->getContext(), Opts);
  SizeOffsetAPInt Data = Visitor.compute(const_cast<Value *>(Ptr));
  if (!Data.bothKnown())
    return false;

  Size = getSizeWithOverflow(Data).getZExtValue();
  return true;
}

Value *llvm::lowerObjectSizeCall(IntrinsicInst *ObjectSize,
                                 const DataLayout &DL,
                                 const TargetLibraryInfo *TLI,
                                 bool MustSucceed) {
  return lowerObjectSizeCall(ObjectSize, DL, TLI, /*AAResults=*/nullptr,
                             MustSucceed);
}

Value *llvm::lowerObjectSizeCall(
    IntrinsicInst *ObjectSize, const DataLayout &DL,
    const TargetLibraryInfo *TLI, AAResults *AA, bool MustSucceed,
    SmallVectorImpl<Instruction *> *InsertedInstructions) {
  assert(ObjectSize->getIntrinsicID() == Intrinsic::objectsize &&
         "ObjectSize must be a call to llvm.objectsize!");

  bool MaxVal = cast<ConstantInt>(ObjectSize->getArgOperand(1))->isZero();
  ObjectSizeOpts EvalOptions;
  EvalOptions.AA = AA;

  // Unless we have to fold this to something, try to be as accurate as
  // possible.
  if (MustSucceed)
    EvalOptions.EvalMode =
        MaxVal ? ObjectSizeOpts::Mode::Max : ObjectSizeOpts::Mode::Min;
  else
    EvalOptions.EvalMode = ObjectSizeOpts::Mode::ExactSizeFromOffset;

  EvalOptions.NullIsUnknownSize =
      cast<ConstantInt>(ObjectSize->getArgOperand(2))->isOne();

  auto *ResultType = cast<IntegerType>(ObjectSize->getType());
  bool StaticOnly = cast<ConstantInt>(ObjectSize->getArgOperand(3))->isZero();
  if (StaticOnly) {
    // FIXME: Does it make sense to just return a failure value if the size
    // won't fit in the output and `!MustSucceed`?
    uint64_t Size;
    if (getObjectSize(ObjectSize->getArgOperand(0), Size, DL, TLI,
                      EvalOptions) &&
        isUIntN(ResultType->getBitWidth(), Size))
      return ConstantInt::get(ResultType, Size);
  } else {
    LLVMContext &Ctx = ObjectSize->getFunction()->getContext();
    ObjectSizeOffsetEvaluator Eval(DL, TLI, Ctx, EvalOptions);
    SizeOffsetValue SizeOffsetPair = Eval.compute(ObjectSize->getArgOperand(0));

    if (SizeOffsetPair != ObjectSizeOffsetEvaluator::unknown()) {
      IRBuilder<TargetFolder, IRBuilderCallbackInserter> Builder(
          Ctx, TargetFolder(DL), IRBuilderCallbackInserter([&](Instruction *I) {
            if (InsertedInstructions)
              InsertedInstructions->push_back(I);
          }));
      Builder.SetInsertPoint(ObjectSize);

      Value *Size = SizeOffsetPair.Size;
      Value *Offset = SizeOffsetPair.Offset;

      // If we're outside the end of the object, then we can always access
      // exactly 0 bytes.
      Value *ResultSize = Builder.CreateSub(Size, Offset);
      Value *UseZero = Builder.CreateICmpULT(Size, Offset);
      ResultSize = Builder.CreateZExtOrTrunc(ResultSize, ResultType);
      Value *Ret = Builder.CreateSelect(
          UseZero, ConstantInt::get(ResultType, 0), ResultSize);

      // The non-constant size expression cannot evaluate to -1.
      if (!isa<Constant>(Size) || !isa<Constant>(Offset))
        Builder.CreateAssumption(
            Builder.CreateICmpNE(Ret, ConstantInt::get(ResultType, -1)));

      return Ret;
    }
  }

  if (!MustSucceed)
    return nullptr;

  return MaxVal ? Constant::getAllOnesValue(ResultType)
                : Constant::getNullValue(ResultType);
}

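// Example (illustrative sketch): how a client pass might fold an
// @llvm.objectsize call using the helper above. `Intr` is assumed to be the
// IntrinsicInst*, and `DL`/`TLI` the usual per-function analyses:
//
//   if (Value *Folded =
//           lowerObjectSizeCall(Intr, DL, &TLI, /*MustSucceed=*/true)) {
//     Intr->replaceAllUsesWith(Folded);
//     Intr->eraseFromParent();
//   }
//
// With MustSucceed=true the call always folds, falling back to -1 (max mode)
// or 0 (min mode) when the size cannot be determined.
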
STATISTIC(ObjectVisitorArgument,
          "Number of arguments with unsolved size and offset");
STATISTIC(ObjectVisitorLoad,
          "Number of load instructions with unsolved size and offset");

APInt ObjectSizeOffsetVisitor::align(APInt Size, MaybeAlign Alignment) {
  if (Options.RoundToAlign && Alignment)
    return APInt(IntTyBits, alignTo(Size.getZExtValue(), *Alignment));
  return Size;
}

ObjectSizeOffsetVisitor::ObjectSizeOffsetVisitor(const DataLayout &DL,
                                                 const TargetLibraryInfo *TLI,
                                                 LLVMContext &Context,
                                                 ObjectSizeOpts Options)
    : DL(DL), TLI(TLI), Options(Options) {
  // Pointer size must be rechecked for each object visited since it could have
  // a different address space.
}

SizeOffsetAPInt ObjectSizeOffsetVisitor::compute(Value *V) {
  InstructionsVisited = 0;
  return computeImpl(V);
}

SizeOffsetAPInt ObjectSizeOffsetVisitor::computeImpl(Value *V) {
  unsigned InitialIntTyBits = DL.getIndexTypeSizeInBits(V->getType());

  // Stripping pointer casts can strip address space casts which can change the
  // index type size. The invariant is that we use the value type to determine
  // the index type size and if we stripped address space casts we have to
  // readjust the APInt as we pass it upwards in order for the APInt to match
  // the type the caller passed in.
  APInt Offset(InitialIntTyBits, 0);
  V = V->stripAndAccumulateConstantOffsets(
      DL, Offset, /* AllowNonInbounds */ true, /* AllowInvariantGroup */ true);

  // Later we use the index type size and zero but it will match the type of
  // the value that is passed to computeImpl.
  IntTyBits = DL.getIndexTypeSizeInBits(V->getType());
  Zero = APInt::getZero(IntTyBits);

  SizeOffsetAPInt SOT = computeValue(V);

  bool IndexTypeSizeChanged = InitialIntTyBits != IntTyBits;
  if (!IndexTypeSizeChanged && Offset.isZero())
    return SOT;

  // We stripped an address space cast that changed the index type size or we
  // accumulated some constant offset (or both). Readjust the bit width to
  // match the argument index type size and apply the offset, as required.
  if (IndexTypeSizeChanged) {
    if (SOT.knownSize() && !::CheckedZextOrTrunc(SOT.Size, InitialIntTyBits))
      SOT.Size = APInt();
    if (SOT.knownOffset() &&
        !::CheckedZextOrTrunc(SOT.Offset, InitialIntTyBits))
      SOT.Offset = APInt();
  }
  // If the computed offset is "unknown" we cannot add the stripped offset.
  return {SOT.Size,
          SOT.Offset.getBitWidth() > 1 ? SOT.Offset + Offset : SOT.Offset};
}

SizeOffsetAPInt ObjectSizeOffsetVisitor::computeValue(Value *V) {
  if (Instruction *I = dyn_cast<Instruction>(V)) {
    // If we have already seen this instruction, bail out. Cycles can happen in
    // unreachable code after constant propagation.
    auto P = SeenInsts.try_emplace(I, ObjectSizeOffsetVisitor::unknown());
    if (!P.second)
      return P.first->second;
    ++InstructionsVisited;
    if (InstructionsVisited > ObjectSizeOffsetVisitorMaxVisitInstructions)
      return ObjectSizeOffsetVisitor::unknown();
    SizeOffsetAPInt Res = visit(*I);
    // Cache the result for later visits. If we happened to visit this during
    // the above recursion, we would consider it unknown until now.
    SeenInsts[I] = Res;
    return Res;
  }
  if (Argument *A = dyn_cast<Argument>(V))
    return visitArgument(*A);
  if (ConstantPointerNull *P = dyn_cast<ConstantPointerNull>(V))
    return visitConstantPointerNull(*P);
  if (GlobalAlias *GA = dyn_cast<GlobalAlias>(V))
    return visitGlobalAlias(*GA);
  if (GlobalVariable *GV = dyn_cast<GlobalVariable>(V))
    return visitGlobalVariable(*GV);
  if (UndefValue *UV = dyn_cast<UndefValue>(V))
    return visitUndefValue(*UV);

  LLVM_DEBUG(dbgs() << "ObjectSizeOffsetVisitor::compute() unhandled value: "
                    << *V << '\n');
  return ObjectSizeOffsetVisitor::unknown();
}

bool ObjectSizeOffsetVisitor::CheckedZextOrTrunc(APInt &I) {
  return ::CheckedZextOrTrunc(I, IntTyBits);
}

SizeOffsetAPInt ObjectSizeOffsetVisitor::visitAllocaInst(AllocaInst &I) {
  TypeSize ElemSize = DL.getTypeAllocSize(I.getAllocatedType());
  if (ElemSize.isScalable() && Options.EvalMode != ObjectSizeOpts::Mode::Min)
    return ObjectSizeOffsetVisitor::unknown();
  if (!isUIntN(IntTyBits, ElemSize.getKnownMinValue()))
    return ObjectSizeOffsetVisitor::unknown();
  APInt Size(IntTyBits, ElemSize.getKnownMinValue());
  if (!I.isArrayAllocation())
    return SizeOffsetAPInt(align(Size, I.getAlign()), Zero);

  Value *ArraySize = I.getArraySize();
  if (const ConstantInt *C = dyn_cast<ConstantInt>(ArraySize)) {
    APInt NumElems = C->getValue();
    if (!CheckedZextOrTrunc(NumElems))
      return ObjectSizeOffsetVisitor::unknown();

    bool Overflow;
    Size = Size.umul_ov(NumElems, Overflow);
    return Overflow ? ObjectSizeOffsetVisitor::unknown()
                    : SizeOffsetAPInt(align(Size, I.getAlign()), Zero);
  }
  return ObjectSizeOffsetVisitor::unknown();
}

SizeOffsetAPInt ObjectSizeOffsetVisitor::visitArgument(Argument &A) {
  Type *MemoryTy = A.getPointeeInMemoryValueType();
  // No interprocedural analysis is done at the moment.
  if (!MemoryTy || !MemoryTy->isSized()) {
    ++ObjectVisitorArgument;
    return ObjectSizeOffsetVisitor::unknown();
  }

  APInt Size(IntTyBits, DL.getTypeAllocSize(MemoryTy));
  return SizeOffsetAPInt(align(Size, A.getParamAlign()), Zero);
}

SizeOffsetAPInt ObjectSizeOffsetVisitor::visitCallBase(CallBase &CB) {
  if (std::optional<APInt> Size = getAllocSize(&CB, TLI))
    return SizeOffsetAPInt(*Size, Zero);
  return ObjectSizeOffsetVisitor::unknown();
}

SizeOffsetAPInt
ObjectSizeOffsetVisitor::visitConstantPointerNull(ConstantPointerNull &CPN) {
  // If null is unknown, there's nothing we can do. Additionally, non-zero
  // address spaces can make use of null, so we don't presume to know anything
  // about that.
  //
  // TODO: How should this work with address space casts? We currently just
  // drop them on the floor, but it's unclear what we should do when a NULL
  // from addrspace(1) gets cast to addrspace(0) (or vice-versa).
  if (Options.NullIsUnknownSize || CPN.getType()->getAddressSpace())
    return ObjectSizeOffsetVisitor::unknown();
  return SizeOffsetAPInt(Zero, Zero);
}

SizeOffsetAPInt
ObjectSizeOffsetVisitor::visitExtractElementInst(ExtractElementInst &) {
  return ObjectSizeOffsetVisitor::unknown();
}

SizeOffsetAPInt
ObjectSizeOffsetVisitor::visitExtractValueInst(ExtractValueInst &) {
  // Easy cases were already folded by previous passes.
  return ObjectSizeOffsetVisitor::unknown();
}

SizeOffsetAPInt ObjectSizeOffsetVisitor::visitGlobalAlias(GlobalAlias &GA) {
  if (GA.isInterposable())
    return ObjectSizeOffsetVisitor::unknown();
  return computeImpl(GA.getAliasee());
}

SizeOffsetAPInt
ObjectSizeOffsetVisitor::visitGlobalVariable(GlobalVariable &GV) {
  if (!GV.getValueType()->isSized() || GV.hasExternalWeakLinkage() ||
      ((!GV.hasInitializer() || GV.isInterposable()) &&
       Options.EvalMode != ObjectSizeOpts::Mode::Min))
    return ObjectSizeOffsetVisitor::unknown();

  APInt Size(IntTyBits, DL.getTypeAllocSize(GV.getValueType()));
  return SizeOffsetAPInt(align(Size, GV.getAlign()), Zero);
}

SizeOffsetAPInt ObjectSizeOffsetVisitor::visitIntToPtrInst(IntToPtrInst &) {
  // clueless
  return ObjectSizeOffsetVisitor::unknown();
}

SizeOffsetAPInt ObjectSizeOffsetVisitor::findLoadSizeOffset(
    LoadInst &Load, BasicBlock &BB, BasicBlock::iterator From,
    SmallDenseMap<BasicBlock *, SizeOffsetAPInt, 8> &VisitedBlocks,
    unsigned &ScannedInstCount) {
  constexpr unsigned MaxInstsToScan = 128;

  auto Where = VisitedBlocks.find(&BB);
  if (Where != VisitedBlocks.end())
    return Where->second;

  auto Unknown = [&BB, &VisitedBlocks]() {
    return VisitedBlocks[&BB] = ObjectSizeOffsetVisitor::unknown();
  };
  auto Known = [&BB, &VisitedBlocks](SizeOffsetAPInt SO) {
    return VisitedBlocks[&BB] = SO;
  };

  do {
    Instruction &I = *From;

    if (I.isDebugOrPseudoInst())
      continue;

    if (++ScannedInstCount > MaxInstsToScan)
      return Unknown();

    if (!I.mayWriteToMemory())
      continue;

    if (auto *SI = dyn_cast<StoreInst>(&I)) {
      AliasResult AR =
          Options.AA->alias(SI->getPointerOperand(), Load.getPointerOperand());
      switch ((AliasResult::Kind)AR) {
      case AliasResult::NoAlias:
        continue;
      case AliasResult::MustAlias:
        if (SI->getValueOperand()->getType()->isPointerTy())
          return Known(computeImpl(SI->getValueOperand()));
        else
          return Unknown(); // No handling of non-pointer values by `compute`.
      default:
        return Unknown();
      }
    }

    if (auto *CB = dyn_cast<CallBase>(&I)) {
      Function *Callee = CB->getCalledFunction();
      // Bail out on indirect call.
      if (!Callee)
        return Unknown();

      LibFunc TLIFn;
      if (!TLI || !TLI->getLibFunc(*CB->getCalledFunction(), TLIFn) ||
          !TLI->has(TLIFn))
        return Unknown();

      // TODO: There are probably more interesting cases to support here.
      if (TLIFn != LibFunc_posix_memalign)
        return Unknown();

      AliasResult AR =
          Options.AA->alias(CB->getOperand(0), Load.getPointerOperand());
      switch ((AliasResult::Kind)AR) {
      case AliasResult::NoAlias:
        continue;
      case AliasResult::MustAlias:
        break;
      default:
        return Unknown();
      }

      // Is the error status of posix_memalign correctly checked? If not, it
      // would be incorrect to assume it succeeded and that the load doesn't
      // see the previous value.
      std::optional<bool> Checked = isImpliedByDomCondition(
          ICmpInst::ICMP_EQ, CB, ConstantInt::get(CB->getType(), 0), &Load, DL);
      if (!Checked || !*Checked)
        return Unknown();

      Value *Size = CB->getOperand(2);
      auto *C = dyn_cast<ConstantInt>(Size);
      if (!C)
        return Unknown();

      return Known({C->getValue(), APInt(C->getValue().getBitWidth(), 0)});
    }

    return Unknown();
  } while (From-- != BB.begin());

  SmallVector<SizeOffsetAPInt> PredecessorSizeOffsets;
  for (auto *PredBB : predecessors(&BB)) {
    PredecessorSizeOffsets.push_back(findLoadSizeOffset(
        Load, *PredBB, BasicBlock::iterator(PredBB->getTerminator()),
        VisitedBlocks, ScannedInstCount));
    if (!PredecessorSizeOffsets.back().bothKnown())
      return Unknown();
  }

  if (PredecessorSizeOffsets.empty())
    return Unknown();

  return Known(std::accumulate(
      PredecessorSizeOffsets.begin() + 1, PredecessorSizeOffsets.end(),
      PredecessorSizeOffsets.front(),
      [this](SizeOffsetAPInt LHS, SizeOffsetAPInt RHS) {
        return combineSizeOffset(LHS, RHS);
      }));
}

SizeOffsetAPInt ObjectSizeOffsetVisitor::visitLoadInst(LoadInst &LI) {
  if (!Options.AA) {
    ++ObjectVisitorLoad;
    return ObjectSizeOffsetVisitor::unknown();
  }

  SmallDenseMap<BasicBlock *, SizeOffsetAPInt, 8> VisitedBlocks;
  unsigned ScannedInstCount = 0;
  SizeOffsetAPInt SO =
      findLoadSizeOffset(LI, *LI.getParent(), BasicBlock::iterator(LI),
                         VisitedBlocks, ScannedInstCount);
  if (!SO.bothKnown())
    ++ObjectVisitorLoad;
  return SO;
}

SizeOffsetAPInt
ObjectSizeOffsetVisitor::combineSizeOffset(SizeOffsetAPInt LHS,
                                           SizeOffsetAPInt RHS) {
  if (!LHS.bothKnown() || !RHS.bothKnown())
    return ObjectSizeOffsetVisitor::unknown();

  switch (Options.EvalMode) {
  case ObjectSizeOpts::Mode::Min:
    return (getSizeWithOverflow(LHS).slt(getSizeWithOverflow(RHS))) ? LHS : RHS;
  case ObjectSizeOpts::Mode::Max:
    return (getSizeWithOverflow(LHS).sgt(getSizeWithOverflow(RHS))) ? LHS : RHS;
  case ObjectSizeOpts::Mode::ExactSizeFromOffset:
    return (getSizeWithOverflow(LHS).eq(getSizeWithOverflow(RHS)))
               ? LHS
               : ObjectSizeOffsetVisitor::unknown();
  case ObjectSizeOpts::Mode::ExactUnderlyingSizeAndOffset:
    return LHS == RHS ? LHS : ObjectSizeOffsetVisitor::unknown();
  }
  llvm_unreachable("missing an eval mode");
}

SizeOffsetAPInt ObjectSizeOffsetVisitor::visitPHINode(PHINode &PN) {
  if (PN.getNumIncomingValues() == 0)
    return ObjectSizeOffsetVisitor::unknown();
  auto IncomingValues = PN.incoming_values();
  return std::accumulate(IncomingValues.begin() + 1, IncomingValues.end(),
                         computeImpl(*IncomingValues.begin()),
                         [this](SizeOffsetAPInt LHS, Value *VRHS) {
                           return combineSizeOffset(LHS, computeImpl(VRHS));
                         });
}

SizeOffsetAPInt ObjectSizeOffsetVisitor::visitSelectInst(SelectInst &I) {
  return combineSizeOffset(computeImpl(I.getTrueValue()),
                           computeImpl(I.getFalseValue()));
}

SizeOffsetAPInt ObjectSizeOffsetVisitor::visitUndefValue(UndefValue &) {
  return SizeOffsetAPInt(Zero, Zero);
}

SizeOffsetAPInt ObjectSizeOffsetVisitor::visitInstruction(Instruction &I) {
  LLVM_DEBUG(dbgs() << "ObjectSizeOffsetVisitor unknown instruction:" << I
                    << '\n');
  return ObjectSizeOffsetVisitor::unknown();
}

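// Example (illustrative sketch): using the visitor directly. `Ptr`, `DL`, and
// `TLI` are assumed; Mode::Min makes combineSizeOffset() pick the smaller of
// the candidates when Ptr may point into several objects (select/phi):
//
//   ObjectSizeOpts Opts;
//   Opts.EvalMode = ObjectSizeOpts::Mode::Min;
//   ObjectSizeOffsetVisitor Visitor(DL, &TLI, Ptr->getContext(), Opts);
//   SizeOffsetAPInt SO = Visitor.compute(const_cast<Value *>(Ptr));
//   if (SO.bothKnown()) {
//     // SO.Size is the underlying object size and SO.Offset the byte offset
//     // of Ptr into it, both in the pointer index width.
//   }
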
// Just set these right here...
SizeOffsetValue::SizeOffsetValue(const SizeOffsetWeakTrackingVH &SOT)
    : SizeOffsetType(SOT.Size, SOT.Offset) {}

ObjectSizeOffsetEvaluator::ObjectSizeOffsetEvaluator(
    const DataLayout &DL, const TargetLibraryInfo *TLI, LLVMContext &Context,
    ObjectSizeOpts EvalOpts)
    : DL(DL), TLI(TLI), Context(Context),
      Builder(Context, TargetFolder(DL),
              IRBuilderCallbackInserter(
                  [&](Instruction *I) { InsertedInstructions.insert(I); })),
      EvalOpts(EvalOpts) {
  // IntTy and Zero must be set for each compute() since the address space may
  // be different for later objects.
}

SizeOffsetValue ObjectSizeOffsetEvaluator::compute(Value *V) {
  // XXX - Are vectors of pointers possible here?
  IntTy = cast<IntegerType>(DL.getIndexType(V->getType()));
  Zero = ConstantInt::get(IntTy, 0);

  SizeOffsetValue Result = compute_(V);

  if (!Result.bothKnown()) {
    // Erase everything that was computed in this iteration from the cache, so
    // that no dangling references are left behind. We could be a bit smarter
    // if we kept a dependency graph. It's probably not worth the complexity.
    for (const Value *SeenVal : SeenVals) {
      CacheMapTy::iterator CacheIt = CacheMap.find(SeenVal);
      // non-computable results can be safely cached
      if (CacheIt != CacheMap.end() && CacheIt->second.anyKnown())
        CacheMap.erase(CacheIt);
    }

    // Erase any instructions we inserted as part of the traversal.
    for (Instruction *I : InsertedInstructions) {
      I->replaceAllUsesWith(PoisonValue::get(I->getType()));
      I->eraseFromParent();
    }
  }

  SeenVals.clear();
  InsertedInstructions.clear();
  return Result;
}

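// Example (illustrative sketch): the evaluator materializes the size/offset
// as IR when they are only known at run time (e.g. a malloc of a variable
// size). `Ptr`, `DL`, `TLI`, and `Ctx` are assumed to come from the caller:
//
//   ObjectSizeOffsetEvaluator Eval(DL, &TLI, Ctx, ObjectSizeOpts());
//   SizeOffsetValue SO = Eval.compute(Ptr);
//   if (SO.bothKnown()) {
//     // SO.Size and SO.Offset are llvm::Value*s inserted just before the
//     // values they depend on; failed evaluations are rolled back in
//     // compute().
//   }
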
SizeOffsetValue ObjectSizeOffsetEvaluator::compute_(Value *V) {
  ObjectSizeOffsetVisitor Visitor(DL, TLI, Context, EvalOpts);
  SizeOffsetAPInt Const = Visitor.compute(V);
  if (Const.bothKnown())
    return SizeOffsetValue(ConstantInt::get(Context, Const.Size),
                           ConstantInt::get(Context, Const.Offset));

  V = V->stripPointerCasts();

  // Check cache.
  CacheMapTy::iterator CacheIt = CacheMap.find(V);
  if (CacheIt != CacheMap.end())
    return CacheIt->second;

  // Always generate code immediately before the instruction being
  // processed, so that the generated code dominates the same BBs.
  BuilderTy::InsertPointGuard Guard(Builder);
  if (Instruction *I = dyn_cast<Instruction>(V))
    Builder.SetInsertPoint(I);

  // Now compute the size and offset.
  SizeOffsetValue Result;

  // Record the pointers that were handled in this run, so that they can be
  // cleaned later if something fails. We also use this set to break cycles
  // that can occur in dead code.
  if (!SeenVals.insert(V).second) {
    Result = ObjectSizeOffsetEvaluator::unknown();
  } else if (GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
    Result = visitGEPOperator(*GEP);
  } else if (Instruction *I = dyn_cast<Instruction>(V)) {
    Result = visit(*I);
  } else if (isa<Argument>(V) ||
             (isa<ConstantExpr>(V) &&
              cast<ConstantExpr>(V)->getOpcode() == Instruction::IntToPtr) ||
             isa<GlobalAlias>(V) || isa<GlobalVariable>(V)) {
    // Ignore values where we cannot do more than ObjectSizeOffsetVisitor.
    Result = ObjectSizeOffsetEvaluator::unknown();
  } else {
    LLVM_DEBUG(
        dbgs() << "ObjectSizeOffsetEvaluator::compute() unhandled value: " << *V
               << '\n');
    Result = ObjectSizeOffsetEvaluator::unknown();
  }

  // Don't reuse CacheIt since it may be invalid at this point.
  CacheMap[V] = SizeOffsetWeakTrackingVH(Result);
  return Result;
}

SizeOffsetValue ObjectSizeOffsetEvaluator::visitAllocaInst(AllocaInst &I) {
  if (!I.getAllocatedType()->isSized())
    return ObjectSizeOffsetEvaluator::unknown();

  // must be a VLA or vscale.
  assert(I.isArrayAllocation() || I.getAllocatedType()->isScalableTy());

  // If needed, adjust the alloca's operand size to match the pointer indexing
  // size. Subsequent math operations expect the types to match.
  Value *ArraySize = Builder.CreateZExtOrTrunc(
      I.getArraySize(),
      DL.getIndexType(I.getContext(), DL.getAllocaAddrSpace()));
  assert(ArraySize->getType() == Zero->getType() &&
         "Expected zero constant to have pointer index type");

  Value *Size = Builder.CreateTypeSize(
      ArraySize->getType(), DL.getTypeAllocSize(I.getAllocatedType()));
  Size = Builder.CreateMul(Size, ArraySize);
  return SizeOffsetValue(Size, Zero);
}

SizeOffsetValue ObjectSizeOffsetEvaluator::visitCallBase(CallBase &CB) {
  std::optional<AllocFnsTy> FnData = getAllocationSize(&CB, TLI);
  if (!FnData)
    return ObjectSizeOffsetEvaluator::unknown();

  // Handle strdup-like functions separately.
  if (FnData->AllocTy == StrDupLike) {
    // TODO: implement evaluation of strdup/strndup
    return ObjectSizeOffsetEvaluator::unknown();
  }

  Value *FirstArg = CB.getArgOperand(FnData->FstParam);
  FirstArg = Builder.CreateZExtOrTrunc(FirstArg, IntTy);
  if (FnData->SndParam < 0)
    return SizeOffsetValue(FirstArg, Zero);

  Value *SecondArg = CB.getArgOperand(FnData->SndParam);
  SecondArg = Builder.CreateZExtOrTrunc(SecondArg, IntTy);
  Value *Size = Builder.CreateMul(FirstArg, SecondArg);
  return SizeOffsetValue(Size, Zero);
}

SizeOffsetValue
ObjectSizeOffsetEvaluator::visitExtractElementInst(ExtractElementInst &) {
  return ObjectSizeOffsetEvaluator::unknown();
}

SizeOffsetValue
ObjectSizeOffsetEvaluator::visitExtractValueInst(ExtractValueInst &) {
  return ObjectSizeOffsetEvaluator::unknown();
}

SizeOffsetValue ObjectSizeOffsetEvaluator::visitGEPOperator(GEPOperator &GEP) {
  SizeOffsetValue PtrData = compute_(GEP.getPointerOperand());
  if (!PtrData.bothKnown())
    return ObjectSizeOffsetEvaluator::unknown();

  Value *Offset = emitGEPOffset(&Builder, DL, &GEP, /*NoAssumptions=*/true);
  Offset = Builder.CreateAdd(PtrData.Offset, Offset);
  return SizeOffsetValue(PtrData.Size, Offset);
}

SizeOffsetValue ObjectSizeOffsetEvaluator::visitIntToPtrInst(IntToPtrInst &) {
  // clueless
  return ObjectSizeOffsetEvaluator::unknown();
}

SizeOffsetValue ObjectSizeOffsetEvaluator::visitLoadInst(LoadInst &LI) {
  return ObjectSizeOffsetEvaluator::unknown();
}

SizeOffsetValue ObjectSizeOffsetEvaluator::visitPHINode(PHINode &PHI) {
  // Create 2 PHIs: one for size and another for offset.
  PHINode *SizePHI = Builder.CreatePHI(IntTy, PHI.getNumIncomingValues());
  PHINode *OffsetPHI = Builder.CreatePHI(IntTy, PHI.getNumIncomingValues());

  // Insert right away in the cache to handle recursive PHIs.
  CacheMap[&PHI] = SizeOffsetWeakTrackingVH(SizePHI, OffsetPHI);

  // Compute offset/size for each PHI incoming pointer.
  for (unsigned i = 0, e = PHI.getNumIncomingValues(); i != e; ++i) {
    BasicBlock *IncomingBlock = PHI.getIncomingBlock(i);
    Builder.SetInsertPoint(IncomingBlock, IncomingBlock->getFirstInsertionPt());
    SizeOffsetValue EdgeData = compute_(PHI.getIncomingValue(i));

    if (!EdgeData.bothKnown()) {
      OffsetPHI->replaceAllUsesWith(PoisonValue::get(IntTy));
      OffsetPHI->eraseFromParent();
      InsertedInstructions.erase(OffsetPHI);
      SizePHI->replaceAllUsesWith(PoisonValue::get(IntTy));
      SizePHI->eraseFromParent();
      InsertedInstructions.erase(SizePHI);
      return ObjectSizeOffsetEvaluator::unknown();
    }
    SizePHI->addIncoming(EdgeData.Size, IncomingBlock);
    OffsetPHI->addIncoming(EdgeData.Offset, IncomingBlock);
  }

  Value *Size = SizePHI, *Offset = OffsetPHI;
  if (Value *Tmp = SizePHI->hasConstantValue()) {
    Size = Tmp;
    SizePHI->replaceAllUsesWith(Size);
    SizePHI->eraseFromParent();
    InsertedInstructions.erase(SizePHI);
  }
  if (Value *Tmp = OffsetPHI->hasConstantValue()) {
    Offset = Tmp;
    OffsetPHI->replaceAllUsesWith(Offset);
    OffsetPHI->eraseFromParent();
    InsertedInstructions.erase(OffsetPHI);
  }
  return SizeOffsetValue(Size, Offset);
}

SizeOffsetValue ObjectSizeOffsetEvaluator::visitSelectInst(SelectInst &I) {
  SizeOffsetValue TrueSide = compute_(I.getTrueValue());
  SizeOffsetValue FalseSide = compute_(I.getFalseValue());

  if (!TrueSide.bothKnown() || !FalseSide.bothKnown())
    return ObjectSizeOffsetEvaluator::unknown();
  if (TrueSide == FalseSide)
    return TrueSide;

  Value *Size =
      Builder.CreateSelect(I.getCondition(), TrueSide.Size, FalseSide.Size);
  Value *Offset =
      Builder.CreateSelect(I.getCondition(), TrueSide.Offset, FalseSide.Offset);
  return SizeOffsetValue(Size, Offset);
}

SizeOffsetValue ObjectSizeOffsetEvaluator::visitInstruction(Instruction &I) {
  LLVM_DEBUG(dbgs() << "ObjectSizeOffsetEvaluator unknown instruction:" << I
                    << '\n');
  return ObjectSizeOffsetEvaluator::unknown();
}