//===- TypeMetadataUtils.cpp - Utilities related to type metadata ---------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains functions that make it easier to manipulate type metadata
// for devirtualization.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/TypeMetadataUtils.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"

using namespace llvm;

// Search for virtual calls that call FPtr and add them to DevirtCalls.
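// FPtr is a (possibly bitcast) function pointer loaded from a vtable at the
// given Offset; if HasNonCallUses is non-null, it is set whenever FPtr has a
// use that is neither a call nor an invoke.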
static void
findCallsAtConstantOffset(SmallVectorImpl<DevirtCallSite> &DevirtCalls,
                          bool *HasNonCallUses, Value *FPtr, uint64_t Offset,
                          const CallInst *CI, DominatorTree &DT) {
  for (const Use &U : FPtr->uses()) {
    Instruction *User = cast<Instruction>(U.getUser());
    // Ignore this instruction if it is not dominated by the type intrinsic
    // being analyzed. Otherwise we may transform a call sharing the same
    // vtable pointer incorrectly. Specifically, this situation can arise
    // after indirect call promotion and inlining, where we may have uses
    // of the vtable pointer guarded by a function pointer check, and a fallback
    // indirect call.
    if (!DT.dominates(CI, User))
      continue;
    if (isa<BitCastInst>(User)) {
      findCallsAtConstantOffset(DevirtCalls, HasNonCallUses, User, Offset, CI,
                                DT);
    } else if (auto *CI = dyn_cast<CallInst>(User)) {
      DevirtCalls.push_back({Offset, *CI});
    } else if (auto *II = dyn_cast<InvokeInst>(User)) {
      DevirtCalls.push_back({Offset, *II});
    } else if (HasNonCallUses) {
      *HasNonCallUses = true;
    }
  }
}

// Search for virtual calls that load from VPtr and add them to DevirtCalls.
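// For example (illustrative IR, not taken from a particular test case), a
// direct load of the vtable slot at offset 0 followed by a call:
//   %fptr = load ptr, ptr %vtable
//   call void %fptr(ptr %obj)
// is recorded at Offset, while constant GEPs and llvm.load.relative calls
// shift the recorded offset by their byte displacement.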
static void findLoadCallsAtConstantOffset(
    const Module *M, SmallVectorImpl<DevirtCallSite> &DevirtCalls, Value *VPtr,
    int64_t Offset, const CallInst *CI, DominatorTree &DT) {
  for (const Use &U : VPtr->uses()) {
    Value *User = U.getUser();
    if (isa<BitCastInst>(User)) {
      findLoadCallsAtConstantOffset(M, DevirtCalls, User, Offset, CI, DT);
    } else if (isa<LoadInst>(User)) {
      findCallsAtConstantOffset(DevirtCalls, nullptr, User, Offset, CI, DT);
    } else if (auto GEP = dyn_cast<GetElementPtrInst>(User)) {
      // Take into account the GEP offset.
      if (VPtr == GEP->getPointerOperand() && GEP->hasAllConstantIndices()) {
        SmallVector<Value *, 8> Indices(drop_begin(GEP->operands()));
        int64_t GEPOffset = M->getDataLayout().getIndexedOffsetInType(
            GEP->getSourceElementType(), Indices);
        findLoadCallsAtConstantOffset(M, DevirtCalls, User, Offset + GEPOffset,
                                      CI, DT);
      }
    } else if (auto *Call = dyn_cast<CallInst>(User)) {
      if (Call->getIntrinsicID() == llvm::Intrinsic::load_relative) {
        if (auto *LoadOffset = dyn_cast<ConstantInt>(Call->getOperand(1))) {
          findCallsAtConstantOffset(DevirtCalls, nullptr, User,
                                    Offset + LoadOffset->getSExtValue(), CI,
                                    DT);
        }
      }
    }
  }
}

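// Given a call to llvm.type.test or llvm.public.type.test, collect its
// llvm.assume uses and, if any exist, every devirtualizable call site
// reachable through the tested pointer. A typical pattern this walks
// (illustrative IR, names invented):
//   %vtable = load ptr, ptr %obj
//   %p = call i1 @llvm.type.test(ptr %vtable, metadata !"_ZTS1A")
//   call void @llvm.assume(i1 %p)
//   %fptr = load ptr, ptr %vtable
//   call void %fptr(ptr %obj)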
void llvm::findDevirtualizableCallsForTypeTest(
    SmallVectorImpl<DevirtCallSite> &DevirtCalls,
    SmallVectorImpl<CallInst *> &Assumes, const CallInst *CI,
    DominatorTree &DT) {
  assert(CI->getCalledFunction()->getIntrinsicID() == Intrinsic::type_test ||
         CI->getCalledFunction()->getIntrinsicID() ==
             Intrinsic::public_type_test);

  const Module *M = CI->getParent()->getParent()->getParent();

  // Find llvm.assume intrinsics for this llvm.type.test call.
  for (const Use &CIU : CI->uses())
    if (auto *Assume = dyn_cast<AssumeInst>(CIU.getUser()))
      Assumes.push_back(Assume);

  // If we found any, search for virtual calls based on %p and add them to
  // DevirtCalls.
  if (!Assumes.empty())
    findLoadCallsAtConstantOffset(
        M, DevirtCalls, CI->getArgOperand(0)->stripPointerCasts(), 0, CI, DT);
}

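// Given a call to llvm.type.checked.load, find all devirtualizable call
// sites fed by the loaded function pointer. The extractvalue uses of the
// intrinsic's aggregate result are collected in LoadedPtrs (index 0, the
// loaded pointer) and Preds (index 1, the type test result); any other use,
// or a non-constant offset argument, sets HasNonCallUses.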
void llvm::findDevirtualizableCallsForTypeCheckedLoad(
    SmallVectorImpl<DevirtCallSite> &DevirtCalls,
    SmallVectorImpl<Instruction *> &LoadedPtrs,
    SmallVectorImpl<Instruction *> &Preds, bool &HasNonCallUses,
    const CallInst *CI, DominatorTree &DT) {
  assert(CI->getCalledFunction()->getIntrinsicID() ==
         Intrinsic::type_checked_load);

  auto *Offset = dyn_cast<ConstantInt>(CI->getArgOperand(1));
  if (!Offset) {
    HasNonCallUses = true;
    return;
  }

  for (const Use &U : CI->uses()) {
    auto CIU = U.getUser();
    if (auto EVI = dyn_cast<ExtractValueInst>(CIU)) {
      if (EVI->getNumIndices() == 1 && EVI->getIndices()[0] == 0) {
        LoadedPtrs.push_back(EVI);
        continue;
      }
      if (EVI->getNumIndices() == 1 && EVI->getIndices()[0] == 1) {
        Preds.push_back(EVI);
        continue;
      }
    }
    HasNonCallUses = true;
  }

  for (Value *LoadedPtr : LoadedPtrs)
    findCallsAtConstantOffset(DevirtCalls, &HasNonCallUses, LoadedPtr,
                              Offset->getZExtValue(), CI, DT);
}

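// Walk the constant initializer I (typically a vtable) and return the
// pointer stored at byte Offset, descending through nested struct and array
// constants. For relative vtables this also looks through the
// "trunc (sub (ptrtoint @f), (ptrtoint @vtable))" encoding, using
// TopLevelGlobal to check that the subtrahend refers back to the vtable
// being processed. Returns nullptr if no pointer can be resolved at Offset.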
Constant *llvm::getPointerAtOffset(Constant *I, uint64_t Offset, Module &M,
                                   Constant *TopLevelGlobal) {
  // TODO: Ideally it would be the caller who knows if it's appropriate to
  // strip the DSOLocalEquivalent. More generally, it would feel more
  // appropriate to have two functions that handle absolute and relative
  // pointers separately.
  if (auto *Equiv = dyn_cast<DSOLocalEquivalent>(I))
    I = Equiv->getGlobalValue();

  if (I->getType()->isPointerTy()) {
    if (Offset == 0)
      return I;
    return nullptr;
  }

  const DataLayout &DL = M.getDataLayout();

  if (auto *C = dyn_cast<ConstantStruct>(I)) {
    const StructLayout *SL = DL.getStructLayout(C->getType());
    if (Offset >= SL->getSizeInBytes())
      return nullptr;

    unsigned Op = SL->getElementContainingOffset(Offset);
    return getPointerAtOffset(cast<Constant>(I->getOperand(Op)),
                              Offset - SL->getElementOffset(Op), M,
                              TopLevelGlobal);
  }
  if (auto *C = dyn_cast<ConstantArray>(I)) {
    ArrayType *VTableTy = C->getType();
    uint64_t ElemSize = DL.getTypeAllocSize(VTableTy->getElementType());

    unsigned Op = Offset / ElemSize;
    if (Op >= C->getNumOperands())
      return nullptr;

    return getPointerAtOffset(cast<Constant>(I->getOperand(Op)),
                              Offset % ElemSize, M, TopLevelGlobal);
  }

  // Relative-pointer support starts here.
  if (auto *CI = dyn_cast<ConstantInt>(I)) {
    if (Offset == 0 && CI->getZExtValue() == 0) {
      return I;
    }
  }
  if (auto *C = dyn_cast<ConstantExpr>(I)) {
    switch (C->getOpcode()) {
    case Instruction::Trunc:
    case Instruction::PtrToInt:
      return getPointerAtOffset(cast<Constant>(C->getOperand(0)), Offset, M,
                                TopLevelGlobal);
    case Instruction::Sub: {
      auto *Operand0 = cast<Constant>(C->getOperand(0));
      auto *Operand1 = cast<Constant>(C->getOperand(1));

      auto StripGEP = [](Constant *C) {
        auto *CE = dyn_cast<ConstantExpr>(C);
        if (!CE)
          return C;
        if (CE->getOpcode() != Instruction::GetElementPtr)
          return C;
        return CE->getOperand(0);
      };
      auto *Operand1TargetGlobal = StripGEP(getPointerAtOffset(Operand1, 0, M));

      // Check that in the "sub (@a, @b)" expression, @b points back to the top
      // level global (or a GEP thereof) that we're processing. Otherwise bail.
      if (Operand1TargetGlobal != TopLevelGlobal)
        return nullptr;

      return getPointerAtOffset(Operand0, Offset, M, TopLevelGlobal);
    }
    default:
      return nullptr;
    }
  }
  return nullptr;
}

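// If U is a ptrtoint constant expression participating in relative-pointer
// arithmetic, rewrite every "sub" expression built on it to the constant
// zero (metadata uses are left alone).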
static void replaceRelativePointerUserWithZero(User *U) {
  auto *PtrExpr = dyn_cast<ConstantExpr>(U);
  if (!PtrExpr || PtrExpr->getOpcode() != Instruction::PtrToInt)
    return;

  for (auto *PtrToIntUser : PtrExpr->users()) {
    auto *SubExpr = dyn_cast<ConstantExpr>(PtrToIntUser);
    if (!SubExpr || SubExpr->getOpcode() != Instruction::Sub)
      return;

    SubExpr->replaceNonMetadataUsesWith(
        ConstantInt::get(SubExpr->getType(), 0));
  }
}

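// Replace all relative-pointer references to C, including those made via
// dso_local_equivalent, with zero.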
void llvm::replaceRelativePointerUsersWithZero(Constant *C) {
  for (auto *U : C->users()) {
    if (auto *Equiv = dyn_cast<DSOLocalEquivalent>(U))
      replaceRelativePointerUsersWithZero(Equiv);
    else
      replaceRelativePointerUserWithZero(U);
  }
}
239