//===- AssumeBundleBuilder.cpp - tools to preserve information -*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Utils/AssumeBundleBuilder.h"
#include "llvm/ADT/DepthFirstIterator.h"
#include "llvm/ADT/MapVector.h"
#include "llvm/Analysis/AssumeBundleQueries.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"
#include "llvm/InitializePasses.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Transforms/Utils/Local.h"

using namespace llvm;

cl::opt<bool> ShouldPreserveAllAttributes(
    "assume-preserve-all", cl::init(false), cl::Hidden,
    cl::desc("enable preservation of all attributes, even those that are "
             "unlikely to be useful"));

cl::opt<bool> EnableKnowledgeRetention(
    "enable-knowledge-retention", cl::init(false), cl::Hidden,
    cl::desc(
        "enable preservation of attributes throughout code transformation"));

namespace {

bool isUsefullToPreserve(Attribute::AttrKind Kind) {
  switch (Kind) {
  case Attribute::NonNull:
  case Attribute::Alignment:
  case Attribute::Dereferenceable:
  case Attribute::DereferenceableOrNull:
  case Attribute::Cold:
    return true;
  default:
    return false;
  }
}

/// This class contains all the knowledge that has been gathered while building
/// an llvm.assume and the functions to manipulate it.
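///
/// As an illustrative sketch (not lifted from an existing test), preserving
/// the argument attributes of a call such as
///   call void @foo(i32* nonnull align 16 dereferenceable(4) %p)
/// would build an assume of the form
///   call void @llvm.assume(i1 true)
///       ["nonnull"(i32* %p), "align"(i32* %p, i64 16),
///        "dereferenceable"(i32* %p, i64 4)]
/// where each operand bundle is named after the attribute it retains and holds
/// the value the attribute applied to plus, when meaningful, the attribute's
/// integer argument.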
struct AssumeBuilderState {
  Module *M;

  using MapKey = std::pair<Value *, Attribute::AttrKind>;
  SmallMapVector<MapKey, unsigned, 8> AssumedKnowledgeMap;
  Instruction *InsertBeforeInstruction = nullptr;
  AssumptionCache *AC = nullptr;
  DominatorTree *DT = nullptr;

  AssumeBuilderState(Module *M, Instruction *I = nullptr,
                     AssumptionCache *AC = nullptr, DominatorTree *DT = nullptr)
      : M(M), InsertBeforeInstruction(I), AC(AC), DT(DT) {}

  bool tryToPreserveWithoutAddingAssume(RetainedKnowledge RK) {
    if (!InsertBeforeInstruction || !AC || !RK.WasOn)
      return false;
    bool HasBeenPreserved = false;
    Use *ToUpdate = nullptr;
    getKnowledgeForValue(
        RK.WasOn, {RK.AttrKind}, AC,
        [&](RetainedKnowledge RKOther, Instruction *Assume,
            const CallInst::BundleOpInfo *Bundle) {
          if (!isValidAssumeForContext(Assume, InsertBeforeInstruction, DT))
            return false;
          if (RKOther.ArgValue >= RK.ArgValue) {
            HasBeenPreserved = true;
            return true;
          } else if (isValidAssumeForContext(InsertBeforeInstruction, Assume,
                                             DT)) {
            HasBeenPreserved = true;
            IntrinsicInst *Intr = cast<IntrinsicInst>(Assume);
            ToUpdate = &Intr->op_begin()[Bundle->Begin + ABA_Argument];
            return true;
          }
          return false;
        });
    if (ToUpdate)
      ToUpdate->set(
          ConstantInt::get(Type::getInt64Ty(M->getContext()), RK.ArgValue));
    return HasBeenPreserved;
  }

  void addKnowledge(RetainedKnowledge RK) {
    if (RK.AttrKind == Attribute::None || tryToPreserveWithoutAddingAssume(RK))
      return;
    MapKey Key{RK.WasOn, RK.AttrKind};
    auto Lookup = AssumedKnowledgeMap.find(Key);
    if (Lookup == AssumedKnowledgeMap.end()) {
      AssumedKnowledgeMap[Key] = RK.ArgValue;
      return;
    }
    assert(((Lookup->second == 0 && RK.ArgValue == 0) ||
            (Lookup->second != 0 && RK.ArgValue != 0)) &&
           "inconsistent argument value");

    /// This is only desirable because, for all attributes taking an argument,
    /// higher is better.
    Lookup->second = std::max(Lookup->second, RK.ArgValue);
  }

  void addAttribute(Attribute Attr, Value *WasOn) {
    if (Attr.isTypeAttribute() || Attr.isStringAttribute() ||
        (!ShouldPreserveAllAttributes &&
         !isUsefullToPreserve(Attr.getKindAsEnum())))
      return;
    unsigned AttrArg = 0;
    if (Attr.isIntAttribute())
      AttrArg = Attr.getValueAsInt();
    addKnowledge({Attr.getKindAsEnum(), AttrArg, WasOn});
  }

  void addCall(const CallBase *Call) {
    auto addAttrList = [&](AttributeList AttrList) {
      for (unsigned Idx = AttributeList::FirstArgIndex;
           Idx < AttrList.getNumAttrSets(); Idx++)
        for (Attribute Attr : AttrList.getAttributes(Idx))
          addAttribute(Attr, Call->getArgOperand(Idx - 1));
      for (Attribute Attr : AttrList.getFnAttributes())
        addAttribute(Attr, nullptr);
    };
    addAttrList(Call->getAttributes());
    if (Function *Fn = Call->getCalledFunction())
      addAttrList(Fn->getAttributes());
  }

  IntrinsicInst *build() {
    if (AssumedKnowledgeMap.empty())
      return nullptr;
    Function *FnAssume = Intrinsic::getDeclaration(M, Intrinsic::assume);
    LLVMContext &C = M->getContext();
    SmallVector<OperandBundleDef, 8> OpBundle;
    for (auto &MapElem : AssumedKnowledgeMap) {
      SmallVector<Value *, 2> Args;
      if (MapElem.first.first)
        Args.push_back(MapElem.first.first);

      /// This is only valid because, for all attributes that currently exist,
      /// a value of 0 is useless and should not be preserved.
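      /// For example (illustrative), a "nonnull" or "cold" entry has an
      /// ArgValue of 0, so only the WasOn operand (if any) is emitted and the
      /// bundle takes the form "nonnull"(%p) rather than "nonnull"(%p, i64 0).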
      if (MapElem.second)
        Args.push_back(ConstantInt::get(Type::getInt64Ty(M->getContext()),
                                        MapElem.second));
      OpBundle.push_back(OperandBundleDefT<Value *>(
          std::string(Attribute::getNameFromAttrKind(MapElem.first.second)),
          Args));
    }
    return cast<IntrinsicInst>(CallInst::Create(
        FnAssume, ArrayRef<Value *>({ConstantInt::getTrue(C)}), OpBundle));
  }

  void addAccessedPtr(Instruction *MemInst, Value *Pointer, Type *AccType,
                      MaybeAlign MA) {
    unsigned DerefSize = MemInst->getModule()
                             ->getDataLayout()
                             .getTypeStoreSize(AccType)
                             .getKnownMinSize();
    if (DerefSize != 0) {
      addKnowledge({Attribute::Dereferenceable, DerefSize, Pointer});
      if (!NullPointerIsDefined(MemInst->getFunction(),
                                Pointer->getType()->getPointerAddressSpace()))
        addKnowledge({Attribute::NonNull, 0u, Pointer});
    }
    if (MA.valueOrOne() > 1)
      addKnowledge(
          {Attribute::Alignment, unsigned(MA.valueOrOne().value()), Pointer});
  }

  void addInstruction(Instruction *I) {
    if (auto *Call = dyn_cast<CallBase>(I))
      return addCall(Call);
    if (auto *Load = dyn_cast<LoadInst>(I))
      return addAccessedPtr(I, Load->getPointerOperand(), Load->getType(),
                            Load->getAlign());
    if (auto *Store = dyn_cast<StoreInst>(I))
      return addAccessedPtr(I, Store->getPointerOperand(),
                            Store->getValueOperand()->getType(),
                            Store->getAlign());
    // TODO: Add support for the other Instructions.
    // TODO: Maybe we should look around and merge with other llvm.assume.
  }
};

} // namespace

IntrinsicInst *llvm::buildAssumeFromInst(Instruction *I) {
  if (!EnableKnowledgeRetention)
    return nullptr;
  AssumeBuilderState Builder(I->getModule());
  Builder.addInstruction(I);
  return Builder.build();
}

void llvm::salvageKnowledge(Instruction *I, AssumptionCache *AC,
                            DominatorTree *DT) {
  if (!EnableKnowledgeRetention || I->isTerminator())
    return;
  AssumeBuilderState Builder(I->getModule(), I, AC, DT);
  Builder.addInstruction(I);
  if (IntrinsicInst *Intr = Builder.build()) {
    Intr->insertBefore(I);
    if (AC)
      AC->registerAssumption(Intr);
  }
}

namespace {

struct AssumeSimplify {
  Function &F;
  AssumptionCache &AC;
  DominatorTree *DT;
  LLVMContext &C;
  SmallDenseSet<IntrinsicInst *> CleanupToDo;
  StringMapEntry<uint32_t> *IgnoreTag;
  SmallDenseMap<BasicBlock *, SmallVector<IntrinsicInst *, 4>, 8> BBToAssume;
  bool MadeChange = false;

  AssumeSimplify(Function &F, AssumptionCache &AC, DominatorTree *DT,
                 LLVMContext &C)
      : F(F), AC(AC), DT(DT), C(C),
        IgnoreTag(C.getOrInsertBundleTag(IgnoreBundleTag)) {}

  void buildMapping(bool FilterBooleanArgument) {
    BBToAssume.clear();
    for (Value *V : AC.assumptions()) {
      if (!V)
        continue;
      IntrinsicInst *Assume = cast<IntrinsicInst>(V);
      if (FilterBooleanArgument) {
        auto *Arg = dyn_cast<ConstantInt>(Assume->getOperand(0));
        if (!Arg || Arg->isZero())
          continue;
      }
      BBToAssume[Assume->getParent()].push_back(Assume);
    }

    for (auto &Elem : BBToAssume) {
      llvm::sort(Elem.second,
                 [](const IntrinsicInst *LHS, const IntrinsicInst *RHS) {
                   return LHS->comesBefore(RHS);
                 });
    }
  }

  /// Remove all the assumes in CleanupToDo whose boolean argument is true,
  /// provided ForceCleanup is set or the assume doesn't hold valuable
  /// knowledge.
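  /// Here "valuable knowledge" means at least one bundle whose tag is not
  /// "ignore"; assumes whose bundles have all been retagged by
  /// dropRedundantKnowledge are treated as empty.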
  void RunCleanup(bool ForceCleanup) {
    for (IntrinsicInst *Assume : CleanupToDo) {
      auto *Arg = dyn_cast<ConstantInt>(Assume->getOperand(0));
      if (!Arg || Arg->isZero() ||
          (!ForceCleanup && !isAssumeWithEmptyBundle(*Assume)))
        continue;
      MadeChange = true;
      Assume->eraseFromParent();
    }
    CleanupToDo.clear();
  }

  /// Remove knowledge stored in an assume when it is already known by an
  /// attribute or another assume. When valid, this can instead update existing
  /// knowledge in an attribute or another assume.
  void dropRedundantKnowledge() {
    struct MapValue {
      IntrinsicInst *Assume;
      unsigned ArgValue;
      CallInst::BundleOpInfo *BOI;
    };
    buildMapping(false);
    SmallDenseMap<std::pair<Value *, Attribute::AttrKind>,
                  SmallVector<MapValue, 2>, 16>
        Knowledge;
    for (BasicBlock *BB : depth_first(&F))
      for (Value *V : BBToAssume[BB]) {
        if (!V)
          continue;
        IntrinsicInst *Assume = cast<IntrinsicInst>(V);
        for (CallInst::BundleOpInfo &BOI : Assume->bundle_op_infos()) {
          auto RemoveFromAssume = [&]() {
            CleanupToDo.insert(Assume);
            if (BOI.Begin != BOI.End) {
              Use *U = &Assume->op_begin()[BOI.Begin + ABA_WasOn];
              U->set(UndefValue::get(U->get()->getType()));
            }
            BOI.Tag = IgnoreTag;
          };
          if (BOI.Tag == IgnoreTag) {
            CleanupToDo.insert(Assume);
            continue;
          }
          RetainedKnowledge RK = getKnowledgeFromBundle(*Assume, BOI);
          if (auto *Arg = dyn_cast_or_null<Argument>(RK.WasOn)) {
            bool HasSameKindAttr = Arg->hasAttribute(RK.AttrKind);
            if (HasSameKindAttr)
              if (!Attribute::doesAttrKindHaveArgument(RK.AttrKind) ||
                  Arg->getAttribute(RK.AttrKind).getValueAsInt() >=
                      RK.ArgValue) {
                RemoveFromAssume();
                continue;
              }
            if (isValidAssumeForContext(
                    Assume, &*F.getEntryBlock().getFirstInsertionPt()) ||
                Assume == &*F.getEntryBlock().getFirstInsertionPt()) {
              if (HasSameKindAttr)
                Arg->removeAttr(RK.AttrKind);
              Arg->addAttr(Attribute::get(C, RK.AttrKind, RK.ArgValue));
              MadeChange = true;
              RemoveFromAssume();
              continue;
            }
          }
          auto &Lookup = Knowledge[{RK.WasOn, RK.AttrKind}];
          for (MapValue &Elem : Lookup) {
            if (!isValidAssumeForContext(Elem.Assume, Assume, DT))
              continue;
            if (Elem.ArgValue >= RK.ArgValue) {
              RemoveFromAssume();
              continue;
            } else if (isValidAssumeForContext(Assume, Elem.Assume, DT)) {
              Elem.Assume->op_begin()[Elem.BOI->Begin + ABA_Argument].set(
                  ConstantInt::get(Type::getInt64Ty(C), RK.ArgValue));
              MadeChange = true;
              RemoveFromAssume();
              continue;
            }
          }
          Lookup.push_back({Assume, RK.ArgValue, &BOI});
        }
      }
  }

  using MergeIterator = SmallVectorImpl<IntrinsicInst *>::iterator;

  /// Merge all the assumes from Begin to End and insert the resulting assume
  /// as high as possible in the basic block.
  void mergeRange(BasicBlock *BB, MergeIterator Begin, MergeIterator End) {
    if (Begin == End || std::next(Begin) == End)
      return;
    /// Provide no additional information so that AssumeBuilderState doesn't
    /// try to do any pruning since it already has been done better.
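    /// (Without a context instruction, assumption cache, or dominator tree,
    /// tryToPreserveWithoutAddingAssume bails out immediately, so every piece
    /// of knowledge fed to the builder below ends up in the merged assume.)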
    AssumeBuilderState Builder(F.getParent());

    /// For now the insertion point is initialized to the best value it could
    /// have.
    Instruction *InsertPt = BB->getFirstNonPHI();
    if (isa<LandingPadInst>(InsertPt))
      InsertPt = InsertPt->getNextNode();
    for (IntrinsicInst *I : make_range(Begin, End)) {
      CleanupToDo.insert(I);
      for (CallInst::BundleOpInfo &BOI : I->bundle_op_infos()) {
        RetainedKnowledge RK = getKnowledgeFromBundle(*I, BOI);
        if (!RK)
          continue;
        Builder.addKnowledge(RK);
        if (auto *I = dyn_cast_or_null<Instruction>(RK.WasOn))
          if (I->getParent() == InsertPt->getParent() &&
              (InsertPt->comesBefore(I) || InsertPt == I))
            InsertPt = I->getNextNode();
      }
    }

    /// Adjust InsertPt if it is before Begin, since mergeAssumes only
    /// guarantees we can place the resulting assume between Begin and End.
    if (InsertPt->comesBefore(*Begin))
      for (auto It = (*Begin)->getIterator(), E = InsertPt->getIterator();
           It != E; --It)
        if (!isGuaranteedToTransferExecutionToSuccessor(&*It)) {
          InsertPt = It->getNextNode();
          break;
        }
    IntrinsicInst *MergedAssume = Builder.build();
    if (!MergedAssume)
      return;
    MadeChange = true;
    MergedAssume->insertBefore(InsertPt);
    AC.registerAssumption(MergedAssume);
  }

  /// Merge assumes when they are in the same BasicBlock and
  /// isGuaranteedToTransferExecutionToSuccessor returns true for every
  /// instruction between them.
  void mergeAssumes() {
    buildMapping(true);

    SmallVector<MergeIterator, 4> SplitPoints;
    for (auto &Elem : BBToAssume) {
      SmallVectorImpl<IntrinsicInst *> &AssumesInBB = Elem.second;
      if (AssumesInBB.size() < 2)
        continue;
      /// AssumesInBB is already sorted by order in the block.

      BasicBlock::iterator It = AssumesInBB.front()->getIterator();
      BasicBlock::iterator E = AssumesInBB.back()->getIterator();
      SplitPoints.push_back(AssumesInBB.begin());
      MergeIterator LastSplit = AssumesInBB.begin();
      for (; It != E; ++It)
        if (!isGuaranteedToTransferExecutionToSuccessor(&*It)) {
          for (; (*LastSplit)->comesBefore(&*It); ++LastSplit)
            ;
          if (SplitPoints.back() != LastSplit)
            SplitPoints.push_back(LastSplit);
        }
      SplitPoints.push_back(AssumesInBB.end());
      for (auto SplitIt = SplitPoints.begin();
           SplitIt != std::prev(SplitPoints.end()); SplitIt++) {
        mergeRange(Elem.first, *SplitIt, *(SplitIt + 1));
      }
      SplitPoints.clear();
    }
  }
};

bool simplifyAssumes(Function &F, AssumptionCache *AC, DominatorTree *DT) {
  AssumeSimplify AS(F, *AC, DT, F.getContext());

  /// Remove knowledge that is already known by a dominating assume or an
  /// attribute.
  AS.dropRedundantKnowledge();

  /// Remove assumes that are empty.
  AS.RunCleanup(false);

  /// Merge assumes in the same basic block when possible.
  AS.mergeAssumes();

  /// Remove the assumes that were merged.
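  /// (They were added to CleanupToDo by mergeRange; ForceCleanup erases them
  /// even though they still carry bundles, since their knowledge now lives in
  /// the merged assume.)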
  AS.RunCleanup(true);
  return AS.MadeChange;
}

} // namespace

PreservedAnalyses AssumeSimplifyPass::run(Function &F,
                                          FunctionAnalysisManager &AM) {
  if (!EnableKnowledgeRetention)
    return PreservedAnalyses::all();
  simplifyAssumes(F, &AM.getResult<AssumptionAnalysis>(F),
                  AM.getCachedResult<DominatorTreeAnalysis>(F));
  return PreservedAnalyses::all();
}

class AssumeSimplifyPassLegacyPass : public FunctionPass {
public:
  static char ID;

  AssumeSimplifyPassLegacyPass() : FunctionPass(ID) {
    initializeAssumeSimplifyPassLegacyPassPass(
        *PassRegistry::getPassRegistry());
  }
  bool runOnFunction(Function &F) override {
    if (skipFunction(F) || !EnableKnowledgeRetention)
      return false;
    DominatorTreeWrapperPass *DT =
        getAnalysisIfAvailable<DominatorTreeWrapperPass>();
    AssumptionCache &AC =
        getAnalysis<AssumptionCacheTracker>().getAssumptionCache(F);
    return simplifyAssumes(F, &AC, DT ? &DT->getDomTree() : nullptr);
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    // AssumptionCacheTracker is fetched with getAnalysis above, so it must be
    // declared as required here.
    AU.addRequired<AssumptionCacheTracker>();
    AU.setPreservesAll();
  }
};

char AssumeSimplifyPassLegacyPass::ID = 0;

INITIALIZE_PASS_BEGIN(AssumeSimplifyPassLegacyPass, "assume-simplify",
                      "Assume Simplify", false, false)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker)
INITIALIZE_PASS_END(AssumeSimplifyPassLegacyPass, "assume-simplify",
                    "Assume Simplify", false, false)

FunctionPass *llvm::createAssumeSimplifyPass() {
  return new AssumeSimplifyPassLegacyPass();
}

PreservedAnalyses AssumeBuilderPass::run(Function &F,
                                         FunctionAnalysisManager &AM) {
  AssumptionCache *AC = AM.getCachedResult<AssumptionAnalysis>(F);
  DominatorTree *DT = AM.getCachedResult<DominatorTreeAnalysis>(F);
  for (Instruction &I : instructions(F))
    salvageKnowledge(&I, AC, DT);
  return PreservedAnalyses::all();
}
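
// Illustrative usage sketch: the new-PM pass names below are assumed to match
// the entries registered for AssumeBuilderPass and AssumeSimplifyPass in
// PassRegistry.def; only -enable-knowledge-retention is defined in this file.
//   opt -enable-knowledge-retention \
//       -passes='assume-builder,assume-simplify' -S in.ll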