//===- ScalarEvolutionExpander.cpp - Scalar Evolution Analysis ------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains the implementation of the scalar evolution expander,
// which is used to generate the code corresponding to a given scalar evolution
// expression.
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Utils/ScalarEvolutionExpander.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/PatternMatch.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/LoopUtils.h"

#ifdef LLVM_ENABLE_ABI_BREAKING_CHECKS
#define SCEV_DEBUG_WITH_TYPE(TYPE, X) DEBUG_WITH_TYPE(TYPE, X)
#else
#define SCEV_DEBUG_WITH_TYPE(TYPE, X)
#endif

using namespace llvm;

cl::opt<unsigned> llvm::SCEVCheapExpansionBudget(
    "scev-cheap-expansion-budget", cl::Hidden, cl::init(4),
    cl::desc("When performing SCEV expansion only if it is cheap to do, this "
             "controls the budget that is considered cheap (default = 4)"));

using namespace PatternMatch;

/// ReuseOrCreateCast - Arrange for there to be a cast of V to Ty at IP,
/// reusing an existing cast if a suitable one (= dominating IP) exists, or
/// creating a new one.
Value *SCEVExpander::ReuseOrCreateCast(Value *V, Type *Ty,
                                       Instruction::CastOps Op,
                                       BasicBlock::iterator IP) {
  // This function must be called with the builder having a valid insertion
  // point.
  // It doesn't need to be the actual IP where the uses of the returned
  // cast will be added, but it must dominate such IP.
  // We use this precondition to produce a cast that will dominate all its
  // uses. In particular, this is crucial for the case where the builder's
  // insertion point *is* the point where we were asked to put the cast.
  // Since we don't know the builder's insertion point is actually
  // where the uses will be added (only that it dominates it), we are
  // not allowed to move it.
  BasicBlock::iterator BIP = Builder.GetInsertPoint();

  Value *Ret = nullptr;

  // Check to see if there is already a cast!
  for (User *U : V->users()) {
    if (U->getType() != Ty)
      continue;
    CastInst *CI = dyn_cast<CastInst>(U);
    if (!CI || CI->getOpcode() != Op)
      continue;

    // Found a suitable cast that is at IP or comes before IP. Use it. Note
    // that the cast must also properly dominate the Builder's insertion point.
    if (IP->getParent() == CI->getParent() && &*BIP != CI &&
        (&*IP == CI || CI->comesBefore(&*IP))) {
      Ret = CI;
      break;
    }
  }

  // Create a new cast.
  if (!Ret) {
    SCEVInsertPointGuard Guard(Builder, this);
    Builder.SetInsertPoint(&*IP);
    Ret = Builder.CreateCast(Op, V, Ty, V->getName());
  }

  // We assert at the end of the function since IP might point to an
  // instruction with different dominance properties than a cast
  // (an invoke for example) and not dominate BIP (but the cast does).
  assert(!isa<Instruction>(Ret) ||
         SE.DT.dominates(cast<Instruction>(Ret), &*BIP));

  return Ret;
}

BasicBlock::iterator
SCEVExpander::findInsertPointAfter(Instruction *I,
                                   Instruction *MustDominate) const {
  BasicBlock::iterator IP = ++I->getIterator();
  if (auto *II = dyn_cast<InvokeInst>(I))
    IP = II->getNormalDest()->begin();

  while (isa<PHINode>(IP))
    ++IP;

  if (isa<FuncletPadInst>(IP) || isa<LandingPadInst>(IP)) {
    ++IP;
  } else if (isa<CatchSwitchInst>(IP)) {
    IP = MustDominate->getParent()->getFirstInsertionPt();
  } else {
    assert(!IP->isEHPad() && "unexpected eh pad!");
  }

  // Adjust insert point to be after instructions inserted by the expander, so
  // we can re-use already inserted instructions. Avoid skipping past the
  // original \p MustDominate, in case it is an inserted instruction.
  while (isInsertedInstruction(&*IP) && &*IP != MustDominate)
    ++IP;

  return IP;
}

BasicBlock::iterator
SCEVExpander::GetOptimalInsertionPointForCastOf(Value *V) const {
  // Cast the argument at the beginning of the entry block, after
  // any bitcasts of other arguments.
  if (Argument *A = dyn_cast<Argument>(V)) {
    BasicBlock::iterator IP = A->getParent()->getEntryBlock().begin();
    while ((isa<BitCastInst>(IP) &&
            isa<Argument>(cast<BitCastInst>(IP)->getOperand(0)) &&
            cast<BitCastInst>(IP)->getOperand(0) != A) ||
           isa<DbgInfoIntrinsic>(IP))
      ++IP;
    return IP;
  }

  // Cast the instruction immediately after the instruction.
  if (Instruction *I = dyn_cast<Instruction>(V))
    return findInsertPointAfter(I, &*Builder.GetInsertPoint());

  // Otherwise, this must be some kind of a constant,
  // so let's plop this cast into the function's entry block.
  assert(isa<Constant>(V) &&
         "Expected the cast argument to be a global/constant");
  return Builder.GetInsertBlock()
      ->getParent()
      ->getEntryBlock()
      .getFirstInsertionPt();
}

/// InsertNoopCastOfTo - Insert a cast of V to the specified type,
/// which must be possible with a noop cast, doing what we can to share
/// the casts.
Value *SCEVExpander::InsertNoopCastOfTo(Value *V, Type *Ty) {
  Instruction::CastOps Op = CastInst::getCastOpcode(V, false, Ty, false);
  assert((Op == Instruction::BitCast ||
          Op == Instruction::PtrToInt ||
          Op == Instruction::IntToPtr) &&
         "InsertNoopCastOfTo cannot perform non-noop casts!");
  assert(SE.getTypeSizeInBits(V->getType()) == SE.getTypeSizeInBits(Ty) &&
         "InsertNoopCastOfTo cannot change sizes!");

  // inttoptr only works for integral pointers. For non-integral pointers, we
  // can create a GEP on i8* null with the integral value as index. Note that
  // it is safe to use GEP of null instead of inttoptr here, because only
  // expressions already based on a GEP of null should be converted to pointers
  // during expansion.
  if (Op == Instruction::IntToPtr) {
    auto *PtrTy = cast<PointerType>(Ty);
    if (DL.isNonIntegralPointerType(PtrTy)) {
      auto *Int8PtrTy = Builder.getInt8PtrTy(PtrTy->getAddressSpace());
      assert(DL.getTypeAllocSize(Int8PtrTy->getElementType()) == 1 &&
             "alloc size of i8 must be 1 byte for the GEP to be correct");
      auto *GEP = Builder.CreateGEP(
          Builder.getInt8Ty(), Constant::getNullValue(Int8PtrTy), V, "uglygep");
      return Builder.CreateBitCast(GEP, Ty);
    }
  }

  // Short-circuit unnecessary bitcasts.
  if (Op == Instruction::BitCast) {
    if (V->getType() == Ty)
      return V;
    if (CastInst *CI = dyn_cast<CastInst>(V)) {
      if (CI->getOperand(0)->getType() == Ty)
        return CI->getOperand(0);
    }
  }

  // Short-circuit unnecessary inttoptr<->ptrtoint casts.
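  // For illustration: a request to ptrtoint a value that is itself an
  // inttoptr (or vice versa) of the same bit width simply returns the
  // original operand rather than stacking a second cast on top of it.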
  if ((Op == Instruction::PtrToInt || Op == Instruction::IntToPtr) &&
      SE.getTypeSizeInBits(Ty) == SE.getTypeSizeInBits(V->getType())) {
    if (CastInst *CI = dyn_cast<CastInst>(V))
      if ((CI->getOpcode() == Instruction::PtrToInt ||
           CI->getOpcode() == Instruction::IntToPtr) &&
          SE.getTypeSizeInBits(CI->getType()) ==
              SE.getTypeSizeInBits(CI->getOperand(0)->getType()))
        return CI->getOperand(0);
    if (ConstantExpr *CE = dyn_cast<ConstantExpr>(V))
      if ((CE->getOpcode() == Instruction::PtrToInt ||
           CE->getOpcode() == Instruction::IntToPtr) &&
          SE.getTypeSizeInBits(CE->getType()) ==
              SE.getTypeSizeInBits(CE->getOperand(0)->getType()))
        return CE->getOperand(0);
  }

  // Fold a cast of a constant.
  if (Constant *C = dyn_cast<Constant>(V))
    return ConstantExpr::getCast(Op, C, Ty);

  // Try to reuse existing cast, or insert one.
  return ReuseOrCreateCast(V, Ty, Op, GetOptimalInsertionPointForCastOf(V));
}

/// InsertBinop - Insert the specified binary operator, doing a small amount
/// of work to avoid inserting an obviously redundant operation, and hoisting
/// to an outer loop when the opportunity is there and it is safe.
Value *SCEVExpander::InsertBinop(Instruction::BinaryOps Opcode,
                                 Value *LHS, Value *RHS,
                                 SCEV::NoWrapFlags Flags, bool IsSafeToHoist) {
  // Fold a binop with constant operands.
  if (Constant *CLHS = dyn_cast<Constant>(LHS))
    if (Constant *CRHS = dyn_cast<Constant>(RHS))
      return ConstantExpr::get(Opcode, CLHS, CRHS);

  // Do a quick scan to see if we have this binop nearby. If so, reuse it.
  unsigned ScanLimit = 6;
  BasicBlock::iterator BlockBegin = Builder.GetInsertBlock()->begin();
  // Scanning starts from the last instruction before the insertion point.
  BasicBlock::iterator IP = Builder.GetInsertPoint();
  if (IP != BlockBegin) {
    --IP;
    for (; ScanLimit; --IP, --ScanLimit) {
      // Don't count dbg.value against the ScanLimit, to avoid perturbing the
      // generated code.
      if (isa<DbgInfoIntrinsic>(IP))
        ScanLimit++;

      auto canGenerateIncompatiblePoison = [&Flags](Instruction *I) {
        // Ensure that no-wrap flags match.
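        // For example, reusing an existing 'add nsw' for an expansion that
        // did not prove no-signed-wrap could make the reused value poison on
        // inputs the expansion must still handle, so mismatched flags are
        // rejected.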
        if (isa<OverflowingBinaryOperator>(I)) {
          if (I->hasNoSignedWrap() != (Flags & SCEV::FlagNSW))
            return true;
          if (I->hasNoUnsignedWrap() != (Flags & SCEV::FlagNUW))
            return true;
        }
        // Conservatively, do not reuse any instruction that has the exact
        // flag set.
        if (isa<PossiblyExactOperator>(I) && I->isExact())
          return true;
        return false;
      };
      if (IP->getOpcode() == (unsigned)Opcode && IP->getOperand(0) == LHS &&
          IP->getOperand(1) == RHS && !canGenerateIncompatiblePoison(&*IP))
        return &*IP;
      if (IP == BlockBegin) break;
    }
  }

  // Save the original insertion point so we can restore it when we're done.
  DebugLoc Loc = Builder.GetInsertPoint()->getDebugLoc();
  SCEVInsertPointGuard Guard(Builder, this);

  if (IsSafeToHoist) {
    // Move the insertion point out of as many loops as we can.
    while (const Loop *L = SE.LI.getLoopFor(Builder.GetInsertBlock())) {
      if (!L->isLoopInvariant(LHS) || !L->isLoopInvariant(RHS)) break;
      BasicBlock *Preheader = L->getLoopPreheader();
      if (!Preheader) break;

      // Ok, move up a level.
      Builder.SetInsertPoint(Preheader->getTerminator());
    }
  }

  // If we haven't found this binop, insert it.
  Instruction *BO = cast<Instruction>(Builder.CreateBinOp(Opcode, LHS, RHS));
  BO->setDebugLoc(Loc);
  if (Flags & SCEV::FlagNUW)
    BO->setHasNoUnsignedWrap();
  if (Flags & SCEV::FlagNSW)
    BO->setHasNoSignedWrap();

  return BO;
}

/// FactorOutConstant - Test if S is divisible by Factor, using signed
/// division. If so, update S with Factor divided out and return true.
/// S need not be evenly divisible if a reasonable remainder can be
/// computed.
static bool FactorOutConstant(const SCEV *&S, const SCEV *&Remainder,
                              const SCEV *Factor, ScalarEvolution &SE,
                              const DataLayout &DL) {
  // Everything is divisible by one.
  if (Factor->isOne())
    return true;

  // x/x == 1.
  if (S == Factor) {
    S = SE.getConstant(S->getType(), 1);
    return true;
  }

  // For a Constant, check for a multiple of the given factor.
  if (const SCEVConstant *C = dyn_cast<SCEVConstant>(S)) {
    // 0/x == 0.
    if (C->isZero())
      return true;
    // Check for divisibility.
    if (const SCEVConstant *FC = dyn_cast<SCEVConstant>(Factor)) {
      ConstantInt *CI =
          ConstantInt::get(SE.getContext(), C->getAPInt().sdiv(FC->getAPInt()));
      // If the quotient is zero and the remainder is non-zero, reject
      // the value at this scale. It will be considered for subsequent
      // smaller scales.
      if (!CI->isZero()) {
        const SCEV *Div = SE.getConstant(CI);
        S = Div;
        Remainder = SE.getAddExpr(
            Remainder, SE.getConstant(C->getAPInt().srem(FC->getAPInt())));
        return true;
      }
    }
  }

  // In a Mul, check if there is a constant operand which is a multiple
  // of the given factor.
  if (const SCEVMulExpr *M = dyn_cast<SCEVMulExpr>(S)) {
    // Size is known, check if there is a constant operand which is a multiple
    // of the given factor. If so, we can factor it.
    if (const SCEVConstant *FC = dyn_cast<SCEVConstant>(Factor))
      if (const SCEVConstant *C = dyn_cast<SCEVConstant>(M->getOperand(0)))
        if (!C->getAPInt().srem(FC->getAPInt())) {
          SmallVector<const SCEV *, 4> NewMulOps(M->operands());
          NewMulOps[0] = SE.getConstant(C->getAPInt().sdiv(FC->getAPInt()));
          S = SE.getMulExpr(NewMulOps);
          return true;
        }
  }

  // In an AddRec, check if both start and step are divisible.
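  // For example, factoring 4 out of {8,+,4} yields {2,+,1}; if the step is
  // not evenly divisible we give up rather than splitting the recurrence.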
  if (const SCEVAddRecExpr *A = dyn_cast<SCEVAddRecExpr>(S)) {
    const SCEV *Step = A->getStepRecurrence(SE);
    const SCEV *StepRem = SE.getConstant(Step->getType(), 0);
    if (!FactorOutConstant(Step, StepRem, Factor, SE, DL))
      return false;
    if (!StepRem->isZero())
      return false;
    const SCEV *Start = A->getStart();
    if (!FactorOutConstant(Start, Remainder, Factor, SE, DL))
      return false;
    S = SE.getAddRecExpr(Start, Step, A->getLoop(),
                         A->getNoWrapFlags(SCEV::FlagNW));
    return true;
  }

  return false;
}

/// SimplifyAddOperands - Sort and simplify a list of add operands. NumAddRecs
/// is the number of SCEVAddRecExprs present, which are kept at the end of
/// the list.
///
static void SimplifyAddOperands(SmallVectorImpl<const SCEV *> &Ops,
                                Type *Ty,
                                ScalarEvolution &SE) {
  unsigned NumAddRecs = 0;
  for (unsigned i = Ops.size(); i > 0 && isa<SCEVAddRecExpr>(Ops[i-1]); --i)
    ++NumAddRecs;
  // Group Ops into non-addrecs and addrecs.
  SmallVector<const SCEV *, 8> NoAddRecs(Ops.begin(), Ops.end() - NumAddRecs);
  SmallVector<const SCEV *, 8> AddRecs(Ops.end() - NumAddRecs, Ops.end());
  // Let ScalarEvolution sort and simplify the non-addrecs list.
  const SCEV *Sum = NoAddRecs.empty() ?
                    SE.getConstant(Ty, 0) :
                    SE.getAddExpr(NoAddRecs);
  // If it returned an add, use the operands. Otherwise it simplified
  // the sum into a single value, so just use that.
  Ops.clear();
  if (const SCEVAddExpr *Add = dyn_cast<SCEVAddExpr>(Sum))
    Ops.append(Add->op_begin(), Add->op_end());
  else if (!Sum->isZero())
    Ops.push_back(Sum);
  // Then append the addrecs.
  Ops.append(AddRecs.begin(), AddRecs.end());
}

/// SplitAddRecs - Flatten a list of add operands, moving addrec start values
/// out to the top level. For example, convert {a + b,+,c} to a, b, {0,+,c}.
/// This helps expose more opportunities for folding parts of the expressions
/// into GEP indices.
///
static void SplitAddRecs(SmallVectorImpl<const SCEV *> &Ops,
                         Type *Ty,
                         ScalarEvolution &SE) {
  // Find the addrecs.
  SmallVector<const SCEV *, 8> AddRecs;
  for (unsigned i = 0, e = Ops.size(); i != e; ++i)
    while (const SCEVAddRecExpr *A = dyn_cast<SCEVAddRecExpr>(Ops[i])) {
      const SCEV *Start = A->getStart();
      if (Start->isZero()) break;
      const SCEV *Zero = SE.getConstant(Ty, 0);
      AddRecs.push_back(SE.getAddRecExpr(Zero,
                                         A->getStepRecurrence(SE),
                                         A->getLoop(),
                                         A->getNoWrapFlags(SCEV::FlagNW)));
      if (const SCEVAddExpr *Add = dyn_cast<SCEVAddExpr>(Start)) {
        Ops[i] = Zero;
        Ops.append(Add->op_begin(), Add->op_end());
        e += Add->getNumOperands();
      } else {
        Ops[i] = Start;
      }
    }
  if (!AddRecs.empty()) {
    // Add the addrecs onto the end of the list.
    Ops.append(AddRecs.begin(), AddRecs.end());
    // Re-sort the operand list, moving any constants to the front.
    SimplifyAddOperands(Ops, Ty, SE);
  }
}

/// expandAddToGEP - Expand an addition expression with a pointer type into
/// a GEP instead of using ptrtoint+arithmetic+inttoptr. This helps
/// BasicAliasAnalysis and other passes analyze the result. See the rules
/// for getelementptr vs. inttoptr in
/// http://llvm.org/docs/LangRef.html#pointeraliasing
/// for details.
///
/// Design note: The correctness of using getelementptr here depends on
/// ScalarEvolution not recognizing inttoptr and ptrtoint operators, as
/// they may introduce pointer arithmetic which may not be safely converted
/// into getelementptr.
///
/// Design note: It might seem desirable for this function to be more
/// loop-aware. If some of the indices are loop-invariant while others
/// aren't, it might seem desirable to emit multiple GEPs, keeping the
/// loop-invariant portions of the overall computation outside the loop.
/// However, there are a few reasons this is not done here. Hoisting simple
/// arithmetic is a low-level optimization that often isn't very
/// important until late in the optimization process. In fact, passes
/// like InstructionCombining will combine GEPs, even if it means
/// pushing loop-invariant computation down into loops, so even if the
/// GEPs were split here, the work would quickly be undone.
/// The LoopStrengthReduction pass, which is usually run quite late (and
/// after the last InstructionCombining pass), takes care of hoisting
/// loop-invariant portions of expressions, after considering what
/// can be folded using target addressing modes.
///
Value *SCEVExpander::expandAddToGEP(const SCEV *const *op_begin,
                                    const SCEV *const *op_end,
                                    PointerType *PTy,
                                    Type *Ty,
                                    Value *V) {
  SmallVector<Value *, 4> GepIndices;
  SmallVector<const SCEV *, 8> Ops(op_begin, op_end);
  bool AnyNonZeroIndices = false;

  // Split AddRecs up into parts as either of the parts may be usable
  // without the other.
  SplitAddRecs(Ops, Ty, SE);

  Type *IntIdxTy = DL.getIndexType(PTy);

  // For opaque pointers, always generate i8 GEP.
  if (!PTy->isOpaque()) {
    // Descend down the pointer's type and attempt to convert the other
    // operands into GEP indices, at each level. The first index in a GEP
    // indexes into the array implied by the pointer operand; the rest of
    // the indices index into the element or field type selected by the
    // preceding index.
    Type *ElTy = PTy->getElementType();
    for (;;) {
      // If the scale size is not 0, attempt to factor out a scale for
      // array indexing.
      SmallVector<const SCEV *, 8> ScaledOps;
      if (ElTy->isSized()) {
        const SCEV *ElSize = SE.getSizeOfExpr(IntIdxTy, ElTy);
        if (!ElSize->isZero()) {
          SmallVector<const SCEV *, 8> NewOps;
          for (const SCEV *Op : Ops) {
            const SCEV *Remainder = SE.getConstant(Ty, 0);
            if (FactorOutConstant(Op, Remainder, ElSize, SE, DL)) {
              // Op now has ElSize factored out.
              ScaledOps.push_back(Op);
              if (!Remainder->isZero())
                NewOps.push_back(Remainder);
              AnyNonZeroIndices = true;
            } else {
              // The operand was not divisible, so add it to the list of
              // operands we'll scan next iteration.
              NewOps.push_back(Op);
            }
          }
          // If we made any changes, update Ops.
          if (!ScaledOps.empty()) {
            Ops = NewOps;
            SimplifyAddOperands(Ops, Ty, SE);
          }
        }
      }

      // Record the scaled array index for this level of the type.
      // If we didn't find any operands that could be factored, tentatively
      // assume that element zero was selected (since the zero offset
      // would obviously be folded away).
      Value *Scaled =
          ScaledOps.empty()
              ? Constant::getNullValue(Ty)
              : expandCodeForImpl(SE.getAddExpr(ScaledOps), Ty, false);
      GepIndices.push_back(Scaled);

      // Collect struct field index operands.
      while (StructType *STy = dyn_cast<StructType>(ElTy)) {
        bool FoundFieldNo = false;
        // An empty struct has no fields.
        if (STy->getNumElements() == 0) break;
        // Field offsets are known. See if a constant offset falls within any
        // of the struct fields.
        if (Ops.empty())
          break;
        if (const SCEVConstant *C = dyn_cast<SCEVConstant>(Ops[0]))
          if (SE.getTypeSizeInBits(C->getType()) <= 64) {
            const StructLayout &SL = *DL.getStructLayout(STy);
            uint64_t FullOffset = C->getValue()->getZExtValue();
            if (FullOffset < SL.getSizeInBytes()) {
              unsigned ElIdx = SL.getElementContainingOffset(FullOffset);
              GepIndices.push_back(
                  ConstantInt::get(Type::getInt32Ty(Ty->getContext()), ElIdx));
              ElTy = STy->getTypeAtIndex(ElIdx);
              Ops[0] =
                  SE.getConstant(Ty, FullOffset - SL.getElementOffset(ElIdx));
              AnyNonZeroIndices = true;
              FoundFieldNo = true;
            }
          }
        // If no struct field offsets were found, tentatively assume that
        // field zero was selected (since the zero offset would obviously
        // be folded away).
        if (!FoundFieldNo) {
          ElTy = STy->getTypeAtIndex(0u);
          GepIndices.push_back(
              Constant::getNullValue(Type::getInt32Ty(Ty->getContext())));
        }
      }

      if (ArrayType *ATy = dyn_cast<ArrayType>(ElTy))
        ElTy = ATy->getElementType();
      else
        // FIXME: Handle VectorType.
        // E.g., if ElTy is a scalable vector, then ElSize is not a
        // compile-time constant and therefore cannot be factored out. The
        // generated IR is less ideal, with base 'V' cast to i8* and an ugly
        // getelementptr over that.
        break;
    }
  }

  // If none of the operands were convertible to proper GEP indices, cast
  // the base to i8* and do an ugly getelementptr with that. It's still
  // better than ptrtoint+arithmetic+inttoptr at least.
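  // The fallback below emits roughly:
  //   %uglygep = getelementptr i8, i8* %base, i64 %offset
  // where %offset is the expansion of the remaining operands as a plain byte
  // count (the value names here are illustrative only).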
  if (!AnyNonZeroIndices) {
    // Cast the base to i8*.
    if (!PTy->isOpaque())
      V = InsertNoopCastOfTo(V,
          Type::getInt8PtrTy(Ty->getContext(), PTy->getAddressSpace()));

    assert(!isa<Instruction>(V) ||
           SE.DT.dominates(cast<Instruction>(V), &*Builder.GetInsertPoint()));

    // Expand the operands for a plain byte offset.
    Value *Idx = expandCodeForImpl(SE.getAddExpr(Ops), Ty, false);

    // Fold a GEP with constant operands.
    if (Constant *CLHS = dyn_cast<Constant>(V))
      if (Constant *CRHS = dyn_cast<Constant>(Idx))
        return ConstantExpr::getGetElementPtr(Type::getInt8Ty(Ty->getContext()),
                                              CLHS, CRHS);

    // Do a quick scan to see if we have this GEP nearby. If so, reuse it.
    unsigned ScanLimit = 6;
    BasicBlock::iterator BlockBegin = Builder.GetInsertBlock()->begin();
    // Scanning starts from the last instruction before the insertion point.
    BasicBlock::iterator IP = Builder.GetInsertPoint();
    if (IP != BlockBegin) {
      --IP;
      for (; ScanLimit; --IP, --ScanLimit) {
        // Don't count dbg.value against the ScanLimit, to avoid perturbing the
        // generated code.
        if (isa<DbgInfoIntrinsic>(IP))
          ScanLimit++;
        if (IP->getOpcode() == Instruction::GetElementPtr &&
            IP->getOperand(0) == V && IP->getOperand(1) == Idx)
          return &*IP;
        if (IP == BlockBegin) break;
      }
    }

    // Save the original insertion point so we can restore it when we're done.
    SCEVInsertPointGuard Guard(Builder, this);

    // Move the insertion point out of as many loops as we can.
    while (const Loop *L = SE.LI.getLoopFor(Builder.GetInsertBlock())) {
      if (!L->isLoopInvariant(V) || !L->isLoopInvariant(Idx)) break;
      BasicBlock *Preheader = L->getLoopPreheader();
      if (!Preheader) break;

      // Ok, move up a level.
      Builder.SetInsertPoint(Preheader->getTerminator());
    }

    // Emit a GEP.
    return Builder.CreateGEP(Builder.getInt8Ty(), V, Idx, "uglygep");
  }

  {
    SCEVInsertPointGuard Guard(Builder, this);

    // Move the insertion point out of as many loops as we can.
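    // Both the base pointer and every GEP index must be invariant in a loop
    // before we hoist above it, and the walk stops at the first loop that
    // lacks a preheader to host the insertion point.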
    while (const Loop *L = SE.LI.getLoopFor(Builder.GetInsertBlock())) {
      if (!L->isLoopInvariant(V)) break;

      bool AnyIndexNotLoopInvariant = any_of(
          GepIndices, [L](Value *Op) { return !L->isLoopInvariant(Op); });

      if (AnyIndexNotLoopInvariant)
        break;

      BasicBlock *Preheader = L->getLoopPreheader();
      if (!Preheader) break;

      // Ok, move up a level.
      Builder.SetInsertPoint(Preheader->getTerminator());
    }

    // Insert a pretty getelementptr. Note that this GEP is not marked
    // inbounds, because ScalarEvolution may have changed the address
    // arithmetic to compute a value which is beyond the end of the allocated
    // object.
    Value *Casted = V;
    if (V->getType() != PTy)
      Casted = InsertNoopCastOfTo(Casted, PTy);
    Value *GEP = Builder.CreateGEP(PTy->getElementType(), Casted, GepIndices,
                                   "scevgep");
    Ops.push_back(SE.getUnknown(GEP));
  }

  return expand(SE.getAddExpr(Ops));
}

Value *SCEVExpander::expandAddToGEP(const SCEV *Op, PointerType *PTy, Type *Ty,
                                    Value *V) {
  const SCEV *const Ops[1] = {Op};
  return expandAddToGEP(Ops, Ops + 1, PTy, Ty, V);
}

/// PickMostRelevantLoop - Given two loops pick the one that's most relevant
/// for SCEV expansion. If they are nested, this is the most nested. If they
/// are neighboring, pick the later one.
static const Loop *PickMostRelevantLoop(const Loop *A, const Loop *B,
                                        DominatorTree &DT) {
  if (!A) return B;
  if (!B) return A;
  if (A->contains(B)) return B;
  if (B->contains(A)) return A;
  if (DT.dominates(A->getHeader(), B->getHeader())) return B;
  if (DT.dominates(B->getHeader(), A->getHeader())) return A;
  return A; // Arbitrarily break the tie.
}

/// getRelevantLoop - Get the most relevant loop associated with the given
/// expression, according to PickMostRelevantLoop.
const Loop *SCEVExpander::getRelevantLoop(const SCEV *S) {
  // Test whether we've already computed the most relevant loop for this SCEV.
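  // The insert below doubles as the lookup: if an entry for S already exists,
  // insert() leaves it untouched and hands back the cached loop.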
  auto Pair = RelevantLoops.insert(std::make_pair(S, nullptr));
  if (!Pair.second)
    return Pair.first->second;

  if (isa<SCEVConstant>(S))
    // A constant has no relevant loops.
    return nullptr;
  if (const SCEVUnknown *U = dyn_cast<SCEVUnknown>(S)) {
    if (const Instruction *I = dyn_cast<Instruction>(U->getValue()))
      return Pair.first->second = SE.LI.getLoopFor(I->getParent());
    // A non-instruction has no relevant loops.
    return nullptr;
  }
  if (const SCEVNAryExpr *N = dyn_cast<SCEVNAryExpr>(S)) {
    const Loop *L = nullptr;
    if (const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(S))
      L = AR->getLoop();
    for (const SCEV *Op : N->operands())
      L = PickMostRelevantLoop(L, getRelevantLoop(Op), SE.DT);
    return RelevantLoops[N] = L;
  }
  if (const SCEVCastExpr *C = dyn_cast<SCEVCastExpr>(S)) {
    const Loop *Result = getRelevantLoop(C->getOperand());
    return RelevantLoops[C] = Result;
  }
  if (const SCEVUDivExpr *D = dyn_cast<SCEVUDivExpr>(S)) {
    const Loop *Result = PickMostRelevantLoop(
        getRelevantLoop(D->getLHS()), getRelevantLoop(D->getRHS()), SE.DT);
    return RelevantLoops[D] = Result;
  }
  llvm_unreachable("Unexpected SCEV type!");
}

namespace {

/// LoopCompare - Compare loops by PickMostRelevantLoop.
class LoopCompare {
  DominatorTree &DT;
public:
  explicit LoopCompare(DominatorTree &dt) : DT(dt) {}

  bool operator()(std::pair<const Loop *, const SCEV *> LHS,
                  std::pair<const Loop *, const SCEV *> RHS) const {
    // Keep pointer operands sorted at the end.
    if (LHS.second->getType()->isPointerTy() !=
        RHS.second->getType()->isPointerTy())
      return LHS.second->getType()->isPointerTy();

    // Compare loops with PickMostRelevantLoop.
    if (LHS.first != RHS.first)
      return PickMostRelevantLoop(LHS.first, RHS.first, DT) != LHS.first;

    // If one operand is a non-constant negative and the other is not,
    // put the non-constant negative on the right so that a sub can
    // be used instead of a negate and add.
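    // For example, with operands A and (-1 * B) ordered this way, the add
    // expansion can emit 'sub A, B' instead of negating B and then adding.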
    if (LHS.second->isNonConstantNegative()) {
      if (!RHS.second->isNonConstantNegative())
        return false;
    } else if (RHS.second->isNonConstantNegative())
      return true;

    // Otherwise they are equivalent according to this comparison.
    return false;
  }
};

}

Value *SCEVExpander::visitAddExpr(const SCEVAddExpr *S) {
  Type *Ty = SE.getEffectiveSCEVType(S->getType());

  // Collect all the add operands in a loop, along with their associated loops.
  // Iterate in reverse so that constants are emitted last, all else equal, and
  // so that pointer operands are inserted first, which the code below relies on
  // to form more involved GEPs.
  SmallVector<std::pair<const Loop *, const SCEV *>, 8> OpsAndLoops;
  for (const SCEV *Op : reverse(S->operands()))
    OpsAndLoops.push_back(std::make_pair(getRelevantLoop(Op), Op));

  // Sort by loop. Use a stable sort so that constants follow non-constants and
  // pointer operands precede non-pointer operands.
  llvm::stable_sort(OpsAndLoops, LoopCompare(SE.DT));

  // Emit instructions to add all the operands. Hoist as much as possible
  // out of loops, and form meaningful getelementptrs where possible.
  Value *Sum = nullptr;
  for (auto I = OpsAndLoops.begin(), E = OpsAndLoops.end(); I != E;) {
    const Loop *CurLoop = I->first;
    const SCEV *Op = I->second;
    if (!Sum) {
      // This is the first operand. Just expand it.
      Sum = expand(Op);
      ++I;
      continue;
    }

    assert(!Op->getType()->isPointerTy() && "Only first op can be pointer");
    if (PointerType *PTy = dyn_cast<PointerType>(Sum->getType())) {
      // The running sum expression is a pointer. Try to form a getelementptr
      // at this level with that as the base.
      SmallVector<const SCEV *, 4> NewOps;
      for (; I != E && I->first == CurLoop; ++I) {
        // If the operand is a SCEVUnknown wrapping a non-instruction, peek
        // through it, to enable more of it to be folded into the GEP.
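        // A SCEVUnknown wrapping, say, a constant-expression GEP may re-derive
        // as an add of the base and an offset, which can then fold into the
        // GEP being built here (this is only a heuristic peek, not a full
        // re-expansion).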
        const SCEV *X = I->second;
        if (const SCEVUnknown *U = dyn_cast<SCEVUnknown>(X))
          if (!isa<Instruction>(U->getValue()))
            X = SE.getSCEV(U->getValue());
        NewOps.push_back(X);
      }
      Sum = expandAddToGEP(NewOps.begin(), NewOps.end(), PTy, Ty, Sum);
    } else if (Op->isNonConstantNegative()) {
      // Instead of doing a negate and add, just do a subtract.
      Value *W = expandCodeForImpl(SE.getNegativeSCEV(Op), Ty, false);
      Sum = InsertNoopCastOfTo(Sum, Ty);
      Sum = InsertBinop(Instruction::Sub, Sum, W, SCEV::FlagAnyWrap,
                        /*IsSafeToHoist*/ true);
      ++I;
    } else {
      // A simple add.
      Value *W = expandCodeForImpl(Op, Ty, false);
      Sum = InsertNoopCastOfTo(Sum, Ty);
      // Canonicalize a constant to the RHS.
      if (isa<Constant>(Sum)) std::swap(Sum, W);
      Sum = InsertBinop(Instruction::Add, Sum, W, S->getNoWrapFlags(),
                        /*IsSafeToHoist*/ true);
      ++I;
    }
  }

  return Sum;
}

Value *SCEVExpander::visitMulExpr(const SCEVMulExpr *S) {
  Type *Ty = SE.getEffectiveSCEVType(S->getType());

  // Collect all the mul operands in a loop, along with their associated loops.
  // Iterate in reverse so that constants are emitted last, all else equal.
  SmallVector<std::pair<const Loop *, const SCEV *>, 8> OpsAndLoops;
  for (const SCEV *Op : reverse(S->operands()))
    OpsAndLoops.push_back(std::make_pair(getRelevantLoop(Op), Op));

  // Sort by loop. Use a stable sort so that constants follow non-constants.
  llvm::stable_sort(OpsAndLoops, LoopCompare(SE.DT));

  // Emit instructions to mul all the operands. Hoist as much as possible
  // out of loops.
  Value *Prod = nullptr;
  auto I = OpsAndLoops.begin();

  // Expand the calculation of X pow N in the following manner:
  // Let N = P1 + P2 + ... + PK, where all P are powers of 2. Then:
  // X pow N = (X pow P1) * (X pow P2) * ... * (X pow PK).
  const auto ExpandOpBinPowN = [this, &I, &OpsAndLoops, &Ty]() {
    auto E = I;
    // Calculate how many times the same operand from the same loop is included
    // into this power.
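    // For example, three equal operands give Exponent == 3, which is then
    // expanded as X * (X*X) by the binary powering loop below.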
    uint64_t Exponent = 0;
    const uint64_t MaxExponent = UINT64_MAX >> 1;
    // No one sane will ever try to calculate such huge exponents, but if we
    // need this, we stop on UINT64_MAX / 2 because we need to exit the loop
    // below when the power of 2 exceeds our Exponent, and we want it to be
    // 1u << 31 at most to avoid dealing with unsigned overflow.
    while (E != OpsAndLoops.end() && *I == *E && Exponent != MaxExponent) {
      ++Exponent;
      ++E;
    }
    assert(Exponent > 0 && "Trying to calculate a zeroth exponent of operand?");

    // Calculate powers with exponents 1, 2, 4, 8, etc. and include those of
    // them that are needed into the result.
    Value *P = expandCodeForImpl(I->second, Ty, false);
    Value *Result = nullptr;
    if (Exponent & 1)
      Result = P;
    for (uint64_t BinExp = 2; BinExp <= Exponent; BinExp <<= 1) {
      P = InsertBinop(Instruction::Mul, P, P, SCEV::FlagAnyWrap,
                      /*IsSafeToHoist*/ true);
      if (Exponent & BinExp)
        Result = Result ? InsertBinop(Instruction::Mul, Result, P,
                                      SCEV::FlagAnyWrap,
                                      /*IsSafeToHoist*/ true)
                        : P;
    }

    I = E;
    assert(Result && "Nothing was expanded?");
    return Result;
  };

  while (I != OpsAndLoops.end()) {
    if (!Prod) {
      // This is the first operand. Just expand it.
      Prod = ExpandOpBinPowN();
    } else if (I->second->isAllOnesValue()) {
      // Instead of doing a multiply by negative one, just do a negate.
      Prod = InsertNoopCastOfTo(Prod, Ty);
      Prod = InsertBinop(Instruction::Sub, Constant::getNullValue(Ty), Prod,
                         SCEV::FlagAnyWrap, /*IsSafeToHoist*/ true);
      ++I;
    } else {
      // A simple mul.
      Value *W = ExpandOpBinPowN();
      Prod = InsertNoopCastOfTo(Prod, Ty);
      // Canonicalize a constant to the RHS.
      if (isa<Constant>(Prod)) std::swap(Prod, W);
      const APInt *RHS;
      if (match(W, m_Power2(RHS))) {
        // Canonicalize Prod*(1<<C) to Prod<<C.
        assert(!Ty->isVectorTy() && "vector types are not SCEVable");
        auto NWFlags = S->getNoWrapFlags();
        // Clear the nsw flag if the shl would produce a poison value.
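        // A shift by bitwidth-1 moves bit 0 into the sign bit, so 'shl nsw'
        // would be poison in cases (e.g. multiplying 1 by INT_MIN) where the
        // original multiply does not count as signed overflow.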
        if (RHS->logBase2() == RHS->getBitWidth() - 1)
          NWFlags = ScalarEvolution::clearFlags(NWFlags, SCEV::FlagNSW);
        Prod = InsertBinop(Instruction::Shl, Prod,
                           ConstantInt::get(Ty, RHS->logBase2()), NWFlags,
                           /*IsSafeToHoist*/ true);
      } else {
        Prod = InsertBinop(Instruction::Mul, Prod, W, S->getNoWrapFlags(),
                           /*IsSafeToHoist*/ true);
      }
    }
  }

  return Prod;
}

Value *SCEVExpander::visitUDivExpr(const SCEVUDivExpr *S) {
  Type *Ty = SE.getEffectiveSCEVType(S->getType());

  Value *LHS = expandCodeForImpl(S->getLHS(), Ty, false);
  if (const SCEVConstant *SC = dyn_cast<SCEVConstant>(S->getRHS())) {
    const APInt &RHS = SC->getAPInt();
    if (RHS.isPowerOf2())
      return InsertBinop(Instruction::LShr, LHS,
                         ConstantInt::get(Ty, RHS.logBase2()),
                         SCEV::FlagAnyWrap, /*IsSafeToHoist*/ true);
  }

  Value *RHS = expandCodeForImpl(S->getRHS(), Ty, false);
  return InsertBinop(Instruction::UDiv, LHS, RHS, SCEV::FlagAnyWrap,
                     /*IsSafeToHoist*/ SE.isKnownNonZero(S->getRHS()));
}

/// Determine if this is a well-behaved chain of instructions leading back to
/// the PHI. If so, it may be reused by expanded expressions.
bool SCEVExpander::isNormalAddRecExprPHI(PHINode *PN, Instruction *IncV,
                                         const Loop *L) {
  if (IncV->getNumOperands() == 0 || isa<PHINode>(IncV) ||
      (isa<CastInst>(IncV) && !isa<BitCastInst>(IncV)))
    return false;
  // If any of the operands don't dominate the insert position, bail.
  // Addrec operands are always loop-invariant, so this can only happen
  // if there are instructions which haven't been hoisted.
  if (L == IVIncInsertLoop) {
    for (Use &Op : llvm::drop_begin(IncV->operands()))
      if (Instruction *OInst = dyn_cast<Instruction>(Op))
        if (!SE.DT.dominates(OInst, IVIncInsertPos))
          return false;
  }
  // Advance to the next instruction.
9355ffd83dbSDimitry Andric IncV = dyn_cast<Instruction>(IncV->getOperand(0)); 9365ffd83dbSDimitry Andric if (!IncV) 9375ffd83dbSDimitry Andric return false; 9385ffd83dbSDimitry Andric 9395ffd83dbSDimitry Andric if (IncV->mayHaveSideEffects()) 9405ffd83dbSDimitry Andric return false; 9415ffd83dbSDimitry Andric 9425ffd83dbSDimitry Andric if (IncV == PN) 9435ffd83dbSDimitry Andric return true; 9445ffd83dbSDimitry Andric 9455ffd83dbSDimitry Andric return isNormalAddRecExprPHI(PN, IncV, L); 9465ffd83dbSDimitry Andric } 9475ffd83dbSDimitry Andric 9485ffd83dbSDimitry Andric /// getIVIncOperand returns an induction variable increment's induction 9495ffd83dbSDimitry Andric /// variable operand. 9505ffd83dbSDimitry Andric /// 9515ffd83dbSDimitry Andric /// If allowScale is set, any type of GEP is allowed as long as the nonIV 9525ffd83dbSDimitry Andric /// operands dominate InsertPos. 9535ffd83dbSDimitry Andric /// 9545ffd83dbSDimitry Andric /// If allowScale is not set, ensure that a GEP increment conforms to one of the 9555ffd83dbSDimitry Andric /// simple patterns generated by getAddRecExprPHILiterally and 9565ffd83dbSDimitry Andric /// expandAddtoGEP. If the pattern isn't recognized, return NULL. 9575ffd83dbSDimitry Andric Instruction *SCEVExpander::getIVIncOperand(Instruction *IncV, 9585ffd83dbSDimitry Andric Instruction *InsertPos, 9595ffd83dbSDimitry Andric bool allowScale) { 9605ffd83dbSDimitry Andric if (IncV == InsertPos) 9615ffd83dbSDimitry Andric return nullptr; 9625ffd83dbSDimitry Andric 9635ffd83dbSDimitry Andric switch (IncV->getOpcode()) { 9645ffd83dbSDimitry Andric default: 9655ffd83dbSDimitry Andric return nullptr; 9665ffd83dbSDimitry Andric // Check for a simple Add/Sub or GEP of a loop invariant step. 9675ffd83dbSDimitry Andric case Instruction::Add: 9685ffd83dbSDimitry Andric case Instruction::Sub: { 9695ffd83dbSDimitry Andric Instruction *OInst = dyn_cast<Instruction>(IncV->getOperand(1)); 9705ffd83dbSDimitry Andric if (!OInst || SE.DT.dominates(OInst, InsertPos)) 9715ffd83dbSDimitry Andric return dyn_cast<Instruction>(IncV->getOperand(0)); 9725ffd83dbSDimitry Andric return nullptr; 9735ffd83dbSDimitry Andric } 9745ffd83dbSDimitry Andric case Instruction::BitCast: 9755ffd83dbSDimitry Andric return dyn_cast<Instruction>(IncV->getOperand(0)); 9765ffd83dbSDimitry Andric case Instruction::GetElementPtr: 977fe6060f1SDimitry Andric for (Use &U : llvm::drop_begin(IncV->operands())) { 978fe6060f1SDimitry Andric if (isa<Constant>(U)) 9795ffd83dbSDimitry Andric continue; 980fe6060f1SDimitry Andric if (Instruction *OInst = dyn_cast<Instruction>(U)) { 9815ffd83dbSDimitry Andric if (!SE.DT.dominates(OInst, InsertPos)) 9825ffd83dbSDimitry Andric return nullptr; 9835ffd83dbSDimitry Andric } 9845ffd83dbSDimitry Andric if (allowScale) { 9855ffd83dbSDimitry Andric // allow any kind of GEP as long as it can be hoisted. 9865ffd83dbSDimitry Andric continue; 9875ffd83dbSDimitry Andric } 9885ffd83dbSDimitry Andric // This must be a pointer addition of constants (pretty), which is already 9895ffd83dbSDimitry Andric // handled, or some number of address-size elements (ugly). Ugly geps 9905ffd83dbSDimitry Andric // have 2 operands. i1* is used by the expander to represent an 9915ffd83dbSDimitry Andric // address-size element. 
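// (Such an "ugly" GEP is thus a single-index form like
// `getelementptr i8, i8* %p, i64 %n`, or the i1* equivalent, i.e. raw
// pointer arithmetic in address-sized units.)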
9925ffd83dbSDimitry Andric if (IncV->getNumOperands() != 2) 9935ffd83dbSDimitry Andric return nullptr; 9945ffd83dbSDimitry Andric unsigned AS = cast<PointerType>(IncV->getType())->getAddressSpace(); 9955ffd83dbSDimitry Andric if (IncV->getType() != Type::getInt1PtrTy(SE.getContext(), AS) 9965ffd83dbSDimitry Andric && IncV->getType() != Type::getInt8PtrTy(SE.getContext(), AS)) 9975ffd83dbSDimitry Andric return nullptr; 9985ffd83dbSDimitry Andric break; 9995ffd83dbSDimitry Andric } 10005ffd83dbSDimitry Andric return dyn_cast<Instruction>(IncV->getOperand(0)); 10015ffd83dbSDimitry Andric } 10025ffd83dbSDimitry Andric } 10035ffd83dbSDimitry Andric 10045ffd83dbSDimitry Andric /// If the insert point of the current builder or any of the builders on the 10055ffd83dbSDimitry Andric /// stack of saved builders has 'I' as its insert point, update it to point to 10065ffd83dbSDimitry Andric /// the instruction after 'I'. This is intended to be used when the instruction 10075ffd83dbSDimitry Andric /// 'I' is being moved. If this fixup is not done and 'I' is moved to a 10085ffd83dbSDimitry Andric /// different block, the inconsistent insert point (with a mismatched 10095ffd83dbSDimitry Andric /// Instruction and Block) can lead to an instruction being inserted in a block 10105ffd83dbSDimitry Andric /// other than its parent. 10115ffd83dbSDimitry Andric void SCEVExpander::fixupInsertPoints(Instruction *I) { 10125ffd83dbSDimitry Andric BasicBlock::iterator It(*I); 10135ffd83dbSDimitry Andric BasicBlock::iterator NewInsertPt = std::next(It); 10145ffd83dbSDimitry Andric if (Builder.GetInsertPoint() == It) 10155ffd83dbSDimitry Andric Builder.SetInsertPoint(&*NewInsertPt); 10165ffd83dbSDimitry Andric for (auto *InsertPtGuard : InsertPointGuards) 10175ffd83dbSDimitry Andric if (InsertPtGuard->GetInsertPoint() == It) 10185ffd83dbSDimitry Andric InsertPtGuard->SetInsertPoint(NewInsertPt); 10195ffd83dbSDimitry Andric } 10205ffd83dbSDimitry Andric 10215ffd83dbSDimitry Andric /// hoistStep - Attempt to hoist a simple IV increment above InsertPos to make 10225ffd83dbSDimitry Andric /// it available to other uses in this loop. Recursively hoist any operands, 10235ffd83dbSDimitry Andric /// until we reach a value that dominates InsertPos. 10245ffd83dbSDimitry Andric bool SCEVExpander::hoistIVInc(Instruction *IncV, Instruction *InsertPos) { 10255ffd83dbSDimitry Andric if (SE.DT.dominates(IncV, InsertPos)) 10265ffd83dbSDimitry Andric return true; 10275ffd83dbSDimitry Andric 10285ffd83dbSDimitry Andric // InsertPos must itself dominate IncV so that IncV's new position satisfies 10295ffd83dbSDimitry Andric // its existing users. 10305ffd83dbSDimitry Andric if (isa<PHINode>(InsertPos) || 10315ffd83dbSDimitry Andric !SE.DT.dominates(InsertPos->getParent(), IncV->getParent())) 10325ffd83dbSDimitry Andric return false; 10335ffd83dbSDimitry Andric 10345ffd83dbSDimitry Andric if (!SE.LI.movementPreservesLCSSAForm(IncV, InsertPos)) 10355ffd83dbSDimitry Andric return false; 10365ffd83dbSDimitry Andric 10375ffd83dbSDimitry Andric // Check that the chain of IV operands leading back to Phi can be hoisted. 10385ffd83dbSDimitry Andric SmallVector<Instruction*, 4> IVIncs; 10395ffd83dbSDimitry Andric for(;;) { 10405ffd83dbSDimitry Andric Instruction *Oper = getIVIncOperand(IncV, InsertPos, /*allowScale*/true); 10415ffd83dbSDimitry Andric if (!Oper) 10425ffd83dbSDimitry Andric return false; 10435ffd83dbSDimitry Andric // IncV is safe to hoist. 
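// (Increments are collected here walking from IncV back towards a value that
// already dominates InsertPos, and are moved in reverse order below so each
// hoisted instruction's operands are placed before it.)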
10445ffd83dbSDimitry Andric IVIncs.push_back(IncV); 10455ffd83dbSDimitry Andric IncV = Oper; 10465ffd83dbSDimitry Andric if (SE.DT.dominates(IncV, InsertPos)) 10475ffd83dbSDimitry Andric break; 10485ffd83dbSDimitry Andric } 10495ffd83dbSDimitry Andric for (auto I = IVIncs.rbegin(), E = IVIncs.rend(); I != E; ++I) { 10505ffd83dbSDimitry Andric fixupInsertPoints(*I); 10515ffd83dbSDimitry Andric (*I)->moveBefore(InsertPos); 10525ffd83dbSDimitry Andric } 10535ffd83dbSDimitry Andric return true; 10545ffd83dbSDimitry Andric } 10555ffd83dbSDimitry Andric 10565ffd83dbSDimitry Andric /// Determine if this cyclic phi is in a form that would have been generated by 10575ffd83dbSDimitry Andric /// LSR. We don't care if the phi was actually expanded in this pass, as long 10585ffd83dbSDimitry Andric /// as it is in a low-cost form, for example, no implied multiplication. This 10595ffd83dbSDimitry Andric /// should match any patterns generated by getAddRecExprPHILiterally and 10605ffd83dbSDimitry Andric /// expandAddtoGEP. 10615ffd83dbSDimitry Andric bool SCEVExpander::isExpandedAddRecExprPHI(PHINode *PN, Instruction *IncV, 10625ffd83dbSDimitry Andric const Loop *L) { 10635ffd83dbSDimitry Andric for(Instruction *IVOper = IncV; 10645ffd83dbSDimitry Andric (IVOper = getIVIncOperand(IVOper, L->getLoopPreheader()->getTerminator(), 10655ffd83dbSDimitry Andric /*allowScale=*/false));) { 10665ffd83dbSDimitry Andric if (IVOper == PN) 10675ffd83dbSDimitry Andric return true; 10685ffd83dbSDimitry Andric } 10695ffd83dbSDimitry Andric return false; 10705ffd83dbSDimitry Andric } 10715ffd83dbSDimitry Andric 10725ffd83dbSDimitry Andric /// expandIVInc - Expand an IV increment at Builder's current InsertPos. 10735ffd83dbSDimitry Andric /// Typically this is the LatchBlock terminator or IVIncInsertPos, but we may 10745ffd83dbSDimitry Andric /// need to materialize IV increments elsewhere to handle difficult situations. 10755ffd83dbSDimitry Andric Value *SCEVExpander::expandIVInc(PHINode *PN, Value *StepV, const Loop *L, 10765ffd83dbSDimitry Andric Type *ExpandTy, Type *IntTy, 10775ffd83dbSDimitry Andric bool useSubtract) { 10785ffd83dbSDimitry Andric Value *IncV; 10795ffd83dbSDimitry Andric // If the PHI is a pointer, use a GEP, otherwise use an add or sub. 10805ffd83dbSDimitry Andric if (ExpandTy->isPointerTy()) { 10815ffd83dbSDimitry Andric PointerType *GEPPtrTy = cast<PointerType>(ExpandTy); 10825ffd83dbSDimitry Andric // If the step isn't constant, don't use an implicitly scaled GEP, because 10835ffd83dbSDimitry Andric // that would require a multiply inside the loop. 10845ffd83dbSDimitry Andric if (!isa<ConstantInt>(StepV)) 10855ffd83dbSDimitry Andric GEPPtrTy = PointerType::get(Type::getInt1Ty(SE.getContext()), 10865ffd83dbSDimitry Andric GEPPtrTy->getAddressSpace()); 10875ffd83dbSDimitry Andric IncV = expandAddToGEP(SE.getSCEV(StepV), GEPPtrTy, IntTy, PN); 1088e8d8bef9SDimitry Andric if (IncV->getType() != PN->getType()) 10895ffd83dbSDimitry Andric IncV = Builder.CreateBitCast(IncV, PN->getType()); 10905ffd83dbSDimitry Andric } else { 10915ffd83dbSDimitry Andric IncV = useSubtract ? 
10925ffd83dbSDimitry Andric Builder.CreateSub(PN, StepV, Twine(IVName) + ".iv.next") : 10935ffd83dbSDimitry Andric Builder.CreateAdd(PN, StepV, Twine(IVName) + ".iv.next"); 10945ffd83dbSDimitry Andric } 10955ffd83dbSDimitry Andric return IncV; 10965ffd83dbSDimitry Andric } 10975ffd83dbSDimitry Andric 10985ffd83dbSDimitry Andric /// Check whether we can cheaply express the requested SCEV in terms of 10995ffd83dbSDimitry Andric /// the available PHI SCEV by truncation and/or inversion of the step. 11005ffd83dbSDimitry Andric static bool canBeCheaplyTransformed(ScalarEvolution &SE, 11015ffd83dbSDimitry Andric const SCEVAddRecExpr *Phi, 11025ffd83dbSDimitry Andric const SCEVAddRecExpr *Requested, 11035ffd83dbSDimitry Andric bool &InvertStep) { 1104fe6060f1SDimitry Andric // We can't transform to match a pointer PHI. 1105fe6060f1SDimitry Andric if (Phi->getType()->isPointerTy()) 1106fe6060f1SDimitry Andric return false; 1107fe6060f1SDimitry Andric 11085ffd83dbSDimitry Andric Type *PhiTy = SE.getEffectiveSCEVType(Phi->getType()); 11095ffd83dbSDimitry Andric Type *RequestedTy = SE.getEffectiveSCEVType(Requested->getType()); 11105ffd83dbSDimitry Andric 11115ffd83dbSDimitry Andric if (RequestedTy->getIntegerBitWidth() > PhiTy->getIntegerBitWidth()) 11125ffd83dbSDimitry Andric return false; 11135ffd83dbSDimitry Andric 11145ffd83dbSDimitry Andric // Try truncate it if necessary. 11155ffd83dbSDimitry Andric Phi = dyn_cast<SCEVAddRecExpr>(SE.getTruncateOrNoop(Phi, RequestedTy)); 11165ffd83dbSDimitry Andric if (!Phi) 11175ffd83dbSDimitry Andric return false; 11185ffd83dbSDimitry Andric 11195ffd83dbSDimitry Andric // Check whether truncation will help. 11205ffd83dbSDimitry Andric if (Phi == Requested) { 11215ffd83dbSDimitry Andric InvertStep = false; 11225ffd83dbSDimitry Andric return true; 11235ffd83dbSDimitry Andric } 11245ffd83dbSDimitry Andric 11255ffd83dbSDimitry Andric // Check whether inverting will help: {R,+,-1} == R - {0,+,1}. 
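// For example, with Requested = {R,+,-1} and a (possibly truncated) Phi of
// {0,+,1}: Requested->getStart() - Requested = R - {R,+,-1} = {0,+,1}, which
// equals Phi, so the requested value can later be rebuilt as R minus the phi.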
1126fe6060f1SDimitry Andric if (SE.getMinusSCEV(Requested->getStart(), Requested) == Phi) { 11275ffd83dbSDimitry Andric InvertStep = true; 11285ffd83dbSDimitry Andric return true; 11295ffd83dbSDimitry Andric } 11305ffd83dbSDimitry Andric 11315ffd83dbSDimitry Andric return false; 11325ffd83dbSDimitry Andric } 11335ffd83dbSDimitry Andric 11345ffd83dbSDimitry Andric static bool IsIncrementNSW(ScalarEvolution &SE, const SCEVAddRecExpr *AR) { 11355ffd83dbSDimitry Andric if (!isa<IntegerType>(AR->getType())) 11365ffd83dbSDimitry Andric return false; 11375ffd83dbSDimitry Andric 11385ffd83dbSDimitry Andric unsigned BitWidth = cast<IntegerType>(AR->getType())->getBitWidth(); 11395ffd83dbSDimitry Andric Type *WideTy = IntegerType::get(AR->getType()->getContext(), BitWidth * 2); 11405ffd83dbSDimitry Andric const SCEV *Step = AR->getStepRecurrence(SE); 11415ffd83dbSDimitry Andric const SCEV *OpAfterExtend = SE.getAddExpr(SE.getSignExtendExpr(Step, WideTy), 11425ffd83dbSDimitry Andric SE.getSignExtendExpr(AR, WideTy)); 11435ffd83dbSDimitry Andric const SCEV *ExtendAfterOp = 11445ffd83dbSDimitry Andric SE.getSignExtendExpr(SE.getAddExpr(AR, Step), WideTy); 11455ffd83dbSDimitry Andric return ExtendAfterOp == OpAfterExtend; 11465ffd83dbSDimitry Andric } 11475ffd83dbSDimitry Andric 11485ffd83dbSDimitry Andric static bool IsIncrementNUW(ScalarEvolution &SE, const SCEVAddRecExpr *AR) { 11495ffd83dbSDimitry Andric if (!isa<IntegerType>(AR->getType())) 11505ffd83dbSDimitry Andric return false; 11515ffd83dbSDimitry Andric 11525ffd83dbSDimitry Andric unsigned BitWidth = cast<IntegerType>(AR->getType())->getBitWidth(); 11535ffd83dbSDimitry Andric Type *WideTy = IntegerType::get(AR->getType()->getContext(), BitWidth * 2); 11545ffd83dbSDimitry Andric const SCEV *Step = AR->getStepRecurrence(SE); 11555ffd83dbSDimitry Andric const SCEV *OpAfterExtend = SE.getAddExpr(SE.getZeroExtendExpr(Step, WideTy), 11565ffd83dbSDimitry Andric SE.getZeroExtendExpr(AR, WideTy)); 11575ffd83dbSDimitry Andric const SCEV *ExtendAfterOp = 11585ffd83dbSDimitry Andric SE.getZeroExtendExpr(SE.getAddExpr(AR, Step), WideTy); 11595ffd83dbSDimitry Andric return ExtendAfterOp == OpAfterExtend; 11605ffd83dbSDimitry Andric } 11615ffd83dbSDimitry Andric 11625ffd83dbSDimitry Andric /// getAddRecExprPHILiterally - Helper for expandAddRecExprLiterally. Expand 11635ffd83dbSDimitry Andric /// the base addrec, which is the addrec without any non-loop-dominating 11645ffd83dbSDimitry Andric /// values, and return the PHI. 11655ffd83dbSDimitry Andric PHINode * 11665ffd83dbSDimitry Andric SCEVExpander::getAddRecExprPHILiterally(const SCEVAddRecExpr *Normalized, 11675ffd83dbSDimitry Andric const Loop *L, 11685ffd83dbSDimitry Andric Type *ExpandTy, 11695ffd83dbSDimitry Andric Type *IntTy, 11705ffd83dbSDimitry Andric Type *&TruncTy, 11715ffd83dbSDimitry Andric bool &InvertStep) { 11725ffd83dbSDimitry Andric assert((!IVIncInsertLoop||IVIncInsertPos) && "Uninitialized insert position"); 11735ffd83dbSDimitry Andric 11745ffd83dbSDimitry Andric // Reuse a previously-inserted PHI, if present. 
11755ffd83dbSDimitry Andric BasicBlock *LatchBlock = L->getLoopLatch(); 11765ffd83dbSDimitry Andric if (LatchBlock) { 11775ffd83dbSDimitry Andric PHINode *AddRecPhiMatch = nullptr; 11785ffd83dbSDimitry Andric Instruction *IncV = nullptr; 11795ffd83dbSDimitry Andric TruncTy = nullptr; 11805ffd83dbSDimitry Andric InvertStep = false; 11815ffd83dbSDimitry Andric 11825ffd83dbSDimitry Andric // Only try partially matching scevs that need truncation and/or 11835ffd83dbSDimitry Andric // step-inversion if we know this loop is outside the current loop. 11845ffd83dbSDimitry Andric bool TryNonMatchingSCEV = 11855ffd83dbSDimitry Andric IVIncInsertLoop && 11865ffd83dbSDimitry Andric SE.DT.properlyDominates(LatchBlock, IVIncInsertLoop->getHeader()); 11875ffd83dbSDimitry Andric 11885ffd83dbSDimitry Andric for (PHINode &PN : L->getHeader()->phis()) { 11895ffd83dbSDimitry Andric if (!SE.isSCEVable(PN.getType())) 11905ffd83dbSDimitry Andric continue; 11915ffd83dbSDimitry Andric 1192e8d8bef9SDimitry Andric // We should not look for an incomplete PHI. Getting SCEV for an incomplete 1193e8d8bef9SDimitry Andric // PHI has no meaning at all. 1194e8d8bef9SDimitry Andric if (!PN.isComplete()) { 1195fe6060f1SDimitry Andric SCEV_DEBUG_WITH_TYPE( 1196e8d8bef9SDimitry Andric DebugType, dbgs() << "One incomplete PHI is found: " << PN << "\n"); 1197e8d8bef9SDimitry Andric continue; 1198e8d8bef9SDimitry Andric } 1199e8d8bef9SDimitry Andric 12005ffd83dbSDimitry Andric const SCEVAddRecExpr *PhiSCEV = dyn_cast<SCEVAddRecExpr>(SE.getSCEV(&PN)); 12015ffd83dbSDimitry Andric if (!PhiSCEV) 12025ffd83dbSDimitry Andric continue; 12035ffd83dbSDimitry Andric 12045ffd83dbSDimitry Andric bool IsMatchingSCEV = PhiSCEV == Normalized; 12055ffd83dbSDimitry Andric // We only handle truncation and inversion of phi recurrences for the 12065ffd83dbSDimitry Andric // expanded expression if the expanded expression's loop dominates the 12075ffd83dbSDimitry Andric // loop we insert to. Check now, so we can bail out early. 12085ffd83dbSDimitry Andric if (!IsMatchingSCEV && !TryNonMatchingSCEV) 12095ffd83dbSDimitry Andric continue; 12105ffd83dbSDimitry Andric 12115ffd83dbSDimitry Andric // TODO: this possibly can be reworked to avoid this cast at all. 12125ffd83dbSDimitry Andric Instruction *TempIncV = 12135ffd83dbSDimitry Andric dyn_cast<Instruction>(PN.getIncomingValueForBlock(LatchBlock)); 12145ffd83dbSDimitry Andric if (!TempIncV) 12155ffd83dbSDimitry Andric continue; 12165ffd83dbSDimitry Andric 12175ffd83dbSDimitry Andric // Check whether we can reuse this PHI node. 12185ffd83dbSDimitry Andric if (LSRMode) { 12195ffd83dbSDimitry Andric if (!isExpandedAddRecExprPHI(&PN, TempIncV, L)) 12205ffd83dbSDimitry Andric continue; 12215ffd83dbSDimitry Andric } else { 12225ffd83dbSDimitry Andric if (!isNormalAddRecExprPHI(&PN, TempIncV, L)) 12235ffd83dbSDimitry Andric continue; 12245ffd83dbSDimitry Andric } 12255ffd83dbSDimitry Andric 12265ffd83dbSDimitry Andric // Stop if we have found an exact match SCEV. 12275ffd83dbSDimitry Andric if (IsMatchingSCEV) { 12285ffd83dbSDimitry Andric IncV = TempIncV; 12295ffd83dbSDimitry Andric TruncTy = nullptr; 12305ffd83dbSDimitry Andric InvertStep = false; 12315ffd83dbSDimitry Andric AddRecPhiMatch = &PN; 12325ffd83dbSDimitry Andric break; 12335ffd83dbSDimitry Andric } 12345ffd83dbSDimitry Andric 12355ffd83dbSDimitry Andric // Try whether the phi can be translated into the requested form 12365ffd83dbSDimitry Andric // (truncated and/or offset by a constant).
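// For instance, an existing i64 phi for {S,+,1} can serve a requested i32
// {trunc(S),+,1} after a truncate, and a phi for {0,+,1} can serve a
// requested {R,+,-1} by later subtracting the phi from R (the InvertStep
// case).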
12375ffd83dbSDimitry Andric if ((!TruncTy || InvertStep) && 12385ffd83dbSDimitry Andric canBeCheaplyTransformed(SE, PhiSCEV, Normalized, InvertStep)) { 12395ffd83dbSDimitry Andric // Record the phi node. But don't stop; we might find an exact match 12405ffd83dbSDimitry Andric // later. 12415ffd83dbSDimitry Andric AddRecPhiMatch = &PN; 12425ffd83dbSDimitry Andric IncV = TempIncV; 12435ffd83dbSDimitry Andric TruncTy = SE.getEffectiveSCEVType(Normalized->getType()); 12445ffd83dbSDimitry Andric } 12455ffd83dbSDimitry Andric } 12465ffd83dbSDimitry Andric 12475ffd83dbSDimitry Andric if (AddRecPhiMatch) { 12485ffd83dbSDimitry Andric // Ok, the add recurrence looks usable. 12495ffd83dbSDimitry Andric // Remember this PHI, even in post-inc mode. 12505ffd83dbSDimitry Andric InsertedValues.insert(AddRecPhiMatch); 12515ffd83dbSDimitry Andric // Remember the increment. 12525ffd83dbSDimitry Andric rememberInstruction(IncV); 1253e8d8bef9SDimitry Andric // Those values were not actually inserted but re-used. 1254e8d8bef9SDimitry Andric ReusedValues.insert(AddRecPhiMatch); 1255e8d8bef9SDimitry Andric ReusedValues.insert(IncV); 12565ffd83dbSDimitry Andric return AddRecPhiMatch; 12575ffd83dbSDimitry Andric } 12585ffd83dbSDimitry Andric } 12595ffd83dbSDimitry Andric 12605ffd83dbSDimitry Andric // Save the original insertion point so we can restore it when we're done. 12615ffd83dbSDimitry Andric SCEVInsertPointGuard Guard(Builder, this); 12625ffd83dbSDimitry Andric 12635ffd83dbSDimitry Andric // Another AddRec may need to be recursively expanded below. For example, if 12645ffd83dbSDimitry Andric // this AddRec is quadratic, the StepV may itself be an AddRec in this 12655ffd83dbSDimitry Andric // loop. Remove this loop from the PostIncLoops set before expanding such 12665ffd83dbSDimitry Andric // AddRecs. Otherwise, we cannot find a valid position for the step 12675ffd83dbSDimitry Andric // (i.e. StepV can never dominate its loop header). Ideally, we could do 12685ffd83dbSDimitry Andric // SavedIncLoops.swap(PostIncLoops), but we generally have a single element, 12695ffd83dbSDimitry Andric // so it's not worth implementing SmallPtrSet::swap. 12705ffd83dbSDimitry Andric PostIncLoopSet SavedPostIncLoops = PostIncLoops; 12715ffd83dbSDimitry Andric PostIncLoops.clear(); 12725ffd83dbSDimitry Andric 12735ffd83dbSDimitry Andric // Expand code for the start value into the loop preheader. 12745ffd83dbSDimitry Andric assert(L->getLoopPreheader() && 12755ffd83dbSDimitry Andric "Can't expand add recurrences without a loop preheader!"); 1276e8d8bef9SDimitry Andric Value *StartV = 1277e8d8bef9SDimitry Andric expandCodeForImpl(Normalized->getStart(), ExpandTy, 1278e8d8bef9SDimitry Andric L->getLoopPreheader()->getTerminator(), false); 12795ffd83dbSDimitry Andric 12805ffd83dbSDimitry Andric // StartV must have been inserted into L's preheader to dominate the new 12815ffd83dbSDimitry Andric // phi. 12825ffd83dbSDimitry Andric assert(!isa<Instruction>(StartV) || 12835ffd83dbSDimitry Andric SE.DT.properlyDominates(cast<Instruction>(StartV)->getParent(), 12845ffd83dbSDimitry Andric L->getHeader())); 12855ffd83dbSDimitry Andric 12865ffd83dbSDimitry Andric // Expand code for the step value. Do this before creating the PHI so that PHI 12875ffd83dbSDimitry Andric // reuse code doesn't see an incomplete PHI.
12885ffd83dbSDimitry Andric const SCEV *Step = Normalized->getStepRecurrence(SE); 12895ffd83dbSDimitry Andric // If the stride is negative, insert a sub instead of an add for the increment 12905ffd83dbSDimitry Andric // (unless it's a constant, because subtracts of constants are canonicalized 12915ffd83dbSDimitry Andric // to adds). 12925ffd83dbSDimitry Andric bool useSubtract = !ExpandTy->isPointerTy() && Step->isNonConstantNegative(); 12935ffd83dbSDimitry Andric if (useSubtract) 12945ffd83dbSDimitry Andric Step = SE.getNegativeSCEV(Step); 12955ffd83dbSDimitry Andric // Expand the step somewhere that dominates the loop header. 1296e8d8bef9SDimitry Andric Value *StepV = expandCodeForImpl( 1297e8d8bef9SDimitry Andric Step, IntTy, &*L->getHeader()->getFirstInsertionPt(), false); 12985ffd83dbSDimitry Andric 12995ffd83dbSDimitry Andric // The no-wrap behavior proved by IsIncrement(NUW|NSW) is only applicable if 13005ffd83dbSDimitry Andric // we actually do emit an addition. It does not apply if we emit a 13015ffd83dbSDimitry Andric // subtraction. 13025ffd83dbSDimitry Andric bool IncrementIsNUW = !useSubtract && IsIncrementNUW(SE, Normalized); 13035ffd83dbSDimitry Andric bool IncrementIsNSW = !useSubtract && IsIncrementNSW(SE, Normalized); 13045ffd83dbSDimitry Andric 13055ffd83dbSDimitry Andric // Create the PHI. 13065ffd83dbSDimitry Andric BasicBlock *Header = L->getHeader(); 13075ffd83dbSDimitry Andric Builder.SetInsertPoint(Header, Header->begin()); 13085ffd83dbSDimitry Andric pred_iterator HPB = pred_begin(Header), HPE = pred_end(Header); 13095ffd83dbSDimitry Andric PHINode *PN = Builder.CreatePHI(ExpandTy, std::distance(HPB, HPE), 13105ffd83dbSDimitry Andric Twine(IVName) + ".iv"); 13115ffd83dbSDimitry Andric 13125ffd83dbSDimitry Andric // Create the step instructions and populate the PHI. 13135ffd83dbSDimitry Andric for (pred_iterator HPI = HPB; HPI != HPE; ++HPI) { 13145ffd83dbSDimitry Andric BasicBlock *Pred = *HPI; 13155ffd83dbSDimitry Andric 13165ffd83dbSDimitry Andric // Add a start value. 13175ffd83dbSDimitry Andric if (!L->contains(Pred)) { 13185ffd83dbSDimitry Andric PN->addIncoming(StartV, Pred); 13195ffd83dbSDimitry Andric continue; 13205ffd83dbSDimitry Andric } 13215ffd83dbSDimitry Andric 13225ffd83dbSDimitry Andric // Create a step value and add it to the PHI. 13235ffd83dbSDimitry Andric // If IVIncInsertLoop is non-null and equal to the addrec's loop, insert the 13245ffd83dbSDimitry Andric // instructions at IVIncInsertPos. 13255ffd83dbSDimitry Andric Instruction *InsertPos = L == IVIncInsertLoop ? 13265ffd83dbSDimitry Andric IVIncInsertPos : Pred->getTerminator(); 13275ffd83dbSDimitry Andric Builder.SetInsertPoint(InsertPos); 13285ffd83dbSDimitry Andric Value *IncV = expandIVInc(PN, StepV, L, ExpandTy, IntTy, useSubtract); 13295ffd83dbSDimitry Andric 13305ffd83dbSDimitry Andric if (isa<OverflowingBinaryOperator>(IncV)) { 13315ffd83dbSDimitry Andric if (IncrementIsNUW) 13325ffd83dbSDimitry Andric cast<BinaryOperator>(IncV)->setHasNoUnsignedWrap(); 13335ffd83dbSDimitry Andric if (IncrementIsNSW) 13345ffd83dbSDimitry Andric cast<BinaryOperator>(IncV)->setHasNoSignedWrap(); 13355ffd83dbSDimitry Andric } 13365ffd83dbSDimitry Andric PN->addIncoming(IncV, Pred); 13375ffd83dbSDimitry Andric } 13385ffd83dbSDimitry Andric 13395ffd83dbSDimitry Andric // After expanding subexpressions, restore the PostIncLoops set so the caller 13405ffd83dbSDimitry Andric // can ensure that IVIncrement dominates the current uses. 
13415ffd83dbSDimitry Andric PostIncLoops = SavedPostIncLoops; 13425ffd83dbSDimitry Andric 1343fe6060f1SDimitry Andric // Remember this PHI, even in post-inc mode. LSR SCEV-based salvaging is most 1344fe6060f1SDimitry Andric // effective when we are able to use an IV inserted here, so record it. 13455ffd83dbSDimitry Andric InsertedValues.insert(PN); 1346fe6060f1SDimitry Andric InsertedIVs.push_back(PN); 13475ffd83dbSDimitry Andric return PN; 13485ffd83dbSDimitry Andric } 13495ffd83dbSDimitry Andric 13505ffd83dbSDimitry Andric Value *SCEVExpander::expandAddRecExprLiterally(const SCEVAddRecExpr *S) { 13515ffd83dbSDimitry Andric Type *STy = S->getType(); 13525ffd83dbSDimitry Andric Type *IntTy = SE.getEffectiveSCEVType(STy); 13535ffd83dbSDimitry Andric const Loop *L = S->getLoop(); 13545ffd83dbSDimitry Andric 13555ffd83dbSDimitry Andric // Determine a normalized form of this expression, which is the expression 13565ffd83dbSDimitry Andric // before any post-inc adjustment is made. 13575ffd83dbSDimitry Andric const SCEVAddRecExpr *Normalized = S; 13585ffd83dbSDimitry Andric if (PostIncLoops.count(L)) { 13595ffd83dbSDimitry Andric PostIncLoopSet Loops; 13605ffd83dbSDimitry Andric Loops.insert(L); 13615ffd83dbSDimitry Andric Normalized = cast<SCEVAddRecExpr>(normalizeForPostIncUse(S, Loops, SE)); 13625ffd83dbSDimitry Andric } 13635ffd83dbSDimitry Andric 13645ffd83dbSDimitry Andric // Strip off any non-loop-dominating component from the addrec start. 13655ffd83dbSDimitry Andric const SCEV *Start = Normalized->getStart(); 13665ffd83dbSDimitry Andric const SCEV *PostLoopOffset = nullptr; 13675ffd83dbSDimitry Andric if (!SE.properlyDominates(Start, L->getHeader())) { 13685ffd83dbSDimitry Andric PostLoopOffset = Start; 13695ffd83dbSDimitry Andric Start = SE.getConstant(Normalized->getType(), 0); 13705ffd83dbSDimitry Andric Normalized = cast<SCEVAddRecExpr>( 13715ffd83dbSDimitry Andric SE.getAddRecExpr(Start, Normalized->getStepRecurrence(SE), 13725ffd83dbSDimitry Andric Normalized->getLoop(), 13735ffd83dbSDimitry Andric Normalized->getNoWrapFlags(SCEV::FlagNW))); 13745ffd83dbSDimitry Andric } 13755ffd83dbSDimitry Andric 13765ffd83dbSDimitry Andric // Strip off any non-loop-dominating component from the addrec step. 13775ffd83dbSDimitry Andric const SCEV *Step = Normalized->getStepRecurrence(SE); 13785ffd83dbSDimitry Andric const SCEV *PostLoopScale = nullptr; 13795ffd83dbSDimitry Andric if (!SE.dominates(Step, L->getHeader())) { 13805ffd83dbSDimitry Andric PostLoopScale = Step; 13815ffd83dbSDimitry Andric Step = SE.getConstant(Normalized->getType(), 1); 13825ffd83dbSDimitry Andric if (!Start->isZero()) { 13835ffd83dbSDimitry Andric // The normalization below assumes that Start is constant zero, so if 13845ffd83dbSDimitry Andric // it isn't re-associate Start to PostLoopOffset. 13855ffd83dbSDimitry Andric assert(!PostLoopOffset && "Start not-null but PostLoopOffset set?"); 13865ffd83dbSDimitry Andric PostLoopOffset = Start; 13875ffd83dbSDimitry Andric Start = SE.getConstant(Normalized->getType(), 0); 13885ffd83dbSDimitry Andric } 13895ffd83dbSDimitry Andric Normalized = 13905ffd83dbSDimitry Andric cast<SCEVAddRecExpr>(SE.getAddRecExpr( 13915ffd83dbSDimitry Andric Start, Step, Normalized->getLoop(), 13925ffd83dbSDimitry Andric Normalized->getNoWrapFlags(SCEV::FlagNW))); 13935ffd83dbSDimitry Andric } 13945ffd83dbSDimitry Andric 13955ffd83dbSDimitry Andric // Expand the core addrec. 
If we need post-loop scaling, force it to 13965ffd83dbSDimitry Andric // expand to an integer type to avoid the need for additional casting. 13975ffd83dbSDimitry Andric Type *ExpandTy = PostLoopScale ? IntTy : STy; 13985ffd83dbSDimitry Andric // We can't use a pointer type for the addrec if the pointer type is 13995ffd83dbSDimitry Andric // non-integral. 14005ffd83dbSDimitry Andric Type *AddRecPHIExpandTy = 14015ffd83dbSDimitry Andric DL.isNonIntegralPointerType(STy) ? Normalized->getType() : ExpandTy; 14025ffd83dbSDimitry Andric 14035ffd83dbSDimitry Andric // In some cases, we decide to reuse an existing phi node but need to truncate 14045ffd83dbSDimitry Andric // it and/or invert the step. 14055ffd83dbSDimitry Andric Type *TruncTy = nullptr; 14065ffd83dbSDimitry Andric bool InvertStep = false; 14075ffd83dbSDimitry Andric PHINode *PN = getAddRecExprPHILiterally(Normalized, L, AddRecPHIExpandTy, 14085ffd83dbSDimitry Andric IntTy, TruncTy, InvertStep); 14095ffd83dbSDimitry Andric 14105ffd83dbSDimitry Andric // Accommodate post-inc mode, if necessary. 14115ffd83dbSDimitry Andric Value *Result; 14125ffd83dbSDimitry Andric if (!PostIncLoops.count(L)) 14135ffd83dbSDimitry Andric Result = PN; 14145ffd83dbSDimitry Andric else { 14155ffd83dbSDimitry Andric // In PostInc mode, use the post-incremented value. 14165ffd83dbSDimitry Andric BasicBlock *LatchBlock = L->getLoopLatch(); 14175ffd83dbSDimitry Andric assert(LatchBlock && "PostInc mode requires a unique loop latch!"); 14185ffd83dbSDimitry Andric Result = PN->getIncomingValueForBlock(LatchBlock); 14195ffd83dbSDimitry Andric 1420e8d8bef9SDimitry Andric // We might be introducing a new use of the post-inc IV that is not poison 1421e8d8bef9SDimitry Andric // safe, in which case we should drop poison generating flags. Only keep 1422e8d8bef9SDimitry Andric // those flags for which SCEV has proven that they always hold. 1423e8d8bef9SDimitry Andric if (isa<OverflowingBinaryOperator>(Result)) { 1424e8d8bef9SDimitry Andric auto *I = cast<Instruction>(Result); 1425e8d8bef9SDimitry Andric if (!S->hasNoUnsignedWrap()) 1426e8d8bef9SDimitry Andric I->setHasNoUnsignedWrap(false); 1427e8d8bef9SDimitry Andric if (!S->hasNoSignedWrap()) 1428e8d8bef9SDimitry Andric I->setHasNoSignedWrap(false); 1429e8d8bef9SDimitry Andric } 1430e8d8bef9SDimitry Andric 14315ffd83dbSDimitry Andric // For an expansion to use the postinc form, the client must call 14325ffd83dbSDimitry Andric // expandCodeFor with an InsertPoint that is either outside the PostIncLoop 14335ffd83dbSDimitry Andric // or dominated by IVIncInsertPos. 14345ffd83dbSDimitry Andric if (isa<Instruction>(Result) && 14355ffd83dbSDimitry Andric !SE.DT.dominates(cast<Instruction>(Result), 14365ffd83dbSDimitry Andric &*Builder.GetInsertPoint())) { 14375ffd83dbSDimitry Andric // The induction variable's postinc expansion does not dominate this use. 14385ffd83dbSDimitry Andric // IVUsers tries to prevent this case, so it is rare. However, it can 14395ffd83dbSDimitry Andric // happen when an IVUser outside the loop is not dominated by the latch 14405ffd83dbSDimitry Andric // block. Adjusting IVIncInsertPos before expansion begins cannot handle 14415ffd83dbSDimitry Andric // all cases. Consider a phi outside whose operand is replaced during 14425ffd83dbSDimitry Andric // expansion with the value of the postinc user. Without fundamentally 14435ffd83dbSDimitry Andric // changing the way postinc users are tracked, the only remedy is 14445ffd83dbSDimitry Andric // inserting an extra IV increment. 
StepV might fold into PostLoopOffset, 14455ffd83dbSDimitry Andric // but hopefully expandCodeFor handles that. 14465ffd83dbSDimitry Andric bool useSubtract = 14475ffd83dbSDimitry Andric !ExpandTy->isPointerTy() && Step->isNonConstantNegative(); 14485ffd83dbSDimitry Andric if (useSubtract) 14495ffd83dbSDimitry Andric Step = SE.getNegativeSCEV(Step); 14505ffd83dbSDimitry Andric Value *StepV; 14515ffd83dbSDimitry Andric { 14525ffd83dbSDimitry Andric // Expand the step somewhere that dominates the loop header. 14535ffd83dbSDimitry Andric SCEVInsertPointGuard Guard(Builder, this); 1454e8d8bef9SDimitry Andric StepV = expandCodeForImpl( 1455e8d8bef9SDimitry Andric Step, IntTy, &*L->getHeader()->getFirstInsertionPt(), false); 14565ffd83dbSDimitry Andric } 14575ffd83dbSDimitry Andric Result = expandIVInc(PN, StepV, L, ExpandTy, IntTy, useSubtract); 14585ffd83dbSDimitry Andric } 14595ffd83dbSDimitry Andric } 14605ffd83dbSDimitry Andric 14615ffd83dbSDimitry Andric // We have decided to reuse an induction variable of a dominating loop. Apply 14625ffd83dbSDimitry Andric // truncation and/or inversion of the step. 14635ffd83dbSDimitry Andric if (TruncTy) { 14645ffd83dbSDimitry Andric Type *ResTy = Result->getType(); 14655ffd83dbSDimitry Andric // Normalize the result type. 14665ffd83dbSDimitry Andric if (ResTy != SE.getEffectiveSCEVType(ResTy)) 14675ffd83dbSDimitry Andric Result = InsertNoopCastOfTo(Result, SE.getEffectiveSCEVType(ResTy)); 14685ffd83dbSDimitry Andric // Truncate the result. 1469e8d8bef9SDimitry Andric if (TruncTy != Result->getType()) 14705ffd83dbSDimitry Andric Result = Builder.CreateTrunc(Result, TruncTy); 1471e8d8bef9SDimitry Andric 14725ffd83dbSDimitry Andric // Invert the result. 1473e8d8bef9SDimitry Andric if (InvertStep) 1474e8d8bef9SDimitry Andric Result = Builder.CreateSub( 1475e8d8bef9SDimitry Andric expandCodeForImpl(Normalized->getStart(), TruncTy, false), Result); 14765ffd83dbSDimitry Andric } 14775ffd83dbSDimitry Andric 14785ffd83dbSDimitry Andric // Re-apply any non-loop-dominating scale. 14795ffd83dbSDimitry Andric if (PostLoopScale) { 14805ffd83dbSDimitry Andric assert(S->isAffine() && "Can't linearly scale non-affine recurrences."); 14815ffd83dbSDimitry Andric Result = InsertNoopCastOfTo(Result, IntTy); 14825ffd83dbSDimitry Andric Result = Builder.CreateMul(Result, 1483e8d8bef9SDimitry Andric expandCodeForImpl(PostLoopScale, IntTy, false)); 14845ffd83dbSDimitry Andric } 14855ffd83dbSDimitry Andric 14865ffd83dbSDimitry Andric // Re-apply any non-loop-dominating offset. 
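// For a pointer-typed expansion the offset is folded back in with a GEP;
// otherwise it is applied as a plain integer add of the expanded offset.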
14875ffd83dbSDimitry Andric if (PostLoopOffset) { 14885ffd83dbSDimitry Andric if (PointerType *PTy = dyn_cast<PointerType>(ExpandTy)) { 14895ffd83dbSDimitry Andric if (Result->getType()->isIntegerTy()) { 1490e8d8bef9SDimitry Andric Value *Base = expandCodeForImpl(PostLoopOffset, ExpandTy, false); 14915ffd83dbSDimitry Andric Result = expandAddToGEP(SE.getUnknown(Result), PTy, IntTy, Base); 14925ffd83dbSDimitry Andric } else { 14935ffd83dbSDimitry Andric Result = expandAddToGEP(PostLoopOffset, PTy, IntTy, Result); 14945ffd83dbSDimitry Andric } 14955ffd83dbSDimitry Andric } else { 14965ffd83dbSDimitry Andric Result = InsertNoopCastOfTo(Result, IntTy); 1497e8d8bef9SDimitry Andric Result = Builder.CreateAdd( 1498e8d8bef9SDimitry Andric Result, expandCodeForImpl(PostLoopOffset, IntTy, false)); 14995ffd83dbSDimitry Andric } 15005ffd83dbSDimitry Andric } 15015ffd83dbSDimitry Andric 15025ffd83dbSDimitry Andric return Result; 15035ffd83dbSDimitry Andric } 15045ffd83dbSDimitry Andric 15055ffd83dbSDimitry Andric Value *SCEVExpander::visitAddRecExpr(const SCEVAddRecExpr *S) { 15065ffd83dbSDimitry Andric // In canonical mode we compute the addrec as an expression of a canonical IV 15075ffd83dbSDimitry Andric // using evaluateAtIteration and expand the resulting SCEV expression. This 15085ffd83dbSDimitry Andric // way we avoid introducing new IVs to carry on the computation of the addrec 15095ffd83dbSDimitry Andric // throughout the loop. 15105ffd83dbSDimitry Andric // 15115ffd83dbSDimitry Andric // For nested addrecs evaluateAtIteration might need a canonical IV of a 15125ffd83dbSDimitry Andric // type wider than the addrec itself. Emitting a canonical IV of the 15135ffd83dbSDimitry Andric // proper type might produce non-legal types, for example expanding an i64 15145ffd83dbSDimitry Andric // {0,+,2,+,1} addrec would need an i65 canonical IV. To avoid this just fall 15155ffd83dbSDimitry Andric // back to non-canonical mode for nested addrecs. 15165ffd83dbSDimitry Andric if (!CanonicalMode || (S->getNumOperands() > 2)) 15175ffd83dbSDimitry Andric return expandAddRecExprLiterally(S); 15185ffd83dbSDimitry Andric 15195ffd83dbSDimitry Andric Type *Ty = SE.getEffectiveSCEVType(S->getType()); 15205ffd83dbSDimitry Andric const Loop *L = S->getLoop(); 15215ffd83dbSDimitry Andric 15225ffd83dbSDimitry Andric // First check for an existing canonical IV in a suitable type. 15235ffd83dbSDimitry Andric PHINode *CanonicalIV = nullptr; 15245ffd83dbSDimitry Andric if (PHINode *PN = L->getCanonicalInductionVariable()) 15255ffd83dbSDimitry Andric if (SE.getTypeSizeInBits(PN->getType()) >= SE.getTypeSizeInBits(Ty)) 15265ffd83dbSDimitry Andric CanonicalIV = PN; 15275ffd83dbSDimitry Andric 15285ffd83dbSDimitry Andric // Rewrite an AddRec in terms of the canonical induction variable, if 15295ffd83dbSDimitry Andric // its type is narrower.
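// For example, an i32 {0,+,3} in a loop that already has an i64 canonical IV
// is any-extended to an i64 addrec, expanded in terms of that IV, and then
// truncated back to i32 right after the expanded value's definition.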
15305ffd83dbSDimitry Andric if (CanonicalIV && 1531fe6060f1SDimitry Andric SE.getTypeSizeInBits(CanonicalIV->getType()) > SE.getTypeSizeInBits(Ty) && 1532fe6060f1SDimitry Andric !S->getType()->isPointerTy()) { 15335ffd83dbSDimitry Andric SmallVector<const SCEV *, 4> NewOps(S->getNumOperands()); 15345ffd83dbSDimitry Andric for (unsigned i = 0, e = S->getNumOperands(); i != e; ++i) 15355ffd83dbSDimitry Andric NewOps[i] = SE.getAnyExtendExpr(S->op_begin()[i], CanonicalIV->getType()); 15365ffd83dbSDimitry Andric Value *V = expand(SE.getAddRecExpr(NewOps, S->getLoop(), 15375ffd83dbSDimitry Andric S->getNoWrapFlags(SCEV::FlagNW))); 15385ffd83dbSDimitry Andric BasicBlock::iterator NewInsertPt = 1539e8d8bef9SDimitry Andric findInsertPointAfter(cast<Instruction>(V), &*Builder.GetInsertPoint()); 1540e8d8bef9SDimitry Andric V = expandCodeForImpl(SE.getTruncateExpr(SE.getUnknown(V), Ty), nullptr, 1541e8d8bef9SDimitry Andric &*NewInsertPt, false); 15425ffd83dbSDimitry Andric return V; 15435ffd83dbSDimitry Andric } 15445ffd83dbSDimitry Andric 15455ffd83dbSDimitry Andric // {X,+,F} --> X + {0,+,F} 15465ffd83dbSDimitry Andric if (!S->getStart()->isZero()) { 1547*349cc55cSDimitry Andric if (PointerType *PTy = dyn_cast<PointerType>(S->getType())) { 1548*349cc55cSDimitry Andric Value *StartV = expand(SE.getPointerBase(S)); 1549*349cc55cSDimitry Andric assert(StartV->getType() == PTy && "Pointer type mismatch for GEP!"); 1550*349cc55cSDimitry Andric return expandAddToGEP(SE.removePointerBase(S), PTy, Ty, StartV); 1551*349cc55cSDimitry Andric } 1552*349cc55cSDimitry Andric 1553e8d8bef9SDimitry Andric SmallVector<const SCEV *, 4> NewOps(S->operands()); 15545ffd83dbSDimitry Andric NewOps[0] = SE.getConstant(Ty, 0); 15555ffd83dbSDimitry Andric const SCEV *Rest = SE.getAddRecExpr(NewOps, L, 15565ffd83dbSDimitry Andric S->getNoWrapFlags(SCEV::FlagNW)); 15575ffd83dbSDimitry Andric 15585ffd83dbSDimitry Andric // Just do a normal add. Pre-expand the operands to suppress folding. 15595ffd83dbSDimitry Andric // 15605ffd83dbSDimitry Andric // The LHS and RHS values are factored out of the expand call to make the 15615ffd83dbSDimitry Andric // output independent of the argument evaluation order. 15625ffd83dbSDimitry Andric const SCEV *AddExprLHS = SE.getUnknown(expand(S->getStart())); 15635ffd83dbSDimitry Andric const SCEV *AddExprRHS = SE.getUnknown(expand(Rest)); 15645ffd83dbSDimitry Andric return expand(SE.getAddExpr(AddExprLHS, AddExprRHS)); 15655ffd83dbSDimitry Andric } 15665ffd83dbSDimitry Andric 15675ffd83dbSDimitry Andric // If we don't yet have a canonical IV, create one. 15685ffd83dbSDimitry Andric if (!CanonicalIV) { 15695ffd83dbSDimitry Andric // Create and insert the PHI node for the induction variable in the 15705ffd83dbSDimitry Andric // specified loop. 
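// The canonical IV is the {0,+,1} recurrence: it receives a zero incoming
// value on every edge entering the loop and an "indvar.next" = indvar + 1
// value on every back edge.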
15715ffd83dbSDimitry Andric BasicBlock *Header = L->getHeader(); 15725ffd83dbSDimitry Andric pred_iterator HPB = pred_begin(Header), HPE = pred_end(Header); 15735ffd83dbSDimitry Andric CanonicalIV = PHINode::Create(Ty, std::distance(HPB, HPE), "indvar", 15745ffd83dbSDimitry Andric &Header->front()); 15755ffd83dbSDimitry Andric rememberInstruction(CanonicalIV); 15765ffd83dbSDimitry Andric 15775ffd83dbSDimitry Andric SmallSet<BasicBlock *, 4> PredSeen; 15785ffd83dbSDimitry Andric Constant *One = ConstantInt::get(Ty, 1); 15795ffd83dbSDimitry Andric for (pred_iterator HPI = HPB; HPI != HPE; ++HPI) { 15805ffd83dbSDimitry Andric BasicBlock *HP = *HPI; 15815ffd83dbSDimitry Andric if (!PredSeen.insert(HP).second) { 15825ffd83dbSDimitry Andric // There must be an incoming value for each predecessor, even the 15835ffd83dbSDimitry Andric // duplicates! 15845ffd83dbSDimitry Andric CanonicalIV->addIncoming(CanonicalIV->getIncomingValueForBlock(HP), HP); 15855ffd83dbSDimitry Andric continue; 15865ffd83dbSDimitry Andric } 15875ffd83dbSDimitry Andric 15885ffd83dbSDimitry Andric if (L->contains(HP)) { 15895ffd83dbSDimitry Andric // Insert a unit add instruction right before the terminator 15905ffd83dbSDimitry Andric // corresponding to the back-edge. 15915ffd83dbSDimitry Andric Instruction *Add = BinaryOperator::CreateAdd(CanonicalIV, One, 15925ffd83dbSDimitry Andric "indvar.next", 15935ffd83dbSDimitry Andric HP->getTerminator()); 15945ffd83dbSDimitry Andric Add->setDebugLoc(HP->getTerminator()->getDebugLoc()); 15955ffd83dbSDimitry Andric rememberInstruction(Add); 15965ffd83dbSDimitry Andric CanonicalIV->addIncoming(Add, HP); 15975ffd83dbSDimitry Andric } else { 15985ffd83dbSDimitry Andric CanonicalIV->addIncoming(Constant::getNullValue(Ty), HP); 15995ffd83dbSDimitry Andric } 16005ffd83dbSDimitry Andric } 16015ffd83dbSDimitry Andric } 16025ffd83dbSDimitry Andric 16035ffd83dbSDimitry Andric // {0,+,1} --> Insert a canonical induction variable into the loop! 16045ffd83dbSDimitry Andric if (S->isAffine() && S->getOperand(1)->isOne()) { 16055ffd83dbSDimitry Andric assert(Ty == SE.getEffectiveSCEVType(CanonicalIV->getType()) && 16065ffd83dbSDimitry Andric "IVs with types different from the canonical IV should " 16075ffd83dbSDimitry Andric "already have been handled!"); 16085ffd83dbSDimitry Andric return CanonicalIV; 16095ffd83dbSDimitry Andric } 16105ffd83dbSDimitry Andric 16115ffd83dbSDimitry Andric // {0,+,F} --> {0,+,1} * F 16125ffd83dbSDimitry Andric 16135ffd83dbSDimitry Andric // If this is a simple linear addrec, emit it now as a special case. 16145ffd83dbSDimitry Andric if (S->isAffine()) // {0,+,F} --> i*F 16155ffd83dbSDimitry Andric return 16165ffd83dbSDimitry Andric expand(SE.getTruncateOrNoop( 16175ffd83dbSDimitry Andric SE.getMulExpr(SE.getUnknown(CanonicalIV), 16185ffd83dbSDimitry Andric SE.getNoopOrAnyExtend(S->getOperand(1), 16195ffd83dbSDimitry Andric CanonicalIV->getType())), 16205ffd83dbSDimitry Andric Ty)); 16215ffd83dbSDimitry Andric 16225ffd83dbSDimitry Andric // If this is a chain of recurrences, turn it into a closed form, using the 16235ffd83dbSDimitry Andric // folders, then expandCodeFor the closed form. This allows the folders to 16245ffd83dbSDimitry Andric // simplify the expression without having to build a bunch of special code 16255ffd83dbSDimitry Andric // into this folder. 16265ffd83dbSDimitry Andric const SCEV *IH = SE.getUnknown(CanonicalIV); // Get I as a "symbolic" SCEV. 
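// evaluateAtIteration below gives the closed form of the recurrence at the
// symbolic iteration IH via binomial coefficients: {A,+,B,+,C} evaluated at
// iteration i is A + B*i + C*(i*(i-1)/2).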
16275ffd83dbSDimitry Andric 16285ffd83dbSDimitry Andric // Promote S up to the canonical IV type, if the cast is foldable. 16295ffd83dbSDimitry Andric const SCEV *NewS = S; 16305ffd83dbSDimitry Andric const SCEV *Ext = SE.getNoopOrAnyExtend(S, CanonicalIV->getType()); 16315ffd83dbSDimitry Andric if (isa<SCEVAddRecExpr>(Ext)) 16325ffd83dbSDimitry Andric NewS = Ext; 16335ffd83dbSDimitry Andric 16345ffd83dbSDimitry Andric const SCEV *V = cast<SCEVAddRecExpr>(NewS)->evaluateAtIteration(IH, SE); 16355ffd83dbSDimitry Andric //cerr << "Evaluated: " << *this << "\n to: " << *V << "\n"; 16365ffd83dbSDimitry Andric 16375ffd83dbSDimitry Andric // Truncate the result down to the original type, if needed. 16385ffd83dbSDimitry Andric const SCEV *T = SE.getTruncateOrNoop(V, Ty); 16395ffd83dbSDimitry Andric return expand(T); 16405ffd83dbSDimitry Andric } 16415ffd83dbSDimitry Andric 1642e8d8bef9SDimitry Andric Value *SCEVExpander::visitPtrToIntExpr(const SCEVPtrToIntExpr *S) { 1643e8d8bef9SDimitry Andric Value *V = 1644e8d8bef9SDimitry Andric expandCodeForImpl(S->getOperand(), S->getOperand()->getType(), false); 1645fe6060f1SDimitry Andric return ReuseOrCreateCast(V, S->getType(), CastInst::PtrToInt, 1646fe6060f1SDimitry Andric GetOptimalInsertionPointForCastOf(V)); 1647e8d8bef9SDimitry Andric } 1648e8d8bef9SDimitry Andric 16495ffd83dbSDimitry Andric Value *SCEVExpander::visitTruncateExpr(const SCEVTruncateExpr *S) { 16505ffd83dbSDimitry Andric Type *Ty = SE.getEffectiveSCEVType(S->getType()); 1651e8d8bef9SDimitry Andric Value *V = expandCodeForImpl( 1652e8d8bef9SDimitry Andric S->getOperand(), SE.getEffectiveSCEVType(S->getOperand()->getType()), 1653e8d8bef9SDimitry Andric false); 1654e8d8bef9SDimitry Andric return Builder.CreateTrunc(V, Ty); 16555ffd83dbSDimitry Andric } 16565ffd83dbSDimitry Andric 16575ffd83dbSDimitry Andric Value *SCEVExpander::visitZeroExtendExpr(const SCEVZeroExtendExpr *S) { 16585ffd83dbSDimitry Andric Type *Ty = SE.getEffectiveSCEVType(S->getType()); 1659e8d8bef9SDimitry Andric Value *V = expandCodeForImpl( 1660e8d8bef9SDimitry Andric S->getOperand(), SE.getEffectiveSCEVType(S->getOperand()->getType()), 1661e8d8bef9SDimitry Andric false); 1662e8d8bef9SDimitry Andric return Builder.CreateZExt(V, Ty); 16635ffd83dbSDimitry Andric } 16645ffd83dbSDimitry Andric 16655ffd83dbSDimitry Andric Value *SCEVExpander::visitSignExtendExpr(const SCEVSignExtendExpr *S) { 16665ffd83dbSDimitry Andric Type *Ty = SE.getEffectiveSCEVType(S->getType()); 1667e8d8bef9SDimitry Andric Value *V = expandCodeForImpl( 1668e8d8bef9SDimitry Andric S->getOperand(), SE.getEffectiveSCEVType(S->getOperand()->getType()), 1669e8d8bef9SDimitry Andric false); 1670e8d8bef9SDimitry Andric return Builder.CreateSExt(V, Ty); 16715ffd83dbSDimitry Andric } 16725ffd83dbSDimitry Andric 16735ffd83dbSDimitry Andric Value *SCEVExpander::visitSMaxExpr(const SCEVSMaxExpr *S) { 16745ffd83dbSDimitry Andric Value *LHS = expand(S->getOperand(S->getNumOperands()-1)); 16755ffd83dbSDimitry Andric Type *Ty = LHS->getType(); 16765ffd83dbSDimitry Andric for (int i = S->getNumOperands()-2; i >= 0; --i) { 16775ffd83dbSDimitry Andric // In the case of mixed integer and pointer types, do the 16785ffd83dbSDimitry Andric // rest of the comparisons as integer. 
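// (Overall, the n-ary smax is folded right-to-left into a chain: integer
// operands use the llvm.smax intrinsic, while pointer-typed operands fall
// back to an icmp sgt + select.)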
16795ffd83dbSDimitry Andric Type *OpTy = S->getOperand(i)->getType(); 16805ffd83dbSDimitry Andric if (OpTy->isIntegerTy() != Ty->isIntegerTy()) { 16815ffd83dbSDimitry Andric Ty = SE.getEffectiveSCEVType(Ty); 16825ffd83dbSDimitry Andric LHS = InsertNoopCastOfTo(LHS, Ty); 16835ffd83dbSDimitry Andric } 1684e8d8bef9SDimitry Andric Value *RHS = expandCodeForImpl(S->getOperand(i), Ty, false); 1685fe6060f1SDimitry Andric Value *Sel; 1686fe6060f1SDimitry Andric if (Ty->isIntegerTy()) 1687fe6060f1SDimitry Andric Sel = Builder.CreateIntrinsic(Intrinsic::smax, {Ty}, {LHS, RHS}, 1688fe6060f1SDimitry Andric /*FMFSource=*/nullptr, "smax"); 1689fe6060f1SDimitry Andric else { 16905ffd83dbSDimitry Andric Value *ICmp = Builder.CreateICmpSGT(LHS, RHS); 1691fe6060f1SDimitry Andric Sel = Builder.CreateSelect(ICmp, LHS, RHS, "smax"); 1692fe6060f1SDimitry Andric } 16935ffd83dbSDimitry Andric LHS = Sel; 16945ffd83dbSDimitry Andric } 16955ffd83dbSDimitry Andric // In the case of mixed integer and pointer types, cast the 16965ffd83dbSDimitry Andric // final result back to the pointer type. 16975ffd83dbSDimitry Andric if (LHS->getType() != S->getType()) 16985ffd83dbSDimitry Andric LHS = InsertNoopCastOfTo(LHS, S->getType()); 16995ffd83dbSDimitry Andric return LHS; 17005ffd83dbSDimitry Andric } 17015ffd83dbSDimitry Andric 17025ffd83dbSDimitry Andric Value *SCEVExpander::visitUMaxExpr(const SCEVUMaxExpr *S) { 17035ffd83dbSDimitry Andric Value *LHS = expand(S->getOperand(S->getNumOperands()-1)); 17045ffd83dbSDimitry Andric Type *Ty = LHS->getType(); 17055ffd83dbSDimitry Andric for (int i = S->getNumOperands()-2; i >= 0; --i) { 17065ffd83dbSDimitry Andric // In the case of mixed integer and pointer types, do the 17075ffd83dbSDimitry Andric // rest of the comparisons as integer. 17085ffd83dbSDimitry Andric Type *OpTy = S->getOperand(i)->getType(); 17095ffd83dbSDimitry Andric if (OpTy->isIntegerTy() != Ty->isIntegerTy()) { 17105ffd83dbSDimitry Andric Ty = SE.getEffectiveSCEVType(Ty); 17115ffd83dbSDimitry Andric LHS = InsertNoopCastOfTo(LHS, Ty); 17125ffd83dbSDimitry Andric } 1713e8d8bef9SDimitry Andric Value *RHS = expandCodeForImpl(S->getOperand(i), Ty, false); 1714fe6060f1SDimitry Andric Value *Sel; 1715fe6060f1SDimitry Andric if (Ty->isIntegerTy()) 1716fe6060f1SDimitry Andric Sel = Builder.CreateIntrinsic(Intrinsic::umax, {Ty}, {LHS, RHS}, 1717fe6060f1SDimitry Andric /*FMFSource=*/nullptr, "umax"); 1718fe6060f1SDimitry Andric else { 17195ffd83dbSDimitry Andric Value *ICmp = Builder.CreateICmpUGT(LHS, RHS); 1720fe6060f1SDimitry Andric Sel = Builder.CreateSelect(ICmp, LHS, RHS, "umax"); 1721fe6060f1SDimitry Andric } 17225ffd83dbSDimitry Andric LHS = Sel; 17235ffd83dbSDimitry Andric } 17245ffd83dbSDimitry Andric // In the case of mixed integer and pointer types, cast the 17255ffd83dbSDimitry Andric // final result back to the pointer type. 
17265ffd83dbSDimitry Andric if (LHS->getType() != S->getType()) 17275ffd83dbSDimitry Andric LHS = InsertNoopCastOfTo(LHS, S->getType()); 17285ffd83dbSDimitry Andric return LHS; 17295ffd83dbSDimitry Andric } 17305ffd83dbSDimitry Andric 17315ffd83dbSDimitry Andric Value *SCEVExpander::visitSMinExpr(const SCEVSMinExpr *S) { 17325ffd83dbSDimitry Andric Value *LHS = expand(S->getOperand(S->getNumOperands() - 1)); 17335ffd83dbSDimitry Andric Type *Ty = LHS->getType(); 17345ffd83dbSDimitry Andric for (int i = S->getNumOperands() - 2; i >= 0; --i) { 17355ffd83dbSDimitry Andric // In the case of mixed integer and pointer types, do the 17365ffd83dbSDimitry Andric // rest of the comparisons as integer. 17375ffd83dbSDimitry Andric Type *OpTy = S->getOperand(i)->getType(); 17385ffd83dbSDimitry Andric if (OpTy->isIntegerTy() != Ty->isIntegerTy()) { 17395ffd83dbSDimitry Andric Ty = SE.getEffectiveSCEVType(Ty); 17405ffd83dbSDimitry Andric LHS = InsertNoopCastOfTo(LHS, Ty); 17415ffd83dbSDimitry Andric } 1742e8d8bef9SDimitry Andric Value *RHS = expandCodeForImpl(S->getOperand(i), Ty, false); 1743fe6060f1SDimitry Andric Value *Sel; 1744fe6060f1SDimitry Andric if (Ty->isIntegerTy()) 1745fe6060f1SDimitry Andric Sel = Builder.CreateIntrinsic(Intrinsic::smin, {Ty}, {LHS, RHS}, 1746fe6060f1SDimitry Andric /*FMFSource=*/nullptr, "smin"); 1747fe6060f1SDimitry Andric else { 17485ffd83dbSDimitry Andric Value *ICmp = Builder.CreateICmpSLT(LHS, RHS); 1749fe6060f1SDimitry Andric Sel = Builder.CreateSelect(ICmp, LHS, RHS, "smin"); 1750fe6060f1SDimitry Andric } 17515ffd83dbSDimitry Andric LHS = Sel; 17525ffd83dbSDimitry Andric } 17535ffd83dbSDimitry Andric // In the case of mixed integer and pointer types, cast the 17545ffd83dbSDimitry Andric // final result back to the pointer type. 17555ffd83dbSDimitry Andric if (LHS->getType() != S->getType()) 17565ffd83dbSDimitry Andric LHS = InsertNoopCastOfTo(LHS, S->getType()); 17575ffd83dbSDimitry Andric return LHS; 17585ffd83dbSDimitry Andric } 17595ffd83dbSDimitry Andric 17605ffd83dbSDimitry Andric Value *SCEVExpander::visitUMinExpr(const SCEVUMinExpr *S) { 17615ffd83dbSDimitry Andric Value *LHS = expand(S->getOperand(S->getNumOperands() - 1)); 17625ffd83dbSDimitry Andric Type *Ty = LHS->getType(); 17635ffd83dbSDimitry Andric for (int i = S->getNumOperands() - 2; i >= 0; --i) { 17645ffd83dbSDimitry Andric // In the case of mixed integer and pointer types, do the 17655ffd83dbSDimitry Andric // rest of the comparisons as integer. 
17665ffd83dbSDimitry Andric Type *OpTy = S->getOperand(i)->getType(); 17675ffd83dbSDimitry Andric if (OpTy->isIntegerTy() != Ty->isIntegerTy()) { 17685ffd83dbSDimitry Andric Ty = SE.getEffectiveSCEVType(Ty); 17695ffd83dbSDimitry Andric LHS = InsertNoopCastOfTo(LHS, Ty); 17705ffd83dbSDimitry Andric } 1771e8d8bef9SDimitry Andric Value *RHS = expandCodeForImpl(S->getOperand(i), Ty, false); 1772fe6060f1SDimitry Andric Value *Sel; 1773fe6060f1SDimitry Andric if (Ty->isIntegerTy()) 1774fe6060f1SDimitry Andric Sel = Builder.CreateIntrinsic(Intrinsic::umin, {Ty}, {LHS, RHS}, 1775fe6060f1SDimitry Andric /*FMFSource=*/nullptr, "umin"); 1776fe6060f1SDimitry Andric else { 17775ffd83dbSDimitry Andric Value *ICmp = Builder.CreateICmpULT(LHS, RHS); 1778fe6060f1SDimitry Andric Sel = Builder.CreateSelect(ICmp, LHS, RHS, "umin"); 1779fe6060f1SDimitry Andric } 17805ffd83dbSDimitry Andric LHS = Sel; 17815ffd83dbSDimitry Andric } 17825ffd83dbSDimitry Andric // In the case of mixed integer and pointer types, cast the 17835ffd83dbSDimitry Andric // final result back to the pointer type. 17845ffd83dbSDimitry Andric if (LHS->getType() != S->getType()) 17855ffd83dbSDimitry Andric LHS = InsertNoopCastOfTo(LHS, S->getType()); 17865ffd83dbSDimitry Andric return LHS; 17875ffd83dbSDimitry Andric } 17885ffd83dbSDimitry Andric 1789e8d8bef9SDimitry Andric Value *SCEVExpander::expandCodeForImpl(const SCEV *SH, Type *Ty, 1790e8d8bef9SDimitry Andric Instruction *IP, bool Root) { 17915ffd83dbSDimitry Andric setInsertPoint(IP); 1792e8d8bef9SDimitry Andric Value *V = expandCodeForImpl(SH, Ty, Root); 1793e8d8bef9SDimitry Andric return V; 17945ffd83dbSDimitry Andric } 17955ffd83dbSDimitry Andric 1796e8d8bef9SDimitry Andric Value *SCEVExpander::expandCodeForImpl(const SCEV *SH, Type *Ty, bool Root) { 17975ffd83dbSDimitry Andric // Expand the code for this SCEV. 17985ffd83dbSDimitry Andric Value *V = expand(SH); 1799e8d8bef9SDimitry Andric 1800e8d8bef9SDimitry Andric if (PreserveLCSSA) { 1801e8d8bef9SDimitry Andric if (auto *Inst = dyn_cast<Instruction>(V)) { 1802e8d8bef9SDimitry Andric // Create a temporary instruction at the current insertion point, so we 1803e8d8bef9SDimitry Andric // can hand it off to the helper to create LCSSA PHIs if required for the 1804e8d8bef9SDimitry Andric // new use. 1805e8d8bef9SDimitry Andric // FIXME: Ideally formLCSSAForInstructions (used in fixupLCSSAFormFor) 1806e8d8bef9SDimitry Andric // would accept an insertion point and return an LCSSA phi for that 1807e8d8bef9SDimitry Andric // insertion point, so there is no need to insert & remove the temporary 1808e8d8bef9SDimitry Andric // instruction. 1809e8d8bef9SDimitry Andric Instruction *Tmp; 1810e8d8bef9SDimitry Andric if (Inst->getType()->isIntegerTy()) 1811e8d8bef9SDimitry Andric Tmp = 1812e8d8bef9SDimitry Andric cast<Instruction>(Builder.CreateAdd(Inst, Inst, "tmp.lcssa.user")); 1813e8d8bef9SDimitry Andric else { 1814e8d8bef9SDimitry Andric assert(Inst->getType()->isPointerTy()); 1815fe6060f1SDimitry Andric Tmp = cast<Instruction>(Builder.CreatePtrToInt( 1816fe6060f1SDimitry Andric Inst, Type::getInt32Ty(Inst->getContext()), "tmp.lcssa.user")); 1817e8d8bef9SDimitry Andric } 1818e8d8bef9SDimitry Andric V = fixupLCSSAFormFor(Tmp, 0); 1819e8d8bef9SDimitry Andric 1820e8d8bef9SDimitry Andric // Clean up temporary instruction.
1821e8d8bef9SDimitry Andric InsertedValues.erase(Tmp); 1822e8d8bef9SDimitry Andric InsertedPostIncValues.erase(Tmp); 1823e8d8bef9SDimitry Andric Tmp->eraseFromParent(); 1824e8d8bef9SDimitry Andric } 1825e8d8bef9SDimitry Andric } 1826e8d8bef9SDimitry Andric 1827e8d8bef9SDimitry Andric InsertedExpressions[std::make_pair(SH, &*Builder.GetInsertPoint())] = V; 18285ffd83dbSDimitry Andric if (Ty) { 18295ffd83dbSDimitry Andric assert(SE.getTypeSizeInBits(Ty) == SE.getTypeSizeInBits(SH->getType()) && 18305ffd83dbSDimitry Andric "non-trivial casts should be done with the SCEVs directly!"); 18315ffd83dbSDimitry Andric V = InsertNoopCastOfTo(V, Ty); 18325ffd83dbSDimitry Andric } 18335ffd83dbSDimitry Andric return V; 18345ffd83dbSDimitry Andric } 18355ffd83dbSDimitry Andric 1836*349cc55cSDimitry Andric /// Check whether value has nuw/nsw/exact set but SCEV does not. 1837*349cc55cSDimitry Andric /// TODO: In reality it is better to check the poison recursively 1838*349cc55cSDimitry Andric /// but this is better than nothing. 1839*349cc55cSDimitry Andric static bool SCEVLostPoisonFlags(const SCEV *S, const Instruction *I) { 1840*349cc55cSDimitry Andric if (isa<OverflowingBinaryOperator>(I)) { 1841*349cc55cSDimitry Andric if (auto *NS = dyn_cast<SCEVNAryExpr>(S)) { 1842*349cc55cSDimitry Andric if (I->hasNoSignedWrap() && !NS->hasNoSignedWrap()) 1843*349cc55cSDimitry Andric return true; 1844*349cc55cSDimitry Andric if (I->hasNoUnsignedWrap() && !NS->hasNoUnsignedWrap()) 1845*349cc55cSDimitry Andric return true; 1846*349cc55cSDimitry Andric } 1847*349cc55cSDimitry Andric } else if (isa<PossiblyExactOperator>(I) && I->isExact()) 1848*349cc55cSDimitry Andric return true; 1849*349cc55cSDimitry Andric return false; 1850*349cc55cSDimitry Andric } 1851*349cc55cSDimitry Andric 18525ffd83dbSDimitry Andric ScalarEvolution::ValueOffsetPair 18535ffd83dbSDimitry Andric SCEVExpander::FindValueInExprValueMap(const SCEV *S, 18545ffd83dbSDimitry Andric const Instruction *InsertPt) { 1855fe6060f1SDimitry Andric auto *Set = SE.getSCEVValues(S); 18565ffd83dbSDimitry Andric // If the expansion is not in CanonicalMode, and the SCEV contains any 18575ffd83dbSDimitry Andric // sub scAddRecExpr type SCEV, it is required to expand the SCEV literally. 18585ffd83dbSDimitry Andric if (CanonicalMode || !SE.containsAddRecurrence(S)) { 18595ffd83dbSDimitry Andric // If S is scConstant, it may be worse to reuse an existing Value. 18605ffd83dbSDimitry Andric if (S->getSCEVType() != scConstant && Set) { 1861*349cc55cSDimitry Andric // Choose a Value from the set which dominates the InsertPt. 1862*349cc55cSDimitry Andric // InsertPt should be inside the Value's parent loop so as not to break 18635ffd83dbSDimitry Andric // the LCSSA form. 
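// Concretely, a cached value is reused only if it is an instruction of the
// same type as S, in the same function, dominating InsertPt, located in a
// loop that contains InsertPt (or in no loop at all), and carrying no
// nuw/nsw/exact flags beyond what the SCEV itself proves.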
18645ffd83dbSDimitry Andric for (auto const &VOPair : *Set) {
18655ffd83dbSDimitry Andric Value *V = VOPair.first;
18665ffd83dbSDimitry Andric ConstantInt *Offset = VOPair.second;
1867*349cc55cSDimitry Andric Instruction *EntInst = dyn_cast_or_null<Instruction>(V);
1868*349cc55cSDimitry Andric if (!EntInst)
1869*349cc55cSDimitry Andric continue;
1870*349cc55cSDimitry Andric 
1871*349cc55cSDimitry Andric assert(EntInst->getFunction() == InsertPt->getFunction());
1872*349cc55cSDimitry Andric if (S->getType() == V->getType() &&
18735ffd83dbSDimitry Andric SE.DT.dominates(EntInst, InsertPt) &&
18745ffd83dbSDimitry Andric (SE.LI.getLoopFor(EntInst->getParent()) == nullptr ||
1875*349cc55cSDimitry Andric SE.LI.getLoopFor(EntInst->getParent())->contains(InsertPt)) &&
1876*349cc55cSDimitry Andric !SCEVLostPoisonFlags(S, EntInst))
18775ffd83dbSDimitry Andric return {V, Offset};
18785ffd83dbSDimitry Andric }
18795ffd83dbSDimitry Andric }
18805ffd83dbSDimitry Andric }
18815ffd83dbSDimitry Andric return {nullptr, nullptr};
18825ffd83dbSDimitry Andric }
18835ffd83dbSDimitry Andric 
18845ffd83dbSDimitry Andric // The expansion of SCEV will either reuse a previous Value in ExprValueMap,
18855ffd83dbSDimitry Andric // or expand the SCEV literally. Specifically, if the expansion is in LSRMode,
18865ffd83dbSDimitry Andric // and the SCEV contains any sub scAddRecExpr type SCEV, it will be expanded
18875ffd83dbSDimitry Andric // literally, to prevent LSR's transformed SCEV from being reverted. Otherwise,
18885ffd83dbSDimitry Andric // the expansion will try to reuse a Value from ExprValueMap, and only when it
18895ffd83dbSDimitry Andric // fails, expand the SCEV literally.
18905ffd83dbSDimitry Andric Value *SCEVExpander::expand(const SCEV *S) {
18915ffd83dbSDimitry Andric // Compute an insertion point for this SCEV object. Hoist the instructions
18925ffd83dbSDimitry Andric // as far out in the loop nest as possible.
18935ffd83dbSDimitry Andric Instruction *InsertPt = &*Builder.GetInsertPoint();
18945ffd83dbSDimitry Andric 
18955ffd83dbSDimitry Andric // We can move the insertion point only if there are no div or rem operations;
18965ffd83dbSDimitry Andric // otherwise we risk moving it past a check for a zero denominator.
18975ffd83dbSDimitry Andric auto SafeToHoist = [](const SCEV *S) {
18985ffd83dbSDimitry Andric return !SCEVExprContains(S, [](const SCEV *S) {
18995ffd83dbSDimitry Andric if (const auto *D = dyn_cast<SCEVUDivExpr>(S)) {
19005ffd83dbSDimitry Andric if (const auto *SC = dyn_cast<SCEVConstant>(D->getRHS()))
19015ffd83dbSDimitry Andric // Division by non-zero constants can be hoisted.
19025ffd83dbSDimitry Andric return SC->getValue()->isZero();
19035ffd83dbSDimitry Andric // All other divisions should not be moved as they may be
19045ffd83dbSDimitry Andric // divisions by zero and should be kept within the
19055ffd83dbSDimitry Andric // conditions of the surrounding loops that guard their
19065ffd83dbSDimitry Andric // execution (see PR35406).
19075ffd83dbSDimitry Andric return true; 19085ffd83dbSDimitry Andric } 19095ffd83dbSDimitry Andric return false; 19105ffd83dbSDimitry Andric }); 19115ffd83dbSDimitry Andric }; 19125ffd83dbSDimitry Andric if (SafeToHoist(S)) { 19135ffd83dbSDimitry Andric for (Loop *L = SE.LI.getLoopFor(Builder.GetInsertBlock());; 19145ffd83dbSDimitry Andric L = L->getParentLoop()) { 19155ffd83dbSDimitry Andric if (SE.isLoopInvariant(S, L)) { 19165ffd83dbSDimitry Andric if (!L) break; 19175ffd83dbSDimitry Andric if (BasicBlock *Preheader = L->getLoopPreheader()) 19185ffd83dbSDimitry Andric InsertPt = Preheader->getTerminator(); 19195ffd83dbSDimitry Andric else 19205ffd83dbSDimitry Andric // LSR sets the insertion point for AddRec start/step values to the 19215ffd83dbSDimitry Andric // block start to simplify value reuse, even though it's an invalid 19225ffd83dbSDimitry Andric // position. SCEVExpander must correct for this in all cases. 19235ffd83dbSDimitry Andric InsertPt = &*L->getHeader()->getFirstInsertionPt(); 19245ffd83dbSDimitry Andric } else { 19255ffd83dbSDimitry Andric // If the SCEV is computable at this level, insert it into the header 19265ffd83dbSDimitry Andric // after the PHIs (and after any other instructions that we've inserted 19275ffd83dbSDimitry Andric // there) so that it is guaranteed to dominate any user inside the loop. 19285ffd83dbSDimitry Andric if (L && SE.hasComputableLoopEvolution(S, L) && !PostIncLoops.count(L)) 19295ffd83dbSDimitry Andric InsertPt = &*L->getHeader()->getFirstInsertionPt(); 1930e8d8bef9SDimitry Andric 19315ffd83dbSDimitry Andric while (InsertPt->getIterator() != Builder.GetInsertPoint() && 19325ffd83dbSDimitry Andric (isInsertedInstruction(InsertPt) || 1933e8d8bef9SDimitry Andric isa<DbgInfoIntrinsic>(InsertPt))) { 19345ffd83dbSDimitry Andric InsertPt = &*std::next(InsertPt->getIterator()); 1935e8d8bef9SDimitry Andric } 19365ffd83dbSDimitry Andric break; 19375ffd83dbSDimitry Andric } 19385ffd83dbSDimitry Andric } 19395ffd83dbSDimitry Andric } 19405ffd83dbSDimitry Andric 19415ffd83dbSDimitry Andric // Check to see if we already expanded this here. 19425ffd83dbSDimitry Andric auto I = InsertedExpressions.find(std::make_pair(S, InsertPt)); 19435ffd83dbSDimitry Andric if (I != InsertedExpressions.end()) 19445ffd83dbSDimitry Andric return I->second; 19455ffd83dbSDimitry Andric 19465ffd83dbSDimitry Andric SCEVInsertPointGuard Guard(Builder, this); 19475ffd83dbSDimitry Andric Builder.SetInsertPoint(InsertPt); 19485ffd83dbSDimitry Andric 19495ffd83dbSDimitry Andric // Expand the expression into instructions. 
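// First try to reuse a value already recorded for S in ExprValueMap. If the
// cached value differs from S by a constant offset, S is rematerialized below
// by subtracting that offset (a negative-index GEP for pointers, a sub
// otherwise).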
19505ffd83dbSDimitry Andric ScalarEvolution::ValueOffsetPair VO = FindValueInExprValueMap(S, InsertPt);
19515ffd83dbSDimitry Andric Value *V = VO.first;
19525ffd83dbSDimitry Andric 
19535ffd83dbSDimitry Andric if (!V)
19545ffd83dbSDimitry Andric V = visit(S);
19555ffd83dbSDimitry Andric else if (VO.second) {
19565ffd83dbSDimitry Andric if (PointerType *Vty = dyn_cast<PointerType>(V->getType())) {
19575ffd83dbSDimitry Andric Type *Ety = Vty->getPointerElementType();
19585ffd83dbSDimitry Andric int64_t Offset = VO.second->getSExtValue();
19595ffd83dbSDimitry Andric int64_t ESize = SE.getTypeSizeInBits(Ety);
19605ffd83dbSDimitry Andric if ((Offset * 8) % ESize == 0) {
19615ffd83dbSDimitry Andric ConstantInt *Idx =
19625ffd83dbSDimitry Andric ConstantInt::getSigned(VO.second->getType(), -(Offset * 8) / ESize);
19635ffd83dbSDimitry Andric V = Builder.CreateGEP(Ety, V, Idx, "scevgep");
19645ffd83dbSDimitry Andric } else {
19655ffd83dbSDimitry Andric ConstantInt *Idx =
19665ffd83dbSDimitry Andric ConstantInt::getSigned(VO.second->getType(), -Offset);
19675ffd83dbSDimitry Andric unsigned AS = Vty->getAddressSpace();
19685ffd83dbSDimitry Andric V = Builder.CreateBitCast(V, Type::getInt8PtrTy(SE.getContext(), AS));
19695ffd83dbSDimitry Andric V = Builder.CreateGEP(Type::getInt8Ty(SE.getContext()), V, Idx,
19705ffd83dbSDimitry Andric "uglygep");
19715ffd83dbSDimitry Andric V = Builder.CreateBitCast(V, Vty);
19725ffd83dbSDimitry Andric }
19735ffd83dbSDimitry Andric } else {
19745ffd83dbSDimitry Andric V = Builder.CreateSub(V, VO.second);
19755ffd83dbSDimitry Andric }
19765ffd83dbSDimitry Andric }
19775ffd83dbSDimitry Andric // Remember the expanded value for this SCEV at this location.
19785ffd83dbSDimitry Andric //
19795ffd83dbSDimitry Andric // This is independent of PostIncLoops. The mapped value simply materializes
19805ffd83dbSDimitry Andric // the expression at this insertion point. If the mapped value happened to be
19815ffd83dbSDimitry Andric // a postinc expansion, it could be reused by a non-postinc user, but only if
19825ffd83dbSDimitry Andric // its insertion point was already at the head of the loop.
19835ffd83dbSDimitry Andric InsertedExpressions[std::make_pair(S, InsertPt)] = V;
19845ffd83dbSDimitry Andric return V;
19855ffd83dbSDimitry Andric }
19865ffd83dbSDimitry Andric 
19875ffd83dbSDimitry Andric void SCEVExpander::rememberInstruction(Value *I) {
1988e8d8bef9SDimitry Andric auto DoInsert = [this](Value *V) {
19895ffd83dbSDimitry Andric if (!PostIncLoops.empty())
1990e8d8bef9SDimitry Andric InsertedPostIncValues.insert(V);
19915ffd83dbSDimitry Andric else
1992e8d8bef9SDimitry Andric InsertedValues.insert(V);
1993e8d8bef9SDimitry Andric };
1994e8d8bef9SDimitry Andric DoInsert(I);
1995e8d8bef9SDimitry Andric 
1996e8d8bef9SDimitry Andric if (!PreserveLCSSA)
1997e8d8bef9SDimitry Andric return;
1998e8d8bef9SDimitry Andric 
1999e8d8bef9SDimitry Andric if (auto *Inst = dyn_cast<Instruction>(I)) {
2000e8d8bef9SDimitry Andric // A new instruction has been added, which might introduce new uses outside
2001e8d8bef9SDimitry Andric // a defining loop. Fix LCSSA form for each operand of the new instruction,
2002e8d8bef9SDimitry Andric // if required.
2003e8d8bef9SDimitry Andric for (unsigned OpIdx = 0, OpEnd = Inst->getNumOperands(); OpIdx != OpEnd; 2004e8d8bef9SDimitry Andric OpIdx++) 2005e8d8bef9SDimitry Andric fixupLCSSAFormFor(Inst, OpIdx); 20065ffd83dbSDimitry Andric } 20075ffd83dbSDimitry Andric } 20085ffd83dbSDimitry Andric 20095ffd83dbSDimitry Andric /// replaceCongruentIVs - Check for congruent phis in this loop header and 20105ffd83dbSDimitry Andric /// replace them with their most canonical representative. Return the number of 20115ffd83dbSDimitry Andric /// phis eliminated. 20125ffd83dbSDimitry Andric /// 20135ffd83dbSDimitry Andric /// This does not depend on any SCEVExpander state but should be used in 20145ffd83dbSDimitry Andric /// the same context that SCEVExpander is used. 20155ffd83dbSDimitry Andric unsigned 20165ffd83dbSDimitry Andric SCEVExpander::replaceCongruentIVs(Loop *L, const DominatorTree *DT, 20175ffd83dbSDimitry Andric SmallVectorImpl<WeakTrackingVH> &DeadInsts, 20185ffd83dbSDimitry Andric const TargetTransformInfo *TTI) { 20195ffd83dbSDimitry Andric // Find integer phis in order of increasing width. 20205ffd83dbSDimitry Andric SmallVector<PHINode*, 8> Phis; 20215ffd83dbSDimitry Andric for (PHINode &PN : L->getHeader()->phis()) 20225ffd83dbSDimitry Andric Phis.push_back(&PN); 20235ffd83dbSDimitry Andric 20245ffd83dbSDimitry Andric if (TTI) 2025*349cc55cSDimitry Andric // Use stable_sort to preserve order of equivalent PHIs, so the order 2026*349cc55cSDimitry Andric // of the sorted Phis is the same from run to run on the same loop. 2027*349cc55cSDimitry Andric llvm::stable_sort(Phis, [](Value *LHS, Value *RHS) { 20285ffd83dbSDimitry Andric // Put pointers at the back and make sure pointer < pointer = false. 20295ffd83dbSDimitry Andric if (!LHS->getType()->isIntegerTy() || !RHS->getType()->isIntegerTy()) 20305ffd83dbSDimitry Andric return RHS->getType()->isIntegerTy() && !LHS->getType()->isIntegerTy(); 2031e8d8bef9SDimitry Andric return RHS->getType()->getPrimitiveSizeInBits().getFixedSize() < 2032e8d8bef9SDimitry Andric LHS->getType()->getPrimitiveSizeInBits().getFixedSize(); 20335ffd83dbSDimitry Andric }); 20345ffd83dbSDimitry Andric 20355ffd83dbSDimitry Andric unsigned NumElim = 0; 20365ffd83dbSDimitry Andric DenseMap<const SCEV *, PHINode *> ExprToIVMap; 20375ffd83dbSDimitry Andric // Process phis from wide to narrow. Map wide phis to their truncation 20385ffd83dbSDimitry Andric // so narrow phis can reuse them. 20395ffd83dbSDimitry Andric for (PHINode *Phi : Phis) { 20405ffd83dbSDimitry Andric auto SimplifyPHINode = [&](PHINode *PN) -> Value * { 20415ffd83dbSDimitry Andric if (Value *V = SimplifyInstruction(PN, {DL, &SE.TLI, &SE.DT, &SE.AC})) 20425ffd83dbSDimitry Andric return V; 20435ffd83dbSDimitry Andric if (!SE.isSCEVable(PN->getType())) 20445ffd83dbSDimitry Andric return nullptr; 20455ffd83dbSDimitry Andric auto *Const = dyn_cast<SCEVConstant>(SE.getSCEV(PN)); 20465ffd83dbSDimitry Andric if (!Const) 20475ffd83dbSDimitry Andric return nullptr; 20485ffd83dbSDimitry Andric return Const->getValue(); 20495ffd83dbSDimitry Andric }; 20505ffd83dbSDimitry Andric 20515ffd83dbSDimitry Andric // Fold constant phis. They may be congruent to other constant phis and 20525ffd83dbSDimitry Andric // would confuse the logic below that expects proper IVs. 
20535ffd83dbSDimitry Andric if (Value *V = SimplifyPHINode(Phi)) { 20545ffd83dbSDimitry Andric if (V->getType() != Phi->getType()) 20555ffd83dbSDimitry Andric continue; 20565ffd83dbSDimitry Andric Phi->replaceAllUsesWith(V); 20575ffd83dbSDimitry Andric DeadInsts.emplace_back(Phi); 20585ffd83dbSDimitry Andric ++NumElim; 2059fe6060f1SDimitry Andric SCEV_DEBUG_WITH_TYPE(DebugType, 2060fe6060f1SDimitry Andric dbgs() << "INDVARS: Eliminated constant iv: " << *Phi 2061fe6060f1SDimitry Andric << '\n'); 20625ffd83dbSDimitry Andric continue; 20635ffd83dbSDimitry Andric } 20645ffd83dbSDimitry Andric 20655ffd83dbSDimitry Andric if (!SE.isSCEVable(Phi->getType())) 20665ffd83dbSDimitry Andric continue; 20675ffd83dbSDimitry Andric 20685ffd83dbSDimitry Andric PHINode *&OrigPhiRef = ExprToIVMap[SE.getSCEV(Phi)]; 20695ffd83dbSDimitry Andric if (!OrigPhiRef) { 20705ffd83dbSDimitry Andric OrigPhiRef = Phi; 20715ffd83dbSDimitry Andric if (Phi->getType()->isIntegerTy() && TTI && 20725ffd83dbSDimitry Andric TTI->isTruncateFree(Phi->getType(), Phis.back()->getType())) { 20735ffd83dbSDimitry Andric // This phi can be freely truncated to the narrowest phi type. Map the 20745ffd83dbSDimitry Andric // truncated expression to it so it will be reused for narrow types. 20755ffd83dbSDimitry Andric const SCEV *TruncExpr = 20765ffd83dbSDimitry Andric SE.getTruncateExpr(SE.getSCEV(Phi), Phis.back()->getType()); 20775ffd83dbSDimitry Andric ExprToIVMap[TruncExpr] = Phi; 20785ffd83dbSDimitry Andric } 20795ffd83dbSDimitry Andric continue; 20805ffd83dbSDimitry Andric } 20815ffd83dbSDimitry Andric 20825ffd83dbSDimitry Andric // Replacing a pointer phi with an integer phi or vice-versa doesn't make 20835ffd83dbSDimitry Andric // sense. 20845ffd83dbSDimitry Andric if (OrigPhiRef->getType()->isPointerTy() != Phi->getType()->isPointerTy()) 20855ffd83dbSDimitry Andric continue; 20865ffd83dbSDimitry Andric 20875ffd83dbSDimitry Andric if (BasicBlock *LatchBlock = L->getLoopLatch()) { 20885ffd83dbSDimitry Andric Instruction *OrigInc = dyn_cast<Instruction>( 20895ffd83dbSDimitry Andric OrigPhiRef->getIncomingValueForBlock(LatchBlock)); 20905ffd83dbSDimitry Andric Instruction *IsomorphicInc = 20915ffd83dbSDimitry Andric dyn_cast<Instruction>(Phi->getIncomingValueForBlock(LatchBlock)); 20925ffd83dbSDimitry Andric 20935ffd83dbSDimitry Andric if (OrigInc && IsomorphicInc) { 20945ffd83dbSDimitry Andric // If this phi has the same width but is more canonical, replace the 20955ffd83dbSDimitry Andric // original with it. As part of the "more canonical" determination, 20965ffd83dbSDimitry Andric // respect a prior decision to use an IV chain. 20975ffd83dbSDimitry Andric if (OrigPhiRef->getType() == Phi->getType() && 20985ffd83dbSDimitry Andric !(ChainedPhis.count(Phi) || 20995ffd83dbSDimitry Andric isExpandedAddRecExprPHI(OrigPhiRef, OrigInc, L)) && 21005ffd83dbSDimitry Andric (ChainedPhis.count(Phi) || 21015ffd83dbSDimitry Andric isExpandedAddRecExprPHI(Phi, IsomorphicInc, L))) { 21025ffd83dbSDimitry Andric std::swap(OrigPhiRef, Phi); 21035ffd83dbSDimitry Andric std::swap(OrigInc, IsomorphicInc); 21045ffd83dbSDimitry Andric } 21055ffd83dbSDimitry Andric // Replacing the congruent phi is sufficient because acyclic 21065ffd83dbSDimitry Andric // redundancy elimination, CSE/GVN, should handle the 21075ffd83dbSDimitry Andric // rest. However, once SCEV proves that a phi is congruent, 21085ffd83dbSDimitry Andric // it's often the head of an IV user cycle that is isomorphic 21095ffd83dbSDimitry Andric // with the original phi. 
It's worth eagerly cleaning up the 21105ffd83dbSDimitry Andric // common case of a single IV increment so that DeleteDeadPHIs 21115ffd83dbSDimitry Andric // can remove cycles that had postinc uses. 21125ffd83dbSDimitry Andric const SCEV *TruncExpr = 21135ffd83dbSDimitry Andric SE.getTruncateOrNoop(SE.getSCEV(OrigInc), IsomorphicInc->getType()); 21145ffd83dbSDimitry Andric if (OrigInc != IsomorphicInc && 21155ffd83dbSDimitry Andric TruncExpr == SE.getSCEV(IsomorphicInc) && 21165ffd83dbSDimitry Andric SE.LI.replacementPreservesLCSSAForm(IsomorphicInc, OrigInc) && 21175ffd83dbSDimitry Andric hoistIVInc(OrigInc, IsomorphicInc)) { 2118fe6060f1SDimitry Andric SCEV_DEBUG_WITH_TYPE( 2119fe6060f1SDimitry Andric DebugType, dbgs() << "INDVARS: Eliminated congruent iv.inc: " 21205ffd83dbSDimitry Andric << *IsomorphicInc << '\n'); 21215ffd83dbSDimitry Andric Value *NewInc = OrigInc; 21225ffd83dbSDimitry Andric if (OrigInc->getType() != IsomorphicInc->getType()) { 21235ffd83dbSDimitry Andric Instruction *IP = nullptr; 21245ffd83dbSDimitry Andric if (PHINode *PN = dyn_cast<PHINode>(OrigInc)) 21255ffd83dbSDimitry Andric IP = &*PN->getParent()->getFirstInsertionPt(); 21265ffd83dbSDimitry Andric else 21275ffd83dbSDimitry Andric IP = OrigInc->getNextNode(); 21285ffd83dbSDimitry Andric 21295ffd83dbSDimitry Andric IRBuilder<> Builder(IP); 21305ffd83dbSDimitry Andric Builder.SetCurrentDebugLocation(IsomorphicInc->getDebugLoc()); 21315ffd83dbSDimitry Andric NewInc = Builder.CreateTruncOrBitCast( 21325ffd83dbSDimitry Andric OrigInc, IsomorphicInc->getType(), IVName); 21335ffd83dbSDimitry Andric } 21345ffd83dbSDimitry Andric IsomorphicInc->replaceAllUsesWith(NewInc); 21355ffd83dbSDimitry Andric DeadInsts.emplace_back(IsomorphicInc); 21365ffd83dbSDimitry Andric } 21375ffd83dbSDimitry Andric } 21385ffd83dbSDimitry Andric } 2139fe6060f1SDimitry Andric SCEV_DEBUG_WITH_TYPE(DebugType, 2140fe6060f1SDimitry Andric dbgs() << "INDVARS: Eliminated congruent iv: " << *Phi 2141fe6060f1SDimitry Andric << '\n'); 2142fe6060f1SDimitry Andric SCEV_DEBUG_WITH_TYPE( 2143fe6060f1SDimitry Andric DebugType, dbgs() << "INDVARS: Original iv: " << *OrigPhiRef << '\n'); 21445ffd83dbSDimitry Andric ++NumElim; 21455ffd83dbSDimitry Andric Value *NewIV = OrigPhiRef; 21465ffd83dbSDimitry Andric if (OrigPhiRef->getType() != Phi->getType()) { 21475ffd83dbSDimitry Andric IRBuilder<> Builder(&*L->getHeader()->getFirstInsertionPt()); 21485ffd83dbSDimitry Andric Builder.SetCurrentDebugLocation(Phi->getDebugLoc()); 21495ffd83dbSDimitry Andric NewIV = Builder.CreateTruncOrBitCast(OrigPhiRef, Phi->getType(), IVName); 21505ffd83dbSDimitry Andric } 21515ffd83dbSDimitry Andric Phi->replaceAllUsesWith(NewIV); 21525ffd83dbSDimitry Andric DeadInsts.emplace_back(Phi); 21535ffd83dbSDimitry Andric } 21545ffd83dbSDimitry Andric return NumElim; 21555ffd83dbSDimitry Andric } 21565ffd83dbSDimitry Andric 21575ffd83dbSDimitry Andric Optional<ScalarEvolution::ValueOffsetPair> 21585ffd83dbSDimitry Andric SCEVExpander::getRelatedExistingExpansion(const SCEV *S, const Instruction *At, 21595ffd83dbSDimitry Andric Loop *L) { 21605ffd83dbSDimitry Andric using namespace llvm::PatternMatch; 21615ffd83dbSDimitry Andric 21625ffd83dbSDimitry Andric SmallVector<BasicBlock *, 4> ExitingBlocks; 21635ffd83dbSDimitry Andric L->getExitingBlocks(ExitingBlocks); 21645ffd83dbSDimitry Andric 21655ffd83dbSDimitry Andric // Look for suitable value in simple conditions at the loop exits. 
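// That is, a conditional branch of the form 'br (icmp pred %a, %b), ...' in
// an exiting block, where either icmp operand already computes S and
// dominates At.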
21665ffd83dbSDimitry Andric for (BasicBlock *BB : ExitingBlocks) { 21675ffd83dbSDimitry Andric ICmpInst::Predicate Pred; 21685ffd83dbSDimitry Andric Instruction *LHS, *RHS; 21695ffd83dbSDimitry Andric 21705ffd83dbSDimitry Andric if (!match(BB->getTerminator(), 21715ffd83dbSDimitry Andric m_Br(m_ICmp(Pred, m_Instruction(LHS), m_Instruction(RHS)), 21725ffd83dbSDimitry Andric m_BasicBlock(), m_BasicBlock()))) 21735ffd83dbSDimitry Andric continue; 21745ffd83dbSDimitry Andric 21755ffd83dbSDimitry Andric if (SE.getSCEV(LHS) == S && SE.DT.dominates(LHS, At)) 21765ffd83dbSDimitry Andric return ScalarEvolution::ValueOffsetPair(LHS, nullptr); 21775ffd83dbSDimitry Andric 21785ffd83dbSDimitry Andric if (SE.getSCEV(RHS) == S && SE.DT.dominates(RHS, At)) 21795ffd83dbSDimitry Andric return ScalarEvolution::ValueOffsetPair(RHS, nullptr); 21805ffd83dbSDimitry Andric } 21815ffd83dbSDimitry Andric 21825ffd83dbSDimitry Andric // Use expand's logic which is used for reusing a previous Value in 21835ffd83dbSDimitry Andric // ExprValueMap. 21845ffd83dbSDimitry Andric ScalarEvolution::ValueOffsetPair VO = FindValueInExprValueMap(S, At); 21855ffd83dbSDimitry Andric if (VO.first) 21865ffd83dbSDimitry Andric return VO; 21875ffd83dbSDimitry Andric 21885ffd83dbSDimitry Andric // There is potential to make this significantly smarter, but this simple 21895ffd83dbSDimitry Andric // heuristic already gets some interesting cases. 21905ffd83dbSDimitry Andric 21915ffd83dbSDimitry Andric // Can not find suitable value. 21925ffd83dbSDimitry Andric return None; 21935ffd83dbSDimitry Andric } 21945ffd83dbSDimitry Andric 2195fe6060f1SDimitry Andric template<typename T> static InstructionCost costAndCollectOperands( 2196e8d8bef9SDimitry Andric const SCEVOperand &WorkItem, const TargetTransformInfo &TTI, 2197e8d8bef9SDimitry Andric TargetTransformInfo::TargetCostKind CostKind, 2198e8d8bef9SDimitry Andric SmallVectorImpl<SCEVOperand> &Worklist) { 2199e8d8bef9SDimitry Andric 2200e8d8bef9SDimitry Andric const T *S = cast<T>(WorkItem.S); 2201fe6060f1SDimitry Andric InstructionCost Cost = 0; 2202e8d8bef9SDimitry Andric // Object to help map SCEV operands to expanded IR instructions. 2203e8d8bef9SDimitry Andric struct OperationIndices { 2204e8d8bef9SDimitry Andric OperationIndices(unsigned Opc, size_t min, size_t max) : 2205e8d8bef9SDimitry Andric Opcode(Opc), MinIdx(min), MaxIdx(max) { } 2206e8d8bef9SDimitry Andric unsigned Opcode; 2207e8d8bef9SDimitry Andric size_t MinIdx; 2208e8d8bef9SDimitry Andric size_t MaxIdx; 2209e8d8bef9SDimitry Andric }; 2210e8d8bef9SDimitry Andric 2211e8d8bef9SDimitry Andric // Collect the operations of all the instructions that will be needed to 2212e8d8bef9SDimitry Andric // expand the SCEVExpr. This is so that when we come to cost the operands, 2213e8d8bef9SDimitry Andric // we know what the generated user(s) will be. 
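// For example, an add with N operands records a single (Add, 0, 1) entry;
// each SCEV operand is then pushed onto the worklist tagged with that opcode
// and a clamped operand index, which lets the scConstant case of
// isHighCostExpansionHelper() cost the immediate against the right user
// opcode and operand position.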
2214e8d8bef9SDimitry Andric SmallVector<OperationIndices, 2> Operations; 2215e8d8bef9SDimitry Andric 2216fe6060f1SDimitry Andric auto CastCost = [&](unsigned Opcode) -> InstructionCost { 2217e8d8bef9SDimitry Andric Operations.emplace_back(Opcode, 0, 0); 2218e8d8bef9SDimitry Andric return TTI.getCastInstrCost(Opcode, S->getType(), 2219e8d8bef9SDimitry Andric S->getOperand(0)->getType(), 2220e8d8bef9SDimitry Andric TTI::CastContextHint::None, CostKind); 2221e8d8bef9SDimitry Andric }; 2222e8d8bef9SDimitry Andric 2223e8d8bef9SDimitry Andric auto ArithCost = [&](unsigned Opcode, unsigned NumRequired, 2224fe6060f1SDimitry Andric unsigned MinIdx = 0, 2225fe6060f1SDimitry Andric unsigned MaxIdx = 1) -> InstructionCost { 2226e8d8bef9SDimitry Andric Operations.emplace_back(Opcode, MinIdx, MaxIdx); 2227e8d8bef9SDimitry Andric return NumRequired * 2228e8d8bef9SDimitry Andric TTI.getArithmeticInstrCost(Opcode, S->getType(), CostKind); 2229e8d8bef9SDimitry Andric }; 2230e8d8bef9SDimitry Andric 2231fe6060f1SDimitry Andric auto CmpSelCost = [&](unsigned Opcode, unsigned NumRequired, unsigned MinIdx, 2232fe6060f1SDimitry Andric unsigned MaxIdx) -> InstructionCost { 2233e8d8bef9SDimitry Andric Operations.emplace_back(Opcode, MinIdx, MaxIdx); 2234e8d8bef9SDimitry Andric Type *OpType = S->getOperand(0)->getType(); 2235e8d8bef9SDimitry Andric return NumRequired * TTI.getCmpSelInstrCost( 2236e8d8bef9SDimitry Andric Opcode, OpType, CmpInst::makeCmpResultType(OpType), 2237e8d8bef9SDimitry Andric CmpInst::BAD_ICMP_PREDICATE, CostKind); 2238e8d8bef9SDimitry Andric }; 2239e8d8bef9SDimitry Andric 2240e8d8bef9SDimitry Andric switch (S->getSCEVType()) { 2241e8d8bef9SDimitry Andric case scCouldNotCompute: 2242e8d8bef9SDimitry Andric llvm_unreachable("Attempt to use a SCEVCouldNotCompute object!"); 2243e8d8bef9SDimitry Andric case scUnknown: 2244e8d8bef9SDimitry Andric case scConstant: 2245e8d8bef9SDimitry Andric return 0; 2246e8d8bef9SDimitry Andric case scPtrToInt: 2247e8d8bef9SDimitry Andric Cost = CastCost(Instruction::PtrToInt); 2248e8d8bef9SDimitry Andric break; 2249e8d8bef9SDimitry Andric case scTruncate: 2250e8d8bef9SDimitry Andric Cost = CastCost(Instruction::Trunc); 2251e8d8bef9SDimitry Andric break; 2252e8d8bef9SDimitry Andric case scZeroExtend: 2253e8d8bef9SDimitry Andric Cost = CastCost(Instruction::ZExt); 2254e8d8bef9SDimitry Andric break; 2255e8d8bef9SDimitry Andric case scSignExtend: 2256e8d8bef9SDimitry Andric Cost = CastCost(Instruction::SExt); 2257e8d8bef9SDimitry Andric break; 2258e8d8bef9SDimitry Andric case scUDivExpr: { 2259e8d8bef9SDimitry Andric unsigned Opcode = Instruction::UDiv; 2260e8d8bef9SDimitry Andric if (auto *SC = dyn_cast<SCEVConstant>(S->getOperand(1))) 2261e8d8bef9SDimitry Andric if (SC->getAPInt().isPowerOf2()) 2262e8d8bef9SDimitry Andric Opcode = Instruction::LShr; 2263e8d8bef9SDimitry Andric Cost = ArithCost(Opcode, 1); 2264e8d8bef9SDimitry Andric break; 2265e8d8bef9SDimitry Andric } 2266e8d8bef9SDimitry Andric case scAddExpr: 2267e8d8bef9SDimitry Andric Cost = ArithCost(Instruction::Add, S->getNumOperands() - 1); 2268e8d8bef9SDimitry Andric break; 2269e8d8bef9SDimitry Andric case scMulExpr: 2270e8d8bef9SDimitry Andric // TODO: this is a very pessimistic cost modelling for Mul, 2271e8d8bef9SDimitry Andric // because of Bin Pow algorithm actually used by the expander, 2272e8d8bef9SDimitry Andric // see SCEVExpander::visitMulExpr(), ExpandOpBinPowN(). 
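// For example, an expression like x*x*x*x can be emitted with two multiplies
// via binary powering, while the formula below charges for three.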
2273e8d8bef9SDimitry Andric Cost = ArithCost(Instruction::Mul, S->getNumOperands() - 1);
2274e8d8bef9SDimitry Andric break;
2275e8d8bef9SDimitry Andric case scSMaxExpr:
2276e8d8bef9SDimitry Andric case scUMaxExpr:
2277e8d8bef9SDimitry Andric case scSMinExpr:
2278e8d8bef9SDimitry Andric case scUMinExpr: {
2279fe6060f1SDimitry Andric // FIXME: should this ask the cost for the intrinsics?
2280e8d8bef9SDimitry Andric Cost += CmpSelCost(Instruction::ICmp, S->getNumOperands() - 1, 0, 1);
2281e8d8bef9SDimitry Andric Cost += CmpSelCost(Instruction::Select, S->getNumOperands() - 1, 0, 2);
2282e8d8bef9SDimitry Andric break;
2283e8d8bef9SDimitry Andric }
2284e8d8bef9SDimitry Andric case scAddRecExpr: {
2285e8d8bef9SDimitry Andric // In this polynomial, we may have some zero operands, and we shouldn't
2286e8d8bef9SDimitry Andric // really charge for those. So how many non-zero coefficients are there?
2287e8d8bef9SDimitry Andric int NumTerms = llvm::count_if(S->operands(), [](const SCEV *Op) {
2288e8d8bef9SDimitry Andric return !Op->isZero();
2289e8d8bef9SDimitry Andric });
2290e8d8bef9SDimitry Andric 
2291e8d8bef9SDimitry Andric assert(NumTerms >= 1 && "Polynomial should have at least one term.");
2292e8d8bef9SDimitry Andric assert(!(*std::prev(S->operands().end()))->isZero() &&
2293e8d8bef9SDimitry Andric "Last operand should not be zero");
2294e8d8bef9SDimitry Andric 
2295e8d8bef9SDimitry Andric // Ignoring constant term (operand 0), how many of the coefficients are u> 1?
2296e8d8bef9SDimitry Andric int NumNonZeroDegreeNonOneTerms =
2297e8d8bef9SDimitry Andric llvm::count_if(S->operands(), [](const SCEV *Op) {
2298e8d8bef9SDimitry Andric auto *SConst = dyn_cast<SCEVConstant>(Op);
2299e8d8bef9SDimitry Andric return !SConst || SConst->getAPInt().ugt(1);
2300e8d8bef9SDimitry Andric });
2301e8d8bef9SDimitry Andric 
2302e8d8bef9SDimitry Andric // Much like with normal add expr, the polynomial will require
2303e8d8bef9SDimitry Andric // one less addition than the number of its terms.
2304fe6060f1SDimitry Andric InstructionCost AddCost = ArithCost(Instruction::Add, NumTerms - 1,
2305e8d8bef9SDimitry Andric /*MinIdx*/ 1, /*MaxIdx*/ 1);
2306e8d8bef9SDimitry Andric // Here, *each* one of those will require a multiplication.
2307fe6060f1SDimitry Andric InstructionCost MulCost =
2308fe6060f1SDimitry Andric ArithCost(Instruction::Mul, NumNonZeroDegreeNonOneTerms);
2309e8d8bef9SDimitry Andric Cost = AddCost + MulCost;
2310e8d8bef9SDimitry Andric 
2311e8d8bef9SDimitry Andric // What is the degree of this polynomial?
2312e8d8bef9SDimitry Andric int PolyDegree = S->getNumOperands() - 1;
2313e8d8bef9SDimitry Andric assert(PolyDegree >= 1 && "Should be at least affine.");
2314e8d8bef9SDimitry Andric 
2315e8d8bef9SDimitry Andric // The final term will be:
2316e8d8bef9SDimitry Andric // Op_{PolyDegree} * x ^ {PolyDegree}
2317e8d8bef9SDimitry Andric // Where x ^ {PolyDegree} will again require PolyDegree-1 mul operations.
2318e8d8bef9SDimitry Andric // Note that x ^ {PolyDegree} = x * x ^ {PolyDegree-1} so charging for
2319e8d8bef9SDimitry Andric // x ^ {PolyDegree} will give us x ^ {2} .. x ^ {PolyDegree-1} for free.
2320e8d8bef9SDimitry Andric // FIXME: this is conservatively correct, but might be overly pessimistic.
2321e8d8bef9SDimitry Andric Cost += MulCost * (PolyDegree - 1);
2322e8d8bef9SDimitry Andric break;
2323e8d8bef9SDimitry Andric }
2324e8d8bef9SDimitry Andric }
2325e8d8bef9SDimitry Andric 
2326e8d8bef9SDimitry Andric for (auto &CostOp : Operations) {
2327e8d8bef9SDimitry Andric for (auto SCEVOp : enumerate(S->operands())) {
2328e8d8bef9SDimitry Andric // Clamp the index to account for multiple IR operations being chained.
2329e8d8bef9SDimitry Andric size_t MinIdx = std::max(SCEVOp.index(), CostOp.MinIdx);
2330e8d8bef9SDimitry Andric size_t OpIdx = std::min(MinIdx, CostOp.MaxIdx);
2331e8d8bef9SDimitry Andric Worklist.emplace_back(CostOp.Opcode, OpIdx, SCEVOp.value());
2332e8d8bef9SDimitry Andric }
2333e8d8bef9SDimitry Andric }
2334e8d8bef9SDimitry Andric return Cost;
2335e8d8bef9SDimitry Andric }
2336e8d8bef9SDimitry Andric 
23375ffd83dbSDimitry Andric bool SCEVExpander::isHighCostExpansionHelper(
2338e8d8bef9SDimitry Andric const SCEVOperand &WorkItem, Loop *L, const Instruction &At,
2339fe6060f1SDimitry Andric InstructionCost &Cost, unsigned Budget, const TargetTransformInfo &TTI,
2340e8d8bef9SDimitry Andric SmallPtrSetImpl<const SCEV *> &Processed,
2341e8d8bef9SDimitry Andric SmallVectorImpl<SCEVOperand> &Worklist) {
2342fe6060f1SDimitry Andric if (Cost > Budget)
23435ffd83dbSDimitry Andric return true; // Already run out of budget, give up.
23445ffd83dbSDimitry Andric 
2345e8d8bef9SDimitry Andric const SCEV *S = WorkItem.S;
23465ffd83dbSDimitry Andric // Was the cost of expansion of this expression already accounted for?
2347e8d8bef9SDimitry Andric if (!isa<SCEVConstant>(S) && !Processed.insert(S).second)
23485ffd83dbSDimitry Andric return false; // We have already accounted for this expression.
23495ffd83dbSDimitry Andric 
23505ffd83dbSDimitry Andric // If we can find an existing value for this SCEV available at the point "At"
23515ffd83dbSDimitry Andric // then consider the expression cheap.
23525ffd83dbSDimitry Andric if (getRelatedExistingExpansion(S, &At, L))
23535ffd83dbSDimitry Andric return false; // Consider the expression to be free.
23545ffd83dbSDimitry Andric 
23555ffd83dbSDimitry Andric TargetTransformInfo::TargetCostKind CostKind =
2356e8d8bef9SDimitry Andric L->getHeader()->getParent()->hasMinSize()
2357e8d8bef9SDimitry Andric ? TargetTransformInfo::TCK_CodeSize
2358e8d8bef9SDimitry Andric : TargetTransformInfo::TCK_RecipThroughput;
23595ffd83dbSDimitry Andric 
23605ffd83dbSDimitry Andric switch (S->getSCEVType()) {
2361e8d8bef9SDimitry Andric case scCouldNotCompute:
2362e8d8bef9SDimitry Andric llvm_unreachable("Attempt to use a SCEVCouldNotCompute object!");
2363e8d8bef9SDimitry Andric case scUnknown:
2364e8d8bef9SDimitry Andric // Assume to be zero-cost.
2365e8d8bef9SDimitry Andric return false;
2366e8d8bef9SDimitry Andric case scConstant: {
2367e8d8bef9SDimitry Andric // Only evaluate the costs of constants when optimizing for size.
2368e8d8bef9SDimitry Andric if (CostKind != TargetTransformInfo::TCK_CodeSize)
2369e8d8bef9SDimitry Andric return 0;
2370e8d8bef9SDimitry Andric const APInt &Imm = cast<SCEVConstant>(S)->getAPInt();
2371e8d8bef9SDimitry Andric Type *Ty = S->getType();
2372fe6060f1SDimitry Andric Cost += TTI.getIntImmCostInst(
2373e8d8bef9SDimitry Andric WorkItem.ParentOpcode, WorkItem.OperandIdx, Imm, Ty, CostKind);
2374fe6060f1SDimitry Andric return Cost > Budget;
2375e8d8bef9SDimitry Andric }
23765ffd83dbSDimitry Andric case scTruncate:
2377e8d8bef9SDimitry Andric case scPtrToInt:
23785ffd83dbSDimitry Andric case scZeroExtend:
2379e8d8bef9SDimitry Andric case scSignExtend: {
2380fe6060f1SDimitry Andric Cost +=
2381e8d8bef9SDimitry Andric costAndCollectOperands<SCEVCastExpr>(WorkItem, TTI, CostKind, Worklist);
23825ffd83dbSDimitry Andric return false; // Will answer upon next entry into this function.
23835ffd83dbSDimitry Andric }
2384e8d8bef9SDimitry Andric case scUDivExpr: {
23855ffd83dbSDimitry Andric // UDivExpr is very likely a UDiv that ScalarEvolution's HowFarToZero or
23865ffd83dbSDimitry Andric // HowManyLessThans produced to compute a precise expression, rather than a
23875ffd83dbSDimitry Andric // UDiv from the user's code. If we can't find a UDiv in the code with some
23885ffd83dbSDimitry Andric // simple searching, we need to account for its cost.
23895ffd83dbSDimitry Andric 
23905ffd83dbSDimitry Andric // At the beginning of this function we already tried to find an existing
23915ffd83dbSDimitry Andric // value for plain 'S'. Now try to look up 'S + 1' since it is a common
23925ffd83dbSDimitry Andric // pattern involving division. This is just a simple search heuristic.
23935ffd83dbSDimitry Andric if (getRelatedExistingExpansion(
23945ffd83dbSDimitry Andric SE.getAddExpr(S, SE.getConstant(S->getType(), 1)), &At, L))
23955ffd83dbSDimitry Andric return false; // Consider it to be free.
23965ffd83dbSDimitry Andric 
2397fe6060f1SDimitry Andric Cost +=
2398e8d8bef9SDimitry Andric costAndCollectOperands<SCEVUDivExpr>(WorkItem, TTI, CostKind, Worklist);
23995ffd83dbSDimitry Andric return false; // Will answer upon next entry into this function.
24005ffd83dbSDimitry Andric }
24015ffd83dbSDimitry Andric case scAddExpr:
24025ffd83dbSDimitry Andric case scMulExpr:
24035ffd83dbSDimitry Andric case scUMaxExpr:
2404e8d8bef9SDimitry Andric case scSMaxExpr:
24055ffd83dbSDimitry Andric case scUMinExpr:
2406e8d8bef9SDimitry Andric case scSMinExpr: {
2407e8d8bef9SDimitry Andric assert(cast<SCEVNAryExpr>(S)->getNumOperands() > 1 &&
24085ffd83dbSDimitry Andric "Nary expr should have more than 1 operand.");
24095ffd83dbSDimitry Andric // The simple nary expr will require one less op (or pair of ops)
24105ffd83dbSDimitry Andric // than the number of its terms.
2411fe6060f1SDimitry Andric Cost += 2412e8d8bef9SDimitry Andric costAndCollectOperands<SCEVNAryExpr>(WorkItem, TTI, CostKind, Worklist); 2413fe6060f1SDimitry Andric return Cost > Budget; 24145ffd83dbSDimitry Andric } 2415e8d8bef9SDimitry Andric case scAddRecExpr: { 2416e8d8bef9SDimitry Andric assert(cast<SCEVAddRecExpr>(S)->getNumOperands() >= 2 && 2417e8d8bef9SDimitry Andric "Polynomial should be at least linear"); 2418fe6060f1SDimitry Andric Cost += costAndCollectOperands<SCEVAddRecExpr>( 2419e8d8bef9SDimitry Andric WorkItem, TTI, CostKind, Worklist); 2420fe6060f1SDimitry Andric return Cost > Budget; 2421e8d8bef9SDimitry Andric } 2422e8d8bef9SDimitry Andric } 2423e8d8bef9SDimitry Andric llvm_unreachable("Unknown SCEV kind!"); 24245ffd83dbSDimitry Andric } 24255ffd83dbSDimitry Andric 24265ffd83dbSDimitry Andric Value *SCEVExpander::expandCodeForPredicate(const SCEVPredicate *Pred, 24275ffd83dbSDimitry Andric Instruction *IP) { 24285ffd83dbSDimitry Andric assert(IP); 24295ffd83dbSDimitry Andric switch (Pred->getKind()) { 24305ffd83dbSDimitry Andric case SCEVPredicate::P_Union: 24315ffd83dbSDimitry Andric return expandUnionPredicate(cast<SCEVUnionPredicate>(Pred), IP); 24325ffd83dbSDimitry Andric case SCEVPredicate::P_Equal: 24335ffd83dbSDimitry Andric return expandEqualPredicate(cast<SCEVEqualPredicate>(Pred), IP); 24345ffd83dbSDimitry Andric case SCEVPredicate::P_Wrap: { 24355ffd83dbSDimitry Andric auto *AddRecPred = cast<SCEVWrapPredicate>(Pred); 24365ffd83dbSDimitry Andric return expandWrapPredicate(AddRecPred, IP); 24375ffd83dbSDimitry Andric } 24385ffd83dbSDimitry Andric } 24395ffd83dbSDimitry Andric llvm_unreachable("Unknown SCEV predicate type"); 24405ffd83dbSDimitry Andric } 24415ffd83dbSDimitry Andric 24425ffd83dbSDimitry Andric Value *SCEVExpander::expandEqualPredicate(const SCEVEqualPredicate *Pred, 24435ffd83dbSDimitry Andric Instruction *IP) { 2444e8d8bef9SDimitry Andric Value *Expr0 = 2445e8d8bef9SDimitry Andric expandCodeForImpl(Pred->getLHS(), Pred->getLHS()->getType(), IP, false); 2446e8d8bef9SDimitry Andric Value *Expr1 = 2447e8d8bef9SDimitry Andric expandCodeForImpl(Pred->getRHS(), Pred->getRHS()->getType(), IP, false); 24485ffd83dbSDimitry Andric 24495ffd83dbSDimitry Andric Builder.SetInsertPoint(IP); 24505ffd83dbSDimitry Andric auto *I = Builder.CreateICmpNE(Expr0, Expr1, "ident.check"); 24515ffd83dbSDimitry Andric return I; 24525ffd83dbSDimitry Andric } 24535ffd83dbSDimitry Andric 24545ffd83dbSDimitry Andric Value *SCEVExpander::generateOverflowCheck(const SCEVAddRecExpr *AR, 24555ffd83dbSDimitry Andric Instruction *Loc, bool Signed) { 24565ffd83dbSDimitry Andric assert(AR->isAffine() && "Cannot generate RT check for " 24575ffd83dbSDimitry Andric "non-affine expression"); 24585ffd83dbSDimitry Andric 24595ffd83dbSDimitry Andric SCEVUnionPredicate Pred; 24605ffd83dbSDimitry Andric const SCEV *ExitCount = 24615ffd83dbSDimitry Andric SE.getPredicatedBackedgeTakenCount(AR->getLoop(), Pred); 24625ffd83dbSDimitry Andric 2463e8d8bef9SDimitry Andric assert(!isa<SCEVCouldNotCompute>(ExitCount) && "Invalid loop count"); 24645ffd83dbSDimitry Andric 24655ffd83dbSDimitry Andric const SCEV *Step = AR->getStepRecurrence(SE); 24665ffd83dbSDimitry Andric const SCEV *Start = AR->getStart(); 24675ffd83dbSDimitry Andric 24685ffd83dbSDimitry Andric Type *ARTy = AR->getType(); 24695ffd83dbSDimitry Andric unsigned SrcBits = SE.getTypeSizeInBits(ExitCount->getType()); 24705ffd83dbSDimitry Andric unsigned DstBits = SE.getTypeSizeInBits(ARTy); 24715ffd83dbSDimitry Andric 24725ffd83dbSDimitry 
Andric // The expression {Start,+,Step} has nusw/nssw if
24735ffd83dbSDimitry Andric // Step < 0, Start - |Step| * Backedge <= Start
24745ffd83dbSDimitry Andric // Step >= 0, Start + |Step| * Backedge > Start
24755ffd83dbSDimitry Andric // and |Step| * Backedge doesn't unsigned overflow.
24765ffd83dbSDimitry Andric 
24775ffd83dbSDimitry Andric IntegerType *CountTy = IntegerType::get(Loc->getContext(), SrcBits);
24785ffd83dbSDimitry Andric Builder.SetInsertPoint(Loc);
2479e8d8bef9SDimitry Andric Value *TripCountVal = expandCodeForImpl(ExitCount, CountTy, Loc, false);
24805ffd83dbSDimitry Andric 
24815ffd83dbSDimitry Andric IntegerType *Ty =
24825ffd83dbSDimitry Andric IntegerType::get(Loc->getContext(), SE.getTypeSizeInBits(ARTy));
24835ffd83dbSDimitry Andric 
2484e8d8bef9SDimitry Andric Value *StepValue = expandCodeForImpl(Step, Ty, Loc, false);
2485e8d8bef9SDimitry Andric Value *NegStepValue =
2486e8d8bef9SDimitry Andric expandCodeForImpl(SE.getNegativeSCEV(Step), Ty, Loc, false);
2487*349cc55cSDimitry Andric Value *StartValue = expandCodeForImpl(Start, ARTy, Loc, false);
24885ffd83dbSDimitry Andric 
24895ffd83dbSDimitry Andric ConstantInt *Zero =
2490*349cc55cSDimitry Andric ConstantInt::get(Loc->getContext(), APInt::getZero(DstBits));
24915ffd83dbSDimitry Andric 
24925ffd83dbSDimitry Andric Builder.SetInsertPoint(Loc);
24935ffd83dbSDimitry Andric // Compute |Step|
24945ffd83dbSDimitry Andric Value *StepCompare = Builder.CreateICmp(ICmpInst::ICMP_SLT, StepValue, Zero);
24955ffd83dbSDimitry Andric Value *AbsStep = Builder.CreateSelect(StepCompare, NegStepValue, StepValue);
24965ffd83dbSDimitry Andric 
24975ffd83dbSDimitry Andric // Get the backedge-taken count and truncate or extend it to the AR type.
24985ffd83dbSDimitry Andric Value *TruncTripCount = Builder.CreateZExtOrTrunc(TripCountVal, Ty);
24995ffd83dbSDimitry Andric 
25005ffd83dbSDimitry Andric // Compute |Step| * Backedge
2501*349cc55cSDimitry Andric Value *MulV, *OfMul;
2502*349cc55cSDimitry Andric if (Step->isOne()) {
2503*349cc55cSDimitry Andric // Special-case Step of one. The potentially-costly `umul_with_overflow` isn't
2504*349cc55cSDimitry Andric // needed and there is never an overflow, so to avoid artificially inflating
2505*349cc55cSDimitry Andric // the cost of the check, directly emit the optimized IR.
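// With a step of one, |Step| * Backedge is just the backedge-taken count, so
// the product can never overflow and the overflow bit is a constant false.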
2506*349cc55cSDimitry Andric MulV = TruncTripCount; 2507*349cc55cSDimitry Andric OfMul = ConstantInt::getFalse(MulV->getContext()); 2508*349cc55cSDimitry Andric } else { 2509*349cc55cSDimitry Andric auto *MulF = Intrinsic::getDeclaration(Loc->getModule(), 2510*349cc55cSDimitry Andric Intrinsic::umul_with_overflow, Ty); 25115ffd83dbSDimitry Andric CallInst *Mul = Builder.CreateCall(MulF, {AbsStep, TruncTripCount}, "mul"); 2512*349cc55cSDimitry Andric MulV = Builder.CreateExtractValue(Mul, 0, "mul.result"); 2513*349cc55cSDimitry Andric OfMul = Builder.CreateExtractValue(Mul, 1, "mul.overflow"); 2514*349cc55cSDimitry Andric } 25155ffd83dbSDimitry Andric 25165ffd83dbSDimitry Andric // Compute: 25175ffd83dbSDimitry Andric // Start + |Step| * Backedge < Start 25185ffd83dbSDimitry Andric // Start - |Step| * Backedge > Start 25195ffd83dbSDimitry Andric Value *Add = nullptr, *Sub = nullptr; 2520*349cc55cSDimitry Andric if (PointerType *ARPtrTy = dyn_cast<PointerType>(ARTy)) { 2521*349cc55cSDimitry Andric StartValue = InsertNoopCastOfTo( 2522*349cc55cSDimitry Andric StartValue, Builder.getInt8PtrTy(ARPtrTy->getAddressSpace())); 2523*349cc55cSDimitry Andric Value *NegMulV = Builder.CreateNeg(MulV); 2524*349cc55cSDimitry Andric Add = Builder.CreateGEP(Builder.getInt8Ty(), StartValue, MulV); 2525*349cc55cSDimitry Andric Sub = Builder.CreateGEP(Builder.getInt8Ty(), StartValue, NegMulV); 25265ffd83dbSDimitry Andric } else { 25275ffd83dbSDimitry Andric Add = Builder.CreateAdd(StartValue, MulV); 25285ffd83dbSDimitry Andric Sub = Builder.CreateSub(StartValue, MulV); 25295ffd83dbSDimitry Andric } 25305ffd83dbSDimitry Andric 25315ffd83dbSDimitry Andric Value *EndCompareGT = Builder.CreateICmp( 25325ffd83dbSDimitry Andric Signed ? ICmpInst::ICMP_SGT : ICmpInst::ICMP_UGT, Sub, StartValue); 25335ffd83dbSDimitry Andric 25345ffd83dbSDimitry Andric Value *EndCompareLT = Builder.CreateICmp( 25355ffd83dbSDimitry Andric Signed ? ICmpInst::ICMP_SLT : ICmpInst::ICMP_ULT, Add, StartValue); 25365ffd83dbSDimitry Andric 25375ffd83dbSDimitry Andric // Select the answer based on the sign of Step. 25385ffd83dbSDimitry Andric Value *EndCheck = 25395ffd83dbSDimitry Andric Builder.CreateSelect(StepCompare, EndCompareGT, EndCompareLT); 25405ffd83dbSDimitry Andric 25415ffd83dbSDimitry Andric // If the backedge taken count type is larger than the AR type, 25425ffd83dbSDimitry Andric // check that we don't drop any bits by truncating it. If we are 25435ffd83dbSDimitry Andric // dropping bits, then we have overflow (unless the step is zero). 
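// The step-is-zero exception is handled by AND-ing the truncation test with
// (Step != 0) below: a zero step makes |Step| * Backedge zero regardless of
// how many trip-count bits were dropped.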
25445ffd83dbSDimitry Andric if (SE.getTypeSizeInBits(CountTy) > SE.getTypeSizeInBits(Ty)) { 25455ffd83dbSDimitry Andric auto MaxVal = APInt::getMaxValue(DstBits).zext(SrcBits); 25465ffd83dbSDimitry Andric auto *BackedgeCheck = 25475ffd83dbSDimitry Andric Builder.CreateICmp(ICmpInst::ICMP_UGT, TripCountVal, 25485ffd83dbSDimitry Andric ConstantInt::get(Loc->getContext(), MaxVal)); 25495ffd83dbSDimitry Andric BackedgeCheck = Builder.CreateAnd( 25505ffd83dbSDimitry Andric BackedgeCheck, Builder.CreateICmp(ICmpInst::ICMP_NE, StepValue, Zero)); 25515ffd83dbSDimitry Andric 25525ffd83dbSDimitry Andric EndCheck = Builder.CreateOr(EndCheck, BackedgeCheck); 25535ffd83dbSDimitry Andric } 25545ffd83dbSDimitry Andric 2555e8d8bef9SDimitry Andric return Builder.CreateOr(EndCheck, OfMul); 25565ffd83dbSDimitry Andric } 25575ffd83dbSDimitry Andric 25585ffd83dbSDimitry Andric Value *SCEVExpander::expandWrapPredicate(const SCEVWrapPredicate *Pred, 25595ffd83dbSDimitry Andric Instruction *IP) { 25605ffd83dbSDimitry Andric const auto *A = cast<SCEVAddRecExpr>(Pred->getExpr()); 25615ffd83dbSDimitry Andric Value *NSSWCheck = nullptr, *NUSWCheck = nullptr; 25625ffd83dbSDimitry Andric 25635ffd83dbSDimitry Andric // Add a check for NUSW 25645ffd83dbSDimitry Andric if (Pred->getFlags() & SCEVWrapPredicate::IncrementNUSW) 25655ffd83dbSDimitry Andric NUSWCheck = generateOverflowCheck(A, IP, false); 25665ffd83dbSDimitry Andric 25675ffd83dbSDimitry Andric // Add a check for NSSW 25685ffd83dbSDimitry Andric if (Pred->getFlags() & SCEVWrapPredicate::IncrementNSSW) 25695ffd83dbSDimitry Andric NSSWCheck = generateOverflowCheck(A, IP, true); 25705ffd83dbSDimitry Andric 25715ffd83dbSDimitry Andric if (NUSWCheck && NSSWCheck) 25725ffd83dbSDimitry Andric return Builder.CreateOr(NUSWCheck, NSSWCheck); 25735ffd83dbSDimitry Andric 25745ffd83dbSDimitry Andric if (NUSWCheck) 25755ffd83dbSDimitry Andric return NUSWCheck; 25765ffd83dbSDimitry Andric 25775ffd83dbSDimitry Andric if (NSSWCheck) 25785ffd83dbSDimitry Andric return NSSWCheck; 25795ffd83dbSDimitry Andric 25805ffd83dbSDimitry Andric return ConstantInt::getFalse(IP->getContext()); 25815ffd83dbSDimitry Andric } 25825ffd83dbSDimitry Andric 25835ffd83dbSDimitry Andric Value *SCEVExpander::expandUnionPredicate(const SCEVUnionPredicate *Union, 25845ffd83dbSDimitry Andric Instruction *IP) { 25855ffd83dbSDimitry Andric auto *BoolType = IntegerType::get(IP->getContext(), 1); 25865ffd83dbSDimitry Andric Value *Check = ConstantInt::getNullValue(BoolType); 25875ffd83dbSDimitry Andric 25885ffd83dbSDimitry Andric // Loop over all checks in this set. 
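// The union check fails whenever any member predicate's check fails, so the
// individual checks are simply OR-ed together.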
25895ffd83dbSDimitry Andric for (auto Pred : Union->getPredicates()) { 25905ffd83dbSDimitry Andric auto *NextCheck = expandCodeForPredicate(Pred, IP); 25915ffd83dbSDimitry Andric Builder.SetInsertPoint(IP); 25925ffd83dbSDimitry Andric Check = Builder.CreateOr(Check, NextCheck); 25935ffd83dbSDimitry Andric } 25945ffd83dbSDimitry Andric 25955ffd83dbSDimitry Andric return Check; 25965ffd83dbSDimitry Andric } 25975ffd83dbSDimitry Andric 2598e8d8bef9SDimitry Andric Value *SCEVExpander::fixupLCSSAFormFor(Instruction *User, unsigned OpIdx) { 2599e8d8bef9SDimitry Andric assert(PreserveLCSSA); 2600e8d8bef9SDimitry Andric SmallVector<Instruction *, 1> ToUpdate; 2601e8d8bef9SDimitry Andric 2602e8d8bef9SDimitry Andric auto *OpV = User->getOperand(OpIdx); 2603e8d8bef9SDimitry Andric auto *OpI = dyn_cast<Instruction>(OpV); 2604e8d8bef9SDimitry Andric if (!OpI) 2605e8d8bef9SDimitry Andric return OpV; 2606e8d8bef9SDimitry Andric 2607e8d8bef9SDimitry Andric Loop *DefLoop = SE.LI.getLoopFor(OpI->getParent()); 2608e8d8bef9SDimitry Andric Loop *UseLoop = SE.LI.getLoopFor(User->getParent()); 2609e8d8bef9SDimitry Andric if (!DefLoop || UseLoop == DefLoop || DefLoop->contains(UseLoop)) 2610e8d8bef9SDimitry Andric return OpV; 2611e8d8bef9SDimitry Andric 2612e8d8bef9SDimitry Andric ToUpdate.push_back(OpI); 2613e8d8bef9SDimitry Andric SmallVector<PHINode *, 16> PHIsToRemove; 2614e8d8bef9SDimitry Andric formLCSSAForInstructions(ToUpdate, SE.DT, SE.LI, &SE, Builder, &PHIsToRemove); 2615e8d8bef9SDimitry Andric for (PHINode *PN : PHIsToRemove) { 2616e8d8bef9SDimitry Andric if (!PN->use_empty()) 2617e8d8bef9SDimitry Andric continue; 2618e8d8bef9SDimitry Andric InsertedValues.erase(PN); 2619e8d8bef9SDimitry Andric InsertedPostIncValues.erase(PN); 2620e8d8bef9SDimitry Andric PN->eraseFromParent(); 2621e8d8bef9SDimitry Andric } 2622e8d8bef9SDimitry Andric 2623e8d8bef9SDimitry Andric return User->getOperand(OpIdx); 2624e8d8bef9SDimitry Andric } 2625e8d8bef9SDimitry Andric 26265ffd83dbSDimitry Andric namespace { 26275ffd83dbSDimitry Andric // Search for a SCEV subexpression that is not safe to expand. Any expression 26285ffd83dbSDimitry Andric // that may expand to a !isSafeToSpeculativelyExecute value is unsafe, namely 26295ffd83dbSDimitry Andric // UDiv expressions. We don't know if the UDiv is derived from an IR divide 26305ffd83dbSDimitry Andric // instruction, but the important thing is that we prove the denominator is 26315ffd83dbSDimitry Andric // nonzero before expansion. 26325ffd83dbSDimitry Andric // 26335ffd83dbSDimitry Andric // IVUsers already checks that IV-derived expressions are safe. So this check is 26345ffd83dbSDimitry Andric // only needed when the expression includes some subexpression that is not IV 26355ffd83dbSDimitry Andric // derived. 26365ffd83dbSDimitry Andric // 26375ffd83dbSDimitry Andric // Currently, we only allow division by a nonzero constant here. If this is 26385ffd83dbSDimitry Andric // inadequate, we could easily allow division by SCEVUnknown by using 26395ffd83dbSDimitry Andric // ValueTracking to check isKnownNonZero(). 26405ffd83dbSDimitry Andric // 26415ffd83dbSDimitry Andric // We cannot generally expand recurrences unless the step dominates the loop 26425ffd83dbSDimitry Andric // header. The expander handles the special case of affine recurrences by 26435ffd83dbSDimitry Andric // scaling the recurrence outside the loop, but this technique isn't generally 26445ffd83dbSDimitry Andric // applicable. 
Expanding a nested recurrence outside a loop requires computing 26455ffd83dbSDimitry Andric // binomial coefficients. This could be done, but the recurrence has to be in a 26465ffd83dbSDimitry Andric // perfectly reduced form, which can't be guaranteed. 26475ffd83dbSDimitry Andric struct SCEVFindUnsafe { 26485ffd83dbSDimitry Andric ScalarEvolution &SE; 2649*349cc55cSDimitry Andric bool CanonicalMode; 26505ffd83dbSDimitry Andric bool IsUnsafe; 26515ffd83dbSDimitry Andric 2652*349cc55cSDimitry Andric SCEVFindUnsafe(ScalarEvolution &SE, bool CanonicalMode) 2653*349cc55cSDimitry Andric : SE(SE), CanonicalMode(CanonicalMode), IsUnsafe(false) {} 26545ffd83dbSDimitry Andric 26555ffd83dbSDimitry Andric bool follow(const SCEV *S) { 26565ffd83dbSDimitry Andric if (const SCEVUDivExpr *D = dyn_cast<SCEVUDivExpr>(S)) { 26575ffd83dbSDimitry Andric const SCEVConstant *SC = dyn_cast<SCEVConstant>(D->getRHS()); 26585ffd83dbSDimitry Andric if (!SC || SC->getValue()->isZero()) { 26595ffd83dbSDimitry Andric IsUnsafe = true; 26605ffd83dbSDimitry Andric return false; 26615ffd83dbSDimitry Andric } 26625ffd83dbSDimitry Andric } 26635ffd83dbSDimitry Andric if (const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(S)) { 26645ffd83dbSDimitry Andric const SCEV *Step = AR->getStepRecurrence(SE); 26655ffd83dbSDimitry Andric if (!AR->isAffine() && !SE.dominates(Step, AR->getLoop()->getHeader())) { 26665ffd83dbSDimitry Andric IsUnsafe = true; 26675ffd83dbSDimitry Andric return false; 26685ffd83dbSDimitry Andric } 2669*349cc55cSDimitry Andric 2670*349cc55cSDimitry Andric // For non-affine addrecs or in non-canonical mode we need a preheader 2671*349cc55cSDimitry Andric // to insert into. 2672*349cc55cSDimitry Andric if (!AR->getLoop()->getLoopPreheader() && 2673*349cc55cSDimitry Andric (!CanonicalMode || !AR->isAffine())) { 2674*349cc55cSDimitry Andric IsUnsafe = true; 2675*349cc55cSDimitry Andric return false; 2676*349cc55cSDimitry Andric } 26775ffd83dbSDimitry Andric } 26785ffd83dbSDimitry Andric return true; 26795ffd83dbSDimitry Andric } 26805ffd83dbSDimitry Andric bool isDone() const { return IsUnsafe; } 26815ffd83dbSDimitry Andric }; 26825ffd83dbSDimitry Andric } 26835ffd83dbSDimitry Andric 26845ffd83dbSDimitry Andric namespace llvm { 2685*349cc55cSDimitry Andric bool isSafeToExpand(const SCEV *S, ScalarEvolution &SE, bool CanonicalMode) { 2686*349cc55cSDimitry Andric SCEVFindUnsafe Search(SE, CanonicalMode); 26875ffd83dbSDimitry Andric visitAll(S, Search); 26885ffd83dbSDimitry Andric return !Search.IsUnsafe; 26895ffd83dbSDimitry Andric } 26905ffd83dbSDimitry Andric 26915ffd83dbSDimitry Andric bool isSafeToExpandAt(const SCEV *S, const Instruction *InsertionPoint, 26925ffd83dbSDimitry Andric ScalarEvolution &SE) { 26935ffd83dbSDimitry Andric if (!isSafeToExpand(S, SE)) 26945ffd83dbSDimitry Andric return false; 26955ffd83dbSDimitry Andric // We have to prove that the expanded site of S dominates InsertionPoint. 26965ffd83dbSDimitry Andric // This is easy when not in the same block, but hard when S is an instruction 26975ffd83dbSDimitry Andric // to be expanded somewhere inside the same block as our insertion point. 26985ffd83dbSDimitry Andric // What we really need here is something analogous to an OrderedBasicBlock, 26995ffd83dbSDimitry Andric // but for the moment, we paper over the problem by handling two common and 27005ffd83dbSDimitry Andric // cheap to check cases. 
27015ffd83dbSDimitry Andric if (SE.properlyDominates(S, InsertionPoint->getParent())) 27025ffd83dbSDimitry Andric return true; 27035ffd83dbSDimitry Andric if (SE.dominates(S, InsertionPoint->getParent())) { 27045ffd83dbSDimitry Andric if (InsertionPoint->getParent()->getTerminator() == InsertionPoint) 27055ffd83dbSDimitry Andric return true; 27065ffd83dbSDimitry Andric if (const SCEVUnknown *U = dyn_cast<SCEVUnknown>(S)) 2707fe6060f1SDimitry Andric if (llvm::is_contained(InsertionPoint->operand_values(), U->getValue())) 27085ffd83dbSDimitry Andric return true; 27095ffd83dbSDimitry Andric } 27105ffd83dbSDimitry Andric return false; 27115ffd83dbSDimitry Andric } 2712e8d8bef9SDimitry Andric 2713fe6060f1SDimitry Andric void SCEVExpanderCleaner::cleanup() { 2714e8d8bef9SDimitry Andric // Result is used, nothing to remove. 2715e8d8bef9SDimitry Andric if (ResultUsed) 2716e8d8bef9SDimitry Andric return; 2717e8d8bef9SDimitry Andric 2718e8d8bef9SDimitry Andric auto InsertedInstructions = Expander.getAllInsertedInstructions(); 2719e8d8bef9SDimitry Andric #ifndef NDEBUG 2720e8d8bef9SDimitry Andric SmallPtrSet<Instruction *, 8> InsertedSet(InsertedInstructions.begin(), 2721e8d8bef9SDimitry Andric InsertedInstructions.end()); 2722e8d8bef9SDimitry Andric (void)InsertedSet; 2723e8d8bef9SDimitry Andric #endif 2724e8d8bef9SDimitry Andric // Remove sets with value handles. 2725e8d8bef9SDimitry Andric Expander.clear(); 2726e8d8bef9SDimitry Andric 2727e8d8bef9SDimitry Andric // Sort so that earlier instructions do not dominate later instructions. 2728e8d8bef9SDimitry Andric stable_sort(InsertedInstructions, [this](Instruction *A, Instruction *B) { 2729e8d8bef9SDimitry Andric return DT.dominates(B, A); 2730e8d8bef9SDimitry Andric }); 2731e8d8bef9SDimitry Andric // Remove all inserted instructions. 2732e8d8bef9SDimitry Andric for (Instruction *I : InsertedInstructions) { 2733e8d8bef9SDimitry Andric 2734e8d8bef9SDimitry Andric #ifndef NDEBUG 2735e8d8bef9SDimitry Andric assert(all_of(I->users(), 2736e8d8bef9SDimitry Andric [&InsertedSet](Value *U) { 2737e8d8bef9SDimitry Andric return InsertedSet.contains(cast<Instruction>(U)); 2738e8d8bef9SDimitry Andric }) && 2739e8d8bef9SDimitry Andric "removed instruction should only be used by instructions inserted " 2740e8d8bef9SDimitry Andric "during expansion"); 2741e8d8bef9SDimitry Andric #endif 2742e8d8bef9SDimitry Andric assert(!I->getType()->isVoidTy() && 2743e8d8bef9SDimitry Andric "inserted instruction should have non-void types"); 2744e8d8bef9SDimitry Andric I->replaceAllUsesWith(UndefValue::get(I->getType())); 2745e8d8bef9SDimitry Andric I->eraseFromParent(); 2746e8d8bef9SDimitry Andric } 2747e8d8bef9SDimitry Andric } 27485ffd83dbSDimitry Andric } 2749