//===- ScalarEvolutionExpander.cpp - Scalar Evolution Analysis -----------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains the implementation of the scalar evolution expander,
// which is used to generate the code corresponding to a given scalar evolution
// expression.
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Utils/ScalarEvolutionExpander.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/PatternMatch.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/LoopUtils.h"

#ifdef LLVM_ENABLE_ABI_BREAKING_CHECKS
#define SCEV_DEBUG_WITH_TYPE(TYPE, X) DEBUG_WITH_TYPE(TYPE, X)
#else
#define SCEV_DEBUG_WITH_TYPE(TYPE, X)
#endif

using namespace llvm;

cl::opt<unsigned> llvm::SCEVCheapExpansionBudget(
    "scev-cheap-expansion-budget", cl::Hidden, cl::init(4),
    cl::desc("When performing SCEV expansion only if it is cheap to do, this "
             "controls the budget that is considered cheap (default = 4)"));

using namespace PatternMatch;

/// ReuseOrCreateCast - Arrange for there to be a cast of V to Ty at IP,
/// reusing an existing cast if a suitable one (= dominating IP) exists, or
/// creating a new one.
Value *SCEVExpander::ReuseOrCreateCast(Value *V, Type *Ty,
                                       Instruction::CastOps Op,
                                       BasicBlock::iterator IP) {
  // This function must be called with the builder having a valid insertion
  // point. It doesn't need to be the actual IP where the uses of the returned
  // cast will be added, but it must dominate such IP.
  // We use this precondition to produce a cast that will dominate all its
  // uses. In particular, this is crucial for the case where the builder's
  // insertion point *is* the point where we were asked to put the cast.
  // Since we don't know the builder's insertion point is actually
  // where the uses will be added (only that it dominates it), we are
  // not allowed to move it.
  BasicBlock::iterator BIP = Builder.GetInsertPoint();

  Value *Ret = nullptr;

  // Check to see if there is already a cast!
  for (User *U : V->users()) {
    if (U->getType() != Ty)
      continue;
    CastInst *CI = dyn_cast<CastInst>(U);
    if (!CI || CI->getOpcode() != Op)
      continue;

    // Found a suitable cast that is at IP or comes before IP. Use it. Note that
    // the cast must also properly dominate the Builder's insertion point.
    if (IP->getParent() == CI->getParent() && &*BIP != CI &&
        (&*IP == CI || CI->comesBefore(&*IP))) {
      Ret = CI;
      break;
    }
  }

  // Create a new cast.
  if (!Ret) {
    SCEVInsertPointGuard Guard(Builder, this);
    Builder.SetInsertPoint(&*IP);
    Ret = Builder.CreateCast(Op, V, Ty, V->getName());
  }

  // We assert at the end of the function since IP might point to an
  // instruction with different dominance properties than a cast
  // (an invoke for example) and not dominate BIP (but the cast does).
  assert(!isa<Instruction>(Ret) ||
         SE.DT.dominates(cast<Instruction>(Ret), &*BIP));

  return Ret;
}

BasicBlock::iterator
SCEVExpander::findInsertPointAfter(Instruction *I,
                                   Instruction *MustDominate) const {
  BasicBlock::iterator IP = ++I->getIterator();
  if (auto *II = dyn_cast<InvokeInst>(I))
    IP = II->getNormalDest()->begin();

  while (isa<PHINode>(IP))
    ++IP;

  if (isa<FuncletPadInst>(IP) || isa<LandingPadInst>(IP)) {
    ++IP;
  } else if (isa<CatchSwitchInst>(IP)) {
    IP = MustDominate->getParent()->getFirstInsertionPt();
  } else {
    assert(!IP->isEHPad() && "unexpected eh pad!");
  }

  // Adjust insert point to be after instructions inserted by the expander, so
  // we can re-use already inserted instructions. Avoid skipping past the
  // original \p MustDominate, in case it is an inserted instruction.
  while (isInsertedInstruction(&*IP) && &*IP != MustDominate)
    ++IP;

  return IP;
}

BasicBlock::iterator
SCEVExpander::GetOptimalInsertionPointForCastOf(Value *V) const {
  // Cast the argument at the beginning of the entry block, after
  // any bitcasts of other arguments.
  if (Argument *A = dyn_cast<Argument>(V)) {
    BasicBlock::iterator IP = A->getParent()->getEntryBlock().begin();
    while ((isa<BitCastInst>(IP) &&
            isa<Argument>(cast<BitCastInst>(IP)->getOperand(0)) &&
            cast<BitCastInst>(IP)->getOperand(0) != A) ||
           isa<DbgInfoIntrinsic>(IP))
      ++IP;
    return IP;
  }

  // Cast the instruction immediately after the instruction.
  if (Instruction *I = dyn_cast<Instruction>(V))
    return findInsertPointAfter(I, &*Builder.GetInsertPoint());

  // Otherwise, this must be some kind of a constant,
  // so let's plop this cast into the function's entry block.
  assert(isa<Constant>(V) &&
         "Expected the cast argument to be a global/constant");
  return Builder.GetInsertBlock()
      ->getParent()
      ->getEntryBlock()
      .getFirstInsertionPt();
}

/// InsertNoopCastOfTo - Insert a cast of V to the specified type,
/// which must be possible with a noop cast, doing what we can to share
/// the casts.
Value *SCEVExpander::InsertNoopCastOfTo(Value *V, Type *Ty) {
  Instruction::CastOps Op = CastInst::getCastOpcode(V, false, Ty, false);
  assert((Op == Instruction::BitCast ||
          Op == Instruction::PtrToInt ||
          Op == Instruction::IntToPtr) &&
         "InsertNoopCastOfTo cannot perform non-noop casts!");
  assert(SE.getTypeSizeInBits(V->getType()) == SE.getTypeSizeInBits(Ty) &&
         "InsertNoopCastOfTo cannot change sizes!");

  // inttoptr only works for integral pointers. For non-integral pointers, we
  // can create a GEP on i8* null with the integral value as index. Note that
  // it is safe to use GEP of null instead of inttoptr here, because only
  // expressions already based on a GEP of null should be converted to pointers
  // during expansion.
  if (Op == Instruction::IntToPtr) {
    auto *PtrTy = cast<PointerType>(Ty);
    if (DL.isNonIntegralPointerType(PtrTy)) {
      auto *Int8PtrTy = Builder.getInt8PtrTy(PtrTy->getAddressSpace());
      assert(DL.getTypeAllocSize(Int8PtrTy->getElementType()) == 1 &&
             "alloc size of i8 must be 1 byte for the GEP to be correct");
      auto *GEP = Builder.CreateGEP(
          Builder.getInt8Ty(), Constant::getNullValue(Int8PtrTy), V, "uglygep");
      return Builder.CreateBitCast(GEP, Ty);
    }
  }
  // Short-circuit unnecessary bitcasts.
  if (Op == Instruction::BitCast) {
    if (V->getType() == Ty)
      return V;
    if (CastInst *CI = dyn_cast<CastInst>(V)) {
      if (CI->getOperand(0)->getType() == Ty)
        return CI->getOperand(0);
    }
  }
  // Short-circuit unnecessary inttoptr<->ptrtoint casts.
  if ((Op == Instruction::PtrToInt || Op == Instruction::IntToPtr) &&
      SE.getTypeSizeInBits(Ty) == SE.getTypeSizeInBits(V->getType())) {
    if (CastInst *CI = dyn_cast<CastInst>(V))
      if ((CI->getOpcode() == Instruction::PtrToInt ||
           CI->getOpcode() == Instruction::IntToPtr) &&
          SE.getTypeSizeInBits(CI->getType()) ==
              SE.getTypeSizeInBits(CI->getOperand(0)->getType()))
        return CI->getOperand(0);
    if (ConstantExpr *CE = dyn_cast<ConstantExpr>(V))
      if ((CE->getOpcode() == Instruction::PtrToInt ||
           CE->getOpcode() == Instruction::IntToPtr) &&
          SE.getTypeSizeInBits(CE->getType()) ==
              SE.getTypeSizeInBits(CE->getOperand(0)->getType()))
        return CE->getOperand(0);
  }

  // Fold a cast of a constant.
  if (Constant *C = dyn_cast<Constant>(V))
    return ConstantExpr::getCast(Op, C, Ty);

  // Try to reuse existing cast, or insert one.
  return ReuseOrCreateCast(V, Ty, Op, GetOptimalInsertionPointForCastOf(V));
}

/// InsertBinop - Insert the specified binary operator, doing a small amount
/// of work to avoid inserting an obviously redundant operation, and hoisting
/// to an outer loop when the opportunity is there and it is safe.
Value *SCEVExpander::InsertBinop(Instruction::BinaryOps Opcode,
                                 Value *LHS, Value *RHS,
                                 SCEV::NoWrapFlags Flags, bool IsSafeToHoist) {
  // Fold a binop with constant operands.
  if (Constant *CLHS = dyn_cast<Constant>(LHS))
    if (Constant *CRHS = dyn_cast<Constant>(RHS))
      return ConstantExpr::get(Opcode, CLHS, CRHS);

  // Do a quick scan to see if we have this binop nearby. If so, reuse it.
  unsigned ScanLimit = 6;
  BasicBlock::iterator BlockBegin = Builder.GetInsertBlock()->begin();
  // Scanning starts from the last instruction before the insertion point.
  BasicBlock::iterator IP = Builder.GetInsertPoint();
  if (IP != BlockBegin) {
    --IP;
    for (; ScanLimit; --IP, --ScanLimit) {
      // Don't count dbg.value against the ScanLimit, to avoid perturbing the
      // generated code.
      if (isa<DbgInfoIntrinsic>(IP))
        ScanLimit++;

      auto canGenerateIncompatiblePoison = [&Flags](Instruction *I) {
        // Ensure that no-wrap flags match.
        if (isa<OverflowingBinaryOperator>(I)) {
          if (I->hasNoSignedWrap() != (Flags & SCEV::FlagNSW))
            return true;
          if (I->hasNoUnsignedWrap() != (Flags & SCEV::FlagNUW))
            return true;
        }
        // Conservatively, do not use any instruction which has any of exact
        // flags installed.
        if (isa<PossiblyExactOperator>(I) && I->isExact())
          return true;
        return false;
      };
      if (IP->getOpcode() == (unsigned)Opcode && IP->getOperand(0) == LHS &&
          IP->getOperand(1) == RHS && !canGenerateIncompatiblePoison(&*IP))
        return &*IP;
      if (IP == BlockBegin) break;
    }
  }

  // Save the original insertion point so we can restore it when we're done.
  DebugLoc Loc = Builder.GetInsertPoint()->getDebugLoc();
  SCEVInsertPointGuard Guard(Builder, this);

  if (IsSafeToHoist) {
    // Move the insertion point out of as many loops as we can.
    while (const Loop *L = SE.LI.getLoopFor(Builder.GetInsertBlock())) {
      if (!L->isLoopInvariant(LHS) || !L->isLoopInvariant(RHS)) break;
      BasicBlock *Preheader = L->getLoopPreheader();
      if (!Preheader) break;

      // Ok, move up a level.
      Builder.SetInsertPoint(Preheader->getTerminator());
    }
  }

  // If we haven't found this binop, insert it.
  Instruction *BO = cast<Instruction>(Builder.CreateBinOp(Opcode, LHS, RHS));
  BO->setDebugLoc(Loc);
  if (Flags & SCEV::FlagNUW)
    BO->setHasNoUnsignedWrap();
  if (Flags & SCEV::FlagNSW)
    BO->setHasNoSignedWrap();

  return BO;
}

/// FactorOutConstant - Test if S is divisible by Factor, using signed
/// division. If so, update S with Factor divided out and return true.
/// S need not be evenly divisible if a reasonable remainder can be
/// computed.
static bool FactorOutConstant(const SCEV *&S, const SCEV *&Remainder,
                              const SCEV *Factor, ScalarEvolution &SE,
                              const DataLayout &DL) {
  // Everything is divisible by one.
  if (Factor->isOne())
    return true;

  // x/x == 1.
  if (S == Factor) {
    S = SE.getConstant(S->getType(), 1);
    return true;
  }

  // For a Constant, check for a multiple of the given factor.
  if (const SCEVConstant *C = dyn_cast<SCEVConstant>(S)) {
    // 0/x == 0.
    if (C->isZero())
      return true;
    // Check for divisibility.
    if (const SCEVConstant *FC = dyn_cast<SCEVConstant>(Factor)) {
      ConstantInt *CI =
          ConstantInt::get(SE.getContext(), C->getAPInt().sdiv(FC->getAPInt()));
      // If the quotient is zero and the remainder is non-zero, reject
      // the value at this scale. It will be considered for subsequent
      // smaller scales.
      if (!CI->isZero()) {
        const SCEV *Div = SE.getConstant(CI);
        S = Div;
        Remainder = SE.getAddExpr(
            Remainder, SE.getConstant(C->getAPInt().srem(FC->getAPInt())));
        return true;
      }
    }
  }

  // In a Mul, check if there is a constant operand which is a multiple
  // of the given factor.
  if (const SCEVMulExpr *M = dyn_cast<SCEVMulExpr>(S)) {
    // Size is known, check if there is a constant operand which is a multiple
    // of the given factor. If so, we can factor it.
    if (const SCEVConstant *FC = dyn_cast<SCEVConstant>(Factor))
      if (const SCEVConstant *C = dyn_cast<SCEVConstant>(M->getOperand(0)))
        if (!C->getAPInt().srem(FC->getAPInt())) {
          SmallVector<const SCEV *, 4> NewMulOps(M->operands());
          NewMulOps[0] = SE.getConstant(C->getAPInt().sdiv(FC->getAPInt()));
          S = SE.getMulExpr(NewMulOps);
          return true;
        }
  }

  // In an AddRec, check if both start and step are divisible.
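  // For example, factoring 4 out of {8,+,12} requires the step to divide
  // evenly (12/4 = 3 with remainder 0) and the start to factor as well
  // (8/4 = 2), yielding {2,+,3}.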
  if (const SCEVAddRecExpr *A = dyn_cast<SCEVAddRecExpr>(S)) {
    const SCEV *Step = A->getStepRecurrence(SE);
    const SCEV *StepRem = SE.getConstant(Step->getType(), 0);
    if (!FactorOutConstant(Step, StepRem, Factor, SE, DL))
      return false;
    if (!StepRem->isZero())
      return false;
    const SCEV *Start = A->getStart();
    if (!FactorOutConstant(Start, Remainder, Factor, SE, DL))
      return false;
    S = SE.getAddRecExpr(Start, Step, A->getLoop(),
                         A->getNoWrapFlags(SCEV::FlagNW));
    return true;
  }

  return false;
}

/// SimplifyAddOperands - Sort and simplify a list of add operands. NumAddRecs
/// is the number of SCEVAddRecExprs present, which are kept at the end of
/// the list.
///
static void SimplifyAddOperands(SmallVectorImpl<const SCEV *> &Ops,
                                Type *Ty,
                                ScalarEvolution &SE) {
  unsigned NumAddRecs = 0;
  for (unsigned i = Ops.size(); i > 0 && isa<SCEVAddRecExpr>(Ops[i-1]); --i)
    ++NumAddRecs;
  // Group Ops into non-addrecs and addrecs.
  SmallVector<const SCEV *, 8> NoAddRecs(Ops.begin(), Ops.end() - NumAddRecs);
  SmallVector<const SCEV *, 8> AddRecs(Ops.end() - NumAddRecs, Ops.end());
  // Let ScalarEvolution sort and simplify the non-addrecs list.
  const SCEV *Sum = NoAddRecs.empty() ?
                    SE.getConstant(Ty, 0) :
                    SE.getAddExpr(NoAddRecs);
  // If it returned an add, use the operands. Otherwise it simplified
  // the sum into a single value, so just use that.
  Ops.clear();
  if (const SCEVAddExpr *Add = dyn_cast<SCEVAddExpr>(Sum))
    Ops.append(Add->op_begin(), Add->op_end());
  else if (!Sum->isZero())
    Ops.push_back(Sum);
  // Then append the addrecs.
  Ops.append(AddRecs.begin(), AddRecs.end());
}

/// SplitAddRecs - Flatten a list of add operands, moving addrec start values
/// out to the top level. For example, convert {a + b,+,c} to a, b, {0,+,c}.
/// This helps expose more opportunities for folding parts of the expressions
/// into GEP indices.
///
static void SplitAddRecs(SmallVectorImpl<const SCEV *> &Ops,
                         Type *Ty,
                         ScalarEvolution &SE) {
  // Find the addrecs.
  SmallVector<const SCEV *, 8> AddRecs;
  for (unsigned i = 0, e = Ops.size(); i != e; ++i)
    while (const SCEVAddRecExpr *A = dyn_cast<SCEVAddRecExpr>(Ops[i])) {
      const SCEV *Start = A->getStart();
      if (Start->isZero()) break;
      const SCEV *Zero = SE.getConstant(Ty, 0);
      AddRecs.push_back(SE.getAddRecExpr(Zero,
                                         A->getStepRecurrence(SE),
                                         A->getLoop(),
                                         A->getNoWrapFlags(SCEV::FlagNW)));
      if (const SCEVAddExpr *Add = dyn_cast<SCEVAddExpr>(Start)) {
        Ops[i] = Zero;
        Ops.append(Add->op_begin(), Add->op_end());
        e += Add->getNumOperands();
      } else {
        Ops[i] = Start;
      }
    }
  if (!AddRecs.empty()) {
    // Add the addrecs onto the end of the list.
    Ops.append(AddRecs.begin(), AddRecs.end());
    // Resort the operand list, moving any constants to the front.
    SimplifyAddOperands(Ops, Ty, SE);
  }
}

/// expandAddToGEP - Expand an addition expression with a pointer type into
/// a GEP instead of using ptrtoint+arithmetic+inttoptr. This helps
/// BasicAliasAnalysis and other passes analyze the result. See the rules
/// for getelementptr vs. inttoptr in
/// http://llvm.org/docs/LangRef.html#pointeraliasing
/// for details.
///
/// Design note: The correctness of using getelementptr here depends on
/// ScalarEvolution not recognizing inttoptr and ptrtoint operators, as
/// they may introduce pointer arithmetic which may not be safely converted
/// into getelementptr.
///
/// Design note: It might seem desirable for this function to be more
/// loop-aware. If some of the indices are loop-invariant while others
/// aren't, it might seem desirable to emit multiple GEPs, keeping the
/// loop-invariant portions of the overall computation outside the loop.
/// However, there are a few reasons this is not done here. Hoisting simple
/// arithmetic is a low-level optimization that often isn't very
/// important until late in the optimization process. In fact, passes
/// like InstructionCombining will combine GEPs, even if it means
/// pushing loop-invariant computation down into loops, so even if the
/// GEPs were split here, the work would quickly be undone. The
/// LoopStrengthReduction pass, which is usually run quite late (and
/// after the last InstructionCombining pass), takes care of hoisting
/// loop-invariant portions of expressions, after considering what
/// can be folded using target addressing modes.
///
Value *SCEVExpander::expandAddToGEP(const SCEV *const *op_begin,
                                    const SCEV *const *op_end,
                                    PointerType *PTy,
                                    Type *Ty,
                                    Value *V) {
  SmallVector<Value *, 4> GepIndices;
  SmallVector<const SCEV *, 8> Ops(op_begin, op_end);
  bool AnyNonZeroIndices = false;

  // Split AddRecs up into parts as either of the parts may be usable
  // without the other.
  SplitAddRecs(Ops, Ty, SE);

  Type *IntIdxTy = DL.getIndexType(PTy);

  // For opaque pointers, always generate i8 GEP.
  if (!PTy->isOpaque()) {
    // Descend down the pointer's type and attempt to convert the other
    // operands into GEP indices, at each level. The first index in a GEP
    // indexes into the array implied by the pointer operand; the rest of
    // the indices index into the element or field type selected by the
    // preceding index.
    Type *ElTy = PTy->getElementType();
    for (;;) {
      // If the scale size is not 0, attempt to factor out a scale for
      // array indexing.
      SmallVector<const SCEV *, 8> ScaledOps;
      if (ElTy->isSized()) {
        const SCEV *ElSize = SE.getSizeOfExpr(IntIdxTy, ElTy);
        if (!ElSize->isZero()) {
          SmallVector<const SCEV *, 8> NewOps;
          for (const SCEV *Op : Ops) {
            const SCEV *Remainder = SE.getConstant(Ty, 0);
            if (FactorOutConstant(Op, Remainder, ElSize, SE, DL)) {
              // Op now has ElSize factored out.
              ScaledOps.push_back(Op);
              if (!Remainder->isZero())
                NewOps.push_back(Remainder);
              AnyNonZeroIndices = true;
            } else {
              // The operand was not divisible, so add it to the list of
              // operands we'll scan next iteration.
              NewOps.push_back(Op);
            }
          }
          // If we made any changes, update Ops.
          if (!ScaledOps.empty()) {
            Ops = NewOps;
            SimplifyAddOperands(Ops, Ty, SE);
          }
        }
      }

      // Record the scaled array index for this level of the type. If
      // we didn't find any operands that could be factored, tentatively
      // assume that element zero was selected (since the zero offset
      // would obviously be folded away).
      Value *Scaled =
          ScaledOps.empty()
              ? Constant::getNullValue(Ty)
              : expandCodeForImpl(SE.getAddExpr(ScaledOps), Ty, false);
      GepIndices.push_back(Scaled);

      // Collect struct field index operands.
      while (StructType *STy = dyn_cast<StructType>(ElTy)) {
        bool FoundFieldNo = false;
        // An empty struct has no fields.
        if (STy->getNumElements() == 0) break;
        // Field offsets are known. See if a constant offset falls within any of
        // the struct fields.
        if (Ops.empty())
          break;
        if (const SCEVConstant *C = dyn_cast<SCEVConstant>(Ops[0]))
          if (SE.getTypeSizeInBits(C->getType()) <= 64) {
            const StructLayout &SL = *DL.getStructLayout(STy);
            uint64_t FullOffset = C->getValue()->getZExtValue();
            if (FullOffset < SL.getSizeInBytes()) {
              unsigned ElIdx = SL.getElementContainingOffset(FullOffset);
              GepIndices.push_back(
                  ConstantInt::get(Type::getInt32Ty(Ty->getContext()), ElIdx));
              ElTy = STy->getTypeAtIndex(ElIdx);
              Ops[0] =
                  SE.getConstant(Ty, FullOffset - SL.getElementOffset(ElIdx));
              AnyNonZeroIndices = true;
              FoundFieldNo = true;
            }
          }
        // If no struct field offsets were found, tentatively assume that
        // field zero was selected (since the zero offset would obviously
        // be folded away).
        if (!FoundFieldNo) {
          ElTy = STy->getTypeAtIndex(0u);
          GepIndices.push_back(
              Constant::getNullValue(Type::getInt32Ty(Ty->getContext())));
        }
      }

      if (ArrayType *ATy = dyn_cast<ArrayType>(ElTy))
        ElTy = ATy->getElementType();
      else
        // FIXME: Handle VectorType.
        // E.g., if ElTy is a scalable vector, then ElSize is not a
        // compile-time constant and cannot be factored out. The generated IR
        // is less ideal, with the base 'V' cast to i8* and an ugly
        // getelementptr over that.
        break;
    }
  }

  // If none of the operands were convertible to proper GEP indices, cast
  // the base to i8* and do an ugly getelementptr with that. It's still
  // better than ptrtoint+arithmetic+inttoptr at least.
  if (!AnyNonZeroIndices) {
    // Cast the base to i8*.
    if (!PTy->isOpaque())
      V = InsertNoopCastOfTo(V,
             Type::getInt8PtrTy(Ty->getContext(), PTy->getAddressSpace()));

    assert(!isa<Instruction>(V) ||
           SE.DT.dominates(cast<Instruction>(V), &*Builder.GetInsertPoint()));

    // Expand the operands for a plain byte offset.
    Value *Idx = expandCodeForImpl(SE.getAddExpr(Ops), Ty, false);

    // Fold a GEP with constant operands.
    if (Constant *CLHS = dyn_cast<Constant>(V))
      if (Constant *CRHS = dyn_cast<Constant>(Idx))
        return ConstantExpr::getGetElementPtr(Type::getInt8Ty(Ty->getContext()),
                                              CLHS, CRHS);

    // Do a quick scan to see if we have this GEP nearby. If so, reuse it.
    unsigned ScanLimit = 6;
    BasicBlock::iterator BlockBegin = Builder.GetInsertBlock()->begin();
    // Scanning starts from the last instruction before the insertion point.
    BasicBlock::iterator IP = Builder.GetInsertPoint();
    if (IP != BlockBegin) {
      --IP;
      for (; ScanLimit; --IP, --ScanLimit) {
        // Don't count dbg.value against the ScanLimit, to avoid perturbing the
        // generated code.
        if (isa<DbgInfoIntrinsic>(IP))
          ScanLimit++;
        if (IP->getOpcode() == Instruction::GetElementPtr &&
            IP->getOperand(0) == V && IP->getOperand(1) == Idx)
          return &*IP;
        if (IP == BlockBegin) break;
      }
    }

    // Save the original insertion point so we can restore it when we're done.
    SCEVInsertPointGuard Guard(Builder, this);

    // Move the insertion point out of as many loops as we can.
    while (const Loop *L = SE.LI.getLoopFor(Builder.GetInsertBlock())) {
      if (!L->isLoopInvariant(V) || !L->isLoopInvariant(Idx)) break;
      BasicBlock *Preheader = L->getLoopPreheader();
      if (!Preheader) break;

      // Ok, move up a level.
      Builder.SetInsertPoint(Preheader->getTerminator());
    }

    // Emit a GEP.
    return Builder.CreateGEP(Builder.getInt8Ty(), V, Idx, "uglygep");
  }

  {
    SCEVInsertPointGuard Guard(Builder, this);

    // Move the insertion point out of as many loops as we can.
    while (const Loop *L = SE.LI.getLoopFor(Builder.GetInsertBlock())) {
      if (!L->isLoopInvariant(V)) break;

      bool AnyIndexNotLoopInvariant = any_of(
          GepIndices, [L](Value *Op) { return !L->isLoopInvariant(Op); });

      if (AnyIndexNotLoopInvariant)
        break;

      BasicBlock *Preheader = L->getLoopPreheader();
      if (!Preheader) break;

      // Ok, move up a level.
      Builder.SetInsertPoint(Preheader->getTerminator());
    }

    // Insert a pretty getelementptr. Note that this GEP is not marked inbounds,
    // because ScalarEvolution may have changed the address arithmetic to
    // compute a value which is beyond the end of the allocated object.
    Value *Casted = V;
    if (V->getType() != PTy)
      Casted = InsertNoopCastOfTo(Casted, PTy);
    Value *GEP = Builder.CreateGEP(PTy->getElementType(), Casted, GepIndices,
                                   "scevgep");
    Ops.push_back(SE.getUnknown(GEP));
  }

  return expand(SE.getAddExpr(Ops));
}

Value *SCEVExpander::expandAddToGEP(const SCEV *Op, PointerType *PTy, Type *Ty,
                                    Value *V) {
  const SCEV *const Ops[1] = {Op};
  return expandAddToGEP(Ops, Ops + 1, PTy, Ty, V);
}

/// PickMostRelevantLoop - Given two loops pick the one that's most relevant for
/// SCEV expansion. If they are nested, this is the most nested. If they are
/// neighboring, pick the later one.
static const Loop *PickMostRelevantLoop(const Loop *A, const Loop *B,
                                        DominatorTree &DT) {
  if (!A) return B;
  if (!B) return A;
  if (A->contains(B)) return B;
  if (B->contains(A)) return A;
  if (DT.dominates(A->getHeader(), B->getHeader())) return B;
  if (DT.dominates(B->getHeader(), A->getHeader())) return A;
  return A; // Arbitrarily break the tie.
}

/// getRelevantLoop - Get the most relevant loop associated with the given
/// expression, according to PickMostRelevantLoop.
const Loop *SCEVExpander::getRelevantLoop(const SCEV *S) {
  // Test whether we've already computed the most relevant loop for this SCEV.
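  // The insert below doubles as the cache lookup: if an entry for S already
  // exists, its cached loop is returned; otherwise the placeholder entry is
  // filled in by the code that follows.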
  auto Pair = RelevantLoops.insert(std::make_pair(S, nullptr));
  if (!Pair.second)
    return Pair.first->second;

  if (isa<SCEVConstant>(S))
    // A constant has no relevant loops.
    return nullptr;
  if (const SCEVUnknown *U = dyn_cast<SCEVUnknown>(S)) {
    if (const Instruction *I = dyn_cast<Instruction>(U->getValue()))
      return Pair.first->second = SE.LI.getLoopFor(I->getParent());
    // A non-instruction has no relevant loops.
    return nullptr;
  }
  if (const SCEVNAryExpr *N = dyn_cast<SCEVNAryExpr>(S)) {
    const Loop *L = nullptr;
    if (const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(S))
      L = AR->getLoop();
    for (const SCEV *Op : N->operands())
      L = PickMostRelevantLoop(L, getRelevantLoop(Op), SE.DT);
    return RelevantLoops[N] = L;
  }
  if (const SCEVCastExpr *C = dyn_cast<SCEVCastExpr>(S)) {
    const Loop *Result = getRelevantLoop(C->getOperand());
    return RelevantLoops[C] = Result;
  }
  if (const SCEVUDivExpr *D = dyn_cast<SCEVUDivExpr>(S)) {
    const Loop *Result = PickMostRelevantLoop(
        getRelevantLoop(D->getLHS()), getRelevantLoop(D->getRHS()), SE.DT);
    return RelevantLoops[D] = Result;
  }
  llvm_unreachable("Unexpected SCEV type!");
}

namespace {

/// LoopCompare - Compare loops by PickMostRelevantLoop.
class LoopCompare {
  DominatorTree &DT;
public:
  explicit LoopCompare(DominatorTree &dt) : DT(dt) {}

  bool operator()(std::pair<const Loop *, const SCEV *> LHS,
                  std::pair<const Loop *, const SCEV *> RHS) const {
    // Keep pointer operands sorted at the end.
    if (LHS.second->getType()->isPointerTy() !=
        RHS.second->getType()->isPointerTy())
      return LHS.second->getType()->isPointerTy();

    // Compare loops with PickMostRelevantLoop.
    if (LHS.first != RHS.first)
      return PickMostRelevantLoop(LHS.first, RHS.first, DT) != LHS.first;

    // If one operand is a non-constant negative and the other is not,
    // put the non-constant negative on the right so that a sub can
    // be used instead of a negate and add.
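    // For example, with operands %a and (-1 * %b), this ordering lets the
    // expander emit "%a - %b" rather than negating %b and adding.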
    if (LHS.second->isNonConstantNegative()) {
      if (!RHS.second->isNonConstantNegative())
        return false;
    } else if (RHS.second->isNonConstantNegative())
      return true;

    // Otherwise they are equivalent according to this comparison.
    return false;
  }
};

}

Value *SCEVExpander::visitAddExpr(const SCEVAddExpr *S) {
  Type *Ty = SE.getEffectiveSCEVType(S->getType());

  // Collect all the add operands in a loop, along with their associated loops.
  // Iterate in reverse so that constants are emitted last, all else equal, and
  // so that pointer operands are inserted first, which the code below relies on
  // to form more involved GEPs.
  SmallVector<std::pair<const Loop *, const SCEV *>, 8> OpsAndLoops;
  for (const SCEV *Op : reverse(S->operands()))
    OpsAndLoops.push_back(std::make_pair(getRelevantLoop(Op), Op));

  // Sort by loop. Use a stable sort so that constants follow non-constants and
  // pointer operands precede non-pointer operands.
  llvm::stable_sort(OpsAndLoops, LoopCompare(SE.DT));

  // Emit instructions to add all the operands. Hoist as much as possible
  // out of loops, and form meaningful getelementptrs where possible.
  Value *Sum = nullptr;
  for (auto I = OpsAndLoops.begin(), E = OpsAndLoops.end(); I != E;) {
    const Loop *CurLoop = I->first;
    const SCEV *Op = I->second;
    if (!Sum) {
      // This is the first operand. Just expand it.
      Sum = expand(Op);
      ++I;
      continue;
    }

    assert(!Op->getType()->isPointerTy() && "Only first op can be pointer");
    if (PointerType *PTy = dyn_cast<PointerType>(Sum->getType())) {
      // The running sum expression is a pointer. Try to form a getelementptr
      // at this level with that as the base.
      SmallVector<const SCEV *, 4> NewOps;
      for (; I != E && I->first == CurLoop; ++I) {
        // If the operand is a SCEVUnknown and not an instruction, peek through
        // it, to enable more of it to be folded into the GEP.
        const SCEV *X = I->second;
        if (const SCEVUnknown *U = dyn_cast<SCEVUnknown>(X))
          if (!isa<Instruction>(U->getValue()))
            X = SE.getSCEV(U->getValue());
        NewOps.push_back(X);
      }
      Sum = expandAddToGEP(NewOps.begin(), NewOps.end(), PTy, Ty, Sum);
    } else if (Op->isNonConstantNegative()) {
      // Instead of doing a negate and add, just do a subtract.
      Value *W = expandCodeForImpl(SE.getNegativeSCEV(Op), Ty, false);
      Sum = InsertNoopCastOfTo(Sum, Ty);
      Sum = InsertBinop(Instruction::Sub, Sum, W, SCEV::FlagAnyWrap,
                        /*IsSafeToHoist*/ true);
      ++I;
    } else {
      // A simple add.
      Value *W = expandCodeForImpl(Op, Ty, false);
      Sum = InsertNoopCastOfTo(Sum, Ty);
      // Canonicalize a constant to the RHS.
      if (isa<Constant>(Sum)) std::swap(Sum, W);
      Sum = InsertBinop(Instruction::Add, Sum, W, S->getNoWrapFlags(),
                        /*IsSafeToHoist*/ true);
      ++I;
    }
  }

  return Sum;
}

Value *SCEVExpander::visitMulExpr(const SCEVMulExpr *S) {
  Type *Ty = SE.getEffectiveSCEVType(S->getType());

  // Collect all the mul operands in a loop, along with their associated loops.
  // Iterate in reverse so that constants are emitted last, all else equal.
  SmallVector<std::pair<const Loop *, const SCEV *>, 8> OpsAndLoops;
  for (const SCEV *Op : reverse(S->operands()))
    OpsAndLoops.push_back(std::make_pair(getRelevantLoop(Op), Op));

  // Sort by loop. Use a stable sort so that constants follow non-constants.
  llvm::stable_sort(OpsAndLoops, LoopCompare(SE.DT));

  // Emit instructions to mul all the operands. Hoist as much as possible
  // out of loops.
  Value *Prod = nullptr;
  auto I = OpsAndLoops.begin();

  // Expand the calculation of X pow N in the following manner:
  // Let N = P1 + P2 + ... + PK, where all P are powers of 2. Then:
  // X pow N = (X pow P1) * (X pow P2) * ... * (X pow PK).
  const auto ExpandOpBinPowN = [this, &I, &OpsAndLoops, &Ty]() {
    auto E = I;
    // Calculate how many times the same operand from the same loop is included
    // into this power.
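    // For example, once Exponent == 13 (binary 1101) has been counted, the
    // squaring loop below computes X^2, X^4 and X^8 and multiplies X, X^4 and
    // X^8 into Result, producing X^13 with five multiplies instead of twelve.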
    uint64_t Exponent = 0;
    const uint64_t MaxExponent = UINT64_MAX >> 1;
    // No one sane will ever try to calculate such huge exponents, but if we
    // need this, we stop on UINT64_MAX / 2 because we need to exit the loop
    // below when the power of 2 exceeds our Exponent, and we want it to be
    // 1u << 31 at most to not deal with unsigned overflow.
    while (E != OpsAndLoops.end() && *I == *E && Exponent != MaxExponent) {
      ++Exponent;
      ++E;
    }
    assert(Exponent > 0 && "Trying to calculate a zeroth exponent of operand?");

    // Calculate powers with exponents 1, 2, 4, 8 etc. and include those of them
    // that are needed into the result.
    Value *P = expandCodeForImpl(I->second, Ty, false);
    Value *Result = nullptr;
    if (Exponent & 1)
      Result = P;
    for (uint64_t BinExp = 2; BinExp <= Exponent; BinExp <<= 1) {
      P = InsertBinop(Instruction::Mul, P, P, SCEV::FlagAnyWrap,
                      /*IsSafeToHoist*/ true);
      if (Exponent & BinExp)
        Result = Result ? InsertBinop(Instruction::Mul, Result, P,
                                      SCEV::FlagAnyWrap,
                                      /*IsSafeToHoist*/ true)
                        : P;
    }

    I = E;
    assert(Result && "Nothing was expanded?");
    return Result;
  };

  while (I != OpsAndLoops.end()) {
    if (!Prod) {
      // This is the first operand. Just expand it.
      Prod = ExpandOpBinPowN();
    } else if (I->second->isAllOnesValue()) {
      // Instead of doing a multiply by negative one, just do a negate.
      Prod = InsertNoopCastOfTo(Prod, Ty);
      Prod = InsertBinop(Instruction::Sub, Constant::getNullValue(Ty), Prod,
                         SCEV::FlagAnyWrap, /*IsSafeToHoist*/ true);
      ++I;
    } else {
      // A simple mul.
      Value *W = ExpandOpBinPowN();
      Prod = InsertNoopCastOfTo(Prod, Ty);
      // Canonicalize a constant to the RHS.
      if (isa<Constant>(Prod)) std::swap(Prod, W);
      const APInt *RHS;
      if (match(W, m_Power2(RHS))) {
        // Canonicalize Prod*(1<<C) to Prod<<C.
        assert(!Ty->isVectorTy() && "vector types are not SCEVable");
        auto NWFlags = S->getNoWrapFlags();
        // clear nsw flag if shl will produce poison value.
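        // (e.g. for i8, "mul nsw %x, -128" is well defined when %x is 1, but
        // the equivalent "shl nsw %x, 7" shifts out bits that disagree with
        // the resulting sign bit and is poison, so nsw cannot be carried over.)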
        if (RHS->logBase2() == RHS->getBitWidth() - 1)
          NWFlags = ScalarEvolution::clearFlags(NWFlags, SCEV::FlagNSW);
        Prod = InsertBinop(Instruction::Shl, Prod,
                           ConstantInt::get(Ty, RHS->logBase2()), NWFlags,
                           /*IsSafeToHoist*/ true);
      } else {
        Prod = InsertBinop(Instruction::Mul, Prod, W, S->getNoWrapFlags(),
                           /*IsSafeToHoist*/ true);
      }
    }
  }

  return Prod;
}

Value *SCEVExpander::visitUDivExpr(const SCEVUDivExpr *S) {
  Type *Ty = SE.getEffectiveSCEVType(S->getType());

  Value *LHS = expandCodeForImpl(S->getLHS(), Ty, false);
  if (const SCEVConstant *SC = dyn_cast<SCEVConstant>(S->getRHS())) {
    const APInt &RHS = SC->getAPInt();
    if (RHS.isPowerOf2())
      return InsertBinop(Instruction::LShr, LHS,
                         ConstantInt::get(Ty, RHS.logBase2()),
                         SCEV::FlagAnyWrap, /*IsSafeToHoist*/ true);
  }

  Value *RHS = expandCodeForImpl(S->getRHS(), Ty, false);
  return InsertBinop(Instruction::UDiv, LHS, RHS, SCEV::FlagAnyWrap,
                     /*IsSafeToHoist*/ SE.isKnownNonZero(S->getRHS()));
}

/// Determine if this is a well-behaved chain of instructions leading back to
/// the PHI. If so, it may be reused by expanded expressions.
bool SCEVExpander::isNormalAddRecExprPHI(PHINode *PN, Instruction *IncV,
                                         const Loop *L) {
  if (IncV->getNumOperands() == 0 || isa<PHINode>(IncV) ||
      (isa<CastInst>(IncV) && !isa<BitCastInst>(IncV)))
    return false;
  // If any of the operands don't dominate the insert position, bail.
  // Addrec operands are always loop-invariant, so this can only happen
  // if there are instructions which haven't been hoisted.
  if (L == IVIncInsertLoop) {
    for (Use &Op : llvm::drop_begin(IncV->operands()))
      if (Instruction *OInst = dyn_cast<Instruction>(Op))
        if (!SE.DT.dominates(OInst, IVIncInsertPos))
          return false;
  }
  // Advance to the next instruction.
9365ffd83dbSDimitry Andric IncV = dyn_cast<Instruction>(IncV->getOperand(0)); 9375ffd83dbSDimitry Andric if (!IncV) 9385ffd83dbSDimitry Andric return false; 9395ffd83dbSDimitry Andric 9405ffd83dbSDimitry Andric if (IncV->mayHaveSideEffects()) 9415ffd83dbSDimitry Andric return false; 9425ffd83dbSDimitry Andric 9435ffd83dbSDimitry Andric if (IncV == PN) 9445ffd83dbSDimitry Andric return true; 9455ffd83dbSDimitry Andric 9465ffd83dbSDimitry Andric return isNormalAddRecExprPHI(PN, IncV, L); 9475ffd83dbSDimitry Andric } 9485ffd83dbSDimitry Andric 9495ffd83dbSDimitry Andric /// getIVIncOperand returns an induction variable increment's induction 9505ffd83dbSDimitry Andric /// variable operand. 9515ffd83dbSDimitry Andric /// 9525ffd83dbSDimitry Andric /// If allowScale is set, any type of GEP is allowed as long as the nonIV 9535ffd83dbSDimitry Andric /// operands dominate InsertPos. 9545ffd83dbSDimitry Andric /// 9555ffd83dbSDimitry Andric /// If allowScale is not set, ensure that a GEP increment conforms to one of the 9565ffd83dbSDimitry Andric /// simple patterns generated by getAddRecExprPHILiterally and 9575ffd83dbSDimitry Andric /// expandAddtoGEP. If the pattern isn't recognized, return NULL. 9585ffd83dbSDimitry Andric Instruction *SCEVExpander::getIVIncOperand(Instruction *IncV, 9595ffd83dbSDimitry Andric Instruction *InsertPos, 9605ffd83dbSDimitry Andric bool allowScale) { 9615ffd83dbSDimitry Andric if (IncV == InsertPos) 9625ffd83dbSDimitry Andric return nullptr; 9635ffd83dbSDimitry Andric 9645ffd83dbSDimitry Andric switch (IncV->getOpcode()) { 9655ffd83dbSDimitry Andric default: 9665ffd83dbSDimitry Andric return nullptr; 9675ffd83dbSDimitry Andric // Check for a simple Add/Sub or GEP of a loop invariant step. 9685ffd83dbSDimitry Andric case Instruction::Add: 9695ffd83dbSDimitry Andric case Instruction::Sub: { 9705ffd83dbSDimitry Andric Instruction *OInst = dyn_cast<Instruction>(IncV->getOperand(1)); 9715ffd83dbSDimitry Andric if (!OInst || SE.DT.dominates(OInst, InsertPos)) 9725ffd83dbSDimitry Andric return dyn_cast<Instruction>(IncV->getOperand(0)); 9735ffd83dbSDimitry Andric return nullptr; 9745ffd83dbSDimitry Andric } 9755ffd83dbSDimitry Andric case Instruction::BitCast: 9765ffd83dbSDimitry Andric return dyn_cast<Instruction>(IncV->getOperand(0)); 9775ffd83dbSDimitry Andric case Instruction::GetElementPtr: 978fe6060f1SDimitry Andric for (Use &U : llvm::drop_begin(IncV->operands())) { 979fe6060f1SDimitry Andric if (isa<Constant>(U)) 9805ffd83dbSDimitry Andric continue; 981fe6060f1SDimitry Andric if (Instruction *OInst = dyn_cast<Instruction>(U)) { 9825ffd83dbSDimitry Andric if (!SE.DT.dominates(OInst, InsertPos)) 9835ffd83dbSDimitry Andric return nullptr; 9845ffd83dbSDimitry Andric } 9855ffd83dbSDimitry Andric if (allowScale) { 9865ffd83dbSDimitry Andric // allow any kind of GEP as long as it can be hoisted. 9875ffd83dbSDimitry Andric continue; 9885ffd83dbSDimitry Andric } 9895ffd83dbSDimitry Andric // This must be a pointer addition of constants (pretty), which is already 9905ffd83dbSDimitry Andric // handled, or some number of address-size elements (ugly). Ugly geps 9915ffd83dbSDimitry Andric // have 2 operands. i1* is used by the expander to represent an 9925ffd83dbSDimitry Andric // address-size element. 
9935ffd83dbSDimitry Andric if (IncV->getNumOperands() != 2) 9945ffd83dbSDimitry Andric return nullptr; 9955ffd83dbSDimitry Andric unsigned AS = cast<PointerType>(IncV->getType())->getAddressSpace(); 9965ffd83dbSDimitry Andric if (IncV->getType() != Type::getInt1PtrTy(SE.getContext(), AS) 9975ffd83dbSDimitry Andric && IncV->getType() != Type::getInt8PtrTy(SE.getContext(), AS)) 9985ffd83dbSDimitry Andric return nullptr; 9995ffd83dbSDimitry Andric break; 10005ffd83dbSDimitry Andric } 10015ffd83dbSDimitry Andric return dyn_cast<Instruction>(IncV->getOperand(0)); 10025ffd83dbSDimitry Andric } 10035ffd83dbSDimitry Andric } 10045ffd83dbSDimitry Andric 10055ffd83dbSDimitry Andric /// If the insert point of the current builder or any of the builders on the 10065ffd83dbSDimitry Andric /// stack of saved builders has 'I' as its insert point, update it to point to 10075ffd83dbSDimitry Andric /// the instruction after 'I'. This is intended to be used when the instruction 10085ffd83dbSDimitry Andric /// 'I' is being moved. If this fixup is not done and 'I' is moved to a 10095ffd83dbSDimitry Andric /// different block, the inconsistent insert point (with a mismatched 10105ffd83dbSDimitry Andric /// Instruction and Block) can lead to an instruction being inserted in a block 10115ffd83dbSDimitry Andric /// other than its parent. 10125ffd83dbSDimitry Andric void SCEVExpander::fixupInsertPoints(Instruction *I) { 10135ffd83dbSDimitry Andric BasicBlock::iterator It(*I); 10145ffd83dbSDimitry Andric BasicBlock::iterator NewInsertPt = std::next(It); 10155ffd83dbSDimitry Andric if (Builder.GetInsertPoint() == It) 10165ffd83dbSDimitry Andric Builder.SetInsertPoint(&*NewInsertPt); 10175ffd83dbSDimitry Andric for (auto *InsertPtGuard : InsertPointGuards) 10185ffd83dbSDimitry Andric if (InsertPtGuard->GetInsertPoint() == It) 10195ffd83dbSDimitry Andric InsertPtGuard->SetInsertPoint(NewInsertPt); 10205ffd83dbSDimitry Andric } 10215ffd83dbSDimitry Andric 10225ffd83dbSDimitry Andric /// hoistStep - Attempt to hoist a simple IV increment above InsertPos to make 10235ffd83dbSDimitry Andric /// it available to other uses in this loop. Recursively hoist any operands, 10245ffd83dbSDimitry Andric /// until we reach a value that dominates InsertPos. 10255ffd83dbSDimitry Andric bool SCEVExpander::hoistIVInc(Instruction *IncV, Instruction *InsertPos) { 10265ffd83dbSDimitry Andric if (SE.DT.dominates(IncV, InsertPos)) 10275ffd83dbSDimitry Andric return true; 10285ffd83dbSDimitry Andric 10295ffd83dbSDimitry Andric // InsertPos must itself dominate IncV so that IncV's new position satisfies 10305ffd83dbSDimitry Andric // its existing users. 10315ffd83dbSDimitry Andric if (isa<PHINode>(InsertPos) || 10325ffd83dbSDimitry Andric !SE.DT.dominates(InsertPos->getParent(), IncV->getParent())) 10335ffd83dbSDimitry Andric return false; 10345ffd83dbSDimitry Andric 10355ffd83dbSDimitry Andric if (!SE.LI.movementPreservesLCSSAForm(IncV, InsertPos)) 10365ffd83dbSDimitry Andric return false; 10375ffd83dbSDimitry Andric 10385ffd83dbSDimitry Andric // Check that the chain of IV operands leading back to Phi can be hoisted. 10395ffd83dbSDimitry Andric SmallVector<Instruction*, 4> IVIncs; 10405ffd83dbSDimitry Andric for(;;) { 10415ffd83dbSDimitry Andric Instruction *Oper = getIVIncOperand(IncV, InsertPos, /*allowScale*/true); 10425ffd83dbSDimitry Andric if (!Oper) 10435ffd83dbSDimitry Andric return false; 10445ffd83dbSDimitry Andric // IncV is safe to hoist. 
10455ffd83dbSDimitry Andric IVIncs.push_back(IncV); 10465ffd83dbSDimitry Andric IncV = Oper; 10475ffd83dbSDimitry Andric if (SE.DT.dominates(IncV, InsertPos)) 10485ffd83dbSDimitry Andric break; 10495ffd83dbSDimitry Andric } 10505ffd83dbSDimitry Andric for (auto I = IVIncs.rbegin(), E = IVIncs.rend(); I != E; ++I) { 10515ffd83dbSDimitry Andric fixupInsertPoints(*I); 10525ffd83dbSDimitry Andric (*I)->moveBefore(InsertPos); 10535ffd83dbSDimitry Andric } 10545ffd83dbSDimitry Andric return true; 10555ffd83dbSDimitry Andric } 10565ffd83dbSDimitry Andric 10575ffd83dbSDimitry Andric /// Determine if this cyclic phi is in a form that would have been generated by 10585ffd83dbSDimitry Andric /// LSR. We don't care if the phi was actually expanded in this pass, as long 10595ffd83dbSDimitry Andric /// as it is in a low-cost form, for example, no implied multiplication. This 10605ffd83dbSDimitry Andric /// should match any patterns generated by getAddRecExprPHILiterally and 10615ffd83dbSDimitry Andric /// expandAddtoGEP. 10625ffd83dbSDimitry Andric bool SCEVExpander::isExpandedAddRecExprPHI(PHINode *PN, Instruction *IncV, 10635ffd83dbSDimitry Andric const Loop *L) { 10645ffd83dbSDimitry Andric for(Instruction *IVOper = IncV; 10655ffd83dbSDimitry Andric (IVOper = getIVIncOperand(IVOper, L->getLoopPreheader()->getTerminator(), 10665ffd83dbSDimitry Andric /*allowScale=*/false));) { 10675ffd83dbSDimitry Andric if (IVOper == PN) 10685ffd83dbSDimitry Andric return true; 10695ffd83dbSDimitry Andric } 10705ffd83dbSDimitry Andric return false; 10715ffd83dbSDimitry Andric } 10725ffd83dbSDimitry Andric 10735ffd83dbSDimitry Andric /// expandIVInc - Expand an IV increment at Builder's current InsertPos. 10745ffd83dbSDimitry Andric /// Typically this is the LatchBlock terminator or IVIncInsertPos, but we may 10755ffd83dbSDimitry Andric /// need to materialize IV increments elsewhere to handle difficult situations. 10765ffd83dbSDimitry Andric Value *SCEVExpander::expandIVInc(PHINode *PN, Value *StepV, const Loop *L, 10775ffd83dbSDimitry Andric Type *ExpandTy, Type *IntTy, 10785ffd83dbSDimitry Andric bool useSubtract) { 10795ffd83dbSDimitry Andric Value *IncV; 10805ffd83dbSDimitry Andric // If the PHI is a pointer, use a GEP, otherwise use an add or sub. 10815ffd83dbSDimitry Andric if (ExpandTy->isPointerTy()) { 10825ffd83dbSDimitry Andric PointerType *GEPPtrTy = cast<PointerType>(ExpandTy); 10835ffd83dbSDimitry Andric // If the step isn't constant, don't use an implicitly scaled GEP, because 10845ffd83dbSDimitry Andric // that would require a multiply inside the loop. 10855ffd83dbSDimitry Andric if (!isa<ConstantInt>(StepV)) 10865ffd83dbSDimitry Andric GEPPtrTy = PointerType::get(Type::getInt1Ty(SE.getContext()), 10875ffd83dbSDimitry Andric GEPPtrTy->getAddressSpace()); 10885ffd83dbSDimitry Andric IncV = expandAddToGEP(SE.getSCEV(StepV), GEPPtrTy, IntTy, PN); 1089e8d8bef9SDimitry Andric if (IncV->getType() != PN->getType()) 10905ffd83dbSDimitry Andric IncV = Builder.CreateBitCast(IncV, PN->getType()); 10915ffd83dbSDimitry Andric } else { 10925ffd83dbSDimitry Andric IncV = useSubtract ? 
10935ffd83dbSDimitry Andric Builder.CreateSub(PN, StepV, Twine(IVName) + ".iv.next") : 10945ffd83dbSDimitry Andric Builder.CreateAdd(PN, StepV, Twine(IVName) + ".iv.next"); 10955ffd83dbSDimitry Andric } 10965ffd83dbSDimitry Andric return IncV; 10975ffd83dbSDimitry Andric } 10985ffd83dbSDimitry Andric 10995ffd83dbSDimitry Andric /// Check whether we can cheaply express the requested SCEV in terms of 11005ffd83dbSDimitry Andric /// the available PHI SCEV by truncation and/or inversion of the step. 11015ffd83dbSDimitry Andric static bool canBeCheaplyTransformed(ScalarEvolution &SE, 11025ffd83dbSDimitry Andric const SCEVAddRecExpr *Phi, 11035ffd83dbSDimitry Andric const SCEVAddRecExpr *Requested, 11045ffd83dbSDimitry Andric bool &InvertStep) { 1105fe6060f1SDimitry Andric // We can't transform to match a pointer PHI. 1106fe6060f1SDimitry Andric if (Phi->getType()->isPointerTy()) 1107fe6060f1SDimitry Andric return false; 1108fe6060f1SDimitry Andric 11095ffd83dbSDimitry Andric Type *PhiTy = SE.getEffectiveSCEVType(Phi->getType()); 11105ffd83dbSDimitry Andric Type *RequestedTy = SE.getEffectiveSCEVType(Requested->getType()); 11115ffd83dbSDimitry Andric 11125ffd83dbSDimitry Andric if (RequestedTy->getIntegerBitWidth() > PhiTy->getIntegerBitWidth()) 11135ffd83dbSDimitry Andric return false; 11145ffd83dbSDimitry Andric 11155ffd83dbSDimitry Andric // Try truncate it if necessary. 11165ffd83dbSDimitry Andric Phi = dyn_cast<SCEVAddRecExpr>(SE.getTruncateOrNoop(Phi, RequestedTy)); 11175ffd83dbSDimitry Andric if (!Phi) 11185ffd83dbSDimitry Andric return false; 11195ffd83dbSDimitry Andric 11205ffd83dbSDimitry Andric // Check whether truncation will help. 11215ffd83dbSDimitry Andric if (Phi == Requested) { 11225ffd83dbSDimitry Andric InvertStep = false; 11235ffd83dbSDimitry Andric return true; 11245ffd83dbSDimitry Andric } 11255ffd83dbSDimitry Andric 11265ffd83dbSDimitry Andric // Check whether inverting will help: {R,+,-1} == R - {0,+,1}. 
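As a quick sanity check of the identity in the comment above, the decreasing recurrence really is the start value minus the canonical increasing one at every iteration. A tiny editorial example, separate from the expander's own code, before the check that follows:

#include <cassert>

// {R,+,-1} at iteration I is R - I, which equals R minus the value of the
// canonical recurrence {0,+,1} at the same iteration.
int main() {
  const int R = 10;
  for (int I = 0; I < 8; ++I) {
    int Decreasing = R - I; // value of {R,+,-1} at iteration I
    int Canonical = I;      // value of {0,+,1} at iteration I
    assert(Decreasing == R - Canonical);
  }
  return 0;
}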
1127fe6060f1SDimitry Andric if (SE.getMinusSCEV(Requested->getStart(), Requested) == Phi) { 11285ffd83dbSDimitry Andric InvertStep = true; 11295ffd83dbSDimitry Andric return true; 11305ffd83dbSDimitry Andric } 11315ffd83dbSDimitry Andric 11325ffd83dbSDimitry Andric return false; 11335ffd83dbSDimitry Andric } 11345ffd83dbSDimitry Andric 11355ffd83dbSDimitry Andric static bool IsIncrementNSW(ScalarEvolution &SE, const SCEVAddRecExpr *AR) { 11365ffd83dbSDimitry Andric if (!isa<IntegerType>(AR->getType())) 11375ffd83dbSDimitry Andric return false; 11385ffd83dbSDimitry Andric 11395ffd83dbSDimitry Andric unsigned BitWidth = cast<IntegerType>(AR->getType())->getBitWidth(); 11405ffd83dbSDimitry Andric Type *WideTy = IntegerType::get(AR->getType()->getContext(), BitWidth * 2); 11415ffd83dbSDimitry Andric const SCEV *Step = AR->getStepRecurrence(SE); 11425ffd83dbSDimitry Andric const SCEV *OpAfterExtend = SE.getAddExpr(SE.getSignExtendExpr(Step, WideTy), 11435ffd83dbSDimitry Andric SE.getSignExtendExpr(AR, WideTy)); 11445ffd83dbSDimitry Andric const SCEV *ExtendAfterOp = 11455ffd83dbSDimitry Andric SE.getSignExtendExpr(SE.getAddExpr(AR, Step), WideTy); 11465ffd83dbSDimitry Andric return ExtendAfterOp == OpAfterExtend; 11475ffd83dbSDimitry Andric } 11485ffd83dbSDimitry Andric 11495ffd83dbSDimitry Andric static bool IsIncrementNUW(ScalarEvolution &SE, const SCEVAddRecExpr *AR) { 11505ffd83dbSDimitry Andric if (!isa<IntegerType>(AR->getType())) 11515ffd83dbSDimitry Andric return false; 11525ffd83dbSDimitry Andric 11535ffd83dbSDimitry Andric unsigned BitWidth = cast<IntegerType>(AR->getType())->getBitWidth(); 11545ffd83dbSDimitry Andric Type *WideTy = IntegerType::get(AR->getType()->getContext(), BitWidth * 2); 11555ffd83dbSDimitry Andric const SCEV *Step = AR->getStepRecurrence(SE); 11565ffd83dbSDimitry Andric const SCEV *OpAfterExtend = SE.getAddExpr(SE.getZeroExtendExpr(Step, WideTy), 11575ffd83dbSDimitry Andric SE.getZeroExtendExpr(AR, WideTy)); 11585ffd83dbSDimitry Andric const SCEV *ExtendAfterOp = 11595ffd83dbSDimitry Andric SE.getZeroExtendExpr(SE.getAddExpr(AR, Step), WideTy); 11605ffd83dbSDimitry Andric return ExtendAfterOp == OpAfterExtend; 11615ffd83dbSDimitry Andric } 11625ffd83dbSDimitry Andric 11635ffd83dbSDimitry Andric /// getAddRecExprPHILiterally - Helper for expandAddRecExprLiterally. Expand 11645ffd83dbSDimitry Andric /// the base addrec, which is the addrec without any non-loop-dominating 11655ffd83dbSDimitry Andric /// values, and return the PHI. 11665ffd83dbSDimitry Andric PHINode * 11675ffd83dbSDimitry Andric SCEVExpander::getAddRecExprPHILiterally(const SCEVAddRecExpr *Normalized, 11685ffd83dbSDimitry Andric const Loop *L, 11695ffd83dbSDimitry Andric Type *ExpandTy, 11705ffd83dbSDimitry Andric Type *IntTy, 11715ffd83dbSDimitry Andric Type *&TruncTy, 11725ffd83dbSDimitry Andric bool &InvertStep) { 11735ffd83dbSDimitry Andric assert((!IVIncInsertLoop||IVIncInsertPos) && "Uninitialized insert position"); 11745ffd83dbSDimitry Andric 11755ffd83dbSDimitry Andric // Reuse a previously-inserted PHI, if present. 
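IsIncrementNSW and IsIncrementNUW above argue about wrapping by comparing "extend then add" with "add then extend" in a type twice as wide. Below is a concrete i8/i16 illustration of why the two sides disagree exactly when the narrow add wraps; this is an editorial sketch, and the narrowing conversion it relies on is defined as modular only since C++20 (it matches two's-complement targets in practice).

#include <cassert>
#include <cstdint>

int main() {
  int8_t AR = 100, Step = 50;                           // 100 + 50 wraps in i8
  int16_t OpAfterExtend = int16_t(AR) + int16_t(Step);  // sext first, then add: 150
  int16_t ExtendAfterOp = int16_t(int8_t(AR + Step));   // add first, then sext: -106
  assert(OpAfterExtend != ExtendAfterOp); // mismatch => the add may wrap, so no nsw
  return 0;
}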
11765ffd83dbSDimitry Andric BasicBlock *LatchBlock = L->getLoopLatch(); 11775ffd83dbSDimitry Andric if (LatchBlock) { 11785ffd83dbSDimitry Andric PHINode *AddRecPhiMatch = nullptr; 11795ffd83dbSDimitry Andric Instruction *IncV = nullptr; 11805ffd83dbSDimitry Andric TruncTy = nullptr; 11815ffd83dbSDimitry Andric InvertStep = false; 11825ffd83dbSDimitry Andric 11835ffd83dbSDimitry Andric // Only try partially matching scevs that need truncation and/or 11845ffd83dbSDimitry Andric // step-inversion if we know this loop is outside the current loop. 11855ffd83dbSDimitry Andric bool TryNonMatchingSCEV = 11865ffd83dbSDimitry Andric IVIncInsertLoop && 11875ffd83dbSDimitry Andric SE.DT.properlyDominates(LatchBlock, IVIncInsertLoop->getHeader()); 11885ffd83dbSDimitry Andric 11895ffd83dbSDimitry Andric for (PHINode &PN : L->getHeader()->phis()) { 11905ffd83dbSDimitry Andric if (!SE.isSCEVable(PN.getType())) 11915ffd83dbSDimitry Andric continue; 11925ffd83dbSDimitry Andric 1193e8d8bef9SDimitry Andric // We should not look for a incomplete PHI. Getting SCEV for a incomplete 1194e8d8bef9SDimitry Andric // PHI has no meaning at all. 1195e8d8bef9SDimitry Andric if (!PN.isComplete()) { 1196fe6060f1SDimitry Andric SCEV_DEBUG_WITH_TYPE( 1197e8d8bef9SDimitry Andric DebugType, dbgs() << "One incomplete PHI is found: " << PN << "\n"); 1198e8d8bef9SDimitry Andric continue; 1199e8d8bef9SDimitry Andric } 1200e8d8bef9SDimitry Andric 12015ffd83dbSDimitry Andric const SCEVAddRecExpr *PhiSCEV = dyn_cast<SCEVAddRecExpr>(SE.getSCEV(&PN)); 12025ffd83dbSDimitry Andric if (!PhiSCEV) 12035ffd83dbSDimitry Andric continue; 12045ffd83dbSDimitry Andric 12055ffd83dbSDimitry Andric bool IsMatchingSCEV = PhiSCEV == Normalized; 12065ffd83dbSDimitry Andric // We only handle truncation and inversion of phi recurrences for the 12075ffd83dbSDimitry Andric // expanded expression if the expanded expression's loop dominates the 12085ffd83dbSDimitry Andric // loop we insert to. Check now, so we can bail out early. 12095ffd83dbSDimitry Andric if (!IsMatchingSCEV && !TryNonMatchingSCEV) 12105ffd83dbSDimitry Andric continue; 12115ffd83dbSDimitry Andric 12125ffd83dbSDimitry Andric // TODO: this possibly can be reworked to avoid this cast at all. 12135ffd83dbSDimitry Andric Instruction *TempIncV = 12145ffd83dbSDimitry Andric dyn_cast<Instruction>(PN.getIncomingValueForBlock(LatchBlock)); 12155ffd83dbSDimitry Andric if (!TempIncV) 12165ffd83dbSDimitry Andric continue; 12175ffd83dbSDimitry Andric 12185ffd83dbSDimitry Andric // Check whether we can reuse this PHI node. 12195ffd83dbSDimitry Andric if (LSRMode) { 12205ffd83dbSDimitry Andric if (!isExpandedAddRecExprPHI(&PN, TempIncV, L)) 12215ffd83dbSDimitry Andric continue; 12225ffd83dbSDimitry Andric } else { 12235ffd83dbSDimitry Andric if (!isNormalAddRecExprPHI(&PN, TempIncV, L)) 12245ffd83dbSDimitry Andric continue; 12255ffd83dbSDimitry Andric } 12265ffd83dbSDimitry Andric 12275ffd83dbSDimitry Andric // Stop if we have found an exact match SCEV. 12285ffd83dbSDimitry Andric if (IsMatchingSCEV) { 12295ffd83dbSDimitry Andric IncV = TempIncV; 12305ffd83dbSDimitry Andric TruncTy = nullptr; 12315ffd83dbSDimitry Andric InvertStep = false; 12325ffd83dbSDimitry Andric AddRecPhiMatch = &PN; 12335ffd83dbSDimitry Andric break; 12345ffd83dbSDimitry Andric } 12355ffd83dbSDimitry Andric 12365ffd83dbSDimitry Andric // Try whether the phi can be translated into the requested form 12375ffd83dbSDimitry Andric // (truncated and/or offset by a constant). 
12385ffd83dbSDimitry Andric       if ((!TruncTy || InvertStep) &&
12395ffd83dbSDimitry Andric           canBeCheaplyTransformed(SE, PhiSCEV, Normalized, InvertStep)) {
12405ffd83dbSDimitry Andric         // Record the phi node. But don't stop; we might find an exact match
12415ffd83dbSDimitry Andric         // later.
12425ffd83dbSDimitry Andric         AddRecPhiMatch = &PN;
12435ffd83dbSDimitry Andric         IncV = TempIncV;
12445ffd83dbSDimitry Andric         TruncTy = SE.getEffectiveSCEVType(Normalized->getType());
12455ffd83dbSDimitry Andric       }
12465ffd83dbSDimitry Andric     }
12475ffd83dbSDimitry Andric 
12485ffd83dbSDimitry Andric     if (AddRecPhiMatch) {
12495ffd83dbSDimitry Andric       // Ok, the add recurrence looks usable.
12505ffd83dbSDimitry Andric       // Remember this PHI, even in post-inc mode.
12515ffd83dbSDimitry Andric       InsertedValues.insert(AddRecPhiMatch);
12525ffd83dbSDimitry Andric       // Remember the increment.
12535ffd83dbSDimitry Andric       rememberInstruction(IncV);
1254e8d8bef9SDimitry Andric       // Those values were not actually inserted but re-used.
1255e8d8bef9SDimitry Andric       ReusedValues.insert(AddRecPhiMatch);
1256e8d8bef9SDimitry Andric       ReusedValues.insert(IncV);
12575ffd83dbSDimitry Andric       return AddRecPhiMatch;
12585ffd83dbSDimitry Andric     }
12595ffd83dbSDimitry Andric   }
12605ffd83dbSDimitry Andric 
12615ffd83dbSDimitry Andric   // Save the original insertion point so we can restore it when we're done.
12625ffd83dbSDimitry Andric   SCEVInsertPointGuard Guard(Builder, this);
12635ffd83dbSDimitry Andric 
12645ffd83dbSDimitry Andric   // Another AddRec may need to be recursively expanded below. For example, if
12655ffd83dbSDimitry Andric   // this AddRec is quadratic, the StepV may itself be an AddRec in this
12665ffd83dbSDimitry Andric   // loop. Remove this loop from the PostIncLoops set before expanding such
12675ffd83dbSDimitry Andric   // AddRecs. Otherwise, we cannot find a valid position for the step
12685ffd83dbSDimitry Andric   // (i.e. StepV can never dominate its loop header). Ideally, we could do
12695ffd83dbSDimitry Andric   // SavedIncLoops.swap(PostIncLoops), but we generally have a single element,
12705ffd83dbSDimitry Andric   // so it's not worth implementing SmallPtrSet::swap.
12715ffd83dbSDimitry Andric   PostIncLoopSet SavedPostIncLoops = PostIncLoops;
12725ffd83dbSDimitry Andric   PostIncLoops.clear();
12735ffd83dbSDimitry Andric 
12745ffd83dbSDimitry Andric   // Expand code for the start value into the loop preheader.
12755ffd83dbSDimitry Andric   assert(L->getLoopPreheader() &&
12765ffd83dbSDimitry Andric          "Can't expand add recurrences without a loop preheader!");
1277e8d8bef9SDimitry Andric   Value *StartV =
1278e8d8bef9SDimitry Andric       expandCodeForImpl(Normalized->getStart(), ExpandTy,
1279e8d8bef9SDimitry Andric                         L->getLoopPreheader()->getTerminator(), false);
12805ffd83dbSDimitry Andric 
12815ffd83dbSDimitry Andric   // StartV must have been inserted into L's preheader to dominate the new
12825ffd83dbSDimitry Andric   // phi.
12835ffd83dbSDimitry Andric   assert(!isa<Instruction>(StartV) ||
12845ffd83dbSDimitry Andric          SE.DT.properlyDominates(cast<Instruction>(StartV)->getParent(),
12855ffd83dbSDimitry Andric                                  L->getHeader()));
12865ffd83dbSDimitry Andric 
12875ffd83dbSDimitry Andric   // Expand code for the step value. Do this before creating the PHI so that PHI
12885ffd83dbSDimitry Andric   // reuse code doesn't see an incomplete PHI.
12895ffd83dbSDimitry Andric const SCEV *Step = Normalized->getStepRecurrence(SE); 12905ffd83dbSDimitry Andric // If the stride is negative, insert a sub instead of an add for the increment 12915ffd83dbSDimitry Andric // (unless it's a constant, because subtracts of constants are canonicalized 12925ffd83dbSDimitry Andric // to adds). 12935ffd83dbSDimitry Andric bool useSubtract = !ExpandTy->isPointerTy() && Step->isNonConstantNegative(); 12945ffd83dbSDimitry Andric if (useSubtract) 12955ffd83dbSDimitry Andric Step = SE.getNegativeSCEV(Step); 12965ffd83dbSDimitry Andric // Expand the step somewhere that dominates the loop header. 1297e8d8bef9SDimitry Andric Value *StepV = expandCodeForImpl( 1298e8d8bef9SDimitry Andric Step, IntTy, &*L->getHeader()->getFirstInsertionPt(), false); 12995ffd83dbSDimitry Andric 13005ffd83dbSDimitry Andric // The no-wrap behavior proved by IsIncrement(NUW|NSW) is only applicable if 13015ffd83dbSDimitry Andric // we actually do emit an addition. It does not apply if we emit a 13025ffd83dbSDimitry Andric // subtraction. 13035ffd83dbSDimitry Andric bool IncrementIsNUW = !useSubtract && IsIncrementNUW(SE, Normalized); 13045ffd83dbSDimitry Andric bool IncrementIsNSW = !useSubtract && IsIncrementNSW(SE, Normalized); 13055ffd83dbSDimitry Andric 13065ffd83dbSDimitry Andric // Create the PHI. 13075ffd83dbSDimitry Andric BasicBlock *Header = L->getHeader(); 13085ffd83dbSDimitry Andric Builder.SetInsertPoint(Header, Header->begin()); 13095ffd83dbSDimitry Andric pred_iterator HPB = pred_begin(Header), HPE = pred_end(Header); 13105ffd83dbSDimitry Andric PHINode *PN = Builder.CreatePHI(ExpandTy, std::distance(HPB, HPE), 13115ffd83dbSDimitry Andric Twine(IVName) + ".iv"); 13125ffd83dbSDimitry Andric 13135ffd83dbSDimitry Andric // Create the step instructions and populate the PHI. 13145ffd83dbSDimitry Andric for (pred_iterator HPI = HPB; HPI != HPE; ++HPI) { 13155ffd83dbSDimitry Andric BasicBlock *Pred = *HPI; 13165ffd83dbSDimitry Andric 13175ffd83dbSDimitry Andric // Add a start value. 13185ffd83dbSDimitry Andric if (!L->contains(Pred)) { 13195ffd83dbSDimitry Andric PN->addIncoming(StartV, Pred); 13205ffd83dbSDimitry Andric continue; 13215ffd83dbSDimitry Andric } 13225ffd83dbSDimitry Andric 13235ffd83dbSDimitry Andric // Create a step value and add it to the PHI. 13245ffd83dbSDimitry Andric // If IVIncInsertLoop is non-null and equal to the addrec's loop, insert the 13255ffd83dbSDimitry Andric // instructions at IVIncInsertPos. 13265ffd83dbSDimitry Andric Instruction *InsertPos = L == IVIncInsertLoop ? 13275ffd83dbSDimitry Andric IVIncInsertPos : Pred->getTerminator(); 13285ffd83dbSDimitry Andric Builder.SetInsertPoint(InsertPos); 13295ffd83dbSDimitry Andric Value *IncV = expandIVInc(PN, StepV, L, ExpandTy, IntTy, useSubtract); 13305ffd83dbSDimitry Andric 13315ffd83dbSDimitry Andric if (isa<OverflowingBinaryOperator>(IncV)) { 13325ffd83dbSDimitry Andric if (IncrementIsNUW) 13335ffd83dbSDimitry Andric cast<BinaryOperator>(IncV)->setHasNoUnsignedWrap(); 13345ffd83dbSDimitry Andric if (IncrementIsNSW) 13355ffd83dbSDimitry Andric cast<BinaryOperator>(IncV)->setHasNoSignedWrap(); 13365ffd83dbSDimitry Andric } 13375ffd83dbSDimitry Andric PN->addIncoming(IncV, Pred); 13385ffd83dbSDimitry Andric } 13395ffd83dbSDimitry Andric 13405ffd83dbSDimitry Andric // After expanding subexpressions, restore the PostIncLoops set so the caller 13415ffd83dbSDimitry Andric // can ensure that IVIncrement dominates the current uses. 
13425ffd83dbSDimitry Andric PostIncLoops = SavedPostIncLoops; 13435ffd83dbSDimitry Andric 1344fe6060f1SDimitry Andric // Remember this PHI, even in post-inc mode. LSR SCEV-based salvaging is most 1345fe6060f1SDimitry Andric // effective when we are able to use an IV inserted here, so record it. 13465ffd83dbSDimitry Andric InsertedValues.insert(PN); 1347fe6060f1SDimitry Andric InsertedIVs.push_back(PN); 13485ffd83dbSDimitry Andric return PN; 13495ffd83dbSDimitry Andric } 13505ffd83dbSDimitry Andric 13515ffd83dbSDimitry Andric Value *SCEVExpander::expandAddRecExprLiterally(const SCEVAddRecExpr *S) { 13525ffd83dbSDimitry Andric Type *STy = S->getType(); 13535ffd83dbSDimitry Andric Type *IntTy = SE.getEffectiveSCEVType(STy); 13545ffd83dbSDimitry Andric const Loop *L = S->getLoop(); 13555ffd83dbSDimitry Andric 13565ffd83dbSDimitry Andric // Determine a normalized form of this expression, which is the expression 13575ffd83dbSDimitry Andric // before any post-inc adjustment is made. 13585ffd83dbSDimitry Andric const SCEVAddRecExpr *Normalized = S; 13595ffd83dbSDimitry Andric if (PostIncLoops.count(L)) { 13605ffd83dbSDimitry Andric PostIncLoopSet Loops; 13615ffd83dbSDimitry Andric Loops.insert(L); 13625ffd83dbSDimitry Andric Normalized = cast<SCEVAddRecExpr>(normalizeForPostIncUse(S, Loops, SE)); 13635ffd83dbSDimitry Andric } 13645ffd83dbSDimitry Andric 13655ffd83dbSDimitry Andric // Strip off any non-loop-dominating component from the addrec start. 13665ffd83dbSDimitry Andric const SCEV *Start = Normalized->getStart(); 13675ffd83dbSDimitry Andric const SCEV *PostLoopOffset = nullptr; 13685ffd83dbSDimitry Andric if (!SE.properlyDominates(Start, L->getHeader())) { 13695ffd83dbSDimitry Andric PostLoopOffset = Start; 13705ffd83dbSDimitry Andric Start = SE.getConstant(Normalized->getType(), 0); 13715ffd83dbSDimitry Andric Normalized = cast<SCEVAddRecExpr>( 13725ffd83dbSDimitry Andric SE.getAddRecExpr(Start, Normalized->getStepRecurrence(SE), 13735ffd83dbSDimitry Andric Normalized->getLoop(), 13745ffd83dbSDimitry Andric Normalized->getNoWrapFlags(SCEV::FlagNW))); 13755ffd83dbSDimitry Andric } 13765ffd83dbSDimitry Andric 13775ffd83dbSDimitry Andric // Strip off any non-loop-dominating component from the addrec step. 13785ffd83dbSDimitry Andric const SCEV *Step = Normalized->getStepRecurrence(SE); 13795ffd83dbSDimitry Andric const SCEV *PostLoopScale = nullptr; 13805ffd83dbSDimitry Andric if (!SE.dominates(Step, L->getHeader())) { 13815ffd83dbSDimitry Andric PostLoopScale = Step; 13825ffd83dbSDimitry Andric Step = SE.getConstant(Normalized->getType(), 1); 13835ffd83dbSDimitry Andric if (!Start->isZero()) { 13845ffd83dbSDimitry Andric // The normalization below assumes that Start is constant zero, so if 13855ffd83dbSDimitry Andric // it isn't re-associate Start to PostLoopOffset. 13865ffd83dbSDimitry Andric assert(!PostLoopOffset && "Start not-null but PostLoopOffset set?"); 13875ffd83dbSDimitry Andric PostLoopOffset = Start; 13885ffd83dbSDimitry Andric Start = SE.getConstant(Normalized->getType(), 0); 13895ffd83dbSDimitry Andric } 13905ffd83dbSDimitry Andric Normalized = 13915ffd83dbSDimitry Andric cast<SCEVAddRecExpr>(SE.getAddRecExpr( 13925ffd83dbSDimitry Andric Start, Step, Normalized->getLoop(), 13935ffd83dbSDimitry Andric Normalized->getNoWrapFlags(SCEV::FlagNW))); 13945ffd83dbSDimitry Andric } 13955ffd83dbSDimitry Andric 13965ffd83dbSDimitry Andric // Expand the core addrec. 
If we need post-loop scaling, force it to 13975ffd83dbSDimitry Andric // expand to an integer type to avoid the need for additional casting. 13985ffd83dbSDimitry Andric Type *ExpandTy = PostLoopScale ? IntTy : STy; 13995ffd83dbSDimitry Andric // We can't use a pointer type for the addrec if the pointer type is 14005ffd83dbSDimitry Andric // non-integral. 14015ffd83dbSDimitry Andric Type *AddRecPHIExpandTy = 14025ffd83dbSDimitry Andric DL.isNonIntegralPointerType(STy) ? Normalized->getType() : ExpandTy; 14035ffd83dbSDimitry Andric 14045ffd83dbSDimitry Andric // In some cases, we decide to reuse an existing phi node but need to truncate 14055ffd83dbSDimitry Andric // it and/or invert the step. 14065ffd83dbSDimitry Andric Type *TruncTy = nullptr; 14075ffd83dbSDimitry Andric bool InvertStep = false; 14085ffd83dbSDimitry Andric PHINode *PN = getAddRecExprPHILiterally(Normalized, L, AddRecPHIExpandTy, 14095ffd83dbSDimitry Andric IntTy, TruncTy, InvertStep); 14105ffd83dbSDimitry Andric 14115ffd83dbSDimitry Andric // Accommodate post-inc mode, if necessary. 14125ffd83dbSDimitry Andric Value *Result; 14135ffd83dbSDimitry Andric if (!PostIncLoops.count(L)) 14145ffd83dbSDimitry Andric Result = PN; 14155ffd83dbSDimitry Andric else { 14165ffd83dbSDimitry Andric // In PostInc mode, use the post-incremented value. 14175ffd83dbSDimitry Andric BasicBlock *LatchBlock = L->getLoopLatch(); 14185ffd83dbSDimitry Andric assert(LatchBlock && "PostInc mode requires a unique loop latch!"); 14195ffd83dbSDimitry Andric Result = PN->getIncomingValueForBlock(LatchBlock); 14205ffd83dbSDimitry Andric 1421e8d8bef9SDimitry Andric // We might be introducing a new use of the post-inc IV that is not poison 1422e8d8bef9SDimitry Andric // safe, in which case we should drop poison generating flags. Only keep 1423e8d8bef9SDimitry Andric // those flags for which SCEV has proven that they always hold. 1424e8d8bef9SDimitry Andric if (isa<OverflowingBinaryOperator>(Result)) { 1425e8d8bef9SDimitry Andric auto *I = cast<Instruction>(Result); 1426e8d8bef9SDimitry Andric if (!S->hasNoUnsignedWrap()) 1427e8d8bef9SDimitry Andric I->setHasNoUnsignedWrap(false); 1428e8d8bef9SDimitry Andric if (!S->hasNoSignedWrap()) 1429e8d8bef9SDimitry Andric I->setHasNoSignedWrap(false); 1430e8d8bef9SDimitry Andric } 1431e8d8bef9SDimitry Andric 14325ffd83dbSDimitry Andric // For an expansion to use the postinc form, the client must call 14335ffd83dbSDimitry Andric // expandCodeFor with an InsertPoint that is either outside the PostIncLoop 14345ffd83dbSDimitry Andric // or dominated by IVIncInsertPos. 14355ffd83dbSDimitry Andric if (isa<Instruction>(Result) && 14365ffd83dbSDimitry Andric !SE.DT.dominates(cast<Instruction>(Result), 14375ffd83dbSDimitry Andric &*Builder.GetInsertPoint())) { 14385ffd83dbSDimitry Andric // The induction variable's postinc expansion does not dominate this use. 14395ffd83dbSDimitry Andric // IVUsers tries to prevent this case, so it is rare. However, it can 14405ffd83dbSDimitry Andric // happen when an IVUser outside the loop is not dominated by the latch 14415ffd83dbSDimitry Andric // block. Adjusting IVIncInsertPos before expansion begins cannot handle 14425ffd83dbSDimitry Andric // all cases. Consider a phi outside whose operand is replaced during 14435ffd83dbSDimitry Andric // expansion with the value of the postinc user. Without fundamentally 14445ffd83dbSDimitry Andric // changing the way postinc users are tracked, the only remedy is 14455ffd83dbSDimitry Andric // inserting an extra IV increment. 
StepV might fold into PostLoopOffset, 14465ffd83dbSDimitry Andric // but hopefully expandCodeFor handles that. 14475ffd83dbSDimitry Andric bool useSubtract = 14485ffd83dbSDimitry Andric !ExpandTy->isPointerTy() && Step->isNonConstantNegative(); 14495ffd83dbSDimitry Andric if (useSubtract) 14505ffd83dbSDimitry Andric Step = SE.getNegativeSCEV(Step); 14515ffd83dbSDimitry Andric Value *StepV; 14525ffd83dbSDimitry Andric { 14535ffd83dbSDimitry Andric // Expand the step somewhere that dominates the loop header. 14545ffd83dbSDimitry Andric SCEVInsertPointGuard Guard(Builder, this); 1455e8d8bef9SDimitry Andric StepV = expandCodeForImpl( 1456e8d8bef9SDimitry Andric Step, IntTy, &*L->getHeader()->getFirstInsertionPt(), false); 14575ffd83dbSDimitry Andric } 14585ffd83dbSDimitry Andric Result = expandIVInc(PN, StepV, L, ExpandTy, IntTy, useSubtract); 14595ffd83dbSDimitry Andric } 14605ffd83dbSDimitry Andric } 14615ffd83dbSDimitry Andric 14625ffd83dbSDimitry Andric // We have decided to reuse an induction variable of a dominating loop. Apply 14635ffd83dbSDimitry Andric // truncation and/or inversion of the step. 14645ffd83dbSDimitry Andric if (TruncTy) { 14655ffd83dbSDimitry Andric Type *ResTy = Result->getType(); 14665ffd83dbSDimitry Andric // Normalize the result type. 14675ffd83dbSDimitry Andric if (ResTy != SE.getEffectiveSCEVType(ResTy)) 14685ffd83dbSDimitry Andric Result = InsertNoopCastOfTo(Result, SE.getEffectiveSCEVType(ResTy)); 14695ffd83dbSDimitry Andric // Truncate the result. 1470e8d8bef9SDimitry Andric if (TruncTy != Result->getType()) 14715ffd83dbSDimitry Andric Result = Builder.CreateTrunc(Result, TruncTy); 1472e8d8bef9SDimitry Andric 14735ffd83dbSDimitry Andric // Invert the result. 1474e8d8bef9SDimitry Andric if (InvertStep) 1475e8d8bef9SDimitry Andric Result = Builder.CreateSub( 1476e8d8bef9SDimitry Andric expandCodeForImpl(Normalized->getStart(), TruncTy, false), Result); 14775ffd83dbSDimitry Andric } 14785ffd83dbSDimitry Andric 14795ffd83dbSDimitry Andric // Re-apply any non-loop-dominating scale. 14805ffd83dbSDimitry Andric if (PostLoopScale) { 14815ffd83dbSDimitry Andric assert(S->isAffine() && "Can't linearly scale non-affine recurrences."); 14825ffd83dbSDimitry Andric Result = InsertNoopCastOfTo(Result, IntTy); 14835ffd83dbSDimitry Andric Result = Builder.CreateMul(Result, 1484e8d8bef9SDimitry Andric expandCodeForImpl(PostLoopScale, IntTy, false)); 14855ffd83dbSDimitry Andric } 14865ffd83dbSDimitry Andric 14875ffd83dbSDimitry Andric // Re-apply any non-loop-dominating offset. 
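Between the scale re-application above and the offset re-application below, the shape of the final value is simply the expanded core recurrence times PostLoopScale plus PostLoopOffset. Here is a scalar sketch of that recombination for the integer (non-pointer) case; the helper name is hypothetical and purely illustrative.

#include <cstdint>

// After the loop-varying core {0,+,Step} has been expanded to a PHI, the
// stripped, loop-invariant parts are reapplied outside the recurrence.
static uint64_t reapplyPostLoopParts(uint64_t CoreIVValue,
                                     uint64_t PostLoopScale,
                                     uint64_t PostLoopOffset) {
  return CoreIVValue * PostLoopScale + PostLoopOffset;
}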
14885ffd83dbSDimitry Andric   if (PostLoopOffset) {
14895ffd83dbSDimitry Andric     if (PointerType *PTy = dyn_cast<PointerType>(ExpandTy)) {
14905ffd83dbSDimitry Andric       if (Result->getType()->isIntegerTy()) {
1491e8d8bef9SDimitry Andric         Value *Base = expandCodeForImpl(PostLoopOffset, ExpandTy, false);
14925ffd83dbSDimitry Andric         Result = expandAddToGEP(SE.getUnknown(Result), PTy, IntTy, Base);
14935ffd83dbSDimitry Andric       } else {
14945ffd83dbSDimitry Andric         Result = expandAddToGEP(PostLoopOffset, PTy, IntTy, Result);
14955ffd83dbSDimitry Andric       }
14965ffd83dbSDimitry Andric     } else {
14975ffd83dbSDimitry Andric       Result = InsertNoopCastOfTo(Result, IntTy);
1498e8d8bef9SDimitry Andric       Result = Builder.CreateAdd(
1499e8d8bef9SDimitry Andric           Result, expandCodeForImpl(PostLoopOffset, IntTy, false));
15005ffd83dbSDimitry Andric     }
15015ffd83dbSDimitry Andric   }
15025ffd83dbSDimitry Andric 
15035ffd83dbSDimitry Andric   return Result;
15045ffd83dbSDimitry Andric }
15055ffd83dbSDimitry Andric 
15065ffd83dbSDimitry Andric Value *SCEVExpander::visitAddRecExpr(const SCEVAddRecExpr *S) {
15075ffd83dbSDimitry Andric   // In canonical mode we compute the addrec as an expression of a canonical IV
15085ffd83dbSDimitry Andric   // using evaluateAtIteration and expand the resulting SCEV expression. This
15095ffd83dbSDimitry Andric   // way we avoid introducing new IVs to carry on the computation of the addrec
15105ffd83dbSDimitry Andric   // throughout the loop.
15115ffd83dbSDimitry Andric   //
15125ffd83dbSDimitry Andric   // For nested addrecs evaluateAtIteration might need a canonical IV of a
15135ffd83dbSDimitry Andric   // type wider than the addrec itself. Emitting a canonical IV of the
15145ffd83dbSDimitry Andric   // proper type might produce non-legal types, for example expanding an i64
15155ffd83dbSDimitry Andric   // {0,+,2,+,1} addrec would need an i65 canonical IV. To avoid this just fall
15165ffd83dbSDimitry Andric   // back to non-canonical mode for nested addrecs.
15175ffd83dbSDimitry Andric   if (!CanonicalMode || (S->getNumOperands() > 2))
15185ffd83dbSDimitry Andric     return expandAddRecExprLiterally(S);
15195ffd83dbSDimitry Andric 
15205ffd83dbSDimitry Andric   Type *Ty = SE.getEffectiveSCEVType(S->getType());
15215ffd83dbSDimitry Andric   const Loop *L = S->getLoop();
15225ffd83dbSDimitry Andric 
15235ffd83dbSDimitry Andric   // First check for an existing canonical IV in a suitable type.
15245ffd83dbSDimitry Andric   PHINode *CanonicalIV = nullptr;
15255ffd83dbSDimitry Andric   if (PHINode *PN = L->getCanonicalInductionVariable())
15265ffd83dbSDimitry Andric     if (SE.getTypeSizeInBits(PN->getType()) >= SE.getTypeSizeInBits(Ty))
15275ffd83dbSDimitry Andric       CanonicalIV = PN;
15285ffd83dbSDimitry Andric 
15295ffd83dbSDimitry Andric   // Rewrite an AddRec in terms of the canonical induction variable, if
15305ffd83dbSDimitry Andric   // its type is narrower.
15315ffd83dbSDimitry Andric if (CanonicalIV && 1532fe6060f1SDimitry Andric SE.getTypeSizeInBits(CanonicalIV->getType()) > SE.getTypeSizeInBits(Ty) && 1533fe6060f1SDimitry Andric !S->getType()->isPointerTy()) { 15345ffd83dbSDimitry Andric SmallVector<const SCEV *, 4> NewOps(S->getNumOperands()); 15355ffd83dbSDimitry Andric for (unsigned i = 0, e = S->getNumOperands(); i != e; ++i) 15365ffd83dbSDimitry Andric NewOps[i] = SE.getAnyExtendExpr(S->op_begin()[i], CanonicalIV->getType()); 15375ffd83dbSDimitry Andric Value *V = expand(SE.getAddRecExpr(NewOps, S->getLoop(), 15385ffd83dbSDimitry Andric S->getNoWrapFlags(SCEV::FlagNW))); 15395ffd83dbSDimitry Andric BasicBlock::iterator NewInsertPt = 1540e8d8bef9SDimitry Andric findInsertPointAfter(cast<Instruction>(V), &*Builder.GetInsertPoint()); 1541e8d8bef9SDimitry Andric V = expandCodeForImpl(SE.getTruncateExpr(SE.getUnknown(V), Ty), nullptr, 1542e8d8bef9SDimitry Andric &*NewInsertPt, false); 15435ffd83dbSDimitry Andric return V; 15445ffd83dbSDimitry Andric } 15455ffd83dbSDimitry Andric 15465ffd83dbSDimitry Andric // {X,+,F} --> X + {0,+,F} 15475ffd83dbSDimitry Andric if (!S->getStart()->isZero()) { 1548349cc55cSDimitry Andric if (PointerType *PTy = dyn_cast<PointerType>(S->getType())) { 1549349cc55cSDimitry Andric Value *StartV = expand(SE.getPointerBase(S)); 1550349cc55cSDimitry Andric assert(StartV->getType() == PTy && "Pointer type mismatch for GEP!"); 1551349cc55cSDimitry Andric return expandAddToGEP(SE.removePointerBase(S), PTy, Ty, StartV); 1552349cc55cSDimitry Andric } 1553349cc55cSDimitry Andric 1554e8d8bef9SDimitry Andric SmallVector<const SCEV *, 4> NewOps(S->operands()); 15555ffd83dbSDimitry Andric NewOps[0] = SE.getConstant(Ty, 0); 15565ffd83dbSDimitry Andric const SCEV *Rest = SE.getAddRecExpr(NewOps, L, 15575ffd83dbSDimitry Andric S->getNoWrapFlags(SCEV::FlagNW)); 15585ffd83dbSDimitry Andric 15595ffd83dbSDimitry Andric // Just do a normal add. Pre-expand the operands to suppress folding. 15605ffd83dbSDimitry Andric // 15615ffd83dbSDimitry Andric // The LHS and RHS values are factored out of the expand call to make the 15625ffd83dbSDimitry Andric // output independent of the argument evaluation order. 15635ffd83dbSDimitry Andric const SCEV *AddExprLHS = SE.getUnknown(expand(S->getStart())); 15645ffd83dbSDimitry Andric const SCEV *AddExprRHS = SE.getUnknown(expand(Rest)); 15655ffd83dbSDimitry Andric return expand(SE.getAddExpr(AddExprLHS, AddExprRHS)); 15665ffd83dbSDimitry Andric } 15675ffd83dbSDimitry Andric 15685ffd83dbSDimitry Andric // If we don't yet have a canonical IV, create one. 15695ffd83dbSDimitry Andric if (!CanonicalIV) { 15705ffd83dbSDimitry Andric // Create and insert the PHI node for the induction variable in the 15715ffd83dbSDimitry Andric // specified loop. 
15725ffd83dbSDimitry Andric BasicBlock *Header = L->getHeader(); 15735ffd83dbSDimitry Andric pred_iterator HPB = pred_begin(Header), HPE = pred_end(Header); 15745ffd83dbSDimitry Andric CanonicalIV = PHINode::Create(Ty, std::distance(HPB, HPE), "indvar", 15755ffd83dbSDimitry Andric &Header->front()); 15765ffd83dbSDimitry Andric rememberInstruction(CanonicalIV); 15775ffd83dbSDimitry Andric 15785ffd83dbSDimitry Andric SmallSet<BasicBlock *, 4> PredSeen; 15795ffd83dbSDimitry Andric Constant *One = ConstantInt::get(Ty, 1); 15805ffd83dbSDimitry Andric for (pred_iterator HPI = HPB; HPI != HPE; ++HPI) { 15815ffd83dbSDimitry Andric BasicBlock *HP = *HPI; 15825ffd83dbSDimitry Andric if (!PredSeen.insert(HP).second) { 15835ffd83dbSDimitry Andric // There must be an incoming value for each predecessor, even the 15845ffd83dbSDimitry Andric // duplicates! 15855ffd83dbSDimitry Andric CanonicalIV->addIncoming(CanonicalIV->getIncomingValueForBlock(HP), HP); 15865ffd83dbSDimitry Andric continue; 15875ffd83dbSDimitry Andric } 15885ffd83dbSDimitry Andric 15895ffd83dbSDimitry Andric if (L->contains(HP)) { 15905ffd83dbSDimitry Andric // Insert a unit add instruction right before the terminator 15915ffd83dbSDimitry Andric // corresponding to the back-edge. 15925ffd83dbSDimitry Andric Instruction *Add = BinaryOperator::CreateAdd(CanonicalIV, One, 15935ffd83dbSDimitry Andric "indvar.next", 15945ffd83dbSDimitry Andric HP->getTerminator()); 15955ffd83dbSDimitry Andric Add->setDebugLoc(HP->getTerminator()->getDebugLoc()); 15965ffd83dbSDimitry Andric rememberInstruction(Add); 15975ffd83dbSDimitry Andric CanonicalIV->addIncoming(Add, HP); 15985ffd83dbSDimitry Andric } else { 15995ffd83dbSDimitry Andric CanonicalIV->addIncoming(Constant::getNullValue(Ty), HP); 16005ffd83dbSDimitry Andric } 16015ffd83dbSDimitry Andric } 16025ffd83dbSDimitry Andric } 16035ffd83dbSDimitry Andric 16045ffd83dbSDimitry Andric // {0,+,1} --> Insert a canonical induction variable into the loop! 16055ffd83dbSDimitry Andric if (S->isAffine() && S->getOperand(1)->isOne()) { 16065ffd83dbSDimitry Andric assert(Ty == SE.getEffectiveSCEVType(CanonicalIV->getType()) && 16075ffd83dbSDimitry Andric "IVs with types different from the canonical IV should " 16085ffd83dbSDimitry Andric "already have been handled!"); 16095ffd83dbSDimitry Andric return CanonicalIV; 16105ffd83dbSDimitry Andric } 16115ffd83dbSDimitry Andric 16125ffd83dbSDimitry Andric // {0,+,F} --> {0,+,1} * F 16135ffd83dbSDimitry Andric 16145ffd83dbSDimitry Andric // If this is a simple linear addrec, emit it now as a special case. 16155ffd83dbSDimitry Andric if (S->isAffine()) // {0,+,F} --> i*F 16165ffd83dbSDimitry Andric return 16175ffd83dbSDimitry Andric expand(SE.getTruncateOrNoop( 16185ffd83dbSDimitry Andric SE.getMulExpr(SE.getUnknown(CanonicalIV), 16195ffd83dbSDimitry Andric SE.getNoopOrAnyExtend(S->getOperand(1), 16205ffd83dbSDimitry Andric CanonicalIV->getType())), 16215ffd83dbSDimitry Andric Ty)); 16225ffd83dbSDimitry Andric 16235ffd83dbSDimitry Andric // If this is a chain of recurrences, turn it into a closed form, using the 16245ffd83dbSDimitry Andric // folders, then expandCodeFor the closed form. This allows the folders to 16255ffd83dbSDimitry Andric // simplify the expression without having to build a bunch of special code 16265ffd83dbSDimitry Andric // into this folder. 16275ffd83dbSDimitry Andric const SCEV *IH = SE.getUnknown(CanonicalIV); // Get I as a "symbolic" SCEV. 
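With the canonical IV in hand, the rewrites above reduce an affine addrec to its start value plus the canonical IV times the step. The following scalar sketch shows that affine evaluation; the helper name is hypothetical, and the real evaluateAtIteration lives in ScalarEvolution, not in this file.

#include <cstdint>

// {Start,+,Step} evaluated at iteration I of the canonical IV {0,+,1}:
// the {X,+,F} and {0,+,F} rewrites above amount to Start + I * Step.
static uint64_t affineAddRecAt(uint64_t Start, uint64_t Step, uint64_t I) {
  return Start + I * Step;
}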
16285ffd83dbSDimitry Andric 16295ffd83dbSDimitry Andric // Promote S up to the canonical IV type, if the cast is foldable. 16305ffd83dbSDimitry Andric const SCEV *NewS = S; 16315ffd83dbSDimitry Andric const SCEV *Ext = SE.getNoopOrAnyExtend(S, CanonicalIV->getType()); 16325ffd83dbSDimitry Andric if (isa<SCEVAddRecExpr>(Ext)) 16335ffd83dbSDimitry Andric NewS = Ext; 16345ffd83dbSDimitry Andric 16355ffd83dbSDimitry Andric const SCEV *V = cast<SCEVAddRecExpr>(NewS)->evaluateAtIteration(IH, SE); 16365ffd83dbSDimitry Andric //cerr << "Evaluated: " << *this << "\n to: " << *V << "\n"; 16375ffd83dbSDimitry Andric 16385ffd83dbSDimitry Andric // Truncate the result down to the original type, if needed. 16395ffd83dbSDimitry Andric const SCEV *T = SE.getTruncateOrNoop(V, Ty); 16405ffd83dbSDimitry Andric return expand(T); 16415ffd83dbSDimitry Andric } 16425ffd83dbSDimitry Andric 1643e8d8bef9SDimitry Andric Value *SCEVExpander::visitPtrToIntExpr(const SCEVPtrToIntExpr *S) { 1644e8d8bef9SDimitry Andric Value *V = 1645e8d8bef9SDimitry Andric expandCodeForImpl(S->getOperand(), S->getOperand()->getType(), false); 1646fe6060f1SDimitry Andric return ReuseOrCreateCast(V, S->getType(), CastInst::PtrToInt, 1647fe6060f1SDimitry Andric GetOptimalInsertionPointForCastOf(V)); 1648e8d8bef9SDimitry Andric } 1649e8d8bef9SDimitry Andric 16505ffd83dbSDimitry Andric Value *SCEVExpander::visitTruncateExpr(const SCEVTruncateExpr *S) { 16515ffd83dbSDimitry Andric Type *Ty = SE.getEffectiveSCEVType(S->getType()); 1652e8d8bef9SDimitry Andric Value *V = expandCodeForImpl( 1653e8d8bef9SDimitry Andric S->getOperand(), SE.getEffectiveSCEVType(S->getOperand()->getType()), 1654e8d8bef9SDimitry Andric false); 1655e8d8bef9SDimitry Andric return Builder.CreateTrunc(V, Ty); 16565ffd83dbSDimitry Andric } 16575ffd83dbSDimitry Andric 16585ffd83dbSDimitry Andric Value *SCEVExpander::visitZeroExtendExpr(const SCEVZeroExtendExpr *S) { 16595ffd83dbSDimitry Andric Type *Ty = SE.getEffectiveSCEVType(S->getType()); 1660e8d8bef9SDimitry Andric Value *V = expandCodeForImpl( 1661e8d8bef9SDimitry Andric S->getOperand(), SE.getEffectiveSCEVType(S->getOperand()->getType()), 1662e8d8bef9SDimitry Andric false); 1663e8d8bef9SDimitry Andric return Builder.CreateZExt(V, Ty); 16645ffd83dbSDimitry Andric } 16655ffd83dbSDimitry Andric 16665ffd83dbSDimitry Andric Value *SCEVExpander::visitSignExtendExpr(const SCEVSignExtendExpr *S) { 16675ffd83dbSDimitry Andric Type *Ty = SE.getEffectiveSCEVType(S->getType()); 1668e8d8bef9SDimitry Andric Value *V = expandCodeForImpl( 1669e8d8bef9SDimitry Andric S->getOperand(), SE.getEffectiveSCEVType(S->getOperand()->getType()), 1670e8d8bef9SDimitry Andric false); 1671e8d8bef9SDimitry Andric return Builder.CreateSExt(V, Ty); 16725ffd83dbSDimitry Andric } 16735ffd83dbSDimitry Andric 16745ffd83dbSDimitry Andric Value *SCEVExpander::visitSMaxExpr(const SCEVSMaxExpr *S) { 16755ffd83dbSDimitry Andric Value *LHS = expand(S->getOperand(S->getNumOperands()-1)); 16765ffd83dbSDimitry Andric Type *Ty = LHS->getType(); 16775ffd83dbSDimitry Andric for (int i = S->getNumOperands()-2; i >= 0; --i) { 16785ffd83dbSDimitry Andric // In the case of mixed integer and pointer types, do the 16795ffd83dbSDimitry Andric // rest of the comparisons as integer. 
16805ffd83dbSDimitry Andric Type *OpTy = S->getOperand(i)->getType(); 16815ffd83dbSDimitry Andric if (OpTy->isIntegerTy() != Ty->isIntegerTy()) { 16825ffd83dbSDimitry Andric Ty = SE.getEffectiveSCEVType(Ty); 16835ffd83dbSDimitry Andric LHS = InsertNoopCastOfTo(LHS, Ty); 16845ffd83dbSDimitry Andric } 1685e8d8bef9SDimitry Andric Value *RHS = expandCodeForImpl(S->getOperand(i), Ty, false); 1686fe6060f1SDimitry Andric Value *Sel; 1687fe6060f1SDimitry Andric if (Ty->isIntegerTy()) 1688fe6060f1SDimitry Andric Sel = Builder.CreateIntrinsic(Intrinsic::smax, {Ty}, {LHS, RHS}, 1689fe6060f1SDimitry Andric /*FMFSource=*/nullptr, "smax"); 1690fe6060f1SDimitry Andric else { 16915ffd83dbSDimitry Andric Value *ICmp = Builder.CreateICmpSGT(LHS, RHS); 1692fe6060f1SDimitry Andric Sel = Builder.CreateSelect(ICmp, LHS, RHS, "smax"); 1693fe6060f1SDimitry Andric } 16945ffd83dbSDimitry Andric LHS = Sel; 16955ffd83dbSDimitry Andric } 16965ffd83dbSDimitry Andric // In the case of mixed integer and pointer types, cast the 16975ffd83dbSDimitry Andric // final result back to the pointer type. 16985ffd83dbSDimitry Andric if (LHS->getType() != S->getType()) 16995ffd83dbSDimitry Andric LHS = InsertNoopCastOfTo(LHS, S->getType()); 17005ffd83dbSDimitry Andric return LHS; 17015ffd83dbSDimitry Andric } 17025ffd83dbSDimitry Andric 17035ffd83dbSDimitry Andric Value *SCEVExpander::visitUMaxExpr(const SCEVUMaxExpr *S) { 17045ffd83dbSDimitry Andric Value *LHS = expand(S->getOperand(S->getNumOperands()-1)); 17055ffd83dbSDimitry Andric Type *Ty = LHS->getType(); 17065ffd83dbSDimitry Andric for (int i = S->getNumOperands()-2; i >= 0; --i) { 17075ffd83dbSDimitry Andric // In the case of mixed integer and pointer types, do the 17085ffd83dbSDimitry Andric // rest of the comparisons as integer. 17095ffd83dbSDimitry Andric Type *OpTy = S->getOperand(i)->getType(); 17105ffd83dbSDimitry Andric if (OpTy->isIntegerTy() != Ty->isIntegerTy()) { 17115ffd83dbSDimitry Andric Ty = SE.getEffectiveSCEVType(Ty); 17125ffd83dbSDimitry Andric LHS = InsertNoopCastOfTo(LHS, Ty); 17135ffd83dbSDimitry Andric } 1714e8d8bef9SDimitry Andric Value *RHS = expandCodeForImpl(S->getOperand(i), Ty, false); 1715fe6060f1SDimitry Andric Value *Sel; 1716fe6060f1SDimitry Andric if (Ty->isIntegerTy()) 1717fe6060f1SDimitry Andric Sel = Builder.CreateIntrinsic(Intrinsic::umax, {Ty}, {LHS, RHS}, 1718fe6060f1SDimitry Andric /*FMFSource=*/nullptr, "umax"); 1719fe6060f1SDimitry Andric else { 17205ffd83dbSDimitry Andric Value *ICmp = Builder.CreateICmpUGT(LHS, RHS); 1721fe6060f1SDimitry Andric Sel = Builder.CreateSelect(ICmp, LHS, RHS, "umax"); 1722fe6060f1SDimitry Andric } 17235ffd83dbSDimitry Andric LHS = Sel; 17245ffd83dbSDimitry Andric } 17255ffd83dbSDimitry Andric // In the case of mixed integer and pointer types, cast the 17265ffd83dbSDimitry Andric // final result back to the pointer type. 
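The smax and umax visitors above, like the smin/umin visitors that follow, lower an n-ary reduction to a chain of two-operand steps, using an llvm.smax/umax intrinsic for integer types and an icmp plus select otherwise. Below is a scalar analogue of that fold, for illustration only; the real code walks the operand list from the last operand backwards.

#include <cstdint>
#include <initializer_list>

// Folds an n-ary signed maximum into a chain of two-operand maxima, one
// "icmp sgt + select" (or llvm.smax call) per extra operand. Assumes at
// least one operand, as SCEV min/max expressions always have.
static int64_t expandSMaxChain(std::initializer_list<int64_t> Ops) {
  auto It = Ops.begin();
  int64_t LHS = *It++;               // seed with one operand
  for (; It != Ops.end(); ++It)
    LHS = LHS > *It ? LHS : *It;     // one compare-and-select per operand
  return LHS;                        // expandSMaxChain({3, -7, 9}) == 9
}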
17275ffd83dbSDimitry Andric if (LHS->getType() != S->getType()) 17285ffd83dbSDimitry Andric LHS = InsertNoopCastOfTo(LHS, S->getType()); 17295ffd83dbSDimitry Andric return LHS; 17305ffd83dbSDimitry Andric } 17315ffd83dbSDimitry Andric 17325ffd83dbSDimitry Andric Value *SCEVExpander::visitSMinExpr(const SCEVSMinExpr *S) { 17335ffd83dbSDimitry Andric Value *LHS = expand(S->getOperand(S->getNumOperands() - 1)); 17345ffd83dbSDimitry Andric Type *Ty = LHS->getType(); 17355ffd83dbSDimitry Andric for (int i = S->getNumOperands() - 2; i >= 0; --i) { 17365ffd83dbSDimitry Andric // In the case of mixed integer and pointer types, do the 17375ffd83dbSDimitry Andric // rest of the comparisons as integer. 17385ffd83dbSDimitry Andric Type *OpTy = S->getOperand(i)->getType(); 17395ffd83dbSDimitry Andric if (OpTy->isIntegerTy() != Ty->isIntegerTy()) { 17405ffd83dbSDimitry Andric Ty = SE.getEffectiveSCEVType(Ty); 17415ffd83dbSDimitry Andric LHS = InsertNoopCastOfTo(LHS, Ty); 17425ffd83dbSDimitry Andric } 1743e8d8bef9SDimitry Andric Value *RHS = expandCodeForImpl(S->getOperand(i), Ty, false); 1744fe6060f1SDimitry Andric Value *Sel; 1745fe6060f1SDimitry Andric if (Ty->isIntegerTy()) 1746fe6060f1SDimitry Andric Sel = Builder.CreateIntrinsic(Intrinsic::smin, {Ty}, {LHS, RHS}, 1747fe6060f1SDimitry Andric /*FMFSource=*/nullptr, "smin"); 1748fe6060f1SDimitry Andric else { 17495ffd83dbSDimitry Andric Value *ICmp = Builder.CreateICmpSLT(LHS, RHS); 1750fe6060f1SDimitry Andric Sel = Builder.CreateSelect(ICmp, LHS, RHS, "smin"); 1751fe6060f1SDimitry Andric } 17525ffd83dbSDimitry Andric LHS = Sel; 17535ffd83dbSDimitry Andric } 17545ffd83dbSDimitry Andric // In the case of mixed integer and pointer types, cast the 17555ffd83dbSDimitry Andric // final result back to the pointer type. 17565ffd83dbSDimitry Andric if (LHS->getType() != S->getType()) 17575ffd83dbSDimitry Andric LHS = InsertNoopCastOfTo(LHS, S->getType()); 17585ffd83dbSDimitry Andric return LHS; 17595ffd83dbSDimitry Andric } 17605ffd83dbSDimitry Andric 17615ffd83dbSDimitry Andric Value *SCEVExpander::visitUMinExpr(const SCEVUMinExpr *S) { 17625ffd83dbSDimitry Andric Value *LHS = expand(S->getOperand(S->getNumOperands() - 1)); 17635ffd83dbSDimitry Andric Type *Ty = LHS->getType(); 17645ffd83dbSDimitry Andric for (int i = S->getNumOperands() - 2; i >= 0; --i) { 17655ffd83dbSDimitry Andric // In the case of mixed integer and pointer types, do the 17665ffd83dbSDimitry Andric // rest of the comparisons as integer. 
17675ffd83dbSDimitry Andric Type *OpTy = S->getOperand(i)->getType(); 17685ffd83dbSDimitry Andric if (OpTy->isIntegerTy() != Ty->isIntegerTy()) { 17695ffd83dbSDimitry Andric Ty = SE.getEffectiveSCEVType(Ty); 17705ffd83dbSDimitry Andric LHS = InsertNoopCastOfTo(LHS, Ty); 17715ffd83dbSDimitry Andric } 1772e8d8bef9SDimitry Andric Value *RHS = expandCodeForImpl(S->getOperand(i), Ty, false); 1773fe6060f1SDimitry Andric Value *Sel; 1774fe6060f1SDimitry Andric if (Ty->isIntegerTy()) 1775fe6060f1SDimitry Andric Sel = Builder.CreateIntrinsic(Intrinsic::umin, {Ty}, {LHS, RHS}, 1776fe6060f1SDimitry Andric /*FMFSource=*/nullptr, "umin"); 1777fe6060f1SDimitry Andric else { 17785ffd83dbSDimitry Andric Value *ICmp = Builder.CreateICmpULT(LHS, RHS); 1779fe6060f1SDimitry Andric Sel = Builder.CreateSelect(ICmp, LHS, RHS, "umin"); 1780fe6060f1SDimitry Andric } 17815ffd83dbSDimitry Andric LHS = Sel; 17825ffd83dbSDimitry Andric } 17835ffd83dbSDimitry Andric // In the case of mixed integer and pointer types, cast the 17845ffd83dbSDimitry Andric // final result back to the pointer type. 17855ffd83dbSDimitry Andric if (LHS->getType() != S->getType()) 17865ffd83dbSDimitry Andric LHS = InsertNoopCastOfTo(LHS, S->getType()); 17875ffd83dbSDimitry Andric return LHS; 17885ffd83dbSDimitry Andric } 17895ffd83dbSDimitry Andric 1790e8d8bef9SDimitry Andric Value *SCEVExpander::expandCodeForImpl(const SCEV *SH, Type *Ty, 1791e8d8bef9SDimitry Andric Instruction *IP, bool Root) { 17925ffd83dbSDimitry Andric setInsertPoint(IP); 1793e8d8bef9SDimitry Andric Value *V = expandCodeForImpl(SH, Ty, Root); 1794e8d8bef9SDimitry Andric return V; 17955ffd83dbSDimitry Andric } 17965ffd83dbSDimitry Andric 1797e8d8bef9SDimitry Andric Value *SCEVExpander::expandCodeForImpl(const SCEV *SH, Type *Ty, bool Root) { 17985ffd83dbSDimitry Andric // Expand the code for this SCEV. 17995ffd83dbSDimitry Andric Value *V = expand(SH); 1800e8d8bef9SDimitry Andric 1801e8d8bef9SDimitry Andric if (PreserveLCSSA) { 1802e8d8bef9SDimitry Andric if (auto *Inst = dyn_cast<Instruction>(V)) { 1803e8d8bef9SDimitry Andric // Create a temporary instruction to at the current insertion point, so we 1804e8d8bef9SDimitry Andric // can hand it off to the helper to create LCSSA PHIs if required for the 1805e8d8bef9SDimitry Andric // new use. 1806e8d8bef9SDimitry Andric // FIXME: Ideally formLCSSAForInstructions (used in fixupLCSSAFormFor) 1807e8d8bef9SDimitry Andric // would accept a insertion point and return an LCSSA phi for that 1808e8d8bef9SDimitry Andric // insertion point, so there is no need to insert & remove the temporary 1809e8d8bef9SDimitry Andric // instruction. 1810e8d8bef9SDimitry Andric Instruction *Tmp; 1811e8d8bef9SDimitry Andric if (Inst->getType()->isIntegerTy()) 1812e8d8bef9SDimitry Andric Tmp = 1813e8d8bef9SDimitry Andric cast<Instruction>(Builder.CreateAdd(Inst, Inst, "tmp.lcssa.user")); 1814e8d8bef9SDimitry Andric else { 1815e8d8bef9SDimitry Andric assert(Inst->getType()->isPointerTy()); 1816fe6060f1SDimitry Andric Tmp = cast<Instruction>(Builder.CreatePtrToInt( 1817fe6060f1SDimitry Andric Inst, Type::getInt32Ty(Inst->getContext()), "tmp.lcssa.user")); 1818e8d8bef9SDimitry Andric } 1819e8d8bef9SDimitry Andric V = fixupLCSSAFormFor(Tmp, 0); 1820e8d8bef9SDimitry Andric 1821e8d8bef9SDimitry Andric // Clean up temporary instruction. 
1822e8d8bef9SDimitry Andric InsertedValues.erase(Tmp); 1823e8d8bef9SDimitry Andric InsertedPostIncValues.erase(Tmp); 1824e8d8bef9SDimitry Andric Tmp->eraseFromParent(); 1825e8d8bef9SDimitry Andric } 1826e8d8bef9SDimitry Andric } 1827e8d8bef9SDimitry Andric 1828e8d8bef9SDimitry Andric InsertedExpressions[std::make_pair(SH, &*Builder.GetInsertPoint())] = V; 18295ffd83dbSDimitry Andric if (Ty) { 18305ffd83dbSDimitry Andric assert(SE.getTypeSizeInBits(Ty) == SE.getTypeSizeInBits(SH->getType()) && 18315ffd83dbSDimitry Andric "non-trivial casts should be done with the SCEVs directly!"); 18325ffd83dbSDimitry Andric V = InsertNoopCastOfTo(V, Ty); 18335ffd83dbSDimitry Andric } 18345ffd83dbSDimitry Andric return V; 18355ffd83dbSDimitry Andric } 18365ffd83dbSDimitry Andric 18375ffd83dbSDimitry Andric ScalarEvolution::ValueOffsetPair 18385ffd83dbSDimitry Andric SCEVExpander::FindValueInExprValueMap(const SCEV *S, 18395ffd83dbSDimitry Andric const Instruction *InsertPt) { 1840fe6060f1SDimitry Andric auto *Set = SE.getSCEVValues(S); 18415ffd83dbSDimitry Andric // If the expansion is not in CanonicalMode, and the SCEV contains any 18425ffd83dbSDimitry Andric // sub scAddRecExpr type SCEV, it is required to expand the SCEV literally. 18435ffd83dbSDimitry Andric if (CanonicalMode || !SE.containsAddRecurrence(S)) { 18445ffd83dbSDimitry Andric // If S is scConstant, it may be worse to reuse an existing Value. 18455ffd83dbSDimitry Andric if (S->getSCEVType() != scConstant && Set) { 1846349cc55cSDimitry Andric // Choose a Value from the set which dominates the InsertPt. 1847349cc55cSDimitry Andric // InsertPt should be inside the Value's parent loop so as not to break 18485ffd83dbSDimitry Andric // the LCSSA form. 18495ffd83dbSDimitry Andric for (auto const &VOPair : *Set) { 18505ffd83dbSDimitry Andric Value *V = VOPair.first; 18515ffd83dbSDimitry Andric ConstantInt *Offset = VOPair.second; 1852349cc55cSDimitry Andric Instruction *EntInst = dyn_cast_or_null<Instruction>(V); 1853349cc55cSDimitry Andric if (!EntInst) 1854349cc55cSDimitry Andric continue; 1855349cc55cSDimitry Andric 1856349cc55cSDimitry Andric assert(EntInst->getFunction() == InsertPt->getFunction()); 1857349cc55cSDimitry Andric if (S->getType() == V->getType() && 18585ffd83dbSDimitry Andric SE.DT.dominates(EntInst, InsertPt) && 18595ffd83dbSDimitry Andric (SE.LI.getLoopFor(EntInst->getParent()) == nullptr || 1860*4824e7fdSDimitry Andric SE.LI.getLoopFor(EntInst->getParent())->contains(InsertPt))) 18615ffd83dbSDimitry Andric return {V, Offset}; 18625ffd83dbSDimitry Andric } 18635ffd83dbSDimitry Andric } 18645ffd83dbSDimitry Andric } 18655ffd83dbSDimitry Andric return {nullptr, nullptr}; 18665ffd83dbSDimitry Andric } 18675ffd83dbSDimitry Andric 18685ffd83dbSDimitry Andric // The expansion of SCEV will either reuse a previous Value in ExprValueMap, 18695ffd83dbSDimitry Andric // or expand the SCEV literally. Specifically, if the expansion is in LSRMode, 18705ffd83dbSDimitry Andric // and the SCEV contains any sub scAddRecExpr type SCEV, it will be expanded 18715ffd83dbSDimitry Andric // literally, to prevent LSR's transformed SCEV from being reverted. Otherwise, 18725ffd83dbSDimitry Andric // the expansion will try to reuse Value from ExprValueMap, and only when it 18735ffd83dbSDimitry Andric // fails, expand the SCEV literally. 18745ffd83dbSDimitry Andric Value *SCEVExpander::expand(const SCEV *S) { 18755ffd83dbSDimitry Andric // Compute an insertion point for this SCEV object. 
Hoist the instructions
18765ffd83dbSDimitry Andric // as far out in the loop nest as possible.
18775ffd83dbSDimitry Andric Instruction *InsertPt = &*Builder.GetInsertPoint();
18785ffd83dbSDimitry Andric 
18795ffd83dbSDimitry Andric // We can move the insertion point only if there are no div or rem operations;
18805ffd83dbSDimitry Andric // otherwise we risk moving it past the check for a zero denominator.
18815ffd83dbSDimitry Andric auto SafeToHoist = [](const SCEV *S) {
18825ffd83dbSDimitry Andric return !SCEVExprContains(S, [](const SCEV *S) {
18835ffd83dbSDimitry Andric if (const auto *D = dyn_cast<SCEVUDivExpr>(S)) {
18845ffd83dbSDimitry Andric if (const auto *SC = dyn_cast<SCEVConstant>(D->getRHS()))
18855ffd83dbSDimitry Andric // Division by non-zero constants can be hoisted.
18865ffd83dbSDimitry Andric return SC->getValue()->isZero();
18875ffd83dbSDimitry Andric // All other divisions should not be moved as they may be
18885ffd83dbSDimitry Andric // divisions by zero and should be kept within the
18895ffd83dbSDimitry Andric // conditions of the surrounding loops that guard their
18905ffd83dbSDimitry Andric // execution (see PR35406).
18915ffd83dbSDimitry Andric return true;
18925ffd83dbSDimitry Andric }
18935ffd83dbSDimitry Andric return false;
18945ffd83dbSDimitry Andric });
18955ffd83dbSDimitry Andric };
18965ffd83dbSDimitry Andric if (SafeToHoist(S)) {
18975ffd83dbSDimitry Andric for (Loop *L = SE.LI.getLoopFor(Builder.GetInsertBlock());;
18985ffd83dbSDimitry Andric L = L->getParentLoop()) {
18995ffd83dbSDimitry Andric if (SE.isLoopInvariant(S, L)) {
19005ffd83dbSDimitry Andric if (!L) break;
19015ffd83dbSDimitry Andric if (BasicBlock *Preheader = L->getLoopPreheader())
19025ffd83dbSDimitry Andric InsertPt = Preheader->getTerminator();
19035ffd83dbSDimitry Andric else
19045ffd83dbSDimitry Andric // LSR sets the insertion point for AddRec start/step values to the
19055ffd83dbSDimitry Andric // block start to simplify value reuse, even though it's an invalid
19065ffd83dbSDimitry Andric // position. SCEVExpander must correct for this in all cases.
19075ffd83dbSDimitry Andric InsertPt = &*L->getHeader()->getFirstInsertionPt();
19085ffd83dbSDimitry Andric } else {
19095ffd83dbSDimitry Andric // If the SCEV is computable at this level, insert it into the header
19105ffd83dbSDimitry Andric // after the PHIs (and after any other instructions that we've inserted
19115ffd83dbSDimitry Andric // there) so that it is guaranteed to dominate any user inside the loop.
19125ffd83dbSDimitry Andric if (L && SE.hasComputableLoopEvolution(S, L) && !PostIncLoops.count(L))
19135ffd83dbSDimitry Andric InsertPt = &*L->getHeader()->getFirstInsertionPt();
1914e8d8bef9SDimitry Andric 
19155ffd83dbSDimitry Andric while (InsertPt->getIterator() != Builder.GetInsertPoint() &&
19165ffd83dbSDimitry Andric (isInsertedInstruction(InsertPt) ||
1917e8d8bef9SDimitry Andric isa<DbgInfoIntrinsic>(InsertPt))) {
19185ffd83dbSDimitry Andric InsertPt = &*std::next(InsertPt->getIterator());
1919e8d8bef9SDimitry Andric }
19205ffd83dbSDimitry Andric break;
19215ffd83dbSDimitry Andric }
19225ffd83dbSDimitry Andric }
19235ffd83dbSDimitry Andric }
19245ffd83dbSDimitry Andric 
19255ffd83dbSDimitry Andric // Check to see if we already expanded this here.
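// Expansions are cached per (SCEV, insertion point) pair, so the same
// expression expanded at two different points yields two independent entries;
// the lookup below only reuses a value that was previously materialized at
// this exact InsertPt.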
19265ffd83dbSDimitry Andric auto I = InsertedExpressions.find(std::make_pair(S, InsertPt)); 19275ffd83dbSDimitry Andric if (I != InsertedExpressions.end()) 19285ffd83dbSDimitry Andric return I->second; 19295ffd83dbSDimitry Andric 19305ffd83dbSDimitry Andric SCEVInsertPointGuard Guard(Builder, this); 19315ffd83dbSDimitry Andric Builder.SetInsertPoint(InsertPt); 19325ffd83dbSDimitry Andric 19335ffd83dbSDimitry Andric // Expand the expression into instructions. 19345ffd83dbSDimitry Andric ScalarEvolution::ValueOffsetPair VO = FindValueInExprValueMap(S, InsertPt); 19355ffd83dbSDimitry Andric Value *V = VO.first; 19365ffd83dbSDimitry Andric 19375ffd83dbSDimitry Andric if (!V) 19385ffd83dbSDimitry Andric V = visit(S); 1939*4824e7fdSDimitry Andric else { 1940*4824e7fdSDimitry Andric // If we're reusing an existing instruction, we are effectively CSEing two 1941*4824e7fdSDimitry Andric // copies of the instruction (with potentially different flags). As such, 1942*4824e7fdSDimitry Andric // we need to drop any poison generating flags unless we can prove that 1943*4824e7fdSDimitry Andric // said flags must be valid for all new users. 1944*4824e7fdSDimitry Andric if (auto *I = dyn_cast<Instruction>(V)) 1945*4824e7fdSDimitry Andric if (I->hasPoisonGeneratingFlags() && !programUndefinedIfPoison(I)) 1946*4824e7fdSDimitry Andric I->dropPoisonGeneratingFlags(); 1947*4824e7fdSDimitry Andric 1948*4824e7fdSDimitry Andric if (VO.second) { 19495ffd83dbSDimitry Andric if (PointerType *Vty = dyn_cast<PointerType>(V->getType())) { 19505ffd83dbSDimitry Andric Type *Ety = Vty->getPointerElementType(); 19515ffd83dbSDimitry Andric int64_t Offset = VO.second->getSExtValue(); 19525ffd83dbSDimitry Andric int64_t ESize = SE.getTypeSizeInBits(Ety); 19535ffd83dbSDimitry Andric if ((Offset * 8) % ESize == 0) { 19545ffd83dbSDimitry Andric ConstantInt *Idx = 19555ffd83dbSDimitry Andric ConstantInt::getSigned(VO.second->getType(), -(Offset * 8) / ESize); 19565ffd83dbSDimitry Andric V = Builder.CreateGEP(Ety, V, Idx, "scevgep"); 19575ffd83dbSDimitry Andric } else { 19585ffd83dbSDimitry Andric ConstantInt *Idx = 19595ffd83dbSDimitry Andric ConstantInt::getSigned(VO.second->getType(), -Offset); 19605ffd83dbSDimitry Andric unsigned AS = Vty->getAddressSpace(); 19615ffd83dbSDimitry Andric V = Builder.CreateBitCast(V, Type::getInt8PtrTy(SE.getContext(), AS)); 19625ffd83dbSDimitry Andric V = Builder.CreateGEP(Type::getInt8Ty(SE.getContext()), V, Idx, 19635ffd83dbSDimitry Andric "uglygep"); 19645ffd83dbSDimitry Andric V = Builder.CreateBitCast(V, Vty); 19655ffd83dbSDimitry Andric } 19665ffd83dbSDimitry Andric } else { 19675ffd83dbSDimitry Andric V = Builder.CreateSub(V, VO.second); 19685ffd83dbSDimitry Andric } 19695ffd83dbSDimitry Andric } 1970*4824e7fdSDimitry Andric } 19715ffd83dbSDimitry Andric // Remember the expanded value for this SCEV at this location. 19725ffd83dbSDimitry Andric // 19735ffd83dbSDimitry Andric // This is independent of PostIncLoops. The mapped value simply materializes 19745ffd83dbSDimitry Andric // the expression at this insertion point. If the mapped value happened to be 19755ffd83dbSDimitry Andric // a postinc expansion, it could be reused by a non-postinc user, but only if 19765ffd83dbSDimitry Andric // its insertion point was already at the head of the loop. 
19775ffd83dbSDimitry Andric InsertedExpressions[std::make_pair(S, InsertPt)] = V;
19785ffd83dbSDimitry Andric return V;
19795ffd83dbSDimitry Andric }
19805ffd83dbSDimitry Andric 
19815ffd83dbSDimitry Andric void SCEVExpander::rememberInstruction(Value *I) {
1982e8d8bef9SDimitry Andric auto DoInsert = [this](Value *V) {
19835ffd83dbSDimitry Andric if (!PostIncLoops.empty())
1984e8d8bef9SDimitry Andric InsertedPostIncValues.insert(V);
19855ffd83dbSDimitry Andric else
1986e8d8bef9SDimitry Andric InsertedValues.insert(V);
1987e8d8bef9SDimitry Andric };
1988e8d8bef9SDimitry Andric DoInsert(I);
1989e8d8bef9SDimitry Andric 
1990e8d8bef9SDimitry Andric if (!PreserveLCSSA)
1991e8d8bef9SDimitry Andric return;
1992e8d8bef9SDimitry Andric 
1993e8d8bef9SDimitry Andric if (auto *Inst = dyn_cast<Instruction>(I)) {
1994e8d8bef9SDimitry Andric // A new instruction has been added, which might introduce new uses outside
1995e8d8bef9SDimitry Andric // a defining loop. Fix LCSSA form for each operand of the new instruction,
1996e8d8bef9SDimitry Andric // if required.
1997e8d8bef9SDimitry Andric for (unsigned OpIdx = 0, OpEnd = Inst->getNumOperands(); OpIdx != OpEnd;
1998e8d8bef9SDimitry Andric OpIdx++)
1999e8d8bef9SDimitry Andric fixupLCSSAFormFor(Inst, OpIdx);
20005ffd83dbSDimitry Andric }
20015ffd83dbSDimitry Andric }
20025ffd83dbSDimitry Andric 
20035ffd83dbSDimitry Andric /// replaceCongruentIVs - Check for congruent phis in this loop header and
20045ffd83dbSDimitry Andric /// replace them with their most canonical representative. Return the number of
20055ffd83dbSDimitry Andric /// phis eliminated.
20065ffd83dbSDimitry Andric ///
20075ffd83dbSDimitry Andric /// This does not depend on any SCEVExpander state but should be used in
20085ffd83dbSDimitry Andric /// the same context that SCEVExpander is used.
20095ffd83dbSDimitry Andric unsigned
20105ffd83dbSDimitry Andric SCEVExpander::replaceCongruentIVs(Loop *L, const DominatorTree *DT,
20115ffd83dbSDimitry Andric SmallVectorImpl<WeakTrackingVH> &DeadInsts,
20125ffd83dbSDimitry Andric const TargetTransformInfo *TTI) {
20135ffd83dbSDimitry Andric // Find integer phis in order of increasing width.
20145ffd83dbSDimitry Andric SmallVector<PHINode*, 8> Phis;
20155ffd83dbSDimitry Andric for (PHINode &PN : L->getHeader()->phis())
20165ffd83dbSDimitry Andric Phis.push_back(&PN);
20175ffd83dbSDimitry Andric 
20185ffd83dbSDimitry Andric if (TTI)
2019349cc55cSDimitry Andric // Use stable_sort to preserve order of equivalent PHIs, so the order
2020349cc55cSDimitry Andric // of the sorted Phis is the same from run to run on the same loop.
2021349cc55cSDimitry Andric llvm::stable_sort(Phis, [](Value *LHS, Value *RHS) {
20225ffd83dbSDimitry Andric // Put pointers at the back and make sure pointer < pointer = false.
20235ffd83dbSDimitry Andric if (!LHS->getType()->isIntegerTy() || !RHS->getType()->isIntegerTy())
20245ffd83dbSDimitry Andric return RHS->getType()->isIntegerTy() && !LHS->getType()->isIntegerTy();
2025e8d8bef9SDimitry Andric return RHS->getType()->getPrimitiveSizeInBits().getFixedSize() <
2026e8d8bef9SDimitry Andric LHS->getType()->getPrimitiveSizeInBits().getFixedSize();
20275ffd83dbSDimitry Andric });
20285ffd83dbSDimitry Andric 
20295ffd83dbSDimitry Andric unsigned NumElim = 0;
20305ffd83dbSDimitry Andric DenseMap<const SCEV *, PHINode *> ExprToIVMap;
20315ffd83dbSDimitry Andric // Process phis from wide to narrow. Map wide phis to their truncation
20325ffd83dbSDimitry Andric // so narrow phis can reuse them.
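// For example (illustrative IR only), two congruent header phis such as
//   %iv.wide   = phi i64 [ 0, %preheader ], [ %iv.wide.next, %latch ]
//   %iv.narrow = phi i32 [ 0, %preheader ], [ %iv.narrow.next, %latch ]
// describe the same recurrence after truncation, so the narrow one can be
// rewritten as a trunc of the wide one and queued in DeadInsts.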
20335ffd83dbSDimitry Andric for (PHINode *Phi : Phis) { 20345ffd83dbSDimitry Andric auto SimplifyPHINode = [&](PHINode *PN) -> Value * { 20355ffd83dbSDimitry Andric if (Value *V = SimplifyInstruction(PN, {DL, &SE.TLI, &SE.DT, &SE.AC})) 20365ffd83dbSDimitry Andric return V; 20375ffd83dbSDimitry Andric if (!SE.isSCEVable(PN->getType())) 20385ffd83dbSDimitry Andric return nullptr; 20395ffd83dbSDimitry Andric auto *Const = dyn_cast<SCEVConstant>(SE.getSCEV(PN)); 20405ffd83dbSDimitry Andric if (!Const) 20415ffd83dbSDimitry Andric return nullptr; 20425ffd83dbSDimitry Andric return Const->getValue(); 20435ffd83dbSDimitry Andric }; 20445ffd83dbSDimitry Andric 20455ffd83dbSDimitry Andric // Fold constant phis. They may be congruent to other constant phis and 20465ffd83dbSDimitry Andric // would confuse the logic below that expects proper IVs. 20475ffd83dbSDimitry Andric if (Value *V = SimplifyPHINode(Phi)) { 20485ffd83dbSDimitry Andric if (V->getType() != Phi->getType()) 20495ffd83dbSDimitry Andric continue; 20505ffd83dbSDimitry Andric Phi->replaceAllUsesWith(V); 20515ffd83dbSDimitry Andric DeadInsts.emplace_back(Phi); 20525ffd83dbSDimitry Andric ++NumElim; 2053fe6060f1SDimitry Andric SCEV_DEBUG_WITH_TYPE(DebugType, 2054fe6060f1SDimitry Andric dbgs() << "INDVARS: Eliminated constant iv: " << *Phi 2055fe6060f1SDimitry Andric << '\n'); 20565ffd83dbSDimitry Andric continue; 20575ffd83dbSDimitry Andric } 20585ffd83dbSDimitry Andric 20595ffd83dbSDimitry Andric if (!SE.isSCEVable(Phi->getType())) 20605ffd83dbSDimitry Andric continue; 20615ffd83dbSDimitry Andric 20625ffd83dbSDimitry Andric PHINode *&OrigPhiRef = ExprToIVMap[SE.getSCEV(Phi)]; 20635ffd83dbSDimitry Andric if (!OrigPhiRef) { 20645ffd83dbSDimitry Andric OrigPhiRef = Phi; 20655ffd83dbSDimitry Andric if (Phi->getType()->isIntegerTy() && TTI && 20665ffd83dbSDimitry Andric TTI->isTruncateFree(Phi->getType(), Phis.back()->getType())) { 20675ffd83dbSDimitry Andric // This phi can be freely truncated to the narrowest phi type. Map the 20685ffd83dbSDimitry Andric // truncated expression to it so it will be reused for narrow types. 20695ffd83dbSDimitry Andric const SCEV *TruncExpr = 20705ffd83dbSDimitry Andric SE.getTruncateExpr(SE.getSCEV(Phi), Phis.back()->getType()); 20715ffd83dbSDimitry Andric ExprToIVMap[TruncExpr] = Phi; 20725ffd83dbSDimitry Andric } 20735ffd83dbSDimitry Andric continue; 20745ffd83dbSDimitry Andric } 20755ffd83dbSDimitry Andric 20765ffd83dbSDimitry Andric // Replacing a pointer phi with an integer phi or vice-versa doesn't make 20775ffd83dbSDimitry Andric // sense. 20785ffd83dbSDimitry Andric if (OrigPhiRef->getType()->isPointerTy() != Phi->getType()->isPointerTy()) 20795ffd83dbSDimitry Andric continue; 20805ffd83dbSDimitry Andric 20815ffd83dbSDimitry Andric if (BasicBlock *LatchBlock = L->getLoopLatch()) { 20825ffd83dbSDimitry Andric Instruction *OrigInc = dyn_cast<Instruction>( 20835ffd83dbSDimitry Andric OrigPhiRef->getIncomingValueForBlock(LatchBlock)); 20845ffd83dbSDimitry Andric Instruction *IsomorphicInc = 20855ffd83dbSDimitry Andric dyn_cast<Instruction>(Phi->getIncomingValueForBlock(LatchBlock)); 20865ffd83dbSDimitry Andric 20875ffd83dbSDimitry Andric if (OrigInc && IsomorphicInc) { 20885ffd83dbSDimitry Andric // If this phi has the same width but is more canonical, replace the 20895ffd83dbSDimitry Andric // original with it. As part of the "more canonical" determination, 20905ffd83dbSDimitry Andric // respect a prior decision to use an IV chain. 
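// ("More canonical" here means the phi/increment pair has the shape this
// expander itself would produce, as tested by isExpandedAddRecExprPHI, and is
// not already committed to an IV chain recorded in ChainedPhis.)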
20915ffd83dbSDimitry Andric if (OrigPhiRef->getType() == Phi->getType() && 20925ffd83dbSDimitry Andric !(ChainedPhis.count(Phi) || 20935ffd83dbSDimitry Andric isExpandedAddRecExprPHI(OrigPhiRef, OrigInc, L)) && 20945ffd83dbSDimitry Andric (ChainedPhis.count(Phi) || 20955ffd83dbSDimitry Andric isExpandedAddRecExprPHI(Phi, IsomorphicInc, L))) { 20965ffd83dbSDimitry Andric std::swap(OrigPhiRef, Phi); 20975ffd83dbSDimitry Andric std::swap(OrigInc, IsomorphicInc); 20985ffd83dbSDimitry Andric } 20995ffd83dbSDimitry Andric // Replacing the congruent phi is sufficient because acyclic 21005ffd83dbSDimitry Andric // redundancy elimination, CSE/GVN, should handle the 21015ffd83dbSDimitry Andric // rest. However, once SCEV proves that a phi is congruent, 21025ffd83dbSDimitry Andric // it's often the head of an IV user cycle that is isomorphic 21035ffd83dbSDimitry Andric // with the original phi. It's worth eagerly cleaning up the 21045ffd83dbSDimitry Andric // common case of a single IV increment so that DeleteDeadPHIs 21055ffd83dbSDimitry Andric // can remove cycles that had postinc uses. 21065ffd83dbSDimitry Andric const SCEV *TruncExpr = 21075ffd83dbSDimitry Andric SE.getTruncateOrNoop(SE.getSCEV(OrigInc), IsomorphicInc->getType()); 21085ffd83dbSDimitry Andric if (OrigInc != IsomorphicInc && 21095ffd83dbSDimitry Andric TruncExpr == SE.getSCEV(IsomorphicInc) && 21105ffd83dbSDimitry Andric SE.LI.replacementPreservesLCSSAForm(IsomorphicInc, OrigInc) && 21115ffd83dbSDimitry Andric hoistIVInc(OrigInc, IsomorphicInc)) { 2112fe6060f1SDimitry Andric SCEV_DEBUG_WITH_TYPE( 2113fe6060f1SDimitry Andric DebugType, dbgs() << "INDVARS: Eliminated congruent iv.inc: " 21145ffd83dbSDimitry Andric << *IsomorphicInc << '\n'); 21155ffd83dbSDimitry Andric Value *NewInc = OrigInc; 21165ffd83dbSDimitry Andric if (OrigInc->getType() != IsomorphicInc->getType()) { 21175ffd83dbSDimitry Andric Instruction *IP = nullptr; 21185ffd83dbSDimitry Andric if (PHINode *PN = dyn_cast<PHINode>(OrigInc)) 21195ffd83dbSDimitry Andric IP = &*PN->getParent()->getFirstInsertionPt(); 21205ffd83dbSDimitry Andric else 21215ffd83dbSDimitry Andric IP = OrigInc->getNextNode(); 21225ffd83dbSDimitry Andric 21235ffd83dbSDimitry Andric IRBuilder<> Builder(IP); 21245ffd83dbSDimitry Andric Builder.SetCurrentDebugLocation(IsomorphicInc->getDebugLoc()); 21255ffd83dbSDimitry Andric NewInc = Builder.CreateTruncOrBitCast( 21265ffd83dbSDimitry Andric OrigInc, IsomorphicInc->getType(), IVName); 21275ffd83dbSDimitry Andric } 21285ffd83dbSDimitry Andric IsomorphicInc->replaceAllUsesWith(NewInc); 21295ffd83dbSDimitry Andric DeadInsts.emplace_back(IsomorphicInc); 21305ffd83dbSDimitry Andric } 21315ffd83dbSDimitry Andric } 21325ffd83dbSDimitry Andric } 2133fe6060f1SDimitry Andric SCEV_DEBUG_WITH_TYPE(DebugType, 2134fe6060f1SDimitry Andric dbgs() << "INDVARS: Eliminated congruent iv: " << *Phi 2135fe6060f1SDimitry Andric << '\n'); 2136fe6060f1SDimitry Andric SCEV_DEBUG_WITH_TYPE( 2137fe6060f1SDimitry Andric DebugType, dbgs() << "INDVARS: Original iv: " << *OrigPhiRef << '\n'); 21385ffd83dbSDimitry Andric ++NumElim; 21395ffd83dbSDimitry Andric Value *NewIV = OrigPhiRef; 21405ffd83dbSDimitry Andric if (OrigPhiRef->getType() != Phi->getType()) { 21415ffd83dbSDimitry Andric IRBuilder<> Builder(&*L->getHeader()->getFirstInsertionPt()); 21425ffd83dbSDimitry Andric Builder.SetCurrentDebugLocation(Phi->getDebugLoc()); 21435ffd83dbSDimitry Andric NewIV = Builder.CreateTruncOrBitCast(OrigPhiRef, Phi->getType(), IVName); 21445ffd83dbSDimitry Andric } 21455ffd83dbSDimitry Andric 
Phi->replaceAllUsesWith(NewIV); 21465ffd83dbSDimitry Andric DeadInsts.emplace_back(Phi); 21475ffd83dbSDimitry Andric } 21485ffd83dbSDimitry Andric return NumElim; 21495ffd83dbSDimitry Andric } 21505ffd83dbSDimitry Andric 21515ffd83dbSDimitry Andric Optional<ScalarEvolution::ValueOffsetPair> 21525ffd83dbSDimitry Andric SCEVExpander::getRelatedExistingExpansion(const SCEV *S, const Instruction *At, 21535ffd83dbSDimitry Andric Loop *L) { 21545ffd83dbSDimitry Andric using namespace llvm::PatternMatch; 21555ffd83dbSDimitry Andric 21565ffd83dbSDimitry Andric SmallVector<BasicBlock *, 4> ExitingBlocks; 21575ffd83dbSDimitry Andric L->getExitingBlocks(ExitingBlocks); 21585ffd83dbSDimitry Andric 21595ffd83dbSDimitry Andric // Look for suitable value in simple conditions at the loop exits. 21605ffd83dbSDimitry Andric for (BasicBlock *BB : ExitingBlocks) { 21615ffd83dbSDimitry Andric ICmpInst::Predicate Pred; 21625ffd83dbSDimitry Andric Instruction *LHS, *RHS; 21635ffd83dbSDimitry Andric 21645ffd83dbSDimitry Andric if (!match(BB->getTerminator(), 21655ffd83dbSDimitry Andric m_Br(m_ICmp(Pred, m_Instruction(LHS), m_Instruction(RHS)), 21665ffd83dbSDimitry Andric m_BasicBlock(), m_BasicBlock()))) 21675ffd83dbSDimitry Andric continue; 21685ffd83dbSDimitry Andric 21695ffd83dbSDimitry Andric if (SE.getSCEV(LHS) == S && SE.DT.dominates(LHS, At)) 21705ffd83dbSDimitry Andric return ScalarEvolution::ValueOffsetPair(LHS, nullptr); 21715ffd83dbSDimitry Andric 21725ffd83dbSDimitry Andric if (SE.getSCEV(RHS) == S && SE.DT.dominates(RHS, At)) 21735ffd83dbSDimitry Andric return ScalarEvolution::ValueOffsetPair(RHS, nullptr); 21745ffd83dbSDimitry Andric } 21755ffd83dbSDimitry Andric 21765ffd83dbSDimitry Andric // Use expand's logic which is used for reusing a previous Value in 2177*4824e7fdSDimitry Andric // ExprValueMap. Note that we don't currently model the cost of 2178*4824e7fdSDimitry Andric // needing to drop poison generating flags on the instruction if we 2179*4824e7fdSDimitry Andric // want to reuse it. We effectively assume that has zero cost. 21805ffd83dbSDimitry Andric ScalarEvolution::ValueOffsetPair VO = FindValueInExprValueMap(S, At); 21815ffd83dbSDimitry Andric if (VO.first) 21825ffd83dbSDimitry Andric return VO; 21835ffd83dbSDimitry Andric 21845ffd83dbSDimitry Andric // There is potential to make this significantly smarter, but this simple 21855ffd83dbSDimitry Andric // heuristic already gets some interesting cases. 21865ffd83dbSDimitry Andric 21875ffd83dbSDimitry Andric // Can not find suitable value. 21885ffd83dbSDimitry Andric return None; 21895ffd83dbSDimitry Andric } 21905ffd83dbSDimitry Andric 2191fe6060f1SDimitry Andric template<typename T> static InstructionCost costAndCollectOperands( 2192e8d8bef9SDimitry Andric const SCEVOperand &WorkItem, const TargetTransformInfo &TTI, 2193e8d8bef9SDimitry Andric TargetTransformInfo::TargetCostKind CostKind, 2194e8d8bef9SDimitry Andric SmallVectorImpl<SCEVOperand> &Worklist) { 2195e8d8bef9SDimitry Andric 2196e8d8bef9SDimitry Andric const T *S = cast<T>(WorkItem.S); 2197fe6060f1SDimitry Andric InstructionCost Cost = 0; 2198e8d8bef9SDimitry Andric // Object to help map SCEV operands to expanded IR instructions. 
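// Each entry records the opcode that will consume an operand of S and the
// range of operand indices at which it may appear; this context is later fed
// to TTI.getIntImmCostInst in isHighCostExpansionHelper so constant operands
// can be costed as immediates of a known user instruction.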
2199e8d8bef9SDimitry Andric struct OperationIndices { 2200e8d8bef9SDimitry Andric OperationIndices(unsigned Opc, size_t min, size_t max) : 2201e8d8bef9SDimitry Andric Opcode(Opc), MinIdx(min), MaxIdx(max) { } 2202e8d8bef9SDimitry Andric unsigned Opcode; 2203e8d8bef9SDimitry Andric size_t MinIdx; 2204e8d8bef9SDimitry Andric size_t MaxIdx; 2205e8d8bef9SDimitry Andric }; 2206e8d8bef9SDimitry Andric 2207e8d8bef9SDimitry Andric // Collect the operations of all the instructions that will be needed to 2208e8d8bef9SDimitry Andric // expand the SCEVExpr. This is so that when we come to cost the operands, 2209e8d8bef9SDimitry Andric // we know what the generated user(s) will be. 2210e8d8bef9SDimitry Andric SmallVector<OperationIndices, 2> Operations; 2211e8d8bef9SDimitry Andric 2212fe6060f1SDimitry Andric auto CastCost = [&](unsigned Opcode) -> InstructionCost { 2213e8d8bef9SDimitry Andric Operations.emplace_back(Opcode, 0, 0); 2214e8d8bef9SDimitry Andric return TTI.getCastInstrCost(Opcode, S->getType(), 2215e8d8bef9SDimitry Andric S->getOperand(0)->getType(), 2216e8d8bef9SDimitry Andric TTI::CastContextHint::None, CostKind); 2217e8d8bef9SDimitry Andric }; 2218e8d8bef9SDimitry Andric 2219e8d8bef9SDimitry Andric auto ArithCost = [&](unsigned Opcode, unsigned NumRequired, 2220fe6060f1SDimitry Andric unsigned MinIdx = 0, 2221fe6060f1SDimitry Andric unsigned MaxIdx = 1) -> InstructionCost { 2222e8d8bef9SDimitry Andric Operations.emplace_back(Opcode, MinIdx, MaxIdx); 2223e8d8bef9SDimitry Andric return NumRequired * 2224e8d8bef9SDimitry Andric TTI.getArithmeticInstrCost(Opcode, S->getType(), CostKind); 2225e8d8bef9SDimitry Andric }; 2226e8d8bef9SDimitry Andric 2227fe6060f1SDimitry Andric auto CmpSelCost = [&](unsigned Opcode, unsigned NumRequired, unsigned MinIdx, 2228fe6060f1SDimitry Andric unsigned MaxIdx) -> InstructionCost { 2229e8d8bef9SDimitry Andric Operations.emplace_back(Opcode, MinIdx, MaxIdx); 2230e8d8bef9SDimitry Andric Type *OpType = S->getOperand(0)->getType(); 2231e8d8bef9SDimitry Andric return NumRequired * TTI.getCmpSelInstrCost( 2232e8d8bef9SDimitry Andric Opcode, OpType, CmpInst::makeCmpResultType(OpType), 2233e8d8bef9SDimitry Andric CmpInst::BAD_ICMP_PREDICATE, CostKind); 2234e8d8bef9SDimitry Andric }; 2235e8d8bef9SDimitry Andric 2236e8d8bef9SDimitry Andric switch (S->getSCEVType()) { 2237e8d8bef9SDimitry Andric case scCouldNotCompute: 2238e8d8bef9SDimitry Andric llvm_unreachable("Attempt to use a SCEVCouldNotCompute object!"); 2239e8d8bef9SDimitry Andric case scUnknown: 2240e8d8bef9SDimitry Andric case scConstant: 2241e8d8bef9SDimitry Andric return 0; 2242e8d8bef9SDimitry Andric case scPtrToInt: 2243e8d8bef9SDimitry Andric Cost = CastCost(Instruction::PtrToInt); 2244e8d8bef9SDimitry Andric break; 2245e8d8bef9SDimitry Andric case scTruncate: 2246e8d8bef9SDimitry Andric Cost = CastCost(Instruction::Trunc); 2247e8d8bef9SDimitry Andric break; 2248e8d8bef9SDimitry Andric case scZeroExtend: 2249e8d8bef9SDimitry Andric Cost = CastCost(Instruction::ZExt); 2250e8d8bef9SDimitry Andric break; 2251e8d8bef9SDimitry Andric case scSignExtend: 2252e8d8bef9SDimitry Andric Cost = CastCost(Instruction::SExt); 2253e8d8bef9SDimitry Andric break; 2254e8d8bef9SDimitry Andric case scUDivExpr: { 2255e8d8bef9SDimitry Andric unsigned Opcode = Instruction::UDiv; 2256e8d8bef9SDimitry Andric if (auto *SC = dyn_cast<SCEVConstant>(S->getOperand(1))) 2257e8d8bef9SDimitry Andric if (SC->getAPInt().isPowerOf2()) 2258e8d8bef9SDimitry Andric Opcode = Instruction::LShr; 2259e8d8bef9SDimitry Andric Cost = ArithCost(Opcode, 
1);
2260e8d8bef9SDimitry Andric break;
2261e8d8bef9SDimitry Andric }
2262e8d8bef9SDimitry Andric case scAddExpr:
2263e8d8bef9SDimitry Andric Cost = ArithCost(Instruction::Add, S->getNumOperands() - 1);
2264e8d8bef9SDimitry Andric break;
2265e8d8bef9SDimitry Andric case scMulExpr:
2266e8d8bef9SDimitry Andric // TODO: this is a very pessimistic cost modelling for Mul,
2267e8d8bef9SDimitry Andric // because of the binary exponentiation (BinPow) algorithm actually used by
2268e8d8bef9SDimitry Andric // the expander, see SCEVExpander::visitMulExpr(), ExpandOpBinPowN().
2269e8d8bef9SDimitry Andric Cost = ArithCost(Instruction::Mul, S->getNumOperands() - 1);
2270e8d8bef9SDimitry Andric break;
2271e8d8bef9SDimitry Andric case scSMaxExpr:
2272e8d8bef9SDimitry Andric case scUMaxExpr:
2273e8d8bef9SDimitry Andric case scSMinExpr:
2274e8d8bef9SDimitry Andric case scUMinExpr: {
2275fe6060f1SDimitry Andric // FIXME: should this ask the cost for Intrinsic's?
2276e8d8bef9SDimitry Andric Cost += CmpSelCost(Instruction::ICmp, S->getNumOperands() - 1, 0, 1);
2277e8d8bef9SDimitry Andric Cost += CmpSelCost(Instruction::Select, S->getNumOperands() - 1, 0, 2);
2278e8d8bef9SDimitry Andric break;
2279e8d8bef9SDimitry Andric }
2280e8d8bef9SDimitry Andric case scAddRecExpr: {
2281e8d8bef9SDimitry Andric // In this polynomial, we may have some zero operands, and we shouldn't
2282e8d8bef9SDimitry Andric // really charge for those. So how many non-zero coefficients are there?
2283e8d8bef9SDimitry Andric int NumTerms = llvm::count_if(S->operands(), [](const SCEV *Op) {
2284e8d8bef9SDimitry Andric return !Op->isZero();
2285e8d8bef9SDimitry Andric });
2286e8d8bef9SDimitry Andric 
2287e8d8bef9SDimitry Andric assert(NumTerms >= 1 && "Polynomial should have at least one term.");
2288e8d8bef9SDimitry Andric assert(!(*std::prev(S->operands().end()))->isZero() &&
2289e8d8bef9SDimitry Andric "Last operand should not be zero");
2290e8d8bef9SDimitry Andric 
2291e8d8bef9SDimitry Andric // Ignoring the constant term (operand 0), how many of the coefficients are u> 1?
2292e8d8bef9SDimitry Andric int NumNonZeroDegreeNonOneTerms =
2293e8d8bef9SDimitry Andric llvm::count_if(S->operands(), [](const SCEV *Op) {
2294e8d8bef9SDimitry Andric auto *SConst = dyn_cast<SCEVConstant>(Op);
2295e8d8bef9SDimitry Andric return !SConst || SConst->getAPInt().ugt(1);
2296e8d8bef9SDimitry Andric });
2297e8d8bef9SDimitry Andric 
2298e8d8bef9SDimitry Andric // Much like with a normal add expr, the polynomial will require
2299e8d8bef9SDimitry Andric // one less addition than the number of its terms.
2300fe6060f1SDimitry Andric InstructionCost AddCost = ArithCost(Instruction::Add, NumTerms - 1,
2301e8d8bef9SDimitry Andric /*MinIdx*/ 1, /*MaxIdx*/ 1);
2302e8d8bef9SDimitry Andric // Here, *each* one of those will require a multiplication.
2303fe6060f1SDimitry Andric InstructionCost MulCost =
2304fe6060f1SDimitry Andric ArithCost(Instruction::Mul, NumNonZeroDegreeNonOneTerms);
2305e8d8bef9SDimitry Andric Cost = AddCost + MulCost;
2306e8d8bef9SDimitry Andric 
2307e8d8bef9SDimitry Andric // What is the degree of this polynomial?
2308e8d8bef9SDimitry Andric int PolyDegree = S->getNumOperands() - 1;
2309e8d8bef9SDimitry Andric assert(PolyDegree >= 1 && "Should be at least affine.");
2310e8d8bef9SDimitry Andric 
2311e8d8bef9SDimitry Andric // The final term will be:
2312e8d8bef9SDimitry Andric // Op_{PolyDegree} * x ^ {PolyDegree}
2313e8d8bef9SDimitry Andric // Where x ^ {PolyDegree} will again require PolyDegree-1 mul operations.
2314e8d8bef9SDimitry Andric // Note that x ^ {PolyDegree} = x * x ^ {PolyDegree-1} so charging for
2315e8d8bef9SDimitry Andric // x ^ {PolyDegree} will give us x ^ {2} .. x ^ {PolyDegree-1} for free.
2316e8d8bef9SDimitry Andric // FIXME: this is conservatively correct, but might be overly pessimistic.
2317e8d8bef9SDimitry Andric Cost += MulCost * (PolyDegree - 1);
2318e8d8bef9SDimitry Andric break;
2319e8d8bef9SDimitry Andric }
2320e8d8bef9SDimitry Andric }
2321e8d8bef9SDimitry Andric 
2322e8d8bef9SDimitry Andric for (auto &CostOp : Operations) {
2323e8d8bef9SDimitry Andric for (auto SCEVOp : enumerate(S->operands())) {
2324e8d8bef9SDimitry Andric // Clamp the index to account for multiple IR operations being chained.
2325e8d8bef9SDimitry Andric size_t MinIdx = std::max(SCEVOp.index(), CostOp.MinIdx);
2326e8d8bef9SDimitry Andric size_t OpIdx = std::min(MinIdx, CostOp.MaxIdx);
2327e8d8bef9SDimitry Andric Worklist.emplace_back(CostOp.Opcode, OpIdx, SCEVOp.value());
2328e8d8bef9SDimitry Andric }
2329e8d8bef9SDimitry Andric }
2330e8d8bef9SDimitry Andric return Cost;
2331e8d8bef9SDimitry Andric }
2332e8d8bef9SDimitry Andric 
23335ffd83dbSDimitry Andric bool SCEVExpander::isHighCostExpansionHelper(
2334e8d8bef9SDimitry Andric const SCEVOperand &WorkItem, Loop *L, const Instruction &At,
2335fe6060f1SDimitry Andric InstructionCost &Cost, unsigned Budget, const TargetTransformInfo &TTI,
2336e8d8bef9SDimitry Andric SmallPtrSetImpl<const SCEV *> &Processed,
2337e8d8bef9SDimitry Andric SmallVectorImpl<SCEVOperand> &Worklist) {
2338fe6060f1SDimitry Andric if (Cost > Budget)
23395ffd83dbSDimitry Andric return true; // Already run out of budget, give up.
23405ffd83dbSDimitry Andric 
2341e8d8bef9SDimitry Andric const SCEV *S = WorkItem.S;
23425ffd83dbSDimitry Andric // Was the cost of expansion of this expression already accounted for?
2343e8d8bef9SDimitry Andric if (!isa<SCEVConstant>(S) && !Processed.insert(S).second)
23445ffd83dbSDimitry Andric return false; // We have already accounted for this expression.
23455ffd83dbSDimitry Andric 
23465ffd83dbSDimitry Andric // If we can find an existing value for this SCEV available at the point "At"
23475ffd83dbSDimitry Andric // then consider the expression cheap.
23485ffd83dbSDimitry Andric if (getRelatedExistingExpansion(S, &At, L))
23495ffd83dbSDimitry Andric return false; // Consider the expression to be free.
23505ffd83dbSDimitry Andric 
23515ffd83dbSDimitry Andric TargetTransformInfo::TargetCostKind CostKind =
2352e8d8bef9SDimitry Andric L->getHeader()->getParent()->hasMinSize()
2353e8d8bef9SDimitry Andric ? TargetTransformInfo::TCK_CodeSize
2354e8d8bef9SDimitry Andric : TargetTransformInfo::TCK_RecipThroughput;
23555ffd83dbSDimitry Andric 
23565ffd83dbSDimitry Andric switch (S->getSCEVType()) {
2357e8d8bef9SDimitry Andric case scCouldNotCompute:
2358e8d8bef9SDimitry Andric llvm_unreachable("Attempt to use a SCEVCouldNotCompute object!");
2359e8d8bef9SDimitry Andric case scUnknown:
2360e8d8bef9SDimitry Andric // Assumed to be zero-cost.
2361e8d8bef9SDimitry Andric return false;
2362e8d8bef9SDimitry Andric case scConstant: {
2363e8d8bef9SDimitry Andric // Only evaluate the costs of constants when optimizing for size.
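// (Under other cost kinds the immediate is treated as free by this heuristic;
// at code-size large immediates may need extra materialization instructions,
// which is what getIntImmCostInst is being asked to estimate below.)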
2364e8d8bef9SDimitry Andric if (CostKind != TargetTransformInfo::TCK_CodeSize)
2365e8d8bef9SDimitry Andric return 0;
2366e8d8bef9SDimitry Andric const APInt &Imm = cast<SCEVConstant>(S)->getAPInt();
2367e8d8bef9SDimitry Andric Type *Ty = S->getType();
2368fe6060f1SDimitry Andric Cost += TTI.getIntImmCostInst(
2369e8d8bef9SDimitry Andric WorkItem.ParentOpcode, WorkItem.OperandIdx, Imm, Ty, CostKind);
2370fe6060f1SDimitry Andric return Cost > Budget;
2371e8d8bef9SDimitry Andric }
23725ffd83dbSDimitry Andric case scTruncate:
2373e8d8bef9SDimitry Andric case scPtrToInt:
23745ffd83dbSDimitry Andric case scZeroExtend:
2375e8d8bef9SDimitry Andric case scSignExtend: {
2376fe6060f1SDimitry Andric Cost +=
2377e8d8bef9SDimitry Andric costAndCollectOperands<SCEVCastExpr>(WorkItem, TTI, CostKind, Worklist);
23785ffd83dbSDimitry Andric return false; // Will answer upon next entry into this function.
23795ffd83dbSDimitry Andric }
2380e8d8bef9SDimitry Andric case scUDivExpr: {
23815ffd83dbSDimitry Andric // UDivExpr is very likely a UDiv that ScalarEvolution's HowFarToZero or
23825ffd83dbSDimitry Andric // HowManyLessThans produced to compute a precise expression, rather than a
23835ffd83dbSDimitry Andric // UDiv from the user's code. If we can't find a UDiv in the code with some
23845ffd83dbSDimitry Andric // simple searching, we need to account for its cost.
23855ffd83dbSDimitry Andric 
23865ffd83dbSDimitry Andric // At the beginning of this function we already tried to find an existing
23875ffd83dbSDimitry Andric // value for plain 'S'. Now try to look up 'S + 1', since it is a common
23885ffd83dbSDimitry Andric // pattern involving division. This is just a simple search heuristic.
23895ffd83dbSDimitry Andric if (getRelatedExistingExpansion(
23905ffd83dbSDimitry Andric SE.getAddExpr(S, SE.getConstant(S->getType(), 1)), &At, L))
23915ffd83dbSDimitry Andric return false; // Consider it to be free.
23925ffd83dbSDimitry Andric 
2393fe6060f1SDimitry Andric Cost +=
2394e8d8bef9SDimitry Andric costAndCollectOperands<SCEVUDivExpr>(WorkItem, TTI, CostKind, Worklist);
23955ffd83dbSDimitry Andric return false; // Will answer upon next entry into this function.
23965ffd83dbSDimitry Andric }
23975ffd83dbSDimitry Andric case scAddExpr:
23985ffd83dbSDimitry Andric case scMulExpr:
23995ffd83dbSDimitry Andric case scUMaxExpr:
2400e8d8bef9SDimitry Andric case scSMaxExpr:
24015ffd83dbSDimitry Andric case scUMinExpr:
2402e8d8bef9SDimitry Andric case scSMinExpr: {
2403e8d8bef9SDimitry Andric assert(cast<SCEVNAryExpr>(S)->getNumOperands() > 1 &&
24045ffd83dbSDimitry Andric "Nary expr should have more than 1 operand.");
24055ffd83dbSDimitry Andric // The simple nary expr will require one less op (or pair of ops)
24065ffd83dbSDimitry Andric // than the number of its terms.
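// For example, umax(a, b, c) is modelled as two icmp/select pairs here, even
// though the expander may emit min/max intrinsics instead (see the FIXME in
// costAndCollectOperands above).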
2407fe6060f1SDimitry Andric Cost += 2408e8d8bef9SDimitry Andric costAndCollectOperands<SCEVNAryExpr>(WorkItem, TTI, CostKind, Worklist); 2409fe6060f1SDimitry Andric return Cost > Budget; 24105ffd83dbSDimitry Andric } 2411e8d8bef9SDimitry Andric case scAddRecExpr: { 2412e8d8bef9SDimitry Andric assert(cast<SCEVAddRecExpr>(S)->getNumOperands() >= 2 && 2413e8d8bef9SDimitry Andric "Polynomial should be at least linear"); 2414fe6060f1SDimitry Andric Cost += costAndCollectOperands<SCEVAddRecExpr>( 2415e8d8bef9SDimitry Andric WorkItem, TTI, CostKind, Worklist); 2416fe6060f1SDimitry Andric return Cost > Budget; 2417e8d8bef9SDimitry Andric } 2418e8d8bef9SDimitry Andric } 2419e8d8bef9SDimitry Andric llvm_unreachable("Unknown SCEV kind!"); 24205ffd83dbSDimitry Andric } 24215ffd83dbSDimitry Andric 24225ffd83dbSDimitry Andric Value *SCEVExpander::expandCodeForPredicate(const SCEVPredicate *Pred, 24235ffd83dbSDimitry Andric Instruction *IP) { 24245ffd83dbSDimitry Andric assert(IP); 24255ffd83dbSDimitry Andric switch (Pred->getKind()) { 24265ffd83dbSDimitry Andric case SCEVPredicate::P_Union: 24275ffd83dbSDimitry Andric return expandUnionPredicate(cast<SCEVUnionPredicate>(Pred), IP); 24285ffd83dbSDimitry Andric case SCEVPredicate::P_Equal: 24295ffd83dbSDimitry Andric return expandEqualPredicate(cast<SCEVEqualPredicate>(Pred), IP); 24305ffd83dbSDimitry Andric case SCEVPredicate::P_Wrap: { 24315ffd83dbSDimitry Andric auto *AddRecPred = cast<SCEVWrapPredicate>(Pred); 24325ffd83dbSDimitry Andric return expandWrapPredicate(AddRecPred, IP); 24335ffd83dbSDimitry Andric } 24345ffd83dbSDimitry Andric } 24355ffd83dbSDimitry Andric llvm_unreachable("Unknown SCEV predicate type"); 24365ffd83dbSDimitry Andric } 24375ffd83dbSDimitry Andric 24385ffd83dbSDimitry Andric Value *SCEVExpander::expandEqualPredicate(const SCEVEqualPredicate *Pred, 24395ffd83dbSDimitry Andric Instruction *IP) { 2440e8d8bef9SDimitry Andric Value *Expr0 = 2441e8d8bef9SDimitry Andric expandCodeForImpl(Pred->getLHS(), Pred->getLHS()->getType(), IP, false); 2442e8d8bef9SDimitry Andric Value *Expr1 = 2443e8d8bef9SDimitry Andric expandCodeForImpl(Pred->getRHS(), Pred->getRHS()->getType(), IP, false); 24445ffd83dbSDimitry Andric 24455ffd83dbSDimitry Andric Builder.SetInsertPoint(IP); 24465ffd83dbSDimitry Andric auto *I = Builder.CreateICmpNE(Expr0, Expr1, "ident.check"); 24475ffd83dbSDimitry Andric return I; 24485ffd83dbSDimitry Andric } 24495ffd83dbSDimitry Andric 24505ffd83dbSDimitry Andric Value *SCEVExpander::generateOverflowCheck(const SCEVAddRecExpr *AR, 24515ffd83dbSDimitry Andric Instruction *Loc, bool Signed) { 24525ffd83dbSDimitry Andric assert(AR->isAffine() && "Cannot generate RT check for " 24535ffd83dbSDimitry Andric "non-affine expression"); 24545ffd83dbSDimitry Andric 24555ffd83dbSDimitry Andric SCEVUnionPredicate Pred; 24565ffd83dbSDimitry Andric const SCEV *ExitCount = 24575ffd83dbSDimitry Andric SE.getPredicatedBackedgeTakenCount(AR->getLoop(), Pred); 24585ffd83dbSDimitry Andric 2459e8d8bef9SDimitry Andric assert(!isa<SCEVCouldNotCompute>(ExitCount) && "Invalid loop count"); 24605ffd83dbSDimitry Andric 24615ffd83dbSDimitry Andric const SCEV *Step = AR->getStepRecurrence(SE); 24625ffd83dbSDimitry Andric const SCEV *Start = AR->getStart(); 24635ffd83dbSDimitry Andric 24645ffd83dbSDimitry Andric Type *ARTy = AR->getType(); 24655ffd83dbSDimitry Andric unsigned SrcBits = SE.getTypeSizeInBits(ExitCount->getType()); 24665ffd83dbSDimitry Andric unsigned DstBits = SE.getTypeSizeInBits(ARTy); 24675ffd83dbSDimitry Andric 24685ffd83dbSDimitry 
Andric // The expression {Start,+,Step} has nusw/nssw if
24695ffd83dbSDimitry Andric // Step < 0, Start - |Step| * Backedge <= Start
24705ffd83dbSDimitry Andric // Step >= 0, Start + |Step| * Backedge > Start
24715ffd83dbSDimitry Andric // and |Step| * Backedge doesn't overflow (unsigned).
24725ffd83dbSDimitry Andric 
24735ffd83dbSDimitry Andric IntegerType *CountTy = IntegerType::get(Loc->getContext(), SrcBits);
24745ffd83dbSDimitry Andric Builder.SetInsertPoint(Loc);
2475e8d8bef9SDimitry Andric Value *TripCountVal = expandCodeForImpl(ExitCount, CountTy, Loc, false);
24765ffd83dbSDimitry Andric 
24775ffd83dbSDimitry Andric IntegerType *Ty =
24785ffd83dbSDimitry Andric IntegerType::get(Loc->getContext(), SE.getTypeSizeInBits(ARTy));
24795ffd83dbSDimitry Andric 
2480e8d8bef9SDimitry Andric Value *StepValue = expandCodeForImpl(Step, Ty, Loc, false);
2481e8d8bef9SDimitry Andric Value *NegStepValue =
2482e8d8bef9SDimitry Andric expandCodeForImpl(SE.getNegativeSCEV(Step), Ty, Loc, false);
2483349cc55cSDimitry Andric Value *StartValue = expandCodeForImpl(Start, ARTy, Loc, false);
24845ffd83dbSDimitry Andric 
24855ffd83dbSDimitry Andric ConstantInt *Zero =
2486349cc55cSDimitry Andric ConstantInt::get(Loc->getContext(), APInt::getZero(DstBits));
24875ffd83dbSDimitry Andric 
24885ffd83dbSDimitry Andric Builder.SetInsertPoint(Loc);
24895ffd83dbSDimitry Andric // Compute |Step|
24905ffd83dbSDimitry Andric Value *StepCompare = Builder.CreateICmp(ICmpInst::ICMP_SLT, StepValue, Zero);
24915ffd83dbSDimitry Andric Value *AbsStep = Builder.CreateSelect(StepCompare, NegStepValue, StepValue);
24925ffd83dbSDimitry Andric 
24935ffd83dbSDimitry Andric // Get the backedge-taken count and truncate or extend it to the AR type.
24945ffd83dbSDimitry Andric Value *TruncTripCount = Builder.CreateZExtOrTrunc(TripCountVal, Ty);
24955ffd83dbSDimitry Andric 
24965ffd83dbSDimitry Andric // Compute |Step| * Backedge
2497349cc55cSDimitry Andric Value *MulV, *OfMul;
2498349cc55cSDimitry Andric if (Step->isOne()) {
2499349cc55cSDimitry Andric // Special-case Step of one. Potentially-costly `umul_with_overflow` isn't
2500349cc55cSDimitry Andric // needed; there is never an overflow, so to avoid artificially inflating
2501349cc55cSDimitry Andric // the cost of the check, directly emit the optimized IR.
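// (With |Step| == 1 the product |Step| * Backedge is simply the truncated
// trip count and can never overflow, so the overflow bit is constant false.)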
2502349cc55cSDimitry Andric MulV = TruncTripCount; 2503349cc55cSDimitry Andric OfMul = ConstantInt::getFalse(MulV->getContext()); 2504349cc55cSDimitry Andric } else { 2505349cc55cSDimitry Andric auto *MulF = Intrinsic::getDeclaration(Loc->getModule(), 2506349cc55cSDimitry Andric Intrinsic::umul_with_overflow, Ty); 25075ffd83dbSDimitry Andric CallInst *Mul = Builder.CreateCall(MulF, {AbsStep, TruncTripCount}, "mul"); 2508349cc55cSDimitry Andric MulV = Builder.CreateExtractValue(Mul, 0, "mul.result"); 2509349cc55cSDimitry Andric OfMul = Builder.CreateExtractValue(Mul, 1, "mul.overflow"); 2510349cc55cSDimitry Andric } 25115ffd83dbSDimitry Andric 25125ffd83dbSDimitry Andric // Compute: 25135ffd83dbSDimitry Andric // Start + |Step| * Backedge < Start 25145ffd83dbSDimitry Andric // Start - |Step| * Backedge > Start 25155ffd83dbSDimitry Andric Value *Add = nullptr, *Sub = nullptr; 2516349cc55cSDimitry Andric if (PointerType *ARPtrTy = dyn_cast<PointerType>(ARTy)) { 2517349cc55cSDimitry Andric StartValue = InsertNoopCastOfTo( 2518349cc55cSDimitry Andric StartValue, Builder.getInt8PtrTy(ARPtrTy->getAddressSpace())); 2519349cc55cSDimitry Andric Value *NegMulV = Builder.CreateNeg(MulV); 2520349cc55cSDimitry Andric Add = Builder.CreateGEP(Builder.getInt8Ty(), StartValue, MulV); 2521349cc55cSDimitry Andric Sub = Builder.CreateGEP(Builder.getInt8Ty(), StartValue, NegMulV); 25225ffd83dbSDimitry Andric } else { 25235ffd83dbSDimitry Andric Add = Builder.CreateAdd(StartValue, MulV); 25245ffd83dbSDimitry Andric Sub = Builder.CreateSub(StartValue, MulV); 25255ffd83dbSDimitry Andric } 25265ffd83dbSDimitry Andric 25275ffd83dbSDimitry Andric Value *EndCompareGT = Builder.CreateICmp( 25285ffd83dbSDimitry Andric Signed ? ICmpInst::ICMP_SGT : ICmpInst::ICMP_UGT, Sub, StartValue); 25295ffd83dbSDimitry Andric 25305ffd83dbSDimitry Andric Value *EndCompareLT = Builder.CreateICmp( 25315ffd83dbSDimitry Andric Signed ? ICmpInst::ICMP_SLT : ICmpInst::ICMP_ULT, Add, StartValue); 25325ffd83dbSDimitry Andric 25335ffd83dbSDimitry Andric // Select the answer based on the sign of Step. 25345ffd83dbSDimitry Andric Value *EndCheck = 25355ffd83dbSDimitry Andric Builder.CreateSelect(StepCompare, EndCompareGT, EndCompareLT); 25365ffd83dbSDimitry Andric 25375ffd83dbSDimitry Andric // If the backedge taken count type is larger than the AR type, 25385ffd83dbSDimitry Andric // check that we don't drop any bits by truncating it. If we are 25395ffd83dbSDimitry Andric // dropping bits, then we have overflow (unless the step is zero). 
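// For example, a 64-bit backedge-taken count of 2^40 truncated to a 32-bit AR
// type loses bits; the check below catches this by comparing the original
// count against the largest value representable in the narrow type.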
25405ffd83dbSDimitry Andric if (SE.getTypeSizeInBits(CountTy) > SE.getTypeSizeInBits(Ty)) { 25415ffd83dbSDimitry Andric auto MaxVal = APInt::getMaxValue(DstBits).zext(SrcBits); 25425ffd83dbSDimitry Andric auto *BackedgeCheck = 25435ffd83dbSDimitry Andric Builder.CreateICmp(ICmpInst::ICMP_UGT, TripCountVal, 25445ffd83dbSDimitry Andric ConstantInt::get(Loc->getContext(), MaxVal)); 25455ffd83dbSDimitry Andric BackedgeCheck = Builder.CreateAnd( 25465ffd83dbSDimitry Andric BackedgeCheck, Builder.CreateICmp(ICmpInst::ICMP_NE, StepValue, Zero)); 25475ffd83dbSDimitry Andric 25485ffd83dbSDimitry Andric EndCheck = Builder.CreateOr(EndCheck, BackedgeCheck); 25495ffd83dbSDimitry Andric } 25505ffd83dbSDimitry Andric 2551e8d8bef9SDimitry Andric return Builder.CreateOr(EndCheck, OfMul); 25525ffd83dbSDimitry Andric } 25535ffd83dbSDimitry Andric 25545ffd83dbSDimitry Andric Value *SCEVExpander::expandWrapPredicate(const SCEVWrapPredicate *Pred, 25555ffd83dbSDimitry Andric Instruction *IP) { 25565ffd83dbSDimitry Andric const auto *A = cast<SCEVAddRecExpr>(Pred->getExpr()); 25575ffd83dbSDimitry Andric Value *NSSWCheck = nullptr, *NUSWCheck = nullptr; 25585ffd83dbSDimitry Andric 25595ffd83dbSDimitry Andric // Add a check for NUSW 25605ffd83dbSDimitry Andric if (Pred->getFlags() & SCEVWrapPredicate::IncrementNUSW) 25615ffd83dbSDimitry Andric NUSWCheck = generateOverflowCheck(A, IP, false); 25625ffd83dbSDimitry Andric 25635ffd83dbSDimitry Andric // Add a check for NSSW 25645ffd83dbSDimitry Andric if (Pred->getFlags() & SCEVWrapPredicate::IncrementNSSW) 25655ffd83dbSDimitry Andric NSSWCheck = generateOverflowCheck(A, IP, true); 25665ffd83dbSDimitry Andric 25675ffd83dbSDimitry Andric if (NUSWCheck && NSSWCheck) 25685ffd83dbSDimitry Andric return Builder.CreateOr(NUSWCheck, NSSWCheck); 25695ffd83dbSDimitry Andric 25705ffd83dbSDimitry Andric if (NUSWCheck) 25715ffd83dbSDimitry Andric return NUSWCheck; 25725ffd83dbSDimitry Andric 25735ffd83dbSDimitry Andric if (NSSWCheck) 25745ffd83dbSDimitry Andric return NSSWCheck; 25755ffd83dbSDimitry Andric 25765ffd83dbSDimitry Andric return ConstantInt::getFalse(IP->getContext()); 25775ffd83dbSDimitry Andric } 25785ffd83dbSDimitry Andric 25795ffd83dbSDimitry Andric Value *SCEVExpander::expandUnionPredicate(const SCEVUnionPredicate *Union, 25805ffd83dbSDimitry Andric Instruction *IP) { 25815ffd83dbSDimitry Andric auto *BoolType = IntegerType::get(IP->getContext(), 1); 25825ffd83dbSDimitry Andric Value *Check = ConstantInt::getNullValue(BoolType); 25835ffd83dbSDimitry Andric 25845ffd83dbSDimitry Andric // Loop over all checks in this set. 
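// Each member check is a "predicate failed" flag, so the union's check is the
// running 'or' of all of them: it is true as soon as any member predicate does
// not hold at runtime.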
25855ffd83dbSDimitry Andric for (auto Pred : Union->getPredicates()) { 25865ffd83dbSDimitry Andric auto *NextCheck = expandCodeForPredicate(Pred, IP); 25875ffd83dbSDimitry Andric Builder.SetInsertPoint(IP); 25885ffd83dbSDimitry Andric Check = Builder.CreateOr(Check, NextCheck); 25895ffd83dbSDimitry Andric } 25905ffd83dbSDimitry Andric 25915ffd83dbSDimitry Andric return Check; 25925ffd83dbSDimitry Andric } 25935ffd83dbSDimitry Andric 2594e8d8bef9SDimitry Andric Value *SCEVExpander::fixupLCSSAFormFor(Instruction *User, unsigned OpIdx) { 2595e8d8bef9SDimitry Andric assert(PreserveLCSSA); 2596e8d8bef9SDimitry Andric SmallVector<Instruction *, 1> ToUpdate; 2597e8d8bef9SDimitry Andric 2598e8d8bef9SDimitry Andric auto *OpV = User->getOperand(OpIdx); 2599e8d8bef9SDimitry Andric auto *OpI = dyn_cast<Instruction>(OpV); 2600e8d8bef9SDimitry Andric if (!OpI) 2601e8d8bef9SDimitry Andric return OpV; 2602e8d8bef9SDimitry Andric 2603e8d8bef9SDimitry Andric Loop *DefLoop = SE.LI.getLoopFor(OpI->getParent()); 2604e8d8bef9SDimitry Andric Loop *UseLoop = SE.LI.getLoopFor(User->getParent()); 2605e8d8bef9SDimitry Andric if (!DefLoop || UseLoop == DefLoop || DefLoop->contains(UseLoop)) 2606e8d8bef9SDimitry Andric return OpV; 2607e8d8bef9SDimitry Andric 2608e8d8bef9SDimitry Andric ToUpdate.push_back(OpI); 2609e8d8bef9SDimitry Andric SmallVector<PHINode *, 16> PHIsToRemove; 2610e8d8bef9SDimitry Andric formLCSSAForInstructions(ToUpdate, SE.DT, SE.LI, &SE, Builder, &PHIsToRemove); 2611e8d8bef9SDimitry Andric for (PHINode *PN : PHIsToRemove) { 2612e8d8bef9SDimitry Andric if (!PN->use_empty()) 2613e8d8bef9SDimitry Andric continue; 2614e8d8bef9SDimitry Andric InsertedValues.erase(PN); 2615e8d8bef9SDimitry Andric InsertedPostIncValues.erase(PN); 2616e8d8bef9SDimitry Andric PN->eraseFromParent(); 2617e8d8bef9SDimitry Andric } 2618e8d8bef9SDimitry Andric 2619e8d8bef9SDimitry Andric return User->getOperand(OpIdx); 2620e8d8bef9SDimitry Andric } 2621e8d8bef9SDimitry Andric 26225ffd83dbSDimitry Andric namespace { 26235ffd83dbSDimitry Andric // Search for a SCEV subexpression that is not safe to expand. Any expression 26245ffd83dbSDimitry Andric // that may expand to a !isSafeToSpeculativelyExecute value is unsafe, namely 26255ffd83dbSDimitry Andric // UDiv expressions. We don't know if the UDiv is derived from an IR divide 26265ffd83dbSDimitry Andric // instruction, but the important thing is that we prove the denominator is 26275ffd83dbSDimitry Andric // nonzero before expansion. 26285ffd83dbSDimitry Andric // 26295ffd83dbSDimitry Andric // IVUsers already checks that IV-derived expressions are safe. So this check is 26305ffd83dbSDimitry Andric // only needed when the expression includes some subexpression that is not IV 26315ffd83dbSDimitry Andric // derived. 26325ffd83dbSDimitry Andric // 26335ffd83dbSDimitry Andric // Currently, we only allow division by a nonzero constant here. If this is 26345ffd83dbSDimitry Andric // inadequate, we could easily allow division by SCEVUnknown by using 26355ffd83dbSDimitry Andric // ValueTracking to check isKnownNonZero(). 26365ffd83dbSDimitry Andric // 26375ffd83dbSDimitry Andric // We cannot generally expand recurrences unless the step dominates the loop 26385ffd83dbSDimitry Andric // header. The expander handles the special case of affine recurrences by 26395ffd83dbSDimitry Andric // scaling the recurrence outside the loop, but this technique isn't generally 26405ffd83dbSDimitry Andric // applicable. 
Expanding a nested recurrence outside a loop requires computing 26415ffd83dbSDimitry Andric // binomial coefficients. This could be done, but the recurrence has to be in a 26425ffd83dbSDimitry Andric // perfectly reduced form, which can't be guaranteed. 26435ffd83dbSDimitry Andric struct SCEVFindUnsafe { 26445ffd83dbSDimitry Andric ScalarEvolution &SE; 2645349cc55cSDimitry Andric bool CanonicalMode; 26465ffd83dbSDimitry Andric bool IsUnsafe; 26475ffd83dbSDimitry Andric 2648349cc55cSDimitry Andric SCEVFindUnsafe(ScalarEvolution &SE, bool CanonicalMode) 2649349cc55cSDimitry Andric : SE(SE), CanonicalMode(CanonicalMode), IsUnsafe(false) {} 26505ffd83dbSDimitry Andric 26515ffd83dbSDimitry Andric bool follow(const SCEV *S) { 26525ffd83dbSDimitry Andric if (const SCEVUDivExpr *D = dyn_cast<SCEVUDivExpr>(S)) { 26535ffd83dbSDimitry Andric const SCEVConstant *SC = dyn_cast<SCEVConstant>(D->getRHS()); 26545ffd83dbSDimitry Andric if (!SC || SC->getValue()->isZero()) { 26555ffd83dbSDimitry Andric IsUnsafe = true; 26565ffd83dbSDimitry Andric return false; 26575ffd83dbSDimitry Andric } 26585ffd83dbSDimitry Andric } 26595ffd83dbSDimitry Andric if (const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(S)) { 26605ffd83dbSDimitry Andric const SCEV *Step = AR->getStepRecurrence(SE); 26615ffd83dbSDimitry Andric if (!AR->isAffine() && !SE.dominates(Step, AR->getLoop()->getHeader())) { 26625ffd83dbSDimitry Andric IsUnsafe = true; 26635ffd83dbSDimitry Andric return false; 26645ffd83dbSDimitry Andric } 2665349cc55cSDimitry Andric 2666349cc55cSDimitry Andric // For non-affine addrecs or in non-canonical mode we need a preheader 2667349cc55cSDimitry Andric // to insert into. 2668349cc55cSDimitry Andric if (!AR->getLoop()->getLoopPreheader() && 2669349cc55cSDimitry Andric (!CanonicalMode || !AR->isAffine())) { 2670349cc55cSDimitry Andric IsUnsafe = true; 2671349cc55cSDimitry Andric return false; 2672349cc55cSDimitry Andric } 26735ffd83dbSDimitry Andric } 26745ffd83dbSDimitry Andric return true; 26755ffd83dbSDimitry Andric } 26765ffd83dbSDimitry Andric bool isDone() const { return IsUnsafe; } 26775ffd83dbSDimitry Andric }; 26785ffd83dbSDimitry Andric } 26795ffd83dbSDimitry Andric 26805ffd83dbSDimitry Andric namespace llvm { 2681349cc55cSDimitry Andric bool isSafeToExpand(const SCEV *S, ScalarEvolution &SE, bool CanonicalMode) { 2682349cc55cSDimitry Andric SCEVFindUnsafe Search(SE, CanonicalMode); 26835ffd83dbSDimitry Andric visitAll(S, Search); 26845ffd83dbSDimitry Andric return !Search.IsUnsafe; 26855ffd83dbSDimitry Andric } 26865ffd83dbSDimitry Andric 26875ffd83dbSDimitry Andric bool isSafeToExpandAt(const SCEV *S, const Instruction *InsertionPoint, 26885ffd83dbSDimitry Andric ScalarEvolution &SE) { 26895ffd83dbSDimitry Andric if (!isSafeToExpand(S, SE)) 26905ffd83dbSDimitry Andric return false; 26915ffd83dbSDimitry Andric // We have to prove that the expanded site of S dominates InsertionPoint. 26925ffd83dbSDimitry Andric // This is easy when not in the same block, but hard when S is an instruction 26935ffd83dbSDimitry Andric // to be expanded somewhere inside the same block as our insertion point. 26945ffd83dbSDimitry Andric // What we really need here is something analogous to an OrderedBasicBlock, 26955ffd83dbSDimitry Andric // but for the moment, we paper over the problem by handling two common and 26965ffd83dbSDimitry Andric // cheap to check cases. 
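// Case 1: the expansion point of S properly dominates the insertion block.
// Case 2: S is available in the same block, accepted only when we insert at
// the block's terminator or when S is a SCEVUnknown whose value is already an
// operand of the insertion point itself.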
26975ffd83dbSDimitry Andric if (SE.properlyDominates(S, InsertionPoint->getParent())) 26985ffd83dbSDimitry Andric return true; 26995ffd83dbSDimitry Andric if (SE.dominates(S, InsertionPoint->getParent())) { 27005ffd83dbSDimitry Andric if (InsertionPoint->getParent()->getTerminator() == InsertionPoint) 27015ffd83dbSDimitry Andric return true; 27025ffd83dbSDimitry Andric if (const SCEVUnknown *U = dyn_cast<SCEVUnknown>(S)) 2703fe6060f1SDimitry Andric if (llvm::is_contained(InsertionPoint->operand_values(), U->getValue())) 27045ffd83dbSDimitry Andric return true; 27055ffd83dbSDimitry Andric } 27065ffd83dbSDimitry Andric return false; 27075ffd83dbSDimitry Andric } 2708e8d8bef9SDimitry Andric 2709fe6060f1SDimitry Andric void SCEVExpanderCleaner::cleanup() { 2710e8d8bef9SDimitry Andric // Result is used, nothing to remove. 2711e8d8bef9SDimitry Andric if (ResultUsed) 2712e8d8bef9SDimitry Andric return; 2713e8d8bef9SDimitry Andric 2714e8d8bef9SDimitry Andric auto InsertedInstructions = Expander.getAllInsertedInstructions(); 2715e8d8bef9SDimitry Andric #ifndef NDEBUG 2716e8d8bef9SDimitry Andric SmallPtrSet<Instruction *, 8> InsertedSet(InsertedInstructions.begin(), 2717e8d8bef9SDimitry Andric InsertedInstructions.end()); 2718e8d8bef9SDimitry Andric (void)InsertedSet; 2719e8d8bef9SDimitry Andric #endif 2720e8d8bef9SDimitry Andric // Remove sets with value handles. 2721e8d8bef9SDimitry Andric Expander.clear(); 2722e8d8bef9SDimitry Andric 2723e8d8bef9SDimitry Andric // Sort so that earlier instructions do not dominate later instructions. 2724e8d8bef9SDimitry Andric stable_sort(InsertedInstructions, [this](Instruction *A, Instruction *B) { 2725e8d8bef9SDimitry Andric return DT.dominates(B, A); 2726e8d8bef9SDimitry Andric }); 2727e8d8bef9SDimitry Andric // Remove all inserted instructions. 2728e8d8bef9SDimitry Andric for (Instruction *I : InsertedInstructions) { 2729e8d8bef9SDimitry Andric 2730e8d8bef9SDimitry Andric #ifndef NDEBUG 2731e8d8bef9SDimitry Andric assert(all_of(I->users(), 2732e8d8bef9SDimitry Andric [&InsertedSet](Value *U) { 2733e8d8bef9SDimitry Andric return InsertedSet.contains(cast<Instruction>(U)); 2734e8d8bef9SDimitry Andric }) && 2735e8d8bef9SDimitry Andric "removed instruction should only be used by instructions inserted " 2736e8d8bef9SDimitry Andric "during expansion"); 2737e8d8bef9SDimitry Andric #endif 2738e8d8bef9SDimitry Andric assert(!I->getType()->isVoidTy() && 2739e8d8bef9SDimitry Andric "inserted instruction should have non-void types"); 2740e8d8bef9SDimitry Andric I->replaceAllUsesWith(UndefValue::get(I->getType())); 2741e8d8bef9SDimitry Andric I->eraseFromParent(); 2742e8d8bef9SDimitry Andric } 2743e8d8bef9SDimitry Andric } 27445ffd83dbSDimitry Andric } 2745