//===- VPlanAnalysis.cpp - Various Analyses working on VPlan ----*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "VPlanAnalysis.h"
#include "VPlan.h"
#include "VPlanCFG.h"
#include "VPlanDominatorTree.h"
#include "llvm/ADT/TypeSwitch.h"
#include "llvm/Analysis/ScalarEvolution.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/PatternMatch.h"
#include "llvm/Support/GenericDomTreeConstruction.h"

using namespace llvm;

#define DEBUG_TYPE "vplan"

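// Infer the scalar type of a blend recipe from its first incoming value. All
// incoming values must have the same scalar type, which is asserted and cached
// for the remaining incoming values.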
Type *VPTypeAnalysis::inferScalarTypeForRecipe(const VPBlendRecipe *R) {
  Type *ResTy = inferScalarType(R->getIncomingValue(0));
  for (unsigned I = 1, E = R->getNumIncomingValues(); I != E; ++I) {
    VPValue *Inc = R->getIncomingValue(I);
    assert(inferScalarType(Inc) == ResTy &&
           "different types inferred for different incoming values");
    CachedTypes[Inc] = ResTy;
  }
  return ResTy;
}

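// Infer the scalar result type of a VPInstruction based on its opcode: most
// arithmetic opcodes take the type of their first operand, comparisons and
// lane masks produce i1, and branch opcodes produce void.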
Type *VPTypeAnalysis::inferScalarTypeForRecipe(const VPInstruction *R) {
  // Set the result type from the first operand, check if the types for all
  // other operands match and cache them.
  auto SetResultTyFromOp = [this, R]() {
    Type *ResTy = inferScalarType(R->getOperand(0));
    for (unsigned Op = 1; Op != R->getNumOperands(); ++Op) {
      VPValue *OtherV = R->getOperand(Op);
      assert(inferScalarType(OtherV) == ResTy &&
             "different types inferred for different operands");
      CachedTypes[OtherV] = ResTy;
    }
    return ResTy;
  };

  unsigned Opcode = R->getOpcode();
  if (Instruction::isBinaryOp(Opcode) || Instruction::isUnaryOp(Opcode))
    return SetResultTyFromOp();

  switch (Opcode) {
  case Instruction::Select: {
    Type *ResTy = inferScalarType(R->getOperand(1));
    VPValue *OtherV = R->getOperand(2);
    assert(inferScalarType(OtherV) == ResTy &&
           "different types inferred for different operands");
    CachedTypes[OtherV] = ResTy;
    return ResTy;
  }
  case Instruction::ICmp:
  case VPInstruction::ActiveLaneMask:
    assert(inferScalarType(R->getOperand(0)) ==
               inferScalarType(R->getOperand(1)) &&
           "different types inferred for different operands");
    return IntegerType::get(Ctx, 1);
  case VPInstruction::ComputeReductionResult: {
    auto *PhiR = cast<VPReductionPHIRecipe>(R->getOperand(0));
    auto *OrigPhi = cast<PHINode>(PhiR->getUnderlyingValue());
    return OrigPhi->getType();
  }
  case VPInstruction::ExplicitVectorLength:
    return Type::getIntNTy(Ctx, 32);
  case VPInstruction::FirstOrderRecurrenceSplice:
  case VPInstruction::Not:
  case VPInstruction::ResumePhi:
  case VPInstruction::CalculateTripCountMinusVF:
  case VPInstruction::CanonicalIVIncrementForPart:
  case VPInstruction::AnyOf:
    return SetResultTyFromOp();
  case VPInstruction::ExtractFromEnd: {
    Type *BaseTy = inferScalarType(R->getOperand(0));
    if (auto *VecTy = dyn_cast<VectorType>(BaseTy))
      return VecTy->getElementType();
    return BaseTy;
  }
  case VPInstruction::LogicalAnd:
    assert(inferScalarType(R->getOperand(0))->isIntegerTy(1) &&
           inferScalarType(R->getOperand(1))->isIntegerTy(1) &&
           "LogicalAnd operands should be bool");
    return IntegerType::get(Ctx, 1);
  case VPInstruction::PtrAdd:
    // Return the type based on the pointer argument (i.e. first operand).
    return inferScalarType(R->getOperand(0));
  case VPInstruction::BranchOnCond:
  case VPInstruction::BranchOnCount:
    return Type::getVoidTy(Ctx);
  default:
    break;
  }
  // Type inference not implemented for opcode.
  LLVM_DEBUG({
    dbgs() << "LV: Found unhandled opcode for: ";
    R->getVPSingleValue()->dump();
  });
  llvm_unreachable("Unhandled opcode!");
}

Type *VPTypeAnalysis::inferScalarTypeForRecipe(const VPWidenRecipe *R) {
  unsigned Opcode = R->getOpcode();
  if (Instruction::isBinaryOp(Opcode) || Instruction::isShift(Opcode) ||
      Instruction::isBitwiseLogicOp(Opcode)) {
    Type *ResTy = inferScalarType(R->getOperand(0));
    assert(ResTy == inferScalarType(R->getOperand(1)) &&
           "types for both operands must match for binary op");
    CachedTypes[R->getOperand(1)] = ResTy;
    return ResTy;
  }

  switch (Opcode) {
  case Instruction::ICmp:
  case Instruction::FCmp:
    return IntegerType::get(Ctx, 1);
  case Instruction::FNeg:
  case Instruction::Freeze:
    return inferScalarType(R->getOperand(0));
  default:
    break;
  }

  // Type inference not implemented for opcode.
  LLVM_DEBUG({
    dbgs() << "LV: Found unhandled opcode for: ";
    R->getVPSingleValue()->dump();
  });
  llvm_unreachable("Unhandled opcode!");
}

Type *VPTypeAnalysis::inferScalarTypeForRecipe(const VPWidenCallRecipe *R) {
  auto &CI = *cast<CallInst>(R->getUnderlyingInstr());
  return CI.getType();
}

Type *VPTypeAnalysis::inferScalarTypeForRecipe(const VPWidenMemoryRecipe *R) {
  assert((isa<VPWidenLoadRecipe, VPWidenLoadEVLRecipe>(R)) &&
         "Store recipes should not define any values");
  return cast<LoadInst>(&R->getIngredient())->getType();
}

Type *VPTypeAnalysis::inferScalarTypeForRecipe(const VPWidenSelectRecipe *R) {
  Type *ResTy = inferScalarType(R->getOperand(1));
  VPValue *OtherV = R->getOperand(2);
  assert(inferScalarType(OtherV) == ResTy &&
         "different types inferred for different operands");
  CachedTypes[OtherV] = ResTy;
  return ResTy;
}

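// Infer the scalar type of a replicate recipe from its underlying
// instruction's opcode and its operands. Store recipes still define a (void)
// result; see the FIXME below.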
Type *VPTypeAnalysis::inferScalarTypeForRecipe(const VPReplicateRecipe *R) {
  unsigned Opcode = R->getUnderlyingInstr()->getOpcode();

  if (Instruction::isBinaryOp(Opcode) || Instruction::isShift(Opcode) ||
      Instruction::isBitwiseLogicOp(Opcode)) {
    Type *ResTy = inferScalarType(R->getOperand(0));
    assert(ResTy == inferScalarType(R->getOperand(1)) &&
           "inferred types for operands of binary op don't match");
    CachedTypes[R->getOperand(1)] = ResTy;
    return ResTy;
  }

  if (Instruction::isCast(Opcode))
    return R->getUnderlyingInstr()->getType();

  switch (Opcode) {
  case Instruction::Call: {
    unsigned CallIdx = R->getNumOperands() - (R->isPredicated() ? 2 : 1);
    return cast<Function>(R->getOperand(CallIdx)->getLiveInIRValue())
        ->getReturnType();
  }
  case Instruction::Select: {
    Type *ResTy = inferScalarType(R->getOperand(1));
    assert(ResTy == inferScalarType(R->getOperand(2)) &&
           "inferred types for operands of select op don't match");
    CachedTypes[R->getOperand(2)] = ResTy;
    return ResTy;
  }
  case Instruction::ICmp:
  case Instruction::FCmp:
    return IntegerType::get(Ctx, 1);
  case Instruction::Alloca:
  case Instruction::ExtractValue:
    return R->getUnderlyingInstr()->getType();
  case Instruction::Freeze:
  case Instruction::FNeg:
  case Instruction::GetElementPtr:
    return inferScalarType(R->getOperand(0));
  case Instruction::Load:
    return cast<LoadInst>(R->getUnderlyingInstr())->getType();
  case Instruction::Store:
    // FIXME: VPReplicateRecipes with store opcodes still define a result
    // VPValue, so we need to handle them here. Remove the code here once this
    // is modeled accurately in VPlan.
    return Type::getVoidTy(Ctx);
  default:
    break;
  }
  // Type inference not implemented for opcode.
  LLVM_DEBUG({
    dbgs() << "LV: Found unhandled opcode for: ";
    R->getVPSingleValue()->dump();
  });
  llvm_unreachable("Unhandled opcode");
}

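// Main entry point: return the scalar type of a VPValue. Live-ins take the
// type of their underlying IR value (or the canonical IV type if they have
// none); defined values dispatch on their defining recipe via TypeSwitch.
// Results are cached in CachedTypes.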
Type *VPTypeAnalysis::inferScalarType(const VPValue *V) {
  if (Type *CachedTy = CachedTypes.lookup(V))
    return CachedTy;

  if (V->isLiveIn()) {
    if (auto *IRValue = V->getLiveInIRValue())
      return IRValue->getType();
    // All VPValues without any underlying IR value (like the vector trip count
    // or the backedge-taken count) have the same type as the canonical IV.
    return CanonicalIVTy;
  }

  Type *ResultTy =
      TypeSwitch<const VPRecipeBase *, Type *>(V->getDefiningRecipe())
          .Case<VPActiveLaneMaskPHIRecipe, VPCanonicalIVPHIRecipe,
                VPFirstOrderRecurrencePHIRecipe, VPReductionPHIRecipe,
                VPWidenPointerInductionRecipe, VPEVLBasedIVPHIRecipe,
                VPScalarPHIRecipe>([this](const auto *R) {
            // Handle header phi recipes, except VPWidenIntOrFpInduction
            // which needs special handling because it may be truncated.
            // TODO: consider inferring/caching type of siblings, e.g.,
            // backedge value, here and in cases below.
            return inferScalarType(R->getStartValue());
          })
          .Case<VPWidenIntOrFpInductionRecipe, VPDerivedIVRecipe>(
              [](const auto *R) { return R->getScalarType(); })
          .Case<VPReductionRecipe, VPPredInstPHIRecipe, VPWidenPHIRecipe,
                VPScalarIVStepsRecipe, VPWidenGEPRecipe, VPVectorPointerRecipe,
                VPReverseVectorPointerRecipe, VPWidenCanonicalIVRecipe,
                VPPartialReductionRecipe>([this](const VPRecipeBase *R) {
            return inferScalarType(R->getOperand(0));
          })
          .Case<VPBlendRecipe, VPInstruction, VPWidenRecipe, VPWidenEVLRecipe,
                VPReplicateRecipe, VPWidenCallRecipe, VPWidenMemoryRecipe,
                VPWidenSelectRecipe>(
              [this](const auto *R) { return inferScalarTypeForRecipe(R); })
          .Case<VPWidenIntrinsicRecipe>([](const VPWidenIntrinsicRecipe *R) {
            return R->getResultType();
          })
          .Case<VPInterleaveRecipe>([V](const VPInterleaveRecipe *R) {
            // TODO: Use info from interleave group.
            return V->getUnderlyingValue()->getType();
          })
          .Case<VPWidenCastRecipe>(
              [](const VPWidenCastRecipe *R) { return R->getResultType(); })
          .Case<VPScalarCastRecipe>(
              [](const VPScalarCastRecipe *R) { return R->getResultType(); })
          .Case<VPExpandSCEVRecipe>([](const VPExpandSCEVRecipe *R) {
            return R->getSCEV()->getType();
          })
          .Case<VPReductionRecipe>([this](const auto *R) {
            return inferScalarType(R->getChainOp());
          });

  assert(ResultTy && "could not infer type for the given VPValue");
  CachedTypes[V] = ResultTy;
  return ResultTy;
}

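// Collect recipes that are only used (transitively) by assume intrinsics and
// can therefore be treated as ephemeral: seed the worklist with the assumes
// themselves and walk their operands, adding side-effect-free defining recipes
// whose users are all already-collected ephemeral recipes.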
void llvm::collectEphemeralRecipesForVPlan(
    VPlan &Plan, DenseSet<VPRecipeBase *> &EphRecipes) {
  // First, collect seed recipes which are operands of assumes.
  SmallVector<VPRecipeBase *> Worklist;
  for (VPBasicBlock *VPBB : VPBlockUtils::blocksOnly<VPBasicBlock>(
           vp_depth_first_deep(Plan.getVectorLoopRegion()->getEntry()))) {
    for (VPRecipeBase &R : *VPBB) {
      auto *RepR = dyn_cast<VPReplicateRecipe>(&R);
      if (!RepR || !match(RepR->getUnderlyingInstr(),
                          PatternMatch::m_Intrinsic<Intrinsic::assume>()))
        continue;
      Worklist.push_back(RepR);
      EphRecipes.insert(RepR);
    }
  }

  // Process operands of candidates in worklist and add them to the set of
  // ephemeral recipes, if they don't have side-effects and are only used by
  // other ephemeral recipes.
  while (!Worklist.empty()) {
    VPRecipeBase *Cur = Worklist.pop_back_val();
    for (VPValue *Op : Cur->operands()) {
      auto *OpR = Op->getDefiningRecipe();
      if (!OpR || OpR->mayHaveSideEffects() || EphRecipes.contains(OpR))
        continue;
      if (any_of(Op->users(), [EphRecipes](VPUser *U) {
            auto *UR = dyn_cast<VPRecipeBase>(U);
            return !UR || !EphRecipes.contains(UR);
          }))
        continue;
      EphRecipes.insert(OpR);
      Worklist.push_back(OpR);
    }
  }
}

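// Explicitly instantiate the generic dominator tree construction for VPlan's
// block hierarchy, so VPDominatorTree can be computed over VPBlockBase graphs.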
template void DomTreeBuilder::Calculate<DominatorTreeBase<VPBlockBase, false>>(
    DominatorTreeBase<VPBlockBase, false> &DT);

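// Return true if recipe A properly dominates recipe B. Within the same block
// this is decided by the recipes' order in that block; across blocks it falls
// back to block-level dominance. Replicate regions must already have been
// dissolved at this point (asserted below in debug builds).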
bool VPDominatorTree::properlyDominates(const VPRecipeBase *A,
                                        const VPRecipeBase *B) {
  if (A == B)
    return false;

  auto LocalComesBefore = [](const VPRecipeBase *A, const VPRecipeBase *B) {
    for (auto &R : *A->getParent()) {
      if (&R == A)
        return true;
      if (&R == B)
        return false;
    }
    llvm_unreachable("recipe not found");
  };
  const VPBlockBase *ParentA = A->getParent();
  const VPBlockBase *ParentB = B->getParent();
  if (ParentA == ParentB)
    return LocalComesBefore(A, B);

#ifndef NDEBUG
  auto GetReplicateRegion = [](VPRecipeBase *R) -> VPRegionBlock * {
    auto *Region = dyn_cast_or_null<VPRegionBlock>(R->getParent()->getParent());
    if (Region && Region->isReplicator()) {
      assert(Region->getNumSuccessors() == 1 &&
             Region->getNumPredecessors() == 1 && "Expected SESE region!");
      assert(R->getParent()->size() == 1 &&
             "A recipe in an original replicator region must be the only "
             "recipe in its block");
      return Region;
    }
    return nullptr;
  };
  assert(!GetReplicateRegion(const_cast<VPRecipeBase *>(A)) &&
         "No replicate regions expected at this point");
  assert(!GetReplicateRegion(const_cast<VPRecipeBase *>(B)) &&
         "No replicate regions expected at this point");
#endif
  return Base::properlyDominates(ParentA, ParentB);
}