//===- VPlanAnalysis.cpp - Various Analyses working on VPlan ----*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "VPlanAnalysis.h"
#include "VPlan.h"
#include "VPlanCFG.h"
#include "VPlanDominatorTree.h"
#include "llvm/ADT/TypeSwitch.h"
#include "llvm/Analysis/ScalarEvolution.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/PatternMatch.h"
#include "llvm/Support/GenericDomTreeConstruction.h"

using namespace llvm;

#define DEBUG_TYPE "vplan"

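// Infer the scalar result type of a blend from its first incoming value. All
// incoming values of a blend must have the same type, so the remaining ones
// are only checked (in asserts builds) and cached.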
Type *VPTypeAnalysis::inferScalarTypeForRecipe(const VPBlendRecipe *R) {
  Type *ResTy = inferScalarType(R->getIncomingValue(0));
  for (unsigned I = 1, E = R->getNumIncomingValues(); I != E; ++I) {
    VPValue *Inc = R->getIncomingValue(I);
    assert(inferScalarType(Inc) == ResTy &&
           "different types inferred for different incoming values");
    CachedTypes[Inc] = ResTy;
  }
  return ResTy;
}

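// Infer the scalar result type of a VPInstruction. Generic unary and binary
// opcodes take the type of their first operand; VPlan-specific opcodes are
// handled individually below. Opcodes without type inference support reach
// the llvm_unreachable at the end.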
Type *VPTypeAnalysis::inferScalarTypeForRecipe(const VPInstruction *R) {
  // Set the result type from the first operand, check if the types for all
  // other operands match and cache them.
  auto SetResultTyFromOp = [this, R]() {
    Type *ResTy = inferScalarType(R->getOperand(0));
    for (unsigned Op = 1; Op != R->getNumOperands(); ++Op) {
      VPValue *OtherV = R->getOperand(Op);
      assert(inferScalarType(OtherV) == ResTy &&
             "different types inferred for different operands");
      CachedTypes[OtherV] = ResTy;
    }
    return ResTy;
  };

  unsigned Opcode = R->getOpcode();
  if (Instruction::isBinaryOp(Opcode) || Instruction::isUnaryOp(Opcode))
    return SetResultTyFromOp();

  switch (Opcode) {
  case Instruction::Select: {
    Type *ResTy = inferScalarType(R->getOperand(1));
    VPValue *OtherV = R->getOperand(2);
    assert(inferScalarType(OtherV) == ResTy &&
           "different types inferred for different operands");
    CachedTypes[OtherV] = ResTy;
    return ResTy;
  }
  case Instruction::ICmp:
  case VPInstruction::ActiveLaneMask:
    return inferScalarType(R->getOperand(1));
  case VPInstruction::FirstOrderRecurrenceSplice:
  case VPInstruction::Not:
    return SetResultTyFromOp();
  case VPInstruction::ExtractFromEnd: {
    Type *BaseTy = inferScalarType(R->getOperand(0));
    if (auto *VecTy = dyn_cast<VectorType>(BaseTy))
      return VecTy->getElementType();
    return BaseTy;
  }
  case VPInstruction::LogicalAnd:
    return IntegerType::get(Ctx, 1);
  case VPInstruction::PtrAdd:
    // Return the type based on the pointer argument (i.e. first operand).
    return inferScalarType(R->getOperand(0));
  case VPInstruction::BranchOnCond:
  case VPInstruction::BranchOnCount:
    return Type::getVoidTy(Ctx);
  default:
    break;
  }
  // Type inference not implemented for opcode.
  LLVM_DEBUG({
    dbgs() << "LV: Found unhandled opcode for: ";
    R->getVPSingleValue()->dump();
  });
  llvm_unreachable("Unhandled opcode!");
}

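// Infer the scalar result type of a widened IR instruction: compares produce
// i1, binary operators use the common type of their two operands, and
// FNeg/Freeze preserve the type of their single operand.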
Type *VPTypeAnalysis::inferScalarTypeForRecipe(const VPWidenRecipe *R) {
  unsigned Opcode = R->getOpcode();
  switch (Opcode) {
  case Instruction::ICmp:
  case Instruction::FCmp:
    return IntegerType::get(Ctx, 1);
  case Instruction::UDiv:
  case Instruction::SDiv:
  case Instruction::SRem:
  case Instruction::URem:
  case Instruction::Add:
  case Instruction::FAdd:
  case Instruction::Sub:
  case Instruction::FSub:
  case Instruction::Mul:
  case Instruction::FMul:
  case Instruction::FDiv:
  case Instruction::FRem:
  case Instruction::Shl:
  case Instruction::LShr:
  case Instruction::AShr:
  case Instruction::And:
  case Instruction::Or:
  case Instruction::Xor: {
    Type *ResTy = inferScalarType(R->getOperand(0));
    assert(ResTy == inferScalarType(R->getOperand(1)) &&
           "types for both operands must match for binary op");
    CachedTypes[R->getOperand(1)] = ResTy;
    return ResTy;
  }
  case Instruction::FNeg:
  case Instruction::Freeze:
    return inferScalarType(R->getOperand(0));
  default:
    break;
  }

  // Type inference not implemented for opcode.
  LLVM_DEBUG({
    dbgs() << "LV: Found unhandled opcode for: ";
    R->getVPSingleValue()->dump();
  });
  llvm_unreachable("Unhandled opcode!");
}

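// The scalar result type of a widened call is the return type of the
// underlying scalar CallInst.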
Type *VPTypeAnalysis::inferScalarTypeForRecipe(const VPWidenCallRecipe *R) {
  auto &CI = *cast<CallInst>(R->getUnderlyingInstr());
  return CI.getType();
}

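// Among the widened memory recipes only loads define a value; their scalar
// type is the type of the underlying LoadInst.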
Type *VPTypeAnalysis::inferScalarTypeForRecipe(const VPWidenMemoryRecipe *R) {
  assert((isa<VPWidenLoadRecipe>(R) || isa<VPWidenLoadEVLRecipe>(R)) &&
         "Store recipes should not define any values");
  return cast<LoadInst>(&R->getIngredient())->getType();
}

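// A widened select has the common type of its two selected operands; the
// second one is checked against the first and cached.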
Type *VPTypeAnalysis::inferScalarTypeForRecipe(const VPWidenSelectRecipe *R) {
  Type *ResTy = inferScalarType(R->getOperand(1));
  VPValue *OtherV = R->getOperand(2);
  assert(inferScalarType(OtherV) == ResTy &&
         "different types inferred for different operands");
  CachedTypes[OtherV] = ResTy;
  return ResTy;
}

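// Infer the scalar result type of a replicated instruction from the opcode of
// its underlying IR instruction: calls use the callee's return type, casts,
// allocas and loads use the underlying instruction's type, and stores are
// modeled as returning void (see the FIXME below).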
Type *VPTypeAnalysis::inferScalarTypeForRecipe(const VPReplicateRecipe *R) {
  switch (R->getUnderlyingInstr()->getOpcode()) {
  case Instruction::Call: {
    unsigned CallIdx = R->getNumOperands() - (R->isPredicated() ? 2 : 1);
    return cast<Function>(R->getOperand(CallIdx)->getLiveInIRValue())
        ->getReturnType();
  }
  case Instruction::UDiv:
  case Instruction::SDiv:
  case Instruction::SRem:
  case Instruction::URem:
  case Instruction::Add:
  case Instruction::FAdd:
  case Instruction::Sub:
  case Instruction::FSub:
  case Instruction::Mul:
  case Instruction::FMul:
  case Instruction::FDiv:
  case Instruction::FRem:
  case Instruction::Shl:
  case Instruction::LShr:
  case Instruction::AShr:
  case Instruction::And:
  case Instruction::Or:
  case Instruction::Xor: {
    Type *ResTy = inferScalarType(R->getOperand(0));
    assert(ResTy == inferScalarType(R->getOperand(1)) &&
           "inferred types for operands of binary op don't match");
    CachedTypes[R->getOperand(1)] = ResTy;
    return ResTy;
  }
  case Instruction::Select: {
    Type *ResTy = inferScalarType(R->getOperand(1));
    assert(ResTy == inferScalarType(R->getOperand(2)) &&
           "inferred types for operands of select op don't match");
    CachedTypes[R->getOperand(2)] = ResTy;
    return ResTy;
  }
  case Instruction::ICmp:
  case Instruction::FCmp:
    return IntegerType::get(Ctx, 1);
  case Instruction::AddrSpaceCast:
  case Instruction::Alloca:
  case Instruction::BitCast:
  case Instruction::Trunc:
  case Instruction::SExt:
  case Instruction::ZExt:
  case Instruction::FPExt:
  case Instruction::FPTrunc:
  case Instruction::ExtractValue:
  case Instruction::SIToFP:
  case Instruction::UIToFP:
  case Instruction::FPToSI:
  case Instruction::FPToUI:
  case Instruction::PtrToInt:
  case Instruction::IntToPtr:
    return R->getUnderlyingInstr()->getType();
  case Instruction::Freeze:
  case Instruction::FNeg:
  case Instruction::GetElementPtr:
    return inferScalarType(R->getOperand(0));
  case Instruction::Load:
    return cast<LoadInst>(R->getUnderlyingInstr())->getType();
  case Instruction::Store:
    // FIXME: VPReplicateRecipes with store opcodes still define a result
    // VPValue, so we need to handle them here. Remove the code here once this
    // is modeled accurately in VPlan.
    return Type::getVoidTy(Ctx);
  default:
    break;
  }
  // Type inference not implemented for opcode.
  LLVM_DEBUG({
    dbgs() << "LV: Found unhandled opcode for: ";
    R->getVPSingleValue()->dump();
  });
  llvm_unreachable("Unhandled opcode");
}

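// Infer the scalar type of an arbitrary VPValue. Live-ins use the type of
// their underlying IR value, falling back to the canonical IV type when there
// is none; defined values dispatch on their defining recipe. Results are
// memoized in CachedTypes.
//
// Illustrative use (a sketch; TypeInfo stands for whatever VPTypeAnalysis
// instance the caller has built for the current plan):
//   Type *Ty = TypeInfo.inferScalarType(Def);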
Type *VPTypeAnalysis::inferScalarType(const VPValue *V) {
  if (Type *CachedTy = CachedTypes.lookup(V))
    return CachedTy;

  if (V->isLiveIn()) {
    if (auto *IRValue = V->getLiveInIRValue())
      return IRValue->getType();
    // All VPValues without any underlying IR value (like the vector trip count
    // or the backedge-taken count) have the same type as the canonical IV.
    return CanonicalIVTy;
  }

  Type *ResultTy =
      TypeSwitch<const VPRecipeBase *, Type *>(V->getDefiningRecipe())
          .Case<VPActiveLaneMaskPHIRecipe, VPCanonicalIVPHIRecipe,
                VPFirstOrderRecurrencePHIRecipe, VPReductionPHIRecipe,
                VPWidenPointerInductionRecipe, VPEVLBasedIVPHIRecipe>(
              [this](const auto *R) {
                // Handle header phi recipes, except VPWidenIntOrFpInduction
                // which needs special handling because it may be truncated.
                // TODO: consider inferring/caching type of siblings, e.g.,
                // backedge value, here and in cases below.
                return inferScalarType(R->getStartValue());
              })
          .Case<VPWidenIntOrFpInductionRecipe, VPDerivedIVRecipe>(
              [](const auto *R) { return R->getScalarType(); })
          .Case<VPReductionRecipe, VPPredInstPHIRecipe, VPWidenPHIRecipe,
                VPScalarIVStepsRecipe, VPWidenGEPRecipe, VPVectorPointerRecipe,
                VPWidenCanonicalIVRecipe>([this](const VPRecipeBase *R) {
            return inferScalarType(R->getOperand(0));
          })
          .Case<VPBlendRecipe, VPInstruction, VPWidenRecipe, VPWidenEVLRecipe,
                VPReplicateRecipe, VPWidenCallRecipe, VPWidenMemoryRecipe,
                VPWidenSelectRecipe>(
              [this](const auto *R) { return inferScalarTypeForRecipe(R); })
          .Case<VPInterleaveRecipe>([V](const VPInterleaveRecipe *R) {
            // TODO: Use info from interleave group.
            return V->getUnderlyingValue()->getType();
          })
          .Case<VPWidenCastRecipe>(
              [](const VPWidenCastRecipe *R) { return R->getResultType(); })
          .Case<VPScalarCastRecipe>(
              [](const VPScalarCastRecipe *R) { return R->getResultType(); })
          .Case<VPExpandSCEVRecipe>([](const VPExpandSCEVRecipe *R) {
            return R->getSCEV()->getType();
          })
          .Case<VPReductionRecipe>([this](const auto *R) {
            return inferScalarType(R->getChainOp());
          });

  assert(ResultTy && "could not infer type for the given VPValue");
  CachedTypes[V] = ResultTy;
  return ResultTy;
}

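// Collect recipes in Plan that only exist to feed llvm.assume calls, starting
// from the assume replicate recipes themselves. Such ephemeral recipes do not
// produce values needed by the vectorized code, so callers (e.g. the cost
// model) can skip them. Illustrative use (a sketch; the cost update is
// hypothetical):
//   DenseSet<VPRecipeBase *> EphRecipes;
//   collectEphemeralRecipesForVPlan(Plan, EphRecipes);
//   if (!EphRecipes.contains(&R))
//     Cost += computeCost(R, VF);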
void llvm::collectEphemeralRecipesForVPlan(
    VPlan &Plan, DenseSet<VPRecipeBase *> &EphRecipes) {
  // First, collect the seed recipes: the replicate recipes for assume calls.
  SmallVector<VPRecipeBase *> Worklist;
  for (VPBasicBlock *VPBB : VPBlockUtils::blocksOnly<VPBasicBlock>(
           vp_depth_first_deep(Plan.getVectorLoopRegion()->getEntry()))) {
    for (VPRecipeBase &R : *VPBB) {
      auto *RepR = dyn_cast<VPReplicateRecipe>(&R);
      if (!RepR || !match(RepR->getUnderlyingInstr(),
                          PatternMatch::m_Intrinsic<Intrinsic::assume>()))
        continue;
      Worklist.push_back(RepR);
      EphRecipes.insert(RepR);
    }
  }

  // Process operands of candidates in the worklist and add them to the set of
  // ephemeral recipes if they have no side effects and are only used by other
  // ephemeral recipes.
  while (!Worklist.empty()) {
    VPRecipeBase *Cur = Worklist.pop_back_val();
    for (VPValue *Op : Cur->operands()) {
      auto *OpR = Op->getDefiningRecipe();
      if (!OpR || OpR->mayHaveSideEffects() || EphRecipes.contains(OpR))
        continue;
      if (any_of(Op->users(), [&EphRecipes](VPUser *U) {
            auto *UR = dyn_cast<VPRecipeBase>(U);
            return !UR || !EphRecipes.contains(UR);
          }))
        continue;
      EphRecipes.insert(OpR);
      Worklist.push_back(OpR);
    }
  }
}

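// Explicitly instantiate the generic dominator tree construction for VPlan's
// CFG, so a VPDominatorTree over VPBlockBases can be computed.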
template void DomTreeBuilder::Calculate<DominatorTreeBase<VPBlockBase, false>>(
    DominatorTreeBase<VPBlockBase, false> &DT);

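// Return true if recipe A properly dominates recipe B. Within the same
// VPBasicBlock this reduces to A appearing before B; across blocks it defers
// to block-level dominance of the parents.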
bool VPDominatorTree::properlyDominates(const VPRecipeBase *A,
                                        const VPRecipeBase *B) {
  if (A == B)
    return false;

  auto LocalComesBefore = [](const VPRecipeBase *A, const VPRecipeBase *B) {
    for (auto &R : *A->getParent()) {
      if (&R == A)
        return true;
      if (&R == B)
        return false;
    }
    llvm_unreachable("recipe not found");
  };
  const VPBlockBase *ParentA = A->getParent();
  const VPBlockBase *ParentB = B->getParent();
  if (ParentA == ParentB)
    return LocalComesBefore(A, B);

#ifndef NDEBUG
  auto GetReplicateRegion = [](VPRecipeBase *R) -> VPRegionBlock * {
    auto *Region = dyn_cast_or_null<VPRegionBlock>(R->getParent()->getParent());
    if (Region && Region->isReplicator()) {
      assert(Region->getNumSuccessors() == 1 &&
             Region->getNumPredecessors() == 1 && "Expected SESE region!");
      assert(R->getParent()->size() == 1 &&
             "A recipe in an original replicator region must be the only "
             "recipe in its block");
      return Region;
    }
    return nullptr;
  };
  assert(!GetReplicateRegion(const_cast<VPRecipeBase *>(A)) &&
         "No replicate regions expected at this point");
  assert(!GetReplicateRegion(const_cast<VPRecipeBase *>(B)) &&
         "No replicate regions expected at this point");
#endif
  return Base::properlyDominates(ParentA, ParentB);
}
369