//===-- llvm/CodeGen/GlobalISel/CSEMIRBuilder.cpp - MIBuilder--*- C++ -*-==//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
/// \file
/// This file implements the CSEMIRBuilder class which CSEs as it builds
/// instructions.
//===----------------------------------------------------------------------===//
//
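// A minimal usage sketch (illustrative only; it assumes a GISelCSEInfo has
// already been set up for the function, e.g. via GISelCSEAnalysisWrapper):
//
//   CSEMIRBuilder Builder(MF);
//   Builder.setCSEInfo(&CSEInfo);
//   Builder.setInsertPt(MBB, MBB.begin());
//   auto C0 = Builder.buildConstant(LLT::scalar(32), 42);
//   auto C1 = Builder.buildConstant(LLT::scalar(32), 42);
//   // C1 reuses the G_CONSTANT built for C0 instead of emitting a new one.
//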

#include "llvm/CodeGen/GlobalISel/CSEMIRBuilder.h"
#include "llvm/CodeGen/GlobalISel/CSEInfo.h"
#include "llvm/CodeGen/GlobalISel/GISelChangeObserver.h"
#include "llvm/CodeGen/GlobalISel/Utils.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/IR/DebugInfoMetadata.h"

using namespace llvm;

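// Returns true if A dominates B within the current basic block, i.e. A is
// reached before B when walking the block from its start (B being the block's
// end iterator counts as dominated). Both iterators must be in the builder's
// current block.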
bool CSEMIRBuilder::dominates(MachineBasicBlock::const_iterator A,
                              MachineBasicBlock::const_iterator B) const {
  auto MBBEnd = getMBB().end();
  if (B == MBBEnd)
    return true;
  assert(A->getParent() == B->getParent() &&
         "Iterators should be in same block");
  const MachineBasicBlock *BBA = A->getParent();
  MachineBasicBlock::const_iterator I = BBA->begin();
  for (; &*I != A && &*I != B; ++I)
    ;
  return &*I == A;
}

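// Looks up an instruction with the given profile in the CSE map for the
// current block. On a hit, the existing instruction is made to dominate the
// insertion point: if it sits exactly at the insertion point, the insertion
// point is advanced past it; if it does not already dominate the insertion
// point, it is spliced up to it and debug locations are merged. On a miss, an
// empty MachineInstrBuilder is returned and NodeInsertPos records where a
// newly built instruction should be inserted into the map.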
MachineInstrBuilder
CSEMIRBuilder::getDominatingInstrForID(FoldingSetNodeID &ID,
                                       void *&NodeInsertPos) {
  GISelCSEInfo *CSEInfo = getCSEInfo();
  assert(CSEInfo && "Can't get here without setting CSEInfo");
  MachineBasicBlock *CurMBB = &getMBB();
  MachineInstr *MI =
      CSEInfo->getMachineInstrIfExists(ID, CurMBB, NodeInsertPos);
  if (MI) {
    CSEInfo->countOpcodeHit(MI->getOpcode());
    auto CurrPos = getInsertPt();
    auto MII = MachineBasicBlock::iterator(MI);
    if (MII == CurrPos) {
      // Move the insert point ahead of the instruction so any future uses of
      // this builder will have the def ready.
      setInsertPt(*CurMBB, std::next(MII));
    } else if (!dominates(MI, CurrPos)) {
      // Update the spliced MachineInstr's debug location by merging it with
      // the debug location of the instruction at the insertion point.
      auto *Loc = DILocation::getMergedLocation(getDebugLoc().get(),
                                                MI->getDebugLoc().get());
      MI->setDebugLoc(Loc);
      CurMBB->splice(CurrPos, CurMBB, MI);
    }
    return MachineInstrBuilder(getMF(), MI);
  }
  return MachineInstrBuilder();
}

bool CSEMIRBuilder::canPerformCSEForOpc(unsigned Opc) const {
  const GISelCSEInfo *CSEInfo = getCSEInfo();
  if (!CSEInfo || !CSEInfo->shouldCSE(Opc))
    return false;
  return true;
}

void CSEMIRBuilder::profileDstOp(const DstOp &Op,
                                 GISelInstProfileBuilder &B) const {
  switch (Op.getDstOpKind()) {
  case DstOp::DstType::Ty_RC: {
    B.addNodeIDRegType(Op.getRegClass());
    break;
  }
  case DstOp::DstType::Ty_Reg: {
    // Regs can have LLT&(RB|RC). If those exist, profile them as well.
    B.addNodeIDReg(Op.getReg());
    break;
  }
  case DstOp::DstType::Ty_LLT: {
    B.addNodeIDRegType(Op.getLLTTy(*getMRI()));
    break;
  }
  case DstOp::DstType::Ty_VRegAttrs: {
    B.addNodeIDRegType(Op.getVRegAttrs());
    break;
  }
  }
}

void CSEMIRBuilder::profileSrcOp(const SrcOp &Op,
                                 GISelInstProfileBuilder &B) const {
  switch (Op.getSrcOpKind()) {
  case SrcOp::SrcType::Ty_Imm:
    B.addNodeIDImmediate(static_cast<int64_t>(Op.getImm()));
    break;
  case SrcOp::SrcType::Ty_Predicate:
    B.addNodeIDImmediate(static_cast<int64_t>(Op.getPredicate()));
    break;
  default:
    B.addNodeIDRegType(Op.getReg());
    break;
  }
}

void CSEMIRBuilder::profileMBBOpcode(GISelInstProfileBuilder &B,
                                     unsigned Opc) const {
  // First add the MBB (Local CSE).
  B.addNodeIDMBB(&getMBB());
  // Then add the opcode.
  B.addNodeIDOpcode(Opc);
}

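// Profile an instruction that is about to be built so it can be looked up in
// (and inserted into) the CSE map: the current block, the opcode, the
// destination operands, the source operands, and any flags. This has to line
// up with how GISelCSEInfo profiles instructions that are already in the map,
// otherwise getDominatingInstrForID will never find a match.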
void CSEMIRBuilder::profileEverything(unsigned Opc, ArrayRef<DstOp> DstOps,
                                      ArrayRef<SrcOp> SrcOps,
                                      std::optional<unsigned> Flags,
                                      GISelInstProfileBuilder &B) const {

  profileMBBOpcode(B, Opc);
  // Then add the DstOps.
  profileDstOps(DstOps, B);
  // Then add the SrcOps.
  profileSrcOps(SrcOps, B);
  // Add Flags if passed in.
  if (Flags)
    B.addNodeIDFlag(*Flags);
}

MachineInstrBuilder CSEMIRBuilder::memoizeMI(MachineInstrBuilder MIB,
                                             void *NodeInsertPos) {
  assert(canPerformCSEForOpc(MIB->getOpcode()) &&
         "Attempting to CSE illegal op");
  MachineInstr *MIBInstr = MIB;
  getCSEInfo()->insertInstr(MIBInstr, NodeInsertPos);
  return MIB;
}

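// With a single destination we can always satisfy the caller by emitting a
// COPY into it if needed. With multiple destinations that is not possible, so
// CSE is only attempted when every destination is described by a type or
// register class rather than a pre-existing register.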
bool CSEMIRBuilder::checkCopyToDefsPossible(ArrayRef<DstOp> DstOps) {
  if (DstOps.size() == 1)
    return true; // Always possible to emit a copy to just one vreg.

  return llvm::all_of(DstOps, [](const DstOp &Op) {
    DstOp::DstType DT = Op.getDstOpKind();
    return DT == DstOp::DstType::Ty_LLT || DT == DstOp::DstType::Ty_RC;
  });
}

MachineInstrBuilder
CSEMIRBuilder::generateCopiesIfRequired(ArrayRef<DstOp> DstOps,
                                        MachineInstrBuilder &MIB) {
  assert(checkCopyToDefsPossible(DstOps) &&
         "Impossible to return a single MIB with copies to multiple defs");
  if (DstOps.size() == 1) {
    const DstOp &Op = DstOps[0];
    if (Op.getDstOpKind() == DstOp::DstType::Ty_Reg)
      return buildCopy(Op.getReg(), MIB.getReg(0));
  }

  // If we didn't generate a copy then we're re-using an existing node directly
  // instead of emitting any code. Merge the debug location we wanted to emit
  // into the instruction we're CSE'ing with. Debug locations aren't part of
  // the profile so we don't need to recompute it.
  if (getDebugLoc()) {
    GISelChangeObserver *Observer = getState().Observer;
    if (Observer)
      Observer->changingInstr(*MIB);
    MIB->setDebugLoc(
        DILocation::getMergedLocation(MIB->getDebugLoc(), getDebugLoc()));
    if (Observer)
      Observer->changedInstr(*MIB);
  }

  return MIB;
}

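// buildInstr is the main CSE entry point. For a set of foldable opcodes it
// first tries to constant fold the operation and emit a constant (or build
// vector of constants) instead. If the opcode cannot be CSE'd, or CSE would
// require copies into multiple destination registers, it falls back to the
// plain MachineIRBuilder. Otherwise the instruction is profiled and either a
// dominating equivalent is reused or a new instruction is built and recorded
// in the CSE map.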
MachineInstrBuilder CSEMIRBuilder::buildInstr(unsigned Opc,
                                              ArrayRef<DstOp> DstOps,
                                              ArrayRef<SrcOp> SrcOps,
                                              std::optional<unsigned> Flag) {
  switch (Opc) {
  default:
    break;
  case TargetOpcode::G_ICMP: {
    assert(SrcOps.size() == 3 && "Invalid sources");
    assert(DstOps.size() == 1 && "Invalid dsts");
    LLT SrcTy = SrcOps[1].getLLTTy(*getMRI());

    if (std::optional<SmallVector<APInt>> Cst =
            ConstantFoldICmp(SrcOps[0].getPredicate(), SrcOps[1].getReg(),
                             SrcOps[2].getReg(), *getMRI())) {
      if (SrcTy.isVector())
        return buildBuildVectorConstant(DstOps[0], *Cst);
      return buildConstant(DstOps[0], Cst->front());
    }
    break;
  }
  case TargetOpcode::G_ADD:
  case TargetOpcode::G_PTR_ADD:
  case TargetOpcode::G_AND:
  case TargetOpcode::G_ASHR:
  case TargetOpcode::G_LSHR:
  case TargetOpcode::G_MUL:
  case TargetOpcode::G_OR:
  case TargetOpcode::G_SHL:
  case TargetOpcode::G_SUB:
  case TargetOpcode::G_XOR:
  case TargetOpcode::G_UDIV:
  case TargetOpcode::G_SDIV:
  case TargetOpcode::G_UREM:
  case TargetOpcode::G_SREM:
  case TargetOpcode::G_SMIN:
  case TargetOpcode::G_SMAX:
  case TargetOpcode::G_UMIN:
  case TargetOpcode::G_UMAX: {
    // Try to constant fold these.
    assert(SrcOps.size() == 2 && "Invalid sources");
    assert(DstOps.size() == 1 && "Invalid dsts");
    LLT SrcTy = SrcOps[0].getLLTTy(*getMRI());

    if (Opc == TargetOpcode::G_PTR_ADD &&
        getDataLayout().isNonIntegralAddressSpace(SrcTy.getAddressSpace()))
      break;

    if (SrcTy.isVector()) {
      // Try to constant fold vector constants.
      SmallVector<APInt> VecCst = ConstantFoldVectorBinop(
          Opc, SrcOps[0].getReg(), SrcOps[1].getReg(), *getMRI());
      if (!VecCst.empty())
        return buildBuildVectorConstant(DstOps[0], VecCst);
      break;
    }

    if (std::optional<APInt> Cst = ConstantFoldBinOp(
            Opc, SrcOps[0].getReg(), SrcOps[1].getReg(), *getMRI()))
      return buildConstant(DstOps[0], *Cst);
    break;
  }
  case TargetOpcode::G_FADD:
  case TargetOpcode::G_FSUB:
  case TargetOpcode::G_FMUL:
  case TargetOpcode::G_FDIV:
  case TargetOpcode::G_FREM:
  case TargetOpcode::G_FMINNUM:
  case TargetOpcode::G_FMAXNUM:
  case TargetOpcode::G_FMINNUM_IEEE:
  case TargetOpcode::G_FMAXNUM_IEEE:
  case TargetOpcode::G_FMINIMUM:
  case TargetOpcode::G_FMAXIMUM:
  case TargetOpcode::G_FCOPYSIGN: {
    // Try to constant fold these.
    assert(SrcOps.size() == 2 && "Invalid sources");
    assert(DstOps.size() == 1 && "Invalid dsts");
    if (std::optional<APFloat> Cst = ConstantFoldFPBinOp(
            Opc, SrcOps[0].getReg(), SrcOps[1].getReg(), *getMRI()))
      return buildFConstant(DstOps[0], *Cst);
    break;
  }
  case TargetOpcode::G_SEXT_INREG: {
    assert(DstOps.size() == 1 && "Invalid dst ops");
    assert(SrcOps.size() == 2 && "Invalid src ops");
    const DstOp &Dst = DstOps[0];
    const SrcOp &Src0 = SrcOps[0];
    const SrcOp &Src1 = SrcOps[1];
    if (auto MaybeCst =
            ConstantFoldExtOp(Opc, Src0.getReg(), Src1.getImm(), *getMRI()))
      return buildConstant(Dst, *MaybeCst);
    break;
  }
  case TargetOpcode::G_SITOFP:
  case TargetOpcode::G_UITOFP: {
    // Try to constant fold these.
    assert(SrcOps.size() == 1 && "Invalid sources");
    assert(DstOps.size() == 1 && "Invalid dsts");
    if (std::optional<APFloat> Cst = ConstantFoldIntToFloat(
            Opc, DstOps[0].getLLTTy(*getMRI()), SrcOps[0].getReg(), *getMRI()))
      return buildFConstant(DstOps[0], *Cst);
    break;
  }
  case TargetOpcode::G_CTLZ:
  case TargetOpcode::G_CTTZ: {
    assert(SrcOps.size() == 1 && "Expected one source");
    assert(DstOps.size() == 1 && "Expected one dest");
    std::function<unsigned(APInt)> CB;
    if (Opc == TargetOpcode::G_CTLZ)
      CB = [](APInt V) -> unsigned { return V.countl_zero(); };
    else
      CB = [](APInt V) -> unsigned { return V.countr_zero(); };
    auto MaybeCsts = ConstantFoldCountZeros(SrcOps[0].getReg(), *getMRI(), CB);
    if (!MaybeCsts)
      break;
    if (MaybeCsts->size() == 1)
      return buildConstant(DstOps[0], (*MaybeCsts)[0]);
    // This was a vector constant. Build a G_BUILD_VECTOR for them.
    SmallVector<Register> ConstantRegs;
    LLT VecTy = DstOps[0].getLLTTy(*getMRI());
    for (unsigned Cst : *MaybeCsts)
      ConstantRegs.emplace_back(
          buildConstant(VecTy.getScalarType(), Cst).getReg(0));
    return buildBuildVector(DstOps[0], ConstantRegs);
  }
  }
  bool CanCopy = checkCopyToDefsPossible(DstOps);
  if (!canPerformCSEForOpc(Opc))
    return MachineIRBuilder::buildInstr(Opc, DstOps, SrcOps, Flag);
  // If we can CSE this instruction, but it involves generating copies to
  // multiple regs, give up. This frequently happens to UNMERGEs.
  if (!CanCopy) {
    auto MIB = MachineIRBuilder::buildInstr(Opc, DstOps, SrcOps, Flag);
    // CSEInfo would have tracked this instruction. Remove it from the
    // temporary insts.
    getCSEInfo()->handleRemoveInst(&*MIB);
    return MIB;
  }
  FoldingSetNodeID ID;
  GISelInstProfileBuilder ProfBuilder(ID, *getMRI());
  void *InsertPos = nullptr;
  profileEverything(Opc, DstOps, SrcOps, Flag, ProfBuilder);
  MachineInstrBuilder MIB = getDominatingInstrForID(ID, InsertPos);
  if (MIB) {
    // Handle generating copies here.
    return generateCopiesIfRequired(DstOps, MIB);
  }
  // This instruction does not exist in the CSEInfo. Build it and CSE it.
  MachineInstrBuilder NewMIB =
      MachineIRBuilder::buildInstr(Opc, DstOps, SrcOps, Flag);
  return memoizeMI(NewMIB, InsertPos);
}

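// G_CONSTANT and G_FCONSTANT get dedicated entry points: their ConstantInt /
// ConstantFP value is profiled directly as a CImm/FPImm machine operand
// rather than as a SrcOp. For vector results only the splatted scalar element
// is CSE'd and a splat build vector is emitted around it.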
MachineInstrBuilder CSEMIRBuilder::buildConstant(const DstOp &Res,
                                                 const ConstantInt &Val) {
  constexpr unsigned Opc = TargetOpcode::G_CONSTANT;
  if (!canPerformCSEForOpc(Opc))
    return MachineIRBuilder::buildConstant(Res, Val);

  // For vectors, CSE the element only for now.
  LLT Ty = Res.getLLTTy(*getMRI());
  if (Ty.isVector())
    return buildSplatBuildVector(Res, buildConstant(Ty.getElementType(), Val));

  FoldingSetNodeID ID;
  GISelInstProfileBuilder ProfBuilder(ID, *getMRI());
  void *InsertPos = nullptr;
  profileMBBOpcode(ProfBuilder, Opc);
  profileDstOp(Res, ProfBuilder);
  ProfBuilder.addNodeIDMachineOperand(MachineOperand::CreateCImm(&Val));
  MachineInstrBuilder MIB = getDominatingInstrForID(ID, InsertPos);
  if (MIB) {
    // Handle generating copies here.
    return generateCopiesIfRequired({Res}, MIB);
  }

  MachineInstrBuilder NewMIB = MachineIRBuilder::buildConstant(Res, Val);
  return memoizeMI(NewMIB, InsertPos);
}

MachineInstrBuilder CSEMIRBuilder::buildFConstant(const DstOp &Res,
                                                  const ConstantFP &Val) {
  constexpr unsigned Opc = TargetOpcode::G_FCONSTANT;
  if (!canPerformCSEForOpc(Opc))
    return MachineIRBuilder::buildFConstant(Res, Val);

  // For vectors, CSE the element only for now.
  LLT Ty = Res.getLLTTy(*getMRI());
  if (Ty.isVector())
    return buildSplatBuildVector(Res, buildFConstant(Ty.getElementType(), Val));

  FoldingSetNodeID ID;
  GISelInstProfileBuilder ProfBuilder(ID, *getMRI());
  void *InsertPos = nullptr;
  profileMBBOpcode(ProfBuilder, Opc);
  profileDstOp(Res, ProfBuilder);
  ProfBuilder.addNodeIDMachineOperand(MachineOperand::CreateFPImm(&Val));
  MachineInstrBuilder MIB = getDominatingInstrForID(ID, InsertPos);
  if (MIB) {
    // Handle generating copies here.
    return generateCopiesIfRequired({Res}, MIB);
  }
  MachineInstrBuilder NewMIB = MachineIRBuilder::buildFConstant(Res, Val);
  return memoizeMI(NewMIB, InsertPos);
}
386