10b57cec5SDimitry Andric //===----- RISCVMergeBaseOffset.cpp - Optimise address calculations ------===// 20b57cec5SDimitry Andric // 30b57cec5SDimitry Andric // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. 40b57cec5SDimitry Andric // See https://llvm.org/LICENSE.txt for license information. 50b57cec5SDimitry Andric // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception 60b57cec5SDimitry Andric // 70b57cec5SDimitry Andric //===----------------------------------------------------------------------===// 80b57cec5SDimitry Andric // 90b57cec5SDimitry Andric // Merge the offset of address calculation into the offset field 10bdd1243dSDimitry Andric // of instructions in a global address lowering sequence. 110b57cec5SDimitry Andric // 120b57cec5SDimitry Andric //===----------------------------------------------------------------------===// 130b57cec5SDimitry Andric 140b57cec5SDimitry Andric #include "RISCV.h" 150b57cec5SDimitry Andric #include "RISCVTargetMachine.h" 1681ad6265SDimitry Andric #include "llvm/CodeGen/MachineFunctionPass.h" 170b57cec5SDimitry Andric #include "llvm/CodeGen/Passes.h" 18349cc55cSDimitry Andric #include "llvm/MC/TargetRegistry.h" 190b57cec5SDimitry Andric #include "llvm/Support/Debug.h" 200b57cec5SDimitry Andric #include "llvm/Target/TargetOptions.h" 21bdd1243dSDimitry Andric #include <optional> 220b57cec5SDimitry Andric using namespace llvm; 230b57cec5SDimitry Andric 240b57cec5SDimitry Andric #define DEBUG_TYPE "riscv-merge-base-offset" 2506c3fb27SDimitry Andric #define RISCV_MERGE_BASE_OFFSET_NAME "RISC-V Merge Base Offset" 260b57cec5SDimitry Andric namespace { 270b57cec5SDimitry Andric 2806c3fb27SDimitry Andric class RISCVMergeBaseOffsetOpt : public MachineFunctionPass { 2981ad6265SDimitry Andric const RISCVSubtarget *ST = nullptr; 3006c3fb27SDimitry Andric MachineRegisterInfo *MRI; 3181ad6265SDimitry Andric 3281ad6265SDimitry Andric public: 330b57cec5SDimitry Andric static char ID; 340b57cec5SDimitry Andric 
bool runOnMachineFunction(MachineFunction &Fn) override; 35bdd1243dSDimitry Andric bool detectFoldable(MachineInstr &Hi, MachineInstr *&Lo); 360b57cec5SDimitry Andric 37bdd1243dSDimitry Andric bool detectAndFoldOffset(MachineInstr &Hi, MachineInstr &Lo); 38bdd1243dSDimitry Andric void foldOffset(MachineInstr &Hi, MachineInstr &Lo, MachineInstr &Tail, 390b57cec5SDimitry Andric int64_t Offset); 40bdd1243dSDimitry Andric bool foldLargeOffset(MachineInstr &Hi, MachineInstr &Lo, 41bdd1243dSDimitry Andric MachineInstr &TailAdd, Register GSReg); 42bdd1243dSDimitry Andric bool foldShiftedOffset(MachineInstr &Hi, MachineInstr &Lo, 43bdd1243dSDimitry Andric MachineInstr &TailShXAdd, Register GSReg); 44bdd1243dSDimitry Andric 45bdd1243dSDimitry Andric bool foldIntoMemoryOps(MachineInstr &Hi, MachineInstr &Lo); 4681ad6265SDimitry Andric 470b57cec5SDimitry Andric RISCVMergeBaseOffsetOpt() : MachineFunctionPass(ID) {} 480b57cec5SDimitry Andric 490b57cec5SDimitry Andric MachineFunctionProperties getRequiredProperties() const override { 500b57cec5SDimitry Andric return MachineFunctionProperties().set( 510b57cec5SDimitry Andric MachineFunctionProperties::Property::IsSSA); 520b57cec5SDimitry Andric } 530b57cec5SDimitry Andric 54349cc55cSDimitry Andric void getAnalysisUsage(AnalysisUsage &AU) const override { 55349cc55cSDimitry Andric AU.setPreservesCFG(); 56349cc55cSDimitry Andric MachineFunctionPass::getAnalysisUsage(AU); 57349cc55cSDimitry Andric } 58349cc55cSDimitry Andric 590b57cec5SDimitry Andric StringRef getPassName() const override { 600b57cec5SDimitry Andric return RISCV_MERGE_BASE_OFFSET_NAME; 610b57cec5SDimitry Andric } 620b57cec5SDimitry Andric }; 630b57cec5SDimitry Andric } // end anonymous namespace 640b57cec5SDimitry Andric 650b57cec5SDimitry Andric char RISCVMergeBaseOffsetOpt::ID = 0; 66e8d8bef9SDimitry Andric INITIALIZE_PASS(RISCVMergeBaseOffsetOpt, DEBUG_TYPE, 670b57cec5SDimitry Andric RISCV_MERGE_BASE_OFFSET_NAME, false, false) 680b57cec5SDimitry Andric 
// Detect either of the patterns:
//
// 1. (medlow pattern):
//   lui   vreg1, %hi(s)
//   addi  vreg2, vreg1, %lo(s)
//
// 2. (medany pattern):
// .Lpcrel_hi1:
//   auipc vreg1, %pcrel_hi(s)
//   addi  vreg2, vreg1, %pcrel_lo(.Lpcrel_hi1)
//
// The pattern is only accepted if:
//    1) The first instruction has only one use, which is the ADDI.
//    2) The address operands have the appropriate type, reflecting the
//       lowering of a global address or constant pool using medlow or medany.
//    3) The offset value in the Global Address or Constant Pool is 0.
bool RISCVMergeBaseOffsetOpt::detectFoldable(MachineInstr &Hi,
                                             MachineInstr *&Lo) {
  if (Hi.getOpcode() != RISCV::LUI && Hi.getOpcode() != RISCV::AUIPC &&
      Hi.getOpcode() != RISCV::PseudoMovAddr)
    return false;

  // Operand 1 of the high-part instruction must carry the matching relocation
  // flag for its opcode: %pcrel_hi for AUIPC, %hi for LUI/PseudoMovAddr.
  const MachineOperand &HiOp1 = Hi.getOperand(1);
  unsigned ExpectedFlags =
      Hi.getOpcode() == RISCV::AUIPC ? RISCVII::MO_PCREL_HI : RISCVII::MO_HI;
  if (HiOp1.getTargetFlags() != ExpectedFlags)
    return false;

  // Only fold addresses of globals, constant pools and block addresses, and
  // only while the symbol still has a zero offset (nothing folded yet).
  if (!(HiOp1.isGlobal() || HiOp1.isCPI() || HiOp1.isBlockAddress()) ||
      HiOp1.getOffset() != 0)
    return false;

  if (Hi.getOpcode() == RISCV::PseudoMovAddr) {
    // Most of the code should handle it correctly without modification by
    // setting Lo and Hi both point to PseudoMovAddr
    Lo = &Hi;
  } else {
    // The high part must feed exactly one instruction, and that instruction
    // must be the ADDI materializing the low part.
    Register HiDestReg = Hi.getOperand(0).getReg();
    if (!MRI->hasOneUse(HiDestReg))
      return false;

    Lo = &*MRI->use_instr_begin(HiDestReg);
    if (Lo->getOpcode() != RISCV::ADDI)
      return false;
  }

  // Validate the low-part operand: %lo(symbol) for LUI/PseudoMovAddr, or a
  // %pcrel_lo MCSymbol (referencing the AUIPC's label) for medany.
  const MachineOperand &LoOp2 = Lo->getOperand(2);
  if (Hi.getOpcode() == RISCV::LUI || Hi.getOpcode() == RISCV::PseudoMovAddr) {
    if (LoOp2.getTargetFlags() != RISCVII::MO_LO ||
        !(LoOp2.isGlobal() || LoOp2.isCPI() || LoOp2.isBlockAddress()) ||
        LoOp2.getOffset() != 0)
      return false;
  } else {
    assert(Hi.getOpcode() == RISCV::AUIPC);
    if (LoOp2.getTargetFlags() != RISCVII::MO_PCREL_LO ||
        LoOp2.getType() != MachineOperand::MO_MCSymbol)
      return false;
  }

  if (HiOp1.isGlobal()) {
    LLVM_DEBUG(dbgs() << "  Found lowered global address: "
                      << *HiOp1.getGlobal() << "\n");
  } else if (HiOp1.isBlockAddress()) {
    LLVM_DEBUG(dbgs() << "  Found lowered basic address: "
                      << *HiOp1.getBlockAddress() << "\n");
  } else if (HiOp1.isCPI()) {
    LLVM_DEBUG(dbgs() << "  Found lowered constant pool: " << HiOp1.getIndex()
                      << "\n");
  }

  return true;
}

// Update the offset in Hi and Lo instructions.
// Delete the tail instruction and update all the uses to use the
// output from Lo.
void RISCVMergeBaseOffsetOpt::foldOffset(MachineInstr &Hi, MachineInstr &Lo,
                                         MachineInstr &Tail, int64_t Offset) {
  assert(isInt<32>(Offset) && "Unexpected offset");
  // Put the offset back in Hi and the Lo
  Hi.getOperand(1).setOffset(Offset);
  if (Hi.getOpcode() != RISCV::AUIPC)
    Lo.getOperand(2).setOffset(Offset);
  // Delete the tail instruction.
  // Lo's def replaces Tail's def everywhere, so Lo's register class must be
  // constrained to satisfy every constraint Tail's uses imposed.
  MRI->constrainRegClass(Lo.getOperand(0).getReg(),
                         MRI->getRegClass(Tail.getOperand(0).getReg()));
  MRI->replaceRegWith(Tail.getOperand(0).getReg(), Lo.getOperand(0).getReg());
  Tail.eraseFromParent();
  LLVM_DEBUG(dbgs() << "  Merged offset " << Offset << " into base.\n"
                    << "     " << Hi << "     " << Lo;);
}

// Detect patterns for large offsets that are passed into an ADD instruction.
// If the pattern is found, updates the offset in Hi and Lo instructions
// and deletes TailAdd and the instructions that produced the offset.
//
// Base address lowering is of the form:
//   Hi:  lui   vreg1, %hi(s)
//   Lo:  addi  vreg2, vreg1, %lo(s)
//             /                                  \
//            /   The large offset can be of two forms:   \
//  1) Offset that has non zero bits in lower   2) Offset that has non zero
//     12 bits and upper 20 bits                   bits in upper 20 bits only
//   OffsetLui:  lui  vreg3, 4
//   OffsetTail: addi voff, vreg3, 188          OffsetTail: lui voff, 128
//            \                                     /
//             \                                   /
//              TailAdd: add  vreg4, vreg2, voff
bool RISCVMergeBaseOffsetOpt::foldLargeOffset(MachineInstr &Hi,
                                              MachineInstr &Lo,
                                              MachineInstr &TailAdd,
                                              Register GAReg) {
  assert((TailAdd.getOpcode() == RISCV::ADD) && "Expected ADD instruction!");
  Register Rs = TailAdd.getOperand(1).getReg();
  Register Rt = TailAdd.getOperand(2).getReg();
  // ADD is commutative: the address may be either source operand, the other
  // one computes the offset.
  Register Reg = Rs == GAReg ? Rt : Rs;

  // Can't fold if the register has more than one use.
  if (!Reg.isVirtual() || !MRI->hasOneUse(Reg))
    return false;
  // This can point to an ADDI(W) or a LUI:
  MachineInstr &OffsetTail = *MRI->getVRegDef(Reg);
  if (OffsetTail.getOpcode() == RISCV::ADDI ||
      OffsetTail.getOpcode() == RISCV::ADDIW) {
    // The offset value has non zero bits in both %hi and %lo parts.
    // Detect an ADDI that feeds from a LUI instruction.
    MachineOperand &AddiImmOp = OffsetTail.getOperand(2);
    if (AddiImmOp.getTargetFlags() != RISCVII::MO_None)
      return false;
    Register AddiReg = OffsetTail.getOperand(1).getReg();
    int64_t OffLo = AddiImmOp.getImm();

    // Handle rs1 of ADDI is X0: the offset is just the simm12 immediate and
    // there is no feeding LUI to look through or erase.
    if (AddiReg == RISCV::X0) {
      LLVM_DEBUG(dbgs() << "  Offset Instrs: " << OffsetTail);
      foldOffset(Hi, Lo, TailAdd, OffLo);
      OffsetTail.eraseFromParent();
      return true;
    }

    MachineInstr &OffsetLui = *MRI->getVRegDef(AddiReg);
    MachineOperand &LuiImmOp = OffsetLui.getOperand(1);
    if (OffsetLui.getOpcode() != RISCV::LUI ||
        LuiImmOp.getTargetFlags() != RISCVII::MO_None ||
        !MRI->hasOneUse(OffsetLui.getOperand(0).getReg()))
      return false;
    int64_t Offset = SignExtend64<32>(LuiImmOp.getImm() << 12);
    Offset += OffLo;
    // RV32 ignores the upper 32 bits. ADDIW sign extends the result.
    if (!ST->is64Bit() || OffsetTail.getOpcode() == RISCV::ADDIW)
      Offset = SignExtend64<32>(Offset);
    // We can only fold simm32 offsets.
    if (!isInt<32>(Offset))
      return false;
    LLVM_DEBUG(dbgs() << "  Offset Instrs: " << OffsetTail
                      << "                 " << OffsetLui);
    foldOffset(Hi, Lo, TailAdd, Offset);
    OffsetTail.eraseFromParent();
    OffsetLui.eraseFromParent();
    return true;
  } else if (OffsetTail.getOpcode() == RISCV::LUI) {
    // The offset value has all zero bits in the lower 12 bits. Only LUI
    // exists.
    LLVM_DEBUG(dbgs() << "  Offset Instr: " << OffsetTail);
    int64_t Offset = SignExtend64<32>(OffsetTail.getOperand(1).getImm() << 12);
    foldOffset(Hi, Lo, TailAdd, Offset);
    OffsetTail.eraseFromParent();
    return true;
  }
  return false;
}

// Detect patterns for offsets that are passed into a SHXADD instruction.
// The offset has 1, 2, or 3 trailing zeros and fits in simm13, simm14, simm15.
// The constant is created with addi voff, x0, C, and shXadd is used to
// fill in the trailing zeros and do the addition.
// If the pattern is found, updates the offset in Hi and Lo instructions
// and deletes TailShXAdd and the instructions that produced the offset.
25181ad6265SDimitry Andric // 252bdd1243dSDimitry Andric // Hi: lui vreg1, %hi(s) 253bdd1243dSDimitry Andric // Lo: addi vreg2, vreg1, %lo(s) 25481ad6265SDimitry Andric // OffsetTail: addi voff, x0, C 25581ad6265SDimitry Andric // TailAdd: shXadd vreg4, voff, vreg2 256bdd1243dSDimitry Andric bool RISCVMergeBaseOffsetOpt::foldShiftedOffset(MachineInstr &Hi, 257bdd1243dSDimitry Andric MachineInstr &Lo, 258bdd1243dSDimitry Andric MachineInstr &TailShXAdd, 259bdd1243dSDimitry Andric Register GAReg) { 26081ad6265SDimitry Andric assert((TailShXAdd.getOpcode() == RISCV::SH1ADD || 26181ad6265SDimitry Andric TailShXAdd.getOpcode() == RISCV::SH2ADD || 26281ad6265SDimitry Andric TailShXAdd.getOpcode() == RISCV::SH3ADD) && 26381ad6265SDimitry Andric "Expected SHXADD instruction!"); 26481ad6265SDimitry Andric 26581ad6265SDimitry Andric if (GAReg != TailShXAdd.getOperand(2).getReg()) 26681ad6265SDimitry Andric return false; 26781ad6265SDimitry Andric 2687a6dacacSDimitry Andric // The first source is the shifted operand. 2697a6dacacSDimitry Andric Register Rs1 = TailShXAdd.getOperand(1).getReg(); 2707a6dacacSDimitry Andric 27181ad6265SDimitry Andric // Can't fold if the register has more than one use. 2727a6dacacSDimitry Andric if (!Rs1.isVirtual() || !MRI->hasOneUse(Rs1)) 27381ad6265SDimitry Andric return false; 27481ad6265SDimitry Andric // This can point to an ADDI X0, C. 
27581ad6265SDimitry Andric MachineInstr &OffsetTail = *MRI->getVRegDef(Rs1); 27681ad6265SDimitry Andric if (OffsetTail.getOpcode() != RISCV::ADDI) 27781ad6265SDimitry Andric return false; 27881ad6265SDimitry Andric if (!OffsetTail.getOperand(1).isReg() || 27981ad6265SDimitry Andric OffsetTail.getOperand(1).getReg() != RISCV::X0 || 28081ad6265SDimitry Andric !OffsetTail.getOperand(2).isImm()) 28181ad6265SDimitry Andric return false; 28281ad6265SDimitry Andric 283bdd1243dSDimitry Andric int64_t Offset = OffsetTail.getOperand(2).getImm(); 28481ad6265SDimitry Andric assert(isInt<12>(Offset) && "Unexpected offset"); 28581ad6265SDimitry Andric 28681ad6265SDimitry Andric unsigned ShAmt; 28781ad6265SDimitry Andric switch (TailShXAdd.getOpcode()) { 28881ad6265SDimitry Andric default: llvm_unreachable("Unexpected opcode"); 28981ad6265SDimitry Andric case RISCV::SH1ADD: ShAmt = 1; break; 29081ad6265SDimitry Andric case RISCV::SH2ADD: ShAmt = 2; break; 29181ad6265SDimitry Andric case RISCV::SH3ADD: ShAmt = 3; break; 29281ad6265SDimitry Andric } 29381ad6265SDimitry Andric 29481ad6265SDimitry Andric Offset = (uint64_t)Offset << ShAmt; 29581ad6265SDimitry Andric 29681ad6265SDimitry Andric LLVM_DEBUG(dbgs() << " Offset Instr: " << OffsetTail); 297bdd1243dSDimitry Andric foldOffset(Hi, Lo, TailShXAdd, Offset); 298bdd1243dSDimitry Andric OffsetTail.eraseFromParent(); 29981ad6265SDimitry Andric return true; 30081ad6265SDimitry Andric } 30181ad6265SDimitry Andric 302bdd1243dSDimitry Andric bool RISCVMergeBaseOffsetOpt::detectAndFoldOffset(MachineInstr &Hi, 303bdd1243dSDimitry Andric MachineInstr &Lo) { 304bdd1243dSDimitry Andric Register DestReg = Lo.getOperand(0).getReg(); 30581ad6265SDimitry Andric 306bdd1243dSDimitry Andric // Look for arithmetic instructions we can get an offset from. 30781ad6265SDimitry Andric // We might be able to remove the arithmetic instructions by folding the 30881ad6265SDimitry Andric // offset into the LUI+ADDI. 
309bdd1243dSDimitry Andric if (!MRI->hasOneUse(DestReg)) 310bdd1243dSDimitry Andric return false; 311bdd1243dSDimitry Andric 312bdd1243dSDimitry Andric // Lo has only one use. 31381ad6265SDimitry Andric MachineInstr &Tail = *MRI->use_instr_begin(DestReg); 3140b57cec5SDimitry Andric switch (Tail.getOpcode()) { 3150b57cec5SDimitry Andric default: 3160b57cec5SDimitry Andric LLVM_DEBUG(dbgs() << "Don't know how to get offset from this instr:" 3170b57cec5SDimitry Andric << Tail); 31881ad6265SDimitry Andric break; 3190b57cec5SDimitry Andric case RISCV::ADDI: { 3200b57cec5SDimitry Andric // Offset is simply an immediate operand. 3210b57cec5SDimitry Andric int64_t Offset = Tail.getOperand(2).getImm(); 32281ad6265SDimitry Andric 32381ad6265SDimitry Andric // We might have two ADDIs in a row. 32481ad6265SDimitry Andric Register TailDestReg = Tail.getOperand(0).getReg(); 32581ad6265SDimitry Andric if (MRI->hasOneUse(TailDestReg)) { 32681ad6265SDimitry Andric MachineInstr &TailTail = *MRI->use_instr_begin(TailDestReg); 32781ad6265SDimitry Andric if (TailTail.getOpcode() == RISCV::ADDI) { 32881ad6265SDimitry Andric Offset += TailTail.getOperand(2).getImm(); 32981ad6265SDimitry Andric LLVM_DEBUG(dbgs() << " Offset Instrs: " << Tail << TailTail); 330bdd1243dSDimitry Andric foldOffset(Hi, Lo, TailTail, Offset); 331bdd1243dSDimitry Andric Tail.eraseFromParent(); 33281ad6265SDimitry Andric return true; 33381ad6265SDimitry Andric } 33481ad6265SDimitry Andric } 33581ad6265SDimitry Andric 3360b57cec5SDimitry Andric LLVM_DEBUG(dbgs() << " Offset Instr: " << Tail); 337bdd1243dSDimitry Andric foldOffset(Hi, Lo, Tail, Offset); 3380b57cec5SDimitry Andric return true; 339349cc55cSDimitry Andric } 340bdd1243dSDimitry Andric case RISCV::ADD: 3410b57cec5SDimitry Andric // The offset is too large to fit in the immediate field of ADDI. 
3420b57cec5SDimitry Andric // This can be in two forms: 3430b57cec5SDimitry Andric // 1) LUI hi_Offset followed by: 3440b57cec5SDimitry Andric // ADDI lo_offset 3450b57cec5SDimitry Andric // This happens in case the offset has non zero bits in 3460b57cec5SDimitry Andric // both hi 20 and lo 12 bits. 3470b57cec5SDimitry Andric // 2) LUI (offset20) 3480b57cec5SDimitry Andric // This happens in case the lower 12 bits of the offset are zeros. 349bdd1243dSDimitry Andric return foldLargeOffset(Hi, Lo, Tail, DestReg); 35081ad6265SDimitry Andric case RISCV::SH1ADD: 35181ad6265SDimitry Andric case RISCV::SH2ADD: 352bdd1243dSDimitry Andric case RISCV::SH3ADD: 35381ad6265SDimitry Andric // The offset is too large to fit in the immediate field of ADDI. 35481ad6265SDimitry Andric // It may be encoded as (SH2ADD (ADDI X0, C), DestReg) or 35581ad6265SDimitry Andric // (SH3ADD (ADDI X0, C), DestReg). 356bdd1243dSDimitry Andric return foldShiftedOffset(Hi, Lo, Tail, DestReg); 35781ad6265SDimitry Andric } 35881ad6265SDimitry Andric 359bdd1243dSDimitry Andric return false; 360bdd1243dSDimitry Andric } 361bdd1243dSDimitry Andric 362bdd1243dSDimitry Andric bool RISCVMergeBaseOffsetOpt::foldIntoMemoryOps(MachineInstr &Hi, 363bdd1243dSDimitry Andric MachineInstr &Lo) { 364bdd1243dSDimitry Andric Register DestReg = Lo.getOperand(0).getReg(); 365bdd1243dSDimitry Andric 366bdd1243dSDimitry Andric // If all the uses are memory ops with the same offset, we can transform: 367bdd1243dSDimitry Andric // 368bdd1243dSDimitry Andric // 1. (medlow pattern): 369bdd1243dSDimitry Andric // Hi: lui vreg1, %hi(foo) ---> lui vreg1, %hi(foo+8) 370bdd1243dSDimitry Andric // Lo: addi vreg2, vreg1, %lo(foo) ---> lw vreg3, lo(foo+8)(vreg1) 371bdd1243dSDimitry Andric // Tail: lw vreg3, 8(vreg2) 372bdd1243dSDimitry Andric // 373bdd1243dSDimitry Andric // 2. 
(medany pattern): 374bdd1243dSDimitry Andric // Hi: 1:auipc vreg1, %pcrel_hi(s) ---> auipc vreg1, %pcrel_hi(foo+8) 375bdd1243dSDimitry Andric // Lo: addi vreg2, vreg1, %pcrel_lo(1b) ---> lw vreg3, %pcrel_lo(1b)(vreg1) 37681ad6265SDimitry Andric // Tail: lw vreg3, 8(vreg2) 37781ad6265SDimitry Andric 378bdd1243dSDimitry Andric std::optional<int64_t> CommonOffset; 3797a6dacacSDimitry Andric DenseMap<const MachineInstr *, SmallVector<unsigned>> 3807a6dacacSDimitry Andric InlineAsmMemoryOpIndexesMap; 38181ad6265SDimitry Andric for (const MachineInstr &UseMI : MRI->use_instructions(DestReg)) { 38281ad6265SDimitry Andric switch (UseMI.getOpcode()) { 38381ad6265SDimitry Andric default: 38481ad6265SDimitry Andric LLVM_DEBUG(dbgs() << "Not a load or store instruction: " << UseMI); 38581ad6265SDimitry Andric return false; 3860b57cec5SDimitry Andric case RISCV::LB: 3870b57cec5SDimitry Andric case RISCV::LH: 3880b57cec5SDimitry Andric case RISCV::LW: 3890b57cec5SDimitry Andric case RISCV::LBU: 3900b57cec5SDimitry Andric case RISCV::LHU: 3910b57cec5SDimitry Andric case RISCV::LWU: 3920b57cec5SDimitry Andric case RISCV::LD: 393e8d8bef9SDimitry Andric case RISCV::FLH: 3940b57cec5SDimitry Andric case RISCV::FLW: 3950b57cec5SDimitry Andric case RISCV::FLD: 3960b57cec5SDimitry Andric case RISCV::SB: 3970b57cec5SDimitry Andric case RISCV::SH: 3980b57cec5SDimitry Andric case RISCV::SW: 3990b57cec5SDimitry Andric case RISCV::SD: 400e8d8bef9SDimitry Andric case RISCV::FSH: 4010b57cec5SDimitry Andric case RISCV::FSW: 4020b57cec5SDimitry Andric case RISCV::FSD: { 40381ad6265SDimitry Andric if (UseMI.getOperand(1).isFI()) 4040b57cec5SDimitry Andric return false; 405bdd1243dSDimitry Andric // Register defined by Lo should not be the value register. 
40681ad6265SDimitry Andric if (DestReg == UseMI.getOperand(0).getReg()) 4070b57cec5SDimitry Andric return false; 40881ad6265SDimitry Andric assert(DestReg == UseMI.getOperand(1).getReg() && 40981ad6265SDimitry Andric "Expected base address use"); 41081ad6265SDimitry Andric // All load/store instructions must use the same offset. 41181ad6265SDimitry Andric int64_t Offset = UseMI.getOperand(2).getImm(); 41281ad6265SDimitry Andric if (CommonOffset && Offset != CommonOffset) 41381ad6265SDimitry Andric return false; 41481ad6265SDimitry Andric CommonOffset = Offset; 4157a6dacacSDimitry Andric break; 4167a6dacacSDimitry Andric } 4177a6dacacSDimitry Andric case RISCV::INLINEASM: 4187a6dacacSDimitry Andric case RISCV::INLINEASM_BR: { 4197a6dacacSDimitry Andric SmallVector<unsigned> InlineAsmMemoryOpIndexes; 4207a6dacacSDimitry Andric unsigned NumOps = 0; 4217a6dacacSDimitry Andric for (unsigned I = InlineAsm::MIOp_FirstOperand; 4227a6dacacSDimitry Andric I < UseMI.getNumOperands(); I += 1 + NumOps) { 4237a6dacacSDimitry Andric const MachineOperand &FlagsMO = UseMI.getOperand(I); 4247a6dacacSDimitry Andric // Should be an imm. 4257a6dacacSDimitry Andric if (!FlagsMO.isImm()) 4267a6dacacSDimitry Andric continue; 4277a6dacacSDimitry Andric 4287a6dacacSDimitry Andric const InlineAsm::Flag Flags(FlagsMO.getImm()); 4297a6dacacSDimitry Andric NumOps = Flags.getNumOperandRegisters(); 4307a6dacacSDimitry Andric 4317a6dacacSDimitry Andric // Memory constraints have two operands. 432*52418fc2SDimitry Andric if (NumOps != 2 || !Flags.isMemKind()) { 433*52418fc2SDimitry Andric // If the register is used by something other than a memory contraint, 434*52418fc2SDimitry Andric // we should not fold. 
435*52418fc2SDimitry Andric for (unsigned J = 0; J < NumOps; ++J) { 436*52418fc2SDimitry Andric const MachineOperand &MO = UseMI.getOperand(I + 1 + J); 437*52418fc2SDimitry Andric if (MO.isReg() && MO.getReg() == DestReg) 438*52418fc2SDimitry Andric return false; 439*52418fc2SDimitry Andric } 4407a6dacacSDimitry Andric continue; 441*52418fc2SDimitry Andric } 4427a6dacacSDimitry Andric 4437a6dacacSDimitry Andric // We can't do this for constraint A because AMO instructions don't have 4447a6dacacSDimitry Andric // an immediate offset field. 4457a6dacacSDimitry Andric if (Flags.getMemoryConstraintID() == InlineAsm::ConstraintCode::A) 4467a6dacacSDimitry Andric return false; 4477a6dacacSDimitry Andric 4487a6dacacSDimitry Andric const MachineOperand &AddrMO = UseMI.getOperand(I + 1); 4497a6dacacSDimitry Andric if (!AddrMO.isReg() || AddrMO.getReg() != DestReg) 4507a6dacacSDimitry Andric continue; 4517a6dacacSDimitry Andric 4527a6dacacSDimitry Andric const MachineOperand &OffsetMO = UseMI.getOperand(I + 2); 4537a6dacacSDimitry Andric if (!OffsetMO.isImm()) 4547a6dacacSDimitry Andric continue; 4557a6dacacSDimitry Andric 4567a6dacacSDimitry Andric // All inline asm memory operands must use the same offset. 4577a6dacacSDimitry Andric int64_t Offset = OffsetMO.getImm(); 4587a6dacacSDimitry Andric if (CommonOffset && Offset != CommonOffset) 4597a6dacacSDimitry Andric return false; 4607a6dacacSDimitry Andric CommonOffset = Offset; 4617a6dacacSDimitry Andric InlineAsmMemoryOpIndexes.push_back(I + 1); 4627a6dacacSDimitry Andric } 4637a6dacacSDimitry Andric InlineAsmMemoryOpIndexesMap.insert( 4647a6dacacSDimitry Andric std::make_pair(&UseMI, InlineAsmMemoryOpIndexes)); 4657a6dacacSDimitry Andric break; 46681ad6265SDimitry Andric } 46781ad6265SDimitry Andric } 46881ad6265SDimitry Andric } 46981ad6265SDimitry Andric 47081ad6265SDimitry Andric // We found a common offset. 4710b57cec5SDimitry Andric // Update the offsets in global address lowering. 
472bdd1243dSDimitry Andric // We may have already folded some arithmetic so we need to add to any 473bdd1243dSDimitry Andric // existing offset. 474bdd1243dSDimitry Andric int64_t NewOffset = Hi.getOperand(1).getOffset() + *CommonOffset; 475bdd1243dSDimitry Andric // RV32 ignores the upper 32 bits. 476bdd1243dSDimitry Andric if (!ST->is64Bit()) 477bdd1243dSDimitry Andric NewOffset = SignExtend64<32>(NewOffset); 478bdd1243dSDimitry Andric // We can only fold simm32 offsets. 479bdd1243dSDimitry Andric if (!isInt<32>(NewOffset)) 480bdd1243dSDimitry Andric return false; 481bdd1243dSDimitry Andric 482bdd1243dSDimitry Andric Hi.getOperand(1).setOffset(NewOffset); 483bdd1243dSDimitry Andric MachineOperand &ImmOp = Lo.getOperand(2); 4840fca6ea1SDimitry Andric // Expand PseudoMovAddr into LUI 4850fca6ea1SDimitry Andric if (Hi.getOpcode() == RISCV::PseudoMovAddr) { 4860fca6ea1SDimitry Andric auto *TII = ST->getInstrInfo(); 4870fca6ea1SDimitry Andric Hi.setDesc(TII->get(RISCV::LUI)); 4880fca6ea1SDimitry Andric Hi.removeOperand(2); 4890fca6ea1SDimitry Andric } 4900fca6ea1SDimitry Andric 491bdd1243dSDimitry Andric if (Hi.getOpcode() != RISCV::AUIPC) 492bdd1243dSDimitry Andric ImmOp.setOffset(NewOffset); 49381ad6265SDimitry Andric 49481ad6265SDimitry Andric // Update the immediate in the load/store instructions to add the offset. 
49581ad6265SDimitry Andric for (MachineInstr &UseMI : 49681ad6265SDimitry Andric llvm::make_early_inc_range(MRI->use_instructions(DestReg))) { 4977a6dacacSDimitry Andric if (UseMI.getOpcode() == RISCV::INLINEASM || 4987a6dacacSDimitry Andric UseMI.getOpcode() == RISCV::INLINEASM_BR) { 4997a6dacacSDimitry Andric auto &InlineAsmMemoryOpIndexes = InlineAsmMemoryOpIndexesMap[&UseMI]; 5007a6dacacSDimitry Andric for (unsigned I : InlineAsmMemoryOpIndexes) { 5017a6dacacSDimitry Andric MachineOperand &MO = UseMI.getOperand(I + 1); 5027a6dacacSDimitry Andric switch (ImmOp.getType()) { 5037a6dacacSDimitry Andric case MachineOperand::MO_GlobalAddress: 5047a6dacacSDimitry Andric MO.ChangeToGA(ImmOp.getGlobal(), ImmOp.getOffset(), 5057a6dacacSDimitry Andric ImmOp.getTargetFlags()); 5067a6dacacSDimitry Andric break; 5077a6dacacSDimitry Andric case MachineOperand::MO_MCSymbol: 5087a6dacacSDimitry Andric MO.ChangeToMCSymbol(ImmOp.getMCSymbol(), ImmOp.getTargetFlags()); 5097a6dacacSDimitry Andric MO.setOffset(ImmOp.getOffset()); 5107a6dacacSDimitry Andric break; 5117a6dacacSDimitry Andric case MachineOperand::MO_BlockAddress: 5127a6dacacSDimitry Andric MO.ChangeToBA(ImmOp.getBlockAddress(), ImmOp.getOffset(), 5137a6dacacSDimitry Andric ImmOp.getTargetFlags()); 5147a6dacacSDimitry Andric break; 5157a6dacacSDimitry Andric default: 5167a6dacacSDimitry Andric report_fatal_error("unsupported machine operand type"); 5177a6dacacSDimitry Andric break; 5187a6dacacSDimitry Andric } 5197a6dacacSDimitry Andric } 5207a6dacacSDimitry Andric } else { 52181ad6265SDimitry Andric UseMI.removeOperand(2); 52281ad6265SDimitry Andric UseMI.addOperand(ImmOp); 5237a6dacacSDimitry Andric } 52481ad6265SDimitry Andric } 52581ad6265SDimitry Andric 5260fca6ea1SDimitry Andric // Prevent Lo (originally PseudoMovAddr, which is also pointed by Hi) from 5270fca6ea1SDimitry Andric // being erased 5280fca6ea1SDimitry Andric if (&Lo == &Hi) 5290fca6ea1SDimitry Andric return true; 5300fca6ea1SDimitry Andric 
5317a6dacacSDimitry Andric MRI->replaceRegWith(Lo.getOperand(0).getReg(), Hi.getOperand(0).getReg()); 532bdd1243dSDimitry Andric Lo.eraseFromParent(); 5330b57cec5SDimitry Andric return true; 534349cc55cSDimitry Andric } 5350b57cec5SDimitry Andric 5360b57cec5SDimitry Andric bool RISCVMergeBaseOffsetOpt::runOnMachineFunction(MachineFunction &Fn) { 5370b57cec5SDimitry Andric if (skipFunction(Fn.getFunction())) 5380b57cec5SDimitry Andric return false; 5390b57cec5SDimitry Andric 54081ad6265SDimitry Andric ST = &Fn.getSubtarget<RISCVSubtarget>(); 54181ad6265SDimitry Andric 542349cc55cSDimitry Andric bool MadeChange = false; 5430b57cec5SDimitry Andric MRI = &Fn.getRegInfo(); 5440b57cec5SDimitry Andric for (MachineBasicBlock &MBB : Fn) { 5450b57cec5SDimitry Andric LLVM_DEBUG(dbgs() << "MBB: " << MBB.getName() << "\n"); 546bdd1243dSDimitry Andric for (MachineInstr &Hi : MBB) { 547bdd1243dSDimitry Andric MachineInstr *Lo = nullptr; 548bdd1243dSDimitry Andric if (!detectFoldable(Hi, Lo)) 5490b57cec5SDimitry Andric continue; 550bdd1243dSDimitry Andric MadeChange |= detectAndFoldOffset(Hi, *Lo); 551bdd1243dSDimitry Andric MadeChange |= foldIntoMemoryOps(Hi, *Lo); 5520b57cec5SDimitry Andric } 5530b57cec5SDimitry Andric } 554bdd1243dSDimitry Andric 555349cc55cSDimitry Andric return MadeChange; 5560b57cec5SDimitry Andric } 5570b57cec5SDimitry Andric 5580b57cec5SDimitry Andric /// Returns an instance of the Merge Base Offset Optimization pass. 5590b57cec5SDimitry Andric FunctionPass *llvm::createRISCVMergeBaseOffsetOptPass() { 5600b57cec5SDimitry Andric return new RISCVMergeBaseOffsetOpt(); 5610b57cec5SDimitry Andric } 562