//===- LoadStoreOpt.cpp ----------- Generic memory optimizations -*- C++ -*-==//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
/// \file
/// This file implements the LoadStoreOpt optimization pass.
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/GlobalISel/LoadStoreOpt.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/MemoryLocation.h"
#include "llvm/Analysis/OptimizationRemarkEmitter.h"
#include "llvm/CodeGen/GlobalISel/GenericMachineInstrs.h"
#include "llvm/CodeGen/GlobalISel/LegalizerInfo.h"
#include "llvm/CodeGen/GlobalISel/MIPatternMatch.h"
#include "llvm/CodeGen/GlobalISel/Utils.h"
#include "llvm/CodeGen/LowLevelTypeUtils.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineOptimizationRemarkEmitter.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Register.h"
#include "llvm/CodeGen/TargetLowering.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/InitializePasses.h"
#include "llvm/Support/AtomicOrdering.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MathExtras.h"
#include <algorithm>

#define DEBUG_TYPE "loadstore-opt"

using namespace llvm;
using namespace ore;
using namespace MIPatternMatch;

STATISTIC(NumStoresMerged, "Number of stores merged");

const unsigned MaxStoreSizeToForm = 128;

char LoadStoreOpt::ID = 0;
INITIALIZE_PASS_BEGIN(LoadStoreOpt, DEBUG_TYPE, "Generic memory optimizations",
                      false, false)
INITIALIZE_PASS_END(LoadStoreOpt, DEBUG_TYPE, "Generic memory optimizations",
                    false, false)

LoadStoreOpt::LoadStoreOpt(std::function<bool(const MachineFunction &)> F)
    : MachineFunctionPass(ID), DoNotRunPass(F) {}

LoadStoreOpt::LoadStoreOpt()
    : LoadStoreOpt([](const MachineFunction &) { return false; }) {}

void LoadStoreOpt::init(MachineFunction &MF) {
  this->MF = &MF;
  MRI = &MF.getRegInfo();
  AA = &getAnalysis<AAResultsWrapperPass>().getAAResults();
  TLI = MF.getSubtarget().getTargetLowering();
  LI = MF.getSubtarget().getLegalizerInfo();
  Builder.setMF(MF);
  IsPreLegalizer = !MF.getProperties().hasProperty(
      MachineFunctionProperties::Property::Legalized);
  InstsToErase.clear();
}

void LoadStoreOpt::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<AAResultsWrapperPass>();
  AU.setPreservesAll();
  getSelectionDAGFallbackAnalysisUsage(AU);
  MachineFunctionPass::getAnalysisUsage(AU);
}

BaseIndexOffset GISelAddressing::getPointerInfo(Register Ptr,
                                                MachineRegisterInfo &MRI) {
  BaseIndexOffset Info;
  Register PtrAddRHS;
  Register BaseReg;
  if (!mi_match(Ptr, MRI, m_GPtrAdd(m_Reg(BaseReg), m_Reg(PtrAddRHS)))) {
    Info.setBase(Ptr);
    Info.setOffset(0);
    return Info;
  }
  Info.setBase(BaseReg);
  auto RHSCst = getIConstantVRegValWithLookThrough(PtrAddRHS, MRI);
  if (RHSCst)
    Info.setOffset(RHSCst->Value.getSExtValue());

  // Just recognize a simple case for now. In future we'll need to match
  // indexing patterns for base + index + constant.
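  // For example (illustrative MIR, the register names are made up):
  //   %off:_(s64) = G_CONSTANT i64 16
  //   %ptr:_(p0) = G_PTR_ADD %base, %off
  // is decomposed into Base = %base, Index = %off and Offset = 16.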
  Info.setIndex(PtrAddRHS);
  return Info;
}

bool GISelAddressing::aliasIsKnownForLoadStore(const MachineInstr &MI1,
                                               const MachineInstr &MI2,
                                               bool &IsAlias,
                                               MachineRegisterInfo &MRI) {
  auto *LdSt1 = dyn_cast<GLoadStore>(&MI1);
  auto *LdSt2 = dyn_cast<GLoadStore>(&MI2);
  if (!LdSt1 || !LdSt2)
    return false;

  BaseIndexOffset BasePtr0 = getPointerInfo(LdSt1->getPointerReg(), MRI);
  BaseIndexOffset BasePtr1 = getPointerInfo(LdSt2->getPointerReg(), MRI);

  if (!BasePtr0.getBase().isValid() || !BasePtr1.getBase().isValid())
    return false;

  int64_t Size1 = LdSt1->getMemSize();
  int64_t Size2 = LdSt2->getMemSize();

  int64_t PtrDiff;
  if (BasePtr0.getBase() == BasePtr1.getBase() && BasePtr0.hasValidOffset() &&
      BasePtr1.hasValidOffset()) {
    PtrDiff = BasePtr1.getOffset() - BasePtr0.getOffset();
    // If the size of memory access is unknown, do not use it to do analysis.
    // One example of unknown size memory access is to load/store scalable
    // vector objects on the stack.
    // BasePtr1 is PtrDiff away from BasePtr0. They alias if none of the
    // following situations arise:
    if (PtrDiff >= 0 &&
        Size1 != static_cast<int64_t>(MemoryLocation::UnknownSize)) {
      // [----BasePtr0----]
      //                        [---BasePtr1--]
      // ========PtrDiff========>
      IsAlias = !(Size1 <= PtrDiff);
      return true;
    }
    if (PtrDiff < 0 &&
        Size2 != static_cast<int64_t>(MemoryLocation::UnknownSize)) {
      //                    [----BasePtr0----]
      // [---BasePtr1--]
      // =====(-PtrDiff)====>
      IsAlias = !((PtrDiff + Size2) <= 0);
      return true;
    }
    return false;
  }

  // If both BasePtr0 and BasePtr1 are FrameIndexes, we will not be
  // able to calculate their relative offset if at least one arises
  // from an alloca. However, these allocas cannot overlap and we
  // can infer there is no alias.
  auto *Base0Def = getDefIgnoringCopies(BasePtr0.getBase(), MRI);
  auto *Base1Def = getDefIgnoringCopies(BasePtr1.getBase(), MRI);
  if (!Base0Def || !Base1Def)
    return false; // Couldn't tell anything.
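
  // Beyond this point we only reason about bases defined by the same kind of
  // instruction: either both frame indices or both global values.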
  if (Base0Def->getOpcode() != Base1Def->getOpcode())
    return false;

  if (Base0Def->getOpcode() == TargetOpcode::G_FRAME_INDEX) {
    MachineFrameInfo &MFI = Base0Def->getMF()->getFrameInfo();
    // The bases are different frame indices, so we could not compute a
    // constant offset between them. A non-fixed stack object (an alloca or a
    // spill slot) never overlaps any other frame object, so if at least one of
    // the indices is non-fixed we know there is no alias; if both are fixed
    // objects, be conservative.
    if (Base0Def != Base1Def &&
        (!MFI.isFixedObjectIndex(Base0Def->getOperand(1).getIndex()) ||
         !MFI.isFixedObjectIndex(Base1Def->getOperand(1).getIndex()))) {
      IsAlias = false;
      return true;
    }
  }

  // This implementation is a lot more primitive than the SDAG one for now.
  // FIXME: what about constant pools?
  if (Base0Def->getOpcode() == TargetOpcode::G_GLOBAL_VALUE) {
    auto GV0 = Base0Def->getOperand(1).getGlobal();
    auto GV1 = Base1Def->getOperand(1).getGlobal();
    if (GV0 != GV1) {
      IsAlias = false;
      return true;
    }
  }

  // Can't tell anything about aliasing.
  return false;
}

bool GISelAddressing::instMayAlias(const MachineInstr &MI,
                                   const MachineInstr &Other,
                                   MachineRegisterInfo &MRI,
                                   AliasAnalysis *AA) {
  struct MemUseCharacteristics {
    bool IsVolatile;
    bool IsAtomic;
    Register BasePtr;
    int64_t Offset;
    uint64_t NumBytes;
    MachineMemOperand *MMO;
  };

  auto getCharacteristics =
      [&](const MachineInstr *MI) -> MemUseCharacteristics {
    if (const auto *LS = dyn_cast<GLoadStore>(MI)) {
      Register BaseReg;
      int64_t Offset = 0;
      // No pre/post-inc addressing modes are considered here, unlike in SDAG.
      if (!mi_match(LS->getPointerReg(), MRI,
                    m_GPtrAdd(m_Reg(BaseReg), m_ICst(Offset)))) {
        BaseReg = LS->getPointerReg();
        Offset = 0;
      }

      uint64_t Size = MemoryLocation::getSizeOrUnknown(
          LS->getMMO().getMemoryType().getSizeInBytes());
      return {LS->isVolatile(), LS->isAtomic(), BaseReg,
              Offset /*base offset*/, Size, &LS->getMMO()};
    }
    // FIXME: support recognizing lifetime instructions.
    // Default.
    return {false /*isvolatile*/,
            /*isAtomic*/ false, Register(),
            (int64_t)0 /*offset*/, 0 /*size*/,
            (MachineMemOperand *)nullptr};
  };
  MemUseCharacteristics MUC0 = getCharacteristics(&MI),
                        MUC1 = getCharacteristics(&Other);

  // If they are to the same address, then they must be aliases.
  if (MUC0.BasePtr.isValid() && MUC0.BasePtr == MUC1.BasePtr &&
      MUC0.Offset == MUC1.Offset)
    return true;

  // If they are both volatile then they cannot be reordered.
  if (MUC0.IsVolatile && MUC1.IsVolatile)
    return true;

  // Be conservative about atomics for the moment
  // TODO: This is way overconservative for unordered atomics (see D66309)
  if (MUC0.IsAtomic && MUC1.IsAtomic)
    return true;

  // If one operation reads from invariant memory, and the other may store, they
  // cannot alias.
  if (MUC0.MMO && MUC1.MMO) {
    if ((MUC0.MMO->isInvariant() && MUC1.MMO->isStore()) ||
        (MUC1.MMO->isInvariant() && MUC0.MMO->isStore()))
      return false;
  }

  // Try to prove that there is aliasing, or that there is no aliasing. Either
  // way, we can return now. If nothing can be proved, proceed with more tests.
  bool IsAlias;
  if (GISelAddressing::aliasIsKnownForLoadStore(MI, Other, IsAlias, MRI))
    return IsAlias;

  // The following all rely on MMO0 and MMO1 being valid.
  if (!MUC0.MMO || !MUC1.MMO)
    return true;

  // FIXME: port the alignment based alias analysis from SDAG's isAlias().
  int64_t SrcValOffset0 = MUC0.MMO->getOffset();
  int64_t SrcValOffset1 = MUC1.MMO->getOffset();
  uint64_t Size0 = MUC0.NumBytes;
  uint64_t Size1 = MUC1.NumBytes;
  if (AA && MUC0.MMO->getValue() && MUC1.MMO->getValue() &&
      Size0 != MemoryLocation::UnknownSize &&
      Size1 != MemoryLocation::UnknownSize) {
    // Use alias analysis information.
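    // Build conservative MemoryLocations anchored at the underlying IR values:
    // each one is extended to cover everything from the smaller of the two MMO
    // offsets up to the end of its own access, so a NoAlias answer on these
    // over-approximations proves the real accesses cannot overlap.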
    int64_t MinOffset = std::min(SrcValOffset0, SrcValOffset1);
    int64_t Overlap0 = Size0 + SrcValOffset0 - MinOffset;
    int64_t Overlap1 = Size1 + SrcValOffset1 - MinOffset;
    if (AA->isNoAlias(MemoryLocation(MUC0.MMO->getValue(), Overlap0,
                                     MUC0.MMO->getAAInfo()),
                      MemoryLocation(MUC1.MMO->getValue(), Overlap1,
                                     MUC1.MMO->getAAInfo())))
      return false;
  }

  // Otherwise we have to assume they alias.
  return true;
}

/// Returns true if the instruction creates an unavoidable hazard that
/// forces a boundary between store merge candidates.
static bool isInstHardMergeHazard(MachineInstr &MI) {
  return MI.hasUnmodeledSideEffects() || MI.hasOrderedMemoryRef();
}

bool LoadStoreOpt::mergeStores(SmallVectorImpl<GStore *> &StoresToMerge) {
  // Try to merge all the stores in the vector, splitting into separate segments
  // as necessary.
  assert(StoresToMerge.size() > 1 && "Expected multiple stores to merge");
  LLT OrigTy = MRI->getType(StoresToMerge[0]->getValueReg());
  LLT PtrTy = MRI->getType(StoresToMerge[0]->getPointerReg());
  unsigned AS = PtrTy.getAddressSpace();
  // Ensure the legal store info is computed for this address space.
  initializeStoreMergeTargetInfo(AS);
  const auto &LegalSizes = LegalStoreSizes[AS];

#ifndef NDEBUG
  for (auto *StoreMI : StoresToMerge)
    assert(MRI->getType(StoreMI->getValueReg()) == OrigTy);
#endif

  const auto &DL = MF->getFunction().getParent()->getDataLayout();
  bool AnyMerged = false;
  do {
    unsigned NumPow2 = llvm::bit_floor(StoresToMerge.size());
    unsigned MaxSizeBits = NumPow2 * OrigTy.getSizeInBits().getFixedValue();
    // Compute the biggest store we can generate to handle the number of stores.
    unsigned MergeSizeBits;
    for (MergeSizeBits = MaxSizeBits; MergeSizeBits > 1; MergeSizeBits /= 2) {
      LLT StoreTy = LLT::scalar(MergeSizeBits);
      EVT StoreEVT =
          getApproximateEVTForLLT(StoreTy, DL, MF->getFunction().getContext());
      if (LegalSizes.size() > MergeSizeBits && LegalSizes[MergeSizeBits] &&
          TLI->canMergeStoresTo(AS, StoreEVT, *MF) &&
          (TLI->isTypeLegal(StoreEVT)))
        break; // We can generate a MergeSize bits store.
    }
    if (MergeSizeBits <= OrigTy.getSizeInBits())
      return AnyMerged; // No greater merge.

    unsigned NumStoresToMerge = MergeSizeBits / OrigTy.getSizeInBits();
    // Perform the actual merging.
    SmallVector<GStore *, 8> SingleMergeStores(
        StoresToMerge.begin(), StoresToMerge.begin() + NumStoresToMerge);
    AnyMerged |= doSingleStoreMerge(SingleMergeStores);
    StoresToMerge.erase(StoresToMerge.begin(),
                        StoresToMerge.begin() + NumStoresToMerge);
  } while (StoresToMerge.size() > 1);
  return AnyMerged;
}

bool LoadStoreOpt::isLegalOrBeforeLegalizer(const LegalityQuery &Query,
                                            MachineFunction &MF) const {
  auto Action = LI->getAction(Query).Action;
  // If the instruction is unsupported, it can't be legalized at all.
  if (Action == LegalizeActions::Unsupported)
    return false;
  return IsPreLegalizer || Action == LegalizeAction::Legal;
}

bool LoadStoreOpt::doSingleStoreMerge(SmallVectorImpl<GStore *> &Stores) {
  assert(Stores.size() > 1);
  // We know that all the stores are consecutive and there are no aliasing
  // operations in the range. However, the values that are being stored may be
  // generated anywhere before each store. To ensure we have the values
  // available, we materialize the wide value and new store at the place of the
  // final store in the merge sequence.
  GStore *FirstStore = Stores[0];
  const unsigned NumStores = Stores.size();
  LLT SmallTy = MRI->getType(FirstStore->getValueReg());
  LLT WideValueTy =
      LLT::scalar(NumStores * SmallTy.getSizeInBits().getFixedValue());

  // For each store, compute pairwise merged debug locs.
  DebugLoc MergedLoc = Stores.front()->getDebugLoc();
  for (auto *Store : drop_begin(Stores))
    MergedLoc = DILocation::getMergedLocation(MergedLoc, Store->getDebugLoc());

  Builder.setInstr(*Stores.back());
  Builder.setDebugLoc(MergedLoc);

  // If all of the store values are constants, then create a wide constant
  // directly. Otherwise, we need to generate some instructions to merge the
  // existing values together into a wider type.
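  // For example, on a little-endian target, four adjacent s8 stores of the
  // constants 0x11, 0x22, 0x33, 0x44 (lowest address first) can be replaced by
  // a single s32 store of the constant 0x44332211 at the lowest address.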
  SmallVector<APInt, 8> ConstantVals;
  for (auto *Store : Stores) {
    auto MaybeCst =
        getIConstantVRegValWithLookThrough(Store->getValueReg(), *MRI);
    if (!MaybeCst) {
      ConstantVals.clear();
      break;
    }
    ConstantVals.emplace_back(MaybeCst->Value);
  }

  Register WideReg;
  auto *WideMMO =
      MF->getMachineMemOperand(&FirstStore->getMMO(), 0, WideValueTy);
  if (ConstantVals.empty()) {
    // Mimic the SDAG behaviour here and don't try to do anything for unknown
    // values. In future, we should also support the cases of loads and
    // extracted vector elements.
    return false;
  }

  assert(ConstantVals.size() == NumStores);
  // Check if our wide constant is legal.
  if (!isLegalOrBeforeLegalizer({TargetOpcode::G_CONSTANT, {WideValueTy}}, *MF))
    return false;
  APInt WideConst(WideValueTy.getSizeInBits(), 0);
  for (unsigned Idx = 0; Idx < ConstantVals.size(); ++Idx) {
    // Insert the smaller constant into the corresponding position in the
    // wider one.
    WideConst.insertBits(ConstantVals[Idx], Idx * SmallTy.getSizeInBits());
  }
  WideReg = Builder.buildConstant(WideValueTy, WideConst).getReg(0);
  auto NewStore =
      Builder.buildStore(WideReg, FirstStore->getPointerReg(), *WideMMO);
  (void) NewStore;
  LLVM_DEBUG(dbgs() << "Merged " << Stores.size()
                    << " stores into merged store: " << *NewStore);
  LLVM_DEBUG(for (auto *MI : Stores) dbgs() << "  " << *MI;);
  NumStoresMerged += Stores.size();

  MachineOptimizationRemarkEmitter MORE(*MF, nullptr);
  MORE.emit([&]() {
    MachineOptimizationRemark R(DEBUG_TYPE, "MergedStore",
                                FirstStore->getDebugLoc(),
                                FirstStore->getParent());
    R << "Merged " << NV("NumMerged", Stores.size()) << " stores of "
      << NV("OrigWidth", SmallTy.getSizeInBytes())
      << " bytes into a single store of "
      << NV("NewWidth", WideValueTy.getSizeInBytes()) << " bytes";
    return R;
  });

  for (auto *MI : Stores)
    InstsToErase.insert(MI);
  return true;
}

bool LoadStoreOpt::processMergeCandidate(StoreMergeCandidate &C) {
  if (C.Stores.size() < 2) {
    C.reset();
    return false;
  }

  LLVM_DEBUG(dbgs() << "Checking store merge candidate with " << C.Stores.size()
                    << " stores, starting with " << *C.Stores[0]);
  // We know that the stores in the candidate are adjacent.
  // Now we need to check if any potential aliasing instructions recorded
  // during the search alias with load/stores added to the candidate after.
  // For example, if we have the candidate:
  //   C.Stores = [ST1, ST2, ST3, ST4]
  // and after seeing ST2 we saw a load LD1, which did not alias with ST1 or
  // ST2, then we would have recorded it into the PotentialAliases structure
  // with the associated index value of "1". Then we see ST3 and ST4 and add
  // them to the candidate group. We know that LD1 does not alias with ST1 or
  // ST2, since we already did that check. However we don't yet know if it
  // may alias ST3 and ST4, so we perform those checks now.
  SmallVector<GStore *> StoresToMerge;

  auto DoesStoreAliasWithPotential = [&](unsigned Idx, GStore &CheckStore) {
    for (auto AliasInfo : reverse(C.PotentialAliases)) {
      MachineInstr *PotentialAliasOp = AliasInfo.first;
      unsigned PreCheckedIdx = AliasInfo.second;
      if (static_cast<unsigned>(Idx) < PreCheckedIdx) {
        // Once our store index is lower than the index associated with the
        // potential alias, we know that we've already checked for this alias
        // and all of the earlier potential aliases too.
        return false;
      }
      // Need to check this alias.
      if (GISelAddressing::instMayAlias(CheckStore, *PotentialAliasOp, *MRI,
                                        AA)) {
        LLVM_DEBUG(dbgs() << "Potential alias " << *PotentialAliasOp
                          << " detected\n");
        return true;
      }
    }
    return false;
  };
  // Start from the last store in the group, and check if it aliases with any
  // of the potential aliasing operations in the list.
  for (int StoreIdx = C.Stores.size() - 1; StoreIdx >= 0; --StoreIdx) {
    auto *CheckStore = C.Stores[StoreIdx];
    if (DoesStoreAliasWithPotential(StoreIdx, *CheckStore))
      continue;
    StoresToMerge.emplace_back(CheckStore);
  }

  LLVM_DEBUG(dbgs() << StoresToMerge.size()
                    << " stores remaining after alias checks. Merging...\n");

  // Now we've checked for aliasing hazards, merge any stores left.
  C.reset();
  if (StoresToMerge.size() < 2)
    return false;
  return mergeStores(StoresToMerge);
}

bool LoadStoreOpt::operationAliasesWithCandidate(MachineInstr &MI,
                                                 StoreMergeCandidate &C) {
  if (C.Stores.empty())
    return false;
  return llvm::any_of(C.Stores, [&](MachineInstr *OtherMI) {
    return instMayAlias(MI, *OtherMI, *MRI, AA);
  });
}

void LoadStoreOpt::StoreMergeCandidate::addPotentialAlias(MachineInstr &MI) {
  PotentialAliases.emplace_back(std::make_pair(&MI, Stores.size() - 1));
}

bool LoadStoreOpt::addStoreToCandidate(GStore &StoreMI,
                                       StoreMergeCandidate &C) {
  // Check that the given store writes to an adjacent address and meets the
  // other requirements for joining the candidate.
  LLT ValueTy = MRI->getType(StoreMI.getValueReg());
  LLT PtrTy = MRI->getType(StoreMI.getPointerReg());

  // Only handle scalars.
  if (!ValueTy.isScalar())
    return false;

  // Don't allow truncating stores for now.
  if (StoreMI.getMemSizeInBits() != ValueTy.getSizeInBits())
    return false;

  // Avoid adding volatile or ordered stores to the candidate. We already have
  // a check for this in instMayAlias(), but that only gets called later,
  // between potential aliasing hazards.
  if (!StoreMI.isSimple())
    return false;

  Register StoreAddr = StoreMI.getPointerReg();
  auto BIO = getPointerInfo(StoreAddr, *MRI);
  Register StoreBase = BIO.getBase();
  if (C.Stores.empty()) {
    C.BasePtr = StoreBase;
    if (!BIO.hasValidOffset()) {
      C.CurrentLowestOffset = 0;
    } else {
      C.CurrentLowestOffset = BIO.getOffset();
    }
    // This is the first store of the candidate.
    // If the offset can't possibly allow for a lower addressed store with the
    // same base, don't bother adding it.
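    // (Candidates are built walking the block bottom-up, so they grow
    // downwards in memory: each store added later must sit exactly one
    // value-width below the current lowest offset.)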
    if (BIO.hasValidOffset() &&
        BIO.getOffset() < static_cast<int64_t>(ValueTy.getSizeInBytes()))
      return false;
    C.Stores.emplace_back(&StoreMI);
    LLVM_DEBUG(dbgs() << "Starting a new merge candidate group with: "
                      << StoreMI);
    return true;
  }

  // Check the store is the same size as the existing ones in the candidate.
  if (MRI->getType(C.Stores[0]->getValueReg()).getSizeInBits() !=
      ValueTy.getSizeInBits())
    return false;

  if (MRI->getType(C.Stores[0]->getPointerReg()).getAddressSpace() !=
      PtrTy.getAddressSpace())
    return false;

  // There are other stores in the candidate. Check that this store writes to
  // the next lowest adjacent address.
  if (C.BasePtr != StoreBase)
    return false;
  // If we don't have a valid offset, we can't guarantee that this store is
  // adjacent to the previous one.
  if (!BIO.hasValidOffset())
    return false;
  if ((C.CurrentLowestOffset -
       static_cast<int64_t>(ValueTy.getSizeInBytes())) != BIO.getOffset())
    return false;

  // This writes to an adjacent address. Allow it.
  C.Stores.emplace_back(&StoreMI);
  C.CurrentLowestOffset = C.CurrentLowestOffset - ValueTy.getSizeInBytes();
  LLVM_DEBUG(dbgs() << "Candidate added store: " << StoreMI);
  return true;
}

bool LoadStoreOpt::mergeBlockStores(MachineBasicBlock &MBB) {
  bool Changed = false;
  // Walk through the block bottom-up, looking for merging candidates.
  StoreMergeCandidate Candidate;
  for (MachineInstr &MI : llvm::reverse(MBB)) {
    if (InstsToErase.contains(&MI))
      continue;

    if (auto *StoreMI = dyn_cast<GStore>(&MI)) {
      // We have a G_STORE. Add it to the candidate if it writes to an adjacent
      // address.
      if (!addStoreToCandidate(*StoreMI, Candidate)) {
        // Store wasn't eligible to be added. May need to record it as a
        // potential alias.
        if (operationAliasesWithCandidate(*StoreMI, Candidate)) {
          Changed |= processMergeCandidate(Candidate);
          continue;
        }
        Candidate.addPotentialAlias(*StoreMI);
      }
      continue;
    }

    // If we don't have any stores yet, this instruction can't pose a problem.
    if (Candidate.Stores.empty())
      continue;

    // We're dealing with some other kind of instruction.
    if (isInstHardMergeHazard(MI)) {
      Changed |= processMergeCandidate(Candidate);
      Candidate.Stores.clear();
      continue;
    }

    if (!MI.mayLoadOrStore())
      continue;

    if (operationAliasesWithCandidate(MI, Candidate)) {
      // We have a potential alias, so process the current candidate if we can
      // and then continue looking for a new candidate.
      Changed |= processMergeCandidate(Candidate);
      continue;
    }

    // Record this instruction as a potential alias for future stores that are
    // added to the candidate.
    Candidate.addPotentialAlias(MI);
  }

  // Process any candidate left after finishing searching the entire block.
  Changed |= processMergeCandidate(Candidate);

  // Erase instructions now that we're no longer iterating over the block.
  for (auto *MI : InstsToErase)
    MI->eraseFromParent();
  InstsToErase.clear();
  return Changed;
}

/// Check if the store \p Store is a truncstore that can be merged. That is,
/// it's a store of a shifted value of \p SrcVal. If \p SrcVal is an empty
/// Register then it does not need to match and SrcVal is set to the source
/// value found.
/// On match, returns the start byte offset of the \p SrcVal that is being
/// stored.
static std::optional<int64_t>
getTruncStoreByteOffset(GStore &Store, Register &SrcVal,
                        MachineRegisterInfo &MRI) {
  Register TruncVal;
  if (!mi_match(Store.getValueReg(), MRI, m_GTrunc(m_Reg(TruncVal))))
    return std::nullopt;

  // The shift amount must be a constant multiple of the narrow type.
  // It is translated to the offset address in the wide source value "y".
  //
  // x = G_LSHR y, ShiftAmtC
  // s8 z = G_TRUNC x
  // store z, ...
  Register FoundSrcVal;
  int64_t ShiftAmt;
  if (!mi_match(TruncVal, MRI,
                m_any_of(m_GLShr(m_Reg(FoundSrcVal), m_ICst(ShiftAmt)),
                         m_GAShr(m_Reg(FoundSrcVal), m_ICst(ShiftAmt))))) {
    if (!SrcVal.isValid() || TruncVal == SrcVal) {
      if (!SrcVal.isValid())
        SrcVal = TruncVal;
      return 0; // If it's the lowest index store.
    }
    return std::nullopt;
  }

  unsigned NarrowBits = Store.getMMO().getMemoryType().getScalarSizeInBits();
  if (ShiftAmt % NarrowBits != 0)
    return std::nullopt;
  const unsigned Offset = ShiftAmt / NarrowBits;

  if (SrcVal.isValid() && FoundSrcVal != SrcVal)
    return std::nullopt;

  if (!SrcVal.isValid())
    SrcVal = FoundSrcVal;
  else if (MRI.getType(SrcVal) != MRI.getType(FoundSrcVal))
    return std::nullopt;
  return Offset;
}

/// Match a pattern where a wide type scalar value is stored by several narrow
/// stores. Fold it into a single store or a BSWAP and a store if the target
/// supports it.
///
/// Assuming little endian target:
///  i8 *p = ...
///  i32 val = ...
///  p[0] = (val >> 0) & 0xFF;
///  p[1] = (val >> 8) & 0xFF;
///  p[2] = (val >> 16) & 0xFF;
///  p[3] = (val >> 24) & 0xFF;
/// =>
///  *((i32)p) = val;
///
///  i8 *p = ...
///  i32 val = ...
///  p[0] = (val >> 24) & 0xFF;
///  p[1] = (val >> 16) & 0xFF;
///  p[2] = (val >> 8) & 0xFF;
///  p[3] = (val >> 0) & 0xFF;
/// =>
///  *((i32)p) = BSWAP(val);
bool LoadStoreOpt::mergeTruncStore(GStore &StoreMI,
                                   SmallPtrSetImpl<GStore *> &DeletedStores) {
  LLT MemTy = StoreMI.getMMO().getMemoryType();

  // We only handle merging simple stores of 1-4 bytes.
  if (!MemTy.isScalar())
    return false;
  switch (MemTy.getSizeInBits()) {
  case 8:
  case 16:
  case 32:
    break;
  default:
    return false;
  }
  if (!StoreMI.isSimple())
    return false;

  // We do a simple search for mergeable stores prior to this one.
  // Any potential alias hazard along the way terminates the search.
  SmallVector<GStore *> FoundStores;

  // We're looking for:
  // 1) a (store(trunc(...)))
  // 2) of an LSHR/ASHR of a single wide value, by the appropriate shift to get
  //    the partial value stored.
  // 3) where the offsets form either a little or big-endian sequence.

  auto &LastStore = StoreMI;

  // The single base pointer that all stores must use.
  Register BaseReg;
  int64_t LastOffset;
  if (!mi_match(LastStore.getPointerReg(), *MRI,
                m_GPtrAdd(m_Reg(BaseReg), m_ICst(LastOffset)))) {
    BaseReg = LastStore.getPointerReg();
    LastOffset = 0;
  }

  GStore *LowestIdxStore = &LastStore;
  int64_t LowestIdxOffset = LastOffset;

  Register WideSrcVal;
  auto LowestShiftAmt = getTruncStoreByteOffset(LastStore, WideSrcVal, *MRI);
  if (!LowestShiftAmt)
    return false; // Didn't match a trunc.
  assert(WideSrcVal.isValid());

  LLT WideStoreTy = MRI->getType(WideSrcVal);
  // The wide type might not be a multiple of the memory type, e.g. s48 and s32.
  if (WideStoreTy.getSizeInBits() % MemTy.getSizeInBits() != 0)
    return false;
  const unsigned NumStoresRequired =
      WideStoreTy.getSizeInBits() / MemTy.getSizeInBits();

  SmallVector<int64_t, 8> OffsetMap(NumStoresRequired, INT64_MAX);
  OffsetMap[*LowestShiftAmt] = LastOffset;
  FoundStores.emplace_back(&LastStore);

  const int MaxInstsToCheck = 10;
  int NumInstsChecked = 0;
  for (auto II = ++LastStore.getReverseIterator();
       II != LastStore.getParent()->rend() && NumInstsChecked < MaxInstsToCheck;
       ++II) {
    NumInstsChecked++;
    GStore *NewStore;
    if ((NewStore = dyn_cast<GStore>(&*II))) {
      if (NewStore->getMMO().getMemoryType() != MemTy || !NewStore->isSimple())
        break;
    } else if (II->isLoadFoldBarrier() || II->mayLoad()) {
      break;
    } else {
      continue; // This is a safe instruction we can look past.
    }

    Register NewBaseReg;
    int64_t MemOffset;
    // Check we're storing to the same base + some offset.
    if (!mi_match(NewStore->getPointerReg(), *MRI,
                  m_GPtrAdd(m_Reg(NewBaseReg), m_ICst(MemOffset)))) {
      NewBaseReg = NewStore->getPointerReg();
      MemOffset = 0;
    }
    if (BaseReg != NewBaseReg)
      break;

    auto ShiftByteOffset = getTruncStoreByteOffset(*NewStore, WideSrcVal, *MRI);
    if (!ShiftByteOffset)
      break;
    if (MemOffset < LowestIdxOffset) {
      LowestIdxOffset = MemOffset;
      LowestIdxStore = NewStore;
    }

    // Map the offset in the store and the offset in the combined value, and
    // early return if it has been set before.
    if (*ShiftByteOffset < 0 || *ShiftByteOffset >= NumStoresRequired ||
        OffsetMap[*ShiftByteOffset] != INT64_MAX)
      break;
    OffsetMap[*ShiftByteOffset] = MemOffset;

    FoundStores.emplace_back(NewStore);
    // Reset counter since we've found a matching inst.
    NumInstsChecked = 0;
    if (FoundStores.size() == NumStoresRequired)
      break;
  }

  if (FoundStores.size() != NumStoresRequired) {
    if (FoundStores.size() == 1)
      return false;
    // We didn't find enough stores to merge into the size of the original
    // source value, but we may be able to generate a smaller store if we
    // truncate the source value.
    WideStoreTy = LLT::scalar(FoundStores.size() * MemTy.getScalarSizeInBits());
  }

  unsigned NumStoresFound = FoundStores.size();

  const auto &DL = LastStore.getMF()->getDataLayout();
  auto &C = LastStore.getMF()->getFunction().getContext();
  // Check that a store of the wide type is both allowed and fast on the target.
  unsigned Fast = 0;
  bool Allowed = TLI->allowsMemoryAccess(
      C, DL, WideStoreTy, LowestIdxStore->getMMO(), &Fast);
  if (!Allowed || !Fast)
    return false;

  // Check if the pieces of the value are going to the expected places in memory
  // to merge the stores.
  unsigned NarrowBits = MemTy.getScalarSizeInBits();
  auto checkOffsets = [&](bool MatchLittleEndian) {
    if (MatchLittleEndian) {
      for (unsigned i = 0; i != NumStoresFound; ++i)
        if (OffsetMap[i] != i * (NarrowBits / 8) + LowestIdxOffset)
          return false;
    } else { // MatchBigEndian by reversing loop counter.
      for (unsigned i = 0, j = NumStoresFound - 1; i != NumStoresFound;
           ++i, --j)
        if (OffsetMap[j] != i * (NarrowBits / 8) + LowestIdxOffset)
          return false;
    }
    return true;
  };

  // Check if the offsets line up for the native data layout of this target.
  bool NeedBswap = false;
  bool NeedRotate = false;
  if (!checkOffsets(DL.isLittleEndian())) {
    // Special-case: check if byte offsets line up for the opposite endian.
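    // For example, four s8 pieces written in reversed order can still be
    // merged by byte-swapping the wide value, and two halves written in
    // swapped order can be merged by rotating the wide value by half its
    // width.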
    if (NarrowBits == 8 && checkOffsets(DL.isBigEndian()))
      NeedBswap = true;
    else if (NumStoresFound == 2 && checkOffsets(DL.isBigEndian()))
      NeedRotate = true;
    else
      return false;
  }

  if (NeedBswap &&
      !isLegalOrBeforeLegalizer({TargetOpcode::G_BSWAP, {WideStoreTy}}, *MF))
    return false;
  if (NeedRotate &&
      !isLegalOrBeforeLegalizer(
          {TargetOpcode::G_ROTR, {WideStoreTy, WideStoreTy}}, *MF))
    return false;

  Builder.setInstrAndDebugLoc(StoreMI);

  if (WideStoreTy != MRI->getType(WideSrcVal))
    WideSrcVal = Builder.buildTrunc(WideStoreTy, WideSrcVal).getReg(0);

  if (NeedBswap) {
    WideSrcVal = Builder.buildBSwap(WideStoreTy, WideSrcVal).getReg(0);
  } else if (NeedRotate) {
    assert(WideStoreTy.getSizeInBits() % 2 == 0 &&
           "Unexpected type for rotate");
    auto RotAmt =
        Builder.buildConstant(WideStoreTy, WideStoreTy.getSizeInBits() / 2);
    WideSrcVal =
        Builder.buildRotateRight(WideStoreTy, WideSrcVal, RotAmt).getReg(0);
  }

  Builder.buildStore(WideSrcVal, LowestIdxStore->getPointerReg(),
                     LowestIdxStore->getMMO().getPointerInfo(),
                     LowestIdxStore->getMMO().getAlign());

  // Erase the old stores.
  for (auto *ST : FoundStores) {
    ST->eraseFromParent();
    DeletedStores.insert(ST);
  }
  return true;
}

bool LoadStoreOpt::mergeTruncStoresBlock(MachineBasicBlock &BB) {
  bool Changed = false;
  SmallVector<GStore *, 16> Stores;
  SmallPtrSet<GStore *, 8> DeletedStores;
  // Walk up the block so we can see the most eligible stores.
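  // mergeTruncStore() searches backwards from the store it is given, so
  // visiting the stores bottom-up lets the last store of each sequence act as
  // the anchor for the merge.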
  for (MachineInstr &MI : llvm::reverse(BB))
    if (auto *StoreMI = dyn_cast<GStore>(&MI))
      Stores.emplace_back(StoreMI);

  for (auto *StoreMI : Stores) {
    if (DeletedStores.count(StoreMI))
      continue;
    if (mergeTruncStore(*StoreMI, DeletedStores))
      Changed = true;
  }
  return Changed;
}

bool LoadStoreOpt::mergeFunctionStores(MachineFunction &MF) {
  bool Changed = false;
  for (auto &BB : MF) {
    Changed |= mergeBlockStores(BB);
    Changed |= mergeTruncStoresBlock(BB);
  }

  // Erase all dead instructions left over by the merging.
  if (Changed) {
    for (auto &BB : MF) {
      for (auto &I : make_early_inc_range(make_range(BB.rbegin(), BB.rend()))) {
        if (isTriviallyDead(I, *MRI))
          I.eraseFromParent();
      }
    }
  }

  return Changed;
}

void LoadStoreOpt::initializeStoreMergeTargetInfo(unsigned AddrSpace) {
  // Query the legalizer info to record what store types are legal.
  // We record this because we don't want to bother trying to merge stores into
  // illegal ones, which would just result in being split again.

  if (LegalStoreSizes.count(AddrSpace)) {
    assert(LegalStoreSizes[AddrSpace].any());
    return; // Already cached sizes for this address space.
  }

  // Need to reserve at least MaxStoreSizeToForm + 1 bits.
  BitVector LegalSizes(MaxStoreSizeToForm * 2);
  const auto &LI = *MF->getSubtarget().getLegalizerInfo();
  const auto &DL = MF->getFunction().getParent()->getDataLayout();
  Type *IRPtrTy = PointerType::get(MF->getFunction().getContext(), AddrSpace);
  LLT PtrTy = getLLTForType(*IRPtrTy, DL);
  // We assume that we're not going to be generating any stores wider than
  // MaxStoreSizeToForm bits for now.
  for (unsigned Size = 2; Size <= MaxStoreSizeToForm; Size *= 2) {
    LLT Ty = LLT::scalar(Size);
    SmallVector<LegalityQuery::MemDesc, 2> MemDescrs(
        {{Ty, Ty.getSizeInBits(), AtomicOrdering::NotAtomic}});
    SmallVector<LLT> StoreTys({Ty, PtrTy});
    LegalityQuery Q(TargetOpcode::G_STORE, StoreTys, MemDescrs);
    LegalizeActionStep ActionStep = LI.getAction(Q);
    if (ActionStep.Action == LegalizeActions::Legal)
      LegalSizes.set(Size);
  }
  assert(LegalSizes.any() && "Expected some store sizes to be legal!");
  LegalStoreSizes[AddrSpace] = LegalSizes;
}

bool LoadStoreOpt::runOnMachineFunction(MachineFunction &MF) {
  // If the ISel pipeline failed, do not bother running this pass.
  if (MF.getProperties().hasProperty(
          MachineFunctionProperties::Property::FailedISel))
    return false;

  LLVM_DEBUG(dbgs() << "Begin memory optimizations for: " << MF.getName()
                    << '\n');

  init(MF);
  bool Changed = false;
  Changed |= mergeFunctionStores(MF);

  LegalStoreSizes.clear();
  return Changed;
}