1349cc55cSDimitry Andric //===- LoadStoreOpt.cpp ----------- Generic memory optimizations -*- C++ -*-==// 2349cc55cSDimitry Andric // 3349cc55cSDimitry Andric // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. 4349cc55cSDimitry Andric // See https://llvm.org/LICENSE.txt for license information. 5349cc55cSDimitry Andric // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception 6349cc55cSDimitry Andric // 7349cc55cSDimitry Andric //===----------------------------------------------------------------------===// 8349cc55cSDimitry Andric /// \file 9349cc55cSDimitry Andric /// This file implements the LoadStoreOpt optimization pass. 10349cc55cSDimitry Andric //===----------------------------------------------------------------------===// 11349cc55cSDimitry Andric 12349cc55cSDimitry Andric #include "llvm/CodeGen/GlobalISel/LoadStoreOpt.h" 13*06c3fb27SDimitry Andric #include "llvm/ADT/STLExtras.h" 14*06c3fb27SDimitry Andric #include "llvm/ADT/SmallPtrSet.h" 15349cc55cSDimitry Andric #include "llvm/ADT/Statistic.h" 16349cc55cSDimitry Andric #include "llvm/Analysis/AliasAnalysis.h" 17349cc55cSDimitry Andric #include "llvm/Analysis/MemoryLocation.h" 18349cc55cSDimitry Andric #include "llvm/Analysis/OptimizationRemarkEmitter.h" 19349cc55cSDimitry Andric #include "llvm/CodeGen/GlobalISel/GenericMachineInstrs.h" 20349cc55cSDimitry Andric #include "llvm/CodeGen/GlobalISel/LegalizerInfo.h" 21349cc55cSDimitry Andric #include "llvm/CodeGen/GlobalISel/MIPatternMatch.h" 22349cc55cSDimitry Andric #include "llvm/CodeGen/GlobalISel/Utils.h" 23*06c3fb27SDimitry Andric #include "llvm/CodeGen/LowLevelTypeUtils.h" 24349cc55cSDimitry Andric #include "llvm/CodeGen/MachineBasicBlock.h" 25349cc55cSDimitry Andric #include "llvm/CodeGen/MachineFrameInfo.h" 26349cc55cSDimitry Andric #include "llvm/CodeGen/MachineFunction.h" 27349cc55cSDimitry Andric #include "llvm/CodeGen/MachineInstr.h" 28349cc55cSDimitry Andric #include 
"llvm/CodeGen/MachineOptimizationRemarkEmitter.h" 29349cc55cSDimitry Andric #include "llvm/CodeGen/MachineRegisterInfo.h" 30349cc55cSDimitry Andric #include "llvm/CodeGen/Register.h" 31349cc55cSDimitry Andric #include "llvm/CodeGen/TargetLowering.h" 32349cc55cSDimitry Andric #include "llvm/CodeGen/TargetOpcodes.h" 33349cc55cSDimitry Andric #include "llvm/IR/DebugInfoMetadata.h" 34349cc55cSDimitry Andric #include "llvm/InitializePasses.h" 35349cc55cSDimitry Andric #include "llvm/Support/AtomicOrdering.h" 36349cc55cSDimitry Andric #include "llvm/Support/Casting.h" 37349cc55cSDimitry Andric #include "llvm/Support/Debug.h" 38349cc55cSDimitry Andric #include "llvm/Support/ErrorHandling.h" 39349cc55cSDimitry Andric #include "llvm/Support/MathExtras.h" 40349cc55cSDimitry Andric #include <algorithm> 41349cc55cSDimitry Andric 42349cc55cSDimitry Andric #define DEBUG_TYPE "loadstore-opt" 43349cc55cSDimitry Andric 44349cc55cSDimitry Andric using namespace llvm; 45349cc55cSDimitry Andric using namespace ore; 46349cc55cSDimitry Andric using namespace MIPatternMatch; 47349cc55cSDimitry Andric 48349cc55cSDimitry Andric STATISTIC(NumStoresMerged, "Number of stores merged"); 49349cc55cSDimitry Andric 50349cc55cSDimitry Andric const unsigned MaxStoreSizeToForm = 128; 51349cc55cSDimitry Andric 52349cc55cSDimitry Andric char LoadStoreOpt::ID = 0; 53349cc55cSDimitry Andric INITIALIZE_PASS_BEGIN(LoadStoreOpt, DEBUG_TYPE, "Generic memory optimizations", 54349cc55cSDimitry Andric false, false) 55349cc55cSDimitry Andric INITIALIZE_PASS_END(LoadStoreOpt, DEBUG_TYPE, "Generic memory optimizations", 56349cc55cSDimitry Andric false, false) 57349cc55cSDimitry Andric 58349cc55cSDimitry Andric LoadStoreOpt::LoadStoreOpt(std::function<bool(const MachineFunction &)> F) 59349cc55cSDimitry Andric : MachineFunctionPass(ID), DoNotRunPass(F) {} 60349cc55cSDimitry Andric 61349cc55cSDimitry Andric LoadStoreOpt::LoadStoreOpt() 62349cc55cSDimitry Andric : LoadStoreOpt([](const MachineFunction &) { return 
false; }) {} 63349cc55cSDimitry Andric 64349cc55cSDimitry Andric void LoadStoreOpt::init(MachineFunction &MF) { 65349cc55cSDimitry Andric this->MF = &MF; 66349cc55cSDimitry Andric MRI = &MF.getRegInfo(); 67349cc55cSDimitry Andric AA = &getAnalysis<AAResultsWrapperPass>().getAAResults(); 68349cc55cSDimitry Andric TLI = MF.getSubtarget().getTargetLowering(); 69349cc55cSDimitry Andric LI = MF.getSubtarget().getLegalizerInfo(); 70349cc55cSDimitry Andric Builder.setMF(MF); 71349cc55cSDimitry Andric IsPreLegalizer = !MF.getProperties().hasProperty( 72349cc55cSDimitry Andric MachineFunctionProperties::Property::Legalized); 73349cc55cSDimitry Andric InstsToErase.clear(); 74349cc55cSDimitry Andric } 75349cc55cSDimitry Andric 76349cc55cSDimitry Andric void LoadStoreOpt::getAnalysisUsage(AnalysisUsage &AU) const { 77349cc55cSDimitry Andric AU.addRequired<AAResultsWrapperPass>(); 7881ad6265SDimitry Andric AU.setPreservesAll(); 79349cc55cSDimitry Andric getSelectionDAGFallbackAnalysisUsage(AU); 80349cc55cSDimitry Andric MachineFunctionPass::getAnalysisUsage(AU); 81349cc55cSDimitry Andric } 82349cc55cSDimitry Andric 83349cc55cSDimitry Andric BaseIndexOffset GISelAddressing::getPointerInfo(Register Ptr, 84349cc55cSDimitry Andric MachineRegisterInfo &MRI) { 85349cc55cSDimitry Andric BaseIndexOffset Info; 86349cc55cSDimitry Andric Register PtrAddRHS; 87349cc55cSDimitry Andric if (!mi_match(Ptr, MRI, m_GPtrAdd(m_Reg(Info.BaseReg), m_Reg(PtrAddRHS)))) { 88349cc55cSDimitry Andric Info.BaseReg = Ptr; 89349cc55cSDimitry Andric Info.IndexReg = Register(); 90349cc55cSDimitry Andric Info.IsIndexSignExt = false; 91349cc55cSDimitry Andric return Info; 92349cc55cSDimitry Andric } 93349cc55cSDimitry Andric 94349cc55cSDimitry Andric auto RHSCst = getIConstantVRegValWithLookThrough(PtrAddRHS, MRI); 95349cc55cSDimitry Andric if (RHSCst) 96349cc55cSDimitry Andric Info.Offset = RHSCst->Value.getSExtValue(); 97349cc55cSDimitry Andric 98349cc55cSDimitry Andric // Just recognize a simple case for now. 
In future we'll need to match 99349cc55cSDimitry Andric // indexing patterns for base + index + constant. 100349cc55cSDimitry Andric Info.IndexReg = PtrAddRHS; 101349cc55cSDimitry Andric Info.IsIndexSignExt = false; 102349cc55cSDimitry Andric return Info; 103349cc55cSDimitry Andric } 104349cc55cSDimitry Andric 105349cc55cSDimitry Andric bool GISelAddressing::aliasIsKnownForLoadStore(const MachineInstr &MI1, 106349cc55cSDimitry Andric const MachineInstr &MI2, 107349cc55cSDimitry Andric bool &IsAlias, 108349cc55cSDimitry Andric MachineRegisterInfo &MRI) { 109349cc55cSDimitry Andric auto *LdSt1 = dyn_cast<GLoadStore>(&MI1); 110349cc55cSDimitry Andric auto *LdSt2 = dyn_cast<GLoadStore>(&MI2); 111349cc55cSDimitry Andric if (!LdSt1 || !LdSt2) 112349cc55cSDimitry Andric return false; 113349cc55cSDimitry Andric 114349cc55cSDimitry Andric BaseIndexOffset BasePtr0 = getPointerInfo(LdSt1->getPointerReg(), MRI); 115349cc55cSDimitry Andric BaseIndexOffset BasePtr1 = getPointerInfo(LdSt2->getPointerReg(), MRI); 116349cc55cSDimitry Andric 117349cc55cSDimitry Andric if (!BasePtr0.BaseReg.isValid() || !BasePtr1.BaseReg.isValid()) 118349cc55cSDimitry Andric return false; 119349cc55cSDimitry Andric 120349cc55cSDimitry Andric int64_t Size1 = LdSt1->getMemSize(); 121349cc55cSDimitry Andric int64_t Size2 = LdSt2->getMemSize(); 122349cc55cSDimitry Andric 123349cc55cSDimitry Andric int64_t PtrDiff; 124349cc55cSDimitry Andric if (BasePtr0.BaseReg == BasePtr1.BaseReg) { 125349cc55cSDimitry Andric PtrDiff = BasePtr1.Offset - BasePtr0.Offset; 126349cc55cSDimitry Andric // If the size of memory access is unknown, do not use it to do analysis. 127349cc55cSDimitry Andric // One example of unknown size memory access is to load/store scalable 128349cc55cSDimitry Andric // vector objects on the stack. 129349cc55cSDimitry Andric // BasePtr1 is PtrDiff away from BasePtr0. 
They alias if none of the 130349cc55cSDimitry Andric // following situations arise: 131349cc55cSDimitry Andric if (PtrDiff >= 0 && 132349cc55cSDimitry Andric Size1 != static_cast<int64_t>(MemoryLocation::UnknownSize)) { 133349cc55cSDimitry Andric // [----BasePtr0----] 134349cc55cSDimitry Andric // [---BasePtr1--] 135349cc55cSDimitry Andric // ========PtrDiff========> 136349cc55cSDimitry Andric IsAlias = !(Size1 <= PtrDiff); 137349cc55cSDimitry Andric return true; 138349cc55cSDimitry Andric } 139349cc55cSDimitry Andric if (PtrDiff < 0 && 140349cc55cSDimitry Andric Size2 != static_cast<int64_t>(MemoryLocation::UnknownSize)) { 141349cc55cSDimitry Andric // [----BasePtr0----] 142349cc55cSDimitry Andric // [---BasePtr1--] 143349cc55cSDimitry Andric // =====(-PtrDiff)====> 144349cc55cSDimitry Andric IsAlias = !((PtrDiff + Size2) <= 0); 145349cc55cSDimitry Andric return true; 146349cc55cSDimitry Andric } 147349cc55cSDimitry Andric return false; 148349cc55cSDimitry Andric } 149349cc55cSDimitry Andric 150349cc55cSDimitry Andric // If both BasePtr0 and BasePtr1 are FrameIndexes, we will not be 151349cc55cSDimitry Andric // able to calculate their relative offset if at least one arises 152349cc55cSDimitry Andric // from an alloca. However, these allocas cannot overlap and we 153349cc55cSDimitry Andric // can infer there is no alias. 154349cc55cSDimitry Andric auto *Base0Def = getDefIgnoringCopies(BasePtr0.BaseReg, MRI); 155349cc55cSDimitry Andric auto *Base1Def = getDefIgnoringCopies(BasePtr1.BaseReg, MRI); 156349cc55cSDimitry Andric if (!Base0Def || !Base1Def) 157349cc55cSDimitry Andric return false; // Couldn't tell anything. 
158349cc55cSDimitry Andric 159349cc55cSDimitry Andric 160349cc55cSDimitry Andric if (Base0Def->getOpcode() != Base1Def->getOpcode()) 161349cc55cSDimitry Andric return false; 162349cc55cSDimitry Andric 163349cc55cSDimitry Andric if (Base0Def->getOpcode() == TargetOpcode::G_FRAME_INDEX) { 164349cc55cSDimitry Andric MachineFrameInfo &MFI = Base0Def->getMF()->getFrameInfo(); 165349cc55cSDimitry Andric // If the bases have the same frame index but we couldn't find a 166349cc55cSDimitry Andric // constant offset, (indices are different) be conservative. 167349cc55cSDimitry Andric if (Base0Def != Base1Def && 168349cc55cSDimitry Andric (!MFI.isFixedObjectIndex(Base0Def->getOperand(1).getIndex()) || 169349cc55cSDimitry Andric !MFI.isFixedObjectIndex(Base1Def->getOperand(1).getIndex()))) { 170349cc55cSDimitry Andric IsAlias = false; 171349cc55cSDimitry Andric return true; 172349cc55cSDimitry Andric } 173349cc55cSDimitry Andric } 174349cc55cSDimitry Andric 175349cc55cSDimitry Andric // This implementation is a lot more primitive than the SDAG one for now. 176349cc55cSDimitry Andric // FIXME: what about constant pools? 177349cc55cSDimitry Andric if (Base0Def->getOpcode() == TargetOpcode::G_GLOBAL_VALUE) { 178349cc55cSDimitry Andric auto GV0 = Base0Def->getOperand(1).getGlobal(); 179349cc55cSDimitry Andric auto GV1 = Base1Def->getOperand(1).getGlobal(); 180349cc55cSDimitry Andric if (GV0 != GV1) { 181349cc55cSDimitry Andric IsAlias = false; 182349cc55cSDimitry Andric return true; 183349cc55cSDimitry Andric } 184349cc55cSDimitry Andric } 185349cc55cSDimitry Andric 186349cc55cSDimitry Andric // Can't tell anything about aliasing. 
187349cc55cSDimitry Andric return false; 188349cc55cSDimitry Andric } 189349cc55cSDimitry Andric 190349cc55cSDimitry Andric bool GISelAddressing::instMayAlias(const MachineInstr &MI, 191349cc55cSDimitry Andric const MachineInstr &Other, 192349cc55cSDimitry Andric MachineRegisterInfo &MRI, 193349cc55cSDimitry Andric AliasAnalysis *AA) { 194349cc55cSDimitry Andric struct MemUseCharacteristics { 195349cc55cSDimitry Andric bool IsVolatile; 196349cc55cSDimitry Andric bool IsAtomic; 197349cc55cSDimitry Andric Register BasePtr; 198349cc55cSDimitry Andric int64_t Offset; 199349cc55cSDimitry Andric uint64_t NumBytes; 200349cc55cSDimitry Andric MachineMemOperand *MMO; 201349cc55cSDimitry Andric }; 202349cc55cSDimitry Andric 203349cc55cSDimitry Andric auto getCharacteristics = 204349cc55cSDimitry Andric [&](const MachineInstr *MI) -> MemUseCharacteristics { 205349cc55cSDimitry Andric if (const auto *LS = dyn_cast<GLoadStore>(MI)) { 206349cc55cSDimitry Andric Register BaseReg; 207349cc55cSDimitry Andric int64_t Offset = 0; 208349cc55cSDimitry Andric // No pre/post-inc addressing modes are considered here, unlike in SDAG. 209349cc55cSDimitry Andric if (!mi_match(LS->getPointerReg(), MRI, 210349cc55cSDimitry Andric m_GPtrAdd(m_Reg(BaseReg), m_ICst(Offset)))) { 211349cc55cSDimitry Andric BaseReg = LS->getPointerReg(); 212349cc55cSDimitry Andric Offset = 0; 213349cc55cSDimitry Andric } 214349cc55cSDimitry Andric 215349cc55cSDimitry Andric uint64_t Size = MemoryLocation::getSizeOrUnknown( 216349cc55cSDimitry Andric LS->getMMO().getMemoryType().getSizeInBytes()); 217349cc55cSDimitry Andric return {LS->isVolatile(), LS->isAtomic(), BaseReg, 218349cc55cSDimitry Andric Offset /*base offset*/, Size, &LS->getMMO()}; 219349cc55cSDimitry Andric } 220349cc55cSDimitry Andric // FIXME: support recognizing lifetime instructions. 221349cc55cSDimitry Andric // Default. 
222349cc55cSDimitry Andric return {false /*isvolatile*/, 223349cc55cSDimitry Andric /*isAtomic*/ false, Register(), 224349cc55cSDimitry Andric (int64_t)0 /*offset*/, 0 /*size*/, 225349cc55cSDimitry Andric (MachineMemOperand *)nullptr}; 226349cc55cSDimitry Andric }; 227349cc55cSDimitry Andric MemUseCharacteristics MUC0 = getCharacteristics(&MI), 228349cc55cSDimitry Andric MUC1 = getCharacteristics(&Other); 229349cc55cSDimitry Andric 230349cc55cSDimitry Andric // If they are to the same address, then they must be aliases. 231349cc55cSDimitry Andric if (MUC0.BasePtr.isValid() && MUC0.BasePtr == MUC1.BasePtr && 232349cc55cSDimitry Andric MUC0.Offset == MUC1.Offset) 233349cc55cSDimitry Andric return true; 234349cc55cSDimitry Andric 235349cc55cSDimitry Andric // If they are both volatile then they cannot be reordered. 236349cc55cSDimitry Andric if (MUC0.IsVolatile && MUC1.IsVolatile) 237349cc55cSDimitry Andric return true; 238349cc55cSDimitry Andric 239349cc55cSDimitry Andric // Be conservative about atomics for the moment 240349cc55cSDimitry Andric // TODO: This is way overconservative for unordered atomics (see D66309) 241349cc55cSDimitry Andric if (MUC0.IsAtomic && MUC1.IsAtomic) 242349cc55cSDimitry Andric return true; 243349cc55cSDimitry Andric 244349cc55cSDimitry Andric // If one operation reads from invariant memory, and the other may store, they 245349cc55cSDimitry Andric // cannot alias. 246349cc55cSDimitry Andric if (MUC0.MMO && MUC1.MMO) { 247349cc55cSDimitry Andric if ((MUC0.MMO->isInvariant() && MUC1.MMO->isStore()) || 248349cc55cSDimitry Andric (MUC1.MMO->isInvariant() && MUC0.MMO->isStore())) 249349cc55cSDimitry Andric return false; 250349cc55cSDimitry Andric } 251349cc55cSDimitry Andric 252349cc55cSDimitry Andric // Try to prove that there is aliasing, or that there is no aliasing. Either 253349cc55cSDimitry Andric // way, we can return now. If nothing can be proved, proceed with more tests. 
254349cc55cSDimitry Andric bool IsAlias; 255349cc55cSDimitry Andric if (GISelAddressing::aliasIsKnownForLoadStore(MI, Other, IsAlias, MRI)) 256349cc55cSDimitry Andric return IsAlias; 257349cc55cSDimitry Andric 258349cc55cSDimitry Andric // The following all rely on MMO0 and MMO1 being valid. 259349cc55cSDimitry Andric if (!MUC0.MMO || !MUC1.MMO) 260349cc55cSDimitry Andric return true; 261349cc55cSDimitry Andric 262349cc55cSDimitry Andric // FIXME: port the alignment based alias analysis from SDAG's isAlias(). 263349cc55cSDimitry Andric int64_t SrcValOffset0 = MUC0.MMO->getOffset(); 264349cc55cSDimitry Andric int64_t SrcValOffset1 = MUC1.MMO->getOffset(); 265349cc55cSDimitry Andric uint64_t Size0 = MUC0.NumBytes; 266349cc55cSDimitry Andric uint64_t Size1 = MUC1.NumBytes; 267349cc55cSDimitry Andric if (AA && MUC0.MMO->getValue() && MUC1.MMO->getValue() && 268349cc55cSDimitry Andric Size0 != MemoryLocation::UnknownSize && 269349cc55cSDimitry Andric Size1 != MemoryLocation::UnknownSize) { 270349cc55cSDimitry Andric // Use alias analysis information. 271349cc55cSDimitry Andric int64_t MinOffset = std::min(SrcValOffset0, SrcValOffset1); 272349cc55cSDimitry Andric int64_t Overlap0 = Size0 + SrcValOffset0 - MinOffset; 273349cc55cSDimitry Andric int64_t Overlap1 = Size1 + SrcValOffset1 - MinOffset; 274349cc55cSDimitry Andric if (AA->isNoAlias(MemoryLocation(MUC0.MMO->getValue(), Overlap0, 275349cc55cSDimitry Andric MUC0.MMO->getAAInfo()), 276349cc55cSDimitry Andric MemoryLocation(MUC1.MMO->getValue(), Overlap1, 277349cc55cSDimitry Andric MUC1.MMO->getAAInfo()))) 278349cc55cSDimitry Andric return false; 279349cc55cSDimitry Andric } 280349cc55cSDimitry Andric 281349cc55cSDimitry Andric // Otherwise we have to assume they alias. 
282349cc55cSDimitry Andric return true; 283349cc55cSDimitry Andric } 284349cc55cSDimitry Andric 285349cc55cSDimitry Andric /// Returns true if the instruction creates an unavoidable hazard that 286349cc55cSDimitry Andric /// forces a boundary between store merge candidates. 287349cc55cSDimitry Andric static bool isInstHardMergeHazard(MachineInstr &MI) { 288349cc55cSDimitry Andric return MI.hasUnmodeledSideEffects() || MI.hasOrderedMemoryRef(); 289349cc55cSDimitry Andric } 290349cc55cSDimitry Andric 291349cc55cSDimitry Andric bool LoadStoreOpt::mergeStores(SmallVectorImpl<GStore *> &StoresToMerge) { 292349cc55cSDimitry Andric // Try to merge all the stores in the vector, splitting into separate segments 293349cc55cSDimitry Andric // as necessary. 294349cc55cSDimitry Andric assert(StoresToMerge.size() > 1 && "Expected multiple stores to merge"); 295349cc55cSDimitry Andric LLT OrigTy = MRI->getType(StoresToMerge[0]->getValueReg()); 296349cc55cSDimitry Andric LLT PtrTy = MRI->getType(StoresToMerge[0]->getPointerReg()); 297349cc55cSDimitry Andric unsigned AS = PtrTy.getAddressSpace(); 298349cc55cSDimitry Andric // Ensure the legal store info is computed for this address space. 
299349cc55cSDimitry Andric initializeStoreMergeTargetInfo(AS); 300349cc55cSDimitry Andric const auto &LegalSizes = LegalStoreSizes[AS]; 301349cc55cSDimitry Andric 302349cc55cSDimitry Andric #ifndef NDEBUG 303fcaf7f86SDimitry Andric for (auto *StoreMI : StoresToMerge) 304349cc55cSDimitry Andric assert(MRI->getType(StoreMI->getValueReg()) == OrigTy); 305349cc55cSDimitry Andric #endif 306349cc55cSDimitry Andric 307349cc55cSDimitry Andric const auto &DL = MF->getFunction().getParent()->getDataLayout(); 308349cc55cSDimitry Andric bool AnyMerged = false; 309349cc55cSDimitry Andric do { 310*06c3fb27SDimitry Andric unsigned NumPow2 = llvm::bit_floor(StoresToMerge.size()); 311bdd1243dSDimitry Andric unsigned MaxSizeBits = NumPow2 * OrigTy.getSizeInBits().getFixedValue(); 312349cc55cSDimitry Andric // Compute the biggest store we can generate to handle the number of stores. 313349cc55cSDimitry Andric unsigned MergeSizeBits; 314349cc55cSDimitry Andric for (MergeSizeBits = MaxSizeBits; MergeSizeBits > 1; MergeSizeBits /= 2) { 315349cc55cSDimitry Andric LLT StoreTy = LLT::scalar(MergeSizeBits); 316349cc55cSDimitry Andric EVT StoreEVT = 317349cc55cSDimitry Andric getApproximateEVTForLLT(StoreTy, DL, MF->getFunction().getContext()); 318349cc55cSDimitry Andric if (LegalSizes.size() > MergeSizeBits && LegalSizes[MergeSizeBits] && 319349cc55cSDimitry Andric TLI->canMergeStoresTo(AS, StoreEVT, *MF) && 320349cc55cSDimitry Andric (TLI->isTypeLegal(StoreEVT))) 321349cc55cSDimitry Andric break; // We can generate a MergeSize bits store. 322349cc55cSDimitry Andric } 323349cc55cSDimitry Andric if (MergeSizeBits <= OrigTy.getSizeInBits()) 324349cc55cSDimitry Andric return AnyMerged; // No greater merge. 325349cc55cSDimitry Andric 326349cc55cSDimitry Andric unsigned NumStoresToMerge = MergeSizeBits / OrigTy.getSizeInBits(); 327349cc55cSDimitry Andric // Perform the actual merging. 
328349cc55cSDimitry Andric SmallVector<GStore *, 8> SingleMergeStores( 329349cc55cSDimitry Andric StoresToMerge.begin(), StoresToMerge.begin() + NumStoresToMerge); 330349cc55cSDimitry Andric AnyMerged |= doSingleStoreMerge(SingleMergeStores); 331349cc55cSDimitry Andric StoresToMerge.erase(StoresToMerge.begin(), 332349cc55cSDimitry Andric StoresToMerge.begin() + NumStoresToMerge); 333349cc55cSDimitry Andric } while (StoresToMerge.size() > 1); 334349cc55cSDimitry Andric return AnyMerged; 335349cc55cSDimitry Andric } 336349cc55cSDimitry Andric 337349cc55cSDimitry Andric bool LoadStoreOpt::isLegalOrBeforeLegalizer(const LegalityQuery &Query, 338349cc55cSDimitry Andric MachineFunction &MF) const { 339349cc55cSDimitry Andric auto Action = LI->getAction(Query).Action; 340349cc55cSDimitry Andric // If the instruction is unsupported, it can't be legalized at all. 341349cc55cSDimitry Andric if (Action == LegalizeActions::Unsupported) 342349cc55cSDimitry Andric return false; 343349cc55cSDimitry Andric return IsPreLegalizer || Action == LegalizeAction::Legal; 344349cc55cSDimitry Andric } 345349cc55cSDimitry Andric 346349cc55cSDimitry Andric bool LoadStoreOpt::doSingleStoreMerge(SmallVectorImpl<GStore *> &Stores) { 347349cc55cSDimitry Andric assert(Stores.size() > 1); 348349cc55cSDimitry Andric // We know that all the stores are consecutive and there are no aliasing 349349cc55cSDimitry Andric // operations in the range. However, the values that are being stored may be 350349cc55cSDimitry Andric // generated anywhere before each store. To ensure we have the values 351349cc55cSDimitry Andric // available, we materialize the wide value and new store at the place of the 352349cc55cSDimitry Andric // final store in the merge sequence. 
353349cc55cSDimitry Andric GStore *FirstStore = Stores[0]; 354349cc55cSDimitry Andric const unsigned NumStores = Stores.size(); 355349cc55cSDimitry Andric LLT SmallTy = MRI->getType(FirstStore->getValueReg()); 356349cc55cSDimitry Andric LLT WideValueTy = 357bdd1243dSDimitry Andric LLT::scalar(NumStores * SmallTy.getSizeInBits().getFixedValue()); 358349cc55cSDimitry Andric 359349cc55cSDimitry Andric // For each store, compute pairwise merged debug locs. 360bdd1243dSDimitry Andric DebugLoc MergedLoc = Stores.front()->getDebugLoc(); 361bdd1243dSDimitry Andric for (auto *Store : drop_begin(Stores)) 362bdd1243dSDimitry Andric MergedLoc = DILocation::getMergedLocation(MergedLoc, Store->getDebugLoc()); 363bdd1243dSDimitry Andric 364349cc55cSDimitry Andric Builder.setInstr(*Stores.back()); 365349cc55cSDimitry Andric Builder.setDebugLoc(MergedLoc); 366349cc55cSDimitry Andric 367349cc55cSDimitry Andric // If all of the store values are constants, then create a wide constant 368349cc55cSDimitry Andric // directly. Otherwise, we need to generate some instructions to merge the 369349cc55cSDimitry Andric // existing values together into a wider type. 
370349cc55cSDimitry Andric SmallVector<APInt, 8> ConstantVals; 371fcaf7f86SDimitry Andric for (auto *Store : Stores) { 372349cc55cSDimitry Andric auto MaybeCst = 373349cc55cSDimitry Andric getIConstantVRegValWithLookThrough(Store->getValueReg(), *MRI); 374349cc55cSDimitry Andric if (!MaybeCst) { 375349cc55cSDimitry Andric ConstantVals.clear(); 376349cc55cSDimitry Andric break; 377349cc55cSDimitry Andric } 378349cc55cSDimitry Andric ConstantVals.emplace_back(MaybeCst->Value); 379349cc55cSDimitry Andric } 380349cc55cSDimitry Andric 381349cc55cSDimitry Andric Register WideReg; 382349cc55cSDimitry Andric auto *WideMMO = 383349cc55cSDimitry Andric MF->getMachineMemOperand(&FirstStore->getMMO(), 0, WideValueTy); 384349cc55cSDimitry Andric if (ConstantVals.empty()) { 385349cc55cSDimitry Andric // Mimic the SDAG behaviour here and don't try to do anything for unknown 386349cc55cSDimitry Andric // values. In future, we should also support the cases of loads and 387349cc55cSDimitry Andric // extracted vector elements. 388349cc55cSDimitry Andric return false; 389349cc55cSDimitry Andric } 390349cc55cSDimitry Andric 391349cc55cSDimitry Andric assert(ConstantVals.size() == NumStores); 392349cc55cSDimitry Andric // Check if our wide constant is legal. 393349cc55cSDimitry Andric if (!isLegalOrBeforeLegalizer({TargetOpcode::G_CONSTANT, {WideValueTy}}, *MF)) 394349cc55cSDimitry Andric return false; 395349cc55cSDimitry Andric APInt WideConst(WideValueTy.getSizeInBits(), 0); 396349cc55cSDimitry Andric for (unsigned Idx = 0; Idx < ConstantVals.size(); ++Idx) { 397349cc55cSDimitry Andric // Insert the smaller constant into the corresponding position in the 398349cc55cSDimitry Andric // wider one. 
399349cc55cSDimitry Andric WideConst.insertBits(ConstantVals[Idx], Idx * SmallTy.getSizeInBits()); 400349cc55cSDimitry Andric } 401349cc55cSDimitry Andric WideReg = Builder.buildConstant(WideValueTy, WideConst).getReg(0); 402349cc55cSDimitry Andric auto NewStore = 403349cc55cSDimitry Andric Builder.buildStore(WideReg, FirstStore->getPointerReg(), *WideMMO); 404349cc55cSDimitry Andric (void) NewStore; 405*06c3fb27SDimitry Andric LLVM_DEBUG(dbgs() << "Merged " << Stores.size() 406*06c3fb27SDimitry Andric << " stores into merged store: " << *NewStore); 407*06c3fb27SDimitry Andric LLVM_DEBUG(for (auto *MI : Stores) dbgs() << " " << *MI;); 408349cc55cSDimitry Andric NumStoresMerged += Stores.size(); 409349cc55cSDimitry Andric 410349cc55cSDimitry Andric MachineOptimizationRemarkEmitter MORE(*MF, nullptr); 411349cc55cSDimitry Andric MORE.emit([&]() { 412349cc55cSDimitry Andric MachineOptimizationRemark R(DEBUG_TYPE, "MergedStore", 413349cc55cSDimitry Andric FirstStore->getDebugLoc(), 414349cc55cSDimitry Andric FirstStore->getParent()); 415349cc55cSDimitry Andric R << "Merged " << NV("NumMerged", Stores.size()) << " stores of " 416349cc55cSDimitry Andric << NV("OrigWidth", SmallTy.getSizeInBytes()) 417349cc55cSDimitry Andric << " bytes into a single store of " 418349cc55cSDimitry Andric << NV("NewWidth", WideValueTy.getSizeInBytes()) << " bytes"; 419349cc55cSDimitry Andric return R; 420349cc55cSDimitry Andric }); 421349cc55cSDimitry Andric 422fcaf7f86SDimitry Andric for (auto *MI : Stores) 423349cc55cSDimitry Andric InstsToErase.insert(MI); 424349cc55cSDimitry Andric return true; 425349cc55cSDimitry Andric } 426349cc55cSDimitry Andric 427349cc55cSDimitry Andric bool LoadStoreOpt::processMergeCandidate(StoreMergeCandidate &C) { 428349cc55cSDimitry Andric if (C.Stores.size() < 2) { 429349cc55cSDimitry Andric C.reset(); 430349cc55cSDimitry Andric return false; 431349cc55cSDimitry Andric } 432349cc55cSDimitry Andric 433349cc55cSDimitry Andric LLVM_DEBUG(dbgs() << "Checking 
store merge candidate with " << C.Stores.size() 434349cc55cSDimitry Andric << " stores, starting with " << *C.Stores[0]); 435349cc55cSDimitry Andric // We know that the stores in the candidate are adjacent. 436349cc55cSDimitry Andric // Now we need to check if any potential aliasing instructions recorded 437349cc55cSDimitry Andric // during the search alias with load/stores added to the candidate after. 438349cc55cSDimitry Andric // For example, if we have the candidate: 439349cc55cSDimitry Andric // C.Stores = [ST1, ST2, ST3, ST4] 440349cc55cSDimitry Andric // and after seeing ST2 we saw a load LD1, which did not alias with ST1 or 441349cc55cSDimitry Andric // ST2, then we would have recorded it into the PotentialAliases structure 442349cc55cSDimitry Andric // with the associated index value of "1". Then we see ST3 and ST4 and add 443349cc55cSDimitry Andric // them to the candidate group. We know that LD1 does not alias with ST1 or 444349cc55cSDimitry Andric // ST2, since we already did that check. However we don't yet know if it 445349cc55cSDimitry Andric // may alias ST3 and ST4, so we perform those checks now. 446349cc55cSDimitry Andric SmallVector<GStore *> StoresToMerge; 447349cc55cSDimitry Andric 448349cc55cSDimitry Andric auto DoesStoreAliasWithPotential = [&](unsigned Idx, GStore &CheckStore) { 449349cc55cSDimitry Andric for (auto AliasInfo : reverse(C.PotentialAliases)) { 450349cc55cSDimitry Andric MachineInstr *PotentialAliasOp = AliasInfo.first; 451349cc55cSDimitry Andric unsigned PreCheckedIdx = AliasInfo.second; 452*06c3fb27SDimitry Andric if (static_cast<unsigned>(Idx) < PreCheckedIdx) { 453*06c3fb27SDimitry Andric // Once our store index is lower than the index associated with the 454*06c3fb27SDimitry Andric // potential alias, we know that we've already checked for this alias 455*06c3fb27SDimitry Andric // and all of the earlier potential aliases too. 
456*06c3fb27SDimitry Andric return false; 457*06c3fb27SDimitry Andric } 458349cc55cSDimitry Andric // Need to check this alias. 459349cc55cSDimitry Andric if (GISelAddressing::instMayAlias(CheckStore, *PotentialAliasOp, *MRI, 460349cc55cSDimitry Andric AA)) { 461349cc55cSDimitry Andric LLVM_DEBUG(dbgs() << "Potential alias " << *PotentialAliasOp 462349cc55cSDimitry Andric << " detected\n"); 463349cc55cSDimitry Andric return true; 464349cc55cSDimitry Andric } 465349cc55cSDimitry Andric } 466349cc55cSDimitry Andric return false; 467349cc55cSDimitry Andric }; 468349cc55cSDimitry Andric // Start from the last store in the group, and check if it aliases with any 469349cc55cSDimitry Andric // of the potential aliasing operations in the list. 470349cc55cSDimitry Andric for (int StoreIdx = C.Stores.size() - 1; StoreIdx >= 0; --StoreIdx) { 471349cc55cSDimitry Andric auto *CheckStore = C.Stores[StoreIdx]; 472349cc55cSDimitry Andric if (DoesStoreAliasWithPotential(StoreIdx, *CheckStore)) 473349cc55cSDimitry Andric continue; 474349cc55cSDimitry Andric StoresToMerge.emplace_back(CheckStore); 475349cc55cSDimitry Andric } 476349cc55cSDimitry Andric 477349cc55cSDimitry Andric LLVM_DEBUG(dbgs() << StoresToMerge.size() 478349cc55cSDimitry Andric << " stores remaining after alias checks. Merging...\n"); 479349cc55cSDimitry Andric 480349cc55cSDimitry Andric // Now we've checked for aliasing hazards, merge any stores left. 
  C.reset();
  if (StoresToMerge.size() < 2)
    return false;
  return mergeStores(StoresToMerge);
}

bool LoadStoreOpt::operationAliasesWithCandidate(MachineInstr &MI,
                                                 StoreMergeCandidate &C) {
  if (C.Stores.empty())
    return false;
  // Conservatively report an alias if MI may alias *any* store already
  // collected in the candidate.
  return llvm::any_of(C.Stores, [&](MachineInstr *OtherMI) {
    return instMayAlias(MI, *OtherMI, *MRI, AA);
  });
}

void LoadStoreOpt::StoreMergeCandidate::addPotentialAlias(MachineInstr &MI) {
  // Pair the potentially-aliasing instruction with the index of the
  // candidate's most recently added store at the time it was seen, so later
  // alias checks can be scoped to the stores that preceded it.
  PotentialAliases.emplace_back(std::make_pair(&MI, Stores.size() - 1));
}

/// Try to add \p StoreMI to the merge candidate \p C. Returns true if the
/// store was accepted (either starting a new candidate or extending the
/// current run of adjacent stores).
bool LoadStoreOpt::addStoreToCandidate(GStore &StoreMI,
                                       StoreMergeCandidate &C) {
  // Check if the given store writes to an adjacent address, and other
  // requirements.
  LLT ValueTy = MRI->getType(StoreMI.getValueReg());
  LLT PtrTy = MRI->getType(StoreMI.getPointerReg());

  // Only handle scalars.
  if (!ValueTy.isScalar())
    return false;

  // Don't allow truncating stores for now.
  if (StoreMI.getMemSizeInBits() != ValueTy.getSizeInBits())
    return false;

  // Avoid adding volatile or ordered stores to the candidate. We already have
  // a check for this in instMayAlias() but that only gets called later between
  // potential aliasing hazards.
  if (!StoreMI.isSimple())
    return false;

  Register StoreAddr = StoreMI.getPointerReg();
  auto BIO = getPointerInfo(StoreAddr, *MRI);
  Register StoreBase = BIO.BaseReg;
  uint64_t StoreOffCst = BIO.Offset;
  if (C.Stores.empty()) {
    // This is the first store of the candidate.
    // If the offset can't possibly allow for a lower addressed store with the
    // same base, don't bother adding it.
    if (StoreOffCst < ValueTy.getSizeInBytes())
      return false;
    C.BasePtr = StoreBase;
    C.CurrentLowestOffset = StoreOffCst;
    C.Stores.emplace_back(&StoreMI);
    LLVM_DEBUG(dbgs() << "Starting a new merge candidate group with: "
                      << StoreMI);
    return true;
  }

  // Check the store is the same size as the existing ones in the candidate.
  if (MRI->getType(C.Stores[0]->getValueReg()).getSizeInBits() !=
      ValueTy.getSizeInBits())
    return false;

  if (MRI->getType(C.Stores[0]->getPointerReg()).getAddressSpace() !=
      PtrTy.getAddressSpace())
    return false;

  // There are other stores in the candidate. Check that the store address
  // writes to the next lowest adjacent address.
  if (C.BasePtr != StoreBase)
    return false;
  // Since the block is scanned bottom-up, each accepted store must sit exactly
  // one value-width below the current lowest offset.
  if ((C.CurrentLowestOffset - ValueTy.getSizeInBytes()) !=
      static_cast<uint64_t>(StoreOffCst))
    return false;

  // This writes to an adjacent address. Allow it.
  C.Stores.emplace_back(&StoreMI);
  C.CurrentLowestOffset = C.CurrentLowestOffset - ValueTy.getSizeInBytes();
  LLVM_DEBUG(dbgs() << "Candidate added store: " << StoreMI);
  return true;
}

/// Scan \p MBB bottom-up, building runs of adjacent stores and merging each
/// run when an aliasing hazard or the block boundary ends it. Returns true if
/// any change was made.
bool LoadStoreOpt::mergeBlockStores(MachineBasicBlock &MBB) {
  bool Changed = false;
  // Walk through the block bottom-up, looking for merging candidates.
  StoreMergeCandidate Candidate;
  for (MachineInstr &MI : llvm::reverse(MBB)) {
    // Skip instructions already queued for deletion by an earlier merge.
    if (InstsToErase.contains(&MI))
      continue;

    if (auto *StoreMI = dyn_cast<GStore>(&MI)) {
      // We have a G_STORE. Add it to the candidate if it writes to an adjacent
      // address.
      if (!addStoreToCandidate(*StoreMI, Candidate)) {
        // Store wasn't eligible to be added. May need to record it as a
        // potential alias.
        if (operationAliasesWithCandidate(*StoreMI, Candidate)) {
          // It aliases the candidate: flush the candidate now, before this
          // store would invalidate its memory contents.
          Changed |= processMergeCandidate(Candidate);
          continue;
        }
        Candidate.addPotentialAlias(*StoreMI);
      }
      continue;
    }

    // If we don't have any stores yet, this instruction can't pose a problem.
    if (Candidate.Stores.empty())
      continue;

    // We're dealing with some other kind of instruction.
    if (isInstHardMergeHazard(MI)) {
      Changed |= processMergeCandidate(Candidate);
      Candidate.Stores.clear();
      continue;
    }

    if (!MI.mayLoadOrStore())
      continue;

    if (operationAliasesWithCandidate(MI, Candidate)) {
      // We have a potential alias, so process the current candidate if we can
      // and then continue looking for a new candidate.
      Changed |= processMergeCandidate(Candidate);
      continue;
    }

    // Record this instruction as a potential alias for future stores that are
    // added to the candidate.
    Candidate.addPotentialAlias(MI);
  }

  // Process any candidate left after finishing searching the entire block.
  Changed |= processMergeCandidate(Candidate);

  // Erase instructions now that we're no longer iterating over the block.
  for (auto *MI : InstsToErase)
    MI->eraseFromParent();
  InstsToErase.clear();
  return Changed;
}

/// Check if the store \p Store is a truncstore that can be merged. That is,
/// it's a store of a shifted value of \p SrcVal. If \p SrcVal is an empty
/// Register then it does not need to match and SrcVal is set to the source
/// value found.
/// On match, returns the start byte offset of the \p SrcVal that is being
/// stored.
static std::optional<int64_t>
getTruncStoreByteOffset(GStore &Store, Register &SrcVal,
                        MachineRegisterInfo &MRI) {
  Register TruncVal;
  if (!mi_match(Store.getValueReg(), MRI, m_GTrunc(m_Reg(TruncVal))))
    return std::nullopt;

  // The shift amount must be a constant multiple of the narrow type.
  // It is translated to the offset address in the wide source value "y".
  //
  // x = G_LSHR y, ShiftAmtC
  // s8 z = G_TRUNC x
  // store z, ...
  Register FoundSrcVal;
  int64_t ShiftAmt;
  if (!mi_match(TruncVal, MRI,
                m_any_of(m_GLShr(m_Reg(FoundSrcVal), m_ICst(ShiftAmt)),
                         m_GAShr(m_Reg(FoundSrcVal), m_ICst(ShiftAmt))))) {
    // No shift feeding the trunc: this is byte offset 0 of the source value.
    if (!SrcVal.isValid() || TruncVal == SrcVal) {
      if (!SrcVal.isValid())
        SrcVal = TruncVal;
      return 0; // If it's the lowest index store.
    }
    return std::nullopt;
  }

  unsigned NarrowBits = Store.getMMO().getMemoryType().getScalarSizeInBits();
  if (ShiftAmt % NarrowBits != 0)
    return std::nullopt;
  const unsigned Offset = ShiftAmt / NarrowBits;

  if (SrcVal.isValid() && FoundSrcVal != SrcVal)
    return std::nullopt;

  if (!SrcVal.isValid())
    SrcVal = FoundSrcVal;
  else if (MRI.getType(SrcVal) != MRI.getType(FoundSrcVal))
    return std::nullopt;
  return Offset;
}

/// Match a pattern where a wide type scalar value is stored by several narrow
/// stores. Fold it into a single store or a BSWAP and a store if the targets
/// supports it.
///
/// Assuming little endian target:
/// i8 *p = ...
/// i32 val = ...
/// p[0] = (val >> 0) & 0xFF;
/// p[1] = (val >> 8) & 0xFF;
/// p[2] = (val >> 16) & 0xFF;
/// p[3] = (val >> 24) & 0xFF;
/// =>
/// *((i32)p) = val;
///
/// i8 *p = ...
/// i32 val = ...
/// p[0] = (val >> 24) & 0xFF;
/// p[1] = (val >> 16) & 0xFF;
/// p[2] = (val >> 8) & 0xFF;
/// p[3] = (val >> 0) & 0xFF;
/// =>
/// *((i32)p) = BSWAP(val);
bool LoadStoreOpt::mergeTruncStore(GStore &StoreMI,
                                   SmallPtrSetImpl<GStore *> &DeletedStores) {
  LLT MemTy = StoreMI.getMMO().getMemoryType();

  // We only handle merging simple stores of 1-4 bytes.
  if (!MemTy.isScalar())
    return false;
  switch (MemTy.getSizeInBits()) {
  case 8:
  case 16:
  case 32:
    break;
  default:
    return false;
  }
  if (!StoreMI.isSimple())
    return false;

  // We do a simple search for mergeable stores prior to this one.
  // Any potential alias hazard along the way terminates the search.
  SmallVector<GStore *> FoundStores;

  // We're looking for:
  // 1) a (store(trunc(...)))
  // 2) of an LSHR/ASHR of a single wide value, by the appropriate shift to get
  //    the partial value stored.
  // 3) where the offsets form either a little or big-endian sequence.

  auto &LastStore = StoreMI;

  // The single base pointer that all stores must use.
  Register BaseReg;
  int64_t LastOffset;
  if (!mi_match(LastStore.getPointerReg(), *MRI,
                m_GPtrAdd(m_Reg(BaseReg), m_ICst(LastOffset)))) {
    // Not a G_PTR_ADD: treat the pointer itself as base with offset 0.
    BaseReg = LastStore.getPointerReg();
    LastOffset = 0;
  }

  GStore *LowestIdxStore = &LastStore;
  int64_t LowestIdxOffset = LastOffset;

  Register WideSrcVal;
  auto LowestShiftAmt = getTruncStoreByteOffset(LastStore, WideSrcVal, *MRI);
  if (!LowestShiftAmt)
    return false; // Didn't match a trunc.
  assert(WideSrcVal.isValid());

  LLT WideStoreTy = MRI->getType(WideSrcVal);
  // The wide type might not be a multiple of the memory type, e.g. s48 and s32.
  if (WideStoreTy.getSizeInBits() % MemTy.getSizeInBits() != 0)
    return false;
  const unsigned NumStoresRequired =
      WideStoreTy.getSizeInBits() / MemTy.getSizeInBits();

  // OffsetMap[i] records the memory offset of the store holding the i-th
  // narrow piece of the wide value; INT64_MAX marks "not seen yet".
  SmallVector<int64_t, 8> OffsetMap(NumStoresRequired, INT64_MAX);
  OffsetMap[*LowestShiftAmt] = LastOffset;
  FoundStores.emplace_back(&LastStore);

  // Bound the upward scan so we don't walk arbitrarily far in large blocks.
  const int MaxInstsToCheck = 10;
  int NumInstsChecked = 0;
  for (auto II = ++LastStore.getReverseIterator();
       II != LastStore.getParent()->rend() && NumInstsChecked < MaxInstsToCheck;
       ++II) {
    NumInstsChecked++;
    GStore *NewStore;
    if ((NewStore = dyn_cast<GStore>(&*II))) {
      if (NewStore->getMMO().getMemoryType() != MemTy || !NewStore->isSimple())
        break;
    } else if (II->isLoadFoldBarrier() || II->mayLoad()) {
      break;
    } else {
      continue; // This is a safe instruction we can look past.
    }

    Register NewBaseReg;
    int64_t MemOffset;
    // Check we're storing to the same base + some offset.
    if (!mi_match(NewStore->getPointerReg(), *MRI,
                  m_GPtrAdd(m_Reg(NewBaseReg), m_ICst(MemOffset)))) {
      NewBaseReg = NewStore->getPointerReg();
      MemOffset = 0;
    }
    if (BaseReg != NewBaseReg)
      break;

    auto ShiftByteOffset = getTruncStoreByteOffset(*NewStore, WideSrcVal, *MRI);
    if (!ShiftByteOffset)
      break;
    if (MemOffset < LowestIdxOffset) {
      LowestIdxOffset = MemOffset;
      LowestIdxStore = NewStore;
    }

    // Map the offset in the store and the offset in the combined value, and
    // early return if it has been set before.
    if (*ShiftByteOffset < 0 || *ShiftByteOffset >= NumStoresRequired ||
        OffsetMap[*ShiftByteOffset] != INT64_MAX)
      break;
    OffsetMap[*ShiftByteOffset] = MemOffset;

    FoundStores.emplace_back(NewStore);
    // Reset counter since we've found a matching inst.
    NumInstsChecked = 0;
    if (FoundStores.size() == NumStoresRequired)
      break;
  }

  if (FoundStores.size() != NumStoresRequired) {
    if (FoundStores.size() == 1)
      return false;
    // We didn't find enough stores to merge into the size of the original
    // source value, but we may be able to generate a smaller store if we
    // truncate the source value.
    WideStoreTy = LLT::scalar(FoundStores.size() * MemTy.getScalarSizeInBits());
  }

  unsigned NumStoresFound = FoundStores.size();

  const auto &DL = LastStore.getMF()->getDataLayout();
  auto &C = LastStore.getMF()->getFunction().getContext();
  // Check that a store of the wide type is both allowed and fast on the target
  unsigned Fast = 0;
  bool Allowed = TLI->allowsMemoryAccess(
      C, DL, WideStoreTy, LowestIdxStore->getMMO(), &Fast);
  if (!Allowed || !Fast)
    return false;

  // Check if the pieces of the value are going to the expected places in
  // memory to merge the stores.
  unsigned NarrowBits = MemTy.getScalarSizeInBits();
  auto checkOffsets = [&](bool MatchLittleEndian) {
    if (MatchLittleEndian) {
      for (unsigned i = 0; i != NumStoresFound; ++i)
        if (OffsetMap[i] != i * (NarrowBits / 8) + LowestIdxOffset)
          return false;
    } else { // MatchBigEndian by reversing loop counter.
      for (unsigned i = 0, j = NumStoresFound - 1; i != NumStoresFound;
           ++i, --j)
        if (OffsetMap[j] != i * (NarrowBits / 8) + LowestIdxOffset)
          return false;
    }
    return true;
  };

  // Check if the offsets line up for the native data layout of this target.
  bool NeedBswap = false;
  bool NeedRotate = false;
  if (!checkOffsets(DL.isLittleEndian())) {
    // Special-case: check if byte offsets line up for the opposite endian.
    if (NarrowBits == 8 && checkOffsets(DL.isBigEndian()))
      NeedBswap = true;
    else if (NumStoresFound == 2 && checkOffsets(DL.isBigEndian()))
      NeedRotate = true;
    else
      return false;
  }

  // Bail out if the target can't produce the fixup op we'd need.
  if (NeedBswap &&
      !isLegalOrBeforeLegalizer({TargetOpcode::G_BSWAP, {WideStoreTy}}, *MF))
    return false;
  if (NeedRotate &&
      !isLegalOrBeforeLegalizer(
          {TargetOpcode::G_ROTR, {WideStoreTy, WideStoreTy}}, *MF))
    return false;

  Builder.setInstrAndDebugLoc(StoreMI);

  // If we only matched a subset of the stores, narrow the source first.
  if (WideStoreTy != MRI->getType(WideSrcVal))
    WideSrcVal = Builder.buildTrunc(WideStoreTy, WideSrcVal).getReg(0);

  if (NeedBswap) {
    WideSrcVal = Builder.buildBSwap(WideStoreTy, WideSrcVal).getReg(0);
  } else if (NeedRotate) {
    assert(WideStoreTy.getSizeInBits() % 2 == 0 &&
           "Unexpected type for rotate");
    auto RotAmt =
        Builder.buildConstant(WideStoreTy, WideStoreTy.getSizeInBits() / 2);
    WideSrcVal =
        Builder.buildRotateRight(WideStoreTy, WideSrcVal, RotAmt).getReg(0);
  }

  Builder.buildStore(WideSrcVal, LowestIdxStore->getPointerReg(),
                     LowestIdxStore->getMMO().getPointerInfo(),
                     LowestIdxStore->getMMO().getAlign());

  // Erase the old stores.
  for (auto *ST : FoundStores) {
    ST->eraseFromParent();
    DeletedStores.insert(ST);
  }
  return true;
}

/// Run the truncstore-merging combine over every store in \p BB. Returns true
/// if any change was made.
bool LoadStoreOpt::mergeTruncStoresBlock(MachineBasicBlock &BB) {
  bool Changed = false;
  SmallVector<GStore *, 16> Stores;
  SmallPtrSet<GStore *, 8> DeletedStores;
  // Walk up the block so we can see the most eligible stores.
  for (MachineInstr &MI : llvm::reverse(BB))
    if (auto *StoreMI = dyn_cast<GStore>(&MI))
      Stores.emplace_back(StoreMI);

  for (auto *StoreMI : Stores) {
    // Skip stores already consumed by a previous merge.
    if (DeletedStores.count(StoreMI))
      continue;
    if (mergeTruncStore(*StoreMI, DeletedStores))
      Changed = true;
  }
  return Changed;
}

/// Run both store-merging combines over every block of \p MF. Returns true if
/// any change was made.
bool LoadStoreOpt::mergeFunctionStores(MachineFunction &MF) {
  bool Changed = false;
  for (auto &BB : MF) {
    Changed |= mergeBlockStores(BB);
    Changed |= mergeTruncStoresBlock(BB);
  }

  // Erase all dead instructions left over by the merging.
  if (Changed) {
    for (auto &BB : MF) {
      // Walk bottom-up so a dead def is removed before its (now-dead) feeding
      // instructions are visited; early_inc keeps iteration valid over erases.
      for (auto &I : make_early_inc_range(make_range(BB.rbegin(), BB.rend()))) {
        if (isTriviallyDead(I, *MRI))
          I.eraseFromParent();
      }
    }
  }

  return Changed;
}

void LoadStoreOpt::initializeStoreMergeTargetInfo(unsigned AddrSpace) {
  // Query the legalizer info to record what store types are legal.
  // We record this because we don't want to bother trying to merge stores into
  // illegal ones, which would just result in being split again.

  if (LegalStoreSizes.count(AddrSpace)) {
    assert(LegalStoreSizes[AddrSpace].any());
    return; // Already cached sizes for this address space.
  }

  // Need to reserve at least MaxStoreSizeToForm + 1 bits.
  BitVector LegalSizes(MaxStoreSizeToForm * 2);
  const auto &LI = *MF->getSubtarget().getLegalizerInfo();
  const auto &DL = MF->getFunction().getParent()->getDataLayout();
  Type *IntPtrIRTy =
      DL.getIntPtrType(MF->getFunction().getContext(), AddrSpace);
  LLT PtrTy = getLLTForType(*IntPtrIRTy->getPointerTo(AddrSpace), DL);
  // We assume that we're not going to be generating any stores wider than
  // MaxStoreSizeToForm bits for now.
  for (unsigned Size = 2; Size <= MaxStoreSizeToForm; Size *= 2) {
    LLT Ty = LLT::scalar(Size);
    // Build a legality query for a simple (non-atomic) store of this size.
    SmallVector<LegalityQuery::MemDesc, 2> MemDescrs(
        {{Ty, Ty.getSizeInBits(), AtomicOrdering::NotAtomic}});
    SmallVector<LLT> StoreTys({Ty, PtrTy});
    LegalityQuery Q(TargetOpcode::G_STORE, StoreTys, MemDescrs);
    LegalizeActionStep ActionStep = LI.getAction(Q);
    if (ActionStep.Action == LegalizeActions::Legal)
      LegalSizes.set(Size);
  }
  assert(LegalSizes.any() && "Expected some store sizes to be legal!");
  LegalStoreSizes[AddrSpace] = LegalSizes;
}

bool LoadStoreOpt::runOnMachineFunction(MachineFunction &MF) {
  // If the ISel pipeline failed, do not bother running that pass.
  if (MF.getProperties().hasProperty(
          MachineFunctionProperties::Property::FailedISel))
    return false;

  LLVM_DEBUG(dbgs() << "Begin memory optimizations for: " << MF.getName()
                    << '\n');

  init(MF);
  bool Changed = false;
  Changed |= mergeFunctionStores(MF);

  // Drop the cached per-address-space legal store sizes; they are recomputed
  // lazily on the next run.
  LegalStoreSizes.clear();
  return Changed;
}