/openbsd-src/gnu/llvm/llvm/lib/CodeGen/

TargetInstrInfo.cpp
  652  MachineInstr &LoadMI,  in foldMemoryOperand() argument
  654  assert(LoadMI.canFoldAsLoad() && "LoadMI isn't foldable!");  in foldMemoryOperand()
  670  isLoadFromStackSlot(LoadMI, FrameIndex)) {  in foldMemoryOperand()
  677  NewMI = foldMemoryOperandImpl(MF, MI, Ops, MI, LoadMI, LIS);  in foldMemoryOperand()
  685  NewMI->setMemRefs(MF, LoadMI.memoperands());  in foldMemoryOperand()
  689  for (MachineInstr::mmo_iterator I = LoadMI.memoperands_begin(),  in foldMemoryOperand()
  690  E = LoadMI.memoperands_end();  in foldMemoryOperand()
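
The matches above outline TargetInstrInfo::foldMemoryOperand(): the load must advertise canFoldAsLoad(), stack-slot loads take a dedicated path through isLoadFromStackSlot(), and a successful fold copies the load's memory operands onto the new instruction. A minimal caller-side sketch, assuming MI, LoadMI, TII, and LIS are in scope; the operand index in Ops is illustrative:

    if (LoadMI.canFoldAsLoad()) {
      SmallVector<unsigned, 1> Ops = {1}; // indices of MI's operands to fold
      if (MachineInstr *NewMI = TII.foldMemoryOperand(MI, Ops, LoadMI, &LIS)) {
        // MI has been replaced; NewMI already carries LoadMI's memory
        // operands (see the setMemRefs() call at line 685 above).
      }
    }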
|
InlineSpiller.cpp
  226  MachineInstr *LoadMI = nullptr);
  821  MachineInstr *LoadMI) {  in foldMemoryOperand() argument
  869  if (LoadMI && MO.isDef())  in foldMemoryOperand()
  900  LoadMI ? TII.foldMemoryOperand(*MI, FoldOps, *LoadMI, &LIS)  in foldMemoryOperand()
|
/openbsd-src/gnu/llvm/llvm/lib/Target/Mips/

MipsInstrInfo.h
  101  const MachineInstr &LoadMI) const;
|
MipsInstrInfo.cpp
  617  const MachineInstr &LoadMI) const {  in SafeInLoadDelaySlot()
  621  return !llvm::any_of(LoadMI.defs(), [&](const MachineOperand &Op) {  in SafeInLoadDelaySlot()
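
The two matched lines give away the shape of SafeInLoadDelaySlot(): a candidate may occupy the load delay slot only if it reads none of the registers the load defines. A hedged reconstruction; the readsRegister() call in the lambda body is an assumption, not part of the match:

    bool MipsInstrInfo::SafeInLoadDelaySlot(const MachineInstr &MIInSlot,
                                            const MachineInstr &LoadMI) const {
      // Unsafe if the slot instruction consumes any value the load produces.
      return !llvm::any_of(LoadMI.defs(), [&](const MachineOperand &Op) {
        return Op.isReg() && MIInSlot.readsRegister(Op.getReg());
      });
    }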
|
/openbsd-src/gnu/llvm/llvm/lib/CodeGen/GlobalISel/

CombinerHelper.cpp
  518  GAnyLoad *LoadMI = dyn_cast<GAnyLoad>(&MI);  in matchCombineExtendingLoads() local
  519  if (!LoadMI)  in matchCombineExtendingLoads()
  522  Register LoadReg = LoadMI->getDstReg();  in matchCombineExtendingLoads()
  555  const auto &MMO = LoadMI->getMMO();  in matchCombineExtendingLoads()
  564  LLT SrcTy = MRI.getType(LoadMI->getPointerReg());  in matchCombineExtendingLoads()
  721  GAnyLoad *LoadMI = dyn_cast<GAnyLoad>(MRI.getVRegDef(SrcReg));  in matchCombineLoadWithAndMask() local
  722  if (!LoadMI || !MRI.hasOneNonDBGUse(LoadMI->getDstReg()))  in matchCombineLoadWithAndMask()
  725  Register LoadReg = LoadMI->getDstReg();  in matchCombineLoadWithAndMask()
  727  Register PtrReg = LoadMI->getPointerReg();  in matchCombineLoadWithAndMask()
  729  uint64_t LoadSizeBits = LoadMI->getMemSizeInBits();  in matchCombineLoadWithAndMask()
  [all …]
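
Both combines start from the same GlobalISel preamble: dyn_cast to GAnyLoad (directly, or via the def of a source register) and insist on a single non-debug user before rewriting. A condensed sketch reassembled from the matched lines:

    GAnyLoad *LoadMI = dyn_cast<GAnyLoad>(&MI);
    if (!LoadMI)
      return false; // not any flavour of load
    Register LoadReg = LoadMI->getDstReg();
    if (!MRI.hasOneNonDBGUse(LoadReg))
      return false; // rewriting is only safe with one real user
    const MachineMemOperand &MMO = LoadMI->getMMO(); // legality checks use this
    LLT SrcTy = MRI.getType(LoadMI->getPointerReg());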
|
LegalizerHelper.cpp
  1040  auto &LoadMI = cast<GLoad>(MI);  in narrowScalar() local
  1041  Register DstReg = LoadMI.getDstReg();  in narrowScalar()
  1046  if (8 * LoadMI.getMemSize() != DstTy.getSizeInBits()) {  in narrowScalar()
  1048  MIRBuilder.buildLoad(TmpReg, LoadMI.getPointerReg(), LoadMI.getMMO());  in narrowScalar()
  1050  LoadMI.eraseFromParent();  in narrowScalar()
  1054  return reduceLoadStoreWidth(LoadMI, TypeIdx, NarrowTy);  in narrowScalar()
  1058  auto &LoadMI = cast<GExtLoad>(MI);  in narrowScalar() local
  1059  Register DstReg = LoadMI.getDstReg();  in narrowScalar()
  1060  Register PtrReg = LoadMI.getPointerReg();  in narrowScalar()
  1063  auto &MMO = LoadMI.getMMO();  in narrowScalar()
  [all …]
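
The narrowScalar() lines show the extending-load trick: when the destination is wider than the memory access, load at the memory width and any-extend, retiring the original instruction; otherwise fall through to reduceLoadStoreWidth(). A sketch reassembled from those lines; the LLT given to TmpReg is an assumption:

    auto &LoadMI = cast<GLoad>(MI);
    Register DstReg = LoadMI.getDstReg();
    LLT DstTy = MRI.getType(DstReg);
    if (8 * LoadMI.getMemSize() != DstTy.getSizeInBits()) {
      // Load at the smaller memory width, then widen with G_ANYEXT.
      Register TmpReg = MRI.createGenericVirtualRegister(
          LLT::scalar(8 * LoadMI.getMemSize()));
      MIRBuilder.buildLoad(TmpReg, LoadMI.getPointerReg(), LoadMI.getMMO());
      MIRBuilder.buildAnyExt(DstReg, TmpReg);
      LoadMI.eraseFromParent();
      return Legalized;
    }
    return reduceLoadStoreWidth(LoadMI, TypeIdx, NarrowTy);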
|
/openbsd-src/gnu/llvm/llvm/lib/Target/SystemZ/

SystemZInstrInfo.h
  285  MachineBasicBlock::iterator InsertPt, MachineInstr &LoadMI,
|
SystemZInstrInfo.cpp
  1338  MachineBasicBlock::iterator InsertPt, MachineInstr &LoadMI,  in foldMemoryOperandImpl() argument
|
/openbsd-src/gnu/llvm/llvm/lib/Target/AArch64/

AArch64LoadStoreOptimizer.cpp
  1223  MachineInstr &LoadMI = *I;  in findMatchingStore() local
  1224  Register BaseReg = AArch64InstrInfo::getLdStBaseOp(LoadMI).getReg();  in findMatchingStore()
  1252  if (MI.mayStore() && isMatchingStore(LoadMI, MI) &&  in findMatchingStore()
  1255  isLdOffsetInRangeOfSt(LoadMI, MI, TII) &&  in findMatchingStore()
  1273  if (MI.mayStore() && LoadMI.mayAlias(AA, MI, /*UseTBAA*/ false))  in findMatchingStore()
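
findMatchingStore() scans backwards from the load for a store it can forward from. A hypothetical condensed form of that loop (the real pass also bounds the scan, skips debug instructions, and stops when the base register is modified):

    MachineInstr &LoadMI = *I;
    Register BaseReg = AArch64InstrInfo::getLdStBaseOp(LoadMI).getReg();
    for (MachineBasicBlock::iterator MBBI = std::prev(I), B = MBB->begin();;
         --MBBI) {
      MachineInstr &MI = *MBBI;
      // Success: a store to the same base whose bytes cover the load.
      if (MI.mayStore() && isMatchingStore(LoadMI, MI) &&
          BaseReg == AArch64InstrInfo::getLdStBaseOp(MI).getReg() &&
          isLdOffsetInRangeOfSt(LoadMI, MI, TII)) {
        StoreI = MBBI;
        return true;
      }
      // Failure: a store we cannot see through.
      if (MI.mayStore() && LoadMI.mayAlias(AA, MI, /*UseTBAA=*/false))
        return false;
      if (MBBI == B)
        return false; // reached the top of the block without a match
    }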
|
AArch64FastISel.cpp
  4481  const auto *LoadMI = MI;  in optimizeIntExtLoad() local
  4482  if (LoadMI->getOpcode() == TargetOpcode::COPY &&  in optimizeIntExtLoad()
  4483  LoadMI->getOperand(1).getSubReg() == AArch64::sub_32) {  in optimizeIntExtLoad()
  4485  LoadMI = MRI.getUniqueVRegDef(LoadReg);  in optimizeIntExtLoad()
  4486  assert(LoadMI && "Expected valid instruction");  in optimizeIntExtLoad()
  4488  if (!(IsZExt && isZExtLoad(LoadMI)) && !(!IsZExt && isSExtLoad(LoadMI)))  in optimizeIntExtLoad()
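
The optimizeIntExtLoad() matches show a look-through idiom: if the extend's input is a COPY of the sub_32 subregister, step to the unique def of the copied vreg and require it to be the matching extending load. Reassembled with comments:

    const auto *LoadMI = MI;
    // Look through a 32-bit subregister copy to the instruction that
    // actually produced the value.
    if (LoadMI->getOpcode() == TargetOpcode::COPY &&
        LoadMI->getOperand(1).getSubReg() == AArch64::sub_32) {
      Register LoadReg = MI->getOperand(1).getReg();
      LoadMI = MRI.getUniqueVRegDef(LoadReg);
      assert(LoadMI && "Expected valid instruction");
    }
    // The load must already perform the extension we are about to elide.
    if (!(IsZExt && isZExtLoad(LoadMI)) && !(!IsZExt && isSExtLoad(LoadMI)))
      return false;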
|
AArch64InstrInfo.cpp
  4635  MachineInstr &LoadMI = *--InsertPt;  in foldMemoryOperandImpl() local
  4636  MachineOperand &LoadDst = LoadMI.getOperand(0);  in foldMemoryOperandImpl()
  4640  return &LoadMI;  in foldMemoryOperandImpl()
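
The idiom at 4635 recovers a just-emitted instruction: after loadRegFromStackSlot() inserts the fill ahead of InsertPt, decrementing the iterator yields the new MachineInstr so it can be adjusted and returned as the folded instruction. A hedged sketch; the subregister fix-up is an assumption about the surrounding code:

    loadRegFromStackSlot(MBB, InsertPt, DstReg, FrameIndex, RC, &TRI);
    MachineInstr &LoadMI = *--InsertPt; // the fill we just emitted
    MachineOperand &LoadDst = LoadMI.getOperand(0);
    LoadDst.setSubReg(SubIdx); // e.g. sub_32 when only the low half is live
    LoadDst.setIsUndef();      // remaining bits are undefined
    return &LoadMI;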
|
/openbsd-src/gnu/llvm/llvm/lib/Target/AArch64/GISel/

AArch64InstructionSelector.cpp
  2621  auto *LoadMI = emitLoadFromConstantPool(FPImm, MIB);  in select() local
  2622  if (!LoadMI) {  in select()
  2626  MIB.buildCopy({DefReg}, {LoadMI->getOperand(0).getReg()});  in select()
  3269  auto *LoadMI = getOpcodeDef(TargetOpcode::G_LOAD, SrcReg, MRI);  in select() local
  3272  if (LoadMI && IsGPR) {  in select()
  3273  const MachineMemOperand *MemOp = *LoadMI->memoperands_begin();  in select()
  4386  MachineInstr *LoadMI = nullptr;  in emitLoadFromConstantPool() local
  4391  LoadMI =  in emitLoadFromConstantPool()
  4398  LoadMI =  in emitLoadFromConstantPool()
  4405  LoadMI =  in emitLoadFromConstantPool()
  [all …]
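
The select() matches at 2621-2626 show the caller side of emitLoadFromConstantPool(): materialize the FP immediate as a constant-pool load, then copy its result into the destination register. A sketch with illustrative error handling:

    MachineInstr *LoadMI = emitLoadFromConstantPool(FPImm, MIB);
    if (!LoadMI) {
      LLVM_DEBUG(dbgs() << "Failed to load FP constant pool entry\n");
      return false;
    }
    MIB.buildCopy({DefReg}, {LoadMI->getOperand(0).getReg()});
    MI.eraseFromParent(); // the G_FCONSTANT has been replaced
    return true;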
|
/openbsd-src/gnu/llvm/llvm/include/llvm/CodeGen/

TargetInstrInfo.h
  1145  MachineInstr &LoadMI,
  1280  MachineBasicBlock::iterator InsertPt, MachineInstr &LoadMI,
|
/openbsd-src/gnu/llvm/llvm/lib/Target/PowerPC/

PPCMIPeephole.cpp
  554  MachineInstr *LoadMI = MRI->getVRegDef(FeedReg1);  in simplifyCode() local
  555  if (LoadMI && LoadMI->getOpcode() == PPC::LXVDSX)  in simplifyCode()
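
This is the standard def-walking idiom on SSA machine IR: fetch the defining instruction of a virtual register and test its opcode. The matched lines, with a comment on intent:

    MachineInstr *LoadMI = MRI->getVRegDef(FeedReg1);
    if (LoadMI && LoadMI->getOpcode() == PPC::LXVDSX) {
      // LXVDSX already splats its operand, so a following splat or permute
      // is likely redundant (a reading of the peephole, not shown in the
      // matches above).
    }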
|
/openbsd-src/gnu/llvm/llvm/lib/Target/X86/

X86InstrInfo.h
  407  MachineBasicBlock::iterator InsertPt, MachineInstr &LoadMI,
|
X86InstrInfo.cpp
  6350  static bool isNonFoldablePartialRegisterLoad(const MachineInstr &LoadMI,  in isNonFoldablePartialRegisterLoad() argument
  6353  unsigned Opc = LoadMI.getOpcode();  in isNonFoldablePartialRegisterLoad()
  6357  MF.getRegInfo().getRegClass(LoadMI.getOperand(0).getReg());  in isNonFoldablePartialRegisterLoad()
  6624  MachineBasicBlock::iterator InsertPt, MachineInstr &LoadMI,  in foldMemoryOperandImpl() argument
  6635  unsigned NumOps = LoadMI.getDesc().getNumOperands();  in foldMemoryOperandImpl()
  6637  if (isLoadFromStackSlot(LoadMI, FrameIndex)) {  in foldMemoryOperandImpl()
  6638  if (isNonFoldablePartialRegisterLoad(LoadMI, MI, MF))  in foldMemoryOperandImpl()
  6654  if (LoadMI.hasOneMemOperand())  in foldMemoryOperandImpl()
  6655  Alignment = (*LoadMI.memoperands_begin())->getAlign();  in foldMemoryOperandImpl()
  6657  switch (LoadMI.getOpcode()) {  in foldMemoryOperandImpl()
  [all …]
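
Two reusable checks stand out: a fold must be rejected when the instruction would read bytes a partial-register load never wrote, and the load's alignment comes from its single memory operand when it has one. A sketch of that gate; the Align(1) fallback is an assumption (the real code switches on the load opcode instead, as line 6657 shows):

    if (isNonFoldablePartialRegisterLoad(LoadMI, MI, MF))
      return nullptr; // e.g. a scalar load feeding a full-vector use
    Align Alignment = Align(1);
    if (LoadMI.hasOneMemOperand())
      Alignment = (*LoadMI.memoperands_begin())->getAlign();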
|
X86FastISel.cpp
  786  MachineInstrBuilder LoadMI =  in handleConstantAddresses() local
  788  addFullAddress(LoadMI, StubAM);  in handleConstantAddresses()
|