Searched refs:UseMI (Results 1 – 25 of 71) sorted by relevance

/netbsd-src/external/apache2/llvm/dist/llvm/lib/Target/AMDGPU/
SIFoldOperands.cpp
24 MachineInstr *UseMI; member
38 UseMI(MI), OpToFold(nullptr), ShrinkOpcode(ShrinkOp), UseOpNo(OpNo), in FoldCandidate()
88 MachineInstr *UseMI,
151 const MachineInstr &UseMI, in isInlineConstantIfFolded() argument
154 if (TII->isInlineConstant(UseMI, OpNo, OpToFold)) in isInlineConstantIfFolded()
157 unsigned Opc = UseMI.getOpcode(); in isInlineConstantIfFolded()
175 const MachineInstr &UseMI, in frameIndexMayFold() argument
181 if (TII->isMUBUF(UseMI)) in frameIndexMayFold()
182 return OpNo == AMDGPU::getNamedOperandIdx(UseMI.getOpcode(), in frameIndexMayFold()
184 if (!TII->isFLATScratch(UseMI)) in frameIndexMayFold()
[all …]
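
The SIFoldOperands hits above center on a record that ties the using instruction to the operand slot being folded. A minimal sketch of that shape, using only the field names visible in the hits (the in-tree FoldCandidate carries additional state and helpers):

    #include "llvm/CodeGen/MachineInstr.h"

    // Sketch only: pairs the user with the operand slot that may take the
    // folded value. Field names follow the hits; the real struct has more.
    struct FoldCandidateSketch {
      llvm::MachineInstr *UseMI = nullptr;      // instruction whose operand is folded
      llvm::MachineOperand *OpToFold = nullptr; // immediate/frame-index being folded in
      int ShrinkOpcode = -1;                    // alternate opcode when shrinking applies
      unsigned UseOpNo = 0;                     // operand index inside UseMI
    };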
SIFixSGPRCopies.cpp
186 const auto *UseMI = MO.getParent(); in tryChangeVGPRtoSGPRinCopy() local
187 if (UseMI == &MI) in tryChangeVGPRtoSGPRinCopy()
189 if (MO.isDef() || UseMI->getParent() != MI.getParent() || in tryChangeVGPRtoSGPRinCopy()
190 UseMI->getOpcode() <= TargetOpcode::GENERIC_OP_END) in tryChangeVGPRtoSGPRinCopy()
193 unsigned OpIdx = UseMI->getOperandNo(&MO); in tryChangeVGPRtoSGPRinCopy()
194 if (OpIdx >= UseMI->getDesc().getNumOperands() || in tryChangeVGPRtoSGPRinCopy()
195 !TII->isOperandLegal(*UseMI, OpIdx, &Src)) in tryChangeVGPRtoSGPRinCopy()
788 const MachineInstr *UseMI = Use.getParent(); in processPHINode() local
789 AllAGPRUses &= (UseMI->isCopy() && in processPHINode()
790 TRI->isAGPR(*MRI, UseMI->getOperand(0).getReg())) || in processPHINode()
[all …]
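
The SIFixSGPRCopies hits walk every use of the copy's destination, reject uses outside the defining block, and ask the target whether the operand would stay legal. A hedged sketch of that scan, assuming the usual CodeGen headers; IsLegalUse is a stand-in predicate for the SIInstrInfo::isOperandLegal call seen above, not an in-tree hook:

    #include "llvm/ADT/STLExtras.h"
    #include "llvm/CodeGen/MachineInstr.h"
    #include "llvm/CodeGen/MachineRegisterInfo.h"
    using namespace llvm;

    // Every non-debug use of Reg must sit in MBB and pass the target's
    // operand-legality check before the copy is rewritten.
    static bool allUsesRewritableIn(Register Reg, const MachineBasicBlock &MBB,
                                    const MachineRegisterInfo &MRI,
                                    function_ref<bool(const MachineInstr &, unsigned)>
                                        IsLegalUse) {
      for (MachineOperand &MO : MRI.use_nodbg_operands(Reg)) {
        const MachineInstr &UseMI = *MO.getParent();
        if (UseMI.getParent() != &MBB)
          return false;
        unsigned OpIdx = UseMI.getOperandNo(&MO);
        if (OpIdx >= UseMI.getDesc().getNumOperands() || !IsLegalUse(UseMI, OpIdx))
          return false;
      }
      return true;
    }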
/netbsd-src/external/apache2/llvm/dist/llvm/lib/Target/Hexagon/
HexagonOptAddrMode.cpp
92 bool xformUseMI(MachineInstr *TfrMI, MachineInstr *UseMI,
96 bool updateAddUses(MachineInstr *AddMI, MachineInstr *UseMI);
187 MachineInstr &UseMI = *NodeAddr<StmtNode *>(IA).Addr->getCode(); in canRemoveAddasl() local
191 MI.getParent() != UseMI.getParent()) in canRemoveAddasl()
194 const MCInstrDesc &UseMID = UseMI.getDesc(); in canRemoveAddasl()
196 HII->getAddrMode(UseMI) != HexagonII::BaseImmOffset || in canRemoveAddasl()
197 getBaseWithLongOffset(UseMI) < 0) in canRemoveAddasl()
201 if (UseMID.mayStore() && UseMI.getOperand(2).isReg() && in canRemoveAddasl()
202 UseMI.getOperand(2).getReg() == MI.getOperand(0).getReg()) in canRemoveAddasl()
205 for (auto &Mo : UseMI.operands()) in canRemoveAddasl()
[all …]
HexagonConstExtenders.cpp
317 MachineInstr *UseMI = nullptr; member
330 return UseMI->getOperand(OpNum); in getOp()
333 return UseMI->getOperand(OpNum); in getOp()
1102 unsigned IdxOpc = getRegOffOpcode(ED.UseMI->getOpcode()); in getOffsetRange()
1112 if (!ED.UseMI->mayLoad() && !ED.UseMI->mayStore()) in getOffsetRange()
1217 ED.UseMI = &MI; in recordExtender()
1284 if (ED.UseMI->getOpcode() == Hexagon::A2_tfrsi) { in assignInits()
1489 MachineBasicBlock *DomB = ED0.UseMI->getParent(); in calculatePlacement()
1490 RefMIs.insert(ED0.UseMI); in calculatePlacement()
1494 MachineBasicBlock *MBB = ED.UseMI->getParent(); in calculatePlacement()
[all …]
/netbsd-src/external/apache2/llvm/dist/llvm/lib/CodeGen/
LiveRangeEdit.cpp
187 MachineInstr *DefMI = nullptr, *UseMI = nullptr; in foldAsLoad() local
199 if (UseMI && UseMI != MI) in foldAsLoad()
204 UseMI = MI; in foldAsLoad()
207 if (!DefMI || !UseMI) in foldAsLoad()
213 LIS.getInstructionIndex(*UseMI))) in foldAsLoad()
223 << " into single use: " << *UseMI); in foldAsLoad()
226 if (UseMI->readsWritesVirtualRegister(LI->reg(), &Ops).second) in foldAsLoad()
229 MachineInstr *FoldMI = TII.foldMemoryOperand(*UseMI, Ops, *DefMI, &LIS); in foldAsLoad()
233 LIS.ReplaceMachineInstrInMaps(*UseMI, *FoldMI); in foldAsLoad()
235 if (UseMI->shouldUpdateCallSiteInfo()) in foldAsLoad()
[all …]
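
The LiveRangeEdit::foldAsLoad hits scan a register's operands to find a unique def and a unique user before attempting TII.foldMemoryOperand. A simplified sketch of that scan (tied and partial operands are ignored for brevity):

    #include "llvm/CodeGen/MachineInstr.h"
    #include "llvm/CodeGen/MachineRegisterInfo.h"
    using namespace llvm;

    // Find the unique defining instruction and the unique using instruction
    // of Reg; give up if either is not unique.
    static bool findSingleDefAndUse(Register Reg, MachineRegisterInfo &MRI,
                                    MachineInstr *&DefMI, MachineInstr *&UseMI) {
      DefMI = UseMI = nullptr;
      for (MachineOperand &MO : MRI.reg_operands(Reg)) {
        MachineInstr *MI = MO.getParent();
        if (MO.isDef()) {
          if (DefMI && DefMI != MI)
            return false;   // more than one def
          DefMI = MI;
        } else {
          if (UseMI && UseMI != MI)
            return false;   // more than one distinct user
          UseMI = MI;
        }
      }
      return DefMI && UseMI;
    }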
MachineTraceMetrics.cpp
651 static bool getDataDeps(const MachineInstr &UseMI, in getDataDeps() argument
655 if (UseMI.isDebugInstr()) in getDataDeps()
659 for (MachineInstr::const_mop_iterator I = UseMI.operands_begin(), in getDataDeps()
660 E = UseMI.operands_end(); I != E; ++I) { in getDataDeps()
673 Deps.push_back(DataDep(MRI, Reg, UseMI.getOperandNo(I))); in getDataDeps()
681 static void getPHIDeps(const MachineInstr &UseMI, in getPHIDeps() argument
688 assert(UseMI.isPHI() && UseMI.getNumOperands() % 2 && "Bad PHI"); in getPHIDeps()
689 for (unsigned i = 1; i != UseMI.getNumOperands(); i += 2) { in getPHIDeps()
690 if (UseMI.getOperand(i + 1).getMBB() == Pred) { in getPHIDeps()
691 Register Reg = UseMI.getOperand(i).getReg(); in getPHIDeps()
[all …]
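
The getPHIDeps hits rely on the layout of PHI operands: (value, predecessor-block) pairs starting at operand 1. A small sketch of that walk:

    #include <cassert>
    #include "llvm/CodeGen/MachineBasicBlock.h"
    #include "llvm/CodeGen/MachineInstr.h"
    using namespace llvm;

    // The value flowing into PHI from Pred is found by matching the block
    // operand of each (value, MBB) pair.
    static Register incomingValueFor(const MachineInstr &PHI,
                                     const MachineBasicBlock *Pred) {
      assert(PHI.isPHI() && "expected a PHI");
      for (unsigned i = 1, e = PHI.getNumOperands(); i != e; i += 2)
        if (PHI.getOperand(i + 1).getMBB() == Pred)
          return PHI.getOperand(i).getReg();
      return Register(); // Pred is not an incoming edge of this PHI
    }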
TargetSchedule.cpp
186 const MachineInstr *UseMI, unsigned UseOperIdx) const { in computeOperandLatency() argument
193 if (UseMI) { in computeOperandLatency()
195 *UseMI, UseOperIdx); in computeOperandLatency()
225 if (!UseMI) in computeOperandLatency()
229 const MCSchedClassDesc *UseDesc = resolveSchedClass(UseMI); in computeOperandLatency()
232 unsigned UseIdx = findUseIdx(UseMI, UseOperIdx); in computeOperandLatency()
RegisterScavenging.cpp
290 MachineBasicBlock::iterator &UseMI) { in findSurvivorReg() argument
347 UseMI = RestorePointMI; in findSurvivorReg()
449 MachineBasicBlock::iterator &UseMI) { in spill() argument
494 if (!TRI->saveScavengerRegister(*MBB, Before, UseMI, &RC, Reg)) { in spill()
511 TII->loadRegFromStackSlot(*MBB, UseMI, Reg, Scavenged[SI].FrameIndex, in spill()
513 II = std::prev(UseMI); in spill()
545 MachineBasicBlock::iterator UseMI; in scavengeRegister() local
546 Register SReg = findSurvivorReg(I, Candidates, 25, UseMI); in scavengeRegister()
557 ScavengedInfo &Scavenged = spill(SReg, *RC, SPAdj, I, UseMI); in scavengeRegister()
558 Scavenged.Restore = &*std::prev(UseMI); in scavengeRegister()
[all …]
MachineLICM.cpp
949 for (MachineInstr &UseMI : MRI->use_instructions(CopyDstReg)) { in isCopyFeedingInvariantStore()
950 if (UseMI.mayStore() && isInvariantStore(UseMI, TRI, MRI)) in isCopyFeedingInvariantStore()
1010 for (MachineInstr &UseMI : MRI->use_instructions(Reg)) { in HasLoopPHIUse()
1012 if (UseMI.isPHI()) { in HasLoopPHIUse()
1015 if (CurLoop->contains(&UseMI)) in HasLoopPHIUse()
1020 if (isExitBlock(UseMI.getParent())) in HasLoopPHIUse()
1025 if (UseMI.isCopy() && CurLoop->contains(&UseMI)) in HasLoopPHIUse()
1026 Work.push_back(&UseMI); in HasLoopPHIUse()
1040 for (MachineInstr &UseMI : MRI->use_nodbg_instructions(Reg)) { in HasHighOperandLatency()
1041 if (UseMI.isCopyLike()) in HasHighOperandLatency()
[all …]
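
The MachineLICM hits chase a value through copies to see whether it eventually feeds a PHI inside the loop. A simplified sketch of that worklist (the in-tree code also treats exit-block PHIs specially):

    #include "llvm/ADT/SmallVector.h"
    #include "llvm/CodeGen/MachineLoopInfo.h"
    #include "llvm/CodeGen/MachineRegisterInfo.h"
    using namespace llvm;

    // Follow copies inside the loop and report whether the value eventually
    // feeds a PHI in the loop. SSA form keeps copy chains acyclic, so a
    // plain worklist is enough here.
    static bool feedsLoopPHI(Register Reg, const MachineRegisterInfo &MRI,
                             const MachineLoop *CurLoop) {
      SmallVector<Register, 8> Work;
      Work.push_back(Reg);
      while (!Work.empty()) {
        Register R = Work.pop_back_val();
        for (const MachineInstr &UseMI : MRI.use_instructions(R)) {
          if (UseMI.isPHI() && CurLoop->contains(&UseMI))
            return true;
          if (UseMI.isCopy() && CurLoop->contains(&UseMI))
            Work.push_back(UseMI.getOperand(0).getReg());
        }
      }
      return false;
    }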
DetectDeadLanes.cpp
420 const MachineInstr &UseMI = *MO.getParent(); in determineInitialUsedLanes() local
421 if (UseMI.isKill()) in determineInitialUsedLanes()
425 if (lowersToCopies(UseMI)) { in determineInitialUsedLanes()
426 assert(UseMI.getDesc().getNumDefs() == 1); in determineInitialUsedLanes()
427 const MachineOperand &Def = *UseMI.defs().begin(); in determineInitialUsedLanes()
434 if (lowersToCopies(UseMI)) { in determineInitialUsedLanes()
436 CrossCopy = isCrossCopy(*MRI, UseMI, DstRC, MO); in determineInitialUsedLanes()
438 LLVM_DEBUG(dbgs() << "Copy across incompatible classes: " << UseMI); in determineInitialUsedLanes()
OptimizePHIs.cpp
157 for (MachineInstr &UseMI : MRI->use_nodbg_instructions(DstReg)) { in IsDeadPHICycle()
158 if (!UseMI.isPHI() || !IsDeadPHICycle(&UseMI, PHIsInCycle)) in IsDeadPHICycle()
MachineSSAUpdater.cpp
225 MachineInstr *UseMI = U.getParent(); in RewriteUse() local
227 if (UseMI->isPHI()) { in RewriteUse()
228 MachineBasicBlock *SourceBB = findCorrespondingPred(UseMI, &U); in RewriteUse()
231 NewVR = GetValueInMiddleOfBlock(UseMI->getParent()); in RewriteUse()
RegisterCoalescer.cpp
866 MachineInstr *UseMI = MO.getParent(); in removeCopyByCommutingDef() local
867 unsigned OpNo = &MO - &UseMI->getOperand(0); in removeCopyByCommutingDef()
868 SlotIndex UseIdx = LIS->getInstructionIndex(*UseMI); in removeCopyByCommutingDef()
873 if (UseMI->isRegTiedToDefOperand(OpNo)) in removeCopyByCommutingDef()
916 MachineInstr *UseMI = UseMO.getParent(); in removeCopyByCommutingDef() local
917 if (UseMI->isDebugValue()) { in removeCopyByCommutingDef()
923 SlotIndex UseIdx = LIS->getInstructionIndex(*UseMI).getRegSlot(true); in removeCopyByCommutingDef()
934 if (UseMI == CopyMI) in removeCopyByCommutingDef()
936 if (!UseMI->isCopy()) in removeCopyByCommutingDef()
938 if (UseMI->getOperand(0).getReg() != IntB.reg() || in removeCopyByCommutingDef()
[all …]
PeepholeOptimizer.cpp
504 MachineInstr *UseMI = UseMO.getParent(); in INITIALIZE_PASS_DEPENDENCY() local
505 if (UseMI == &MI) in INITIALIZE_PASS_DEPENDENCY()
508 if (UseMI->isPHI()) { in INITIALIZE_PASS_DEPENDENCY()
534 if (UseMI->getOpcode() == TargetOpcode::SUBREG_TO_REG) in INITIALIZE_PASS_DEPENDENCY()
537 MachineBasicBlock *UseMBB = UseMI->getParent(); in INITIALIZE_PASS_DEPENDENCY()
540 if (!LocalMIs.count(UseMI)) in INITIALIZE_PASS_DEPENDENCY()
577 MachineInstr *UseMI = UseMO->getParent(); in INITIALIZE_PASS_DEPENDENCY() local
578 MachineBasicBlock *UseMBB = UseMI->getParent(); in INITIALIZE_PASS_DEPENDENCY()
589 MachineInstr *Copy = BuildMI(*UseMBB, UseMI, UseMI->getDebugLoc(), in INITIALIZE_PASS_DEPENDENCY()
1868 for (const MachineInstr &UseMI : MRI.use_nodbg_instructions(DefOp.getReg())) { in getNextSourceFromBitcast() local
[all …]
TailDuplicator.cpp
221 MachineInstr *UseMI = UseMO.getParent(); in tailDuplicateAndUpdate() local
223 if (UseMI->isDebugValue()) { in tailDuplicateAndUpdate()
228 UseMI->eraseFromParent(); in tailDuplicateAndUpdate()
231 if (UseMI->getParent() == DefBB && !UseMI->isPHI()) in tailDuplicateAndUpdate()
299 for (MachineInstr &UseMI : MRI->use_instructions(Reg)) { in isDefLiveOut()
300 if (UseMI.isDebugValue()) in isDefLiveOut()
302 if (UseMI.getParent() != BB) in isDefLiveOut()
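
The TailDuplicator::isDefLiveOut hits reduce to one question: does any non-debug user of the register live in a different block? A sketch of that check:

    #include "llvm/CodeGen/MachineInstr.h"
    #include "llvm/CodeGen/MachineRegisterInfo.h"
    using namespace llvm;

    // A def is live out of BB if any non-debug user sits in another block.
    static bool isDefLiveOutOf(Register Reg, const MachineBasicBlock *BB,
                               const MachineRegisterInfo &MRI) {
      for (const MachineInstr &UseMI : MRI.use_instructions(Reg)) {
        if (UseMI.isDebugValue())
          continue;
        if (UseMI.getParent() != BB)
          return true;
      }
      return false;
    }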
ModuloSchedule.cpp
88 MachineInstr *UseMI = UseOp.getParent(); in expand() local
89 int UseStage = Schedule.getStage(UseMI); in expand()
1152 MachineInstr *UseMI = UseOp.getParent(); in rewriteScheduledInstr() local
1154 if (UseMI->getParent() != BB) in rewriteScheduledInstr()
1156 if (UseMI->isPHI()) { in rewriteScheduledInstr()
1157 if (!Phi->isPHI() && UseMI->getOperand(0).getReg() == NewReg) in rewriteScheduledInstr()
1159 if (getLoopPhiReg(*UseMI, BB) != OldReg) in rewriteScheduledInstr()
1162 InstrMapTy::iterator OrigInstr = InstrMap.find(UseMI); in rewriteScheduledInstr()
1605 for (MachineInstr &UseMI : MRI.use_instructions(DefMO.getReg())) { in filterInstructions()
1608 assert(UseMI.isPHI()); in filterInstructions()
[all …]
TwoAddressInstructionPass.cpp
383 MachineInstr &UseMI = *MRI->use_instr_nodbg_begin(Reg); in findOnlyInterestingUse() local
384 if (UseMI.getParent() != MBB) in findOnlyInterestingUse()
388 if (isCopyToReg(UseMI, TII, SrcReg, DstReg, IsSrcPhys, IsDstPhys)) { in findOnlyInterestingUse()
390 return &UseMI; in findOnlyInterestingUse()
393 if (isTwoAddrUse(UseMI, Reg, DstReg)) { in findOnlyInterestingUse()
395 return &UseMI; in findOnlyInterestingUse()
640 while (MachineInstr *UseMI = findOnlyInterestingUse(Reg, MBB, MRI, TII,IsCopy, in scanUses() local
642 if (IsCopy && !Processed.insert(UseMI).second) in scanUses()
645 DenseMap<MachineInstr*, unsigned>::iterator DI = DistanceMap.find(UseMI); in scanUses()
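
The findOnlyInterestingUse hits start from a register with a single non-debug user in the current block. A sketch of that lookup, using MachineRegisterInfo's hasOneNonDBGUse / use_instr_nodbg_begin as in the hits:

    #include "llvm/CodeGen/MachineInstr.h"
    #include "llvm/CodeGen/MachineRegisterInfo.h"
    using namespace llvm;

    // When Reg has exactly one non-debug user and that user lives in MBB,
    // return it; otherwise there is nothing interesting to look at.
    static MachineInstr *getSoleLocalUser(Register Reg, MachineBasicBlock *MBB,
                                          MachineRegisterInfo &MRI) {
      if (!MRI.hasOneNonDBGUse(Reg))
        return nullptr;
      MachineInstr &UseMI = *MRI.use_instr_nodbg_begin(Reg);
      return UseMI.getParent() == MBB ? &UseMI : nullptr;
    }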
/netbsd-src/external/apache2/llvm/dist/llvm/lib/CodeGen/GlobalISel/
Localizer.cpp
126 MachineInstr &UseMI = *MOUse.getParent(); in localizeInterBlock() local
127 if (MRI->hasOneUse(Reg) && !UseMI.isPHI()) in localizeInterBlock()
128 InsertMBB->insert(InsertMBB->SkipPHIsAndLabels(UseMI), LocalizedMI); in localizeInterBlock()
164 for (MachineInstr &UseMI : MRI->use_nodbg_instructions(Reg)) { in localizeIntraBlock()
165 if (!UseMI.isPHI()) in localizeIntraBlock()
166 Users.insert(&UseMI); in localizeIntraBlock()
CombinerHelper.cpp
404 MachineInstr &UseMI = *UseMO.getParent(); in InsertInsnsWithoutSideEffectsBeforeUse() local
406 MachineBasicBlock *InsertBB = UseMI.getParent(); in InsertInsnsWithoutSideEffectsBeforeUse()
409 if (UseMI.isPHI()) { in InsertInsnsWithoutSideEffectsBeforeUse()
481 for (auto &UseMI : MRI.use_nodbg_instructions(LoadValue.getReg())) { in matchCombineExtendingLoads() local
482 if (UseMI.getOpcode() == TargetOpcode::G_SEXT || in matchCombineExtendingLoads()
483 UseMI.getOpcode() == TargetOpcode::G_ZEXT || in matchCombineExtendingLoads()
484 (UseMI.getOpcode() == TargetOpcode::G_ANYEXT)) { in matchCombineExtendingLoads()
487 if (MMO.isAtomic() && UseMI.getOpcode() != TargetOpcode::G_ANYEXT) in matchCombineExtendingLoads()
495 LLT UseTy = MRI.getType(UseMI.getOperand(0).getReg()); in matchCombineExtendingLoads()
502 MRI.getType(UseMI.getOperand(0).getReg()), in matchCombineExtendingLoads()
[all …]
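
The matchCombineExtendingLoads hits classify the users of a load's result by extend opcode, with atomic loads restricted to G_ANYEXT. A hedged sketch of that use scan (the parameters here are assumptions for illustration, not the in-tree signature):

    #include "llvm/CodeGen/MachineInstr.h"
    #include "llvm/CodeGen/MachineRegisterInfo.h"
    #include "llvm/CodeGen/TargetOpcodes.h"
    using namespace llvm;

    // Scan the non-debug users of a load's result for an extend that could
    // absorb the load; atomic loads only tolerate G_ANYEXT, mirroring the
    // check visible in the hits.
    static bool hasFoldableExtendUse(Register LoadedReg, bool IsAtomic,
                                     const MachineRegisterInfo &MRI) {
      for (const MachineInstr &UseMI : MRI.use_nodbg_instructions(LoadedReg)) {
        const unsigned Opc = UseMI.getOpcode();
        const bool IsExt = Opc == TargetOpcode::G_SEXT ||
                           Opc == TargetOpcode::G_ZEXT ||
                           Opc == TargetOpcode::G_ANYEXT;
        if (!IsExt || (IsAtomic && Opc != TargetOpcode::G_ANYEXT))
          continue;
        return true;
      }
      return false;
    }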
/netbsd-src/external/apache2/llvm/dist/llvm/lib/Target/ARM/
MLxExpansionPass.cpp
122 MachineInstr *UseMI = &*MRI->use_instr_nodbg_begin(Reg); in getDefReg() local
123 if (UseMI->getParent() != MBB) in getDefReg()
126 while (UseMI->isCopy() || UseMI->isInsertSubreg()) { in getDefReg()
127 Reg = UseMI->getOperand(0).getReg(); in getDefReg()
130 UseMI = &*MRI->use_instr_nodbg_begin(Reg); in getDefReg()
131 if (UseMI->getParent() != MBB) in getDefReg()
/netbsd-src/external/apache2/llvm/dist/llvm/lib/Target/PowerPC/
PPCVSXSwapRemoval.cpp
677 for (MachineInstr &UseMI : MRI->use_nodbg_instructions(DefReg)) { in recordUnoptimizableWebs()
678 int UseIdx = SwapMap[&UseMI]; in recordUnoptimizableWebs()
690 LLVM_DEBUG(UseMI.dump()); in recordUnoptimizableWebs()
699 Register SwapDefReg = UseMI.getOperand(0).getReg(); in recordUnoptimizableWebs()
711 LLVM_DEBUG(UseMI.dump()); in recordUnoptimizableWebs()
743 for (MachineInstr &UseMI : MRI->use_nodbg_instructions(DefReg)) { in recordUnoptimizableWebs()
744 int UseIdx = SwapMap[&UseMI]; in recordUnoptimizableWebs()
785 for (MachineInstr &UseMI : MRI->use_nodbg_instructions(DefReg)) { in markSwapsForRemoval()
786 int UseIdx = SwapMap[&UseMI]; in markSwapsForRemoval()
790 LLVM_DEBUG(UseMI.dump()); in markSwapsForRemoval()
/netbsd-src/external/apache2/llvm/dist/llvm/lib/Target/VE/
VEInstrInfo.cpp
546 bool VEInstrInfo::FoldImmediate(MachineInstr &UseMI, MachineInstr &DefMI, in FoldImmediate() argument
607 LLVM_DEBUG(UseMI.dump()); in FoldImmediate()
629 switch (UseMI.getOpcode()) { in FoldImmediate()
676 if (UseMI.getOperand(1).getReg() == Reg) { in FoldImmediate()
679 assert(UseMI.getOperand(2).getReg() == Reg); in FoldImmediate()
693 if (UseMI.getOperand(1).getReg() == Reg) { in FoldImmediate()
700 assert(UseMI.getOperand(2).getReg() == Reg); in FoldImmediate()
713 UseMI.setDesc(get(NewUseOpc)); in FoldImmediate()
715 UseMI.getOperand(1).setReg(UseMI.getOperand(UseIdx).getReg()); in FoldImmediate()
717 UseMI.getOperand(UseIdx).ChangeToImmediate(ImmVal); in FoldImmediate()
/netbsd-src/external/apache2/llvm/dist/llvm/lib/Target/X86/
X86SpeculativeLoadHardening.cpp
1792 for (MachineInstr &UseMI : MRI->use_instructions(DefReg)) { in sinkPostLoadHardenedInst()
1795 if (HardenedInstrs.count(&UseMI)) { in sinkPostLoadHardenedInst()
1796 if (!X86InstrInfo::isDataInvariantLoad(UseMI) || isEFLAGSDefLive(UseMI)) { in sinkPostLoadHardenedInst()
1800 assert(X86InstrInfo::isDataInvariant(UseMI) && in sinkPostLoadHardenedInst()
1807 const MCInstrDesc &Desc = UseMI.getDesc(); in sinkPostLoadHardenedInst()
1814 UseMI.getOperand(MemRefBeginIdx + X86::AddrBaseReg); in sinkPostLoadHardenedInst()
1816 UseMI.getOperand(MemRefBeginIdx + X86::AddrIndexReg); in sinkPostLoadHardenedInst()
1832 if (!X86InstrInfo::isDataInvariant(UseMI) || UseMI.getParent() != MI.getParent() || in sinkPostLoadHardenedInst()
1833 isEFLAGSDefLive(UseMI)) in sinkPostLoadHardenedInst()
1838 if (UseMI.getDesc().getNumDefs() > 1) in sinkPostLoadHardenedInst()
[all …]
/netbsd-src/external/apache2/llvm/dist/llvm/lib/Target/Mips/
Mips16RegisterInfo.cpp
58 MachineBasicBlock::iterator &UseMI, const TargetRegisterClass *RC, in saveScavengerRegister() argument
63 TII.copyPhysReg(MBB, UseMI, DL, Reg, Mips::T0, true); in saveScavengerRegister()
/netbsd-src/external/apache2/llvm/dist/llvm/include/llvm/CodeGen/
RegisterScavenging.h
209 MachineBasicBlock::iterator &UseMI);
221 MachineBasicBlock::iterator &UseMI);
