| /openbsd-src/gnu/llvm/llvm/lib/Transforms/Utils/ |
| SimplifyIndVar.cpp |
      78  Value *foldIVUser(Instruction *UseInst, Instruction *IVOperand);
      80  bool eliminateIdentitySCEV(Instruction *UseInst, Instruction *IVOperand);
      81  bool replaceIVUserWithLoopInvariant(Instruction *UseInst);
      82  bool replaceFloatIVWithIntegerIV(Instruction *UseInst);
      87  bool eliminateIVUser(Instruction *UseInst, Instruction *IVOperand);
     123  Value *SimplifyIndvar::foldIVUser(Instruction *UseInst, Instruction *IVOperand) {   // in foldIVUser() [argument]
     128  switch (UseInst->getOpcode()) {   // in foldIVUser()
     135  if (IVOperand != UseInst->getOperand(OperIdx) ||   // in foldIVUser()
     136  !isa<ConstantInt>(UseInst->getOperand(1)))   // in foldIVUser()
     149  ConstantInt *D = cast<ConstantInt>(UseInst->getOperand(1));   // in foldIVUser()
    [all …]
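A recurring shape in the SimplifyIndVar matches above is inspecting the instruction that uses the induction variable: confirm the IV really is the expected operand and that the other operand is a compile-time constant before attempting a fold. The helper below is a hedged, self-contained sketch of just that check; the name getConstantOtherOperand is illustrative and not part of the pass, and the real foldIVUser() also restricts the opcode and tracks exactness.

    #include "llvm/IR/Constants.h"
    #include "llvm/IR/Instruction.h"
    using namespace llvm;

    // If UseInst has the two-operand form "op IVOperand, C" with a constant
    // right-hand side, return that constant; otherwise return nullptr.
    static ConstantInt *getConstantOtherOperand(Instruction *UseInst,
                                                Value *IVOperand) {
      if (UseInst->getNumOperands() != 2 || UseInst->getOperand(0) != IVOperand)
        return nullptr;                                     // IV must be the first operand
      return dyn_cast<ConstantInt>(UseInst->getOperand(1)); // nullptr if not a constant int
    }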
|
| /openbsd-src/gnu/llvm/llvm/lib/Transforms/Scalar/ |
| DeadStoreElimination.cpp |
    1111  Instruction *UseInst) {   // in isCompleteOverwrite()
    1115  if (!UseInst->mayWriteToMemory())   // in isCompleteOverwrite()
    1118  if (auto *CB = dyn_cast<CallBase>(UseInst))   // in isCompleteOverwrite()
    1123  if (auto CC = getLocForWrite(UseInst))   // in isCompleteOverwrite()
    1124  return isOverwrite(UseInst, DefInst, *CC, DefLoc, InstWriteOffset,   // in isCompleteOverwrite()
    1169  Instruction *UseInst = cast<MemoryUseOrDef>(UseAccess)->getMemoryInst();   // in isWriteAtEndOfFunction() [local]
    1170  if (isReadClobber(*MaybeLoc, UseInst)) {   // in isWriteAtEndOfFunction()
    1171  LLVM_DEBUG(dbgs() << " ... hit read clobber " << *UseInst << ".\n");   // in isWriteAtEndOfFunction()
    1236  bool isReadClobber(const MemoryLocation &DefLoc, Instruction *UseInst) {   // in isReadClobber()
    1237  if (isNoopIntrinsic(UseInst))   // in isReadClobber()
    [all …]
|
| Sink.cpp |
     132  Instruction *UseInst = cast<Instruction>(U.getUser());   // in SinkInstruction() [local]
     133  BasicBlock *UseBlock = UseInst->getParent();   // in SinkInstruction()
     137  if (PHINode *PN = dyn_cast<PHINode>(UseInst)) {   // in SinkInstruction()
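Sink.cpp shows the canonical IR-level def-use walk: every Use records its User, and for a value produced by an instruction every user is itself an Instruction, so the cast is safe; PHI users are attributed to the incoming edge rather than the PHI's own block. A minimal sketch of that pattern, assuming the value being examined is an Instruction (the helper name visitUseBlocks is illustrative):

    #include "llvm/IR/Instructions.h"
    using namespace llvm;

    // Visit the block in which each use of I "takes effect". For ordinary users
    // that is the user's own block; for a PHI it is the predecessor feeding the
    // corresponding incoming value, which is how Sink.cpp reasons about where a
    // sunk instruction must still dominate its uses.
    static void visitUseBlocks(Instruction &I) {
      for (Use &U : I.uses()) {
        auto *UseInst = cast<Instruction>(U.getUser()); // users of an instruction's value are instructions
        BasicBlock *UseBlock = UseInst->getParent();
        if (auto *PN = dyn_cast<PHINode>(UseInst))
          UseBlock = PN->getIncomingBlock(U);           // charge the use to the incoming edge
        (void)UseBlock;                                 // a real pass would run its dominance test here
      }
    }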
|
| /openbsd-src/gnu/llvm/llvm/lib/Target/AMDGPU/ |
| AMDGPUPromoteAlloca.cpp |
      95  Instruction *UseInst,
     692  Instruction *UseInst = cast<Instruction>(User);   // in collectUsesWithPtrTypes() [local]
     693  if (UseInst->getOpcode() == Instruction::PtrToInt)   // in collectUsesWithPtrTypes()
     696  if (LoadInst *LI = dyn_cast<LoadInst>(UseInst)) {   // in collectUsesWithPtrTypes()
     703  if (StoreInst *SI = dyn_cast<StoreInst>(UseInst)) {   // in collectUsesWithPtrTypes()
     710  } else if (AtomicRMWInst *RMW = dyn_cast<AtomicRMWInst>(UseInst)) {   // in collectUsesWithPtrTypes()
     713  } else if (AtomicCmpXchgInst *CAS = dyn_cast<AtomicCmpXchgInst>(UseInst)) {   // in collectUsesWithPtrTypes()
     720  if (ICmpInst *ICmp = dyn_cast<ICmpInst>(UseInst)) {   // in collectUsesWithPtrTypes()
     728  if (UseInst->getOpcode() == Instruction::AddrSpaceCast) {   // in collectUsesWithPtrTypes()
     730  if (PointerMayBeCaptured(UseInst, true, true))   // in collectUsesWithPtrTypes()
    [all …]
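The AMDGPUPromoteAlloca hits illustrate the other recurring move: once the user is in hand as an Instruction, dispatch on its concrete class with dyn_cast and decide per kind whether the use is acceptable. The sketch below is a loose, hedged simplification of that dispatch only; isBenignPointerUse is an illustrative name, and the real collectUsesWithPtrTypes() applies considerably more checks, including capture analysis.

    #include "llvm/IR/Instructions.h"
    using namespace llvm;

    // Rough classification of a pointer's user: memory operations are benign only
    // when they access memory *through* Ptr rather than storing or otherwise
    // escaping the pointer value itself; integer comparisons never let it escape.
    static bool isBenignPointerUse(Instruction *UseInst, Value *Ptr) {
      if (auto *LI = dyn_cast<LoadInst>(UseInst))
        return LI->getPointerOperand() == Ptr;
      if (auto *SI = dyn_cast<StoreInst>(UseInst))
        return SI->getPointerOperand() == Ptr;   // storing Ptr itself would escape it
      if (auto *RMW = dyn_cast<AtomicRMWInst>(UseInst))
        return RMW->getPointerOperand() == Ptr;
      if (auto *CAS = dyn_cast<AtomicCmpXchgInst>(UseInst))
        return CAS->getPointerOperand() == Ptr;
      return isa<ICmpInst>(UseInst);             // comparing pointers is fine; anything else: be conservative
    }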
|
| GCNNSAReassign.cpp |
     220  const MachineInstr *UseInst = U.getParent();   // in CheckNSA() [local]
     221  if (UseInst->isCopy() && UseInst->getOperand(0).getReg() == PhysReg)   // in CheckNSA()
|
| SIPeepholeSDWA.cpp |
     425  for (MachineInstr &UseInst : MRI->use_nodbg_instructions(PotentialMO->getReg())) {   // in potentialToConvert()
     426  if (&UseInst != ParentMI)   // in potentialToConvert()
|
| SIInstrInfo.cpp |
    8231  auto &UseInst = *Use.getParent();   // in execMayBeModifiedBeforeAnyUse() [local]
    8234  if (UseInst.getParent() != DefBB || UseInst.isPHI())   // in execMayBeModifiedBeforeAnyUse()
|
| /openbsd-src/gnu/llvm/llvm/lib/Target/PowerPC/ |
| PPCPreEmitPeephole.cpp |
     259  MachineBasicBlock::iterator UseInst;   // in addLinkerOpt() [member]
     304  Pair.UseInst = BBI;   // in addLinkerOpt()
     315  assert(Pair->UseInst.isValid() && Pair->StillValid &&   // in addLinkerOpt()
     322  for (; BBI != Pair->UseInst; ++BBI) {   // in addLinkerOpt()
     344  Pair->UseInst->addOperand(ImplUse);   // in addLinkerOpt()
     352  Pair->UseInst->addOperand(*MF, PCRelLabel);   // in addLinkerOpt()
|
| /openbsd-src/gnu/llvm/llvm/lib/CodeGen/ |
| MIRCanonicalizerPass.cpp |
     210  MachineInstr *UseInst = UO.getParent();   // in rescheduleCanonically() [local]
     213  const unsigned UseLoc = getInstrIdx(*UseInst);   // in rescheduleCanonically()
     216  if (UseInst->getParent() != Def->getParent())   // in rescheduleCanonically()
     223  UseToBringDefCloserTo = UseInst;   // in rescheduleCanonically()
|
| MachineSink.cpp |
     333  MachineInstr *UseInst = MO.getParent();   // in AllUsesDominatedByBlock() [local]
     334  unsigned OpNo = UseInst->getOperandNo(&MO);   // in AllUsesDominatedByBlock()
     335  MachineBasicBlock *UseBlock = UseInst->getParent();   // in AllUsesDominatedByBlock()
     336  return UseBlock == MBB && UseInst->isPHI() &&   // in AllUsesDominatedByBlock()
     337  UseInst->getOperand(OpNo + 1).getMBB() == DefMBB;   // in AllUsesDominatedByBlock()
     345  MachineInstr *UseInst = MO.getParent();   // in AllUsesDominatedByBlock() [local]
     346  unsigned OpNo = &MO - &UseInst->getOperand(0);   // in AllUsesDominatedByBlock()
     347  MachineBasicBlock *UseBlock = UseInst->getParent();   // in AllUsesDominatedByBlock()
     348  if (UseInst->isPHI()) {   // in AllUsesDominatedByBlock()
     351  UseBlock = UseInst->getOperand(OpNo+1).getMBB();   // in AllUsesDominatedByBlock()
    [all …]
|
| RegAllocFast.cpp |
     389  for (const MachineInstr &UseInst : MRI->use_nodbg_instructions(VirtReg)) {   // in mayLiveOut() [local]
     390  if (UseInst.getParent() != MBB || ++C >= Limit) {   // in mayLiveOut()
     399  if (SelfLoopDef == &UseInst ||   // in mayLiveOut()
     400  !dominates(*MBB, SelfLoopDef->getIterator(), UseInst.getIterator())) {   // in mayLiveOut()
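RegAllocFast.cpp here and X86FastPreTileConfig.cpp below use the MachineIR counterpart of the same walk: MachineRegisterInfo hands out every non-debug instruction that uses a virtual register, and a bounded scan over them gives a cheap, conservative liveness guess. A minimal sketch under those assumptions; mayBeUsedOutside and the UseLimit cap are illustrative, not taken from the pass.

    #include "llvm/CodeGen/MachineBasicBlock.h"
    #include "llvm/CodeGen/MachineRegisterInfo.h"
    using namespace llvm;

    // Conservatively answer "might VirtReg be used outside MBB?". Any use in a
    // different block is proof; an over-long scan gives up and answers "maybe",
    // mirroring the capped loop in RegAllocFast::mayLiveOut().
    static bool mayBeUsedOutside(const MachineRegisterInfo &MRI, Register VirtReg,
                                 const MachineBasicBlock *MBB) {
      constexpr unsigned UseLimit = 8; // illustrative cap, not the value the pass uses
      unsigned Count = 0;
      for (const MachineInstr &UseInst : MRI.use_nodbg_instructions(VirtReg)) {
        if (UseInst.getParent() != MBB)
          return true;                 // a use in another block: definitely reaches outside
        if (++Count >= UseLimit)
          return true;                 // scanned enough; stay conservative
      }
      return false;                    // every inspected use sits inside MBB
    }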
|
| /openbsd-src/gnu/llvm/llvm/lib/Target/WebAssembly/ |
| WebAssemblyRegStackify.cpp |
     449  const MachineInstr *UseInst = Use.getParent();   // in oneUseDominatesOtherUses() [local]
     450  VNInfo *UseVNI = LI.getVNInfoBefore(LIS.getInstructionIndex(*UseInst));   // in oneUseDominatesOtherUses()
     455  if (UseInst == OneUseInst) {   // in oneUseDominatesOtherUses()
     462  while (!MDT.dominates(OneUseInst, UseInst)) {   // in oneUseDominatesOtherUses()
     468  if (UseInst->getDesc().getNumDefs() == 0)   // in oneUseDominatesOtherUses()
     470  const MachineOperand &MO = UseInst->getOperand(0);   // in oneUseDominatesOtherUses()
     484  UseInst = NewUseInst;   // in oneUseDominatesOtherUses()
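The WebAssemblyRegStackify matches ask a stronger question: does one particular use dominate every other use of the register? Reduced to its core it is a dominance query per use operand, as sketched below; oneUseDominatesAll is an illustrative name, and the real oneUseDominatesOtherUses() additionally chases copies/tees and consults LiveIntervals.

    #include "llvm/CodeGen/MachineDominators.h"
    #include "llvm/CodeGen/MachineRegisterInfo.h"
    using namespace llvm;

    // True if OneUseInst dominates every other non-debug instruction using Reg.
    static bool oneUseDominatesAll(const MachineRegisterInfo &MRI, Register Reg,
                                   const MachineInstr *OneUseInst,
                                   const MachineDominatorTree &MDT) {
      for (const MachineOperand &Use : MRI.use_nodbg_operands(Reg)) {
        const MachineInstr *UseInst = Use.getParent();
        if (UseInst == OneUseInst)
          continue;                              // don't compare the candidate with itself
        if (!MDT.dominates(OneUseInst, UseInst))
          return false;                          // found a use it does not dominate
      }
      return true;
    }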
|
| /openbsd-src/gnu/llvm/llvm/lib/Target/X86/ |
| X86FastPreTileConfig.cpp |
     144  for (const MachineInstr &UseInst : MRI->use_nodbg_instructions(VirtReg)) {   // in mayLiveOut() [local]
     145  if (UseInst.getParent() != MBB) {   // in mayLiveOut()
     154  if (dominates(*MBB, *CfgMI, UseInst)) {   // in mayLiveOut()
|
| /openbsd-src/gnu/llvm/llvm/lib/Analysis/ |
| MemorySSA.cpp |
     288  const Instruction *UseInst, AliasAnalysisType &AA) {   // in instructionClobbersQuery() [argument]
     317  if (auto *CB = dyn_cast_or_null<CallBase>(UseInst)) {   // in instructionClobbersQuery()
     323  if (auto *UseLoad = dyn_cast_or_null<LoadInst>(UseInst))   // in instructionClobbersQuery()
|
| ScalarEvolution.cpp |
    9461  getConstantEvolvingPHIOperands(Instruction *UseInst, const Loop *L,   // in getConstantEvolvingPHIOperands() [argument]
    9470  for (Value *Op : UseInst->operands()) {   // in getConstantEvolvingPHIOperands()
|
| /openbsd-src/gnu/llvm/llvm/lib/Frontend/OpenMP/ |
| OMPIRBuilder.cpp |
    2586  auto *UseInst = dyn_cast<Instruction>(U.getUser());   // in removeUnusedBlocksFromParent() [local]
    2587  if (!UseInst)   // in removeUnusedBlocksFromParent()
    2589  if (BBsToErase.count(UseInst->getParent()))   // in removeUnusedBlocksFromParent()
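The OMPIRBuilder hit is the simplest variant: before erasing a set of blocks, check that nothing outside that set still uses a value defined in them. A small sketch of that filter, assuming the doomed blocks are collected in a SmallPtrSet; allUsersWillBeErased is an illustrative name, not an OMPIRBuilder API.

    #include "llvm/ADT/SmallPtrSet.h"
    #include "llvm/IR/Instructions.h"
    using namespace llvm;

    // True if every instruction that uses V lives in a block slated for erasure,
    // so deleting those blocks cannot leave a dangling use behind.
    static bool allUsersWillBeErased(Value &V,
                                     const SmallPtrSetImpl<BasicBlock *> &BBsToErase) {
      for (Use &U : V.uses()) {
        auto *UseInst = dyn_cast<Instruction>(U.getUser());
        if (!UseInst)
          continue;                                    // non-instruction users (e.g. constant expressions) are ignored here
        if (!BBsToErase.count(UseInst->getParent()))
          return false;                                // a surviving user outside the doomed blocks
      }
      return true;
    }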
|