Lines Matching +full:0 +full:x86 (from llvm/lib/Target/X86/X86FlagsCopyLowering.cpp)

3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
18 /// SAHF don't work on all x86 processors and are often quite slow compared to
23 #include "X86.h"
62 #define PASS_KEY "x86-flags-copy-lowering"
74 using CondRegArray = std::array<unsigned, X86::LAST_VALID_COND + 1>;
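The CondRegArray at line 74 is the pass's per-copy cache: one slot per X86 condition code, holding the virtual register that already carries that condition's SETcc result (0 meaning "not materialized yet"). Below is a minimal, self-contained sketch of that caching idea; it is ordinary C++, and the enum values, getCondReg, and NextVReg are stand-ins invented for the illustration, not LLVM's own names.

    #include <array>
    #include <cstdio>

    enum CondCode { COND_E, COND_NE, COND_B, COND_AE, LAST_VALID_COND = COND_AE };
    using CondRegArray = std::array<unsigned, LAST_VALID_COND + 1>;

    static unsigned NextVReg = 100; // stand-in for MRI->createVirtualRegister()

    // Return the cached register for Cond, "materializing" it on first use,
    // in the spirit of promoteCondToReg / getCondOrInverseInReg.
    unsigned getCondReg(CondRegArray &CondRegs, CondCode Cond) {
      unsigned &Reg = CondRegs[Cond];
      if (!Reg)
        Reg = NextVReg++; // in the real pass: BuildMI(..., SETCCr, Reg).addImm(Cond)
      return Reg;
    }

    int main() {
      CondRegArray CondRegs = {};
      std::printf("COND_B  -> vreg %u\n", getCondReg(CondRegs, COND_B));
      std::printf("COND_B  -> vreg %u (cached)\n", getCondReg(CondRegs, COND_B));
      std::printf("COND_NE -> vreg %u\n", getCondReg(CondRegs, COND_NE));
    }

Reusing the cached register on repeated requests is what lets the pass emit at most one SETcc per condition for a given flags copy.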
80 StringRef getPassName() const override { return "X86 EFLAGS copy lowering"; }
100 const DebugLoc &TestLoc, X86::CondCode Cond);
103 const DebugLoc &TestLoc, X86::CondCode Cond, CondRegArray &CondRegs);
120 "X86 EFLAGS copy lowering", false, false)
122 "X86 EFLAGS copy lowering", false, false)
128 char X86FlagsCopyLoweringPass::ID = 0;
136 return X86::isADC(Opc) || X86::isSBB(Opc) || X86::isRCL(Opc) ||
137 X86::isRCR(Opc) || (Opc == X86::SETB_C32r || Opc == X86::SETB_C64r);
149 assert(X86::getCondFromBranch(SplitI) != X86::COND_INVALID &&
155 assert(X86::getCondFromBranch(PrevI) != X86::COND_INVALID &&
161 MachineBasicBlock &UnsplitSucc = *PrevI.getOperand(0).getMBB();
244 MI.findRegisterDefOperand(X86::EFLAGS, /*TRI=*/nullptr);
247 if (FlagDef->isDead() && X86::getNFVariant(MI.getOpcode()))
261 PromoteRC = &X86::GR8RegClass;
267 if (none_of(MRI->def_instructions(X86::EFLAGS), [](const MachineInstr &MI) {
296 MI.getOperand(0).getReg() == X86::EFLAGS)
352 if (MRI->use_nodbg_empty(CopyDefI->getOperand(0).getReg())) {
358 unsigned NewOpc = X86::getNFVariant(Clobber->getOpcode());
362 Clobber->findRegisterDefOperand(X86::EFLAGS, /*TRI=*/nullptr)
370 BI->addLiveIn(X86::EFLAGS);
425 if (MRI->use_empty(CopyDefI.getOperand(0).getReg()))
430 MachineOperand &DOp = CopyI->getOperand(0);
432 assert(DOp.getReg() == X86::EFLAGS && "Unexpected copy def register!");
461 MI.findRegisterDefOperand(X86::EFLAGS, /*TRI=*/nullptr);
487 while (TestMBB->isLiveIn(X86::EFLAGS) && !TestMBB->pred_empty() &&
519 return MI.findRegisterDefOperand(X86::EFLAGS, /*TRI=*/nullptr);
582 MI.findRegisterUseOperand(X86::EFLAGS, /*TRI=*/nullptr);
583 FlagsKilled = MI.modifiesRegister(X86::EFLAGS, TRI);
600 // tail calls, as those are not introduced into the X86 MI until post-RA
604 if (X86::getCondFromBranch(MI) != X86::COND_INVALID) {
610 X86::getCondFromBranch(*JmpIt) != X86::COND_INVALID);
618 MRI->replaceRegWith(MI.getOperand(0).getReg(),
619 CopyDefI.getOperand(0).getReg());
621 } else if (X86::isSETCC(Opc)) {
641 if (SuccMBB->isLiveIn(X86::EFLAGS) &&
675 SuccMBB->removeLiveIn(X86::EFLAGS);
702 (MI.getOperand(0).getReg() == X86::EFLAGS ||
703 MI.getOperand(1).getReg() == X86::EFLAGS)) {
722 X86::CondCode Cond = X86::getCondFromSETCC(MI);
723 if (Cond != X86::COND_INVALID && !MI.mayStore() &&
724 MI.getOperand(0).isReg() && MI.getOperand(0).getReg().isVirtual()) {
725 assert(MI.getOperand(0).isDef() &&
727 CondRegs[Cond] = MI.getOperand(0).getReg();
732 if (MI.findRegisterDefOperand(X86::EFLAGS, /*TRI=*/nullptr))
740 const DebugLoc &TestLoc, X86::CondCode Cond) {
742 auto SetI = BuildMI(TestMBB, TestPos, TestLoc, TII->get(X86::SETCCr), Reg)
752 const DebugLoc &TestLoc, X86::CondCode Cond, CondRegArray &CondRegs) {
754 unsigned &InvCondReg = CondRegs[X86::GetOppositeBranchCondition(Cond)];
768 BuildMI(MBB, Pos, Loc, TII->get(X86::TEST8rr)).addReg(Reg).addReg(Reg);
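Lines 742 and 768 are the two halves of the core rewrite: a SETCCr materializes the needed condition into a byte register before the flags are clobbered, and a TEST8rr of that register against itself rebuilds a flag state at the use, where ZF is set exactly when the saved byte is zero. The sketch below only models that invariant (plain C++, not LLVM code) to show why rewriting users to COND_NE, as at line 910, recovers the original condition.

    #include <cassert>
    #include <cstdint>

    // SETcc materializes a single condition as 0 or 1 in a byte register.
    uint8_t setcc(bool CondWasTrue) { return CondWasTrue ? 1 : 0; }

    // After TEST8rr reg,reg the only flag the rewrite relies on is
    // ZF == (reg == 0).
    bool zfAfterTest8rr(uint8_t Reg) { return Reg == 0; }

    int main() {
      for (bool Cond : {false, true}) {
        uint8_t Saved = setcc(Cond);      // SETcc %saved  (emitted before the clobber)
        bool ZF = zfAfterTest8rr(Saved);  // TEST8rr %saved, %saved (emitted at the use)
        bool Recovered = !ZF;             // users rewritten to COND_NE (line 910)
        assert(Recovered == Cond);
      }
    }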
779 X86::CondCode Cond = X86::getCondFromSETCC(MI);
790 assert(MI.getOperand(0).isReg() &&
792 Register OldReg = MI.getOperand(0).getReg();
803 TII->get(X86::MOV8mr));
805 for (int i = 0; i < X86::AddrNumOperands; ++i)
817 X86::CondCode Cond = X86::COND_B; // CF == 1
834 TII->get(Subtarget->hasNDD() ? X86::ADD8ri_ND : X86::ADD8ri))
841 MI.findRegisterUseOperand(X86::EFLAGS, /*TRI=*/nullptr)->setIsKill(true);
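For the flag-reading arithmetic ops (ADC/SBB/RCL/RCR, lines 136-137), branching on the saved byte is not enough; a real value has to be put back into CF. Lines 817-834 do that by picking COND_B and emitting an ADD8ri of the saved 0/1 byte with an immediate that overflows the 8-bit range exactly when the byte is 1 (the in-tree pass uses 255; the immediate is not visible in this excerpt). The snippet below is just an arithmetic check of that trick, not LLVM code.

    #include <cassert>
    #include <cstdint>

    // Carry out of an 8-bit add.
    bool carryAfterAdd8(uint8_t Reg, uint8_t Imm) {
      return unsigned(Reg) + unsigned(Imm) > 0xFFu;
    }

    int main() {
      assert(!carryAfterAdd8(/*saved CF=*/0, 255)); // 255 + 0 = 255: no carry
      assert( carryAfterAdd8(/*saved CF=*/1, 255)); // 255 + 1 = 256: CF set again
    }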
844 static X86::CondCode getImplicitCondFromMI(unsigned Opc) {
846 case X86::CMOV##A##_Fp32: \
847 case X86::CMOV##A##_Fp64: \
848 case X86::CMOV##A##_Fp80: \
849 return X86::COND_##B;
853 return X86::COND_INVALID;
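getImplicitCondFromMI (lines 844-853) recovers the condition from the x87 FCMOV pseudo-instructions, whose opcodes encode it implicitly, using a small macro to stamp out one case group per condition. A stripped-down illustration of that macro pattern follows; the FP_CONDS list, CASE macro, and enumerators are invented for the sketch and cover only two conditions and two widths.

    #include <cstdio>

    enum CondCode { COND_B, COND_E, COND_INVALID };
    enum Opcode { CMOVB_Fp32, CMOVB_Fp64, CMOVE_Fp32, CMOVE_Fp64, OTHER_OP };

    // One case group per condition, mirroring the shape of lines 846-849.
    #define FP_CONDS(X) X(B, B) X(E, E)

    CondCode getImplicitCond(Opcode Opc) {
      switch (Opc) {
    #define CASE(A, B)                                                         \
      case CMOV##A##_Fp32:                                                     \
      case CMOV##A##_Fp64:                                                     \
        return COND_##B;
        FP_CONDS(CASE)
    #undef CASE
      default:
        return COND_INVALID;
      }
    }

    int main() {
      std::printf("CMOVE_Fp64 -> %d (COND_E == %d)\n",
                  getImplicitCond(CMOVE_Fp64), COND_E);
    }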
866 static unsigned getOpcodeWithCC(unsigned Opc, X86::CondCode CC) {
867 assert((CC == X86::COND_E || CC == X86::COND_NE) && "Unexpected CC");
869 case X86::CMOVB_##A: \
870 case X86::CMOVE_##A: \
871 case X86::CMOVP_##A: \
872 case X86::CMOVBE_##A: \
873 case X86::CMOVNB_##A: \
874 case X86::CMOVNE_##A: \
875 case X86::CMOVNP_##A: \
876 case X86::CMOVNBE_##A: \
877 return (CC == X86::COND_E) ? X86::CMOVE_##A : X86::CMOVNE_##A;
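getOpcodeWithCC (lines 866-877) shows the same collapse from the opcode side: once the interesting condition lives in a re-tested byte register, every CMOVcc flavour of a given register class is replaced by either CMOVE or CMOVNE. The toy mapping below merely restates that; the opcode and condition names are placeholders, not LLVM's enumerators.

    #include <cstdio>

    // Placeholder names only.
    enum Opcode { CMOVB_GR32, CMOVE_GR32, CMOVP_GR32, CMOVBE_GR32, CMOVNE_GR32 };
    enum CondCode { COND_E, COND_NE };

    // Whichever CMOVcc flavour came in, only the chosen E/NE test survives,
    // because the original condition is now the 0/1 byte fed through TEST8rr.
    Opcode getOpcodeWithCC(Opcode /*Old*/, CondCode CC) {
      return CC == COND_E ? CMOVE_GR32 : CMOVNE_GR32;
    }

    int main() {
      std::printf("%s\n", getOpcodeWithCC(CMOVB_GR32, COND_NE) == CMOVNE_GR32
                              ? "CMOVB_GR32 -> CMOVNE_GR32"
                              : "unexpected");
    }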
894 X86::CondCode CC = X86::getCondFromMI(MI);
895 if (CC == X86::COND_INVALID) {
899 assert(CC != X86::COND_INVALID && "Unknown EFLAG user!");
910 X86::CondCode NewCC = Inverted ? X86::COND_E : X86::COND_NE;
916 MI.findRegisterUseOperand(X86::EFLAGS, /*TRI=*/nullptr)->setIsKill(true);