Lines Matching full:riscv (RISCVRegisterInfo.cpp)

14 #include "RISCV.h"
33 static cl::opt<bool> DisableCostPerUse("riscv-disable-cost-per-use",
36 DisableRegAllocHints("riscv-disable-regalloc-hints", cl::Hidden,
41 static_assert(RISCV::X1 == RISCV::X0 + 1, "Register list not consecutive");
42 static_assert(RISCV::X31 == RISCV::X0 + 31, "Register list not consecutive");
43 static_assert(RISCV::F1_H == RISCV::F0_H + 1, "Register list not consecutive");
44 static_assert(RISCV::F31_H == RISCV::F0_H + 31,
46 static_assert(RISCV::F1_F == RISCV::F0_F + 1, "Register list not consecutive");
47 static_assert(RISCV::F31_F == RISCV::F0_F + 31,
49 static_assert(RISCV::F1_D == RISCV::F0_D + 1, "Register list not consecutive");
50 static_assert(RISCV::F31_D == RISCV::F0_D + 31,
52 static_assert(RISCV::V1 == RISCV::V0 + 1, "Register list not consecutive");
53 static_assert(RISCV::V31 == RISCV::V0 + 31, "Register list not consecutive");
56 : RISCVGenRegisterInfo(RISCV::X1, /*DwarfFlavour*/0, /*EHFlavor*/0,
118 markSuperRegs(Reserved, RISCV::X2); // sp
119 markSuperRegs(Reserved, RISCV::X3); // gp
120 markSuperRegs(Reserved, RISCV::X4); // tp
122 markSuperRegs(Reserved, RISCV::X8); // fp
130 markSuperRegs(Reserved, RISCV::DUMMY_REG_PAIR_WITH_X0);
134 for (MCPhysReg Reg = RISCV::X16; Reg <= RISCV::X31; Reg++)
138 markSuperRegs(Reserved, RISCV::VL);
139 markSuperRegs(Reserved, RISCV::VTYPE);
140 markSuperRegs(Reserved, RISCV::VXSAT);
141 markSuperRegs(Reserved, RISCV::VXRM);
144 markSuperRegs(Reserved, RISCV::FRM);
145 markSuperRegs(Reserved, RISCV::FFLAGS);
148 markSuperRegs(Reserved, RISCV::VCIX_STATE);
153 markSuperRegs(Reserved, RISCV::X23);
154 markSuperRegs(Reserved, RISCV::X27);
158 markSuperRegs(Reserved, RISCV::SSP);
191 unsigned ScalableAdjOpc = RISCV::ADD;
195 ScalableAdjOpc = RISCV::SUB;
200 ScratchReg = MRI.createVirtualRegister(&RISCV::GPRRegClass);
208 BuildMI(MBB, II, DL, TII->get(RISCV::PseudoReadVLENB), ScratchReg)
211 if (ScalableAdjOpc == RISCV::ADD && ST.hasStdExtZba() &&
213 unsigned Opc = NumOfVReg == 2 ? RISCV::SH1ADD :
214 (NumOfVReg == 4 ? RISCV::SH2ADD : RISCV::SH3ADD);
235 BuildMI(MBB, II, DL, TII->get(RISCV::ADDI), DestReg)
253 BuildMI(MBB, II, DL, TII->get(RISCV::ADDI), DestReg)
257 BuildMI(MBB, II, DL, TII->get(RISCV::ADDI), DestReg)
272 Opc = RISCV::SH3ADD;
275 Opc = RISCV::SH2ADD;
279 Register ScratchReg = MRI.createVirtualRegister(&RISCV::GPRRegClass);
289 unsigned Opc = RISCV::ADD;
292 Opc = RISCV::SUB;
295 Register ScratchReg = MRI.createVirtualRegister(&RISCV::GPRRegClass);
314 auto ZvlssegInfo = RISCV::isRVVSpillForZvlsseg(II->getOpcode());
323 Opcode = RISCV::VS1R_V;
324 SubRegIdx = RISCV::sub_vrm1_0;
327 Opcode = RISCV::VS2R_V;
328 SubRegIdx = RISCV::sub_vrm2_0;
331 Opcode = RISCV::VS4R_V;
332 SubRegIdx = RISCV::sub_vrm4_0;
335 static_assert(RISCV::sub_vrm1_7 == RISCV::sub_vrm1_0 + 7,
337 static_assert(RISCV::sub_vrm2_3 == RISCV::sub_vrm2_0 + 3,
339 static_assert(RISCV::sub_vrm4_1 == RISCV::sub_vrm4_0 + 1,
342 Register VL = MRI.createVirtualRegister(&RISCV::GPRRegClass);
349 BuildMI(MBB, II, DL, TII->get(RISCV::PseudoReadVLENB), VL);
352 BuildMI(MBB, II, DL, TII->get(RISCV::SLLI), VL)
360 Register NewBase = MRI.createVirtualRegister(&RISCV::GPRRegClass);
372 BuildMI(MBB, II, DL, TII->get(RISCV::ADD), NewBase)
391 auto ZvlssegInfo = RISCV::isRVVSpillForZvlsseg(II->getOpcode());
400 Opcode = RISCV::VL1RE8_V;
401 SubRegIdx = RISCV::sub_vrm1_0;
404 Opcode = RISCV::VL2RE8_V;
405 SubRegIdx = RISCV::sub_vrm2_0;
408 Opcode = RISCV::VL4RE8_V;
409 SubRegIdx = RISCV::sub_vrm4_0;
412 static_assert(RISCV::sub_vrm1_7 == RISCV::sub_vrm1_0 + 7,
414 static_assert(RISCV::sub_vrm2_3 == RISCV::sub_vrm2_0 + 3,
416 static_assert(RISCV::sub_vrm4_1 == RISCV::sub_vrm4_0 + 1,
419 Register VL = MRI.createVirtualRegister(&RISCV::GPRRegClass);
426 BuildMI(MBB, II, DL, TII->get(RISCV::PseudoReadVLENB), VL);
429 BuildMI(MBB, II, DL, TII->get(RISCV::SLLI), VL)
437 Register NewBase = MRI.createVirtualRegister(&RISCV::GPRRegClass);
444 BuildMI(MBB, II, DL, TII->get(RISCV::ADD), NewBase)
467 bool IsRVVSpill = RISCV::isRVVSpill(MI);
493 if (Opc == RISCV::ADDI && !isInt<12>(Val)) {
500 } else if ((Opc == RISCV::PREFETCH_I || Opc == RISCV::PREFETCH_R ||
501 Opc == RISCV::PREFETCH_W) &&
505 } else if ((Opc == RISCV::PseudoRV32ZdinxLD ||
506 Opc == RISCV::PseudoRV32ZdinxSD) &&
524 if (MI.getOpcode() == RISCV::ADDI)
527 DestReg = MRI.createVirtualRegister(&RISCV::GPRRegClass);
540 if (MI.getOpcode() == RISCV::ADDI &&
552 case RISCV::PseudoVSPILL2_M1:
553 case RISCV::PseudoVSPILL2_M2:
554 case RISCV::PseudoVSPILL2_M4:
555 case RISCV::PseudoVSPILL3_M1:
556 case RISCV::PseudoVSPILL3_M2:
557 case RISCV::PseudoVSPILL4_M1:
558 case RISCV::PseudoVSPILL4_M2:
559 case RISCV::PseudoVSPILL5_M1:
560 case RISCV::PseudoVSPILL6_M1:
561 case RISCV::PseudoVSPILL7_M1:
562 case RISCV::PseudoVSPILL8_M1:
565 case RISCV::PseudoVRELOAD2_M1:
566 case RISCV::PseudoVRELOAD2_M2:
567 case RISCV::PseudoVRELOAD2_M4:
568 case RISCV::PseudoVRELOAD3_M1:
569 case RISCV::PseudoVRELOAD3_M2:
570 case RISCV::PseudoVRELOAD4_M1:
571 case RISCV::PseudoVRELOAD4_M2:
572 case RISCV::PseudoVRELOAD5_M1:
573 case RISCV::PseudoVRELOAD6_M1:
574 case RISCV::PseudoVRELOAD7_M1:
575 case RISCV::PseudoVRELOAD8_M1:
624 if (RISCV::GPRRegClass.contains(Reg))
625 CalleeSavedSize += getSpillSize(RISCV::GPRRegClass);
626 else if (RISCV::FPR64RegClass.contains(Reg))
627 CalleeSavedSize += getSpillSize(RISCV::FPR64RegClass);
628 else if (RISCV::FPR32RegClass.contains(Reg))
629 CalleeSavedSize += getSpillSize(RISCV::FPR32RegClass);
634 return !isFrameOffsetLegal(MI, RISCV::X8, MaxFPOffset);
643 return !isFrameOffsetLegal(MI, RISCV::X2, MaxSPOffset);
676 Register BaseReg = MFI.createVirtualRegister(&RISCV::GPRRegClass);
677 BuildMI(*MBB, MBBI, DL, TII->get(RISCV::ADDI), BaseReg)
715 return TFI->hasFP(MF) ? RISCV::X8 : RISCV::X2;
752 if (RC == &RISCV::VMV0RegClass)
753 return &RISCV::VRRegClass;
754 if (RC == &RISCV::VRNoV0RegClass)
755 return &RISCV::VRRegClass;
756 if (RC == &RISCV::VRM2NoV0RegClass)
757 return &RISCV::VRM2RegClass;
758 if (RC == &RISCV::VRM4NoV0RegClass)
759 return &RISCV::VRM4RegClass;
760 if (RC == &RISCV::VRM8NoV0RegClass)
761 return &RISCV::VRM8RegClass;
775 unsigned VLENB = getDwarfRegNum(RISCV::VLENB, true);
825 if (PhysReg && (!NeedGPRC || RISCV::GPRCRegClass.contains(PhysReg)) &&
839 case RISCV::AND:
840 case RISCV::OR:
841 case RISCV::XOR:
842 case RISCV::SUB:
843 case RISCV::ADDW:
844 case RISCV::SUBW:
847 case RISCV::ANDI: {
857 case RISCV::SRAI:
858 case RISCV::SRLI:
861 case RISCV::ADD:
862 case RISCV::SLLI:
864 case RISCV::ADDI:
865 case RISCV::ADDIW:
867 case RISCV::MUL:
868 case RISCV::SEXT_B:
869 case RISCV::SEXT_H:
870 case RISCV::ZEXT_H_RV32:
871 case RISCV::ZEXT_H_RV64:
875 case RISCV::ADD_UW:
879 MI.getOperand(2).getReg() == RISCV::X0;
880 case RISCV::XORI:
897 return PhysReg && RISCV::GPRCRegClass.contains(PhysReg);
907 MI.getOpcode() == RISCV::ADD_UW ||