Lines Matching defs:Opc

85 bool X86FastEmitExtend(ISD::NodeType Opc, EVT DstVT, unsigned Src, EVT SrcVT,
119 bool X86SelectFPExtOrFPTrunc(const Instruction *I, unsigned Opc,
332 unsigned Opc = 0;
336 Opc = X86::MOV8rm;
339 Opc = X86::MOV16rm;
342 Opc = X86::MOV32rm;
346 Opc = X86::MOV64rm;
349 Opc = HasAVX512 ? X86::VMOVSSZrm_alt
355 Opc = HasAVX512 ? X86::VMOVSDZrm_alt
365 Opc = HasVLX ? X86::VMOVNTDQAZ128rm :
368 Opc = HasVLX ? X86::VMOVAPSZ128rm :
371 Opc = HasVLX ? X86::VMOVUPSZ128rm :
376 Opc = HasVLX ? X86::VMOVNTDQAZ128rm :
379 Opc = HasVLX ? X86::VMOVAPDZ128rm :
382 Opc = HasVLX ? X86::VMOVUPDZ128rm :
390 Opc = HasVLX ? X86::VMOVNTDQAZ128rm :
393 Opc = HasVLX ? X86::VMOVDQA64Z128rm :
396 Opc = HasVLX ? X86::VMOVDQU64Z128rm :
402 Opc = HasVLX ? X86::VMOVNTDQAZ256rm : X86::VMOVNTDQAYrm;
406 Opc = HasVLX ? X86::VMOVAPSZ256rm : X86::VMOVAPSYrm;
408 Opc = HasVLX ? X86::VMOVUPSZ256rm : X86::VMOVUPSYrm;
413 Opc = HasVLX ? X86::VMOVNTDQAZ256rm : X86::VMOVNTDQAYrm;
417 Opc = HasVLX ? X86::VMOVAPDZ256rm : X86::VMOVAPDYrm;
419 Opc = HasVLX ? X86::VMOVUPDZ256rm : X86::VMOVUPDYrm;
427 Opc = HasVLX ? X86::VMOVNTDQAZ256rm : X86::VMOVNTDQAYrm;
431 Opc = HasVLX ? X86::VMOVDQA64Z256rm : X86::VMOVDQAYrm;
433 Opc = HasVLX ? X86::VMOVDQU64Z256rm : X86::VMOVDQUYrm;
438 Opc = X86::VMOVNTDQAZrm;
440 Opc = (Alignment >= 64) ? X86::VMOVAPSZrm : X86::VMOVUPSZrm;
445 Opc = X86::VMOVNTDQAZrm;
447 Opc = (Alignment >= 64) ? X86::VMOVAPDZrm : X86::VMOVUPDZrm;
457 Opc = X86::VMOVNTDQAZrm;
459 Opc = (Alignment >= 64) ? X86::VMOVDQA64Zrm : X86::VMOVDQU64Zrm;
467 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(Opc), ResultReg);
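
The matches above (source lines 332-467) appear to come from a load-selection path: every opcode is a register-from-memory move ("rm" suffix), a switch over the value type picks a base opcode, subtarget flags such as HasAVX512 and HasVLX plus the load alignment refine the choice, and the final Opc is handed to BuildMI through TII.get(Opc). The sketch below is a hypothetical, simplified stand-in for that dispatch shape, not the LLVM code itself; the enum values and the selectLoadOpcode helper are invented for illustration.

#include <cstdio>

// Placeholder types standing in for MVT and the X86 opcode enum.
enum class SimpleVT { i32, f32, v4f32 };
enum class LoadOpc { MOV32rm, MOVSSrm, VMOVSSZrm, MOVAPSrm, MOVUPSrm, VMOVAPSZ128rm, None };

// Pick a load opcode from the value type, feature flags, and alignment,
// mirroring the switch/ternary structure of the matches above.
static LoadOpc selectLoadOpcode(SimpleVT VT, bool HasAVX512, bool HasVLX,
                                unsigned Alignment) {
  switch (VT) {
  case SimpleVT::i32:
    return LoadOpc::MOV32rm;                                    // plain GPR load
  case SimpleVT::f32:
    return HasAVX512 ? LoadOpc::VMOVSSZrm : LoadOpc::MOVSSrm;   // scalar FP load
  case SimpleVT::v4f32:
    if (Alignment >= 16)                                        // aligned vector load
      return HasVLX ? LoadOpc::VMOVAPSZ128rm : LoadOpc::MOVAPSrm;
    return LoadOpc::MOVUPSrm;                                   // unaligned fallback
  }
  return LoadOpc::None;
}

int main() {
  LoadOpc Selected = selectLoadOpcode(SimpleVT::v4f32, false, false, 16);
  std::printf("selected opcode id: %d\n", static_cast<int>(Selected));
  return 0;
}
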
489 unsigned Opc = 0;
502 case MVT::i8: Opc = X86::MOV8mr; break;
503 case MVT::i16: Opc = X86::MOV16mr; break;
505 Opc = (IsNonTemporal && HasSSE2) ? X86::MOVNTImr : X86::MOV32mr;
509 Opc = (IsNonTemporal && HasSSE2) ? X86::MOVNTI_64mr : X86::MOV64mr;
514 Opc = X86::MOVNTSS;
516 Opc = HasAVX512 ? X86::VMOVSSZmr :
519 Opc = X86::ST_Fp32m;
524 Opc = X86::MOVNTSD;
526 Opc = HasAVX512 ? X86::VMOVSDZmr :
529 Opc = X86::ST_Fp64m;
532 Opc = (IsNonTemporal && HasSSE1) ? X86::MMX_MOVNTQmr : X86::MMX_MOVQ64mr;
537 Opc = HasVLX ? X86::VMOVNTPSZ128mr :
540 Opc = HasVLX ? X86::VMOVAPSZ128mr :
543 Opc = HasVLX ? X86::VMOVUPSZ128mr :
549 Opc = HasVLX ? X86::VMOVNTPDZ128mr :
552 Opc = HasVLX ? X86::VMOVAPDZ128mr :
555 Opc = HasVLX ? X86::VMOVUPDZ128mr :
564 Opc = HasVLX ? X86::VMOVNTDQZ128mr :
567 Opc = HasVLX ? X86::VMOVDQA64Z128mr :
570 Opc = HasVLX ? X86::VMOVDQU64Z128mr :
577 Opc = HasVLX ? X86::VMOVNTPSZ256mr : X86::VMOVNTPSYmr;
579 Opc = HasVLX ? X86::VMOVAPSZ256mr : X86::VMOVAPSYmr;
581 Opc = HasVLX ? X86::VMOVUPSZ256mr : X86::VMOVUPSYmr;
587 Opc = HasVLX ? X86::VMOVNTPDZ256mr : X86::VMOVNTPDYmr;
589 Opc = HasVLX ? X86::VMOVAPDZ256mr : X86::VMOVAPDYmr;
591 Opc = HasVLX ? X86::VMOVUPDZ256mr : X86::VMOVUPDYmr;
600 Opc = HasVLX ? X86::VMOVNTDQZ256mr : X86::VMOVNTDQYmr;
602 Opc = HasVLX ? X86::VMOVDQA64Z256mr : X86::VMOVDQAYmr;
604 Opc = HasVLX ? X86::VMOVDQU64Z256mr : X86::VMOVDQUYmr;
609 Opc = IsNonTemporal ? X86::VMOVNTPSZmr : X86::VMOVAPSZmr;
611 Opc = X86::VMOVUPSZmr;
616 Opc = IsNonTemporal ? X86::VMOVNTPDZmr : X86::VMOVAPDZmr;
618 Opc = X86::VMOVUPDZmr;
628 Opc = IsNonTemporal ? X86::VMOVNTDQZmr : X86::VMOVDQA64Zmr;
630 Opc = X86::VMOVDQU64Zmr;
634 const MCInstrDesc &Desc = TII.get(Opc);
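
The store matches (source lines 489-634, all register-to-memory "mr" opcodes) add one more input to the same decision: whether the store is non-temporal. Lines such as 505 pick the streaming MOVNT* form when it is requested and supported, and otherwise fall back to the usual aligned/unaligned pair. Below is a minimal, hypothetical sketch of that three-way choice for a 128-bit store; the names are placeholders and the 16-byte alignment threshold is my assumption.

// Hypothetical three-way store-opcode choice: streaming, aligned, unaligned.
enum class StoreOpc { MOVNTPSmr, MOVAPSmr, MOVUPSmr };

StoreOpc selectVecStoreOpcode(bool IsNonTemporal, unsigned Alignment) {
  if (IsNonTemporal && Alignment >= 16)
    return StoreOpc::MOVNTPSmr;   // non-temporal store, bypasses the caches
  if (Alignment >= 16)
    return StoreOpc::MOVAPSmr;    // aligned store
  return StoreOpc::MOVUPSmr;      // unaligned store
}
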
660 unsigned Opc = 0;
667 case MVT::i8: Opc = X86::MOV8mi; break;
668 case MVT::i16: Opc = X86::MOV16mi; break;
669 case MVT::i32: Opc = X86::MOV32mi; break;
673 Opc = X86::MOV64mi32;
677 if (Opc) {
679 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(Opc));
696 /// type SrcVT to type DstVT using the specified extension opcode Opc (e.g.
698 bool X86FastISel::X86FastEmitExtend(ISD::NodeType Opc, EVT DstVT,
701 unsigned RR = fastEmit_r(SrcVT.getSimpleVT(), DstVT.getSimpleVT(), Opc, Src);
767 unsigned Opc = 0;
778 Opc = X86::MOV64rm;
781 Opc = X86::MOV32rm;
791 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(Opc), LoadReg);
2137 unsigned Opc = X86::getCMovOpcode(TRI.getRegSizeInBits(*RC) / 8, false,
2139 Register ResultReg = fastEmitInst_rri(Opc, RC, RHSReg, LHSReg, CC);
2251 const uint16_t *Opc = nullptr;
2254 case MVT::f32: Opc = &OpcTable[0][0]; break;
2255 case MVT::f64: Opc = &OpcTable[1][0]; break;
2259 Register CmpReg = fastEmitInst_rri(Opc[0], RC, CmpLHSReg, CmpRHSReg, CC);
2260 Register AndReg = fastEmitInst_rr(Opc[1], VR128, CmpReg, LHSReg);
2261 Register AndNReg = fastEmitInst_rr(Opc[2], VR128, CmpReg, RHSReg);
2262 Register OrReg = fastEmitInst_rr(Opc[3], VR128, AndNReg, AndReg);
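
Source lines 2251-2262 are an SSE select lowering: the compare produces an all-ones or all-zeros mask per lane, and the AND/ANDN/OR triple blends the two operands as (mask & lhs) | (~mask & rhs). The following scalar model of that branchless select is a hypothetical illustration, not the LLVM code.

#include <cassert>
#include <cstdint>

// Scalar model of the CMP/AND/ANDN/OR sequence above: the compare yields an
// all-ones or all-zeros mask, and the blend is (mask & lhs) | (~mask & rhs).
uint32_t branchlessSelect(bool Cond, uint32_t LHS, uint32_t RHS) {
  uint32_t Mask = Cond ? 0xFFFFFFFFu : 0u;  // per-lane result of the compare
  return (Mask & LHS) | (~Mask & RHS);      // AND, ANDN, OR
}

int main() {
  assert(branchlessSelect(true, 1, 2) == 1);
  assert(branchlessSelect(false, 1, 2) == 2);
  return 0;
}
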
2274 unsigned Opc;
2277 case MVT::i8: Opc = X86::CMOV_GR8; break;
2278 case MVT::i16: Opc = X86::CMOV_GR16; break;
2279 case MVT::i32: Opc = X86::CMOV_GR32; break;
2281 Opc = Subtarget->hasAVX512() ? X86::CMOV_FR16X : X86::CMOV_FR16; break;
2283 Opc = Subtarget->hasAVX512() ? X86::CMOV_FR32X : X86::CMOV_FR32; break;
2285 Opc = Subtarget->hasAVX512() ? X86::CMOV_FR64X : X86::CMOV_FR64; break;
2340 fastEmitInst_rri(Opc, RC, RHSReg, LHSReg, CC);
2490 unsigned Opc =
2493 return X86SelectFPExtOrFPTrunc(I, Opc, TLI.getRegClassFor(MVT::f64));
2504 unsigned Opc =
2507 return X86SelectFPExtOrFPTrunc(I, Opc, TLI.getRegClassFor(MVT::f32));
2618 unsigned Opc = Subtarget->hasVLX() ? X86::VCVTPS2PHZ128rr
2620 InputReg = fastEmitInst_ri(Opc, RC, InputReg, 4);
2623 Opc = Subtarget->hasAVX512() ? X86::VMOVPDI2DIZrr
2626 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(Opc), ResultReg)
2641 unsigned Opc = Subtarget->hasVLX() ? X86::VCVTPH2PSZ128rr
2643 InputReg = fastEmitInst_r(Opc, RC, InputReg);
2667 unsigned Opc;
2672 case MVT::i32: Opc = X86::MOV32rm; RC = &X86::GR32RegClass; break;
2673 case MVT::i64: Opc = X86::MOV64rm; RC = &X86::GR64RegClass; break;
2703 TII.get(Opc), DestReg), SrcReg);
2809 unsigned Opc;
2812 case MVT::f32: Opc = SqrtOpc[AVXLevel][0]; break;
2813 case MVT::f64: Opc = SqrtOpc[AVXLevel][1]; break;
2832 MIB = BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(Opc),
2896 static const uint16_t Opc[2][4] = {
2907 TII.get(Opc[IsDec][VT.SimpleTy-MVT::i8]), ResultReg)
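
Source lines 2896-2907 use a table instead of a switch: Opc[IsDec][VT.SimpleTy - MVT::i8] indexes a 2x4 array by operation (increment vs. decrement) and by integer width, relying on the integer MVTs being contiguous. A hypothetical sketch of that lookup, with dummy opcode values:

#include <cstdint>

// Placeholder widths standing in for MVT::i8..MVT::i64; they must stay
// contiguous for the column arithmetic below to work.
enum IntVT { i8, i16, i32, i64 };

// One row per operation, one column per width (dummy opcode ids).
static const uint16_t IncDecTable[2][4] = {
    {0x40, 0x41, 0x42, 0x43},   // "inc" row
    {0x48, 0x49, 0x4A, 0x4B},   // "dec" row
};

uint16_t lookupIncDec(bool IsDec, IntVT VT) {
  // Mirrors Opc[IsDec][VT.SimpleTy - MVT::i8]: subtracting the first
  // enumerator turns the type into a column index.
  return IncDecTable[IsDec][VT - i8];
}
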
2998 unsigned Opc;
3001 case MVT::i32: Opc = CvtOpc[AVXLevel][IsInputDouble][0]; break;
3002 case MVT::i64: Opc = CvtOpc[AVXLevel][IsInputDouble][1]; break;
3025 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(Opc), ResultReg)
3044 unsigned Opc;
3052 Opc = GET_EGPR_IF_ENABLED(X86::CRC32r32r8);
3056 Opc = GET_EGPR_IF_ENABLED(X86::CRC32r32r16);
3060 Opc = GET_EGPR_IF_ENABLED(X86::CRC32r32r32);
3064 Opc = GET_EGPR_IF_ENABLED(X86::CRC32r64r64);
3078 Register ResultReg = fastEmitInst_rr(Opc, RC, LHSReg, RHSReg);
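
Source lines 3044-3078 pick the hardware CRC32 instruction by operand width (r32r8, r32r16, r32r32, r64r64). The SSE4.2 intrinsics expose the same width split; the usage sketch below is illustrative only and needs an SSE4.2-capable target (e.g. compile with -msse4.2).

#include <cstdint>
#include <cstdio>
#include <nmmintrin.h>   // SSE4.2 CRC32 intrinsics

int main() {
  uint32_t Crc = 0;
  // Same accumulator, a different instruction per operand width, matching
  // the CRC32r32r8 / r32r16 / r32r32 / r64r64 split above.
  Crc = _mm_crc32_u8(Crc, 0x42);
  Crc = _mm_crc32_u16(Crc, 0x1234);
  Crc = _mm_crc32_u32(Crc, 0xDEADBEEFu);
  uint64_t Crc64 = _mm_crc32_u64(Crc, 0x0123456789ABCDEFULL);
  std::printf("crc32c accumulator: %llx\n", (unsigned long long)Crc64);
  return 0;
}
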
3618 unsigned Opc = ResVT == MVT::f32 ? X86::ST_Fp80m32 : X86::ST_Fp80m64;
3622 TII.get(Opc)), FI)
3624 Opc = ResVT == MVT::f32 ? X86::MOVSSrm_alt : X86::MOVSDrm_alt;
3626 TII.get(Opc), ResultReg + i), FI);
3751 unsigned Opc = 0;
3757 case MVT::i8: Opc = X86::MOV8ri; break;
3758 case MVT::i16: Opc = X86::MOV16ri; break;
3759 case MVT::i32: Opc = X86::MOV32ri; break;
3762 Opc = X86::MOV32ri64;
3764 Opc = X86::MOV64ri32;
3766 Opc = X86::MOV64ri;
3770 return fastEmitInst_i(Opc, TLI.getRegClassFor(VT), Imm);
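
For MVT::i64 constants (source lines 3751-3770) three move forms appear: MOV32ri64, MOV64ri32, and MOV64ri. The matches do not show the guarding conditions; the natural split, and the one sketched below, is by how the immediate fits in 32 bits, so treat the exact conditions as an assumption and the helper name as invented.

#include <cstdint>
#include <limits>

enum class ImmOpc { MOV32ri64, MOV64ri32, MOV64ri };

// Classify a 64-bit immediate: zero-extendable, sign-extendable, or full width.
ImmOpc selectImm64Opcode(int64_t Imm) {
  if (static_cast<uint64_t>(Imm) <= std::numeric_limits<uint32_t>::max())
    return ImmOpc::MOV32ri64;   // fits unsigned 32 bits: write the low half,
                                // the high half is implicitly zeroed
  if (Imm >= std::numeric_limits<int32_t>::min() &&
      Imm <= std::numeric_limits<int32_t>::max())
    return ImmOpc::MOV64ri32;   // fits signed 32 bits: sign-extended immediate
  return ImmOpc::MOV64ri;       // needs the full 64-bit immediate form
}
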
3784 unsigned Opc = 0;
3792 Opc = HasAVX512 ? X86::VMOVSSZrm_alt
3798 Opc = HasAVX512 ? X86::VMOVSDZrm_alt
3832 TII.get(Opc), ResultReg);
3842 TII.get(Opc), ResultReg),
3873 unsigned Opc =
3878 TII.get(Opc), ResultReg), AM);
3900 unsigned Opc = 0;
3906 Opc = X86::LD_Fp032;
3910 Opc = X86::LD_Fp064;
3913 Opc = X86::LD_Fp080;
3917 if (Opc) {
3919 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(Opc),
3943 unsigned Opc =
3950 TII.get(Opc), ResultReg), AM);
3963 unsigned Opc = 0;
3967 Opc = HasAVX512 ? X86::AVX512_FsFLD0SH : X86::FsFLD0SH;
3970 Opc = HasAVX512 ? X86::AVX512_FsFLD0SS
3975 Opc = HasAVX512 ? X86::AVX512_FsFLD0SD
3985 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, MIMD, TII.get(Opc), ResultReg);
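
The final matches (source lines 3963-3985) select the FsFLD0SH/SS/SD pseudos, which as far as I know are later expanded to xor-ing the destination register with itself, producing a floating-point +0.0 without loading it from memory the way the path at source lines 3784-3878 does. Only a bit-exact positive zero qualifies for that shortcut; the guard below is a hypothetical sketch of the check, with an invented helper name.

#include <cmath>

// Only +0.0 can be produced by a register-zeroing pseudo; -0.0 and every
// other constant have to come from memory.
bool canUseRegisterZero(double V) {
  return V == 0.0 && !std::signbit(V);
}
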