Lines Matching +full:0 +full:xa
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
51 // ** printing (for example: xxswapd for xxpermdi with 0x2 as the imm). **
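For example, this alias-style printing can be expressed with an InstAlias record (a sketch only, mirroring the xxspltd aliases shown near the end of this listing; the operand classes here are assumptions):

def : InstAlias<"xxswapd $XT, $XB",
                (XXPERMDI vsrc:$XT, vsrc:$XB, vsrc:$XB, 2)>;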
55 SDTCisVT<0, v4f32>, SDTCisPtrTy<1>
59 SDTCisVT<0, v2f64>, SDTCisVT<1, v4f32>, SDTCisPtrTy<2>
63 SDTCisVec<0>, SDTCisPtrTy<1>
68 SDTCisVT<0, v2f64>, SDTCisPtrTy<1>
70 def SDT_PPCstxvd2x : SDTypeProfile<0, 2, [
71 SDTCisVT<0, v2f64>, SDTCisPtrTy<1>
74 SDTCisSameAs<0, 1>
77 SDTCisVec<0>, SDTCisVec<1>, SDTCisPtrTy<2>
80 SDTCisVec<0>, SDTCisPtrTy<1>
82 def SDT_PPCst_vec_be : SDTypeProfile<0, 2, [
83 SDTCisVec<0>, SDTCisPtrTy<1>
87 SDTCisVT<0, v2f64>, SDTCisVT<1, v2f64>,
142 let AddedComplexity = 400, hasSideEffects = 0 in {
147 def NAME : XX3Form_Rc<opcode, xo, (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
149 [(set OutTy:$XT, (Int InTy:$XA, InTy:$XB))]>;
151 def _rec : XX3Form_Rc<opcode, xo, (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
154 (InTy (PPCvcmp_rec InTy:$XA, InTy:$XB, xo)))]>,
166 // (XXPERMDI (LXSIBZX xoaddr:$src), (LXSIBZX xoaddr:$src), 0))
172 let XB = XA;
220 : XX3Form<opcode, xo, (outs xty:$XT), (ins aty:$XA, bty:$XB),
221 !strconcat(opc, " $XT, $XA, $XB"), itin, pattern>;
275 } // AddedComplexity = 400, hasSideEffects = 0
287 let hasSideEffects = 0 in {
290 let mayLoad = 1, mayStore = 0 in {
321 let mayStore = 1, mayLoad = 0 in {
354 (outs vsfrc:$XT), (ins vsfrc:$XA, vsfrc:$XB),
355 "xsadddp $XT, $XA, $XB", IIC_VecFP,
356 [(set f64:$XT, (any_fadd f64:$XA, f64:$XB))]>;
358 (outs vsfrc:$XT), (ins vsfrc:$XA, vsfrc:$XB),
359 "xsmuldp $XT, $XA, $XB", IIC_VecFP,
360 [(set f64:$XT, (any_fmul f64:$XA, f64:$XB))]>;
363 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
364 "xvadddp $XT, $XA, $XB", IIC_VecFP,
365 [(set v2f64:$XT, (any_fadd v2f64:$XA, v2f64:$XB))]>;
368 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
369 "xvaddsp $XT, $XA, $XB", IIC_VecFP,
370 [(set v4f32:$XT, (any_fadd v4f32:$XA, v4f32:$XB))]>;
373 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
374 "xvmuldp $XT, $XA, $XB", IIC_VecFP,
375 [(set v2f64:$XT, (any_fmul v2f64:$XA, v2f64:$XB))]>;
378 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
379 "xvmulsp $XT, $XA, $XB", IIC_VecFP,
380 [(set v4f32:$XT, (any_fmul v4f32:$XA, v4f32:$XB))]>;
385 (outs vsfrc:$XT), (ins vsfrc:$XA, vsfrc:$XB),
386 "xssubdp $XT, $XA, $XB", IIC_VecFP,
387 [(set f64:$XT, (any_fsub f64:$XA, f64:$XB))]>;
390 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
391 "xvsubdp $XT, $XA, $XB", IIC_VecFP,
392 [(set v2f64:$XT, (any_fsub v2f64:$XA, v2f64:$XB))]>;
394 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
395 "xvsubsp $XT, $XA, $XB", IIC_VecFP,
396 [(set v4f32:$XT, (any_fsub v4f32:$XA, v4f32:$XB))]>;
402 (outs vsfrc:$XT), (ins vsfrc:$XTi, vsfrc:$XA, vsfrc:$XB),
403 "xsmaddadp $XT, $XA, $XB", IIC_VecFP,
404 [(set f64:$XT, (any_fma f64:$XA, f64:$XB, f64:$XTi))]>,
409 (outs vsfrc:$XT), (ins vsfrc:$XTi, vsfrc:$XA, vsfrc:$XB),
410 "xsmaddmdp $XT, $XA, $XB", IIC_VecFP, []>,
418 (outs vsfrc:$XT), (ins vsfrc:$XTi, vsfrc:$XA, vsfrc:$XB),
419 "xsmsubadp $XT, $XA, $XB", IIC_VecFP,
420 [(set f64:$XT, (any_fma f64:$XA, f64:$XB, (fneg f64:$XTi)))]>,
425 (outs vsfrc:$XT), (ins vsfrc:$XTi, vsfrc:$XA, vsfrc:$XB),
426 "xsmsubmdp $XT, $XA, $XB", IIC_VecFP, []>,
434 (outs vsfrc:$XT), (ins vsfrc:$XTi, vsfrc:$XA, vsfrc:$XB),
435 "xsnmaddadp $XT, $XA, $XB", IIC_VecFP,
436 [(set f64:$XT, (fneg (any_fma f64:$XA, f64:$XB, f64:$XTi)))]>,
441 (outs vsfrc:$XT), (ins vsfrc:$XTi, vsfrc:$XA, vsfrc:$XB),
442 "xsnmaddmdp $XT, $XA, $XB", IIC_VecFP, []>,
450 (outs vsfrc:$XT), (ins vsfrc:$XTi, vsfrc:$XA, vsfrc:$XB),
451 "xsnmsubadp $XT, $XA, $XB", IIC_VecFP,
452 [(set f64:$XT, (fneg (any_fma f64:$XA, f64:$XB, (fneg f64:$XTi))))]>,
457 (outs vsfrc:$XT), (ins vsfrc:$XTi, vsfrc:$XA, vsfrc:$XB),
458 "xsnmsubmdp $XT, $XA, $XB", IIC_VecFP, []>,
466 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
467 "xvmaddadp $XT, $XA, $XB", IIC_VecFP,
468 [(set v2f64:$XT, (any_fma v2f64:$XA, v2f64:$XB, v2f64:$XTi))]>,
473 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
474 "xvmaddmdp $XT, $XA, $XB", IIC_VecFP, []>,
482 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
483 "xvmaddasp $XT, $XA, $XB", IIC_VecFP,
484 [(set v4f32:$XT, (any_fma v4f32:$XA, v4f32:$XB, v4f32:$XTi))]>,
489 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
490 "xvmaddmsp $XT, $XA, $XB", IIC_VecFP, []>,
498 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
499 "xvmsubadp $XT, $XA, $XB", IIC_VecFP,
500 [(set v2f64:$XT, (any_fma v2f64:$XA, v2f64:$XB, (fneg v2f64:$XTi)))]>,
505 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
506 "xvmsubmdp $XT, $XA, $XB", IIC_VecFP, []>,
514 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
515 "xvmsubasp $XT, $XA, $XB", IIC_VecFP,
516 [(set v4f32:$XT, (any_fma v4f32:$XA, v4f32:$XB, (fneg v4f32:$XTi)))]>,
521 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
522 "xvmsubmsp $XT, $XA, $XB", IIC_VecFP, []>,
530 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
531 "xvnmaddadp $XT, $XA, $XB", IIC_VecFP,
532 [(set v2f64:$XT, (fneg (any_fma v2f64:$XA, v2f64:$XB, v2f64:$XTi)))]>,
537 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
538 "xvnmaddmdp $XT, $XA, $XB", IIC_VecFP, []>,
546 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
547 "xvnmaddasp $XT, $XA, $XB", IIC_VecFP,
548 [(set v4f32:$XT, (fneg (fma v4f32:$XA, v4f32:$XB, v4f32:$XTi)))]>,
553 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
554 "xvnmaddmsp $XT, $XA, $XB", IIC_VecFP, []>,
562 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
563 "xvnmsubadp $XT, $XA, $XB", IIC_VecFP,
564 [(set v2f64:$XT, (fneg (any_fma v2f64:$XA, v2f64:$XB, (fneg v2f64:$XTi))))]>,
569 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
570 "xvnmsubmdp $XT, $XA, $XB", IIC_VecFP, []>,
578 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
579 "xvnmsubasp $XT, $XA, $XB", IIC_VecFP,
580 [(set v4f32:$XT, (fneg (any_fma v4f32:$XA, v4f32:$XB, (fneg v4f32:$XTi))))]>,
585 (outs vsrc:$XT), (ins vsrc:$XTi, vsrc:$XA, vsrc:$XB),
586 "xvnmsubmsp $XT, $XA, $XB", IIC_VecFP, []>,
593 (outs vsfrc:$XT), (ins vsfrc:$XA, vsfrc:$XB),
594 "xsdivdp $XT, $XA, $XB", IIC_FPDivD,
595 [(set f64:$XT, (any_fdiv f64:$XA, f64:$XB))]>;
610 let mayRaiseFPException = 0 in {
612 (outs crrc:$CR), (ins vsfrc:$XA, vsfrc:$XB),
613 "xstdivdp $CR, $XA, $XB", IIC_FPCompare, []>;
619 (outs crrc:$CR), (ins vsrc:$XA, vsrc:$XB),
620 "xvtdivdp $CR, $XA, $XB", IIC_FPCompare, []>;
622 (outs crrc:$CR), (ins vsrc:$XA, vsrc:$XB),
623 "xvtdivsp $CR, $XA, $XB", IIC_FPCompare, []>;
636 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
637 "xvdivdp $XT, $XA, $XB", IIC_FPDivD,
638 [(set v2f64:$XT, (any_fdiv v2f64:$XA, v2f64:$XB))]>;
640 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
641 "xvdivsp $XT, $XA, $XB", IIC_FPDivS,
642 [(set v4f32:$XT, (any_fdiv v4f32:$XA, v4f32:$XB))]>;
673 (outs crrc:$CR), (ins vsfrc:$XA, vsfrc:$XB),
674 "xscmpodp $CR, $XA, $XB", IIC_FPCompare, []>;
676 (outs crrc:$CR), (ins vsfrc:$XA, vsfrc:$XB),
677 "xscmpudp $CR, $XA, $XB", IIC_FPCompare, []>;
680 "xvcmpeqdp", "$XT, $XA, $XB", IIC_VecFPCompare,
683 "xvcmpeqsp", "$XT, $XA, $XB", IIC_VecFPCompare,
686 "xvcmpgedp", "$XT, $XA, $XB", IIC_VecFPCompare,
689 "xvcmpgesp", "$XT, $XA, $XB", IIC_VecFPCompare,
692 "xvcmpgtdp", "$XT, $XA, $XB", IIC_VecFPCompare,
695 "xvcmpgtsp", "$XT, $XA, $XB", IIC_VecFPCompare,
699 let mayRaiseFPException = 0 in {
718 (outs vsfrc:$XT), (ins vsfrc:$XA, vsfrc:$XB),
719 "xscpsgndp $XT, $XA, $XB", IIC_VecFP,
720 [(set f64:$XT, (fcopysign f64:$XB, f64:$XA))]>;
733 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
734 "xvcpsgndp $XT, $XA, $XB", IIC_VecFP,
735 [(set v2f64:$XT, (fcopysign v2f64:$XB, v2f64:$XA))]>;
737 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
738 "xvcpsgnsp $XT, $XA, $XB", IIC_VecFP,
739 [(set v4f32:$XT, (fcopysign v4f32:$XB, v4f32:$XA))]>;
878 let mayRaiseFPException = 0 in {
902 (outs vsfrc:$XT), (ins vsfrc:$XA, vsfrc:$XB),
903 "xsmaxdp $XT, $XA, $XB", IIC_VecFP,
905 (int_ppc_vsx_xsmaxdp vsfrc:$XA, vsfrc:$XB))]>;
907 (outs vsfrc:$XT), (ins vsfrc:$XA, vsfrc:$XB),
908 "xsmindp $XT, $XA, $XB", IIC_VecFP,
910 (int_ppc_vsx_xsmindp vsfrc:$XA, vsfrc:$XB))]>;
913 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
914 "xvmaxdp $XT, $XA, $XB", IIC_VecFP,
916 (int_ppc_vsx_xvmaxdp vsrc:$XA, vsrc:$XB))]>;
918 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
919 "xvmindp $XT, $XA, $XB", IIC_VecFP,
921 (int_ppc_vsx_xvmindp vsrc:$XA, vsrc:$XB))]>;
924 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
925 "xvmaxsp $XT, $XA, $XB", IIC_VecFP,
927 (int_ppc_vsx_xvmaxsp vsrc:$XA, vsrc:$XB))]>;
929 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
930 "xvminsp $XT, $XA, $XB", IIC_VecFP,
932 (int_ppc_vsx_xvminsp vsrc:$XA, vsrc:$XB))]>;
992 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
993 "xxland $XT, $XA, $XB", IIC_VecGeneral,
994 [(set v4i32:$XT, (and v4i32:$XA, v4i32:$XB))]>;
996 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
997 "xxlandc $XT, $XA, $XB", IIC_VecGeneral,
998 [(set v4i32:$XT, (and v4i32:$XA,
1002 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
1003 "xxlnor $XT, $XA, $XB", IIC_VecGeneral,
1004 [(set v4i32:$XT, (vnot (or v4i32:$XA,
1007 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
1008 "xxlor $XT, $XA, $XB", IIC_VecGeneral,
1009 [(set v4i32:$XT, (or v4i32:$XA, v4i32:$XB))]>;
1012 (outs vsfrc:$XT), (ins vsfrc:$XA, vsfrc:$XB),
1013 "xxlor $XT, $XA, $XB", IIC_VecGeneral, []>;
1015 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
1016 "xxlxor $XT, $XA, $XB", IIC_VecGeneral,
1017 [(set v4i32:$XT, (xor v4i32:$XA, v4i32:$XB))]>;
1037 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
1038 "xxmrghw $XT, $XA, $XB", IIC_VecPerm, []>;
1040 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
1041 "xxmrglw $XT, $XA, $XB", IIC_VecPerm, []>;
1044 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB, u2imm:$D),
1045 "xxpermdi $XT, $XA, $XB, $D", IIC_VecPerm,
1046 [(set v2i64:$XT, (PPCxxpermdi v2i64:$XA, v2i64:$XB,
1049 // Note that the input register class for `$XA` of XXPERMDIs is `vsfrc` which
1057 def XXPERMDIs : XX3Form_2s<60, 10, (outs vsrc:$XT), (ins vsfrc:$XA, u2imm:$D),
1058 "xxpermdi $XT, $XA, $XA, $D", IIC_VecPerm, []>;
1060 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB, vsrc:$XC),
1061 "xxsel $XT, $XA, $XB, $XC", IIC_VecPerm, []>;
1064 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB, u2imm:$D),
1065 "xxsldwi $XT, $XA, $XB, $D", IIC_VecPerm,
1066 [(set v4i32:$XT, (PPCvecshl v4i32:$XA, v4i32:$XB,
1071 (outs vsrc:$XT), (ins vsfrc:$XA, u2imm:$D),
1072 "xxsldwi $XT, $XA, $XA, $D", IIC_VecPerm, []>;
1088 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
1089 "xxleqv $XT, $XA, $XB", IIC_VecGeneral,
1090 [(set v4i32:$XT, (vnot (xor v4i32:$XA, v4i32:$XB)))]>;
1092 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
1093 "xxlnand $XT, $XA, $XB", IIC_VecGeneral,
1094 [(set v4i32:$XT, (vnot (and v4i32:$XA, v4i32:$XB)))]>;
1105 (outs vsrc:$XT), (ins vsrc:$XA, vsrc:$XB),
1106 "xxlorc $XT, $XA, $XB", IIC_VecGeneral,
1107 [(set v4i32:$XT, (or v4i32:$XA, (vnot v4i32:$XB)))]>;
1110 let mayLoad = 1, mayStore = 0 in {
1135 let mayStore = 1, mayLoad = 0 in {
1156 def XSADDSP : XX3Form<60, 0,
1157 (outs vssrc:$XT), (ins vssrc:$XA, vssrc:$XB),
1158 "xsaddsp $XT, $XA, $XB", IIC_VecFP,
1159 [(set f32:$XT, (any_fadd f32:$XA, f32:$XB))]>;
1161 (outs vssrc:$XT), (ins vssrc:$XA, vssrc:$XB),
1162 "xsmulsp $XT, $XA, $XB", IIC_VecFP,
1163 [(set f32:$XT, (any_fmul f32:$XA, f32:$XB))]>;
1167 (outs vssrc:$XT), (ins vssrc:$XA, vssrc:$XB),
1168 "xssubsp $XT, $XA, $XB", IIC_VecFP,
1169 [(set f32:$XT, (any_fsub f32:$XA, f32:$XB))]>;
1171 (outs vssrc:$XT), (ins vssrc:$XA, vssrc:$XB),
1172 "xsdivsp $XT, $XA, $XB", IIC_FPDivS,
1173 [(set f32:$XT, (any_fdiv f32:$XA, f32:$XB))]>;
1199 (ins vssrc:$XTi, vssrc:$XA, vssrc:$XB),
1200 "xsmaddasp $XT, $XA, $XB", IIC_VecFP,
1201 [(set f32:$XT, (any_fma f32:$XA, f32:$XB, f32:$XTi))]>,
1208 (ins vssrc:$XTi, vssrc:$XA, vssrc:$XB),
1209 "xsmaddmsp $XT, $XA, $XB", IIC_VecFP, []>,
1218 (ins vssrc:$XTi, vssrc:$XA, vssrc:$XB),
1219 "xsmsubasp $XT, $XA, $XB", IIC_VecFP,
1220 [(set f32:$XT, (any_fma f32:$XA, f32:$XB,
1228 (ins vssrc:$XTi, vssrc:$XA, vssrc:$XB),
1229 "xsmsubmsp $XT, $XA, $XB", IIC_VecFP, []>,
1238 (ins vssrc:$XTi, vssrc:$XA, vssrc:$XB),
1239 "xsnmaddasp $XT, $XA, $XB", IIC_VecFP,
1240 [(set f32:$XT, (fneg (any_fma f32:$XA, f32:$XB,
1248 (ins vssrc:$XTi, vssrc:$XA, vssrc:$XB),
1249 "xsnmaddmsp $XT, $XA, $XB", IIC_VecFP, []>,
1258 (ins vssrc:$XTi, vssrc:$XA, vssrc:$XB),
1259 "xsnmsubasp $XT, $XA, $XB", IIC_VecFP,
1260 [(set f32:$XT, (fneg (any_fma f32:$XA, f32:$XB,
1268 (ins vssrc:$XTi, vssrc:$XA, vssrc:$XB),
1269 "xsnmsubmsp $XT, $XA, $XB", IIC_VecFP, []>,
1363 def XSABSQP : X_VT5_XO5_VB5<63, 0, 804, "xsabsqp",
1452 (outs crrc:$CR), (ins vsfrc:$XA, vsfrc:$XB),
1453 "xscmpexpdp $CR, $XA, $XB", IIC_FPCompare, []>;
1462 // XT.dword[1] = 0x0000_0000_0000_0000
1524 def XSRQPI : Z23_VT5_R1_VB5_RMC2_EX1<63, 5, 0, "xsrqpi" , []>;
1530 def XSRQPXP : Z23_VT5_R1_VB5_RMC2_EX1<63, 37, 0, "xsrqpxp", []>;
1537 // XT NOTE: XT.dword[1] = 0xUUUU_UUUU_UUUU_UUUU
1542 // vB NOTE: only vB.dword[0] is used, that's why we don't use
1549 def XSXEXPDP : XX2_RT5_XO5_XB6<60, 0, 347, "xsxexpdp", []>;
1577 IIC_VecFP, [(set v2f64: $XT,(int_ppc_vsx_xviexpdp v2i64:$XA, v2i64:$XB))]>;
1579 IIC_VecFP, [(set v4f32: $XT,(int_ppc_vsx_xviexpsp v4i32:$XA, v4i32:$XB))]>;
1582 def XVXEXPDP : XX2_XT6_XO5_XB6<60, 0, 475, "xvxexpdp", vsrc,
1625 [(set f64:$XT, (PPCxsmaxc f64:$XA, f64:$XB))]>;
1628 [(set f64:$XT, (PPCxsminc f64:$XA, f64:$XB))]>;
1653 (ins vsrc:$XA, vsrc:$XTi, vsrc:$XB),
1654 "xxperm $XT, $XA, $XB", IIC_VecPerm, []>,
1657 (ins vsrc:$XA, vsrc:$XTi, vsrc:$XB),
1658 "xxpermr $XT, $XA, $XB", IIC_VecPerm, []>,
1669 let mayLoad = 1, mayStore = 0 in {
1676 // Load SP from src, convert it to DP, and place in dword[0]
1707 let mayStore = 1, mayLoad = 0 in {
1714 // Convert DP of dword[0] to SP, and store to dst
1778 } // hasSideEffects = 0
1847 (f64 (PPCmtvsra (i64 (vector_extract v2i64:$S1, 0))))));
1851 (f64 (PPCmtvsra (i64 (vector_extract v2i64:$S2, 0))))));
1855 (f64 (PPCmtvsra (i64 (vector_extract v2i64:$S1, 0))))));
1859 (f64 (PPCmtvsra (i64 (vector_extract v2i64:$S2, 0))))));
1867 dag El0S = (f64 (PPCfcfid (PPCmtvsra (extractelt v4i32:$A, 0))));
1871 dag El0U = (f64 (PPCfcfidu (PPCmtvsrz (extractelt v4i32:$A, 0))));
1882 the value up into element 0 (both BE and LE). Namely, entities smaller than
1936 dag LE_HALF_0 = (i32 (EXTRACT_SUBREG (RLDICL LE_DWORD_0, 0, 48), sub_32));
1940 dag LE_HALF_4 = (i32 (EXTRACT_SUBREG (RLDICL LE_DWORD_1, 0, 48), sub_32));
1946 dag LE_BYTE_0 = (i32 (EXTRACT_SUBREG (RLDICL LE_DWORD_0, 0, 56), sub_32));
1954 dag LE_BYTE_8 = (i32 (EXTRACT_SUBREG (RLDICL LE_DWORD_1, 0, 56), sub_32));
1984 - For elements 0-7, we shift left by 8 bytes since they're on the right
1987 with 0x8 (i.e. clearing all bits of the index and inverting bit 60).
2003 - Truncate the element number to the range 0-7 (8-15 are symmetrical
2015 - For elements 0-3, we shift left by 8 since they're on the right
2018 AND with 0x4 (i.e. clear all bits of the index and invert bit 61).
2036 - Truncate the element number to the range 0-3 (4-7 are symmetrical
2048 - For elements 0-1, we shift left by 8 since they're on the right
2066 - Truncate the element number to the range 0-1 (2-3 are symmetrical
2078 - For element 0, we shift left by 8 since it's on the right
2097 - Shift the vector to line up the desired element to BE Word 0
2115 - The shift in the VMX register is by 0/8 for opposite element numbers so
2116 we simply AND the element number with 0x8
2118 the bits of the index prior to truncating to the range 0-7
2133 - The shift in the VMX register is by 0/8 for opposite element numbers so
2134 we simply AND the element number with 0x4 and multiply by 2
2136 the bits of the index prior to truncating to the range 0-3
2154 the bits of the index prior to truncating to the range 0-1
2181 - Shift the vector to line up the desired element to BE Word 0
2189 dag BE_32B_VFLOAT_PERM_VEC = (v16i8 (LVSL (i32 ZERO), (RLWINM $Idx, 2, 0, 29)));
2203 (RLWINM (ANDI_rec $Idx, 1), 3, 0, 28)));
2224 dag A0 = (f32 (any_fpround (f64 (extractelt v2f64:$A, 0))));
2226 dag B0 = (f32 (any_fpround (f64 (extractelt v2f64:$B, 0))));
2231 dag A0S = (i32 (PPCmfvsr (f64 (PPCfctiwz (f64 (extractelt v2f64:$A, 0))))));
2233 dag B0S = (i32 (PPCmfvsr (f64 (PPCfctiwz (f64 (extractelt v2f64:$B, 0))))));
2235 dag A0U = (i32 (PPCmfvsr (f64 (PPCfctiwuz (f64 (extractelt v2f64:$A, 0))))));
2237 dag B0U = (i32 (PPCmfvsr (f64 (PPCfctiwuz (f64 (extractelt v2f64:$B, 0))))));
2242 dag LE_A0 = (i32 (sext_inreg (i32 (vector_extract v16i8:$A, 0)), i8));
2254 (i64 (anyext (i32 (vector_extract v16i8:$A, 0)))), i8));
2264 dag LE_A0 = (i32 (sext_inreg (i32 (vector_extract v8i16:$A, 0)), i16));
2276 (i64 (anyext (i32 (vector_extract v8i16:$A, 0)))), i16));
2286 dag LE_A0 = (i64 (sext (i32 (vector_extract v4i32:$A, 0))));
2368 (SUBREG_TO_REG (i64 1), $C, sub_64), 0));
2370 (SUBREG_TO_REG (i64 1), $D, sub_64), 0));
2371 dag ABhToFlt = (XVCVDPSP (XXPERMDI $A, $B, 0));
2373 dag BAhToFlt = (XVCVDPSP (XXPERMDI $B, $A, 0));
2380 dag A0B0 = (v2f64 (XXPERMDI v2f64:$A, v2f64:$B, 0));
2388 dag B1A1 = (v2f64 (XXPERMDI v2f64:$B, v2f64:$A, 0));
2398 (SUBREG_TO_REG (i64 1), f64:$C, sub_64), 0));
2400 (SUBREG_TO_REG (i64 1), f64:$D, sub_64), 0));
2409 (SUBREG_TO_REG (i64 1), f64:$B, sub_64), 0));
2411 (SUBREG_TO_REG (i64 1), f64:$A, sub_64), 0));
2426 dag MRGSGT = (v2i64 (XXPERMDI (v2i64 (XXSPLTW SGTWOR, 0)),
2427 (v2i64 (XXSPLTW SGTWOR, 2)), 0));
2428 dag MRGUGT = (v2i64 (XXPERMDI (v2i64 (XXSPLTW UGTWOR, 0)),
2429 (v2i64 (XXSPLTW UGTWOR, 2)), 0));
2430 dag MRGEQ = (v2i64 (XXPERMDI (v2i64 (XXSPLTW EQWSHAND, 0)),
2431 (v2i64 (XXSPLTW EQWSHAND, 2)), 0));
2595 def : Pat<(v2f64 (PPCsvec2fp v4i32:$C, 0)),
2600 def : Pat<(v2f64 (PPCuvec2fp v4i32:$C, 0)),
2605 def : Pat<(v2f64 (PPCfpexth v4f32:$C, 0)), (XVCVSPDP (XXMRGHW $C, $C))>;
2615 // PPCvecshl XT, XA, XA, 2 can be selected as either XXSLDWI XT,XA,XA,2 or
2616 // XXSWAPD XT,XA (i.e. XXPERMDI XT,XA,XA,2); the latter one is more profitable.
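A sketch of the corresponding selection pattern (value type and operand name assumed from the surrounding patterns), preferring the XXPERMDI/xxswapd form when both inputs are the same register:

def : Pat<(v4i32 (PPCvecshl v4i32:$XA, v4i32:$XA, 2)),
          (XXPERMDI $XA, $XA, 2)>;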
2828 (SUBREG_TO_REG (i64 1), (XSCVDPSXDS $A), sub_64), 0))>;
2831 (SUBREG_TO_REG (i64 1), (XSCVDPUXDS $A), sub_64), 0))>;
2846 (v4f32 (XXSPLTW (SUBREG_TO_REG (i64 1), (XSCVDPSP f64:$A), sub_64), 0))>;
2849 (v4f32 (XXSPLTW (v4f32 (XSCVDPSPN $A)), 0))>;
2861 (v2i64 (XXPERMDIs (LFIWZX ForceXForm:$A), 0))>;
2863 (v2i64 (XXPERMDIs (LFIWAX ForceXForm:$A), 0))>;
2868 (COPY_TO_REGCLASS (XSCVDPSXDSs $A), VSFRC), 0))>;
2871 (COPY_TO_REGCLASS (XSCVDPUXDSs $A), VSFRC), 0))>;
2880 let AddedComplexity = 0 in {
2887 } // AddedComplexity = 0
2928 def : Pat<(f64 (extractelt v2f64:$S, 0)),
2932 def : Pat<(f64 (PPCfcfid (PPCmtvsra (i64 (vector_extract v2i64:$S, 0))))),
2936 def : Pat<(f64 (PPCfcfidu (PPCmtvsra (i64 (vector_extract v2i64:$S, 0))))),
2947 (SUBREG_TO_REG (i64 1), $B, sub_64), 0))>;
2979 def : Pat<(v2f64 (build_vector (f64 (fpextend (extractelt v4f32:$A, 0))),
2983 (f64 (fpextend (extractelt v4f32:$A, 0))))),
2986 def : Pat<(v2f64 (build_vector (f64 (fpextend (extractelt v4f32:$A, 0))),
2999 def : Pat<(v2f64 (build_vector (f64 (fpextend (extractelt v4f32:$A, 0))),
3000 (f64 (fpextend (extractelt v4f32:$B, 0))))),
3001 (v2f64 (XVCVSPDP (XXPERMDI $A, $B, 0)))>;
3007 (build_vector (f64 (fpextend (extractelt v4f32:$A, 0))),
3011 (build_vector (f64 (fpextend (extractelt v4f32:$A, 0))),
3030 def : Pat<(v2f64 (insertelt v2f64:$A, f64:$B, 0)),
3033 (v2f64 (XXPERMDI $A, (SUBREG_TO_REG (i64 1), $B, sub_64), 0))>;
3040 (SUBREG_TO_REG (i64 1), $A, sub_64), 0),
3043 def : Pat<(f64 (extractelt v2f64:$S, 0)),
3056 def : Pat<(f64 (PPCfcfid (PPCmtvsra (i64 (vector_extract v2i64:$S, 0))))),
3060 def : Pat<(f64 (PPCfcfidu (PPCmtvsra (i64 (vector_extract v2i64:$S, 0))))),
3072 (SUBREG_TO_REG (i64 1), $A, sub_64), 0))>;
3104 def : Pat<(v2f64 (build_vector (f64 (fpextend (extractelt v4f32:$A, 0))),
3108 (f64 (fpextend (extractelt v4f32:$A, 0))))),
3111 def : Pat<(v2f64 (build_vector (f64 (fpextend (extractelt v4f32:$A, 0))),
3124 def : Pat<(v2f64 (build_vector (f64 (fpextend (extractelt v4f32:$A, 0))),
3125 (f64 (fpextend (extractelt v4f32:$B, 0))))),
3130 (v2f64 (XVCVSPDP (XXPERMDI $B, $A, 0)))>;
3140 (build_vector (f64 (fpextend (extractelt v4f32:$A, 0))),
3144 (build_vector (f64 (fpextend (extractelt v4f32:$A, 0))),
3155 def : Pat<(v2f64 (insertelt v2f64:$A, f64:$B, 0)),
3156 (v2f64 (XXPERMDI $A, (SUBREG_TO_REG (i64 1), $B, sub_64), 0))>;
3179 (XXPERMDIs (XSCVDPSXDS (COPY_TO_REGCLASS (XFLOADf32 ForceXForm:$A), VSFRC)), 0),
3184 (XXPERMDIs (XSCVDPUXDS (COPY_TO_REGCLASS (XFLOADf32 ForceXForm:$A), VSFRC)), 0),
3216 (v8i16 (VSPLTH 0, (LVX ForceXForm:$A)))>;
3218 (v16i8 (VSPLTB 0, (LVX ForceXForm:$A)))>;
3374 def : Pat<(f32 (vector_extract v4f32:$S, 0)),
3383 def : Pat<(f32 (PPCfcfids (f64 (PPCmtvsra (i32 (extractelt v4i32:$A, 0)))))),
3384 (f32 (XSCVSPDPN (XVCVSXWSP (XXSPLTW $A, 0))))>;
3391 def : Pat<(f64 (PPCfcfid (f64 (PPCmtvsra (i32 (extractelt v4i32:$A, 0)))))),
3392 (f64 (COPY_TO_REGCLASS (XVCVSXWDP (XXSPLTW $A, 0)), VSFRC))>;
3440 // Elements in a register on a BE system are in order <0, 1, 2, 3>.
3444 foreach Idx = [ [0,3], [2,1], [3,2] ] in {
3471 def : Pat<(f32 (vector_extract v4f32:$S, 0)),
3482 def : Pat<(f32 (PPCfcfids (f64 (PPCmtvsra (i32 (extractelt v4i32:$A, 0)))))),
3489 (f32 (XSCVSPDPN (XVCVSXWSP (XXSPLTW $A, 0))))>;
3490 def : Pat<(f64 (PPCfcfid (f64 (PPCmtvsra (i32 (extractelt v4i32:$A, 0)))))),
3497 (f64 (COPY_TO_REGCLASS (XVCVSXWDP (XXSPLTW $A, 0)), VSFRC))>;
3533 // Elements in a register on a LE system are in order <3, 2, 1, 0>.
3536 // Similar logic applies for elements 0 and 1.
3537 foreach Idx = [ [0,2], [1,1], [3,3] ] in {
3549 def : Pat<(store (i64 (extractelt v2i64:$A, 0)), ForceXForm:$src),
3551 def : Pat<(store (f64 (extractelt v2f64:$A, 0)), ForceXForm:$src),
3563 def : Pat<(store (i64 (extractelt v2i64:$A, 0)), ForceXForm:$src),
3566 def : Pat<(store (f64 (extractelt v2f64:$A, 0)), ForceXForm:$src),
3658 def : Pat<(i64 (vector_extract v2i64:$S, 0)),
3681 def : Pat<(i64 (vector_extract v2i64:$S, 0)),
3691 def : Pat<(i32 (vector_extract v16i8:$S, 0)),
3727 def : Pat<(i32 (vector_extract v8i16:$S, 0)),
3747 def : Pat<(i32 (vector_extract v4i32:$S, 0)),
3761 def : Pat<(i32 (vector_extract v16i8:$S, 0)),
3797 def : Pat<(i32 (vector_extract v8i16:$S, 0)),
3817 def : Pat<(i32 (vector_extract v4i32:$S, 0)),
3835 (SUBREG_TO_REG (i64 1), (MTVSRD $B), sub_64), 0))>;
3839 (MTVSRD (RLDIMI AnyExts.B, AnyExts.A, 32, 0)), sub_64),
3841 (MTVSRD (RLDIMI AnyExts.D, AnyExts.C, 32, 0)), sub_64), 0)>;
3852 (SUBREG_TO_REG (i64 1), (MTVSRD $A), sub_64), 0))>;
3856 (MTVSRD (RLDIMI AnyExts.C, AnyExts.D, 32, 0)), sub_64),
3858 (MTVSRD (RLDIMI AnyExts.A, AnyExts.B, 32, 0)), sub_64), 0)>;
3901 def : Pat<(f128 (any_fnearbyint f128:$vB)), (f128 (XSRQPI 0, $vB, 3))>;
3903 def : Pat<(f128 (any_fround f128:$vB)), (f128 (XSRQPI 0, $vB, 0))>;
3911 def : Pat<(f128 (any_frint f128:$vB)), (f128 (XSRQPIX 0, $vB, 3))>;
3970 (XXPERMDIs (LXSIBZX ForceXForm:$src), 0),
3978 (XXPERMDIs (VEXTSB2Ds (LXSIBZX ForceXForm:$src)), 0),
3988 (XXPERMDIs (LXSIHZX ForceXForm:$src), 0),
3996 (XXPERMDIs (VEXTSH2Ds (LXSIHZX ForceXForm:$src)), 0),
4081 def : Pat<(f32 (PPCxsmaxc f32:$XA, f32:$XB)),
4082 (f32 (COPY_TO_REGCLASS (XSMAXCDP (COPY_TO_REGCLASS $XA, VSSRC),
4085 def : Pat<(f32 (PPCxsminc f32:$XA, f32:$XB)),
4086 (f32 (COPY_TO_REGCLASS (XSMINCDP (COPY_TO_REGCLASS $XA, VSSRC),
4122 (XXPERMDIs (XSCVDPSXDS (COPY_TO_REGCLASS (DFLOADf32 DSForm:$A), VSFRC)), 0),
4128 (XXPERMDIs (XSCVDPUXDS (COPY_TO_REGCLASS (DFLOADf32 DSForm:$A), VSFRC)), 0),
4179 def : Pat<(f32 (PPCfcfidus (f64 (PPCmtvsrz (i32 (extractelt v4i32:$A, 0)))))),
4180 (f32 (XSCVUXDSP (XXEXTRACTUW $A, 0)))>;
4187 def : Pat<(f64 (PPCfcfidu (f64 (PPCmtvsrz (i32 (extractelt v4i32:$A, 0)))))),
4188 (f64 (XSCVUXDDP (XXEXTRACTUW $A, 0)))>;
4195 def : Pat<(v4i32 (insertelt v4i32:$A, i32:$B, 0)),
4196 (v4i32 (XXINSERTW v4i32:$A, AlignValues.I32_TO_BE_WORD1, 0))>;
4197 def : Pat<(v4i32 (insertelt v4i32:$A, DblToInt.B, 0)),
4201 0))>;
4202 def : Pat<(v4i32 (insertelt v4i32:$A, DblToUInt.B, 0)),
4206 0))>;
4243 def : Pat<(v4f32 (insertelt v4f32:$A, f32:$B, 0)),
4244 (v4f32 (XXINSERTW v4f32:$A, AlignValues.F32_TO_BE_WORD1, 0))>;
4252 def : Pat<(v4f32 (insertelt v4f32:$A, (f32 (fpround f64:$B)), 0)),
4254 (SUBREG_TO_REG (i64 1), (XSCVDPSP f64:$B), sub_64), 0))>;
4266 def : Pat<(truncstorei8 (i32 (vector_extract v16i8:$S, 0)), ForceXForm:$dst),
4300 def : Pat<(truncstorei16 (i32 (vector_extract v8i16:$S, 0)), ForceXForm:$dst),
4335 def : Pat<(store (i64 (extractelt v2i64:$A, 0)), XForm:$src),
4337 def : Pat<(store (f64 (extractelt v2f64:$A, 0)), XForm:$src),
4345 def : Pat<(store (i64 (extractelt v2i64:$A, 0)), DSForm:$src),
4347 def : Pat<(store (f64 (extractelt v2f64:$A, 0)), DSForm:$src),
4351 def : Pat<(f128 (sint_to_fp (i64 (extractelt v2i64:$src, 0)))),
4356 def : Pat<(f128 (uint_to_fp (i64 (extractelt v2i64:$src, 0)))),
4365 foreach Idx = [0,2,3] in {
4370 foreach Idx = 0-3 in {
4376 foreach Idx = 0-7 in {
4416 foreach Idx = 0-15 in {
4463 def : Pat<(f32 (PPCfcfidus (f64 (PPCmtvsrz (i32 (extractelt v4i32:$A, 0)))))),
4470 (f32 (XSCVUXDSP (XXEXTRACTUW $A, 0)))>;
4471 def : Pat<(f64 (PPCfcfidu (f64 (PPCmtvsrz (i32 (extractelt v4i32:$A, 0)))))),
4478 (f64 (XSCVUXDDP (XXEXTRACTUW $A, 0)))>;
4479 def : Pat<(v4i32 (insertelt v4i32:$A, i32:$B, 0)),
4481 def : Pat<(v4i32 (insertelt v4i32:$A, DblToInt.B, 0)),
4486 def : Pat<(v4i32 (insertelt v4i32:$A, DblToUInt.B, 0)),
4516 (v4i32 (XXINSERTW v4i32:$A, AlignValues.I32_TO_BE_WORD1, 0))>;
4521 0))>;
4526 0))>;
4527 def : Pat<(v4f32 (insertelt v4f32:$A, f32:$B, 0)),
4534 (v4f32 (XXINSERTW v4f32:$A, AlignValues.F32_TO_BE_WORD1, 0))>;
4536 def : Pat<(v4f32 (insertelt v4f32:$A, (f32 (fpround f64:$B)), 0)),
4547 (SUBREG_TO_REG (i64 1), (XSCVDPSP f64:$B), sub_64), 0))>;
4560 def : Pat<(truncstorei8 (i32 (vector_extract v16i8:$S, 0)), ForceXForm:$dst),
4594 def : Pat<(truncstorei16 (i32 (vector_extract v8i16:$S, 0)), ForceXForm:$dst),
4628 def : Pat<(store (i64 (extractelt v2i64:$A, 0)), XForm:$src),
4631 def : Pat<(store (f64 (extractelt v2f64:$A, 0)), XForm:$src),
4638 def : Pat<(store (i64 (extractelt v2i64:$A, 0)), DSForm:$src),
4641 def : Pat<(store (f64 (extractelt v2f64:$A, 0)), DSForm:$src),
4650 def : Pat<(f128 (sint_to_fp (i64 (extractelt v2i64:$src, 0)))),
4655 def : Pat<(f128 (uint_to_fp (i64 (extractelt v2i64:$src, 0)))),
4662 foreach Idx = [[0,3],[1,2],[3,0]] in {
4671 foreach Idx = [[0,12],[1,8],[2,4],[3,0]] in {
4679 foreach Idx = [[0,14],[1,12],[2,10],[3,8],[4,6],[5,4],[6,2],[7,0]] in {
4723 foreach Idx = [[0,15],[1,14],[2,13],[3,12],[4,11],[5,10],[6,9],[7,8],[8,7],
4724 [9,6],[10,5],[11,4],[12,3],[13,2],[14,1],[15,0]] in {
4813 def : Pat<(i64 (anyext (i32 (vector_extract v8i16:$S, 0)))),
4814 (VEXTUHLX (LI8 0), $S)>;
4832 def : Pat<(i64 (zext (i32 (vector_extract v4i32:$S, 0)))),
4833 (VEXTUWLX (LI8 0), $S)>;
4846 def : Pat<(i64 (sext (i32 (vector_extract v4i32:$S, 0)))),
4847 (EXTSW (VEXTUWLX (LI8 0), $S))>;
4859 def : Pat<(i32 (vector_extract v16i8:$S, 0)),
4860 (i32 (EXTRACT_SUBREG (VEXTUBLX (LI8 0), $S), sub_32))>;
4895 def : Pat<(i32 (vector_extract v8i16:$S, 0)),
4896 (i32 (EXTRACT_SUBREG (VEXTUHLX (LI8 0), $S), sub_32))>;
4915 def : Pat<(i32 (vector_extract v4i32:$S, 0)),
4916 (i32 (EXTRACT_SUBREG (VEXTUWLX (LI8 0), $S), sub_32))>;
4949 def : Pat<(i64 (anyext (i32 (vector_extract v8i16:$S, 0)))),
4950 (VEXTUHRX (LI8 0), $S)>;
4968 def : Pat<(i64 (zext (i32 (vector_extract v4i32:$S, 0)))),
4969 (VEXTUWRX (LI8 0), $S)>;
4981 def : Pat<(i64 (sext (i32 (vector_extract v4i32:$S, 0)))),
4982 (EXTSW (VEXTUWRX (LI8 0), $S))>;
4994 def : Pat<(i32 (vector_extract v16i8:$S, 0)),
4995 (i32 (EXTRACT_SUBREG (VEXTUBRX (LI8 0), $S), sub_32))>;
5030 def : Pat<(i32 (vector_extract v8i16:$S, 0)),
5031 (i32 (EXTRACT_SUBREG (VEXTUHRX (LI8 0), $S), sub_32))>;
5050 def : Pat<(i32 (vector_extract v4i32:$S, 0)),
5051 (i32 (EXTRACT_SUBREG (VEXTUWRX (LI8 0), $S), sub_32))>;
5078 // ISA3.0.
5087 (RLDIMI AnyExts.B, AnyExts.A, 32, 0),
5088 (RLDIMI AnyExts.D, AnyExts.C, 32, 0))>;
5094 // Little endian VSX subtarget that supports direct moves from ISA3.0.
5096 def : Pat<(i64 (extractelt v2i64:$A, 0)),
5103 (RLDIMI AnyExts.C, AnyExts.D, 32, 0),
5104 (RLDIMI AnyExts.A, AnyExts.B, 32, 0))>;
5119 def : InstAlias<"xxspltd $XT, $XB, 0",
5120 (XXPERMDI vsrc:$XT, vsrc:$XB, vsrc:$XB, 0)>;
5123 def : InstAlias<"xxspltd $XT, $XB, 0",
5124 (XXPERMDIs vsrc:$XT, vsfrc:$XB, 0)>;
5129 def : InstAlias<"xxmrghd $XT, $XA, $XB",
5130 (XXPERMDI vsrc:$XT, vsrc:$XA, vsrc:$XB, 0)>;
5131 def : InstAlias<"xxmrgld $XT, $XA, $XB",
5132 (XXPERMDI vsrc:$XT, vsrc:$XA, vsrc:$XB, 3)>;
5138 (MFVRD g8rc:$rA, vrrc:$XT), 0>;
5142 (MTVRD vrrc:$XT, g8rc:$rA), 0>;
5146 (MFVRWZ gprc:$rA, vrrc:$XT), 0>;
5150 (MTVRWA vrrc:$XT, gprc:$rA), 0>;
5154 (MTVRWZ vrrc:$XT, gprc:$rA), 0>;