Lines Matching defs:s64
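Only lines matching s64 are shown, so the companion LLT definitions used throughout the rules below (p0, the fixed-width vector types, and so on) never appear in this listing. As a minimal standalone sketch of the constructors involved, assuming only LLVM's LLT class (header path as in recent trees):

    #include "llvm/CodeGenTypes/LowLevelType.h" // "llvm/CodeGen/LowLevelType.h" in older trees

    using namespace llvm;

    void buildExampleTypes() {
      const LLT s64     = LLT::scalar(64);               // 64-bit scalar
      const LLT p0      = LLT::pointer(0, 64);           // 64-bit pointer, address space 0
      const LLT v2s64   = LLT::fixed_vector(2, s64);     // fixed-length <2 x s64>
      const LLT nxv2s64 = LLT::scalable_vector(2, s64);  // scalable <vscale x 2 x s64>
      // In the real file these constants feed the rule chains listed below.
    }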
48 const LLT s64 = LLT::scalar(64);
65 const LLT nxv2s64 = LLT::scalable_vector(2, s64);
73 std::initializer_list<LLT> ScalarAndPtrTypesList = {s8, s16, s32, s64, p0};
96 .legalFor({p0, s8, s16, s32, s64})
100 .clampScalar(0, s8, s64)
106 .clampMaxNumElements(0, s64, 2)
111 .legalFor({p0, s16, s32, s64})
116 .clampScalar(0, s16, s64)
120 .clampMaxNumElements(0, s64, 2)
124 .legalFor({s32, s64, v4s16, v8s16, v2s32, v4s32, v2s64})
126 .clampScalar(0, s32, s64)
133 .legalFor({s32, s64, v2s32, v2s64, v4s32, v4s16, v8s16, v16s8, v8s8})
136 .clampScalar(0, s32, s64)
160 .legalFor({s32, s64, v2s32, v2s64, v4s32, v4s16, v8s16, v16s8, v8s8})
162 .clampScalar(0, s32, s64)
194 {s32, s64},
195 {s64, s64},
205 .clampScalar(1, s32, s64)
206 .clampScalar(0, s32, s64)
216 .legalFor({{p0, s64}, {v2p0, v2s64}})
217 .clampScalarOrElt(1, s64, s64)
220 getActionDefinitionsBuilder(G_PTRMASK).legalFor({{p0, s64}});
223 .legalFor({s32, s64})
225 .clampScalar(0, s32, s64)
230 .lowerFor({s8, s16, s32, s64, v2s64, v4s32, v2s32})
240 .clampScalar(0, s32, s64)
244 .legalFor({s64, v8s16, v16s8, v4s32})
249 .legalFor(HasCSSC, {s32, s64})
261 .legalFor({{s32, s32}, {s64, s32}})
262 .clampScalar(0, s32, s64)
263 .clampScalar(1, s32, s64)
270 .legalFor({s32, s64, v2s32, v4s32, v2s64})
281 .legalFor({s32, s64, v2s32, v4s32, v2s64})
292 .libcallFor({s32, s64, s128})
297 .legalFor({{s64, MinFPScalar}, {s64, s32}, {s64, s64}})
298 .libcallFor({{s64, s128}})
309 .libcallFor({s32, s64, s128});
313 .libcallFor({{s32, s32}, {s64, s32}, {s128, s32}});
316 .legalIf(all(typeInSet(0, {s32, s64, p0}),
319 .clampScalar(0, s32, s64)
323 .maxScalarIf(typeInSet(0, {s64, p0}), 1, s32);
326 .legalIf(all(typeInSet(0, {s16, s32, s64, p0}),
327 typeInSet(1, {s32, s64, s128, p0}), smallerThan(0, 1)))
333 .maxScalarIf(typeInSet(1, {s64, p0}), 0, s32)
334 .maxScalarIf(typeInSet(1, {s128}), 0, s64);
348 {s64, p0, s8, 2},
349 {s64, p0, s16, 2},
350 {s64, p0, s32, 4},
351 {s64, p0, s64, 8},
352 {p0, p0, s64, 8},
353 {v2s32, p0, s64, 8}})
355 .clampScalar(0, s32, s64)
380 {s64, p0, s64, 8},
381 {p0, p0, s64, 8},
383 {v8s8, p0, s64, 8},
385 {v4s16, p0, s64, 8},
387 {v2s32, p0, s64, 8},
392 {{s32, p0, s8, 8}, {s32, p0, s16, 8}, {s64, p0, s32, 8}})
404 .clampMaxNumElements(0, s64, 2)
407 .clampScalar(0, s8, s64)
438 {s64, p0, s8, 8}, // truncstorei8 from s64
440 {s64, p0, s16, 8}, // truncstorei16 from s64
442 {s64, p0, s64, 8}, {s64, p0, s32, 8}, // truncstorei32 from s64
443 {p0, p0, s64, 8}, {s128, p0, s128, 8}, {v16s8, p0, s128, 8},
444 {v8s8, p0, s64, 8}, {v4s16, p0, s64, 8}, {v8s16, p0, s128, 8},
445 {v2s32, p0, s64, 8}, {v4s32, p0, s128, 8}, {v2s64, p0, s128, 8}})
456 .clampScalar(0, s8, s64)
466 .clampMaxNumElements(0, s64, 2)
494 {p0, s64, s64, 8},
526 .legalIf(all(typeInSet(0, {s16, s32, s64}),
537 if (LdTy == s64)
545 .legalFor({p0, s8, s16, s32, s64})
547 .clampScalar(0, s8, s64);
549 .legalFor({s32, s64, s128})
555 .legalFor({{s32, s32}, {s32, s64}, {s32, p0}})
557 .clampScalar(1, s32, s64)
575 0, s64)
586 {s32, s64},
605 .clampMaxNumElements(1, s64, 2)
634 .clampScalar(0, s64, s64) // Just for s128, others are handled above.
670 .legalFor({s32, s64})
672 .maxScalar(0, s64)
676 .clampMaxNumElements(0, s64, 2)
682 {{s16, s32}, {s16, s64}, {s32, s64}, {v4s16, v4s32}, {v2s32, v2s64}})
683 .libcallFor({{s16, s128}, {s32, s128}, {s64, s128}})
690 {{s32, s16}, {s64, s16}, {s64, s32}, {v4s32, v4s16}, {v2s64, v2s32}})
691 .libcallFor({{s128, s64}, {s128, s32}, {s128, s16}})
699 {s64, s32},
700 {s32, s64},
701 {s64, s64},
706 {{s32, s16}, {s64, s16}, {v4s16, v4s16}, {v8s16, v8s16}})
715 changeTo(0, s64))
736 .clampMaxNumElements(0, s64, 2)
738 {{s32, s128}, {s64, s128}, {s128, s128}, {s128, s32}, {s128, s64}});
742 {s64, s32},
743 {s32, s64},
744 {s64, s64},
749 {{s32, s16}, {s64, s16}, {v4s16, v4s16}, {v8s16, v8s16}})
759 changeTo(0, s64))
782 .clampMaxNumElements(0, s64, 2);
786 {s64, s32},
787 {s32, s64},
788 {s64, s64},
793 {{s16, s32}, {s16, s64}, {v4s16, v4s16}, {v8s16, v8s16}})
816 .clampMaxNumElements(0, s64, 2)
819 {s64, s128},
822 {s128, s64}});
831 .legalFor({{s32, s32}, {s64, s32}, {p0, s32}})
833 .clampScalar(0, s32, s64)
851 .legalFor({{s64, p0}, {v2s64, v2p0}})
853 .clampScalar(0, s64, s64)
854 .clampMaxNumElements(0, s64, 2);
860 .legalFor({{p0, s64}, {v2p0, v2s64}})
861 .clampMaxNumElements(1, s64, 2);
868 .legalForCartesianProduct({s64, v8s8, v4s16, v2s32})
891 .customForCartesianProduct({s8, s16, s32, s64, p0}, {p0})
892 .clampScalar(0, s8, s64)
897 all(typeInSet(0, {s8, s16, s32, s64, s128}), typeIs(2, p0)));
902 .legalFor(!UseOutlineAtomics, {{s32, p0}, {s64, p0}})
905 {{s8, p0}, {s16, p0}, {s32, p0}, {s64, p0}, {s128, p0}})
906 .clampScalar(0, s32, s64);
911 .legalFor(!UseOutlineAtomics, {{s32, p0}, {s64, p0}})
913 {{s8, p0}, {s16, p0}, {s32, p0}, {s64, p0}})
914 .clampScalar(0, s32, s64);
920 .legalIf(all(typeInSet(0, {s32, s64}), typeIs(1, p0)))
921 .clampScalar(0, s32, s64);
932 .clampScalar(LitTyIdx, s8, s64)
957 .legalFor(HasSVE, {{s16, nxv16s8, s64},
958 {s16, nxv8s16, s64},
959 {s32, nxv4s32, s64},
960 {s64, nxv2s64, s64}})
967 .minScalar(2, s64)
976     // We want to promote <M x s1> to <M x s64> if that wouldn't
981 0, s64)
1002 .clampMaxNumElements(1, s64, 2)
1011 .legalFor(HasSVE, {{nxv16s8, s32, s64},
1012 {nxv8s16, s32, s64},
1013 {nxv4s32, s32, s64},
1014 {nxv2s64, s64, s64}})
1020 .clampMaxNumElements(0, s64, 2)
1031 {v2s64, s64}})
1043 {s32, s64, v8s8, v16s8, v4s16, v8s16, v2s32, v4s32})
1046 .clampScalar(1, s32, s64)
1052 .legalFor({s32, s64, v8s8, v16s8})
1054 .clampScalar(0, s32, s64)
1062 .clampScalar(1, s32, s64)
1064 .legalFor(HasCSSC, {s32, s64})
1065 .customFor(!HasCSSC, {s32, s64});
1130 getActionDefinitionsBuilder(G_BRJT).legalFor({{p0, s64}});
1142 .legalForCartesianProduct({p0}, {s64}, {s64})
1143 .customForCartesianProduct({p0}, {s8}, {s64})
1147 .legalForCartesianProduct({p0}, {p0}, {s64})
1152 .legalForCartesianProduct({p0}, {p0}, {s64});
1161 .legalFor(HasCSSC, {s32, s64})
1185 .legalFor({{s32, v2s32}, {s32, v4s32}, {s64, v2s64}})
1188 .clampMaxNumElements(1, s64, 2)
1198 .clampMaxNumElements(1, s64, 2)
1217 {s64, v2s64}})
1218 .clampMaxNumElements(1, s64, 2)
1226 .legalFor({{s32, v4s32}, {s32, v2s32}, {s64, v2s64}})
1229 .clampMaxNumElements(1, s64, 2)
1256 .clampMaxNumElements(1, s64, 2)
1289 .customFor({{s32, s32}, {s32, s64}, {s64, s64}})
1293 .legalFor({{s32, s64}, {s64, s64}})
1301 .customFor({{s32, s32}, {s64, s64}});
1305 .legalFor(HasCSSC, {{s32, s32}, {s64, s64}})
1307 .customFor(!HasCSSC, {{s32, s32}, {s64, s64}})
1325 .clampMaxNumElements(0, s64, 2)
1333 .legalFor({{s64, s32}, {s64, s64}});
1365 .legalFor(HasSVE, {{nxv4s32, s32}, {nxv2s64, s64}});
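The matches above come from declarative rule chains; everything from line 1832 on is imperative lowering code. As a minimal sketch of the builder API those chains use, for a hypothetical target (ExampleLegalizerInfo and its G_ADD rules are illustrative, not this file's class or rules), assuming the upstream GlobalISel headers:

    #include "llvm/CodeGen/GlobalISel/LegalizerInfo.h"
    #include "llvm/CodeGen/TargetOpcodes.h"

    using namespace llvm;

    // Hypothetical target: shows how legalFor/clampScalar/clampMaxNumElements
    // compose into one rule chain keyed on s64, as in the matches above.
    struct ExampleLegalizerInfo : public LegalizerInfo {
      ExampleLegalizerInfo() {
        const LLT s32 = LLT::scalar(32);
        const LLT s64 = LLT::scalar(64);
        const LLT v2s64 = LLT::fixed_vector(2, s64);

        getActionDefinitionsBuilder(TargetOpcode::G_ADD)
            .legalFor({s32, s64, v2s64})    // these forms select directly
            .clampScalar(0, s32, s64)       // force other scalars into [s32, s64]
            .clampMaxNumElements(0, s64, 2) // cap s64 vectors at <2 x s64>
            .widenScalarToNextPow2(0);      // round odd widths up to a power of two

        getLegacyLegalizerInfo().computeTables();
      }
    };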
1832 // allow the existing patterns for s64 to fire for p0, we just try to bitcast
1833 // the value to use s64 types.
1851 LLT s64 = LLT::scalar(64);
1868 NewI = MIRBuilder.buildInstr(Opcode, {s64, s64}, {});
1872 auto Split = MIRBuilder.buildUnmerge(s64, MI.getOperand(0));
2006 LLT s64 = LLT::scalar(64);
2008 auto Split = MIRBuilder.buildUnmerge(s64, Val);
2009 auto CTPOP1 = MIRBuilder.buildCTPOP(s64, Split->getOperand(0));
2010 auto CTPOP2 = MIRBuilder.buildCTPOP(s64, Split->getOperand(1));
2011 auto Add = MIRBuilder.buildAdd(s64, CTPOP1, CTPOP2);
2027   // s32,s64,v4s16,v2s32 -> v8s8
2112 LLT s64 = LLT::scalar(64);
2114 auto DesiredI = MIRBuilder.buildUnmerge({s64, s64}, MI.getOperand(2));
2115 auto NewI = MIRBuilder.buildUnmerge({s64, s64}, MI.getOperand(3));
2116 auto DstLo = MRI.createGenericVirtualRegister(s64);
2117 auto DstHi = MRI.createGenericVirtualRegister(s64);
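The final matches (the unmerges into {s64, s64} and the CTPOP/add sequence) share one shape: split a 128-bit value into its s64 halves, operate on each half, then recombine. A minimal sketch of that pattern for a population count, assuming an LLVM MachineIRBuilder; lowerPopCount128 is an illustrative name, not this file's API:

    #include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"

    using namespace llvm;

    // Illustrative helper (not this file's API):
    //   ctpop(x:s128) == ctpop(lo64(x)) + ctpop(hi64(x))
    static void lowerPopCount128(MachineIRBuilder &MIRBuilder, Register Dst,
                                 Register Val /* s128 */) {
      const LLT s64 = LLT::scalar(64);
      // G_UNMERGE_VALUES: break the 128-bit value into low/high s64 halves.
      auto Split = MIRBuilder.buildUnmerge(s64, Val);
      // Count the set bits in each half independently.
      auto Lo = MIRBuilder.buildCTPOP(s64, Split.getReg(0));
      auto Hi = MIRBuilder.buildCTPOP(s64, Split.getReg(1));
      // Sum the partial counts, then widen back to the s128 destination.
      auto Sum = MIRBuilder.buildAdd(s64, Lo, Hi);
      MIRBuilder.buildZExt(Dst, Sum);
    }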