Lines matching defs:s64 (X86 GlobalISel legalizer)
52 const LLT s64 = LLT::scalar(64);
55 const LLT sMaxScalar = Subtarget.is64Bit() ? s64 : s32;
87 // 32/64-bit mode needs support for s64/s128 to handle cases:
88 // s64 = EXTEND (G_IMPLICIT_DEF s32) -> s64 = G_IMPLICIT_DEF
89 // s128 = EXTEND (G_IMPLICIT_DEF s32/s64) -> s128 = G_IMPLICIT_DEF
90 return typeInSet(0, {p0, s1, s8, s16, s32, s64})(Query) ||
97 (Is64Bit && typeInSet(0, {s64})(Query));
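The comment at source lines 87-89 explains why G_IMPLICIT_DEF must stay legal at wide scalar types: an EXTEND of an undef value can be folded into a single wider G_IMPLICIT_DEF, so the legality rule has to accept the wider result type directly. A minimal sketch of such a rule, assuming it sits in the X86LegalizerInfo constructor where Is64Bit, the LLT constants, and LegalityPredicates::typeInSet are in scope (the s128 branch is an assumption; only the s64-matching lines appear in this listing):

    getActionDefinitionsBuilder(G_IMPLICIT_DEF)
        .legalIf([=](const LegalityQuery &Query) {
          // Keep pointer and scalar types up to the native width legal so
          // that sN = EXTEND (G_IMPLICIT_DEF ...) folds to one wider undef.
          return typeInSet(0, {p0, s1, s8, s16, s32, s64})(Query) ||
                 (Is64Bit && typeInSet(0, {s128})(Query));
        });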
142 if (Is64Bit && typeInSet(0, {s64})(Query))
157 .clampMinNumElements(0, s64, 2)
161 .clampMaxNumElements(0, s64, HasAVX512 ? 8 : (HasAVX2 ? 4 : 2))
169 (Is64Bit && typePairInSet(0, 1, {{s64, s1}})(Query));
181 if (Is64Bit && typeInSet(0, {s64})(Query))
201 .clampMinNumElements(0, s64, HasVLX ? 2 : 8)
204 .clampMaxNumElements(0, s64, 8)
212 (Is64Bit && typeInSet(0, {s64})(Query));
222 (Is64Bit && typeInSet(0, {s64})(Query));
224 .libcallFor({s64})
231 (Is64Bit && typePairInSet(0, 1, {{s64, s8}})(Query));
241 if (Is64Bit && typeInSet(0, {s64})(Query))
254 .clampMinNumElements(0, s64, 2)
258 .clampMaxNumElements(0, s64, HasAVX512 ? 8 : (HasAVX ? 4 : 2))
265 const std::initializer_list<LLT> IntTypes64 = {s8, s16, s32, s64, p0};
276 (Subtarget.is64Bit() && Query.Types[0] == s64);
286 (Is64Bit && typePairInSet(0, 1, {{s64, s64}})(Query)));
297 (Is64Bit && typePairInSet(0, 1, {{s64, s64}})(Query)));
308 (Is64Bit && typePairInSet(0, 1, {{s64, s64}})(Query)));
319 (Is64Bit && typeIs(0, s64)(Query)) ||
328 .clampMinNumElements(0, s64, 2)
332 .clampMaxNumElements(0, s64, HasAVX512 ? 8 : (HasAVX ? 4 : 2))
341 const std::initializer_list<LLT> PtrTypes64 = {s1, s8, s16, s32, s64};
355 (Is64Bit && typePairInSet(0, 1, {{p0, s64}})(Query));
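Source lines 341 and 355 belong to the pointer/integer conversion rules: PtrTypes64 lists the scalar types paired with p0, and the {p0, s64} pair is accepted only under Is64Bit, since p0 is 64 bits wide there. Which opcode this predicate guards is not visible in the listing; G_INTTOPTR is used below purely as an illustration:

    // Hedged sketch: a p0 result from a full-width s64 integer only makes
    // sense on 64-bit targets, hence the Is64Bit guard.
    getActionDefinitionsBuilder(G_INTTOPTR)
        .legalIf([=](const LegalityQuery &Query) {
          return Is64Bit && typePairInSet(0, 1, {{p0, s64}})(Query);
        });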
376 Action.legalForTypesWithMemDesc({{s64, p0, s8, 1},
377 {s64, p0, s16, 1},
378 {s64, p0, s32, 1},
379 {s64, p0, s64, 1},
410 Action.legalForTypesWithMemDesc({{s64, p0, s8, 1},
411 {s64, p0, s16, 1},
412 {s64, p0, s32, 1}});
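In both legalForTypesWithMemDesc blocks above, each four-element entry reads {ValueType, PointerType, MemoryType, MinAlign}: {s64, p0, s8, 1} means the operation moves 8 bits of memory to or from an s64 register value through a p0 pointer and is legal at byte alignment, i.e. an extending load or truncating store. A hedged one-rule sketch under the same assumed builder context (the choice of G_ZEXTLOAD is illustrative):

    // {s64, p0, s8, 1}: load 8 bits from a p0 address and zero-extend the
    // result to 64 bits; the minimum alignment is 1 byte.
    getActionDefinitionsBuilder(G_ZEXTLOAD)
        .legalForTypesWithMemDesc({{s64, p0, s8, 1}});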
421 (Is64Bit && Query.Types[0] == s64);
434 return (typeInSet(0, {s32, s64})(Query)) ||
441 return (typeInSet(0, {s32, s64})(Query)) ||
453 (HasSSE2 && typePairInSet(0, 1, {{s8, s64}})(Query));
456 .clampScalar(1, s32, HasSSE2 ? s64 : s32)
461 return (HasSSE2 && typePairInSet(0, 1, {{s64, s32}})(Query)) ||
468 return (HasSSE2 && typePairInSet(0, 1, {{s32, s64}})(Query)) ||
477 (Is64Bit && typePairInSet(0, 1, {{s32, s64}})(Query)))) ||
479 (typePairInSet(0, 1, {{s64, s32}})(Query) ||
480 (Is64Bit && typePairInSet(0, 1, {{s64, s64}})(Query))));
484 .clampScalar(0, s32, HasSSE2 ? s64 : s32)
491 (Is64Bit && typePairInSet(0, 1, {{s64, s32}})(Query)))) ||
493 (typePairInSet(0, 1, {{s32, s64}})(Query) ||
494 (Is64Bit && typePairInSet(0, 1, {{s64, s64}})(Query))));
496 .clampScalar(1, s32, HasSSE2 ? s64 : s32)
504 // s64 or s32 is obtained after widening and we shouldn't widen it to s64.
510 return HasAVX512 && typeInSet(0, {s32, s64})(Query) &&
511 typeInSet(1, {s32, s64})(Query);
516 (HasSSE2 && typeIs(0, s64)(Query))) &&
520 // Lower conversions from s64
523 (HasSSE2 && typeIs(0, s64)(Query))) &&
524 (Is64Bit && typeIs(1, s64)(Query));
526 .clampScalar(0, s32, HasSSE2 ? s64 : s32)
533 return HasAVX512 && typeInSet(0, {s32, s64})(Query) &&
534 typeInSet(1, {s32, s64})(Query);
539 (HasSSE2 && typeIs(1, s64)(Query))) &&
549 (HasSSE2 && typeIs(1, s64)(Query))) &&
550 (Is64Bit && typeIs(0, s64)(Query));
554 .clampScalar(1, s32, HasSSE2 ? s64 : s32)
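Throughout the int-to-fp and fp-to-int rules above (source lines 453-554), the floating-point side is also an untyped scalar LLT: s32 stands for f32 and s64 for f64. The HasSSE2 guards are therefore what restrict f64 conversions to subtargets that support them, and clampScalar(..., s32, HasSSE2 ? s64 : s32) forces the other operand into the representable range. A compressed sketch under those assumptions (the exact opcode wiring is abbreviated):

    getActionDefinitionsBuilder(G_SITOFP)
        .legalIf([=](const LegalityQuery &Query) {
          // Type 0 is the FP result: f32 is assumed available here, while
          // f64 (spelled s64) requires SSE2.
          return typeIs(0, s32)(Query) ||
                 (HasSSE2 && typeIs(0, s64)(Query));
        })
        // Type 1 is the integer source; clamp it to what the target fits.
        .clampScalar(1, s32, HasSSE2 ? s64 : s32);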
612 .legalFor({{s8, s32}, {s16, s32}, {s32, s32}, {s64, s32}, {p0, s32}})
631 .legalFor({s8, s16, s32, s64, p0})
715 const LLT s64 = LLT::scalar(64);
719 auto Casted = MIRBuilder.buildFPTOSI(DstTy == s32 ? s64 : s32, Src);
734 const LLT s64 = LLT::scalar(64);
738 auto Ext = MIRBuilder.buildZExt(SrcTy == s32 ? s64 : s32, Src);
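The last four lines come from two custom-lowering helpers and show the standard widening tricks for unsigned conversions: an s32 G_FPTOUI is implemented as a signed G_FPTOSI into s64 followed by a truncate (the s64 signed range covers every unsigned 32-bit value), and an s32 G_UITOFP zero-extends its source to s64 so that a signed G_SITOFP is exact. A sketch of both, assuming a custom handler where MIRBuilder plus the instruction's Dst/Src registers and DstTy/SrcTy types are in scope (the trailing buildTrunc/buildSITOFP steps are assumptions, since only the s64-matching lines are listed):

    const LLT s32 = LLT::scalar(32);
    const LLT s64 = LLT::scalar(64);

    // G_FPTOUI %Dst(s32): convert signed at twice the width, then truncate.
    auto Casted = MIRBuilder.buildFPTOSI(DstTy == s32 ? s64 : s32, Src);
    MIRBuilder.buildTrunc(Dst, Casted);

    // G_UITOFP %Src(s32): zero-extension makes the value non-negative, so
    // the signed conversion yields the exact unsigned result.
    auto Ext = MIRBuilder.buildZExt(SrcTy == s32 ? s64 : s32, Src);
    MIRBuilder.buildSITOFP(Dst, Ext);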