Lines Matching defs:s64

82   const LLT s64 = LLT::scalar(64);
114 const LLT nxv1s64 = LLT::scalable_vector(1, s64);
115 const LLT nxv2s64 = LLT::scalable_vector(2, s64);
116 const LLT nxv4s64 = LLT::scalable_vector(4, s64);
117 const LLT nxv8s64 = LLT::scalable_vector(8, s64);
167 .legalFor(ST.is64Bit(), {{s64, s16}, {s64, s32}})
185 all(typeIs(BigTyIdx, s64), typeIs(LitTyIdx, s32)));
242 .customFor(ST.is64Bit(), {s64})
249 .legalFor(ST.is64Bit(), {s64})
277 .legalFor(XLen == 64 || ST.hasStdExtD(), {{s64, sXLen}})
279 .clampScalar(0, s32, (XLen == 64 || ST.hasStdExtD()) ? s64 : s32)
312 {{s64, p0, s8, getScalarMemAlign(8)},
313 {s64, p0, s16, getScalarMemAlign(16)},
314 {s64, p0, s32, getScalarMemAlign(32)},
315 {s64, p0, s64, getScalarMemAlign(64)}});
317 {{s64, p0, s8, getScalarMemAlign(8)},
318 {s64, p0, s16, getScalarMemAlign(16)},
319 {s64, p0, s32, getScalarMemAlign(32)},
320 {s64, p0, s64, getScalarMemAlign(64)}});
322 {{s64, p0, s32, getScalarMemAlign(32)}});
325 {{s64, p0, s64, getScalarMemAlign(64)}});
327 {{s64, p0, s64, getScalarMemAlign(64)}});
508 .legalFor(ST.hasStdExtD(), {s64})
510 .libcallFor({s32, s64})
515 .legalFor(ST.hasStdExtD(), {s64})
517 .lowerFor({s32, s64, s128});
520 .libcallFor({s32, s64})
527 .legalFor(ST.hasStdExtD(), {{s64, s64}, {s32, s64}, {s64, s32}})
529 .legalFor(ST.hasStdExtZfh() && ST.hasStdExtD(), {{s16, s64}, {s64, s16}})
534 .legalFor(ST.hasStdExtD(), {{s32, s64}})
536 .legalFor(ST.hasStdExtZfh() && ST.hasStdExtD(), {{s16, s64}})
537 .libcallFor({{s32, s64}})
538 .libcallFor(ST.is64Bit(), {{s32, s128}, {s64, s128}});
540 .legalFor(ST.hasStdExtD(), {{s64, s32}})
542 .legalFor(ST.hasStdExtZfh() && ST.hasStdExtD(), {{s64, s16}})
543 .libcallFor({{s64, s32}})
544 .libcallFor(ST.is64Bit(), {{s128, s32}, {s128, s64}});
548 .legalFor(ST.hasStdExtD(), {{sXLen, s64}})
551 .libcallFor({{sXLen, s32}, {sXLen, s64}})
557 .customFor(ST.hasStdExtD(), {{s1, s64}})
559 .lowerFor({{s1, s32}, {s1, s64}});
563 .legalFor(ST.hasStdExtD(), {s64})
565 .lowerFor({s32, s64, s128});
569 .legalFor(ST.hasStdExtD(), {{sXLen, s64}})
572 .customFor(ST.is64Bit() && ST.hasStdExtD(), {{s32, s64}})
576 .libcallFor({{s32, s32}, {s64, s32}, {s32, s64}, {s64, s64}})
577 .libcallFor(ST.is64Bit(), {{s32, s128}, {s64, s128}}) // FIXME RV32.
578 .libcallFor(ST.is64Bit(), {{s128, s32}, {s128, s64}, {s128, s128}});
582 .legalFor(ST.hasStdExtD(), {{s64, sXLen}})
598 .libcallFor({{s32, s32}, {s64, s32}, {s32, s64}, {s64, s64}})
599 .libcallFor(ST.is64Bit(), {{s128, s32}, {s128, s64}}) // FIXME RV32.
600 .libcallFor(ST.is64Bit(), {{s32, s128}, {s64, s128}, {s128, s128}});
607 .legalFor(ST.hasStdExtZfa() && ST.hasStdExtD(), {s64})
609 .libcallFor({s32, s64})
614 .legalFor(ST.hasStdExtZfa() && ST.hasStdExtD(), {s64})
621 .libcallFor({s32, s64})
624 .libcallFor({{s32, s32}, {s64, s32}})
646 // Handle case of s64 element vectors on RV32. If the subtarget does not have
656 typeInSet(0, {nxv1s64, nxv2s64, nxv4s64, nxv8s64}), typeIs(1, s64)));
659 typeInSet(0, {nxv1s64, nxv2s64, nxv4s64, nxv8s64}), typeIs(1, s64)));
985 // introduced a case where we're building an s64 where the upper bits are undef
1021 // Handle case of s64 element vectors on RV32
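Most of the matches above follow one pattern: LLT constants such as s64 are defined once, then used in LegalizeRuleSet chains where a boolean subtarget check gates legalFor and a libcallFor rule provides the fallback. The sketch below is a hypothetical, minimal illustration of that pattern against LLVM's GlobalISel LegalizerInfo API; it is not an excerpt from RISCVLegalizerInfo.cpp, and the opcode choice, the HasD flag, and the helper name buildFAddRules are illustrative assumptions.

    // Hypothetical sketch of the predicated-rule pattern seen in the matches
    // above; not taken from RISCVLegalizerInfo.cpp. Assumes a recent LLVM that
    // has the legalFor(bool Pred, ...) overloads used in the matched lines.
    #include "llvm/CodeGen/GlobalISel/LegalizerInfo.h"
    #include "llvm/CodeGen/TargetOpcodes.h"
    #include "llvm/CodeGenTypes/LowLevelType.h"

    using namespace llvm;

    // Illustrative helper: gate s64 on a subtarget feature (here, "HasD"),
    // and fall back to runtime library calls for everything else.
    static void buildFAddRules(LegalizerInfo &LI, bool HasD) {
      const LLT s32 = LLT::scalar(32);
      const LLT s64 = LLT::scalar(64);
      const LLT s128 = LLT::scalar(128);

      LI.getActionDefinitionsBuilder(TargetOpcode::G_FADD)
          .legalFor(HasD, {s64})         // legal only when the predicate holds
          .libcallFor({s32, s64, s128}); // otherwise emit a libcall
    }

Rules are matched in order, so when HasD is true an s64 operation is marked Legal by the first rule, while s32 and s128 (and s64 when HasD is false) fall through to the libcall rule, mirroring the legalFor/libcallFor pairs in the matched lines.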