; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv64 -mattr=+m,+v -verify-machineinstrs -riscv-v-vector-bits-min=128 \
; RUN:   < %s | FileCheck %s
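
; These tests check lowering of the llvm.experimental.vp.reverse intrinsic for
; fixed-length integer vectors (v2i64, v4i32, v8i16, v16i8) with the V
; extension and a minimum VLEN of 128. The unmasked variants pass an all-true
; splat mask.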
define <2 x i64> @test_vp_reverse_v2i64_masked(<2 x i64> %src, <2 x i1> %mask, i32 zeroext %evl) {
; CHECK-LABEL: test_vp_reverse_v2i64_masked:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vid.v v9, v0.t
; CHECK-NEXT:    addi a0, a0, -1
; CHECK-NEXT:    vrsub.vx v10, v9, a0, v0.t
; CHECK-NEXT:    vrgather.vv v9, v8, v10, v0.t
; CHECK-NEXT:    vmv.v.v v8, v9
; CHECK-NEXT:    ret
  %dst = call <2 x i64> @llvm.experimental.vp.reverse.v2i64(<2 x i64> %src, <2 x i1> %mask, i32 %evl)
  ret <2 x i64> %dst
}

define <2 x i64> @test_vp_reverse_v2i64(<2 x i64> %src, i32 zeroext %evl) {
; CHECK-LABEL: test_vp_reverse_v2i64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    addi a1, a0, -1
; CHECK-NEXT:    vsetvli zero, a0, e64, m1, ta, ma
; CHECK-NEXT:    vid.v v9
; CHECK-NEXT:    vrsub.vx v10, v9, a1
; CHECK-NEXT:    vrgather.vv v9, v8, v10
; CHECK-NEXT:    vmv.v.v v8, v9
; CHECK-NEXT:    ret

  %dst = call <2 x i64> @llvm.experimental.vp.reverse.v2i64(<2 x i64> %src, <2 x i1> splat (i1 1), i32 %evl)
  ret <2 x i64> %dst
}

define <4 x i32> @test_vp_reverse_v4i32_masked(<4 x i32> %src, <4 x i1> %mask, i32 zeroext %evl) {
; CHECK-LABEL: test_vp_reverse_v4i32_masked:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli zero, a0, e32, m1, ta, ma
; CHECK-NEXT:    vid.v v9, v0.t
; CHECK-NEXT:    addi a0, a0, -1
; CHECK-NEXT:    vrsub.vx v10, v9, a0, v0.t
; CHECK-NEXT:    vrgather.vv v9, v8, v10, v0.t
; CHECK-NEXT:    vmv.v.v v8, v9
; CHECK-NEXT:    ret
  %dst = call <4 x i32> @llvm.experimental.vp.reverse.v4i32(<4 x i32> %src, <4 x i1> %mask, i32 %evl)
  ret <4 x i32> %dst
}

define <4 x i32> @test_vp_reverse_v4i32(<4 x i32> %src, i32 zeroext %evl) {
; CHECK-LABEL: test_vp_reverse_v4i32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    addi a1, a0, -1
; CHECK-NEXT:    vsetvli zero, a0, e32, m1, ta, ma
; CHECK-NEXT:    vid.v v9
; CHECK-NEXT:    vrsub.vx v10, v9, a1
; CHECK-NEXT:    vrgather.vv v9, v8, v10
; CHECK-NEXT:    vmv.v.v v8, v9
; CHECK-NEXT:    ret

  %dst = call <4 x i32> @llvm.experimental.vp.reverse.v4i32(<4 x i32> %src, <4 x i1> splat (i1 1), i32 %evl)
  ret <4 x i32> %dst
}

define <8 x i16> @test_vp_reverse_v8i16_masked(<8 x i16> %src, <8 x i1> %mask, i32 zeroext %evl) {
; CHECK-LABEL: test_vp_reverse_v8i16_masked:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli zero, a0, e16, m1, ta, ma
; CHECK-NEXT:    vid.v v9, v0.t
; CHECK-NEXT:    addi a0, a0, -1
; CHECK-NEXT:    vrsub.vx v10, v9, a0, v0.t
; CHECK-NEXT:    vrgather.vv v9, v8, v10, v0.t
; CHECK-NEXT:    vmv.v.v v8, v9
; CHECK-NEXT:    ret
  %dst = call <8 x i16> @llvm.experimental.vp.reverse.v8i16(<8 x i16> %src, <8 x i1> %mask, i32 %evl)
  ret <8 x i16> %dst
}

define <8 x i16> @test_vp_reverse_v8i16(<8 x i16> %src, i32 zeroext %evl) {
; CHECK-LABEL: test_vp_reverse_v8i16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    addi a1, a0, -1
; CHECK-NEXT:    vsetvli zero, a0, e16, m1, ta, ma
; CHECK-NEXT:    vid.v v9
; CHECK-NEXT:    vrsub.vx v10, v9, a1
; CHECK-NEXT:    vrgather.vv v9, v8, v10
; CHECK-NEXT:    vmv.v.v v8, v9
; CHECK-NEXT:    ret

  %dst = call <8 x i16> @llvm.experimental.vp.reverse.v8i16(<8 x i16> %src, <8 x i1> splat (i1 1), i32 %evl)
  ret <8 x i16> %dst
}
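
; For the i8 element tests below, the reverse indices are generated at e16
; (m2) and the gather is done with vrgatherei16.vv.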
define <16 x i8> @test_vp_reverse_v16i8_masked(<16 x i8> %src, <16 x i1> %mask, i32 zeroext %evl) {
; CHECK-LABEL: test_vp_reverse_v16i8_masked:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli zero, a0, e16, m2, ta, ma
; CHECK-NEXT:    vid.v v10, v0.t
; CHECK-NEXT:    addi a0, a0, -1
; CHECK-NEXT:    vrsub.vx v10, v10, a0, v0.t
; CHECK-NEXT:    vsetvli zero, zero, e8, m1, ta, ma
; CHECK-NEXT:    vrgatherei16.vv v9, v8, v10, v0.t
; CHECK-NEXT:    vmv.v.v v8, v9
; CHECK-NEXT:    ret
  %dst = call <16 x i8> @llvm.experimental.vp.reverse.v16i8(<16 x i8> %src, <16 x i1> %mask, i32 %evl)
  ret <16 x i8> %dst
}

define <16 x i8> @test_vp_reverse_v16i8(<16 x i8> %src, i32 zeroext %evl) {
; CHECK-LABEL: test_vp_reverse_v16i8:
; CHECK:       # %bb.0:
; CHECK-NEXT:    addi a1, a0, -1
; CHECK-NEXT:    vsetvli zero, a0, e16, m2, ta, ma
; CHECK-NEXT:    vid.v v10
; CHECK-NEXT:    vrsub.vx v10, v10, a1
; CHECK-NEXT:    vsetvli zero, zero, e8, m1, ta, ma
; CHECK-NEXT:    vrgatherei16.vv v9, v8, v10
; CHECK-NEXT:    vmv.v.v v8, v9
; CHECK-NEXT:    ret

  %dst = call <16 x i8> @llvm.experimental.vp.reverse.v16i8(<16 x i8> %src, <16 x i1> splat (i1 1), i32 %evl)
  ret <16 x i8> %dst
}

declare <2 x i64> @llvm.experimental.vp.reverse.v2i64(<2 x i64>,<2 x i1>,i32)
declare <4 x i32> @llvm.experimental.vp.reverse.v4i32(<4 x i32>,<4 x i1>,i32)
declare <8 x i16> @llvm.experimental.vp.reverse.v8i16(<8 x i16>,<8 x i1>,i32)
declare <16 x i8> @llvm.experimental.vp.reverse.v16i8(<16 x i8>,<16 x i1>,i32)