// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
// REQUIRES: riscv-registered-target
// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
// RUN:   -target-feature +zvbb \
// RUN:   -target-feature +zvbc \
// RUN:   -target-feature +zvkb \
// RUN:   -target-feature +zvkg \
// RUN:   -target-feature +zvkned \
// RUN:   -target-feature +zvknhb \
// RUN:   -target-feature +zvksed \
// RUN:   -target-feature +zvksh \
// RUN:   -disable-O0-optnone \
// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
// RUN:   FileCheck --check-prefix=CHECK-RV64 %s

#include <riscv_vector.h>
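
// This file exercises the overloaded __riscv_vrol intrinsic (vector rotate
// left, from the Zvbb extension) in vector-vector (vv) and vector-scalar (vx)
// form for every unsigned element width and LMUL. Unmasked calls are expected
// to lower to @llvm.riscv.vrol.* with a poison passthru operand; the masked
// _m variants in the second half of the file lower to @llvm.riscv.vrol.mask.*
// with a trailing policy operand of i64 3, which under the standard RVV
// intrinsic policy encoding selects tail-agnostic, mask-agnostic behavior.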

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vrol_vv_u8mf8
// CHECK-RV64-SAME: (<vscale x 1 x i8> [[VS2:%.*]], <vscale x 1 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vrol.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> poison, <vscale x 1 x i8> [[VS2]], <vscale x 1 x i8> [[VS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
//
vuint8mf8_t test_vrol_vv_u8mf8(vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) {
  return __riscv_vrol(vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vrol_vx_u8mf8
// CHECK-RV64-SAME: (<vscale x 1 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vrol.nxv1i8.i64.i64(<vscale x 1 x i8> poison, <vscale x 1 x i8> [[VS2]], i64 [[RS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
//
vuint8mf8_t test_vrol_vx_u8mf8(vuint8mf8_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vrol_vv_u8mf4
// CHECK-RV64-SAME: (<vscale x 2 x i8> [[VS2:%.*]], <vscale x 2 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vrol.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> poison, <vscale x 2 x i8> [[VS2]], <vscale x 2 x i8> [[VS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
//
vuint8mf4_t test_vrol_vv_u8mf4(vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) {
  return __riscv_vrol(vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vrol_vx_u8mf4
// CHECK-RV64-SAME: (<vscale x 2 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vrol.nxv2i8.i64.i64(<vscale x 2 x i8> poison, <vscale x 2 x i8> [[VS2]], i64 [[RS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
//
vuint8mf4_t test_vrol_vx_u8mf4(vuint8mf4_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vrol_vv_u8mf2
// CHECK-RV64-SAME: (<vscale x 4 x i8> [[VS2:%.*]], <vscale x 4 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vrol.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> poison, <vscale x 4 x i8> [[VS2]], <vscale x 4 x i8> [[VS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
//
vuint8mf2_t test_vrol_vv_u8mf2(vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) {
  return __riscv_vrol(vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vrol_vx_u8mf2
// CHECK-RV64-SAME: (<vscale x 4 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vrol.nxv4i8.i64.i64(<vscale x 4 x i8> poison, <vscale x 4 x i8> [[VS2]], i64 [[RS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
//
vuint8mf2_t test_vrol_vx_u8mf2(vuint8mf2_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vrol_vv_u8m1
// CHECK-RV64-SAME: (<vscale x 8 x i8> [[VS2:%.*]], <vscale x 8 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vrol.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> poison, <vscale x 8 x i8> [[VS2]], <vscale x 8 x i8> [[VS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
//
vuint8m1_t test_vrol_vv_u8m1(vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) {
  return __riscv_vrol(vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vrol_vx_u8m1
// CHECK-RV64-SAME: (<vscale x 8 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vrol.nxv8i8.i64.i64(<vscale x 8 x i8> poison, <vscale x 8 x i8> [[VS2]], i64 [[RS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
//
vuint8m1_t test_vrol_vx_u8m1(vuint8m1_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vrol_vv_u8m2
// CHECK-RV64-SAME: (<vscale x 16 x i8> [[VS2:%.*]], <vscale x 16 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vrol.nxv16i8.nxv16i8.i64(<vscale x 16 x i8> poison, <vscale x 16 x i8> [[VS2]], <vscale x 16 x i8> [[VS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
//
vuint8m2_t test_vrol_vv_u8m2(vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) {
  return __riscv_vrol(vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vrol_vx_u8m2
// CHECK-RV64-SAME: (<vscale x 16 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vrol.nxv16i8.i64.i64(<vscale x 16 x i8> poison, <vscale x 16 x i8> [[VS2]], i64 [[RS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
//
vuint8m2_t test_vrol_vx_u8m2(vuint8m2_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vrol_vv_u8m4
// CHECK-RV64-SAME: (<vscale x 32 x i8> [[VS2:%.*]], <vscale x 32 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vrol.nxv32i8.nxv32i8.i64(<vscale x 32 x i8> poison, <vscale x 32 x i8> [[VS2]], <vscale x 32 x i8> [[VS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
//
vuint8m4_t test_vrol_vv_u8m4(vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) {
  return __riscv_vrol(vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vrol_vx_u8m4
// CHECK-RV64-SAME: (<vscale x 32 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vrol.nxv32i8.i64.i64(<vscale x 32 x i8> poison, <vscale x 32 x i8> [[VS2]], i64 [[RS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
//
vuint8m4_t test_vrol_vx_u8m4(vuint8m4_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 64 x i8> @test_vrol_vv_u8m8
// CHECK-RV64-SAME: (<vscale x 64 x i8> [[VS2:%.*]], <vscale x 64 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i8> @llvm.riscv.vrol.nxv64i8.nxv64i8.i64(<vscale x 64 x i8> poison, <vscale x 64 x i8> [[VS2]], <vscale x 64 x i8> [[VS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
//
vuint8m8_t test_vrol_vv_u8m8(vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) {
  return __riscv_vrol(vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 64 x i8> @test_vrol_vx_u8m8
// CHECK-RV64-SAME: (<vscale x 64 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i8> @llvm.riscv.vrol.nxv64i8.i64.i64(<vscale x 64 x i8> poison, <vscale x 64 x i8> [[VS2]], i64 [[RS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
//
vuint8m8_t test_vrol_vx_u8m8(vuint8m8_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i16> @test_vrol_vv_u16mf4
// CHECK-RV64-SAME: (<vscale x 1 x i16> [[VS2:%.*]], <vscale x 1 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i16> @llvm.riscv.vrol.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> poison, <vscale x 1 x i16> [[VS2]], <vscale x 1 x i16> [[VS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 1 x i16> [[TMP0]]
//
vuint16mf4_t test_vrol_vv_u16mf4(vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) {
  return __riscv_vrol(vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i16> @test_vrol_vx_u16mf4
// CHECK-RV64-SAME: (<vscale x 1 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i16> @llvm.riscv.vrol.nxv1i16.i64.i64(<vscale x 1 x i16> poison, <vscale x 1 x i16> [[VS2]], i64 [[RS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 1 x i16> [[TMP0]]
//
vuint16mf4_t test_vrol_vx_u16mf4(vuint16mf4_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vrol_vv_u16mf2
// CHECK-RV64-SAME: (<vscale x 2 x i16> [[VS2:%.*]], <vscale x 2 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i16> @llvm.riscv.vrol.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> poison, <vscale x 2 x i16> [[VS2]], <vscale x 2 x i16> [[VS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
//
vuint16mf2_t test_vrol_vv_u16mf2(vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) {
  return __riscv_vrol(vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vrol_vx_u16mf2
// CHECK-RV64-SAME: (<vscale x 2 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i16> @llvm.riscv.vrol.nxv2i16.i64.i64(<vscale x 2 x i16> poison, <vscale x 2 x i16> [[VS2]], i64 [[RS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
//
vuint16mf2_t test_vrol_vx_u16mf2(vuint16mf2_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vrol_vv_u16m1
// CHECK-RV64-SAME: (<vscale x 4 x i16> [[VS2:%.*]], <vscale x 4 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vrol.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> poison, <vscale x 4 x i16> [[VS2]], <vscale x 4 x i16> [[VS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
//
vuint16m1_t test_vrol_vv_u16m1(vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) {
  return __riscv_vrol(vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vrol_vx_u16m1
// CHECK-RV64-SAME: (<vscale x 4 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vrol.nxv4i16.i64.i64(<vscale x 4 x i16> poison, <vscale x 4 x i16> [[VS2]], i64 [[RS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
//
vuint16m1_t test_vrol_vx_u16m1(vuint16m1_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vrol_vv_u16m2
// CHECK-RV64-SAME: (<vscale x 8 x i16> [[VS2:%.*]], <vscale x 8 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i16> @llvm.riscv.vrol.nxv8i16.nxv8i16.i64(<vscale x 8 x i16> poison, <vscale x 8 x i16> [[VS2]], <vscale x 8 x i16> [[VS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
//
vuint16m2_t test_vrol_vv_u16m2(vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) {
  return __riscv_vrol(vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vrol_vx_u16m2
// CHECK-RV64-SAME: (<vscale x 8 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i16> @llvm.riscv.vrol.nxv8i16.i64.i64(<vscale x 8 x i16> poison, <vscale x 8 x i16> [[VS2]], i64 [[RS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
//
vuint16m2_t test_vrol_vx_u16m2(vuint16m2_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vrol_vv_u16m4
// CHECK-RV64-SAME: (<vscale x 16 x i16> [[VS2:%.*]], <vscale x 16 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i16> @llvm.riscv.vrol.nxv16i16.nxv16i16.i64(<vscale x 16 x i16> poison, <vscale x 16 x i16> [[VS2]], <vscale x 16 x i16> [[VS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
//
vuint16m4_t test_vrol_vv_u16m4(vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) {
  return __riscv_vrol(vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vrol_vx_u16m4
// CHECK-RV64-SAME: (<vscale x 16 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i16> @llvm.riscv.vrol.nxv16i16.i64.i64(<vscale x 16 x i16> poison, <vscale x 16 x i16> [[VS2]], i64 [[RS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
//
vuint16m4_t test_vrol_vx_u16m4(vuint16m4_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vrol_vv_u16m8
// CHECK-RV64-SAME: (<vscale x 32 x i16> [[VS2:%.*]], <vscale x 32 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i16> @llvm.riscv.vrol.nxv32i16.nxv32i16.i64(<vscale x 32 x i16> poison, <vscale x 32 x i16> [[VS2]], <vscale x 32 x i16> [[VS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
//
vuint16m8_t test_vrol_vv_u16m8(vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) {
  return __riscv_vrol(vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vrol_vx_u16m8
// CHECK-RV64-SAME: (<vscale x 32 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i16> @llvm.riscv.vrol.nxv32i16.i64.i64(<vscale x 32 x i16> poison, <vscale x 32 x i16> [[VS2]], i64 [[RS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
//
vuint16m8_t test_vrol_vx_u16m8(vuint16m8_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vrol_vv_u32mf2
// CHECK-RV64-SAME: (<vscale x 1 x i32> [[VS2:%.*]], <vscale x 1 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vrol.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> poison, <vscale x 1 x i32> [[VS2]], <vscale x 1 x i32> [[VS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
//
vuint32mf2_t test_vrol_vv_u32mf2(vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) {
  return __riscv_vrol(vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vrol_vx_u32mf2
// CHECK-RV64-SAME: (<vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vrol.nxv1i32.i64.i64(<vscale x 1 x i32> poison, <vscale x 1 x i32> [[VS2]], i64 [[RS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
//
vuint32mf2_t test_vrol_vx_u32mf2(vuint32mf2_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vrol_vv_u32m1
// CHECK-RV64-SAME: (<vscale x 2 x i32> [[VS2:%.*]], <vscale x 2 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vrol.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> poison, <vscale x 2 x i32> [[VS2]], <vscale x 2 x i32> [[VS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
//
vuint32m1_t test_vrol_vv_u32m1(vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) {
  return __riscv_vrol(vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vrol_vx_u32m1
// CHECK-RV64-SAME: (<vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vrol.nxv2i32.i64.i64(<vscale x 2 x i32> poison, <vscale x 2 x i32> [[VS2]], i64 [[RS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
//
vuint32m1_t test_vrol_vx_u32m1(vuint32m1_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vrol_vv_u32m2
// CHECK-RV64-SAME: (<vscale x 4 x i32> [[VS2:%.*]], <vscale x 4 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vrol.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> poison, <vscale x 4 x i32> [[VS2]], <vscale x 4 x i32> [[VS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
//
vuint32m2_t test_vrol_vv_u32m2(vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) {
  return __riscv_vrol(vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vrol_vx_u32m2
// CHECK-RV64-SAME: (<vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vrol.nxv4i32.i64.i64(<vscale x 4 x i32> poison, <vscale x 4 x i32> [[VS2]], i64 [[RS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
//
vuint32m2_t test_vrol_vx_u32m2(vuint32m2_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vrol_vv_u32m4
// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VS2:%.*]], <vscale x 8 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vrol.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> poison, <vscale x 8 x i32> [[VS2]], <vscale x 8 x i32> [[VS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
//
vuint32m4_t test_vrol_vv_u32m4(vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) {
  return __riscv_vrol(vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vrol_vx_u32m4
// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vrol.nxv8i32.i64.i64(<vscale x 8 x i32> poison, <vscale x 8 x i32> [[VS2]], i64 [[RS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
//
vuint32m4_t test_vrol_vx_u32m4(vuint32m4_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vrol_vv_u32m8
// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VS2:%.*]], <vscale x 16 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vrol.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> poison, <vscale x 16 x i32> [[VS2]], <vscale x 16 x i32> [[VS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
//
vuint32m8_t test_vrol_vv_u32m8(vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) {
  return __riscv_vrol(vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vrol_vx_u32m8
// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vrol.nxv16i32.i64.i64(<vscale x 16 x i32> poison, <vscale x 16 x i32> [[VS2]], i64 [[RS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
//
vuint32m8_t test_vrol_vx_u32m8(vuint32m8_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vrol_vv_u64m1
// CHECK-RV64-SAME: (<vscale x 1 x i64> [[VS2:%.*]], <vscale x 1 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vrol.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> [[VS2]], <vscale x 1 x i64> [[VS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vrol_vv_u64m1(vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) {
  return __riscv_vrol(vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vrol_vx_u64m1
// CHECK-RV64-SAME: (<vscale x 1 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vrol.nxv1i64.i64.i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> [[VS2]], i64 [[RS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vrol_vx_u64m1(vuint64m1_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vrol_vv_u64m2
// CHECK-RV64-SAME: (<vscale x 2 x i64> [[VS2:%.*]], <vscale x 2 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vrol.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> poison, <vscale x 2 x i64> [[VS2]], <vscale x 2 x i64> [[VS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_vrol_vv_u64m2(vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) {
  return __riscv_vrol(vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vrol_vx_u64m2
// CHECK-RV64-SAME: (<vscale x 2 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vrol.nxv2i64.i64.i64(<vscale x 2 x i64> poison, <vscale x 2 x i64> [[VS2]], i64 [[RS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_vrol_vx_u64m2(vuint64m2_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vrol_vv_u64m4
// CHECK-RV64-SAME: (<vscale x 4 x i64> [[VS2:%.*]], <vscale x 4 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vrol.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> poison, <vscale x 4 x i64> [[VS2]], <vscale x 4 x i64> [[VS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_vrol_vv_u64m4(vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) {
  return __riscv_vrol(vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vrol_vx_u64m4
// CHECK-RV64-SAME: (<vscale x 4 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vrol.nxv4i64.i64.i64(<vscale x 4 x i64> poison, <vscale x 4 x i64> [[VS2]], i64 [[RS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_vrol_vx_u64m4(vuint64m4_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vrol_vv_u64m8
// CHECK-RV64-SAME: (<vscale x 8 x i64> [[VS2:%.*]], <vscale x 8 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vrol.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[VS2]], <vscale x 8 x i64> [[VS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_vrol_vv_u64m8(vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) {
  return __riscv_vrol(vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vrol_vx_u64m8
// CHECK-RV64-SAME: (<vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vrol.nxv8i64.i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[VS2]], i64 [[RS1]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_vrol_vx_u64m8(vuint64m8_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(vs2, rs1, vl);
}
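
// The _m variants below use the mask-only overload, so each expected call
// gains the mask operand and a trailing i64 3 policy operand (tail-agnostic,
// mask-agnostic under the standard RVV policy encoding) while the passthru
// operand stays poison.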

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vrol_vv_u8mf8_m
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i8> [[VS2:%.*]], <vscale x 1 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vrol.mask.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> poison, <vscale x 1 x i8> [[VS2]], <vscale x 1 x i8> [[VS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
//
vuint8mf8_t test_vrol_vv_u8mf8_m(vbool64_t mask, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) {
  return __riscv_vrol(mask, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vrol_vx_u8mf8_m
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vrol.mask.nxv1i8.i64.i64(<vscale x 1 x i8> poison, <vscale x 1 x i8> [[VS2]], i64 [[RS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
//
vuint8mf8_t test_vrol_vx_u8mf8_m(vbool64_t mask, vuint8mf8_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(mask, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vrol_vv_u8mf4_m
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i8> [[VS2:%.*]], <vscale x 2 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vrol.mask.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> poison, <vscale x 2 x i8> [[VS2]], <vscale x 2 x i8> [[VS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
//
vuint8mf4_t test_vrol_vv_u8mf4_m(vbool32_t mask, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) {
  return __riscv_vrol(mask, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vrol_vx_u8mf4_m
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vrol.mask.nxv2i8.i64.i64(<vscale x 2 x i8> poison, <vscale x 2 x i8> [[VS2]], i64 [[RS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
//
vuint8mf4_t test_vrol_vx_u8mf4_m(vbool32_t mask, vuint8mf4_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(mask, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vrol_vv_u8mf2_m
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i8> [[VS2:%.*]], <vscale x 4 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vrol.mask.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> poison, <vscale x 4 x i8> [[VS2]], <vscale x 4 x i8> [[VS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
//
vuint8mf2_t test_vrol_vv_u8mf2_m(vbool16_t mask, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) {
  return __riscv_vrol(mask, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vrol_vx_u8mf2_m
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vrol.mask.nxv4i8.i64.i64(<vscale x 4 x i8> poison, <vscale x 4 x i8> [[VS2]], i64 [[RS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
//
vuint8mf2_t test_vrol_vx_u8mf2_m(vbool16_t mask, vuint8mf2_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(mask, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vrol_vv_u8m1_m
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i8> [[VS2:%.*]], <vscale x 8 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vrol.mask.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> poison, <vscale x 8 x i8> [[VS2]], <vscale x 8 x i8> [[VS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
//
vuint8m1_t test_vrol_vv_u8m1_m(vbool8_t mask, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) {
  return __riscv_vrol(mask, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vrol_vx_u8m1_m
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vrol.mask.nxv8i8.i64.i64(<vscale x 8 x i8> poison, <vscale x 8 x i8> [[VS2]], i64 [[RS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
//
vuint8m1_t test_vrol_vx_u8m1_m(vbool8_t mask, vuint8m1_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(mask, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vrol_vv_u8m2_m
// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i8> [[VS2:%.*]], <vscale x 16 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vrol.mask.nxv16i8.nxv16i8.i64(<vscale x 16 x i8> poison, <vscale x 16 x i8> [[VS2]], <vscale x 16 x i8> [[VS1]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
//
vuint8m2_t test_vrol_vv_u8m2_m(vbool4_t mask, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) {
  return __riscv_vrol(mask, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vrol_vx_u8m2_m
// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vrol.mask.nxv16i8.i64.i64(<vscale x 16 x i8> poison, <vscale x 16 x i8> [[VS2]], i64 [[RS1]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
//
vuint8m2_t test_vrol_vx_u8m2_m(vbool4_t mask, vuint8m2_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(mask, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vrol_vv_u8m4_m
// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], <vscale x 32 x i8> [[VS2:%.*]], <vscale x 32 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vrol.mask.nxv32i8.nxv32i8.i64(<vscale x 32 x i8> poison, <vscale x 32 x i8> [[VS2]], <vscale x 32 x i8> [[VS1]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
//
vuint8m4_t test_vrol_vv_u8m4_m(vbool2_t mask, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) {
  return __riscv_vrol(mask, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vrol_vx_u8m4_m
// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], <vscale x 32 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vrol.mask.nxv32i8.i64.i64(<vscale x 32 x i8> poison, <vscale x 32 x i8> [[VS2]], i64 [[RS1]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
//
vuint8m4_t test_vrol_vx_u8m4_m(vbool2_t mask, vuint8m4_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(mask, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 64 x i8> @test_vrol_vv_u8m8_m
// CHECK-RV64-SAME: (<vscale x 64 x i1> [[MASK:%.*]], <vscale x 64 x i8> [[VS2:%.*]], <vscale x 64 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i8> @llvm.riscv.vrol.mask.nxv64i8.nxv64i8.i64(<vscale x 64 x i8> poison, <vscale x 64 x i8> [[VS2]], <vscale x 64 x i8> [[VS1]], <vscale x 64 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
//
vuint8m8_t test_vrol_vv_u8m8_m(vbool1_t mask, vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) {
  return __riscv_vrol(mask, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 64 x i8> @test_vrol_vx_u8m8_m
// CHECK-RV64-SAME: (<vscale x 64 x i1> [[MASK:%.*]], <vscale x 64 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i8> @llvm.riscv.vrol.mask.nxv64i8.i64.i64(<vscale x 64 x i8> poison, <vscale x 64 x i8> [[VS2]], i64 [[RS1]], <vscale x 64 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
//
vuint8m8_t test_vrol_vx_u8m8_m(vbool1_t mask, vuint8m8_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(mask, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i16> @test_vrol_vv_u16mf4_m
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i16> [[VS2:%.*]], <vscale x 1 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i16> @llvm.riscv.vrol.mask.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> poison, <vscale x 1 x i16> [[VS2]], <vscale x 1 x i16> [[VS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 1 x i16> [[TMP0]]
//
vuint16mf4_t test_vrol_vv_u16mf4_m(vbool64_t mask, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) {
  return __riscv_vrol(mask, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i16> @test_vrol_vx_u16mf4_m
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i16> @llvm.riscv.vrol.mask.nxv1i16.i64.i64(<vscale x 1 x i16> poison, <vscale x 1 x i16> [[VS2]], i64 [[RS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 1 x i16> [[TMP0]]
//
vuint16mf4_t test_vrol_vx_u16mf4_m(vbool64_t mask, vuint16mf4_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(mask, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vrol_vv_u16mf2_m
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i16> [[VS2:%.*]], <vscale x 2 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i16> @llvm.riscv.vrol.mask.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> poison, <vscale x 2 x i16> [[VS2]], <vscale x 2 x i16> [[VS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
//
vuint16mf2_t test_vrol_vv_u16mf2_m(vbool32_t mask, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) {
  return __riscv_vrol(mask, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vrol_vx_u16mf2_m
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i16> @llvm.riscv.vrol.mask.nxv2i16.i64.i64(<vscale x 2 x i16> poison, <vscale x 2 x i16> [[VS2]], i64 [[RS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
//
vuint16mf2_t test_vrol_vx_u16mf2_m(vbool32_t mask, vuint16mf2_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(mask, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vrol_vv_u16m1_m
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i16> [[VS2:%.*]], <vscale x 4 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vrol.mask.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> poison, <vscale x 4 x i16> [[VS2]], <vscale x 4 x i16> [[VS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
//
vuint16m1_t test_vrol_vv_u16m1_m(vbool16_t mask, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) {
  return __riscv_vrol(mask, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vrol_vx_u16m1_m
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vrol.mask.nxv4i16.i64.i64(<vscale x 4 x i16> poison, <vscale x 4 x i16> [[VS2]], i64 [[RS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
//
vuint16m1_t test_vrol_vx_u16m1_m(vbool16_t mask, vuint16m1_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(mask, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vrol_vv_u16m2_m
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i16> [[VS2:%.*]], <vscale x 8 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i16> @llvm.riscv.vrol.mask.nxv8i16.nxv8i16.i64(<vscale x 8 x i16> poison, <vscale x 8 x i16> [[VS2]], <vscale x 8 x i16> [[VS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
//
vuint16m2_t test_vrol_vv_u16m2_m(vbool8_t mask, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) {
  return __riscv_vrol(mask, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vrol_vx_u16m2_m
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i16> @llvm.riscv.vrol.mask.nxv8i16.i64.i64(<vscale x 8 x i16> poison, <vscale x 8 x i16> [[VS2]], i64 [[RS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
//
vuint16m2_t test_vrol_vx_u16m2_m(vbool8_t mask, vuint16m2_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(mask, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vrol_vv_u16m4_m
// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i16> [[VS2:%.*]], <vscale x 16 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i16> @llvm.riscv.vrol.mask.nxv16i16.nxv16i16.i64(<vscale x 16 x i16> poison, <vscale x 16 x i16> [[VS2]], <vscale x 16 x i16> [[VS1]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
//
vuint16m4_t test_vrol_vv_u16m4_m(vbool4_t mask, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) {
  return __riscv_vrol(mask, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vrol_vx_u16m4_m
// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i16> @llvm.riscv.vrol.mask.nxv16i16.i64.i64(<vscale x 16 x i16> poison, <vscale x 16 x i16> [[VS2]], i64 [[RS1]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
//
vuint16m4_t test_vrol_vx_u16m4_m(vbool4_t mask, vuint16m4_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(mask, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vrol_vv_u16m8_m
// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], <vscale x 32 x i16> [[VS2:%.*]], <vscale x 32 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i16> @llvm.riscv.vrol.mask.nxv32i16.nxv32i16.i64(<vscale x 32 x i16> poison, <vscale x 32 x i16> [[VS2]], <vscale x 32 x i16> [[VS1]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
//
vuint16m8_t test_vrol_vv_u16m8_m(vbool2_t mask, vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) {
  return __riscv_vrol(mask, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vrol_vx_u16m8_m
// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], <vscale x 32 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i16> @llvm.riscv.vrol.mask.nxv32i16.i64.i64(<vscale x 32 x i16> poison, <vscale x 32 x i16> [[VS2]], i64 [[RS1]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
//
vuint16m8_t test_vrol_vx_u16m8_m(vbool2_t mask, vuint16m8_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(mask, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vrol_vv_u32mf2_m
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i32> [[VS2:%.*]], <vscale x 1 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vrol.mask.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> poison, <vscale x 1 x i32> [[VS2]], <vscale x 1 x i32> [[VS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
//
vuint32mf2_t test_vrol_vv_u32mf2_m(vbool64_t mask, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) {
  return __riscv_vrol(mask, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vrol_vx_u32mf2_m
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vrol.mask.nxv1i32.i64.i64(<vscale x 1 x i32> poison, <vscale x 1 x i32> [[VS2]], i64 [[RS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
//
vuint32mf2_t test_vrol_vx_u32mf2_m(vbool64_t mask, vuint32mf2_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(mask, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vrol_vv_u32m1_m
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i32> [[VS2:%.*]], <vscale x 2 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vrol.mask.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> poison, <vscale x 2 x i32> [[VS2]], <vscale x 2 x i32> [[VS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
//
vuint32m1_t test_vrol_vv_u32m1_m(vbool32_t mask, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) {
  return __riscv_vrol(mask, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vrol_vx_u32m1_m
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vrol.mask.nxv2i32.i64.i64(<vscale x 2 x i32> poison, <vscale x 2 x i32> [[VS2]], i64 [[RS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
//
vuint32m1_t test_vrol_vx_u32m1_m(vbool32_t mask, vuint32m1_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(mask, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vrol_vv_u32m2_m
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i32> [[VS2:%.*]], <vscale x 4 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vrol.mask.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> poison, <vscale x 4 x i32> [[VS2]], <vscale x 4 x i32> [[VS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
//
vuint32m2_t test_vrol_vv_u32m2_m(vbool16_t mask, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) {
  return __riscv_vrol(mask, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vrol_vx_u32m2_m
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vrol.mask.nxv4i32.i64.i64(<vscale x 4 x i32> poison, <vscale x 4 x i32> [[VS2]], i64 [[RS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
//
vuint32m2_t test_vrol_vx_u32m2_m(vbool16_t mask, vuint32m2_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(mask, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vrol_vv_u32m4_m
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i32> [[VS2:%.*]], <vscale x 8 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vrol.mask.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> poison, <vscale x 8 x i32> [[VS2]], <vscale x 8 x i32> [[VS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
//
vuint32m4_t test_vrol_vv_u32m4_m(vbool8_t mask, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) {
  return __riscv_vrol(mask, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vrol_vx_u32m4_m
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vrol.mask.nxv8i32.i64.i64(<vscale x 8 x i32> poison, <vscale x 8 x i32> [[VS2]], i64 [[RS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
//
vuint32m4_t test_vrol_vx_u32m4_m(vbool8_t mask, vuint32m4_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(mask, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vrol_vv_u32m8_m
// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i32> [[VS2:%.*]], <vscale x 16 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vrol.mask.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> poison, <vscale x 16 x i32> [[VS2]], <vscale x 16 x i32> [[VS1]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
//
vuint32m8_t test_vrol_vv_u32m8_m(vbool4_t mask, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) {
  return __riscv_vrol(mask, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vrol_vx_u32m8_m
// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vrol.mask.nxv16i32.i64.i64(<vscale x 16 x i32> poison, <vscale x 16 x i32> [[VS2]], i64 [[RS1]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
//
vuint32m8_t test_vrol_vx_u32m8_m(vbool4_t mask, vuint32m8_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(mask, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vrol_vv_u64m1_m
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[VS2:%.*]], <vscale x 1 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vrol.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> [[VS2]], <vscale x 1 x i64> [[VS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vrol_vv_u64m1_m(vbool64_t mask, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) {
  return __riscv_vrol(mask, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vrol_vx_u64m1_m
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vrol.mask.nxv1i64.i64.i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> [[VS2]], i64 [[RS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vrol_vx_u64m1_m(vbool64_t mask, vuint64m1_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(mask, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vrol_vv_u64m2_m
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[VS2:%.*]], <vscale x 2 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vrol.mask.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> poison, <vscale x 2 x i64> [[VS2]], <vscale x 2 x i64> [[VS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_vrol_vv_u64m2_m(vbool32_t mask, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) {
  return __riscv_vrol(mask, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vrol_vx_u64m2_m
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vrol.mask.nxv2i64.i64.i64(<vscale x 2 x i64> poison, <vscale x 2 x i64> [[VS2]], i64 [[RS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_vrol_vx_u64m2_m(vbool32_t mask, vuint64m2_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(mask, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vrol_vv_u64m4_m
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[VS2:%.*]], <vscale x 4 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vrol.mask.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> poison, <vscale x 4 x i64> [[VS2]], <vscale x 4 x i64> [[VS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_vrol_vv_u64m4_m(vbool16_t mask, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) {
  return __riscv_vrol(mask, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vrol_vx_u64m4_m
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vrol.mask.nxv4i64.i64.i64(<vscale x 4 x i64> poison, <vscale x 4 x i64> [[VS2]], i64 [[RS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_vrol_vx_u64m4_m(vbool16_t mask, vuint64m4_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(mask, vs2, rs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vrol_vv_u64m8_m
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[VS2:%.*]], <vscale x 8 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vrol.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[VS2]], <vscale x 8 x i64> [[VS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_vrol_vv_u64m8_m(vbool8_t mask, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) {
  return __riscv_vrol(mask, vs2, vs1, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vrol_vx_u64m8_m
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vrol.mask.nxv8i64.i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[VS2]], i64 [[RS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_vrol_vx_u64m8_m(vbool8_t mask, vuint64m8_t vs2, size_t rs1, size_t vl) {
  return __riscv_vrol(mask, vs2, rs1, vl);
}