1 // NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 2 // REQUIRES: riscv-registered-target 3 // RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zfh \ 4 // RUN: -target-feature +zvfhmin -disable-O0-optnone \ 5 // RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ 6 // RUN: FileCheck --check-prefix=CHECK-RV64 %s 7 8 #include <riscv_vector.h> 9 10 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_f16mf4x2 11 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 2 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { 12 // CHECK-RV64-NEXT: entry: 13 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv2i8_2t.nxv1i16.i64(target("riscv.vector.tuple", <vscale x 2 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 1 x i16> [[BINDEX]], i64 [[VL]], i64 4) 14 // CHECK-RV64-NEXT: ret void 15 // 16 void test_vsoxseg2ei16_v_f16mf4x2(_Float16 *base, vuint16mf4_t bindex, vfloat16mf4x2_t v_tuple, size_t vl) { 17 return __riscv_vsoxseg2ei16_v_f16mf4x2(base, bindex, v_tuple, vl); 18 } 19 20 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_f16mf2x2 21 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 4 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 22 // CHECK-RV64-NEXT: entry: 23 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv4i8_2t.nxv2i16.i64(target("riscv.vector.tuple", <vscale x 4 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 2 x i16> [[BINDEX]], i64 [[VL]], i64 4) 24 // CHECK-RV64-NEXT: ret void 25 // 26 void test_vsoxseg2ei16_v_f16mf2x2(_Float16 *base, vuint16mf2_t bindex, vfloat16mf2x2_t v_tuple, size_t vl) { 27 return __riscv_vsoxseg2ei16_v_f16mf2x2(base, bindex, v_tuple, vl); 28 } 29 30 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_f16m1x2 31 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 32 // CHECK-RV64-NEXT: entry: 33 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv8i8_2t.nxv4i16.i64(target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 4 x i16> [[BINDEX]], i64 [[VL]], i64 4) 34 // CHECK-RV64-NEXT: ret void 35 // 36 void test_vsoxseg2ei16_v_f16m1x2(_Float16 *base, vuint16m1_t bindex, vfloat16m1x2_t v_tuple, size_t vl) { 37 return __riscv_vsoxseg2ei16_v_f16m1x2(base, bindex, v_tuple, vl); 38 } 39 40 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_f16m2x2 41 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 42 // CHECK-RV64-NEXT: entry: 43 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv16i8_2t.nxv8i16.i64(target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 8 x i16> [[BINDEX]], i64 [[VL]], i64 4) 44 // CHECK-RV64-NEXT: ret void 45 // 46 void test_vsoxseg2ei16_v_f16m2x2(_Float16 *base, vuint16m2_t bindex, vfloat16m2x2_t v_tuple, size_t vl) { 47 return __riscv_vsoxseg2ei16_v_f16m2x2(base, bindex, v_tuple, vl); 48 } 49 50 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_f16m4x2 51 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 16 x i16> 
[[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 52 // CHECK-RV64-NEXT: entry: 53 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv32i8_2t.nxv16i16.i64(target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 16 x i16> [[BINDEX]], i64 [[VL]], i64 4) 54 // CHECK-RV64-NEXT: ret void 55 // 56 void test_vsoxseg2ei16_v_f16m4x2(_Float16 *base, vuint16m4_t bindex, vfloat16m4x2_t v_tuple, size_t vl) { 57 return __riscv_vsoxseg2ei16_v_f16m4x2(base, bindex, v_tuple, vl); 58 } 59 60 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_f32mf2x2 61 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 4 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 62 // CHECK-RV64-NEXT: entry: 63 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv4i8_2t.nxv1i16.i64(target("riscv.vector.tuple", <vscale x 4 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 1 x i16> [[BINDEX]], i64 [[VL]], i64 5) 64 // CHECK-RV64-NEXT: ret void 65 // 66 void test_vsoxseg2ei16_v_f32mf2x2(float *base, vuint16mf4_t bindex, vfloat32mf2x2_t v_tuple, size_t vl) { 67 return __riscv_vsoxseg2ei16_v_f32mf2x2(base, bindex, v_tuple, vl); 68 } 69 70 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_f32m1x2 71 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 72 // CHECK-RV64-NEXT: entry: 73 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv8i8_2t.nxv2i16.i64(target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 2 x i16> [[BINDEX]], i64 [[VL]], i64 5) 74 // CHECK-RV64-NEXT: ret void 75 // 76 void test_vsoxseg2ei16_v_f32m1x2(float *base, vuint16mf2_t bindex, vfloat32m1x2_t v_tuple, size_t vl) { 77 return __riscv_vsoxseg2ei16_v_f32m1x2(base, bindex, v_tuple, vl); 78 } 79 80 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_f32m2x2 81 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 82 // CHECK-RV64-NEXT: entry: 83 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv16i8_2t.nxv4i16.i64(target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 4 x i16> [[BINDEX]], i64 [[VL]], i64 5) 84 // CHECK-RV64-NEXT: ret void 85 // 86 void test_vsoxseg2ei16_v_f32m2x2(float *base, vuint16m1_t bindex, vfloat32m2x2_t v_tuple, size_t vl) { 87 return __riscv_vsoxseg2ei16_v_f32m2x2(base, bindex, v_tuple, vl); 88 } 89 90 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_f32m4x2 91 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 92 // CHECK-RV64-NEXT: entry: 93 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv32i8_2t.nxv8i16.i64(target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 8 x i16> [[BINDEX]], i64 [[VL]], i64 5) 94 // CHECK-RV64-NEXT: ret void 95 // 96 void test_vsoxseg2ei16_v_f32m4x2(float *base, vuint16m2_t bindex, vfloat32m4x2_t v_tuple, size_t vl) { 97 return __riscv_vsoxseg2ei16_v_f32m4x2(base, bindex, 
v_tuple, vl); 98 } 99 100 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_f64m1x2 101 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 102 // CHECK-RV64-NEXT: entry: 103 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv8i8_2t.nxv1i16.i64(target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 1 x i16> [[BINDEX]], i64 [[VL]], i64 6) 104 // CHECK-RV64-NEXT: ret void 105 // 106 void test_vsoxseg2ei16_v_f64m1x2(double *base, vuint16mf4_t bindex, vfloat64m1x2_t v_tuple, size_t vl) { 107 return __riscv_vsoxseg2ei16_v_f64m1x2(base, bindex, v_tuple, vl); 108 } 109 110 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_f64m2x2 111 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 112 // CHECK-RV64-NEXT: entry: 113 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv16i8_2t.nxv2i16.i64(target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 2 x i16> [[BINDEX]], i64 [[VL]], i64 6) 114 // CHECK-RV64-NEXT: ret void 115 // 116 void test_vsoxseg2ei16_v_f64m2x2(double *base, vuint16mf2_t bindex, vfloat64m2x2_t v_tuple, size_t vl) { 117 return __riscv_vsoxseg2ei16_v_f64m2x2(base, bindex, v_tuple, vl); 118 } 119 120 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_f64m4x2 121 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 122 // CHECK-RV64-NEXT: entry: 123 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv32i8_2t.nxv4i16.i64(target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 4 x i16> [[BINDEX]], i64 [[VL]], i64 6) 124 // CHECK-RV64-NEXT: ret void 125 // 126 void test_vsoxseg2ei16_v_f64m4x2(double *base, vuint16m1_t bindex, vfloat64m4x2_t v_tuple, size_t vl) { 127 return __riscv_vsoxseg2ei16_v_f64m4x2(base, bindex, v_tuple, vl); 128 } 129 130 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i8mf8x2 131 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 1 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 132 // CHECK-RV64-NEXT: entry: 133 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv1i8_2t.nxv1i16.i64(target("riscv.vector.tuple", <vscale x 1 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 1 x i16> [[BINDEX]], i64 [[VL]], i64 3) 134 // CHECK-RV64-NEXT: ret void 135 // 136 void test_vsoxseg2ei16_v_i8mf8x2(int8_t *base, vuint16mf4_t bindex, vint8mf8x2_t v_tuple, size_t vl) { 137 return __riscv_vsoxseg2ei16_v_i8mf8x2(base, bindex, v_tuple, vl); 138 } 139 140 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i8mf4x2 141 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 2 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 142 // CHECK-RV64-NEXT: entry: 143 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv2i8_2t.nxv2i16.i64(target("riscv.vector.tuple", <vscale x 2 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 2 x i16> [[BINDEX]], i64 [[VL]], i64 
3) 144 // CHECK-RV64-NEXT: ret void 145 // 146 void test_vsoxseg2ei16_v_i8mf4x2(int8_t *base, vuint16mf2_t bindex, vint8mf4x2_t v_tuple, size_t vl) { 147 return __riscv_vsoxseg2ei16_v_i8mf4x2(base, bindex, v_tuple, vl); 148 } 149 150 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i8mf2x2 151 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 4 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 152 // CHECK-RV64-NEXT: entry: 153 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv4i8_2t.nxv4i16.i64(target("riscv.vector.tuple", <vscale x 4 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 4 x i16> [[BINDEX]], i64 [[VL]], i64 3) 154 // CHECK-RV64-NEXT: ret void 155 // 156 void test_vsoxseg2ei16_v_i8mf2x2(int8_t *base, vuint16m1_t bindex, vint8mf2x2_t v_tuple, size_t vl) { 157 return __riscv_vsoxseg2ei16_v_i8mf2x2(base, bindex, v_tuple, vl); 158 } 159 160 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i8m1x2 161 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 162 // CHECK-RV64-NEXT: entry: 163 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv8i8_2t.nxv8i16.i64(target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 8 x i16> [[BINDEX]], i64 [[VL]], i64 3) 164 // CHECK-RV64-NEXT: ret void 165 // 166 void test_vsoxseg2ei16_v_i8m1x2(int8_t *base, vuint16m2_t bindex, vint8m1x2_t v_tuple, size_t vl) { 167 return __riscv_vsoxseg2ei16_v_i8m1x2(base, bindex, v_tuple, vl); 168 } 169 170 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i8m2x2 171 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 16 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 172 // CHECK-RV64-NEXT: entry: 173 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv16i8_2t.nxv16i16.i64(target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 16 x i16> [[BINDEX]], i64 [[VL]], i64 3) 174 // CHECK-RV64-NEXT: ret void 175 // 176 void test_vsoxseg2ei16_v_i8m2x2(int8_t *base, vuint16m4_t bindex, vint8m2x2_t v_tuple, size_t vl) { 177 return __riscv_vsoxseg2ei16_v_i8m2x2(base, bindex, v_tuple, vl); 178 } 179 180 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i8m4x2 181 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 32 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 182 // CHECK-RV64-NEXT: entry: 183 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv32i8_2t.nxv32i16.i64(target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 32 x i16> [[BINDEX]], i64 [[VL]], i64 3) 184 // CHECK-RV64-NEXT: ret void 185 // 186 void test_vsoxseg2ei16_v_i8m4x2(int8_t *base, vuint16m8_t bindex, vint8m4x2_t v_tuple, size_t vl) { 187 return __riscv_vsoxseg2ei16_v_i8m4x2(base, bindex, v_tuple, vl); 188 } 189 190 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i16mf4x2 191 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 2 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 192 // CHECK-RV64-NEXT: entry: 193 // CHECK-RV64-NEXT: call 
void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv2i8_2t.nxv1i16.i64(target("riscv.vector.tuple", <vscale x 2 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 1 x i16> [[BINDEX]], i64 [[VL]], i64 4) 194 // CHECK-RV64-NEXT: ret void 195 // 196 void test_vsoxseg2ei16_v_i16mf4x2(int16_t *base, vuint16mf4_t bindex, vint16mf4x2_t v_tuple, size_t vl) { 197 return __riscv_vsoxseg2ei16_v_i16mf4x2(base, bindex, v_tuple, vl); 198 } 199 200 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i16mf2x2 201 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 4 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 202 // CHECK-RV64-NEXT: entry: 203 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv4i8_2t.nxv2i16.i64(target("riscv.vector.tuple", <vscale x 4 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 2 x i16> [[BINDEX]], i64 [[VL]], i64 4) 204 // CHECK-RV64-NEXT: ret void 205 // 206 void test_vsoxseg2ei16_v_i16mf2x2(int16_t *base, vuint16mf2_t bindex, vint16mf2x2_t v_tuple, size_t vl) { 207 return __riscv_vsoxseg2ei16_v_i16mf2x2(base, bindex, v_tuple, vl); 208 } 209 210 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i16m1x2 211 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 212 // CHECK-RV64-NEXT: entry: 213 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv8i8_2t.nxv4i16.i64(target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 4 x i16> [[BINDEX]], i64 [[VL]], i64 4) 214 // CHECK-RV64-NEXT: ret void 215 // 216 void test_vsoxseg2ei16_v_i16m1x2(int16_t *base, vuint16m1_t bindex, vint16m1x2_t v_tuple, size_t vl) { 217 return __riscv_vsoxseg2ei16_v_i16m1x2(base, bindex, v_tuple, vl); 218 } 219 220 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i16m2x2 221 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 222 // CHECK-RV64-NEXT: entry: 223 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv16i8_2t.nxv8i16.i64(target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 8 x i16> [[BINDEX]], i64 [[VL]], i64 4) 224 // CHECK-RV64-NEXT: ret void 225 // 226 void test_vsoxseg2ei16_v_i16m2x2(int16_t *base, vuint16m2_t bindex, vint16m2x2_t v_tuple, size_t vl) { 227 return __riscv_vsoxseg2ei16_v_i16m2x2(base, bindex, v_tuple, vl); 228 } 229 230 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i16m4x2 231 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 16 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 232 // CHECK-RV64-NEXT: entry: 233 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv32i8_2t.nxv16i16.i64(target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 16 x i16> [[BINDEX]], i64 [[VL]], i64 4) 234 // CHECK-RV64-NEXT: ret void 235 // 236 void test_vsoxseg2ei16_v_i16m4x2(int16_t *base, vuint16m4_t bindex, vint16m4x2_t v_tuple, size_t vl) { 237 return __riscv_vsoxseg2ei16_v_i16m4x2(base, bindex, v_tuple, vl); 238 } 239 240 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i32mf2x2 241 // CHECK-RV64-SAME: (ptr noundef 
[[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 4 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 242 // CHECK-RV64-NEXT: entry: 243 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv4i8_2t.nxv1i16.i64(target("riscv.vector.tuple", <vscale x 4 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 1 x i16> [[BINDEX]], i64 [[VL]], i64 5) 244 // CHECK-RV64-NEXT: ret void 245 // 246 void test_vsoxseg2ei16_v_i32mf2x2(int32_t *base, vuint16mf4_t bindex, vint32mf2x2_t v_tuple, size_t vl) { 247 return __riscv_vsoxseg2ei16_v_i32mf2x2(base, bindex, v_tuple, vl); 248 } 249 250 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i32m1x2 251 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 252 // CHECK-RV64-NEXT: entry: 253 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv8i8_2t.nxv2i16.i64(target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 2 x i16> [[BINDEX]], i64 [[VL]], i64 5) 254 // CHECK-RV64-NEXT: ret void 255 // 256 void test_vsoxseg2ei16_v_i32m1x2(int32_t *base, vuint16mf2_t bindex, vint32m1x2_t v_tuple, size_t vl) { 257 return __riscv_vsoxseg2ei16_v_i32m1x2(base, bindex, v_tuple, vl); 258 } 259 260 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i32m2x2 261 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 262 // CHECK-RV64-NEXT: entry: 263 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv16i8_2t.nxv4i16.i64(target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 4 x i16> [[BINDEX]], i64 [[VL]], i64 5) 264 // CHECK-RV64-NEXT: ret void 265 // 266 void test_vsoxseg2ei16_v_i32m2x2(int32_t *base, vuint16m1_t bindex, vint32m2x2_t v_tuple, size_t vl) { 267 return __riscv_vsoxseg2ei16_v_i32m2x2(base, bindex, v_tuple, vl); 268 } 269 270 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i32m4x2 271 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 272 // CHECK-RV64-NEXT: entry: 273 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv32i8_2t.nxv8i16.i64(target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 8 x i16> [[BINDEX]], i64 [[VL]], i64 5) 274 // CHECK-RV64-NEXT: ret void 275 // 276 void test_vsoxseg2ei16_v_i32m4x2(int32_t *base, vuint16m2_t bindex, vint32m4x2_t v_tuple, size_t vl) { 277 return __riscv_vsoxseg2ei16_v_i32m4x2(base, bindex, v_tuple, vl); 278 } 279 280 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i64m1x2 281 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 282 // CHECK-RV64-NEXT: entry: 283 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv8i8_2t.nxv1i16.i64(target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 1 x i16> [[BINDEX]], i64 [[VL]], i64 6) 284 // CHECK-RV64-NEXT: ret void 285 // 286 void test_vsoxseg2ei16_v_i64m1x2(int64_t *base, vuint16mf4_t bindex, vint64m1x2_t 
v_tuple, size_t vl) { 287 return __riscv_vsoxseg2ei16_v_i64m1x2(base, bindex, v_tuple, vl); 288 } 289 290 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i64m2x2 291 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 292 // CHECK-RV64-NEXT: entry: 293 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv16i8_2t.nxv2i16.i64(target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 2 x i16> [[BINDEX]], i64 [[VL]], i64 6) 294 // CHECK-RV64-NEXT: ret void 295 // 296 void test_vsoxseg2ei16_v_i64m2x2(int64_t *base, vuint16mf2_t bindex, vint64m2x2_t v_tuple, size_t vl) { 297 return __riscv_vsoxseg2ei16_v_i64m2x2(base, bindex, v_tuple, vl); 298 } 299 300 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i64m4x2 301 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 302 // CHECK-RV64-NEXT: entry: 303 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv32i8_2t.nxv4i16.i64(target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 4 x i16> [[BINDEX]], i64 [[VL]], i64 6) 304 // CHECK-RV64-NEXT: ret void 305 // 306 void test_vsoxseg2ei16_v_i64m4x2(int64_t *base, vuint16m1_t bindex, vint64m4x2_t v_tuple, size_t vl) { 307 return __riscv_vsoxseg2ei16_v_i64m4x2(base, bindex, v_tuple, vl); 308 } 309 310 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u8mf8x2 311 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 1 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 312 // CHECK-RV64-NEXT: entry: 313 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv1i8_2t.nxv1i16.i64(target("riscv.vector.tuple", <vscale x 1 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 1 x i16> [[BINDEX]], i64 [[VL]], i64 3) 314 // CHECK-RV64-NEXT: ret void 315 // 316 void test_vsoxseg2ei16_v_u8mf8x2(uint8_t *base, vuint16mf4_t bindex, vuint8mf8x2_t v_tuple, size_t vl) { 317 return __riscv_vsoxseg2ei16_v_u8mf8x2(base, bindex, v_tuple, vl); 318 } 319 320 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u8mf4x2 321 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 2 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 322 // CHECK-RV64-NEXT: entry: 323 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv2i8_2t.nxv2i16.i64(target("riscv.vector.tuple", <vscale x 2 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 2 x i16> [[BINDEX]], i64 [[VL]], i64 3) 324 // CHECK-RV64-NEXT: ret void 325 // 326 void test_vsoxseg2ei16_v_u8mf4x2(uint8_t *base, vuint16mf2_t bindex, vuint8mf4x2_t v_tuple, size_t vl) { 327 return __riscv_vsoxseg2ei16_v_u8mf4x2(base, bindex, v_tuple, vl); 328 } 329 330 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u8mf2x2 331 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 4 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 332 // CHECK-RV64-NEXT: entry: 333 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv4i8_2t.nxv4i16.i64(target("riscv.vector.tuple", <vscale x 4 x 
i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 4 x i16> [[BINDEX]], i64 [[VL]], i64 3) 334 // CHECK-RV64-NEXT: ret void 335 // 336 void test_vsoxseg2ei16_v_u8mf2x2(uint8_t *base, vuint16m1_t bindex, vuint8mf2x2_t v_tuple, size_t vl) { 337 return __riscv_vsoxseg2ei16_v_u8mf2x2(base, bindex, v_tuple, vl); 338 } 339 340 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u8m1x2 341 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 342 // CHECK-RV64-NEXT: entry: 343 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv8i8_2t.nxv8i16.i64(target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 8 x i16> [[BINDEX]], i64 [[VL]], i64 3) 344 // CHECK-RV64-NEXT: ret void 345 // 346 void test_vsoxseg2ei16_v_u8m1x2(uint8_t *base, vuint16m2_t bindex, vuint8m1x2_t v_tuple, size_t vl) { 347 return __riscv_vsoxseg2ei16_v_u8m1x2(base, bindex, v_tuple, vl); 348 } 349 350 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u8m2x2 351 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 16 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 352 // CHECK-RV64-NEXT: entry: 353 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv16i8_2t.nxv16i16.i64(target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 16 x i16> [[BINDEX]], i64 [[VL]], i64 3) 354 // CHECK-RV64-NEXT: ret void 355 // 356 void test_vsoxseg2ei16_v_u8m2x2(uint8_t *base, vuint16m4_t bindex, vuint8m2x2_t v_tuple, size_t vl) { 357 return __riscv_vsoxseg2ei16_v_u8m2x2(base, bindex, v_tuple, vl); 358 } 359 360 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u8m4x2 361 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 32 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 362 // CHECK-RV64-NEXT: entry: 363 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv32i8_2t.nxv32i16.i64(target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 32 x i16> [[BINDEX]], i64 [[VL]], i64 3) 364 // CHECK-RV64-NEXT: ret void 365 // 366 void test_vsoxseg2ei16_v_u8m4x2(uint8_t *base, vuint16m8_t bindex, vuint8m4x2_t v_tuple, size_t vl) { 367 return __riscv_vsoxseg2ei16_v_u8m4x2(base, bindex, v_tuple, vl); 368 } 369 370 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u16mf4x2 371 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 2 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 372 // CHECK-RV64-NEXT: entry: 373 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv2i8_2t.nxv1i16.i64(target("riscv.vector.tuple", <vscale x 2 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 1 x i16> [[BINDEX]], i64 [[VL]], i64 4) 374 // CHECK-RV64-NEXT: ret void 375 // 376 void test_vsoxseg2ei16_v_u16mf4x2(uint16_t *base, vuint16mf4_t bindex, vuint16mf4x2_t v_tuple, size_t vl) { 377 return __riscv_vsoxseg2ei16_v_u16mf4x2(base, bindex, v_tuple, vl); 378 } 379 380 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u16mf2x2 381 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 4 x i8>, 2) [[V_TUPLE:%.*]], 
i64 noundef [[VL:%.*]]) #[[ATTR0]] { 382 // CHECK-RV64-NEXT: entry: 383 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv4i8_2t.nxv2i16.i64(target("riscv.vector.tuple", <vscale x 4 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 2 x i16> [[BINDEX]], i64 [[VL]], i64 4) 384 // CHECK-RV64-NEXT: ret void 385 // 386 void test_vsoxseg2ei16_v_u16mf2x2(uint16_t *base, vuint16mf2_t bindex, vuint16mf2x2_t v_tuple, size_t vl) { 387 return __riscv_vsoxseg2ei16_v_u16mf2x2(base, bindex, v_tuple, vl); 388 } 389 390 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u16m1x2 391 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 392 // CHECK-RV64-NEXT: entry: 393 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv8i8_2t.nxv4i16.i64(target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 4 x i16> [[BINDEX]], i64 [[VL]], i64 4) 394 // CHECK-RV64-NEXT: ret void 395 // 396 void test_vsoxseg2ei16_v_u16m1x2(uint16_t *base, vuint16m1_t bindex, vuint16m1x2_t v_tuple, size_t vl) { 397 return __riscv_vsoxseg2ei16_v_u16m1x2(base, bindex, v_tuple, vl); 398 } 399 400 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u16m2x2 401 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 402 // CHECK-RV64-NEXT: entry: 403 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv16i8_2t.nxv8i16.i64(target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 8 x i16> [[BINDEX]], i64 [[VL]], i64 4) 404 // CHECK-RV64-NEXT: ret void 405 // 406 void test_vsoxseg2ei16_v_u16m2x2(uint16_t *base, vuint16m2_t bindex, vuint16m2x2_t v_tuple, size_t vl) { 407 return __riscv_vsoxseg2ei16_v_u16m2x2(base, bindex, v_tuple, vl); 408 } 409 410 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u16m4x2 411 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 16 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 412 // CHECK-RV64-NEXT: entry: 413 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv32i8_2t.nxv16i16.i64(target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 16 x i16> [[BINDEX]], i64 [[VL]], i64 4) 414 // CHECK-RV64-NEXT: ret void 415 // 416 void test_vsoxseg2ei16_v_u16m4x2(uint16_t *base, vuint16m4_t bindex, vuint16m4x2_t v_tuple, size_t vl) { 417 return __riscv_vsoxseg2ei16_v_u16m4x2(base, bindex, v_tuple, vl); 418 } 419 420 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u32mf2x2 421 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 4 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 422 // CHECK-RV64-NEXT: entry: 423 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv4i8_2t.nxv1i16.i64(target("riscv.vector.tuple", <vscale x 4 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 1 x i16> [[BINDEX]], i64 [[VL]], i64 5) 424 // CHECK-RV64-NEXT: ret void 425 // 426 void test_vsoxseg2ei16_v_u32mf2x2(uint32_t *base, vuint16mf4_t bindex, vuint32mf2x2_t v_tuple, size_t vl) { 427 return __riscv_vsoxseg2ei16_v_u32mf2x2(base, bindex, v_tuple, vl); 428 } 429 430 // 
CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u32m1x2 431 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 432 // CHECK-RV64-NEXT: entry: 433 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv8i8_2t.nxv2i16.i64(target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 2 x i16> [[BINDEX]], i64 [[VL]], i64 5) 434 // CHECK-RV64-NEXT: ret void 435 // 436 void test_vsoxseg2ei16_v_u32m1x2(uint32_t *base, vuint16mf2_t bindex, vuint32m1x2_t v_tuple, size_t vl) { 437 return __riscv_vsoxseg2ei16_v_u32m1x2(base, bindex, v_tuple, vl); 438 } 439 440 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u32m2x2 441 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 442 // CHECK-RV64-NEXT: entry: 443 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv16i8_2t.nxv4i16.i64(target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 4 x i16> [[BINDEX]], i64 [[VL]], i64 5) 444 // CHECK-RV64-NEXT: ret void 445 // 446 void test_vsoxseg2ei16_v_u32m2x2(uint32_t *base, vuint16m1_t bindex, vuint32m2x2_t v_tuple, size_t vl) { 447 return __riscv_vsoxseg2ei16_v_u32m2x2(base, bindex, v_tuple, vl); 448 } 449 450 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u32m4x2 451 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 452 // CHECK-RV64-NEXT: entry: 453 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv32i8_2t.nxv8i16.i64(target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 8 x i16> [[BINDEX]], i64 [[VL]], i64 5) 454 // CHECK-RV64-NEXT: ret void 455 // 456 void test_vsoxseg2ei16_v_u32m4x2(uint32_t *base, vuint16m2_t bindex, vuint32m4x2_t v_tuple, size_t vl) { 457 return __riscv_vsoxseg2ei16_v_u32m4x2(base, bindex, v_tuple, vl); 458 } 459 460 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u64m1x2 461 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 462 // CHECK-RV64-NEXT: entry: 463 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv8i8_2t.nxv1i16.i64(target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 1 x i16> [[BINDEX]], i64 [[VL]], i64 6) 464 // CHECK-RV64-NEXT: ret void 465 // 466 void test_vsoxseg2ei16_v_u64m1x2(uint64_t *base, vuint16mf4_t bindex, vuint64m1x2_t v_tuple, size_t vl) { 467 return __riscv_vsoxseg2ei16_v_u64m1x2(base, bindex, v_tuple, vl); 468 } 469 470 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u64m2x2 471 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 472 // CHECK-RV64-NEXT: entry: 473 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv16i8_2t.nxv2i16.i64(target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 2 x i16> [[BINDEX]], i64 [[VL]], i64 6) 474 // 
CHECK-RV64-NEXT: ret void 475 // 476 void test_vsoxseg2ei16_v_u64m2x2(uint64_t *base, vuint16mf2_t bindex, vuint64m2x2_t v_tuple, size_t vl) { 477 return __riscv_vsoxseg2ei16_v_u64m2x2(base, bindex, v_tuple, vl); 478 } 479 480 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u64m4x2 481 // CHECK-RV64-SAME: (ptr noundef [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 482 // CHECK-RV64-NEXT: entry: 483 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.triscv.vector.tuple_nxv32i8_2t.nxv4i16.i64(target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 4 x i16> [[BINDEX]], i64 [[VL]], i64 6) 484 // CHECK-RV64-NEXT: ret void 485 // 486 void test_vsoxseg2ei16_v_u64m4x2(uint64_t *base, vuint16m1_t bindex, vuint64m4x2_t v_tuple, size_t vl) { 487 return __riscv_vsoxseg2ei16_v_u64m4x2(base, bindex, v_tuple, vl); 488 } 489 490 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_f16mf4x2_m 491 // CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 2 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 492 // CHECK-RV64-NEXT: entry: 493 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv2i8_2t.nxv1i16.nxv1i1.i64(target("riscv.vector.tuple", <vscale x 2 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 1 x i16> [[BINDEX]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 4) 494 // CHECK-RV64-NEXT: ret void 495 // 496 void test_vsoxseg2ei16_v_f16mf4x2_m(vbool64_t mask, _Float16 *base, vuint16mf4_t bindex, vfloat16mf4x2_t v_tuple, size_t vl) { 497 return __riscv_vsoxseg2ei16_v_f16mf4x2_m(mask, base, bindex, v_tuple, vl); 498 } 499 500 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_f16mf2x2_m 501 // CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 4 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 502 // CHECK-RV64-NEXT: entry: 503 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv4i8_2t.nxv2i16.nxv2i1.i64(target("riscv.vector.tuple", <vscale x 4 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 2 x i16> [[BINDEX]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 4) 504 // CHECK-RV64-NEXT: ret void 505 // 506 void test_vsoxseg2ei16_v_f16mf2x2_m(vbool32_t mask, _Float16 *base, vuint16mf2_t bindex, vfloat16mf2x2_t v_tuple, size_t vl) { 507 return __riscv_vsoxseg2ei16_v_f16mf2x2_m(mask, base, bindex, v_tuple, vl); 508 } 509 510 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_f16m1x2_m 511 // CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 512 // CHECK-RV64-NEXT: entry: 513 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv8i8_2t.nxv4i16.nxv4i1.i64(target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 4 x i16> [[BINDEX]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 4) 514 // CHECK-RV64-NEXT: ret void 515 // 516 void test_vsoxseg2ei16_v_f16m1x2_m(vbool16_t mask, _Float16 *base, vuint16m1_t bindex, vfloat16m1x2_t v_tuple, size_t vl) { 517 return __riscv_vsoxseg2ei16_v_f16m1x2_m(mask, base, bindex, v_tuple, vl); 518 } 519 520 // 
CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_f16m2x2_m 521 // CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 522 // CHECK-RV64-NEXT: entry: 523 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv16i8_2t.nxv8i16.nxv8i1.i64(target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 8 x i16> [[BINDEX]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 4) 524 // CHECK-RV64-NEXT: ret void 525 // 526 void test_vsoxseg2ei16_v_f16m2x2_m(vbool8_t mask, _Float16 *base, vuint16m2_t bindex, vfloat16m2x2_t v_tuple, size_t vl) { 527 return __riscv_vsoxseg2ei16_v_f16m2x2_m(mask, base, bindex, v_tuple, vl); 528 } 529 530 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_f16m4x2_m 531 // CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 16 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 532 // CHECK-RV64-NEXT: entry: 533 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv32i8_2t.nxv16i16.nxv16i1.i64(target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 16 x i16> [[BINDEX]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 4) 534 // CHECK-RV64-NEXT: ret void 535 // 536 void test_vsoxseg2ei16_v_f16m4x2_m(vbool4_t mask, _Float16 *base, vuint16m4_t bindex, vfloat16m4x2_t v_tuple, size_t vl) { 537 return __riscv_vsoxseg2ei16_v_f16m4x2_m(mask, base, bindex, v_tuple, vl); 538 } 539 540 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_f32mf2x2_m 541 // CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 4 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 542 // CHECK-RV64-NEXT: entry: 543 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv4i8_2t.nxv1i16.nxv1i1.i64(target("riscv.vector.tuple", <vscale x 4 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 1 x i16> [[BINDEX]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 5) 544 // CHECK-RV64-NEXT: ret void 545 // 546 void test_vsoxseg2ei16_v_f32mf2x2_m(vbool64_t mask, float *base, vuint16mf4_t bindex, vfloat32mf2x2_t v_tuple, size_t vl) { 547 return __riscv_vsoxseg2ei16_v_f32mf2x2_m(mask, base, bindex, v_tuple, vl); 548 } 549 550 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_f32m1x2_m 551 // CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 552 // CHECK-RV64-NEXT: entry: 553 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv8i8_2t.nxv2i16.nxv2i1.i64(target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 2 x i16> [[BINDEX]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 5) 554 // CHECK-RV64-NEXT: ret void 555 // 556 void test_vsoxseg2ei16_v_f32m1x2_m(vbool32_t mask, float *base, vuint16mf2_t bindex, vfloat32m1x2_t v_tuple, size_t vl) { 557 return __riscv_vsoxseg2ei16_v_f32m1x2_m(mask, base, bindex, v_tuple, vl); 558 } 559 560 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_f32m2x2_m 561 // CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], ptr 
noundef [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 562 // CHECK-RV64-NEXT: entry: 563 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv16i8_2t.nxv4i16.nxv4i1.i64(target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 4 x i16> [[BINDEX]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 5) 564 // CHECK-RV64-NEXT: ret void 565 // 566 void test_vsoxseg2ei16_v_f32m2x2_m(vbool16_t mask, float *base, vuint16m1_t bindex, vfloat32m2x2_t v_tuple, size_t vl) { 567 return __riscv_vsoxseg2ei16_v_f32m2x2_m(mask, base, bindex, v_tuple, vl); 568 } 569 570 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_f32m4x2_m 571 // CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 572 // CHECK-RV64-NEXT: entry: 573 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv32i8_2t.nxv8i16.nxv8i1.i64(target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 8 x i16> [[BINDEX]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 5) 574 // CHECK-RV64-NEXT: ret void 575 // 576 void test_vsoxseg2ei16_v_f32m4x2_m(vbool8_t mask, float *base, vuint16m2_t bindex, vfloat32m4x2_t v_tuple, size_t vl) { 577 return __riscv_vsoxseg2ei16_v_f32m4x2_m(mask, base, bindex, v_tuple, vl); 578 } 579 580 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_f64m1x2_m 581 // CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 582 // CHECK-RV64-NEXT: entry: 583 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv8i8_2t.nxv1i16.nxv1i1.i64(target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 1 x i16> [[BINDEX]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 6) 584 // CHECK-RV64-NEXT: ret void 585 // 586 void test_vsoxseg2ei16_v_f64m1x2_m(vbool64_t mask, double *base, vuint16mf4_t bindex, vfloat64m1x2_t v_tuple, size_t vl) { 587 return __riscv_vsoxseg2ei16_v_f64m1x2_m(mask, base, bindex, v_tuple, vl); 588 } 589 590 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_f64m2x2_m 591 // CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 592 // CHECK-RV64-NEXT: entry: 593 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv16i8_2t.nxv2i16.nxv2i1.i64(target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 2 x i16> [[BINDEX]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 6) 594 // CHECK-RV64-NEXT: ret void 595 // 596 void test_vsoxseg2ei16_v_f64m2x2_m(vbool32_t mask, double *base, vuint16mf2_t bindex, vfloat64m2x2_t v_tuple, size_t vl) { 597 return __riscv_vsoxseg2ei16_v_f64m2x2_m(mask, base, bindex, v_tuple, vl); 598 } 599 600 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_f64m4x2_m 601 // CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { 602 // CHECK-RV64-NEXT: entry: 603 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv32i8_2t.nxv4i16.nxv4i1.i64(target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 4 x i16> [[BINDEX]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 6) 604 // CHECK-RV64-NEXT: ret void 605 // 606 void test_vsoxseg2ei16_v_f64m4x2_m(vbool16_t mask, double *base, vuint16m1_t bindex, vfloat64m4x2_t v_tuple, size_t vl) { 607 return __riscv_vsoxseg2ei16_v_f64m4x2_m(mask, base, bindex, v_tuple, vl); 608 } 609 610 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i8mf8x2_m 611 // CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 1 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 612 // CHECK-RV64-NEXT: entry: 613 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv1i8_2t.nxv1i16.nxv1i1.i64(target("riscv.vector.tuple", <vscale x 1 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 1 x i16> [[BINDEX]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3) 614 // CHECK-RV64-NEXT: ret void 615 // 616 void test_vsoxseg2ei16_v_i8mf8x2_m(vbool64_t mask, int8_t *base, vuint16mf4_t bindex, vint8mf8x2_t v_tuple, size_t vl) { 617 return __riscv_vsoxseg2ei16_v_i8mf8x2_m(mask, base, bindex, v_tuple, vl); 618 } 619 620 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i8mf4x2_m 621 // CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 2 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 622 // CHECK-RV64-NEXT: entry: 623 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv2i8_2t.nxv2i16.nxv2i1.i64(target("riscv.vector.tuple", <vscale x 2 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 2 x i16> [[BINDEX]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3) 624 // CHECK-RV64-NEXT: ret void 625 // 626 void test_vsoxseg2ei16_v_i8mf4x2_m(vbool32_t mask, int8_t *base, vuint16mf2_t bindex, vint8mf4x2_t v_tuple, size_t vl) { 627 return __riscv_vsoxseg2ei16_v_i8mf4x2_m(mask, base, bindex, v_tuple, vl); 628 } 629 630 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i8mf2x2_m 631 // CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 4 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 632 // CHECK-RV64-NEXT: entry: 633 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv4i8_2t.nxv4i16.nxv4i1.i64(target("riscv.vector.tuple", <vscale x 4 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 4 x i16> [[BINDEX]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3) 634 // CHECK-RV64-NEXT: ret void 635 // 636 void test_vsoxseg2ei16_v_i8mf2x2_m(vbool16_t mask, int8_t *base, vuint16m1_t bindex, vint8mf2x2_t v_tuple, size_t vl) { 637 return __riscv_vsoxseg2ei16_v_i8mf2x2_m(mask, base, bindex, v_tuple, vl); 638 } 639 640 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i8m1x2_m 641 // CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 642 // CHECK-RV64-NEXT: entry: 643 // CHECK-RV64-NEXT: call void 
@llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv8i8_2t.nxv8i16.nxv8i1.i64(target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 8 x i16> [[BINDEX]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3) 644 // CHECK-RV64-NEXT: ret void 645 // 646 void test_vsoxseg2ei16_v_i8m1x2_m(vbool8_t mask, int8_t *base, vuint16m2_t bindex, vint8m1x2_t v_tuple, size_t vl) { 647 return __riscv_vsoxseg2ei16_v_i8m1x2_m(mask, base, bindex, v_tuple, vl); 648 } 649 650 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i8m2x2_m 651 // CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 16 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 652 // CHECK-RV64-NEXT: entry: 653 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv16i8_2t.nxv16i16.nxv16i1.i64(target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 16 x i16> [[BINDEX]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 3) 654 // CHECK-RV64-NEXT: ret void 655 // 656 void test_vsoxseg2ei16_v_i8m2x2_m(vbool4_t mask, int8_t *base, vuint16m4_t bindex, vint8m2x2_t v_tuple, size_t vl) { 657 return __riscv_vsoxseg2ei16_v_i8m2x2_m(mask, base, bindex, v_tuple, vl); 658 } 659 660 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i8m4x2_m 661 // CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 32 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 662 // CHECK-RV64-NEXT: entry: 663 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv32i8_2t.nxv32i16.nxv32i1.i64(target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 32 x i16> [[BINDEX]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 3) 664 // CHECK-RV64-NEXT: ret void 665 // 666 void test_vsoxseg2ei16_v_i8m4x2_m(vbool2_t mask, int8_t *base, vuint16m8_t bindex, vint8m4x2_t v_tuple, size_t vl) { 667 return __riscv_vsoxseg2ei16_v_i8m4x2_m(mask, base, bindex, v_tuple, vl); 668 } 669 670 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i16mf4x2_m 671 // CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 2 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 672 // CHECK-RV64-NEXT: entry: 673 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv2i8_2t.nxv1i16.nxv1i1.i64(target("riscv.vector.tuple", <vscale x 2 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 1 x i16> [[BINDEX]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 4) 674 // CHECK-RV64-NEXT: ret void 675 // 676 void test_vsoxseg2ei16_v_i16mf4x2_m(vbool64_t mask, int16_t *base, vuint16mf4_t bindex, vint16mf4x2_t v_tuple, size_t vl) { 677 return __riscv_vsoxseg2ei16_v_i16mf4x2_m(mask, base, bindex, v_tuple, vl); 678 } 679 680 // CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i16mf2x2_m 681 // CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 4 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 682 // CHECK-RV64-NEXT: entry: 683 // CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv4i8_2t.nxv2i16.nxv2i1.i64(target("riscv.vector.tuple", <vscale x 4 x i8>, 2) [[V_TUPLE]], ptr 
[[BASE]], <vscale x 2 x i16> [[BINDEX]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 4)
// CHECK-RV64-NEXT: ret void
//
void test_vsoxseg2ei16_v_i16mf2x2_m(vbool32_t mask, int16_t *base, vuint16mf2_t bindex, vint16mf2x2_t v_tuple, size_t vl) {
  return __riscv_vsoxseg2ei16_v_i16mf2x2_m(mask, base, bindex, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i16m1x2_m
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv8i8_2t.nxv4i16.nxv4i1.i64(target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 4 x i16> [[BINDEX]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 4)
// CHECK-RV64-NEXT: ret void
//
void test_vsoxseg2ei16_v_i16m1x2_m(vbool16_t mask, int16_t *base, vuint16m1_t bindex, vint16m1x2_t v_tuple, size_t vl) {
  return __riscv_vsoxseg2ei16_v_i16m1x2_m(mask, base, bindex, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i16m2x2_m
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv16i8_2t.nxv8i16.nxv8i1.i64(target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 8 x i16> [[BINDEX]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 4)
// CHECK-RV64-NEXT: ret void
//
void test_vsoxseg2ei16_v_i16m2x2_m(vbool8_t mask, int16_t *base, vuint16m2_t bindex, vint16m2x2_t v_tuple, size_t vl) {
  return __riscv_vsoxseg2ei16_v_i16m2x2_m(mask, base, bindex, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i16m4x2_m
// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 16 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv32i8_2t.nxv16i16.nxv16i1.i64(target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 16 x i16> [[BINDEX]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 4)
// CHECK-RV64-NEXT: ret void
//
void test_vsoxseg2ei16_v_i16m4x2_m(vbool4_t mask, int16_t *base, vuint16m4_t bindex, vint16m4x2_t v_tuple, size_t vl) {
  return __riscv_vsoxseg2ei16_v_i16m4x2_m(mask, base, bindex, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i32mf2x2_m
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 4 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv4i8_2t.nxv1i16.nxv1i1.i64(target("riscv.vector.tuple", <vscale x 4 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 1 x i16> [[BINDEX]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 5)
// CHECK-RV64-NEXT: ret void
//
void test_vsoxseg2ei16_v_i32mf2x2_m(vbool64_t mask, int32_t *base, vuint16mf4_t bindex, vint32mf2x2_t v_tuple, size_t vl) {
  return __riscv_vsoxseg2ei16_v_i32mf2x2_m(mask, base, bindex, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i32m1x2_m
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv8i8_2t.nxv2i16.nxv2i1.i64(target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 2 x i16> [[BINDEX]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 5)
// CHECK-RV64-NEXT: ret void
//
void test_vsoxseg2ei16_v_i32m1x2_m(vbool32_t mask, int32_t *base, vuint16mf2_t bindex, vint32m1x2_t v_tuple, size_t vl) {
  return __riscv_vsoxseg2ei16_v_i32m1x2_m(mask, base, bindex, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i32m2x2_m
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv16i8_2t.nxv4i16.nxv4i1.i64(target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 4 x i16> [[BINDEX]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 5)
// CHECK-RV64-NEXT: ret void
//
void test_vsoxseg2ei16_v_i32m2x2_m(vbool16_t mask, int32_t *base, vuint16m1_t bindex, vint32m2x2_t v_tuple, size_t vl) {
  return __riscv_vsoxseg2ei16_v_i32m2x2_m(mask, base, bindex, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i32m4x2_m
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv32i8_2t.nxv8i16.nxv8i1.i64(target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 8 x i16> [[BINDEX]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 5)
// CHECK-RV64-NEXT: ret void
//
void test_vsoxseg2ei16_v_i32m4x2_m(vbool8_t mask, int32_t *base, vuint16m2_t bindex, vint32m4x2_t v_tuple, size_t vl) {
  return __riscv_vsoxseg2ei16_v_i32m4x2_m(mask, base, bindex, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i64m1x2_m
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv8i8_2t.nxv1i16.nxv1i1.i64(target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 1 x i16> [[BINDEX]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 6)
// CHECK-RV64-NEXT: ret void
//
void test_vsoxseg2ei16_v_i64m1x2_m(vbool64_t mask, int64_t *base, vuint16mf4_t bindex, vint64m1x2_t v_tuple, size_t vl) {
  return __riscv_vsoxseg2ei16_v_i64m1x2_m(mask, base, bindex, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i64m2x2_m
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv16i8_2t.nxv2i16.nxv2i1.i64(target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 2 x i16> [[BINDEX]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 6)
// CHECK-RV64-NEXT: ret void
//
void test_vsoxseg2ei16_v_i64m2x2_m(vbool32_t mask, int64_t *base, vuint16mf2_t bindex, vint64m2x2_t v_tuple, size_t vl) {
  return __riscv_vsoxseg2ei16_v_i64m2x2_m(mask, base, bindex, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_i64m4x2_m
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv32i8_2t.nxv4i16.nxv4i1.i64(target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 4 x i16> [[BINDEX]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 6)
// CHECK-RV64-NEXT: ret void
//
void test_vsoxseg2ei16_v_i64m4x2_m(vbool16_t mask, int64_t *base, vuint16m1_t bindex, vint64m4x2_t v_tuple, size_t vl) {
  return __riscv_vsoxseg2ei16_v_i64m4x2_m(mask, base, bindex, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u8mf8x2_m
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 1 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv1i8_2t.nxv1i16.nxv1i1.i64(target("riscv.vector.tuple", <vscale x 1 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 1 x i16> [[BINDEX]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT: ret void
//
void test_vsoxseg2ei16_v_u8mf8x2_m(vbool64_t mask, uint8_t *base, vuint16mf4_t bindex, vuint8mf8x2_t v_tuple, size_t vl) {
  return __riscv_vsoxseg2ei16_v_u8mf8x2_m(mask, base, bindex, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u8mf4x2_m
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 2 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv2i8_2t.nxv2i16.nxv2i1.i64(target("riscv.vector.tuple", <vscale x 2 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 2 x i16> [[BINDEX]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT: ret void
//
void test_vsoxseg2ei16_v_u8mf4x2_m(vbool32_t mask, uint8_t *base, vuint16mf2_t bindex, vuint8mf4x2_t v_tuple, size_t vl) {
  return __riscv_vsoxseg2ei16_v_u8mf4x2_m(mask, base, bindex, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u8mf2x2_m
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 4 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv4i8_2t.nxv4i16.nxv4i1.i64(target("riscv.vector.tuple", <vscale x 4 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 4 x i16> [[BINDEX]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT: ret void
//
void test_vsoxseg2ei16_v_u8mf2x2_m(vbool16_t mask, uint8_t *base, vuint16m1_t bindex, vuint8mf2x2_t v_tuple, size_t vl) {
  return __riscv_vsoxseg2ei16_v_u8mf2x2_m(mask, base, bindex, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u8m1x2_m
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv8i8_2t.nxv8i16.nxv8i1.i64(target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 8 x i16> [[BINDEX]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT: ret void
//
void test_vsoxseg2ei16_v_u8m1x2_m(vbool8_t mask, uint8_t *base, vuint16m2_t bindex, vuint8m1x2_t v_tuple, size_t vl) {
  return __riscv_vsoxseg2ei16_v_u8m1x2_m(mask, base, bindex, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u8m2x2_m
// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 16 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv16i8_2t.nxv16i16.nxv16i1.i64(target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 16 x i16> [[BINDEX]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT: ret void
//
void test_vsoxseg2ei16_v_u8m2x2_m(vbool4_t mask, uint8_t *base, vuint16m4_t bindex, vuint8m2x2_t v_tuple, size_t vl) {
  return __riscv_vsoxseg2ei16_v_u8m2x2_m(mask, base, bindex, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u8m4x2_m
// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 32 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv32i8_2t.nxv32i16.nxv32i1.i64(target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 32 x i16> [[BINDEX]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 3)
// CHECK-RV64-NEXT: ret void
//
void test_vsoxseg2ei16_v_u8m4x2_m(vbool2_t mask, uint8_t *base, vuint16m8_t bindex, vuint8m4x2_t v_tuple, size_t vl) {
  return __riscv_vsoxseg2ei16_v_u8m4x2_m(mask, base, bindex, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u16mf4x2_m
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 2 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv2i8_2t.nxv1i16.nxv1i1.i64(target("riscv.vector.tuple", <vscale x 2 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 1 x i16> [[BINDEX]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 4)
// CHECK-RV64-NEXT: ret void
//
void test_vsoxseg2ei16_v_u16mf4x2_m(vbool64_t mask, uint16_t *base, vuint16mf4_t bindex, vuint16mf4x2_t v_tuple, size_t vl) {
  return __riscv_vsoxseg2ei16_v_u16mf4x2_m(mask, base, bindex, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u16mf2x2_m
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 4 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv4i8_2t.nxv2i16.nxv2i1.i64(target("riscv.vector.tuple", <vscale x 4 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 2 x i16> [[BINDEX]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 4)
// CHECK-RV64-NEXT: ret void
//
void test_vsoxseg2ei16_v_u16mf2x2_m(vbool32_t mask, uint16_t *base, vuint16mf2_t bindex, vuint16mf2x2_t v_tuple, size_t vl) {
  return __riscv_vsoxseg2ei16_v_u16mf2x2_m(mask, base, bindex, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u16m1x2_m
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv8i8_2t.nxv4i16.nxv4i1.i64(target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 4 x i16> [[BINDEX]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 4)
// CHECK-RV64-NEXT: ret void
//
void test_vsoxseg2ei16_v_u16m1x2_m(vbool16_t mask, uint16_t *base, vuint16m1_t bindex, vuint16m1x2_t v_tuple, size_t vl) {
  return __riscv_vsoxseg2ei16_v_u16m1x2_m(mask, base, bindex, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u16m2x2_m
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv16i8_2t.nxv8i16.nxv8i1.i64(target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 8 x i16> [[BINDEX]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 4)
// CHECK-RV64-NEXT: ret void
//
void test_vsoxseg2ei16_v_u16m2x2_m(vbool8_t mask, uint16_t *base, vuint16m2_t bindex, vuint16m2x2_t v_tuple, size_t vl) {
  return __riscv_vsoxseg2ei16_v_u16m2x2_m(mask, base, bindex, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u16m4x2_m
// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 16 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv32i8_2t.nxv16i16.nxv16i1.i64(target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 16 x i16> [[BINDEX]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 4)
// CHECK-RV64-NEXT: ret void
//
void test_vsoxseg2ei16_v_u16m4x2_m(vbool4_t mask, uint16_t *base, vuint16m4_t bindex, vuint16m4x2_t v_tuple, size_t vl) {
  return __riscv_vsoxseg2ei16_v_u16m4x2_m(mask, base, bindex, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u32mf2x2_m
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 4 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv4i8_2t.nxv1i16.nxv1i1.i64(target("riscv.vector.tuple", <vscale x 4 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 1 x i16> [[BINDEX]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 5)
// CHECK-RV64-NEXT: ret void
//
void test_vsoxseg2ei16_v_u32mf2x2_m(vbool64_t mask, uint32_t *base, vuint16mf4_t bindex, vuint32mf2x2_t v_tuple, size_t vl) {
  return __riscv_vsoxseg2ei16_v_u32mf2x2_m(mask, base, bindex, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u32m1x2_m
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv8i8_2t.nxv2i16.nxv2i1.i64(target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 2 x i16> [[BINDEX]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 5)
// CHECK-RV64-NEXT: ret void
//
void test_vsoxseg2ei16_v_u32m1x2_m(vbool32_t mask, uint32_t *base, vuint16mf2_t bindex, vuint32m1x2_t v_tuple, size_t vl) {
  return __riscv_vsoxseg2ei16_v_u32m1x2_m(mask, base, bindex, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u32m2x2_m
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv16i8_2t.nxv4i16.nxv4i1.i64(target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 4 x i16> [[BINDEX]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 5)
// CHECK-RV64-NEXT: ret void
//
void test_vsoxseg2ei16_v_u32m2x2_m(vbool16_t mask, uint32_t *base, vuint16m1_t bindex, vuint32m2x2_t v_tuple, size_t vl) {
  return __riscv_vsoxseg2ei16_v_u32m2x2_m(mask, base, bindex, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u32m4x2_m
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 8 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv32i8_2t.nxv8i16.nxv8i1.i64(target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 8 x i16> [[BINDEX]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 5)
// CHECK-RV64-NEXT: ret void
//
void test_vsoxseg2ei16_v_u32m4x2_m(vbool8_t mask, uint32_t *base, vuint16m2_t bindex, vuint32m4x2_t v_tuple, size_t vl) {
  return __riscv_vsoxseg2ei16_v_u32m4x2_m(mask, base, bindex, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u64m1x2_m
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 1 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv8i8_2t.nxv1i16.nxv1i1.i64(target("riscv.vector.tuple", <vscale x 8 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 1 x i16> [[BINDEX]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 6)
// CHECK-RV64-NEXT: ret void
//
void test_vsoxseg2ei16_v_u64m1x2_m(vbool64_t mask, uint64_t *base, vuint16mf4_t bindex, vuint64m1x2_t v_tuple, size_t vl) {
  return __riscv_vsoxseg2ei16_v_u64m1x2_m(mask, base, bindex, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u64m2x2_m
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 2 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv16i8_2t.nxv2i16.nxv2i1.i64(target("riscv.vector.tuple", <vscale x 16 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 2 x i16> [[BINDEX]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 6)
// CHECK-RV64-NEXT: ret void
//
void test_vsoxseg2ei16_v_u64m2x2_m(vbool32_t mask, uint64_t *base, vuint16mf2_t bindex, vuint64m2x2_t v_tuple, size_t vl) {
  return __riscv_vsoxseg2ei16_v_u64m2x2_m(mask, base, bindex, v_tuple, vl);
}

// CHECK-RV64-LABEL: define dso_local void @test_vsoxseg2ei16_v_u64m4x2_m
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], ptr noundef [[BASE:%.*]], <vscale x 4 x i16> [[BINDEX:%.*]], target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: call void @llvm.riscv.vsoxseg2.mask.triscv.vector.tuple_nxv32i8_2t.nxv4i16.nxv4i1.i64(target("riscv.vector.tuple", <vscale x 32 x i8>, 2) [[V_TUPLE]], ptr [[BASE]], <vscale x 4 x i16> [[BINDEX]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 6)
// CHECK-RV64-NEXT: ret void
//
void test_vsoxseg2ei16_v_u64m4x2_m(vbool16_t mask, uint64_t *base, vuint16m1_t bindex, vuint64m4x2_t v_tuple, size_t vl) {
  return __riscv_vsoxseg2ei16_v_u64m4x2_m(mask, base, bindex, v_tuple, vl);
}
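
// The wrappers above only pin down the IR lowering of the masked, 16-bit-indexed
// segment-2 stores. The function below is an illustrative, hand-written sketch
// (not autogenerated, not covered by the CHECK lines) of how one of these
// intrinsics might be used from application code. The helper name
// scatter_u32_pairs_masked and its parameters (dst, src, byte_off, limit, n) are
// made up for this sketch; the __riscv_* calls are standard intrinsics from the
// <riscv_vector.h> include above.
void scatter_u32_pairs_masked(uint32_t *dst, const uint32_t *src,
                              const uint16_t *byte_off, uint16_t limit,
                              size_t n) {
  for (size_t done = 0; done < n;) {
    // Number of {a, b} pairs handled this iteration.
    size_t vl = __riscv_vsetvl_e32m1(n - done);
    // Segment load: deinterleave vl pairs from src into a 2-field register tuple.
    vuint32m1x2_t pair = __riscv_vlseg2e32_v_u32m1x2(src + 2 * done, vl);
    // 16-bit byte offsets into dst for the indexed store.
    vuint16mf2_t off = __riscv_vle16_v_u16mf2(byte_off + done, vl);
    // Only scatter segments whose byte offset stays below limit.
    vbool32_t m = __riscv_vmsltu_vx_u16mf2_b32(off, limit, vl);
    // Masked ordered indexed segment store, as exercised by the tests above.
    __riscv_vsoxseg2ei16_v_u32m1x2_m(m, dst, off, pair, vl);
    done += vl;
  }
}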