// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
// REQUIRES: riscv-registered-target
// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zfh \
// RUN:   -target-feature +zvfhmin -disable-O0-optnone \
// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
// RUN:   FileCheck --check-prefix=CHECK-RV64 %s

#include <riscv_vector.h>

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 2 x i8>, 7) @test_vlseg7e16ff_v_f16mf4x7_tu
// CHECK-RV64-SAME: (target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.triscv.vector.tuple_nxv2i8_7t.i64(target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], i64 [[VL]], i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[TMP1]]
//
vfloat16mf4x7_t test_vlseg7e16ff_v_f16mf4x7_tu(vfloat16mf4x7_t maskedoff_tuple, const _Float16 *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_f16mf4x7_tu(maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 4 x i8>, 7) @test_vlseg7e16ff_v_f16mf2x7_tu
// CHECK-RV64-SAME: (target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.triscv.vector.tuple_nxv4i8_7t.i64(target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], i64 [[VL]], i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[TMP1]]
//
vfloat16mf2x7_t test_vlseg7e16ff_v_f16mf2x7_tu(vfloat16mf2x7_t maskedoff_tuple, const _Float16 *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_f16mf2x7_tu(maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 8 x i8>, 7) @test_vlseg7e16ff_v_f16m1x7_tu
// CHECK-RV64-SAME: (target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.triscv.vector.tuple_nxv8i8_7t.i64(target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], i64 [[VL]], i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[TMP1]]
//
vfloat16m1x7_t test_vlseg7e16ff_v_f16m1x7_tu(vfloat16m1x7_t maskedoff_tuple, const _Float16 *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_f16m1x7_tu(maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 2 x i8>, 7) @test_vlseg7e16ff_v_i16mf4x7_tu
// CHECK-RV64-SAME: (target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.triscv.vector.tuple_nxv2i8_7t.i64(target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], i64 [[VL]], i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[TMP1]]
//
vint16mf4x7_t test_vlseg7e16ff_v_i16mf4x7_tu(vint16mf4x7_t maskedoff_tuple, const int16_t *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_i16mf4x7_tu(maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 4 x i8>, 7) @test_vlseg7e16ff_v_i16mf2x7_tu
// CHECK-RV64-SAME: (target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.triscv.vector.tuple_nxv4i8_7t.i64(target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], i64 [[VL]], i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[TMP1]]
//
vint16mf2x7_t test_vlseg7e16ff_v_i16mf2x7_tu(vint16mf2x7_t maskedoff_tuple, const int16_t *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_i16mf2x7_tu(maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 8 x i8>, 7) @test_vlseg7e16ff_v_i16m1x7_tu
// CHECK-RV64-SAME: (target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.triscv.vector.tuple_nxv8i8_7t.i64(target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], i64 [[VL]], i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[TMP1]]
//
vint16m1x7_t test_vlseg7e16ff_v_i16m1x7_tu(vint16m1x7_t maskedoff_tuple, const int16_t *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_i16m1x7_tu(maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 2 x i8>, 7) @test_vlseg7e16ff_v_u16mf4x7_tu
// CHECK-RV64-SAME: (target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.triscv.vector.tuple_nxv2i8_7t.i64(target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], i64 [[VL]], i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[TMP1]]
//
vuint16mf4x7_t test_vlseg7e16ff_v_u16mf4x7_tu(vuint16mf4x7_t maskedoff_tuple, const uint16_t *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_u16mf4x7_tu(maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 4 x i8>, 7) @test_vlseg7e16ff_v_u16mf2x7_tu
// CHECK-RV64-SAME: (target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.triscv.vector.tuple_nxv4i8_7t.i64(target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], i64 [[VL]], i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[TMP1]]
//
vuint16mf2x7_t test_vlseg7e16ff_v_u16mf2x7_tu(vuint16mf2x7_t maskedoff_tuple, const uint16_t *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_u16mf2x7_tu(maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 8 x i8>, 7) @test_vlseg7e16ff_v_u16m1x7_tu
// CHECK-RV64-SAME: (target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.triscv.vector.tuple_nxv8i8_7t.i64(target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], i64 [[VL]], i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[TMP1]]
//
vuint16m1x7_t test_vlseg7e16ff_v_u16m1x7_tu(vuint16m1x7_t maskedoff_tuple, const uint16_t *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_u16m1x7_tu(maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 2 x i8>, 7) @test_vlseg7e16ff_v_f16mf4x7_tum
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.mask.triscv.vector.tuple_nxv2i8_7t.i64.nxv1i1(target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 2, i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[TMP1]]
//
vfloat16mf4x7_t test_vlseg7e16ff_v_f16mf4x7_tum(vbool64_t mask, vfloat16mf4x7_t maskedoff_tuple, const _Float16 *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_f16mf4x7_tum(mask, maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 4 x i8>, 7) @test_vlseg7e16ff_v_f16mf2x7_tum
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.mask.triscv.vector.tuple_nxv4i8_7t.i64.nxv2i1(target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 2, i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[TMP1]]
//
vfloat16mf2x7_t test_vlseg7e16ff_v_f16mf2x7_tum(vbool32_t mask, vfloat16mf2x7_t maskedoff_tuple, const _Float16 *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_f16mf2x7_tum(mask, maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 8 x i8>, 7) @test_vlseg7e16ff_v_f16m1x7_tum
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.mask.triscv.vector.tuple_nxv8i8_7t.i64.nxv4i1(target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 2, i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[TMP1]]
//
vfloat16m1x7_t test_vlseg7e16ff_v_f16m1x7_tum(vbool16_t mask, vfloat16m1x7_t maskedoff_tuple, const _Float16 *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_f16m1x7_tum(mask, maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 2 x i8>, 7) @test_vlseg7e16ff_v_i16mf4x7_tum
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.mask.triscv.vector.tuple_nxv2i8_7t.i64.nxv1i1(target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 2, i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[TMP1]]
//
vint16mf4x7_t test_vlseg7e16ff_v_i16mf4x7_tum(vbool64_t mask, vint16mf4x7_t maskedoff_tuple, const int16_t *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_i16mf4x7_tum(mask, maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 4 x i8>, 7) @test_vlseg7e16ff_v_i16mf2x7_tum
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.mask.triscv.vector.tuple_nxv4i8_7t.i64.nxv2i1(target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 2, i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[TMP1]]
//
vint16mf2x7_t test_vlseg7e16ff_v_i16mf2x7_tum(vbool32_t mask, vint16mf2x7_t maskedoff_tuple, const int16_t *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_i16mf2x7_tum(mask, maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 8 x i8>, 7) @test_vlseg7e16ff_v_i16m1x7_tum
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.mask.triscv.vector.tuple_nxv8i8_7t.i64.nxv4i1(target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 2, i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[TMP1]]
//
vint16m1x7_t test_vlseg7e16ff_v_i16m1x7_tum(vbool16_t mask, vint16m1x7_t maskedoff_tuple, const int16_t *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_i16m1x7_tum(mask, maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 2 x i8>, 7) @test_vlseg7e16ff_v_u16mf4x7_tum
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.mask.triscv.vector.tuple_nxv2i8_7t.i64.nxv1i1(target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 2, i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[TMP1]]
//
vuint16mf4x7_t test_vlseg7e16ff_v_u16mf4x7_tum(vbool64_t mask, vuint16mf4x7_t maskedoff_tuple, const uint16_t *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_u16mf4x7_tum(mask, maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 4 x i8>, 7) @test_vlseg7e16ff_v_u16mf2x7_tum
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.mask.triscv.vector.tuple_nxv4i8_7t.i64.nxv2i1(target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 2, i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[TMP1]]
//
vuint16mf2x7_t test_vlseg7e16ff_v_u16mf2x7_tum(vbool32_t mask, vuint16mf2x7_t maskedoff_tuple, const uint16_t *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_u16mf2x7_tum(mask, maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 8 x i8>, 7) @test_vlseg7e16ff_v_u16m1x7_tum
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.mask.triscv.vector.tuple_nxv8i8_7t.i64.nxv4i1(target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 2, i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[TMP1]]
//
vuint16m1x7_t test_vlseg7e16ff_v_u16m1x7_tum(vbool16_t mask, vuint16m1x7_t maskedoff_tuple, const uint16_t *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_u16m1x7_tum(mask, maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 2 x i8>, 7) @test_vlseg7e16ff_v_f16mf4x7_tumu
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.mask.triscv.vector.tuple_nxv2i8_7t.i64.nxv1i1(target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 0, i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[TMP1]]
//
vfloat16mf4x7_t test_vlseg7e16ff_v_f16mf4x7_tumu(vbool64_t mask, vfloat16mf4x7_t maskedoff_tuple, const _Float16 *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_f16mf4x7_tumu(mask, maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 4 x i8>, 7) @test_vlseg7e16ff_v_f16mf2x7_tumu
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.mask.triscv.vector.tuple_nxv4i8_7t.i64.nxv2i1(target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 0, i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[TMP1]]
//
vfloat16mf2x7_t test_vlseg7e16ff_v_f16mf2x7_tumu(vbool32_t mask, vfloat16mf2x7_t maskedoff_tuple, const _Float16 *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_f16mf2x7_tumu(mask, maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 8 x i8>, 7) @test_vlseg7e16ff_v_f16m1x7_tumu
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.mask.triscv.vector.tuple_nxv8i8_7t.i64.nxv4i1(target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 0, i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[TMP1]]
//
vfloat16m1x7_t test_vlseg7e16ff_v_f16m1x7_tumu(vbool16_t mask, vfloat16m1x7_t maskedoff_tuple, const _Float16 *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_f16m1x7_tumu(mask, maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 2 x i8>, 7) @test_vlseg7e16ff_v_i16mf4x7_tumu
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.mask.triscv.vector.tuple_nxv2i8_7t.i64.nxv1i1(target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 0, i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[TMP1]]
//
vint16mf4x7_t test_vlseg7e16ff_v_i16mf4x7_tumu(vbool64_t mask, vint16mf4x7_t maskedoff_tuple, const int16_t *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_i16mf4x7_tumu(mask, maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 4 x i8>, 7) @test_vlseg7e16ff_v_i16mf2x7_tumu
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.mask.triscv.vector.tuple_nxv4i8_7t.i64.nxv2i1(target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 0, i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[TMP1]]
//
vint16mf2x7_t test_vlseg7e16ff_v_i16mf2x7_tumu(vbool32_t mask, vint16mf2x7_t maskedoff_tuple, const int16_t *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_i16mf2x7_tumu(mask, maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 8 x i8>, 7) @test_vlseg7e16ff_v_i16m1x7_tumu
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.mask.triscv.vector.tuple_nxv8i8_7t.i64.nxv4i1(target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 0, i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[TMP1]]
//
vint16m1x7_t test_vlseg7e16ff_v_i16m1x7_tumu(vbool16_t mask, vint16m1x7_t maskedoff_tuple, const int16_t *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_i16m1x7_tumu(mask, maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 2 x i8>, 7) @test_vlseg7e16ff_v_u16mf4x7_tumu
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.mask.triscv.vector.tuple_nxv2i8_7t.i64.nxv1i1(target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 0, i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[TMP1]]
//
vuint16mf4x7_t test_vlseg7e16ff_v_u16mf4x7_tumu(vbool64_t mask, vuint16mf4x7_t maskedoff_tuple, const uint16_t *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_u16mf4x7_tumu(mask, maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 4 x i8>, 7) @test_vlseg7e16ff_v_u16mf2x7_tumu
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.mask.triscv.vector.tuple_nxv4i8_7t.i64.nxv2i1(target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 0, i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[TMP1]]
//
vuint16mf2x7_t test_vlseg7e16ff_v_u16mf2x7_tumu(vbool32_t mask, vuint16mf2x7_t maskedoff_tuple, const uint16_t *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_u16mf2x7_tumu(mask, maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 8 x i8>, 7) @test_vlseg7e16ff_v_u16m1x7_tumu
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.mask.triscv.vector.tuple_nxv8i8_7t.i64.nxv4i1(target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 0, i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[TMP1]]
//
vuint16m1x7_t test_vlseg7e16ff_v_u16m1x7_tumu(vbool16_t mask, vuint16m1x7_t maskedoff_tuple, const uint16_t *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_u16m1x7_tumu(mask, maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 2 x i8>, 7) @test_vlseg7e16ff_v_f16mf4x7_mu
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.mask.triscv.vector.tuple_nxv2i8_7t.i64.nxv1i1(target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 1, i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[TMP1]]
//
vfloat16mf4x7_t test_vlseg7e16ff_v_f16mf4x7_mu(vbool64_t mask, vfloat16mf4x7_t maskedoff_tuple, const _Float16 *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_f16mf4x7_mu(mask, maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 4 x i8>, 7) @test_vlseg7e16ff_v_f16mf2x7_mu
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.mask.triscv.vector.tuple_nxv4i8_7t.i64.nxv2i1(target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 1, i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[TMP1]]
//
vfloat16mf2x7_t test_vlseg7e16ff_v_f16mf2x7_mu(vbool32_t mask, vfloat16mf2x7_t maskedoff_tuple, const _Float16 *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_f16mf2x7_mu(mask, maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 8 x i8>, 7) @test_vlseg7e16ff_v_f16m1x7_mu
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.mask.triscv.vector.tuple_nxv8i8_7t.i64.nxv4i1(target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 1, i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[TMP1]]
//
vfloat16m1x7_t test_vlseg7e16ff_v_f16m1x7_mu(vbool16_t mask, vfloat16m1x7_t maskedoff_tuple, const _Float16 *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_f16m1x7_mu(mask, maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 2 x i8>, 7) @test_vlseg7e16ff_v_i16mf4x7_mu
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.mask.triscv.vector.tuple_nxv2i8_7t.i64.nxv1i1(target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 1, i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[TMP1]]
//
vint16mf4x7_t test_vlseg7e16ff_v_i16mf4x7_mu(vbool64_t mask, vint16mf4x7_t maskedoff_tuple, const int16_t *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_i16mf4x7_mu(mask, maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 4 x i8>, 7) @test_vlseg7e16ff_v_i16mf2x7_mu
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.mask.triscv.vector.tuple_nxv4i8_7t.i64.nxv2i1(target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 1, i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[TMP1]]
//
vint16mf2x7_t test_vlseg7e16ff_v_i16mf2x7_mu(vbool32_t mask, vint16mf2x7_t maskedoff_tuple, const int16_t *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_i16mf2x7_mu(mask, maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 8 x i8>, 7) @test_vlseg7e16ff_v_i16m1x7_mu
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.mask.triscv.vector.tuple_nxv8i8_7t.i64.nxv4i1(target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 1, i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[TMP1]]
//
vint16m1x7_t test_vlseg7e16ff_v_i16m1x7_mu(vbool16_t mask, vint16m1x7_t maskedoff_tuple, const int16_t *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_i16m1x7_mu(mask, maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 2 x i8>, 7) @test_vlseg7e16ff_v_u16mf4x7_mu
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.mask.triscv.vector.tuple_nxv2i8_7t.i64.nxv1i1(target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 1, i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 2 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 2 x i8>, 7) [[TMP1]]
//
vuint16mf4x7_t test_vlseg7e16ff_v_u16mf4x7_mu(vbool64_t mask, vuint16mf4x7_t maskedoff_tuple, const uint16_t *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_u16mf4x7_mu(mask, maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 4 x i8>, 7) @test_vlseg7e16ff_v_u16mf2x7_mu
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.mask.triscv.vector.tuple_nxv4i8_7t.i64.nxv2i1(target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 1, i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 4 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 4 x i8>, 7) [[TMP1]]
//
vuint16mf2x7_t test_vlseg7e16ff_v_u16mf2x7_mu(vbool32_t mask, vuint16mf2x7_t maskedoff_tuple, const uint16_t *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_u16mf2x7_mu(mask, maskedoff_tuple, base, new_vl, vl);
}

// CHECK-RV64-LABEL: define dso_local target("riscv.vector.tuple", <vscale x 8 x i8>, 7) @test_vlseg7e16ff_v_u16m1x7_mu
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[MASKEDOFF_TUPLE:%.*]], ptr noundef [[BASE:%.*]], ptr noundef [[NEW_VL:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } @llvm.riscv.vlseg7ff.mask.triscv.vector.tuple_nxv8i8_7t.i64.nxv4i1(target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[MASKEDOFF_TUPLE]], ptr [[BASE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 1, i64 4)
// CHECK-RV64-NEXT:    [[TMP1:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } [[TMP0]], 0
// CHECK-RV64-NEXT:    [[TMP2:%.*]] = extractvalue { target("riscv.vector.tuple", <vscale x 8 x i8>, 7), i64 } [[TMP0]], 1
// CHECK-RV64-NEXT:    store i64 [[TMP2]], ptr [[NEW_VL]], align 8
// CHECK-RV64-NEXT:    ret target("riscv.vector.tuple", <vscale x 8 x i8>, 7) [[TMP1]]
//
vuint16m1x7_t test_vlseg7e16ff_v_u16m1x7_mu(vbool16_t mask, vuint16m1x7_t maskedoff_tuple, const uint16_t *base, size_t *new_vl, size_t vl) {
  return __riscv_vlseg7e16ff_v_u16m1x7_mu(mask, maskedoff_tuple, base, new_vl, vl);
}