// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
// REQUIRES: riscv-registered-target
// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zfh \
// RUN:   -target-feature +zvfh -disable-O0-optnone  \
// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
// RUN:   FileCheck --check-prefix=CHECK-RV64 %s

#include <riscv_vector.h>
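
// Note on the policy suffixes exercised below (inferred from the policy
// operand checked in the masked calls): _tu keeps the tail undisturbed
// (unmasked form), _tum is the masked form with tail undisturbed and mask
// agnostic (policy 2), _tumu keeps both tail and mask undisturbed
// (policy 0), and _mu keeps the mask undisturbed with an agnostic tail
// (policy 1).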

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x double> @test_vle64_v_f64m1_tu
// CHECK-RV64-SAME: (<vscale x 1 x double> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x double> @llvm.riscv.vle.nxv1f64.i64(<vscale x 1 x double> [[MASKEDOFF]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 1 x double> [[TMP0]]
//
vfloat64m1_t test_vle64_v_f64m1_tu(vfloat64m1_t maskedoff, const double *base, size_t vl) {
  return __riscv_vle64_tu(maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x double> @test_vle64_v_f64m2_tu
// CHECK-RV64-SAME: (<vscale x 2 x double> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x double> @llvm.riscv.vle.nxv2f64.i64(<vscale x 2 x double> [[MASKEDOFF]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 2 x double> [[TMP0]]
//
vfloat64m2_t test_vle64_v_f64m2_tu(vfloat64m2_t maskedoff, const double *base, size_t vl) {
  return __riscv_vle64_tu(maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x double> @test_vle64_v_f64m4_tu
// CHECK-RV64-SAME: (<vscale x 4 x double> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x double> @llvm.riscv.vle.nxv4f64.i64(<vscale x 4 x double> [[MASKEDOFF]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 4 x double> [[TMP0]]
//
vfloat64m4_t test_vle64_v_f64m4_tu(vfloat64m4_t maskedoff, const double *base, size_t vl) {
  return __riscv_vle64_tu(maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x double> @test_vle64_v_f64m8_tu
// CHECK-RV64-SAME: (<vscale x 8 x double> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x double> @llvm.riscv.vle.nxv8f64.i64(<vscale x 8 x double> [[MASKEDOFF]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 8 x double> [[TMP0]]
//
vfloat64m8_t test_vle64_v_f64m8_tu(vfloat64m8_t maskedoff, const double *base, size_t vl) {
  return __riscv_vle64_tu(maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vle64_v_i64m1_tu
// CHECK-RV64-SAME: (<vscale x 1 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vle.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vint64m1_t test_vle64_v_i64m1_tu(vint64m1_t maskedoff, const int64_t *base, size_t vl) {
  return __riscv_vle64_tu(maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vle64_v_i64m2_tu
// CHECK-RV64-SAME: (<vscale x 2 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vle.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
//
vint64m2_t test_vle64_v_i64m2_tu(vint64m2_t maskedoff, const int64_t *base, size_t vl) {
  return __riscv_vle64_tu(maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vle64_v_i64m4_tu
// CHECK-RV64-SAME: (<vscale x 4 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vle.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
//
vint64m4_t test_vle64_v_i64m4_tu(vint64m4_t maskedoff, const int64_t *base, size_t vl) {
  return __riscv_vle64_tu(maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vle64_v_i64m8_tu
// CHECK-RV64-SAME: (<vscale x 8 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vle.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
//
vint64m8_t test_vle64_v_i64m8_tu(vint64m8_t maskedoff, const int64_t *base, size_t vl) {
  return __riscv_vle64_tu(maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vle64_v_u64m1_tu
// CHECK-RV64-SAME: (<vscale x 1 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vle.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vle64_v_u64m1_tu(vuint64m1_t maskedoff, const uint64_t *base, size_t vl) {
  return __riscv_vle64_tu(maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vle64_v_u64m2_tu
// CHECK-RV64-SAME: (<vscale x 2 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vle.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_vle64_v_u64m2_tu(vuint64m2_t maskedoff, const uint64_t *base, size_t vl) {
  return __riscv_vle64_tu(maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vle64_v_u64m4_tu
// CHECK-RV64-SAME: (<vscale x 4 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vle.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_vle64_v_u64m4_tu(vuint64m4_t maskedoff, const uint64_t *base, size_t vl) {
  return __riscv_vle64_tu(maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vle64_v_u64m8_tu
// CHECK-RV64-SAME: (<vscale x 8 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vle.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], ptr [[BASE]], i64 [[VL]])
// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_vle64_v_u64m8_tu(vuint64m8_t maskedoff, const uint64_t *base, size_t vl) {
  return __riscv_vle64_tu(maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x double> @test_vle64_v_f64m1_tum
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x double> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x double> @llvm.riscv.vle.mask.nxv1f64.i64(<vscale x 1 x double> [[MASKEDOFF]], ptr [[BASE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT:    ret <vscale x 1 x double> [[TMP0]]
//
vfloat64m1_t test_vle64_v_f64m1_tum(vbool64_t mask, vfloat64m1_t maskedoff, const double *base, size_t vl) {
  return __riscv_vle64_tum(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x double> @test_vle64_v_f64m2_tum
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x double> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x double> @llvm.riscv.vle.mask.nxv2f64.i64(<vscale x 2 x double> [[MASKEDOFF]], ptr [[BASE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT:    ret <vscale x 2 x double> [[TMP0]]
//
vfloat64m2_t test_vle64_v_f64m2_tum(vbool32_t mask, vfloat64m2_t maskedoff, const double *base, size_t vl) {
  return __riscv_vle64_tum(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x double> @test_vle64_v_f64m4_tum
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x double> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x double> @llvm.riscv.vle.mask.nxv4f64.i64(<vscale x 4 x double> [[MASKEDOFF]], ptr [[BASE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT:    ret <vscale x 4 x double> [[TMP0]]
//
vfloat64m4_t test_vle64_v_f64m4_tum(vbool16_t mask, vfloat64m4_t maskedoff, const double *base, size_t vl) {
  return __riscv_vle64_tum(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x double> @test_vle64_v_f64m8_tum
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x double> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x double> @llvm.riscv.vle.mask.nxv8f64.i64(<vscale x 8 x double> [[MASKEDOFF]], ptr [[BASE]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT:    ret <vscale x 8 x double> [[TMP0]]
//
vfloat64m8_t test_vle64_v_f64m8_tum(vbool8_t mask, vfloat64m8_t maskedoff, const double *base, size_t vl) {
  return __riscv_vle64_tum(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vle64_v_i64m1_tum
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vle.mask.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], ptr [[BASE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vint64m1_t test_vle64_v_i64m1_tum(vbool64_t mask, vint64m1_t maskedoff, const int64_t *base, size_t vl) {
  return __riscv_vle64_tum(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vle64_v_i64m2_tum
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vle.mask.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], ptr [[BASE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
//
vint64m2_t test_vle64_v_i64m2_tum(vbool32_t mask, vint64m2_t maskedoff, const int64_t *base, size_t vl) {
  return __riscv_vle64_tum(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vle64_v_i64m4_tum
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vle.mask.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], ptr [[BASE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
//
vint64m4_t test_vle64_v_i64m4_tum(vbool16_t mask, vint64m4_t maskedoff, const int64_t *base, size_t vl) {
  return __riscv_vle64_tum(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vle64_v_i64m8_tum
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vle.mask.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], ptr [[BASE]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
//
vint64m8_t test_vle64_v_i64m8_tum(vbool8_t mask, vint64m8_t maskedoff, const int64_t *base, size_t vl) {
  return __riscv_vle64_tum(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vle64_v_u64m1_tum
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vle.mask.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], ptr [[BASE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vle64_v_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, const uint64_t *base, size_t vl) {
  return __riscv_vle64_tum(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vle64_v_u64m2_tum
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vle.mask.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], ptr [[BASE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_vle64_v_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, const uint64_t *base, size_t vl) {
  return __riscv_vle64_tum(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vle64_v_u64m4_tum
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vle.mask.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], ptr [[BASE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_vle64_v_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, const uint64_t *base, size_t vl) {
  return __riscv_vle64_tum(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vle64_v_u64m8_tum
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vle.mask.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], ptr [[BASE]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 2)
// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_vle64_v_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, const uint64_t *base, size_t vl) {
  return __riscv_vle64_tum(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x double> @test_vle64_v_f64m1_tumu
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x double> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x double> @llvm.riscv.vle.mask.nxv1f64.i64(<vscale x 1 x double> [[MASKEDOFF]], ptr [[BASE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT:    ret <vscale x 1 x double> [[TMP0]]
//
vfloat64m1_t test_vle64_v_f64m1_tumu(vbool64_t mask, vfloat64m1_t maskedoff, const double *base, size_t vl) {
  return __riscv_vle64_tumu(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x double> @test_vle64_v_f64m2_tumu
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x double> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x double> @llvm.riscv.vle.mask.nxv2f64.i64(<vscale x 2 x double> [[MASKEDOFF]], ptr [[BASE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT:    ret <vscale x 2 x double> [[TMP0]]
//
vfloat64m2_t test_vle64_v_f64m2_tumu(vbool32_t mask, vfloat64m2_t maskedoff, const double *base, size_t vl) {
  return __riscv_vle64_tumu(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x double> @test_vle64_v_f64m4_tumu
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x double> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x double> @llvm.riscv.vle.mask.nxv4f64.i64(<vscale x 4 x double> [[MASKEDOFF]], ptr [[BASE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT:    ret <vscale x 4 x double> [[TMP0]]
//
vfloat64m4_t test_vle64_v_f64m4_tumu(vbool16_t mask, vfloat64m4_t maskedoff, const double *base, size_t vl) {
  return __riscv_vle64_tumu(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x double> @test_vle64_v_f64m8_tumu
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x double> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x double> @llvm.riscv.vle.mask.nxv8f64.i64(<vscale x 8 x double> [[MASKEDOFF]], ptr [[BASE]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT:    ret <vscale x 8 x double> [[TMP0]]
//
vfloat64m8_t test_vle64_v_f64m8_tumu(vbool8_t mask, vfloat64m8_t maskedoff, const double *base, size_t vl) {
  return __riscv_vle64_tumu(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vle64_v_i64m1_tumu
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vle.mask.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], ptr [[BASE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vint64m1_t test_vle64_v_i64m1_tumu(vbool64_t mask, vint64m1_t maskedoff, const int64_t *base, size_t vl) {
  return __riscv_vle64_tumu(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vle64_v_i64m2_tumu
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vle.mask.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], ptr [[BASE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
//
vint64m2_t test_vle64_v_i64m2_tumu(vbool32_t mask, vint64m2_t maskedoff, const int64_t *base, size_t vl) {
  return __riscv_vle64_tumu(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vle64_v_i64m4_tumu
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vle.mask.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], ptr [[BASE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
//
vint64m4_t test_vle64_v_i64m4_tumu(vbool16_t mask, vint64m4_t maskedoff, const int64_t *base, size_t vl) {
  return __riscv_vle64_tumu(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vle64_v_i64m8_tumu
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vle.mask.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], ptr [[BASE]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
//
vint64m8_t test_vle64_v_i64m8_tumu(vbool8_t mask, vint64m8_t maskedoff, const int64_t *base, size_t vl) {
  return __riscv_vle64_tumu(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vle64_v_u64m1_tumu
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vle.mask.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], ptr [[BASE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vle64_v_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, const uint64_t *base, size_t vl) {
  return __riscv_vle64_tumu(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vle64_v_u64m2_tumu
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vle.mask.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], ptr [[BASE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_vle64_v_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, const uint64_t *base, size_t vl) {
  return __riscv_vle64_tumu(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vle64_v_u64m4_tumu
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vle.mask.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], ptr [[BASE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_vle64_v_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, const uint64_t *base, size_t vl) {
  return __riscv_vle64_tumu(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vle64_v_u64m8_tumu
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vle.mask.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], ptr [[BASE]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 0)
// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_vle64_v_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, const uint64_t *base, size_t vl) {
  return __riscv_vle64_tumu(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x double> @test_vle64_v_f64m1_mu
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x double> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x double> @llvm.riscv.vle.mask.nxv1f64.i64(<vscale x 1 x double> [[MASKEDOFF]], ptr [[BASE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT:    ret <vscale x 1 x double> [[TMP0]]
//
vfloat64m1_t test_vle64_v_f64m1_mu(vbool64_t mask, vfloat64m1_t maskedoff, const double *base, size_t vl) {
  return __riscv_vle64_mu(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x double> @test_vle64_v_f64m2_mu
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x double> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x double> @llvm.riscv.vle.mask.nxv2f64.i64(<vscale x 2 x double> [[MASKEDOFF]], ptr [[BASE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT:    ret <vscale x 2 x double> [[TMP0]]
//
vfloat64m2_t test_vle64_v_f64m2_mu(vbool32_t mask, vfloat64m2_t maskedoff, const double *base, size_t vl) {
  return __riscv_vle64_mu(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x double> @test_vle64_v_f64m4_mu
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x double> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x double> @llvm.riscv.vle.mask.nxv4f64.i64(<vscale x 4 x double> [[MASKEDOFF]], ptr [[BASE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT:    ret <vscale x 4 x double> [[TMP0]]
//
vfloat64m4_t test_vle64_v_f64m4_mu(vbool16_t mask, vfloat64m4_t maskedoff, const double *base, size_t vl) {
  return __riscv_vle64_mu(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x double> @test_vle64_v_f64m8_mu
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x double> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x double> @llvm.riscv.vle.mask.nxv8f64.i64(<vscale x 8 x double> [[MASKEDOFF]], ptr [[BASE]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT:    ret <vscale x 8 x double> [[TMP0]]
//
vfloat64m8_t test_vle64_v_f64m8_mu(vbool8_t mask, vfloat64m8_t maskedoff, const double *base, size_t vl) {
  return __riscv_vle64_mu(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vle64_v_i64m1_mu
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vle.mask.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], ptr [[BASE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vint64m1_t test_vle64_v_i64m1_mu(vbool64_t mask, vint64m1_t maskedoff, const int64_t *base, size_t vl) {
  return __riscv_vle64_mu(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vle64_v_i64m2_mu
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vle.mask.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], ptr [[BASE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
//
vint64m2_t test_vle64_v_i64m2_mu(vbool32_t mask, vint64m2_t maskedoff, const int64_t *base, size_t vl) {
  return __riscv_vle64_mu(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vle64_v_i64m4_mu
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vle.mask.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], ptr [[BASE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
//
vint64m4_t test_vle64_v_i64m4_mu(vbool16_t mask, vint64m4_t maskedoff, const int64_t *base, size_t vl) {
  return __riscv_vle64_mu(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vle64_v_i64m8_mu
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vle.mask.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], ptr [[BASE]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
//
vint64m8_t test_vle64_v_i64m8_mu(vbool8_t mask, vint64m8_t maskedoff, const int64_t *base, size_t vl) {
  return __riscv_vle64_mu(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vle64_v_u64m1_mu
// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vle.mask.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], ptr [[BASE]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vle64_v_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, const uint64_t *base, size_t vl) {
  return __riscv_vle64_mu(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vle64_v_u64m2_mu
// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vle.mask.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], ptr [[BASE]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_vle64_v_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, const uint64_t *base, size_t vl) {
  return __riscv_vle64_mu(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vle64_v_u64m4_mu
// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vle.mask.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], ptr [[BASE]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_vle64_v_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, const uint64_t *base, size_t vl) {
  return __riscv_vle64_mu(mask, maskedoff, base, vl);
}

// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vle64_v_u64m8_mu
// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], ptr noundef [[BASE:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vle.mask.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], ptr [[BASE]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 1)
// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_vle64_v_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, const uint64_t *base, size_t vl) {
  return __riscv_vle64_mu(mask, maskedoff, base, vl);
}