; xref: llvm/test/Transforms/InstCombine/RISCV/riscv-vsetvli-knownbits.ll (revision b1094776152b68efa05f69b7b833f9cbc0727efc)
; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt < %s -passes=instcombine -S | FileCheck %s

declare i32 @llvm.riscv.vsetvli.i32(i32, i32, i32)
declare i64 @llvm.riscv.vsetvli.i64(i64, i64, i64)

; InstCombine computes known bits for the vsetvli result (VL can never
; exceed VLMAX), so masks wider than the possible value and
; trunc/sext/zext round-trips that cannot change the value are removed,
; as asserted by the autogenerated CHECK lines below.

define i32 @vsetvli_i32() nounwind #0 {
; CHECK-LABEL: @vsetvli_i32(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[TMP0:%.*]] = call i32 @llvm.riscv.vsetvli.i32(i32 1, i32 1, i32 1)
; CHECK-NEXT:    ret i32 [[TMP0]]
;
entry:
  %0 = call i32 @llvm.riscv.vsetvli.i32(i32 1, i32 1, i32 1)
  %1 = and i32 %0, 2147483647
  ret i32 %1
}

define i64 @vsetvli_sext_i64() nounwind #0 {
; CHECK-LABEL: @vsetvli_sext_i64(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 1, i64 1, i64 1)
; CHECK-NEXT:    ret i64 [[TMP0]]
;
entry:
  %0 = call i64 @llvm.riscv.vsetvli.i64(i64 1, i64 1, i64 1)
  %1 = trunc i64 %0 to i32
  %2 = sext i32 %1 to i64
  ret i64 %2
}

define i64 @vsetvli_zext_i64() nounwind #0 {
; CHECK-LABEL: @vsetvli_zext_i64(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 1, i64 1, i64 1)
; CHECK-NEXT:    ret i64 [[TMP0]]
;
entry:
  %0 = call i64 @llvm.riscv.vsetvli.i64(i64 1, i64 1, i64 1)
  %1 = trunc i64 %0 to i32
  %2 = zext i32 %1 to i64
  ret i64 %2
}

define signext i32 @vsetvl_sext() nounwind #0 {
; CHECK-LABEL: @vsetvl_sext(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 1, i64 1, i64 1)
; CHECK-NEXT:    [[B:%.*]] = trunc nuw nsw i64 [[A]] to i32
; CHECK-NEXT:    ret i32 [[B]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 1, i64 1, i64 1)
  %b = trunc i64 %a to i32
  ret i32 %b
}

define zeroext i32 @vsetvl_zext() nounwind #0 {
; CHECK-LABEL: @vsetvl_zext(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 1, i64 1, i64 1)
; CHECK-NEXT:    [[B:%.*]] = trunc nuw nsw i64 [[A]] to i32
; CHECK-NEXT:    ret i32 [[B]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 1, i64 1, i64 1)
  %b = trunc i64 %a to i32
  ret i32 %b
}

define i32 @vsetvli_and17_i32() nounwind #0 {
; CHECK-LABEL: @vsetvli_and17_i32(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[TMP0:%.*]] = call i32 @llvm.riscv.vsetvli.i32(i32 1, i32 1, i32 1)
; CHECK-NEXT:    ret i32 [[TMP0]]
;
entry:
  %0 = call i32 @llvm.riscv.vsetvli.i32(i32 1, i32 1, i32 1)
  %1 = and i32 %0, 131071
  ret i32 %1
}

define i64 @vsetvli_and17_i64() nounwind #0 {
; CHECK-LABEL: @vsetvli_and17_i64(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[TMP0:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 1, i64 1, i64 1)
; CHECK-NEXT:    ret i64 [[TMP0]]
;
entry:
  %0 = call i64 @llvm.riscv.vsetvli.i64(i64 1, i64 1, i64 1)
  %1 = and i64 %0, 131071
  ret i64 %1
}
90
; SEW=e8 (operand 1 == 0), across LMUL values (operand 2). Each pair checks
; that a mask covering all possibly-set result bits is folded away, while a
; mask one bit narrower must remain (CHECK lines keep the 'and').

define i64 @vsetvl_e8m1_and14bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e8m1_and14bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 0)
; CHECK-NEXT:    ret i64 [[A]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 0)
  %b = and i64 %a, 16383
  ret i64 %b
}

define i64 @vsetvl_e8m1_and13bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e8m1_and13bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 0)
; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 8191
; CHECK-NEXT:    ret i64 [[B]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 0)
  %b = and i64 %a, 8191
  ret i64 %b
}

define i64 @vsetvl_e8m1_constant_avl() nounwind #0 {
; CHECK-LABEL: @vsetvl_e8m1_constant_avl(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 1, i64 0, i64 0)
; CHECK-NEXT:    ret i64 [[A]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 1, i64 0, i64 0)
  %b = and i64 %a, 1
  ret i64 %b
}

define i64 @vsetvl_e8m2_and15bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e8m2_and15bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 1)
; CHECK-NEXT:    ret i64 [[A]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 1)
  %b = and i64 %a, 32767
  ret i64 %b
}

define i64 @vsetvl_e8m2_and14bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e8m2_and14bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 1)
; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 16383
; CHECK-NEXT:    ret i64 [[B]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 1)
  %b = and i64 %a, 16383
  ret i64 %b
}

define i64 @vsetvl_e8m4_and16bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e8m4_and16bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 2)
; CHECK-NEXT:    ret i64 [[A]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 2)
  %b = and i64 %a, 65535
  ret i64 %b
}

define i64 @vsetvl_e8m4_and15bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e8m4_and15bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 2)
; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 32767
; CHECK-NEXT:    ret i64 [[B]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 2)
  %b = and i64 %a, 32767
  ret i64 %b
}

define i64 @vsetvl_e8m8_and17bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e8m8_and17bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 3)
; CHECK-NEXT:    ret i64 [[A]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 3)
  %b = and i64 %a, 131071
  ret i64 %b
}

define i64 @vsetvl_e8m8_and16bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e8m8_and16bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 3)
; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 65535
; CHECK-NEXT:    ret i64 [[B]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 3)
  %b = and i64 %a, 65535
  ret i64 %b
}

define i64 @vsetvl_e8mf2_and11bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e8mf2_and11bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 5)
; CHECK-NEXT:    ret i64 [[A]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 5)
  %b = and i64 %a, 2047
  ret i64 %b
}

define i64 @vsetvl_e8mf2_and10bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e8mf2_and10bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 5)
; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 1023
; CHECK-NEXT:    ret i64 [[B]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 5)
  %b = and i64 %a, 1023
  ret i64 %b
}

define i64 @vsetvl_e8mf4_and12bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e8mf4_and12bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 6)
; CHECK-NEXT:    ret i64 [[A]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 6)
  %b = and i64 %a, 4095
  ret i64 %b
}

define i64 @vsetvl_e8mf4_and11bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e8mf4_and11bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 6)
; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 2047
; CHECK-NEXT:    ret i64 [[B]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 6)
  %b = and i64 %a, 2047
  ret i64 %b
}

define i64 @vsetvl_e8mf8_and13bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e8mf8_and13bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 7)
; CHECK-NEXT:    ret i64 [[A]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 7)
  %b = and i64 %a, 8191
  ret i64 %b
}

define i64 @vsetvl_e8mf8_and12bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e8mf8_and12bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 0, i64 7)
; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 4095
; CHECK-NEXT:    ret i64 [[B]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 0, i64 7)
  %b = and i64 %a, 4095
  ret i64 %b
}
247
; SEW=e16 (operand 1 == 1), across LMUL values (operand 2). Same pairing as
; the e8 tests: the wider mask is folded, the one-bit-narrower mask survives.

define i64 @vsetvl_e16m1_and13bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e16m1_and13bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 0)
; CHECK-NEXT:    ret i64 [[A]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 0)
  %b = and i64 %a, 8191
  ret i64 %b
}

define i64 @vsetvl_e16m1_and12bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e16m1_and12bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 0)
; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 4095
; CHECK-NEXT:    ret i64 [[B]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 0)
  %b = and i64 %a, 4095
  ret i64 %b
}

define i64 @vsetvl_e16m2_and14bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e16m2_and14bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 1)
; CHECK-NEXT:    ret i64 [[A]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 1)
  %b = and i64 %a, 16383
  ret i64 %b
}

define i64 @vsetvl_e16m2_and13bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e16m2_and13bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 1)
; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 8191
; CHECK-NEXT:    ret i64 [[B]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 1)
  %b = and i64 %a, 8191
  ret i64 %b
}

define i64 @vsetvl_e16m4_and15bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e16m4_and15bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 2)
; CHECK-NEXT:    ret i64 [[A]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 2)
  %b = and i64 %a, 32767
  ret i64 %b
}

define i64 @vsetvl_e16m4_and14bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e16m4_and14bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 2)
; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 16383
; CHECK-NEXT:    ret i64 [[B]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 2)
  %b = and i64 %a, 16383
  ret i64 %b
}

define i64 @vsetvl_e16m8_and16bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e16m8_and16bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 3)
; CHECK-NEXT:    ret i64 [[A]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 3)
  %b = and i64 %a, 65535
  ret i64 %b
}

define i64 @vsetvl_e16m8_and15bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e16m8_and15bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 3)
; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 32767
; CHECK-NEXT:    ret i64 [[B]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 3)
  %b = and i64 %a, 32767
  ret i64 %b
}

define i64 @vsetvl_e16mf2_and10bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e16mf2_and10bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 5)
; CHECK-NEXT:    ret i64 [[A]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 5)
  %b = and i64 %a, 1023
  ret i64 %b
}

define i64 @vsetvl_e16mf2_and9bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e16mf2_and9bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 5)
; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 511
; CHECK-NEXT:    ret i64 [[B]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 5)
  %b = and i64 %a, 511
  ret i64 %b
}

define i64 @vsetvl_e16mf4_and11bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e16mf4_and11bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 6)
; CHECK-NEXT:    ret i64 [[A]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 6)
  %b = and i64 %a, 2047
  ret i64 %b
}

define i64 @vsetvl_e16mf4_and10bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e16mf4_and10bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 6)
; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 1023
; CHECK-NEXT:    ret i64 [[B]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 6)
  %b = and i64 %a, 1023
  ret i64 %b
}

define i64 @vsetvl_e16mf8_and12bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e16mf8_and12bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 7)
; CHECK-NEXT:    ret i64 [[A]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 7)
  %b = and i64 %a, 4095
  ret i64 %b
}

define i64 @vsetvl_e16mf8_and11bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e16mf8_and11bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 1, i64 7)
; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 2047
; CHECK-NEXT:    ret i64 [[B]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 1, i64 7)
  %b = and i64 %a, 2047
  ret i64 %b
}
394
; SEW=e32 (operand 1 == 2), across LMUL values (operand 2). Same pairing:
; the covering mask is folded away, the narrower mask is kept.

define i64 @vsetvl_e32m1_and12bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e32m1_and12bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 0)
; CHECK-NEXT:    ret i64 [[A]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 0)
  %b = and i64 %a, 4095
  ret i64 %b
}

define i64 @vsetvl_e32m1_and11bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e32m1_and11bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 0)
; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 2047
; CHECK-NEXT:    ret i64 [[B]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 0)
  %b = and i64 %a, 2047
  ret i64 %b
}

define i64 @vsetvl_e32m2_and13bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e32m2_and13bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 1)
; CHECK-NEXT:    ret i64 [[A]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 1)
  %b = and i64 %a, 8191
  ret i64 %b
}

define i64 @vsetvl_e32m2_and12bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e32m2_and12bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 1)
; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 4095
; CHECK-NEXT:    ret i64 [[B]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 1)
  %b = and i64 %a, 4095
  ret i64 %b
}

define i64 @vsetvl_e32m4_and14bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e32m4_and14bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 2)
; CHECK-NEXT:    ret i64 [[A]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 2)
  %b = and i64 %a, 16383
  ret i64 %b
}

define i64 @vsetvl_e32m4_and13bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e32m4_and13bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 2)
; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 8191
; CHECK-NEXT:    ret i64 [[B]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 2)
  %b = and i64 %a, 8191
  ret i64 %b
}

define i64 @vsetvl_e32m8_and15bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e32m8_and15bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 3)
; CHECK-NEXT:    ret i64 [[A]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 3)
  %b = and i64 %a, 32767
  ret i64 %b
}

define i64 @vsetvl_e32m8_and14bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e32m8_and14bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 3)
; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 16383
; CHECK-NEXT:    ret i64 [[B]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 3)
  %b = and i64 %a, 16383
  ret i64 %b
}

define i64 @vsetvl_e32mf2_and9bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e32mf2_and9bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 5)
; CHECK-NEXT:    ret i64 [[A]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 5)
  %b = and i64 %a, 511
  ret i64 %b
}

define i64 @vsetvl_e32mf2_and8bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e32mf2_and8bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 5)
; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 255
; CHECK-NEXT:    ret i64 [[B]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 5)
  %b = and i64 %a, 255
  ret i64 %b
}

define i64 @vsetvl_e32mf4_and10bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e32mf4_and10bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 6)
; CHECK-NEXT:    ret i64 [[A]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 6)
  %b = and i64 %a, 1023
  ret i64 %b
}

define i64 @vsetvl_e32mf4_and9bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e32mf4_and9bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 6)
; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 511
; CHECK-NEXT:    ret i64 [[B]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 6)
  %b = and i64 %a, 511
  ret i64 %b
}

define i64 @vsetvl_e32mf8_and11bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e32mf8_and11bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 7)
; CHECK-NEXT:    ret i64 [[A]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 7)
  %b = and i64 %a, 2047
  ret i64 %b
}

define i64 @vsetvl_e32mf8_and10bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e32mf8_and10bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 2, i64 7)
; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 1023
; CHECK-NEXT:    ret i64 [[B]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 2, i64 7)
  %b = and i64 %a, 1023
  ret i64 %b
}
541
; SEW=e64 (operand 1 == 3), across LMUL values (operand 2). Same pairing:
; the covering mask is folded away, the narrower mask is kept.

define i64 @vsetvl_e64m1_and11bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e64m1_and11bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 0)
; CHECK-NEXT:    ret i64 [[A]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 0)
  %b = and i64 %a, 2047
  ret i64 %b
}

define i64 @vsetvl_e64m1_and10bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e64m1_and10bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 0)
; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 1023
; CHECK-NEXT:    ret i64 [[B]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 0)
  %b = and i64 %a, 1023
  ret i64 %b
}

define i64 @vsetvl_e64m2_and12bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e64m2_and12bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 1)
; CHECK-NEXT:    ret i64 [[A]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 1)
  %b = and i64 %a, 4095
  ret i64 %b
}

define i64 @vsetvl_e64m2_and11bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e64m2_and11bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 1)
; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 2047
; CHECK-NEXT:    ret i64 [[B]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 1)
  %b = and i64 %a, 2047
  ret i64 %b
}

define i64 @vsetvl_e64m4_and13bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e64m4_and13bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 2)
; CHECK-NEXT:    ret i64 [[A]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 2)
  %b = and i64 %a, 8191
  ret i64 %b
}

define i64 @vsetvl_e64m4_and12bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e64m4_and12bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 2)
; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 4095
; CHECK-NEXT:    ret i64 [[B]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 2)
  %b = and i64 %a, 4095
  ret i64 %b
}

define i64 @vsetvl_e64m8_and14bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e64m8_and14bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 3)
; CHECK-NEXT:    ret i64 [[A]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 3)
  %b = and i64 %a, 16383
  ret i64 %b
}

define i64 @vsetvl_e64m8_and13bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e64m8_and13bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 3)
; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 8191
; CHECK-NEXT:    ret i64 [[B]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 3)
  %b = and i64 %a, 8191
  ret i64 %b
}

define i64 @vsetvl_e64mf2_and8bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e64mf2_and8bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 5)
; CHECK-NEXT:    ret i64 [[A]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 5)
  %b = and i64 %a, 255
  ret i64 %b
}

define i64 @vsetvl_e64mf2_and7bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e64mf2_and7bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 5)
; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 127
; CHECK-NEXT:    ret i64 [[B]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 5)
  %b = and i64 %a, 127
  ret i64 %b
}

define i64 @vsetvl_e64mf4_and9bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e64mf4_and9bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 6)
; CHECK-NEXT:    ret i64 [[A]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 6)
  %b = and i64 %a, 511
  ret i64 %b
}

define i64 @vsetvl_e64mf4_and8bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e64mf4_and8bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 6)
; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 255
; CHECK-NEXT:    ret i64 [[B]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 6)
  %b = and i64 %a, 255
  ret i64 %b
}

define i64 @vsetvl_e64mf8_and10bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e64mf8_and10bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 7)
; CHECK-NEXT:    ret i64 [[A]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 7)
  %b = and i64 %a, 1023
  ret i64 %b
}

define i64 @vsetvl_e64mf8_and9bits(i64 %avl) nounwind #0 {
; CHECK-LABEL: @vsetvl_e64mf8_and9bits(
; CHECK-NEXT:    [[A:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[AVL:%.*]], i64 3, i64 7)
; CHECK-NEXT:    [[B:%.*]] = and i64 [[A]], 511
; CHECK-NEXT:    ret i64 [[B]]
;
  %a = call i64 @llvm.riscv.vsetvli(i64 %avl, i64 3, i64 7)
  %b = and i64 %a, 511
  ret i64 %b
}

; vscale_range bounds VLMAX, which is what lets known-bits prune the masks.
attributes #0 = { vscale_range(2,1024) }