; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv32 -verify-machineinstrs < %s \
; RUN:   | FileCheck %s -check-prefix=RV32I
; RUN: llc -mtriple=riscv64 -verify-machineinstrs < %s \
; RUN:   | FileCheck %s -check-prefixes=RV64,RV64I
; RUN: llc -mtriple=riscv64 -mattr=+zbb -verify-machineinstrs < %s \
; RUN:   | FileCheck %s -check-prefixes=RV64,RV64ZBB
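; The RV64 prefix carries check lines shared by both riscv64 runs; RV64I and
; RV64ZBB capture where the base ISA's shift pairs differ from the Zbb
; sext.b/sext.h/zext.h forms.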

define i8 @sext_i1_to_i8(i1 %a) nounwind {
; RV32I-LABEL: sext_i1_to_i8:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a0, a0, 31
; RV32I-NEXT:    srai a0, a0, 31
; RV32I-NEXT:    ret
;
; RV64-LABEL: sext_i1_to_i8:
; RV64:       # %bb.0:
; RV64-NEXT:    slli a0, a0, 63
; RV64-NEXT:    srai a0, a0, 63
; RV64-NEXT:    ret
  %1 = sext i1 %a to i8
  ret i8 %1
}

define i16 @sext_i1_to_i16(i1 %a) nounwind {
; RV32I-LABEL: sext_i1_to_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a0, a0, 31
; RV32I-NEXT:    srai a0, a0, 31
; RV32I-NEXT:    ret
;
; RV64-LABEL: sext_i1_to_i16:
; RV64:       # %bb.0:
; RV64-NEXT:    slli a0, a0, 63
; RV64-NEXT:    srai a0, a0, 63
; RV64-NEXT:    ret
  %1 = sext i1 %a to i16
  ret i16 %1
}

define i32 @sext_i1_to_i32(i1 %a) nounwind {
; RV32I-LABEL: sext_i1_to_i32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a0, a0, 31
; RV32I-NEXT:    srai a0, a0, 31
; RV32I-NEXT:    ret
;
; RV64-LABEL: sext_i1_to_i32:
; RV64:       # %bb.0:
; RV64-NEXT:    slli a0, a0, 63
; RV64-NEXT:    srai a0, a0, 63
; RV64-NEXT:    ret
  %1 = sext i1 %a to i32
  ret i32 %1
}

define i64 @sext_i1_to_i64(i1 %a) nounwind {
; RV32I-LABEL: sext_i1_to_i64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a0, a0, 31
; RV32I-NEXT:    srai a0, a0, 31
; RV32I-NEXT:    mv a1, a0
; RV32I-NEXT:    ret
;
; RV64-LABEL: sext_i1_to_i64:
; RV64:       # %bb.0:
; RV64-NEXT:    slli a0, a0, 63
; RV64-NEXT:    srai a0, a0, 63
; RV64-NEXT:    ret
  %1 = sext i1 %a to i64
  ret i64 %1
}

define i16 @sext_i8_to_i16(i8 %a) nounwind {
; RV32I-LABEL: sext_i8_to_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a0, a0, 24
; RV32I-NEXT:    srai a0, a0, 24
; RV32I-NEXT:    ret
;
; RV64I-LABEL: sext_i8_to_i16:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a0, a0, 56
; RV64I-NEXT:    srai a0, a0, 56
; RV64I-NEXT:    ret
;
; RV64ZBB-LABEL: sext_i8_to_i16:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    sext.b a0, a0
; RV64ZBB-NEXT:    ret
  %1 = sext i8 %a to i16
  ret i16 %1
}

define i32 @sext_i8_to_i32(i8 %a) nounwind {
; RV32I-LABEL: sext_i8_to_i32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a0, a0, 24
; RV32I-NEXT:    srai a0, a0, 24
; RV32I-NEXT:    ret
;
; RV64I-LABEL: sext_i8_to_i32:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a0, a0, 56
; RV64I-NEXT:    srai a0, a0, 56
; RV64I-NEXT:    ret
;
; RV64ZBB-LABEL: sext_i8_to_i32:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    sext.b a0, a0
; RV64ZBB-NEXT:    ret
  %1 = sext i8 %a to i32
  ret i32 %1
}

define i64 @sext_i8_to_i64(i8 %a) nounwind {
; RV32I-LABEL: sext_i8_to_i64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a1, a0, 24
; RV32I-NEXT:    srai a0, a1, 24
; RV32I-NEXT:    srai a1, a1, 31
; RV32I-NEXT:    ret
;
; RV64I-LABEL: sext_i8_to_i64:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a0, a0, 56
; RV64I-NEXT:    srai a0, a0, 56
; RV64I-NEXT:    ret
;
; RV64ZBB-LABEL: sext_i8_to_i64:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    sext.b a0, a0
; RV64ZBB-NEXT:    ret
  %1 = sext i8 %a to i64
  ret i64 %1
}

define i32 @sext_i16_to_i32(i16 %a) nounwind {
; RV32I-LABEL: sext_i16_to_i32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srai a0, a0, 16
; RV32I-NEXT:    ret
;
; RV64I-LABEL: sext_i16_to_i32:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srai a0, a0, 48
; RV64I-NEXT:    ret
;
; RV64ZBB-LABEL: sext_i16_to_i32:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    sext.h a0, a0
; RV64ZBB-NEXT:    ret
  %1 = sext i16 %a to i32
  ret i32 %1
}

define i64 @sext_i16_to_i64(i16 %a) nounwind {
; RV32I-LABEL: sext_i16_to_i64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a1, a0, 16
; RV32I-NEXT:    srai a0, a1, 16
; RV32I-NEXT:    srai a1, a1, 31
; RV32I-NEXT:    ret
;
; RV64I-LABEL: sext_i16_to_i64:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srai a0, a0, 48
; RV64I-NEXT:    ret
;
; RV64ZBB-LABEL: sext_i16_to_i64:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    sext.h a0, a0
; RV64ZBB-NEXT:    ret
  %1 = sext i16 %a to i64
  ret i64 %1
}

define i64 @sext_i32_to_i64(i32 %a) nounwind {
; RV32I-LABEL: sext_i32_to_i64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srai a1, a0, 31
; RV32I-NEXT:    ret
;
; RV64-LABEL: sext_i32_to_i64:
; RV64:       # %bb.0:
; RV64-NEXT:    sext.w a0, a0
; RV64-NEXT:    ret
  %1 = sext i32 %a to i64
  ret i64 %1
}

define i8 @zext_i1_to_i8(i1 %a) nounwind {
; RV32I-LABEL: zext_i1_to_i8:
; RV32I:       # %bb.0:
; RV32I-NEXT:    andi a0, a0, 1
; RV32I-NEXT:    ret
;
; RV64-LABEL: zext_i1_to_i8:
; RV64:       # %bb.0:
; RV64-NEXT:    andi a0, a0, 1
; RV64-NEXT:    ret
  %1 = zext i1 %a to i8
  ret i8 %1
}

define i16 @zext_i1_to_i16(i1 %a) nounwind {
; RV32I-LABEL: zext_i1_to_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    andi a0, a0, 1
; RV32I-NEXT:    ret
;
; RV64-LABEL: zext_i1_to_i16:
; RV64:       # %bb.0:
; RV64-NEXT:    andi a0, a0, 1
; RV64-NEXT:    ret
  %1 = zext i1 %a to i16
  ret i16 %1
}

define i32 @zext_i1_to_i32(i1 %a) nounwind {
; RV32I-LABEL: zext_i1_to_i32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    andi a0, a0, 1
; RV32I-NEXT:    ret
;
; RV64-LABEL: zext_i1_to_i32:
; RV64:       # %bb.0:
; RV64-NEXT:    andi a0, a0, 1
; RV64-NEXT:    ret
  %1 = zext i1 %a to i32
  ret i32 %1
}

define i64 @zext_i1_to_i64(i1 %a) nounwind {
; RV32I-LABEL: zext_i1_to_i64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    andi a0, a0, 1
; RV32I-NEXT:    li a1, 0
; RV32I-NEXT:    ret
;
; RV64-LABEL: zext_i1_to_i64:
; RV64:       # %bb.0:
; RV64-NEXT:    andi a0, a0, 1
; RV64-NEXT:    ret
  %1 = zext i1 %a to i64
  ret i64 %1
}

define i16 @zext_i8_to_i16(i8 %a) nounwind {
; RV32I-LABEL: zext_i8_to_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    andi a0, a0, 255
; RV32I-NEXT:    ret
;
; RV64-LABEL: zext_i8_to_i16:
; RV64:       # %bb.0:
; RV64-NEXT:    andi a0, a0, 255
; RV64-NEXT:    ret
  %1 = zext i8 %a to i16
  ret i16 %1
}

define i32 @zext_i8_to_i32(i8 %a) nounwind {
; RV32I-LABEL: zext_i8_to_i32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    andi a0, a0, 255
; RV32I-NEXT:    ret
;
; RV64-LABEL: zext_i8_to_i32:
; RV64:       # %bb.0:
; RV64-NEXT:    andi a0, a0, 255
; RV64-NEXT:    ret
  %1 = zext i8 %a to i32
  ret i32 %1
}

define i64 @zext_i8_to_i64(i8 %a) nounwind {
; RV32I-LABEL: zext_i8_to_i64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    andi a0, a0, 255
; RV32I-NEXT:    li a1, 0
; RV32I-NEXT:    ret
;
; RV64-LABEL: zext_i8_to_i64:
; RV64:       # %bb.0:
; RV64-NEXT:    andi a0, a0, 255
; RV64-NEXT:    ret
  %1 = zext i8 %a to i64
  ret i64 %1
}

define i32 @zext_i16_to_i32(i16 %a) nounwind {
; RV32I-LABEL: zext_i16_to_i32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    ret
;
; RV64I-LABEL: zext_i16_to_i32:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 48
; RV64I-NEXT:    ret
;
; RV64ZBB-LABEL: zext_i16_to_i32:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    zext.h a0, a0
; RV64ZBB-NEXT:    ret
  %1 = zext i16 %a to i32
  ret i32 %1
}

define i64 @zext_i16_to_i64(i16 %a) nounwind {
; RV32I-LABEL: zext_i16_to_i64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    li a1, 0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: zext_i16_to_i64:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 48
; RV64I-NEXT:    ret
;
; RV64ZBB-LABEL: zext_i16_to_i64:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    zext.h a0, a0
; RV64ZBB-NEXT:    ret
  %1 = zext i16 %a to i64
  ret i64 %1
}

define i64 @zext_i32_to_i64(i32 %a) nounwind {
; RV32I-LABEL: zext_i32_to_i64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    li a1, 0
; RV32I-NEXT:    ret
;
; RV64-LABEL: zext_i32_to_i64:
; RV64:       # %bb.0:
; RV64-NEXT:    slli a0, a0, 32
; RV64-NEXT:    srli a0, a0, 32
; RV64-NEXT:    ret
  %1 = zext i32 %a to i64
  ret i64 %1
}

define i8 @zext_nneg_i1_to_i8(i1 %a) nounwind {
; RV32I-LABEL: zext_nneg_i1_to_i8:
; RV32I:       # %bb.0:
; RV32I-NEXT:    andi a0, a0, 1
; RV32I-NEXT:    ret
;
; RV64-LABEL: zext_nneg_i1_to_i8:
; RV64:       # %bb.0:
; RV64-NEXT:    andi a0, a0, 1
; RV64-NEXT:    ret
  %1 = zext nneg i1 %a to i8
  ret i8 %1
}

define i16 @zext_nneg_i1_to_i16(i1 %a) nounwind {
; RV32I-LABEL: zext_nneg_i1_to_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    andi a0, a0, 1
; RV32I-NEXT:    ret
;
; RV64-LABEL: zext_nneg_i1_to_i16:
; RV64:       # %bb.0:
; RV64-NEXT:    andi a0, a0, 1
; RV64-NEXT:    ret
  %1 = zext nneg i1 %a to i16
  ret i16 %1
}

define i32 @zext_nneg_i1_to_i32(i1 %a) nounwind {
; RV32I-LABEL: zext_nneg_i1_to_i32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    andi a0, a0, 1
; RV32I-NEXT:    ret
;
; RV64-LABEL: zext_nneg_i1_to_i32:
; RV64:       # %bb.0:
; RV64-NEXT:    andi a0, a0, 1
; RV64-NEXT:    ret
  %1 = zext nneg i1 %a to i32
  ret i32 %1
}

define i64 @zext_nneg_i1_to_i64(i1 %a) nounwind {
; RV32I-LABEL: zext_nneg_i1_to_i64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    andi a0, a0, 1
; RV32I-NEXT:    li a1, 0
; RV32I-NEXT:    ret
;
; RV64-LABEL: zext_nneg_i1_to_i64:
; RV64:       # %bb.0:
; RV64-NEXT:    andi a0, a0, 1
; RV64-NEXT:    ret
  %1 = zext nneg i1 %a to i64
  ret i64 %1
}

define i16 @zext_nneg_i8_to_i16(i8 %a) nounwind {
; RV32I-LABEL: zext_nneg_i8_to_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    andi a0, a0, 255
; RV32I-NEXT:    ret
;
; RV64-LABEL: zext_nneg_i8_to_i16:
; RV64:       # %bb.0:
; RV64-NEXT:    andi a0, a0, 255
; RV64-NEXT:    ret
  %1 = zext nneg i8 %a to i16
  ret i16 %1
}

define i32 @zext_nneg_i8_to_i32(i8 %a) nounwind {
; RV32I-LABEL: zext_nneg_i8_to_i32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    andi a0, a0, 255
; RV32I-NEXT:    ret
;
; RV64-LABEL: zext_nneg_i8_to_i32:
; RV64:       # %bb.0:
; RV64-NEXT:    andi a0, a0, 255
; RV64-NEXT:    ret
  %1 = zext nneg i8 %a to i32
  ret i32 %1
}

define i64 @zext_nneg_i8_to_i64(i8 %a) nounwind {
; RV32I-LABEL: zext_nneg_i8_to_i64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    andi a0, a0, 255
; RV32I-NEXT:    li a1, 0
; RV32I-NEXT:    ret
;
; RV64-LABEL: zext_nneg_i8_to_i64:
; RV64:       # %bb.0:
; RV64-NEXT:    andi a0, a0, 255
; RV64-NEXT:    ret
  %1 = zext nneg i8 %a to i64
  ret i64 %1
}

define i32 @zext_nneg_i16_to_i32(i16 %a) nounwind {
; RV32I-LABEL: zext_nneg_i16_to_i32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    ret
;
; RV64I-LABEL: zext_nneg_i16_to_i32:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 48
; RV64I-NEXT:    ret
;
; RV64ZBB-LABEL: zext_nneg_i16_to_i32:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    zext.h a0, a0
; RV64ZBB-NEXT:    ret
  %1 = zext nneg i16 %a to i32
  ret i32 %1
}

define i64 @zext_nneg_i16_to_i64(i16 %a) nounwind {
; RV32I-LABEL: zext_nneg_i16_to_i64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    li a1, 0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: zext_nneg_i16_to_i64:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 48
; RV64I-NEXT:    ret
;
; RV64ZBB-LABEL: zext_nneg_i16_to_i64:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    zext.h a0, a0
; RV64ZBB-NEXT:    ret
  %1 = zext nneg i16 %a to i64
  ret i64 %1
}

define i64 @zext_nneg_i32_to_i64(i32 %a) nounwind {
; RV32I-LABEL: zext_nneg_i32_to_i64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    li a1, 0
; RV32I-NEXT:    ret
;
; RV64-LABEL: zext_nneg_i32_to_i64:
; RV64:       # %bb.0:
; RV64-NEXT:    sext.w a0, a0
; RV64-NEXT:    ret
  %1 = zext nneg i32 %a to i64
  ret i64 %1
}

define i1 @trunc_i8_to_i1(i8 %a) nounwind {
; RV32I-LABEL: trunc_i8_to_i1:
; RV32I:       # %bb.0:
; RV32I-NEXT:    ret
;
; RV64-LABEL: trunc_i8_to_i1:
; RV64:       # %bb.0:
; RV64-NEXT:    ret
  %1 = trunc i8 %a to i1
  ret i1 %1
}

define i1 @trunc_i16_to_i1(i16 %a) nounwind {
; RV32I-LABEL: trunc_i16_to_i1:
; RV32I:       # %bb.0:
; RV32I-NEXT:    ret
;
; RV64-LABEL: trunc_i16_to_i1:
; RV64:       # %bb.0:
; RV64-NEXT:    ret
  %1 = trunc i16 %a to i1
  ret i1 %1
}

define i1 @trunc_i32_to_i1(i32 %a) nounwind {
; RV32I-LABEL: trunc_i32_to_i1:
; RV32I:       # %bb.0:
; RV32I-NEXT:    ret
;
; RV64-LABEL: trunc_i32_to_i1:
; RV64:       # %bb.0:
; RV64-NEXT:    ret
  %1 = trunc i32 %a to i1
  ret i1 %1
}

define i1 @trunc_i64_to_i1(i64 %a) nounwind {
; RV32I-LABEL: trunc_i64_to_i1:
; RV32I:       # %bb.0:
; RV32I-NEXT:    ret
;
; RV64-LABEL: trunc_i64_to_i1:
; RV64:       # %bb.0:
; RV64-NEXT:    ret
  %1 = trunc i64 %a to i1
  ret i1 %1
}

define i8 @trunc_i16_to_i8(i16 %a) nounwind {
; RV32I-LABEL: trunc_i16_to_i8:
; RV32I:       # %bb.0:
; RV32I-NEXT:    ret
;
; RV64-LABEL: trunc_i16_to_i8:
; RV64:       # %bb.0:
; RV64-NEXT:    ret
  %1 = trunc i16 %a to i8
  ret i8 %1
}

define i8 @trunc_i32_to_i8(i32 %a) nounwind {
; RV32I-LABEL: trunc_i32_to_i8:
; RV32I:       # %bb.0:
; RV32I-NEXT:    ret
;
; RV64-LABEL: trunc_i32_to_i8:
; RV64:       # %bb.0:
; RV64-NEXT:    ret
  %1 = trunc i32 %a to i8
  ret i8 %1
}

define i8 @trunc_i64_to_i8(i64 %a) nounwind {
; RV32I-LABEL: trunc_i64_to_i8:
; RV32I:       # %bb.0:
; RV32I-NEXT:    ret
;
; RV64-LABEL: trunc_i64_to_i8:
; RV64:       # %bb.0:
; RV64-NEXT:    ret
  %1 = trunc i64 %a to i8
  ret i8 %1
}

define i16 @trunc_i32_to_i16(i32 %a) nounwind {
; RV32I-LABEL: trunc_i32_to_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    ret
;
; RV64-LABEL: trunc_i32_to_i16:
; RV64:       # %bb.0:
; RV64-NEXT:    ret
  %1 = trunc i32 %a to i16
  ret i16 %1
}

define i16 @trunc_i64_to_i16(i64 %a) nounwind {
; RV32I-LABEL: trunc_i64_to_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    ret
;
; RV64-LABEL: trunc_i64_to_i16:
; RV64:       # %bb.0:
; RV64-NEXT:    ret
  %1 = trunc i64 %a to i16
  ret i16 %1
}

define i32 @trunc_i64_to_i32(i64 %a) nounwind {
; RV32I-LABEL: trunc_i64_to_i32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    ret
;
; RV64-LABEL: trunc_i64_to_i32:
; RV64:       # %bb.0:
; RV64-NEXT:    ret
  %1 = trunc i64 %a to i32
  ret i32 %1
}

;; fold (sext (not x)) -> (add (zext x) -1)
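;; For an i1 x, not x is 1 - x, so sext(not x) = -(1 - x) = zext(x) - 1; the
;; andi/addi pairs checked below are that zext-and-decrement form.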
define i32 @sext_of_not_i32(i1 %x) {
; RV32I-LABEL: sext_of_not_i32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    andi a0, a0, 1
; RV32I-NEXT:    addi a0, a0, -1
; RV32I-NEXT:    ret
;
; RV64-LABEL: sext_of_not_i32:
; RV64:       # %bb.0:
; RV64-NEXT:    andi a0, a0, 1
; RV64-NEXT:    addi a0, a0, -1
; RV64-NEXT:    ret
  %xor = xor i1 %x, 1
  %sext = sext i1 %xor to i32
  ret i32 %sext
}

define i64 @sext_of_not_i64(i1 %x) {
; RV32I-LABEL: sext_of_not_i64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    andi a0, a0, 1
; RV32I-NEXT:    addi a0, a0, -1
; RV32I-NEXT:    mv a1, a0
; RV32I-NEXT:    ret
;
; RV64-LABEL: sext_of_not_i64:
; RV64:       # %bb.0:
; RV64-NEXT:    andi a0, a0, 1
; RV64-NEXT:    addi a0, a0, -1
; RV64-NEXT:    ret
  %xor = xor i1 %x, 1
  %sext = sext i1 %xor to i64
  ret i64 %sext
}

;; fold (sext (not (setcc a, b, cc))) -> (sext (setcc a, b, !cc))
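;; Inverting an i1 setcc result is the same as comparing with the negated
;; condition (not (a == 7) is a != 7), so the xor in the IR below should not
;; cost a separate instruction in the output.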
define i32 @sext_of_not_cmp_i32(i32 %x) {
; RV32I-LABEL: sext_of_not_cmp_i32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    addi a0, a0, -7
; RV32I-NEXT:    seqz a0, a0
; RV32I-NEXT:    addi a0, a0, -1
; RV32I-NEXT:    ret
;
; RV64-LABEL: sext_of_not_cmp_i32:
; RV64:       # %bb.0:
; RV64-NEXT:    sext.w a0, a0
; RV64-NEXT:    addi a0, a0, -7
; RV64-NEXT:    seqz a0, a0
; RV64-NEXT:    addi a0, a0, -1
; RV64-NEXT:    ret
  %cmp = icmp eq i32 %x, 7
  %xor = xor i1 %cmp, 1
  %sext = sext i1 %xor to i32
  ret i32 %sext
}

define i64 @sext_of_not_cmp_i64(i64 %x) {
; RV32I-LABEL: sext_of_not_cmp_i64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    xori a0, a0, 7
; RV32I-NEXT:    or a0, a0, a1
; RV32I-NEXT:    seqz a0, a0
; RV32I-NEXT:    addi a0, a0, -1
; RV32I-NEXT:    mv a1, a0
; RV32I-NEXT:    ret
;
; RV64-LABEL: sext_of_not_cmp_i64:
; RV64:       # %bb.0:
; RV64-NEXT:    addi a0, a0, -7
; RV64-NEXT:    seqz a0, a0
; RV64-NEXT:    addi a0, a0, -1
; RV64-NEXT:    ret
  %cmp = icmp eq i64 %x, 7
  %xor = xor i1 %cmp, 1
  %sext = sext i1 %xor to i64
  ret i64 %sext
}

;; TODO: fold (add (zext (setcc a, b, cc)), -1) -> (sext (setcc a, b, !cc))
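;; zext of an i1 is 0 or 1, so subtracting 1 gives 0 or -1, i.e. sext of the
;; inverted condition. Even without the generic fold, the RISC-V output for
;; dec_of_zexted_cmp_* below matches the sext_of_not_cmp_* functions above.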
define i32 @dec_of_zexted_cmp_i32(i32 %x) {
; RV32I-LABEL: dec_of_zexted_cmp_i32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    addi a0, a0, -7
; RV32I-NEXT:    seqz a0, a0
; RV32I-NEXT:    addi a0, a0, -1
; RV32I-NEXT:    ret
;
; RV64-LABEL: dec_of_zexted_cmp_i32:
; RV64:       # %bb.0:
; RV64-NEXT:    sext.w a0, a0
; RV64-NEXT:    addi a0, a0, -7
; RV64-NEXT:    seqz a0, a0
; RV64-NEXT:    addi a0, a0, -1
; RV64-NEXT:    ret
  %cmp = icmp eq i32 %x, 7
  %zext = zext i1 %cmp to i32
  %dec = sub i32 %zext, 1
  ret i32 %dec
}

define i64 @dec_of_zexted_cmp_i64(i64 %x) {
; RV32I-LABEL: dec_of_zexted_cmp_i64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    xori a0, a0, 7
; RV32I-NEXT:    or a0, a0, a1
; RV32I-NEXT:    seqz a0, a0
; RV32I-NEXT:    addi a0, a0, -1
; RV32I-NEXT:    mv a1, a0
; RV32I-NEXT:    ret
;
; RV64-LABEL: dec_of_zexted_cmp_i64:
; RV64:       # %bb.0:
; RV64-NEXT:    addi a0, a0, -7
; RV64-NEXT:    seqz a0, a0
; RV64-NEXT:    addi a0, a0, -1
; RV64-NEXT:    ret
  %cmp = icmp eq i64 %x, 7
  %zext = zext i1 %cmp to i64
  %dec = sub i64 %zext, 1
  ret i64 %dec
}

define void @zext_nneg_dominating_icmp_i64(i16 signext %0) {
; RV32I-LABEL: zext_nneg_dominating_icmp_i64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    bltz a0, .LBB46_2
; RV32I-NEXT:  # %bb.1:
; RV32I-NEXT:    srai a1, a0, 31
; RV32I-NEXT:    tail bar_i64
; RV32I-NEXT:  .LBB46_2:
; RV32I-NEXT:    ret
;
; RV64-LABEL: zext_nneg_dominating_icmp_i64:
; RV64:       # %bb.0:
; RV64-NEXT:    bltz a0, .LBB46_2
; RV64-NEXT:  # %bb.1:
; RV64-NEXT:    tail bar_i64
; RV64-NEXT:  .LBB46_2:
; RV64-NEXT:    ret
  %2 = icmp sgt i16 %0, -1
  br i1 %2, label %3, label %5

3:
  %4 = zext nneg i16 %0 to i64
  tail call void @bar_i64(i64 %4)
  br label %5

5:
  ret void
}

declare void @bar_i64(i64)

define void @zext_nneg_dominating_icmp_i32(i16 signext %0) {
; RV32I-LABEL: zext_nneg_dominating_icmp_i32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    bltz a0, .LBB47_2
; RV32I-NEXT:  # %bb.1:
; RV32I-NEXT:    tail bar_i32
; RV32I-NEXT:  .LBB47_2:
; RV32I-NEXT:    ret
;
; RV64-LABEL: zext_nneg_dominating_icmp_i32:
; RV64:       # %bb.0:
; RV64-NEXT:    bltz a0, .LBB47_2
; RV64-NEXT:  # %bb.1:
; RV64-NEXT:    tail bar_i32
; RV64-NEXT:  .LBB47_2:
; RV64-NEXT:    ret
  %2 = icmp sgt i16 %0, -1
  br i1 %2, label %3, label %5

3:
  %4 = zext nneg i16 %0 to i32
  tail call void @bar_i32(i32 %4)
  br label %5

5:
  ret void
}

declare void @bar_i32(i32)

; Test that we propagate zext nneg when we sign extend it on RV64 for the call
; to bar_i32.
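; %0 is passed already sign extended and is known non-negative on the taken
; path, so its sign- and zero-extended values agree and no extra extension is
; needed before the tail call.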
define void @zext_nneg_dominating_icmp_i32_signext(i16 signext %0) {
; RV32I-LABEL: zext_nneg_dominating_icmp_i32_signext:
; RV32I:       # %bb.0:
; RV32I-NEXT:    bltz a0, .LBB48_2
; RV32I-NEXT:  # %bb.1:
; RV32I-NEXT:    tail bar_i32
; RV32I-NEXT:  .LBB48_2:
; RV32I-NEXT:    ret
;
; RV64-LABEL: zext_nneg_dominating_icmp_i32_signext:
; RV64:       # %bb.0:
; RV64-NEXT:    bltz a0, .LBB48_2
; RV64-NEXT:  # %bb.1:
; RV64-NEXT:    tail bar_i32
; RV64-NEXT:  .LBB48_2:
; RV64-NEXT:    ret
  %2 = icmp sgt i16 %0, -1
  br i1 %2, label %3, label %5

3:
  %4 = zext nneg i16 %0 to i32
  tail call void @bar_i32(i32 signext %4)
  br label %5

5:
  ret void
}

; Test that we propagate zext nneg when we zero extend it on RV64 for the call
; to bar_i32.
define void @zext_nneg_dominating_icmp_i32_zeroext(i16 signext %0) {
; RV32I-LABEL: zext_nneg_dominating_icmp_i32_zeroext:
; RV32I:       # %bb.0:
; RV32I-NEXT:    bltz a0, .LBB49_2
; RV32I-NEXT:  # %bb.1:
; RV32I-NEXT:    tail bar_i32
; RV32I-NEXT:  .LBB49_2:
; RV32I-NEXT:    ret
;
; RV64-LABEL: zext_nneg_dominating_icmp_i32_zeroext:
; RV64:       # %bb.0:
; RV64-NEXT:    bltz a0, .LBB49_2
; RV64-NEXT:  # %bb.1:
; RV64-NEXT:    tail bar_i32
; RV64-NEXT:  .LBB49_2:
; RV64-NEXT:    ret
  %2 = icmp sgt i16 %0, -1
  br i1 %2, label %3, label %5

3:
  %4 = zext nneg i16 %0 to i32
  tail call void @bar_i32(i32 zeroext %4)
  br label %5

5:
  ret void
}

; The load is used extended and non-extended in the successor basic block. The
; signed compare will cause the non-extended value to be exported out of the
; first basic block using a sext to XLen. We need to CSE the zext nneg with
; the sext so that we can form a sextload.
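; With the CSE, the sign-extending lh is the only extension: both calls below
; reuse the loaded value from s0 without a second load or shift pair.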
define void @load_zext_nneg_sext_cse(ptr %p) nounwind {
; RV32I-LABEL: load_zext_nneg_sext_cse:
; RV32I:       # %bb.0:
; RV32I-NEXT:    addi sp, sp, -16
; RV32I-NEXT:    sw ra, 12(sp) # 4-byte Folded Spill
; RV32I-NEXT:    sw s0, 8(sp) # 4-byte Folded Spill
; RV32I-NEXT:    lh s0, 0(a0)
; RV32I-NEXT:    bltz s0, .LBB50_2
; RV32I-NEXT:  # %bb.1: # %bb1
; RV32I-NEXT:    mv a0, s0
; RV32I-NEXT:    call bar_i16
; RV32I-NEXT:    mv a0, s0
; RV32I-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; RV32I-NEXT:    lw s0, 8(sp) # 4-byte Folded Reload
; RV32I-NEXT:    addi sp, sp, 16
; RV32I-NEXT:    tail bar_i32
; RV32I-NEXT:  .LBB50_2: # %bb2
; RV32I-NEXT:    lw ra, 12(sp) # 4-byte Folded Reload
; RV32I-NEXT:    lw s0, 8(sp) # 4-byte Folded Reload
; RV32I-NEXT:    addi sp, sp, 16
; RV32I-NEXT:    ret
;
; RV64-LABEL: load_zext_nneg_sext_cse:
; RV64:       # %bb.0:
; RV64-NEXT:    addi sp, sp, -16
; RV64-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; RV64-NEXT:    sd s0, 0(sp) # 8-byte Folded Spill
; RV64-NEXT:    lh s0, 0(a0)
; RV64-NEXT:    bltz s0, .LBB50_2
; RV64-NEXT:  # %bb.1: # %bb1
; RV64-NEXT:    mv a0, s0
; RV64-NEXT:    call bar_i16
; RV64-NEXT:    mv a0, s0
; RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; RV64-NEXT:    ld s0, 0(sp) # 8-byte Folded Reload
; RV64-NEXT:    addi sp, sp, 16
; RV64-NEXT:    tail bar_i32
; RV64-NEXT:  .LBB50_2: # %bb2
; RV64-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; RV64-NEXT:    ld s0, 0(sp) # 8-byte Folded Reload
; RV64-NEXT:    addi sp, sp, 16
; RV64-NEXT:    ret
  %load = load i16, ptr %p
  %zext = zext nneg i16 %load to i32
  %cmp = icmp sgt i16 %load, -1
  br i1 %cmp, label %bb1, label %bb2

bb1:
  tail call void @bar_i16(i16 signext %load)
  tail call void @bar_i32(i32 signext %zext)
  br label %bb2

bb2:
  ret void
}
declare void @bar_i16(i16)