; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv32-unknown-linux-gnu < %s | FileCheck %s --check-prefixes=CHECK,RV32,RV32I
; RUN: llc -mtriple=riscv64-unknown-linux-gnu < %s | FileCheck %s --check-prefixes=CHECK,RV64,RV64I
; RUN: llc -mtriple=riscv32-unknown-linux-gnu -mattr=+zbb < %s | FileCheck %s --check-prefixes=CHECK,RV32,RV32ZBB
; RUN: llc -mtriple=riscv64-unknown-linux-gnu -mattr=+zbb < %s | FileCheck %s --check-prefixes=CHECK,RV64,RV64ZBB

; https://bugs.llvm.org/show_bug.cgi?id=38149

; We are truncating from a wider width, and then sign-extending
; back to the original width. Then we inequality-compare orig and src.
; If they don't match, then we had signed truncation during truncation.

; This can be expressed in several ways in IR:
;   trunc + sext + icmp ne <- not canonical
;   shl   + ashr + icmp ne
;   add          + icmp ult/ule
;   add          + icmp uge/ugt
; However only the simplest form (with two shifts) gets lowered best.

; ---------------------------------------------------------------------------- ;
; shl + ashr + icmp ne
; ---------------------------------------------------------------------------- ;

; i16 -> i8 signed-truncation check, two-shift (shl+ashr+icmp ne) form.
define i1 @shifts_necmp_i16_i8(i16 %x) nounwind {
; RV32I-LABEL: shifts_necmp_i16_i8:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a1, a0, 16
; RV32I-NEXT:    slli a0, a0, 24
; RV32I-NEXT:    srli a1, a1, 16
; RV32I-NEXT:    srai a0, a0, 8
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    xor a0, a0, a1
; RV32I-NEXT:    snez a0, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: shifts_necmp_i16_i8:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a1, a0, 48
; RV64I-NEXT:    slli a0, a0, 56
; RV64I-NEXT:    srli a1, a1, 48
; RV64I-NEXT:    srai a0, a0, 8
; RV64I-NEXT:    srli a0, a0, 48
; RV64I-NEXT:    xor a0, a0, a1
; RV64I-NEXT:    snez a0, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: shifts_necmp_i16_i8:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    zext.h a1, a0
; RV32ZBB-NEXT:    sext.b a0, a0
; RV32ZBB-NEXT:    zext.h a0, a0
; RV32ZBB-NEXT:    xor a0, a0, a1
; RV32ZBB-NEXT:    snez a0, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: shifts_necmp_i16_i8:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    zext.h a1, a0
; RV64ZBB-NEXT:    sext.b a0, a0
; RV64ZBB-NEXT:    zext.h a0, a0
; RV64ZBB-NEXT:    xor a0, a0, a1
; RV64ZBB-NEXT:    snez a0, a0
; RV64ZBB-NEXT:    ret
  %tmp0 = shl i16 %x, 8 ; 16-8
  %tmp1 = ashr exact i16 %tmp0, 8 ; 16-8
  %tmp2 = icmp ne i16 %tmp1, %x
  ret i1 %tmp2
}

; i32 -> i16 signed-truncation check, two-shift form.
define i1 @shifts_necmp_i32_i16(i32 %x) nounwind {
; RV32I-LABEL: shifts_necmp_i32_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a1, a0, 16
; RV32I-NEXT:    srai a1, a1, 16
; RV32I-NEXT:    xor a0, a1, a0
; RV32I-NEXT:    snez a0, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: shifts_necmp_i32_i16:
; RV64I:       # %bb.0:
; RV64I-NEXT:    sext.w a1, a0
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srai a0, a0, 48
; RV64I-NEXT:    xor a0, a0, a1
; RV64I-NEXT:    snez a0, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: shifts_necmp_i32_i16:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    sext.h a1, a0
; RV32ZBB-NEXT:    xor a0, a1, a0
; RV32ZBB-NEXT:    snez a0, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: shifts_necmp_i32_i16:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    sext.w a1, a0
; RV64ZBB-NEXT:    sext.h a0, a0
; RV64ZBB-NEXT:    xor a0, a0, a1
; RV64ZBB-NEXT:    snez a0, a0
; RV64ZBB-NEXT:    ret
  %tmp0 = shl i32 %x, 16 ; 32-16
  %tmp1 = ashr exact i32 %tmp0, 16 ; 32-16
  %tmp2 = icmp ne i32 %tmp1, %x
  ret i1 %tmp2
}

; i32 -> i8 signed-truncation check, two-shift form.
define i1 @shifts_necmp_i32_i8(i32 %x) nounwind {
; RV32I-LABEL: shifts_necmp_i32_i8:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a1, a0, 24
; RV32I-NEXT:    srai a1, a1, 24
; RV32I-NEXT:    xor a0, a1, a0
; RV32I-NEXT:    snez a0, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: shifts_necmp_i32_i8:
; RV64I:       # %bb.0:
; RV64I-NEXT:    sext.w a1, a0
; RV64I-NEXT:    slli a0, a0, 56
; RV64I-NEXT:    srai a0, a0, 56
; RV64I-NEXT:    xor a0, a0, a1
; RV64I-NEXT:    snez a0, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: shifts_necmp_i32_i8:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    sext.b a1, a0
; RV32ZBB-NEXT:    xor a0, a1, a0
; RV32ZBB-NEXT:    snez a0, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: shifts_necmp_i32_i8:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    sext.w a1, a0
; RV64ZBB-NEXT:    sext.b a0, a0
; RV64ZBB-NEXT:    xor a0, a0, a1
; RV64ZBB-NEXT:    snez a0, a0
; RV64ZBB-NEXT:    ret
  %tmp0 = shl i32 %x, 24 ; 32-8
  %tmp1 = ashr exact i32 %tmp0, 24 ; 32-8
  %tmp2 = icmp ne i32 %tmp1, %x
  ret i1 %tmp2
}

; i64 -> i32 signed-truncation check, two-shift form.
define i1 @shifts_necmp_i64_i32(i64 %x) nounwind {
; RV32-LABEL: shifts_necmp_i64_i32:
; RV32:       # %bb.0:
; RV32-NEXT:    srai a0, a0, 31
; RV32-NEXT:    xor a0, a0, a1
; RV32-NEXT:    snez a0, a0
; RV32-NEXT:    ret
;
; RV64-LABEL: shifts_necmp_i64_i32:
; RV64:       # %bb.0:
; RV64-NEXT:    sext.w a1, a0
; RV64-NEXT:    xor a0, a1, a0
; RV64-NEXT:    snez a0, a0
; RV64-NEXT:    ret
  %tmp0 = shl i64 %x, 32 ; 64-32
  %tmp1 = ashr exact i64 %tmp0, 32 ; 64-32
  %tmp2 = icmp ne i64 %tmp1, %x
  ret i1 %tmp2
}

; i64 -> i16 signed-truncation check, two-shift form.
define i1 @shifts_necmp_i64_i16(i64 %x) nounwind {
; RV32I-LABEL: shifts_necmp_i64_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a2, a0, 16
; RV32I-NEXT:    srai a3, a2, 16
; RV32I-NEXT:    srai a2, a2, 31
; RV32I-NEXT:    xor a1, a2, a1
; RV32I-NEXT:    xor a0, a3, a0
; RV32I-NEXT:    or a0, a0, a1
; RV32I-NEXT:    snez a0, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: shifts_necmp_i64_i16:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a1, a0, 48
; RV64I-NEXT:    srai a1, a1, 48
; RV64I-NEXT:    xor a0, a1, a0
; RV64I-NEXT:    snez a0, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: shifts_necmp_i64_i16:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    sext.h a2, a0
; RV32ZBB-NEXT:    srai a3, a2, 31
; RV32ZBB-NEXT:    xor a0, a2, a0
; RV32ZBB-NEXT:    xor a1, a3, a1
; RV32ZBB-NEXT:    or a0, a0, a1
; RV32ZBB-NEXT:    snez a0, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: shifts_necmp_i64_i16:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    sext.h a1, a0
; RV64ZBB-NEXT:    xor a0, a1, a0
; RV64ZBB-NEXT:    snez a0, a0
; RV64ZBB-NEXT:    ret
  %tmp0 = shl i64 %x, 48 ; 64-16
  %tmp1 = ashr exact i64 %tmp0, 48 ; 64-16
  %tmp2 = icmp ne i64 %tmp1, %x
  ret i1 %tmp2
}

; i64 -> i8 signed-truncation check, two-shift form.
define i1 @shifts_necmp_i64_i8(i64 %x) nounwind {
; RV32I-LABEL: shifts_necmp_i64_i8:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a2, a0, 24
; RV32I-NEXT:    srai a3, a2, 24
; RV32I-NEXT:    srai a2, a2, 31
; RV32I-NEXT:    xor a1, a2, a1
; RV32I-NEXT:    xor a0, a3, a0
; RV32I-NEXT:    or a0, a0, a1
; RV32I-NEXT:    snez a0, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: shifts_necmp_i64_i8:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a1, a0, 56
; RV64I-NEXT:    srai a1, a1, 56
; RV64I-NEXT:    xor a0, a1, a0
; RV64I-NEXT:    snez a0, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: shifts_necmp_i64_i8:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    sext.b a2, a0
; RV32ZBB-NEXT:    srai a3, a2, 31
; RV32ZBB-NEXT:    xor a0, a2, a0
; RV32ZBB-NEXT:    xor a1, a3, a1
; RV32ZBB-NEXT:    or a0, a0, a1
; RV32ZBB-NEXT:    snez a0, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: shifts_necmp_i64_i8:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    sext.b a1, a0
; RV64ZBB-NEXT:    xor a0, a1, a0
; RV64ZBB-NEXT:    snez a0, a0
; RV64ZBB-NEXT:    ret
  %tmp0 = shl i64 %x, 56 ; 64-8
  %tmp1 = ashr exact i64 %tmp0, 56 ; 64-8
  %tmp2 = icmp ne i64 %tmp1, %x
  ret i1 %tmp2
}

; ---------------------------------------------------------------------------- ;
; add + icmp ult
; ---------------------------------------------------------------------------- ;

; i16 -> i8 signed-truncation check, add + icmp ult form.
define i1 @add_ultcmp_i16_i8(i16 %x) nounwind {
; RV32I-LABEL: add_ultcmp_i16_i8:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    addi a0, a0, -128
; RV32I-NEXT:    srli a0, a0, 8
; RV32I-NEXT:    sltiu a0, a0, 255
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_ultcmp_i16_i8:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 48
; RV64I-NEXT:    addi a0, a0, -128
; RV64I-NEXT:    srli a0, a0, 8
; RV64I-NEXT:    sltiu a0, a0, 255
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: add_ultcmp_i16_i8:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    zext.h a0, a0
; RV32ZBB-NEXT:    addi a0, a0, -128
; RV32ZBB-NEXT:    srli a0, a0, 8
; RV32ZBB-NEXT:    sltiu a0, a0, 255
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: add_ultcmp_i16_i8:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    zext.h a0, a0
; RV64ZBB-NEXT:    addi a0, a0, -128
; RV64ZBB-NEXT:    srli a0, a0, 8
; RV64ZBB-NEXT:    sltiu a0, a0, 255
; RV64ZBB-NEXT:    ret
  %tmp0 = add i16 %x, -128 ; ~0U << (8-1)
  %tmp1 = icmp ult i16 %tmp0, -256 ; ~0U << 8
  ret i1 %tmp1
}

; i32 -> i16 signed-truncation check, add + icmp ult form.
define i1 @add_ultcmp_i32_i16(i32 %x) nounwind {
; RV32I-LABEL: add_ultcmp_i32_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    lui a1, 1048568
; RV32I-NEXT:    add a0, a0, a1
; RV32I-NEXT:    lui a1, 1048560
; RV32I-NEXT:    sltu a0, a0, a1
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_ultcmp_i32_i16:
; RV64I:       # %bb.0:
; RV64I-NEXT:    lui a1, 1048568
; RV64I-NEXT:    addw a0, a0, a1
; RV64I-NEXT:    lui a1, 1048560
; RV64I-NEXT:    sltu a0, a0, a1
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: add_ultcmp_i32_i16:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    sext.h a1, a0
; RV32ZBB-NEXT:    xor a0, a1, a0
; RV32ZBB-NEXT:    snez a0, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: add_ultcmp_i32_i16:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    sext.w a1, a0
; RV64ZBB-NEXT:    sext.h a0, a0
; RV64ZBB-NEXT:    xor a0, a0, a1
; RV64ZBB-NEXT:    snez a0, a0
; RV64ZBB-NEXT:    ret
  %tmp0 = add i32 %x, -32768 ; ~0U << (16-1)
  %tmp1 = icmp ult i32 %tmp0, -65536 ; ~0U << 16
  ret i1 %tmp1
}

; i32 -> i8 signed-truncation check, add + icmp ult form.
define i1 @add_ultcmp_i32_i8(i32 %x) nounwind {
; RV32-LABEL: add_ultcmp_i32_i8:
; RV32:       # %bb.0:
; RV32-NEXT:    addi a0, a0, -128
; RV32-NEXT:    sltiu a0, a0, -256
; RV32-NEXT:    ret
;
; RV64-LABEL: add_ultcmp_i32_i8:
; RV64:       # %bb.0:
; RV64-NEXT:    addiw a0, a0, -128
; RV64-NEXT:    sltiu a0, a0, -256
; RV64-NEXT:    ret
  %tmp0 = add i32 %x, -128 ; ~0U << (8-1)
  %tmp1 = icmp ult i32 %tmp0, -256 ; ~0U << 8
  ret i1 %tmp1
}

; i64 -> i32 signed-truncation check, add + icmp ult form.
define i1 @add_ultcmp_i64_i32(i64 %x) nounwind {
; RV32-LABEL: add_ultcmp_i64_i32:
; RV32:       # %bb.0:
; RV32-NEXT:    srai a0, a0, 31
; RV32-NEXT:    xor a0, a0, a1
; RV32-NEXT:    snez a0, a0
; RV32-NEXT:    ret
;
; RV64-LABEL: add_ultcmp_i64_i32:
; RV64:       # %bb.0:
; RV64-NEXT:    sext.w a1, a0
; RV64-NEXT:    xor a0, a1, a0
; RV64-NEXT:    snez a0, a0
; RV64-NEXT:    ret
  %tmp0 = add i64 %x, -2147483648 ; ~0U << (32-1)
  %tmp1 = icmp ult i64 %tmp0, -4294967296 ; ~0U << 32
  ret i1 %tmp1
}

; i64 -> i16 signed-truncation check, add + icmp ult form.
define i1 @add_ultcmp_i64_i16(i64 %x) nounwind {
; RV32I-LABEL: add_ultcmp_i64_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    lui a2, 1048568
; RV32I-NEXT:    add a2, a0, a2
; RV32I-NEXT:    sltu a0, a2, a0
; RV32I-NEXT:    add a0, a1, a0
; RV32I-NEXT:    lui a1, 1048560
; RV32I-NEXT:    sltu a1, a2, a1
; RV32I-NEXT:    snez a0, a0
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_ultcmp_i64_i16:
; RV64I:       # %bb.0:
; RV64I-NEXT:    lui a1, 1048568
; RV64I-NEXT:    add a0, a0, a1
; RV64I-NEXT:    lui a1, 1048560
; RV64I-NEXT:    sltu a0, a0, a1
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: add_ultcmp_i64_i16:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    sext.h a2, a0
; RV32ZBB-NEXT:    xor a0, a2, a0
; RV32ZBB-NEXT:    srai a2, a2, 31
; RV32ZBB-NEXT:    xor a1, a2, a1
; RV32ZBB-NEXT:    or a0, a0, a1
; RV32ZBB-NEXT:    snez a0, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: add_ultcmp_i64_i16:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    sext.h a1, a0
; RV64ZBB-NEXT:    xor a0, a1, a0
; RV64ZBB-NEXT:    snez a0, a0
; RV64ZBB-NEXT:    ret
  %tmp0 = add i64 %x, -32768 ; ~0U << (16-1)
  %tmp1 = icmp ult i64 %tmp0, -65536 ; ~0U << 16
  ret i1 %tmp1
}

; i64 -> i8 signed-truncation check, add + icmp ult form.
define i1 @add_ultcmp_i64_i8(i64 %x) nounwind {
; RV32I-LABEL: add_ultcmp_i64_i8:
; RV32I:       # %bb.0:
; RV32I-NEXT:    addi a2, a0, -128
; RV32I-NEXT:    sltu a0, a2, a0
; RV32I-NEXT:    add a0, a1, a0
; RV32I-NEXT:    snez a0, a0
; RV32I-NEXT:    sltiu a1, a2, -256
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    ret
;
; RV64-LABEL: add_ultcmp_i64_i8:
; RV64:       # %bb.0:
; RV64-NEXT:    addi a0, a0, -128
; RV64-NEXT:    sltiu a0, a0, -256
; RV64-NEXT:    ret
;
; RV32ZBB-LABEL: add_ultcmp_i64_i8:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    sext.b a2, a0
; RV32ZBB-NEXT:    xor a0, a2, a0
; RV32ZBB-NEXT:    srai a2, a2, 31
; RV32ZBB-NEXT:    xor a1, a2, a1
; RV32ZBB-NEXT:    or a0, a0, a1
; RV32ZBB-NEXT:    snez a0, a0
; RV32ZBB-NEXT:    ret
  %tmp0 = add i64 %x, -128 ; ~0U << (8-1)
  %tmp1 = icmp ult i64 %tmp0, -256 ; ~0U << 8
  ret i1 %tmp1
}

; Slightly more canonical variant
; i16 -> i8 signed-truncation check, add + icmp ule form.
define i1 @add_ulecmp_i16_i8(i16 %x) nounwind {
; RV32I-LABEL: add_ulecmp_i16_i8:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    addi a0, a0, -128
; RV32I-NEXT:    srli a0, a0, 8
; RV32I-NEXT:    sltiu a0, a0, 255
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_ulecmp_i16_i8:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 48
; RV64I-NEXT:    addi a0, a0, -128
; RV64I-NEXT:    srli a0, a0, 8
; RV64I-NEXT:    sltiu a0, a0, 255
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: add_ulecmp_i16_i8:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    zext.h a0, a0
; RV32ZBB-NEXT:    addi a0, a0, -128
; RV32ZBB-NEXT:    srli a0, a0, 8
; RV32ZBB-NEXT:    sltiu a0, a0, 255
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: add_ulecmp_i16_i8:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    zext.h a0, a0
; RV64ZBB-NEXT:    addi a0, a0, -128
; RV64ZBB-NEXT:    srli a0, a0, 8
; RV64ZBB-NEXT:    sltiu a0, a0, 255
; RV64ZBB-NEXT:    ret
  %tmp0 = add i16 %x, -128 ; ~0U << (8-1)
  %tmp1 = icmp ule i16 %tmp0, -257 ; ~0U << 8 - 1
  ret i1 %tmp1
}

; ---------------------------------------------------------------------------- ;
; add + icmp uge
; ---------------------------------------------------------------------------- ;

; i16 -> i8 signed-truncation check, add + icmp uge form.
define i1 @add_ugecmp_i16_i8(i16 %x) nounwind {
; RV32I-LABEL: add_ugecmp_i16_i8:
; RV32I:       # %bb.0:
; RV32I-NEXT:    addi a0, a0, 128
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    sltiu a0, a0, 256
; RV32I-NEXT:    xori a0, a0, 1
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_ugecmp_i16_i8:
; RV64I:       # %bb.0:
; RV64I-NEXT:    addi a0, a0, 128
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 48
; RV64I-NEXT:    sltiu a0, a0, 256
; RV64I-NEXT:    xori a0, a0, 1
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: add_ugecmp_i16_i8:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    addi a0, a0, 128
; RV32ZBB-NEXT:    zext.h a0, a0
; RV32ZBB-NEXT:    sltiu a0, a0, 256
; RV32ZBB-NEXT:    xori a0, a0, 1
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: add_ugecmp_i16_i8:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    addi a0, a0, 128
; RV64ZBB-NEXT:    zext.h a0, a0
; RV64ZBB-NEXT:    sltiu a0, a0, 256
; RV64ZBB-NEXT:    xori a0, a0, 1
; RV64ZBB-NEXT:    ret
  %tmp0 = add i16 %x, 128 ; 1U << (8-1)
  %tmp1 = icmp uge i16 %tmp0, 256 ; 1U << 8
  ret i1 %tmp1
}

; i32 -> i16 signed-truncation check, add + icmp uge form.
define i1 @add_ugecmp_i32_i16(i32 %x) nounwind {
; RV32I-LABEL: add_ugecmp_i32_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    lui a1, 8
; RV32I-NEXT:    add a0, a0, a1
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    snez a0, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_ugecmp_i32_i16:
; RV64I:       # %bb.0:
; RV64I-NEXT:    lui a1, 8
; RV64I-NEXT:    add a0, a0, a1
; RV64I-NEXT:    srliw a0, a0, 16
; RV64I-NEXT:    snez a0, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: add_ugecmp_i32_i16:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    sext.h a1, a0
; RV32ZBB-NEXT:    xor a0, a1, a0
; RV32ZBB-NEXT:    snez a0, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: add_ugecmp_i32_i16:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    sext.w a1, a0
; RV64ZBB-NEXT:    sext.h a0, a0
; RV64ZBB-NEXT:    xor a0, a0, a1
; RV64ZBB-NEXT:    snez a0, a0
; RV64ZBB-NEXT:    ret
  %tmp0 = add i32 %x, 32768 ; 1U << (16-1)
  %tmp1 = icmp uge i32 %tmp0, 65536 ; 1U << 16
  ret i1 %tmp1
}

; i32 -> i8 signed-truncation check, add + icmp uge form.
define i1 @add_ugecmp_i32_i8(i32 %x) nounwind {
; RV32-LABEL: add_ugecmp_i32_i8:
; RV32:       # %bb.0:
; RV32-NEXT:    addi a0, a0, 128
; RV32-NEXT:    sltiu a0, a0, 256
; RV32-NEXT:    xori a0, a0, 1
; RV32-NEXT:    ret
;
; RV64-LABEL: add_ugecmp_i32_i8:
; RV64:       # %bb.0:
; RV64-NEXT:    addiw a0, a0, 128
; RV64-NEXT:    sltiu a0, a0, 256
; RV64-NEXT:    xori a0, a0, 1
; RV64-NEXT:    ret
  %tmp0 = add i32 %x, 128 ; 1U << (8-1)
  %tmp1 = icmp uge i32 %tmp0, 256 ; 1U << 8
  ret i1 %tmp1
}

; i64 -> i32 signed-truncation check, add + icmp uge form.
define i1 @add_ugecmp_i64_i32(i64 %x) nounwind {
; RV32-LABEL: add_ugecmp_i64_i32:
; RV32:       # %bb.0:
; RV32-NEXT:    srai a0, a0, 31
; RV32-NEXT:    xor a0, a0, a1
; RV32-NEXT:    snez a0, a0
; RV32-NEXT:    ret
;
; RV64-LABEL: add_ugecmp_i64_i32:
; RV64:       # %bb.0:
; RV64-NEXT:    sext.w a1, a0
; RV64-NEXT:    xor a0, a1, a0
; RV64-NEXT:    snez a0, a0
; RV64-NEXT:    ret
  %tmp0 = add i64 %x, 2147483648 ; 1U << (32-1)
  %tmp1 = icmp uge i64 %tmp0, 4294967296 ; 1U << 32
  ret i1 %tmp1
}

; i64 -> i16 signed-truncation check, add + icmp uge form.
define i1 @add_ugecmp_i64_i16(i64 %x) nounwind {
; RV32I-LABEL: add_ugecmp_i64_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    lui a2, 8
; RV32I-NEXT:    add a2, a0, a2
; RV32I-NEXT:    sltu a0, a2, a0
; RV32I-NEXT:    add a0, a1, a0
; RV32I-NEXT:    srli a2, a2, 16
; RV32I-NEXT:    or a0, a0, a2
; RV32I-NEXT:    snez a0, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_ugecmp_i64_i16:
; RV64I:       # %bb.0:
; RV64I-NEXT:    lui a1, 8
; RV64I-NEXT:    add a0, a0, a1
; RV64I-NEXT:    srli a0, a0, 16
; RV64I-NEXT:    snez a0, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: add_ugecmp_i64_i16:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    sext.h a2, a0
; RV32ZBB-NEXT:    xor a0, a2, a0
; RV32ZBB-NEXT:    srai a2, a2, 31
; RV32ZBB-NEXT:    xor a1, a2, a1
; RV32ZBB-NEXT:    or a0, a0, a1
; RV32ZBB-NEXT:    snez a0, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: add_ugecmp_i64_i16:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    sext.h a1, a0
; RV64ZBB-NEXT:    xor a0, a1, a0
; RV64ZBB-NEXT:    snez a0, a0
; RV64ZBB-NEXT:    ret
  %tmp0 = add i64 %x, 32768 ; 1U << (16-1)
  %tmp1 = icmp uge i64 %tmp0, 65536 ; 1U << 16
  ret i1 %tmp1
}

; i64 -> i8 signed-truncation check, add + icmp uge form.
define i1 @add_ugecmp_i64_i8(i64 %x) nounwind {
; RV32I-LABEL: add_ugecmp_i64_i8:
; RV32I:       # %bb.0:
; RV32I-NEXT:    addi a2, a0, 128
; RV32I-NEXT:    sltu a0, a2, a0
; RV32I-NEXT:    sltiu a2, a2, 256
; RV32I-NEXT:    add a0, a1, a0
; RV32I-NEXT:    snez a0, a0
; RV32I-NEXT:    xori a1, a2, 1
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    ret
;
; RV64-LABEL: add_ugecmp_i64_i8:
; RV64:       # %bb.0:
; RV64-NEXT:    addi a0, a0, 128
; RV64-NEXT:    sltiu a0, a0, 256
; RV64-NEXT:    xori a0, a0, 1
; RV64-NEXT:    ret
;
; RV32ZBB-LABEL: add_ugecmp_i64_i8:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    sext.b a2, a0
; RV32ZBB-NEXT:    xor a0, a2, a0
; RV32ZBB-NEXT:    srai a2, a2, 31
; RV32ZBB-NEXT:    xor a1, a2, a1
; RV32ZBB-NEXT:    or a0, a0, a1
; RV32ZBB-NEXT:    snez a0, a0
; RV32ZBB-NEXT:    ret
  %tmp0 = add i64 %x, 128 ; 1U << (8-1)
  %tmp1 = icmp uge i64 %tmp0, 256 ; 1U << 8
  ret i1 %tmp1
}

; Slightly more canonical variant
; i16 -> i8 signed-truncation check, add + icmp ugt form.
define i1 @add_ugtcmp_i16_i8(i16 %x) nounwind {
; RV32I-LABEL: add_ugtcmp_i16_i8:
; RV32I:       # %bb.0:
; RV32I-NEXT:    addi a0, a0, 128
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    sltiu a0, a0, 256
; RV32I-NEXT:    xori a0, a0, 1
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_ugtcmp_i16_i8:
; RV64I:       # %bb.0:
; RV64I-NEXT:    addi a0, a0, 128
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 48
; RV64I-NEXT:    sltiu a0, a0, 256
; RV64I-NEXT:    xori a0, a0, 1
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: add_ugtcmp_i16_i8:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    addi a0, a0, 128
; RV32ZBB-NEXT:    zext.h a0, a0
; RV32ZBB-NEXT:    sltiu a0, a0, 256
; RV32ZBB-NEXT:    xori a0, a0, 1
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: add_ugtcmp_i16_i8:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    addi a0, a0, 128
; RV64ZBB-NEXT:    zext.h a0, a0
; RV64ZBB-NEXT:    sltiu a0, a0, 256
; RV64ZBB-NEXT:    xori a0, a0, 1
; RV64ZBB-NEXT:    ret
  %tmp0 = add i16 %x, 128 ; 1U << (8-1)
  %tmp1 = icmp ugt i16 %tmp0, 255 ; (1U << 8) - 1
  ret i1 %tmp1
}

; Negative tests
; ---------------------------------------------------------------------------- ;

; Adding not a constant
; Negative test: the add operand is a variable, so the fold must not fire.
define i1 @add_ugecmp_bad_i16_i8_add(i16 %x, i16 %y) nounwind {
; RV32I-LABEL: add_ugecmp_bad_i16_i8_add:
; RV32I:       # %bb.0:
; RV32I-NEXT:    add a0, a0, a1
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    sltiu a0, a0, 256
; RV32I-NEXT:    xori a0, a0, 1
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_ugecmp_bad_i16_i8_add:
; RV64I:       # %bb.0:
; RV64I-NEXT:    add a0, a0, a1
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 48
; RV64I-NEXT:    sltiu a0, a0, 256
; RV64I-NEXT:    xori a0, a0, 1
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: add_ugecmp_bad_i16_i8_add:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    add a0, a0, a1
; RV32ZBB-NEXT:    zext.h a0, a0
; RV32ZBB-NEXT:    sltiu a0, a0, 256
; RV32ZBB-NEXT:    xori a0, a0, 1
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: add_ugecmp_bad_i16_i8_add:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    add a0, a0, a1
; RV64ZBB-NEXT:    zext.h a0, a0
; RV64ZBB-NEXT:    sltiu a0, a0, 256
; RV64ZBB-NEXT:    xori a0, a0, 1
; RV64ZBB-NEXT:    ret
  %tmp0 = add i16 %x, %y
  %tmp1 = icmp uge i16 %tmp0, 256 ; 1U << 8
  ret i1 %tmp1
}

; Comparing not with a constant
; Negative test: the compare operand is a variable, so the fold must not fire.
define i1 @add_ugecmp_bad_i16_i8_cmp(i16 %x, i16 %y) nounwind {
; RV32I-LABEL: add_ugecmp_bad_i16_i8_cmp:
; RV32I:       # %bb.0:
; RV32I-NEXT:    lui a2, 16
; RV32I-NEXT:    addi a0, a0, 128
; RV32I-NEXT:    addi a2, a2, -1
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    sltu a0, a0, a1
; RV32I-NEXT:    xori a0, a0, 1
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_ugecmp_bad_i16_i8_cmp:
; RV64I:       # %bb.0:
; RV64I-NEXT:    lui a2, 16
; RV64I-NEXT:    addi a0, a0, 128
; RV64I-NEXT:    addiw a2, a2, -1
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    sltu a0, a0, a1
; RV64I-NEXT:    xori a0, a0, 1
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: add_ugecmp_bad_i16_i8_cmp:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    zext.h a1, a1
; RV32ZBB-NEXT:    addi a0, a0, 128
; RV32ZBB-NEXT:    zext.h a0, a0
; RV32ZBB-NEXT:    sltu a0, a0, a1
; RV32ZBB-NEXT:    xori a0, a0, 1
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: add_ugecmp_bad_i16_i8_cmp:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    zext.h a1, a1
; RV64ZBB-NEXT:    addi a0, a0, 128
; RV64ZBB-NEXT:    zext.h a0, a0
; RV64ZBB-NEXT:    sltu a0, a0, a1
; RV64ZBB-NEXT:    xori a0, a0, 1
; RV64ZBB-NEXT:    ret
  %tmp0 = add i16 %x, 128 ; 1U << (8-1)
  %tmp1 = icmp uge i16 %tmp0, %y
  ret i1 %tmp1
}

; Second constant is not larger than the first one
; Negative test: compare constant is not larger than the add constant.
define i1 @add_ugecmp_bad_i8_i16(i16 %x) nounwind {
; RV32I-LABEL: add_ugecmp_bad_i8_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    addi a0, a0, 128
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    sltiu a0, a0, 128
; RV32I-NEXT:    xori a0, a0, 1
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_ugecmp_bad_i8_i16:
; RV64I:       # %bb.0:
; RV64I-NEXT:    addi a0, a0, 128
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 48
; RV64I-NEXT:    sltiu a0, a0, 128
; RV64I-NEXT:    xori a0, a0, 1
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: add_ugecmp_bad_i8_i16:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    addi a0, a0, 128
; RV32ZBB-NEXT:    zext.h a0, a0
; RV32ZBB-NEXT:    sltiu a0, a0, 128
; RV32ZBB-NEXT:    xori a0, a0, 1
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: add_ugecmp_bad_i8_i16:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    addi a0, a0, 128
; RV64ZBB-NEXT:    zext.h a0, a0
; RV64ZBB-NEXT:    sltiu a0, a0, 128
; RV64ZBB-NEXT:    xori a0, a0, 1
; RV64ZBB-NEXT:    ret
  %tmp0 = add i16 %x, 128 ; 1U << (8-1)
  %tmp1 = icmp uge i16 %tmp0, 128 ; 1U << (8-1)
  ret i1 %tmp1
}

; First constant is not power of two
; Negative test: add constant (192) is not a power of two.
define i1 @add_ugecmp_bad_i16_i8_c0notpoweroftwo(i16 %x) nounwind {
; RV32I-LABEL: add_ugecmp_bad_i16_i8_c0notpoweroftwo:
; RV32I:       # %bb.0:
; RV32I-NEXT:    addi a0, a0, 192
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    sltiu a0, a0, 256
; RV32I-NEXT:    xori a0, a0, 1
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_ugecmp_bad_i16_i8_c0notpoweroftwo:
; RV64I:       # %bb.0:
; RV64I-NEXT:    addi a0, a0, 192
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 48
; RV64I-NEXT:    sltiu a0, a0, 256
; RV64I-NEXT:    xori a0, a0, 1
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: add_ugecmp_bad_i16_i8_c0notpoweroftwo:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    addi a0, a0, 192
; RV32ZBB-NEXT:    zext.h a0, a0
; RV32ZBB-NEXT:    sltiu a0, a0, 256
; RV32ZBB-NEXT:    xori a0, a0, 1
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: add_ugecmp_bad_i16_i8_c0notpoweroftwo:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    addi a0, a0, 192
; RV64ZBB-NEXT:    zext.h a0, a0
; RV64ZBB-NEXT:    sltiu a0, a0, 256
; RV64ZBB-NEXT:    xori a0, a0, 1
; RV64ZBB-NEXT:    ret
  %tmp0 = add i16 %x, 192 ; (1U << (8-1)) + (1U << (8-1-1))
  %tmp1 = icmp uge i16 %tmp0, 256 ; 1U << 8
  ret i1 %tmp1
}

; Second constant is not power of two
; Negative test: compare constant (768) is not a power of two.
define i1 @add_ugecmp_bad_i16_i8_c1notpoweroftwo(i16 %x) nounwind {
; RV32I-LABEL: add_ugecmp_bad_i16_i8_c1notpoweroftwo:
; RV32I:       # %bb.0:
; RV32I-NEXT:    addi a0, a0, 128
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    sltiu a0, a0, 768
; RV32I-NEXT:    xori a0, a0, 1
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_ugecmp_bad_i16_i8_c1notpoweroftwo:
; RV64I:       # %bb.0:
; RV64I-NEXT:    addi a0, a0, 128
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 48
; RV64I-NEXT:    sltiu a0, a0, 768
; RV64I-NEXT:    xori a0, a0, 1
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: add_ugecmp_bad_i16_i8_c1notpoweroftwo:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    addi a0, a0, 128
; RV32ZBB-NEXT:    zext.h a0, a0
; RV32ZBB-NEXT:    sltiu a0, a0, 768
; RV32ZBB-NEXT:    xori a0, a0, 1
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: add_ugecmp_bad_i16_i8_c1notpoweroftwo:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    addi a0, a0, 128
; RV64ZBB-NEXT:    zext.h a0, a0
; RV64ZBB-NEXT:    sltiu a0, a0, 768
; RV64ZBB-NEXT:    xori a0, a0, 1
; RV64ZBB-NEXT:    ret
  %tmp0 = add i16 %x, 128 ; 1U << (8-1)
  %tmp1 = icmp uge i16 %tmp0, 768 ; (1U << 8)) + (1U << (8+1))
  ret i1 %tmp1
}

; Magic check fails, 64 << 1 != 256
; Negative test: constants fail the magic relation (64 << 1 != 256).
define i1 @add_ugecmp_bad_i16_i8_magic(i16 %x) nounwind {
; RV32I-LABEL: add_ugecmp_bad_i16_i8_magic:
; RV32I:       # %bb.0:
; RV32I-NEXT:    addi a0, a0, 64
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    sltiu a0, a0, 256
; RV32I-NEXT:    xori a0, a0, 1
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_ugecmp_bad_i16_i8_magic:
; RV64I:       # %bb.0:
; RV64I-NEXT:    addi a0, a0, 64
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 48
; RV64I-NEXT:    sltiu a0, a0, 256
; RV64I-NEXT:    xori a0, a0, 1
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: add_ugecmp_bad_i16_i8_magic:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    addi a0, a0, 64
; RV32ZBB-NEXT:    zext.h a0, a0
; RV32ZBB-NEXT:    sltiu a0, a0, 256
; RV32ZBB-NEXT:    xori a0, a0, 1
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: add_ugecmp_bad_i16_i8_magic:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    addi a0, a0, 64
; RV64ZBB-NEXT:    zext.h a0, a0
; RV64ZBB-NEXT:    sltiu a0, a0, 256
; RV64ZBB-NEXT:    xori a0, a0, 1
; RV64ZBB-NEXT:    ret
  %tmp0 = add i16 %x, 64 ; 1U << (8-1-1)
  %tmp1 = icmp uge i16 %tmp0, 256 ; 1U << 8
  ret i1 %tmp1
}

; Bad 'destination type'
; Negative test: i4 is not a legal 'destination' type for the fold.
define i1 @add_ugecmp_bad_i16_i4(i16 %x) nounwind {
; RV32I-LABEL: add_ugecmp_bad_i16_i4:
; RV32I:       # %bb.0:
; RV32I-NEXT:    addi a0, a0, 8
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    sltiu a0, a0, 16
; RV32I-NEXT:    xori a0, a0, 1
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_ugecmp_bad_i16_i4:
; RV64I:       # %bb.0:
; RV64I-NEXT:    addi a0, a0, 8
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 48
; RV64I-NEXT:    sltiu a0, a0, 16
; RV64I-NEXT:    xori a0, a0, 1
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: add_ugecmp_bad_i16_i4:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    addi a0, a0, 8
; RV32ZBB-NEXT:    zext.h a0, a0
; RV32ZBB-NEXT:    sltiu a0, a0, 16
; RV32ZBB-NEXT:    xori a0, a0, 1
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: add_ugecmp_bad_i16_i4:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    addi a0, a0, 8
; RV64ZBB-NEXT:    zext.h a0, a0
; RV64ZBB-NEXT:    sltiu a0, a0, 16
; RV64ZBB-NEXT:    xori a0, a0, 1
; RV64ZBB-NEXT:    ret
  %tmp0 = add i16 %x, 8 ; 1U << (4-1)
  %tmp1 = icmp uge i16 %tmp0, 16 ; 1U << 4
  ret i1 %tmp1
}

; Bad storage type
; Negative test: i24 is not a legal storage type.
define i1 @add_ugecmp_bad_i24_i8(i24 %x) nounwind {
; RV32-LABEL: add_ugecmp_bad_i24_i8:
; RV32:       # %bb.0:
; RV32-NEXT:    addi a0, a0, 128
; RV32-NEXT:    slli a0, a0, 8
; RV32-NEXT:    srli a0, a0, 8
; RV32-NEXT:    sltiu a0, a0, 256
; RV32-NEXT:    xori a0, a0, 1
; RV32-NEXT:    ret
;
; RV64-LABEL: add_ugecmp_bad_i24_i8:
; RV64:       # %bb.0:
; RV64-NEXT:    addi a0, a0, 128
; RV64-NEXT:    slli a0, a0, 40
; RV64-NEXT:    srli a0, a0, 40
; RV64-NEXT:    sltiu a0, a0, 256
; RV64-NEXT:    xori a0, a0, 1
; RV64-NEXT:    ret
  %tmp0 = add i24 %x, 128 ; 1U << (8-1)
  %tmp1 = icmp uge i24 %tmp0, 256 ; 1U << 8
  ret i1 %tmp1
}

; Slightly more canonical variant
; Negative test: ugt -1 is never true for i16, so this folds to constant false.
define i1 @add_ugtcmp_bad_i16_i8(i16 %x) nounwind {
; CHECK-LABEL: add_ugtcmp_bad_i16_i8:
; CHECK:       # %bb.0:
; CHECK-NEXT:    li a0, 0
; CHECK-NEXT:    ret
  %tmp0 = add i16 %x, 128 ; 1U << (8-1)
  %tmp1 = icmp ugt i16 %tmp0, -1 ; when we +1 it, it will wrap to 0
  ret i1 %tmp1
}
