xref: /llvm-project/llvm/test/CodeGen/RISCV/signed-truncation-check.ll (revision 9122c5235ec85ce0c0ad337e862b006e7b349d84)
; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv32-unknown-linux-gnu < %s | FileCheck %s --check-prefixes=CHECK,RV32,RV32I
; RUN: llc -mtriple=riscv64-unknown-linux-gnu < %s | FileCheck %s --check-prefixes=CHECK,RV64,RV64I
; RUN: llc -mtriple=riscv32-unknown-linux-gnu -mattr=+zbb < %s | FileCheck %s --check-prefixes=CHECK,RV32,RV32ZBB
; RUN: llc -mtriple=riscv64-unknown-linux-gnu -mattr=+zbb < %s | FileCheck %s --check-prefixes=CHECK,RV64,RV64ZBB

; https://bugs.llvm.org/show_bug.cgi?id=38149

; We are truncating from a wider width, and then sign-extending
; back to the original width. Then we are equality-comparing the original
; and the round-tripped value. If they don't match, the truncation was
; lossy (the value did not fit in the narrow signed type).

; This can be expressed in several ways in IR:
;   trunc + sext + icmp eq <- not canonical
;   shl   + ashr + icmp eq
;   add          + icmp uge/ugt
;   add          + icmp ult/ule
; However only the simplest form (with two shifts) gets lowered best.
; i16 holds an i8: check via shl 8 + ashr exact 8 + icmp eq.
define i1 @shifts_eqcmp_i16_i8(i16 %x) nounwind {
; RV32I-LABEL: shifts_eqcmp_i16_i8:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a1, a0, 16
; RV32I-NEXT:    slli a0, a0, 24
; RV32I-NEXT:    srli a1, a1, 16
; RV32I-NEXT:    srai a0, a0, 8
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    xor a0, a0, a1
; RV32I-NEXT:    seqz a0, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: shifts_eqcmp_i16_i8:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a1, a0, 48
; RV64I-NEXT:    slli a0, a0, 56
; RV64I-NEXT:    srli a1, a1, 48
; RV64I-NEXT:    srai a0, a0, 8
; RV64I-NEXT:    srli a0, a0, 48
; RV64I-NEXT:    xor a0, a0, a1
; RV64I-NEXT:    seqz a0, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: shifts_eqcmp_i16_i8:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    zext.h a1, a0
; RV32ZBB-NEXT:    sext.b a0, a0
; RV32ZBB-NEXT:    zext.h a0, a0
; RV32ZBB-NEXT:    xor a0, a0, a1
; RV32ZBB-NEXT:    seqz a0, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: shifts_eqcmp_i16_i8:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    zext.h a1, a0
; RV64ZBB-NEXT:    sext.b a0, a0
; RV64ZBB-NEXT:    zext.h a0, a0
; RV64ZBB-NEXT:    xor a0, a0, a1
; RV64ZBB-NEXT:    seqz a0, a0
; RV64ZBB-NEXT:    ret
  %tmp0 = shl i16 %x, 8 ; 16-8
  %tmp1 = ashr exact i16 %tmp0, 8 ; 16-8
  %tmp2 = icmp eq i16 %tmp1, %x
  ret i1 %tmp2
}

; i32 holds an i16: check via shl 16 + ashr exact 16 + icmp eq.
define i1 @shifts_eqcmp_i32_i16(i32 %x) nounwind {
; RV32I-LABEL: shifts_eqcmp_i32_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a1, a0, 16
; RV32I-NEXT:    srai a1, a1, 16
; RV32I-NEXT:    xor a0, a1, a0
; RV32I-NEXT:    seqz a0, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: shifts_eqcmp_i32_i16:
; RV64I:       # %bb.0:
; RV64I-NEXT:    sext.w a1, a0
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srai a0, a0, 48
; RV64I-NEXT:    xor a0, a0, a1
; RV64I-NEXT:    seqz a0, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: shifts_eqcmp_i32_i16:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    sext.h a1, a0
; RV32ZBB-NEXT:    xor a0, a1, a0
; RV32ZBB-NEXT:    seqz a0, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: shifts_eqcmp_i32_i16:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    sext.w a1, a0
; RV64ZBB-NEXT:    sext.h a0, a0
; RV64ZBB-NEXT:    xor a0, a0, a1
; RV64ZBB-NEXT:    seqz a0, a0
; RV64ZBB-NEXT:    ret
  %tmp0 = shl i32 %x, 16 ; 32-16
  %tmp1 = ashr exact i32 %tmp0, 16 ; 32-16
  %tmp2 = icmp eq i32 %tmp1, %x
  ret i1 %tmp2
}

; i32 holds an i8: check via shl 24 + ashr exact 24 + icmp eq.
define i1 @shifts_eqcmp_i32_i8(i32 %x) nounwind {
; RV32I-LABEL: shifts_eqcmp_i32_i8:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a1, a0, 24
; RV32I-NEXT:    srai a1, a1, 24
; RV32I-NEXT:    xor a0, a1, a0
; RV32I-NEXT:    seqz a0, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: shifts_eqcmp_i32_i8:
; RV64I:       # %bb.0:
; RV64I-NEXT:    sext.w a1, a0
; RV64I-NEXT:    slli a0, a0, 56
; RV64I-NEXT:    srai a0, a0, 56
; RV64I-NEXT:    xor a0, a0, a1
; RV64I-NEXT:    seqz a0, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: shifts_eqcmp_i32_i8:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    sext.b a1, a0
; RV32ZBB-NEXT:    xor a0, a1, a0
; RV32ZBB-NEXT:    seqz a0, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: shifts_eqcmp_i32_i8:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    sext.w a1, a0
; RV64ZBB-NEXT:    sext.b a0, a0
; RV64ZBB-NEXT:    xor a0, a0, a1
; RV64ZBB-NEXT:    seqz a0, a0
; RV64ZBB-NEXT:    ret
  %tmp0 = shl i32 %x, 24 ; 32-8
  %tmp1 = ashr exact i32 %tmp0, 24 ; 32-8
  %tmp2 = icmp eq i32 %tmp1, %x
  ret i1 %tmp2
}

; i64 holds an i32: check via shl 32 + ashr exact 32 + icmp eq.
define i1 @shifts_eqcmp_i64_i32(i64 %x) nounwind {
; RV32-LABEL: shifts_eqcmp_i64_i32:
; RV32:       # %bb.0:
; RV32-NEXT:    srai a0, a0, 31
; RV32-NEXT:    xor a0, a0, a1
; RV32-NEXT:    seqz a0, a0
; RV32-NEXT:    ret
;
; RV64-LABEL: shifts_eqcmp_i64_i32:
; RV64:       # %bb.0:
; RV64-NEXT:    sext.w a1, a0
; RV64-NEXT:    xor a0, a1, a0
; RV64-NEXT:    seqz a0, a0
; RV64-NEXT:    ret
  %tmp0 = shl i64 %x, 32 ; 64-32
  %tmp1 = ashr exact i64 %tmp0, 32 ; 64-32
  %tmp2 = icmp eq i64 %tmp1, %x
  ret i1 %tmp2
}

; i64 holds an i16: check via shl 48 + ashr exact 48 + icmp eq.
define i1 @shifts_eqcmp_i64_i16(i64 %x) nounwind {
; RV32I-LABEL: shifts_eqcmp_i64_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a2, a0, 16
; RV32I-NEXT:    srai a3, a2, 16
; RV32I-NEXT:    srai a2, a2, 31
; RV32I-NEXT:    xor a1, a2, a1
; RV32I-NEXT:    xor a0, a3, a0
; RV32I-NEXT:    or a0, a0, a1
; RV32I-NEXT:    seqz a0, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: shifts_eqcmp_i64_i16:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a1, a0, 48
; RV64I-NEXT:    srai a1, a1, 48
; RV64I-NEXT:    xor a0, a1, a0
; RV64I-NEXT:    seqz a0, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: shifts_eqcmp_i64_i16:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    sext.h a2, a0
; RV32ZBB-NEXT:    srai a3, a2, 31
; RV32ZBB-NEXT:    xor a0, a2, a0
; RV32ZBB-NEXT:    xor a1, a3, a1
; RV32ZBB-NEXT:    or a0, a0, a1
; RV32ZBB-NEXT:    seqz a0, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: shifts_eqcmp_i64_i16:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    sext.h a1, a0
; RV64ZBB-NEXT:    xor a0, a1, a0
; RV64ZBB-NEXT:    seqz a0, a0
; RV64ZBB-NEXT:    ret
  %tmp0 = shl i64 %x, 48 ; 64-16
  %tmp1 = ashr exact i64 %tmp0, 48 ; 64-16
  %tmp2 = icmp eq i64 %tmp1, %x
  ret i1 %tmp2
}

; i64 holds an i8: check via shl 56 + ashr exact 56 + icmp eq.
define i1 @shifts_eqcmp_i64_i8(i64 %x) nounwind {
; RV32I-LABEL: shifts_eqcmp_i64_i8:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a2, a0, 24
; RV32I-NEXT:    srai a3, a2, 24
; RV32I-NEXT:    srai a2, a2, 31
; RV32I-NEXT:    xor a1, a2, a1
; RV32I-NEXT:    xor a0, a3, a0
; RV32I-NEXT:    or a0, a0, a1
; RV32I-NEXT:    seqz a0, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: shifts_eqcmp_i64_i8:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a1, a0, 56
; RV64I-NEXT:    srai a1, a1, 56
; RV64I-NEXT:    xor a0, a1, a0
; RV64I-NEXT:    seqz a0, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: shifts_eqcmp_i64_i8:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    sext.b a2, a0
; RV32ZBB-NEXT:    srai a3, a2, 31
; RV32ZBB-NEXT:    xor a0, a2, a0
; RV32ZBB-NEXT:    xor a1, a3, a1
; RV32ZBB-NEXT:    or a0, a0, a1
; RV32ZBB-NEXT:    seqz a0, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: shifts_eqcmp_i64_i8:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    sext.b a1, a0
; RV64ZBB-NEXT:    xor a0, a1, a0
; RV64ZBB-NEXT:    seqz a0, a0
; RV64ZBB-NEXT:    ret
  %tmp0 = shl i64 %x, 56 ; 64-8
  %tmp1 = ashr exact i64 %tmp0, 56 ; 64-8
  %tmp2 = icmp eq i64 %tmp1, %x
  ret i1 %tmp2
}

; ---------------------------------------------------------------------------- ;
; add + icmp uge
; ---------------------------------------------------------------------------- ;

; i16 holds an i8, expressed as add -128 + icmp uge -256.
define i1 @add_ugecmp_i16_i8(i16 %x) nounwind {
; RV32I-LABEL: add_ugecmp_i16_i8:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    addi a0, a0, -128
; RV32I-NEXT:    srli a0, a0, 8
; RV32I-NEXT:    sltiu a0, a0, 255
; RV32I-NEXT:    xori a0, a0, 1
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_ugecmp_i16_i8:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 48
; RV64I-NEXT:    addi a0, a0, -128
; RV64I-NEXT:    srli a0, a0, 8
; RV64I-NEXT:    sltiu a0, a0, 255
; RV64I-NEXT:    xori a0, a0, 1
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: add_ugecmp_i16_i8:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    zext.h a0, a0
; RV32ZBB-NEXT:    addi a0, a0, -128
; RV32ZBB-NEXT:    srli a0, a0, 8
; RV32ZBB-NEXT:    sltiu a0, a0, 255
; RV32ZBB-NEXT:    xori a0, a0, 1
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: add_ugecmp_i16_i8:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    zext.h a0, a0
; RV64ZBB-NEXT:    addi a0, a0, -128
; RV64ZBB-NEXT:    srli a0, a0, 8
; RV64ZBB-NEXT:    sltiu a0, a0, 255
; RV64ZBB-NEXT:    xori a0, a0, 1
; RV64ZBB-NEXT:    ret
  %tmp0 = add i16 %x, -128 ; ~0U << (8-1)
  %tmp1 = icmp uge i16 %tmp0, -256 ; ~0U << 8
  ret i1 %tmp1
}

; Same pattern, but the i32 value is known to be a zero-extended i16.
define i1 @add_ugecmp_i32_i16_i8(i16 %xx) nounwind {
; RV32I-LABEL: add_ugecmp_i32_i16_i8:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    addi a0, a0, -128
; RV32I-NEXT:    sltiu a0, a0, -256
; RV32I-NEXT:    xori a0, a0, 1
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_ugecmp_i32_i16_i8:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 48
; RV64I-NEXT:    addi a0, a0, -128
; RV64I-NEXT:    sltiu a0, a0, -256
; RV64I-NEXT:    xori a0, a0, 1
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: add_ugecmp_i32_i16_i8:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    zext.h a0, a0
; RV32ZBB-NEXT:    addi a0, a0, -128
; RV32ZBB-NEXT:    sltiu a0, a0, -256
; RV32ZBB-NEXT:    xori a0, a0, 1
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: add_ugecmp_i32_i16_i8:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    zext.h a0, a0
; RV64ZBB-NEXT:    addi a0, a0, -128
; RV64ZBB-NEXT:    sltiu a0, a0, -256
; RV64ZBB-NEXT:    xori a0, a0, 1
; RV64ZBB-NEXT:    ret
  %x = zext i16 %xx to i32
  %tmp0 = add i32 %x, -128 ; ~0U << (8-1)
  %tmp1 = icmp uge i32 %tmp0, -256 ; ~0U << 8
  ret i1 %tmp1
}

; i32 holds an i16, expressed as add -32768 + icmp uge -65536.
define i1 @add_ugecmp_i32_i16(i32 %x) nounwind {
; RV32I-LABEL: add_ugecmp_i32_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    lui a1, 1048568
; RV32I-NEXT:    add a0, a0, a1
; RV32I-NEXT:    lui a1, 1048560
; RV32I-NEXT:    addi a1, a1, -1
; RV32I-NEXT:    sltu a0, a1, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_ugecmp_i32_i16:
; RV64I:       # %bb.0:
; RV64I-NEXT:    lui a1, 1048568
; RV64I-NEXT:    addw a0, a0, a1
; RV64I-NEXT:    lui a1, 1048560
; RV64I-NEXT:    addiw a1, a1, -1
; RV64I-NEXT:    sltu a0, a1, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: add_ugecmp_i32_i16:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    sext.h a1, a0
; RV32ZBB-NEXT:    xor a0, a1, a0
; RV32ZBB-NEXT:    seqz a0, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: add_ugecmp_i32_i16:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    sext.w a1, a0
; RV64ZBB-NEXT:    sext.h a0, a0
; RV64ZBB-NEXT:    xor a0, a0, a1
; RV64ZBB-NEXT:    seqz a0, a0
; RV64ZBB-NEXT:    ret
  %tmp0 = add i32 %x, -32768 ; ~0U << (16-1)
  %tmp1 = icmp uge i32 %tmp0, -65536 ; ~0U << 16
  ret i1 %tmp1
}

; i32 holds an i8, expressed as add -128 + icmp uge -256.
define i1 @add_ugecmp_i32_i8(i32 %x) nounwind {
; RV32-LABEL: add_ugecmp_i32_i8:
; RV32:       # %bb.0:
; RV32-NEXT:    addi a0, a0, -128
; RV32-NEXT:    sltiu a0, a0, -256
; RV32-NEXT:    xori a0, a0, 1
; RV32-NEXT:    ret
;
; RV64-LABEL: add_ugecmp_i32_i8:
; RV64:       # %bb.0:
; RV64-NEXT:    addiw a0, a0, -128
; RV64-NEXT:    sltiu a0, a0, -256
; RV64-NEXT:    xori a0, a0, 1
; RV64-NEXT:    ret
  %tmp0 = add i32 %x, -128 ; ~0U << (8-1)
  %tmp1 = icmp uge i32 %tmp0, -256 ; ~0U << 8
  ret i1 %tmp1
}

; i64 holds an i32, expressed as add INT32_MIN + icmp uge -(1<<32).
define i1 @add_ugecmp_i64_i32(i64 %x) nounwind {
; RV32-LABEL: add_ugecmp_i64_i32:
; RV32:       # %bb.0:
; RV32-NEXT:    srai a0, a0, 31
; RV32-NEXT:    xor a0, a0, a1
; RV32-NEXT:    seqz a0, a0
; RV32-NEXT:    ret
;
; RV64-LABEL: add_ugecmp_i64_i32:
; RV64:       # %bb.0:
; RV64-NEXT:    sext.w a1, a0
; RV64-NEXT:    xor a0, a1, a0
; RV64-NEXT:    seqz a0, a0
; RV64-NEXT:    ret
  %tmp0 = add i64 %x, -2147483648 ; ~0U << (32-1)
  %tmp1 = icmp uge i64 %tmp0, -4294967296 ; ~0U << 32
  ret i1 %tmp1
}

; i64 holds an i16, expressed as add -32768 + icmp uge -65536.
define i1 @add_ugecmp_i64_i16(i64 %x) nounwind {
; RV32I-LABEL: add_ugecmp_i64_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    lui a2, 1048568
; RV32I-NEXT:    add a2, a0, a2
; RV32I-NEXT:    sltu a0, a2, a0
; RV32I-NEXT:    add a0, a1, a0
; RV32I-NEXT:    lui a1, 1048560
; RV32I-NEXT:    addi a1, a1, -1
; RV32I-NEXT:    sltu a1, a1, a2
; RV32I-NEXT:    seqz a0, a0
; RV32I-NEXT:    and a0, a0, a1
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_ugecmp_i64_i16:
; RV64I:       # %bb.0:
; RV64I-NEXT:    lui a1, 1048568
; RV64I-NEXT:    add a0, a0, a1
; RV64I-NEXT:    lui a1, 1048560
; RV64I-NEXT:    addiw a1, a1, -1
; RV64I-NEXT:    sltu a0, a1, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: add_ugecmp_i64_i16:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    sext.h a2, a0
; RV32ZBB-NEXT:    xor a0, a2, a0
; RV32ZBB-NEXT:    srai a2, a2, 31
; RV32ZBB-NEXT:    xor a1, a2, a1
; RV32ZBB-NEXT:    or a0, a0, a1
; RV32ZBB-NEXT:    seqz a0, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: add_ugecmp_i64_i16:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    sext.h a1, a0
; RV64ZBB-NEXT:    xor a0, a1, a0
; RV64ZBB-NEXT:    seqz a0, a0
; RV64ZBB-NEXT:    ret
  %tmp0 = add i64 %x, -32768 ; ~0U << (16-1)
  %tmp1 = icmp uge i64 %tmp0, -65536 ; ~0U << 16
  ret i1 %tmp1
}

; i64 holds an i8, expressed as add -128 + icmp uge -256.
define i1 @add_ugecmp_i64_i8(i64 %x) nounwind {
; RV32I-LABEL: add_ugecmp_i64_i8:
; RV32I:       # %bb.0:
; RV32I-NEXT:    addi a2, a0, -128
; RV32I-NEXT:    sltu a0, a2, a0
; RV32I-NEXT:    sltiu a2, a2, -256
; RV32I-NEXT:    add a0, a1, a0
; RV32I-NEXT:    seqz a0, a0
; RV32I-NEXT:    xori a1, a2, 1
; RV32I-NEXT:    and a0, a0, a1
; RV32I-NEXT:    ret
;
; RV64-LABEL: add_ugecmp_i64_i8:
; RV64:       # %bb.0:
; RV64-NEXT:    addi a0, a0, -128
; RV64-NEXT:    sltiu a0, a0, -256
; RV64-NEXT:    xori a0, a0, 1
; RV64-NEXT:    ret
;
; RV32ZBB-LABEL: add_ugecmp_i64_i8:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    sext.b a2, a0
; RV32ZBB-NEXT:    xor a0, a2, a0
; RV32ZBB-NEXT:    srai a2, a2, 31
; RV32ZBB-NEXT:    xor a1, a2, a1
; RV32ZBB-NEXT:    or a0, a0, a1
; RV32ZBB-NEXT:    seqz a0, a0
; RV32ZBB-NEXT:    ret
  %tmp0 = add i64 %x, -128 ; ~0U << (8-1)
  %tmp1 = icmp uge i64 %tmp0, -256 ; ~0U << 8
  ret i1 %tmp1
}

; Slightly more canonical variant
define i1 @add_ugtcmp_i16_i8(i16 %x) nounwind {
; RV32I-LABEL: add_ugtcmp_i16_i8:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    addi a0, a0, -128
; RV32I-NEXT:    srli a0, a0, 8
; RV32I-NEXT:    sltiu a0, a0, 255
; RV32I-NEXT:    xori a0, a0, 1
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_ugtcmp_i16_i8:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 48
; RV64I-NEXT:    addi a0, a0, -128
; RV64I-NEXT:    srli a0, a0, 8
; RV64I-NEXT:    sltiu a0, a0, 255
; RV64I-NEXT:    xori a0, a0, 1
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: add_ugtcmp_i16_i8:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    zext.h a0, a0
; RV32ZBB-NEXT:    addi a0, a0, -128
; RV32ZBB-NEXT:    srli a0, a0, 8
; RV32ZBB-NEXT:    sltiu a0, a0, 255
; RV32ZBB-NEXT:    xori a0, a0, 1
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: add_ugtcmp_i16_i8:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    zext.h a0, a0
; RV64ZBB-NEXT:    addi a0, a0, -128
; RV64ZBB-NEXT:    srli a0, a0, 8
; RV64ZBB-NEXT:    sltiu a0, a0, 255
; RV64ZBB-NEXT:    xori a0, a0, 1
; RV64ZBB-NEXT:    ret
  %tmp0 = add i16 %x, -128 ; ~0U << (8-1)
  %tmp1 = icmp ugt i16 %tmp0, -257 ; (~0U << 8) - 1
  ret i1 %tmp1
}

; ---------------------------------------------------------------------------- ;
; add + icmp ult
; ---------------------------------------------------------------------------- ;

; i16 holds an i8, expressed as add 128 + icmp ult 256.
define i1 @add_ultcmp_i16_i8(i16 %x) nounwind {
; RV32I-LABEL: add_ultcmp_i16_i8:
; RV32I:       # %bb.0:
; RV32I-NEXT:    addi a0, a0, 128
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    sltiu a0, a0, 256
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_ultcmp_i16_i8:
; RV64I:       # %bb.0:
; RV64I-NEXT:    addi a0, a0, 128
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 48
; RV64I-NEXT:    sltiu a0, a0, 256
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: add_ultcmp_i16_i8:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    addi a0, a0, 128
; RV32ZBB-NEXT:    zext.h a0, a0
; RV32ZBB-NEXT:    sltiu a0, a0, 256
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: add_ultcmp_i16_i8:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    addi a0, a0, 128
; RV64ZBB-NEXT:    zext.h a0, a0
; RV64ZBB-NEXT:    sltiu a0, a0, 256
; RV64ZBB-NEXT:    ret
  %tmp0 = add i16 %x, 128 ; 1U << (8-1)
  %tmp1 = icmp ult i16 %tmp0, 256 ; 1U << 8
  ret i1 %tmp1
}

; i32 holds an i16, expressed as add 32768 + icmp ult 65536.
define i1 @add_ultcmp_i32_i16(i32 %x) nounwind {
; RV32I-LABEL: add_ultcmp_i32_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    lui a1, 8
; RV32I-NEXT:    add a0, a0, a1
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    seqz a0, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_ultcmp_i32_i16:
; RV64I:       # %bb.0:
; RV64I-NEXT:    lui a1, 8
; RV64I-NEXT:    add a0, a0, a1
; RV64I-NEXT:    srliw a0, a0, 16
; RV64I-NEXT:    seqz a0, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: add_ultcmp_i32_i16:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    sext.h a1, a0
; RV32ZBB-NEXT:    xor a0, a1, a0
; RV32ZBB-NEXT:    seqz a0, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: add_ultcmp_i32_i16:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    sext.w a1, a0
; RV64ZBB-NEXT:    sext.h a0, a0
; RV64ZBB-NEXT:    xor a0, a0, a1
; RV64ZBB-NEXT:    seqz a0, a0
; RV64ZBB-NEXT:    ret
  %tmp0 = add i32 %x, 32768 ; 1U << (16-1)
  %tmp1 = icmp ult i32 %tmp0, 65536 ; 1U << 16
  ret i1 %tmp1
}

; i32 holds an i8, expressed as add 128 + icmp ult 256.
define i1 @add_ultcmp_i32_i8(i32 %x) nounwind {
; RV32-LABEL: add_ultcmp_i32_i8:
; RV32:       # %bb.0:
; RV32-NEXT:    addi a0, a0, 128
; RV32-NEXT:    sltiu a0, a0, 256
; RV32-NEXT:    ret
;
; RV64-LABEL: add_ultcmp_i32_i8:
; RV64:       # %bb.0:
; RV64-NEXT:    addiw a0, a0, 128
; RV64-NEXT:    sltiu a0, a0, 256
; RV64-NEXT:    ret
  %tmp0 = add i32 %x, 128 ; 1U << (8-1)
  %tmp1 = icmp ult i32 %tmp0, 256 ; 1U << 8
  ret i1 %tmp1
}

; i64 holds an i32, expressed as add 2^31 + icmp ult 2^32.
define i1 @add_ultcmp_i64_i32(i64 %x) nounwind {
; RV32-LABEL: add_ultcmp_i64_i32:
; RV32:       # %bb.0:
; RV32-NEXT:    srai a0, a0, 31
; RV32-NEXT:    xor a0, a0, a1
; RV32-NEXT:    seqz a0, a0
; RV32-NEXT:    ret
;
; RV64-LABEL: add_ultcmp_i64_i32:
; RV64:       # %bb.0:
; RV64-NEXT:    sext.w a1, a0
; RV64-NEXT:    xor a0, a1, a0
; RV64-NEXT:    seqz a0, a0
; RV64-NEXT:    ret
  %tmp0 = add i64 %x, 2147483648 ; 1U << (32-1)
  %tmp1 = icmp ult i64 %tmp0, 4294967296 ; 1U << 32
  ret i1 %tmp1
}

; i64 holds an i16, expressed as add 32768 + icmp ult 65536.
define i1 @add_ultcmp_i64_i16(i64 %x) nounwind {
; RV32I-LABEL: add_ultcmp_i64_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    lui a2, 8
; RV32I-NEXT:    add a2, a0, a2
; RV32I-NEXT:    sltu a0, a2, a0
; RV32I-NEXT:    add a0, a1, a0
; RV32I-NEXT:    srli a2, a2, 16
; RV32I-NEXT:    or a0, a0, a2
; RV32I-NEXT:    seqz a0, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_ultcmp_i64_i16:
; RV64I:       # %bb.0:
; RV64I-NEXT:    lui a1, 8
; RV64I-NEXT:    add a0, a0, a1
; RV64I-NEXT:    srli a0, a0, 16
; RV64I-NEXT:    seqz a0, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: add_ultcmp_i64_i16:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    sext.h a2, a0
; RV32ZBB-NEXT:    xor a0, a2, a0
; RV32ZBB-NEXT:    srai a2, a2, 31
; RV32ZBB-NEXT:    xor a1, a2, a1
; RV32ZBB-NEXT:    or a0, a0, a1
; RV32ZBB-NEXT:    seqz a0, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: add_ultcmp_i64_i16:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    sext.h a1, a0
; RV64ZBB-NEXT:    xor a0, a1, a0
; RV64ZBB-NEXT:    seqz a0, a0
; RV64ZBB-NEXT:    ret
  %tmp0 = add i64 %x, 32768 ; 1U << (16-1)
  %tmp1 = icmp ult i64 %tmp0, 65536 ; 1U << 16
  ret i1 %tmp1
}

; i64 holds an i8, expressed as add 128 + icmp ult 256.
define i1 @add_ultcmp_i64_i8(i64 %x) nounwind {
; RV32I-LABEL: add_ultcmp_i64_i8:
; RV32I:       # %bb.0:
; RV32I-NEXT:    addi a2, a0, 128
; RV32I-NEXT:    sltu a0, a2, a0
; RV32I-NEXT:    add a0, a1, a0
; RV32I-NEXT:    seqz a0, a0
; RV32I-NEXT:    sltiu a1, a2, 256
; RV32I-NEXT:    and a0, a0, a1
; RV32I-NEXT:    ret
;
; RV64-LABEL: add_ultcmp_i64_i8:
; RV64:       # %bb.0:
; RV64-NEXT:    addi a0, a0, 128
; RV64-NEXT:    sltiu a0, a0, 256
; RV64-NEXT:    ret
;
; RV32ZBB-LABEL: add_ultcmp_i64_i8:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    sext.b a2, a0
; RV32ZBB-NEXT:    xor a0, a2, a0
; RV32ZBB-NEXT:    srai a2, a2, 31
; RV32ZBB-NEXT:    xor a1, a2, a1
; RV32ZBB-NEXT:    or a0, a0, a1
; RV32ZBB-NEXT:    seqz a0, a0
; RV32ZBB-NEXT:    ret
  %tmp0 = add i64 %x, 128 ; 1U << (8-1)
  %tmp1 = icmp ult i64 %tmp0, 256 ; 1U << 8
  ret i1 %tmp1
}

; Slightly more canonical variant
define i1 @add_ulecmp_i16_i8(i16 %x) nounwind {
; RV32I-LABEL: add_ulecmp_i16_i8:
; RV32I:       # %bb.0:
; RV32I-NEXT:    addi a0, a0, 128
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    sltiu a0, a0, 256
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_ulecmp_i16_i8:
; RV64I:       # %bb.0:
; RV64I-NEXT:    addi a0, a0, 128
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 48
; RV64I-NEXT:    sltiu a0, a0, 256
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: add_ulecmp_i16_i8:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    addi a0, a0, 128
; RV32ZBB-NEXT:    zext.h a0, a0
; RV32ZBB-NEXT:    sltiu a0, a0, 256
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: add_ulecmp_i16_i8:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    addi a0, a0, 128
; RV64ZBB-NEXT:    zext.h a0, a0
; RV64ZBB-NEXT:    sltiu a0, a0, 256
; RV64ZBB-NEXT:    ret
  %tmp0 = add i16 %x, 128 ; 1U << (8-1)
  %tmp1 = icmp ule i16 %tmp0, 255 ; (1U << 8) - 1
  ret i1 %tmp1
}

; Negative tests
; ---------------------------------------------------------------------------- ;

; Adding not a constant
define i1 @add_ultcmp_bad_i16_i8_add(i16 %x, i16 %y) nounwind {
; RV32I-LABEL: add_ultcmp_bad_i16_i8_add:
; RV32I:       # %bb.0:
; RV32I-NEXT:    add a0, a0, a1
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    sltiu a0, a0, 256
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_ultcmp_bad_i16_i8_add:
; RV64I:       # %bb.0:
; RV64I-NEXT:    add a0, a0, a1
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 48
; RV64I-NEXT:    sltiu a0, a0, 256
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: add_ultcmp_bad_i16_i8_add:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    add a0, a0, a1
; RV32ZBB-NEXT:    zext.h a0, a0
; RV32ZBB-NEXT:    sltiu a0, a0, 256
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: add_ultcmp_bad_i16_i8_add:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    add a0, a0, a1
; RV64ZBB-NEXT:    zext.h a0, a0
; RV64ZBB-NEXT:    sltiu a0, a0, 256
; RV64ZBB-NEXT:    ret
  %tmp0 = add i16 %x, %y
  %tmp1 = icmp ult i16 %tmp0, 256 ; 1U << 8
  ret i1 %tmp1
}

; Comparing not with a constant
define i1 @add_ultcmp_bad_i16_i8_cmp(i16 %x, i16 %y) nounwind {
; RV32I-LABEL: add_ultcmp_bad_i16_i8_cmp:
; RV32I:       # %bb.0:
; RV32I-NEXT:    lui a2, 16
; RV32I-NEXT:    addi a0, a0, 128
; RV32I-NEXT:    addi a2, a2, -1
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    sltu a0, a0, a1
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_ultcmp_bad_i16_i8_cmp:
; RV64I:       # %bb.0:
; RV64I-NEXT:    lui a2, 16
; RV64I-NEXT:    addi a0, a0, 128
; RV64I-NEXT:    addiw a2, a2, -1
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    sltu a0, a0, a1
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: add_ultcmp_bad_i16_i8_cmp:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    zext.h a1, a1
; RV32ZBB-NEXT:    addi a0, a0, 128
; RV32ZBB-NEXT:    zext.h a0, a0
; RV32ZBB-NEXT:    sltu a0, a0, a1
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: add_ultcmp_bad_i16_i8_cmp:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    zext.h a1, a1
; RV64ZBB-NEXT:    addi a0, a0, 128
; RV64ZBB-NEXT:    zext.h a0, a0
; RV64ZBB-NEXT:    sltu a0, a0, a1
; RV64ZBB-NEXT:    ret
  %tmp0 = add i16 %x, 128 ; 1U << (8-1)
  %tmp1 = icmp ult i16 %tmp0, %y
  ret i1 %tmp1
}

; Second constant is not larger than the first one
define i1 @add_ultcmp_bad_i8_i16(i16 %x) nounwind {
; RV32I-LABEL: add_ultcmp_bad_i8_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    addi a0, a0, 128
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    sltiu a0, a0, 128
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_ultcmp_bad_i8_i16:
; RV64I:       # %bb.0:
; RV64I-NEXT:    addi a0, a0, 128
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 48
; RV64I-NEXT:    sltiu a0, a0, 128
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: add_ultcmp_bad_i8_i16:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    addi a0, a0, 128
; RV32ZBB-NEXT:    zext.h a0, a0
; RV32ZBB-NEXT:    sltiu a0, a0, 128
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: add_ultcmp_bad_i8_i16:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    addi a0, a0, 128
; RV64ZBB-NEXT:    zext.h a0, a0
; RV64ZBB-NEXT:    sltiu a0, a0, 128
; RV64ZBB-NEXT:    ret
  %tmp0 = add i16 %x, 128 ; 1U << (8-1)
  %tmp1 = icmp ult i16 %tmp0, 128 ; 1U << (8-1)
  ret i1 %tmp1
}

; First constant is not power of two
define i1 @add_ultcmp_bad_i16_i8_c0notpoweroftwo(i16 %x) nounwind {
; RV32I-LABEL: add_ultcmp_bad_i16_i8_c0notpoweroftwo:
; RV32I:       # %bb.0:
; RV32I-NEXT:    addi a0, a0, 192
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    sltiu a0, a0, 256
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_ultcmp_bad_i16_i8_c0notpoweroftwo:
; RV64I:       # %bb.0:
; RV64I-NEXT:    addi a0, a0, 192
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 48
; RV64I-NEXT:    sltiu a0, a0, 256
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: add_ultcmp_bad_i16_i8_c0notpoweroftwo:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    addi a0, a0, 192
; RV32ZBB-NEXT:    zext.h a0, a0
; RV32ZBB-NEXT:    sltiu a0, a0, 256
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: add_ultcmp_bad_i16_i8_c0notpoweroftwo:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    addi a0, a0, 192
; RV64ZBB-NEXT:    zext.h a0, a0
; RV64ZBB-NEXT:    sltiu a0, a0, 256
; RV64ZBB-NEXT:    ret
  %tmp0 = add i16 %x, 192 ; (1U << (8-1)) + (1U << (8-1-1))
  %tmp1 = icmp ult i16 %tmp0, 256 ; 1U << 8
  ret i1 %tmp1
}

; Second constant is not power of two
define i1 @add_ultcmp_bad_i16_i8_c1notpoweroftwo(i16 %x) nounwind {
; RV32I-LABEL: add_ultcmp_bad_i16_i8_c1notpoweroftwo:
; RV32I:       # %bb.0:
; RV32I-NEXT:    addi a0, a0, 128
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    sltiu a0, a0, 768
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_ultcmp_bad_i16_i8_c1notpoweroftwo:
; RV64I:       # %bb.0:
; RV64I-NEXT:    addi a0, a0, 128
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 48
; RV64I-NEXT:    sltiu a0, a0, 768
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: add_ultcmp_bad_i16_i8_c1notpoweroftwo:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    addi a0, a0, 128
; RV32ZBB-NEXT:    zext.h a0, a0
; RV32ZBB-NEXT:    sltiu a0, a0, 768
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: add_ultcmp_bad_i16_i8_c1notpoweroftwo:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    addi a0, a0, 128
; RV64ZBB-NEXT:    zext.h a0, a0
; RV64ZBB-NEXT:    sltiu a0, a0, 768
; RV64ZBB-NEXT:    ret
  %tmp0 = add i16 %x, 128 ; 1U << (8-1)
  %tmp1 = icmp ult i16 %tmp0, 768 ; (1U << 8) + (1U << (8+1))
  ret i1 %tmp1
}

; Magic check fails, 64 << 1 != 256
define i1 @add_ultcmp_bad_i16_i8_magic(i16 %x) nounwind {
; RV32I-LABEL: add_ultcmp_bad_i16_i8_magic:
; RV32I:       # %bb.0:
; RV32I-NEXT:    addi a0, a0, 64
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    sltiu a0, a0, 256
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_ultcmp_bad_i16_i8_magic:
; RV64I:       # %bb.0:
; RV64I-NEXT:    addi a0, a0, 64
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 48
; RV64I-NEXT:    sltiu a0, a0, 256
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: add_ultcmp_bad_i16_i8_magic:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    addi a0, a0, 64
; RV32ZBB-NEXT:    zext.h a0, a0
; RV32ZBB-NEXT:    sltiu a0, a0, 256
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: add_ultcmp_bad_i16_i8_magic:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    addi a0, a0, 64
; RV64ZBB-NEXT:    zext.h a0, a0
; RV64ZBB-NEXT:    sltiu a0, a0, 256
; RV64ZBB-NEXT:    ret
  %tmp0 = add i16 %x, 64 ; 1U << (8-1-1)
  %tmp1 = icmp ult i16 %tmp0, 256 ; 1U << 8
  ret i1 %tmp1
}

; Bad 'destination type'
define i1 @add_ultcmp_bad_i16_i4(i16 %x) nounwind {
; RV32I-LABEL: add_ultcmp_bad_i16_i4:
; RV32I:       # %bb.0:
; RV32I-NEXT:    addi a0, a0, 8
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    sltiu a0, a0, 16
; RV32I-NEXT:    ret
;
; RV64I-LABEL: add_ultcmp_bad_i16_i4:
; RV64I:       # %bb.0:
; RV64I-NEXT:    addi a0, a0, 8
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 48
; RV64I-NEXT:    sltiu a0, a0, 16
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: add_ultcmp_bad_i16_i4:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    addi a0, a0, 8
; RV32ZBB-NEXT:    zext.h a0, a0
; RV32ZBB-NEXT:    sltiu a0, a0, 16
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: add_ultcmp_bad_i16_i4:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    addi a0, a0, 8
; RV64ZBB-NEXT:    zext.h a0, a0
; RV64ZBB-NEXT:    sltiu a0, a0, 16
; RV64ZBB-NEXT:    ret
  %tmp0 = add i16 %x, 8 ; 1U << (4-1)
  %tmp1 = icmp ult i16 %tmp0, 16 ; 1U << 4
  ret i1 %tmp1
}

; Bad storage type
define i1 @add_ultcmp_bad_i24_i8(i24 %x) nounwind {
; RV32-LABEL: add_ultcmp_bad_i24_i8:
; RV32:       # %bb.0:
; RV32-NEXT:    addi a0, a0, 128
; RV32-NEXT:    slli a0, a0, 8
; RV32-NEXT:    srli a0, a0, 8
; RV32-NEXT:    sltiu a0, a0, 256
; RV32-NEXT:    ret
;
; RV64-LABEL: add_ultcmp_bad_i24_i8:
; RV64:       # %bb.0:
; RV64-NEXT:    addi a0, a0, 128
; RV64-NEXT:    slli a0, a0, 40
; RV64-NEXT:    srli a0, a0, 40
; RV64-NEXT:    sltiu a0, a0, 256
; RV64-NEXT:    ret
  %tmp0 = add i24 %x, 128 ; 1U << (8-1)
  %tmp1 = icmp ult i24 %tmp0, 256 ; 1U << 8
  ret i1 %tmp1
}

; Tautological compare (ule -1 is always true): folds to a constant.
define i1 @add_ulecmp_bad_i16_i8(i16 %x) nounwind {
; CHECK-LABEL: add_ulecmp_bad_i16_i8:
; CHECK:       # %bb.0:
; CHECK-NEXT:    li a0, 1
; CHECK-NEXT:    ret
  %tmp0 = add i16 %x, 128 ; 1U << (8-1)
  %tmp1 = icmp ule i16 %tmp0, -1 ; when we +1 it, it will wrap to 0
  ret i1 %tmp1
}
