; xref: /llvm-project/llvm/test/Transforms/InstSimplify/div-by-0-guard-before-umul_ov.ll (revision 060de415af335fdd82910f409e2be3b8457eaa43)
; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt %s -passes=instsimplify -S | FileCheck %s
; Overflow-reporting unsigned multiply intrinsic used by every test below.
; Returns { product, did-overflow } for i4 operands.
declare { i4, i1 } @llvm.umul.with.overflow.i4(i4, i4) #1
; Baseline: (umul.ov & (size != 0)) folds to just umul.ov, because the
; overflow bit already implies size != 0 (0 * x never overflows).
define i1 @t0_umul(i4 %size, i4 %nmemb) {
; CHECK-LABEL: @t0_umul(
; CHECK-NEXT:    [[UMUL:%.*]] = tail call { i4, i1 } @llvm.umul.with.overflow.i4(i4 [[SIZE:%.*]], i4 [[NMEMB:%.*]])
; CHECK-NEXT:    [[UMUL_OV:%.*]] = extractvalue { i4, i1 } [[UMUL]], 1
; CHECK-NEXT:    ret i1 [[UMUL_OV]]
;
  %cmp = icmp ne i4 %size, 0
  %umul = tail call { i4, i1 } @llvm.umul.with.overflow.i4(i4 %size, i4 %nmemb)
  %umul.ov = extractvalue { i4, i1 } %umul, 1
  %and = and i1 %umul.ov, %cmp
  ret i1 %and
}
; Same fold as @t0_umul, but with the 'and' operands swapped — the
; simplification must be commutative.
define i1 @t1_commutative(i4 %size, i4 %nmemb) {
; CHECK-LABEL: @t1_commutative(
; CHECK-NEXT:    [[UMUL:%.*]] = tail call { i4, i1 } @llvm.umul.with.overflow.i4(i4 [[SIZE:%.*]], i4 [[NMEMB:%.*]])
; CHECK-NEXT:    [[UMUL_OV:%.*]] = extractvalue { i4, i1 } [[UMUL]], 1
; CHECK-NEXT:    ret i1 [[UMUL_OV]]
;
  %cmp = icmp ne i4 %size, 0
  %umul = tail call { i4, i1 } @llvm.umul.with.overflow.i4(i4 %size, i4 %nmemb)
  %umul.ov = extractvalue { i4, i1 } %umul, 1
  %and = and i1 %cmp, %umul.ov ; swapped
  ret i1 %and
}
; Negative test: the zero-check is on %size1 while the multiply uses
; %size0, so no fold may happen — all instructions must remain.
define i1 @n2_wrong_size(i4 %size0, i4 %size1, i4 %nmemb) {
; CHECK-LABEL: @n2_wrong_size(
; CHECK-NEXT:    [[CMP:%.*]] = icmp ne i4 [[SIZE1:%.*]], 0
; CHECK-NEXT:    [[UMUL:%.*]] = tail call { i4, i1 } @llvm.umul.with.overflow.i4(i4 [[SIZE0:%.*]], i4 [[NMEMB:%.*]])
; CHECK-NEXT:    [[UMUL_OV:%.*]] = extractvalue { i4, i1 } [[UMUL]], 1
; CHECK-NEXT:    [[AND:%.*]] = and i1 [[UMUL_OV]], [[CMP]]
; CHECK-NEXT:    ret i1 [[AND]]
;
  %cmp = icmp ne i4 %size1, 0 ; not %size0
  %umul = tail call { i4, i1 } @llvm.umul.with.overflow.i4(i4 %size0, i4 %nmemb)
  %umul.ov = extractvalue { i4, i1 } %umul, 1
  %and = and i1 %umul.ov, %cmp
  ret i1 %and
}
; Negative test: predicate is 'eq' instead of 'ne'. Then the 'and' of
; umul.ov and (size == 0) is a contradiction and folds to false.
define i1 @n3_wrong_pred(i4 %size, i4 %nmemb) {
; CHECK-LABEL: @n3_wrong_pred(
; CHECK-NEXT:    ret i1 false
;
  %cmp = icmp eq i4 %size, 0 ; not 'ne'
  %umul = tail call { i4, i1 } @llvm.umul.with.overflow.i4(i4 %size, i4 %nmemb)
  %umul.ov = extractvalue { i4, i1 } %umul, 1
  %and = and i1 %umul.ov, %cmp
  ret i1 %and
}
; Negative test: 'or' instead of 'and'. Then (size != 0) subsumes the
; overflow bit (overflow implies size != 0), so the result is just %cmp.
define i1 @n4_not_and(i4 %size, i4 %nmemb) {
; CHECK-LABEL: @n4_not_and(
; CHECK-NEXT:    [[CMP:%.*]] = icmp ne i4 [[SIZE:%.*]], 0
; CHECK-NEXT:    ret i1 [[CMP]]
;
  %cmp = icmp ne i4 %size, 0
  %umul = tail call { i4, i1 } @llvm.umul.with.overflow.i4(i4 %size, i4 %nmemb)
  %umul.ov = extractvalue { i4, i1 } %umul, 1
  %and = or i1 %umul.ov, %cmp ; not 'and'
  ret i1 %and
}
; Negative test: comparison against 1 instead of 0 — (size != 1) is not
; implied by the overflow bit, so nothing may fold and all instructions
; must remain.
define i1 @n5_not_zero(i4 %size, i4 %nmemb) {
; CHECK-LABEL: @n5_not_zero(
; CHECK-NEXT:    [[CMP:%.*]] = icmp ne i4 [[SIZE:%.*]], 1
; CHECK-NEXT:    [[UMUL:%.*]] = tail call { i4, i1 } @llvm.umul.with.overflow.i4(i4 [[SIZE]], i4 [[NMEMB:%.*]])
; CHECK-NEXT:    [[UMUL_OV:%.*]] = extractvalue { i4, i1 } [[UMUL]], 1
; CHECK-NEXT:    [[AND:%.*]] = and i1 [[UMUL_OV]], [[CMP]]
; CHECK-NEXT:    ret i1 [[AND]]
;
  %cmp = icmp ne i4 %size, 1 ; should be '0'
  %umul = tail call { i4, i1 } @llvm.umul.with.overflow.i4(i4 %size, i4 %nmemb)
  %umul.ov = extractvalue { i4, i1 } %umul, 1
  %and = and i1 %umul.ov, %cmp
  ret i1 %and
}