xref: /llvm-project/llvm/test/Analysis/ScalarEvolution/lt-overflow.ll (revision 92619956eb27ef08dd24045307593fc3d7f78db0)
; RUN: opt %s -passes='print<scalar-evolution>' -scalar-evolution-classify-expressions=0 2>&1 | FileCheck %s

; Target description: x86-64 Linux (pins pointer/integer layout so SCEV
; expressions in the CHECK lines below are stable).
target datalayout = "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128"
target triple = "x86_64-unknown-linux-gnu"

; A collection of tests focused on exercising logic to prove no-unsigned-wrap
; from the mustprogress semantics of loops.

; CHECK: Determining loop execution counts for: @test
; CHECK: Loop %for.body: backedge-taken count is ((-1 + (2 umax %N)) /u 2)
; CHECK: Determining loop execution counts for: @test_preinc
; CHECK: Loop %for.body: backedge-taken count is ((1 + %N) /u 2)
; CHECK: Determining loop execution counts for: @test_well_defined_infinite_st
; CHECK: Loop %for.body: Unpredictable backedge-taken count.
; CHECK: Determining loop execution counts for: @test_well_defined_infinite_ld
; CHECK: Loop %for.body: Unpredictable backedge-taken count.
; CHECK: Determining loop execution counts for: @test_no_mustprogress
; CHECK: Loop %for.body: Unpredictable backedge-taken count.
; CHECK: Determining loop execution counts for: @test_1024
; CHECK: Loop %for.body: backedge-taken count is ((-1 + (1024 umax %N)) /u 1024)
; CHECK: Determining loop execution counts for: @test_uneven_divide
; CHECK: Loop %for.body: Unpredictable backedge-taken count.
; CHECK: Determining loop execution counts for: @test_non_invariant_rhs
; CHECK: Loop %for.body: Unpredictable backedge-taken count.
; CHECK: Determining loop execution counts for: @test_abnormal_exit
; CHECK: Loop %for.body: Unpredictable backedge-taken count.
; CHECK: Determining loop execution counts for: @test_other_exit
; CHECK: Loop %for.body: <multiple exits> Unpredictable backedge-taken count.
; CHECK: Determining loop execution counts for: @test_gt
; CHECK: Loop %for.body: Unpredictable backedge-taken count.
; CHECK: Determining loop execution counts for: @test_willreturn
; CHECK: Loop %for.body: backedge-taken count is ((-1 + (1024 umax %N)) /u 1024)
; CHECK: Determining loop execution counts for: @test_nowillreturn
; CHECK: Loop %for.body: Unpredictable backedge-taken count.
; TODO: investigate why willreturn is still needed on the callsite
; CHECK: Determining loop execution counts for: @test_willreturn_nocallsite
; CHECK: Loop %for.body: Unpredictable backedge-taken count.

; Baseline: stride-2 IV, post-increment ult exit compare, mustprogress.
; SCEV computes the backedge-taken count here (see CHECK for @test above).
define void @test(i32 %N) mustprogress {
entry:
  br label %for.body

for.body:
  %iv = phi i32 [ %iv.next, %for.body ], [ 0, %entry ]
  %iv.next = add i32 %iv, 2
  %cmp = icmp ult i32 %iv.next, %N
  br i1 %cmp, label %for.body, label %for.cond.cleanup

for.cond.cleanup:
  ret void
}

; Same as @test but the exit condition tests the pre-increment IV value;
; the count is still computable (see CHECK for @test_preinc above).
define void @test_preinc(i32 %N) mustprogress {
entry:
  br label %for.body

for.body:
  %iv = phi i32 [ %iv.next, %for.body ], [ 0, %entry ]
  %iv.next = add i32 %iv, 2
  %cmp = icmp ult i32 %iv, %N
  br i1 %cmp, label %for.body, label %for.cond.cleanup

for.cond.cleanup:
  ret void
}

; External global used for the volatile-access and non-invariant-RHS tests below.
@G = external global i32

; The volatile store keeps an infinite execution of this loop well defined,
; so even with mustprogress the count is unpredictable (see CHECK above).
define void @test_well_defined_infinite_st(i32 %N) mustprogress {
entry:
  br label %for.body

for.body:
  %iv = phi i32 [ %iv.next, %for.body ], [ 0, %entry ]
  %iv.next = add i32 %iv, 2
  store volatile i32 0, ptr @G
  %cmp = icmp ult i32 %iv.next, %N
  br i1 %cmp, label %for.body, label %for.cond.cleanup

for.cond.cleanup:
  ret void
}

; Same as @test_well_defined_infinite_st but with a volatile load instead of
; a store; count is likewise unpredictable (see CHECK above).
define void @test_well_defined_infinite_ld(i32 %N) mustprogress {
entry:
  br label %for.body

for.body:
  %iv = phi i32 [ %iv.next, %for.body ], [ 0, %entry ]
  %iv.next = add i32 %iv, 2
  %val = load volatile i32, ptr @G
  %cmp = icmp ult i32 %iv.next, %N
  br i1 %cmp, label %for.body, label %for.cond.cleanup

for.cond.cleanup:
  ret void
}

; Same loop as @test but without the mustprogress attribute, so the
; no-wrap reasoning does not apply and the count is unpredictable.
define void @test_no_mustprogress(i32 %N) {
entry:
  br label %for.body

for.body:
  %iv = phi i32 [ %iv.next, %for.body ], [ 0, %entry ]
  %iv.next = add i32 %iv, 2
  %cmp = icmp ult i32 %iv.next, %N
  br i1 %cmp, label %for.body, label %for.cond.cleanup

for.cond.cleanup:
  ret void
}


; Larger power-of-two stride (1024); count is still computable
; (see CHECK for @test_1024 above).
define void @test_1024(i32 %N) mustprogress {
entry:
  br label %for.body

for.body:
  %iv = phi i32 [ %iv.next, %for.body ], [ 0, %entry ]
  %iv.next = add i32 %iv, 1024
  %cmp = icmp ult i32 %iv.next, %N
  br i1 %cmp, label %for.body, label %for.cond.cleanup

for.cond.cleanup:
  ret void
}

; Stride 3 does not evenly divide the i32 value space; the count is not
; computed for this case (see CHECK for @test_uneven_divide above).
define void @test_uneven_divide(i32 %N) mustprogress {
entry:
  br label %for.body

for.body:
  %iv = phi i32 [ %iv.next, %for.body ], [ 0, %entry ]
  %iv.next = add i32 %iv, 3
  %cmp = icmp ult i32 %iv.next, %N
  br i1 %cmp, label %for.body, label %for.cond.cleanup

for.cond.cleanup:
  ret void
}

; The exit bound %N is reloaded from @G on every iteration, so the RHS of the
; compare is not loop-invariant and the count is unpredictable (see CHECK).
define void @test_non_invariant_rhs() mustprogress {
entry:
  br label %for.body

for.body:
  %iv = phi i32 [ %iv.next, %for.body ], [ 0, %entry ]
  %iv.next = add i32 %iv, 2
  %N = load i32, ptr @G
  %cmp = icmp ult i32 %iv.next, %N
  br i1 %cmp, label %for.body, label %for.cond.cleanup

for.cond.cleanup:
  ret void
}

; External function with no attributes; used by @test_abnormal_exit to model
; a call that may leave the loop abnormally.
declare void @mayexit()

; The call to @mayexit (no attributes) may leave the loop abnormally, so the
; backedge-taken count is unpredictable (see CHECK for @test_abnormal_exit).
define void @test_abnormal_exit(i32 %N) mustprogress {
entry:
  br label %for.body

for.body:
  %iv = phi i32 [ %iv.next, %for.body ], [ 0, %entry ]
  %iv.next = add i32 %iv, 2
  call void @mayexit()
  %cmp = icmp ult i32 %iv.next, %N
  br i1 %cmp, label %for.body, label %for.cond.cleanup

for.cond.cleanup:
  ret void
}


; Two-exit loop: the header tests a constant bound (20) and the latch tests
; %N. The CHECK above reports <multiple exits> with an unpredictable count.
define void @test_other_exit(i32 %N) mustprogress {
entry:
  br label %for.body

for.body:
  %iv = phi i32 [ %iv.next, %for.latch ], [ 0, %entry ]
  %iv.next = add i32 %iv, 2
  %cmp1 = icmp ult i32 %iv.next, 20
  br i1 %cmp1, label %for.latch, label %for.cond.cleanup

for.latch:
  %cmp2 = icmp ult i32 %iv.next, %N
  br i1 %cmp2, label %for.body, label %for.cond.cleanup

for.cond.cleanup:
  ret void
}

; Decrementing IV (starts at %S, steps by -2) with a ugt exit compare; the
; count is unpredictable for this form (see CHECK for @test_gt above).
define void @test_gt(i32 %S, i32 %N) mustprogress {
entry:
  br label %for.body

for.body:
  %iv = phi i32 [ %iv.next, %for.body ], [ %S, %entry ]
  %iv.next = add i32 %iv, -2
  %cmp = icmp ugt i32 %iv.next, %N
  br i1 %cmp, label %for.body, label %for.cond.cleanup

for.cond.cleanup:
  ret void
}

; External function with no attributes at the declaration; the willreturn
; tests below vary the attributes on the callsite instead.
declare void @sideeffect()

; willreturn on both the function and the callsite: the count is computable,
; matching @test_1024 (see CHECK for @test_willreturn above).
define void @test_willreturn(i32 %S, i32 %N) willreturn {
entry:
  br label %for.body

for.body:
  %iv = phi i32 [ %iv.next, %for.body ], [ 0, %entry ]
  %iv.next = add i32 %iv, 1024
  call void @sideeffect() nounwind willreturn
  %cmp = icmp ult i32 %iv.next, %N
  br i1 %cmp, label %for.body, label %for.cond.cleanup

for.cond.cleanup:
  ret void
}

; Same as @test_willreturn but the enclosing function lacks the willreturn
; attribute; the count is unpredictable (see CHECK for @test_nowillreturn).
define void @test_nowillreturn(i32 %S, i32 %N) {
entry:
  br label %for.body

for.body:
  %iv = phi i32 [ %iv.next, %for.body ], [ 0, %entry ]
  %iv.next = add i32 %iv, 1024
  call void @sideeffect() nounwind willreturn
  %cmp = icmp ult i32 %iv.next, %N
  br i1 %cmp, label %for.body, label %for.cond.cleanup

for.cond.cleanup:
  ret void
}

; Same as @test_willreturn but the callsite lacks willreturn; the count is
; unpredictable (see the TODO and CHECK for @test_willreturn_nocallsite).
define void @test_willreturn_nocallsite(i32 %S, i32 %N) willreturn {
entry:
  br label %for.body

for.body:
  %iv = phi i32 [ %iv.next, %for.body ], [ 0, %entry ]
  %iv.next = add i32 %iv, 1024
  call void @sideeffect() nounwind
  %cmp = icmp ult i32 %iv.next, %N
  br i1 %cmp, label %for.body, label %for.cond.cleanup

for.cond.cleanup:
  ret void
}
