; NOTE: Assertions have been autogenerated by utils/update_test_checks.py UTC_ARGS: --version 2
; RUN: opt < %s -passes=infer-alignment -S | FileCheck %s

; ------------------------------------------------------------------------------
; Simple test
; ------------------------------------------------------------------------------

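; The ptrtoint/and 31/icmp eq 0/llvm.assume sequence asserts that %a is
; 32-byte aligned; infer-alignment should propagate that fact forward and
; raise the load and store below from align 4 to align 32.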
define void @simple_forwardpropagate(ptr %a) {
; CHECK-LABEL: define void @simple_forwardpropagate
; CHECK-SAME: (ptr [[A:%.*]]) {
; CHECK-NEXT:    [[PTRINT:%.*]] = ptrtoint ptr [[A]] to i64
; CHECK-NEXT:    [[MASKEDPTR:%.*]] = and i64 [[PTRINT]], 31
; CHECK-NEXT:    [[MASKCOND:%.*]] = icmp eq i64 [[MASKEDPTR]], 0
; CHECK-NEXT:    tail call void @llvm.assume(i1 [[MASKCOND]])
; CHECK-NEXT:    [[LOAD_A:%.*]] = load i32, ptr [[A]], align 32
; CHECK-NEXT:    store i32 345, ptr [[A]], align 32
; CHECK-NEXT:    ret void
;
  %ptrint = ptrtoint ptr %a to i64
  %maskedptr = and i64 %ptrint, 31
  %maskcond = icmp eq i64 %maskedptr, 0
  tail call void @llvm.assume(i1 %maskcond)

  %load.a = load i32, ptr %a, align 4
  store i32 345, ptr %a, align 4

  ret void
}

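; Here the assume comes after the memory accesses. Alignment is a property of
; the pointer value itself, so the pass can also apply the assumption to the
; load and store that precede it.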
define void @simple_backpropagate(ptr %a) {
; CHECK-LABEL: define void @simple_backpropagate
; CHECK-SAME: (ptr [[A:%.*]]) {
; CHECK-NEXT:    [[LOAD_A:%.*]] = load i32, ptr [[A]], align 32
; CHECK-NEXT:    store i32 345, ptr [[A]], align 32
; CHECK-NEXT:    [[PTRINT:%.*]] = ptrtoint ptr [[A]] to i64
; CHECK-NEXT:    [[MASKEDPTR:%.*]] = and i64 [[PTRINT]], 31
; CHECK-NEXT:    [[MASKCOND:%.*]] = icmp eq i64 [[MASKEDPTR]], 0
; CHECK-NEXT:    tail call void @llvm.assume(i1 [[MASKCOND]])
; CHECK-NEXT:    ret void
;
  %load.a = load i32, ptr %a, align 4
  store i32 345, ptr %a, align 4

  %ptrint = ptrtoint ptr %a to i64
  %maskedptr = and i64 %ptrint, 31
  %maskcond = icmp eq i64 %maskedptr, 0
  tail call void @llvm.assume(i1 %maskcond)

  ret void
}

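; Same 32-byte assumption, expressed as an "align" operand bundle on
; llvm.assume instead of the ptrtoint/mask pattern.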
define void @simple_forwardpropagate_bundle(ptr %a) {
; CHECK-LABEL: define void @simple_forwardpropagate_bundle
; CHECK-SAME: (ptr [[A:%.*]]) {
; CHECK-NEXT:    call void @llvm.assume(i1 true) [ "align"(ptr [[A]], i32 32) ]
; CHECK-NEXT:    [[LOAD_A:%.*]] = load i32, ptr [[A]], align 32
; CHECK-NEXT:    store i32 345, ptr [[A]], align 32
; CHECK-NEXT:    ret void
;
  call void @llvm.assume(i1 true) ["align"(ptr %a, i32 32)]
  %load.a = load i32, ptr %a, align 4
  store i32 345, ptr %a, align 4
  ret void
}

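; Operand-bundle form of @simple_backpropagate.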
define void @simple_backpropagate_bundle(ptr %a) {
; CHECK-LABEL: define void @simple_backpropagate_bundle
; CHECK-SAME: (ptr [[A:%.*]]) {
; CHECK-NEXT:    [[LOAD_A:%.*]] = load i32, ptr [[A]], align 32
; CHECK-NEXT:    store i32 345, ptr [[A]], align 32
; CHECK-NEXT:    call void @llvm.assume(i1 true) [ "align"(ptr [[A]], i32 32) ]
; CHECK-NEXT:    ret void
;
  %load.a = load i32, ptr %a, align 4
  store i32 345, ptr %a, align 4
  call void @llvm.assume(i1 true) ["align"(ptr %a, i32 32)]
  ret void
}

; ------------------------------------------------------------------------------
; Complex test
; ------------------------------------------------------------------------------

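; Both %a and %b are assumed to be 64-byte aligned at entry. The induction
; variable advances by 16 i32 elements (64 bytes) per iteration, so every GEP
; in the loop body stays 64-byte aligned and the load/store can be upgraded
; from align 4 to align 64.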
define void @loop_forwardpropagate(ptr %a, ptr %b) {
; CHECK-LABEL: define void @loop_forwardpropagate
; CHECK-SAME: (ptr [[A:%.*]], ptr [[B:%.*]]) {
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[PTRINT:%.*]] = ptrtoint ptr [[A]] to i64
; CHECK-NEXT:    [[MASKEDPTR:%.*]] = and i64 [[PTRINT]], 63
; CHECK-NEXT:    [[MASKCOND:%.*]] = icmp eq i64 [[MASKEDPTR]], 0
; CHECK-NEXT:    tail call void @llvm.assume(i1 [[MASKCOND]])
; CHECK-NEXT:    [[PTRINT2:%.*]] = ptrtoint ptr [[B]] to i64
; CHECK-NEXT:    [[MASKEDPTR2:%.*]] = and i64 [[PTRINT2]], 63
; CHECK-NEXT:    [[MASKEDCOND2:%.*]] = icmp eq i64 [[MASKEDPTR2]], 0
; CHECK-NEXT:    tail call void @llvm.assume(i1 [[MASKEDCOND2]])
; CHECK-NEXT:    br label [[FOR_BODY:%.*]]
; CHECK:       for.body:
; CHECK-NEXT:    [[I:%.*]] = phi i64 [ 0, [[ENTRY:%.*]] ], [ [[I_NEXT:%.*]], [[FOR_BODY]] ]
; CHECK-NEXT:    [[GEP_B:%.*]] = getelementptr inbounds i32, ptr [[B]], i64 [[I]]
; CHECK-NEXT:    [[LOAD_B:%.*]] = load i32, ptr [[GEP_B]], align 64
; CHECK-NEXT:    [[ADD:%.*]] = add nsw i32 [[LOAD_B]], 1
; CHECK-NEXT:    [[GEP_A:%.*]] = getelementptr inbounds i32, ptr [[A]], i64 [[I]]
; CHECK-NEXT:    store i32 [[ADD]], ptr [[GEP_A]], align 64
; CHECK-NEXT:    [[I_NEXT]] = add nuw nsw i64 [[I]], 16
; CHECK-NEXT:    [[CMP:%.*]] = icmp slt i64 [[I_NEXT]], 1648
; CHECK-NEXT:    br i1 [[CMP]], label [[FOR_BODY]], label [[FOR_END:%.*]]
; CHECK:       for.end:
; CHECK-NEXT:    ret void
;
entry:
  %ptrint = ptrtoint ptr %a to i64
  %maskedptr = and i64 %ptrint, 63
  %maskcond = icmp eq i64 %maskedptr, 0
  tail call void @llvm.assume(i1 %maskcond)

  %ptrint2 = ptrtoint ptr %b to i64
  %maskedptr2 = and i64 %ptrint2, 63
  %maskedcond2 = icmp eq i64 %maskedptr2, 0
  tail call void @llvm.assume(i1 %maskedcond2)

  br label %for.body

for.body:
  %i = phi i64 [ 0, %entry ], [ %i.next, %for.body ]

  %gep.b = getelementptr inbounds i32, ptr %b, i64 %i
  %load.b = load i32, ptr %gep.b, align 4
  %add = add nsw i32 %load.b, 1

  %gep.a = getelementptr inbounds i32, ptr %a, i64 %i
  store i32 %add, ptr %gep.a, align 4

  %i.next = add nuw nsw i64 %i, 16
  %cmp = icmp slt i64 %i.next, 1648

  br i1 %cmp, label %for.body, label %for.end

for.end:
  ret void
}

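; Operand-bundle form of @loop_forwardpropagate.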
define void @loop_forwardpropagate_bundle(ptr %a, ptr %b) {
; CHECK-LABEL: define void @loop_forwardpropagate_bundle
; CHECK-SAME: (ptr [[A:%.*]], ptr [[B:%.*]]) {
; CHECK-NEXT:  entry:
; CHECK-NEXT:    tail call void @llvm.assume(i1 true) [ "align"(ptr [[A]], i32 64) ]
; CHECK-NEXT:    tail call void @llvm.assume(i1 true) [ "align"(ptr [[B]], i32 64) ]
; CHECK-NEXT:    br label [[FOR_BODY:%.*]]
; CHECK:       for.body:
; CHECK-NEXT:    [[I:%.*]] = phi i64 [ 0, [[ENTRY:%.*]] ], [ [[I_NEXT:%.*]], [[FOR_BODY]] ]
; CHECK-NEXT:    [[GEP_B:%.*]] = getelementptr inbounds i32, ptr [[B]], i64 [[I]]
; CHECK-NEXT:    [[LOAD_B:%.*]] = load i32, ptr [[GEP_B]], align 64
; CHECK-NEXT:    [[ADD:%.*]] = add nsw i32 [[LOAD_B]], 1
; CHECK-NEXT:    [[GEP_A:%.*]] = getelementptr inbounds i32, ptr [[A]], i64 [[I]]
; CHECK-NEXT:    store i32 [[ADD]], ptr [[GEP_A]], align 64
; CHECK-NEXT:    [[I_NEXT]] = add nuw nsw i64 [[I]], 16
; CHECK-NEXT:    [[CMP:%.*]] = icmp slt i64 [[I_NEXT]], 1648
; CHECK-NEXT:    br i1 [[CMP]], label [[FOR_BODY]], label [[FOR_END:%.*]]
; CHECK:       for.end:
; CHECK-NEXT:    ret void
;
entry:
  tail call void @llvm.assume(i1 true) ["align"(ptr %a, i32 64)]
  tail call void @llvm.assume(i1 true) ["align"(ptr %b, i32 64)]
  br label %for.body

for.body:
  %i = phi i64 [ 0, %entry ], [ %i.next, %for.body ]

  %gep.b = getelementptr inbounds i32, ptr %b, i64 %i
  %load.b = load i32, ptr %gep.b, align 4
  %add = add nsw i32 %load.b, 1

  %gep.a = getelementptr inbounds i32, ptr %a, i64 %i
  store i32 %add, ptr %gep.a, align 4

  %i.next = add nuw nsw i64 %i, 16
  %cmp = icmp slt i64 %i.next, 1648

  br i1 %cmp, label %for.body, label %for.end

for.end:
  ret void
}

; Check that the assume is propagated backwards across all intervening
; operations that are guaranteed to transfer execution to their successor
; (`isGuaranteedToTransferExecutionToSuccessor`); it should reach the load
; and mark it as `align 32`.
define void @complex_backpropagate(ptr %a, ptr %b, ptr %c) {
; CHECK-LABEL: define void @complex_backpropagate
; CHECK-SAME: (ptr [[A:%.*]], ptr [[B:%.*]], ptr [[C:%.*]]) {
; CHECK-NEXT:    [[ALLOCA:%.*]] = alloca i64, align 8
; CHECK-NEXT:    [[LOAD_A:%.*]] = load i32, ptr [[A]], align 32
; CHECK-NEXT:    [[LOAD_B:%.*]] = load i32, ptr [[B]], align 4
; CHECK-NEXT:    store i32 [[LOAD_B]], ptr [[A]], align 32
; CHECK-NEXT:    [[OBJ_SIZE:%.*]] = call i64 @llvm.objectsize.i64.p0(ptr [[C]], i1 false, i1 false, i1 false)
; CHECK-NEXT:    store i64 [[OBJ_SIZE]], ptr [[ALLOCA]], align 8
; CHECK-NEXT:    [[PTRINT:%.*]] = ptrtoint ptr [[A]] to i64
; CHECK-NEXT:    [[MASKEDPTR:%.*]] = and i64 [[PTRINT]], 31
; CHECK-NEXT:    [[MASKCOND:%.*]] = icmp eq i64 [[MASKEDPTR]], 0
; CHECK-NEXT:    tail call void @llvm.assume(i1 [[MASKCOND]])
; CHECK-NEXT:    ret void
;
  %alloca = alloca i64
  %load.a = load i32, ptr %a, align 4

  %load.b = load i32, ptr %b
  store i32 %load.b, ptr %a

  %obj.size = call i64 @llvm.objectsize.i64.p0(ptr %c, i1 false)
  store i64 %obj.size, ptr %alloca

  %ptrint = ptrtoint ptr %a to i64
  %maskedptr = and i64 %ptrint, 31
  %maskcond = icmp eq i64 %maskedptr, 0
  tail call void @llvm.assume(i1 %maskcond)

  ret void
}

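; Same as @complex_backpropagate, with the alignment assumption expressed as
; an operand bundle.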
define void @complex_backpropagate_bundle(ptr %a, ptr %b, ptr %c) {
; CHECK-LABEL: define void @complex_backpropagate_bundle
; CHECK-SAME: (ptr [[A:%.*]], ptr [[B:%.*]], ptr [[C:%.*]]) {
; CHECK-NEXT:    [[ALLOCA:%.*]] = alloca i64, align 8
; CHECK-NEXT:    [[LOAD_A:%.*]] = load i32, ptr [[A]], align 32
; CHECK-NEXT:    [[LOAD_B:%.*]] = load i32, ptr [[B]], align 4
; CHECK-NEXT:    store i32 [[LOAD_B]], ptr [[A]], align 32
; CHECK-NEXT:    [[OBJ_SIZE:%.*]] = call i64 @llvm.objectsize.i64.p0(ptr [[C]], i1 false, i1 false, i1 false)
; CHECK-NEXT:    store i64 [[OBJ_SIZE]], ptr [[ALLOCA]], align 8
; CHECK-NEXT:    tail call void @llvm.assume(i1 true) [ "align"(ptr [[A]], i32 32) ]
; CHECK-NEXT:    ret void
;
  %alloca = alloca i64
  %load.a = load i32, ptr %a, align 4

  %load.b = load i32, ptr %b
  store i32 %load.b, ptr %a

  %obj.size = call i64 @llvm.objectsize.i64.p0(ptr %c, i1 false)
  store i64 %obj.size, ptr %alloca

  tail call void @llvm.assume(i1 true) ["align"(ptr %a, i32 32)]

  ret void
}

declare i64 @llvm.objectsize.i64.p0(ptr, i1)
declare void @llvm.assume(i1)