; xref: /llvm-project/llvm/test/Transforms/InstCombine/builtin-dynamic-object-size.ll (revision c00f49cf12ff2916a372176f2154a83eb80f1692)
; NOTE: Assertions have been autogenerated by utils/update_test_checks.py UTC_ARGS: --version 2
; RUN: opt -passes=instcombine -S < %s | FileCheck %s

; p7 is declared as a 32-bit address space; the default pointer width is 64.
; The @as_cast tests below rely on this width mismatch.
target datalayout = "e-m:o-i64:64-f80:128-n8:16:32:64-S128-p7:32:32"
target triple = "x86_64-apple-macosx10.14.0"

; Function Attrs: nounwind ssp uwtable
; The dynamic object size of malloc(%sz) is %sz itself: the intrinsic call is
; replaced by %sz, guarded by an assume that %sz != -1 ("unknown" sentinel).
define i64 @weird_identity_but_ok(i64 %sz) {
; CHECK-LABEL: define i64 @weird_identity_but_ok
; CHECK-SAME: (i64 [[SZ:%.*]]) {
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[TMP0:%.*]] = icmp ne i64 [[SZ]], -1
; CHECK-NEXT:    call void @llvm.assume(i1 [[TMP0]])
; CHECK-NEXT:    ret i64 [[SZ]]
;
entry:
  %call = tail call ptr @malloc(i64 %sz)
  %calc_size = tail call i64 @llvm.objectsize.i64.p0(ptr %call, i1 false, i1 true, i1 true)
  tail call void @free(ptr %call)
  ret i64 %calc_size
}

; objectsize looks through the phi: it folds to a phi of the two constant
; allocation sizes (10 and 30) from the incoming malloc calls.
define i64 @phis_are_neat(i1 %which) {
; CHECK-LABEL: define i64 @phis_are_neat
; CHECK-SAME: (i1 [[WHICH:%.*]]) {
; CHECK-NEXT:  entry:
; CHECK-NEXT:    br i1 [[WHICH]], label [[FIRST_LABEL:%.*]], label [[SECOND_LABEL:%.*]]
; CHECK:       first_label:
; CHECK-NEXT:    br label [[JOIN_LABEL:%.*]]
; CHECK:       second_label:
; CHECK-NEXT:    br label [[JOIN_LABEL]]
; CHECK:       join_label:
; CHECK-NEXT:    [[TMP0:%.*]] = phi i64 [ 10, [[FIRST_LABEL]] ], [ 30, [[SECOND_LABEL]] ]
; CHECK-NEXT:    ret i64 [[TMP0]]
;
entry:
  br i1 %which, label %first_label, label %second_label

first_label:
  %first_call = call ptr @malloc(i64 10)
  br label %join_label

second_label:
  %second_call = call ptr @malloc(i64 30)
  br label %join_label

join_label:
  %joined = phi ptr [ %first_call, %first_label ], [ %second_call, %second_label ]
  %calc_size = tail call i64 @llvm.objectsize.i64.p0(ptr %joined, i1 false, i1 true, i1 true)
  ret i64 %calc_size
}

; An interior pointer 2 bytes into a malloc(%sz) allocation: the remaining
; size folds to usub.sat(%sz, 2), clamping to 0 when %sz < 2.
define i64 @internal_pointer(i64 %sz) {
; CHECK-LABEL: define i64 @internal_pointer
; CHECK-SAME: (i64 [[SZ:%.*]]) {
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[TMP0:%.*]] = call i64 @llvm.usub.sat.i64(i64 [[SZ]], i64 2)
; CHECK-NEXT:    ret i64 [[TMP0]]
;
entry:
  %ptr = call ptr @malloc(i64 %sz)
  %ptr2 = getelementptr inbounds i8, ptr %ptr, i32 2
  %calc_size = call i64 @llvm.objectsize.i64.p0(ptr %ptr2, i1 false, i1 true, i1 true)
  ret i64 %calc_size
}

; With null-is-unknown-size (third argument) set to true, objectsize of a
; null pointer cannot be folded; the call must remain.
define i64 @uses_nullptr_no_fold() {
; CHECK-LABEL: define i64 @uses_nullptr_no_fold() {
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[RES:%.*]] = call i64 @llvm.objectsize.i64.p0(ptr null, i1 false, i1 true, i1 true)
; CHECK-NEXT:    ret i64 [[RES]]
;
entry:
  %res = call i64 @llvm.objectsize.i64.p0(ptr null, i1 false, i1 true, i1 true)
  ret i64 %res
}

; With null-is-unknown-size set to false, objectsize(null) folds to 0.
define i64 @uses_nullptr_fold() {
; CHECK-LABEL: define i64 @uses_nullptr_fold() {
; CHECK-NEXT:  entry:
; CHECK-NEXT:    ret i64 0
;
entry:
  ; NOTE: the third parameter to this call is false, unlike above.
  %res = call i64 @llvm.objectsize.i64.p0(ptr null, i1 false, i1 false, i1 true)
  ret i64 %res
}

@d = common global i8 0, align 1
@c = common global i32 0, align 4

; Function Attrs: nounwind
; %dp.05 is a loop-carried phi stepping through @d by a runtime stride, so
; its object size is not computable and the objectsize call is left in place.
; The unused offset arithmetic (%add.ptr.offs, %2) is removed as dead code.
define void @f() {
; CHECK-LABEL: define void @f() {
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[DOTPR:%.*]] = load i32, ptr @c, align 4
; CHECK-NEXT:    [[TOBOOL4:%.*]] = icmp eq i32 [[DOTPR]], 0
; CHECK-NEXT:    br i1 [[TOBOOL4]], label [[FOR_END:%.*]], label [[FOR_BODY:%.*]]
; CHECK:       for.body:
; CHECK-NEXT:    [[DP_05:%.*]] = phi ptr [ [[ADD_PTR:%.*]], [[FOR_BODY]] ], [ @d, [[ENTRY:%.*]] ]
; CHECK-NEXT:    [[TMP0:%.*]] = tail call i64 @llvm.objectsize.i64.p0(ptr [[DP_05]], i1 false, i1 true, i1 true)
; CHECK-NEXT:    [[CONV:%.*]] = trunc i64 [[TMP0]] to i32
; CHECK-NEXT:    tail call void @bury(i32 [[CONV]])
; CHECK-NEXT:    [[TMP1:%.*]] = load i32, ptr @c, align 4
; CHECK-NEXT:    [[IDX_EXT:%.*]] = sext i32 [[TMP1]] to i64
; CHECK-NEXT:    [[ADD_PTR]] = getelementptr inbounds i8, ptr [[DP_05]], i64 [[IDX_EXT]]
; CHECK-NEXT:    [[ADD:%.*]] = shl nsw i32 [[TMP1]], 1
; CHECK-NEXT:    store i32 [[ADD]], ptr @c, align 4
; CHECK-NEXT:    [[TOBOOL:%.*]] = icmp eq i32 [[TMP1]], 0
; CHECK-NEXT:    br i1 [[TOBOOL]], label [[FOR_END]], label [[FOR_BODY]]
; CHECK:       for.end:
; CHECK-NEXT:    ret void
;
entry:
  %.pr = load i32, ptr @c, align 4
  %tobool4 = icmp eq i32 %.pr, 0
  br i1 %tobool4, label %for.end, label %for.body

for.body:                                         ; preds = %entry, %for.body
  %dp.05 = phi ptr [ %add.ptr, %for.body ], [ @d, %entry ]
  %0 = tail call i64 @llvm.objectsize.i64.p0(ptr %dp.05, i1 false, i1 true, i1 true)
  %conv = trunc i64 %0 to i32
  tail call void @bury(i32 %conv) #3
  %1 = load i32, ptr @c, align 4
  %idx.ext = sext i32 %1 to i64
  %add.ptr.offs = add i64 %idx.ext, 0
  %2 = add i64 undef, %add.ptr.offs
  %add.ptr = getelementptr inbounds i8, ptr %dp.05, i64 %idx.ext
  %add = shl nsw i32 %1, 1
  store i32 %add, ptr @c, align 4
  %tobool = icmp eq i32 %1, 0
  br i1 %tobool, label %for.end, label %for.body

for.end:                                          ; preds = %for.body, %entry
  ret void
}

; The computed object size (%alloc, with an assume that %alloc != -1) can
; never equal -1, so the compare folds and the branch becomes `br i1 false`,
; leaving the @unfortified path dead.
define void @bdos_cmpm1(i64 %alloc) {
; CHECK-LABEL: define void @bdos_cmpm1
; CHECK-SAME: (i64 [[ALLOC:%.*]]) {
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[OBJ:%.*]] = call ptr @malloc(i64 [[ALLOC]])
; CHECK-NEXT:    [[TMP0:%.*]] = icmp ne i64 [[ALLOC]], -1
; CHECK-NEXT:    call void @llvm.assume(i1 [[TMP0]])
; CHECK-NEXT:    br i1 false, label [[IF_ELSE:%.*]], label [[IF_THEN:%.*]]
; CHECK:       if.then:
; CHECK-NEXT:    call void @fortified_chk(ptr [[OBJ]], i64 [[ALLOC]])
; CHECK-NEXT:    br label [[IF_END:%.*]]
; CHECK:       if.else:
; CHECK-NEXT:    br label [[IF_END]]
; CHECK:       if.end:
; CHECK-NEXT:    ret void
;
entry:
  %obj = call ptr @malloc(i64 %alloc)
  %objsize = call i64 @llvm.objectsize.i64.p0(ptr %obj, i1 0, i1 0, i1 1)
  %cmp.not = icmp eq i64 %objsize, -1
  br i1 %cmp.not, label %if.else, label %if.then

if.then:
  call void @fortified_chk(ptr %obj, i64 %objsize)
  br label %if.end

if.else:
  call void @unfortified(ptr %obj, i64 %objsize)
  br label %if.end

if.end:                                           ; preds = %if.else, %if.then
  ret void
}

; Same as @bdos_cmpm1, but the allocation size is a computed expression
; (%alloc / %part); the compare against -1 still folds to false.
define void @bdos_cmpm1_expr(i64 %alloc, i64 %part) {
; CHECK-LABEL: define void @bdos_cmpm1_expr
; CHECK-SAME: (i64 [[ALLOC:%.*]], i64 [[PART:%.*]]) {
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[SZ:%.*]] = udiv i64 [[ALLOC]], [[PART]]
; CHECK-NEXT:    [[OBJ:%.*]] = call ptr @malloc(i64 [[SZ]])
; CHECK-NEXT:    [[TMP0:%.*]] = icmp ne i64 [[SZ]], -1
; CHECK-NEXT:    call void @llvm.assume(i1 [[TMP0]])
; CHECK-NEXT:    br i1 false, label [[IF_ELSE:%.*]], label [[IF_THEN:%.*]]
; CHECK:       if.then:
; CHECK-NEXT:    call void @fortified_chk(ptr [[OBJ]], i64 [[SZ]])
; CHECK-NEXT:    br label [[IF_END:%.*]]
; CHECK:       if.else:
; CHECK-NEXT:    br label [[IF_END]]
; CHECK:       if.end:
; CHECK-NEXT:    ret void
;
entry:
  %sz = udiv i64 %alloc, %part
  %obj = call ptr @malloc(i64 %sz)
  %objsize = call i64 @llvm.objectsize.i64.p0(ptr %obj, i1 0, i1 0, i1 1)
  %cmp.not = icmp eq i64 %objsize, -1
  br i1 %cmp.not, label %if.else, label %if.then

if.then:
  call void @fortified_chk(ptr %obj, i64 %objsize)
  br label %if.end

if.else:
  call void @unfortified(ptr %obj, i64 %objsize)
  br label %if.end

if.end:                                           ; preds = %if.else, %if.then
  ret void
}

@p7 = internal addrspace(7) global i8 0

; Gracefully handle AS cast when the address spaces have different pointer widths.
; The select folds to a select of the two sizes: 64 for the malloc, 0 for the
; pointer one past the end of the 1-byte global @p7.
define i64 @as_cast(i1 %c) {
; CHECK-LABEL: define i64 @as_cast
; CHECK-SAME: (i1 [[C:%.*]]) {
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[TMP0:%.*]] = select i1 [[C]], i64 64, i64 0
; CHECK-NEXT:    ret i64 [[TMP0]]
;
entry:
  %p0 = tail call ptr @malloc(i64 64)
  %gep = getelementptr i8, ptr addrspace(7) @p7, i32 1
  %as = addrspacecast ptr addrspace(7) %gep to ptr
  %select = select i1 %c, ptr %p0, ptr %as
  %calc_size = tail call i64 @llvm.objectsize.i64.p0(ptr %select, i1 false, i1 true, i1 true)
  ret i64 %calc_size
}

; Same as @as_cast, but with the GEP + addrspacecast expressed as a single
; constant expression instead of separate instructions.
define i64 @constexpr_as_cast(i1 %c) {
; CHECK-LABEL: define i64 @constexpr_as_cast
; CHECK-SAME: (i1 [[C:%.*]]) {
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[TMP0:%.*]] = select i1 [[C]], i64 64, i64 0
; CHECK-NEXT:    ret i64 [[TMP0]]
;
entry:
  %p0 = tail call ptr @malloc(i64 64)
  %select = select i1 %c, ptr %p0, ptr addrspacecast (ptr addrspace(7) getelementptr (i8, ptr addrspace(7) @p7, i32 1) to ptr)
  %calc_size = tail call i64 @llvm.objectsize.i64.p0(ptr %select, i1 false, i1 true, i1 true)
  ret i64 %calc_size
}

declare void @bury(i32) local_unnamed_addr #2

; Function Attrs: nounwind allocsize(0)
declare ptr @malloc(i64) nounwind allocsize(0) allockind("alloc,uninitialized") "alloc-family"="malloc"

declare ptr @get_unknown_buffer()

; Function Attrs: nounwind
declare void @free(ptr nocapture) nounwind allockind("free") "alloc-family"="malloc"

; Function Attrs: nounwind readnone speculatable
declare i64 @llvm.objectsize.i64.p0(ptr, i1, i1, i1)

declare void @fortified_chk(ptr, i64)

declare void @unfortified(ptr, i64)