; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt -S -passes=instcombine < %s | FileCheck %s

declare double @llvm.pow.f64(double, double)
declare void @use(double)

; negative test for:
; pow(a,b) * a --> pow(a, b+1) (requires reassoc)
define double @pow_ab_a(double %a, double %b)  {
; CHECK-LABEL: @pow_ab_a(
; CHECK-NEXT:    [[P:%.*]] = call double @llvm.pow.f64(double [[A:%.*]], double [[B:%.*]])
; CHECK-NEXT:    [[M:%.*]] = fmul double [[P]], [[A]]
; CHECK-NEXT:    ret double [[M]]
;
  %p = call double @llvm.pow.f64(double %a, double %b)
  %m = fmul double %p, %a
  ret double %m
}

; pow(a,b) * a --> pow(a, b+1)

define double @pow_ab_a_reassoc(double %a, double %b)  {
; CHECK-LABEL: @pow_ab_a_reassoc(
; CHECK-NEXT:    [[TMP1:%.*]] = fadd reassoc double [[B:%.*]], 1.000000e+00
; CHECK-NEXT:    [[M:%.*]] = call reassoc double @llvm.pow.f64(double [[A:%.*]], double [[TMP1]])
; CHECK-NEXT:    ret double [[M]]
;
  %p = call double @llvm.pow.f64(double %a, double %b)
  %m = fmul reassoc double %p, %a
  ret double %m
}

; a * pow(a,b) --> pow(a, b+1)

define double @pow_ab_a_reassoc_commute(double %pa, double %b)  {
; CHECK-LABEL: @pow_ab_a_reassoc_commute(
; CHECK-NEXT:    [[A:%.*]] = fadd double [[PA:%.*]], 4.200000e+01
; CHECK-NEXT:    [[TMP1:%.*]] = fadd reassoc double [[B:%.*]], 1.000000e+00
; CHECK-NEXT:    [[M:%.*]] = call reassoc double @llvm.pow.f64(double [[A]], double [[TMP1]])
; CHECK-NEXT:    ret double [[M]]
;
  %a = fadd double %pa, 42.0 ; thwart complexity-based canonicalization
  %p = call double @llvm.pow.f64(double %a, double %b)
  %m = fmul reassoc double %a, %p
  ret double %m
}

; negative test - extra uses not allowed

define double @pow_ab_a_reassoc_use(double %a, double %b)  {
; CHECK-LABEL: @pow_ab_a_reassoc_use(
; CHECK-NEXT:    [[P:%.*]] = call double @llvm.pow.f64(double [[A:%.*]], double [[B:%.*]])
; CHECK-NEXT:    [[M:%.*]] = fmul reassoc double [[P]], [[A]]
; CHECK-NEXT:    call void @use(double [[P]])
; CHECK-NEXT:    ret double [[M]]
;
  %p = call double @llvm.pow.f64(double %a, double %b)
  %m = fmul reassoc double %p, %a
  call void @use(double %p)
  ret double %m
}

; negative test for:
; pow(a,b) * 1.0/a --> pow(a, b-1) (requires reassoc)

define double @pow_ab_recip_a(double %a, double %b)  {
; CHECK-LABEL: @pow_ab_recip_a(
; CHECK-NEXT:    [[R:%.*]] = fdiv double 1.000000e+00, [[A:%.*]]
; CHECK-NEXT:    [[P:%.*]] = call double @llvm.pow.f64(double [[A]], double [[B:%.*]])
; CHECK-NEXT:    [[M:%.*]] = fmul double [[R]], [[P]]
; CHECK-NEXT:    ret double [[M]]
;
  %r = fdiv double 1.0, %a
  %p = call double @llvm.pow.f64(double %a, double %b)
  %m = fmul double %r, %p
  ret double %m
}

; pow(a,b) / a --> pow(a, b-1) (requires reassoc)

define double @pow_ab_recip_a_reassoc(double %a, double %b)  {
; CHECK-LABEL: @pow_ab_recip_a_reassoc(
; CHECK-NEXT:    [[TMP1:%.*]] = fadd reassoc double [[B:%.*]], -1.000000e+00
; CHECK-NEXT:    [[M:%.*]] = call reassoc double @llvm.pow.f64(double [[A:%.*]], double [[TMP1]])
; CHECK-NEXT:    ret double [[M]]
;
  %r = fdiv reassoc double 1.0, %a
  %p = call reassoc double @llvm.pow.f64(double %a, double %b)
  %m = fmul reassoc double %r, %p
  ret double %m
}

; pow(a,b) / a --> pow(a, b-1) (requires reassoc)

define double @pow_ab_recip_a_reassoc_commute(double %a, double %b)  {
; CHECK-LABEL: @pow_ab_recip_a_reassoc_commute(
; CHECK-NEXT:    [[TMP1:%.*]] = fadd reassoc double [[B:%.*]], -1.000000e+00
; CHECK-NEXT:    [[M:%.*]] = call reassoc double @llvm.pow.f64(double [[A:%.*]], double [[TMP1]])
; CHECK-NEXT:    ret double [[M]]
;
  %r = fdiv reassoc double 1.0, %a
  %p = call reassoc double @llvm.pow.f64(double %a, double %b)
  %m = fmul reassoc double %p, %r
  ret double %m
}

; TODO: extra use prevents conversion to fmul, so this needs a different pattern match.

define double @pow_ab_recip_a_reassoc_use1(double %a, double %b)  {
; CHECK-LABEL: @pow_ab_recip_a_reassoc_use1(
; CHECK-NEXT:    [[R:%.*]] = fdiv reassoc double 1.000000e+00, [[A:%.*]]
; CHECK-NEXT:    [[P:%.*]] = call reassoc double @llvm.pow.f64(double [[A]], double [[B:%.*]])
; CHECK-NEXT:    [[M:%.*]] = fmul reassoc double [[R]], [[P]]
; CHECK-NEXT:    call void @use(double [[R]])
; CHECK-NEXT:    ret double [[M]]
;
  %r = fdiv reassoc double 1.0, %a
  %p = call reassoc double @llvm.pow.f64(double %a, double %b)
  %m = fmul reassoc double %r, %p
  call void @use(double %r)
  ret double %m
}

; negative test - extra pow uses not allowed

define double @pow_ab_recip_a_reassoc_use2(double %a, double %b)  {
; CHECK-LABEL: @pow_ab_recip_a_reassoc_use2(
; CHECK-NEXT:    [[P:%.*]] = call reassoc double @llvm.pow.f64(double [[A:%.*]], double [[B:%.*]])
; CHECK-NEXT:    [[M:%.*]] = fdiv reassoc double [[P]], [[A]]
; CHECK-NEXT:    call void @use(double [[P]])
; CHECK-NEXT:    ret double [[M]]
;
  %r = fdiv reassoc double 1.0, %a
  %p = call reassoc double @llvm.pow.f64(double %a, double %b)
  %m = fmul reassoc double %r, %p
  call void @use(double %p)
  ret double %m
}

; negative test - extra pow uses not allowed

define double @pow_ab_recip_a_reassoc_use3(double %a, double %b)  {
; CHECK-LABEL: @pow_ab_recip_a_reassoc_use3(
; CHECK-NEXT:    [[R:%.*]] = fdiv reassoc double 1.000000e+00, [[A:%.*]]
; CHECK-NEXT:    [[P:%.*]] = call reassoc double @llvm.pow.f64(double [[A]], double [[B:%.*]])
; CHECK-NEXT:    [[M:%.*]] = fmul reassoc double [[R]], [[P]]
; CHECK-NEXT:    call void @use(double [[R]])
; CHECK-NEXT:    call void @use(double [[P]])
; CHECK-NEXT:    ret double [[M]]
;
  %r = fdiv reassoc double 1.0, %a
  %p = call reassoc double @llvm.pow.f64(double %a, double %b)
  %m = fmul reassoc double %r, %p
  call void @use(double %r)
  call void @use(double %p)
  ret double %m
}

; negative test for:
; (a**b) * (c**b) --> (a*c) ** b (if mul is reassoc)

define double @pow_ab_pow_cb(double %a, double %b, double %c) {
; CHECK-LABEL: @pow_ab_pow_cb(
; CHECK-NEXT:    [[TMP1:%.*]] = call double @llvm.pow.f64(double [[A:%.*]], double [[B:%.*]])
; CHECK-NEXT:    [[TMP2:%.*]] = call double @llvm.pow.f64(double [[C:%.*]], double [[B]])
; CHECK-NEXT:    [[MUL:%.*]] = fmul double [[TMP2]], [[TMP1]]
; CHECK-NEXT:    ret double [[MUL]]
;
  %1 = call double @llvm.pow.f64(double %a, double %b)
  %2 = call double @llvm.pow.f64(double %c, double %b)
  %mul = fmul double %2, %1
  ret double %mul
}

; (a**b) * (c**b) --> (a*c) ** b

define double @pow_ab_pow_cb_reassoc(double %a, double %b, double %c) {
; CHECK-LABEL: @pow_ab_pow_cb_reassoc(
; CHECK-NEXT:    [[TMP1:%.*]] = fmul reassoc double [[C:%.*]], [[A:%.*]]
; CHECK-NEXT:    [[MUL:%.*]] = call reassoc double @llvm.pow.f64(double [[TMP1]], double [[B:%.*]])
; CHECK-NEXT:    ret double [[MUL]]
;
  %1 = call double @llvm.pow.f64(double %a, double %b)
  %2 = call double @llvm.pow.f64(double %c, double %b)
  %mul = fmul reassoc double %2, %1
  ret double %mul
}

; (a**b) * (c**b) --> (a*c) ** b

define double @pow_ab_pow_cb_reassoc_use1(double %a, double %b, double %c) {
; CHECK-LABEL: @pow_ab_pow_cb_reassoc_use1(
; CHECK-NEXT:    [[AB:%.*]] = call double @llvm.pow.f64(double [[A:%.*]], double [[B:%.*]])
; CHECK-NEXT:    [[TMP1:%.*]] = fmul reassoc double [[A]], [[C:%.*]]
; CHECK-NEXT:    [[MUL:%.*]] = call reassoc double @llvm.pow.f64(double [[TMP1]], double [[B]])
; CHECK-NEXT:    call void @use(double [[AB]])
; CHECK-NEXT:    ret double [[MUL]]
;
  %ab = call double @llvm.pow.f64(double %a, double %b)
  %cb = call double @llvm.pow.f64(double %c, double %b)
  %mul = fmul reassoc double %ab, %cb
  call void @use(double %ab)
  ret double %mul
}

; (a**b) * (c**b) --> (a*c) ** b

define double @pow_ab_pow_cb_reassoc_use2(double %a, double %b, double %c) {
; CHECK-LABEL: @pow_ab_pow_cb_reassoc_use2(
; CHECK-NEXT:    [[CB:%.*]] = call double @llvm.pow.f64(double [[C:%.*]], double [[B:%.*]])
; CHECK-NEXT:    [[TMP1:%.*]] = fmul reassoc double [[A:%.*]], [[C]]
; CHECK-NEXT:    [[MUL:%.*]] = call reassoc double @llvm.pow.f64(double [[TMP1]], double [[B]])
; CHECK-NEXT:    call void @use(double [[CB]])
; CHECK-NEXT:    ret double [[MUL]]
;
  %ab = call double @llvm.pow.f64(double %a, double %b)
  %cb = call double @llvm.pow.f64(double %c, double %b)
  %mul = fmul reassoc double %ab, %cb
  call void @use(double %cb)
  ret double %mul
}

; negative test - too many extra uses

define double @pow_ab_pow_cb_reassoc_use3(double %a, double %b, double %c) {
; CHECK-LABEL: @pow_ab_pow_cb_reassoc_use3(
; CHECK-NEXT:    [[AB:%.*]] = call double @llvm.pow.f64(double [[A:%.*]], double [[B:%.*]])
; CHECK-NEXT:    [[CB:%.*]] = call double @llvm.pow.f64(double [[C:%.*]], double [[B]])
; CHECK-NEXT:    [[MUL:%.*]] = fmul reassoc double [[AB]], [[CB]]
; CHECK-NEXT:    call void @use(double [[AB]])
; CHECK-NEXT:    call void @use(double [[CB]])
; CHECK-NEXT:    ret double [[MUL]]
;
  %ab = call double @llvm.pow.f64(double %a, double %b)
  %cb = call double @llvm.pow.f64(double %c, double %b)
  %mul = fmul reassoc double %ab, %cb
  call void @use(double %ab)
  call void @use(double %cb)
  ret double %mul
}

; negative test for:
; pow(a,b) * pow(a,c) --> pow(a, b+c) (requires reassoc)

define double @pow_ab_pow_ac(double %a, double %b, double %c) {
; CHECK-LABEL: @pow_ab_pow_ac(
; CHECK-NEXT:    [[TMP1:%.*]] = call double @llvm.pow.f64(double [[A:%.*]], double [[B:%.*]])
; CHECK-NEXT:    [[TMP2:%.*]] = call double @llvm.pow.f64(double [[A]], double [[C:%.*]])
; CHECK-NEXT:    [[MUL:%.*]] = fmul double [[TMP2]], [[TMP1]]
; CHECK-NEXT:    ret double [[MUL]]
;
  %1 = call double @llvm.pow.f64(double %a, double %b)
  %2 = call double @llvm.pow.f64(double %a, double %c)
  %mul = fmul double %2, %1
  ret double %mul
}

; pow(a,b) * pow(a,c) --> pow(a, b+c)

define double @pow_ab_x_pow_ac_reassoc(double %a, double %b, double %c) {
; CHECK-LABEL: @pow_ab_x_pow_ac_reassoc(
; CHECK-NEXT:    [[TMP1:%.*]] = fadd reassoc double [[C:%.*]], [[B:%.*]]
; CHECK-NEXT:    [[MUL:%.*]] = call reassoc double @llvm.pow.f64(double [[A:%.*]], double [[TMP1]])
; CHECK-NEXT:    ret double [[MUL]]
;
  %1 = call double @llvm.pow.f64(double %a, double %b)
  %2 = call double @llvm.pow.f64(double %a, double %c)
  %mul = fmul reassoc double %2, %1
  ret double %mul
}

; pow(a,b) * pow(a,b) --> pow(a, b+b)

define double @pow_ab_reassoc(double %a, double %b) {
; CHECK-LABEL: @pow_ab_reassoc(
; CHECK-NEXT:    [[TMP1:%.*]] = fadd reassoc double [[B:%.*]], [[B]]
; CHECK-NEXT:    [[MUL:%.*]] = call reassoc double @llvm.pow.f64(double [[A:%.*]], double [[TMP1]])
; CHECK-NEXT:    ret double [[MUL]]
;
  %1 = call double @llvm.pow.f64(double %a, double %b)
  %mul = fmul reassoc double %1, %1
  ret double %mul
}

; negative test - extra use of the pow call prevents the fold

define double @pow_ab_reassoc_extra_use(double %a, double %b) {
; CHECK-LABEL: @pow_ab_reassoc_extra_use(
; CHECK-NEXT:    [[TMP1:%.*]] = call double @llvm.pow.f64(double [[A:%.*]], double [[B:%.*]])
; CHECK-NEXT:    [[MUL:%.*]] = fmul reassoc double [[TMP1]], [[TMP1]]
; CHECK-NEXT:    call void @use(double [[TMP1]])
; CHECK-NEXT:    ret double [[MUL]]
;
  %1 = call double @llvm.pow.f64(double %a, double %b)
  %mul = fmul reassoc double %1, %1
  call void @use(double %1)
  ret double %mul
}

; pow(a,b) * pow(a,c) --> pow(a, b+c) - one extra use is allowed

define double @pow_ab_x_pow_ac_reassoc_extra_use(double %a, double %b, double %c) {
; CHECK-LABEL: @pow_ab_x_pow_ac_reassoc_extra_use(
; CHECK-NEXT:    [[TMP1:%.*]] = call double @llvm.pow.f64(double [[A:%.*]], double [[B:%.*]])
; CHECK-NEXT:    [[TMP2:%.*]] = fadd reassoc double [[B]], [[C:%.*]]
; CHECK-NEXT:    [[MUL:%.*]] = call reassoc double @llvm.pow.f64(double [[A]], double [[TMP2]])
; CHECK-NEXT:    call void @use(double [[TMP1]])
; CHECK-NEXT:    ret double [[MUL]]
;
  %1 = call double @llvm.pow.f64(double %a, double %b)
  %2 = call double @llvm.pow.f64(double %a, double %c)
  %mul = fmul reassoc double %1, %2
  call void @use(double %1)
  ret double %mul
}

; negative test - extra uses of both pow calls prevent the fold

define double @pow_ab_x_pow_ac_reassoc_multiple_uses(double %a, double %b, double %c) {
; CHECK-LABEL: @pow_ab_x_pow_ac_reassoc_multiple_uses(
; CHECK-NEXT:    [[TMP1:%.*]] = call double @llvm.pow.f64(double [[A:%.*]], double [[B:%.*]])
; CHECK-NEXT:    [[TMP2:%.*]] = call double @llvm.pow.f64(double [[A]], double [[C:%.*]])
; CHECK-NEXT:    [[MUL:%.*]] = fmul reassoc double [[TMP1]], [[TMP2]]
; CHECK-NEXT:    call void @use(double [[TMP1]])
; CHECK-NEXT:    call void @use(double [[TMP2]])
; CHECK-NEXT:    ret double [[MUL]]
;
  %1 = call double @llvm.pow.f64(double %a, double %b)
  %2 = call double @llvm.pow.f64(double %a, double %c)
  %mul = fmul reassoc double %1, %2
  call void @use(double %1)
  call void @use(double %2)
  ret double %mul
}