; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt < %s -passes=instcombine -S | FileCheck %s

declare void @use(i32 %arg)
declare void @vec_use(<4 x i32> %arg)

; (x+c1)+c2

define i32 @add_const_add_const(i32 %arg) {
; CHECK-LABEL: @add_const_add_const(
; CHECK-NEXT:    [[T1:%.*]] = add i32 [[ARG:%.*]], 10
; CHECK-NEXT:    ret i32 [[T1]]
;
  %t0 = add i32 %arg, 8
  %t1 = add i32 %t0, 2
  ret i32 %t1
}

define i32 @add_const_add_const_extrause(i32 %arg) {
; CHECK-LABEL: @add_const_add_const_extrause(
; CHECK-NEXT:    [[T0:%.*]] = add i32 [[ARG:%.*]], 8
; CHECK-NEXT:    call void @use(i32 [[T0]])
; CHECK-NEXT:    [[T1:%.*]] = add i32 [[ARG]], 10
; CHECK-NEXT:    ret i32 [[T1]]
;
  %t0 = add i32 %arg, 8
  call void @use(i32 %t0)
  %t1 = add i32 %t0, 2
  ret i32 %t1
}

define <4 x i32> @vec_add_const_add_const(<4 x i32> %arg) {
; CHECK-LABEL: @vec_add_const_add_const(
; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[ARG:%.*]], splat (i32 10)
; CHECK-NEXT:    ret <4 x i32> [[T1]]
;
  %t0 = add <4 x i32> %arg, <i32 8, i32 8, i32 8, i32 8>
  %t1 = add <4 x i32> %t0, <i32 2, i32 2, i32 2, i32 2>
  ret <4 x i32> %t1
}

define <4 x i32> @vec_add_const_add_const_extrause(<4 x i32> %arg) {
; CHECK-LABEL: @vec_add_const_add_const_extrause(
; CHECK-NEXT:    [[T0:%.*]] = add <4 x i32> [[ARG:%.*]], splat (i32 8)
; CHECK-NEXT:    call void @vec_use(<4 x i32> [[T0]])
; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[ARG]], splat (i32 10)
; CHECK-NEXT:    ret <4 x i32> [[T1]]
;
  %t0 = add <4 x i32> %arg, <i32 8, i32 8, i32 8, i32 8>
  call void @vec_use(<4 x i32> %t0)
  %t1 = add <4 x i32> %t0, <i32 2, i32 2, i32 2, i32 2>
  ret <4 x i32> %t1
}

define <4 x i32> @vec_add_const_add_const_nonsplat(<4 x i32> %arg) {
; CHECK-LABEL: @vec_add_const_add_const_nonsplat(
; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[ARG:%.*]], <i32 23, i32 undef, i32 undef, i32 10>
; CHECK-NEXT:    ret <4 x i32> [[T1]]
;
  %t0 = add <4 x i32> %arg, <i32 21, i32 undef, i32 8, i32 8>
  %t1 = add <4 x i32> %t0, <i32 2, i32 3, i32 undef, i32 2>
  ret <4 x i32> %t1
}

; (x+c1)-c2

define i32 @add_const_sub_const(i32 %arg) {
; CHECK-LABEL: @add_const_sub_const(
; CHECK-NEXT:    [[T1:%.*]] = add i32 [[ARG:%.*]], 6
; CHECK-NEXT:    ret i32 [[T1]]
;
  %t0 = add i32 %arg, 8
  %t1 = sub i32 %t0, 2
  ret i32 %t1
}

define i32 @add_const_sub_const_extrause(i32 %arg) {
; CHECK-LABEL: @add_const_sub_const_extrause(
; CHECK-NEXT:    [[T0:%.*]] = add i32 [[ARG:%.*]], 8
; CHECK-NEXT:    call void @use(i32 [[T0]])
; CHECK-NEXT:    [[T1:%.*]] = add i32 [[ARG]], 6
; CHECK-NEXT:    ret i32 [[T1]]
;
  %t0 = add i32 %arg, 8
  call void @use(i32 %t0)
  %t1 = sub i32 %t0, 2
  ret i32 %t1
}

define <4 x i32> @vec_add_const_sub_const(<4 x i32> %arg) {
; CHECK-LABEL: @vec_add_const_sub_const(
; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[ARG:%.*]], splat (i32 6)
; CHECK-NEXT:    ret <4 x i32> [[T1]]
;
  %t0 = add <4 x i32> %arg, <i32 8, i32 8, i32 8, i32 8>
  %t1 = sub <4 x i32> %t0, <i32 2, i32 2, i32 2, i32 2>
  ret <4 x i32> %t1
}

define <4 x i32> @vec_add_const_sub_const_extrause(<4 x i32> %arg) {
; CHECK-LABEL: @vec_add_const_sub_const_extrause(
; CHECK-NEXT:    [[T0:%.*]] = add <4 x i32> [[ARG:%.*]], splat (i32 8)
; CHECK-NEXT:    call void @vec_use(<4 x i32> [[T0]])
; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[ARG]], splat (i32 6)
; CHECK-NEXT:    ret <4 x i32> [[T1]]
;
  %t0 = add <4 x i32> %arg, <i32 8, i32 8, i32 8, i32 8>
  call void @vec_use(<4 x i32> %t0)
  %t1 = sub <4 x i32> %t0, <i32 2, i32 2, i32 2, i32 2>
  ret <4 x i32> %t1
}

define <4 x i32> @vec_add_const_sub_const_nonsplat(<4 x i32> %arg) {
; CHECK-LABEL: @vec_add_const_sub_const_nonsplat(
; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[ARG:%.*]], <i32 19, i32 undef, i32 undef, i32 6>
; CHECK-NEXT:    ret <4 x i32> [[T1]]
;
  %t0 = add <4 x i32> %arg, <i32 21, i32 undef, i32 8, i32 8>
  %t1 = sub <4 x i32> %t0, <i32 2, i32 3, i32 undef, i32 2>
  ret <4 x i32> %t1
}

; c2-(x+c1)

define i32 @add_const_const_sub(i32 %arg) {
; CHECK-LABEL: @add_const_const_sub(
; CHECK-NEXT:    [[T1:%.*]] = sub i32 -6, [[ARG:%.*]]
; CHECK-NEXT:    ret i32 [[T1]]
;
  %t0 = add i32 %arg, 8
  %t1 = sub i32 2, %t0
  ret i32 %t1
}

define i8 @add_nsw_const_const_sub_nsw(i8 %arg) {
; CHECK-LABEL: @add_nsw_const_const_sub_nsw(
; CHECK-NEXT:    [[T1:%.*]] = sub nsw i8 -128, [[ARG:%.*]]
; CHECK-NEXT:    ret i8 [[T1]]
;
  %t0 = add nsw i8 %arg, 1
  %t1 = sub nsw i8 -127, %t0
  ret i8 %t1
}

define i8 @add_nsw_const_const_sub(i8 %arg) {
; CHECK-LABEL: @add_nsw_const_const_sub(
; CHECK-NEXT:    [[T1:%.*]] = sub i8 -128, [[ARG:%.*]]
; CHECK-NEXT:    ret i8 [[T1]]
;
  %t0 = add nsw i8 %arg, 1
  %t1 = sub i8 -127, %t0
  ret i8 %t1
}

define i8 @add_const_const_sub_nsw(i8 %arg) {
; CHECK-LABEL: @add_const_const_sub_nsw(
; CHECK-NEXT:    [[T1:%.*]] = sub i8 -128, [[ARG:%.*]]
; CHECK-NEXT:    ret i8 [[T1]]
;
  %t0 = add i8 %arg, 1
  %t1 = sub nsw i8 -127, %t0
  ret i8 %t1
}

; Folding this to 127-X with nsw would be more poisonous than the original -127-(X+2) with nsw (see X = -1).
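; For example, with X = -1 the original computes (-1)+2 = 1 and -127-1 = -128
; with no signed overflow, but 127-(-1) = 128 overflows i8, so the folded sub
; cannot keep the nsw flag.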
define i8 @add_nsw_const_const_sub_nsw_ov(i8 %arg) {
; CHECK-LABEL: @add_nsw_const_const_sub_nsw_ov(
; CHECK-NEXT:    [[T1:%.*]] = sub i8 127, [[ARG:%.*]]
; CHECK-NEXT:    ret i8 [[T1]]
;
  %t0 = add nsw i8 %arg, 2
  %t1 = sub nsw i8 -127, %t0
  ret i8 %t1
}

define i8 @add_nuw_const_const_sub_nuw(i8 %arg) {
; CHECK-LABEL: @add_nuw_const_const_sub_nuw(
; CHECK-NEXT:    [[T1:%.*]] = sub nuw i8 -128, [[ARG:%.*]]
; CHECK-NEXT:    ret i8 [[T1]]
;
  %t0 = add nuw i8 %arg, 1
  %t1 = sub nuw i8 -127, %t0
  ret i8 %t1
}

define i8 @add_nuw_const_const_sub(i8 %arg) {
; CHECK-LABEL: @add_nuw_const_const_sub(
; CHECK-NEXT:    [[T1:%.*]] = sub i8 -128, [[ARG:%.*]]
; CHECK-NEXT:    ret i8 [[T1]]
;
  %t0 = add nuw i8 %arg, 1
  %t1 = sub i8 -127, %t0
  ret i8 %t1
}

define i8 @add_const_const_sub_nuw(i8 %arg) {
; CHECK-LABEL: @add_const_const_sub_nuw(
; CHECK-NEXT:    [[T1:%.*]] = sub i8 -128, [[ARG:%.*]]
; CHECK-NEXT:    ret i8 [[T1]]
;
  %t0 = add i8 %arg, 1
  %t1 = sub nuw i8 -127, %t0
  ret i8 %t1
}

define <2 x i8> @non_splat_vec_add_nsw_const_const_sub_nsw_not_ov1(<2 x i8> %arg) {
; CHECK-LABEL: @non_splat_vec_add_nsw_const_const_sub_nsw_not_ov1(
; CHECK-NEXT:    [[T1:%.*]] = sub nsw <2 x i8> <i8 -127, i8 -126>, [[ARG:%.*]]
; CHECK-NEXT:    ret <2 x i8> [[T1]]
;
  %t0 = add nsw <2 x i8> %arg, <i8 2, i8 0>
  %t1 = sub nsw <2 x i8> <i8 -125, i8 -126>, %t0
  ret <2 x i8> %t1
}

define <2 x i8> @non_splat_vec_add_nsw_const_const_sub_nsw_not_ov2(<2 x i8> %arg) {
; CHECK-LABEL: @non_splat_vec_add_nsw_const_const_sub_nsw_not_ov2(
; CHECK-NEXT:    [[T1:%.*]] = sub nsw <2 x i8> <i8 -126, i8 -128>, [[ARG:%.*]]
; CHECK-NEXT:    ret <2 x i8> [[T1]]
;
  %t0 = add nsw <2 x i8> %arg, <i8 1, i8 2>
  %t1 = sub nsw <2 x i8> <i8 -125, i8 -126>, %t0
  ret <2 x i8> %t1
}

define <2 x i8> @non_splat_vec_add_nsw_const_const_sub_nsw_not_ov3(<2 x i8> %arg) {
; CHECK-LABEL: @non_splat_vec_add_nsw_const_const_sub_nsw_not_ov3(
; CHECK-NEXT:    [[T1:%.*]] = sub nsw <2 x i8> <i8 -120, i8 -127>, [[ARG:%.*]]
; CHECK-NEXT:    ret <2 x i8> [[T1]]
;
  %t0 = add nsw <2 x i8> %arg, <i8 0, i8 1>
  %t1 = sub nsw <2 x i8> <i8 -120, i8 -126>, %t0
  ret <2 x i8> %t1
}

; Folding this to 127-X with nsw would be more poisonous than the original -127-(X+2) with nsw (see X = -1).
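; Here lane 1 folds -127-2, which wraps to 127; e.g. with arg[1] = -1 the
; original lane computes (-1)+2 = 1 and -127-1 = -128 with no signed overflow,
; but 127-(-1) = 128 overflows i8, so nsw is dropped from the folded sub.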
define <2 x i8> @non_splat_vec_add_nsw_const_const_sub_nsw_ov(<2 x i8> %arg) {
; CHECK-LABEL: @non_splat_vec_add_nsw_const_const_sub_nsw_ov(
; CHECK-NEXT:    [[T1:%.*]] = sub <2 x i8> <i8 -127, i8 127>, [[ARG:%.*]]
; CHECK-NEXT:    ret <2 x i8> [[T1]]
;
  %t0 = add nsw <2 x i8> %arg, <i8 1, i8 2>
  %t1 = sub nsw <2 x i8> <i8 -126, i8 -127>, %t0
  ret <2 x i8> %t1
}


define i32 @add_const_const_sub_extrause(i32 %arg) {
; CHECK-LABEL: @add_const_const_sub_extrause(
; CHECK-NEXT:    [[T0:%.*]] = add i32 [[ARG:%.*]], 8
; CHECK-NEXT:    call void @use(i32 [[T0]])
; CHECK-NEXT:    [[T1:%.*]] = sub i32 -6, [[ARG]]
; CHECK-NEXT:    ret i32 [[T1]]
;
  %t0 = add i32 %arg, 8
  call void @use(i32 %t0)
  %t1 = sub i32 2, %t0
  ret i32 %t1
}

define <4 x i32> @vec_add_const_const_sub(<4 x i32> %arg) {
; CHECK-LABEL: @vec_add_const_const_sub(
; CHECK-NEXT:    [[T1:%.*]] = sub <4 x i32> splat (i32 -6), [[ARG:%.*]]
; CHECK-NEXT:    ret <4 x i32> [[T1]]
;
  %t0 = add <4 x i32> %arg, <i32 8, i32 8, i32 8, i32 8>
  %t1 = sub <4 x i32> <i32 2, i32 2, i32 2, i32 2>, %t0
  ret <4 x i32> %t1
}

define <4 x i32> @vec_add_const_const_sub_extrause(<4 x i32> %arg) {
; CHECK-LABEL: @vec_add_const_const_sub_extrause(
; CHECK-NEXT:    [[T0:%.*]] = add <4 x i32> [[ARG:%.*]], splat (i32 8)
; CHECK-NEXT:    call void @vec_use(<4 x i32> [[T0]])
; CHECK-NEXT:    [[T1:%.*]] = sub <4 x i32> splat (i32 -6), [[ARG]]
; CHECK-NEXT:    ret <4 x i32> [[T1]]
;
  %t0 = add <4 x i32> %arg, <i32 8, i32 8, i32 8, i32 8>
  call void @vec_use(<4 x i32> %t0)
  %t1 = sub <4 x i32> <i32 2, i32 2, i32 2, i32 2>, %t0
  ret <4 x i32> %t1
}

define <4 x i32> @vec_add_const_const_sub_nonsplat(<4 x i32> %arg) {
; CHECK-LABEL: @vec_add_const_const_sub_nonsplat(
; CHECK-NEXT:    [[T1:%.*]] = sub <4 x i32> <i32 -19, i32 undef, i32 undef, i32 -6>, [[ARG:%.*]]
; CHECK-NEXT:    ret <4 x i32> [[T1]]
;
  %t0 = add <4 x i32> %arg, <i32 21, i32 undef, i32 8, i32 8>
  %t1 = sub <4 x i32> <i32 2, i32 3, i32 undef, i32 2>, %t0
  ret <4 x i32> %t1
}

; (x-c1)+c2

define i32 @sub_const_add_const(i32 %arg) {
; CHECK-LABEL: @sub_const_add_const(
; CHECK-NEXT:    [[T1:%.*]] = add i32 [[ARG:%.*]], -6
; CHECK-NEXT:    ret i32 [[T1]]
;
  %t0 = sub i32 %arg, 8
  %t1 = add i32 %t0, 2
  ret i32 %t1
}

define i32 @sub_const_add_const_extrause(i32 %arg) {
; CHECK-LABEL: @sub_const_add_const_extrause(
; CHECK-NEXT:    [[T0:%.*]] = add i32 [[ARG:%.*]], -8
; CHECK-NEXT:    call void @use(i32 [[T0]])
; CHECK-NEXT:    [[T1:%.*]] = add i32 [[ARG]], -6
; CHECK-NEXT:    ret i32 [[T1]]
;
  %t0 = sub i32 %arg, 8
  call void @use(i32 %t0)
  %t1 = add i32 %t0, 2
  ret i32 %t1
}

define <4 x i32> @vec_sub_const_add_const(<4 x i32> %arg) {
; CHECK-LABEL: @vec_sub_const_add_const(
; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[ARG:%.*]], splat (i32 -6)
; CHECK-NEXT:    ret <4 x i32> [[T1]]
;
  %t0 = sub <4 x i32> %arg, <i32 8, i32 8, i32 8, i32 8>
  %t1 = add <4 x i32> %t0, <i32 2, i32 2, i32 2, i32 2>
  ret <4 x i32> %t1
}

define <4 x i32> @vec_sub_const_add_const_extrause(<4 x i32> %arg) {
; CHECK-LABEL: @vec_sub_const_add_const_extrause(
; CHECK-NEXT:    [[T0:%.*]] = add <4 x i32> [[ARG:%.*]], splat (i32 -8)
; CHECK-NEXT:    call void @vec_use(<4 x i32> [[T0]])
; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[ARG]], splat (i32 -6)
; CHECK-NEXT:    ret <4 x i32> [[T1]]
;
  %t0 = sub <4 x i32> %arg, <i32 8, i32 8, i32 8, i32 8>
  call void @vec_use(<4 x i32> %t0)
  %t1 = add <4 x i32> %t0, <i32 2, i32 2, i32 2, i32 2>
  ret <4 x i32> %t1
}

define <4 x i32> @vec_sub_const_add_const_nonsplat(<4 x i32> %arg) {
; CHECK-LABEL: @vec_sub_const_add_const_nonsplat(
; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[ARG:%.*]], <i32 -19, i32 undef, i32 undef, i32 -6>
; CHECK-NEXT:    ret <4 x i32> [[T1]]
;
  %t0 = sub <4 x i32> %arg, <i32 21, i32 undef, i32 8, i32 8>
  %t1 = add <4 x i32> %t0, <i32 2, i32 3, i32 undef, i32 2>
  ret <4 x i32> %t1
}

; (x-c1)-c2

define i32 @sub_const_sub_const(i32 %arg) {
; CHECK-LABEL: @sub_const_sub_const(
; CHECK-NEXT:    [[T1:%.*]] = add i32 [[ARG:%.*]], -10
; CHECK-NEXT:    ret i32 [[T1]]
;
  %t0 = sub i32 %arg, 8
  %t1 = sub i32 %t0, 2
  ret i32 %t1
}

define i32 @sub_const_sub_const_extrause(i32 %arg) {
; CHECK-LABEL: @sub_const_sub_const_extrause(
; CHECK-NEXT:    [[T0:%.*]] = add i32 [[ARG:%.*]], -8
; CHECK-NEXT:    call void @use(i32 [[T0]])
; CHECK-NEXT:    [[T1:%.*]] = add i32 [[ARG]], -10
; CHECK-NEXT:    ret i32 [[T1]]
;
  %t0 = sub i32 %arg, 8
  call void @use(i32 %t0)
  %t1 = sub i32 %t0, 2
  ret i32 %t1
}

define <4 x i32> @vec_sub_const_sub_const(<4 x i32> %arg) {
; CHECK-LABEL: @vec_sub_const_sub_const(
; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[ARG:%.*]], splat (i32 -10)
; CHECK-NEXT:    ret <4 x i32> [[T1]]
;
  %t0 = sub <4 x i32> %arg, <i32 8, i32 8, i32 8, i32 8>
  %t1 = sub <4 x i32> %t0, <i32 2, i32 2, i32 2, i32 2>
  ret <4 x i32> %t1
}

define <4 x i32> @vec_sub_const_sub_const_extrause(<4 x i32> %arg) {
; CHECK-LABEL: @vec_sub_const_sub_const_extrause(
; CHECK-NEXT:    [[T0:%.*]] = add <4 x i32> [[ARG:%.*]], splat (i32 -8)
; CHECK-NEXT:    call void @vec_use(<4 x i32> [[T0]])
; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[ARG]], splat (i32 -10)
; CHECK-NEXT:    ret <4 x i32> [[T1]]
;
  %t0 = sub <4 x i32> %arg, <i32 8, i32 8, i32 8, i32 8>
  call void @vec_use(<4 x i32> %t0)
  %t1 = sub <4 x i32> %t0, <i32 2, i32 2, i32 2, i32 2>
  ret <4 x i32> %t1
}

define <4 x i32> @vec_sub_const_sub_const_nonsplat(<4 x i32> %arg) {
; CHECK-LABEL: @vec_sub_const_sub_const_nonsplat(
; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[ARG:%.*]], <i32 -23, i32 undef, i32 undef, i32 -10>
; CHECK-NEXT:    ret <4 x i32> [[T1]]
;
  %t0 = sub <4 x i32> %arg, <i32 21, i32 undef, i32 8, i32 8>
  %t1 = sub <4 x i32> %t0, <i32 2, i32 3, i32 undef, i32 2>
  ret <4 x i32> %t1
}

; c2-(x-c1)

define i32 @sub_const_const_sub(i32 %arg) {
; CHECK-LABEL: @sub_const_const_sub(
; CHECK-NEXT:    [[T1:%.*]] = sub i32 10, [[ARG:%.*]]
; CHECK-NEXT:    ret i32 [[T1]]
;
  %t0 = sub i32 %arg, 8
  %t1 = sub i32 2, %t0
  ret i32 %t1
}

define i32 @sub_const_const_sub_extrause(i32 %arg) {
; CHECK-LABEL: @sub_const_const_sub_extrause(
; CHECK-NEXT:    [[T0:%.*]] = add i32 [[ARG:%.*]], -8
; CHECK-NEXT:    call void @use(i32 [[T0]])
; CHECK-NEXT:    [[T1:%.*]] = sub i32 10, [[ARG]]
; CHECK-NEXT:    ret i32 [[T1]]
;
  %t0 = sub i32 %arg, 8
  call void @use(i32 %t0)
  %t1 = sub i32 2, %t0
  ret i32 %t1
}

define <4 x i32> @vec_sub_const_const_sub(<4 x i32> %arg) {
; CHECK-LABEL: @vec_sub_const_const_sub(
; CHECK-NEXT:    [[T1:%.*]] = sub <4 x i32> splat (i32 10), [[ARG:%.*]]
; CHECK-NEXT:    ret <4 x i32> [[T1]]
;
  %t0 = sub <4 x i32> %arg, <i32 8, i32 8, i32 8, i32 8>
  %t1 = sub <4 x i32> <i32 2, i32 2, i32 2, i32 2>, %t0
  ret <4 x i32> %t1
}

define <4 x i32> @vec_sub_const_const_sub_extrause(<4 x i32> %arg) {
; CHECK-LABEL: @vec_sub_const_const_sub_extrause(
; CHECK-NEXT:    [[T0:%.*]] = add <4 x i32> [[ARG:%.*]], splat (i32 -8)
; CHECK-NEXT:    call void @vec_use(<4 x i32> [[T0]])
; CHECK-NEXT:    [[T1:%.*]] = sub <4 x i32> splat (i32 10), [[ARG]]
; CHECK-NEXT:    ret <4 x i32> [[T1]]
;
  %t0 = sub <4 x i32> %arg, <i32 8, i32 8, i32 8, i32 8>
  call void @vec_use(<4 x i32> %t0)
  %t1 = sub <4 x i32> <i32 2, i32 2, i32 2, i32 2>, %t0
  ret <4 x i32> %t1
}

define <4 x i32> @vec_sub_const_const_sub_nonsplat(<4 x i32> %arg) {
; CHECK-LABEL: @vec_sub_const_const_sub_nonsplat(
; CHECK-NEXT:    [[T1:%.*]] = sub <4 x i32> <i32 23, i32 undef, i32 undef, i32 10>, [[ARG:%.*]]
; CHECK-NEXT:    ret <4 x i32> [[T1]]
;
  %t0 = sub <4 x i32> %arg, <i32 21, i32 undef, i32 8, i32 8>
  %t1 = sub <4 x i32> <i32 2, i32 3, i32 undef, i32 2>, %t0
  ret <4 x i32> %t1
}

; (c1-x)+c2

define i32 @const_sub_add_const(i32 %arg) {
; CHECK-LABEL: @const_sub_add_const(
; CHECK-NEXT:    [[T1:%.*]] = sub i32 10, [[ARG:%.*]]
; CHECK-NEXT:    ret i32 [[T1]]
;
  %t0 = sub i32 8, %arg
  %t1 = add i32 %t0, 2
  ret i32 %t1
}

define i32 @const_sub_add_const_extrause(i32 %arg) {
; CHECK-LABEL: @const_sub_add_const_extrause(
; CHECK-NEXT:    [[T0:%.*]] = sub i32 8, [[ARG:%.*]]
; CHECK-NEXT:    call void @use(i32 [[T0]])
; CHECK-NEXT:    [[T1:%.*]] = sub i32 10, [[ARG]]
; CHECK-NEXT:    ret i32 [[T1]]
;
  %t0 = sub i32 8, %arg
  call void @use(i32 %t0)
  %t1 = add i32 %t0, 2
  ret i32 %t1
}

define <4 x i32> @vec_const_sub_add_const(<4 x i32> %arg) {
; CHECK-LABEL: @vec_const_sub_add_const(
; CHECK-NEXT:    [[T1:%.*]] = sub <4 x i32> splat (i32 10), [[ARG:%.*]]
; CHECK-NEXT:    ret <4 x i32> [[T1]]
;
  %t0 = sub <4 x i32> <i32 8, i32 8, i32 8, i32 8>, %arg
  %t1 = add <4 x i32> %t0, <i32 2, i32 2, i32 2, i32 2>
  ret <4 x i32> %t1
}

define <4 x i32> @vec_const_sub_add_const_extrause(<4 x i32> %arg) {
; CHECK-LABEL: @vec_const_sub_add_const_extrause(
; CHECK-NEXT:    [[T0:%.*]] = sub <4 x i32> splat (i32 8), [[ARG:%.*]]
; CHECK-NEXT:    call void @vec_use(<4 x i32> [[T0]])
; CHECK-NEXT:    [[T1:%.*]] = sub <4 x i32> splat (i32 10), [[ARG]]
; CHECK-NEXT:    ret <4 x i32> [[T1]]
;
  %t0 = sub <4 x i32> <i32 8, i32 8, i32 8, i32 8>, %arg
  call void @vec_use(<4 x i32> %t0)
  %t1 = add <4 x i32> %t0, <i32 2, i32 2, i32 2, i32 2>
  ret <4 x i32> %t1
}

define <4 x i32> @vec_const_sub_add_const_nonsplat(<4 x i32> %arg) {
; CHECK-LABEL: @vec_const_sub_add_const_nonsplat(
; CHECK-NEXT:    [[T1:%.*]] = sub <4 x i32> <i32 23, i32 undef, i32 undef, i32 10>, [[ARG:%.*]]
; CHECK-NEXT:    ret <4 x i32> [[T1]]
;
  %t0 = sub <4 x i32> <i32 21, i32 undef, i32 8, i32 8>, %arg
  %t1 = add <4 x i32> %t0, <i32 2, i32 3, i32 undef, i32 2>
  ret <4 x i32> %t1
}

; (c1-x)-c2

define i32 @const_sub_sub_const(i32 %arg) {
; CHECK-LABEL: @const_sub_sub_const(
; CHECK-NEXT:    [[T1:%.*]] = sub i32 6, [[ARG:%.*]]
; CHECK-NEXT:    ret i32 [[T1]]
;
  %t0 = sub i32 8, %arg
  %t1 = sub i32 %t0, 2
  ret i32 %t1
}

define i32 @const_sub_sub_const_extrause(i32 %arg) {
; CHECK-LABEL: @const_sub_sub_const_extrause(
; CHECK-NEXT:    [[T0:%.*]] = sub i32 8, [[ARG:%.*]]
; CHECK-NEXT:    call void @use(i32 [[T0]])
; CHECK-NEXT:    [[T1:%.*]] = sub i32 6, [[ARG]]
; CHECK-NEXT:    ret i32 [[T1]]
;
  %t0 = sub i32 8, %arg
  call void @use(i32 %t0)
  %t1 = sub i32 %t0, 2
  ret i32 %t1
}

define <4 x i32> @vec_const_sub_sub_const(<4 x i32> %arg) {
; CHECK-LABEL: @vec_const_sub_sub_const(
; CHECK-NEXT:    [[T1:%.*]] = sub <4 x i32> splat (i32 6), [[ARG:%.*]]
; CHECK-NEXT:    ret <4 x i32> [[T1]]
;
  %t0 = sub <4 x i32> <i32 8, i32 8, i32 8, i32 8>, %arg
  %t1 = sub <4 x i32> %t0, <i32 2, i32 2, i32 2, i32 2>
  ret <4 x i32> %t1
}

define <4 x i32> @vec_const_sub_sub_const_extrause(<4 x i32> %arg) {
; CHECK-LABEL: @vec_const_sub_sub_const_extrause(
; CHECK-NEXT:    [[T0:%.*]] = sub <4 x i32> splat (i32 8), [[ARG:%.*]]
; CHECK-NEXT:    call void @vec_use(<4 x i32> [[T0]])
; CHECK-NEXT:    [[T1:%.*]] = sub <4 x i32> splat (i32 6), [[ARG]]
; CHECK-NEXT:    ret <4 x i32> [[T1]]
;
  %t0 = sub <4 x i32> <i32 8, i32 8, i32 8, i32 8>, %arg
  call void @vec_use(<4 x i32> %t0)
  %t1 = sub <4 x i32> %t0, <i32 2, i32 2, i32 2, i32 2>
  ret <4 x i32> %t1
}

define <4 x i32> @vec_const_sub_sub_const_nonsplat(<4 x i32> %arg) {
; CHECK-LABEL: @vec_const_sub_sub_const_nonsplat(
; CHECK-NEXT:    [[T1:%.*]] = sub <4 x i32> <i32 19, i32 undef, i32 undef, i32 6>, [[ARG:%.*]]
; CHECK-NEXT:    ret <4 x i32> [[T1]]
;
  %t0 = sub <4 x i32> <i32 21, i32 undef, i32 8, i32 8>, %arg
  %t1 = sub <4 x i32> %t0, <i32 2, i32 3, i32 undef, i32 2>
  ret <4 x i32> %t1
}

; c2-(c1-x)
; FIXME

define i32 @const_sub_const_sub(i32 %arg) {
; CHECK-LABEL: @const_sub_const_sub(
; CHECK-NEXT:    [[T1:%.*]] = add i32 [[ARG:%.*]], -6
; CHECK-NEXT:    ret i32 [[T1]]
;
  %t0 = sub i32 8, %arg
  %t1 = sub i32 2, %t0
  ret i32 %t1
}

define i32 @const_sub_const_sub_extrause(i32 %arg) {
; CHECK-LABEL: @const_sub_const_sub_extrause(
; CHECK-NEXT:    [[T0:%.*]] = sub i32 8, [[ARG:%.*]]
; CHECK-NEXT:    call void @use(i32 [[T0]])
; CHECK-NEXT:    [[T1:%.*]] = add i32 [[ARG]], -6
; CHECK-NEXT:    ret i32 [[T1]]
;
  %t0 = sub i32 8, %arg
  call void @use(i32 %t0)
  %t1 = sub i32 2, %t0
  ret i32 %t1
}

define <4 x i32> @vec_const_sub_const_sub(<4 x i32> %arg) {
; CHECK-LABEL: @vec_const_sub_const_sub(
; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[ARG:%.*]], splat (i32 -6)
; CHECK-NEXT:    ret <4 x i32> [[T1]]
;
  %t0 = sub <4 x i32> <i32 8, i32 8, i32 8, i32 8>, %arg
  %t1 = sub <4 x i32> <i32 2, i32 2, i32 2, i32 2>, %t0
  ret <4 x i32> %t1
}

define <4 x i32> @vec_const_sub_const_sub_extrause(<4 x i32> %arg) {
; CHECK-LABEL: @vec_const_sub_const_sub_extrause(
; CHECK-NEXT:    [[T0:%.*]] = sub <4 x i32> splat (i32 8), [[ARG:%.*]]
; CHECK-NEXT:    call void @vec_use(<4 x i32> [[T0]])
; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[ARG]], splat (i32 -6)
; CHECK-NEXT:    ret <4 x i32> [[T1]]
;
  %t0 = sub <4 x i32> <i32 8, i32 8, i32 8, i32 8>, %arg
  call void @vec_use(<4 x i32> %t0)
  %t1 = sub <4 x i32> <i32 2, i32 2, i32 2, i32 2>, %t0
  ret <4 x i32> %t1
}

define <4 x i32> @vec_const_sub_const_sub_nonsplat(<4 x i32> %arg) {
; CHECK-LABEL: @vec_const_sub_const_sub_nonsplat(
; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[ARG:%.*]], <i32 -19, i32 undef, i32 undef, i32 -6>
; CHECK-NEXT:    ret <4 x i32> [[T1]]
;
  %t0 = sub <4 x i32> <i32 21, i32 undef, i32 8, i32 8>, %arg
  %t1 = sub <4 x i32> <i32 2, i32 3, i32 undef, i32 2>, %t0
  ret <4 x i32> %t1
}

define i7 @addsub_combine_constants(i7 %x, i7 %y) {
; CHECK-LABEL: @addsub_combine_constants(
; CHECK-NEXT:    [[TMP1:%.*]] = sub i7 [[X:%.*]], [[Y:%.*]]
; CHECK-NEXT:    [[A2:%.*]] = add i7 [[TMP1]], 52
; CHECK-NEXT:    ret i7 [[A2]]
;
  %a1 = add i7 %x, 42
  %s = sub i7 10, %y
  %a2 = add nsw i7 %a1, %s
  ret i7 %a2
}

define <4 x i32> @addsub_combine_constants_use1(<4 x i32> %x, <4 x i32> %y) {
; CHECK-LABEL: @addsub_combine_constants_use1(
; CHECK-NEXT:    [[A1:%.*]] = add <4 x i32> [[X:%.*]], <i32 42, i32 -7, i32 0, i32 -1>
; CHECK-NEXT:    call void @vec_use(<4 x i32> [[A1]])
; CHECK-NEXT:    [[TMP1:%.*]] = sub <4 x i32> [[X]], [[Y:%.*]]
; CHECK-NEXT:    [[A2:%.*]] = add <4 x i32> [[TMP1]], <i32 -58, i32 -6, i32 -1, i32 41>
; CHECK-NEXT:    ret <4 x i32> [[A2]]
;
  %a1 = add <4 x i32> %x, <i32 42, i32 -7, i32 0, i32 -1>
  call void @vec_use(<4 x i32> %a1)
  %s = sub <4 x i32> <i32 -100, i32 1, i32 -1, i32 42>, %y
  %a2 = add nuw <4 x i32> %s, %a1
  ret <4 x i32> %a2
}

define i32 @addsub_combine_constants_use2(i32 %x, i32 %y) {
; CHECK-LABEL: @addsub_combine_constants_use2(
; CHECK-NEXT:    [[S:%.*]] = sub i32 100, [[Y:%.*]]
; CHECK-NEXT:    call void @use(i32 [[S]])
; CHECK-NEXT:    [[TMP1:%.*]] = sub i32 [[X:%.*]], [[Y]]
; CHECK-NEXT:    [[A2:%.*]] = add i32 [[TMP1]], 142
; CHECK-NEXT:    ret i32 [[A2]]
;
  %a1 = add i32 %x, 42
  %s = sub i32 100, %y
  call void @use(i32 %s)
  %a2 = add i32 %a1, %s
  ret i32 %a2
}

; negative test - too many uses

define i32 @addsub_combine_constants_use3(i32 %x, i32 %y) {
; CHECK-LABEL: @addsub_combine_constants_use3(
; CHECK-NEXT:    [[A1:%.*]] = add i32 [[X:%.*]], 42
; CHECK-NEXT:    call void @use(i32 [[A1]])
; CHECK-NEXT:    [[S:%.*]] = sub i32 100, [[Y:%.*]]
; CHECK-NEXT:    call void @use(i32 [[S]])
; CHECK-NEXT:    [[A2:%.*]] = add i32 [[A1]], [[S]]
; CHECK-NEXT:    ret i32 [[A2]]
;
  %a1 = add i32 %x, 42
  call void @use(i32 %a1)
  %s = sub i32 100, %y
  call void @use(i32 %s)
  %a2 = add i32 %a1, %s
  ret i32 %a2
}

define i5 @sub_from_constant(i5 %x, i5 %y) {
; CHECK-LABEL: @sub_from_constant(
; CHECK-NEXT:    [[REASS_SUB:%.*]] = sub i5 [[Y:%.*]], [[X:%.*]]
; CHECK-NEXT:    [[R:%.*]] = add i5 [[REASS_SUB]], 10
; CHECK-NEXT:    ret i5 [[R]]
;
  %sub = sub i5 10, %x
  %r = add i5 %sub, %y
  ret i5 %r
}

define i5 @sub_from_constant_commute(i5 %x, i5 %p) {
; CHECK-LABEL: @sub_from_constant_commute(
; CHECK-NEXT:    [[Y:%.*]] = mul i5 [[P:%.*]], [[P]]
; CHECK-NEXT:    [[REASS_SUB:%.*]] = sub i5 [[Y]], [[X:%.*]]
; CHECK-NEXT:    [[R:%.*]] = add i5 [[REASS_SUB]], 10
; CHECK-NEXT:    ret i5 [[R]]
;
  %y = mul i5 %p, %p  ; thwart complexity-based canonicalization
  %sub = sub nsw i5 10, %x
  %r = add nsw i5 %y, %sub
  ret i5 %r
}

define <2 x i8> @sub_from_constant_vec(<2 x i8> %x, <2 x i8> %y) {
; CHECK-LABEL: @sub_from_constant_vec(
; CHECK-NEXT:    [[REASS_SUB:%.*]] = sub <2 x i8> [[Y:%.*]], [[X:%.*]]
; CHECK-NEXT:    [[R:%.*]] = add <2 x i8> [[REASS_SUB]], <i8 2, i8 -42>
; CHECK-NEXT:    ret <2 x i8> [[R]]
;
  %sub = sub nuw <2 x i8> <i8 2, i8 -42>, %x
  %r = add nuw <2 x i8> %sub, %y
  ret <2 x i8> %r
}

; negative test - don't create extra instructions

define i8 @sub_from_constant_extra_use(i8 %x, i8 %y) {
; CHECK-LABEL: @sub_from_constant_extra_use(
; CHECK-NEXT:    [[SUB:%.*]] = sub i8 1, [[X:%.*]]
; CHECK-NEXT:    call void @use(i8 [[SUB]])
; CHECK-NEXT:    [[R:%.*]] = add i8 [[SUB]], [[Y:%.*]]
; CHECK-NEXT:    ret i8 [[R]]
;
  %sub = sub i8 1, %x
  call void @use(i8 %sub)
  %r = add i8 %sub, %y
  ret i8 %r
}