; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i686-unknown | FileCheck %s --check-prefixes=X86
; RUN: llc < %s -mtriple=x86_64-unknown | FileCheck %s --check-prefixes=X64
; RUN: llc < %s -mtriple=x86_64-unknown -mattr=+tbm | FileCheck %s --check-prefixes=X64

; PR35908 - Fold ADD/SUB and bit extracts into ADC/SBB+BT
;
; int test_add_add(int x, int y, int z) { return ((x + y) + bool(z & (1 << 30))); }
; int test_add_sub(int x, int y, int z) { return ((x - y) + bool(z & (1 << 30))); }
; int test_sub_add(int x, int y, int z) { return ((x + y) - bool(z & (1 << 30))); }
; int test_sub_sub(int x, int y, int z) { return (x - (y - bool(z & (1 << 30)))); }
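;
; The extracted bool 'bool(z & (1 << N))' is expected to lower to a BT of bit N
; whose carry flag is consumed directly by ADC (for the '+ bool' cases) or SBB
; (for the '- bool' cases), instead of a separate shift/and/add sequence.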

;
; Constant Bit Indices
;

define i32 @test_i32_add_add_idx(i32 %x, i32 %y, i32 %z) nounwind {
; X86-LABEL: test_i32_add_add_idx:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    btl $30, {{[0-9]+}}(%esp)
; X86-NEXT:    adcl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i32_add_add_idx:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    btl $30, %edx
; X64-NEXT:    adcl %esi, %eax
; X64-NEXT:    retq
  %add = add i32 %y, %x
  %shift = lshr i32 %z, 30
  %mask = and i32 %shift, 1
  %add1 = add i32 %add, %mask
  ret i32 %add1
}

define i32 @test_i32_add_add_commute_idx(i32 %x, i32 %y, i32 %z) nounwind {
; X86-LABEL: test_i32_add_add_commute_idx:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    btl $2, {{[0-9]+}}(%esp)
; X86-NEXT:    adcl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i32_add_add_commute_idx:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    btl $2, %edx
; X64-NEXT:    adcl %esi, %eax
; X64-NEXT:    retq
  %add = add i32 %y, %x
  %shift = lshr i32 %z, 2
  %mask = and i32 %shift, 1
  %add1 = add i32 %mask, %add
  ret i32 %add1
}

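; Bit index 0 needs no shift, so the extract is expected to stay as 'andl $1'
; feeding a plain add/lea rather than a BT.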
define i32 @test_i32_add_add_idx0(i32 %x, i32 %y, i32 %z) nounwind {
; X86-LABEL: test_i32_add_add_idx0:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    addl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    andl $1, %eax
; X86-NEXT:    addl %ecx, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i32_add_add_idx0:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $esi killed $esi def $rsi
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal (%rdi,%rsi), %eax
; X64-NEXT:    andl $1, %edx
; X64-NEXT:    addl %edx, %eax
; X64-NEXT:    retq
  %add = add i32 %y, %x
  %mask = and i32 %z, 1
  %add1 = add i32 %mask, %add
  ret i32 %add1
}

define i24 @test_i24_add_add_idx(i24 %x, i24 %y, i24 %z) nounwind {
; X86-LABEL: test_i24_add_add_idx:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    btl $15, {{[0-9]+}}(%esp)
; X86-NEXT:    adcl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i24_add_add_idx:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    btl $15, %edx
; X64-NEXT:    adcl %esi, %eax
; X64-NEXT:    retq
  %add = add i24 %y, %x
  %shift = lshr i24 %z, 15
  %mask = and i24 %shift, 1
  %add1 = add i24 %add, %mask
  ret i24 %add1
}

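; i128 is legalized into a carry chain of 32-bit (X86) or 64-bit (X64) limbs;
; bit 69 of %z should still be tested with a single BT (bit 5 of the relevant
; word) and the carry propagated through the chain with 'adc $0'.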
define i128 @test_i128_add_add_idx(i128 %x, i128 %y, i128 %z) nounwind {
; X86-LABEL: test_i128_add_add_idx:
; X86:       # %bb.0:
; X86-NEXT:    pushl %edi
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edi
; X86-NEXT:    addl {{[0-9]+}}(%esp), %esi
; X86-NEXT:    adcl {{[0-9]+}}(%esp), %edi
; X86-NEXT:    adcl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    adcl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    btl $5, {{[0-9]+}}(%esp)
; X86-NEXT:    adcl $0, %esi
; X86-NEXT:    adcl $0, %edi
; X86-NEXT:    adcl $0, %ecx
; X86-NEXT:    adcl $0, %edx
; X86-NEXT:    movl %edi, 4(%eax)
; X86-NEXT:    movl %esi, (%eax)
; X86-NEXT:    movl %ecx, 8(%eax)
; X86-NEXT:    movl %edx, 12(%eax)
; X86-NEXT:    popl %esi
; X86-NEXT:    popl %edi
; X86-NEXT:    retl $4
;
; X64-LABEL: test_i128_add_add_idx:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    addq %rdx, %rax
; X64-NEXT:    adcq %rcx, %rsi
; X64-NEXT:    btl $5, %r9d
; X64-NEXT:    adcq $0, %rax
; X64-NEXT:    adcq $0, %rsi
; X64-NEXT:    movq %rsi, %rdx
; X64-NEXT:    retq
  %add = add i128 %y, %x
  %shift = lshr i128 %z, 69
  %mask = and i128 %shift, 1
  %add1 = add i128 %add, %mask
  ret i128 %add1
}

define i32 @test_i32_add_sub_idx(i32 %x, i32 %y, i32 %z) nounwind {
; X86-LABEL: test_i32_add_sub_idx:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    subl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    shrl $31, %eax
; X86-NEXT:    addl %ecx, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i32_add_sub_idx:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edx killed $edx def $rdx
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    subl %esi, %edi
; X64-NEXT:    shrl $31, %edx
; X64-NEXT:    leal (%rdx,%rdi), %eax
; X64-NEXT:    retq
  %sub = sub i32 %x, %y
  %shift = lshr i32 %z, 31
  %mask = and i32 %shift, 1
  %add = add i32 %sub, %mask
  ret i32 %add
}

define i32 @test_i32_add_sub_commute_idx(i32 %x, i32 %y, i32 %z) nounwind {
; X86-LABEL: test_i32_add_sub_commute_idx:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    subl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    btl $8, {{[0-9]+}}(%esp)
; X86-NEXT:    adcl $0, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i32_add_sub_commute_idx:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    subl %esi, %eax
; X64-NEXT:    btl $8, %edx
; X64-NEXT:    adcl $0, %eax
; X64-NEXT:    retq
  %sub = sub i32 %x, %y
  %shift = lshr i32 %z, 8
  %mask = and i32 %shift, 1
  %add = add i32 %mask, %sub
  ret i32 %add
}

define i32 @test_i32_sub_add_idx(i32 %x, i32 %y, i32 %z) nounwind {
; X86-LABEL: test_i32_sub_add_idx:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    addl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    btl $1, {{[0-9]+}}(%esp)
; X86-NEXT:    sbbl $0, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i32_sub_add_idx:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $esi killed $esi def $rsi
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal (%rdi,%rsi), %eax
; X64-NEXT:    btl $1, %edx
; X64-NEXT:    sbbl $0, %eax
; X64-NEXT:    retq
  %add = add i32 %y, %x
  %shift = lshr i32 %z, 1
  %mask = and i32 %shift, 1
  %sub = sub i32 %add, %mask
  ret i32 %sub
}

define i32 @test_i32_sub_add_commute_idx(i32 %x, i32 %y, i32 %z) nounwind {
; X86-LABEL: test_i32_sub_add_commute_idx:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    btl $1, {{[0-9]+}}(%esp)
; X86-NEXT:    sbbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i32_sub_add_commute_idx:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    btl $1, %edx
; X64-NEXT:    sbbl %esi, %eax
; X64-NEXT:    retq
  %shift = lshr i32 %z, 1
  %mask = and i32 %shift, 1
  %add = add i32 %y, %mask
  %sub = sub i32 %x, %add
  ret i32 %sub
}

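; When the extracted bit is sign-extended (shl + ashr 31) rather than masked to
; a bool, the BT/SBB fold does not apply and the shift/add/sub sequence is kept.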
define i32 @test_i32_sub_add_sext_idx(i32 %x, i32 %y, i32 %z) nounwind {
; X86-LABEL: test_i32_sub_add_sext_idx:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    shll $25, %ecx
; X86-NEXT:    sarl $31, %ecx
; X86-NEXT:    addl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    subl %ecx, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i32_sub_add_sext_idx:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    shll $25, %edx
; X64-NEXT:    sarl $31, %edx
; X64-NEXT:    addl %esi, %edx
; X64-NEXT:    subl %edx, %eax
; X64-NEXT:    retq
  %shl = shl i32 %z, 25
  %sext = ashr i32 %shl, 31
  %add = add i32 %y, %sext
  %sub = sub i32 %x, %add
  ret i32 %sub
}

define i32 @test_i32_sub_sub_idx(i32 %x, i32 %y, i32 %z) nounwind {
; X86-LABEL: test_i32_sub_sub_idx:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    btl $16, {{[0-9]+}}(%esp)
; X86-NEXT:    adcl $0, %eax
; X86-NEXT:    subl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i32_sub_sub_idx:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    btl $16, %edx
; X64-NEXT:    adcl $0, %eax
; X64-NEXT:    subl %esi, %eax
; X64-NEXT:    retq
  %shift = lshr i32 %z, 16
  %mask = and i32 %shift, 1
  %sub0 = sub i32 %y, %mask
  %sub1 = sub i32 %x, %sub0
  ret i32 %sub1
}

define i32 @test_i32_sub_sub_commute_idx(i32 %x, i32 %y, i32 %z) nounwind {
; X86-LABEL: test_i32_sub_sub_commute_idx:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    btl $15, {{[0-9]+}}(%esp)
; X86-NEXT:    sbbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i32_sub_sub_commute_idx:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    btl $15, %edx
; X64-NEXT:    sbbl %esi, %eax
; X64-NEXT:    retq
  %shift = lshr i32 %z, 15
  %mask = and i32 %shift, 1
  %sub0 = sub i32 %x, %y
  %sub1 = sub i32 %sub0, %mask
  ret i32 %sub1
}

define i32 @test_i32_sub_sum_idx(i32 %x, i32 %y, i32 %z) nounwind {
; X86-LABEL: test_i32_sub_sum_idx:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    addl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    btl $30, {{[0-9]+}}(%esp)
; X86-NEXT:    sbbl $0, %eax
; X86-NEXT:    negl %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i32_sub_sum_idx:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $esi killed $esi def $rsi
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal (%rdi,%rsi), %eax
; X64-NEXT:    btl $30, %edx
; X64-NEXT:    sbbl $0, %eax
; X64-NEXT:    negl %eax
; X64-NEXT:    retq
  %shift = lshr i32 %z, 30
  %mask = and i32 %shift, 1
  %add = add i32 %y, %x
  %sub = sub i32 %mask, %add
  ret i32 %sub
}

;
; Variable Bit Indices
;

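; With a variable bit index the extract should still lower to a register-operand
; 'bt %reg, %reg' feeding ADC/SBB, provided the shift is legal for the type; see
; the i64 case below for the i686 fallback.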
define i32 @test_i32_add_add_var(i32 %x, i32 %y, i32 %z, i32 %w) nounwind {
; X86-LABEL: test_i32_add_add_var:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    btl %ecx, %edx
; X86-NEXT:    adcl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i32_add_add_var:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    btl %ecx, %edx
; X64-NEXT:    adcl %esi, %eax
; X64-NEXT:    retq
  %add = add i32 %y, %x
  %shift = lshr i32 %z, %w
  %mask = and i32 %shift, 1
  %add1 = add i32 %add, %mask
  ret i32 %add1
}

define i32 @test_i32_add_add_commute_var(i32 %x, i32 %y, i32 %z, i32 %w) nounwind {
; X86-LABEL: test_i32_add_add_commute_var:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    btl %ecx, %edx
; X86-NEXT:    adcl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i32_add_add_commute_var:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    btl %ecx, %edx
; X64-NEXT:    adcl %esi, %eax
; X64-NEXT:    retq
  %add = add i32 %y, %x
  %shift = lshr i32 %z, %w
  %mask = and i32 %shift, 1
  %add1 = add i32 %mask, %add
  ret i32 %add1
}

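; On i686 the variable i64 shift is expanded (shrdl plus a 'testb $32, %cl'
; branch), so only the X64 side folds to 'btq %rcx, %rdx' + 'adcq'.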
define i64 @test_i64_add_add_var(i64 %x, i64 %y, i64 %z, i64 %w) nounwind {
; X86-LABEL: test_i64_add_add_var:
; X86:       # %bb.0:
; X86-NEXT:    pushl %ebx
; X86-NEXT:    pushl %edi
; X86-NEXT:    pushl %esi
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ebx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    addl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    adcl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    movl %ebx, %edi
; X86-NEXT:    shrl %cl, %edi
; X86-NEXT:    shrdl %cl, %ebx, %esi
; X86-NEXT:    testb $32, %cl
; X86-NEXT:    jne .LBB15_2
; X86-NEXT:  # %bb.1:
; X86-NEXT:    movl %esi, %edi
; X86-NEXT:  .LBB15_2:
; X86-NEXT:    andl $1, %edi
; X86-NEXT:    addl %edi, %eax
; X86-NEXT:    adcl $0, %edx
; X86-NEXT:    popl %esi
; X86-NEXT:    popl %edi
; X86-NEXT:    popl %ebx
; X86-NEXT:    retl
;
; X64-LABEL: test_i64_add_add_var:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    btq %rcx, %rdx
; X64-NEXT:    adcq %rsi, %rax
; X64-NEXT:    retq
  %add = add i64 %y, %x
  %shift = lshr i64 %z, %w
  %mask = and i64 %shift, 1
  %add1 = add i64 %add, %mask
  ret i64 %add1
}

define i32 @test_i32_add_sub_var(i32 %x, i32 %y, i32 %z, i32 %w) nounwind {
; X86-LABEL: test_i32_add_sub_var:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    subl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    btl %ecx, %edx
; X86-NEXT:    adcl $0, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i32_add_sub_var:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    subl %esi, %eax
; X64-NEXT:    btl %ecx, %edx
; X64-NEXT:    adcl $0, %eax
; X64-NEXT:    retq
  %sub = sub i32 %x, %y
  %shift = lshr i32 %z, %w
  %mask = and i32 %shift, 1
  %add = add i32 %sub, %mask
  ret i32 %add
}

define i32 @test_i32_add_sub_commute_var(i32 %x, i32 %y, i32 %z, i32 %w) nounwind {
; X86-LABEL: test_i32_add_sub_commute_var:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    subl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    btl %ecx, %edx
; X86-NEXT:    adcl $0, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i32_add_sub_commute_var:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    subl %esi, %eax
; X64-NEXT:    btl %ecx, %edx
; X64-NEXT:    adcl $0, %eax
; X64-NEXT:    retq
  %sub = sub i32 %x, %y
  %shift = lshr i32 %z, %w
  %mask = and i32 %shift, 1
  %add = add i32 %mask, %sub
  ret i32 %add
}

define i32 @test_i32_sub_add_var(i32 %x, i32 %y, i32 %z, i32 %w) nounwind {
; X86-LABEL: test_i32_sub_add_var:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    addl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    btl %ecx, %edx
; X86-NEXT:    sbbl $0, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i32_sub_add_var:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $esi killed $esi def $rsi
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal (%rdi,%rsi), %eax
; X64-NEXT:    btl %ecx, %edx
; X64-NEXT:    sbbl $0, %eax
; X64-NEXT:    retq
  %add = add i32 %y, %x
  %shift = lshr i32 %z, %w
  %mask = and i32 %shift, 1
  %sub = sub i32 %add, %mask
  ret i32 %sub
}

define i32 @test_i32_sub_add_commute_var(i32 %x, i32 %y, i32 %z, i32 %w) nounwind {
; X86-LABEL: test_i32_sub_add_commute_var:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    btl %ecx, %edx
; X86-NEXT:    sbbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i32_sub_add_commute_var:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    btl %ecx, %edx
; X64-NEXT:    sbbl %esi, %eax
; X64-NEXT:    retq
  %shift = lshr i32 %z, %w
  %mask = and i32 %shift, 1
  %add = add i32 %y, %mask
  %sub = sub i32 %x, %add
  ret i32 %sub
}

define i32 @test_i32_sub_add_sext_var(i32 %x, i32 %y, i32 %z, i32 %w) nounwind {
; X86-LABEL: test_i32_sub_add_sext_var:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    shll %cl, %edx
; X86-NEXT:    sarl $31, %edx
; X86-NEXT:    addl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    subl %edx, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i32_sub_add_sext_var:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shll %cl, %edx
; X64-NEXT:    sarl $31, %edx
; X64-NEXT:    addl %esi, %edx
; X64-NEXT:    subl %edx, %eax
; X64-NEXT:    retq
  %shl = shl i32 %z, %w
  %sext = ashr i32 %shl, 31
  %add = add i32 %y, %sext
  %sub = sub i32 %x, %add
  ret i32 %sub
}

define i32 @test_i32_sub_sub_var(i32 %x, i32 %y, i32 %z, i32 %w) nounwind {
; X86-LABEL: test_i32_sub_sub_var:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    btl %ecx, %edx
; X86-NEXT:    adcl $0, %eax
; X86-NEXT:    subl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i32_sub_sub_var:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    btl %ecx, %edx
; X64-NEXT:    adcl $0, %eax
; X64-NEXT:    subl %esi, %eax
; X64-NEXT:    retq
  %shift = lshr i32 %z, %w
  %mask = and i32 %shift, 1
  %sub0 = sub i32 %y, %mask
  %sub1 = sub i32 %x, %sub0
  ret i32 %sub1
}

define i32 @test_i32_sub_sub_commute_var(i32 %x, i32 %y, i32 %z, i32 %w) nounwind {
; X86-LABEL: test_i32_sub_sub_commute_var:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    btl %ecx, %edx
; X86-NEXT:    sbbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i32_sub_sub_commute_var:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    btl %ecx, %edx
; X64-NEXT:    sbbl %esi, %eax
; X64-NEXT:    retq
  %shift = lshr i32 %z, %w
  %mask = and i32 %shift, 1
  %sub0 = sub i32 %x, %y
  %sub1 = sub i32 %sub0, %mask
  ret i32 %sub1
}

define i32 @test_i32_sub_sum_var(i32 %x, i32 %y, i32 %z, i32 %w) nounwind {
; X86-LABEL: test_i32_sub_sum_var:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    addl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    btl %ecx, %edx
; X86-NEXT:    sbbl $0, %eax
; X86-NEXT:    negl %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i32_sub_sum_var:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $esi killed $esi def $rsi
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal (%rdi,%rsi), %eax
; X64-NEXT:    btl %ecx, %edx
; X64-NEXT:    sbbl $0, %eax
; X64-NEXT:    negl %eax
; X64-NEXT:    retq
  %shift = lshr i32 %z, %w
  %mask = and i32 %shift, 1
  %add = add i32 %y, %x
  %sub = sub i32 %mask, %add
  ret i32 %sub
}