; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i686-linux | FileCheck %s --check-prefixes=X86
; RUN: llc < %s -mtriple=x86_64-linux | FileCheck %s --check-prefixes=X64

declare void @use(i8)

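; These tests exercise folding "add (and (xor x, -1), y), x" and related
; patterns into "or x, y": since (~x & y) and x have no set bits in common,
; the addition can never carry and is equivalent to an OR.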
define i8 @add_and_xor(i8 %x, i8 %y) {
; X86-LABEL: add_and_xor:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    orb {{[0-9]+}}(%esp), %al
; X86-NEXT:    retl
;
; X64-LABEL: add_and_xor:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    orl %esi, %eax
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %xor = xor i8 %x, -1
  %and = and i8 %xor, %y
  %add = add i8 %and, %x
  ret i8 %add
}

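; Negative test: the xor constant is -2 rather than -1, so %xor is not a full
; NOT of %x and the fold to OR must not happen.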
define i8 @add_and_xor_wrong_const(i8 %x, i8 %y) {
; X86-LABEL: add_and_xor_wrong_const:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl %ecx, %eax
; X86-NEXT:    xorb $-2, %al
; X86-NEXT:    andb {{[0-9]+}}(%esp), %al
; X86-NEXT:    addb %cl, %al
; X86-NEXT:    retl
;
; X64-LABEL: add_and_xor_wrong_const:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    xorb $-2, %al
; X64-NEXT:    andb %sil, %al
; X64-NEXT:    addb %dil, %al
; X64-NEXT:    retq
  %xor = xor i8 %x, -2
  %and = and i8 %xor, %y
  %add = add i8 %and, %x
  ret i8 %add
}

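; Negative test: the NOT is of %z while %x is the value being added, so the
; operands do not match and no OR is formed.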
define i8 @add_and_xor_wrong_op(i8 %x, i8 %y, i8 %z) {
; X86-LABEL: add_and_xor_wrong_op:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    notb %al
; X86-NEXT:    andb {{[0-9]+}}(%esp), %al
; X86-NEXT:    addb {{[0-9]+}}(%esp), %al
; X86-NEXT:    retl
;
; X64-LABEL: add_and_xor_wrong_op:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edx killed $edx def $rdx
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    notb %dl
; X64-NEXT:    andb %sil, %dl
; X64-NEXT:    leal (%rdx,%rdi), %eax
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %xor = xor i8 %z, -1
  %and = and i8 %xor, %y
  %add = add i8 %and, %x
  ret i8 %add
}

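; Same fold with the operands of the AND commuted.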
define i8 @add_and_xor_commuted1(i8 %x, i8 %y) {
; X86-LABEL: add_and_xor_commuted1:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    orb {{[0-9]+}}(%esp), %al
; X86-NEXT:    retl
;
; X64-LABEL: add_and_xor_commuted1:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    orl %esi, %eax
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %xor = xor i8 %x, -1
  %and = and i8 %y, %xor
  %add = add i8 %and, %x
  ret i8 %add
}

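; Same fold with the operands of the ADD commuted.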
define i8 @add_and_xor_commuted2(i8 %x, i8 %y) {
; X86-LABEL: add_and_xor_commuted2:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    orb {{[0-9]+}}(%esp), %al
; X86-NEXT:    retl
;
; X64-LABEL: add_and_xor_commuted2:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    orl %esi, %eax
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %xor = xor i8 %x, -1
  %and = and i8 %xor, %y
  %add = add i8 %x, %and
  ret i8 %add
}

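; Same fold with the operands of both the AND and the ADD commuted.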
define i8 @add_and_xor_commuted3(i8 %x, i8 %y) {
; X86-LABEL: add_and_xor_commuted3:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    orb {{[0-9]+}}(%esp), %al
; X86-NEXT:    retl
;
; X64-LABEL: add_and_xor_commuted3:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    orl %esi, %eax
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %xor = xor i8 %x, -1
  %and = and i8 %y, %xor
  %add = add i8 %x, %and
  ret i8 %add
}

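; The NOT and the AND both have additional uses (passed to @use), but the final
; add of disjoint values can still be lowered to an OR.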
define i8 @add_and_xor_extra_use(i8 %x, i8 %y) nounwind {
; X86-LABEL: add_and_xor_extra_use:
; X86:       # %bb.0:
; X86-NEXT:    pushl %ebx
; X86-NEXT:    subl $8, %esp
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %ebx
; X86-NEXT:    movb {{[0-9]+}}(%esp), %bh
; X86-NEXT:    notb %bh
; X86-NEXT:    movzbl %bh, %eax
; X86-NEXT:    movl %eax, (%esp)
; X86-NEXT:    calll use@PLT
; X86-NEXT:    andb %bl, %bh
; X86-NEXT:    movzbl %bh, %eax
; X86-NEXT:    movl %eax, (%esp)
; X86-NEXT:    calll use@PLT
; X86-NEXT:    orb {{[0-9]+}}(%esp), %bl
; X86-NEXT:    movl %ebx, %eax
; X86-NEXT:    addl $8, %esp
; X86-NEXT:    popl %ebx
; X86-NEXT:    retl
;
; X64-LABEL: add_and_xor_extra_use:
; X64:       # %bb.0:
; X64-NEXT:    pushq %rbp
; X64-NEXT:    pushq %r14
; X64-NEXT:    pushq %rbx
; X64-NEXT:    movl %esi, %ebx
; X64-NEXT:    movl %edi, %ebp
; X64-NEXT:    movl %ebp, %eax
; X64-NEXT:    notb %al
; X64-NEXT:    movzbl %al, %r14d
; X64-NEXT:    movl %r14d, %edi
; X64-NEXT:    callq use@PLT
; X64-NEXT:    andb %bl, %r14b
; X64-NEXT:    movzbl %r14b, %edi
; X64-NEXT:    callq use@PLT
; X64-NEXT:    orb %bpl, %bl
; X64-NEXT:    movl %ebx, %eax
; X64-NEXT:    popq %rbx
; X64-NEXT:    popq %r14
; X64-NEXT:    popq %rbp
; X64-NEXT:    retq
  %xor = xor i8 %x, -1
  call void @use(i8 %xor)
  %and = and i8 %xor, %y
  call void @use(i8 %and)
  %add = add i8 %and, %x
  ret i8 %add
}

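; Constant mask variant: (~x & 1) + x folds to x | 1.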
define i64 @add_and_xor_const(i64 %x) {
; X86-LABEL: add_and_xor_const:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    orl $1, %eax
; X86-NEXT:    retl
;
; X64-LABEL: add_and_xor_const:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    orq $1, %rax
; X64-NEXT:    retq
  %xor = xor i64 %x, -1
  %and = and i64 %xor, 1
  %add = add i64 %and, %x
  ret i64 %add
}

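; Negative test: the NOT is of %y while %x is added, so (~y & 1) + x cannot be
; turned into an OR.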
define i64 @add_and_xor_const_wrong_op(i64 %x, i64 %y) {
; X86-LABEL: add_and_xor_const_wrong_op:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    notl %eax
; X86-NEXT:    andl $1, %eax
; X86-NEXT:    addl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    adcl $0, %edx
; X86-NEXT:    retl
;
; X64-LABEL: add_and_xor_const_wrong_op:
; X64:       # %bb.0:
; X64-NEXT:    notl %esi
; X64-NEXT:    andl $1, %esi
; X64-NEXT:    leaq (%rsi,%rdi), %rax
; X64-NEXT:    retq
  %xor = xor i64 %y, -1
  %and = and i64 %xor, 1
  %add = add i64 %and, %x
  ret i64 %add
}

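; The NOT is computed on the truncated value and sign-extended back; only bit 0
; is kept by the mask, so this is still (~x & 1) + x and folds to x | 1.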
define i64 @add_and_xor_const_explicit_trunc(i64 %x) {
; X86-LABEL: add_and_xor_const_explicit_trunc:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    orl $1, %eax
; X86-NEXT:    retl
;
; X64-LABEL: add_and_xor_const_explicit_trunc:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    orq $1, %rax
; X64-NEXT:    retq
  %trunc = trunc i64 %x to i32
  %xor = xor i32 %trunc, -1
  %ext = sext i32 %xor to i64
  %and = and i64 %ext, 1
  %add = add i64 %and, %x
  ret i64 %add
}

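; Negative test: the mask 0x100000001 also selects bit 32, where the
; sign-extended NOT need not be the complement of %x, so the fold is unsafe.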
define i64 @add_and_xor_const_explicit_trunc_wrong_mask(i64 %x) {
; X86-LABEL: add_and_xor_const_explicit_trunc_wrong_mask:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl %ecx, %eax
; X86-NEXT:    notl %eax
; X86-NEXT:    movl %eax, %edx
; X86-NEXT:    shrl $31, %edx
; X86-NEXT:    andl $1, %eax
; X86-NEXT:    addl %ecx, %eax
; X86-NEXT:    adcl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    retl
;
; X64-LABEL: add_and_xor_const_explicit_trunc_wrong_mask:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    notl %eax
; X64-NEXT:    movslq %eax, %rcx
; X64-NEXT:    movabsq $4294967297, %rax # imm = 0x100000001
; X64-NEXT:    andq %rcx, %rax
; X64-NEXT:    addq %rdi, %rax
; X64-NEXT:    retq
  %trunc = trunc i64 %x to i32
  %xor = xor i32 %trunc, -1
  %ext = sext i32 %xor to i64
  %and = and i64 %ext, 4294967297
  %add = add i64 %and, %x
  ret i64 %add
}

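; (%a & %m) ^ %m is %m with every bit already set in %a cleared, so adding it
; to the pointer via the GEP is the same as OR-ing %a with %m.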
define ptr @gep_and_xor(ptr %a, i64 %m) {
; X86-LABEL: gep_and_xor:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    orl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    retl
;
; X64-LABEL: gep_and_xor:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    orq %rsi, %rax
; X64-NEXT:    retq
  %old = ptrtoint ptr %a to i64
  %old.not = and i64 %old, %m
  %offset = xor i64 %old.not, %m
  %p = getelementptr i8, ptr %a, i64 %offset
  ret ptr %p
}

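; Same as gep_and_xor with the constant mask 1.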
define ptr @gep_and_xor_const(ptr %a) {
; X86-LABEL: gep_and_xor_const:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    orl $1, %eax
; X86-NEXT:    retl
;
; X64-LABEL: gep_and_xor_const:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    orq $1, %rax
; X64-NEXT:    retq
  %old = ptrtoint ptr %a to i64
  %old.not = and i64 %old, 1
  %offset = xor i64 %old.not, 1
  %p = getelementptr i8, ptr %a, i64 %offset
  ret ptr %p
}

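; The NOT, mask and zero-extension all happen at i32, but the result is still
; just bit 0 of ~x, so the add folds to x | 1.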
define i64 @add_and_xor_const_zext_trunc(i64 %x) {
; X86-LABEL: add_and_xor_const_zext_trunc:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    orl $1, %eax
; X86-NEXT:    retl
;
; X64-LABEL: add_and_xor_const_zext_trunc:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    orq $1, %rax
; X64-NEXT:    retq
  %t = trunc i64 %x to i32
  %xor = xor i32 %t, -1
  %and = and i32 %xor, 1
  %ext = zext i32 %and to i64
  %add = add i64 %ext, %x
  ret i64 %add
}

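; Variable-mask version of the zext/trunc case: zext(trunc(%y) & ~trunc(%x))
; has no set bits in common with %x, so the add still becomes an OR.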
define i64 @add_and_xor_const_zext_trunc_var(i64 %x, i64 %y) {
; X86-LABEL: add_and_xor_const_zext_trunc_var:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    orl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    retl
;
; X64-LABEL: add_and_xor_const_zext_trunc_var:
; X64:       # %bb.0:
; X64-NEXT:    movl %esi, %eax
; X64-NEXT:    orq %rdi, %rax
; X64-NEXT:    retq
  %tx = trunc i64 %x to i32
  %ty = trunc i64 %y to i32
  %xor = xor i32 %tx, -1
  %and = and i32 %ty, %xor
  %ext = zext i32 %and to i64
  %add = add i64 %ext, %x
  ret i64 %add
}
353