; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i686-- -mattr=-bmi | FileCheck %s --check-prefixes=X86,X86-NOBMI
; RUN: llc < %s -mtriple=i686-- -mattr=+bmi | FileCheck %s --check-prefixes=X86,X86-BMI
; RUN: llc < %s -mtriple=x86_64-- -mattr=-bmi | FileCheck %s --check-prefixes=X64,X64-NOBMI
; RUN: llc < %s -mtriple=x86_64-- -mattr=+bmi | FileCheck %s --check-prefixes=X64,X64-BMI

; TODO - PR112425 - attempt to reconstruct andnot patterns through bitwise-agnostic operations
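; On BMI targets the ANDN instruction computes (~a & b) in one operation, so
; the combines exercised below try to move a NOT through bit-permuting
; operations (rotates, bswap, bitreverse) until it directly feeds the AND,
; where instruction selection can form andnl/andnq; the +bmi RUN lines check
; for exactly that.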

declare void @use_i64(i64)
declare void @use_i32(i32)

;
; Fold (and X, (rotl (not Y), Z)) -> (and X, (not (rotl Y, Z)))
;
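; Rotation only permutes bits, so rotl(not Y, Z) == not(rotl(Y, Z)). As an
; illustrative sketch (not checked by this test), @andnot_rotl_i32 below
; could equivalently be written with the NOT after the rotate:
;   %rot = tail call i32 @llvm.fshl.i32(i32 %a1, i32 %a1, i32 %a2)
;   %not = xor i32 %rot, -1
;   %and = and i32 %not, %a0
; which BMI targets can select as a plain rotate followed by andnl.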

define i64 @andnot_rotl_i64(i64 %a0, i64 %a1, i64 %a2) nounwind {
; X86-NOBMI-LABEL: andnot_rotl_i64:
; X86-NOBMI:       # %bb.0:
; X86-NOBMI-NEXT:    pushl %esi
; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NOBMI-NEXT:    testb $32, %cl
; X86-NOBMI-NEXT:    jne .LBB0_1
; X86-NOBMI-NEXT:  # %bb.2:
; X86-NOBMI-NEXT:    movl %eax, %edx
; X86-NOBMI-NEXT:    jmp .LBB0_3
; X86-NOBMI-NEXT:  .LBB0_1:
; X86-NOBMI-NEXT:    movl %esi, %edx
; X86-NOBMI-NEXT:    movl %eax, %esi
; X86-NOBMI-NEXT:  .LBB0_3:
; X86-NOBMI-NEXT:    movl %esi, %eax
; X86-NOBMI-NEXT:    shldl %cl, %edx, %eax
; X86-NOBMI-NEXT:    notl %eax
; X86-NOBMI-NEXT:    andl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT:    # kill: def $cl killed $cl killed $ecx
; X86-NOBMI-NEXT:    shldl %cl, %esi, %edx
; X86-NOBMI-NEXT:    notl %edx
; X86-NOBMI-NEXT:    andl {{[0-9]+}}(%esp), %edx
; X86-NOBMI-NEXT:    popl %esi
; X86-NOBMI-NEXT:    retl
;
; X86-BMI-LABEL: andnot_rotl_i64:
; X86-BMI:       # %bb.0:
; X86-BMI-NEXT:    pushl %esi
; X86-BMI-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-BMI-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-BMI-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-BMI-NEXT:    testb $32, %cl
; X86-BMI-NEXT:    jne .LBB0_1
; X86-BMI-NEXT:  # %bb.2:
; X86-BMI-NEXT:    movl %eax, %esi
; X86-BMI-NEXT:    jmp .LBB0_3
; X86-BMI-NEXT:  .LBB0_1:
; X86-BMI-NEXT:    movl %edx, %esi
; X86-BMI-NEXT:    movl %eax, %edx
; X86-BMI-NEXT:  .LBB0_3:
; X86-BMI-NEXT:    movl %edx, %eax
; X86-BMI-NEXT:    shldl %cl, %esi, %eax
; X86-BMI-NEXT:    andnl {{[0-9]+}}(%esp), %eax, %eax
; X86-BMI-NEXT:    # kill: def $cl killed $cl killed $ecx
; X86-BMI-NEXT:    shldl %cl, %edx, %esi
; X86-BMI-NEXT:    andnl {{[0-9]+}}(%esp), %esi, %edx
; X86-BMI-NEXT:    popl %esi
; X86-BMI-NEXT:    retl
;
; X64-NOBMI-LABEL: andnot_rotl_i64:
; X64-NOBMI:       # %bb.0:
; X64-NOBMI-NEXT:    movq %rdx, %rcx
; X64-NOBMI-NEXT:    movq %rsi, %rax
; X64-NOBMI-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NOBMI-NEXT:    rolq %cl, %rax
; X64-NOBMI-NEXT:    notq %rax
; X64-NOBMI-NEXT:    andq %rdi, %rax
; X64-NOBMI-NEXT:    retq
;
; X64-BMI-LABEL: andnot_rotl_i64:
; X64-BMI:       # %bb.0:
; X64-BMI-NEXT:    movq %rdx, %rcx
; X64-BMI-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-BMI-NEXT:    rolq %cl, %rsi
; X64-BMI-NEXT:    andnq %rdi, %rsi, %rax
; X64-BMI-NEXT:    retq
  %not = xor i64 %a1, -1
  %rot = tail call i64 @llvm.fshl.i64(i64 %not, i64 %not, i64 %a2)
  %and = and i64 %rot, %a0
  ret i64 %and
}

define i32 @andnot_rotl_i32(i32 %a0, i32 %a1, i32 %a2) nounwind {
; X86-NOBMI-LABEL: andnot_rotl_i32:
; X86-NOBMI:       # %bb.0:
; X86-NOBMI-NEXT:    movzbl {{[0-9]+}}(%esp), %ecx
; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT:    roll %cl, %eax
; X86-NOBMI-NEXT:    notl %eax
; X86-NOBMI-NEXT:    andl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT:    retl
;
; X86-BMI-LABEL: andnot_rotl_i32:
; X86-BMI:       # %bb.0:
; X86-BMI-NEXT:    movzbl {{[0-9]+}}(%esp), %ecx
; X86-BMI-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-BMI-NEXT:    roll %cl, %eax
; X86-BMI-NEXT:    andnl {{[0-9]+}}(%esp), %eax, %eax
; X86-BMI-NEXT:    retl
;
; X64-NOBMI-LABEL: andnot_rotl_i32:
; X64-NOBMI:       # %bb.0:
; X64-NOBMI-NEXT:    movl %edx, %ecx
; X64-NOBMI-NEXT:    movl %esi, %eax
; X64-NOBMI-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NOBMI-NEXT:    roll %cl, %eax
; X64-NOBMI-NEXT:    notl %eax
; X64-NOBMI-NEXT:    andl %edi, %eax
; X64-NOBMI-NEXT:    retq
;
; X64-BMI-LABEL: andnot_rotl_i32:
; X64-BMI:       # %bb.0:
; X64-BMI-NEXT:    movl %edx, %ecx
; X64-BMI-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-BMI-NEXT:    roll %cl, %esi
; X64-BMI-NEXT:    andnl %edi, %esi, %eax
; X64-BMI-NEXT:    retq
  %not = xor i32 %a1, -1
  %rot = tail call i32 @llvm.fshl.i32(i32 %not, i32 %not, i32 %a2)
  %and = and i32 %rot, %a0
  ret i32 %and
}

define i16 @andnot_rotl_i16(i16 %a0, i16 %a1, i16 %a2) nounwind {
; X86-LABEL: andnot_rotl_i16:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    rolw %cl, %ax
; X86-NEXT:    notl %eax
; X86-NEXT:    andw {{[0-9]+}}(%esp), %ax
; X86-NEXT:    # kill: def $ax killed $ax killed $eax
; X86-NEXT:    retl
;
; X64-NOBMI-LABEL: andnot_rotl_i16:
; X64-NOBMI:       # %bb.0:
; X64-NOBMI-NEXT:    movl %edx, %ecx
; X64-NOBMI-NEXT:    movl %esi, %eax
; X64-NOBMI-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NOBMI-NEXT:    rolw %cl, %ax
; X64-NOBMI-NEXT:    notl %eax
; X64-NOBMI-NEXT:    andl %edi, %eax
; X64-NOBMI-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-NOBMI-NEXT:    retq
;
; X64-BMI-LABEL: andnot_rotl_i16:
; X64-BMI:       # %bb.0:
; X64-BMI-NEXT:    movl %edx, %ecx
; X64-BMI-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-BMI-NEXT:    rolw %cl, %si
; X64-BMI-NEXT:    andnl %edi, %esi, %eax
; X64-BMI-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-BMI-NEXT:    retq
  %not = xor i16 %a1, -1
  %rot = tail call i16 @llvm.fshl.i16(i16 %not, i16 %not, i16 %a2)
  %and = and i16 %rot, %a0
  ret i16 %and
}

define i8 @andnot_rotl_i8(i8 %a0, i8 %a1, i8 %a2) nounwind {
; X86-LABEL: andnot_rotl_i8:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    rolb %cl, %al
; X86-NEXT:    notb %al
; X86-NEXT:    andb {{[0-9]+}}(%esp), %al
; X86-NEXT:    retl
;
; X64-LABEL: andnot_rotl_i8:
; X64:       # %bb.0:
; X64-NEXT:    movl %edx, %ecx
; X64-NEXT:    movl %esi, %eax
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    rolb %cl, %al
; X64-NEXT:    notb %al
; X64-NEXT:    andb %dil, %al
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %not = xor i8 %a1, -1
  %rot = tail call i8 @llvm.fshl.i8(i8 %not, i8 %not, i8 %a2)
  %and = and i8 %rot, %a0
  ret i8 %and
}

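; Negative case: the rotated NOT itself escapes via @use_i64, and both RUN
; variants currently emit the same not+rotate+and sequence with no ANDN.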
define i64 @andnot_rotl_i64_multiuse_rot(i64 %a0, i64 %a1, i64 %a2) nounwind {
; X86-LABEL: andnot_rotl_i64_multiuse_rot:
; X86:       # %bb.0:
; X86-NEXT:    pushl %ebx
; X86-NEXT:    pushl %edi
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    notl %edx
; X86-NEXT:    notl %esi
; X86-NEXT:    testb $32, %cl
; X86-NEXT:    jne .LBB4_1
; X86-NEXT:  # %bb.2:
; X86-NEXT:    movl %esi, %eax
; X86-NEXT:    jmp .LBB4_3
; X86-NEXT:  .LBB4_1:
; X86-NEXT:    movl %edx, %eax
; X86-NEXT:    movl %esi, %edx
; X86-NEXT:  .LBB4_3:
; X86-NEXT:    movl %edx, %ebx
; X86-NEXT:    shldl %cl, %eax, %ebx
; X86-NEXT:    # kill: def $cl killed $cl killed $ecx
; X86-NEXT:    shldl %cl, %edx, %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NEXT:    andl %eax, %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edi
; X86-NEXT:    andl %ebx, %edi
; X86-NEXT:    pushl %ebx
; X86-NEXT:    pushl %eax
; X86-NEXT:    calll use_i64@PLT
; X86-NEXT:    addl $8, %esp
; X86-NEXT:    movl %esi, %eax
; X86-NEXT:    movl %edi, %edx
; X86-NEXT:    popl %esi
; X86-NEXT:    popl %edi
; X86-NEXT:    popl %ebx
; X86-NEXT:    retl
;
; X64-LABEL: andnot_rotl_i64_multiuse_rot:
; X64:       # %bb.0:
; X64-NEXT:    pushq %rbx
; X64-NEXT:    movq %rdx, %rcx
; X64-NEXT:    movq %rdi, %rbx
; X64-NEXT:    notq %rsi
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    rolq %cl, %rsi
; X64-NEXT:    andq %rsi, %rbx
; X64-NEXT:    movq %rsi, %rdi
; X64-NEXT:    callq use_i64@PLT
; X64-NEXT:    movq %rbx, %rax
; X64-NEXT:    popq %rbx
; X64-NEXT:    retq
  %not = xor i64 %a1, -1
  %rot = tail call i64 @llvm.fshl.i64(i64 %not, i64 %not, i64 %a2)
  %and = and i64 %rot, %a0
  call void @use_i64(i64 %rot)
  ret i64 %and
}

;
; Fold (and X, (rotr (not Y), Z)) -> (and X, (not (rotr Y, Z)))
;
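; As with rotl, rotr(not Y, Z) == not(rotr(Y, Z)); an illustrative rewrite
; of @andnot_rotr_i32 below (sketch only, not checked by this test):
;   %rot = tail call i32 @llvm.fshr.i32(i32 %a1, i32 %a1, i32 %a2)
;   %not = xor i32 %rot, -1
;   %and = and i32 %not, %a0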

define i64 @andnot_rotr_i64(i64 %a0, i64 %a1, i64 %a2) nounwind {
; X86-NOBMI-LABEL: andnot_rotr_i64:
; X86-NOBMI:       # %bb.0:
; X86-NOBMI-NEXT:    pushl %esi
; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NOBMI-NEXT:    testb $32, %cl
; X86-NOBMI-NEXT:    je .LBB5_1
; X86-NOBMI-NEXT:  # %bb.2:
; X86-NOBMI-NEXT:    movl %eax, %edx
; X86-NOBMI-NEXT:    jmp .LBB5_3
; X86-NOBMI-NEXT:  .LBB5_1:
; X86-NOBMI-NEXT:    movl %esi, %edx
; X86-NOBMI-NEXT:    movl %eax, %esi
; X86-NOBMI-NEXT:  .LBB5_3:
; X86-NOBMI-NEXT:    movl %esi, %eax
; X86-NOBMI-NEXT:    shrdl %cl, %edx, %eax
; X86-NOBMI-NEXT:    notl %eax
; X86-NOBMI-NEXT:    andl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT:    # kill: def $cl killed $cl killed $ecx
; X86-NOBMI-NEXT:    shrdl %cl, %esi, %edx
; X86-NOBMI-NEXT:    notl %edx
; X86-NOBMI-NEXT:    andl {{[0-9]+}}(%esp), %edx
; X86-NOBMI-NEXT:    popl %esi
; X86-NOBMI-NEXT:    retl
;
; X86-BMI-LABEL: andnot_rotr_i64:
; X86-BMI:       # %bb.0:
; X86-BMI-NEXT:    pushl %esi
; X86-BMI-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-BMI-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-BMI-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-BMI-NEXT:    testb $32, %cl
; X86-BMI-NEXT:    je .LBB5_1
; X86-BMI-NEXT:  # %bb.2:
; X86-BMI-NEXT:    movl %eax, %esi
; X86-BMI-NEXT:    jmp .LBB5_3
; X86-BMI-NEXT:  .LBB5_1:
; X86-BMI-NEXT:    movl %edx, %esi
; X86-BMI-NEXT:    movl %eax, %edx
; X86-BMI-NEXT:  .LBB5_3:
; X86-BMI-NEXT:    movl %edx, %eax
; X86-BMI-NEXT:    shrdl %cl, %esi, %eax
; X86-BMI-NEXT:    andnl {{[0-9]+}}(%esp), %eax, %eax
; X86-BMI-NEXT:    # kill: def $cl killed $cl killed $ecx
; X86-BMI-NEXT:    shrdl %cl, %edx, %esi
; X86-BMI-NEXT:    andnl {{[0-9]+}}(%esp), %esi, %edx
; X86-BMI-NEXT:    popl %esi
; X86-BMI-NEXT:    retl
;
; X64-NOBMI-LABEL: andnot_rotr_i64:
; X64-NOBMI:       # %bb.0:
; X64-NOBMI-NEXT:    movq %rdx, %rcx
; X64-NOBMI-NEXT:    movq %rsi, %rax
; X64-NOBMI-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NOBMI-NEXT:    rorq %cl, %rax
; X64-NOBMI-NEXT:    notq %rax
; X64-NOBMI-NEXT:    andq %rdi, %rax
; X64-NOBMI-NEXT:    retq
;
; X64-BMI-LABEL: andnot_rotr_i64:
; X64-BMI:       # %bb.0:
; X64-BMI-NEXT:    movq %rdx, %rcx
; X64-BMI-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-BMI-NEXT:    rorq %cl, %rsi
; X64-BMI-NEXT:    andnq %rdi, %rsi, %rax
; X64-BMI-NEXT:    retq
  %not = xor i64 %a1, -1
  %rot = tail call i64 @llvm.fshr.i64(i64 %not, i64 %not, i64 %a2)
  %and = and i64 %rot, %a0
  ret i64 %and
}

define i32 @andnot_rotr_i32(i32 %a0, i32 %a1, i32 %a2) nounwind {
; X86-NOBMI-LABEL: andnot_rotr_i32:
; X86-NOBMI:       # %bb.0:
; X86-NOBMI-NEXT:    movzbl {{[0-9]+}}(%esp), %ecx
; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT:    rorl %cl, %eax
; X86-NOBMI-NEXT:    notl %eax
; X86-NOBMI-NEXT:    andl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT:    retl
;
; X86-BMI-LABEL: andnot_rotr_i32:
; X86-BMI:       # %bb.0:
; X86-BMI-NEXT:    movzbl {{[0-9]+}}(%esp), %ecx
; X86-BMI-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-BMI-NEXT:    rorl %cl, %eax
; X86-BMI-NEXT:    andnl {{[0-9]+}}(%esp), %eax, %eax
; X86-BMI-NEXT:    retl
;
; X64-NOBMI-LABEL: andnot_rotr_i32:
; X64-NOBMI:       # %bb.0:
; X64-NOBMI-NEXT:    movl %edx, %ecx
; X64-NOBMI-NEXT:    movl %esi, %eax
; X64-NOBMI-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NOBMI-NEXT:    rorl %cl, %eax
; X64-NOBMI-NEXT:    notl %eax
; X64-NOBMI-NEXT:    andl %edi, %eax
; X64-NOBMI-NEXT:    retq
;
; X64-BMI-LABEL: andnot_rotr_i32:
; X64-BMI:       # %bb.0:
; X64-BMI-NEXT:    movl %edx, %ecx
; X64-BMI-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-BMI-NEXT:    rorl %cl, %esi
; X64-BMI-NEXT:    andnl %edi, %esi, %eax
; X64-BMI-NEXT:    retq
  %not = xor i32 %a1, -1
  %rot = tail call i32 @llvm.fshr.i32(i32 %not, i32 %not, i32 %a2)
  %and = and i32 %rot, %a0
  ret i32 %and
}

define i16 @andnot_rotr_i16(i16 %a0, i16 %a1, i16 %a2) nounwind {
; X86-LABEL: andnot_rotr_i16:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    rorw %cl, %ax
; X86-NEXT:    notl %eax
; X86-NEXT:    andw {{[0-9]+}}(%esp), %ax
; X86-NEXT:    # kill: def $ax killed $ax killed $eax
; X86-NEXT:    retl
;
; X64-NOBMI-LABEL: andnot_rotr_i16:
; X64-NOBMI:       # %bb.0:
; X64-NOBMI-NEXT:    movl %edx, %ecx
; X64-NOBMI-NEXT:    movl %esi, %eax
; X64-NOBMI-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NOBMI-NEXT:    rorw %cl, %ax
; X64-NOBMI-NEXT:    notl %eax
; X64-NOBMI-NEXT:    andl %edi, %eax
; X64-NOBMI-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-NOBMI-NEXT:    retq
;
; X64-BMI-LABEL: andnot_rotr_i16:
; X64-BMI:       # %bb.0:
; X64-BMI-NEXT:    movl %edx, %ecx
; X64-BMI-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-BMI-NEXT:    rorw %cl, %si
; X64-BMI-NEXT:    andnl %edi, %esi, %eax
; X64-BMI-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-BMI-NEXT:    retq
  %not = xor i16 %a1, -1
  %rot = tail call i16 @llvm.fshr.i16(i16 %not, i16 %not, i16 %a2)
  %and = and i16 %rot, %a0
  ret i16 %and
}

define i8 @andnot_rotr_i8(i8 %a0, i8 %a1, i8 %a2) nounwind {
; X86-LABEL: andnot_rotr_i8:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    rorb %cl, %al
; X86-NEXT:    notb %al
; X86-NEXT:    andb {{[0-9]+}}(%esp), %al
; X86-NEXT:    retl
;
; X64-LABEL: andnot_rotr_i8:
; X64:       # %bb.0:
; X64-NEXT:    movl %edx, %ecx
; X64-NEXT:    movl %esi, %eax
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    rorb %cl, %al
; X64-NEXT:    notb %al
; X64-NEXT:    andb %dil, %al
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %not = xor i8 %a1, -1
  %rot = tail call i8 @llvm.fshr.i8(i8 %not, i8 %not, i8 %a2)
  %and = and i8 %rot, %a0
  ret i8 %and
}

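; Here only the NOT has the extra use: BMI targets still rotate the
; un-inverted value and form andnl, materializing a separate NOT just for
; the call to @use_i32.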
define i32 @andnot_rotr_i32_multiuse_not(i32 %a0, i32 %a1, i32 %a2) nounwind {
; X86-NOBMI-LABEL: andnot_rotr_i32_multiuse_not:
; X86-NOBMI:       # %bb.0:
; X86-NOBMI-NEXT:    pushl %esi
; X86-NOBMI-NEXT:    movzbl {{[0-9]+}}(%esp), %ecx
; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT:    notl %eax
; X86-NOBMI-NEXT:    movl %eax, %esi
; X86-NOBMI-NEXT:    rorl %cl, %esi
; X86-NOBMI-NEXT:    andl {{[0-9]+}}(%esp), %esi
; X86-NOBMI-NEXT:    pushl %eax
; X86-NOBMI-NEXT:    calll use_i32@PLT
; X86-NOBMI-NEXT:    addl $4, %esp
; X86-NOBMI-NEXT:    movl %esi, %eax
; X86-NOBMI-NEXT:    popl %esi
; X86-NOBMI-NEXT:    retl
;
; X86-BMI-LABEL: andnot_rotr_i32_multiuse_not:
; X86-BMI:       # %bb.0:
; X86-BMI-NEXT:    pushl %esi
; X86-BMI-NEXT:    movzbl {{[0-9]+}}(%esp), %ecx
; X86-BMI-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-BMI-NEXT:    movl %eax, %edx
; X86-BMI-NEXT:    notl %edx
; X86-BMI-NEXT:    rorl %cl, %eax
; X86-BMI-NEXT:    andnl {{[0-9]+}}(%esp), %eax, %esi
; X86-BMI-NEXT:    pushl %edx
; X86-BMI-NEXT:    calll use_i32@PLT
; X86-BMI-NEXT:    addl $4, %esp
; X86-BMI-NEXT:    movl %esi, %eax
; X86-BMI-NEXT:    popl %esi
; X86-BMI-NEXT:    retl
;
; X64-NOBMI-LABEL: andnot_rotr_i32_multiuse_not:
; X64-NOBMI:       # %bb.0:
; X64-NOBMI-NEXT:    pushq %rbx
; X64-NOBMI-NEXT:    movl %edx, %ecx
; X64-NOBMI-NEXT:    notl %esi
; X64-NOBMI-NEXT:    movl %esi, %ebx
; X64-NOBMI-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NOBMI-NEXT:    rorl %cl, %ebx
; X64-NOBMI-NEXT:    andl %edi, %ebx
; X64-NOBMI-NEXT:    movl %esi, %edi
; X64-NOBMI-NEXT:    callq use_i32@PLT
; X64-NOBMI-NEXT:    movl %ebx, %eax
; X64-NOBMI-NEXT:    popq %rbx
; X64-NOBMI-NEXT:    retq
;
; X64-BMI-LABEL: andnot_rotr_i32_multiuse_not:
; X64-BMI:       # %bb.0:
; X64-BMI-NEXT:    pushq %rbx
; X64-BMI-NEXT:    movl %edx, %ecx
; X64-BMI-NEXT:    movl %esi, %eax
; X64-BMI-NEXT:    notl %eax
; X64-BMI-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-BMI-NEXT:    rorl %cl, %esi
; X64-BMI-NEXT:    andnl %edi, %esi, %ebx
; X64-BMI-NEXT:    movl %eax, %edi
; X64-BMI-NEXT:    callq use_i32@PLT
; X64-BMI-NEXT:    movl %ebx, %eax
; X64-BMI-NEXT:    popq %rbx
; X64-BMI-NEXT:    retq
  %not = xor i32 %a1, -1
  %rot = tail call i32 @llvm.fshr.i32(i32 %not, i32 %not, i32 %a2)
  %and = and i32 %rot, %a0
  call void @use_i32(i32 %not)
  ret i32 %and
}

;
; Fold (and X, (bswap (not Y))) -> (and X, (not (bswap Y)))
;
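; bswap permutes whole bytes, so bswap(not Y) == not(bswap(Y)); an
; illustrative rewrite of @andnot_bswap_i32 below (sketch only):
;   %swap = tail call i32 @llvm.bswap.i32(i32 %a1)
;   %not = xor i32 %swap, -1
;   %and = and i32 %not, %a0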

define i64 @andnot_bswap_i64(i64 %a0, i64 %a1) nounwind {
; X86-NOBMI-LABEL: andnot_bswap_i64:
; X86-NOBMI:       # %bb.0:
; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT:    bswapl %eax
; X86-NOBMI-NEXT:    notl %eax
; X86-NOBMI-NEXT:    andl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT:    bswapl %edx
; X86-NOBMI-NEXT:    notl %edx
; X86-NOBMI-NEXT:    andl {{[0-9]+}}(%esp), %edx
; X86-NOBMI-NEXT:    retl
;
; X86-BMI-LABEL: andnot_bswap_i64:
; X86-BMI:       # %bb.0:
; X86-BMI-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-BMI-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-BMI-NEXT:    bswapl %eax
; X86-BMI-NEXT:    andnl {{[0-9]+}}(%esp), %eax, %eax
; X86-BMI-NEXT:    bswapl %ecx
; X86-BMI-NEXT:    andnl {{[0-9]+}}(%esp), %ecx, %edx
; X86-BMI-NEXT:    retl
;
; X64-NOBMI-LABEL: andnot_bswap_i64:
; X64-NOBMI:       # %bb.0:
; X64-NOBMI-NEXT:    movq %rsi, %rax
; X64-NOBMI-NEXT:    bswapq %rax
; X64-NOBMI-NEXT:    notq %rax
; X64-NOBMI-NEXT:    andq %rdi, %rax
; X64-NOBMI-NEXT:    retq
;
; X64-BMI-LABEL: andnot_bswap_i64:
; X64-BMI:       # %bb.0:
; X64-BMI-NEXT:    bswapq %rsi
; X64-BMI-NEXT:    andnq %rdi, %rsi, %rax
; X64-BMI-NEXT:    retq
  %not = xor i64 %a1, -1
  %bswap = tail call i64 @llvm.bswap.i64(i64 %not)
  %and = and i64 %bswap, %a0
  ret i64 %and
}

define i32 @andnot_bswap_i32(i32 %a0, i32 %a1) nounwind {
; X86-NOBMI-LABEL: andnot_bswap_i32:
; X86-NOBMI:       # %bb.0:
; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT:    bswapl %eax
; X86-NOBMI-NEXT:    notl %eax
; X86-NOBMI-NEXT:    andl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT:    retl
;
; X86-BMI-LABEL: andnot_bswap_i32:
; X86-BMI:       # %bb.0:
; X86-BMI-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-BMI-NEXT:    bswapl %eax
; X86-BMI-NEXT:    andnl {{[0-9]+}}(%esp), %eax, %eax
; X86-BMI-NEXT:    retl
;
; X64-NOBMI-LABEL: andnot_bswap_i32:
; X64-NOBMI:       # %bb.0:
; X64-NOBMI-NEXT:    movl %esi, %eax
; X64-NOBMI-NEXT:    bswapl %eax
; X64-NOBMI-NEXT:    notl %eax
; X64-NOBMI-NEXT:    andl %edi, %eax
; X64-NOBMI-NEXT:    retq
;
; X64-BMI-LABEL: andnot_bswap_i32:
; X64-BMI:       # %bb.0:
; X64-BMI-NEXT:    bswapl %esi
; X64-BMI-NEXT:    andnl %edi, %esi, %eax
; X64-BMI-NEXT:    retq
  %not = xor i32 %a1, -1
  %bswap = tail call i32 @llvm.bswap.i32(i32 %not)
  %and = and i32 %bswap, %a0
  ret i32 %and
}

define i16 @andnot_bswap_i16(i16 %a0, i16 %a1) nounwind {
; X86-LABEL: andnot_bswap_i16:
; X86:       # %bb.0:
; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    rolw $8, %ax
; X86-NEXT:    notl %eax
; X86-NEXT:    andw {{[0-9]+}}(%esp), %ax
; X86-NEXT:    # kill: def $ax killed $ax killed $eax
; X86-NEXT:    retl
;
; X64-NOBMI-LABEL: andnot_bswap_i16:
; X64-NOBMI:       # %bb.0:
; X64-NOBMI-NEXT:    movl %esi, %eax
; X64-NOBMI-NEXT:    rolw $8, %ax
; X64-NOBMI-NEXT:    notl %eax
; X64-NOBMI-NEXT:    andl %edi, %eax
; X64-NOBMI-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-NOBMI-NEXT:    retq
;
; X64-BMI-LABEL: andnot_bswap_i16:
; X64-BMI:       # %bb.0:
; X64-BMI-NEXT:    rolw $8, %si
; X64-BMI-NEXT:    andnl %edi, %esi, %eax
; X64-BMI-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-BMI-NEXT:    retq
  %not = xor i16 %a1, -1
  %bswap = tail call i16 @llvm.bswap.i16(i16 %not)
  %and = and i16 %bswap, %a0
  ret i16 %and
}

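; Negative case: the bswap result itself escapes via @use_i32, so the NOT is
; kept ahead of the bswap and neither RUN variant forms an ANDN.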
define i32 @andnot_bswap_i32_multiuse_bswap(i32 %a0, i32 %a1) nounwind {
; X86-LABEL: andnot_bswap_i32_multiuse_bswap:
; X86:       # %bb.0:
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    notl %eax
; X86-NEXT:    bswapl %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NEXT:    andl %eax, %esi
; X86-NEXT:    pushl %eax
; X86-NEXT:    calll use_i32@PLT
; X86-NEXT:    addl $4, %esp
; X86-NEXT:    movl %esi, %eax
; X86-NEXT:    popl %esi
; X86-NEXT:    retl
;
; X64-LABEL: andnot_bswap_i32_multiuse_bswap:
; X64:       # %bb.0:
; X64-NEXT:    pushq %rbx
; X64-NEXT:    movl %edi, %ebx
; X64-NEXT:    notl %esi
; X64-NEXT:    bswapl %esi
; X64-NEXT:    andl %esi, %ebx
; X64-NEXT:    movl %esi, %edi
; X64-NEXT:    callq use_i32@PLT
; X64-NEXT:    movl %ebx, %eax
; X64-NEXT:    popq %rbx
; X64-NEXT:    retq
  %not = xor i32 %a1, -1
  %bswap = tail call i32 @llvm.bswap.i32(i32 %not)
  %and = and i32 %bswap, %a0
  call void @use_i32(i32 %bswap)
  ret i32 %and
}

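; Only the NOT escapes here: BMI targets bswap the un-inverted value and form
; andnl, keeping a separate NOT just for the call to @use_i32.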
define i32 @andnot_bswap_i32_multiuse_not(i32 %a0, i32 %a1) nounwind {
; X86-NOBMI-LABEL: andnot_bswap_i32_multiuse_not:
; X86-NOBMI:       # %bb.0:
; X86-NOBMI-NEXT:    pushl %esi
; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT:    notl %eax
; X86-NOBMI-NEXT:    movl %eax, %esi
; X86-NOBMI-NEXT:    bswapl %esi
; X86-NOBMI-NEXT:    andl {{[0-9]+}}(%esp), %esi
; X86-NOBMI-NEXT:    pushl %eax
; X86-NOBMI-NEXT:    calll use_i32@PLT
; X86-NOBMI-NEXT:    addl $4, %esp
; X86-NOBMI-NEXT:    movl %esi, %eax
; X86-NOBMI-NEXT:    popl %esi
; X86-NOBMI-NEXT:    retl
;
; X86-BMI-LABEL: andnot_bswap_i32_multiuse_not:
; X86-BMI:       # %bb.0:
; X86-BMI-NEXT:    pushl %esi
; X86-BMI-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-BMI-NEXT:    movl %eax, %ecx
; X86-BMI-NEXT:    notl %ecx
; X86-BMI-NEXT:    bswapl %eax
; X86-BMI-NEXT:    andnl {{[0-9]+}}(%esp), %eax, %esi
; X86-BMI-NEXT:    pushl %ecx
; X86-BMI-NEXT:    calll use_i32@PLT
; X86-BMI-NEXT:    addl $4, %esp
; X86-BMI-NEXT:    movl %esi, %eax
; X86-BMI-NEXT:    popl %esi
; X86-BMI-NEXT:    retl
;
; X64-NOBMI-LABEL: andnot_bswap_i32_multiuse_not:
; X64-NOBMI:       # %bb.0:
; X64-NOBMI-NEXT:    pushq %rbx
; X64-NOBMI-NEXT:    notl %esi
; X64-NOBMI-NEXT:    movl %esi, %ebx
; X64-NOBMI-NEXT:    bswapl %ebx
; X64-NOBMI-NEXT:    andl %edi, %ebx
; X64-NOBMI-NEXT:    movl %esi, %edi
; X64-NOBMI-NEXT:    callq use_i32@PLT
; X64-NOBMI-NEXT:    movl %ebx, %eax
; X64-NOBMI-NEXT:    popq %rbx
; X64-NOBMI-NEXT:    retq
;
; X64-BMI-LABEL: andnot_bswap_i32_multiuse_not:
; X64-BMI:       # %bb.0:
; X64-BMI-NEXT:    pushq %rbx
; X64-BMI-NEXT:    movl %esi, %eax
; X64-BMI-NEXT:    notl %eax
; X64-BMI-NEXT:    bswapl %esi
; X64-BMI-NEXT:    andnl %edi, %esi, %ebx
; X64-BMI-NEXT:    movl %eax, %edi
; X64-BMI-NEXT:    callq use_i32@PLT
; X64-BMI-NEXT:    movl %ebx, %eax
; X64-BMI-NEXT:    popq %rbx
; X64-BMI-NEXT:    retq
  %not = xor i32 %a1, -1
  %bswap = tail call i32 @llvm.bswap.i32(i32 %not)
  %and = and i32 %bswap, %a0
  call void @use_i32(i32 %not)
  ret i32 %and
}

;
; Fold (and X, (bitreverse (not Y))) -> (and X, (not (bitreverse Y)))
;
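; bitreverse is likewise a pure bit permutation, so bitreverse(not Y) ==
; not(bitreverse(Y)); an illustrative rewrite of @andnot_bitreverse_i32
; below (sketch only):
;   %rev = tail call i32 @llvm.bitreverse.i32(i32 %a1)
;   %not = xor i32 %rev, -1
;   %and = and i32 %not, %a0
; The bitreverse itself is expanded to the bswap-plus-mask sequence checked
; below; on BMI targets the final NOT still folds into andnl/andnq.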

define i64 @andnot_bitreverse_i64(i64 %a0, i64 %a1) nounwind {
; X86-NOBMI-LABEL: andnot_bitreverse_i64:
; X86-NOBMI:       # %bb.0:
; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT:    bswapl %eax
; X86-NOBMI-NEXT:    movl %eax, %edx
; X86-NOBMI-NEXT:    andl $252645135, %edx # imm = 0xF0F0F0F
; X86-NOBMI-NEXT:    shll $4, %edx
; X86-NOBMI-NEXT:    shrl $4, %eax
; X86-NOBMI-NEXT:    andl $252645135, %eax # imm = 0xF0F0F0F
; X86-NOBMI-NEXT:    orl %edx, %eax
; X86-NOBMI-NEXT:    movl %eax, %edx
; X86-NOBMI-NEXT:    andl $858993459, %edx # imm = 0x33333333
; X86-NOBMI-NEXT:    shrl $2, %eax
; X86-NOBMI-NEXT:    andl $858993459, %eax # imm = 0x33333333
; X86-NOBMI-NEXT:    leal (%eax,%edx,4), %eax
; X86-NOBMI-NEXT:    movl %eax, %edx
; X86-NOBMI-NEXT:    andl $1431655765, %edx # imm = 0x55555555
; X86-NOBMI-NEXT:    shrl %eax
; X86-NOBMI-NEXT:    andl $1431655765, %eax # imm = 0x55555555
; X86-NOBMI-NEXT:    leal (%eax,%edx,2), %eax
; X86-NOBMI-NEXT:    notl %eax
; X86-NOBMI-NEXT:    andl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT:    bswapl %ecx
; X86-NOBMI-NEXT:    movl %ecx, %edx
; X86-NOBMI-NEXT:    andl $252645135, %edx # imm = 0xF0F0F0F
; X86-NOBMI-NEXT:    shll $4, %edx
; X86-NOBMI-NEXT:    shrl $4, %ecx
; X86-NOBMI-NEXT:    andl $252645135, %ecx # imm = 0xF0F0F0F
; X86-NOBMI-NEXT:    orl %edx, %ecx
; X86-NOBMI-NEXT:    movl %ecx, %edx
; X86-NOBMI-NEXT:    andl $858993459, %edx # imm = 0x33333333
; X86-NOBMI-NEXT:    shrl $2, %ecx
; X86-NOBMI-NEXT:    andl $858993459, %ecx # imm = 0x33333333
; X86-NOBMI-NEXT:    leal (%ecx,%edx,4), %ecx
; X86-NOBMI-NEXT:    movl %ecx, %edx
; X86-NOBMI-NEXT:    andl $1431655765, %edx # imm = 0x55555555
; X86-NOBMI-NEXT:    shrl %ecx
; X86-NOBMI-NEXT:    andl $1431655765, %ecx # imm = 0x55555555
; X86-NOBMI-NEXT:    leal (%ecx,%edx,2), %edx
; X86-NOBMI-NEXT:    notl %edx
; X86-NOBMI-NEXT:    andl {{[0-9]+}}(%esp), %edx
; X86-NOBMI-NEXT:    retl
;
; X86-BMI-LABEL: andnot_bitreverse_i64:
; X86-BMI:       # %bb.0:
; X86-BMI-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-BMI-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-BMI-NEXT:    bswapl %eax
; X86-BMI-NEXT:    movl %eax, %edx
; X86-BMI-NEXT:    andl $252645135, %edx # imm = 0xF0F0F0F
; X86-BMI-NEXT:    shll $4, %edx
; X86-BMI-NEXT:    shrl $4, %eax
; X86-BMI-NEXT:    andl $252645135, %eax # imm = 0xF0F0F0F
; X86-BMI-NEXT:    orl %edx, %eax
; X86-BMI-NEXT:    movl %eax, %edx
; X86-BMI-NEXT:    andl $858993459, %edx # imm = 0x33333333
; X86-BMI-NEXT:    shrl $2, %eax
; X86-BMI-NEXT:    andl $858993459, %eax # imm = 0x33333333
; X86-BMI-NEXT:    leal (%eax,%edx,4), %eax
; X86-BMI-NEXT:    movl %eax, %edx
; X86-BMI-NEXT:    andl $1431655765, %edx # imm = 0x55555555
; X86-BMI-NEXT:    shrl %eax
; X86-BMI-NEXT:    andl $1431655765, %eax # imm = 0x55555555
; X86-BMI-NEXT:    leal (%eax,%edx,2), %eax
; X86-BMI-NEXT:    andnl {{[0-9]+}}(%esp), %eax, %eax
; X86-BMI-NEXT:    bswapl %ecx
; X86-BMI-NEXT:    movl %ecx, %edx
; X86-BMI-NEXT:    andl $252645135, %edx # imm = 0xF0F0F0F
; X86-BMI-NEXT:    shll $4, %edx
; X86-BMI-NEXT:    shrl $4, %ecx
; X86-BMI-NEXT:    andl $252645135, %ecx # imm = 0xF0F0F0F
; X86-BMI-NEXT:    orl %edx, %ecx
; X86-BMI-NEXT:    movl %ecx, %edx
; X86-BMI-NEXT:    andl $858993459, %edx # imm = 0x33333333
; X86-BMI-NEXT:    shrl $2, %ecx
; X86-BMI-NEXT:    andl $858993459, %ecx # imm = 0x33333333
; X86-BMI-NEXT:    leal (%ecx,%edx,4), %ecx
; X86-BMI-NEXT:    movl %ecx, %edx
; X86-BMI-NEXT:    andl $1431655765, %edx # imm = 0x55555555
; X86-BMI-NEXT:    shrl %ecx
; X86-BMI-NEXT:    andl $1431655765, %ecx # imm = 0x55555555
; X86-BMI-NEXT:    leal (%ecx,%edx,2), %ecx
; X86-BMI-NEXT:    andnl {{[0-9]+}}(%esp), %ecx, %edx
; X86-BMI-NEXT:    retl
;
; X64-NOBMI-LABEL: andnot_bitreverse_i64:
; X64-NOBMI:       # %bb.0:
; X64-NOBMI-NEXT:    bswapq %rsi
; X64-NOBMI-NEXT:    movq %rsi, %rax
; X64-NOBMI-NEXT:    shrq $4, %rax
; X64-NOBMI-NEXT:    movabsq $1085102592571150095, %rcx # imm = 0xF0F0F0F0F0F0F0F
; X64-NOBMI-NEXT:    andq %rcx, %rax
; X64-NOBMI-NEXT:    andq %rcx, %rsi
; X64-NOBMI-NEXT:    shlq $4, %rsi
; X64-NOBMI-NEXT:    orq %rax, %rsi
; X64-NOBMI-NEXT:    movabsq $3689348814741910323, %rax # imm = 0x3333333333333333
; X64-NOBMI-NEXT:    movq %rsi, %rcx
; X64-NOBMI-NEXT:    andq %rax, %rcx
; X64-NOBMI-NEXT:    shrq $2, %rsi
; X64-NOBMI-NEXT:    andq %rax, %rsi
; X64-NOBMI-NEXT:    leaq (%rsi,%rcx,4), %rax
; X64-NOBMI-NEXT:    movabsq $6148914691236517205, %rcx # imm = 0x5555555555555555
; X64-NOBMI-NEXT:    movq %rax, %rdx
; X64-NOBMI-NEXT:    andq %rcx, %rdx
; X64-NOBMI-NEXT:    shrq %rax
; X64-NOBMI-NEXT:    andq %rcx, %rax
; X64-NOBMI-NEXT:    leaq (%rax,%rdx,2), %rax
; X64-NOBMI-NEXT:    notq %rax
; X64-NOBMI-NEXT:    andq %rdi, %rax
; X64-NOBMI-NEXT:    retq
;
; X64-BMI-LABEL: andnot_bitreverse_i64:
; X64-BMI:       # %bb.0:
; X64-BMI-NEXT:    bswapq %rsi
; X64-BMI-NEXT:    movq %rsi, %rax
; X64-BMI-NEXT:    shrq $4, %rax
; X64-BMI-NEXT:    movabsq $1085102592571150095, %rcx # imm = 0xF0F0F0F0F0F0F0F
; X64-BMI-NEXT:    andq %rcx, %rax
; X64-BMI-NEXT:    andq %rcx, %rsi
; X64-BMI-NEXT:    shlq $4, %rsi
; X64-BMI-NEXT:    orq %rax, %rsi
; X64-BMI-NEXT:    movabsq $3689348814741910323, %rax # imm = 0x3333333333333333
; X64-BMI-NEXT:    movq %rsi, %rcx
; X64-BMI-NEXT:    andq %rax, %rcx
; X64-BMI-NEXT:    shrq $2, %rsi
; X64-BMI-NEXT:    andq %rax, %rsi
; X64-BMI-NEXT:    leaq (%rsi,%rcx,4), %rax
; X64-BMI-NEXT:    movabsq $6148914691236517205, %rcx # imm = 0x5555555555555555
; X64-BMI-NEXT:    movq %rax, %rdx
; X64-BMI-NEXT:    andq %rcx, %rdx
; X64-BMI-NEXT:    shrq %rax
; X64-BMI-NEXT:    andq %rcx, %rax
; X64-BMI-NEXT:    leaq (%rax,%rdx,2), %rax
; X64-BMI-NEXT:    andnq %rdi, %rax, %rax
; X64-BMI-NEXT:    retq
  %not = xor i64 %a1, -1
  %bitrev = tail call i64 @llvm.bitreverse.i64(i64 %not)
  %and = and i64 %bitrev, %a0
  ret i64 %and
}

define i32 @andnot_bitreverse_i32(i32 %a0, i32 %a1) nounwind {
; X86-NOBMI-LABEL: andnot_bitreverse_i32:
; X86-NOBMI:       # %bb.0:
; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT:    bswapl %eax
; X86-NOBMI-NEXT:    movl %eax, %ecx
; X86-NOBMI-NEXT:    andl $252645135, %ecx # imm = 0xF0F0F0F
; X86-NOBMI-NEXT:    shll $4, %ecx
; X86-NOBMI-NEXT:    shrl $4, %eax
; X86-NOBMI-NEXT:    andl $252645135, %eax # imm = 0xF0F0F0F
; X86-NOBMI-NEXT:    orl %ecx, %eax
; X86-NOBMI-NEXT:    movl %eax, %ecx
; X86-NOBMI-NEXT:    andl $858993459, %ecx # imm = 0x33333333
; X86-NOBMI-NEXT:    shrl $2, %eax
; X86-NOBMI-NEXT:    andl $858993459, %eax # imm = 0x33333333
; X86-NOBMI-NEXT:    leal (%eax,%ecx,4), %eax
; X86-NOBMI-NEXT:    movl %eax, %ecx
; X86-NOBMI-NEXT:    andl $1431655765, %ecx # imm = 0x55555555
; X86-NOBMI-NEXT:    shrl %eax
; X86-NOBMI-NEXT:    andl $1431655765, %eax # imm = 0x55555555
; X86-NOBMI-NEXT:    leal (%eax,%ecx,2), %eax
; X86-NOBMI-NEXT:    notl %eax
; X86-NOBMI-NEXT:    andl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT:    retl
;
; X86-BMI-LABEL: andnot_bitreverse_i32:
; X86-BMI:       # %bb.0:
; X86-BMI-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-BMI-NEXT:    bswapl %eax
; X86-BMI-NEXT:    movl %eax, %ecx
; X86-BMI-NEXT:    andl $252645135, %ecx # imm = 0xF0F0F0F
; X86-BMI-NEXT:    shll $4, %ecx
; X86-BMI-NEXT:    shrl $4, %eax
; X86-BMI-NEXT:    andl $252645135, %eax # imm = 0xF0F0F0F
; X86-BMI-NEXT:    orl %ecx, %eax
; X86-BMI-NEXT:    movl %eax, %ecx
; X86-BMI-NEXT:    andl $858993459, %ecx # imm = 0x33333333
; X86-BMI-NEXT:    shrl $2, %eax
; X86-BMI-NEXT:    andl $858993459, %eax # imm = 0x33333333
; X86-BMI-NEXT:    leal (%eax,%ecx,4), %eax
; X86-BMI-NEXT:    movl %eax, %ecx
; X86-BMI-NEXT:    andl $1431655765, %ecx # imm = 0x55555555
; X86-BMI-NEXT:    shrl %eax
; X86-BMI-NEXT:    andl $1431655765, %eax # imm = 0x55555555
; X86-BMI-NEXT:    leal (%eax,%ecx,2), %eax
; X86-BMI-NEXT:    andnl {{[0-9]+}}(%esp), %eax, %eax
; X86-BMI-NEXT:    retl
;
; X64-NOBMI-LABEL: andnot_bitreverse_i32:
; X64-NOBMI:       # %bb.0:
; X64-NOBMI-NEXT:    # kill: def $esi killed $esi def $rsi
; X64-NOBMI-NEXT:    bswapl %esi
; X64-NOBMI-NEXT:    movl %esi, %eax
; X64-NOBMI-NEXT:    andl $252645135, %eax # imm = 0xF0F0F0F
; X64-NOBMI-NEXT:    shll $4, %eax
; X64-NOBMI-NEXT:    shrl $4, %esi
; X64-NOBMI-NEXT:    andl $252645135, %esi # imm = 0xF0F0F0F
; X64-NOBMI-NEXT:    orl %eax, %esi
; X64-NOBMI-NEXT:    movl %esi, %eax
; X64-NOBMI-NEXT:    andl $858993459, %eax # imm = 0x33333333
; X64-NOBMI-NEXT:    shrl $2, %esi
; X64-NOBMI-NEXT:    andl $858993459, %esi # imm = 0x33333333
; X64-NOBMI-NEXT:    leal (%rsi,%rax,4), %eax
; X64-NOBMI-NEXT:    movl %eax, %ecx
; X64-NOBMI-NEXT:    andl $1431655765, %ecx # imm = 0x55555555
; X64-NOBMI-NEXT:    shrl %eax
; X64-NOBMI-NEXT:    andl $1431655765, %eax # imm = 0x55555555
; X64-NOBMI-NEXT:    leal (%rax,%rcx,2), %eax
; X64-NOBMI-NEXT:    notl %eax
; X64-NOBMI-NEXT:    andl %edi, %eax
; X64-NOBMI-NEXT:    retq
;
; X64-BMI-LABEL: andnot_bitreverse_i32:
; X64-BMI:       # %bb.0:
; X64-BMI-NEXT:    # kill: def $esi killed $esi def $rsi
; X64-BMI-NEXT:    bswapl %esi
; X64-BMI-NEXT:    movl %esi, %eax
; X64-BMI-NEXT:    andl $252645135, %eax # imm = 0xF0F0F0F
; X64-BMI-NEXT:    shll $4, %eax
; X64-BMI-NEXT:    shrl $4, %esi
; X64-BMI-NEXT:    andl $252645135, %esi # imm = 0xF0F0F0F
; X64-BMI-NEXT:    orl %eax, %esi
; X64-BMI-NEXT:    movl %esi, %eax
; X64-BMI-NEXT:    andl $858993459, %eax # imm = 0x33333333
; X64-BMI-NEXT:    shrl $2, %esi
; X64-BMI-NEXT:    andl $858993459, %esi # imm = 0x33333333
; X64-BMI-NEXT:    leal (%rsi,%rax,4), %eax
; X64-BMI-NEXT:    movl %eax, %ecx
; X64-BMI-NEXT:    andl $1431655765, %ecx # imm = 0x55555555
; X64-BMI-NEXT:    shrl %eax
; X64-BMI-NEXT:    andl $1431655765, %eax # imm = 0x55555555
; X64-BMI-NEXT:    leal (%rax,%rcx,2), %eax
; X64-BMI-NEXT:    andnl %edi, %eax, %eax
; X64-BMI-NEXT:    retq
  %not = xor i32 %a1, -1
  %bitrev = tail call i32 @llvm.bitreverse.i32(i32 %not)
  %and = and i32 %bitrev, %a0
  ret i32 %and
}

define i16 @andnot_bitreverse_i16(i16 %a0, i16 %a1) nounwind {
; X86-LABEL: andnot_bitreverse_i16:
; X86:       # %bb.0:
; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    rolw $8, %ax
; X86-NEXT:    movl %eax, %ecx
; X86-NEXT:    andl $3855, %ecx # imm = 0xF0F
; X86-NEXT:    shll $4, %ecx
; X86-NEXT:    shrl $4, %eax
; X86-NEXT:    andl $3855, %eax # imm = 0xF0F
; X86-NEXT:    orl %ecx, %eax
; X86-NEXT:    movl %eax, %ecx
; X86-NEXT:    andl $13107, %ecx # imm = 0x3333
; X86-NEXT:    shrl $2, %eax
; X86-NEXT:    andl $13107, %eax # imm = 0x3333
; X86-NEXT:    leal (%eax,%ecx,4), %eax
; X86-NEXT:    movl %eax, %ecx
; X86-NEXT:    andl $21845, %ecx # imm = 0x5555
; X86-NEXT:    shrl %eax
; X86-NEXT:    andl $21845, %eax # imm = 0x5555
; X86-NEXT:    leal (%eax,%ecx,2), %eax
; X86-NEXT:    notl %eax
; X86-NEXT:    andw {{[0-9]+}}(%esp), %ax
; X86-NEXT:    # kill: def $ax killed $ax killed $eax
; X86-NEXT:    retl
;
; X64-NOBMI-LABEL: andnot_bitreverse_i16:
; X64-NOBMI:       # %bb.0:
; X64-NOBMI-NEXT:    # kill: def $esi killed $esi def $rsi
; X64-NOBMI-NEXT:    rolw $8, %si
; X64-NOBMI-NEXT:    movl %esi, %eax
; X64-NOBMI-NEXT:    andl $3855, %eax # imm = 0xF0F
; X64-NOBMI-NEXT:    shll $4, %eax
; X64-NOBMI-NEXT:    shrl $4, %esi
; X64-NOBMI-NEXT:    andl $3855, %esi # imm = 0xF0F
; X64-NOBMI-NEXT:    orl %eax, %esi
; X64-NOBMI-NEXT:    movl %esi, %eax
; X64-NOBMI-NEXT:    andl $13107, %eax # imm = 0x3333
; X64-NOBMI-NEXT:    shrl $2, %esi
; X64-NOBMI-NEXT:    andl $13107, %esi # imm = 0x3333
; X64-NOBMI-NEXT:    leal (%rsi,%rax,4), %eax
; X64-NOBMI-NEXT:    movl %eax, %ecx
; X64-NOBMI-NEXT:    andl $21845, %ecx # imm = 0x5555
; X64-NOBMI-NEXT:    shrl %eax
; X64-NOBMI-NEXT:    andl $21845, %eax # imm = 0x5555
; X64-NOBMI-NEXT:    leal (%rax,%rcx,2), %eax
; X64-NOBMI-NEXT:    notl %eax
; X64-NOBMI-NEXT:    andl %edi, %eax
; X64-NOBMI-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-NOBMI-NEXT:    retq
;
; X64-BMI-LABEL: andnot_bitreverse_i16:
; X64-BMI:       # %bb.0:
; X64-BMI-NEXT:    # kill: def $esi killed $esi def $rsi
; X64-BMI-NEXT:    rolw $8, %si
; X64-BMI-NEXT:    movl %esi, %eax
; X64-BMI-NEXT:    andl $3855, %eax # imm = 0xF0F
; X64-BMI-NEXT:    shll $4, %eax
; X64-BMI-NEXT:    shrl $4, %esi
; X64-BMI-NEXT:    andl $3855, %esi # imm = 0xF0F
; X64-BMI-NEXT:    orl %eax, %esi
; X64-BMI-NEXT:    movl %esi, %eax
; X64-BMI-NEXT:    andl $13107, %eax # imm = 0x3333
; X64-BMI-NEXT:    shrl $2, %esi
; X64-BMI-NEXT:    andl $13107, %esi # imm = 0x3333
; X64-BMI-NEXT:    leal (%rsi,%rax,4), %eax
; X64-BMI-NEXT:    movl %eax, %ecx
; X64-BMI-NEXT:    andl $21845, %ecx # imm = 0x5555
; X64-BMI-NEXT:    shrl %eax
; X64-BMI-NEXT:    andl $21845, %eax # imm = 0x5555
; X64-BMI-NEXT:    leal (%rax,%rcx,2), %eax
; X64-BMI-NEXT:    andnl %edi, %eax, %eax
; X64-BMI-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-BMI-NEXT:    retq
  %not = xor i16 %a1, -1
  %bitrev = tail call i16 @llvm.bitreverse.i16(i16 %not)
  %and = and i16 %bitrev, %a0
  ret i16 %and
}

define i8 @andnot_bitreverse_i8(i8 %a0, i8 %a1) nounwind {
; X86-LABEL: andnot_bitreverse_i8:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    rolb $4, %al
; X86-NEXT:    movl %eax, %ecx
; X86-NEXT:    andb $51, %cl
; X86-NEXT:    shlb $2, %cl
; X86-NEXT:    shrb $2, %al
; X86-NEXT:    andb $51, %al
; X86-NEXT:    orb %cl, %al
; X86-NEXT:    movl %eax, %ecx
; X86-NEXT:    andb $85, %cl
; X86-NEXT:    addb %cl, %cl
; X86-NEXT:    shrb %al
; X86-NEXT:    andb $85, %al
; X86-NEXT:    orb %cl, %al
; X86-NEXT:    notb %al
; X86-NEXT:    andb {{[0-9]+}}(%esp), %al
; X86-NEXT:    retl
;
; X64-LABEL: andnot_bitreverse_i8:
; X64:       # %bb.0:
; X64-NEXT:    rolb $4, %sil
; X64-NEXT:    movl %esi, %eax
; X64-NEXT:    andb $51, %al
; X64-NEXT:    shlb $2, %al
; X64-NEXT:    shrb $2, %sil
; X64-NEXT:    andb $51, %sil
; X64-NEXT:    orb %sil, %al
; X64-NEXT:    movl %eax, %ecx
; X64-NEXT:    andb $85, %cl
; X64-NEXT:    addb %cl, %cl
; X64-NEXT:    shrb %al
; X64-NEXT:    andb $85, %al
; X64-NEXT:    orb %cl, %al
; X64-NEXT:    notb %al
; X64-NEXT:    andb %dil, %al
; X64-NEXT:    retq
  %not = xor i8 %a1, -1
  %bitrev = tail call i8 @llvm.bitreverse.i8(i8 %not)
  %and = and i8 %bitrev, %a0
  ret i8 %and
}
