xref: /llvm-project/llvm/test/CodeGen/X86/const-shift-of-constmasked.ll (revision d96529af3c362c53ef2e8c883a9e571fb3626927)
; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i686-unknown-unknown | FileCheck %s --check-prefix=X86
; RUN: llc < %s -mtriple=x86_64-unknown-unknown | FileCheck %s --check-prefix=X64

; The mask is all-ones, potentially shifted.

;------------------------------------------------------------------------------;
; 8-bit
;------------------------------------------------------------------------------;

; lshr

; i8 logical-shift-right of a masked value. The generated checks show the AND
; constant being trimmed to just the bits that survive the shift (7 -> 6 for
; >>1, 28 -> 24 for >>3, 28 -> 16 for >>4) and dropped entirely once the
; shifted mask no longer constrains the result (224 >> 5, 224 >> 6).
define i8 @test_i8_7_mask_lshr_1(i8 %a0) {
; X86-LABEL: test_i8_7_mask_lshr_1:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andb $6, %al
; X86-NEXT:    shrb %al
; X86-NEXT:    retl
;
; X64-LABEL: test_i8_7_mask_lshr_1:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    andb $6, %al
; X64-NEXT:    shrb %al
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %t0 = and i8 %a0, 7
  %t1 = lshr i8 %t0, 1
  ret i8 %t1
}

define i8 @test_i8_28_mask_lshr_1(i8 %a0) {
; X86-LABEL: test_i8_28_mask_lshr_1:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andb $28, %al
; X86-NEXT:    shrb %al
; X86-NEXT:    retl
;
; X64-LABEL: test_i8_28_mask_lshr_1:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    andb $28, %al
; X64-NEXT:    shrb %al
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %t0 = and i8 %a0, 28
  %t1 = lshr i8 %t0, 1
  ret i8 %t1
}
define i8 @test_i8_28_mask_lshr_2(i8 %a0) {
; X86-LABEL: test_i8_28_mask_lshr_2:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andb $28, %al
; X86-NEXT:    shrb $2, %al
; X86-NEXT:    retl
;
; X64-LABEL: test_i8_28_mask_lshr_2:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    andb $28, %al
; X64-NEXT:    shrb $2, %al
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %t0 = and i8 %a0, 28
  %t1 = lshr i8 %t0, 2
  ret i8 %t1
}
define i8 @test_i8_28_mask_lshr_3(i8 %a0) {
; X86-LABEL: test_i8_28_mask_lshr_3:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andb $24, %al
; X86-NEXT:    shrb $3, %al
; X86-NEXT:    retl
;
; X64-LABEL: test_i8_28_mask_lshr_3:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    andb $24, %al
; X64-NEXT:    shrb $3, %al
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %t0 = and i8 %a0, 28
  %t1 = lshr i8 %t0, 3
  ret i8 %t1
}
define i8 @test_i8_28_mask_lshr_4(i8 %a0) {
; X86-LABEL: test_i8_28_mask_lshr_4:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andb $16, %al
; X86-NEXT:    shrb $4, %al
; X86-NEXT:    retl
;
; X64-LABEL: test_i8_28_mask_lshr_4:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    andb $16, %al
; X64-NEXT:    shrb $4, %al
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %t0 = and i8 %a0, 28
  %t1 = lshr i8 %t0, 4
  ret i8 %t1
}

define i8 @test_i8_224_mask_lshr_1(i8 %a0) {
; X86-LABEL: test_i8_224_mask_lshr_1:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andb $-32, %al
; X86-NEXT:    shrb %al
; X86-NEXT:    retl
;
; X64-LABEL: test_i8_224_mask_lshr_1:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    andb $-32, %al
; X64-NEXT:    shrb %al
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %t0 = and i8 %a0, 224
  %t1 = lshr i8 %t0, 1
  ret i8 %t1
}
define i8 @test_i8_224_mask_lshr_4(i8 %a0) {
; X86-LABEL: test_i8_224_mask_lshr_4:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andb $-32, %al
; X86-NEXT:    shrb $4, %al
; X86-NEXT:    retl
;
; X64-LABEL: test_i8_224_mask_lshr_4:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    andb $-32, %al
; X64-NEXT:    shrb $4, %al
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %t0 = and i8 %a0, 224
  %t1 = lshr i8 %t0, 4
  ret i8 %t1
}
define i8 @test_i8_224_mask_lshr_5(i8 %a0) {
; X86-LABEL: test_i8_224_mask_lshr_5:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    shrb $5, %al
; X86-NEXT:    retl
;
; X64-LABEL: test_i8_224_mask_lshr_5:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    shrb $5, %al
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %t0 = and i8 %a0, 224
  %t1 = lshr i8 %t0, 5
  ret i8 %t1
}
define i8 @test_i8_224_mask_lshr_6(i8 %a0) {
; X86-LABEL: test_i8_224_mask_lshr_6:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    shrb $6, %al
; X86-NEXT:    retl
;
; X64-LABEL: test_i8_224_mask_lshr_6:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    shrb $6, %al
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %t0 = and i8 %a0, 224
  %t1 = lshr i8 %t0, 6
  ret i8 %t1
}

; ashr

; i8 arithmetic-shift-right of a masked value. Masks that clear the sign bit
; (7, 28) lower to the same unsigned shrb sequences as the lshr tests above;
; the 224 mask keeps bit 7, so sarb is emitted, and the AND folds away for
; shifts of 5 and 6.
define i8 @test_i8_7_mask_ashr_1(i8 %a0) {
; X86-LABEL: test_i8_7_mask_ashr_1:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andb $6, %al
; X86-NEXT:    shrb %al
; X86-NEXT:    retl
;
; X64-LABEL: test_i8_7_mask_ashr_1:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    andb $6, %al
; X64-NEXT:    shrb %al
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %t0 = and i8 %a0, 7
  %t1 = ashr i8 %t0, 1
  ret i8 %t1
}

define i8 @test_i8_28_mask_ashr_1(i8 %a0) {
; X86-LABEL: test_i8_28_mask_ashr_1:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andb $28, %al
; X86-NEXT:    shrb %al
; X86-NEXT:    retl
;
; X64-LABEL: test_i8_28_mask_ashr_1:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    andb $28, %al
; X64-NEXT:    shrb %al
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %t0 = and i8 %a0, 28
  %t1 = ashr i8 %t0, 1
  ret i8 %t1
}
define i8 @test_i8_28_mask_ashr_2(i8 %a0) {
; X86-LABEL: test_i8_28_mask_ashr_2:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andb $28, %al
; X86-NEXT:    shrb $2, %al
; X86-NEXT:    retl
;
; X64-LABEL: test_i8_28_mask_ashr_2:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    andb $28, %al
; X64-NEXT:    shrb $2, %al
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %t0 = and i8 %a0, 28
  %t1 = ashr i8 %t0, 2
  ret i8 %t1
}
define i8 @test_i8_28_mask_ashr_3(i8 %a0) {
; X86-LABEL: test_i8_28_mask_ashr_3:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andb $24, %al
; X86-NEXT:    shrb $3, %al
; X86-NEXT:    retl
;
; X64-LABEL: test_i8_28_mask_ashr_3:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    andb $24, %al
; X64-NEXT:    shrb $3, %al
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %t0 = and i8 %a0, 28
  %t1 = ashr i8 %t0, 3
  ret i8 %t1
}
define i8 @test_i8_28_mask_ashr_4(i8 %a0) {
; X86-LABEL: test_i8_28_mask_ashr_4:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andb $16, %al
; X86-NEXT:    shrb $4, %al
; X86-NEXT:    retl
;
; X64-LABEL: test_i8_28_mask_ashr_4:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    andb $16, %al
; X64-NEXT:    shrb $4, %al
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %t0 = and i8 %a0, 28
  %t1 = ashr i8 %t0, 4
  ret i8 %t1
}

define i8 @test_i8_224_mask_ashr_1(i8 %a0) {
; X86-LABEL: test_i8_224_mask_ashr_1:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andb $-32, %al
; X86-NEXT:    sarb %al
; X86-NEXT:    retl
;
; X64-LABEL: test_i8_224_mask_ashr_1:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    andb $-32, %al
; X64-NEXT:    sarb %al
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %t0 = and i8 %a0, 224
  %t1 = ashr i8 %t0, 1
  ret i8 %t1
}
define i8 @test_i8_224_mask_ashr_4(i8 %a0) {
; X86-LABEL: test_i8_224_mask_ashr_4:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andb $-32, %al
; X86-NEXT:    sarb $4, %al
; X86-NEXT:    retl
;
; X64-LABEL: test_i8_224_mask_ashr_4:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    andb $-32, %al
; X64-NEXT:    sarb $4, %al
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %t0 = and i8 %a0, 224
  %t1 = ashr i8 %t0, 4
  ret i8 %t1
}
define i8 @test_i8_224_mask_ashr_5(i8 %a0) {
; X86-LABEL: test_i8_224_mask_ashr_5:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    sarb $5, %al
; X86-NEXT:    retl
;
; X64-LABEL: test_i8_224_mask_ashr_5:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    sarb $5, %al
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %t0 = and i8 %a0, 224
  %t1 = ashr i8 %t0, 5
  ret i8 %t1
}
define i8 @test_i8_224_mask_ashr_6(i8 %a0) {
; X86-LABEL: test_i8_224_mask_ashr_6:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    sarb $6, %al
; X86-NEXT:    retl
;
; X64-LABEL: test_i8_224_mask_ashr_6:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    sarb $6, %al
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %t0 = and i8 %a0, 224
  %t1 = ashr i8 %t0, 6
  ret i8 %t1
}

; shl

; i8 shift-left of a masked value. Shifts by 1/2/3 lower to addb or LEA forms
; on x86-64; the AND constant is trimmed to the bits still visible after the
; shift (28 -> 12 for <<4, 224 -> 96 for <<1) and is dropped once the mask no
; longer constrains the truncated result (7 << 5, 7 << 6).
define i8 @test_i8_7_mask_shl_1(i8 %a0) {
; X86-LABEL: test_i8_7_mask_shl_1:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andb $7, %al
; X86-NEXT:    addb %al, %al
; X86-NEXT:    retl
;
; X64-LABEL: test_i8_7_mask_shl_1:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    andb $7, %dil
; X64-NEXT:    leal (%rdi,%rdi), %eax
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %t0 = and i8 %a0, 7
  %t1 = shl i8 %t0, 1
  ret i8 %t1
}
define i8 @test_i8_7_mask_shl_4(i8 %a0) {
; X86-LABEL: test_i8_7_mask_shl_4:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andb $7, %al
; X86-NEXT:    shlb $4, %al
; X86-NEXT:    retl
;
; X64-LABEL: test_i8_7_mask_shl_4:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    andb $7, %al
; X64-NEXT:    shlb $4, %al
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %t0 = and i8 %a0, 7
  %t1 = shl i8 %t0, 4
  ret i8 %t1
}
define i8 @test_i8_7_mask_shl_5(i8 %a0) {
; X86-LABEL: test_i8_7_mask_shl_5:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    shlb $5, %al
; X86-NEXT:    retl
;
; X64-LABEL: test_i8_7_mask_shl_5:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    shlb $5, %al
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %t0 = and i8 %a0, 7
  %t1 = shl i8 %t0, 5
  ret i8 %t1
}
define i8 @test_i8_7_mask_shl_6(i8 %a0) {
; X86-LABEL: test_i8_7_mask_shl_6:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    shlb $6, %al
; X86-NEXT:    retl
;
; X64-LABEL: test_i8_7_mask_shl_6:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    shlb $6, %al
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %t0 = and i8 %a0, 7
  %t1 = shl i8 %t0, 6
  ret i8 %t1
}

define i8 @test_i8_28_mask_shl_1(i8 %a0) {
; X86-LABEL: test_i8_28_mask_shl_1:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andb $28, %al
; X86-NEXT:    addb %al, %al
; X86-NEXT:    retl
;
; X64-LABEL: test_i8_28_mask_shl_1:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    andb $28, %dil
; X64-NEXT:    leal (%rdi,%rdi), %eax
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %t0 = and i8 %a0, 28
  %t1 = shl i8 %t0, 1
  ret i8 %t1
}
define i8 @test_i8_28_mask_shl_2(i8 %a0) {
; X86-LABEL: test_i8_28_mask_shl_2:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andb $28, %al
; X86-NEXT:    shlb $2, %al
; X86-NEXT:    retl
;
; X64-LABEL: test_i8_28_mask_shl_2:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    andb $28, %dil
; X64-NEXT:    leal (,%rdi,4), %eax
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %t0 = and i8 %a0, 28
  %t1 = shl i8 %t0, 2
  ret i8 %t1
}
define i8 @test_i8_28_mask_shl_3(i8 %a0) {
; X86-LABEL: test_i8_28_mask_shl_3:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andb $28, %al
; X86-NEXT:    shlb $3, %al
; X86-NEXT:    retl
;
; X64-LABEL: test_i8_28_mask_shl_3:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    andb $28, %dil
; X64-NEXT:    leal (,%rdi,8), %eax
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %t0 = and i8 %a0, 28
  %t1 = shl i8 %t0, 3
  ret i8 %t1
}
define i8 @test_i8_28_mask_shl_4(i8 %a0) {
; X86-LABEL: test_i8_28_mask_shl_4:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andb $12, %al
; X86-NEXT:    shlb $4, %al
; X86-NEXT:    retl
;
; X64-LABEL: test_i8_28_mask_shl_4:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    andb $12, %al
; X64-NEXT:    shlb $4, %al
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %t0 = and i8 %a0, 28
  %t1 = shl i8 %t0, 4
  ret i8 %t1
}

define i8 @test_i8_224_mask_shl_1(i8 %a0) {
; X86-LABEL: test_i8_224_mask_shl_1:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andb $96, %al
; X86-NEXT:    addb %al, %al
; X86-NEXT:    retl
;
; X64-LABEL: test_i8_224_mask_shl_1:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    andb $96, %dil
; X64-NEXT:    leal (%rdi,%rdi), %eax
; X64-NEXT:    # kill: def $al killed $al killed $eax
; X64-NEXT:    retq
  %t0 = and i8 %a0, 224
  %t1 = shl i8 %t0, 1
  ret i8 %t1
}

;------------------------------------------------------------------------------;
; 16-bit
;------------------------------------------------------------------------------;

; lshr

; i16 logical-shift-right of a masked value. The i16 operations are widened to
; 32-bit (movzwl + andl/shrl, then a kill of $ax); for the 2032 mask and
; shifts >= 4 the AND is re-associated after the shift with a smaller constant
; (127, 63, 31), and for 65024 >> 9 / >> 10 it folds away entirely, leaving
; only the zero-extension plus the shift.
define i16 @test_i16_127_mask_lshr_1(i16 %a0) {
; X86-LABEL: test_i16_127_mask_lshr_1:
; X86:       # %bb.0:
; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andl $126, %eax
; X86-NEXT:    shrl %eax
; X86-NEXT:    # kill: def $ax killed $ax killed $eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i16_127_mask_lshr_1:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    andl $126, %eax
; X64-NEXT:    shrl %eax
; X64-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-NEXT:    retq
  %t0 = and i16 %a0, 127
  %t1 = lshr i16 %t0, 1
  ret i16 %t1
}

define i16 @test_i16_2032_mask_lshr_3(i16 %a0) {
; X86-LABEL: test_i16_2032_mask_lshr_3:
; X86:       # %bb.0:
; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andl $2032, %eax # imm = 0x7F0
; X86-NEXT:    shrl $3, %eax
; X86-NEXT:    # kill: def $ax killed $ax killed $eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i16_2032_mask_lshr_3:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    andl $2032, %eax # imm = 0x7F0
; X64-NEXT:    shrl $3, %eax
; X64-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-NEXT:    retq
  %t0 = and i16 %a0, 2032
  %t1 = lshr i16 %t0, 3
  ret i16 %t1
}
define i16 @test_i16_2032_mask_lshr_4(i16 %a0) {
; X86-LABEL: test_i16_2032_mask_lshr_4:
; X86:       # %bb.0:
; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    shrl $4, %eax
; X86-NEXT:    andl $127, %eax
; X86-NEXT:    # kill: def $ax killed $ax killed $eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i16_2032_mask_lshr_4:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    shrl $4, %eax
; X64-NEXT:    andl $127, %eax
; X64-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-NEXT:    retq
  %t0 = and i16 %a0, 2032
  %t1 = lshr i16 %t0, 4
  ret i16 %t1
}
define i16 @test_i16_2032_mask_lshr_5(i16 %a0) {
; X86-LABEL: test_i16_2032_mask_lshr_5:
; X86:       # %bb.0:
; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    shrl $5, %eax
; X86-NEXT:    andl $63, %eax
; X86-NEXT:    # kill: def $ax killed $ax killed $eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i16_2032_mask_lshr_5:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    shrl $5, %eax
; X64-NEXT:    andl $63, %eax
; X64-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-NEXT:    retq
  %t0 = and i16 %a0, 2032
  %t1 = lshr i16 %t0, 5
  ret i16 %t1
}
define i16 @test_i16_2032_mask_lshr_6(i16 %a0) {
; X86-LABEL: test_i16_2032_mask_lshr_6:
; X86:       # %bb.0:
; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    shrl $6, %eax
; X86-NEXT:    andl $31, %eax
; X86-NEXT:    # kill: def $ax killed $ax killed $eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i16_2032_mask_lshr_6:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    shrl $6, %eax
; X64-NEXT:    andl $31, %eax
; X64-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-NEXT:    retq
  %t0 = and i16 %a0, 2032
  %t1 = lshr i16 %t0, 6
  ret i16 %t1
}

define i16 @test_i16_65024_mask_lshr_1(i16 %a0) {
; X86-LABEL: test_i16_65024_mask_lshr_1:
; X86:       # %bb.0:
; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andl $65024, %eax # imm = 0xFE00
; X86-NEXT:    shrl %eax
; X86-NEXT:    # kill: def $ax killed $ax killed $eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i16_65024_mask_lshr_1:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    andl $65024, %eax # imm = 0xFE00
; X64-NEXT:    shrl %eax
; X64-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-NEXT:    retq
  %t0 = and i16 %a0, 65024
  %t1 = lshr i16 %t0, 1
  ret i16 %t1
}
define i16 @test_i16_65024_mask_lshr_8(i16 %a0) {
; X86-LABEL: test_i16_65024_mask_lshr_8:
; X86:       # %bb.0:
; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andl $65024, %eax # imm = 0xFE00
; X86-NEXT:    shrl $8, %eax
; X86-NEXT:    # kill: def $ax killed $ax killed $eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i16_65024_mask_lshr_8:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    andl $65024, %eax # imm = 0xFE00
; X64-NEXT:    shrl $8, %eax
; X64-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-NEXT:    retq
  %t0 = and i16 %a0, 65024
  %t1 = lshr i16 %t0, 8
  ret i16 %t1
}
define i16 @test_i16_65024_mask_lshr_9(i16 %a0) {
; X86-LABEL: test_i16_65024_mask_lshr_9:
; X86:       # %bb.0:
; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    shrl $9, %eax
; X86-NEXT:    # kill: def $ax killed $ax killed $eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i16_65024_mask_lshr_9:
; X64:       # %bb.0:
; X64-NEXT:    movzwl %di, %eax
; X64-NEXT:    shrl $9, %eax
; X64-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-NEXT:    retq
  %t0 = and i16 %a0, 65024
  %t1 = lshr i16 %t0, 9
  ret i16 %t1
}
define i16 @test_i16_65024_mask_lshr_10(i16 %a0) {
; X86-LABEL: test_i16_65024_mask_lshr_10:
; X86:       # %bb.0:
; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    shrl $10, %eax
; X86-NEXT:    # kill: def $ax killed $ax killed $eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i16_65024_mask_lshr_10:
; X64:       # %bb.0:
; X64-NEXT:    movzwl %di, %eax
; X64-NEXT:    shrl $10, %eax
; X64-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-NEXT:    retq
  %t0 = and i16 %a0, 65024
  %t1 = lshr i16 %t0, 10
  ret i16 %t1
}

; ashr

; i16 arithmetic-shift-right of a masked value, widened to 32-bit. Masks that
; clear the sign bit (127, 2032) lower exactly like the lshr tests; with the
; 65024 mask the value is sign-extended first (cwtl on X86, movswl on X64)
; before an unsigned 32-bit shift, and the AND folds away for shifts >= 9.
define i16 @test_i16_127_mask_ashr_1(i16 %a0) {
; X86-LABEL: test_i16_127_mask_ashr_1:
; X86:       # %bb.0:
; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andl $126, %eax
; X86-NEXT:    shrl %eax
; X86-NEXT:    # kill: def $ax killed $ax killed $eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i16_127_mask_ashr_1:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    andl $126, %eax
; X64-NEXT:    shrl %eax
; X64-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-NEXT:    retq
  %t0 = and i16 %a0, 127
  %t1 = ashr i16 %t0, 1
  ret i16 %t1
}

define i16 @test_i16_2032_mask_ashr_3(i16 %a0) {
; X86-LABEL: test_i16_2032_mask_ashr_3:
; X86:       # %bb.0:
; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andl $2032, %eax # imm = 0x7F0
; X86-NEXT:    shrl $3, %eax
; X86-NEXT:    # kill: def $ax killed $ax killed $eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i16_2032_mask_ashr_3:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    andl $2032, %eax # imm = 0x7F0
; X64-NEXT:    shrl $3, %eax
; X64-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-NEXT:    retq
  %t0 = and i16 %a0, 2032
  %t1 = ashr i16 %t0, 3
  ret i16 %t1
}
define i16 @test_i16_2032_mask_ashr_4(i16 %a0) {
; X86-LABEL: test_i16_2032_mask_ashr_4:
; X86:       # %bb.0:
; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    shrl $4, %eax
; X86-NEXT:    andl $127, %eax
; X86-NEXT:    # kill: def $ax killed $ax killed $eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i16_2032_mask_ashr_4:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    shrl $4, %eax
; X64-NEXT:    andl $127, %eax
; X64-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-NEXT:    retq
  %t0 = and i16 %a0, 2032
  %t1 = ashr i16 %t0, 4
  ret i16 %t1
}
define i16 @test_i16_2032_mask_ashr_5(i16 %a0) {
; X86-LABEL: test_i16_2032_mask_ashr_5:
; X86:       # %bb.0:
; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    shrl $5, %eax
; X86-NEXT:    andl $63, %eax
; X86-NEXT:    # kill: def $ax killed $ax killed $eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i16_2032_mask_ashr_5:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    shrl $5, %eax
; X64-NEXT:    andl $63, %eax
; X64-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-NEXT:    retq
  %t0 = and i16 %a0, 2032
  %t1 = ashr i16 %t0, 5
  ret i16 %t1
}
define i16 @test_i16_2032_mask_ashr_6(i16 %a0) {
; X86-LABEL: test_i16_2032_mask_ashr_6:
; X86:       # %bb.0:
; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    shrl $6, %eax
; X86-NEXT:    andl $31, %eax
; X86-NEXT:    # kill: def $ax killed $ax killed $eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i16_2032_mask_ashr_6:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    shrl $6, %eax
; X64-NEXT:    andl $31, %eax
; X64-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-NEXT:    retq
  %t0 = and i16 %a0, 2032
  %t1 = ashr i16 %t0, 6
  ret i16 %t1
}

define i16 @test_i16_65024_mask_ashr_1(i16 %a0) {
; X86-LABEL: test_i16_65024_mask_ashr_1:
; X86:       # %bb.0:
; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andl $65024, %eax # imm = 0xFE00
; X86-NEXT:    cwtl
; X86-NEXT:    shrl %eax
; X86-NEXT:    # kill: def $ax killed $ax killed $eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i16_65024_mask_ashr_1:
; X64:       # %bb.0:
; X64-NEXT:    andl $65024, %edi # imm = 0xFE00
; X64-NEXT:    movswl %di, %eax
; X64-NEXT:    shrl %eax
; X64-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-NEXT:    retq
  %t0 = and i16 %a0, 65024
  %t1 = ashr i16 %t0, 1
  ret i16 %t1
}
define i16 @test_i16_65024_mask_ashr_8(i16 %a0) {
; X86-LABEL: test_i16_65024_mask_ashr_8:
; X86:       # %bb.0:
; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andl $65024, %eax # imm = 0xFE00
; X86-NEXT:    cwtl
; X86-NEXT:    shrl $8, %eax
; X86-NEXT:    # kill: def $ax killed $ax killed $eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i16_65024_mask_ashr_8:
; X64:       # %bb.0:
; X64-NEXT:    andl $65024, %edi # imm = 0xFE00
; X64-NEXT:    movswl %di, %eax
; X64-NEXT:    shrl $8, %eax
; X64-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-NEXT:    retq
  %t0 = and i16 %a0, 65024
  %t1 = ashr i16 %t0, 8
  ret i16 %t1
}
define i16 @test_i16_65024_mask_ashr_9(i16 %a0) {
; X86-LABEL: test_i16_65024_mask_ashr_9:
; X86:       # %bb.0:
; X86-NEXT:    movswl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    shrl $9, %eax
; X86-NEXT:    # kill: def $ax killed $ax killed $eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i16_65024_mask_ashr_9:
; X64:       # %bb.0:
; X64-NEXT:    movswl %di, %eax
; X64-NEXT:    shrl $9, %eax
; X64-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-NEXT:    retq
  %t0 = and i16 %a0, 65024
  %t1 = ashr i16 %t0, 9
  ret i16 %t1
}
define i16 @test_i16_65024_mask_ashr_10(i16 %a0) {
; X86-LABEL: test_i16_65024_mask_ashr_10:
; X86:       # %bb.0:
; X86-NEXT:    movswl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    shrl $10, %eax
; X86-NEXT:    # kill: def $ax killed $ax killed $eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i16_65024_mask_ashr_10:
; X64:       # %bb.0:
; X64-NEXT:    movswl %di, %eax
; X64-NEXT:    shrl $10, %eax
; X64-NEXT:    # kill: def $ax killed $ax killed $eax
; X64-NEXT:    retq
  %t0 = and i16 %a0, 65024
  %t1 = ashr i16 %t0, 10
  ret i16 %t1
}

; shl

897define i16 @test_i16_127_mask_shl_1(i16 %a0) {
898; X86-LABEL: test_i16_127_mask_shl_1:
899; X86:       # %bb.0:
900; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
901; X86-NEXT:    andl $127, %eax
902; X86-NEXT:    addl %eax, %eax
903; X86-NEXT:    # kill: def $ax killed $ax killed $eax
904; X86-NEXT:    retl
905;
906; X64-LABEL: test_i16_127_mask_shl_1:
907; X64:       # %bb.0:
908; X64-NEXT:    # kill: def $edi killed $edi def $rdi
909; X64-NEXT:    andl $127, %edi
910; X64-NEXT:    leal (%rdi,%rdi), %eax
911; X64-NEXT:    # kill: def $ax killed $ax killed $eax
912; X64-NEXT:    retq
913  %t0 = and i16 %a0, 127
914  %t1 = shl i16 %t0, 1
915  ret i16 %t1
916}
917define i16 @test_i16_127_mask_shl_8(i16 %a0) {
918; X86-LABEL: test_i16_127_mask_shl_8:
919; X86:       # %bb.0:
920; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
921; X86-NEXT:    andl $127, %eax
922; X86-NEXT:    shll $8, %eax
923; X86-NEXT:    # kill: def $ax killed $ax killed $eax
924; X86-NEXT:    retl
925;
926; X64-LABEL: test_i16_127_mask_shl_8:
927; X64:       # %bb.0:
928; X64-NEXT:    movl %edi, %eax
929; X64-NEXT:    andl $127, %eax
930; X64-NEXT:    shll $8, %eax
931; X64-NEXT:    # kill: def $ax killed $ax killed $eax
932; X64-NEXT:    retq
933  %t0 = and i16 %a0, 127
934  %t1 = shl i16 %t0, 8
935  ret i16 %t1
936}
937define i16 @test_i16_127_mask_shl_9(i16 %a0) {
938; X86-LABEL: test_i16_127_mask_shl_9:
939; X86:       # %bb.0:
940; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
941; X86-NEXT:    shll $9, %eax
942; X86-NEXT:    # kill: def $ax killed $ax killed $eax
943; X86-NEXT:    retl
944;
945; X64-LABEL: test_i16_127_mask_shl_9:
946; X64:       # %bb.0:
947; X64-NEXT:    movl %edi, %eax
948; X64-NEXT:    shll $9, %eax
949; X64-NEXT:    # kill: def $ax killed $ax killed $eax
950; X64-NEXT:    retq
951  %t0 = and i16 %a0, 127
952  %t1 = shl i16 %t0, 9
953  ret i16 %t1
954}
955define i16 @test_i16_127_mask_shl_10(i16 %a0) {
956; X86-LABEL: test_i16_127_mask_shl_10:
957; X86:       # %bb.0:
958; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
959; X86-NEXT:    shll $10, %eax
960; X86-NEXT:    # kill: def $ax killed $ax killed $eax
961; X86-NEXT:    retl
962;
963; X64-LABEL: test_i16_127_mask_shl_10:
964; X64:       # %bb.0:
965; X64-NEXT:    movl %edi, %eax
966; X64-NEXT:    shll $10, %eax
967; X64-NEXT:    # kill: def $ax killed $ax killed $eax
968; X64-NEXT:    retq
969  %t0 = and i16 %a0, 127
970  %t1 = shl i16 %t0, 10
971  ret i16 %t1
972}
973
974define i16 @test_i16_2032_mask_shl_3(i16 %a0) {
975; X86-LABEL: test_i16_2032_mask_shl_3:
976; X86:       # %bb.0:
977; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
978; X86-NEXT:    andl $2032, %eax # imm = 0x7F0
979; X86-NEXT:    shll $3, %eax
980; X86-NEXT:    # kill: def $ax killed $ax killed $eax
981; X86-NEXT:    retl
982;
983; X64-LABEL: test_i16_2032_mask_shl_3:
984; X64:       # %bb.0:
985; X64-NEXT:    # kill: def $edi killed $edi def $rdi
986; X64-NEXT:    andl $2032, %edi # imm = 0x7F0
987; X64-NEXT:    leal (,%rdi,8), %eax
988; X64-NEXT:    # kill: def $ax killed $ax killed $eax
989; X64-NEXT:    retq
990  %t0 = and i16 %a0, 2032
991  %t1 = shl i16 %t0, 3
992  ret i16 %t1
993}
994define i16 @test_i16_2032_mask_shl_4(i16 %a0) {
995; X86-LABEL: test_i16_2032_mask_shl_4:
996; X86:       # %bb.0:
997; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
998; X86-NEXT:    andl $2032, %eax # imm = 0x7F0
999; X86-NEXT:    shll $4, %eax
1000; X86-NEXT:    # kill: def $ax killed $ax killed $eax
1001; X86-NEXT:    retl
1002;
1003; X64-LABEL: test_i16_2032_mask_shl_4:
1004; X64:       # %bb.0:
1005; X64-NEXT:    movl %edi, %eax
1006; X64-NEXT:    andl $2032, %eax # imm = 0x7F0
1007; X64-NEXT:    shll $4, %eax
1008; X64-NEXT:    # kill: def $ax killed $ax killed $eax
1009; X64-NEXT:    retq
1010  %t0 = and i16 %a0, 2032
1011  %t1 = shl i16 %t0, 4
1012  ret i16 %t1
1013}
1014define i16 @test_i16_2032_mask_shl_5(i16 %a0) {
1015; X86-LABEL: test_i16_2032_mask_shl_5:
1016; X86:       # %bb.0:
1017; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
1018; X86-NEXT:    andl $2032, %eax # imm = 0x7F0
1019; X86-NEXT:    shll $5, %eax
1020; X86-NEXT:    # kill: def $ax killed $ax killed $eax
1021; X86-NEXT:    retl
1022;
1023; X64-LABEL: test_i16_2032_mask_shl_5:
1024; X64:       # %bb.0:
1025; X64-NEXT:    movl %edi, %eax
1026; X64-NEXT:    andl $2032, %eax # imm = 0x7F0
1027; X64-NEXT:    shll $5, %eax
1028; X64-NEXT:    # kill: def $ax killed $ax killed $eax
1029; X64-NEXT:    retq
1030  %t0 = and i16 %a0, 2032
1031  %t1 = shl i16 %t0, 5
1032  ret i16 %t1
1033}
1034define i16 @test_i16_2032_mask_shl_6(i16 %a0) {
1035; X86-LABEL: test_i16_2032_mask_shl_6:
1036; X86:       # %bb.0:
1037; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
1038; X86-NEXT:    andl $1008, %eax # imm = 0x3F0
1039; X86-NEXT:    shll $6, %eax
1040; X86-NEXT:    # kill: def $ax killed $ax killed $eax
1041; X86-NEXT:    retl
1042;
1043; X64-LABEL: test_i16_2032_mask_shl_6:
1044; X64:       # %bb.0:
1045; X64-NEXT:    movl %edi, %eax
1046; X64-NEXT:    andl $1008, %eax # imm = 0x3F0
1047; X64-NEXT:    shll $6, %eax
1048; X64-NEXT:    # kill: def $ax killed $ax killed $eax
1049; X64-NEXT:    retq
; Shifting by 6 pushes the top bit of mask 0x7F0 past bit 15, so
; demanded-bits analysis narrows the emitted mask to 0x3F0 (1008).
1050  %t0 = and i16 %a0, 2032
1051  %t1 = shl i16 %t0, 6
1052  ret i16 %t1
1053}
1054
1055define i16 @test_i16_65024_mask_shl_1(i16 %a0) {
1056; X86-LABEL: test_i16_65024_mask_shl_1:
1057; X86:       # %bb.0:
1058; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
1059; X86-NEXT:    andl $32256, %eax # imm = 0x7E00
1060; X86-NEXT:    addl %eax, %eax
1061; X86-NEXT:    # kill: def $ax killed $ax killed $eax
1062; X86-NEXT:    retl
1063;
1064; X64-LABEL: test_i16_65024_mask_shl_1:
1065; X64:       # %bb.0:
1066; X64-NEXT:    # kill: def $edi killed $edi def $rdi
1067; X64-NEXT:    andl $32256, %edi # imm = 0x7E00
1068; X64-NEXT:    leal (%rdi,%rdi), %eax
1069; X64-NEXT:    # kill: def $ax killed $ax killed $eax
1070; X64-NEXT:    retq
; Mask 0xFE00 loses bit 15 when shifted left by 1, narrowing the emitted
; mask to 0x7E00; the shl-by-1 itself lowers to add (X86) / lea (X64).
1071  %t0 = and i16 %a0, 65024
1072  %t1 = shl i16 %t0, 1
1073  ret i16 %t1
1074}
1075
1076;------------------------------------------------------------------------------;
1077; 32-bit
1078;------------------------------------------------------------------------------;
1079
1080; lshr
1081
1082define i32 @test_i32_32767_mask_lshr_1(i32 %a0) {
1083; X86-LABEL: test_i32_32767_mask_lshr_1:
1084; X86:       # %bb.0:
1085; X86-NEXT:    movl $32766, %eax # imm = 0x7FFE
1086; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
1087; X86-NEXT:    shrl %eax
1088; X86-NEXT:    retl
1089;
1090; X64-LABEL: test_i32_32767_mask_lshr_1:
1091; X64:       # %bb.0:
1092; X64-NEXT:    movl %edi, %eax
1093; X64-NEXT:    andl $32766, %eax # imm = 0x7FFE
1094; X64-NEXT:    shrl %eax
1095; X64-NEXT:    retq
; lshr 1 discards bit 0 of mask 0x7FFF, so the emitted mask is 0x7FFE.
1096  %t0 = and i32 %a0, 32767
1097  %t1 = lshr i32 %t0, 1
1098  ret i32 %t1
1099}
1100
1101define i32 @test_i32_8388352_mask_lshr_7(i32 %a0) {
1102; X86-LABEL: test_i32_8388352_mask_lshr_7:
1103; X86:       # %bb.0:
1104; X86-NEXT:    movl $8388352, %eax # imm = 0x7FFF00
1105; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
1106; X86-NEXT:    shrl $7, %eax
1107; X86-NEXT:    retl
1108;
1109; X64-LABEL: test_i32_8388352_mask_lshr_7:
1110; X64:       # %bb.0:
1111; X64-NEXT:    movl %edi, %eax
1112; X64-NEXT:    andl $8388352, %eax # imm = 0x7FFF00
1113; X64-NEXT:    shrl $7, %eax
1114; X64-NEXT:    retq
; Mask 0x7FFF00 has 8 trailing zero bits, so an lshr of 7 loses no mask
; bits and the constant is emitted unchanged.
1115  %t0 = and i32 %a0, 8388352
1116  %t1 = lshr i32 %t0, 7
1117  ret i32 %t1
1118}
1119define i32 @test_i32_8388352_mask_lshr_8(i32 %a0) {
1120; X86-LABEL: test_i32_8388352_mask_lshr_8:
1121; X86:       # %bb.0:
1122; X86-NEXT:    movl $8388352, %eax # imm = 0x7FFF00
1123; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
1124; X86-NEXT:    shrl $8, %eax
1125; X86-NEXT:    retl
1126;
1127; X64-LABEL: test_i32_8388352_mask_lshr_8:
1128; X64:       # %bb.0:
1129; X64-NEXT:    movl %edi, %eax
1130; X64-NEXT:    andl $8388352, %eax # imm = 0x7FFF00
1131; X64-NEXT:    shrl $8, %eax
1132; X64-NEXT:    retq
; Shift amount exactly equals the mask's 8 trailing zeros: boundary case
; where every mask bit still survives, so the constant is unchanged.
1133  %t0 = and i32 %a0, 8388352
1134  %t1 = lshr i32 %t0, 8
1135  ret i32 %t1
1136}
1137define i32 @test_i32_8388352_mask_lshr_9(i32 %a0) {
1138; X86-LABEL: test_i32_8388352_mask_lshr_9:
1139; X86:       # %bb.0:
1140; X86-NEXT:    movl $8388096, %eax # imm = 0x7FFE00
1141; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
1142; X86-NEXT:    shrl $9, %eax
1143; X86-NEXT:    retl
1144;
1145; X64-LABEL: test_i32_8388352_mask_lshr_9:
1146; X64:       # %bb.0:
1147; X64-NEXT:    movl %edi, %eax
1148; X64-NEXT:    andl $8388096, %eax # imm = 0x7FFE00
1149; X64-NEXT:    shrl $9, %eax
1150; X64-NEXT:    retq
; lshr 9 drops the lowest set bit of 0x7FFF00, narrowing the emitted
; mask to 0x7FFE00 (8388096).
1151  %t0 = and i32 %a0, 8388352
1152  %t1 = lshr i32 %t0, 9
1153  ret i32 %t1
1154}
1155define i32 @test_i32_8388352_mask_lshr_10(i32 %a0) {
1156; X86-LABEL: test_i32_8388352_mask_lshr_10:
1157; X86:       # %bb.0:
1158; X86-NEXT:    movl $8387584, %eax # imm = 0x7FFC00
1159; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
1160; X86-NEXT:    shrl $10, %eax
1161; X86-NEXT:    retl
1162;
1163; X64-LABEL: test_i32_8388352_mask_lshr_10:
1164; X64:       # %bb.0:
1165; X64-NEXT:    movl %edi, %eax
1166; X64-NEXT:    andl $8387584, %eax # imm = 0x7FFC00
1167; X64-NEXT:    shrl $10, %eax
1168; X64-NEXT:    retq
; lshr 10 drops the two lowest set bits of 0x7FFF00, so the emitted
; mask narrows to 0x7FFC00 (8387584).
1169  %t0 = and i32 %a0, 8388352
1170  %t1 = lshr i32 %t0, 10
1171  ret i32 %t1
1172}
1173
1174define i32 @test_i32_4294836224_mask_lshr_1(i32 %a0) {
1175; X86-LABEL: test_i32_4294836224_mask_lshr_1:
1176; X86:       # %bb.0:
1177; X86-NEXT:    movl $-131072, %eax # imm = 0xFFFE0000
1178; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
1179; X86-NEXT:    shrl %eax
1180; X86-NEXT:    retl
1181;
1182; X64-LABEL: test_i32_4294836224_mask_lshr_1:
1183; X64:       # %bb.0:
1184; X64-NEXT:    movl %edi, %eax
1185; X64-NEXT:    andl $-131072, %eax # imm = 0xFFFE0000
1186; X64-NEXT:    shrl %eax
1187; X64-NEXT:    retq
; Mask 0xFFFE0000 has 17 trailing zeros, so an lshr of 1 loses nothing
; and the constant is emitted unchanged (as the signed imm -131072).
1188  %t0 = and i32 %a0, 4294836224
1189  %t1 = lshr i32 %t0, 1
1190  ret i32 %t1
1191}
1192define i32 @test_i32_4294836224_mask_lshr_16(i32 %a0) {
1193; X86-LABEL: test_i32_4294836224_mask_lshr_16:
1194; X86:       # %bb.0:
1195; X86-NEXT:    movl $-131072, %eax # imm = 0xFFFE0000
1196; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
1197; X86-NEXT:    shrl $16, %eax
1198; X86-NEXT:    retl
1199;
1200; X64-LABEL: test_i32_4294836224_mask_lshr_16:
1201; X64:       # %bb.0:
1202; X64-NEXT:    movl %edi, %eax
1203; X64-NEXT:    andl $-131072, %eax # imm = 0xFFFE0000
1204; X64-NEXT:    shrl $16, %eax
1205; X64-NEXT:    retq
; Shift of 16 is still below the mask's 17 trailing zeros, so the mask
; constant is kept unchanged.
1206  %t0 = and i32 %a0, 4294836224
1207  %t1 = lshr i32 %t0, 16
1208  ret i32 %t1
1209}
1210define i32 @test_i32_4294836224_mask_lshr_17(i32 %a0) {
1211; X86-LABEL: test_i32_4294836224_mask_lshr_17:
1212; X86:       # %bb.0:
1213; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
1214; X86-NEXT:    shrl $17, %eax
1215; X86-NEXT:    retl
1216;
1217; X64-LABEL: test_i32_4294836224_mask_lshr_17:
1218; X64:       # %bb.0:
1219; X64-NEXT:    movl %edi, %eax
1220; X64-NEXT:    shrl $17, %eax
1221; X64-NEXT:    retq
; With a shift equal to the mask's 17 trailing zeros, every bit left in
; the result comes from a mask-covered position, so the and folds away.
1222  %t0 = and i32 %a0, 4294836224
1223  %t1 = lshr i32 %t0, 17
1224  ret i32 %t1
1225}
1226define i32 @test_i32_4294836224_mask_lshr_18(i32 %a0) {
1227; X86-LABEL: test_i32_4294836224_mask_lshr_18:
1228; X86:       # %bb.0:
1229; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
1230; X86-NEXT:    shrl $18, %eax
1231; X86-NEXT:    retl
1232;
1233; X64-LABEL: test_i32_4294836224_mask_lshr_18:
1234; X64:       # %bb.0:
1235; X64-NEXT:    movl %edi, %eax
1236; X64-NEXT:    shrl $18, %eax
1237; X64-NEXT:    retq
; Shift exceeds the mask's trailing zeros; the remaining bits are all
; covered by the mask, so only the shift is emitted.
1238  %t0 = and i32 %a0, 4294836224
1239  %t1 = lshr i32 %t0, 18
1240  ret i32 %t1
1241}
1242
1243; ashr
1244
1245define i32 @test_i32_32767_mask_ashr_1(i32 %a0) {
1246; X86-LABEL: test_i32_32767_mask_ashr_1:
1247; X86:       # %bb.0:
1248; X86-NEXT:    movl $32766, %eax # imm = 0x7FFE
1249; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
1250; X86-NEXT:    shrl %eax
1251; X86-NEXT:    retl
1252;
1253; X64-LABEL: test_i32_32767_mask_ashr_1:
1254; X64:       # %bb.0:
1255; X64-NEXT:    movl %edi, %eax
1256; X64-NEXT:    andl $32766, %eax # imm = 0x7FFE
1257; X64-NEXT:    shrl %eax
1258; X64-NEXT:    retq
; Mask 0x7FFF clears the sign bit, so the ashr lowers to a logical shr;
; bit 0 is dropped from the mask as in the lshr variant.
1259  %t0 = and i32 %a0, 32767
1260  %t1 = ashr i32 %t0, 1
1261  ret i32 %t1
1262}
1263
1264define i32 @test_i32_8388352_mask_ashr_7(i32 %a0) {
1265; X86-LABEL: test_i32_8388352_mask_ashr_7:
1266; X86:       # %bb.0:
1267; X86-NEXT:    movl $8388352, %eax # imm = 0x7FFF00
1268; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
1269; X86-NEXT:    shrl $7, %eax
1270; X86-NEXT:    retl
1271;
1272; X64-LABEL: test_i32_8388352_mask_ashr_7:
1273; X64:       # %bb.0:
1274; X64-NEXT:    movl %edi, %eax
1275; X64-NEXT:    andl $8388352, %eax # imm = 0x7FFF00
1276; X64-NEXT:    shrl $7, %eax
1277; X64-NEXT:    retq
; Sign bit is known zero after masking with 0x7FFF00, so ashr lowers to
; shr; shift 7 < 8 trailing zeros leaves the mask unchanged.
1278  %t0 = and i32 %a0, 8388352
1279  %t1 = ashr i32 %t0, 7
1280  ret i32 %t1
1281}
1282define i32 @test_i32_8388352_mask_ashr_8(i32 %a0) {
1283; X86-LABEL: test_i32_8388352_mask_ashr_8:
1284; X86:       # %bb.0:
1285; X86-NEXT:    movl $8388352, %eax # imm = 0x7FFF00
1286; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
1287; X86-NEXT:    shrl $8, %eax
1288; X86-NEXT:    retl
1289;
1290; X64-LABEL: test_i32_8388352_mask_ashr_8:
1291; X64:       # %bb.0:
1292; X64-NEXT:    movl %edi, %eax
1293; X64-NEXT:    andl $8388352, %eax # imm = 0x7FFF00
1294; X64-NEXT:    shrl $8, %eax
1295; X64-NEXT:    retq
; ashr lowers to shr (sign known zero); shift equals the mask's trailing
; zeros, so the constant is preserved.
1296  %t0 = and i32 %a0, 8388352
1297  %t1 = ashr i32 %t0, 8
1298  ret i32 %t1
1299}
1300define i32 @test_i32_8388352_mask_ashr_9(i32 %a0) {
1301; X86-LABEL: test_i32_8388352_mask_ashr_9:
1302; X86:       # %bb.0:
1303; X86-NEXT:    movl $8388096, %eax # imm = 0x7FFE00
1304; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
1305; X86-NEXT:    shrl $9, %eax
1306; X86-NEXT:    retl
1307;
1308; X64-LABEL: test_i32_8388352_mask_ashr_9:
1309; X64:       # %bb.0:
1310; X64-NEXT:    movl %edi, %eax
1311; X64-NEXT:    andl $8388096, %eax # imm = 0x7FFE00
1312; X64-NEXT:    shrl $9, %eax
1313; X64-NEXT:    retq
; ashr lowers to shr (sign known zero); the lowest mask bit is shifted
; out, narrowing the constant to 0x7FFE00.
1314  %t0 = and i32 %a0, 8388352
1315  %t1 = ashr i32 %t0, 9
1316  ret i32 %t1
1317}
1318define i32 @test_i32_8388352_mask_ashr_10(i32 %a0) {
1319; X86-LABEL: test_i32_8388352_mask_ashr_10:
1320; X86:       # %bb.0:
1321; X86-NEXT:    movl $8387584, %eax # imm = 0x7FFC00
1322; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
1323; X86-NEXT:    shrl $10, %eax
1324; X86-NEXT:    retl
1325;
1326; X64-LABEL: test_i32_8388352_mask_ashr_10:
1327; X64:       # %bb.0:
1328; X64-NEXT:    movl %edi, %eax
1329; X64-NEXT:    andl $8387584, %eax # imm = 0x7FFC00
1330; X64-NEXT:    shrl $10, %eax
1331; X64-NEXT:    retq
; ashr lowers to shr (sign known zero); two mask bits are shifted out,
; narrowing the constant to 0x7FFC00.
1332  %t0 = and i32 %a0, 8388352
1333  %t1 = ashr i32 %t0, 10
1334  ret i32 %t1
1335}
1336
1337define i32 @test_i32_4294836224_mask_ashr_1(i32 %a0) {
1338; X86-LABEL: test_i32_4294836224_mask_ashr_1:
1339; X86:       # %bb.0:
1340; X86-NEXT:    movl $-131072, %eax # imm = 0xFFFE0000
1341; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
1342; X86-NEXT:    sarl %eax
1343; X86-NEXT:    retl
1344;
1345; X64-LABEL: test_i32_4294836224_mask_ashr_1:
1346; X64:       # %bb.0:
1347; X64-NEXT:    movl %edi, %eax
1348; X64-NEXT:    andl $-131072, %eax # imm = 0xFFFE0000
1349; X64-NEXT:    sarl %eax
1350; X64-NEXT:    retq
; Mask 0xFFFE0000 keeps the sign bit, so a true arithmetic sar must be
; emitted (contrast with the 0x7FFF-masked cases that become shr).
1351  %t0 = and i32 %a0, 4294836224
1352  %t1 = ashr i32 %t0, 1
1353  ret i32 %t1
1354}
1355define i32 @test_i32_4294836224_mask_ashr_16(i32 %a0) {
1356; X86-LABEL: test_i32_4294836224_mask_ashr_16:
1357; X86:       # %bb.0:
1358; X86-NEXT:    movl $-131072, %eax # imm = 0xFFFE0000
1359; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
1360; X86-NEXT:    sarl $16, %eax
1361; X86-NEXT:    retl
1362;
1363; X64-LABEL: test_i32_4294836224_mask_ashr_16:
1364; X64:       # %bb.0:
1365; X64-NEXT:    movl %edi, %eax
1366; X64-NEXT:    andl $-131072, %eax # imm = 0xFFFE0000
1367; X64-NEXT:    sarl $16, %eax
1368; X64-NEXT:    retq
; Sign bit survives the mask, so sar is kept; shift 16 < 17 trailing
; zeros means the mask constant is unchanged.
1369  %t0 = and i32 %a0, 4294836224
1370  %t1 = ashr i32 %t0, 16
1371  ret i32 %t1
1372}
1373define i32 @test_i32_4294836224_mask_ashr_17(i32 %a0) {
1374; X86-LABEL: test_i32_4294836224_mask_ashr_17:
1375; X86:       # %bb.0:
1376; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
1377; X86-NEXT:    sarl $17, %eax
1378; X86-NEXT:    retl
1379;
1380; X64-LABEL: test_i32_4294836224_mask_ashr_17:
1381; X64:       # %bb.0:
1382; X64-NEXT:    movl %edi, %eax
1383; X64-NEXT:    sarl $17, %eax
1384; X64-NEXT:    retq
; Shift equals the mask's 17 trailing zeros: all surviving (and sign-
; extended) bits come from masked positions, so the and folds away.
1385  %t0 = and i32 %a0, 4294836224
1386  %t1 = ashr i32 %t0, 17
1387  ret i32 %t1
1388}
1389define i32 @test_i32_4294836224_mask_ashr_18(i32 %a0) {
1390; X86-LABEL: test_i32_4294836224_mask_ashr_18:
1391; X86:       # %bb.0:
1392; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
1393; X86-NEXT:    sarl $18, %eax
1394; X86-NEXT:    retl
1395;
1396; X64-LABEL: test_i32_4294836224_mask_ashr_18:
1397; X64:       # %bb.0:
1398; X64-NEXT:    movl %edi, %eax
1399; X64-NEXT:    sarl $18, %eax
1400; X64-NEXT:    retq
; Shift exceeds the mask's trailing zeros, so only the arithmetic shift
; remains; the and folds away entirely.
1401  %t0 = and i32 %a0, 4294836224
1402  %t1 = ashr i32 %t0, 18
1403  ret i32 %t1
1404}
1405
1406; shl
1407
1408define i32 @test_i32_32767_mask_shl_1(i32 %a0) {
1409; X86-LABEL: test_i32_32767_mask_shl_1:
1410; X86:       # %bb.0:
1411; X86-NEXT:    movl $32767, %eax # imm = 0x7FFF
1412; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
1413; X86-NEXT:    addl %eax, %eax
1414; X86-NEXT:    retl
1415;
1416; X64-LABEL: test_i32_32767_mask_shl_1:
1417; X64:       # %bb.0:
1418; X64-NEXT:    # kill: def $edi killed $edi def $rdi
1419; X64-NEXT:    andl $32767, %edi # imm = 0x7FFF
1420; X64-NEXT:    leal (%rdi,%rdi), %eax
1421; X64-NEXT:    retq
; shl 1 keeps every bit of mask 0x7FFF inside i32, so the constant is
; unchanged; shl-by-1 lowers to add (X86) / lea (X64).
1422  %t0 = and i32 %a0, 32767
1423  %t1 = shl i32 %t0, 1
1424  ret i32 %t1
1425}
1426define i32 @test_i32_32767_mask_shl_16(i32 %a0) {
1427; X86-LABEL: test_i32_32767_mask_shl_16:
1428; X86:       # %bb.0:
1429; X86-NEXT:    movl $32767, %eax # imm = 0x7FFF
1430; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
1431; X86-NEXT:    shll $16, %eax
1432; X86-NEXT:    retl
1433;
1434; X64-LABEL: test_i32_32767_mask_shl_16:
1435; X64:       # %bb.0:
1436; X64-NEXT:    movl %edi, %eax
1437; X64-NEXT:    andl $32767, %eax # imm = 0x7FFF
1438; X64-NEXT:    shll $16, %eax
1439; X64-NEXT:    retq
; 0x7FFF << 16 = 0x7FFF0000 still fits in i32, so the mask is preserved.
1440  %t0 = and i32 %a0, 32767
1441  %t1 = shl i32 %t0, 16
1442  ret i32 %t1
1443}
1444define i32 @test_i32_32767_mask_shl_17(i32 %a0) {
1445; X86-LABEL: test_i32_32767_mask_shl_17:
1446; X86:       # %bb.0:
1447; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
1448; X86-NEXT:    shll $17, %eax
1449; X86-NEXT:    retl
1450;
1451; X64-LABEL: test_i32_32767_mask_shl_17:
1452; X64:       # %bb.0:
1453; X64-NEXT:    movl %edi, %eax
1454; X64-NEXT:    shll $17, %eax
1455; X64-NEXT:    retq
; shl 17 already discards every input bit the 0x7FFF mask would clear,
; so the and folds away and only the shift remains.
1456  %t0 = and i32 %a0, 32767
1457  %t1 = shl i32 %t0, 17
1458  ret i32 %t1
1459}
1460define i32 @test_i32_32767_mask_shl_18(i32 %a0) {
1461; X86-LABEL: test_i32_32767_mask_shl_18:
1462; X86:       # %bb.0:
1463; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
1464; X86-NEXT:    shll $18, %eax
1465; X86-NEXT:    retl
1466;
1467; X64-LABEL: test_i32_32767_mask_shl_18:
1468; X64:       # %bb.0:
1469; X64-NEXT:    movl %edi, %eax
1470; X64-NEXT:    shll $18, %eax
1471; X64-NEXT:    retq
; Same folding as the shl_17 case: the shift subsumes the mask, so only
; the shift instruction is emitted.
1472  %t0 = and i32 %a0, 32767
1473  %t1 = shl i32 %t0, 18
1474  ret i32 %t1
1475}
1476
1477define i32 @test_i32_8388352_mask_shl_7(i32 %a0) {
1478; X86-LABEL: test_i32_8388352_mask_shl_7:
1479; X86:       # %bb.0:
1480; X86-NEXT:    movl $8388352, %eax # imm = 0x7FFF00
1481; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
1482; X86-NEXT:    shll $7, %eax
1483; X86-NEXT:    retl
1484;
1485; X64-LABEL: test_i32_8388352_mask_shl_7:
1486; X64:       # %bb.0:
1487; X64-NEXT:    movl %edi, %eax
1488; X64-NEXT:    andl $8388352, %eax # imm = 0x7FFF00
1489; X64-NEXT:    shll $7, %eax
1490; X64-NEXT:    retq
; 0x7FFF00 << 7 stays within i32, so the mask constant is preserved.
1491  %t0 = and i32 %a0, 8388352
1492  %t1 = shl i32 %t0, 7
1493  ret i32 %t1
1494}
1495define i32 @test_i32_8388352_mask_shl_8(i32 %a0) {
1496; X86-LABEL: test_i32_8388352_mask_shl_8:
1497; X86:       # %bb.0:
1498; X86-NEXT:    movl $8388352, %eax # imm = 0x7FFF00
1499; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
1500; X86-NEXT:    shll $8, %eax
1501; X86-NEXT:    retl
1502;
1503; X64-LABEL: test_i32_8388352_mask_shl_8:
1504; X64:       # %bb.0:
1505; X64-NEXT:    movl %edi, %eax
1506; X64-NEXT:    andl $8388352, %eax # imm = 0x7FFF00
1507; X64-NEXT:    shll $8, %eax
1508; X64-NEXT:    retq
; 0x7FFF00 << 8 = 0x7FFF0000 still fits in i32; mask unchanged.
1509  %t0 = and i32 %a0, 8388352
1510  %t1 = shl i32 %t0, 8
1511  ret i32 %t1
1512}
1513define i32 @test_i32_8388352_mask_shl_9(i32 %a0) {
1514; X86-LABEL: test_i32_8388352_mask_shl_9:
1515; X86:       # %bb.0:
1516; X86-NEXT:    movl $8388352, %eax # imm = 0x7FFF00
1517; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
1518; X86-NEXT:    shll $9, %eax
1519; X86-NEXT:    retl
1520;
1521; X64-LABEL: test_i32_8388352_mask_shl_9:
1522; X64:       # %bb.0:
1523; X64-NEXT:    movl %edi, %eax
1524; X64-NEXT:    andl $8388352, %eax # imm = 0x7FFF00
1525; X64-NEXT:    shll $9, %eax
1526; X64-NEXT:    retq
; shl 9 moves the top mask bit (bit 22) to bit 31 — still in range, so
; the mask constant survives.
1527  %t0 = and i32 %a0, 8388352
1528  %t1 = shl i32 %t0, 9
1529  ret i32 %t1
1530}
1531define i32 @test_i32_8388352_mask_shl_10(i32 %a0) {
1532; X86-LABEL: test_i32_8388352_mask_shl_10:
1533; X86:       # %bb.0:
1534; X86-NEXT:    movl $4194048, %eax # imm = 0x3FFF00
1535; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
1536; X86-NEXT:    shll $10, %eax
1537; X86-NEXT:    retl
1538;
1539; X64-LABEL: test_i32_8388352_mask_shl_10:
1540; X64:       # %bb.0:
1541; X64-NEXT:    movl %edi, %eax
1542; X64-NEXT:    andl $4194048, %eax # imm = 0x3FFF00
1543; X64-NEXT:    shll $10, %eax
1544; X64-NEXT:    retq
; shl 10 pushes the top mask bit past bit 31, so the emitted mask
; narrows to 0x3FFF00 (4194048).
1545  %t0 = and i32 %a0, 8388352
1546  %t1 = shl i32 %t0, 10
1547  ret i32 %t1
1548}
1549
1550define i32 @test_i32_4294836224_mask_shl_1(i32 %a0) {
1551; X86-LABEL: test_i32_4294836224_mask_shl_1:
1552; X86:       # %bb.0:
1553; X86-NEXT:    movl $2147352576, %eax # imm = 0x7FFE0000
1554; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
1555; X86-NEXT:    addl %eax, %eax
1556; X86-NEXT:    retl
1557;
1558; X64-LABEL: test_i32_4294836224_mask_shl_1:
1559; X64:       # %bb.0:
1560; X64-NEXT:    # kill: def $edi killed $edi def $rdi
1561; X64-NEXT:    andl $2147352576, %edi # imm = 0x7FFE0000
1562; X64-NEXT:    leal (%rdi,%rdi), %eax
1563; X64-NEXT:    retq
; shl 1 discards bit 31 of mask 0xFFFE0000, narrowing it to 0x7FFE0000;
; the shift lowers to add (X86) / lea (X64).
1564  %t0 = and i32 %a0, 4294836224
1565  %t1 = shl i32 %t0, 1
1566  ret i32 %t1
1567}
1568
1569;------------------------------------------------------------------------------;
1570; 64-bit
1571;------------------------------------------------------------------------------;
1572
1573; lshr
1574
1575define i64 @test_i64_2147483647_mask_lshr_1(i64 %a0) {
1576; X86-LABEL: test_i64_2147483647_mask_lshr_1:
1577; X86:       # %bb.0:
1578; X86-NEXT:    movl $2147483646, %eax # imm = 0x7FFFFFFE
1579; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
1580; X86-NEXT:    shrl %eax
1581; X86-NEXT:    xorl %edx, %edx
1582; X86-NEXT:    retl
1583;
1584; X64-LABEL: test_i64_2147483647_mask_lshr_1:
1585; X64:       # %bb.0:
1586; X64-NEXT:    movq %rdi, %rax
1587; X64-NEXT:    andl $2147483646, %eax # imm = 0x7FFFFFFE
1588; X64-NEXT:    shrl %eax
1589; X64-NEXT:    retq
; The masked result fits in 32 bits, so 32-bit ops suffice: i686 zeroes
; the high half (edx); x64 relies on the 32-bit and/shr zero-extending.
1590  %t0 = and i64 %a0, 2147483647
1591  %t1 = lshr i64 %t0, 1
1592  ret i64 %t1
1593}
1594
1595define i64 @test_i64_140737488289792_mask_lshr_15(i64 %a0) {
1596; X86-LABEL: test_i64_140737488289792_mask_lshr_15:
1597; X86:       # %bb.0:
1598; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %ecx
1599; X86-NEXT:    shll $16, %ecx
1600; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
1601; X86-NEXT:    shldl $17, %ecx, %eax
1602; X86-NEXT:    xorl %edx, %edx
1603; X86-NEXT:    retl
1604;
1605; X64-LABEL: test_i64_140737488289792_mask_lshr_15:
1606; X64:       # %bb.0:
1607; X64-NEXT:    movabsq $140737488289792, %rax # imm = 0x7FFFFFFF0000
1608; X64-NEXT:    andq %rdi, %rax
1609; X64-NEXT:    shrq $15, %rax
1610; X64-NEXT:    retq
; Mask 0x7FFFFFFF0000 straddles the 32-bit halves: x64 keeps the whole
; 64-bit and + shrq; i686 stitches the result with shll/shldl.
1611  %t0 = and i64 %a0, 140737488289792
1612  %t1 = lshr i64 %t0, 15
1613  ret i64 %t1
1614}
1615define i64 @test_i64_140737488289792_mask_lshr_16(i64 %a0) {
1616; X86-LABEL: test_i64_140737488289792_mask_lshr_16:
1617; X86:       # %bb.0:
1618; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
1619; X86-NEXT:    movl $32767, %eax # imm = 0x7FFF
1620; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
1621; X86-NEXT:    shldl $16, %ecx, %eax
1622; X86-NEXT:    xorl %edx, %edx
1623; X86-NEXT:    retl
1624;
1625; X64-LABEL: test_i64_140737488289792_mask_lshr_16:
1626; X64:       # %bb.0:
1627; X64-NEXT:    movq %rdi, %rax
1628; X64-NEXT:    shrq $16, %rax
1629; X64-NEXT:    andl $2147483647, %eax # imm = 0x7FFFFFFF
1630; X64-NEXT:    retq
; x64 reorders to shift-then-mask: after shrq 16 the mask becomes the
; 32-bit immediate 0x7FFFFFFF, avoiding a movabsq of the 64-bit mask.
1631  %t0 = and i64 %a0, 140737488289792
1632  %t1 = lshr i64 %t0, 16
1633  ret i64 %t1
1634}
1635define i64 @test_i64_140737488289792_mask_lshr_17(i64 %a0) {
1636; X86-LABEL: test_i64_140737488289792_mask_lshr_17:
1637; X86:       # %bb.0:
1638; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
1639; X86-NEXT:    movl $32767, %eax # imm = 0x7FFF
1640; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
1641; X86-NEXT:    shldl $15, %ecx, %eax
1642; X86-NEXT:    xorl %edx, %edx
1643; X86-NEXT:    retl
1644;
1645; X64-LABEL: test_i64_140737488289792_mask_lshr_17:
1646; X64:       # %bb.0:
1647; X64-NEXT:    movq %rdi, %rax
1648; X64-NEXT:    shrq $17, %rax
1649; X64-NEXT:    andl $1073741823, %eax # imm = 0x3FFFFFFF
1650; X64-NEXT:    retq
; Same shift-then-mask reordering on x64; the post-shift mask shrinks to
; 0x3FFFFFFF. i686 combines the halves with shldl.
1651  %t0 = and i64 %a0, 140737488289792
1652  %t1 = lshr i64 %t0, 17
1653  ret i64 %t1
1654}
1655define i64 @test_i64_140737488289792_mask_lshr_18(i64 %a0) {
1656; X86-LABEL: test_i64_140737488289792_mask_lshr_18:
1657; X86:       # %bb.0:
1658; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
1659; X86-NEXT:    movl $32767, %eax # imm = 0x7FFF
1660; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
1661; X86-NEXT:    shldl $14, %ecx, %eax
1662; X86-NEXT:    xorl %edx, %edx
1663; X86-NEXT:    retl
1664;
1665; X64-LABEL: test_i64_140737488289792_mask_lshr_18:
1666; X64:       # %bb.0:
1667; X64-NEXT:    movq %rdi, %rax
1668; X64-NEXT:    shrq $18, %rax
1669; X64-NEXT:    andl $536870911, %eax # imm = 0x1FFFFFFF
1670; X64-NEXT:    retq
; Shift-then-mask again; the post-shift mask is 0x1FFFFFFF and i686 uses
; shldl $14 to pull the surviving bits into the low half.
1671  %t0 = and i64 %a0, 140737488289792
1672  %t1 = lshr i64 %t0, 18
1673  ret i64 %t1
1674}
1675
1676define i64 @test_i64_18446744065119617024_mask_lshr_1(i64 %a0) {
1677; X86-LABEL: test_i64_18446744065119617024_mask_lshr_1:
1678; X86:       # %bb.0:
1679; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
1680; X86-NEXT:    shrl %edx
1681; X86-NEXT:    xorl %eax, %eax
1682; X86-NEXT:    retl
1683;
1684; X64-LABEL: test_i64_18446744065119617024_mask_lshr_1:
1685; X64:       # %bb.0:
1686; X64-NEXT:    movabsq $-8589934592, %rax # imm = 0xFFFFFFFE00000000
1687; X64-NEXT:    andq %rdi, %rax
1688; X64-NEXT:    shrq %rax
1689; X64-NEXT:    retq
; Mask 0xFFFFFFFE00000000 covers only the high dword, so i686 operates
; on the high half alone (result low dword eax is zeroed).
1690  %t0 = and i64 %a0, 18446744065119617024
1691  %t1 = lshr i64 %t0, 1
1692  ret i64 %t1
1693}
1694define i64 @test_i64_18446744065119617024_mask_lshr_32(i64 %a0) {
1695; X86-LABEL: test_i64_18446744065119617024_mask_lshr_32:
1696; X86:       # %bb.0:
1697; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
1698; X86-NEXT:    andl $-2, %eax
1699; X86-NEXT:    xorl %edx, %edx
1700; X86-NEXT:    retl
1701;
1702; X64-LABEL: test_i64_18446744065119617024_mask_lshr_32:
1703; X64:       # %bb.0:
1704; X64-NEXT:    movabsq $-8589934592, %rax # imm = 0xFFFFFFFE00000000
1705; X64-NEXT:    andq %rdi, %rax
1706; X64-NEXT:    shrq $32, %rax
1707; X64-NEXT:    retq
; lshr 32 just selects the high dword, so i686 reduces the whole
; operation to a 32-bit and $-2 (0xFFFFFFFE) on that half.
1708  %t0 = and i64 %a0, 18446744065119617024
1709  %t1 = lshr i64 %t0, 32
1710  ret i64 %t1
1711}
1712define i64 @test_i64_18446744065119617024_mask_lshr_33(i64 %a0) {
1713; X86-LABEL: test_i64_18446744065119617024_mask_lshr_33:
1714; X86:       # %bb.0:
1715; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
1716; X86-NEXT:    shrl %eax
1717; X86-NEXT:    xorl %edx, %edx
1718; X86-NEXT:    retl
1719;
1720; X64-LABEL: test_i64_18446744065119617024_mask_lshr_33:
1721; X64:       # %bb.0:
1722; X64-NEXT:    movq %rdi, %rax
1723; X64-NEXT:    shrq $33, %rax
1724; X64-NEXT:    retq
; Shift equals the mask's 33 trailing zeros, so the and folds away on
; both targets; i686 does a single 32-bit shr on the high dword.
1725  %t0 = and i64 %a0, 18446744065119617024
1726  %t1 = lshr i64 %t0, 33
1727  ret i64 %t1
1728}
1729define i64 @test_i64_18446744065119617024_mask_lshr_34(i64 %a0) {
1730; X86-LABEL: test_i64_18446744065119617024_mask_lshr_34:
1731; X86:       # %bb.0:
1732; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
1733; X86-NEXT:    shrl $2, %eax
1734; X86-NEXT:    xorl %edx, %edx
1735; X86-NEXT:    retl
1736;
1737; X64-LABEL: test_i64_18446744065119617024_mask_lshr_34:
1738; X64:       # %bb.0:
1739; X64-NEXT:    movq %rdi, %rax
1740; X64-NEXT:    shrq $34, %rax
1741; X64-NEXT:    retq
; Shift exceeds the mask's trailing zeros, so only the shift survives;
; i686 shifts the high dword by the residual 34-32 = 2 bits.
1742  %t0 = and i64 %a0, 18446744065119617024
1743  %t1 = lshr i64 %t0, 34
1744  ret i64 %t1
1745}
1746
1747; ashr
1748
1749define i64 @test_i64_2147483647_mask_ashr_1(i64 %a0) {
1750; X86-LABEL: test_i64_2147483647_mask_ashr_1:
1751; X86:       # %bb.0:
1752; X86-NEXT:    movl $2147483646, %eax # imm = 0x7FFFFFFE
1753; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
1754; X86-NEXT:    shrl %eax
1755; X86-NEXT:    xorl %edx, %edx
1756; X86-NEXT:    retl
1757;
1758; X64-LABEL: test_i64_2147483647_mask_ashr_1:
1759; X64:       # %bb.0:
1760; X64-NEXT:    movq %rdi, %rax
1761; X64-NEXT:    andl $2147483646, %eax # imm = 0x7FFFFFFE
1762; X64-NEXT:    shrl %eax
1763; X64-NEXT:    retq
; Mask 0x7FFFFFFF clears bit 63, so ashr lowers to a logical shr and the
; whole computation fits in 32-bit ops (same code as the lshr variant).
1764  %t0 = and i64 %a0, 2147483647
1765  %t1 = ashr i64 %t0, 1
1766  ret i64 %t1
1767}
1768
1769define i64 @test_i64_140737488289792_mask_ashr_15(i64 %a0) {
1770; X86-LABEL: test_i64_140737488289792_mask_ashr_15:
1771; X86:       # %bb.0:
1772; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %ecx
1773; X86-NEXT:    shll $16, %ecx
1774; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
1775; X86-NEXT:    shldl $17, %ecx, %eax
1776; X86-NEXT:    xorl %edx, %edx
1777; X86-NEXT:    retl
1778;
1779; X64-LABEL: test_i64_140737488289792_mask_ashr_15:
1780; X64:       # %bb.0:
1781; X64-NEXT:    movabsq $140737488289792, %rax # imm = 0x7FFFFFFF0000
1782; X64-NEXT:    andq %rdi, %rax
1783; X64-NEXT:    shrq $15, %rax
1784; X64-NEXT:    retq
; Mask 0x7FFFFFFF0000 clears bit 63, so ashr lowers to a logical shrq —
; the output matches the lshr_15 variant exactly.
1785  %t0 = and i64 %a0, 140737488289792
1786  %t1 = ashr i64 %t0, 15
1787  ret i64 %t1
1788}
1789define i64 @test_i64_140737488289792_mask_ashr_16(i64 %a0) {
1790; X86-LABEL: test_i64_140737488289792_mask_ashr_16:
1791; X86:       # %bb.0:
1792; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
1793; X86-NEXT:    movl $32767, %eax # imm = 0x7FFF
1794; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
1795; X86-NEXT:    shldl $16, %ecx, %eax
1796; X86-NEXT:    xorl %edx, %edx
1797; X86-NEXT:    retl
1798;
1799; X64-LABEL: test_i64_140737488289792_mask_ashr_16:
1800; X64:       # %bb.0:
1801; X64-NEXT:    movq %rdi, %rax
1802; X64-NEXT:    shrq $16, %rax
1803; X64-NEXT:    andl $2147483647, %eax # imm = 0x7FFFFFFF
1804; X64-NEXT:    retq
; Sign known zero after masking: ashr becomes lshr, then x64 reorders to
; shift-then-mask with the 32-bit immediate 0x7FFFFFFF.
1805  %t0 = and i64 %a0, 140737488289792
1806  %t1 = ashr i64 %t0, 16
1807  ret i64 %t1
1808}
1809define i64 @test_i64_140737488289792_mask_ashr_17(i64 %a0) {
1810; X86-LABEL: test_i64_140737488289792_mask_ashr_17:
1811; X86:       # %bb.0:
1812; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
1813; X86-NEXT:    movl $32767, %eax # imm = 0x7FFF
1814; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
1815; X86-NEXT:    shldl $15, %ecx, %eax
1816; X86-NEXT:    xorl %edx, %edx
1817; X86-NEXT:    retl
1818;
1819; X64-LABEL: test_i64_140737488289792_mask_ashr_17:
1820; X64:       # %bb.0:
1821; X64-NEXT:    movq %rdi, %rax
1822; X64-NEXT:    shrq $17, %rax
1823; X64-NEXT:    andl $1073741823, %eax # imm = 0x3FFFFFFF
1824; X64-NEXT:    retq
; ashr lowered to lshr (sign known zero); identical output to the
; lshr_17 variant with post-shift mask 0x3FFFFFFF.
1825  %t0 = and i64 %a0, 140737488289792
1826  %t1 = ashr i64 %t0, 17
1827  ret i64 %t1
1828}
1829define i64 @test_i64_140737488289792_mask_ashr_18(i64 %a0) {
1830; X86-LABEL: test_i64_140737488289792_mask_ashr_18:
1831; X86:       # %bb.0:
1832; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
1833; X86-NEXT:    movl $32767, %eax # imm = 0x7FFF
1834; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
1835; X86-NEXT:    shldl $14, %ecx, %eax
1836; X86-NEXT:    xorl %edx, %edx
1837; X86-NEXT:    retl
1838;
1839; X64-LABEL: test_i64_140737488289792_mask_ashr_18:
1840; X64:       # %bb.0:
1841; X64-NEXT:    movq %rdi, %rax
1842; X64-NEXT:    shrq $18, %rax
1843; X64-NEXT:    andl $536870911, %eax # imm = 0x1FFFFFFF
1844; X64-NEXT:    retq
; ashr lowered to lshr (sign known zero); identical output to the
; lshr_18 variant with post-shift mask 0x1FFFFFFF.
1845  %t0 = and i64 %a0, 140737488289792
1846  %t1 = ashr i64 %t0, 18
1847  ret i64 %t1
1848}
1849
1850define i64 @test_i64_18446744065119617024_mask_ashr_1(i64 %a0) {
1851; X86-LABEL: test_i64_18446744065119617024_mask_ashr_1:
1852; X86:       # %bb.0:
1853; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
1854; X86-NEXT:    sarl %edx
1855; X86-NEXT:    xorl %eax, %eax
1856; X86-NEXT:    retl
1857;
1858; X64-LABEL: test_i64_18446744065119617024_mask_ashr_1:
1859; X64:       # %bb.0:
1860; X64-NEXT:    movabsq $-8589934592, %rax # imm = 0xFFFFFFFE00000000
1861; X64-NEXT:    andq %rdi, %rax
1862; X64-NEXT:    sarq %rax
1863; X64-NEXT:    retq
; Mask 0xFFFFFFFE00000000 keeps bit 63, so a true arithmetic sar is
; required; i686 only needs it on the high dword (low result is zero).
1864  %t0 = and i64 %a0, 18446744065119617024
1865  %t1 = ashr i64 %t0, 1
1866  ret i64 %t1
1867}
1868define i64 @test_i64_18446744065119617024_mask_ashr_32(i64 %a0) {
1869; X86-LABEL: test_i64_18446744065119617024_mask_ashr_32:
1870; X86:       # %bb.0:
1871; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
1872; X86-NEXT:    movl %edx, %eax
1873; X86-NEXT:    andl $-2, %eax
1874; X86-NEXT:    sarl $31, %edx
1875; X86-NEXT:    retl
1876;
1877; X64-LABEL: test_i64_18446744065119617024_mask_ashr_32:
1878; X64:       # %bb.0:
1879; X64-NEXT:    movabsq $-8589934592, %rax # imm = 0xFFFFFFFE00000000
1880; X64-NEXT:    andq %rdi, %rax
1881; X64-NEXT:    sarq $32, %rax
1882; X64-NEXT:    retq
; ashr 32 moves the masked high dword into the low half; i686 must also
; fill the new high dword with sign bits via sarl $31.
1883  %t0 = and i64 %a0, 18446744065119617024
1884  %t1 = ashr i64 %t0, 32
1885  ret i64 %t1
1886}
1887define i64 @test_i64_18446744065119617024_mask_ashr_33(i64 %a0) {
1888; X86-LABEL: test_i64_18446744065119617024_mask_ashr_33:
1889; X86:       # %bb.0:
1890; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
1891; X86-NEXT:    movl %edx, %eax
1892; X86-NEXT:    sarl %eax
1893; X86-NEXT:    sarl $31, %edx
1894; X86-NEXT:    retl
1895;
1896; X64-LABEL: test_i64_18446744065119617024_mask_ashr_33:
1897; X64:       # %bb.0:
1898; X64-NEXT:    movq %rdi, %rax
1899; X64-NEXT:    sarq $33, %rax
1900; X64-NEXT:    retq
; Shift equals the mask's 33 trailing zeros, so the and folds away;
; only the arithmetic shift (plus sign fill on i686) remains.
1901  %t0 = and i64 %a0, 18446744065119617024
1902  %t1 = ashr i64 %t0, 33
1903  ret i64 %t1
1904}
1905define i64 @test_i64_18446744065119617024_mask_ashr_34(i64 %a0) {
1906; X86-LABEL: test_i64_18446744065119617024_mask_ashr_34:
1907; X86:       # %bb.0:
1908; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
1909; X86-NEXT:    movl %edx, %eax
1910; X86-NEXT:    sarl $2, %eax
1911; X86-NEXT:    sarl $31, %edx
1912; X86-NEXT:    retl
1913;
1914; X64-LABEL: test_i64_18446744065119617024_mask_ashr_34:
1915; X64:       # %bb.0:
1916; X64-NEXT:    movq %rdi, %rax
1917; X64-NEXT:    sarq $34, %rax
1918; X64-NEXT:    retq
; Shift exceeds the mask's trailing zeros, so the and folds away; i686
; shifts the high dword by the residual 2 bits and sign-fills edx.
1919  %t0 = and i64 %a0, 18446744065119617024
1920  %t1 = ashr i64 %t0, 34
1921  ret i64 %t1
1922}
1923
1924; shl
1925
1926define i64 @test_i64_2147483647_mask_shl_1(i64 %a0) {
1927; X86-LABEL: test_i64_2147483647_mask_shl_1:
1928; X86:       # %bb.0:
1929; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
1930; X86-NEXT:    addl %eax, %eax
1931; X86-NEXT:    xorl %edx, %edx
1932; X86-NEXT:    retl
1933;
1934; X64-LABEL: test_i64_2147483647_mask_shl_1:
1935; X64:       # %bb.0:
1936; X64-NEXT:    leal (%rdi,%rdi), %eax
1937; X64-NEXT:    retq
; (x & 0x7FFFFFFF) << 1 fits in 32 bits, so the and folds into the
; 32-bit add/lea whose result is implicitly zero-extended to 64 bits.
1938  %t0 = and i64 %a0, 2147483647
1939  %t1 = shl i64 %t0, 1
1940  ret i64 %t1
1941}
1942define i64 @test_i64_2147483647_mask_shl_32(i64 %a0) {
1943; X86-LABEL: test_i64_2147483647_mask_shl_32:
1944; X86:       # %bb.0:
1945; X86-NEXT:    movl $2147483647, %edx # imm = 0x7FFFFFFF
1946; X86-NEXT:    andl {{[0-9]+}}(%esp), %edx
1947; X86-NEXT:    xorl %eax, %eax
1948; X86-NEXT:    retl
1949;
1950; X64-LABEL: test_i64_2147483647_mask_shl_32:
1951; X64:       # %bb.0:
1952; X64-NEXT:    movq %rdi, %rax
1953; X64-NEXT:    andl $2147483647, %eax # imm = 0x7FFFFFFF
1954; X64-NEXT:    shlq $32, %rax
1955; X64-NEXT:    retq
; shl 32 moves the masked low dword into the high half: i686 computes
; the high dword (edx) directly and zeroes the low dword (eax).
1956  %t0 = and i64 %a0, 2147483647
1957  %t1 = shl i64 %t0, 32
1958  ret i64 %t1
1959}
1960define i64 @test_i64_2147483647_mask_shl_33(i64 %a0) {
1961; X86-LABEL: test_i64_2147483647_mask_shl_33:
1962; X86:       # %bb.0:
1963; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
1964; X86-NEXT:    addl %edx, %edx
1965; X86-NEXT:    xorl %eax, %eax
1966; X86-NEXT:    retl
1967;
1968; X64-LABEL: test_i64_2147483647_mask_shl_33:
1969; X64:       # %bb.0:
1970; X64-NEXT:    movq %rdi, %rax
1971; X64-NEXT:    shlq $33, %rax
1972; X64-NEXT:    retq
; shl 33 already discards every bit the 0x7FFFFFFF mask would clear, so
; the and folds away; i686 shifts the (future) high dword by 1 via add.
1973  %t0 = and i64 %a0, 2147483647
1974  %t1 = shl i64 %t0, 33
1975  ret i64 %t1
1976}
1977define i64 @test_i64_2147483647_mask_shl_34(i64 %a0) {
1978; X86-LABEL: test_i64_2147483647_mask_shl_34:
1979; X86:       # %bb.0:
1980; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
1981; X86-NEXT:    shll $2, %edx
1982; X86-NEXT:    xorl %eax, %eax
1983; X86-NEXT:    retl
1984;
1985; X64-LABEL: test_i64_2147483647_mask_shl_34:
1986; X64:       # %bb.0:
1987; X64-NEXT:    movq %rdi, %rax
1988; X64-NEXT:    shlq $34, %rax
1989; X64-NEXT:    retq
; As in the shl_33 case the and folds away; i686 shifts the high dword
; by the residual 34-32 = 2 bits and zeroes the low dword.
1990  %t0 = and i64 %a0, 2147483647
1991  %t1 = shl i64 %t0, 34
1992  ret i64 %t1
1993}
1994
1995define i64 @test_i64_140737488289792_mask_shl_15(i64 %a0) {
1996; X86-LABEL: test_i64_140737488289792_mask_shl_15:
1997; X86:       # %bb.0:
1998; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
1999; X86-NEXT:    movl $32767, %edx # imm = 0x7FFF
2000; X86-NEXT:    andl {{[0-9]+}}(%esp), %edx
2001; X86-NEXT:    shldl $15, %eax, %edx
2002; X86-NEXT:    andl $65536, %eax # imm = 0x10000
2003; X86-NEXT:    shll $15, %eax
2004; X86-NEXT:    retl
2005;
2006; X64-LABEL: test_i64_140737488289792_mask_shl_15:
2007; X64:       # %bb.0:
2008; X64-NEXT:    movabsq $140737488289792, %rax # imm = 0x7FFFFFFF0000
2009; X64-NEXT:    andq %rdi, %rax
2010; X64-NEXT:    shlq $15, %rax
2011; X64-NEXT:    retq
; Only mask bit 16 remains in the low dword after shl 15, hence the
; i686 andl $0x10000 + shll $15 pair; x64 keeps the plain 64-bit form.
2012  %t0 = and i64 %a0, 140737488289792
2013  %t1 = shl i64 %t0, 15
2014  ret i64 %t1
2015}
define i64 @test_i64_140737488289792_mask_shl_16(i64 %a0) {
; X86-LABEL: test_i64_140737488289792_mask_shl_16:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl $32767, %edx # imm = 0x7FFF
; X86-NEXT:    andl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    shldl $16, %eax, %edx
; X86-NEXT:    xorl %eax, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i64_140737488289792_mask_shl_16:
; X64:       # %bb.0:
; X64-NEXT:    movabsq $140737488289792, %rax # imm = 0x7FFFFFFF0000
; X64-NEXT:    andq %rdi, %rax
; X64-NEXT:    shlq $16, %rax
; X64-NEXT:    retq
  ; Mask bits 16-46 shift to bits 32-62 — still in range, so the whole mask
  ; stays; on X86 the low result dword becomes zero (xorl %eax, %eax).
  %t0 = and i64 %a0, 140737488289792
  %t1 = shl i64 %t0, 16
  ret i64 %t1
}
define i64 @test_i64_140737488289792_mask_shl_17(i64 %a0) {
; X86-LABEL: test_i64_140737488289792_mask_shl_17:
; X86:       # %bb.0:
; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    shll $16, %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    shldl $17, %eax, %edx
; X86-NEXT:    xorl %eax, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i64_140737488289792_mask_shl_17:
; X64:       # %bb.0:
; X64-NEXT:    movabsq $140737488289792, %rax # imm = 0x7FFFFFFF0000
; X64-NEXT:    andq %rdi, %rax
; X64-NEXT:    shlq $17, %rax
; X64-NEXT:    retq
  ; Mask bits 16-46 shift to bits 33-63; the top mask bit lands exactly in
  ; bit 63 and still fits, so no part of the mask can be dropped yet.
  %t0 = and i64 %a0, 140737488289792
  %t1 = shl i64 %t0, 17
  ret i64 %t1
}
define i64 @test_i64_140737488289792_mask_shl_18(i64 %a0) {
; X86-LABEL: test_i64_140737488289792_mask_shl_18:
; X86:       # %bb.0:
; X86-NEXT:    movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    shll $16, %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    shldl $18, %eax, %edx
; X86-NEXT:    xorl %eax, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i64_140737488289792_mask_shl_18:
; X64:       # %bb.0:
; X64-NEXT:    movabsq $70368744112128, %rax # imm = 0x3FFFFFFF0000
; X64-NEXT:    andq %rdi, %rax
; X64-NEXT:    shlq $18, %rax
; X64-NEXT:    retq
  ; At shift 18 the top mask bit (46) would shift past bit 63, so it is dead:
  ; the X64 checks show the mask narrowed from 0x7FFFFFFF0000 to
  ; 0x3FFFFFFF0000 (bit 46 dropped).
  %t0 = and i64 %a0, 140737488289792
  %t1 = shl i64 %t0, 18
  ret i64 %t1
}
2076
define i64 @test_i64_18446744065119617024_mask_shl_1(i64 %a0) {
; X86-LABEL: test_i64_18446744065119617024_mask_shl_1:
; X86:       # %bb.0:
; X86-NEXT:    movl $2147483646, %edx # imm = 0x7FFFFFFE
; X86-NEXT:    andl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    addl %edx, %edx
; X86-NEXT:    xorl %eax, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_i64_18446744065119617024_mask_shl_1:
; X64:       # %bb.0:
; X64-NEXT:    movabsq $9223372028264841216, %rax # imm = 0x7FFFFFFE00000000
; X64-NEXT:    andq %rdi, %rax
; X64-NEXT:    addq %rax, %rax
; X64-NEXT:    retq
  ; Mask is 0xFFFFFFFE00000000 (bits 33-63); shl 1 discards bit 63, so the
  ; mask's top bit is dead and the X64 checks use the narrowed constant
  ; 0x7FFFFFFE00000000, with the shift lowered to addq %rax, %rax.
  %t0 = and i64 %a0, 18446744065119617024
  %t1 = shl i64 %t0, 1
  ret i64 %t1
}
2096