; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i686-pc-linux-gnu | FileCheck %s --check-prefix=X86
; RUN: llc < %s -mtriple=x86_64-pc-linux-gnu | FileCheck %s --check-prefix=X64

define i32 @from_cmpeq(i32 %xx, i32 %y) {
; X86-LABEL: from_cmpeq:
; X86:       # %bb.0:
; X86-NEXT:    xorl %eax, %eax
; X86-NEXT:    cmpl $9, {{[0-9]+}}(%esp)
; X86-NEXT:    sete %al
; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    retl
;
; X64-LABEL: from_cmpeq:
; X64:       # %bb.0:
; X64-NEXT:    xorl %eax, %eax
; X64-NEXT:    cmpl $9, %edi
; X64-NEXT:    sete %al
; X64-NEXT:    andl %esi, %eax
; X64-NEXT:    retq
  %x = icmp eq i32 %xx, 9
  %masked = and i32 %y, 1

  %r = select i1 %x, i32 %masked, i32 0
  ret i32 %r
}

define i32 @from_cmpeq_fail_bad_andmask(i32 %xx, i32 %y) {
; X86-LABEL: from_cmpeq_fail_bad_andmask:
; X86:       # %bb.0:
; X86-NEXT:    cmpl $9, {{[0-9]+}}(%esp)
; X86-NEXT:    je .LBB1_1
; X86-NEXT:  # %bb.2:
; X86-NEXT:    xorl %eax, %eax
; X86-NEXT:    retl
; X86-NEXT:  .LBB1_1:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andl $3, %eax
; X86-NEXT:    retl
;
; X64-LABEL: from_cmpeq_fail_bad_andmask:
; X64:       # %bb.0:
; X64-NEXT:    andl $3, %esi
; X64-NEXT:    xorl %eax, %eax
; X64-NEXT:    cmpl $9, %edi
; X64-NEXT:    cmovel %esi, %eax
; X64-NEXT:    retq
  %x = icmp eq i32 %xx, 9
  %masked = and i32 %y, 3
  %r = select i1 %x, i32 %masked, i32 0
  ret i32 %r
}

define i32 @from_i1(i1 %x, i32 %y) {
; X86-LABEL: from_i1:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andl $1, %eax
; X86-NEXT:    retl
;
; X64-LABEL: from_i1:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    andl %esi, %eax
; X64-NEXT:    andl $1, %eax
; X64-NEXT:    retq
  %masked = and i32 %y, 1
  %r = select i1 %x, i32 %masked, i32 0
  ret i32 %r
}

define i32 @from_trunc_i8(i8 %xx, i32 %y) {
; X86-LABEL: from_trunc_i8:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andl $1, %eax
; X86-NEXT:    retl
;
; X64-LABEL: from_trunc_i8:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    andl %esi, %eax
; X64-NEXT:    andl $1, %eax
; X64-NEXT:    retq
  %masked = and i32 %y, 1
  %x = trunc i8 %xx to i1
  %r = select i1 %x, i32 %masked, i32 0
  ret i32 %r
}

define i32 @from_trunc_i64(i64 %xx, i32 %y) {
; X86-LABEL: from_trunc_i64:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andl $1, %eax
; X86-NEXT:    retl
;
; X64-LABEL: from_trunc_i64:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    andl %esi, %eax
; X64-NEXT:    andl $1, %eax
; X64-NEXT:    # kill: def $eax killed $eax killed $rax
; X64-NEXT:    retq
  %masked = and i32 %y, 1
  %x = trunc i64 %xx to i1
  %r = select i1 %x, i32 %masked, i32 0
  ret i32 %r
}

define i32 @from_i1_fail_bad_select0(i1 %x, i32 %y) {
; X86-LABEL: from_i1_fail_bad_select0:
; X86:       # %bb.0:
; X86-NEXT:    testb $1, {{[0-9]+}}(%esp)
; X86-NEXT:    jne .LBB5_1
; X86-NEXT:  # %bb.2:
; X86-NEXT:    movl $1, %eax
; X86-NEXT:    retl
; X86-NEXT:  .LBB5_1:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andl $1, %eax
; X86-NEXT:    retl
;
; X64-LABEL: from_i1_fail_bad_select0:
; X64:       # %bb.0:
; X64-NEXT:    andl $1, %esi
; X64-NEXT:    testb $1, %dil
; X64-NEXT:    movl $1, %eax
; X64-NEXT:    cmovnel %esi, %eax
; X64-NEXT:    retq
  %masked = and i32 %y, 1
  %r = select i1 %x, i32 %masked, i32 1
  ret i32 %r
}

define i32 @from_i1_fail_bad_select1(i1 %x, i32 %y) {
; X86-LABEL: from_i1_fail_bad_select1:
; X86:       # %bb.0:
; X86-NEXT:    xorl %eax, %eax
; X86-NEXT:    testb $1, {{[0-9]+}}(%esp)
; X86-NEXT:    jne .LBB6_2
; X86-NEXT:  # %bb.1:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    andl $1, %eax
; X86-NEXT:  .LBB6_2:
; X86-NEXT:    retl
;
; X64-LABEL: from_i1_fail_bad_select1:
; X64:       # %bb.0:
; X64-NEXT:    andl $1, %esi
; X64-NEXT:    xorl %eax, %eax
; X64-NEXT:    testb $1, %dil
; X64-NEXT:    cmovel %esi, %eax
; X64-NEXT:    retq
  %masked = and i32 %y, 1
  %r = select i1 %x, i32 0, i32 %masked
  ret i32 %r
}