; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 5
; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+avx | FileCheck %s --check-prefixes=CHECK,X86
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx | FileCheck %s --check-prefixes=CHECK,X64

; Tests for lowering v8i32 icmp+sext under AVX1 (no AVX2, so 256-bit integer
; compares must otherwise split into two 128-bit vpcmp* halves). Judging by
; the test names, the *_bitcast / *_sitofp and *_fail_* / *_todo suffixes mark
; which cases are (or are not yet) expected to fold the integer compare into a
; single 256-bit float compare -- confirm against the PR below.
; NOTE(review): 2139095040 = 0x7F800000 (the f32 exponent mask) recurs in the
; masks used by several tests.

; PR82242
; (x & <7,3,...>) == 3: per the CHECKs this folds to a single 256-bit float
; compare (vcvtdq2ps + vcmpeqps) instead of two 128-bit vpcmpeqd halves.
define <8 x i32> @cmp_eq_bitcast(<8 x i32> %x) {
; X86-LABEL: cmp_eq_bitcast:
; X86:       # %bb.0:
; X86-NEXT:    vandps {{\.?LCPI[0-9]+_[0-9]+}}, %ymm0, %ymm0
; X86-NEXT:    vcvtdq2ps %ymm0, %ymm0
; X86-NEXT:    vcmpeqps {{\.?LCPI[0-9]+_[0-9]+}}, %ymm0, %ymm0
; X86-NEXT:    retl
;
; X64-LABEL: cmp_eq_bitcast:
; X64:       # %bb.0:
; X64-NEXT:    vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
; X64-NEXT:    vcvtdq2ps %ymm0, %ymm0
; X64-NEXT:    vcmpeqps {{\.?LCPI[0-9]+_[0-9]+}}, %ymm0, %ymm0
; X64-NEXT:    retq
  %and = and <8 x i32> %x, <i32 7, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3>
  %cmp = icmp eq <8 x i32> %and, <i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3>
  %sext = sext <8 x i1> %cmp to <8 x i32>
  ret <8 x i32> %sext
}

; x != 3 (unmasked input): stays an integer compare -- split vpcmpeqd per
; 128-bit half, then vpxor with all-ones to invert for the "ne".
define <8 x i32> @cmp_ne_sitofp(<8 x i32> %x) {
; CHECK-LABEL: cmp_ne_sitofp:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vextractf128 $1, %ymm0, %xmm1
; CHECK-NEXT:    vbroadcastss {{.*#+}} xmm2 = [3,3,3,3]
; CHECK-NEXT:    vpcmpeqd %xmm2, %xmm1, %xmm1
; CHECK-NEXT:    vpcmpeqd %xmm3, %xmm3, %xmm3
; CHECK-NEXT:    vpxor %xmm3, %xmm1, %xmm1
; CHECK-NEXT:    vpcmpeqd %xmm2, %xmm0, %xmm0
; CHECK-NEXT:    vpxor %xmm3, %xmm0, %xmm0
; CHECK-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; CHECK-NEXT:    ret{{[l|q]}}
  %cmp = icmp ne <8 x i32> %x, <i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3>
  %sext = sext <8 x i1> %cmp to <8 x i32>
  ret <8 x i32> %sext
}

; Negative test ("no_const"): RHS %y is not a constant, so no float-compare
; fold -- the compare stays as two 128-bit vpcmpgtd halves.
define <8 x i32> @cmp_slt_fail_no_const(<8 x i32> %x, <8 x i32> %y) {
; X86-LABEL: cmp_slt_fail_no_const:
; X86:       # %bb.0:
; X86-NEXT:    vandps {{\.?LCPI[0-9]+_[0-9]+}}, %ymm0, %ymm0
; X86-NEXT:    vextractf128 $1, %ymm1, %xmm2
; X86-NEXT:    vextractf128 $1, %ymm0, %xmm3
; X86-NEXT:    vpcmpgtd %xmm3, %xmm2, %xmm2
; X86-NEXT:    vpcmpgtd %xmm0, %xmm1, %xmm0
; X86-NEXT:    vinsertf128 $1, %xmm2, %ymm0, %ymm0
; X86-NEXT:    retl
;
; X64-LABEL: cmp_slt_fail_no_const:
; X64:       # %bb.0:
; X64-NEXT:    vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
; X64-NEXT:    vextractf128 $1, %ymm1, %xmm2
; X64-NEXT:    vextractf128 $1, %ymm0, %xmm3
; X64-NEXT:    vpcmpgtd %xmm3, %xmm2, %xmm2
; X64-NEXT:    vpcmpgtd %xmm0, %xmm1, %xmm0
; X64-NEXT:    vinsertf128 $1, %xmm2, %ymm0, %ymm0
; X64-NEXT:    retq
  %and = and <8 x i32> %x, <i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3>
  %cmp = icmp slt <8 x i32> %and, %y
  %sext = sext <8 x i1> %cmp to <8 x i32>
  ret <8 x i32> %sext
}

; x == -3 (splat negative constant, no mask): stays a split integer compare;
; 4294967293 in the CHECK is -3 printed as u32.
define <8 x i32> @cmp_eq_sitofp(<8 x i32> %x) {
; CHECK-LABEL: cmp_eq_sitofp:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vextractf128 $1, %ymm0, %xmm1
; CHECK-NEXT:    vbroadcastss {{.*#+}} xmm2 = [4294967293,4294967293,4294967293,4294967293]
; CHECK-NEXT:    vpcmpeqd %xmm2, %xmm1, %xmm1
; CHECK-NEXT:    vpcmpeqd %xmm2, %xmm0, %xmm0
; CHECK-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; CHECK-NEXT:    ret{{[l|q]}}
  %cmp = icmp eq <8 x i32> %x, <i32 -3, i32 -3, i32 -3, i32 -3, i32 -3, i32 -3, i32 -3, i32 -3>
  %sext = sext <8 x i1> %cmp to <8 x i32>
  ret <8 x i32> %sext
}

; Negative test ("no_bounds"): neither operand is masked/bounded, so the
; signed compare stays split vpcmpgtd.
; NOTE(review): the body performs icmp slt although the name says sgt --
; presumably intentional for the fail case, but worth confirming.
define <8 x i32> @cmp_sgt_fail_no_bounds(<8 x i32> %x, <8 x i32> %y) {
; CHECK-LABEL: cmp_sgt_fail_no_bounds:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vextractf128 $1, %ymm0, %xmm2
; CHECK-NEXT:    vextractf128 $1, %ymm1, %xmm3
; CHECK-NEXT:    vpcmpgtd %xmm2, %xmm3, %xmm2
; CHECK-NEXT:    vpcmpgtd %xmm0, %xmm1, %xmm0
; CHECK-NEXT:    vinsertf128 $1, %xmm2, %ymm0, %ymm0
; CHECK-NEXT:    ret{{[l|q]}}
  %cmp = icmp slt <8 x i32> %x, %y
  %sext = sext <8 x i1> %cmp to <8 x i32>
  ret <8 x i32> %sext
}

; Both operands masked with 0x7F800000 before a signed sgt compare; current
; codegen (per the CHECKs) still splits into two vpcmpgtd halves.
define <8 x i32> @cmp_sgt_bitcast(<8 x i32> %xx, <8 x i32> %yy) {
; CHECK-LABEL: cmp_sgt_bitcast:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vbroadcastss {{.*#+}} ymm2 = [2139095040,2139095040,2139095040,2139095040,2139095040,2139095040,2139095040,2139095040]
; CHECK-NEXT:    vandps %ymm2, %ymm0, %ymm0
; CHECK-NEXT:    vandps %ymm2, %ymm1, %ymm1
; CHECK-NEXT:    vextractf128 $1, %ymm1, %xmm2
; CHECK-NEXT:    vextractf128 $1, %ymm0, %xmm3
; CHECK-NEXT:    vpcmpgtd %xmm2, %xmm3, %xmm2
; CHECK-NEXT:    vpcmpgtd %xmm1, %xmm0, %xmm0
; CHECK-NEXT:    vinsertf128 $1, %xmm2, %ymm0, %ymm0
; CHECK-NEXT:    ret{{[l|q]}}
  %x = and <8 x i32> %xx, <i32 2139095040, i32 2139095040, i32 2139095040, i32 2139095040, i32 2139095040, i32 2139095040, i32 2139095040, i32 2139095040>
  %y = and <8 x i32> %yy, <i32 2139095040, i32 2139095040, i32 2139095040, i32 2139095040, i32 2139095040, i32 2139095040, i32 2139095040, i32 2139095040>

  %cmp = icmp sgt <8 x i32> %x, %y
  %sext = sext <8 x i1> %cmp to <8 x i32>
  ret <8 x i32> %sext
}

; Negative test ("out_of_bounds"): lane 0's mask is 2139095041, one above
; 0x7F800000, so the compare stays a split integer compare.
define <8 x i32> @cmp_sle_fail_out_of_bounds(<8 x i32> %xx) {
; X86-LABEL: cmp_sle_fail_out_of_bounds:
; X86:       # %bb.0:
; X86-NEXT:    vandps {{\.?LCPI[0-9]+_[0-9]+}}, %ymm0, %ymm0
; X86-NEXT:    vextractf128 $1, %ymm0, %xmm1
; X86-NEXT:    vbroadcastss {{.*#+}} xmm2 = [2139095041,2139095041,2139095041,2139095041]
; X86-NEXT:    vpcmpgtd %xmm1, %xmm2, %xmm1
; X86-NEXT:    vpcmpgtd %xmm0, %xmm2, %xmm0
; X86-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; X86-NEXT:    retl
;
; X64-LABEL: cmp_sle_fail_out_of_bounds:
; X64:       # %bb.0:
; X64-NEXT:    vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
; X64-NEXT:    vextractf128 $1, %ymm0, %xmm1
; X64-NEXT:    vbroadcastss {{.*#+}} xmm2 = [2139095041,2139095041,2139095041,2139095041]
; X64-NEXT:    vpcmpgtd %xmm1, %xmm2, %xmm1
; X64-NEXT:    vpcmpgtd %xmm0, %xmm2, %xmm0
; X64-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; X64-NEXT:    retq
  %x = and <8 x i32> %xx, <i32 2139095041, i32 2139095040, i32 2139095040, i32 2139095040, i32 2139095040, i32 2139095040, i32 2139095040, i32 2139095040>
  %cmp = icmp sle <8 x i32> %x, <i32 2139095040, i32 2139095040, i32 2139095040, i32 2139095040, i32 2139095040, i32 2139095040, i32 2139095040, i32 2139095040>
  %sext = sext <8 x i1> %cmp to <8 x i32>
  ret <8 x i32> %sext
}

; Negative test: equality against splat 16777216 (0x01000000, outside the
; bounds the name refers to) stays a split vpcmpeqd.
define <8 x i32> @cmp_eq_fail_out_of_bounds(<8 x i32> %x) {
; CHECK-LABEL: cmp_eq_fail_out_of_bounds:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vextractf128 $1, %ymm0, %xmm1
; CHECK-NEXT:    vbroadcastss {{.*#+}} xmm2 = [16777216,16777216,16777216,16777216]
; CHECK-NEXT:    vpcmpeqd %xmm2, %xmm1, %xmm1
; CHECK-NEXT:    vpcmpeqd %xmm2, %xmm0, %xmm0
; CHECK-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; CHECK-NEXT:    ret{{[l|q]}}
  %cmp = icmp eq <8 x i32> %x, <i32 16777216, i32 16777216, i32 16777216, i32 16777216, i32 16777216, i32 16777216, i32 16777216, i32 16777216>
  %sext = sext <8 x i1> %cmp to <8 x i32>
  ret <8 x i32> %sext
}

; Negative test: same as above with the negated constant (-16777216, printed
; as 4278190080 in the CHECK); stays a split vpcmpeqd.
define <8 x i32> @cmp_eq_fail_out_of_bounds2(<8 x i32> %x) {
; CHECK-LABEL: cmp_eq_fail_out_of_bounds2:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vextractf128 $1, %ymm0, %xmm1
; CHECK-NEXT:    vbroadcastss {{.*#+}} xmm2 = [4278190080,4278190080,4278190080,4278190080]
; CHECK-NEXT:    vpcmpeqd %xmm2, %xmm1, %xmm1
; CHECK-NEXT:    vpcmpeqd %xmm2, %xmm0, %xmm0
; CHECK-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; CHECK-NEXT:    ret{{[l|q]}}
  %cmp = icmp eq <8 x i32> %x, <i32 -16777216, i32 -16777216, i32 -16777216, i32 -16777216, i32 -16777216, i32 -16777216, i32 -16777216, i32 -16777216>
  %sext = sext <8 x i1> %cmp to <8 x i32>
  ret <8 x i32> %sext
}

; TODO case: non-splat constant of mixed-sign +/-16777215 lanes -- currently
; lowered as two 128-bit vpcmpeqd against constant-pool operands.
define <8 x i32> @cmp_eq_todo(<8 x i32> %x) {
; X86-LABEL: cmp_eq_todo:
; X86:       # %bb.0:
; X86-NEXT:    vpcmpeqd {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm1
; X86-NEXT:    vextractf128 $1, %ymm0, %xmm0
; X86-NEXT:    vpcmpeqd {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
; X86-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
; X86-NEXT:    retl
;
; X64-LABEL: cmp_eq_todo:
; X64:       # %bb.0:
; X64-NEXT:    vpcmpeqd {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm1
; X64-NEXT:    vextractf128 $1, %ymm0, %xmm0
; X64-NEXT:    vpcmpeqd {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
; X64-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
; X64-NEXT:    retq
  %cmp = icmp eq <8 x i32> %x, <i32 -16777215, i32 16777215, i32 16777215, i32 -16777215, i32 16777215, i32 -16777215, i32 16777215, i32 -16777215>
  %sext = sext <8 x i1> %cmp to <8 x i32>
  ret <8 x i32> %sext
}

; Negative test ("maybe_negative"): unmasked x may be negative, so the
; unsigned ult lowers via the vpminud + vpcmpeqd min-compare idiom per half.
define <8 x i32> @cmp_ult_fail_maybe_negative(<8 x i32> %x) {
; CHECK-LABEL: cmp_ult_fail_maybe_negative:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vextractf128 $1, %ymm0, %xmm1
; CHECK-NEXT:    vbroadcastss {{.*#+}} xmm2 = [2,2,2,2]
; CHECK-NEXT:    vpminud %xmm2, %xmm1, %xmm3
; CHECK-NEXT:    vpcmpeqd %xmm3, %xmm1, %xmm1
; CHECK-NEXT:    vpminud %xmm2, %xmm0, %xmm2
; CHECK-NEXT:    vpcmpeqd %xmm2, %xmm0, %xmm0
; CHECK-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; CHECK-NEXT:    ret{{[l|q]}}
  %cmp = icmp ult <8 x i32> %x, <i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3>
  %sext = sext <8 x i1> %cmp to <8 x i32>
  ret <8 x i32> %sext
}

; x masked with 0x7F800000 (so known non-negative), then unsigned ule 3:
; lowered as signed split vpcmpgtd against the splat 4 (i.e. x < 4).
define <8 x i32> @cmp_ule_bitcast(<8 x i32> %xx) {
; X86-LABEL: cmp_ule_bitcast:
; X86:       # %bb.0:
; X86-NEXT:    vandps {{\.?LCPI[0-9]+_[0-9]+}}, %ymm0, %ymm0
; X86-NEXT:    vextractf128 $1, %ymm0, %xmm1
; X86-NEXT:    vbroadcastss {{.*#+}} xmm2 = [4,4,4,4]
; X86-NEXT:    vpcmpgtd %xmm1, %xmm2, %xmm1
; X86-NEXT:    vpcmpgtd %xmm0, %xmm2, %xmm0
; X86-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; X86-NEXT:    retl
;
; X64-LABEL: cmp_ule_bitcast:
; X64:       # %bb.0:
; X64-NEXT:    vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
; X64-NEXT:    vextractf128 $1, %ymm0, %xmm1
; X64-NEXT:    vbroadcastss {{.*#+}} xmm2 = [4,4,4,4]
; X64-NEXT:    vpcmpgtd %xmm1, %xmm2, %xmm1
; X64-NEXT:    vpcmpgtd %xmm0, %xmm2, %xmm0
; X64-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; X64-NEXT:    retq
  %x = and <8 x i32> %xx, <i32 2139095040, i32 2139095040, i32 2139095040, i32 2139095040, i32 2139095040, i32 2139095040, i32 2139095040, i32 2139095040>
  %cmp = icmp ule <8 x i32> %x, <i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3>
  %sext = sext <8 x i1> %cmp to <8 x i32>
  ret <8 x i32> %sext
}

; x masked with 0x7FFFFFFF (sign bit cleared), then unsigned ugt 3: lowered
; as signed split vpcmpgtd against the splat 3.
define <8 x i32> @cmp_ugt_sitofp(<8 x i32> %xx) {
; X86-LABEL: cmp_ugt_sitofp:
; X86:       # %bb.0:
; X86-NEXT:    vandps {{\.?LCPI[0-9]+_[0-9]+}}, %ymm0, %ymm0
; X86-NEXT:    vextractf128 $1, %ymm0, %xmm1
; X86-NEXT:    vbroadcastss {{.*#+}} xmm2 = [3,3,3,3]
; X86-NEXT:    vpcmpgtd %xmm2, %xmm1, %xmm1
; X86-NEXT:    vpcmpgtd %xmm2, %xmm0, %xmm0
; X86-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; X86-NEXT:    retl
;
; X64-LABEL: cmp_ugt_sitofp:
; X64:       # %bb.0:
; X64-NEXT:    vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
; X64-NEXT:    vextractf128 $1, %ymm0, %xmm1
; X64-NEXT:    vbroadcastss {{.*#+}} xmm2 = [3,3,3,3]
; X64-NEXT:    vpcmpgtd %xmm2, %xmm1, %xmm1
; X64-NEXT:    vpcmpgtd %xmm2, %xmm0, %xmm0
; X64-NEXT:    vinsertf128 $1, %xmm1, %ymm0, %ymm0
; X64-NEXT:    retq
  %x = and <8 x i32> %xx, <i32 2147483647, i32 2147483647, i32 2147483647, i32 2147483647, i32 2147483647, i32 2147483647, i32 2147483647, i32 2147483647>
  %cmp = icmp ugt <8 x i32> %x, <i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3>
  %sext = sext <8 x i1> %cmp to <8 x i32>
  ret <8 x i32> %sext
}