; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=ppc32-unknown-unknown | FileCheck %s --check-prefixes=ALL,PPC32
; RUN: llc < %s -mcpu=ppc -mtriple=powerpc64-unknown-unknown | FileCheck %s --check-prefixes=ALL,PPC64,PPC64BE
; RUN: llc < %s -mtriple=powerpc64le-unknown-unknown | FileCheck %s --check-prefixes=ALL,PPC64,PPC64LE

; These two forms are equivalent:
;   sub %y, (xor %x, -1)
;   add (add %x, 1), %y
; Some targets may prefer one to the other.
; (A short derivation of this equivalence is sketched in a note at the end of this file.)

define i8 @scalar_i8(i8 %x, i8 %y) nounwind {
; ALL-LABEL: scalar_i8:
; ALL:       # %bb.0:
; ALL-NEXT:    add 3, 4, 3
; ALL-NEXT:    addi 3, 3, 1
; ALL-NEXT:    blr
  %t0 = xor i8 %x, -1
  %t1 = sub i8 %y, %t0
  ret i8 %t1
}

define i16 @scalar_i16(i16 %x, i16 %y) nounwind {
; ALL-LABEL: scalar_i16:
; ALL:       # %bb.0:
; ALL-NEXT:    add 3, 4, 3
; ALL-NEXT:    addi 3, 3, 1
; ALL-NEXT:    blr
  %t0 = xor i16 %x, -1
  %t1 = sub i16 %y, %t0
  ret i16 %t1
}

define i32 @scalar_i32(i32 %x, i32 %y) nounwind {
; ALL-LABEL: scalar_i32:
; ALL:       # %bb.0:
; ALL-NEXT:    add 3, 4, 3
; ALL-NEXT:    addi 3, 3, 1
; ALL-NEXT:    blr
  %t0 = xor i32 %x, -1
  %t1 = sub i32 %y, %t0
  ret i32 %t1
}

define i64 @scalar_i64(i64 %x, i64 %y) nounwind {
; PPC32-LABEL: scalar_i64:
; PPC32:       # %bb.0:
; PPC32-NEXT:    addc 4, 6, 4
; PPC32-NEXT:    adde 3, 5, 3
; PPC32-NEXT:    addic 4, 4, 1
; PPC32-NEXT:    addze 3, 3
; PPC32-NEXT:    blr
;
; PPC64-LABEL: scalar_i64:
; PPC64:       # %bb.0:
; PPC64-NEXT:    add 3, 4, 3
; PPC64-NEXT:    addi 3, 3, 1
; PPC64-NEXT:    blr
  %t0 = xor i64 %x, -1
  %t1 = sub i64 %y, %t0
  ret i64 %t1
}

define <16 x i8> @vector_i128_i8(<16 x i8> %x, <16 x i8> %y) nounwind {
; PPC32-LABEL: vector_i128_i8:
; PPC32:       # %bb.0:
; PPC32-NEXT:    stwu 1, -64(1)
; PPC32-NEXT:    stw 21, 20(1) # 4-byte Folded Spill
; PPC32-NEXT:    stw 22, 24(1) # 4-byte Folded Spill
; PPC32-NEXT:    lbz 4, 115(1)
; PPC32-NEXT:    lbz 22, 119(1)
; PPC32-NEXT:    lbz 21, 123(1)
; PPC32-NEXT:    add 4, 4, 5
; PPC32-NEXT:    add 5, 22, 6
; PPC32-NEXT:    lbz 22, 131(1)
; PPC32-NEXT:    add 6, 21, 7
; PPC32-NEXT:    lbz 21, 135(1)
; PPC32-NEXT:    addi 6, 6, 1
; PPC32-NEXT:    stw 20, 16(1) # 4-byte Folded Spill
; PPC32-NEXT:    add 9, 22, 9
; PPC32-NEXT:    lbz 20, 127(1)
; PPC32-NEXT:    add 10, 21, 10
; PPC32-NEXT:    stw 25, 36(1) # 4-byte Folded Spill
; PPC32-NEXT:    addi 5, 5, 1
; PPC32-NEXT:    lbz 25, 83(1)
; PPC32-NEXT:    add 7, 20, 8
; PPC32-NEXT:    lbz 21, 147(1)
; PPC32-NEXT:    addi 7, 7, 1
; PPC32-NEXT:    stw 24, 32(1) # 4-byte Folded Spill
; PPC32-NEXT:    addi 4, 4, 1
; PPC32-NEXT:    lbz 24, 79(1)
; PPC32-NEXT:    add 25, 21, 25
; PPC32-NEXT:    lbz 22, 143(1)
; PPC32-NEXT:    stw 23, 28(1) # 4-byte Folded Spill
; PPC32-NEXT:    lbz 23, 75(1)
; PPC32-NEXT:    add 24, 22, 24
; PPC32-NEXT:    lbz 8, 139(1)
; PPC32-NEXT:    stw 28, 48(1) # 4-byte Folded Spill
; PPC32-NEXT:    lbz 28, 95(1)
; PPC32-NEXT:    add 8, 8, 23
; PPC32-NEXT:    lbz 21, 159(1)
; PPC32-NEXT:    addi 8, 8, 1
; PPC32-NEXT:    stw 27, 44(1) # 4-byte Folded Spill
; PPC32-NEXT:    lbz 27, 91(1)
; PPC32-NEXT:    add 28, 21, 28
; PPC32-NEXT:    lbz 22, 155(1)
; PPC32-NEXT:    stw 26, 40(1) # 4-byte Folded Spill
; PPC32-NEXT:    lbz 26, 87(1)
; PPC32-NEXT:    add 27, 22, 27
; PPC32-NEXT:    lbz 23, 151(1)
; PPC32-NEXT:    lbz 11, 111(1)
; PPC32-NEXT:    lbz 21, 175(1)
; PPC32-NEXT:    add 26, 23, 26
; PPC32-NEXT:    lbz 12, 107(1)
; PPC32-NEXT:    lbz 0, 171(1)
; PPC32-NEXT:    add 11, 21, 11
; PPC32-NEXT:    stw 30, 56(1) # 4-byte Folded Spill
; PPC32-NEXT:    addi 11, 11, 1
; PPC32-NEXT:    lbz 30, 103(1)
; PPC32-NEXT:    add 12, 0, 12
; PPC32-NEXT:    lbz 22, 167(1)
; PPC32-NEXT:    stw 29, 52(1) # 4-byte Folded Spill
; PPC32-NEXT:    lbz 29, 99(1)
; PPC32-NEXT:    add 30, 22, 30
; PPC32-NEXT:    lbz 23, 163(1)
; PPC32-NEXT:    stb 11, 15(3)
; PPC32-NEXT:    addi 11, 12, 1
; PPC32-NEXT:    add 29, 23, 29
; PPC32-NEXT:    stb 11, 14(3)
; PPC32-NEXT:    addi 11, 30, 1
; PPC32-NEXT:    stb 11, 13(3)
; PPC32-NEXT:    addi 11, 29, 1
; PPC32-NEXT:    stb 11, 12(3)
; PPC32-NEXT:    addi 11, 28, 1
; PPC32-NEXT:    stb 11, 11(3)
; PPC32-NEXT:    addi 11, 27, 1
; PPC32-NEXT:    stb 11, 10(3)
; PPC32-NEXT:    addi 11, 26, 1
; PPC32-NEXT:    stb 11, 9(3)
; PPC32-NEXT:    addi 11, 25, 1
; PPC32-NEXT:    stb 8, 6(3)
; PPC32-NEXT:    addi 8, 10, 1
; PPC32-NEXT:    stb 11, 8(3)
; PPC32-NEXT:    addi 11, 24, 1
; PPC32-NEXT:    stb 8, 5(3)
; PPC32-NEXT:    addi 8, 9, 1
; PPC32-NEXT:    stb 11, 7(3)
; PPC32-NEXT:    stb 8, 4(3)
; PPC32-NEXT:    stb 7, 3(3)
; PPC32-NEXT:    stb 6, 2(3)
; PPC32-NEXT:    stb 5, 1(3)
; PPC32-NEXT:    stb 4, 0(3)
; PPC32-NEXT:    lwz 30, 56(1) # 4-byte Folded Reload
; PPC32-NEXT:    lwz 29, 52(1) # 4-byte Folded Reload
; PPC32-NEXT:    lwz 28, 48(1) # 4-byte Folded Reload
; PPC32-NEXT:    lwz 27, 44(1) # 4-byte Folded Reload
; PPC32-NEXT:    lwz 26, 40(1) # 4-byte Folded Reload
; PPC32-NEXT:    lwz 25, 36(1) # 4-byte Folded Reload
; PPC32-NEXT:    lwz 24, 32(1) # 4-byte Folded Reload
; PPC32-NEXT:    lwz 23, 28(1) # 4-byte Folded Reload
; PPC32-NEXT:    lwz 22, 24(1) # 4-byte Folded Reload
; PPC32-NEXT:    lwz 21, 20(1) # 4-byte Folded Reload
; PPC32-NEXT:    lwz 20, 16(1) # 4-byte Folded Reload
; PPC32-NEXT:    addi 1, 1, 64
; PPC32-NEXT:    blr
;
; PPC64BE-LABEL: vector_i128_i8:
; PPC64BE:       # %bb.0:
; PPC64BE-NEXT:    std 21, -88(1) # 8-byte Folded Spill
; PPC64BE-NEXT:    lbz 21, 207(1)
; PPC64BE-NEXT:    std 22, -80(1) # 8-byte Folded Spill
; PPC64BE-NEXT:    std 23, -72(1) # 8-byte Folded Spill
; PPC64BE-NEXT:    std 25, -56(1) # 8-byte Folded Spill
; PPC64BE-NEXT:    std 24, -64(1) # 8-byte Folded Spill
; PPC64BE-NEXT:    std 28, -32(1) # 8-byte Folded Spill
; PPC64BE-NEXT:    std 27, -40(1) # 8-byte Folded Spill
; PPC64BE-NEXT:    std 26, -48(1) # 8-byte Folded Spill
; PPC64BE-NEXT:    std 30, -16(1) # 8-byte Folded Spill
; PPC64BE-NEXT:    std 29, -24(1) # 8-byte Folded Spill
; PPC64BE-NEXT:    lbz 22, 199(1)
; PPC64BE-NEXT:    lbz 23, 191(1)
; PPC64BE-NEXT:    add 6, 21, 6
; PPC64BE-NEXT:    lbz 21, 231(1)
; PPC64BE-NEXT:    add 5, 22, 5
; PPC64BE-NEXT:    lbz 22, 223(1)
; PPC64BE-NEXT:    add 4, 23, 4
; PPC64BE-NEXT:    lbz 23, 215(1)
; PPC64BE-NEXT:    add 9, 21, 9
; PPC64BE-NEXT:    lbz 25, 127(1)
; PPC64BE-NEXT:    add 8, 22, 8
; PPC64BE-NEXT:    lbz 21, 255(1)
; PPC64BE-NEXT:    add 7, 23, 7
; PPC64BE-NEXT:    lbz 24, 119(1)
; PPC64BE-NEXT:    addi 9, 9, 1
; PPC64BE-NEXT:    lbz 22, 247(1)
; PPC64BE-NEXT:    add 25, 21, 25
; PPC64BE-NEXT:    lbz 23, 239(1)
; PPC64BE-NEXT:    addi 8, 8, 1
; PPC64BE-NEXT:    lbz 28, 151(1)
; PPC64BE-NEXT:    add 24, 22, 24
; PPC64BE-NEXT:    lbz 21, 279(1)
; PPC64BE-NEXT:    add 10, 23, 10
; PPC64BE-NEXT:    lbz 27, 143(1)
; PPC64BE-NEXT:    addi 10, 10, 1
; PPC64BE-NEXT:    lbz 22, 271(1)
; PPC64BE-NEXT:    add 28, 21, 28
; PPC64BE-NEXT:    lbz 26, 135(1)
; PPC64BE-NEXT:    addi 7, 7, 1
; PPC64BE-NEXT:    lbz 23, 263(1)
; PPC64BE-NEXT:    add 27, 22, 27
; PPC64BE-NEXT:    lbz 11, 183(1)
; PPC64BE-NEXT:    addi 6, 6, 1
; PPC64BE-NEXT:    lbz 21, 311(1)
; PPC64BE-NEXT:    add 26, 23, 26
; PPC64BE-NEXT:    lbz 12, 175(1)
; PPC64BE-NEXT:    addi 5, 5, 1
; PPC64BE-NEXT:    lbz 0, 303(1)
; PPC64BE-NEXT:    add 11, 21, 11
; PPC64BE-NEXT:    lbz 30, 167(1)
; PPC64BE-NEXT:    addi 11, 11, 1
; PPC64BE-NEXT:    lbz 22, 295(1)
; PPC64BE-NEXT:    add 12, 0, 12
; PPC64BE-NEXT:    lbz 29, 159(1)
; PPC64BE-NEXT:    addi 4, 4, 1
; PPC64BE-NEXT:    lbz 23, 287(1)
; PPC64BE-NEXT:    add 30, 22, 30
; PPC64BE-NEXT:    stb 11, 15(3)
; PPC64BE-NEXT:    addi 11, 12, 1
; PPC64BE-NEXT:    add 29, 23, 29
; PPC64BE-NEXT:    stb 11, 14(3)
; PPC64BE-NEXT:    addi 11, 30, 1
; PPC64BE-NEXT:    stb 11, 13(3)
; PPC64BE-NEXT:    addi 11, 29, 1
; PPC64BE-NEXT:    stb 11, 12(3)
; PPC64BE-NEXT:    addi 11, 28, 1
; PPC64BE-NEXT:    stb 11, 11(3)
; PPC64BE-NEXT:    addi 11, 27, 1
; PPC64BE-NEXT:    stb 11, 10(3)
; PPC64BE-NEXT:    addi 11, 26, 1
; PPC64BE-NEXT:    stb 11, 9(3)
; PPC64BE-NEXT:    addi 11, 25, 1
; PPC64BE-NEXT:    stb 11, 8(3)
; PPC64BE-NEXT:    addi 11, 24, 1
; PPC64BE-NEXT:    stb 11, 7(3)
; PPC64BE-NEXT:    stb 10, 6(3)
; PPC64BE-NEXT:    stb 9, 5(3)
; PPC64BE-NEXT:    stb 8, 4(3)
; PPC64BE-NEXT:    stb 7, 3(3)
; PPC64BE-NEXT:    stb 6, 2(3)
; PPC64BE-NEXT:    stb 5, 1(3)
; PPC64BE-NEXT:    stb 4, 0(3)
; PPC64BE-NEXT:    ld 30, -16(1) # 8-byte Folded Reload
; PPC64BE-NEXT:    ld 29, -24(1) # 8-byte Folded Reload
; PPC64BE-NEXT:    ld 28, -32(1) # 8-byte Folded Reload
; PPC64BE-NEXT:    ld 27, -40(1) # 8-byte Folded Reload
; PPC64BE-NEXT:    ld 26, -48(1) # 8-byte Folded Reload
; PPC64BE-NEXT:    ld 25, -56(1) # 8-byte Folded Reload
; PPC64BE-NEXT:    ld 24, -64(1) # 8-byte Folded Reload
; PPC64BE-NEXT:    ld 23, -72(1) # 8-byte Folded Reload
; PPC64BE-NEXT:    ld 22, -80(1) # 8-byte Folded Reload
; PPC64BE-NEXT:    ld 21, -88(1) # 8-byte Folded Reload
; PPC64BE-NEXT:    blr
;
; PPC64LE-LABEL: vector_i128_i8:
; PPC64LE:       # %bb.0:
; PPC64LE-NEXT:    xxlnor 34, 34, 34
; PPC64LE-NEXT:    vsububm 2, 3, 2
; PPC64LE-NEXT:    blr
  %t0 = xor <16 x i8> %x, <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>
  %t1 = sub <16 x i8> %y, %t0
  ret <16 x i8> %t1
}

define <8 x i16> @vector_i128_i16(<8 x i16> %x, <8 x i16> %y) nounwind {
; PPC32-LABEL: vector_i128_i16:
; PPC32:       # %bb.0:
; PPC32-NEXT:    stwu 1, -32(1)
; PPC32-NEXT:    stw 26, 8(1) # 4-byte Folded Spill
; PPC32-NEXT:    stw 27, 12(1) # 4-byte Folded Spill
; PPC32-NEXT:    stw 28, 16(1) # 4-byte Folded Spill
; PPC32-NEXT:    stw 29, 20(1) # 4-byte Folded Spill
; PPC32-NEXT:    stw 30, 24(1) # 4-byte Folded Spill
; PPC32-NEXT:    lhz 11, 70(1)
; PPC32-NEXT:    lhz 12, 66(1)
; PPC32-NEXT:    lhz 0, 62(1)
; PPC32-NEXT:    add 10, 11, 10
; PPC32-NEXT:    lhz 30, 58(1)
; PPC32-NEXT:    add 9, 12, 9
; PPC32-NEXT:    lhz 29, 50(1)
; PPC32-NEXT:    add 8, 0, 8
; PPC32-NEXT:    lhz 28, 42(1)
; PPC32-NEXT:    add 7, 30, 7
; PPC32-NEXT:    lhz 27, 46(1)
; PPC32-NEXT:    add 5, 29, 5
; PPC32-NEXT:    lhz 26, 54(1)
; PPC32-NEXT:    add 3, 28, 3
; PPC32-NEXT:    add 4, 27, 4
; PPC32-NEXT:    addi 3, 3, 1
; PPC32-NEXT:    add 6, 26, 6
; PPC32-NEXT:    addi 4, 4, 1
; PPC32-NEXT:    addi 5, 5, 1
; PPC32-NEXT:    addi 6, 6, 1
; PPC32-NEXT:    addi 7, 7, 1
; PPC32-NEXT:    addi 8, 8, 1
; PPC32-NEXT:    addi 9, 9, 1
; PPC32-NEXT:    addi 10, 10, 1
; PPC32-NEXT:    lwz 30, 24(1) # 4-byte Folded Reload
; PPC32-NEXT:    lwz 29, 20(1) # 4-byte Folded Reload
; PPC32-NEXT:    lwz 28, 16(1) # 4-byte Folded Reload
; PPC32-NEXT:    lwz 27, 12(1) # 4-byte Folded Reload
; PPC32-NEXT:    lwz 26, 8(1) # 4-byte Folded Reload
; PPC32-NEXT:    addi 1, 1, 32
; PPC32-NEXT:    blr
;
; PPC64BE-LABEL: vector_i128_i16:
; PPC64BE:       # %bb.0:
; PPC64BE-NEXT:    std 25, -56(1) # 8-byte Folded Spill
; PPC64BE-NEXT:    std 26, -48(1) # 8-byte Folded Spill
; PPC64BE-NEXT:    std 27, -40(1) # 8-byte Folded Spill
; PPC64BE-NEXT:    std 28, -32(1) # 8-byte Folded Spill
; PPC64BE-NEXT:    std 29, -24(1) # 8-byte Folded Spill
; PPC64BE-NEXT:    std 30, -16(1) # 8-byte Folded Spill
; PPC64BE-NEXT:    lhz 11, 118(1)
; PPC64BE-NEXT:    lhz 12, 182(1)
; PPC64BE-NEXT:    lhz 0, 174(1)
; PPC64BE-NEXT:    lhz 30, 166(1)
; PPC64BE-NEXT:    add 11, 12, 11
; PPC64BE-NEXT:    lhz 29, 158(1)
; PPC64BE-NEXT:    add 10, 0, 10
; PPC64BE-NEXT:    lhz 28, 142(1)
; PPC64BE-NEXT:    add 9, 30, 9
; PPC64BE-NEXT:    lhz 27, 126(1)
; PPC64BE-NEXT:    add 8, 29, 8
; PPC64BE-NEXT:    lhz 26, 134(1)
; PPC64BE-NEXT:    add 6, 28, 6
; PPC64BE-NEXT:    lhz 25, 150(1)
; PPC64BE-NEXT:    add 4, 27, 4
; PPC64BE-NEXT:    add 5, 26, 5
; PPC64BE-NEXT:    addi 11, 11, 1
; PPC64BE-NEXT:    add 7, 25, 7
; PPC64BE-NEXT:    addi 10, 10, 1
; PPC64BE-NEXT:    addi 9, 9, 1
; PPC64BE-NEXT:    addi 8, 8, 1
; PPC64BE-NEXT:    addi 7, 7, 1
; PPC64BE-NEXT:    addi 6, 6, 1
; PPC64BE-NEXT:    addi 5, 5, 1
; PPC64BE-NEXT:    addi 4, 4, 1
; PPC64BE-NEXT:    sth 11, 14(3)
; PPC64BE-NEXT:    sth 10, 12(3)
; PPC64BE-NEXT:    sth 9, 10(3)
; PPC64BE-NEXT:    sth 8, 8(3)
; PPC64BE-NEXT:    sth 7, 6(3)
; PPC64BE-NEXT:    sth 6, 4(3)
; PPC64BE-NEXT:    sth 5, 2(3)
; PPC64BE-NEXT:    sth 4, 0(3)
; PPC64BE-NEXT:    ld 30, -16(1) # 8-byte Folded Reload
; PPC64BE-NEXT:    ld 29, -24(1) # 8-byte Folded Reload
; PPC64BE-NEXT:    ld 28, -32(1) # 8-byte Folded Reload
; PPC64BE-NEXT:    ld 27, -40(1) # 8-byte Folded Reload
; PPC64BE-NEXT:    ld 26, -48(1) # 8-byte Folded Reload
; PPC64BE-NEXT:    ld 25, -56(1) # 8-byte Folded Reload
; PPC64BE-NEXT:    blr
;
; PPC64LE-LABEL: vector_i128_i16:
; PPC64LE:       # %bb.0:
; PPC64LE-NEXT:    xxlnor 34, 34, 34
; PPC64LE-NEXT:    vsubuhm 2, 3, 2
; PPC64LE-NEXT:    blr
  %t0 = xor <8 x i16> %x, <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>
  %t1 = sub <8 x i16> %y, %t0
  ret <8 x i16> %t1
}

define <4 x i32> @vector_i128_i32(<4 x i32> %x, <4 x i32> %y) nounwind {
; PPC32-LABEL: vector_i128_i32:
; PPC32:       # %bb.0:
; PPC32-NEXT:    add 3, 7, 3
; PPC32-NEXT:    add 4, 8, 4
; PPC32-NEXT:    add 5, 9, 5
; PPC32-NEXT:    add 6, 10, 6
; PPC32-NEXT:    addi 3, 3, 1
; PPC32-NEXT:    addi 4, 4, 1
; PPC32-NEXT:    addi 5, 5, 1
; PPC32-NEXT:    addi 6, 6, 1
; PPC32-NEXT:    blr
;
; PPC64BE-LABEL: vector_i128_i32:
; PPC64BE:       # %bb.0:
; PPC64BE-NEXT:    add 6, 10, 6
; PPC64BE-NEXT:    add 5, 9, 5
; PPC64BE-NEXT:    add 4, 8, 4
; PPC64BE-NEXT:    add 3, 7, 3
; PPC64BE-NEXT:    addi 6, 6, 1
; PPC64BE-NEXT:    addi 5, 5, 1
; PPC64BE-NEXT:    addi 4, 4, 1
; PPC64BE-NEXT:    addi 3, 3, 1
; PPC64BE-NEXT:    blr
;
; PPC64LE-LABEL: vector_i128_i32:
; PPC64LE:       # %bb.0:
; PPC64LE-NEXT:    xxlnor 34, 34, 34
; PPC64LE-NEXT:    vsubuwm 2, 3, 2
; PPC64LE-NEXT:    blr
  %t0 = xor <4 x i32> %x, <i32 -1, i32 -1, i32 -1, i32 -1>
  %t1 = sub <4 x i32> %y, %t0
  ret <4 x i32> %t1
}

define <2 x i64> @vector_i128_i64(<2 x i64> %x, <2 x i64> %y) nounwind {
; PPC32-LABEL: vector_i128_i64:
; PPC32:       # %bb.0:
; PPC32-NEXT:    not 4, 4
; PPC32-NEXT:    not 3, 3
; PPC32-NEXT:    subc 4, 8, 4
; PPC32-NEXT:    not 6, 6
; PPC32-NEXT:    subfe 3, 3, 7
; PPC32-NEXT:    not 5, 5
; PPC32-NEXT:    subc 6, 10, 6
; PPC32-NEXT:    subfe 5, 5, 9
; PPC32-NEXT:    blr
;
; PPC64BE-LABEL: vector_i128_i64:
; PPC64BE:       # %bb.0:
; PPC64BE-NEXT:    add 3, 5, 3
; PPC64BE-NEXT:    add 4, 6, 4
; PPC64BE-NEXT:    addi 3, 3, 1
; PPC64BE-NEXT:    addi 4, 4, 1
; PPC64BE-NEXT:    blr
;
; PPC64LE-LABEL: vector_i128_i64:
; PPC64LE:       # %bb.0:
; PPC64LE-NEXT:    xxlnor 34, 34, 34
; PPC64LE-NEXT:    vsubudm 2, 3, 2
; PPC64LE-NEXT:    blr
  %t0 = xor <2 x i64> %x, <i64 -1, i64 -1>
  %t1 = sub <2 x i64> %y, %t0
  ret <2 x i64> %t1
}
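
; Note on the equivalence stated in the header comment. This is an editorial
; sketch of the reasoning only; it is not matched by FileCheck and introduces
; no new functions. In two's complement arithmetic,
;   xor %x, -1  ==  not %x  ==  -%x - 1
; so
;   sub %y, (xor %x, -1)  ==  %y - (-%x - 1)  ==  %y + %x + 1
;                         ==  add (add %x, 1), %y
; which is why the scalar cases above lower to an add followed by an
; add-immediate of 1 (addi/addic) rather than a not followed by a subtract.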