; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -verify-machineinstrs -mtriple=powerpc64le-unknown-linux-gnu \
; RUN:   -mcpu=pwr10 -ppc-asm-full-reg-names -ppc-vsr-nums-as-vr < %s | \
; RUN:   FileCheck %s
; RUN: llc -verify-machineinstrs -mtriple=powerpc64-unknown-aix \
; RUN:   -mcpu=pwr10 -ppc-asm-full-reg-names -ppc-vsr-nums-as-vr < %s | \
; RUN:   FileCheck %s

; These test cases aim to test the bit manipulation operations on Power10.

; Declarations for the Power10 bit-manipulation intrinsics exercised below,
; in both vector (<2 x i64> / <1 x i128>) and scalar (i64) forms.
declare <2 x i64> @llvm.ppc.altivec.vpdepd(<2 x i64>, <2 x i64>)
declare <2 x i64> @llvm.ppc.altivec.vpextd(<2 x i64>, <2 x i64>)
declare i64 @llvm.ppc.pdepd(i64, i64)
declare i64 @llvm.ppc.pextd(i64, i64)
declare <2 x i64> @llvm.ppc.altivec.vcfuged(<2 x i64>, <2 x i64>)
declare i64 @llvm.ppc.cfuged(i64, i64)
declare i64 @llvm.ppc.altivec.vgnb(<1 x i128>, i32)
declare <2 x i64> @llvm.ppc.vsx.xxeval(<2 x i64>, <2 x i64>, <2 x i64>, i32)
declare <2 x i64> @llvm.ppc.altivec.vclzdm(<2 x i64>, <2 x i64>)
declare <2 x i64> @llvm.ppc.altivec.vctzdm(<2 x i64>, <2 x i64>)
declare i64 @llvm.ppc.cntlzdm(i64, i64)
declare i64 @llvm.ppc.cnttzdm(i64, i64)

; Check that @llvm.ppc.altivec.vpdepd selects to a single vpdepd instruction.
define <2 x i64> @test_vpdepd(<2 x i64> %a, <2 x i64> %b) {
; CHECK-LABEL: test_vpdepd:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vpdepd v2, v2, v3
; CHECK-NEXT:    blr
entry:
  %tmp = tail call <2 x i64> @llvm.ppc.altivec.vpdepd(<2 x i64> %a, <2 x i64> %b)
  ret <2 x i64> %tmp
}

; Check that @llvm.ppc.altivec.vpextd selects to a single vpextd instruction.
define <2 x i64> @test_vpextd(<2 x i64> %a, <2 x i64> %b) {
; CHECK-LABEL: test_vpextd:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vpextd v2, v2, v3
; CHECK-NEXT:    blr
entry:
  %tmp = tail call <2 x i64> @llvm.ppc.altivec.vpextd(<2 x i64> %a, <2 x i64> %b)
  ret <2 x i64> %tmp
}

; Check that the scalar @llvm.ppc.pdepd selects to a single pdepd instruction.
define i64 @test_pdepd(i64 %a, i64 %b) {
; CHECK-LABEL: test_pdepd:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    pdepd r3, r3, r4
; CHECK-NEXT:    blr
entry:
  %tmp = tail call i64 @llvm.ppc.pdepd(i64 %a, i64 %b)
  ret i64 %tmp
}

; Check that the scalar @llvm.ppc.pextd selects to a single pextd instruction.
define i64 @test_pextd(i64 %a, i64 %b) {
; CHECK-LABEL: test_pextd:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    pextd r3, r3, r4
; CHECK-NEXT:    blr
entry:
  %tmp = tail call i64 @llvm.ppc.pextd(i64 %a, i64 %b)
  ret i64 %tmp
}

; Check that @llvm.ppc.altivec.vcfuged selects to a single vcfuged instruction.
define <2 x i64> @test_vcfuged(<2 x i64> %a, <2 x i64> %b) {
; CHECK-LABEL: test_vcfuged:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vcfuged v2, v2, v3
; CHECK-NEXT:    blr
entry:
  %tmp = tail call <2 x i64> @llvm.ppc.altivec.vcfuged(<2 x i64> %a, <2 x i64> %b)
  ret <2 x i64> %tmp
}

; Check that the scalar @llvm.ppc.cfuged selects to a single cfuged instruction.
define i64 @test_cfuged(i64 %a, i64 %b) {
; CHECK-LABEL: test_cfuged:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    cfuged r3, r3, r4
; CHECK-NEXT:    blr
entry:
  %tmp = tail call i64 @llvm.ppc.cfuged(i64 %a, i64 %b)
  ret i64 %tmp
}

; The next three tests exercise @llvm.ppc.altivec.vgnb with different
; immediate operands (2, 7, 5); each must select to a single vgnb carrying
; that immediate.
define i64 @test_vgnb_1(<1 x i128> %a) {
; CHECK-LABEL: test_vgnb_1:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vgnb r3, v2, 2
; CHECK-NEXT:    blr
entry:
  %tmp = tail call i64 @llvm.ppc.altivec.vgnb(<1 x i128> %a, i32 2)
  ret i64 %tmp
}

define i64 @test_vgnb_2(<1 x i128> %a) {
; CHECK-LABEL: test_vgnb_2:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vgnb r3, v2, 7
; CHECK-NEXT:    blr
entry:
  %tmp = tail call i64 @llvm.ppc.altivec.vgnb(<1 x i128> %a, i32 7)
  ret i64 %tmp
}

define i64 @test_vgnb_3(<1 x i128> %a) {
; CHECK-LABEL: test_vgnb_3:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vgnb r3, v2, 5
; CHECK-NEXT:    blr
entry:
  %tmp = tail call i64 @llvm.ppc.altivec.vgnb(<1 x i128> %a, i32 5)
  ret i64 %tmp
}

; Check that @llvm.ppc.vsx.xxeval selects to a single xxeval, preserving the
; immediate truth-table operand (255).
define <2 x i64> @test_xxeval(<2 x i64> %a, <2 x i64> %b, <2 x i64> %c) {
; CHECK-LABEL: test_xxeval:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    xxeval v2, v2, v3, v4, 255
; CHECK-NEXT:    blr
entry:
  %tmp = tail call <2 x i64> @llvm.ppc.vsx.xxeval(<2 x i64> %a, <2 x i64> %b, <2 x i64> %c, i32 255)
  ret <2 x i64> %tmp
}

; Check that @llvm.ppc.altivec.vclzdm selects to a single vclzdm instruction.
define <2 x i64> @test_vclzdm(<2 x i64> %a, <2 x i64> %b) {
; CHECK-LABEL: test_vclzdm:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vclzdm v2, v2, v3
; CHECK-NEXT:    blr
entry:
  %tmp = tail call <2 x i64> @llvm.ppc.altivec.vclzdm(<2 x i64> %a, <2 x i64> %b)
  ret <2 x i64> %tmp
}

; Check that @llvm.ppc.altivec.vctzdm selects to a single vctzdm instruction.
define <2 x i64> @test_vctzdm(<2 x i64> %a, <2 x i64> %b) {
; CHECK-LABEL: test_vctzdm:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vctzdm v2, v2, v3
; CHECK-NEXT:    blr
entry:
  %tmp = tail call <2 x i64> @llvm.ppc.altivec.vctzdm(<2 x i64> %a, <2 x i64> %b)
  ret <2 x i64> %tmp
}

; Check that the scalar @llvm.ppc.cntlzdm selects to a single cntlzdm
; instruction.
define i64 @test_cntlzdm(i64 %a, i64 %b) {
; CHECK-LABEL: test_cntlzdm:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    cntlzdm r3, r3, r4
; CHECK-NEXT:    blr
entry:
  %tmp = tail call i64 @llvm.ppc.cntlzdm(i64 %a, i64 %b)
  ret i64 %tmp
}

; Check that the scalar @llvm.ppc.cnttzdm selects to a single cnttzdm
; instruction.
define i64 @test_cnttzdm(i64 %a, i64 %b) {
; CHECK-LABEL: test_cnttzdm:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    cnttzdm r3, r3, r4
; CHECK-NEXT:    blr
entry:
  %tmp = tail call i64 @llvm.ppc.cnttzdm(i64 %a, i64 %b)
  ret i64 %tmp
}