; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv32 -mattr=+zbkb -verify-machineinstrs < %s \
; RUN: | FileCheck %s -check-prefixes=CHECK,RV32ZBKB
; RUN: llc -mtriple=riscv64 -mattr=+zbkb -verify-machineinstrs < %s \
; RUN: | FileCheck %s -check-prefixes=CHECK,RV64ZBKB

; These tests can be optimised by the DAG combines:
;   fold (bitreverse(srl (bitreverse c), x)) -> (shl c, x)
;   fold (bitreverse(shl (bitreverse c), x)) -> (srl c, x)
; Zbkb provides a single-instruction bitreverse, which is what makes the
; double-bitreverse pattern cheap enough to form and then fold away.

declare i8 @llvm.bitreverse.i8(i8)
declare i16 @llvm.bitreverse.i16(i16)
declare i32 @llvm.bitreverse.i32(i32)
declare i64 @llvm.bitreverse.i64(i64)

define i8 @test_bitreverse_srli_bitreverse_i8(i8 %a) nounwind {
; CHECK-LABEL: test_bitreverse_srli_bitreverse_i8:
; CHECK:       # %bb.0:
; CHECK-NEXT:    slli a0, a0, 3
; CHECK-NEXT:    ret
  %1 = call i8 @llvm.bitreverse.i8(i8 %a)
  %2 = lshr i8 %1, 3
  %3 = call i8 @llvm.bitreverse.i8(i8 %2)
  ret i8 %3
}

define i16 @test_bitreverse_srli_bitreverse_i16(i16 %a) nounwind {
; CHECK-LABEL: test_bitreverse_srli_bitreverse_i16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    slli a0, a0, 7
; CHECK-NEXT:    ret
  %1 = call i16 @llvm.bitreverse.i16(i16 %a)
  %2 = lshr i16 %1, 7
  %3 = call i16 @llvm.bitreverse.i16(i16 %2)
  ret i16 %3
}

define i32 @test_bitreverse_srli_bitreverse_i32(i32 %a) nounwind {
; RV32ZBKB-LABEL: test_bitreverse_srli_bitreverse_i32:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    slli a0, a0, 15
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_srli_bitreverse_i32:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    slliw a0, a0, 15
; RV64ZBKB-NEXT:    ret
  %1 = call i32 @llvm.bitreverse.i32(i32 %a)
  %2 = lshr i32 %1, 15
  %3 = call i32 @llvm.bitreverse.i32(i32 %2)
  ret i32 %3
}

define i64 @test_bitreverse_srli_bitreverse_i64(i64 %a) nounwind {
; RV32ZBKB-LABEL: test_bitreverse_srli_bitreverse_i64:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    slli a1, a0, 1
; RV32ZBKB-NEXT:    li a0, 0
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_srli_bitreverse_i64:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    slli a0, a0, 33
; RV64ZBKB-NEXT:    ret
  %1 = call i64 @llvm.bitreverse.i64(i64 %a)
  %2 = lshr i64 %1, 33
  %3 = call i64 @llvm.bitreverse.i64(i64 %2)
  ret i64 %3
}

define i8 @test_bitreverse_shli_bitreverse_i8(i8 %a) nounwind {
; RV32ZBKB-LABEL: test_bitreverse_shli_bitreverse_i8:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    slli a0, a0, 24
; RV32ZBKB-NEXT:    srli a0, a0, 27
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_shli_bitreverse_i8:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    slli a0, a0, 56
; RV64ZBKB-NEXT:    srli a0, a0, 59
; RV64ZBKB-NEXT:    ret
  %1 = call i8 @llvm.bitreverse.i8(i8 %a)
  %2 = shl i8 %1, 3
  %3 = call i8 @llvm.bitreverse.i8(i8 %2)
  ret i8 %3
}

define i16 @test_bitreverse_shli_bitreverse_i16(i16 %a) nounwind {
; RV32ZBKB-LABEL: test_bitreverse_shli_bitreverse_i16:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    slli a0, a0, 16
; RV32ZBKB-NEXT:    srli a0, a0, 23
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_shli_bitreverse_i16:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    slli a0, a0, 48
; RV64ZBKB-NEXT:    srli a0, a0, 55
; RV64ZBKB-NEXT:    ret
  %1 = call i16 @llvm.bitreverse.i16(i16 %a)
  %2 = shl i16 %1, 7
  %3 = call i16 @llvm.bitreverse.i16(i16 %2)
  ret i16 %3
}

define i32 @test_bitreverse_shli_bitreverse_i32(i32 %a) nounwind {
; RV32ZBKB-LABEL: test_bitreverse_shli_bitreverse_i32:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    srli a0, a0, 15
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_shli_bitreverse_i32:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    srliw a0, a0, 15
; RV64ZBKB-NEXT:    ret
  %1 = call i32 @llvm.bitreverse.i32(i32 %a)
  %2 = shl i32 %1, 15
  %3 = call i32 @llvm.bitreverse.i32(i32 %2)
  ret i32 %3
}

define i64 @test_bitreverse_shli_bitreverse_i64(i64 %a) nounwind {
; RV32ZBKB-LABEL: test_bitreverse_shli_bitreverse_i64:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    srli a0, a1, 1
; RV32ZBKB-NEXT:    li a1, 0
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_shli_bitreverse_i64:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    srli a0, a0, 33
; RV64ZBKB-NEXT:    ret
  %1 = call i64 @llvm.bitreverse.i64(i64 %a)
  %2 = shl i64 %1, 33
  %3 = call i64 @llvm.bitreverse.i64(i64 %2)
  ret i64 %3
}