; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc --mtriple=loongarch32 -mattr=+d --verify-machineinstrs < %s \
; RUN:   | FileCheck %s --check-prefix=LA32
; RUN: llc --mtriple=loongarch64 -mattr=+d --verify-machineinstrs < %s \
; RUN:   | FileCheck %s --check-prefix=LA64

declare i7 @llvm.bitreverse.i7(i7)
declare i8 @llvm.bitreverse.i8(i8)
declare i16 @llvm.bitreverse.i16(i16)
declare i24 @llvm.bitreverse.i24(i24)
declare i32 @llvm.bitreverse.i32(i32)
declare i48 @llvm.bitreverse.i48(i48)
declare i64 @llvm.bitreverse.i64(i64)
declare i77 @llvm.bitreverse.i77(i77)
declare i128 @llvm.bitreverse.i128(i128)

define i8 @test_bitreverse_i8(i8 %a) nounwind {
; LA32-LABEL: test_bitreverse_i8:
; LA32:       # %bb.0:
; LA32-NEXT:    bitrev.4b $a0, $a0
; LA32-NEXT:    ret
;
; LA64-LABEL: test_bitreverse_i8:
; LA64:       # %bb.0:
; LA64-NEXT:    bitrev.4b $a0, $a0
; LA64-NEXT:    ret
  %tmp = call i8 @llvm.bitreverse.i8(i8 %a)
  ret i8 %tmp
}

define i16 @test_bitreverse_i16(i16 %a) nounwind {
; LA32-LABEL: test_bitreverse_i16:
; LA32:       # %bb.0:
; LA32-NEXT:    bitrev.w $a0, $a0
; LA32-NEXT:    srli.w $a0, $a0, 16
; LA32-NEXT:    ret
;
; LA64-LABEL: test_bitreverse_i16:
; LA64:       # %bb.0:
; LA64-NEXT:    bitrev.d $a0, $a0
; LA64-NEXT:    srli.d $a0, $a0, 48
; LA64-NEXT:    ret
  %tmp = call i16 @llvm.bitreverse.i16(i16 %a)
  ret i16 %tmp
}

define i32 @test_bitreverse_i32(i32 %a) nounwind {
; LA32-LABEL: test_bitreverse_i32:
; LA32:       # %bb.0:
; LA32-NEXT:    bitrev.w $a0, $a0
; LA32-NEXT:    ret
;
; LA64-LABEL: test_bitreverse_i32:
; LA64:       # %bb.0:
; LA64-NEXT:    bitrev.w $a0, $a0
; LA64-NEXT:    ret
  %tmp = call i32 @llvm.bitreverse.i32(i32 %a)
  ret i32 %tmp
}

define i64 @test_bitreverse_i64(i64 %a) nounwind {
; LA32-LABEL: test_bitreverse_i64:
; LA32:       # %bb.0:
; LA32-NEXT:    bitrev.w $a2, $a1
; LA32-NEXT:    bitrev.w $a1, $a0
; LA32-NEXT:    move $a0, $a2
; LA32-NEXT:    ret
;
; LA64-LABEL: test_bitreverse_i64:
; LA64:       # %bb.0:
; LA64-NEXT:    bitrev.d $a0, $a0
; LA64-NEXT:    ret
  %tmp = call i64 @llvm.bitreverse.i64(i64 %a)
  ret i64 %tmp
}

;; Bitreverse on non-native integer widths.

define i7 @test_bitreverse_i7(i7 %a) nounwind {
; LA32-LABEL: test_bitreverse_i7:
; LA32:       # %bb.0:
; LA32-NEXT:    bitrev.w $a0, $a0
; LA32-NEXT:    srli.w $a0, $a0, 25
; LA32-NEXT:    ret
;
; LA64-LABEL: test_bitreverse_i7:
; LA64:       # %bb.0:
; LA64-NEXT:    bitrev.d $a0, $a0
; LA64-NEXT:    srli.d $a0, $a0, 57
; LA64-NEXT:    ret
  %tmp = call i7 @llvm.bitreverse.i7(i7 %a)
  ret i7 %tmp
}

define i24 @test_bitreverse_i24(i24 %a) nounwind {
; LA32-LABEL: test_bitreverse_i24:
; LA32:       # %bb.0:
; LA32-NEXT:    bitrev.w $a0, $a0
; LA32-NEXT:    srli.w $a0, $a0, 8
; LA32-NEXT:    ret
;
; LA64-LABEL: test_bitreverse_i24:
; LA64:       # %bb.0:
; LA64-NEXT:    bitrev.d $a0, $a0
; LA64-NEXT:    srli.d $a0, $a0, 40
; LA64-NEXT:    ret
  %tmp = call i24 @llvm.bitreverse.i24(i24 %a)
  ret i24 %tmp
}

define i48 @test_bitreverse_i48(i48 %a) nounwind {
; LA32-LABEL: test_bitreverse_i48:
; LA32:       # %bb.0:
; LA32-NEXT:    bitrev.w $a2, $a0
; LA32-NEXT:    bitrev.w $a0, $a1
; LA32-NEXT:    bytepick.w $a0, $a0, $a2, 2
; LA32-NEXT:    srli.w $a1, $a2, 16
; LA32-NEXT:    ret
;
; LA64-LABEL: test_bitreverse_i48:
; LA64:       # %bb.0:
; LA64-NEXT:    bitrev.d $a0, $a0
; LA64-NEXT:    srli.d $a0, $a0, 16
; LA64-NEXT:    ret
  %tmp = call i48 @llvm.bitreverse.i48(i48 %a)
  ret i48 %tmp
}

define i77 @test_bitreverse_i77(i77 %a) nounwind {
; LA32-LABEL: test_bitreverse_i77:
; LA32:       # %bb.0:
; LA32-NEXT:    ld.w $a2, $a1, 4
; LA32-NEXT:    ld.w $a3, $a1, 8
; LA32-NEXT:    ld.w $a1, $a1, 0
; LA32-NEXT:    bitrev.w $a2, $a2
; LA32-NEXT:    slli.w $a4, $a2, 13
; LA32-NEXT:    bitrev.w $a3, $a3
; LA32-NEXT:    srli.w $a3, $a3, 19
; LA32-NEXT:    or $a3, $a3, $a4
; LA32-NEXT:    srli.w $a2, $a2, 19
; LA32-NEXT:    bitrev.w $a1, $a1
; LA32-NEXT:    slli.w $a4, $a1, 13
; LA32-NEXT:    or $a2, $a4, $a2
; LA32-NEXT:    srli.w $a1, $a1, 19
; LA32-NEXT:    st.h $a1, $a0, 8
; LA32-NEXT:    st.w $a2, $a0, 4
; LA32-NEXT:    st.w $a3, $a0, 0
; LA32-NEXT:    ret
;
; LA64-LABEL: test_bitreverse_i77:
; LA64:       # %bb.0:
; LA64-NEXT:    bitrev.d $a2, $a0
; LA64-NEXT:    slli.d $a0, $a2, 13
; LA64-NEXT:    bitrev.d $a1, $a1
; LA64-NEXT:    srli.d $a1, $a1, 51
; LA64-NEXT:    or $a0, $a1, $a0
; LA64-NEXT:    srli.d $a1, $a2, 51
; LA64-NEXT:    ret
  %tmp = call i77 @llvm.bitreverse.i77(i77 %a)
  ret i77 %tmp
}

define i128 @test_bitreverse_i128(i128 %a) nounwind {
; LA32-LABEL: test_bitreverse_i128:
; LA32:       # %bb.0:
; LA32-NEXT:    ld.w $a2, $a1, 12
; LA32-NEXT:    ld.w $a3, $a1, 8
; LA32-NEXT:    ld.w $a4, $a1, 4
; LA32-NEXT:    ld.w $a1, $a1, 0
; LA32-NEXT:    bitrev.w $a2, $a2
; LA32-NEXT:    bitrev.w $a3, $a3
; LA32-NEXT:    bitrev.w $a4, $a4
; LA32-NEXT:    bitrev.w $a1, $a1
; LA32-NEXT:    st.w $a1, $a0, 12
; LA32-NEXT:    st.w $a4, $a0, 8
; LA32-NEXT:    st.w $a3, $a0, 4
; LA32-NEXT:    st.w $a2, $a0, 0
; LA32-NEXT:    ret
;
; LA64-LABEL: test_bitreverse_i128:
; LA64:       # %bb.0:
; LA64-NEXT:    bitrev.d $a2, $a1
; LA64-NEXT:    bitrev.d $a1, $a0
; LA64-NEXT:    move $a0, $a2
; LA64-NEXT:    ret
  %tmp = call i128 @llvm.bitreverse.i128(i128 %a)
  ret i128 %tmp
}