xref: /llvm-project/llvm/test/CodeGen/RISCV/rv64zbb-intrinsic.ll (revision 9122c5235ec85ce0c0ad337e862b006e7b349d84)
1; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2; RUN: llc -mtriple=riscv64 -mattr=+zbb -verify-machineinstrs < %s \
3; RUN:   | FileCheck %s -check-prefix=RV64ZBB
4
5declare i32 @llvm.riscv.orc.b.i32(i32)
6
; Basic lowering of the i32 orc.b intrinsic on RV64. The result is returned
; signext, so after orc.b (which operates on the full 64-bit register) an
; explicit sext.w is required to re-establish the sign-extended form.
7define signext i32 @orcb32(i32 signext %a) nounwind {
8; RV64ZBB-LABEL: orcb32:
9; RV64ZBB:       # %bb.0:
10; RV64ZBB-NEXT:    orc.b a0, a0
11; RV64ZBB-NEXT:    sext.w a0, a0
12; RV64ZBB-NEXT:    ret
13  %tmp = call i32 @llvm.riscv.orc.b.i32(i32 %a)
14  ret i32 %tmp
15}
16
; Same intrinsic with a zeroext argument and zeroext return: the upper 32 bits
; of the input are known zero, and orc.b maps zero bytes to zero bytes, so the
; result is already zero-extended and no extra extension instruction is needed
; (contrast with @orcb32 above, which needs a sext.w).
17define zeroext i32 @orcb32_zext(i32 zeroext %a) nounwind {
18; RV64ZBB-LABEL: orcb32_zext:
19; RV64ZBB:       # %bb.0:
20; RV64ZBB-NEXT:    orc.b a0, a0
21; RV64ZBB-NEXT:    ret
22  %tmp = call i32 @llvm.riscv.orc.b.i32(i32 %a)
23  ret i32 %tmp
24}
25
26; Second and+or is redundant with the first, make sure we remove them.
; Known-bits propagation through the intrinsic: the input is masked to
; 0xFF000000 and OR'ed with 0x00800001, so before orc.b byte0 and byte2 are
; known nonzero and byte1 is known zero. orc.b turns each nonzero byte into
; 0xFF and leaves zero bytes as 0x00, so %tmp3 already satisfies the trailing
; and/or (%tmp4/%tmp5) exactly — the test checks that only the input
; materialization, orc.b, and the signext sext.w survive in the output.
27define signext i32 @orcb32_knownbits(i32 signext %a) nounwind {
28; RV64ZBB-LABEL: orcb32_knownbits:
29; RV64ZBB:       # %bb.0:
30; RV64ZBB-NEXT:    lui a1, 1044480
31; RV64ZBB-NEXT:    and a0, a0, a1
32; RV64ZBB-NEXT:    lui a1, 2048
33; RV64ZBB-NEXT:    addi a1, a1, 1
34; RV64ZBB-NEXT:    or a0, a0, a1
35; RV64ZBB-NEXT:    orc.b a0, a0
36; RV64ZBB-NEXT:    sext.w a0, a0
37; RV64ZBB-NEXT:    ret
38  %tmp = and i32 %a, 4278190080 ; 0xFF000000
39  %tmp2 = or i32 %tmp, 8388609 ; 0x800001
40  %tmp3 = call i32 @llvm.riscv.orc.b.i32(i32 %tmp2)
41  %tmp4 = and i32 %tmp3, 4278190080 ; 0xFF000000
42  %tmp5 = or i32 %tmp4, 16711935 ; 0xFF00FF
43  ret i32 %tmp5
44}
45
46declare i64 @llvm.riscv.orc.b.i64(i64)
47
; Basic lowering of the i64 orc.b intrinsic: selects directly to a single
; orc.b instruction with no extension needed (full-width operation).
48define i64 @orcb64(i64 %a) nounwind {
49; RV64ZBB-LABEL: orcb64:
50; RV64ZBB:       # %bb.0:
51; RV64ZBB-NEXT:    orc.b a0, a0
52; RV64ZBB-NEXT:    ret
53  %tmp = call i64 @llvm.riscv.orc.b.i64(i64 %a)
54  ret i64 %tmp
55}
56
57; Second and+or is redundant with the first, make sure we remove them.
; 64-bit version of the known-bits test: the input mask/OR fix which bytes are
; known zero vs. known nonzero going into orc.b, making the trailing
; and (%tmp4) and or (%tmp5) recomputable from known bits. The test verifies
; they are folded away — the checked output contains only the constant
; materialization for the *input* masking plus a single orc.b, with no
; instructions after it before ret.
58define i64 @orcb64_knownbits(i64 %a) nounwind {
59; RV64ZBB-LABEL: orcb64_knownbits:
60; RV64ZBB:       # %bb.0:
61; RV64ZBB-NEXT:    lui a1, 65535
62; RV64ZBB-NEXT:    lui a2, 256
63; RV64ZBB-NEXT:    slli a1, a1, 12
64; RV64ZBB-NEXT:    addiw a2, a2, 8
65; RV64ZBB-NEXT:    and a0, a0, a1
66; RV64ZBB-NEXT:    slli a1, a2, 42
67; RV64ZBB-NEXT:    add a1, a2, a1
68; RV64ZBB-NEXT:    or a0, a0, a1
69; RV64ZBB-NEXT:    orc.b a0, a0
70; RV64ZBB-NEXT:    ret
71  %tmp = and i64 %a, 1099494850560 ; 0x000000ffff000000
72  %tmp2 = or i64 %tmp, 4611721202800525320 ; 0x4000200000100008
73  %tmp3 = call i64 @llvm.riscv.orc.b.i64(i64 %tmp2)
74  %tmp4 = and i64 %tmp3, 1099494850560 ; 0x000000ffff000000
75  %tmp5 = or i64 %tmp4, 18374966855153418495 ; 0xff00ff0000ff00ff
76  ret i64 %tmp5
77}
77}
78