; Source: llvm/test/CodeGen/RISCV/rv64zimop-intrinsic.ll (revision 364028a1a51689d2b33d3ec50c426fbeac269679)
; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv64 -mattr=+zimop -verify-machineinstrs < %s \
; RUN:   | FileCheck %s -check-prefix=RV64ZIMOP

; Zimop "may-be-operation" single-source intrinsic (i64). %b is a constant
; selecting which mop.r.<n> instruction is emitted (0 and 31 tested below).
declare i64 @llvm.riscv.mopr.i64(i64 %a, i64 %b)
6
; Immediate 0 in the intrinsic's second operand selects mop.r.0; the source
; value stays in a0 and no extension is needed for the i64 result.
define i64 @mopr0_64(i64 %a) nounwind {
; RV64ZIMOP-LABEL: mopr0_64:
; RV64ZIMOP:       # %bb.0:
; RV64ZIMOP-NEXT:    mop.r.0 a0, a0
; RV64ZIMOP-NEXT:    ret
  %tmp = call i64 @llvm.riscv.mopr.i64(i64 %a, i64 0)
  ret i64 %tmp
}
15
; Upper boundary of the mop.r.<n> selector: immediate 31 emits mop.r.31.
define i64 @mopr31_64(i64 %a) nounwind {
; RV64ZIMOP-LABEL: mopr31_64:
; RV64ZIMOP:       # %bb.0:
; RV64ZIMOP-NEXT:    mop.r.31 a0, a0
; RV64ZIMOP-NEXT:    ret
  %tmp = call i64 @llvm.riscv.mopr.i64(i64 %a, i64 31)
  ret i64 %tmp
}
24
; Zimop two-source intrinsic (i64). %c is a constant selecting which
; mop.rr.<n> instruction is emitted (0 and 7 tested below).
declare i64 @llvm.riscv.moprr.i64(i64 %a, i64 %b, i64 %c)
26
; Immediate 0 in the intrinsic's third operand selects mop.rr.0; both source
; registers (a0, a1) are passed through to the instruction.
define i64 @moprr0_64(i64 %a, i64 %b) nounwind {
; RV64ZIMOP-LABEL: moprr0_64:
; RV64ZIMOP:       # %bb.0:
; RV64ZIMOP-NEXT:    mop.rr.0 a0, a0, a1
; RV64ZIMOP-NEXT:    ret
  %tmp = call i64 @llvm.riscv.moprr.i64(i64 %a, i64 %b, i64 0)
  ret i64 %tmp
}
35
; Upper boundary of the mop.rr.<n> selector: immediate 7 emits mop.rr.7.
define i64 @moprr7_64(i64 %a, i64 %b) nounwind {
; RV64ZIMOP-LABEL: moprr7_64:
; RV64ZIMOP:       # %bb.0:
; RV64ZIMOP-NEXT:    mop.rr.7 a0, a0, a1
; RV64ZIMOP-NEXT:    ret
  %tmp = call i64 @llvm.riscv.moprr.i64(i64 %a, i64 %b, i64 7)
  ret i64 %tmp
}
44
; i32 variant of the single-source Zimop intrinsic; on RV64 its result must be
; re-sign-extended to satisfy signext callers (see sext.w in the checks below).
declare i32 @llvm.riscv.mopr.i32(i32 %a, i32 %b)
46
; i32 mop.r.0 on RV64: the instruction runs on the full register, then sext.w
; sign-extends the low 32 bits to honor the signext return attribute.
define signext i32 @mopr0_32(i32 signext %a) nounwind {
; RV64ZIMOP-LABEL: mopr0_32:
; RV64ZIMOP:       # %bb.0:
; RV64ZIMOP-NEXT:    mop.r.0 a0, a0
; RV64ZIMOP-NEXT:    sext.w a0, a0
; RV64ZIMOP-NEXT:    ret
  %tmp = call i32 @llvm.riscv.mopr.i32(i32 %a, i32 0)
  ret i32 %tmp
}
56
; i32 selector boundary: immediate 31 emits mop.r.31, followed by the same
; sext.w needed for the signext i32 return on RV64.
define signext i32 @mopr31_32(i32 signext %a) nounwind {
; RV64ZIMOP-LABEL: mopr31_32:
; RV64ZIMOP:       # %bb.0:
; RV64ZIMOP-NEXT:    mop.r.31 a0, a0
; RV64ZIMOP-NEXT:    sext.w a0, a0
; RV64ZIMOP-NEXT:    ret
  %tmp = call i32 @llvm.riscv.mopr.i32(i32 %a, i32 31)
  ret i32 %tmp
}
66
; i32 variant of the two-source Zimop intrinsic; %c selects mop.rr.<n>
; (0 and 7 tested below), with sext.w fixup on RV64.
declare i32 @llvm.riscv.moprr.i32(i32 %a, i32 %b, i32 %c)
68
; i32 mop.rr.0 on RV64: both sources pass through in a0/a1 and the result is
; sign-extended with sext.w for the signext return.
define signext i32 @moprr0_32(i32 signext %a, i32 signext %b) nounwind {
; RV64ZIMOP-LABEL: moprr0_32:
; RV64ZIMOP:       # %bb.0:
; RV64ZIMOP-NEXT:    mop.rr.0 a0, a0, a1
; RV64ZIMOP-NEXT:    sext.w a0, a0
; RV64ZIMOP-NEXT:    ret
  %tmp = call i32 @llvm.riscv.moprr.i32(i32 %a, i32 %b, i32 0)
  ret i32 %tmp
}
78
; i32 selector boundary: immediate 7 emits mop.rr.7, plus the sext.w fixup.
define signext i32 @moprr7_32(i32 signext %a, i32 signext %b) nounwind {
; RV64ZIMOP-LABEL: moprr7_32:
; RV64ZIMOP:       # %bb.0:
; RV64ZIMOP-NEXT:    mop.rr.7 a0, a0, a1
; RV64ZIMOP-NEXT:    sext.w a0, a0
; RV64ZIMOP-NEXT:    ret
  %tmp = call i32 @llvm.riscv.moprr.i32(i32 %a, i32 %b, i32 7)
  ret i32 %tmp
}
88
89