; xref: /llvm-project/llvm/test/CodeGen/RISCV/GlobalISel/iabs.ll (revision dde5546b79f784ab71cac325e0a0698c67c4dcde)
; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv32 -global-isel -verify-machineinstrs < %s \
; RUN:   | FileCheck %s --check-prefix=RV32I
; RUN: llc -mtriple=riscv32 -global-isel -mattr=+zbb -verify-machineinstrs < %s \
; RUN:   | FileCheck %s --check-prefix=RV32ZBB
; RUN: llc -mtriple=riscv64 -global-isel -verify-machineinstrs < %s \
; RUN:   | FileCheck %s --check-prefix=RV64I
; RUN: llc -mtriple=riscv64 -global-isel -mattr=+zbb -verify-machineinstrs < %s \
; RUN:   | FileCheck %s --check-prefix=RV64ZBB

; llvm.abs.* overloads under test; the i1 immarg selects poison (true) vs
; INT_MIN passthrough (false) behavior for the minimum signed value.
declare i8 @llvm.abs.i8(i8, i1 immarg)
declare i16 @llvm.abs.i16(i16, i1 immarg)
declare i32 @llvm.abs.i32(i32, i1 immarg)
declare i64 @llvm.abs.i64(i64, i1 immarg)

define i8 @abs8(i8 %x) {
; RV32I-LABEL: abs8:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a1, a0, 24
; RV32I-NEXT:    srai a1, a1, 31
; RV32I-NEXT:    add a0, a0, a1
; RV32I-NEXT:    xor a0, a0, a1
; RV32I-NEXT:    ret
;
; RV32ZBB-LABEL: abs8:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    sext.b a0, a0
; RV32ZBB-NEXT:    neg a1, a0
; RV32ZBB-NEXT:    max a0, a0, a1
; RV32ZBB-NEXT:    ret
;
; RV64I-LABEL: abs8:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a1, a0, 56
; RV64I-NEXT:    srai a1, a1, 63
; RV64I-NEXT:    add a0, a0, a1
; RV64I-NEXT:    xor a0, a0, a1
; RV64I-NEXT:    ret
;
; RV64ZBB-LABEL: abs8:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    sext.b a0, a0
; RV64ZBB-NEXT:    neg a1, a0
; RV64ZBB-NEXT:    max a0, a0, a1
; RV64ZBB-NEXT:    ret
  %abs = tail call i8 @llvm.abs.i8(i8 %x, i1 true)
  ret i8 %abs
}

define i16 @abs16(i16 %x) {
; RV32I-LABEL: abs16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a1, a0, 16
; RV32I-NEXT:    srai a1, a1, 31
; RV32I-NEXT:    add a0, a0, a1
; RV32I-NEXT:    xor a0, a0, a1
; RV32I-NEXT:    ret
;
; RV32ZBB-LABEL: abs16:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    sext.h a0, a0
; RV32ZBB-NEXT:    neg a1, a0
; RV32ZBB-NEXT:    max a0, a0, a1
; RV32ZBB-NEXT:    ret
;
; RV64I-LABEL: abs16:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a1, a0, 48
; RV64I-NEXT:    srai a1, a1, 63
; RV64I-NEXT:    add a0, a0, a1
; RV64I-NEXT:    xor a0, a0, a1
; RV64I-NEXT:    ret
;
; RV64ZBB-LABEL: abs16:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    sext.h a0, a0
; RV64ZBB-NEXT:    neg a1, a0
; RV64ZBB-NEXT:    max a0, a0, a1
; RV64ZBB-NEXT:    ret
  %abs = tail call i16 @llvm.abs.i16(i16 %x, i1 true)
  ret i16 %abs
}

define i32 @abs32(i32 %x) {
; RV32I-LABEL: abs32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srai a1, a0, 31
; RV32I-NEXT:    add a0, a0, a1
; RV32I-NEXT:    xor a0, a0, a1
; RV32I-NEXT:    ret
;
; RV32ZBB-LABEL: abs32:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    neg a1, a0
; RV32ZBB-NEXT:    max a0, a0, a1
; RV32ZBB-NEXT:    ret
;
; RV64I-LABEL: abs32:
; RV64I:       # %bb.0:
; RV64I-NEXT:    sraiw a1, a0, 31
; RV64I-NEXT:    addw a0, a0, a1
; RV64I-NEXT:    xor a0, a0, a1
; RV64I-NEXT:    ret
;
; RV64ZBB-LABEL: abs32:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    sext.w a0, a0
; RV64ZBB-NEXT:    neg a1, a0
; RV64ZBB-NEXT:    max a0, a0, a1
; RV64ZBB-NEXT:    ret
  %abs = tail call i32 @llvm.abs.i32(i32 %x, i1 true)
  ret i32 %abs
}

define i64 @abs64(i64 %x) {
; RV32I-LABEL: abs64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srai a2, a1, 31
; RV32I-NEXT:    add a0, a0, a2
; RV32I-NEXT:    add a1, a1, a2
; RV32I-NEXT:    sltu a3, a0, a2
; RV32I-NEXT:    add a1, a1, a3
; RV32I-NEXT:    xor a0, a0, a2
; RV32I-NEXT:    xor a1, a1, a2
; RV32I-NEXT:    ret
;
; RV32ZBB-LABEL: abs64:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    srai a2, a1, 31
; RV32ZBB-NEXT:    add a0, a0, a2
; RV32ZBB-NEXT:    add a1, a1, a2
; RV32ZBB-NEXT:    sltu a3, a0, a2
; RV32ZBB-NEXT:    add a1, a1, a3
; RV32ZBB-NEXT:    xor a0, a0, a2
; RV32ZBB-NEXT:    xor a1, a1, a2
; RV32ZBB-NEXT:    ret
;
; RV64I-LABEL: abs64:
; RV64I:       # %bb.0:
; RV64I-NEXT:    srai a1, a0, 63
; RV64I-NEXT:    add a0, a0, a1
; RV64I-NEXT:    xor a0, a0, a1
; RV64I-NEXT:    ret
;
; RV64ZBB-LABEL: abs64:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    neg a1, a0
; RV64ZBB-NEXT:    max a0, a0, a1
; RV64ZBB-NEXT:    ret
  %abs = tail call i64 @llvm.abs.i64(i64 %x, i1 true)
  ret i64 %abs
}
