xref: /llvm-project/llvm/test/CodeGen/RISCV/GlobalISel/bitmanip.ll (revision 9122c5235ec85ce0c0ad337e862b006e7b349d84)
1; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 5
2; RUN: llc -mtriple=riscv32 -global-isel -global-isel-abort=1 < %s 2>&1 | FileCheck %s --check-prefixes=RV32
3; RUN: llc -mtriple=riscv64 -global-isel -global-isel-abort=1 < %s 2>&1 | FileCheck %s --check-prefixes=RV64
4
; Checks GlobalISel lowering of @llvm.bitreverse.i2: the 2-bit reverse is
; expanded to a swap of the two bits via shift+mask+or. RV32 and RV64 emit
; identical instruction sequences here. CHECK lines are autogenerated —
; regenerate with update_llc_test_checks.py rather than editing by hand.
5define i2 @bitreverse_i2(i2 %x) {
6; RV32-LABEL: bitreverse_i2:
7; RV32:       # %bb.0:
8; RV32-NEXT:    slli a1, a0, 1
9; RV32-NEXT:    andi a0, a0, 3
10; RV32-NEXT:    andi a1, a1, 2
11; RV32-NEXT:    srli a0, a0, 1
12; RV32-NEXT:    or a0, a1, a0
13; RV32-NEXT:    ret
14;
15; RV64-LABEL: bitreverse_i2:
16; RV64:       # %bb.0:
17; RV64-NEXT:    slli a1, a0, 1
18; RV64-NEXT:    andi a0, a0, 3
19; RV64-NEXT:    andi a1, a1, 2
20; RV64-NEXT:    srli a0, a0, 1
21; RV64-NEXT:    or a0, a1, a0
22; RV64-NEXT:    ret
23  %rev = call i2 @llvm.bitreverse.i2(i2 %x)
24  ret i2 %rev
25}
26
; Checks GlobalISel lowering of @llvm.bitreverse.i3 (odd, non-power-of-two
; width): outer bits are swapped with shifts and the middle bit is isolated
; with andi and OR-ed back in place. RV32/RV64 sequences match. CHECK lines
; are autogenerated — regenerate with update_llc_test_checks.py.
27define i3 @bitreverse_i3(i3 %x) {
28; RV32-LABEL: bitreverse_i3:
29; RV32:       # %bb.0:
30; RV32-NEXT:    slli a1, a0, 2
31; RV32-NEXT:    andi a0, a0, 7
32; RV32-NEXT:    andi a1, a1, 4
33; RV32-NEXT:    andi a2, a0, 2
34; RV32-NEXT:    or a1, a1, a2
35; RV32-NEXT:    srli a0, a0, 2
36; RV32-NEXT:    or a0, a1, a0
37; RV32-NEXT:    ret
38;
39; RV64-LABEL: bitreverse_i3:
40; RV64:       # %bb.0:
41; RV64-NEXT:    slli a1, a0, 2
42; RV64-NEXT:    andi a0, a0, 7
43; RV64-NEXT:    andi a1, a1, 4
44; RV64-NEXT:    andi a2, a0, 2
45; RV64-NEXT:    or a1, a1, a2
46; RV64-NEXT:    srli a0, a0, 2
47; RV64-NEXT:    or a0, a1, a0
48; RV64-NEXT:    ret
49  %rev = call i3 @llvm.bitreverse.i3(i3 %x)
50  ret i3 %rev
51}
52
; Checks GlobalISel lowering of @llvm.bitreverse.i4: each of the four bits
; is moved to its mirrored position individually via shift+andi, then the
; pieces are combined with or. RV32/RV64 sequences match. CHECK lines are
; autogenerated — regenerate with update_llc_test_checks.py.
53define i4 @bitreverse_i4(i4 %x) {
54; RV32-LABEL: bitreverse_i4:
55; RV32:       # %bb.0:
56; RV32-NEXT:    slli a1, a0, 3
57; RV32-NEXT:    slli a2, a0, 1
58; RV32-NEXT:    andi a0, a0, 15
59; RV32-NEXT:    andi a1, a1, 8
60; RV32-NEXT:    andi a2, a2, 4
61; RV32-NEXT:    or a1, a1, a2
62; RV32-NEXT:    srli a2, a0, 1
63; RV32-NEXT:    andi a2, a2, 2
64; RV32-NEXT:    or a1, a1, a2
65; RV32-NEXT:    srli a0, a0, 3
66; RV32-NEXT:    or a0, a1, a0
67; RV32-NEXT:    ret
68;
69; RV64-LABEL: bitreverse_i4:
70; RV64:       # %bb.0:
71; RV64-NEXT:    slli a1, a0, 3
72; RV64-NEXT:    slli a2, a0, 1
73; RV64-NEXT:    andi a0, a0, 15
74; RV64-NEXT:    andi a1, a1, 8
75; RV64-NEXT:    andi a2, a2, 4
76; RV64-NEXT:    or a1, a1, a2
77; RV64-NEXT:    srli a2, a0, 1
78; RV64-NEXT:    andi a2, a2, 2
79; RV64-NEXT:    or a1, a1, a2
80; RV64-NEXT:    srli a0, a0, 3
81; RV64-NEXT:    or a0, a1, a0
82; RV64-NEXT:    ret
83  %rev = call i4 @llvm.bitreverse.i4(i4 %x)
84  ret i4 %rev
85}
86
; Checks GlobalISel lowering of @llvm.bitreverse.i7 (odd 7-bit width):
; bit-by-bit relocation with shift+andi pairs, the middle bit kept in place,
; accumulated with a tree of ors. RV32/RV64 differ only in instruction
; scheduling near the end (same operations). CHECK lines are autogenerated —
; regenerate with update_llc_test_checks.py.
87define i7 @bitreverse_i7(i7 %x) {
88; RV32-LABEL: bitreverse_i7:
89; RV32:       # %bb.0:
90; RV32-NEXT:    slli a1, a0, 6
91; RV32-NEXT:    slli a2, a0, 4
92; RV32-NEXT:    slli a3, a0, 2
93; RV32-NEXT:    andi a0, a0, 127
94; RV32-NEXT:    andi a1, a1, 64
95; RV32-NEXT:    andi a2, a2, 32
96; RV32-NEXT:    andi a3, a3, 16
97; RV32-NEXT:    or a1, a1, a2
98; RV32-NEXT:    andi a2, a0, 8
99; RV32-NEXT:    or a2, a3, a2
100; RV32-NEXT:    srli a3, a0, 2
101; RV32-NEXT:    or a1, a1, a2
102; RV32-NEXT:    srli a2, a0, 4
103; RV32-NEXT:    andi a3, a3, 4
104; RV32-NEXT:    andi a2, a2, 2
105; RV32-NEXT:    or a2, a3, a2
106; RV32-NEXT:    or a1, a1, a2
107; RV32-NEXT:    srli a0, a0, 6
108; RV32-NEXT:    or a0, a1, a0
109; RV32-NEXT:    ret
110;
111; RV64-LABEL: bitreverse_i7:
112; RV64:       # %bb.0:
113; RV64-NEXT:    slli a1, a0, 6
114; RV64-NEXT:    slli a2, a0, 4
115; RV64-NEXT:    slli a3, a0, 2
116; RV64-NEXT:    andi a0, a0, 127
117; RV64-NEXT:    andi a1, a1, 64
118; RV64-NEXT:    andi a2, a2, 32
119; RV64-NEXT:    andi a3, a3, 16
120; RV64-NEXT:    or a1, a1, a2
121; RV64-NEXT:    andi a2, a0, 8
122; RV64-NEXT:    or a2, a3, a2
123; RV64-NEXT:    srli a3, a0, 2
124; RV64-NEXT:    or a1, a1, a2
125; RV64-NEXT:    srli a2, a0, 4
126; RV64-NEXT:    andi a3, a3, 4
127; RV64-NEXT:    andi a2, a2, 2
128; RV64-NEXT:    or a2, a3, a2
129; RV64-NEXT:    or a1, a1, a2
130; RV64-NEXT:    srli a0, a0, 6
131; RV64-NEXT:    or a0, a1, a0
132; RV64-NEXT:    ret
133  %rev = call i7 @llvm.bitreverse.i7(i7 %x)
134  ret i7 %rev
135}
136
; Checks GlobalISel lowering of @llvm.bitreverse.i24: wide enough to use the
; classic divide-and-conquer swap — a 16-bit rotate-style swap of the halves
; followed by nibble (0x0F0F...), pair (0x3333...), and single-bit
; (0x5555...) mask/shift/or stages, with constants built via lui+addi.
; RV32/RV64 sequences differ only in using addi vs addiw to materialize the
; 32-bit constants. CHECK lines are autogenerated — regenerate with
; update_llc_test_checks.py.
137define i24 @bitreverse_i24(i24 %x) {
138; RV32-LABEL: bitreverse_i24:
139; RV32:       # %bb.0:
140; RV32-NEXT:    slli a1, a0, 16
141; RV32-NEXT:    lui a2, 4096
142; RV32-NEXT:    lui a3, 1048335
143; RV32-NEXT:    addi a2, a2, -1
144; RV32-NEXT:    addi a3, a3, 240
145; RV32-NEXT:    and a0, a0, a2
146; RV32-NEXT:    srli a0, a0, 16
147; RV32-NEXT:    or a0, a0, a1
148; RV32-NEXT:    and a1, a3, a2
149; RV32-NEXT:    and a1, a0, a1
150; RV32-NEXT:    slli a0, a0, 4
151; RV32-NEXT:    and a0, a0, a3
152; RV32-NEXT:    lui a3, 1047757
153; RV32-NEXT:    addi a3, a3, -820
154; RV32-NEXT:    srli a1, a1, 4
155; RV32-NEXT:    or a0, a1, a0
156; RV32-NEXT:    and a1, a3, a2
157; RV32-NEXT:    and a1, a0, a1
158; RV32-NEXT:    slli a0, a0, 2
159; RV32-NEXT:    and a0, a0, a3
160; RV32-NEXT:    lui a3, 1047211
161; RV32-NEXT:    addi a3, a3, -1366
162; RV32-NEXT:    and a2, a3, a2
163; RV32-NEXT:    srli a1, a1, 2
164; RV32-NEXT:    or a0, a1, a0
165; RV32-NEXT:    and a2, a0, a2
166; RV32-NEXT:    slli a0, a0, 1
167; RV32-NEXT:    srli a2, a2, 1
168; RV32-NEXT:    and a0, a0, a3
169; RV32-NEXT:    or a0, a2, a0
170; RV32-NEXT:    ret
171;
172; RV64-LABEL: bitreverse_i24:
173; RV64:       # %bb.0:
174; RV64-NEXT:    slli a1, a0, 16
175; RV64-NEXT:    lui a2, 4096
176; RV64-NEXT:    lui a3, 1048335
177; RV64-NEXT:    addiw a2, a2, -1
178; RV64-NEXT:    addiw a3, a3, 240
179; RV64-NEXT:    and a0, a0, a2
180; RV64-NEXT:    srli a0, a0, 16
181; RV64-NEXT:    or a0, a0, a1
182; RV64-NEXT:    and a1, a3, a2
183; RV64-NEXT:    and a1, a0, a1
184; RV64-NEXT:    slli a0, a0, 4
185; RV64-NEXT:    and a0, a0, a3
186; RV64-NEXT:    lui a3, 1047757
187; RV64-NEXT:    addiw a3, a3, -820
188; RV64-NEXT:    srli a1, a1, 4
189; RV64-NEXT:    or a0, a1, a0
190; RV64-NEXT:    and a1, a3, a2
191; RV64-NEXT:    and a1, a0, a1
192; RV64-NEXT:    slli a0, a0, 2
193; RV64-NEXT:    and a0, a0, a3
194; RV64-NEXT:    lui a3, 1047211
195; RV64-NEXT:    addiw a3, a3, -1366
196; RV64-NEXT:    and a2, a3, a2
197; RV64-NEXT:    srli a1, a1, 2
198; RV64-NEXT:    or a0, a1, a0
199; RV64-NEXT:    and a2, a0, a2
200; RV64-NEXT:    slli a0, a0, 1
201; RV64-NEXT:    srli a2, a2, 1
202; RV64-NEXT:    and a0, a0, a3
203; RV64-NEXT:    or a0, a2, a0
204; RV64-NEXT:    ret
205  %rev = call i24 @llvm.bitreverse.i24(i24 %x)
206  ret i24 %rev
207}
208