xref: /llvm-project/llvm/test/CodeGen/RISCV/bswap-bitreverse.ll (revision 9122c5235ec85ce0c0ad337e862b006e7b349d84)
; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv32 -verify-machineinstrs < %s \
; RUN:   | FileCheck %s -check-prefix=RV32I
; RUN: llc -mtriple=riscv64 -verify-machineinstrs < %s \
; RUN:   | FileCheck %s -check-prefix=RV64I
; RUN: llc -mtriple=riscv32 -mattr=+zbb -verify-machineinstrs < %s \
; RUN:   | FileCheck %s -check-prefixes=RV32ZB,RV32ZBB
; RUN: llc -mtriple=riscv64 -mattr=+zbb -verify-machineinstrs < %s \
; RUN:   | FileCheck %s -check-prefixes=RV64ZB,RV64ZBB
; RUN: llc -mtriple=riscv32 -mattr=+zbkb -verify-machineinstrs < %s \
; RUN:   | FileCheck %s -check-prefixes=RV32ZB,RV32ZBKB
; RUN: llc -mtriple=riscv64 -mattr=+zbkb -verify-machineinstrs < %s \
; RUN:   | FileCheck %s -check-prefixes=RV64ZB,RV64ZBKB

; Declarations of the bswap/bitreverse intrinsics exercised by the tests below.
declare i16 @llvm.bswap.i16(i16)
declare i32 @llvm.bswap.i32(i32)
declare i64 @llvm.bswap.i64(i64)
declare i8 @llvm.bitreverse.i8(i8)
declare i16 @llvm.bitreverse.i16(i16)
declare i32 @llvm.bitreverse.i32(i32)
declare i64 @llvm.bitreverse.i64(i64)

; i16 bswap: shift/or expansion on base RV32I/RV64I; rev8 + srli with Zbb/Zbkb.
define i16 @test_bswap_i16(i16 %a) nounwind {
; RV32I-LABEL: test_bswap_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a1, a0, 8
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 24
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bswap_i16:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a1, a0, 8
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 56
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    ret
;
; RV32ZB-LABEL: test_bswap_i16:
; RV32ZB:       # %bb.0:
; RV32ZB-NEXT:    rev8 a0, a0
; RV32ZB-NEXT:    srli a0, a0, 16
; RV32ZB-NEXT:    ret
;
; RV64ZB-LABEL: test_bswap_i16:
; RV64ZB:       # %bb.0:
; RV64ZB-NEXT:    rev8 a0, a0
; RV64ZB-NEXT:    srli a0, a0, 48
; RV64ZB-NEXT:    ret
  %tmp = call i16 @llvm.bswap.i16(i16 %a)
  ret i16 %tmp
}

; i32 bswap: mask/shift/or expansion on base ISA; a single rev8 with Zbb/Zbkb
; (plus srli 32 on RV64 to keep the result in the low 32 bits).
define i32 @test_bswap_i32(i32 %a) nounwind {
; RV32I-LABEL: test_bswap_i32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srli a1, a0, 8
; RV32I-NEXT:    lui a2, 16
; RV32I-NEXT:    srli a3, a0, 24
; RV32I-NEXT:    addi a2, a2, -256
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a2, a0, a2
; RV32I-NEXT:    or a1, a1, a3
; RV32I-NEXT:    slli a2, a2, 8
; RV32I-NEXT:    slli a0, a0, 24
; RV32I-NEXT:    or a0, a0, a2
; RV32I-NEXT:    or a0, a0, a1
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bswap_i32:
; RV64I:       # %bb.0:
; RV64I-NEXT:    srli a1, a0, 8
; RV64I-NEXT:    lui a2, 16
; RV64I-NEXT:    srliw a3, a0, 24
; RV64I-NEXT:    addiw a2, a2, -256
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a2, a0, a2
; RV64I-NEXT:    or a1, a1, a3
; RV64I-NEXT:    slli a2, a2, 8
; RV64I-NEXT:    slliw a0, a0, 24
; RV64I-NEXT:    or a0, a0, a2
; RV64I-NEXT:    or a0, a0, a1
; RV64I-NEXT:    ret
;
; RV32ZB-LABEL: test_bswap_i32:
; RV32ZB:       # %bb.0:
; RV32ZB-NEXT:    rev8 a0, a0
; RV32ZB-NEXT:    ret
;
; RV64ZB-LABEL: test_bswap_i32:
; RV64ZB:       # %bb.0:
; RV64ZB-NEXT:    rev8 a0, a0
; RV64ZB-NEXT:    srli a0, a0, 32
; RV64ZB-NEXT:    ret
  %tmp = call i32 @llvm.bswap.i32(i32 %a)
  ret i32 %tmp
}

; i64 bswap: on RV32 the value is split across a0/a1, so the Zb lowering is two
; rev8 instructions with the register pair swapped; on RV64 it is one rev8.
define i64 @test_bswap_i64(i64 %a) nounwind {
; RV32I-LABEL: test_bswap_i64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srli a2, a1, 8
; RV32I-NEXT:    lui a3, 16
; RV32I-NEXT:    srli a4, a1, 24
; RV32I-NEXT:    srli a5, a0, 8
; RV32I-NEXT:    addi a3, a3, -256
; RV32I-NEXT:    and a2, a2, a3
; RV32I-NEXT:    or a2, a2, a4
; RV32I-NEXT:    srli a4, a0, 24
; RV32I-NEXT:    and a5, a5, a3
; RV32I-NEXT:    or a4, a5, a4
; RV32I-NEXT:    slli a5, a1, 24
; RV32I-NEXT:    and a1, a1, a3
; RV32I-NEXT:    slli a1, a1, 8
; RV32I-NEXT:    or a1, a5, a1
; RV32I-NEXT:    and a3, a0, a3
; RV32I-NEXT:    slli a0, a0, 24
; RV32I-NEXT:    slli a3, a3, 8
; RV32I-NEXT:    or a3, a0, a3
; RV32I-NEXT:    or a0, a1, a2
; RV32I-NEXT:    or a1, a3, a4
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bswap_i64:
; RV64I:       # %bb.0:
; RV64I-NEXT:    srli a1, a0, 40
; RV64I-NEXT:    lui a2, 16
; RV64I-NEXT:    srli a3, a0, 56
; RV64I-NEXT:    srli a4, a0, 24
; RV64I-NEXT:    lui a5, 4080
; RV64I-NEXT:    addiw a2, a2, -256
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    or a1, a1, a3
; RV64I-NEXT:    srli a3, a0, 8
; RV64I-NEXT:    and a4, a4, a5
; RV64I-NEXT:    srliw a3, a3, 24
; RV64I-NEXT:    slli a3, a3, 24
; RV64I-NEXT:    or a3, a3, a4
; RV64I-NEXT:    srliw a4, a0, 24
; RV64I-NEXT:    and a5, a0, a5
; RV64I-NEXT:    and a2, a0, a2
; RV64I-NEXT:    slli a0, a0, 56
; RV64I-NEXT:    slli a4, a4, 32
; RV64I-NEXT:    slli a5, a5, 24
; RV64I-NEXT:    or a4, a5, a4
; RV64I-NEXT:    slli a2, a2, 40
; RV64I-NEXT:    or a1, a3, a1
; RV64I-NEXT:    or a0, a0, a2
; RV64I-NEXT:    or a0, a0, a4
; RV64I-NEXT:    or a0, a0, a1
; RV64I-NEXT:    ret
;
; RV32ZB-LABEL: test_bswap_i64:
; RV32ZB:       # %bb.0:
; RV32ZB-NEXT:    rev8 a2, a1
; RV32ZB-NEXT:    rev8 a1, a0
; RV32ZB-NEXT:    mv a0, a2
; RV32ZB-NEXT:    ret
;
; RV64ZB-LABEL: test_bswap_i64:
; RV64ZB:       # %bb.0:
; RV64ZB-NEXT:    rev8 a0, a0
; RV64ZB-NEXT:    ret
  %tmp = call i64 @llvm.bswap.i64(i64 %a)
  ret i64 %tmp
}

; i8 bitreverse: swap-nibbles/swap-pairs/swap-bits expansion without Zbkb
; (Zbb has no brev8, so ZBB uses the same expansion); rev8+brev8+srli with Zbkb.
define i8 @test_bitreverse_i8(i8 %a) nounwind {
; RV32I-LABEL: test_bitreverse_i8:
; RV32I:       # %bb.0:
; RV32I-NEXT:    andi a1, a0, 15
; RV32I-NEXT:    slli a0, a0, 24
; RV32I-NEXT:    slli a1, a1, 4
; RV32I-NEXT:    srli a0, a0, 28
; RV32I-NEXT:    or a0, a0, a1
; RV32I-NEXT:    andi a1, a0, 51
; RV32I-NEXT:    srli a0, a0, 2
; RV32I-NEXT:    slli a1, a1, 2
; RV32I-NEXT:    andi a0, a0, 51
; RV32I-NEXT:    or a0, a0, a1
; RV32I-NEXT:    andi a1, a0, 85
; RV32I-NEXT:    srli a0, a0, 1
; RV32I-NEXT:    slli a1, a1, 1
; RV32I-NEXT:    andi a0, a0, 85
; RV32I-NEXT:    or a0, a0, a1
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bitreverse_i8:
; RV64I:       # %bb.0:
; RV64I-NEXT:    andi a1, a0, 15
; RV64I-NEXT:    slli a0, a0, 56
; RV64I-NEXT:    slli a1, a1, 4
; RV64I-NEXT:    srli a0, a0, 60
; RV64I-NEXT:    or a0, a0, a1
; RV64I-NEXT:    andi a1, a0, 51
; RV64I-NEXT:    srli a0, a0, 2
; RV64I-NEXT:    slli a1, a1, 2
; RV64I-NEXT:    andi a0, a0, 51
; RV64I-NEXT:    or a0, a0, a1
; RV64I-NEXT:    andi a1, a0, 85
; RV64I-NEXT:    srli a0, a0, 1
; RV64I-NEXT:    slli a1, a1, 1
; RV64I-NEXT:    andi a0, a0, 85
; RV64I-NEXT:    or a0, a0, a1
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: test_bitreverse_i8:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    andi a1, a0, 15
; RV32ZBB-NEXT:    slli a0, a0, 24
; RV32ZBB-NEXT:    slli a1, a1, 4
; RV32ZBB-NEXT:    srli a0, a0, 28
; RV32ZBB-NEXT:    or a0, a0, a1
; RV32ZBB-NEXT:    andi a1, a0, 51
; RV32ZBB-NEXT:    srli a0, a0, 2
; RV32ZBB-NEXT:    slli a1, a1, 2
; RV32ZBB-NEXT:    andi a0, a0, 51
; RV32ZBB-NEXT:    or a0, a0, a1
; RV32ZBB-NEXT:    andi a1, a0, 85
; RV32ZBB-NEXT:    srli a0, a0, 1
; RV32ZBB-NEXT:    slli a1, a1, 1
; RV32ZBB-NEXT:    andi a0, a0, 85
; RV32ZBB-NEXT:    or a0, a0, a1
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: test_bitreverse_i8:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    andi a1, a0, 15
; RV64ZBB-NEXT:    slli a0, a0, 56
; RV64ZBB-NEXT:    slli a1, a1, 4
; RV64ZBB-NEXT:    srli a0, a0, 60
; RV64ZBB-NEXT:    or a0, a0, a1
; RV64ZBB-NEXT:    andi a1, a0, 51
; RV64ZBB-NEXT:    srli a0, a0, 2
; RV64ZBB-NEXT:    slli a1, a1, 2
; RV64ZBB-NEXT:    andi a0, a0, 51
; RV64ZBB-NEXT:    or a0, a0, a1
; RV64ZBB-NEXT:    andi a1, a0, 85
; RV64ZBB-NEXT:    srli a0, a0, 1
; RV64ZBB-NEXT:    slli a1, a1, 1
; RV64ZBB-NEXT:    andi a0, a0, 85
; RV64ZBB-NEXT:    or a0, a0, a1
; RV64ZBB-NEXT:    ret
;
; RV32ZBKB-LABEL: test_bitreverse_i8:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    rev8 a0, a0
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    srli a0, a0, 24
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_i8:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    rev8 a0, a0
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    srli a0, a0, 56
; RV64ZBKB-NEXT:    ret
  %tmp = call i8 @llvm.bitreverse.i8(i8 %a)
  ret i8 %tmp
}

; i16 bitreverse: bswap-then-bit-swizzle expansion (Zbb uses rev8 for the byte
; swap, base ISA uses shifts); Zbkb lowers to rev8+brev8+srli.
define i16 @test_bitreverse_i16(i16 %a) nounwind {
; RV32I-LABEL: test_bitreverse_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a1, a0, 8
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    lui a2, 1
; RV32I-NEXT:    srli a0, a0, 24
; RV32I-NEXT:    addi a2, a2, -241
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 4
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    lui a2, 3
; RV32I-NEXT:    addi a2, a2, 819
; RV32I-NEXT:    slli a0, a0, 4
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    lui a2, 5
; RV32I-NEXT:    addi a2, a2, 1365
; RV32I-NEXT:    slli a0, a0, 2
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 1
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    slli a0, a0, 1
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bitreverse_i16:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a1, a0, 8
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    lui a2, 1
; RV64I-NEXT:    srli a0, a0, 56
; RV64I-NEXT:    addiw a2, a2, -241
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 4
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    lui a2, 3
; RV64I-NEXT:    addiw a2, a2, 819
; RV64I-NEXT:    slli a0, a0, 4
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    lui a2, 5
; RV64I-NEXT:    addiw a2, a2, 1365
; RV64I-NEXT:    slli a0, a0, 2
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 1
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    slli a0, a0, 1
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: test_bitreverse_i16:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    rev8 a0, a0
; RV32ZBB-NEXT:    lui a1, 15
; RV32ZBB-NEXT:    srli a2, a0, 12
; RV32ZBB-NEXT:    addi a1, a1, 240
; RV32ZBB-NEXT:    and a1, a2, a1
; RV32ZBB-NEXT:    lui a2, 3
; RV32ZBB-NEXT:    srli a0, a0, 20
; RV32ZBB-NEXT:    addi a2, a2, 819
; RV32ZBB-NEXT:    andi a0, a0, -241
; RV32ZBB-NEXT:    or a0, a0, a1
; RV32ZBB-NEXT:    srli a1, a0, 2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    lui a2, 5
; RV32ZBB-NEXT:    addi a2, a2, 1365
; RV32ZBB-NEXT:    slli a0, a0, 2
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 1
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    slli a0, a0, 1
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: test_bitreverse_i16:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    rev8 a0, a0
; RV64ZBB-NEXT:    lui a1, 15
; RV64ZBB-NEXT:    srli a2, a0, 44
; RV64ZBB-NEXT:    addiw a1, a1, 240
; RV64ZBB-NEXT:    and a1, a2, a1
; RV64ZBB-NEXT:    lui a2, 3
; RV64ZBB-NEXT:    srli a0, a0, 52
; RV64ZBB-NEXT:    addiw a2, a2, 819
; RV64ZBB-NEXT:    andi a0, a0, -241
; RV64ZBB-NEXT:    or a0, a0, a1
; RV64ZBB-NEXT:    srli a1, a0, 2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    lui a2, 5
; RV64ZBB-NEXT:    addiw a2, a2, 1365
; RV64ZBB-NEXT:    slli a0, a0, 2
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 1
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    slli a0, a0, 1
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    ret
;
; RV32ZBKB-LABEL: test_bitreverse_i16:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    rev8 a0, a0
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    srli a0, a0, 16
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_i16:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    rev8 a0, a0
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    srli a0, a0, 48
; RV64ZBKB-NEXT:    ret
  %tmp = call i16 @llvm.bitreverse.i16(i16 %a)
  ret i16 %tmp
}

; i32 bitreverse: full bswap + nibble/pair/bit swap expansion without Zbkb;
; Zbkb lowers to rev8+brev8 (plus srli 32 on RV64).
define i32 @test_bitreverse_i32(i32 %a) nounwind {
; RV32I-LABEL: test_bitreverse_i32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srli a1, a0, 8
; RV32I-NEXT:    lui a2, 16
; RV32I-NEXT:    srli a3, a0, 24
; RV32I-NEXT:    addi a2, a2, -256
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a2, a0, a2
; RV32I-NEXT:    slli a0, a0, 24
; RV32I-NEXT:    or a1, a1, a3
; RV32I-NEXT:    lui a3, 61681
; RV32I-NEXT:    slli a2, a2, 8
; RV32I-NEXT:    or a0, a0, a2
; RV32I-NEXT:    lui a2, 209715
; RV32I-NEXT:    addi a3, a3, -241
; RV32I-NEXT:    or a0, a0, a1
; RV32I-NEXT:    srli a1, a0, 4
; RV32I-NEXT:    and a0, a0, a3
; RV32I-NEXT:    and a1, a1, a3
; RV32I-NEXT:    lui a3, 349525
; RV32I-NEXT:    addi a2, a2, 819
; RV32I-NEXT:    addi a3, a3, 1365
; RV32I-NEXT:    slli a0, a0, 4
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    slli a0, a0, 2
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 1
; RV32I-NEXT:    and a0, a0, a3
; RV32I-NEXT:    and a1, a1, a3
; RV32I-NEXT:    slli a0, a0, 1
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bitreverse_i32:
; RV64I:       # %bb.0:
; RV64I-NEXT:    srli a1, a0, 8
; RV64I-NEXT:    lui a2, 16
; RV64I-NEXT:    srliw a3, a0, 24
; RV64I-NEXT:    addiw a2, a2, -256
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a2, a0, a2
; RV64I-NEXT:    slliw a0, a0, 24
; RV64I-NEXT:    or a1, a1, a3
; RV64I-NEXT:    lui a3, 61681
; RV64I-NEXT:    slli a2, a2, 8
; RV64I-NEXT:    or a0, a0, a2
; RV64I-NEXT:    lui a2, 209715
; RV64I-NEXT:    addiw a3, a3, -241
; RV64I-NEXT:    or a0, a0, a1
; RV64I-NEXT:    srli a1, a0, 4
; RV64I-NEXT:    and a0, a0, a3
; RV64I-NEXT:    and a1, a1, a3
; RV64I-NEXT:    lui a3, 349525
; RV64I-NEXT:    addiw a2, a2, 819
; RV64I-NEXT:    addiw a3, a3, 1365
; RV64I-NEXT:    slliw a0, a0, 4
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    slliw a0, a0, 2
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 1
; RV64I-NEXT:    and a0, a0, a3
; RV64I-NEXT:    and a1, a1, a3
; RV64I-NEXT:    slliw a0, a0, 1
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: test_bitreverse_i32:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    rev8 a0, a0
; RV32ZBB-NEXT:    lui a1, 61681
; RV32ZBB-NEXT:    srli a2, a0, 4
; RV32ZBB-NEXT:    addi a1, a1, -241
; RV32ZBB-NEXT:    and a2, a2, a1
; RV32ZBB-NEXT:    and a0, a0, a1
; RV32ZBB-NEXT:    lui a1, 209715
; RV32ZBB-NEXT:    addi a1, a1, 819
; RV32ZBB-NEXT:    slli a0, a0, 4
; RV32ZBB-NEXT:    or a0, a2, a0
; RV32ZBB-NEXT:    srli a2, a0, 2
; RV32ZBB-NEXT:    and a0, a0, a1
; RV32ZBB-NEXT:    and a1, a2, a1
; RV32ZBB-NEXT:    lui a2, 349525
; RV32ZBB-NEXT:    addi a2, a2, 1365
; RV32ZBB-NEXT:    slli a0, a0, 2
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 1
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    slli a0, a0, 1
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: test_bitreverse_i32:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    rev8 a0, a0
; RV64ZBB-NEXT:    lui a1, 61681
; RV64ZBB-NEXT:    srli a2, a0, 36
; RV64ZBB-NEXT:    addiw a1, a1, -241
; RV64ZBB-NEXT:    and a1, a2, a1
; RV64ZBB-NEXT:    lui a2, 986895
; RV64ZBB-NEXT:    srli a0, a0, 28
; RV64ZBB-NEXT:    addi a2, a2, 240
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    lui a2, 209715
; RV64ZBB-NEXT:    addiw a2, a2, 819
; RV64ZBB-NEXT:    sext.w a0, a0
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    lui a2, 349525
; RV64ZBB-NEXT:    addiw a2, a2, 1365
; RV64ZBB-NEXT:    slliw a0, a0, 2
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 1
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    slliw a0, a0, 1
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    ret
;
; RV32ZBKB-LABEL: test_bitreverse_i32:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    rev8 a0, a0
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_i32:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    rev8 a0, a0
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    srli a0, a0, 32
; RV64ZBKB-NEXT:    ret
  %tmp = call i32 @llvm.bitreverse.i32(i32 %a)
  ret i32 %tmp
}

; i64 bitreverse: on RV32 the two halves are processed and swapped; on RV64 the
; Zbb path builds 64-bit swizzle masks via slli/add; Zbkb is rev8+brev8.
define i64 @test_bitreverse_i64(i64 %a) nounwind {
; RV32I-LABEL: test_bitreverse_i64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srli a2, a1, 8
; RV32I-NEXT:    lui a3, 16
; RV32I-NEXT:    srli a4, a1, 24
; RV32I-NEXT:    slli a5, a1, 24
; RV32I-NEXT:    lui a6, 61681
; RV32I-NEXT:    srli a7, a0, 8
; RV32I-NEXT:    addi a3, a3, -256
; RV32I-NEXT:    and a2, a2, a3
; RV32I-NEXT:    or a2, a2, a4
; RV32I-NEXT:    srli a4, a0, 24
; RV32I-NEXT:    and a7, a7, a3
; RV32I-NEXT:    or a4, a7, a4
; RV32I-NEXT:    lui a7, 209715
; RV32I-NEXT:    and a1, a1, a3
; RV32I-NEXT:    slli a1, a1, 8
; RV32I-NEXT:    or a1, a5, a1
; RV32I-NEXT:    lui a5, 349525
; RV32I-NEXT:    and a3, a0, a3
; RV32I-NEXT:    slli a0, a0, 24
; RV32I-NEXT:    addi a6, a6, -241
; RV32I-NEXT:    addi a7, a7, 819
; RV32I-NEXT:    addi a5, a5, 1365
; RV32I-NEXT:    slli a3, a3, 8
; RV32I-NEXT:    or a0, a0, a3
; RV32I-NEXT:    or a1, a1, a2
; RV32I-NEXT:    or a0, a0, a4
; RV32I-NEXT:    srli a2, a1, 4
; RV32I-NEXT:    and a1, a1, a6
; RV32I-NEXT:    srli a3, a0, 4
; RV32I-NEXT:    and a0, a0, a6
; RV32I-NEXT:    and a2, a2, a6
; RV32I-NEXT:    slli a1, a1, 4
; RV32I-NEXT:    and a3, a3, a6
; RV32I-NEXT:    slli a0, a0, 4
; RV32I-NEXT:    or a1, a2, a1
; RV32I-NEXT:    or a0, a3, a0
; RV32I-NEXT:    srli a2, a1, 2
; RV32I-NEXT:    and a1, a1, a7
; RV32I-NEXT:    srli a3, a0, 2
; RV32I-NEXT:    and a0, a0, a7
; RV32I-NEXT:    and a2, a2, a7
; RV32I-NEXT:    slli a1, a1, 2
; RV32I-NEXT:    and a3, a3, a7
; RV32I-NEXT:    slli a0, a0, 2
; RV32I-NEXT:    or a1, a2, a1
; RV32I-NEXT:    or a0, a3, a0
; RV32I-NEXT:    srli a2, a1, 1
; RV32I-NEXT:    and a1, a1, a5
; RV32I-NEXT:    srli a3, a0, 1
; RV32I-NEXT:    and a0, a0, a5
; RV32I-NEXT:    and a2, a2, a5
; RV32I-NEXT:    slli a1, a1, 1
; RV32I-NEXT:    and a3, a3, a5
; RV32I-NEXT:    slli a4, a0, 1
; RV32I-NEXT:    or a0, a2, a1
; RV32I-NEXT:    or a1, a3, a4
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bitreverse_i64:
; RV64I:       # %bb.0:
; RV64I-NEXT:    srli a1, a0, 40
; RV64I-NEXT:    lui a2, 16
; RV64I-NEXT:    srli a3, a0, 56
; RV64I-NEXT:    srli a4, a0, 24
; RV64I-NEXT:    lui a5, 4080
; RV64I-NEXT:    srli a6, a0, 8
; RV64I-NEXT:    srliw a7, a0, 24
; RV64I-NEXT:    lui t0, 61681
; RV64I-NEXT:    addiw a2, a2, -256
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    or a1, a1, a3
; RV64I-NEXT:    lui a3, 209715
; RV64I-NEXT:    and a4, a4, a5
; RV64I-NEXT:    srliw a6, a6, 24
; RV64I-NEXT:    slli a6, a6, 24
; RV64I-NEXT:    or a4, a6, a4
; RV64I-NEXT:    lui a6, 349525
; RV64I-NEXT:    and a5, a0, a5
; RV64I-NEXT:    slli a7, a7, 32
; RV64I-NEXT:    addiw t0, t0, -241
; RV64I-NEXT:    addiw a3, a3, 819
; RV64I-NEXT:    addiw a6, a6, 1365
; RV64I-NEXT:    slli a5, a5, 24
; RV64I-NEXT:    or a5, a5, a7
; RV64I-NEXT:    slli a7, t0, 32
; RV64I-NEXT:    add a7, t0, a7
; RV64I-NEXT:    slli t0, a3, 32
; RV64I-NEXT:    add a3, a3, t0
; RV64I-NEXT:    slli t0, a6, 32
; RV64I-NEXT:    add a6, a6, t0
; RV64I-NEXT:    or a1, a4, a1
; RV64I-NEXT:    and a2, a0, a2
; RV64I-NEXT:    slli a0, a0, 56
; RV64I-NEXT:    slli a2, a2, 40
; RV64I-NEXT:    or a0, a0, a2
; RV64I-NEXT:    or a0, a0, a5
; RV64I-NEXT:    or a0, a0, a1
; RV64I-NEXT:    srli a1, a0, 4
; RV64I-NEXT:    and a0, a0, a7
; RV64I-NEXT:    and a1, a1, a7
; RV64I-NEXT:    slli a0, a0, 4
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 2
; RV64I-NEXT:    and a0, a0, a3
; RV64I-NEXT:    and a1, a1, a3
; RV64I-NEXT:    slli a0, a0, 2
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 1
; RV64I-NEXT:    and a0, a0, a6
; RV64I-NEXT:    and a1, a1, a6
; RV64I-NEXT:    slli a0, a0, 1
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: test_bitreverse_i64:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    rev8 a1, a1
; RV32ZBB-NEXT:    lui a2, 61681
; RV32ZBB-NEXT:    lui a3, 209715
; RV32ZBB-NEXT:    rev8 a0, a0
; RV32ZBB-NEXT:    srli a4, a1, 4
; RV32ZBB-NEXT:    addi a2, a2, -241
; RV32ZBB-NEXT:    srli a5, a0, 4
; RV32ZBB-NEXT:    and a4, a4, a2
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a5, a5, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    lui a2, 349525
; RV32ZBB-NEXT:    addi a3, a3, 819
; RV32ZBB-NEXT:    addi a2, a2, 1365
; RV32ZBB-NEXT:    slli a1, a1, 4
; RV32ZBB-NEXT:    slli a0, a0, 4
; RV32ZBB-NEXT:    or a1, a4, a1
; RV32ZBB-NEXT:    or a0, a5, a0
; RV32ZBB-NEXT:    srli a4, a1, 2
; RV32ZBB-NEXT:    and a1, a1, a3
; RV32ZBB-NEXT:    srli a5, a0, 2
; RV32ZBB-NEXT:    and a0, a0, a3
; RV32ZBB-NEXT:    and a4, a4, a3
; RV32ZBB-NEXT:    slli a1, a1, 2
; RV32ZBB-NEXT:    and a3, a5, a3
; RV32ZBB-NEXT:    slli a0, a0, 2
; RV32ZBB-NEXT:    or a1, a4, a1
; RV32ZBB-NEXT:    or a0, a3, a0
; RV32ZBB-NEXT:    srli a3, a1, 1
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    srli a4, a0, 1
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    and a3, a3, a2
; RV32ZBB-NEXT:    slli a1, a1, 1
; RV32ZBB-NEXT:    and a2, a4, a2
; RV32ZBB-NEXT:    slli a4, a0, 1
; RV32ZBB-NEXT:    or a0, a3, a1
; RV32ZBB-NEXT:    or a1, a2, a4
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: test_bitreverse_i64:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    rev8 a0, a0
; RV64ZBB-NEXT:    lui a1, 61681
; RV64ZBB-NEXT:    lui a2, 209715
; RV64ZBB-NEXT:    lui a3, 349525
; RV64ZBB-NEXT:    addiw a1, a1, -241
; RV64ZBB-NEXT:    addiw a2, a2, 819
; RV64ZBB-NEXT:    addiw a3, a3, 1365
; RV64ZBB-NEXT:    slli a4, a1, 32
; RV64ZBB-NEXT:    add a1, a1, a4
; RV64ZBB-NEXT:    slli a4, a2, 32
; RV64ZBB-NEXT:    add a2, a2, a4
; RV64ZBB-NEXT:    slli a4, a3, 32
; RV64ZBB-NEXT:    add a3, a3, a4
; RV64ZBB-NEXT:    srli a4, a0, 4
; RV64ZBB-NEXT:    and a4, a4, a1
; RV64ZBB-NEXT:    and a0, a0, a1
; RV64ZBB-NEXT:    slli a0, a0, 4
; RV64ZBB-NEXT:    or a0, a4, a0
; RV64ZBB-NEXT:    srli a1, a0, 2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    slli a0, a0, 2
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 1
; RV64ZBB-NEXT:    and a0, a0, a3
; RV64ZBB-NEXT:    and a1, a1, a3
; RV64ZBB-NEXT:    slli a0, a0, 1
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    ret
;
; RV32ZBKB-LABEL: test_bitreverse_i64:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    rev8 a1, a1
; RV32ZBKB-NEXT:    rev8 a2, a0
; RV32ZBKB-NEXT:    brev8 a0, a1
; RV32ZBKB-NEXT:    brev8 a1, a2
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_i64:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    rev8 a0, a0
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    ret
  %tmp = call i64 @llvm.bitreverse.i64(i64 %a)
  ret i64 %tmp
}

; bswap(x) then bitreverse(x): the byte swaps cancel, leaving a per-byte bit
; reverse — a single brev8 with Zbkb, bit-swizzle expansion otherwise.
define i16 @test_bswap_bitreverse_i16(i16 %a) nounwind {
; RV32I-LABEL: test_bswap_bitreverse_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srli a1, a0, 4
; RV32I-NEXT:    lui a2, 1
; RV32I-NEXT:    addi a2, a2, -241
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    lui a2, 3
; RV32I-NEXT:    addi a2, a2, 819
; RV32I-NEXT:    slli a0, a0, 4
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    lui a2, 5
; RV32I-NEXT:    addi a2, a2, 1365
; RV32I-NEXT:    slli a0, a0, 2
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 1
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    slli a0, a0, 1
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bswap_bitreverse_i16:
; RV64I:       # %bb.0:
; RV64I-NEXT:    srli a1, a0, 4
; RV64I-NEXT:    lui a2, 1
; RV64I-NEXT:    addiw a2, a2, -241
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    lui a2, 3
; RV64I-NEXT:    addiw a2, a2, 819
; RV64I-NEXT:    slli a0, a0, 4
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    lui a2, 5
; RV64I-NEXT:    addiw a2, a2, 1365
; RV64I-NEXT:    slli a0, a0, 2
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 1
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    slli a0, a0, 1
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: test_bswap_bitreverse_i16:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    srli a1, a0, 4
; RV32ZBB-NEXT:    lui a2, 1
; RV32ZBB-NEXT:    addi a2, a2, -241
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    lui a2, 3
; RV32ZBB-NEXT:    addi a2, a2, 819
; RV32ZBB-NEXT:    slli a0, a0, 4
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    lui a2, 5
; RV32ZBB-NEXT:    addi a2, a2, 1365
; RV32ZBB-NEXT:    slli a0, a0, 2
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 1
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    slli a0, a0, 1
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: test_bswap_bitreverse_i16:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    srli a1, a0, 4
; RV64ZBB-NEXT:    lui a2, 1
; RV64ZBB-NEXT:    addiw a2, a2, -241
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    lui a2, 3
; RV64ZBB-NEXT:    addiw a2, a2, 819
; RV64ZBB-NEXT:    slli a0, a0, 4
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    lui a2, 5
; RV64ZBB-NEXT:    addiw a2, a2, 1365
; RV64ZBB-NEXT:    slli a0, a0, 2
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 1
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    slli a0, a0, 1
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    ret
;
; RV32ZBKB-LABEL: test_bswap_bitreverse_i16:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bswap_bitreverse_i16:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    ret
  %tmp = call i16 @llvm.bswap.i16(i16 %a)
  %tmp2 = call i16 @llvm.bitreverse.i16(i16 %tmp)
  ret i16 %tmp2
}

; bswap(x) then bitreverse(x) on i32: byte swaps cancel, so the lowering is a
; per-byte bit reverse — one brev8 with Zbkb, swizzle expansion otherwise.
define i32 @test_bswap_bitreverse_i32(i32 %a) nounwind {
; RV32I-LABEL: test_bswap_bitreverse_i32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srli a1, a0, 4
; RV32I-NEXT:    lui a2, 61681
; RV32I-NEXT:    addi a2, a2, -241
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    lui a2, 209715
; RV32I-NEXT:    addi a2, a2, 819
; RV32I-NEXT:    slli a0, a0, 4
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    lui a2, 349525
; RV32I-NEXT:    addi a2, a2, 1365
; RV32I-NEXT:    slli a0, a0, 2
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 1
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    slli a0, a0, 1
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bswap_bitreverse_i32:
; RV64I:       # %bb.0:
; RV64I-NEXT:    srli a1, a0, 4
; RV64I-NEXT:    lui a2, 61681
; RV64I-NEXT:    addiw a2, a2, -241
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    lui a2, 209715
; RV64I-NEXT:    addiw a2, a2, 819
; RV64I-NEXT:    slliw a0, a0, 4
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    lui a2, 349525
; RV64I-NEXT:    addiw a2, a2, 1365
; RV64I-NEXT:    slliw a0, a0, 2
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 1
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    slliw a0, a0, 1
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: test_bswap_bitreverse_i32:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    srli a1, a0, 4
; RV32ZBB-NEXT:    lui a2, 61681
; RV32ZBB-NEXT:    addi a2, a2, -241
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    lui a2, 209715
; RV32ZBB-NEXT:    addi a2, a2, 819
; RV32ZBB-NEXT:    slli a0, a0, 4
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    lui a2, 349525
; RV32ZBB-NEXT:    addi a2, a2, 1365
; RV32ZBB-NEXT:    slli a0, a0, 2
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 1
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    slli a0, a0, 1
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: test_bswap_bitreverse_i32:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    srli a1, a0, 4
; RV64ZBB-NEXT:    lui a2, 61681
; RV64ZBB-NEXT:    addiw a2, a2, -241
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    lui a2, 209715
; RV64ZBB-NEXT:    addiw a2, a2, 819
; RV64ZBB-NEXT:    slliw a0, a0, 4
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    lui a2, 349525
; RV64ZBB-NEXT:    addiw a2, a2, 1365
; RV64ZBB-NEXT:    slliw a0, a0, 2
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 1
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    slliw a0, a0, 1
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    ret
;
; RV32ZBKB-LABEL: test_bswap_bitreverse_i32:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bswap_bitreverse_i32:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    ret
  %tmp = call i32 @llvm.bswap.i32(i32 %a)
  %tmp2 = call i32 @llvm.bitreverse.i32(i32 %tmp)
  ret i32 %tmp2
}

973define i64 @test_bswap_bitreverse_i64(i64 %a) nounwind {
974; RV32I-LABEL: test_bswap_bitreverse_i64:
975; RV32I:       # %bb.0:
976; RV32I-NEXT:    srli a2, a0, 4
977; RV32I-NEXT:    lui a3, 61681
978; RV32I-NEXT:    lui a4, 209715
979; RV32I-NEXT:    srli a5, a1, 4
980; RV32I-NEXT:    addi a3, a3, -241
981; RV32I-NEXT:    and a2, a2, a3
982; RV32I-NEXT:    and a0, a0, a3
983; RV32I-NEXT:    and a5, a5, a3
984; RV32I-NEXT:    and a1, a1, a3
985; RV32I-NEXT:    lui a3, 349525
986; RV32I-NEXT:    addi a4, a4, 819
987; RV32I-NEXT:    addi a3, a3, 1365
988; RV32I-NEXT:    slli a0, a0, 4
989; RV32I-NEXT:    slli a1, a1, 4
990; RV32I-NEXT:    or a0, a2, a0
991; RV32I-NEXT:    or a1, a5, a1
992; RV32I-NEXT:    srli a2, a0, 2
993; RV32I-NEXT:    and a0, a0, a4
994; RV32I-NEXT:    srli a5, a1, 2
995; RV32I-NEXT:    and a1, a1, a4
996; RV32I-NEXT:    and a2, a2, a4
997; RV32I-NEXT:    slli a0, a0, 2
998; RV32I-NEXT:    and a4, a5, a4
999; RV32I-NEXT:    slli a1, a1, 2
1000; RV32I-NEXT:    or a0, a2, a0
1001; RV32I-NEXT:    or a1, a4, a1
1002; RV32I-NEXT:    srli a2, a0, 1
1003; RV32I-NEXT:    and a0, a0, a3
1004; RV32I-NEXT:    srli a4, a1, 1
1005; RV32I-NEXT:    and a1, a1, a3
1006; RV32I-NEXT:    and a2, a2, a3
1007; RV32I-NEXT:    slli a0, a0, 1
1008; RV32I-NEXT:    and a3, a4, a3
1009; RV32I-NEXT:    slli a1, a1, 1
1010; RV32I-NEXT:    or a0, a2, a0
1011; RV32I-NEXT:    or a1, a3, a1
1012; RV32I-NEXT:    ret
1013;
1014; RV64I-LABEL: test_bswap_bitreverse_i64:
1015; RV64I:       # %bb.0:
1016; RV64I-NEXT:    lui a1, 61681
1017; RV64I-NEXT:    lui a2, 209715
1018; RV64I-NEXT:    lui a3, 349525
1019; RV64I-NEXT:    addiw a1, a1, -241
1020; RV64I-NEXT:    addiw a2, a2, 819
1021; RV64I-NEXT:    addiw a3, a3, 1365
1022; RV64I-NEXT:    slli a4, a1, 32
1023; RV64I-NEXT:    add a1, a1, a4
1024; RV64I-NEXT:    slli a4, a2, 32
1025; RV64I-NEXT:    add a2, a2, a4
1026; RV64I-NEXT:    slli a4, a3, 32
1027; RV64I-NEXT:    add a3, a3, a4
1028; RV64I-NEXT:    srli a4, a0, 4
1029; RV64I-NEXT:    and a4, a4, a1
1030; RV64I-NEXT:    and a0, a0, a1
1031; RV64I-NEXT:    slli a0, a0, 4
1032; RV64I-NEXT:    or a0, a4, a0
1033; RV64I-NEXT:    srli a1, a0, 2
1034; RV64I-NEXT:    and a0, a0, a2
1035; RV64I-NEXT:    and a1, a1, a2
1036; RV64I-NEXT:    slli a0, a0, 2
1037; RV64I-NEXT:    or a0, a1, a0
1038; RV64I-NEXT:    srli a1, a0, 1
1039; RV64I-NEXT:    and a0, a0, a3
1040; RV64I-NEXT:    and a1, a1, a3
1041; RV64I-NEXT:    slli a0, a0, 1
1042; RV64I-NEXT:    or a0, a1, a0
1043; RV64I-NEXT:    ret
1044;
1045; RV32ZBB-LABEL: test_bswap_bitreverse_i64:
1046; RV32ZBB:       # %bb.0:
1047; RV32ZBB-NEXT:    srli a2, a0, 4
1048; RV32ZBB-NEXT:    lui a3, 61681
1049; RV32ZBB-NEXT:    lui a4, 209715
1050; RV32ZBB-NEXT:    srli a5, a1, 4
1051; RV32ZBB-NEXT:    addi a3, a3, -241
1052; RV32ZBB-NEXT:    and a2, a2, a3
1053; RV32ZBB-NEXT:    and a0, a0, a3
1054; RV32ZBB-NEXT:    and a5, a5, a3
1055; RV32ZBB-NEXT:    and a1, a1, a3
1056; RV32ZBB-NEXT:    lui a3, 349525
1057; RV32ZBB-NEXT:    addi a4, a4, 819
1058; RV32ZBB-NEXT:    addi a3, a3, 1365
1059; RV32ZBB-NEXT:    slli a0, a0, 4
1060; RV32ZBB-NEXT:    slli a1, a1, 4
1061; RV32ZBB-NEXT:    or a0, a2, a0
1062; RV32ZBB-NEXT:    or a1, a5, a1
1063; RV32ZBB-NEXT:    srli a2, a0, 2
1064; RV32ZBB-NEXT:    and a0, a0, a4
1065; RV32ZBB-NEXT:    srli a5, a1, 2
1066; RV32ZBB-NEXT:    and a1, a1, a4
1067; RV32ZBB-NEXT:    and a2, a2, a4
1068; RV32ZBB-NEXT:    slli a0, a0, 2
1069; RV32ZBB-NEXT:    and a4, a5, a4
1070; RV32ZBB-NEXT:    slli a1, a1, 2
1071; RV32ZBB-NEXT:    or a0, a2, a0
1072; RV32ZBB-NEXT:    or a1, a4, a1
1073; RV32ZBB-NEXT:    srli a2, a0, 1
1074; RV32ZBB-NEXT:    and a0, a0, a3
1075; RV32ZBB-NEXT:    srli a4, a1, 1
1076; RV32ZBB-NEXT:    and a1, a1, a3
1077; RV32ZBB-NEXT:    and a2, a2, a3
1078; RV32ZBB-NEXT:    slli a0, a0, 1
1079; RV32ZBB-NEXT:    and a3, a4, a3
1080; RV32ZBB-NEXT:    slli a1, a1, 1
1081; RV32ZBB-NEXT:    or a0, a2, a0
1082; RV32ZBB-NEXT:    or a1, a3, a1
1083; RV32ZBB-NEXT:    ret
1084;
1085; RV64ZBB-LABEL: test_bswap_bitreverse_i64:
1086; RV64ZBB:       # %bb.0:
1087; RV64ZBB-NEXT:    lui a1, 61681
1088; RV64ZBB-NEXT:    lui a2, 209715
1089; RV64ZBB-NEXT:    lui a3, 349525
1090; RV64ZBB-NEXT:    addiw a1, a1, -241
1091; RV64ZBB-NEXT:    addiw a2, a2, 819
1092; RV64ZBB-NEXT:    addiw a3, a3, 1365
1093; RV64ZBB-NEXT:    slli a4, a1, 32
1094; RV64ZBB-NEXT:    add a1, a1, a4
1095; RV64ZBB-NEXT:    slli a4, a2, 32
1096; RV64ZBB-NEXT:    add a2, a2, a4
1097; RV64ZBB-NEXT:    slli a4, a3, 32
1098; RV64ZBB-NEXT:    add a3, a3, a4
1099; RV64ZBB-NEXT:    srli a4, a0, 4
1100; RV64ZBB-NEXT:    and a4, a4, a1
1101; RV64ZBB-NEXT:    and a0, a0, a1
1102; RV64ZBB-NEXT:    slli a0, a0, 4
1103; RV64ZBB-NEXT:    or a0, a4, a0
1104; RV64ZBB-NEXT:    srli a1, a0, 2
1105; RV64ZBB-NEXT:    and a0, a0, a2
1106; RV64ZBB-NEXT:    and a1, a1, a2
1107; RV64ZBB-NEXT:    slli a0, a0, 2
1108; RV64ZBB-NEXT:    or a0, a1, a0
1109; RV64ZBB-NEXT:    srli a1, a0, 1
1110; RV64ZBB-NEXT:    and a0, a0, a3
1111; RV64ZBB-NEXT:    and a1, a1, a3
1112; RV64ZBB-NEXT:    slli a0, a0, 1
1113; RV64ZBB-NEXT:    or a0, a1, a0
1114; RV64ZBB-NEXT:    ret
1115;
1116; RV32ZBKB-LABEL: test_bswap_bitreverse_i64:
1117; RV32ZBKB:       # %bb.0:
1118; RV32ZBKB-NEXT:    brev8 a0, a0
1119; RV32ZBKB-NEXT:    brev8 a1, a1
1120; RV32ZBKB-NEXT:    ret
1121;
1122; RV64ZBKB-LABEL: test_bswap_bitreverse_i64:
1123; RV64ZBKB:       # %bb.0:
1124; RV64ZBKB-NEXT:    brev8 a0, a0
1125; RV64ZBKB-NEXT:    ret
1126  %tmp = call i64 @llvm.bswap.i64(i64 %a)
1127  %tmp2 = call i64 @llvm.bitreverse.i64(i64 %tmp)
1128  ret i64 %tmp2
1129}
1130
; bitreverse followed by bswap on i16: the byte swap cancels out of the full
; bit reversal, leaving a per-byte bit reversal. Note the masks here are only
; 16 bits wide (lui 1/3/5 -> 0xF0F, 0x3333, 0x5555) since the upper bits are
; don't-care. Zbkb reduces the whole sequence to a single brev8.
define i16 @test_bitreverse_bswap_i16(i16 %a) nounwind {
; RV32I-LABEL: test_bitreverse_bswap_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srli a1, a0, 4
; RV32I-NEXT:    lui a2, 1
; RV32I-NEXT:    addi a2, a2, -241
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    lui a2, 3
; RV32I-NEXT:    addi a2, a2, 819
; RV32I-NEXT:    slli a0, a0, 4
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    lui a2, 5
; RV32I-NEXT:    addi a2, a2, 1365
; RV32I-NEXT:    slli a0, a0, 2
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 1
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    slli a0, a0, 1
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bitreverse_bswap_i16:
; RV64I:       # %bb.0:
; RV64I-NEXT:    srli a1, a0, 4
; RV64I-NEXT:    lui a2, 1
; RV64I-NEXT:    addiw a2, a2, -241
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    lui a2, 3
; RV64I-NEXT:    addiw a2, a2, 819
; RV64I-NEXT:    slli a0, a0, 4
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    lui a2, 5
; RV64I-NEXT:    addiw a2, a2, 1365
; RV64I-NEXT:    slli a0, a0, 2
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 1
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    slli a0, a0, 1
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: test_bitreverse_bswap_i16:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    srli a1, a0, 4
; RV32ZBB-NEXT:    lui a2, 1
; RV32ZBB-NEXT:    addi a2, a2, -241
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    lui a2, 3
; RV32ZBB-NEXT:    addi a2, a2, 819
; RV32ZBB-NEXT:    slli a0, a0, 4
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    lui a2, 5
; RV32ZBB-NEXT:    addi a2, a2, 1365
; RV32ZBB-NEXT:    slli a0, a0, 2
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 1
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    slli a0, a0, 1
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: test_bitreverse_bswap_i16:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    srli a1, a0, 4
; RV64ZBB-NEXT:    lui a2, 1
; RV64ZBB-NEXT:    addiw a2, a2, -241
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    lui a2, 3
; RV64ZBB-NEXT:    addiw a2, a2, 819
; RV64ZBB-NEXT:    slli a0, a0, 4
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    lui a2, 5
; RV64ZBB-NEXT:    addiw a2, a2, 1365
; RV64ZBB-NEXT:    slli a0, a0, 2
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 1
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    slli a0, a0, 1
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    ret
;
; RV32ZBKB-LABEL: test_bitreverse_bswap_i16:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_bswap_i16:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    ret
  %tmp = call i16 @llvm.bitreverse.i16(i16 %a)
  %tmp2 = call i16 @llvm.bswap.i16(i16 %tmp)
  ret i16 %tmp2
}
1245
; bitreverse followed by bswap on i32: byte swap cancels, leaving per-byte bit
; reversal via the 0x0F0F0F0F / 0x33333333 / 0x55555555 SWAR masks. RV64 uses
; slliw for the shifts to keep the value sign-extended in the 64-bit register.
; Zbkb folds the whole sequence to a single brev8.
define i32 @test_bitreverse_bswap_i32(i32 %a) nounwind {
; RV32I-LABEL: test_bitreverse_bswap_i32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srli a1, a0, 4
; RV32I-NEXT:    lui a2, 61681
; RV32I-NEXT:    addi a2, a2, -241
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    lui a2, 209715
; RV32I-NEXT:    addi a2, a2, 819
; RV32I-NEXT:    slli a0, a0, 4
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    lui a2, 349525
; RV32I-NEXT:    addi a2, a2, 1365
; RV32I-NEXT:    slli a0, a0, 2
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 1
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    slli a0, a0, 1
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bitreverse_bswap_i32:
; RV64I:       # %bb.0:
; RV64I-NEXT:    srli a1, a0, 4
; RV64I-NEXT:    lui a2, 61681
; RV64I-NEXT:    addiw a2, a2, -241
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    lui a2, 209715
; RV64I-NEXT:    addiw a2, a2, 819
; RV64I-NEXT:    slliw a0, a0, 4
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    lui a2, 349525
; RV64I-NEXT:    addiw a2, a2, 1365
; RV64I-NEXT:    slliw a0, a0, 2
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 1
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    slliw a0, a0, 1
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: test_bitreverse_bswap_i32:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    srli a1, a0, 4
; RV32ZBB-NEXT:    lui a2, 61681
; RV32ZBB-NEXT:    addi a2, a2, -241
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    lui a2, 209715
; RV32ZBB-NEXT:    addi a2, a2, 819
; RV32ZBB-NEXT:    slli a0, a0, 4
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    lui a2, 349525
; RV32ZBB-NEXT:    addi a2, a2, 1365
; RV32ZBB-NEXT:    slli a0, a0, 2
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 1
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    slli a0, a0, 1
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: test_bitreverse_bswap_i32:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    srli a1, a0, 4
; RV64ZBB-NEXT:    lui a2, 61681
; RV64ZBB-NEXT:    addiw a2, a2, -241
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    lui a2, 209715
; RV64ZBB-NEXT:    addiw a2, a2, 819
; RV64ZBB-NEXT:    slliw a0, a0, 4
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    lui a2, 349525
; RV64ZBB-NEXT:    addiw a2, a2, 1365
; RV64ZBB-NEXT:    slliw a0, a0, 2
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 1
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    slliw a0, a0, 1
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    ret
;
; RV32ZBKB-LABEL: test_bitreverse_bswap_i32:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_bswap_i32:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    ret
  %tmp = call i32 @llvm.bitreverse.i32(i32 %a)
  %tmp2 = call i32 @llvm.bswap.i32(i32 %tmp)
  ret i32 %tmp2
}
1360
; bitreverse followed by bswap on i64 — mirror image of
; test_bswap_bitreverse_i64 above, and expected to produce identical code:
; the intrinsics commute, so both orders reduce to a per-byte bit reversal.
; Zbkb uses brev8 (twice on RV32, once per 32-bit half).
define i64 @test_bitreverse_bswap_i64(i64 %a) nounwind {
; RV32I-LABEL: test_bitreverse_bswap_i64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srli a2, a0, 4
; RV32I-NEXT:    lui a3, 61681
; RV32I-NEXT:    lui a4, 209715
; RV32I-NEXT:    srli a5, a1, 4
; RV32I-NEXT:    addi a3, a3, -241
; RV32I-NEXT:    and a2, a2, a3
; RV32I-NEXT:    and a0, a0, a3
; RV32I-NEXT:    and a5, a5, a3
; RV32I-NEXT:    and a1, a1, a3
; RV32I-NEXT:    lui a3, 349525
; RV32I-NEXT:    addi a4, a4, 819
; RV32I-NEXT:    addi a3, a3, 1365
; RV32I-NEXT:    slli a0, a0, 4
; RV32I-NEXT:    slli a1, a1, 4
; RV32I-NEXT:    or a0, a2, a0
; RV32I-NEXT:    or a1, a5, a1
; RV32I-NEXT:    srli a2, a0, 2
; RV32I-NEXT:    and a0, a0, a4
; RV32I-NEXT:    srli a5, a1, 2
; RV32I-NEXT:    and a1, a1, a4
; RV32I-NEXT:    and a2, a2, a4
; RV32I-NEXT:    slli a0, a0, 2
; RV32I-NEXT:    and a4, a5, a4
; RV32I-NEXT:    slli a1, a1, 2
; RV32I-NEXT:    or a0, a2, a0
; RV32I-NEXT:    or a1, a4, a1
; RV32I-NEXT:    srli a2, a0, 1
; RV32I-NEXT:    and a0, a0, a3
; RV32I-NEXT:    srli a4, a1, 1
; RV32I-NEXT:    and a1, a1, a3
; RV32I-NEXT:    and a2, a2, a3
; RV32I-NEXT:    slli a0, a0, 1
; RV32I-NEXT:    and a3, a4, a3
; RV32I-NEXT:    slli a1, a1, 1
; RV32I-NEXT:    or a0, a2, a0
; RV32I-NEXT:    or a1, a3, a1
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bitreverse_bswap_i64:
; RV64I:       # %bb.0:
; RV64I-NEXT:    lui a1, 61681
; RV64I-NEXT:    lui a2, 209715
; RV64I-NEXT:    lui a3, 349525
; RV64I-NEXT:    addiw a1, a1, -241
; RV64I-NEXT:    addiw a2, a2, 819
; RV64I-NEXT:    addiw a3, a3, 1365
; RV64I-NEXT:    slli a4, a1, 32
; RV64I-NEXT:    add a1, a1, a4
; RV64I-NEXT:    slli a4, a2, 32
; RV64I-NEXT:    add a2, a2, a4
; RV64I-NEXT:    slli a4, a3, 32
; RV64I-NEXT:    add a3, a3, a4
; RV64I-NEXT:    srli a4, a0, 4
; RV64I-NEXT:    and a4, a4, a1
; RV64I-NEXT:    and a0, a0, a1
; RV64I-NEXT:    slli a0, a0, 4
; RV64I-NEXT:    or a0, a4, a0
; RV64I-NEXT:    srli a1, a0, 2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    slli a0, a0, 2
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 1
; RV64I-NEXT:    and a0, a0, a3
; RV64I-NEXT:    and a1, a1, a3
; RV64I-NEXT:    slli a0, a0, 1
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: test_bitreverse_bswap_i64:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    srli a2, a0, 4
; RV32ZBB-NEXT:    lui a3, 61681
; RV32ZBB-NEXT:    lui a4, 209715
; RV32ZBB-NEXT:    srli a5, a1, 4
; RV32ZBB-NEXT:    addi a3, a3, -241
; RV32ZBB-NEXT:    and a2, a2, a3
; RV32ZBB-NEXT:    and a0, a0, a3
; RV32ZBB-NEXT:    and a5, a5, a3
; RV32ZBB-NEXT:    and a1, a1, a3
; RV32ZBB-NEXT:    lui a3, 349525
; RV32ZBB-NEXT:    addi a4, a4, 819
; RV32ZBB-NEXT:    addi a3, a3, 1365
; RV32ZBB-NEXT:    slli a0, a0, 4
; RV32ZBB-NEXT:    slli a1, a1, 4
; RV32ZBB-NEXT:    or a0, a2, a0
; RV32ZBB-NEXT:    or a1, a5, a1
; RV32ZBB-NEXT:    srli a2, a0, 2
; RV32ZBB-NEXT:    and a0, a0, a4
; RV32ZBB-NEXT:    srli a5, a1, 2
; RV32ZBB-NEXT:    and a1, a1, a4
; RV32ZBB-NEXT:    and a2, a2, a4
; RV32ZBB-NEXT:    slli a0, a0, 2
; RV32ZBB-NEXT:    and a4, a5, a4
; RV32ZBB-NEXT:    slli a1, a1, 2
; RV32ZBB-NEXT:    or a0, a2, a0
; RV32ZBB-NEXT:    or a1, a4, a1
; RV32ZBB-NEXT:    srli a2, a0, 1
; RV32ZBB-NEXT:    and a0, a0, a3
; RV32ZBB-NEXT:    srli a4, a1, 1
; RV32ZBB-NEXT:    and a1, a1, a3
; RV32ZBB-NEXT:    and a2, a2, a3
; RV32ZBB-NEXT:    slli a0, a0, 1
; RV32ZBB-NEXT:    and a3, a4, a3
; RV32ZBB-NEXT:    slli a1, a1, 1
; RV32ZBB-NEXT:    or a0, a2, a0
; RV32ZBB-NEXT:    or a1, a3, a1
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: test_bitreverse_bswap_i64:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    lui a1, 61681
; RV64ZBB-NEXT:    lui a2, 209715
; RV64ZBB-NEXT:    lui a3, 349525
; RV64ZBB-NEXT:    addiw a1, a1, -241
; RV64ZBB-NEXT:    addiw a2, a2, 819
; RV64ZBB-NEXT:    addiw a3, a3, 1365
; RV64ZBB-NEXT:    slli a4, a1, 32
; RV64ZBB-NEXT:    add a1, a1, a4
; RV64ZBB-NEXT:    slli a4, a2, 32
; RV64ZBB-NEXT:    add a2, a2, a4
; RV64ZBB-NEXT:    slli a4, a3, 32
; RV64ZBB-NEXT:    add a3, a3, a4
; RV64ZBB-NEXT:    srli a4, a0, 4
; RV64ZBB-NEXT:    and a4, a4, a1
; RV64ZBB-NEXT:    and a0, a0, a1
; RV64ZBB-NEXT:    slli a0, a0, 4
; RV64ZBB-NEXT:    or a0, a4, a0
; RV64ZBB-NEXT:    srli a1, a0, 2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    slli a0, a0, 2
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 1
; RV64ZBB-NEXT:    and a0, a0, a3
; RV64ZBB-NEXT:    and a1, a1, a3
; RV64ZBB-NEXT:    slli a0, a0, 1
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    ret
;
; RV32ZBKB-LABEL: test_bitreverse_bswap_i64:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    brev8 a1, a1
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_bswap_i64:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    ret
  %tmp = call i64 @llvm.bitreverse.i64(i64 %a)
  %tmp2 = call i64 @llvm.bswap.i64(i64 %tmp)
  ret i64 %tmp2
}
1518
; Regression test for PR55484: (x >> 8 | x << 8) truncated to i16 and then
; sign-extended back to i32 — i.e. sext(bswap of the low 16 bits). The
; generated code must keep the final sign extension (srai / sext.h);
; presumably the original bug lost it — TODO confirm against the issue.
define i32 @pr55484(i32 %0) {
; RV32I-LABEL: pr55484:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a1, a0, 8
; RV32I-NEXT:    slli a0, a0, 24
; RV32I-NEXT:    or a0, a0, a1
; RV32I-NEXT:    srai a0, a0, 16
; RV32I-NEXT:    ret
;
; RV64I-LABEL: pr55484:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a1, a0, 40
; RV64I-NEXT:    slli a0, a0, 56
; RV64I-NEXT:    or a0, a0, a1
; RV64I-NEXT:    srai a0, a0, 48
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: pr55484:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    srli a1, a0, 8
; RV32ZBB-NEXT:    slli a0, a0, 8
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    sext.h a0, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: pr55484:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    srli a1, a0, 8
; RV64ZBB-NEXT:    slli a0, a0, 8
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    sext.h a0, a0
; RV64ZBB-NEXT:    ret
;
; RV32ZBKB-LABEL: pr55484:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    slli a1, a0, 8
; RV32ZBKB-NEXT:    slli a0, a0, 24
; RV32ZBKB-NEXT:    or a0, a0, a1
; RV32ZBKB-NEXT:    srai a0, a0, 16
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: pr55484:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    slli a1, a0, 40
; RV64ZBKB-NEXT:    slli a0, a0, 56
; RV64ZBKB-NEXT:    or a0, a0, a1
; RV64ZBKB-NEXT:    srai a0, a0, 48
; RV64ZBKB-NEXT:    ret
  %2 = lshr i32 %0, 8
  %3 = shl i32 %0, 8
  %4 = or i32 %2, %3
  %5 = trunc i32 %4 to i16
  %6 = sext i16 %5 to i32
  ret i32 %6
}
1574