// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
// RUN: %clang_cc1 -triple riscv32 -target-feature +zbb -emit-llvm %s -o - \
// RUN:     -disable-O0-optnone | opt -S -passes=mem2reg \
// RUN:     | FileCheck %s  -check-prefix=RV32ZBB
// RUN: %clang_cc1 -triple riscv64 -target-feature +zbb -emit-llvm %s -o - \
// RUN:     -disable-O0-optnone | opt -S -passes=mem2reg \
// RUN:     | FileCheck %s  -check-prefix=RV64ZBB

#include <riscv_bitmanip.h>

11 // RV32ZBB-LABEL: @orc_b_32(
12 // RV32ZBB-NEXT:  entry:
13 // RV32ZBB-NEXT:    [[TMP0:%.*]] = call i32 @llvm.riscv.orc.b.i32(i32 [[A:%.*]])
14 // RV32ZBB-NEXT:    ret i32 [[TMP0]]
15 //
16 // RV64ZBB-LABEL: @orc_b_32(
17 // RV64ZBB-NEXT:  entry:
18 // RV64ZBB-NEXT:    [[TMP0:%.*]] = call i32 @llvm.riscv.orc.b.i32(i32 [[A:%.*]])
19 // RV64ZBB-NEXT:    ret i32 [[TMP0]]
20 //
orc_b_32(uint32_t a)21 uint32_t orc_b_32(uint32_t a) {
22   return __riscv_orc_b_32(a);
23 }
24 
#if __riscv_xlen == 64
// __riscv_orc_b_64 is only available on RV64; it must lower to the
// llvm.riscv.orc.b.i64 intrinsic.
// RV64ZBB-LABEL: @orc_b_64(
// RV64ZBB-NEXT:  entry:
// RV64ZBB-NEXT:    [[TMP0:%.*]] = call i64 @llvm.riscv.orc.b.i64(i64 [[A:%.*]])
// RV64ZBB-NEXT:    ret i64 [[TMP0]]
//
uint64_t orc_b_64(uint64_t a) {
  return __riscv_orc_b_64(a);
}
#endif

36 // RV32ZBB-LABEL: @clz_32(
37 // RV32ZBB-NEXT:  entry:
38 // RV32ZBB-NEXT:    [[TMP0:%.*]] = call i32 @llvm.ctlz.i32(i32 [[A:%.*]], i1 false)
39 // RV32ZBB-NEXT:    ret i32 [[TMP0]]
40 //
41 // RV64ZBB-LABEL: @clz_32(
42 // RV64ZBB-NEXT:  entry:
43 // RV64ZBB-NEXT:    [[TMP0:%.*]] = call i32 @llvm.ctlz.i32(i32 [[A:%.*]], i1 false)
44 // RV64ZBB-NEXT:    ret i32 [[TMP0]]
45 //
clz_32(uint32_t a)46 unsigned int clz_32(uint32_t a) {
47   return __riscv_clz_32(a);
48 }
49 
#if __riscv_xlen == 64
// __riscv_clz_64 lowers to llvm.ctlz.i64; the i64 result is truncated to
// i32 because the intrinsic returns unsigned int.
// RV64ZBB-LABEL: @clz_64(
// RV64ZBB-NEXT:  entry:
// RV64ZBB-NEXT:    [[TMP0:%.*]] = call i64 @llvm.ctlz.i64(i64 [[A:%.*]], i1 false)
// RV64ZBB-NEXT:    [[CAST_I:%.*]] = trunc i64 [[TMP0]] to i32
// RV64ZBB-NEXT:    ret i32 [[CAST_I]]
//
unsigned int clz_64(uint64_t a) {
  return __riscv_clz_64(a);
}
#endif

62 // RV32ZBB-LABEL: @ctz_32(
63 // RV32ZBB-NEXT:  entry:
64 // RV32ZBB-NEXT:    [[TMP0:%.*]] = call i32 @llvm.cttz.i32(i32 [[A:%.*]], i1 false)
65 // RV32ZBB-NEXT:    ret i32 [[TMP0]]
66 //
67 // RV64ZBB-LABEL: @ctz_32(
68 // RV64ZBB-NEXT:  entry:
69 // RV64ZBB-NEXT:    [[TMP0:%.*]] = call i32 @llvm.cttz.i32(i32 [[A:%.*]], i1 false)
70 // RV64ZBB-NEXT:    ret i32 [[TMP0]]
71 //
ctz_32(uint32_t a)72 unsigned int ctz_32(uint32_t a) {
73   return __riscv_ctz_32(a);
74 }
75 
#if __riscv_xlen == 64
// __riscv_ctz_64 lowers to llvm.cttz.i64; the i64 result is truncated to
// i32 because the intrinsic returns unsigned int.
// RV64ZBB-LABEL: @ctz_64(
// RV64ZBB-NEXT:  entry:
// RV64ZBB-NEXT:    [[TMP0:%.*]] = call i64 @llvm.cttz.i64(i64 [[A:%.*]], i1 false)
// RV64ZBB-NEXT:    [[CAST_I:%.*]] = trunc i64 [[TMP0]] to i32
// RV64ZBB-NEXT:    ret i32 [[CAST_I]]
//
unsigned int ctz_64(uint64_t a) {
  return __riscv_ctz_64(a);
}
#endif

88 // RV32ZBB-LABEL: @cpop_32(
89 // RV32ZBB-NEXT:  entry:
90 // RV32ZBB-NEXT:    [[TMP0:%.*]] = call i32 @llvm.ctpop.i32(i32 [[A:%.*]])
91 // RV32ZBB-NEXT:    ret i32 [[TMP0]]
92 //
93 // RV64ZBB-LABEL: @cpop_32(
94 // RV64ZBB-NEXT:  entry:
95 // RV64ZBB-NEXT:    [[TMP0:%.*]] = call i32 @llvm.ctpop.i32(i32 [[A:%.*]])
96 // RV64ZBB-NEXT:    ret i32 [[TMP0]]
97 //
cpop_32(uint32_t a)98 unsigned int cpop_32(uint32_t a) {
99   return __riscv_cpop_32(a);
100 }
101 
#if __riscv_xlen == 64
// __riscv_cpop_64 lowers to llvm.ctpop.i64; the i64 result is truncated to
// i32 because the intrinsic returns unsigned int.
// RV64ZBB-LABEL: @cpop_64(
// RV64ZBB-NEXT:  entry:
// RV64ZBB-NEXT:    [[TMP0:%.*]] = call i64 @llvm.ctpop.i64(i64 [[A:%.*]])
// RV64ZBB-NEXT:    [[CAST_I:%.*]] = trunc i64 [[TMP0]] to i32
// RV64ZBB-NEXT:    ret i32 [[CAST_I]]
//
unsigned int cpop_64(uint64_t a) {
  return __riscv_cpop_64(a);
}
#endif