// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
// REQUIRES: riscv-registered-target
// RUN: %clang_cc1 -triple riscv64 -target-feature +v -disable-O0-optnone -emit-llvm %s -o - | opt -S -passes=mem2reg | FileCheck --check-prefix=CHECK-RV64 %s
// NOTE: This test file contains the eew=64 variants of vmulhu.
// NOTE: These intrinsics are kept separate from vmulhu.c because the eew=64
// versions are only enabled when the V extension is specified (not for Zve).
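// NOTE: vmulhu returns the high half of the full-width unsigned product,
// i.e. (op1 * op2) >> 64 for eew=64 elements.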

#include <riscv_vector.h>

// CHECK-RV64-LABEL: @test_vmulhu_vv_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vmulhu.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> [[OP1:%.*]], <vscale x 1 x i64> [[OP2:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vmulhu_vv_u64m1(vuint64m1_t op1, vuint64m1_t op2, size_t vl) {
  return __riscv_vmulhu_vv_u64m1(op1, op2, vl);
}

// CHECK-RV64-LABEL: @test_vmulhu_vx_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vmulhu.nxv1i64.i64.i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> [[OP1:%.*]], i64 [[OP2:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vmulhu_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) {
  return __riscv_vmulhu_vx_u64m1(op1, op2, vl);
}

// CHECK-RV64-LABEL: @test_vmulhu_vv_u64m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vmulhu.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> poison, <vscale x 2 x i64> [[OP1:%.*]], <vscale x 2 x i64> [[OP2:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_vmulhu_vv_u64m2(vuint64m2_t op1, vuint64m2_t op2, size_t vl) {
  return __riscv_vmulhu_vv_u64m2(op1, op2, vl);
}

// CHECK-RV64-LABEL: @test_vmulhu_vx_u64m2(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vmulhu.nxv2i64.i64.i64(<vscale x 2 x i64> poison, <vscale x 2 x i64> [[OP1:%.*]], i64 [[OP2:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_vmulhu_vx_u64m2(vuint64m2_t op1, uint64_t op2, size_t vl) {
  return __riscv_vmulhu_vx_u64m2(op1, op2, vl);
}

// CHECK-RV64-LABEL: @test_vmulhu_vv_u64m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vmulhu.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> poison, <vscale x 4 x i64> [[OP1:%.*]], <vscale x 4 x i64> [[OP2:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_vmulhu_vv_u64m4(vuint64m4_t op1, vuint64m4_t op2, size_t vl) {
  return __riscv_vmulhu_vv_u64m4(op1, op2, vl);
}

// CHECK-RV64-LABEL: @test_vmulhu_vx_u64m4(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vmulhu.nxv4i64.i64.i64(<vscale x 4 x i64> poison, <vscale x 4 x i64> [[OP1:%.*]], i64 [[OP2:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_vmulhu_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) {
  return __riscv_vmulhu_vx_u64m4(op1, op2, vl);
}

// CHECK-RV64-LABEL: @test_vmulhu_vv_u64m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vmulhu.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[OP1:%.*]], <vscale x 8 x i64> [[OP2:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_vmulhu_vv_u64m8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) {
  return __riscv_vmulhu_vv_u64m8(op1, op2, vl);
}

// CHECK-RV64-LABEL: @test_vmulhu_vx_u64m8(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vmulhu.nxv8i64.i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[OP1:%.*]], i64 [[OP2:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_vmulhu_vx_u64m8(vuint64m8_t op1, uint64_t op2, size_t vl) {
  return __riscv_vmulhu_vx_u64m8(op1, op2, vl);
}

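// The masked (_m) variants below lower to the `.mask.` intrinsics, which take
// the mask after the operands plus a trailing policy operand; `i64 3` encodes
// tail-agnostic, mask-agnostic.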
// CHECK-RV64-LABEL: @test_vmulhu_vv_u64m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vmulhu.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> [[OP1:%.*]], <vscale x 1 x i64> [[OP2:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vmulhu_vv_u64m1_m(vbool64_t mask, vuint64m1_t op1, vuint64m1_t op2, size_t vl) {
  return __riscv_vmulhu_vv_u64m1_m(mask, op1, op2, vl);
}

// CHECK-RV64-LABEL: @test_vmulhu_vx_u64m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vmulhu.mask.nxv1i64.i64.i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> [[OP1:%.*]], i64 [[OP2:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vmulhu_vx_u64m1_m(vbool64_t mask, vuint64m1_t op1, uint64_t op2, size_t vl) {
  return __riscv_vmulhu_vx_u64m1_m(mask, op1, op2, vl);
}

// CHECK-RV64-LABEL: @test_vmulhu_vv_u64m2_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vmulhu.mask.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> poison, <vscale x 2 x i64> [[OP1:%.*]], <vscale x 2 x i64> [[OP2:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_vmulhu_vv_u64m2_m(vbool32_t mask, vuint64m2_t op1, vuint64m2_t op2, size_t vl) {
  return __riscv_vmulhu_vv_u64m2_m(mask, op1, op2, vl);
}

// CHECK-RV64-LABEL: @test_vmulhu_vx_u64m2_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vmulhu.mask.nxv2i64.i64.i64(<vscale x 2 x i64> poison, <vscale x 2 x i64> [[OP1:%.*]], i64 [[OP2:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
//
vuint64m2_t test_vmulhu_vx_u64m2_m(vbool32_t mask, vuint64m2_t op1, uint64_t op2, size_t vl) {
  return __riscv_vmulhu_vx_u64m2_m(mask, op1, op2, vl);
}

// CHECK-RV64-LABEL: @test_vmulhu_vv_u64m4_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vmulhu.mask.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> poison, <vscale x 4 x i64> [[OP1:%.*]], <vscale x 4 x i64> [[OP2:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_vmulhu_vv_u64m4_m(vbool16_t mask, vuint64m4_t op1, vuint64m4_t op2, size_t vl) {
  return __riscv_vmulhu_vv_u64m4_m(mask, op1, op2, vl);
}

// CHECK-RV64-LABEL: @test_vmulhu_vx_u64m4_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vmulhu.mask.nxv4i64.i64.i64(<vscale x 4 x i64> poison, <vscale x 4 x i64> [[OP1:%.*]], i64 [[OP2:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
//
vuint64m4_t test_vmulhu_vx_u64m4_m(vbool16_t mask, vuint64m4_t op1, uint64_t op2, size_t vl) {
  return __riscv_vmulhu_vx_u64m4_m(mask, op1, op2, vl);
}

// CHECK-RV64-LABEL: @test_vmulhu_vv_u64m8_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vmulhu.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[OP1:%.*]], <vscale x 8 x i64> [[OP2:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_vmulhu_vv_u64m8_m(vbool8_t mask, vuint64m8_t op1, vuint64m8_t op2, size_t vl) {
  return __riscv_vmulhu_vv_u64m8_m(mask, op1, op2, vl);
}

// CHECK-RV64-LABEL: @test_vmulhu_vx_u64m8_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vmulhu.mask.nxv8i64.i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[OP1:%.*]], i64 [[OP2:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 3)
// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
//
vuint64m8_t test_vmulhu_vx_u64m8_m(vbool8_t mask, vuint64m8_t op1, uint64_t op2, size_t vl) {
  return __riscv_vmulhu_vx_u64m8_m(mask, op1, op2, vl);
}
