; NOTE: Assertions have been autogenerated by utils/update_test_checks.py UTC_ARGS: --version 5
; RUN: opt -S < %s -passes=dce | FileCheck %s

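; Every intrinsic call below only produces a value that is never used, so DCE
; is expected to delete the call entirely; the autogenerated CHECK lines verify
; that each test body is reduced to a bare ret void.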
declare i64 @llvm.ve.vl.pack.f32p(ptr, ptr)

define void @test_llvm_ve_vl_pack_f32p(ptr %a, ptr %b) {
; CHECK-LABEL: define void @test_llvm_ve_vl_pack_f32p(
; CHECK-SAME: ptr [[A:%.*]], ptr [[B:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call i64 @llvm.ve.vl.pack.f32p(ptr %a, ptr %b)
  ret void
}

declare i64 @llvm.ve.vl.pack.f32a(ptr)

define void @test_llvm_ve_vl_pack_f32a(ptr %a) {
; CHECK-LABEL: define void @test_llvm_ve_vl_pack_f32a(
; CHECK-SAME: ptr [[A:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call i64 @llvm.ve.vl.pack.f32a(ptr %a)
  ret void
}

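; vld* family: VE vector-load intrinsics. The vssl forms take a stride, a base
; pointer and a vector length; the vssvl forms additionally pass an input
; vector.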
declare <256 x double> @llvm.ve.vl.vld.vssl(i64, ptr, i32)

define void @test_llvm_ve_vl_vld_vssl(i64 %a, ptr %b, i32 %c) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vld_vssl(
; CHECK-SAME: i64 [[A:%.*]], ptr [[B:%.*]], i32 [[C:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vld.vssl(i64 %a, ptr %b, i32 %c)
  ret void
}

declare <256 x double> @llvm.ve.vl.vld.vssvl(i64, ptr, <256 x double>, i32)

define void @test_llvm_ve_vl_vld_vssvl(i64 %a, ptr %b, <256 x double> %c, i32 %d) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vld_vssvl(
; CHECK-SAME: i64 [[A:%.*]], ptr [[B:%.*]], <256 x double> [[C:%.*]], i32 [[D:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vld.vssvl(i64 %a, ptr %b, <256 x double> %c, i32 %d)
  ret void
}

declare <256 x double> @llvm.ve.vl.vldnc.vssl(i64, ptr, i32)

define void @test_llvm_ve_vl_vldnc_vssl(i64 %a, ptr %b, i32 %c) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vldnc_vssl(
; CHECK-SAME: i64 [[A:%.*]], ptr [[B:%.*]], i32 [[C:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vldnc.vssl(i64 %a, ptr %b, i32 %c)
  ret void
}

declare <256 x double> @llvm.ve.vl.vldnc.vssvl(i64, ptr, <256 x double>, i32)

define void @test_llvm_ve_vl_vldnc_vssvl(i64 %a, ptr %b, <256 x double> %c, i32 %d) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vldnc_vssvl(
; CHECK-SAME: i64 [[A:%.*]], ptr [[B:%.*]], <256 x double> [[C:%.*]], i32 [[D:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vldnc.vssvl(i64 %a, ptr %b, <256 x double> %c, i32 %d)
  ret void
}

declare <256 x double> @llvm.ve.vl.vldu.vssl(i64, ptr, i32)

define void @test_llvm_ve_vl_vldu_vssl(i64 %a, ptr %b, i32 %c) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vldu_vssl(
; CHECK-SAME: i64 [[A:%.*]], ptr [[B:%.*]], i32 [[C:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vldu.vssl(i64 %a, ptr %b, i32 %c)
  ret void
}

declare <256 x double> @llvm.ve.vl.vldu.vssvl(i64, ptr, <256 x double>, i32)

define void @test_llvm_ve_vl_vldu_vssvl(i64 %a, ptr %b, <256 x double> %c, i32 %d) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vldu_vssvl(
; CHECK-SAME: i64 [[A:%.*]], ptr [[B:%.*]], <256 x double> [[C:%.*]], i32 [[D:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vldu.vssvl(i64 %a, ptr %b, <256 x double> %c, i32 %d)
  ret void
}

declare <256 x double> @llvm.ve.vl.vldunc.vssl(i64, ptr, i32)

define void @test_llvm_ve_vl_vldunc_vssl(i64 %a, ptr %b, i32 %c) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vldunc_vssl(
; CHECK-SAME: i64 [[A:%.*]], ptr [[B:%.*]], i32 [[C:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vldunc.vssl(i64 %a, ptr %b, i32 %c)
  ret void
}

declare <256 x double> @llvm.ve.vl.vldunc.vssvl(i64, ptr, <256 x double>, i32)

define void @test_llvm_ve_vl_vldunc_vssvl(i64 %a, ptr %b, <256 x double> %c, i32 %d) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vldunc_vssvl(
; CHECK-SAME: i64 [[A:%.*]], ptr [[B:%.*]], <256 x double> [[C:%.*]], i32 [[D:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vldunc.vssvl(i64 %a, ptr %b, <256 x double> %c, i32 %d)
  ret void
}

declare <256 x double> @llvm.ve.vl.vldlsx.vssl(i64, ptr, i32)

define void @test_llvm_ve_vl_vldlsx_vssl(i64 %a, ptr %b, i32 %c) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vldlsx_vssl(
; CHECK-SAME: i64 [[A:%.*]], ptr [[B:%.*]], i32 [[C:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vldlsx.vssl(i64 %a, ptr %b, i32 %c)
  ret void
}

declare <256 x double> @llvm.ve.vl.vldlsx.vssvl(i64, ptr, <256 x double>, i32)

define void @test_llvm_ve_vl_vldlsx_vssvl(i64 %a, ptr %b, <256 x double> %c, i32 %d) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vldlsx_vssvl(
; CHECK-SAME: i64 [[A:%.*]], ptr [[B:%.*]], <256 x double> [[C:%.*]], i32 [[D:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vldlsx.vssvl(i64 %a, ptr %b, <256 x double> %c, i32 %d)
  ret void
}

declare <256 x double> @llvm.ve.vl.vldlsxnc.vssl(i64, ptr, i32)

define void @test_llvm_ve_vl_vldlsxnc_vssl(i64 %a, ptr %b, i32 %c) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vldlsxnc_vssl(
; CHECK-SAME: i64 [[A:%.*]], ptr [[B:%.*]], i32 [[C:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vldlsxnc.vssl(i64 %a, ptr %b, i32 %c)
  ret void
}

declare <256 x double> @llvm.ve.vl.vldlsxnc.vssvl(i64, ptr, <256 x double>, i32)

define void @test_llvm_ve_vl_vldlsxnc_vssvl(i64 %a, ptr %b, <256 x double> %c, i32 %d) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vldlsxnc_vssvl(
; CHECK-SAME: i64 [[A:%.*]], ptr [[B:%.*]], <256 x double> [[C:%.*]], i32 [[D:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vldlsxnc.vssvl(i64 %a, ptr %b, <256 x double> %c, i32 %d)
  ret void
}

declare <256 x double> @llvm.ve.vl.vldlzx.vssl(i64, ptr, i32)

define void @test_llvm_ve_vl_vldlzx_vssl(i64 %a, ptr %b, i32 %c) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vldlzx_vssl(
; CHECK-SAME: i64 [[A:%.*]], ptr [[B:%.*]], i32 [[C:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vldlzx.vssl(i64 %a, ptr %b, i32 %c)
  ret void
}

declare <256 x double> @llvm.ve.vl.vldlzx.vssvl(i64, ptr, <256 x double>, i32)

define void @test_llvm_ve_vl_vldlzx_vssvl(i64 %a, ptr %b, <256 x double> %c, i32 %d) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vldlzx_vssvl(
; CHECK-SAME: i64 [[A:%.*]], ptr [[B:%.*]], <256 x double> [[C:%.*]], i32 [[D:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vldlzx.vssvl(i64 %a, ptr %b, <256 x double> %c, i32 %d)
  ret void
}

declare <256 x double> @llvm.ve.vl.vldlzxnc.vssl(i64, ptr, i32)

define void @test_llvm_ve_vl_vldlzxnc_vssl(i64 %a, ptr %b, i32 %c) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vldlzxnc_vssl(
; CHECK-SAME: i64 [[A:%.*]], ptr [[B:%.*]], i32 [[C:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vldlzxnc.vssl(i64 %a, ptr %b, i32 %c)
  ret void
}

declare <256 x double> @llvm.ve.vl.vldlzxnc.vssvl(i64, ptr, <256 x double>, i32)

define void @test_llvm_ve_vl_vldlzxnc_vssvl(i64 %a, ptr %b, <256 x double> %c, i32 %d) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vldlzxnc_vssvl(
; CHECK-SAME: i64 [[A:%.*]], ptr [[B:%.*]], <256 x double> [[C:%.*]], i32 [[D:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vldlzxnc.vssvl(i64 %a, ptr %b, <256 x double> %c, i32 %d)
  ret void
}

declare <256 x double> @llvm.ve.vl.vld2d.vssl(i64, ptr, i32)

define void @test_llvm_ve_vl_vld2d_vssl(i64 %a, ptr %b, i32 %c) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vld2d_vssl(
; CHECK-SAME: i64 [[A:%.*]], ptr [[B:%.*]], i32 [[C:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vld2d.vssl(i64 %a, ptr %b, i32 %c)
  ret void
}

declare <256 x double> @llvm.ve.vl.vld2d.vssvl(i64, ptr, <256 x double>, i32)

define void @test_llvm_ve_vl_vld2d_vssvl(i64 %a, ptr %b, <256 x double> %c, i32 %d) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vld2d_vssvl(
; CHECK-SAME: i64 [[A:%.*]], ptr [[B:%.*]], <256 x double> [[C:%.*]], i32 [[D:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vld2d.vssvl(i64 %a, ptr %b, <256 x double> %c, i32 %d)
  ret void
}

declare <256 x double> @llvm.ve.vl.vld2dnc.vssl(i64, ptr, i32)

define void @test_llvm_ve_vl_vld2dnc_vssl(i64 %a, ptr %b, i32 %c) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vld2dnc_vssl(
; CHECK-SAME: i64 [[A:%.*]], ptr [[B:%.*]], i32 [[C:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vld2dnc.vssl(i64 %a, ptr %b, i32 %c)
  ret void
}

declare <256 x double> @llvm.ve.vl.vld2dnc.vssvl(i64, ptr, <256 x double>, i32)

define void @test_llvm_ve_vl_vld2dnc_vssvl(i64 %a, ptr %b, <256 x double> %c, i32 %d) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vld2dnc_vssvl(
; CHECK-SAME: i64 [[A:%.*]], ptr [[B:%.*]], <256 x double> [[C:%.*]], i32 [[D:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vld2dnc.vssvl(i64 %a, ptr %b, <256 x double> %c, i32 %d)
  ret void
}

declare <256 x double> @llvm.ve.vl.vldu2d.vssl(i64, ptr, i32)

define void @test_llvm_ve_vl_vldu2d_vssl(i64 %a, ptr %b, i32 %c) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vldu2d_vssl(
; CHECK-SAME: i64 [[A:%.*]], ptr [[B:%.*]], i32 [[C:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vldu2d.vssl(i64 %a, ptr %b, i32 %c)
  ret void
}

declare <256 x double> @llvm.ve.vl.vldu2d.vssvl(i64, ptr, <256 x double>, i32)

define void @test_llvm_ve_vl_vldu2d_vssvl(i64 %a, ptr %b, <256 x double> %c, i32 %d) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vldu2d_vssvl(
; CHECK-SAME: i64 [[A:%.*]], ptr [[B:%.*]], <256 x double> [[C:%.*]], i32 [[D:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vldu2d.vssvl(i64 %a, ptr %b, <256 x double> %c, i32 %d)
  ret void
}

declare <256 x double> @llvm.ve.vl.vldu2dnc.vssl(i64, ptr, i32)

define void @test_llvm_ve_vl_vldu2dnc_vssl(i64 %a, ptr %b, i32 %c) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vldu2dnc_vssl(
; CHECK-SAME: i64 [[A:%.*]], ptr [[B:%.*]], i32 [[C:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vldu2dnc.vssl(i64 %a, ptr %b, i32 %c)
  ret void
}

declare <256 x double> @llvm.ve.vl.vldu2dnc.vssvl(i64, ptr, <256 x double>, i32)

define void @test_llvm_ve_vl_vldu2dnc_vssvl(i64 %a, ptr %b, <256 x double> %c, i32 %d) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vldu2dnc_vssvl(
; CHECK-SAME: i64 [[A:%.*]], ptr [[B:%.*]], <256 x double> [[C:%.*]], i32 [[D:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vldu2dnc.vssvl(i64 %a, ptr %b, <256 x double> %c, i32 %d)
  ret void
}

declare <256 x double> @llvm.ve.vl.vldl2dsx.vssl(i64, ptr, i32)

define void @test_llvm_ve_vl_vldl2dsx_vssl(i64 %a, ptr %b, i32 %c) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vldl2dsx_vssl(
; CHECK-SAME: i64 [[A:%.*]], ptr [[B:%.*]], i32 [[C:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vldl2dsx.vssl(i64 %a, ptr %b, i32 %c)
  ret void
}

declare <256 x double> @llvm.ve.vl.vldl2dsx.vssvl(i64, ptr, <256 x double>, i32)

define void @test_llvm_ve_vl_vldl2dsx_vssvl(i64 %a, ptr %b, <256 x double> %c, i32 %d) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vldl2dsx_vssvl(
; CHECK-SAME: i64 [[A:%.*]], ptr [[B:%.*]], <256 x double> [[C:%.*]], i32 [[D:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vldl2dsx.vssvl(i64 %a, ptr %b, <256 x double> %c, i32 %d)
  ret void
}

declare <256 x double> @llvm.ve.vl.vldl2dsxnc.vssl(i64, ptr, i32)

define void @test_llvm_ve_vl_vldl2dsxnc_vssl(i64 %a, ptr %b, i32 %c) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vldl2dsxnc_vssl(
; CHECK-SAME: i64 [[A:%.*]], ptr [[B:%.*]], i32 [[C:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vldl2dsxnc.vssl(i64 %a, ptr %b, i32 %c)
  ret void
}

declare <256 x double> @llvm.ve.vl.vldl2dsxnc.vssvl(i64, ptr, <256 x double>, i32)

define void @test_llvm_ve_vl_vldl2dsxnc_vssvl(i64 %a, ptr %b, <256 x double> %c, i32 %d) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vldl2dsxnc_vssvl(
; CHECK-SAME: i64 [[A:%.*]], ptr [[B:%.*]], <256 x double> [[C:%.*]], i32 [[D:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vldl2dsxnc.vssvl(i64 %a, ptr %b, <256 x double> %c, i32 %d)
  ret void
}

declare <256 x double> @llvm.ve.vl.vldl2dzx.vssl(i64, ptr, i32)

define void @test_llvm_ve_vl_vldl2dzx_vssl(i64 %a, ptr %b, i32 %c) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vldl2dzx_vssl(
; CHECK-SAME: i64 [[A:%.*]], ptr [[B:%.*]], i32 [[C:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vldl2dzx.vssl(i64 %a, ptr %b, i32 %c)
  ret void
}

declare <256 x double> @llvm.ve.vl.vldl2dzx.vssvl(i64, ptr, <256 x double>, i32)

define void @test_llvm_ve_vl_vldl2dzx_vssvl(i64 %a, ptr %b, <256 x double> %c, i32 %d) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vldl2dzx_vssvl(
; CHECK-SAME: i64 [[A:%.*]], ptr [[B:%.*]], <256 x double> [[C:%.*]], i32 [[D:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vldl2dzx.vssvl(i64 %a, ptr %b, <256 x double> %c, i32 %d)
  ret void
}

declare <256 x double> @llvm.ve.vl.vldl2dzxnc.vssl(i64, ptr, i32)

define void @test_llvm_ve_vl_vldl2dzxnc_vssl(i64 %a, ptr %b, i32 %c) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vldl2dzxnc_vssl(
; CHECK-SAME: i64 [[A:%.*]], ptr [[B:%.*]], i32 [[C:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vldl2dzxnc.vssl(i64 %a, ptr %b, i32 %c)
  ret void
}

declare <256 x double> @llvm.ve.vl.vldl2dzxnc.vssvl(i64, ptr, <256 x double>, i32)

define void @test_llvm_ve_vl_vldl2dzxnc_vssvl(i64 %a, ptr %b, <256 x double> %c, i32 %d) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vldl2dzxnc_vssvl(
; CHECK-SAME: i64 [[A:%.*]], ptr [[B:%.*]], <256 x double> [[C:%.*]], i32 [[D:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vldl2dzxnc.vssvl(i64 %a, ptr %b, <256 x double> %c, i32 %d)
  ret void
}

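; vgt* family: VE vector-gather intrinsics. The vvssml forms add a <256 x i1>
; mask operand, and the vvssmvl forms add both a mask and an input vector.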
declare <256 x double> @llvm.ve.vl.vgt.vvssl(<256 x double>, i64, i64, i32)

define void @test_llvm_ve_vl_vgt_vvssl(<256 x double> %a, i64 %b, i64 %c, i32 %d) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vgt_vvssl(
; CHECK-SAME: <256 x double> [[A:%.*]], i64 [[B:%.*]], i64 [[C:%.*]], i32 [[D:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vgt.vvssl(<256 x double> %a, i64 %b, i64 %c, i32 %d)
  ret void
}

declare <256 x double> @llvm.ve.vl.vgt.vvssvl(<256 x double>, i64, i64, <256 x double>, i32)

define void @test_llvm_ve_vl_vgt_vvssvl(<256 x double> %a, i64 %b, i64 %c, <256 x double> %d, i32 %e) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vgt_vvssvl(
; CHECK-SAME: <256 x double> [[A:%.*]], i64 [[B:%.*]], i64 [[C:%.*]], <256 x double> [[D:%.*]], i32 [[E:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vgt.vvssvl(<256 x double> %a, i64 %b, i64 %c, <256 x double> %d, i32 %e)
  ret void
}

declare <256 x double> @llvm.ve.vl.vgt.vvssml(<256 x double>, i64, i64, <256 x i1>, i32)

define void @test_llvm_ve_vl_vgt_vvssml(<256 x double> %a, i64 %b, i64 %c, <256 x i1> %d, i32 %e) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vgt_vvssml(
; CHECK-SAME: <256 x double> [[A:%.*]], i64 [[B:%.*]], i64 [[C:%.*]], <256 x i1> [[D:%.*]], i32 [[E:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vgt.vvssml(<256 x double> %a, i64 %b, i64 %c, <256 x i1> %d, i32 %e)
  ret void
}

declare <256 x double> @llvm.ve.vl.vgt.vvssmvl(<256 x double>, i64, i64, <256 x i1>, <256 x double>, i32)

define void @test_llvm_ve_vl_vgt_vvssmvl(<256 x double> %a, i64 %b, i64 %c, <256 x i1> %d, <256 x double> %e, i32 %f) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vgt_vvssmvl(
; CHECK-SAME: <256 x double> [[A:%.*]], i64 [[B:%.*]], i64 [[C:%.*]], <256 x i1> [[D:%.*]], <256 x double> [[E:%.*]], i32 [[F:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vgt.vvssmvl(<256 x double> %a, i64 %b, i64 %c, <256 x i1> %d, <256 x double> %e, i32 %f)
  ret void
}

declare <256 x double> @llvm.ve.vl.vgtnc.vvssl(<256 x double>, i64, i64, i32)

define void @test_llvm_ve_vl_vgtnc_vvssl(<256 x double> %a, i64 %b, i64 %c, i32 %d) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vgtnc_vvssl(
; CHECK-SAME: <256 x double> [[A:%.*]], i64 [[B:%.*]], i64 [[C:%.*]], i32 [[D:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vgtnc.vvssl(<256 x double> %a, i64 %b, i64 %c, i32 %d)
  ret void
}

declare <256 x double> @llvm.ve.vl.vgtnc.vvssvl(<256 x double>, i64, i64, <256 x double>, i32)

define void @test_llvm_ve_vl_vgtnc_vvssvl(<256 x double> %a, i64 %b, i64 %c, <256 x double> %d, i32 %e) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vgtnc_vvssvl(
; CHECK-SAME: <256 x double> [[A:%.*]], i64 [[B:%.*]], i64 [[C:%.*]], <256 x double> [[D:%.*]], i32 [[E:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vgtnc.vvssvl(<256 x double> %a, i64 %b, i64 %c, <256 x double> %d, i32 %e)
  ret void
}

declare <256 x double> @llvm.ve.vl.vgtnc.vvssml(<256 x double>, i64, i64, <256 x i1>, i32)

define void @test_llvm_ve_vl_vgtnc_vvssml(<256 x double> %a, i64 %b, i64 %c, <256 x i1> %d, i32 %e) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vgtnc_vvssml(
; CHECK-SAME: <256 x double> [[A:%.*]], i64 [[B:%.*]], i64 [[C:%.*]], <256 x i1> [[D:%.*]], i32 [[E:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vgtnc.vvssml(<256 x double> %a, i64 %b, i64 %c, <256 x i1> %d, i32 %e)
  ret void
}

declare <256 x double> @llvm.ve.vl.vgtnc.vvssmvl(<256 x double>, i64, i64, <256 x i1>, <256 x double>, i32)

define void @test_llvm_ve_vl_vgtnc_vvssmvl(<256 x double> %a, i64 %b, i64 %c, <256 x i1> %d, <256 x double> %e, i32 %f) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vgtnc_vvssmvl(
; CHECK-SAME: <256 x double> [[A:%.*]], i64 [[B:%.*]], i64 [[C:%.*]], <256 x i1> [[D:%.*]], <256 x double> [[E:%.*]], i32 [[F:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vgtnc.vvssmvl(<256 x double> %a, i64 %b, i64 %c, <256 x i1> %d, <256 x double> %e, i32 %f)
  ret void
}

declare <256 x double> @llvm.ve.vl.vgtu.vvssl(<256 x double>, i64, i64, i32)

define void @test_llvm_ve_vl_vgtu_vvssl(<256 x double> %a, i64 %b, i64 %c, i32 %d) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vgtu_vvssl(
; CHECK-SAME: <256 x double> [[A:%.*]], i64 [[B:%.*]], i64 [[C:%.*]], i32 [[D:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vgtu.vvssl(<256 x double> %a, i64 %b, i64 %c, i32 %d)
  ret void
}

declare <256 x double> @llvm.ve.vl.vgtu.vvssvl(<256 x double>, i64, i64, <256 x double>, i32)

define void @test_llvm_ve_vl_vgtu_vvssvl(<256 x double> %a, i64 %b, i64 %c, <256 x double> %d, i32 %e) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vgtu_vvssvl(
; CHECK-SAME: <256 x double> [[A:%.*]], i64 [[B:%.*]], i64 [[C:%.*]], <256 x double> [[D:%.*]], i32 [[E:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vgtu.vvssvl(<256 x double> %a, i64 %b, i64 %c, <256 x double> %d, i32 %e)
  ret void
}

declare <256 x double> @llvm.ve.vl.vgtu.vvssml(<256 x double>, i64, i64, <256 x i1>, i32)

define void @test_llvm_ve_vl_vgtu_vvssml(<256 x double> %a, i64 %b, i64 %c, <256 x i1> %d, i32 %e) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vgtu_vvssml(
; CHECK-SAME: <256 x double> [[A:%.*]], i64 [[B:%.*]], i64 [[C:%.*]], <256 x i1> [[D:%.*]], i32 [[E:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vgtu.vvssml(<256 x double> %a, i64 %b, i64 %c, <256 x i1> %d, i32 %e)
  ret void
}

declare <256 x double> @llvm.ve.vl.vgtu.vvssmvl(<256 x double>, i64, i64, <256 x i1>, <256 x double>, i32)

define void @test_llvm_ve_vl_vgtu_vvssmvl(<256 x double> %a, i64 %b, i64 %c, <256 x i1> %d, <256 x double> %e, i32 %f) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vgtu_vvssmvl(
; CHECK-SAME: <256 x double> [[A:%.*]], i64 [[B:%.*]], i64 [[C:%.*]], <256 x i1> [[D:%.*]], <256 x double> [[E:%.*]], i32 [[F:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vgtu.vvssmvl(<256 x double> %a, i64 %b, i64 %c, <256 x i1> %d, <256 x double> %e, i32 %f)
  ret void
}

declare <256 x double> @llvm.ve.vl.vgtunc.vvssl(<256 x double>, i64, i64, i32)

define void @test_llvm_ve_vl_vgtunc_vvssl(<256 x double> %a, i64 %b, i64 %c, i32 %d) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vgtunc_vvssl(
; CHECK-SAME: <256 x double> [[A:%.*]], i64 [[B:%.*]], i64 [[C:%.*]], i32 [[D:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vgtunc.vvssl(<256 x double> %a, i64 %b, i64 %c, i32 %d)
  ret void
}

declare <256 x double> @llvm.ve.vl.vgtunc.vvssvl(<256 x double>, i64, i64, <256 x double>, i32)

define void @test_llvm_ve_vl_vgtunc_vvssvl(<256 x double> %a, i64 %b, i64 %c, <256 x double> %d, i32 %e) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vgtunc_vvssvl(
; CHECK-SAME: <256 x double> [[A:%.*]], i64 [[B:%.*]], i64 [[C:%.*]], <256 x double> [[D:%.*]], i32 [[E:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vgtunc.vvssvl(<256 x double> %a, i64 %b, i64 %c, <256 x double> %d, i32 %e)
  ret void
}

declare <256 x double> @llvm.ve.vl.vgtunc.vvssml(<256 x double>, i64, i64, <256 x i1>, i32)

define void @test_llvm_ve_vl_vgtunc_vvssml(<256 x double> %a, i64 %b, i64 %c, <256 x i1> %d, i32 %e) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vgtunc_vvssml(
; CHECK-SAME: <256 x double> [[A:%.*]], i64 [[B:%.*]], i64 [[C:%.*]], <256 x i1> [[D:%.*]], i32 [[E:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vgtunc.vvssml(<256 x double> %a, i64 %b, i64 %c, <256 x i1> %d, i32 %e)
  ret void
}

declare <256 x double> @llvm.ve.vl.vgtunc.vvssmvl(<256 x double>, i64, i64, <256 x i1>, <256 x double>, i32)

define void @test_llvm_ve_vl_vgtunc_vvssmvl(<256 x double> %a, i64 %b, i64 %c, <256 x i1> %d, <256 x double> %e, i32 %f) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vgtunc_vvssmvl(
; CHECK-SAME: <256 x double> [[A:%.*]], i64 [[B:%.*]], i64 [[C:%.*]], <256 x i1> [[D:%.*]], <256 x double> [[E:%.*]], i32 [[F:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vgtunc.vvssmvl(<256 x double> %a, i64 %b, i64 %c, <256 x i1> %d, <256 x double> %e, i32 %f)
  ret void
}

declare <256 x double> @llvm.ve.vl.vgtlsx.vvssl(<256 x double>, i64, i64, i32)

define void @test_llvm_ve_vl_vgtlsx_vvssl(<256 x double> %a, i64 %b, i64 %c, i32 %d) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vgtlsx_vvssl(
; CHECK-SAME: <256 x double> [[A:%.*]], i64 [[B:%.*]], i64 [[C:%.*]], i32 [[D:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vgtlsx.vvssl(<256 x double> %a, i64 %b, i64 %c, i32 %d)
  ret void
}

declare <256 x double> @llvm.ve.vl.vgtlsx.vvssvl(<256 x double>, i64, i64, <256 x double>, i32)

define void @test_llvm_ve_vl_vgtlsx_vvssvl(<256 x double> %a, i64 %b, i64 %c, <256 x double> %d, i32 %e) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vgtlsx_vvssvl(
; CHECK-SAME: <256 x double> [[A:%.*]], i64 [[B:%.*]], i64 [[C:%.*]], <256 x double> [[D:%.*]], i32 [[E:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vgtlsx.vvssvl(<256 x double> %a, i64 %b, i64 %c, <256 x double> %d, i32 %e)
  ret void
}

declare <256 x double> @llvm.ve.vl.vgtlsx.vvssml(<256 x double>, i64, i64, <256 x i1>, i32)

define void @test_llvm_ve_vl_vgtlsx_vvssml(<256 x double> %a, i64 %b, i64 %c, <256 x i1> %d, i32 %e) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vgtlsx_vvssml(
; CHECK-SAME: <256 x double> [[A:%.*]], i64 [[B:%.*]], i64 [[C:%.*]], <256 x i1> [[D:%.*]], i32 [[E:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vgtlsx.vvssml(<256 x double> %a, i64 %b, i64 %c, <256 x i1> %d, i32 %e)
  ret void
}

declare <256 x double> @llvm.ve.vl.vgtlsx.vvssmvl(<256 x double>, i64, i64, <256 x i1>, <256 x double>, i32)

define void @test_llvm_ve_vl_vgtlsx_vvssmvl(<256 x double> %a, i64 %b, i64 %c, <256 x i1> %d, <256 x double> %e, i32 %f) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vgtlsx_vvssmvl(
; CHECK-SAME: <256 x double> [[A:%.*]], i64 [[B:%.*]], i64 [[C:%.*]], <256 x i1> [[D:%.*]], <256 x double> [[E:%.*]], i32 [[F:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vgtlsx.vvssmvl(<256 x double> %a, i64 %b, i64 %c, <256 x i1> %d, <256 x double> %e, i32 %f)
  ret void
}

declare <256 x double> @llvm.ve.vl.vgtlsxnc.vvssl(<256 x double>, i64, i64, i32)

define void @test_llvm_ve_vl_vgtlsxnc_vvssl(<256 x double> %a, i64 %b, i64 %c, i32 %d) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vgtlsxnc_vvssl(
; CHECK-SAME: <256 x double> [[A:%.*]], i64 [[B:%.*]], i64 [[C:%.*]], i32 [[D:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vgtlsxnc.vvssl(<256 x double> %a, i64 %b, i64 %c, i32 %d)
  ret void
}

declare <256 x double> @llvm.ve.vl.vgtlsxnc.vvssvl(<256 x double>, i64, i64, <256 x double>, i32)

define void @test_llvm_ve_vl_vgtlsxnc_vvssvl(<256 x double> %a, i64 %b, i64 %c, <256 x double> %d, i32 %e) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vgtlsxnc_vvssvl(
; CHECK-SAME: <256 x double> [[A:%.*]], i64 [[B:%.*]], i64 [[C:%.*]], <256 x double> [[D:%.*]], i32 [[E:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vgtlsxnc.vvssvl(<256 x double> %a, i64 %b, i64 %c, <256 x double> %d, i32 %e)
  ret void
}

declare <256 x double> @llvm.ve.vl.vgtlsxnc.vvssml(<256 x double>, i64, i64, <256 x i1>, i32)

define void @test_llvm_ve_vl_vgtlsxnc_vvssml(<256 x double> %a, i64 %b, i64 %c, <256 x i1> %d, i32 %e) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vgtlsxnc_vvssml(
; CHECK-SAME: <256 x double> [[A:%.*]], i64 [[B:%.*]], i64 [[C:%.*]], <256 x i1> [[D:%.*]], i32 [[E:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vgtlsxnc.vvssml(<256 x double> %a, i64 %b, i64 %c, <256 x i1> %d, i32 %e)
  ret void
}

declare <256 x double> @llvm.ve.vl.vgtlsxnc.vvssmvl(<256 x double>, i64, i64, <256 x i1>, <256 x double>, i32)

define void @test_llvm_ve_vl_vgtlsxnc_vvssmvl(<256 x double> %a, i64 %b, i64 %c, <256 x i1> %d, <256 x double> %e, i32 %f) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vgtlsxnc_vvssmvl(
; CHECK-SAME: <256 x double> [[A:%.*]], i64 [[B:%.*]], i64 [[C:%.*]], <256 x i1> [[D:%.*]], <256 x double> [[E:%.*]], i32 [[F:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vgtlsxnc.vvssmvl(<256 x double> %a, i64 %b, i64 %c, <256 x i1> %d, <256 x double> %e, i32 %f)
  ret void
}

declare <256 x double> @llvm.ve.vl.vgtlzx.vvssl(<256 x double>, i64, i64, i32)

define void @test_llvm_ve_vl_vgtlzx_vvssl(<256 x double> %a, i64 %b, i64 %c, i32 %d) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vgtlzx_vvssl(
; CHECK-SAME: <256 x double> [[A:%.*]], i64 [[B:%.*]], i64 [[C:%.*]], i32 [[D:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vgtlzx.vvssl(<256 x double> %a, i64 %b, i64 %c, i32 %d)
  ret void
}

declare <256 x double> @llvm.ve.vl.vgtlzx.vvssvl(<256 x double>, i64, i64, <256 x double>, i32)

define void @test_llvm_ve_vl_vgtlzx_vvssvl(<256 x double> %a, i64 %b, i64 %c, <256 x double> %d, i32 %e) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vgtlzx_vvssvl(
; CHECK-SAME: <256 x double> [[A:%.*]], i64 [[B:%.*]], i64 [[C:%.*]], <256 x double> [[D:%.*]], i32 [[E:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vgtlzx.vvssvl(<256 x double> %a, i64 %b, i64 %c, <256 x double> %d, i32 %e)
  ret void
}

declare <256 x double> @llvm.ve.vl.vgtlzx.vvssml(<256 x double>, i64, i64, <256 x i1>, i32)

define void @test_llvm_ve_vl_vgtlzx_vvssml(<256 x double> %a, i64 %b, i64 %c, <256 x i1> %d, i32 %e) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vgtlzx_vvssml(
; CHECK-SAME: <256 x double> [[A:%.*]], i64 [[B:%.*]], i64 [[C:%.*]], <256 x i1> [[D:%.*]], i32 [[E:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vgtlzx.vvssml(<256 x double> %a, i64 %b, i64 %c, <256 x i1> %d, i32 %e)
  ret void
}

declare <256 x double> @llvm.ve.vl.vgtlzx.vvssmvl(<256 x double>, i64, i64, <256 x i1>, <256 x double>, i32)

define void @test_llvm_ve_vl_vgtlzx_vvssmvl(<256 x double> %a, i64 %b, i64 %c, <256 x i1> %d, <256 x double> %e, i32 %f) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vgtlzx_vvssmvl(
; CHECK-SAME: <256 x double> [[A:%.*]], i64 [[B:%.*]], i64 [[C:%.*]], <256 x i1> [[D:%.*]], <256 x double> [[E:%.*]], i32 [[F:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vgtlzx.vvssmvl(<256 x double> %a, i64 %b, i64 %c, <256 x i1> %d, <256 x double> %e, i32 %f)
  ret void
}

declare <256 x double> @llvm.ve.vl.vgtlzxnc.vvssl(<256 x double>, i64, i64, i32)

define void @test_llvm_ve_vl_vgtlzxnc_vvssl(<256 x double> %a, i64 %b, i64 %c, i32 %d) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vgtlzxnc_vvssl(
; CHECK-SAME: <256 x double> [[A:%.*]], i64 [[B:%.*]], i64 [[C:%.*]], i32 [[D:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vgtlzxnc.vvssl(<256 x double> %a, i64 %b, i64 %c, i32 %d)
  ret void
}

declare <256 x double> @llvm.ve.vl.vgtlzxnc.vvssvl(<256 x double>, i64, i64, <256 x double>, i32)

define void @test_llvm_ve_vl_vgtlzxnc_vvssvl(<256 x double> %a, i64 %b, i64 %c, <256 x double> %d, i32 %e) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vgtlzxnc_vvssvl(
; CHECK-SAME: <256 x double> [[A:%.*]], i64 [[B:%.*]], i64 [[C:%.*]], <256 x double> [[D:%.*]], i32 [[E:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vgtlzxnc.vvssvl(<256 x double> %a, i64 %b, i64 %c, <256 x double> %d, i32 %e)
  ret void
}

declare <256 x double> @llvm.ve.vl.vgtlzxnc.vvssml(<256 x double>, i64, i64, <256 x i1>, i32)

define void @test_llvm_ve_vl_vgtlzxnc_vvssml(<256 x double> %a, i64 %b, i64 %c, <256 x i1> %d, i32 %e) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vgtlzxnc_vvssml(
; CHECK-SAME: <256 x double> [[A:%.*]], i64 [[B:%.*]], i64 [[C:%.*]], <256 x i1> [[D:%.*]], i32 [[E:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vgtlzxnc.vvssml(<256 x double> %a, i64 %b, i64 %c, <256 x i1> %d, i32 %e)
  ret void
}

declare <256 x double> @llvm.ve.vl.vgtlzxnc.vvssmvl(<256 x double>, i64, i64, <256 x i1>, <256 x double>, i32)

define void @test_llvm_ve_vl_vgtlzxnc_vvssmvl(<256 x double> %a, i64 %b, i64 %c, <256 x i1> %d, <256 x double> %e, i32 %f) {
; CHECK-LABEL: define void @test_llvm_ve_vl_vgtlzxnc_vvssmvl(
; CHECK-SAME: <256 x double> [[A:%.*]], i64 [[B:%.*]], i64 [[C:%.*]], <256 x i1> [[D:%.*]], <256 x double> [[E:%.*]], i32 [[F:%.*]]) {
; CHECK-NEXT:    ret void
;
  %v = call <256 x double> @llvm.ve.vl.vgtlzxnc.vvssmvl(<256 x double> %a, i64 %b, i64 %c, <256 x i1> %d, <256 x double> %e, i32 %f)
  ret void
}