; NOTE: Assertions have been autogenerated by utils/update_test_checks.py UTC_ARGS: --version 5
; RUN: opt -S -passes=instcombine < %s | FileCheck %s
target triple = "aarch64-unknown-linux-gnu"

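; All of these calls use an all-false governing predicate
; (<vscale x N x i1> zeroinitializer), so no lanes are active and
; instcombine folds each load away, returning zeroinitializer as the
; autogenerated CHECK lines assert.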
define <vscale x 16 x i8> @test_ld1(ptr %a) #0 {
; CHECK-LABEL: define <vscale x 16 x i8> @test_ld1(
; CHECK-SAME: ptr [[A:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 16 x i8> zeroinitializer
;
entry:
  %res = call <vscale x 16 x i8> @llvm.aarch64.sve.ld1.nxv16i8(<vscale x 16 x i1> zeroinitializer, ptr %a)
  ret <vscale x 16 x i8> %res
}

define <vscale x 4 x i32> @test_ld1_gather(ptr %a, <vscale x 4 x i64> %b) #0 {
; CHECK-LABEL: define <vscale x 4 x i32> @test_ld1_gather(
; CHECK-SAME: ptr [[A:%.*]], <vscale x 4 x i64> [[B:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 4 x i32> zeroinitializer
;
entry:
  %0 = tail call <vscale x 4 x i32> @llvm.aarch64.sve.ld1.gather.nxv4i32(<vscale x 4 x i1> zeroinitializer, ptr %a, <vscale x 4 x i64> %b)
  ret <vscale x 4 x i32> %0
}

define <vscale x 2 x i64> @test_ld1_gather_index(ptr %a, <vscale x 2 x i64> %b) #0 {
; CHECK-LABEL: define <vscale x 2 x i64> @test_ld1_gather_index(
; CHECK-SAME: ptr [[A:%.*]], <vscale x 2 x i64> [[B:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 2 x i64> zeroinitializer
;
entry:
  %0 = tail call <vscale x 2 x i64> @llvm.aarch64.sve.ld1.gather.index.nxv2i64(<vscale x 2 x i1> zeroinitializer, ptr %a, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %0
}

define <vscale x 4 x i32> @test_ld1_gather_scalar_offset(<vscale x 4 x i32> %a) #0 {
; CHECK-LABEL: define <vscale x 4 x i32> @test_ld1_gather_scalar_offset(
; CHECK-SAME: <vscale x 4 x i32> [[A:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 4 x i32> zeroinitializer
;
entry:
  %0 = tail call <vscale x 4 x i32> @llvm.aarch64.sve.ld1.gather.scalar.offset.nxv4i32.nxv4i32(<vscale x 4 x i1> zeroinitializer, <vscale x 4 x i32> %a, i64 0)
  ret <vscale x 4 x i32> %0
}

define <vscale x 4 x i32> @test_ld1_gather_sxtw(ptr %b, <vscale x 4 x i32> %a) #0 {
; CHECK-LABEL: define <vscale x 4 x i32> @test_ld1_gather_sxtw(
; CHECK-SAME: ptr [[B:%.*]], <vscale x 4 x i32> [[A:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 4 x i32> zeroinitializer
;
entry:
  %0 = tail call <vscale x 4 x i32> @llvm.aarch64.sve.ld1.gather.sxtw.nxv4i32(<vscale x 4 x i1> zeroinitializer, ptr %b, <vscale x 4 x i32> %a)
  ret <vscale x 4 x i32> %0
}

define <vscale x 4 x i32> @test_ld1_gather_sxtw_index(ptr %b, <vscale x 4 x i32> %a) #0 {
; CHECK-LABEL: define <vscale x 4 x i32> @test_ld1_gather_sxtw_index(
; CHECK-SAME: ptr [[B:%.*]], <vscale x 4 x i32> [[A:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 4 x i32> zeroinitializer
;
entry:
  %0 = tail call <vscale x 4 x i32> @llvm.aarch64.sve.ld1.gather.sxtw.index.nxv4i32(<vscale x 4 x i1> zeroinitializer, ptr %b, <vscale x 4 x i32> %a)
  ret <vscale x 4 x i32> %0
}

define <vscale x 4 x i32> @test_ld1_gather_uxtw(ptr %b, <vscale x 4 x i32> %a) #0 {
; CHECK-LABEL: define <vscale x 4 x i32> @test_ld1_gather_uxtw(
; CHECK-SAME: ptr [[B:%.*]], <vscale x 4 x i32> [[A:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 4 x i32> zeroinitializer
;
entry:
  %0 = tail call <vscale x 4 x i32> @llvm.aarch64.sve.ld1.gather.uxtw.nxv4i32(<vscale x 4 x i1> zeroinitializer, ptr %b, <vscale x 4 x i32> %a)
  ret <vscale x 4 x i32> %0
}

define <vscale x 4 x i32> @test_ld1_gather_uxtw_index(ptr %b, <vscale x 4 x i32> %a) #0 {
; CHECK-LABEL: define <vscale x 4 x i32> @test_ld1_gather_uxtw_index(
; CHECK-SAME: ptr [[B:%.*]], <vscale x 4 x i32> [[A:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 4 x i32> zeroinitializer
;
entry:
  %0 = tail call <vscale x 4 x i32> @llvm.aarch64.sve.ld1.gather.uxtw.index.nxv4i32(<vscale x 4 x i1> zeroinitializer, ptr %b, <vscale x 4 x i32> %a)
  ret <vscale x 4 x i32> %0
}

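; The quadword gather loads (ld1q.*) take a <vscale x 1 x i1> predicate
; but fold in the same way.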
define <vscale x 2 x i64> @test_ld1q_gather_index(ptr %a, <vscale x 2 x i64> %b) #0 {
; CHECK-LABEL: define <vscale x 2 x i64> @test_ld1q_gather_index(
; CHECK-SAME: ptr [[A:%.*]], <vscale x 2 x i64> [[B:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 2 x i64> zeroinitializer
;
entry:
  %0 = tail call <vscale x 2 x i64> @llvm.aarch64.sve.ld1q.gather.index.nxv2i64(<vscale x 1 x i1> zeroinitializer, ptr %a, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %0
}

define <vscale x 8 x i16> @test_ld1q_gather_scalar_offset(<vscale x 2 x i64> %a) #0 {
; CHECK-LABEL: define <vscale x 8 x i16> @test_ld1q_gather_scalar_offset(
; CHECK-SAME: <vscale x 2 x i64> [[A:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 8 x i16> zeroinitializer
;
entry:
  %0 = tail call <vscale x 8 x i16> @llvm.aarch64.sve.ld1q.gather.scalar.offset.nxv8i16.nxv2i64(<vscale x 1 x i1> zeroinitializer, <vscale x 2 x i64> %a, i64 0)
  ret <vscale x 8 x i16> %0
}

define <vscale x 16 x i8> @test_ld1q_gather_vector_offset(ptr %a, <vscale x 2 x i64> %b) #0 {
; CHECK-LABEL: define <vscale x 16 x i8> @test_ld1q_gather_vector_offset(
; CHECK-SAME: ptr [[A:%.*]], <vscale x 2 x i64> [[B:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 16 x i8> zeroinitializer
;
entry:
  %0 = tail call <vscale x 16 x i8> @llvm.aarch64.sve.ld1q.gather.vector.offset.nxv16i8(<vscale x 1 x i1> zeroinitializer, ptr %a, <vscale x 2 x i64> %b)
  ret <vscale x 16 x i8> %0
}

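; The replicating loads (ld1ro, ld1rq) and the ld1udq/ld1uwq forms
; follow the same pattern.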
define <vscale x 16 x i8> @test_ld1ro(ptr %a) #0 {
; CHECK-LABEL: define <vscale x 16 x i8> @test_ld1ro(
; CHECK-SAME: ptr [[A:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 16 x i8> zeroinitializer
;
entry:
  %res = call <vscale x 16 x i8> @llvm.aarch64.sve.ld1ro.nxv16i8(<vscale x 16 x i1> zeroinitializer, ptr %a)
  ret <vscale x 16 x i8> %res
}

define <vscale x 16 x i8> @test_ld1rq(ptr %a) #0 {
; CHECK-LABEL: define <vscale x 16 x i8> @test_ld1rq(
; CHECK-SAME: ptr [[A:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 16 x i8> zeroinitializer
;
entry:
  %res = call <vscale x 16 x i8> @llvm.aarch64.sve.ld1rq.nxv16i8(<vscale x 16 x i1> zeroinitializer, ptr %a)
  ret <vscale x 16 x i8> %res
}

define <vscale x 2 x i64> @test_ld1udq(ptr %a) #0 {
; CHECK-LABEL: define <vscale x 2 x i64> @test_ld1udq(
; CHECK-SAME: ptr [[A:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 2 x i64> zeroinitializer
;
entry:
  %res = call <vscale x 2 x i64> @llvm.aarch64.sve.ld1udq.nxv2i64(<vscale x 1 x i1> zeroinitializer, ptr %a)
  ret <vscale x 2 x i64> %res
}

define <vscale x 4 x i32> @test_ld1uwq(ptr %a) #0 {
; CHECK-LABEL: define <vscale x 4 x i32> @test_ld1uwq(
; CHECK-SAME: ptr [[A:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 4 x i32> zeroinitializer
;
entry:
  %res = call <vscale x 4 x i32> @llvm.aarch64.sve.ld1uwq.nxv4i32(<vscale x 1 x i1> zeroinitializer, ptr %a)
  ret <vscale x 4 x i32> %res
}

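; The structured loads (ld2/ld3/ld4 and their quadword variants) return
; an aggregate of vectors; with no active lanes the extracted field
; folds to zeroinitializer as well.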
define <vscale x 8 x i16> @test_ld2_sret(ptr %a) #0 {
; CHECK-LABEL: define <vscale x 8 x i16> @test_ld2_sret(
; CHECK-SAME: ptr [[A:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 8 x i16> zeroinitializer
;
entry:
  %0 = tail call { <vscale x 8 x i16>, <vscale x 8 x i16> } @llvm.aarch64.sve.ld2.sret.nxv8i16(<vscale x 8 x i1> zeroinitializer, ptr %a)
  %1 = extractvalue { <vscale x 8 x i16>, <vscale x 8 x i16> } %0, 0
  ret <vscale x 8 x i16> %1
}

define <vscale x 8 x i16> @test_ld2q_sret(ptr %a) #0 {
; CHECK-LABEL: define <vscale x 8 x i16> @test_ld2q_sret(
; CHECK-SAME: ptr [[A:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 8 x i16> zeroinitializer
;
entry:
  %0 = tail call { <vscale x 8 x i16>, <vscale x 8 x i16> } @llvm.aarch64.sve.ld2q.sret.nxv8i16(<vscale x 8 x i1> zeroinitializer, ptr %a)
  %1 = extractvalue { <vscale x 8 x i16>, <vscale x 8 x i16> } %0, 0
  ret <vscale x 8 x i16> %1
}

define <vscale x 8 x i16> @test_ld3(ptr %a) #0 {
; CHECK-LABEL: define <vscale x 8 x i16> @test_ld3(
; CHECK-SAME: ptr [[A:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 8 x i16> zeroinitializer
;
entry:
  %0 = tail call { <vscale x 8 x i16>, <vscale x 8 x i16>, <vscale x 8 x i16> } @llvm.aarch64.sve.ld3.sret.nxv8i16(<vscale x 8 x i1> zeroinitializer, ptr %a)
  %1 = extractvalue { <vscale x 8 x i16>, <vscale x 8 x i16>, <vscale x 8 x i16> } %0, 0
  ret <vscale x 8 x i16> %1
}

define <vscale x 8 x i16> @test_ld3q(ptr %a) #0 {
; CHECK-LABEL: define <vscale x 8 x i16> @test_ld3q(
; CHECK-SAME: ptr [[A:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 8 x i16> zeroinitializer
;
entry:
  %0 = tail call { <vscale x 8 x i16>, <vscale x 8 x i16>, <vscale x 8 x i16> } @llvm.aarch64.sve.ld3q.sret.nxv8i16(<vscale x 8 x i1> zeroinitializer, ptr %a)
  %1 = extractvalue { <vscale x 8 x i16>, <vscale x 8 x i16>, <vscale x 8 x i16> } %0, 0
  ret <vscale x 8 x i16> %1
}

define <vscale x 8 x i16> @test_ld4(ptr %a) #0 {
; CHECK-LABEL: define <vscale x 8 x i16> @test_ld4(
; CHECK-SAME: ptr [[A:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 8 x i16> zeroinitializer
;
entry:
  %0 = tail call { <vscale x 8 x i16>, <vscale x 8 x i16>, <vscale x 8 x i16>, <vscale x 8 x i16> } @llvm.aarch64.sve.ld4.sret.nxv8i16(<vscale x 8 x i1> zeroinitializer, ptr %a)
  %1 = extractvalue { <vscale x 8 x i16>, <vscale x 8 x i16>, <vscale x 8 x i16>, <vscale x 8 x i16> } %0, 0
  ret <vscale x 8 x i16> %1
}

define <vscale x 8 x i16> @test_ld4q(ptr %a) #0 {
; CHECK-LABEL: define <vscale x 8 x i16> @test_ld4q(
; CHECK-SAME: ptr [[A:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 8 x i16> zeroinitializer
;
entry:
  %0 = tail call { <vscale x 8 x i16>, <vscale x 8 x i16>, <vscale x 8 x i16>, <vscale x 8 x i16> } @llvm.aarch64.sve.ld4q.sret.nxv8i16(<vscale x 8 x i1> zeroinitializer, ptr %a)
  %1 = extractvalue { <vscale x 8 x i16>, <vscale x 8 x i16>, <vscale x 8 x i16>, <vscale x 8 x i16> } %0, 0
  ret <vscale x 8 x i16> %1
}

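; The first-faulting loads (ldff1 and its gather forms) fold identically.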
define <vscale x 8 x i16> @test_ldff1(ptr %a) #0 {
; CHECK-LABEL: define <vscale x 8 x i16> @test_ldff1(
; CHECK-SAME: ptr [[A:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 8 x i16> zeroinitializer
;
entry:
  %0 = tail call <vscale x 8 x i16> @llvm.aarch64.sve.ldff1.nxv8i16(<vscale x 8 x i1> zeroinitializer, ptr %a)
  ret <vscale x 8 x i16> %0
}

define <vscale x 4 x i32> @test_ldff1_gather(ptr %a, <vscale x 4 x i64> %b) #0 {
; CHECK-LABEL: define <vscale x 4 x i32> @test_ldff1_gather(
; CHECK-SAME: ptr [[A:%.*]], <vscale x 4 x i64> [[B:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 4 x i32> zeroinitializer
;
entry:
  %0 = tail call <vscale x 4 x i32> @llvm.aarch64.sve.ldff1.gather.nxv4i32(<vscale x 4 x i1> zeroinitializer, ptr %a, <vscale x 4 x i64> %b)
  ret <vscale x 4 x i32> %0
}

define <vscale x 2 x i64> @test_ldff1_gather_index(ptr %a, <vscale x 2 x i64> %b) #0 {
; CHECK-LABEL: define <vscale x 2 x i64> @test_ldff1_gather_index(
; CHECK-SAME: ptr [[A:%.*]], <vscale x 2 x i64> [[B:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 2 x i64> zeroinitializer
;
entry:
  %0 = tail call <vscale x 2 x i64> @llvm.aarch64.sve.ldff1.gather.index.nxv2i64(<vscale x 2 x i1> zeroinitializer, ptr %a, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %0
}

define <vscale x 4 x i32> @test_ldff1_gather_scalar_offset(<vscale x 4 x i32> %a) #0 {
; CHECK-LABEL: define <vscale x 4 x i32> @test_ldff1_gather_scalar_offset(
; CHECK-SAME: <vscale x 4 x i32> [[A:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 4 x i32> zeroinitializer
;
entry:
  %0 = tail call <vscale x 4 x i32> @llvm.aarch64.sve.ldff1.gather.scalar.offset.nxv4i32.nxv4i32(<vscale x 4 x i1> zeroinitializer, <vscale x 4 x i32> %a, i64 0)
  ret <vscale x 4 x i32> %0
}

define <vscale x 4 x i32> @test_ldff1_gather_sxtw(ptr %b, <vscale x 4 x i32> %a) #0 {
; CHECK-LABEL: define <vscale x 4 x i32> @test_ldff1_gather_sxtw(
; CHECK-SAME: ptr [[B:%.*]], <vscale x 4 x i32> [[A:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 4 x i32> zeroinitializer
;
entry:
  %0 = tail call <vscale x 4 x i32> @llvm.aarch64.sve.ldff1.gather.sxtw.nxv4i32(<vscale x 4 x i1> zeroinitializer, ptr %b, <vscale x 4 x i32> %a)
  ret <vscale x 4 x i32> %0
}

define <vscale x 4 x i32> @test_ldff1_gather_sxtw_index(ptr %b, <vscale x 4 x i32> %a) #0 {
; CHECK-LABEL: define <vscale x 4 x i32> @test_ldff1_gather_sxtw_index(
; CHECK-SAME: ptr [[B:%.*]], <vscale x 4 x i32> [[A:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 4 x i32> zeroinitializer
;
entry:
  %0 = tail call <vscale x 4 x i32> @llvm.aarch64.sve.ldff1.gather.sxtw.index.nxv4i32(<vscale x 4 x i1> zeroinitializer, ptr %b, <vscale x 4 x i32> %a)
  ret <vscale x 4 x i32> %0
}

define <vscale x 4 x i32> @test_ldff1_gather_uxtw(ptr %b, <vscale x 4 x i32> %a) #0 {
; CHECK-LABEL: define <vscale x 4 x i32> @test_ldff1_gather_uxtw(
; CHECK-SAME: ptr [[B:%.*]], <vscale x 4 x i32> [[A:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 4 x i32> zeroinitializer
;
entry:
  %0 = tail call <vscale x 4 x i32> @llvm.aarch64.sve.ldff1.gather.uxtw.nxv4i32(<vscale x 4 x i1> zeroinitializer, ptr %b, <vscale x 4 x i32> %a)
  ret <vscale x 4 x i32> %0
}

define <vscale x 4 x i32> @test_ldff1_gather_uxtw_index(ptr %b, <vscale x 4 x i32> %a) #0 {
; CHECK-LABEL: define <vscale x 4 x i32> @test_ldff1_gather_uxtw_index(
; CHECK-SAME: ptr [[B:%.*]], <vscale x 4 x i32> [[A:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 4 x i32> zeroinitializer
;
entry:
  %0 = tail call <vscale x 4 x i32> @llvm.aarch64.sve.ldff1.gather.uxtw.index.nxv4i32(<vscale x 4 x i1> zeroinitializer, ptr %b, <vscale x 4 x i32> %a)
  ret <vscale x 4 x i32> %0
}

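; Non-faulting (ldnf1) and non-temporal (ldnt1) loads, including the
; ldnt1 gather forms, round out the set.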
define <vscale x 16 x i8> @test_ldnf1(ptr %a) #0 {
; CHECK-LABEL: define <vscale x 16 x i8> @test_ldnf1(
; CHECK-SAME: ptr [[A:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 16 x i8> zeroinitializer
;
entry:
  %0 = tail call <vscale x 16 x i8> @llvm.aarch64.sve.ldnf1.nxv16i8(<vscale x 16 x i1> zeroinitializer, ptr %a)
  ret <vscale x 16 x i8> %0
}

define <vscale x 8 x i16> @test_ldnt1(ptr %a) #0 {
; CHECK-LABEL: define <vscale x 8 x i16> @test_ldnt1(
; CHECK-SAME: ptr [[A:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 8 x i16> zeroinitializer
;
entry:
  %0 = tail call <vscale x 8 x i16> @llvm.aarch64.sve.ldnt1.nxv8i16(<vscale x 8 x i1> zeroinitializer, ptr %a)
  ret <vscale x 8 x i16> %0
}

define <vscale x 4 x i32> @test_ldnt1_gather(ptr %a, <vscale x 4 x i64> %b) #0 {
; CHECK-LABEL: define <vscale x 4 x i32> @test_ldnt1_gather(
; CHECK-SAME: ptr [[A:%.*]], <vscale x 4 x i64> [[B:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 4 x i32> zeroinitializer
;
entry:
  %0 = tail call <vscale x 4 x i32> @llvm.aarch64.sve.ldnt1.gather.nxv4i32(<vscale x 4 x i1> zeroinitializer, ptr %a, <vscale x 4 x i64> %b)
  ret <vscale x 4 x i32> %0
}

define <vscale x 2 x i64> @test_ldnt1_gather_index(ptr %a, <vscale x 2 x i64> %b) #0 {
; CHECK-LABEL: define <vscale x 2 x i64> @test_ldnt1_gather_index(
; CHECK-SAME: ptr [[A:%.*]], <vscale x 2 x i64> [[B:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 2 x i64> zeroinitializer
;
entry:
  %0 = tail call <vscale x 2 x i64> @llvm.aarch64.sve.ldnt1.gather.index.nxv2i64(<vscale x 2 x i1> zeroinitializer, ptr %a, <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %0
}

define <vscale x 4 x i32> @test_ldnt1_gather_scalar_offset(<vscale x 4 x i32> %a) #0 {
; CHECK-LABEL: define <vscale x 4 x i32> @test_ldnt1_gather_scalar_offset(
; CHECK-SAME: <vscale x 4 x i32> [[A:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 4 x i32> zeroinitializer
;
entry:
  %0 = tail call <vscale x 4 x i32> @llvm.aarch64.sve.ldnt1.gather.scalar.offset.nxv4i32.nxv4i32(<vscale x 4 x i1> zeroinitializer, <vscale x 4 x i32> %a, i64 0)
  ret <vscale x 4 x i32> %0
}

define <vscale x 4 x i32> @test_ldnt1_gather_uxtw(ptr %b, <vscale x 4 x i32> %a) #0 {
; CHECK-LABEL: define <vscale x 4 x i32> @test_ldnt1_gather_uxtw(
; CHECK-SAME: ptr [[B:%.*]], <vscale x 4 x i32> [[A:%.*]]) {
; CHECK-NEXT:  [[ENTRY:.*:]]
; CHECK-NEXT:    ret <vscale x 4 x i32> zeroinitializer
;
entry:
  %0 = tail call <vscale x 4 x i32> @llvm.aarch64.sve.ldnt1.gather.uxtw.nxv4i32(<vscale x 4 x i1> zeroinitializer, ptr %b, <vscale x 4 x i32> %a)
  ret <vscale x 4 x i32> %0
}

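; The functions above reference attribute group #0, which is not defined
; in this excerpt; without it the module does not parse. A minimal
; definition (assumed; the original feature list may differ) is:
attributes #0 = { "target-features"="+sve,+sve2p1" }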