/* ACLE support for AArch64 SVE (__ARM_FEATURE_SVE2 intrinsics)
   Copyright (C) 2020 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "memmodel.h"
#include "insn-codes.h"
#include "optabs.h"
#include "recog.h"
#include "expr.h"
#include "basic-block.h"
#include "function.h"
#include "fold-const.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimplify.h"
#include "explow.h"
#include "emit-rtl.h"
#include "tree-vector-builder.h"
#include "rtx-vector-builder.h"
#include "vec-perm-indices.h"
#include "aarch64-sve-builtins.h"
#include "aarch64-sve-builtins-shapes.h"
#include "aarch64-sve-builtins-base.h"
#include "aarch64-sve-builtins-sve2.h"
#include "aarch64-sve-builtins-functions.h"

using namespace aarch64_sve;

namespace {

/* Return the UNSPEC_CDOT* unspec for rotation amount ROT.  */
static int
unspec_cdot (int rot)
{
  switch (rot)
    {
    case 0: return UNSPEC_CDOT;
    case 90: return UNSPEC_CDOT90;
    case 180: return UNSPEC_CDOT180;
    case 270: return UNSPEC_CDOT270;
    default: gcc_unreachable ();
    }
}

/* Return the UNSPEC_SQRDCMLAH* unspec for rotation amount ROT.  */
static int
unspec_sqrdcmlah (int rot)
{
  switch (rot)
    {
    case 0: return UNSPEC_SQRDCMLAH;
    case 90: return UNSPEC_SQRDCMLAH90;
    case 180: return UNSPEC_SQRDCMLAH180;
    case 270: return UNSPEC_SQRDCMLAH270;
    default: gcc_unreachable ();
    }
}

class svaba_impl : public function_base
{
public:
  rtx
  expand (function_expander &e) const OVERRIDE
  {
    rtx_code max_code = e.type_suffix (0).unsigned_p ? UMAX : SMAX;
    machine_mode mode = e.vector_mode (0);
    return e.use_exact_insn (code_for_aarch64_sve2_aba (max_code, mode));
  }
};
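
/* Illustrative note (added commentary, not from the upstream source):
   only a max code is passed to code_for_aarch64_sve2_aba because the
   underlying insn pattern is assumed to derive the absolute difference
   from the max and min of the two inputs, i.e.
     svaba (a, b, c) == a + (max (b, c) - min (b, c)),
   so SMAX vs. UMAX alone distinguishes the signed and unsigned forms.  */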

class svcdot_impl : public function_base
{
public:
  rtx
  expand (function_expander &e) const OVERRIDE
  {
    /* Convert the rotation amount into a specific unspec.  */
    int rot = INTVAL (e.args.pop ());
    return e.use_exact_insn (code_for_aarch64_sve (unspec_cdot (rot),
						   e.vector_mode (0)));
  }
};

class svcdot_lane_impl : public function_base
{
public:
  rtx
  expand (function_expander &e) const OVERRIDE
  {
    /* Convert the rotation amount into a specific unspec.  */
    int rot = INTVAL (e.args.pop ());
    return e.use_exact_insn (code_for_aarch64_lane (unspec_cdot (rot),
						    e.vector_mode (0)));
  }
};

class svldnt1_gather_impl : public full_width_access
{
public:
  unsigned int
  call_properties (const function_instance &) const OVERRIDE
  {
    return CP_READ_MEMORY;
  }

  rtx
  expand (function_expander &e) const OVERRIDE
  {
    e.prepare_gather_address_operands (1, false);
    machine_mode mem_mode = e.memory_vector_mode ();
    return e.use_exact_insn (code_for_aarch64_gather_ldnt (mem_mode));
  }
};

/* Implements extending forms of svldnt1_gather.  */
class svldnt1_gather_extend_impl : public extending_load
{
public:
  CONSTEXPR svldnt1_gather_extend_impl (type_suffix_index memory_type)
    : extending_load (memory_type) {}

  rtx
  expand (function_expander &e) const OVERRIDE
  {
    e.prepare_gather_address_operands (1, false);
    /* Add a constant predicate for the extension rtx.  */
    e.args.quick_push (CONSTM1_RTX (VNx16BImode));
    insn_code icode = code_for_aarch64_gather_ldnt (extend_rtx_code (),
						    e.vector_mode (0),
						    e.memory_vector_mode ());
    return e.use_exact_insn (icode);
  }
};

/* Implements both svmatch and svnmatch; the unspec parameter decides
   between them.  */
class svmatch_svnmatch_impl : public function_base
{
public:
  CONSTEXPR svmatch_svnmatch_impl (int unspec) : m_unspec (unspec) {}

  rtx
  expand (function_expander &e) const OVERRIDE
  {
    /* These are UNSPEC_PRED_Z operations and so need a hint operand.  */
    e.add_ptrue_hint (0, e.gp_mode (0));
    return e.use_exact_insn (code_for_aarch64_pred (m_unspec,
						    e.vector_mode (0)));
  }

  int m_unspec;
};

/* Implements both svmovlb and svmovlt; the unspec parameters decide
   between them.  */
class svmovl_lb_impl : public unspec_based_function_base
{
public:
  CONSTEXPR svmovl_lb_impl (int unspec_for_sint, int unspec_for_uint,
			    int unspec_for_fp)
    : unspec_based_function_base (unspec_for_sint, unspec_for_uint,
				  unspec_for_fp)
  {}

  rtx
  expand (function_expander &e) const OVERRIDE
  {
    e.args.quick_push (const0_rtx);
    return e.map_to_unspecs (m_unspec_for_sint, m_unspec_for_uint,
			     m_unspec_for_fp);
  }
};
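
/* Illustrative note (added commentary, not from the upstream source):
   pushing const0_rtx gives the shift-left-long unspecs a zero shift
   amount, so svmovlb (x) expands in the same way as svshllb (x, 0),
   and likewise for svmovlt/svshllt -- see the UNSPEC_[SU]SHLL[BT]
   arguments passed to svmovl_lb_impl in the FUNCTION list below.  */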

class svqcadd_impl : public function_base
{
public:
  rtx
  expand (function_expander &e) const OVERRIDE
  {
    /* Convert the rotation amount into a specific unspec.  */
    int rot = INTVAL (e.args.pop ());
    if (rot == 90)
      return e.map_to_unspecs (UNSPEC_SQCADD90, -1, -1);
    if (rot == 270)
      return e.map_to_unspecs (UNSPEC_SQCADD270, -1, -1);
    gcc_unreachable ();
  }
};

class svqrdcmlah_impl : public function_base
{
public:
  rtx
  expand (function_expander &e) const OVERRIDE
  {
    /* Convert the rotation amount into a specific unspec.  */
    int rot = INTVAL (e.args.pop ());
    return e.use_exact_insn (code_for_aarch64_sve (unspec_sqrdcmlah (rot),
						   e.vector_mode (0)));
  }
};

class svqrdcmlah_lane_impl : public function_base
{
public:
  rtx
  expand (function_expander &e) const OVERRIDE
  {
    /* Convert the rotation amount into a specific unspec.  */
    int rot = INTVAL (e.args.pop ());
    return e.use_exact_insn (code_for_aarch64_lane (unspec_sqrdcmlah (rot),
						    e.vector_mode (0)));
  }
};

class svqrshl_impl : public unspec_based_function
{
public:
  CONSTEXPR svqrshl_impl ()
    : unspec_based_function (UNSPEC_SQRSHL, UNSPEC_UQRSHL, -1) {}

  gimple *
  fold (gimple_folder &f) const OVERRIDE
  {
    if (tree amount = uniform_integer_cst_p (gimple_call_arg (f.call, 2)))
      {
	if (wi::to_widest (amount) >= 0)
	  {
	    /* The rounding has no effect, and [SU]QSHL has immediate forms
	       that we can use for sensible shift amounts.  */
	    function_instance instance ("svqshl", functions::svqshl,
					shapes::binary_int_opt_n, MODE_n,
					f.type_suffix_ids, f.pred);
	    return f.redirect_call (instance);
	  }
	else
	  {
	    /* The saturation has no effect, and [SU]RSHL has immediate forms
	       that we can use for sensible shift amounts.  */
	    function_instance instance ("svrshl", functions::svrshl,
					shapes::binary_int_opt_n, MODE_n,
					f.type_suffix_ids, f.pred);
	    return f.redirect_call (instance);
	  }
      }
    return NULL;
  }
};
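
/* Illustrative examples of the fold above (hypothetical calls, using
   the ACLE spelling of the intrinsics):

     svqrshl_n_s16_x (pg, x, 3)   -> svqshl_n_s16_x (pg, x, 3)
	 (a non-negative amount shifts left, so rounding cannot occur)
     svqrshl_n_s16_x (pg, x, -3)  -> svrshl_n_s16_x (pg, x, -3)
	 (a negative amount shifts right, so saturation cannot occur)

   The svrshl form may then be folded further by svrshl_impl below.  */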

class svqshl_impl : public unspec_based_function
{
public:
  CONSTEXPR svqshl_impl ()
    : unspec_based_function (UNSPEC_SQSHL, UNSPEC_UQSHL, -1) {}

  gimple *
  fold (gimple_folder &f) const OVERRIDE
  {
    if (tree amount = uniform_integer_cst_p (gimple_call_arg (f.call, 2)))
      {
	int element_bits = f.type_suffix (0).element_bits;
	if (wi::to_widest (amount) >= -element_bits
	    && wi::to_widest (amount) < 0)
	  {
	    /* The saturation has no effect for right shifts, so we can
	       use the immediate form of ASR or LSR.  */
	    amount = wide_int_to_tree (TREE_TYPE (amount),
				       -wi::to_wide (amount));
	    function_instance instance ("svasr", functions::svasr,
					shapes::binary_uint_opt_n, MODE_n,
					f.type_suffix_ids, f.pred);
	    if (f.type_suffix (0).unsigned_p)
	      {
		instance.base_name = "svlsr";
		instance.base = functions::svlsr;
	      }
	    gcall *call = as_a <gcall *> (f.redirect_call (instance));
	    gimple_call_set_arg (call, 2, amount);
	    return call;
	  }
      }
    return NULL;
  }
};
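
/* Illustrative examples of the fold above (hypothetical calls, using
   the ACLE spelling of the intrinsics):

     svqshl_n_s32_x (pg, x, -2)  -> svasr_n_s32_x (pg, x, 2)
     svqshl_n_u32_x (pg, x, -2)  -> svlsr_n_u32_x (pg, x, 2)

   The amount is negated because svasr/svlsr take a positive right-shift
   count, whereas svqshl encodes a right shift as a negative amount.  */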

class svrshl_impl : public unspec_based_function
{
public:
  CONSTEXPR svrshl_impl ()
    : unspec_based_function (UNSPEC_SRSHL, UNSPEC_URSHL, -1) {}

  gimple *
  fold (gimple_folder &f) const OVERRIDE
  {
    if (tree amount = uniform_integer_cst_p (gimple_call_arg (f.call, 2)))
      {
	if (wi::to_widest (amount) >= 0)
	  {
	    /* The rounding has no effect, and LSL has immediate forms
	       that we can use for sensible shift amounts.  */
	    function_instance instance ("svlsl", functions::svlsl,
					shapes::binary_uint_opt_n, MODE_n,
					f.type_suffix_ids, f.pred);
	    gcall *call = as_a <gcall *> (f.redirect_call (instance));
	    gimple_call_set_arg (call, 2, amount);
	    return call;
	  }
	int element_bits = f.type_suffix (0).element_bits;
	if (wi::to_widest (amount) >= -element_bits)
	  {
	    /* The shift amount is in range of [SU]RSHR.  */
	    amount = wide_int_to_tree (TREE_TYPE (amount),
				       -wi::to_wide (amount));
	    function_instance instance ("svrshr", functions::svrshr,
					shapes::shift_right_imm, MODE_n,
					f.type_suffix_ids, f.pred);
	    gcall *call = as_a <gcall *> (f.redirect_call (instance));
	    gimple_call_set_arg (call, 2, amount);
	    return call;
	  }
      }
    return NULL;
  }
};
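
/* Illustrative examples of the fold above (hypothetical calls, using
   the ACLE spelling of the intrinsics):

     svrshl_n_s32_x (pg, x, 2)   -> svlsl_n_s32_x (pg, x, 2)
	 (rounding never affects a left shift)
     svrshl_n_s32_x (pg, x, -2)  -> svrshr_n_s32_x (pg, x, 2)
	 (an in-range negative amount is a rounding right shift)  */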

class svsqadd_impl : public function_base
{
public:
  rtx
  expand (function_expander &e) const OVERRIDE
  {
    machine_mode mode = e.vector_mode (0);
    if (e.pred == PRED_x
	&& aarch64_sve_sqadd_sqsub_immediate_p (mode, e.args[2], false))
      return e.map_to_rtx_codes (UNKNOWN, US_PLUS, -1);
    return e.map_to_unspecs (-1, UNSPEC_USQADD, -1);
  }
};
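
/* Illustrative note (added commentary, not from the upstream source):
   svsqadd adds a signed value to an unsigned vector with saturation.
   When the predicate is "x" and the addend is an acceptable immediate,
   the code above expands through the generic unsigned saturating-add
   rtx code (US_PLUS), which has immediate forms; otherwise it goes
   through the dedicated USQADD unspec.  */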

class svsra_impl : public function_base
{
public:
  rtx
  expand (function_expander &e) const OVERRIDE
  {
    rtx_code shift_code = e.type_suffix (0).unsigned_p ? LSHIFTRT : ASHIFTRT;
    machine_mode mode = e.vector_mode (0);
    return e.use_exact_insn (code_for_aarch64_sve_add (shift_code, mode));
  }
};
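
/* Illustrative note (added commentary, not from the upstream source):
   svsra (a, b, n) computes a + (b >> n), so the expander only has to
   choose between an arithmetic (signed) and a logical (unsigned) right
   shift before using the combined shift-and-accumulate pattern.  */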

class svstnt1_scatter_impl : public full_width_access
{
public:
  unsigned int
  call_properties (const function_instance &) const OVERRIDE
  {
    return CP_WRITE_MEMORY;
  }

  rtx
  expand (function_expander &e) const OVERRIDE
  {
    e.prepare_gather_address_operands (1, false);
    machine_mode mem_mode = e.memory_vector_mode ();
    return e.use_exact_insn (code_for_aarch64_scatter_stnt (mem_mode));
  }
};

/* Implements truncating forms of svstnt1_scatter.  */
class svstnt1_scatter_truncate_impl : public truncating_store
{
public:
  CONSTEXPR svstnt1_scatter_truncate_impl (scalar_int_mode to_mode)
    : truncating_store (to_mode) {}

  rtx
  expand (function_expander &e) const OVERRIDE
  {
    e.prepare_gather_address_operands (1, false);
    insn_code icode = code_for_aarch64_scatter_stnt (e.vector_mode (0),
						     e.memory_vector_mode ());
    return e.use_exact_insn (icode);
  }
};

class svtbl2_impl : public quiet<multi_vector_function>
{
public:
  CONSTEXPR svtbl2_impl () : quiet<multi_vector_function> (2) {}

  rtx
  expand (function_expander &e) const OVERRIDE
  {
    return e.use_exact_insn (code_for_aarch64_sve2_tbl2 (e.vector_mode (0)));
  }
};

class svuqadd_impl : public function_base
{
public:
  rtx
  expand (function_expander &e) const OVERRIDE
  {
    machine_mode mode = e.vector_mode (0);
    if (e.pred == PRED_x
	&& aarch64_sve_arith_immediate_p (mode, e.args[2], false))
      return e.use_unpred_insn (code_for_aarch64_sve_suqadd_const (mode));
    return e.map_to_unspecs (UNSPEC_SUQADD, -1, -1);
  }
};
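
/* Illustrative note (added commentary, not from the upstream source):
   svuqadd is the signed counterpart of svsqadd: it adds an unsigned
   value to a signed vector with saturation.  With an "x" predicate and
   an immediate addend that the target accepts, the dedicated
   unpredicated constant pattern is used; otherwise expansion goes
   through the SUQADD unspec.  */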

/* Implements both svwhilerw and svwhilewr; the unspec parameter decides
   between them.  */
class svwhilerw_svwhilewr_impl : public full_width_access
{
public:
  CONSTEXPR svwhilerw_svwhilewr_impl (int unspec) : m_unspec (unspec) {}

  rtx
  expand (function_expander &e) const OVERRIDE
  {
    for (unsigned int i = 0; i < 2; ++i)
      e.args[i] = e.convert_to_pmode (e.args[i]);
    return e.use_exact_insn (code_for_while (m_unspec, Pmode, e.gp_mode (0)));
  }

  int m_unspec;
};

} /* end anonymous namespace */

namespace aarch64_sve {

FUNCTION (svaba, svaba_impl,)
FUNCTION (svabalb, unspec_based_add_function, (UNSPEC_SABDLB,
					       UNSPEC_UABDLB, -1))
FUNCTION (svabalt, unspec_based_add_function, (UNSPEC_SABDLT,
					       UNSPEC_UABDLT, -1))
FUNCTION (svadclb, unspec_based_function, (-1, UNSPEC_ADCLB, -1))
FUNCTION (svadclt, unspec_based_function, (-1, UNSPEC_ADCLT, -1))
FUNCTION (svaddhnb, unspec_based_function, (UNSPEC_ADDHNB, UNSPEC_ADDHNB, -1))
FUNCTION (svaddhnt, unspec_based_function, (UNSPEC_ADDHNT, UNSPEC_ADDHNT, -1))
FUNCTION (svabdlb, unspec_based_function, (UNSPEC_SABDLB, UNSPEC_UABDLB, -1))
FUNCTION (svabdlt, unspec_based_function, (UNSPEC_SABDLT, UNSPEC_UABDLT, -1))
FUNCTION (svadalp, unspec_based_function, (UNSPEC_SADALP, UNSPEC_UADALP, -1))
FUNCTION (svaddlb, unspec_based_function, (UNSPEC_SADDLB, UNSPEC_UADDLB, -1))
FUNCTION (svaddlbt, unspec_based_function, (UNSPEC_SADDLBT, -1, -1))
FUNCTION (svaddlt, unspec_based_function, (UNSPEC_SADDLT, UNSPEC_UADDLT, -1))
FUNCTION (svaddwb, unspec_based_function, (UNSPEC_SADDWB, UNSPEC_UADDWB, -1))
FUNCTION (svaddwt, unspec_based_function, (UNSPEC_SADDWT, UNSPEC_UADDWT, -1))
FUNCTION (svaddp, unspec_based_pred_function, (UNSPEC_ADDP, UNSPEC_ADDP,
					       UNSPEC_FADDP))
FUNCTION (svaesd, fixed_insn_function, (CODE_FOR_aarch64_sve2_aesd))
FUNCTION (svaese, fixed_insn_function, (CODE_FOR_aarch64_sve2_aese))
FUNCTION (svaesimc, fixed_insn_function, (CODE_FOR_aarch64_sve2_aesimc))
FUNCTION (svaesmc, fixed_insn_function, (CODE_FOR_aarch64_sve2_aesmc))
FUNCTION (svbcax, CODE_FOR_MODE0 (aarch64_sve2_bcax),)
FUNCTION (svbdep, unspec_based_function, (UNSPEC_BDEP, UNSPEC_BDEP, -1))
FUNCTION (svbext, unspec_based_function, (UNSPEC_BEXT, UNSPEC_BEXT, -1))
FUNCTION (svbgrp, unspec_based_function, (UNSPEC_BGRP, UNSPEC_BGRP, -1))
FUNCTION (svbsl, CODE_FOR_MODE0 (aarch64_sve2_bsl),)
FUNCTION (svbsl1n, CODE_FOR_MODE0 (aarch64_sve2_bsl1n),)
FUNCTION (svbsl2n, CODE_FOR_MODE0 (aarch64_sve2_bsl2n),)
FUNCTION (svcdot, svcdot_impl,)
FUNCTION (svcdot_lane, svcdot_lane_impl,)
FUNCTION (svcvtlt, unspec_based_function, (-1, -1, UNSPEC_COND_FCVTLT))
FUNCTION (svcvtx, unspec_based_function, (-1, -1, UNSPEC_COND_FCVTX))
FUNCTION (svcvtxnt, CODE_FOR_MODE1 (aarch64_sve2_cvtxnt),)
FUNCTION (sveor3, CODE_FOR_MODE0 (aarch64_sve2_eor3),)
FUNCTION (sveorbt, unspec_based_function, (UNSPEC_EORBT, UNSPEC_EORBT, -1))
FUNCTION (sveortb, unspec_based_function, (UNSPEC_EORTB, UNSPEC_EORTB, -1))
FUNCTION (svhadd, unspec_based_function, (UNSPEC_SHADD, UNSPEC_UHADD, -1))
FUNCTION (svhsub, unspec_based_function, (UNSPEC_SHSUB, UNSPEC_UHSUB, -1))
FUNCTION (svhistcnt, CODE_FOR_MODE0 (aarch64_sve2_histcnt),)
FUNCTION (svhistseg, CODE_FOR_MODE0 (aarch64_sve2_histseg),)
FUNCTION (svhsubr, unspec_based_function_rotated, (UNSPEC_SHSUB,
						   UNSPEC_UHSUB, -1))
FUNCTION (svldnt1_gather, svldnt1_gather_impl,)
FUNCTION (svldnt1sb_gather, svldnt1_gather_extend_impl, (TYPE_SUFFIX_s8))
FUNCTION (svldnt1sh_gather, svldnt1_gather_extend_impl, (TYPE_SUFFIX_s16))
FUNCTION (svldnt1sw_gather, svldnt1_gather_extend_impl, (TYPE_SUFFIX_s32))
FUNCTION (svldnt1ub_gather, svldnt1_gather_extend_impl, (TYPE_SUFFIX_u8))
FUNCTION (svldnt1uh_gather, svldnt1_gather_extend_impl, (TYPE_SUFFIX_u16))
FUNCTION (svldnt1uw_gather, svldnt1_gather_extend_impl, (TYPE_SUFFIX_u32))
FUNCTION (svlogb, unspec_based_function, (-1, -1, UNSPEC_COND_FLOGB))
FUNCTION (svmatch, svmatch_svnmatch_impl, (UNSPEC_MATCH))
FUNCTION (svmaxp, unspec_based_pred_function, (UNSPEC_SMAXP, UNSPEC_UMAXP,
					       UNSPEC_FMAXP))
FUNCTION (svmaxnmp, unspec_based_pred_function, (-1, -1, UNSPEC_FMAXNMP))
FUNCTION (svminp, unspec_based_pred_function, (UNSPEC_SMINP, UNSPEC_UMINP,
					       UNSPEC_FMINP))
FUNCTION (svminnmp, unspec_based_pred_function, (-1, -1, UNSPEC_FMINNMP))
FUNCTION (svmlalb, unspec_based_mla_function, (UNSPEC_SMULLB,
					       UNSPEC_UMULLB, UNSPEC_FMLALB))
FUNCTION (svmlalb_lane, unspec_based_mla_lane_function, (UNSPEC_SMULLB,
							 UNSPEC_UMULLB,
							 UNSPEC_FMLALB))
FUNCTION (svmlalt, unspec_based_mla_function, (UNSPEC_SMULLT,
					       UNSPEC_UMULLT, UNSPEC_FMLALT))
FUNCTION (svmlalt_lane, unspec_based_mla_lane_function, (UNSPEC_SMULLT,
							 UNSPEC_UMULLT,
							 UNSPEC_FMLALT))
FUNCTION (svmlslb, unspec_based_mls_function, (UNSPEC_SMULLB,
					       UNSPEC_UMULLB, UNSPEC_FMLSLB))
FUNCTION (svmlslb_lane, unspec_based_mls_lane_function, (UNSPEC_SMULLB,
							 UNSPEC_UMULLB,
							 UNSPEC_FMLSLB))
FUNCTION (svmlslt, unspec_based_mls_function, (UNSPEC_SMULLT,
					       UNSPEC_UMULLT, UNSPEC_FMLSLT))
FUNCTION (svmlslt_lane, unspec_based_mls_lane_function, (UNSPEC_SMULLT,
							 UNSPEC_UMULLT,
							 UNSPEC_FMLSLT))
FUNCTION (svmovlb, svmovl_lb_impl, (UNSPEC_SSHLLB, UNSPEC_USHLLB, -1))
FUNCTION (svmovlt, svmovl_lb_impl, (UNSPEC_SSHLLT, UNSPEC_USHLLT, -1))
FUNCTION (svmullb, unspec_based_function, (UNSPEC_SMULLB, UNSPEC_UMULLB, -1))
FUNCTION (svmullb_lane, unspec_based_lane_function, (UNSPEC_SMULLB,
						     UNSPEC_UMULLB, -1))
FUNCTION (svmullt, unspec_based_function, (UNSPEC_SMULLT, UNSPEC_UMULLT, -1))
FUNCTION (svmullt_lane, unspec_based_lane_function, (UNSPEC_SMULLT,
						     UNSPEC_UMULLT, -1))
FUNCTION (svnbsl, CODE_FOR_MODE0 (aarch64_sve2_nbsl),)
FUNCTION (svnmatch, svmatch_svnmatch_impl, (UNSPEC_NMATCH))
FUNCTION (svpmul, CODE_FOR_MODE0 (aarch64_sve2_pmul),)
FUNCTION (svpmullb, unspec_based_function, (-1, UNSPEC_PMULLB, -1))
FUNCTION (svpmullb_pair, unspec_based_function, (-1, UNSPEC_PMULLB_PAIR, -1))
FUNCTION (svpmullt, unspec_based_function, (-1, UNSPEC_PMULLT, -1))
FUNCTION (svpmullt_pair, unspec_based_function, (-1, UNSPEC_PMULLT_PAIR, -1))
FUNCTION (svqabs, rtx_code_function, (SS_ABS, UNKNOWN, UNKNOWN))
FUNCTION (svqcadd, svqcadd_impl,)
FUNCTION (svqdmlalb, unspec_based_qadd_function, (UNSPEC_SQDMULLB, -1, -1))
FUNCTION (svqdmlalb_lane, unspec_based_qadd_lane_function, (UNSPEC_SQDMULLB,
							    -1, -1))
FUNCTION (svqdmlalbt, unspec_based_qadd_function, (UNSPEC_SQDMULLBT, -1, -1))
FUNCTION (svqdmlalt, unspec_based_qadd_function, (UNSPEC_SQDMULLT, -1, -1))
FUNCTION (svqdmlalt_lane, unspec_based_qadd_lane_function, (UNSPEC_SQDMULLT,
							    -1, -1))
FUNCTION (svqdmlslb, unspec_based_qsub_function, (UNSPEC_SQDMULLB, -1, -1))
FUNCTION (svqdmlslb_lane, unspec_based_qsub_lane_function, (UNSPEC_SQDMULLB,
							    -1, -1))
FUNCTION (svqdmlslbt, unspec_based_qsub_function, (UNSPEC_SQDMULLBT, -1, -1))
FUNCTION (svqdmlslt, unspec_based_qsub_function, (UNSPEC_SQDMULLT, -1, -1))
FUNCTION (svqdmlslt_lane, unspec_based_qsub_lane_function, (UNSPEC_SQDMULLT,
							    -1, -1))
FUNCTION (svqdmulh, unspec_based_function, (UNSPEC_SQDMULH, -1, -1))
FUNCTION (svqdmulh_lane, unspec_based_lane_function, (UNSPEC_SQDMULH, -1, -1))
FUNCTION (svqdmullb, unspec_based_function, (UNSPEC_SQDMULLB, -1, -1))
FUNCTION (svqdmullb_lane, unspec_based_lane_function, (UNSPEC_SQDMULLB,
						       -1, -1))
FUNCTION (svqdmullt, unspec_based_function, (UNSPEC_SQDMULLT, -1, -1))
FUNCTION (svqdmullt_lane, unspec_based_lane_function, (UNSPEC_SQDMULLT,
						       -1, -1))
FUNCTION (svqneg, rtx_code_function, (SS_NEG, UNKNOWN, UNKNOWN))
FUNCTION (svqrdcmlah, svqrdcmlah_impl,)
FUNCTION (svqrdcmlah_lane, svqrdcmlah_lane_impl,)
FUNCTION (svqrdmulh, unspec_based_function, (UNSPEC_SQRDMULH, -1, -1))
FUNCTION (svqrdmulh_lane, unspec_based_lane_function, (UNSPEC_SQRDMULH,
						       -1, -1))
FUNCTION (svqrdmlah, unspec_based_function, (UNSPEC_SQRDMLAH, -1, -1))
FUNCTION (svqrdmlah_lane, unspec_based_lane_function, (UNSPEC_SQRDMLAH,
						       -1, -1))
FUNCTION (svqrdmlsh, unspec_based_function, (UNSPEC_SQRDMLSH, -1, -1))
FUNCTION (svqrdmlsh_lane, unspec_based_lane_function, (UNSPEC_SQRDMLSH,
						       -1, -1))
FUNCTION (svqrshl, svqrshl_impl,)
FUNCTION (svqrshrnb, unspec_based_function, (UNSPEC_SQRSHRNB,
					     UNSPEC_UQRSHRNB, -1))
FUNCTION (svqrshrnt, unspec_based_function, (UNSPEC_SQRSHRNT,
					     UNSPEC_UQRSHRNT, -1))
FUNCTION (svqrshrunb, unspec_based_function, (UNSPEC_SQRSHRUNB, -1, -1))
FUNCTION (svqrshrunt, unspec_based_function, (UNSPEC_SQRSHRUNT, -1, -1))
FUNCTION (svqshl, svqshl_impl,)
FUNCTION (svqshlu, unspec_based_function, (UNSPEC_SQSHLU, -1, -1))
FUNCTION (svqshrnb, unspec_based_function, (UNSPEC_SQSHRNB,
					    UNSPEC_UQSHRNB, -1))
FUNCTION (svqshrnt, unspec_based_function, (UNSPEC_SQSHRNT,
					    UNSPEC_UQSHRNT, -1))
FUNCTION (svqshrunb, unspec_based_function, (UNSPEC_SQSHRUNB, -1, -1))
FUNCTION (svqshrunt, unspec_based_function, (UNSPEC_SQSHRUNT, -1, -1))
FUNCTION (svqsubr, rtx_code_function_rotated, (SS_MINUS, US_MINUS, -1))
FUNCTION (svqxtnb, unspec_based_function, (UNSPEC_SQXTNB, UNSPEC_UQXTNB, -1))
FUNCTION (svqxtnt, unspec_based_function, (UNSPEC_SQXTNT, UNSPEC_UQXTNT, -1))
FUNCTION (svqxtunb, unspec_based_function, (UNSPEC_SQXTUNB, -1, -1))
FUNCTION (svqxtunt, unspec_based_function, (UNSPEC_SQXTUNT, -1, -1))
FUNCTION (svraddhnb, unspec_based_function, (UNSPEC_RADDHNB,
					     UNSPEC_RADDHNB, -1))
FUNCTION (svraddhnt, unspec_based_function, (UNSPEC_RADDHNT,
					     UNSPEC_RADDHNT, -1))
FUNCTION (svrax1, fixed_insn_function, (CODE_FOR_aarch64_sve2_rax1))
FUNCTION (svrhadd, unspec_based_function, (UNSPEC_SRHADD, UNSPEC_URHADD, -1))
FUNCTION (svrshl, svrshl_impl,)
FUNCTION (svrshr, unspec_based_function, (UNSPEC_SRSHR, UNSPEC_URSHR, -1))
FUNCTION (svrshrnb, unspec_based_function, (UNSPEC_RSHRNB, UNSPEC_RSHRNB, -1))
FUNCTION (svrshrnt, unspec_based_function, (UNSPEC_RSHRNT, UNSPEC_RSHRNT, -1))
FUNCTION (svrsra, unspec_based_add_function, (UNSPEC_SRSHR, UNSPEC_URSHR, -1))
FUNCTION (svrsubhnb, unspec_based_function, (UNSPEC_RSUBHNB,
					     UNSPEC_RSUBHNB, -1))
FUNCTION (svrsubhnt, unspec_based_function, (UNSPEC_RSUBHNT,
					     UNSPEC_RSUBHNT, -1))
FUNCTION (svsbclb, unspec_based_function, (-1, UNSPEC_SBCLB, -1))
FUNCTION (svsbclt, unspec_based_function, (-1, UNSPEC_SBCLT, -1))
FUNCTION (svshllb, unspec_based_function, (UNSPEC_SSHLLB, UNSPEC_USHLLB, -1))
FUNCTION (svshllt, unspec_based_function, (UNSPEC_SSHLLT, UNSPEC_USHLLT, -1))
FUNCTION (svshrnb, unspec_based_function, (UNSPEC_SHRNB, UNSPEC_SHRNB, -1))
FUNCTION (svshrnt, unspec_based_function, (UNSPEC_SHRNT, UNSPEC_SHRNT, -1))
FUNCTION (svsli, unspec_based_function, (UNSPEC_SLI, UNSPEC_SLI, -1))
FUNCTION (svsm4e, fixed_insn_function, (CODE_FOR_aarch64_sve2_sm4e))
FUNCTION (svsm4ekey, fixed_insn_function, (CODE_FOR_aarch64_sve2_sm4ekey))
FUNCTION (svsqadd, svsqadd_impl,)
FUNCTION (svsra, svsra_impl,)
FUNCTION (svsri, unspec_based_function, (UNSPEC_SRI, UNSPEC_SRI, -1))
FUNCTION (svstnt1_scatter, svstnt1_scatter_impl,)
FUNCTION (svstnt1b_scatter, svstnt1_scatter_truncate_impl, (QImode))
FUNCTION (svstnt1h_scatter, svstnt1_scatter_truncate_impl, (HImode))
FUNCTION (svstnt1w_scatter, svstnt1_scatter_truncate_impl, (SImode))
FUNCTION (svsubhnb, unspec_based_function, (UNSPEC_SUBHNB, UNSPEC_SUBHNB, -1))
FUNCTION (svsubhnt, unspec_based_function, (UNSPEC_SUBHNT, UNSPEC_SUBHNT, -1))
FUNCTION (svsublb, unspec_based_function, (UNSPEC_SSUBLB, UNSPEC_USUBLB, -1))
FUNCTION (svsublbt, unspec_based_function, (UNSPEC_SSUBLBT, -1, -1))
FUNCTION (svsublt, unspec_based_function, (UNSPEC_SSUBLT, UNSPEC_USUBLT, -1))
FUNCTION (svsubltb, unspec_based_function, (UNSPEC_SSUBLTB, -1, -1))
FUNCTION (svsubwb, unspec_based_function, (UNSPEC_SSUBWB, UNSPEC_USUBWB, -1))
FUNCTION (svsubwt, unspec_based_function, (UNSPEC_SSUBWT, UNSPEC_USUBWT, -1))
FUNCTION (svtbl2, svtbl2_impl,)
FUNCTION (svtbx, CODE_FOR_MODE0 (aarch64_sve2_tbx),)
FUNCTION (svuqadd, svuqadd_impl,)
FUNCTION (svwhilege, while_comparison, (UNSPEC_WHILEGE, UNSPEC_WHILEHS))
FUNCTION (svwhilegt, while_comparison, (UNSPEC_WHILEGT, UNSPEC_WHILEHI))
FUNCTION (svwhilerw, svwhilerw_svwhilewr_impl, (UNSPEC_WHILERW))
FUNCTION (svwhilewr, svwhilerw_svwhilewr_impl, (UNSPEC_WHILEWR))
FUNCTION (svxar, CODE_FOR_MODE0 (aarch64_sve2_xar),)

} /* end namespace aarch64_sve */