;; Unspec definitions.
;; Copyright (C) 2012-2020 Free Software Foundation, Inc.
;; Contributed by ARM Ltd.

;; This file is part of GCC.

;; GCC is free software; you can redistribute it and/or modify it
;; under the terms of the GNU General Public License as published
;; by the Free Software Foundation; either version 3, or (at your
;; option) any later version.

;; GCC is distributed in the hope that it will be useful, but WITHOUT
;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
;; or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
;; License for more details.

;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.

;; UNSPEC Usage:
;; Note: sin and cos are no longer used.
;; Unspec enumerators for Neon are defined in neon.md.
;; Unspec enumerators for iwmmxt2 are defined in iwmmxt2.md.
;; An illustrative sketch of how these enumerators are used in RTL patterns
;; follows the UNSPEC_VOLATILE enumerators below.

(define_c_enum "unspec" [
  UNSPEC_PUSH_MULT	; `push multiple' operation:
			; operand 0 is the first register,
			; subsequent registers are in parallel (use ...)
			; expressions.
  UNSPEC_PIC_SYM	; A symbol that has been treated properly for pic
			; usage, that is, we will add the pic_register
			; value to it before trying to dereference it.
  UNSPEC_PIC_BASE	; Add PC and all but the last operand together;
			; the last operand is the number of a PIC_LABEL
			; that points at the containing instruction.
  UNSPEC_PRLG_STK	; A special barrier that prevents frame accesses
			; being scheduled before the stack adjustment insn.
  UNSPEC_REGISTER_USE	; As USE insns are not meaningful after reload,
			; this unspec is used to prevent the deletion of
			; instructions setting registers for EH handling
			; and stack frame generation.  Operand 0 is the
			; register to "use".
  UNSPEC_CHECK_ARCH	; Set CCs to indicate 26-bit or 32-bit mode.
  UNSPEC_WSHUFH		; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
  UNSPEC_WACC		; Used by the intrinsic form of the iWMMXt WACC instruction.
  UNSPEC_TMOVMSK	; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
  UNSPEC_WSAD		; Used by the intrinsic form of the iWMMXt WSAD instruction.
  UNSPEC_WSADZ		; Used by the intrinsic form of the iWMMXt WSADZ instruction.
  UNSPEC_WMACS		; Used by the intrinsic form of the iWMMXt WMACS instruction.
  UNSPEC_WMACU		; Used by the intrinsic form of the iWMMXt WMACU instruction.
  UNSPEC_WMACSZ		; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
  UNSPEC_WMACUZ		; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
  UNSPEC_CLRDI		; Used by the intrinsic form of the iWMMXt CLRDI instruction.
  UNSPEC_WALIGNI	; Used by the intrinsic form of the iWMMXt WALIGN instruction.
  UNSPEC_TLS		; A symbol that has been treated properly for TLS usage.
  UNSPEC_PIC_LABEL	; A label used for PIC access that does not appear in the
			; instruction stream.
  UNSPEC_PIC_OFFSET	; A symbolic 12-bit OFFSET that has been treated
			; correctly for PIC usage.
  UNSPEC_GOTSYM_OFF	; The offset of the start of the GOT from a given
			; symbolic address.
  UNSPEC_THUMB1_CASESI	; A Thumb1 compressed dispatch-table call.
  UNSPEC_RBIT		; rbit operation.
  UNSPEC_SYMBOL_OFFSET	; The offset of the start of the symbol from
			; another symbolic address.
  UNSPEC_MEMORY_BARRIER	; Represent a memory barrier.
  UNSPEC_UNALIGNED_LOAD	; Used to represent ldr/ldrh instructions that access
			; unaligned locations, on architectures which support
			; that.
  UNSPEC_UNALIGNED_STORE ; Same for str/strh.
  UNSPEC_PIC_UNIFIED	; Create a common pic addressing form.
  UNSPEC_Q_SET		; Represent setting the Q bit.
  UNSPEC_GE_SET		; Represent setting the GE bits.
  UNSPEC_APSR_READ	; Represent reading the APSR.

  UNSPEC_LL		; Represent an unpaired load-register-exclusive.
  UNSPEC_VRINTZ		; Represent a float to integral float rounding
			; towards zero.
  UNSPEC_VRINTP		; Represent a float to integral float rounding
			; towards +Inf.
  UNSPEC_VRINTM		; Represent a float to integral float rounding
			; towards -Inf.
  UNSPEC_VRINTR		; Represent a float to integral float rounding
			; using the FPSCR rounding mode.
  UNSPEC_VRINTX		; Represent a float to integral float rounding
			; using the FPSCR rounding mode, signalling inexactness.
  UNSPEC_VRINTA		; Represent a float to integral float rounding
			; towards nearest, ties away from zero.
  UNSPEC_PROBE_STACK	; Probe stack memory reference.
  UNSPEC_NONSECURE_MEM	; Represent non-secure memory in ARMv8-M with
			; the security extension.
  UNSPEC_SP_SET		; Represent the setting of the stack protector's canary.
  UNSPEC_SP_TEST	; Represent the testing of the stack protector's canary
			; against the guard.
  UNSPEC_PIC_RESTORE	; Used to restore the FDPIC register.

  UNSPEC_SXTAB16	; Represent the SXTAB16 operation.
  UNSPEC_UXTAB16	; Represent the UXTAB16 operation.
  UNSPEC_SXTB16		; Represent the SXTB16 operation.
  UNSPEC_UXTB16		; Represent the UXTB16 operation.
  UNSPEC_QADD8		; Represent the QADD8 operation.
  UNSPEC_QSUB8		; Represent the QSUB8 operation.
  UNSPEC_SHADD8		; Represent the SHADD8 operation.
  UNSPEC_SHSUB8		; Represent the SHSUB8 operation.
  UNSPEC_UHADD8		; Represent the UHADD8 operation.
  UNSPEC_UHSUB8		; Represent the UHSUB8 operation.
  UNSPEC_UQADD8		; Represent the UQADD8 operation.
  UNSPEC_UQSUB8		; Represent the UQSUB8 operation.
  UNSPEC_QADD16		; Represent the QADD16 operation.
  UNSPEC_QASX		; Represent the QASX operation.
  UNSPEC_QSAX		; Represent the QSAX operation.
  UNSPEC_QSUB16		; Represent the QSUB16 operation.
  UNSPEC_SHADD16	; Represent the SHADD16 operation.
  UNSPEC_SHASX		; Represent the SHASX operation.
  UNSPEC_SHSAX		; Represent the SHSAX operation.
  UNSPEC_SHSUB16	; Represent the SHSUB16 operation.
  UNSPEC_UHADD16	; Represent the UHADD16 operation.
  UNSPEC_UHASX		; Represent the UHASX operation.
  UNSPEC_UHSAX		; Represent the UHSAX operation.
  UNSPEC_UHSUB16	; Represent the UHSUB16 operation.
  UNSPEC_UQADD16	; Represent the UQADD16 operation.
  UNSPEC_UQASX		; Represent the UQASX operation.
  UNSPEC_UQSAX		; Represent the UQSAX operation.
  UNSPEC_UQSUB16	; Represent the UQSUB16 operation.
  UNSPEC_SMUSD		; Represent the SMUSD operation.
  UNSPEC_SMUSDX		; Represent the SMUSDX operation.
  UNSPEC_USAD8		; Represent the USAD8 operation.
  UNSPEC_USADA8		; Represent the USADA8 operation.
  UNSPEC_SMLALD		; Represent the SMLALD operation.
  UNSPEC_SMLALDX	; Represent the SMLALDX operation.
  UNSPEC_SMLSLD		; Represent the SMLSLD operation.
  UNSPEC_SMLSLDX	; Represent the SMLSLDX operation.
  UNSPEC_SMLAWB		; Represent the SMLAWB operation.
  UNSPEC_SMLAWT		; Represent the SMLAWT operation.
  UNSPEC_SEL		; Represent the SEL operation.
  UNSPEC_SADD8		; Represent the SADD8 operation.
  UNSPEC_SSUB8		; Represent the SSUB8 operation.
  UNSPEC_UADD8		; Represent the UADD8 operation.
  UNSPEC_USUB8		; Represent the USUB8 operation.
  UNSPEC_SADD16		; Represent the SADD16 operation.
  UNSPEC_SASX		; Represent the SASX operation.
  UNSPEC_SSAX		; Represent the SSAX operation.
  UNSPEC_SSUB16		; Represent the SSUB16 operation.
  UNSPEC_UADD16		; Represent the UADD16 operation.
  UNSPEC_UASX		; Represent the UASX operation.
  UNSPEC_USAX		; Represent the USAX operation.
  UNSPEC_USUB16		; Represent the USUB16 operation.
  UNSPEC_SMLAD		; Represent the SMLAD operation.
  UNSPEC_SMLADX		; Represent the SMLADX operation.
  UNSPEC_SMLSD		; Represent the SMLSD operation.
  UNSPEC_SMLSDX		; Represent the SMLSDX operation.
  UNSPEC_SMUAD		; Represent the SMUAD operation.
  UNSPEC_SMUADX		; Represent the SMUADX operation.
  UNSPEC_SSAT16		; Represent the SSAT16 operation.
  UNSPEC_USAT16		; Represent the USAT16 operation.
  UNSPEC_CDE		; Custom Datapath Extension instruction.
  UNSPEC_CDEA		; Custom Datapath Extension instruction.
  UNSPEC_VCDE		; Custom Datapath Extension instruction.
  UNSPEC_VCDEA		; Custom Datapath Extension instruction.
])

(define_c_enum "unspec" [
  UNSPEC_WADDC		; Used by the intrinsic form of the iWMMXt WADDC instruction.
  UNSPEC_WABS		; Used by the intrinsic form of the iWMMXt WABS instruction.
  UNSPEC_WQMULWMR	; Used by the intrinsic form of the iWMMXt WQMULWMR instruction.
  UNSPEC_WQMULMR	; Used by the intrinsic form of the iWMMXt WQMULMR instruction.
  UNSPEC_WQMULWM	; Used by the intrinsic form of the iWMMXt WQMULWM instruction.
  UNSPEC_WQMULM		; Used by the intrinsic form of the iWMMXt WQMULM instruction.
  UNSPEC_WQMIAxyn	; Used by the intrinsic form of the iWMMXt WMIAxyn instruction.
  UNSPEC_WQMIAxy	; Used by the intrinsic form of the iWMMXt WMIAxy instruction.
  UNSPEC_TANDC		; Used by the intrinsic form of the iWMMXt TANDC instruction.
  UNSPEC_TORC		; Used by the intrinsic form of the iWMMXt TORC instruction.
  UNSPEC_TORVSC		; Used by the intrinsic form of the iWMMXt TORVSC instruction.
  UNSPEC_TEXTRC		; Used by the intrinsic form of the iWMMXt TEXTRC instruction.
  UNSPEC_GET_FPSCR_NZCVQC ; Represent fetch of FPSCR_nzcvqc content.
])

;; UNSPEC_VOLATILE Usage:

(define_c_enum "unspecv" [
  VUNSPEC_BLOCKAGE	; `blockage' insn to prevent scheduling across an
			; insn in the code.
  VUNSPEC_EPILOGUE	; `epilogue' insn, used to represent any part of the
			; instruction epilogue sequence that isn't expanded
			; into normal RTL.  Used for both normal and sibcall
			; epilogues.
  VUNSPEC_THUMB1_INTERWORK ; `prologue_thumb1_interwork' insn, used to swap
			; modes from arm to thumb.
  VUNSPEC_ALIGN		; `align' insn.  Used at the head of a minipool table
			; for inlined constants.
  VUNSPEC_POOL_END	; `end-of-table'.  Used to mark the end of a minipool
			; table.
  VUNSPEC_POOL_1	; `pool-entry(1)'.  An entry in the constant pool for
			; an 8-bit object.
  VUNSPEC_POOL_2	; `pool-entry(2)'.  An entry in the constant pool for
			; a 16-bit object.
  VUNSPEC_POOL_4	; `pool-entry(4)'.  An entry in the constant pool for
			; a 32-bit object.
  VUNSPEC_POOL_8	; `pool-entry(8)'.  An entry in the constant pool for
			; a 64-bit object.
  VUNSPEC_POOL_16	; `pool-entry(16)'.  An entry in the constant pool for
			; a 128-bit object.
  VUNSPEC_TMRC		; Used by the iWMMXt TMRC instruction.
  VUNSPEC_TMCR		; Used by the iWMMXt TMCR instruction.
  VUNSPEC_ALIGN8	; 8-byte alignment version of VUNSPEC_ALIGN.
  VUNSPEC_WCMP_EQ	; Used by the iWMMXt WCMPEQ instructions.
  VUNSPEC_WCMP_GTU	; Used by the iWMMXt WCMPGTU instructions.
  VUNSPEC_WCMP_GT	; Used by the iWMMXt WCMPGT instructions.
  VUNSPEC_EH_RETURN	; Used to override the return address for exception
			; handling.
  VUNSPEC_ATOMIC_CAS	; Represent an atomic compare swap.
  VUNSPEC_ATOMIC_XCHG	; Represent an atomic exchange.
  VUNSPEC_ATOMIC_OP	; Represent an atomic operation.
  VUNSPEC_LL		; Represent a load-register-exclusive.
  VUNSPEC_LDRD_ATOMIC	; Represent an LDRD used as an atomic DImode load.
  VUNSPEC_SC		; Represent a store-register-exclusive.
  VUNSPEC_LAX		; Represent a load-register-acquire-exclusive.
  VUNSPEC_SLX		; Represent a store-register-release-exclusive.
  VUNSPEC_LDA		; Represent a load-register-acquire.
  VUNSPEC_STL		; Represent a store-register-release.
  VUNSPEC_GET_FPSCR	; Represent fetch of FPSCR content.
  VUNSPEC_SET_FPSCR	; Represent assignment of FPSCR content.
  VUNSPEC_SET_FPSCR_NZCVQC ; Represent assignment of FPSCR_nzcvqc content.
  VUNSPEC_PROBE_STACK_RANGE ; Represent stack range probing.
  VUNSPEC_CDP		; Represent the coprocessor cdp instruction.
  VUNSPEC_CDP2		; Represent the coprocessor cdp2 instruction.
  VUNSPEC_LDC		; Represent the coprocessor ldc instruction.
  VUNSPEC_LDC2		; Represent the coprocessor ldc2 instruction.
  VUNSPEC_LDCL		; Represent the coprocessor ldcl instruction.
  VUNSPEC_LDC2L		; Represent the coprocessor ldc2l instruction.
  VUNSPEC_STC		; Represent the coprocessor stc instruction.
  VUNSPEC_STC2		; Represent the coprocessor stc2 instruction.
  VUNSPEC_STCL		; Represent the coprocessor stcl instruction.
  VUNSPEC_STC2L		; Represent the coprocessor stc2l instruction.
  VUNSPEC_MCR		; Represent the coprocessor mcr instruction.
  VUNSPEC_MCR2		; Represent the coprocessor mcr2 instruction.
  VUNSPEC_MRC		; Represent the coprocessor mrc instruction.
  VUNSPEC_MRC2		; Represent the coprocessor mrc2 instruction.
  VUNSPEC_MCRR		; Represent the coprocessor mcrr instruction.
  VUNSPEC_MCRR2		; Represent the coprocessor mcrr2 instruction.
  VUNSPEC_MRRC		; Represent the coprocessor mrrc instruction.
  VUNSPEC_MRRC2		; Represent the coprocessor mrrc2 instruction.
  VUNSPEC_SPECULATION_BARRIER ; Represent an unconditional speculation barrier.
  VUNSPEC_APSR_WRITE	; Represent writing the APSR.
  VUNSPEC_VSTR_VLDR	; Represent the vstr/vldr instruction.
  VUNSPEC_CLRM_APSR	; Represent the clearing of APSR with the clrm instruction.
  VUNSPEC_VSCCLRM_VPR	; Represent the clearing of VPR with the vscclrm
			; instruction.
  VUNSPEC_VLSTM		; Represent the lazy store multiple with the vlstm
			; instruction.
  VUNSPEC_VLLDM		; Represent the lazy load multiple with the vlldm
			; instruction.
])
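;; As an illustrative sketch only (not a pattern defined in this file), an
;; "unspec" enumerator names an otherwise opaque operation inside an RTL
;; expression, for example something of the form
;;
;;   (set (match_operand:SI 0 "s_register_operand" "=r")
;;        (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")]
;;                   UNSPEC_RBIT))
;;
;; whereas the "unspecv" enumerators above are used with unspec_volatile,
;; which additionally prevents the optimizers from deleting, reordering or
;; combining the insn that contains it.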
;; Enumerators for NEON unspecs.
(define_c_enum "unspec" [
  UNSPEC_ASHIFT_SIGNED
  UNSPEC_ASHIFT_UNSIGNED
  UNSPEC_CRC32B
  UNSPEC_CRC32H
  UNSPEC_CRC32W
  UNSPEC_CRC32CB
  UNSPEC_CRC32CH
  UNSPEC_CRC32CW
  UNSPEC_AESD
  UNSPEC_AESE
  UNSPEC_AESIMC
  UNSPEC_AESMC
  UNSPEC_SHA1C
  UNSPEC_SHA1M
  UNSPEC_SHA1P
  UNSPEC_SHA1H
  UNSPEC_SHA1SU0
  UNSPEC_SHA1SU1
  UNSPEC_SHA256H
  UNSPEC_SHA256H2
  UNSPEC_SHA256SU0
  UNSPEC_SHA256SU1
  UNSPEC_VMULLP64
  UNSPEC_LOAD_COUNT
  UNSPEC_VABAL_S
  UNSPEC_VABAL_U
  UNSPEC_VABD_F
  UNSPEC_VABD_S
  UNSPEC_VABD_U
  UNSPEC_VABDL_S
  UNSPEC_VABDL_U
  UNSPEC_VADD
  UNSPEC_VADDHN
  UNSPEC_VRADDHN
  UNSPEC_VADDL_S
  UNSPEC_VADDL_U
  UNSPEC_VADDW_S
  UNSPEC_VADDW_U
  UNSPEC_VBSL
  UNSPEC_VCAGE
  UNSPEC_VCAGT
  UNSPEC_VCALE
  UNSPEC_VCALT
  UNSPEC_VCEQ
  UNSPEC_VCGE
  UNSPEC_VCGEU
  UNSPEC_VCGT
  UNSPEC_VCGTU
  UNSPEC_VCLS
  UNSPEC_VCONCAT
  UNSPEC_VCVT
  UNSPEC_VCVT_S
  UNSPEC_VCVT_U
  UNSPEC_VCVT_S_N
  UNSPEC_VCVT_U_N
  UNSPEC_VCVT_HF_S_N
  UNSPEC_VCVT_HF_U_N
  UNSPEC_VCVT_SI_S_N
  UNSPEC_VCVT_SI_U_N
  UNSPEC_VCVTH_S
  UNSPEC_VCVTH_U
  UNSPEC_VCVTA_S
  UNSPEC_VCVTA_U
  UNSPEC_VCVTM_S
  UNSPEC_VCVTM_U
  UNSPEC_VCVTN_S
  UNSPEC_VCVTN_U
  UNSPEC_VCVTP_S
  UNSPEC_VCVTP_U
  UNSPEC_VEXT
  UNSPEC_VHADD_S
  UNSPEC_VHADD_U
  UNSPEC_VRHADD_S
  UNSPEC_VRHADD_U
  UNSPEC_VHSUB_S
  UNSPEC_VHSUB_U
  UNSPEC_VLD1
  UNSPEC_VLD1_LANE
  UNSPEC_VLD2
  UNSPEC_VLD2_DUP
  UNSPEC_VLD2_LANE
  UNSPEC_VLD3
  UNSPEC_VLD3A
  UNSPEC_VLD3B
  UNSPEC_VLD3_DUP
  UNSPEC_VLD3_LANE
  UNSPEC_VLD4
  UNSPEC_VLD4A
  UNSPEC_VLD4B
  UNSPEC_VLD4_DUP
  UNSPEC_VLD4_LANE
  UNSPEC_VMAX
  UNSPEC_VMAX_U
  UNSPEC_VMAXNM
  UNSPEC_VMIN
  UNSPEC_VMIN_U
  UNSPEC_VMINNM
  UNSPEC_VMLA
  UNSPEC_VMLA_LANE
  UNSPEC_VMLAL_S
  UNSPEC_VMLAL_U
  UNSPEC_VMLAL_S_LANE
  UNSPEC_VMLAL_U_LANE
  UNSPEC_VMLS
  UNSPEC_VMLS_LANE
  UNSPEC_VMLSL_S
  UNSPEC_VMLSL_U
  UNSPEC_VMLSL_S_LANE
  UNSPEC_VMLSL_U_LANE
  UNSPEC_VMLSL_LANE
  UNSPEC_VFMA_LANE
  UNSPEC_VFMS_LANE
  UNSPEC_VMOVL_S
  UNSPEC_VMOVL_U
  UNSPEC_VMOVN
  UNSPEC_VMUL
  UNSPEC_VMULL_P
  UNSPEC_VMULL_S
  UNSPEC_VMULL_U
  UNSPEC_VMUL_LANE
  UNSPEC_VMULL_S_LANE
  UNSPEC_VMULL_U_LANE
  UNSPEC_VPADAL_S
  UNSPEC_VPADAL_U
  UNSPEC_VPADD
  UNSPEC_VPADDL_S
  UNSPEC_VPADDL_U
  UNSPEC_VPMAX
  UNSPEC_VPMAX_U
  UNSPEC_VPMIN
  UNSPEC_VPMIN_U
  UNSPEC_VPSMAX
  UNSPEC_VPSMIN
  UNSPEC_VPUMAX
  UNSPEC_VPUMIN
  UNSPEC_VQABS
  UNSPEC_VQADD_S
  UNSPEC_VQADD_U
  UNSPEC_VQDMLAL
  UNSPEC_VQDMLAL_LANE
  UNSPEC_VQDMLSL
  UNSPEC_VQDMLSL_LANE
  UNSPEC_VQDMULH
  UNSPEC_VQDMULH_LANE
  UNSPEC_VQRDMULH
  UNSPEC_VQRDMULH_LANE
  UNSPEC_VQDMULL
  UNSPEC_VQDMULL_LANE
  UNSPEC_VQMOVN_S
  UNSPEC_VQMOVN_U
  UNSPEC_VQMOVUN
  UNSPEC_VQNEG
  UNSPEC_VQSHL_S
  UNSPEC_VQSHL_U
  UNSPEC_VQRSHL_S
  UNSPEC_VQRSHL_U
  UNSPEC_VQSHL_S_N
  UNSPEC_VQSHL_U_N
  UNSPEC_VQSHLU_N
  UNSPEC_VQSHRN_S_N
  UNSPEC_VQSHRN_U_N
  UNSPEC_VQRSHRN_S_N
  UNSPEC_VQRSHRN_U_N
  UNSPEC_VQSHRUN_N
  UNSPEC_VQRSHRUN_N
  UNSPEC_VQSUB_S
  UNSPEC_VQSUB_U
  UNSPEC_VRECPE
  UNSPEC_VRECPS
  UNSPEC_VREV16
  UNSPEC_VREV32
  UNSPEC_VREV64
  UNSPEC_VRSQRTE
  UNSPEC_VRSQRTS
  UNSPEC_VSHL_S
  UNSPEC_VSHL_U
  UNSPEC_VRSHL_S
  UNSPEC_VRSHL_U
  UNSPEC_VSHLL_S_N
  UNSPEC_VSHLL_U_N
  UNSPEC_VSHL_N
  UNSPEC_VSHR_S_N
  UNSPEC_VSHR_U_N
  UNSPEC_VRSHR_S_N
  UNSPEC_VRSHR_U_N
  UNSPEC_VSHRN_N
  UNSPEC_VRSHRN_N
  UNSPEC_VSLI
  UNSPEC_VSRA_S_N
  UNSPEC_VSRA_U_N
  UNSPEC_VRSRA_S_N
  UNSPEC_VRSRA_U_N
  UNSPEC_VSRI
  UNSPEC_VST1
  UNSPEC_VST1_LANE
  UNSPEC_VST2
  UNSPEC_VST2_LANE
  UNSPEC_VST3
  UNSPEC_VST3A
  UNSPEC_VST3B
  UNSPEC_VST3_LANE
  UNSPEC_VST4
  UNSPEC_VST4A
  UNSPEC_VST4B
  UNSPEC_VST4_LANE
  UNSPEC_VSTRUCTDUMMY
  UNSPEC_VSUB
  UNSPEC_VSUBHN
  UNSPEC_VRSUBHN
  UNSPEC_VSUBL_S
  UNSPEC_VSUBL_U
  UNSPEC_VSUBW_S
  UNSPEC_VSUBW_U
  UNSPEC_VTBL
  UNSPEC_VTBX
  UNSPEC_VTRN1
  UNSPEC_VTRN2
  UNSPEC_VTST
  UNSPEC_VUZP1
  UNSPEC_VUZP2
  UNSPEC_VZIP1
  UNSPEC_VZIP2
  UNSPEC_MISALIGNED_ACCESS
  UNSPEC_VCLE
  UNSPEC_VCLT
  UNSPEC_NVRINTZ
  UNSPEC_NVRINTP
  UNSPEC_NVRINTM
  UNSPEC_NVRINTX
  UNSPEC_NVRINTA
  UNSPEC_NVRINTN
  UNSPEC_VQRDMLAH
  UNSPEC_VQRDMLSH
  UNSPEC_VRND
  UNSPEC_VRNDA
  UNSPEC_VRNDI
  UNSPEC_VRNDM
  UNSPEC_VRNDN
  UNSPEC_VRNDP
  UNSPEC_VRNDX
  UNSPEC_DOT_S
  UNSPEC_DOT_U
  UNSPEC_DOT_US
  UNSPEC_DOT_SU
  UNSPEC_VFML_LO
  UNSPEC_VFML_HI
  UNSPEC_VCADD90
  UNSPEC_VCADD270
  UNSPEC_VCMLA
  UNSPEC_VCMLA90
  UNSPEC_VCMLA180
  UNSPEC_VCMLA270
  UNSPEC_MATMUL_S
  UNSPEC_MATMUL_U
  UNSPEC_MATMUL_US
  UNSPEC_BFCVT
  UNSPEC_BFCVT_HIGH
  UNSPEC_BFMMLA
  UNSPEC_BFMAB
  UNSPEC_BFMAT
])

;; Enumerators for MVE unspecs.
(define_c_enum "unspec" [
  VST4Q
  VRNDXQ_F
  VRNDQ_F
  VRNDPQ_F
  VRNDNQ_F
  VRNDMQ_F
  VRNDAQ_F
  VREV64Q_F
  VNEGQ_F
  VDUPQ_N_F
  VABSQ_F
  VREV32Q_F
  VCVTTQ_F32_F16
  VCVTBQ_F32_F16
  VCVTQ_TO_F_S
  VQNEGQ_S
  VCVTQ_TO_F_U
  VREV16Q_S
  VREV16Q_U
  VADDLVQ_S
  VMVNQ_N_S
  VMVNQ_N_U
  VCVTAQ_S
  VCVTAQ_U
  VREV64Q_S
  VREV64Q_U
  VQABSQ_S
  VNEGQ_S
  VMVNQ_S
  VMVNQ_U
  VDUPQ_N_U
  VDUPQ_N_S
  VCLZQ_U
  VCLZQ_S
  VCLSQ_S
  VADDVQ_S
  VADDVQ_U
  VABSQ_S
  VREV32Q_U
  VREV32Q_S
  VMOVLTQ_U
  VMOVLTQ_S
  VMOVLBQ_S
  VMOVLBQ_U
  VCVTQ_FROM_F_S
  VCVTQ_FROM_F_U
  VCVTPQ_S
  VCVTPQ_U
  VCVTNQ_S
  VCVTNQ_U
  VCVTMQ_S
  VCVTMQ_U
  VADDLVQ_U
  VCTP8Q
  VCTP16Q
  VCTP32Q
  VCTP64Q
  VPNOT
  VCREATEQ_F
  VCVTQ_N_TO_F_S
  VCVTQ_N_TO_F_U
  VBRSRQ_N_F
  VSUBQ_N_F
  VCREATEQ_U
  VCREATEQ_S
  VSHRQ_N_S
  VSHRQ_N_U
  VCVTQ_N_FROM_F_S
  VCVTQ_N_FROM_F_U
  VADDLVQ_P_S
  VADDLVQ_P_U
  VCMPNEQ_U
  VCMPNEQ_S
  VSHLQ_S
  VSHLQ_U
  VABDQ_S
  VADDQ_N_S
  VADDVAQ_S
  VADDVQ_P_S
  VANDQ_S
  VBICQ_S
  VBRSRQ_N_S
  VCADDQ_ROT270_S
  VCADDQ_ROT90_S
  VCMPEQQ_S
  VCMPEQQ_N_S
  VCMPNEQ_N_S
  VEORQ_S
  VHADDQ_S
  VHADDQ_N_S
  VHSUBQ_S
  VHSUBQ_N_S
  VMAXQ_S
  VMAXVQ_S
  VMINQ_S
  VMINVQ_S
  VMLADAVQ_S
  VMULHQ_S
  VMULLBQ_INT_S
  VMULLTQ_INT_S
  VMULQ_S
  VMULQ_N_S
  VORNQ_S
  VORRQ_S
  VQADDQ_S
  VQADDQ_N_S
  VQRSHLQ_S
  VQRSHLQ_N_S
  VQSHLQ_S
  VQSHLQ_N_S
  VQSHLQ_R_S
  VQSUBQ_S
  VQSUBQ_N_S
  VRHADDQ_S
  VRMULHQ_S
  VRSHLQ_S
  VRSHLQ_N_S
  VRSHRQ_N_S
  VSHLQ_N_S
  VSHLQ_R_S
  VSUBQ_S
  VSUBQ_N_S
  VABDQ_U
  VADDQ_N_U
  VADDVAQ_U
  VADDVQ_P_U
  VANDQ_U
  VBICQ_U
  VBRSRQ_N_U
  VCADDQ_ROT270_U
  VCADDQ_ROT90_U
  VCMPEQQ_U
  VCMPEQQ_N_U
  VCMPNEQ_N_U
  VEORQ_U
  VHADDQ_U
  VHADDQ_N_U
  VHSUBQ_U
  VHSUBQ_N_U
  VMAXQ_U
  VMAXVQ_U
  VMINQ_U
  VMINVQ_U
  VMLADAVQ_U
  VMULHQ_U
  VMULLBQ_INT_U
  VMULLTQ_INT_U
  VMULQ_U
  VMULQ_N_U
  VORNQ_U
  VORRQ_U
  VQADDQ_U
  VQADDQ_N_U
  VQRSHLQ_U
  VQRSHLQ_N_U
  VQSHLQ_U
  VQSHLQ_N_U
  VQSHLQ_R_U
  VQSUBQ_U
  VQSUBQ_N_U
  VRHADDQ_U
  VRMULHQ_U
  VRSHLQ_U
  VRSHLQ_N_U
  VRSHRQ_N_U
  VSHLQ_N_U
  VSHLQ_R_U
  VSUBQ_U
  VSUBQ_N_U
  VCMPGEQ_N_S
  VCMPGEQ_S
  VCMPGTQ_N_S
  VCMPGTQ_S
  VCMPLEQ_N_S
  VCMPLEQ_S
  VCMPLTQ_N_S
  VCMPLTQ_S
  VHCADDQ_ROT270_S
  VHCADDQ_ROT90_S
  VMAXAQ_S
  VMAXAVQ_S
  VMINAQ_S
  VMINAVQ_S
  VMLADAVXQ_S
  VMLSDAVQ_S
  VMLSDAVXQ_S
  VQDMULHQ_N_S
  VQDMULHQ_S
  VQRDMULHQ_N_S
  VQRDMULHQ_S
  VQSHLUQ_N_S
  VCMPCSQ_N_U
  VCMPCSQ_U
  VCMPHIQ_N_U
  VCMPHIQ_U
  VABDQ_M_S
  VABDQ_M_U
  VABDQ_F
  VADDQ_N_F
  VANDQ_F
  VBICQ_F
  VCADDQ_ROT270_F
  VCADDQ_ROT90_F
  VCMPEQQ_F
  VCMPEQQ_N_F
  VCMPGEQ_F
  VCMPGEQ_N_F
  VCMPGTQ_F
  VCMPGTQ_N_F
  VCMPLEQ_F
  VCMPLEQ_N_F
  VCMPLTQ_F
  VCMPLTQ_N_F
  VCMPNEQ_F
  VCMPNEQ_N_F
  VCMULQ_F
  VCMULQ_ROT180_F
  VCMULQ_ROT270_F
  VCMULQ_ROT90_F
  VEORQ_F
  VMAXNMAQ_F
  VMAXNMAVQ_F
  VMAXNMQ_F
  VMAXNMVQ_F
  VMINNMAQ_F
  VMINNMAVQ_F
  VMINNMQ_F
  VMINNMVQ_F
  VMULQ_F
  VMULQ_N_F
  VORNQ_F
  VORRQ_F
  VSUBQ_F
  VADDLVAQ_U
  VADDLVAQ_S
  VBICQ_N_U
  VBICQ_N_S
  VCTP8Q_M
  VCTP16Q_M
  VCTP32Q_M
  VCTP64Q_M
  VCVTBQ_F16_F32
  VCVTTQ_F16_F32
  VMLALDAVQ_U
  VMLALDAVXQ_U
  VMLALDAVXQ_S
  VMLALDAVQ_S
  VMLSLDAVQ_S
  VMLSLDAVXQ_S
  VMOVNBQ_U
  VMOVNBQ_S
  VMOVNTQ_U
  VMOVNTQ_S
  VORRQ_N_S
  VORRQ_N_U
  VQDMULLBQ_N_S
  VQDMULLBQ_S
  VQDMULLTQ_N_S
  VQDMULLTQ_S
  VQMOVNBQ_U
  VQMOVNBQ_S
  VQMOVUNBQ_S
  VQMOVUNTQ_S
  VRMLALDAVHXQ_S
  VRMLSLDAVHQ_S
  VRMLSLDAVHXQ_S
  VSHLLBQ_S
  VSHLLBQ_U
  VSHLLTQ_U
  VSHLLTQ_S
  VQMOVNTQ_U
  VQMOVNTQ_S
  VSHLLBQ_N_S
  VSHLLBQ_N_U
  VSHLLTQ_N_U
  VSHLLTQ_N_S
  VRMLALDAVHQ_U
  VRMLALDAVHQ_S
  VMULLTQ_POLY_P
  VMULLBQ_POLY_P
  VBICQ_M_N_S
  VBICQ_M_N_U
  VCMPEQQ_M_F
  VCVTAQ_M_S
  VCVTAQ_M_U
  VCVTQ_M_TO_F_S
  VCVTQ_M_TO_F_U
  VQRSHRNBQ_N_U
  VQRSHRNBQ_N_S
  VQRSHRUNBQ_N_S
  VRMLALDAVHAQ_S
  VABAVQ_S
  VABAVQ_U
  VSHLCQ_S
  VSHLCQ_U
  VRMLALDAVHAQ_U
  VABSQ_M_S
  VADDVAQ_P_S
  VADDVAQ_P_U
  VCLSQ_M_S
  VCLZQ_M_S
  VCLZQ_M_U
  VCMPCSQ_M_N_U
  VCMPCSQ_M_U
  VCMPEQQ_M_N_S
  VCMPEQQ_M_N_U
  VCMPEQQ_M_S
  VCMPEQQ_M_U
  VCMPGEQ_M_N_S
  VCMPGEQ_M_S
  VCMPGTQ_M_N_S
  VCMPGTQ_M_S
  VCMPHIQ_M_N_U
  VCMPHIQ_M_U
  VCMPLEQ_M_N_S
  VCMPLEQ_M_S
  VCMPLTQ_M_N_S
  VCMPLTQ_M_S
  VCMPNEQ_M_N_S
  VCMPNEQ_M_N_U
  VCMPNEQ_M_S
  VCMPNEQ_M_U
  VDUPQ_M_N_S
  VDUPQ_M_N_U
  VDWDUPQ_N_U
  VDWDUPQ_WB_U
  VIWDUPQ_N_U
  VIWDUPQ_WB_U
  VMAXAQ_M_S
  VMAXAVQ_P_S
  VMAXVQ_P_S
  VMAXVQ_P_U
  VMINAQ_M_S
  VMINAVQ_P_S
  VMINVQ_P_S
  VMINVQ_P_U
  VMLADAVAQ_S
  VMLADAVAQ_U
  VMLADAVQ_P_S
  VMLADAVQ_P_U
  VMLADAVXQ_P_S
  VMLAQ_N_S
  VMLAQ_N_U
  VMLASQ_N_S
  VMLASQ_N_U
  VMLSDAVQ_P_S
  VMLSDAVXQ_P_S
  VMVNQ_M_S
  VMVNQ_M_U
  VNEGQ_M_S
  VPSELQ_S
  VPSELQ_U
  VQABSQ_M_S
  VQDMLAHQ_N_S
  VQDMLASHQ_N_S
  VQNEGQ_M_S
  VQRDMLADHQ_S
  VQRDMLADHXQ_S
  VQRDMLAHQ_N_S
  VQRDMLASHQ_N_S
  VQRDMLSDHQ_S
  VQRDMLSDHXQ_S
  VQRSHLQ_M_N_S
  VQRSHLQ_M_N_U
  VQSHLQ_M_R_S
  VQSHLQ_M_R_U
  VREV64Q_M_S
  VREV64Q_M_U
  VRSHLQ_M_N_S
  VRSHLQ_M_N_U
  VSHLQ_M_R_S
  VSHLQ_M_R_U
  VSLIQ_N_S
  VSLIQ_N_U
  VSRIQ_N_S
  VSRIQ_N_U
  VQDMLSDHXQ_S
  VQDMLSDHQ_S
  VQDMLADHXQ_S
  VQDMLADHQ_S
  VMLSDAVAXQ_S
  VMLSDAVAQ_S
  VMLADAVAXQ_S
  VCMPGEQ_M_F
  VCMPGTQ_M_N_F
  VMLSLDAVQ_P_S
  VRMLALDAVHAXQ_S
  VMLSLDAVXQ_P_S
  VFMAQ_F
  VMLSLDAVAQ_S
  VQSHRUNBQ_N_S
  VQRSHRUNTQ_N_S
  VCMLAQ_F
  VMINNMAQ_M_F
  VFMASQ_N_F
  VDUPQ_M_N_F
  VCMPGTQ_M_F
  VCMPLTQ_M_F
  VRMLSLDAVHQ_P_S
  VQSHRUNTQ_N_S
  VABSQ_M_F
  VMAXNMAVQ_P_F
  VFMAQ_N_F
  VRMLSLDAVHXQ_P_S
  VREV32Q_M_F
  VRMLSLDAVHAQ_S
  VRMLSLDAVHAXQ_S
  VCMPLTQ_M_N_F
  VCMPNEQ_M_F
  VRNDAQ_M_F
  VRNDPQ_M_F
  VADDLVAQ_P_S
  VQMOVUNBQ_M_S
  VCMPLEQ_M_F
  VCMLAQ_ROT180_F
  VMLSLDAVAXQ_S
  VRNDXQ_M_F
  VFMSQ_F
  VMINNMVQ_P_F
  VMAXNMVQ_P_F
  VPSELQ_F
  VCMLAQ_ROT90_F
  VQMOVUNTQ_M_S
  VREV64Q_M_F
  VNEGQ_M_F
  VRNDMQ_M_F
  VCMPLEQ_M_N_F
  VCMPGEQ_M_N_F
  VRNDNQ_M_F
  VMINNMAVQ_P_F
  VCMPNEQ_M_N_F
  VRMLALDAVHQ_P_S
  VRMLALDAVHXQ_P_S
  VCMPEQQ_M_N_F
  VCMLAQ_ROT270_F
  VMAXNMAQ_M_F
  VRNDQ_M_F
  VMLALDAVQ_P_U
  VMLALDAVQ_P_S
  VQMOVNBQ_M_S
  VQMOVNBQ_M_U
  VMOVLTQ_M_U
  VMOVLTQ_M_S
  VMOVNBQ_M_U
  VMOVNBQ_M_S
  VRSHRNTQ_N_U
  VRSHRNTQ_N_S
  VORRQ_M_N_S
  VORRQ_M_N_U
  VREV32Q_M_S
  VREV32Q_M_U
  VQRSHRNTQ_N_U
  VQRSHRNTQ_N_S
  VMOVNTQ_M_U
  VMOVNTQ_M_S
  VMOVLBQ_M_U
  VMOVLBQ_M_S
  VMLALDAVAQ_S
  VMLALDAVAQ_U
  VQSHRNBQ_N_U
  VQSHRNBQ_N_S
  VSHRNBQ_N_U
  VSHRNBQ_N_S
  VRSHRNBQ_N_S
  VRSHRNBQ_N_U
  VMLALDAVXQ_P_U
  VMLALDAVXQ_P_S
  VQMOVNTQ_M_U
  VQMOVNTQ_M_S
  VMVNQ_M_N_U
  VMVNQ_M_N_S
  VQSHRNTQ_N_U
  VQSHRNTQ_N_S
  VMLALDAVAXQ_S
  VMLALDAVAXQ_U
  VSHRNTQ_N_S
  VSHRNTQ_N_U
  VCVTBQ_M_F16_F32
  VCVTBQ_M_F32_F16
  VCVTTQ_M_F16_F32
  VCVTTQ_M_F32_F16
  VCVTMQ_M_S
  VCVTMQ_M_U
  VCVTNQ_M_S
  VCVTPQ_M_S
  VCVTPQ_M_U
  VCVTQ_M_N_FROM_F_S
  VCVTNQ_M_U
  VREV16Q_M_S
  VREV16Q_M_U
  VREV32Q_M
  VCVTQ_M_FROM_F_U
  VCVTQ_M_FROM_F_S
  VRMLALDAVHQ_P_U
  VADDLVAQ_P_U
  VCVTQ_M_N_FROM_F_U
  VQSHLUQ_M_N_S
  VABAVQ_P_S
  VABAVQ_P_U
  VSHLQ_M_S
  VSHLQ_M_U
  VSRIQ_M_N_S
  VSRIQ_M_N_U
  VSUBQ_M_U
  VSUBQ_M_S
  VCVTQ_M_N_TO_F_U
  VCVTQ_M_N_TO_F_S
  VQADDQ_M_U
  VQADDQ_M_S
  VRSHRQ_M_N_S
  VSUBQ_M_N_S
  VSUBQ_M_N_U
  VBRSRQ_M_N_S
  VSUBQ_M_N_F
  VBICQ_M_F
  VHADDQ_M_U
  VBICQ_M_U
  VBICQ_M_S
  VMULQ_M_N_U
  VHADDQ_M_S
  VORNQ_M_F
  VMLAQ_M_N_S
  VQSUBQ_M_U
  VQSUBQ_M_S
  VMLAQ_M_N_U
  VQSUBQ_M_N_U
  VQSUBQ_M_N_S
  VMULLTQ_INT_M_S
  VMULLTQ_INT_M_U
  VMULQ_M_N_S
  VMULQ_M_N_F
  VMLASQ_M_N_U
  VMLASQ_M_N_S
  VMAXQ_M_U
  VQRDMLAHQ_M_N_U
  VCADDQ_ROT270_M_F
  VCADDQ_ROT270_M_U
  VCADDQ_ROT270_M_S
  VQRSHLQ_M_S
  VMULQ_M_F
  VRHADDQ_M_U
  VSHRQ_M_N_U
  VRHADDQ_M_S
  VMULQ_M_S
  VMULQ_M_U
  VQDMLASHQ_M_N_S
  VQRDMLASHQ_M_N_S
  VRSHLQ_M_S
  VRSHLQ_M_U
  VRSHRQ_M_N_U
  VADDQ_M_N_F
  VADDQ_M_N_S
  VADDQ_M_N_U
  VQRDMLASHQ_M_N_U
  VMAXQ_M_S
  VQRDMLAHQ_M_N_S
  VORRQ_M_S
  VORRQ_M_U
  VORRQ_M_F
  VQRSHLQ_M_U
  VRMULHQ_M_U
  VRMULHQ_M_S
  VMINQ_M_S
  VMINQ_M_U
  VANDQ_M_F
  VANDQ_M_U
  VANDQ_M_S
  VHSUBQ_M_N_S
  VHSUBQ_M_N_U
  VMULHQ_M_S
  VMULHQ_M_U
  VMULLBQ_INT_M_U
  VMULLBQ_INT_M_S
  VCADDQ_ROT90_M_F
  VSHRQ_M_N_S
  VADDQ_M_U
  VSLIQ_M_N_U
  VQADDQ_M_N_S
  VBRSRQ_M_N_F
  VABDQ_M_F
  VBRSRQ_M_N_U
  VEORQ_M_F
  VSHLQ_M_N_S
  VQDMLAHQ_M_N_U
  VQDMLAHQ_M_N_S
  VSHLQ_M_N_U
  VMLADAVAQ_P_U
  VMLADAVAQ_P_S
  VSLIQ_M_N_S
  VQSHLQ_M_U
  VQSHLQ_M_S
  VCADDQ_ROT90_M_U
  VCADDQ_ROT90_M_S
  VORNQ_M_U
  VORNQ_M_S
  VQSHLQ_M_N_S
  VQSHLQ_M_N_U
  VADDQ_M_S
  VHADDQ_M_N_S
  VADDQ_M_F
  VQADDQ_M_N_U
  VEORQ_M_S
  VEORQ_M_U
  VHSUBQ_M_S
  VHSUBQ_M_U
  VHADDQ_M_N_U
  VHCADDQ_ROT90_M_S
  VQRDMLSDHQ_M_S
  VQRDMLSDHXQ_M_S
  VQRDMLADHXQ_M_S
  VQDMULHQ_M_S
  VMLADAVAXQ_P_S
  VQDMLADHXQ_M_S
  VQRDMULHQ_M_S
  VMLSDAVAXQ_P_S
  VQDMULHQ_M_N_S
  VHCADDQ_ROT270_M_S
  VQDMLSDHQ_M_S
  VQDMLSDHXQ_M_S
  VMLSDAVAQ_P_S
  VQRDMLADHQ_M_S
  VQDMLADHQ_M_S
  VMLALDAVAQ_P_U
  VMLALDAVAQ_P_S
  VQRSHRNBQ_M_N_U
  VQRSHRNBQ_M_N_S
  VQRSHRNTQ_M_N_S
  VQSHRNBQ_M_N_U
  VQSHRNBQ_M_N_S
  VQSHRNTQ_M_N_S
  VRSHRNBQ_M_N_U
  VRSHRNBQ_M_N_S
  VRSHRNTQ_M_N_U
  VSHLLBQ_M_N_U
  VSHLLBQ_M_N_S
  VSHLLTQ_M_N_U
  VSHLLTQ_M_N_S
  VSHRNBQ_M_N_S
  VSHRNBQ_M_N_U
  VSHRNTQ_M_N_S
  VSHRNTQ_M_N_U
  VMLALDAVAXQ_P_S
  VQRSHRNTQ_M_N_U
  VQSHRNTQ_M_N_U
  VRSHRNTQ_M_N_S
  VQRDMULHQ_M_N_S
  VRMLALDAVHAQ_P_S
  VMLSLDAVAQ_P_S
  VMLSLDAVAXQ_P_S
  VMULLBQ_POLY_M_P
  VMULLTQ_POLY_M_P
  VQDMULLBQ_M_N_S
  VQDMULLBQ_M_S
  VQDMULLTQ_M_N_S
  VQDMULLTQ_M_S
  VQRSHRUNBQ_M_N_S
  VQSHRUNBQ_M_N_S
  VQSHRUNTQ_M_N_S
  VRMLALDAVHAQ_P_U
  VRMLALDAVHAXQ_P_S
  VRMLSLDAVHAQ_P_S
  VRMLSLDAVHAXQ_P_S
  VQRSHRUNTQ_M_N_S
  VCMLAQ_M_F
  VCMLAQ_ROT180_M_F
  VCMLAQ_ROT270_M_F
  VCMLAQ_ROT90_M_F
  VCMULQ_M_F
  VCMULQ_ROT180_M_F
  VCMULQ_ROT270_M_F
  VCMULQ_ROT90_M_F
  VFMAQ_M_F
  VFMAQ_M_N_F
  VFMASQ_M_N_F
  VFMSQ_M_F
  VMAXNMQ_M_F
  VMINNMQ_M_F
  VSUBQ_M_F
  VSTRWQSB_S
  VSTRWQSB_U
  VSTRBQSO_S
  VSTRBQSO_U
  VSTRBQ_S
  VSTRBQ_U
  VLDRBQGO_S
  VLDRBQGO_U
  VLDRBQ_S
  VLDRBQ_U
  VLDRWQGB_S
  VLDRWQGB_U
  VLD1Q_F
  VLD1Q_S
  VLD1Q_U
  VLDRHQ_F
  VLDRHQGO_S
  VLDRHQGO_U
  VLDRHQGSO_S
  VLDRHQGSO_U
  VLDRHQ_S
  VLDRHQ_U
  VLDRWQ_F
  VLDRWQ_S
  VLDRWQ_U
  VLDRDQGB_S
  VLDRDQGB_U
  VLDRDQGO_S
  VLDRDQGO_U
  VLDRDQGSO_S
  VLDRDQGSO_U
  VLDRHQGO_F
  VLDRHQGSO_F
  VLDRWQGB_F
  VLDRWQGO_F
  VLDRWQGO_S
  VLDRWQGO_U
  VLDRWQGSO_F
  VLDRWQGSO_S
  VLDRWQGSO_U
  VSTRHQ_F
  VST1Q_S
  VST1Q_U
  VSTRHQSO_S
  VSTRHQ_U
  VSTRWQ_S
  VSTRWQ_U
  VSTRWQ_F
  VST1Q_F
  VSTRDQSB_S
  VSTRDQSB_U
  VSTRDQSO_S
  VSTRDQSO_U
  VSTRDQSSO_S
  VSTRDQSSO_U
  VSTRWQSO_S
  VSTRWQSO_U
  VSTRWQSSO_S
  VSTRWQSSO_U
  VSTRHQSO_F
  VSTRHQSSO_F
  VSTRWQSB_F
  VSTRWQSO_F
  VSTRWQSSO_F
  VDDUPQ
  VDDUPQ_M
  VDWDUPQ
  VDWDUPQ_M
  VIDUPQ
  VIDUPQ_M
  VIWDUPQ
  VIWDUPQ_M
  VSTRWQSBWB_S
  VSTRWQSBWB_U
  VLDRWQGBWB_S
  VLDRWQGBWB_U
  VSTRWQSBWB_F
  VLDRWQGBWB_F
  VSTRDQSBWB_S
  VSTRDQSBWB_U
  VLDRDQGBWB_S
  VLDRDQGBWB_U
  VADCQ_U
  VADCQ_M_U
  VADCQ_S
  VADCQ_M_S
  VSBCIQ_U
  VSBCIQ_S
  VSBCIQ_M_U
  VSBCIQ_M_S
  VSBCQ_U
  VSBCQ_S
  VSBCQ_M_U
  VSBCQ_M_S
  VADCIQ_U
  VADCIQ_M_U
  VADCIQ_S
  VADCIQ_M_S
  VLD2Q
  VLD4Q
  VST2Q
  VSHLCQ_M_U
  VSHLCQ_M_S
  VSTRHQSO_U
  VSTRHQSSO_S
  VSTRHQSSO_U
  VSTRHQ_S
  SRSHRL
  SRSHR
  URSHR
  URSHRL
  SQRSHR
  UQRSHL
  UQRSHLL_64
  UQRSHLL_48
  SQRSHRL_64
  SQRSHRL_48
  VSHLCQ_M_
])
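;; Illustrative sketch only (assumed, not part of this file): signed and
;; unsigned enumerator pairs such as VADDQ_N_S / VADDQ_N_U are normally
;; grouped by int iterators elsewhere in the backend, so a single pattern
;; can cover both variants, along the lines of:
;;
;;   (define_int_iterator VADDQ_N [VADDQ_N_S VADDQ_N_U])
;;   (define_int_attr supf [(VADDQ_N_S "s") (VADDQ_N_U "u")])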