/* Copyright (C) 2013-2020 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   Under Section 7 of GPL version 3, you are granted additional
   permissions described in the GCC Runtime Library Exception, version
   3.1, as published by the Free Software Foundation.

   You should have received a copy of the GNU General Public License and
   a copy of the GCC Runtime Library Exception along with this program;
   see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
   <http://www.gnu.org/licenses/>.  */

/* 512-bit AVX512-VBMI2 intrinsics: concatenated ("double") shifts
   VPSHLD/VPSHRD, their vector-count VPSHLDV/VPSHRDV forms, and the
   byte/word compress/expand operations together with their
   store/load-from-memory variants.  Each intrinsic is a thin wrapper
   around the corresponding __builtin_ia32_* builtin.  */

#ifndef _IMMINTRIN_H_INCLUDED
#error "Never use <avx512vbmi2intrin.h> directly; include <immintrin.h> instead."
#endif

#ifndef __AVX512VBMI2INTRIN_H_INCLUDED
#define __AVX512VBMI2INTRIN_H_INCLUDED

#if !defined(__AVX512VBMI2__)
#pragma GCC push_options
#pragma GCC target("avx512vbmi2")
#define __DISABLE_AVX512VBMI2__
#endif /* __AVX512VBMI2__ */

/* Immediate-count shifts.  When optimizing these can be inline functions
   (the count argument still folds to the compile-time constant the builtin
   needs); without optimization the #else macro forms below pass the count
   expression through literally instead.

   For all of these: _mask forms take the pass-through value for elements
   whose mask bit is clear from __A, while _maskz forms zero those elements
   (they pass _mm512_setzero_si512 () as the pass-through operand).  */
#ifdef __OPTIMIZE__
extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_shrdi_epi16 (__m512i __A, __m512i __B, int __C)
{
  return (__m512i) __builtin_ia32_vpshrd_v32hi ((__v32hi)__A, (__v32hi) __B,
						__C);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_shrdi_epi32 (__m512i __A, __m512i __B, int __C)
{
  return (__m512i) __builtin_ia32_vpshrd_v16si ((__v16si)__A, (__v16si) __B,
						__C);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_shrdi_epi32 (__m512i __A, __mmask16 __B, __m512i __C, __m512i __D,
			 int __E)
{
  return (__m512i)__builtin_ia32_vpshrd_v16si_mask ((__v16si)__C,
			(__v16si) __D, __E, (__v16si) __A, (__mmask16)__B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_shrdi_epi32 (__mmask16 __A, __m512i __B, __m512i __C, int __D)
{
  return (__m512i)__builtin_ia32_vpshrd_v16si_mask ((__v16si)__B,
		(__v16si) __C, __D, (__v16si) _mm512_setzero_si512 (), (__mmask16)__A);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_shrdi_epi64 (__m512i __A, __m512i __B, int __C)
{
  return (__m512i) __builtin_ia32_vpshrd_v8di ((__v8di)__A, (__v8di) __B, __C);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_shrdi_epi64 (__m512i __A, __mmask8 __B, __m512i __C, __m512i __D,
			 int __E)
{
  return (__m512i)__builtin_ia32_vpshrd_v8di_mask ((__v8di)__C, (__v8di) __D,
				__E, (__v8di) __A, (__mmask8)__B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_shrdi_epi64 (__mmask8 __A, __m512i __B, __m512i __C, int __D)
{
  return (__m512i)__builtin_ia32_vpshrd_v8di_mask ((__v8di)__B, (__v8di) __C,
		__D, (__v8di) _mm512_setzero_si512 (), (__mmask8)__A);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_shldi_epi16 (__m512i __A, __m512i __B, int __C)
{
  return (__m512i) __builtin_ia32_vpshld_v32hi ((__v32hi)__A, (__v32hi) __B,
						__C);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_shldi_epi32 (__m512i __A, __m512i __B, int __C)
{
  return (__m512i) __builtin_ia32_vpshld_v16si ((__v16si)__A, (__v16si) __B,
						__C);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_shldi_epi32 (__m512i __A, __mmask16 __B, __m512i __C, __m512i __D,
			 int __E)
{
  return (__m512i)__builtin_ia32_vpshld_v16si_mask ((__v16si)__C,
			(__v16si) __D, __E, (__v16si) __A, (__mmask16)__B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_shldi_epi32 (__mmask16 __A, __m512i __B, __m512i __C, int __D)
{
  return (__m512i)__builtin_ia32_vpshld_v16si_mask ((__v16si)__B,
		(__v16si) __C, __D, (__v16si) _mm512_setzero_si512 (), (__mmask16)__A);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_shldi_epi64 (__m512i __A, __m512i __B, int __C)
{
  return (__m512i) __builtin_ia32_vpshld_v8di ((__v8di)__A, (__v8di) __B, __C);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_shldi_epi64 (__m512i __A, __mmask8 __B, __m512i __C, __m512i __D,
			 int __E)
{
  return (__m512i)__builtin_ia32_vpshld_v8di_mask ((__v8di)__C, (__v8di) __D,
				__E, (__v8di) __A, (__mmask8)__B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_shldi_epi64 (__mmask8 __A, __m512i __B, __m512i __C, int __D)
{
  return (__m512i)__builtin_ia32_vpshld_v8di_mask ((__v8di)__B, (__v8di) __C,
		__D, (__v8di) _mm512_setzero_si512 (), (__mmask8)__A);
}
#else
#define _mm512_shrdi_epi16(A, B, C) \
  ((__m512i) __builtin_ia32_vpshrd_v32hi ((__v32hi)(__m512i)(A), \
					  (__v32hi)(__m512i)(B),(int)(C)))
#define _mm512_shrdi_epi32(A, B, C) \
  ((__m512i) __builtin_ia32_vpshrd_v16si ((__v16si)(__m512i)(A), \
					  (__v16si)(__m512i)(B),(int)(C)))
#define _mm512_mask_shrdi_epi32(A, B, C, D, E) \
  ((__m512i) __builtin_ia32_vpshrd_v16si_mask ((__v16si)(__m512i)(C), \
					       (__v16si)(__m512i)(D), \
					       (int)(E), \
					       (__v16si)(__m512i)(A), \
					       (__mmask16)(B)))
#define _mm512_maskz_shrdi_epi32(A, B, C, D) \
  ((__m512i) \
   __builtin_ia32_vpshrd_v16si_mask ((__v16si)(__m512i)(B), \
				     (__v16si)(__m512i)(C),(int)(D), \
				     (__v16si)(__m512i)_mm512_setzero_si512 (), \
				     (__mmask16)(A)))
#define _mm512_shrdi_epi64(A, B, C) \
  ((__m512i) __builtin_ia32_vpshrd_v8di ((__v8di)(__m512i)(A), \
					 (__v8di)(__m512i)(B),(int)(C)))
#define _mm512_mask_shrdi_epi64(A, B, C, D, E) \
  ((__m512i) __builtin_ia32_vpshrd_v8di_mask ((__v8di)(__m512i)(C), \
					      (__v8di)(__m512i)(D), (int)(E), \
					      (__v8di)(__m512i)(A), \
					      (__mmask8)(B)))
#define _mm512_maskz_shrdi_epi64(A, B, C, D) \
  ((__m512i) \
   __builtin_ia32_vpshrd_v8di_mask ((__v8di)(__m512i)(B), \
				    (__v8di)(__m512i)(C),(int)(D), \
				    (__v8di)(__m512i)_mm512_setzero_si512 (), \
				    (__mmask8)(A)))
#define _mm512_shldi_epi16(A, B, C) \
  ((__m512i) __builtin_ia32_vpshld_v32hi ((__v32hi)(__m512i)(A), \
					  (__v32hi)(__m512i)(B),(int)(C)))
#define _mm512_shldi_epi32(A, B, C) \
  ((__m512i) __builtin_ia32_vpshld_v16si ((__v16si)(__m512i)(A), \
					  (__v16si)(__m512i)(B),(int)(C)))
#define _mm512_mask_shldi_epi32(A, B, C, D, E) \
  ((__m512i) __builtin_ia32_vpshld_v16si_mask ((__v16si)(__m512i)(C), \
					       (__v16si)(__m512i)(D), \
					       (int)(E), \
					       (__v16si)(__m512i)(A), \
					       (__mmask16)(B)))
#define _mm512_maskz_shldi_epi32(A, B, C, D) \
  ((__m512i) \
   __builtin_ia32_vpshld_v16si_mask ((__v16si)(__m512i)(B), \
				     (__v16si)(__m512i)(C),(int)(D), \
				     (__v16si)(__m512i)_mm512_setzero_si512 (), \
				     (__mmask16)(A)))
#define _mm512_shldi_epi64(A, B, C) \
  ((__m512i) __builtin_ia32_vpshld_v8di ((__v8di)(__m512i)(A), \
					 (__v8di)(__m512i)(B), (int)(C)))
#define _mm512_mask_shldi_epi64(A, B, C, D, E) \
  ((__m512i) __builtin_ia32_vpshld_v8di_mask ((__v8di)(__m512i)(C), \
					      (__v8di)(__m512i)(D), (int)(E), \
					      (__v8di)(__m512i)(A), \
					      (__mmask8)(B)))
#define _mm512_maskz_shldi_epi64(A, B, C, D) \
  ((__m512i) \
   __builtin_ia32_vpshld_v8di_mask ((__v8di)(__m512i)(B), \
				    (__v8di)(__m512i)(C),(int)(D), \
				    (__v8di)(__m512i)_mm512_setzero_si512 (), \
				    (__mmask8)(A)))
#endif

/* Variable-count shifts: the shift counts come from the third vector
   operand instead of an immediate, so these are always real functions.  */
extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_shrdv_epi16 (__m512i __A, __m512i __B, __m512i __C)
{
  return (__m512i) __builtin_ia32_vpshrdv_v32hi ((__v32hi)__A, (__v32hi) __B,
						 (__v32hi) __C);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_shrdv_epi32 (__m512i __A, __m512i __B, __m512i __C)
{
  return (__m512i) __builtin_ia32_vpshrdv_v16si ((__v16si)__A, (__v16si) __B,
						 (__v16si) __C);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_shrdv_epi32 (__m512i __A, __mmask16 __B, __m512i __C, __m512i __D)
{
  return (__m512i)__builtin_ia32_vpshrdv_v16si_mask ((__v16si)__A,
		(__v16si) __C, (__v16si) __D, (__mmask16)__B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_shrdv_epi32 (__mmask16 __A, __m512i __B, __m512i __C, __m512i __D)
{
  return (__m512i)__builtin_ia32_vpshrdv_v16si_maskz ((__v16si)__B,
		(__v16si) __C, (__v16si) __D, (__mmask16)__A);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_shrdv_epi64 (__m512i __A, __m512i __B, __m512i __C)
{
  return (__m512i) __builtin_ia32_vpshrdv_v8di ((__v8di)__A, (__v8di) __B,
						(__v8di) __C);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_shrdv_epi64 (__m512i __A, __mmask8 __B, __m512i __C, __m512i __D)
{
  return (__m512i)__builtin_ia32_vpshrdv_v8di_mask ((__v8di)__A, (__v8di) __C,
						    (__v8di) __D, (__mmask8)__B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_shrdv_epi64 (__mmask8 __A, __m512i __B, __m512i __C, __m512i __D)
{
  return (__m512i)__builtin_ia32_vpshrdv_v8di_maskz ((__v8di)__B, (__v8di) __C,
						     (__v8di) __D, (__mmask8)__A);
}
extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_shldv_epi16 (__m512i __A, __m512i __B, __m512i __C)
{
  return (__m512i) __builtin_ia32_vpshldv_v32hi ((__v32hi)__A, (__v32hi) __B,
						 (__v32hi) __C);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_shldv_epi32 (__m512i __A, __m512i __B, __m512i __C)
{
  return (__m512i) __builtin_ia32_vpshldv_v16si ((__v16si)__A, (__v16si) __B,
						 (__v16si) __C);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_shldv_epi32 (__m512i __A, __mmask16 __B, __m512i __C, __m512i __D)
{
  return (__m512i)__builtin_ia32_vpshldv_v16si_mask ((__v16si)__A,
		(__v16si) __C, (__v16si) __D, (__mmask16)__B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_shldv_epi32 (__mmask16 __A, __m512i __B, __m512i __C, __m512i __D)
{
  return (__m512i)__builtin_ia32_vpshldv_v16si_maskz ((__v16si)__B,
		(__v16si) __C, (__v16si) __D, (__mmask16)__A);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_shldv_epi64 (__m512i __A, __m512i __B, __m512i __C)
{
  return (__m512i) __builtin_ia32_vpshldv_v8di ((__v8di)__A, (__v8di) __B,
						(__v8di) __C);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_shldv_epi64 (__m512i __A, __mmask8 __B, __m512i __C, __m512i __D)
{
  return (__m512i)__builtin_ia32_vpshldv_v8di_mask ((__v8di)__A, (__v8di) __C,
						    (__v8di) __D, (__mmask8)__B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_shldv_epi64 (__mmask8 __A, __m512i __B, __m512i __C, __m512i __D)
{
  return (__m512i)__builtin_ia32_vpshldv_v8di_maskz ((__v8di)__B, (__v8di) __C,
						     (__v8di) __D, (__mmask8)__A);
}

#ifdef __DISABLE_AVX512VBMI2__
#undef __DISABLE_AVX512VBMI2__

#pragma GCC pop_options
#endif /* __DISABLE_AVX512VBMI2__ */

/* The remaining intrinsics also require AVX512BW: they operate on byte/word
   elements and use the 32- and 64-bit mask types __mmask32/__mmask64.  */
#if !defined(__AVX512VBMI2__) || !defined(__AVX512BW__)
#pragma GCC push_options
#pragma GCC target("avx512vbmi2,avx512bw")
#define __DISABLE_AVX512VBMI2BW__
#endif /* __AVX512VBMI2BW__ */

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_compress_epi8 (__m512i __A, __mmask64 __B, __m512i __C)
{
  return (__m512i) __builtin_ia32_compressqi512_mask ((__v64qi)__C,
						      (__v64qi)__A, (__mmask64)__B);
}


extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_compress_epi8 (__mmask64 __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_compressqi512_mask ((__v64qi)__B,
		(__v64qi)_mm512_setzero_si512 (), (__mmask64)__A);
}


extern __inline void
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_compressstoreu_epi8 (void * __A, __mmask64 __B, __m512i __C)
{
  __builtin_ia32_compressstoreuqi512_mask ((__v64qi *) __A, (__v64qi) __C,
					   (__mmask64) __B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_compress_epi16 (__m512i __A, __mmask32 __B, __m512i __C)
{
  return (__m512i) __builtin_ia32_compresshi512_mask ((__v32hi)__C,
						      (__v32hi)__A, (__mmask32)__B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_compress_epi16 (__mmask32 __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_compresshi512_mask ((__v32hi)__B,
		(__v32hi)_mm512_setzero_si512 (), (__mmask32)__A);
}

extern __inline void
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_compressstoreu_epi16 (void * __A, __mmask32 __B, __m512i __C)
{
  __builtin_ia32_compressstoreuhi512_mask ((__v32hi *) __A, (__v32hi) __C,
					   (__mmask32) __B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_expand_epi8 (__m512i __A, __mmask64 __B, __m512i __C)
{
  return (__m512i) __builtin_ia32_expandqi512_mask ((__v64qi) __C,
						    (__v64qi) __A,
						    (__mmask64) __B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_expand_epi8 (__mmask64 __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_expandqi512_maskz ((__v64qi) __B,
		(__v64qi) _mm512_setzero_si512 (), (__mmask64) __A);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_expandloadu_epi8 (__m512i __A, __mmask64 __B, const void * __C)
{
  return (__m512i) __builtin_ia32_expandloadqi512_mask ((const __v64qi *) __C,
			(__v64qi) __A, (__mmask64) __B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_expandloadu_epi8 (__mmask64 __A, const void * __B)
{
  return (__m512i) __builtin_ia32_expandloadqi512_maskz ((const __v64qi *) __B,
		(__v64qi) _mm512_setzero_si512 (), (__mmask64) __A);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_expand_epi16 (__m512i __A, __mmask32 __B, __m512i __C)
{
  return (__m512i) __builtin_ia32_expandhi512_mask ((__v32hi) __C,
						    (__v32hi) __A,
						    (__mmask32) __B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_expand_epi16 (__mmask32 __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_expandhi512_maskz ((__v32hi) __B,
		(__v32hi) _mm512_setzero_si512 (), (__mmask32) __A);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_expandloadu_epi16 (__m512i __A, __mmask32 __B, const void * __C)
{
  return (__m512i) __builtin_ia32_expandloadhi512_mask ((const __v32hi *) __C,
			(__v32hi) __A, (__mmask32) __B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_expandloadu_epi16 (__mmask32 __A, const void * __B)
{
  return (__m512i) __builtin_ia32_expandloadhi512_maskz ((const __v32hi *) __B,
		(__v32hi) _mm512_setzero_si512 (), (__mmask32) __A);
}

/* Masked 16-bit immediate-count shifts live here (not with the forms above)
   because their __mmask32 operands need AVX512BW; same function-vs-macro
   split on __OPTIMIZE__ as before.  */
#ifdef __OPTIMIZE__
extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_shrdi_epi16 (__m512i __A, __mmask32 __B, __m512i __C, __m512i __D,
			 int __E)
{
  return (__m512i)__builtin_ia32_vpshrd_v32hi_mask ((__v32hi)__C,
			(__v32hi) __D, __E, (__v32hi) __A, (__mmask32)__B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_shrdi_epi16 (__mmask32 __A, __m512i __B, __m512i __C, int __D)
{
  return (__m512i)__builtin_ia32_vpshrd_v32hi_mask ((__v32hi)__B,
		(__v32hi) __C, __D, (__v32hi) _mm512_setzero_si512 (), (__mmask32)__A);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_shldi_epi16 (__m512i __A, __mmask32 __B, __m512i __C, __m512i __D,
			 int __E)
{
  return (__m512i)__builtin_ia32_vpshld_v32hi_mask ((__v32hi)__C,
			(__v32hi) __D, __E, (__v32hi) __A, (__mmask32)__B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_shldi_epi16 (__mmask32 __A, __m512i __B, __m512i __C, int __D)
{
  return (__m512i)__builtin_ia32_vpshld_v32hi_mask ((__v32hi)__B,
		(__v32hi) __C, __D, (__v32hi) _mm512_setzero_si512 (), (__mmask32)__A);
}

#else
#define _mm512_mask_shrdi_epi16(A, B, C, D, E) \
  ((__m512i) __builtin_ia32_vpshrd_v32hi_mask ((__v32hi)(__m512i)(C), \
					       (__v32hi)(__m512i)(D), \
					       (int)(E), \
					       (__v32hi)(__m512i)(A), \
					       (__mmask32)(B)))
#define _mm512_maskz_shrdi_epi16(A, B, C, D) \
  ((__m512i) \
   __builtin_ia32_vpshrd_v32hi_mask ((__v32hi)(__m512i)(B), \
				     (__v32hi)(__m512i)(C),(int)(D), \
				     (__v32hi)(__m512i)_mm512_setzero_si512 (), \
				     (__mmask32)(A)))
#define _mm512_mask_shldi_epi16(A, B, C, D, E) \
  ((__m512i) __builtin_ia32_vpshld_v32hi_mask ((__v32hi)(__m512i)(C), \
					       (__v32hi)(__m512i)(D), \
					       (int)(E), \
					       (__v32hi)(__m512i)(A), \
					       (__mmask32)(B)))
#define _mm512_maskz_shldi_epi16(A, B, C, D) \
  ((__m512i) \
   __builtin_ia32_vpshld_v32hi_mask ((__v32hi)(__m512i)(B), \
				     (__v32hi)(__m512i)(C),(int)(D), \
				     (__v32hi)(__m512i)_mm512_setzero_si512 (), \
				     (__mmask32)(A)))
#endif

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_shrdv_epi16 (__m512i __A, __mmask32 __B, __m512i __C, __m512i __D)
{
  return (__m512i)__builtin_ia32_vpshrdv_v32hi_mask ((__v32hi)__A,
		(__v32hi) __C, (__v32hi) __D, (__mmask32)__B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_shrdv_epi16 (__mmask32 __A, __m512i __B, __m512i __C, __m512i __D)
{
  return (__m512i)__builtin_ia32_vpshrdv_v32hi_maskz ((__v32hi)__B,
		(__v32hi) __C, (__v32hi) __D, (__mmask32)__A);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_mask_shldv_epi16 (__m512i __A, __mmask32 __B, __m512i __C, __m512i __D)
{
  return (__m512i)__builtin_ia32_vpshldv_v32hi_mask ((__v32hi)__A,
		(__v32hi) __C, (__v32hi) __D, (__mmask32)__B);
}

extern __inline __m512i
__attribute__((__gnu_inline__, __always_inline__, __artificial__))
_mm512_maskz_shldv_epi16 (__mmask32 __A, __m512i __B, __m512i __C, __m512i __D)
{
  return (__m512i)__builtin_ia32_vpshldv_v32hi_maskz ((__v32hi)__B,
		(__v32hi) __C, (__v32hi) __D, (__mmask32)__A);
}

#ifdef __DISABLE_AVX512VBMI2BW__
#undef __DISABLE_AVX512VBMI2BW__

#pragma GCC pop_options
#endif /* __DISABLE_AVX512VBMI2BW__ */

#endif /* __AVX512VBMI2INTRIN_H_INCLUDED */