Searched refs:aarch64_sve_vg (Results 1 – 7 of 7) sorted by relevance
/netbsd-src/external/gpl3/gcc.old/dist/gcc/config/aarch64/
aarch64-modes.def
      55  ADJUST_NUNITS (VNx16BI, aarch64_sve_vg * 8);
      56  ADJUST_NUNITS (VNx8BI, aarch64_sve_vg * 4);
      57  ADJUST_NUNITS (VNx4BI, aarch64_sve_vg * 2);
      58  ADJUST_NUNITS (VNx2BI, aarch64_sve_vg);
      92  ADJUST_NUNITS (VB##QI, aarch64_sve_vg * NVECS * 8); \
      93  ADJUST_NUNITS (VH##HI, aarch64_sve_vg * NVECS * 4); \
      94  ADJUST_NUNITS (VS##SI, aarch64_sve_vg * NVECS * 2); \
      95  ADJUST_NUNITS (VD##DI, aarch64_sve_vg * NVECS); \
      96  ADJUST_NUNITS (VH##BF, aarch64_sve_vg * NVECS * 4); \
      97  ADJUST_NUNITS (VH##HF, aarch64_sve_vg * NVECS * 4); \
    [all …]
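The ADJUST_NUNITS lines above make the element count of each SVE mode scale with aarch64_sve_vg, the number of 64-bit granules per vector: a predicate mode such as VNx16BI gets one boolean per vector byte (vg * 8), VNx2BI one per 64-bit lane (vg), and the data-mode macro additionally multiplies by NVECS for tuple modes. A minimal standalone sketch of that arithmetic, assuming 128-bit vectors (vg = 2) and nvecs = 1 purely for illustration (ordinary C++, not GCC code):

  // Sketch only: how the NUNITS expressions above scale with vg.
  #include <cstdio>

  int main ()
  {
    unsigned vg = 2;      // assumed: 128-bit vectors = two 64-bit granules
    unsigned nvecs = 1;   // 1 for single-vector modes, >1 for tuple modes

    std::printf ("VNx16QI: %u QImode (byte) elements\n", vg * nvecs * 8);
    std::printf ("VNx8HI:  %u HImode (halfword) elements\n", vg * nvecs * 4);
    std::printf ("VNx4SI:  %u SImode (word) elements\n", vg * nvecs * 2);
    std::printf ("VNx2DI:  %u DImode (doubleword) elements\n", vg * nvecs);
    return 0;
  }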
aarch64.h
    1247  extern poly_uint16 aarch64_sve_vg;
    1250  #define BITS_PER_SVE_VECTOR (poly_uint16 (aarch64_sve_vg * 64))
    1251  #define BYTES_PER_SVE_VECTOR (poly_uint16 (aarch64_sve_vg * 8))
    1255  #define BYTES_PER_SVE_PRED aarch64_sve_vg
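The aarch64.h macros above all derive from the same quantity: aarch64_sve_vg is the number of 64-bit granules in one SVE vector, so a vector is vg * 64 bits or vg * 8 bytes, and a predicate register, which carries one bit per vector byte, is vg bytes. A minimal sketch of that arithmetic, assuming a 256-bit vector length purely for illustration (plain C++, not GCC code):

  #include <cassert>

  int main ()
  {
    unsigned vl_bits = 256;            // assumed length, e.g. -msve-vector-bits=256
    unsigned vg = vl_bits / 64;        // aarch64_sve_vg: 64-bit granules per vector -> 4

    assert (vg * 64 == vl_bits);       // BITS_PER_SVE_VECTOR  = vg * 64
    assert (vg * 8 == vl_bits / 8);    // BYTES_PER_SVE_VECTOR = vg * 8
    assert (vg == (vl_bits / 8) / 8);  // BYTES_PER_SVE_PRED   = vg (one predicate bit per byte)
    return 0;
  }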
aarch64.c
     277  poly_uint16 aarch64_sve_vg;  [variable]
    2949  if (!aarch64_sve_vg.is_constant ())  in aarch64_regmode_natural_size()
    4005  else if (aarch64_sve_vg.is_constant (&const_vg))  in aarch64_fold_sve_cnt_pat()
    4022  poly_uint64 nelts_all = exact_div (aarch64_sve_vg, 2) * nelts_per_vq;  in aarch64_fold_sve_cnt_pat()
   15356  aarch64_sve_vg = aarch64_convert_sve_vector_bits (aarch64_sve_vector_bits);  in aarch64_override_options()
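aarch64_sve_vg is a poly_uint16, i.e. a value that may only be known at run time; the is_constant () calls above succeed only when the vector length has been pinned down (for example with -msve-vector-bits=N), which is also when aarch64_override_options assigns it a constant. A simplified stand-in for that check, not the real poly-int.h API, just to show the shape of the logic:

  #include <cstdio>

  // Simplified stand-in for GCC's poly_uint16 (not the real poly-int.h type):
  // the value is coeffs[0] + coeffs[1] * x, where x is unknown at compile time.
  struct poly_u16
  {
    unsigned short coeffs[2];

    bool is_constant (unsigned short *out = nullptr) const
    {
      if (coeffs[1] != 0)
        return false;                  // still depends on the unknown x
      if (out)
        *out = coeffs[0];
      return true;
    }
  };

  int main ()
  {
    poly_u16 scalable = {{2, 2}};      // length not fixed: vg grows with the unknown x
    poly_u16 fixed256 = {{4, 0}};      // e.g. -msve-vector-bits=256 would give vg = 4

    unsigned short const_vg;
    std::printf ("scalable is_constant: %d\n", (int) scalable.is_constant ());
    if (fixed256.is_constant (&const_vg))
      std::printf ("fixed vg = %d\n", (int) const_vg);
    return 0;
  }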
/netbsd-src/external/gpl3/gcc/dist/gcc/config/aarch64/
aarch64-modes.def
      55  ADJUST_NUNITS (VNx16BI, aarch64_sve_vg * 8);
      56  ADJUST_NUNITS (VNx8BI, aarch64_sve_vg * 4);
      57  ADJUST_NUNITS (VNx4BI, aarch64_sve_vg * 2);
      58  ADJUST_NUNITS (VNx2BI, aarch64_sve_vg);
     153  ADJUST_NUNITS (VB##QI, aarch64_sve_vg * NVECS * 8); \
     154  ADJUST_NUNITS (VH##HI, aarch64_sve_vg * NVECS * 4); \
     155  ADJUST_NUNITS (VS##SI, aarch64_sve_vg * NVECS * 2); \
     156  ADJUST_NUNITS (VD##DI, aarch64_sve_vg * NVECS); \
     157  ADJUST_NUNITS (VH##BF, aarch64_sve_vg * NVECS * 4); \
     158  ADJUST_NUNITS (VH##HF, aarch64_sve_vg * NVECS * 4); \
    [all …]
aarch64.h
    1349  extern poly_uint16 aarch64_sve_vg;
    1352  #define BITS_PER_SVE_VECTOR (poly_uint16 (aarch64_sve_vg * 64))
    1353  #define BYTES_PER_SVE_VECTOR (poly_uint16 (aarch64_sve_vg * 8))
    1357  #define BYTES_PER_SVE_PRED aarch64_sve_vg
aarch64.cc
     282  poly_uint16 aarch64_sve_vg;  [variable]
    4210  if (!aarch64_sve_vg.is_constant ())  in aarch64_regmode_natural_size()
    5292  else if (aarch64_sve_vg.is_constant (&const_vg))  in aarch64_fold_sve_cnt_pat()
    5309  poly_uint64 nelts_all = exact_div (aarch64_sve_vg, 2) * nelts_per_vq;  in aarch64_fold_sve_cnt_pat()
   15973  || aarch64_sve_vg.is_constant ())  in record_potential_advsimd_unrolling()
   18432  aarch64_sve_vg = aarch64_convert_sve_vector_bits (aarch64_sve_vector_bits);  in aarch64_override_options()
/netbsd-src/external/gpl3/gcc/dist/gcc/
ChangeLog-2018
   32868  modes. Adjust their number of units based on aarch64_sve_vg.
   32906  (aarch64_sve_vg): Declare.
   32915  (aarch64_sve_vg): New variable.