| /isa-l/include/ |
| raid.h |
     64  xor_gen(int vects, int len, void **array);
     81  xor_check(int vects, int len, void **array);
    101  pq_gen(int vects, int len, void **array);
    120  pq_check(int vects, int len, void **array);
    140  xor_gen_sse(int vects, int len, void **array);
    156  xor_gen_avx(int vects, int len, void **array);
    171  xor_check_sse(int vects, int len, void **array);
    189  pq_gen_sse(int vects, int len, void **array);
    207  pq_gen_avx(int vects, int len, void **array);
    225  pq_gen_avx2(int vects, int len, void **array);
    [all …]
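The raid.h hits above are the public RAID API: each routine takes a vector count, a length in bytes, and an array of buffer pointers. Below is a minimal usage sketch, assuming the pointer convention documented in the xor_gen_*.asm headers further down (the first vects-1 pointers are sources, the last is the parity destination); the include name, the 32-byte alignment, and the return-value conventions are assumptions, not taken from these excerpts.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "raid.h"                       /* assumed include name, per the listing above */

#define VECTS 5                         /* 4 data sources + 1 parity destination */
#define LEN   (16 * 1024)               /* bytes per vector */

int main(void)
{
        void *array[VECTS];             /* first VECTS-1 entries: sources; last entry: parity dest */
        int i;

        for (i = 0; i < VECTS; i++) {
                array[i] = aligned_alloc(32, LEN);      /* 32-byte alignment is an assumption */
                if (array[i] == NULL)
                        return 1;
        }

        for (i = 0; i < VECTS - 1; i++)
                memset(array[i], i + 1, LEN);           /* example source data */

        xor_gen(VECTS, LEN, array);                     /* writes XOR parity into array[VECTS-1] */

        if (xor_check(VECTS, LEN, array) != 0)          /* non-zero assumed to signal bad parity */
                printf("xor parity mismatch\n");

        for (i = 0; i < VECTS; i++)
                free(array[i]);
        return 0;
}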
|
| /isa-l/raid/ |
| raid_base_aliases.c |
     33  pq_gen(int vects, int len, void **array)             in pq_gen() argument
     35  return pq_gen_base(vects, len, array);               in pq_gen()
     39  pq_check(int vects, int len, void **array)           in pq_check() argument
     41  return pq_check_base(vects, len, array);             in pq_check()
     45  xor_gen(int vects, int len, void **array)            in xor_gen() argument
     47  return xor_gen_base(vects, len, array);              in xor_gen()
     51  xor_check(int vects, int len, void **array)          in xor_check() argument
     53  return xor_check_base(vects, len, array);            in xor_check()
|
| raid_base.c |
     44  pq_gen_base(int vects, int len, void **array)        in pq_gen_base() argument
     48  unsigned long **src = (unsigned long **) array;      in pq_gen_base()
     71  pq_check_base(int vects, int len, void **array)      in pq_check_base() argument
     75  unsigned char **src = (unsigned char **) array;      in pq_check_base()
    100  xor_gen_base(int vects, int len, void **array)       in xor_gen_base() argument
    104  unsigned char **src = (unsigned char **) array;      in xor_gen_base()
    121  xor_check_base(int vects, int len, void **array)     in xor_check_base() argument
    126  unsigned char **src = (unsigned char **) array;      in xor_check_base()
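For reference, the kind of plain C loop a base (non-SIMD) XOR generator boils down to, given the convention above. This is an illustrative sketch, not the actual raid_base.c code.

/* Illustrative only: a scalar loop equivalent in spirit to xor_gen_base().
 * The pointer convention matches the .asm headers below: the last pointer
 * in the array is the parity destination, the rest are sources. */
static int xor_gen_sketch(int vects, int len, void **array)
{
        unsigned char **src = (unsigned char **) array;
        unsigned char *dest = src[vects - 1];
        int i, j;

        for (i = 0; i < len; i++) {
                unsigned char parity = 0;
                for (j = 0; j < vects - 1; j++)
                        parity ^= src[j][i];            /* accumulate XOR across sources */
                dest[i] = parity;
        }
        return 0;
}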
|
| xor_gen_avx512.asm |
     31  ;;; int xor_gen_avx512(int vects, int len, void **array)
     33  ;;; Generates xor parity vector from N (vects-1) sources in array of pointers
     34  ;;; (**array). Last pointer is the dest.
    122  mov   tmp2, [arg2+vec*PS]       ;Fetch last pointer in array
    124  XLDR  zmm0, [tmp2+pos]          ;Start with end of array in last vector
    152  mov   ptr, [arg2+vec*PS]        ;Fetch last pointer in array
    153  mov   tmp2.b, [ptr+len-1]       ;Get array n
    187  mov   ptr, [arg2+vec*PS]        ;Fetch last pointer in array
    188  mov   tmp2, [ptr+len-PS]        ;Get array n
|
| xor_gen_avx.asm |
     31  ;;; int xor_gen_avx(int vects, int len, void **array)
     33  ;;; Generates xor parity vector from N (vects-1) sources in array of pointers
     34  ;;; (**array). Last pointer is the dest.
    121  mov   tmp2, [arg2+vec*PS]       ;Fetch last pointer in array
    123  XLDR  ymm0, [tmp2+pos]          ;Start with end of array in last vector
    159  mov   ptr, [arg2+vec*PS]        ;Fetch last pointer in array
    160  mov   tmp2.b, [ptr+len-1]       ;Get array n
    194  mov   ptr, [arg2+vec*PS]        ;Fetch last pointer in array
    195  mov   tmp2, [ptr+len-PS]        ;Get array n
|
| xor_check_sse.asm |
     31  ;;; int xor_check_sse(int vects, int len, void **array)
     33  ;;; Checks that array has XOR parity sum of 0 across all vectors in **array.
    163  mov   tmp2, [arg2+tmp*PS]       ;Fetch last pointer in array
    165  XLDR  xmm0, [tmp2+pos]          ;Start with end of array in last vector
    217  mov   ptr, [arg2+tmp*PS]        ;Fetch last pointer in array
    218  mov   tmp2.b, [ptr+len-1]       ;Get array n
    251  mov   ptr, [arg2+tmp*PS]        ;Fetch last pointer in array
    252  mov   tmp2, [ptr+len-PS]        ;Get array n
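The check variant described in the xor_check_sse.asm header XORs every vector, parity included, and expects the running sum to be zero. A scalar sketch of that check follows; the exact return values of the real xor_check_base()/xor_check_sse() routines are an assumption.

/* Illustrative only: XOR every vector, parity included; any non-zero byte
 * in the running sum means the parity no longer holds. */
static int xor_check_sketch(int vects, int len, void **array)
{
        unsigned char **src = (unsigned char **) array;
        int i, j;

        for (i = 0; i < len; i++) {
                unsigned char sum = 0;
                for (j = 0; j < vects; j++)             /* all vects vectors, parity included */
                        sum ^= src[j][i];
                if (sum != 0)
                        return 1;                       /* parity sum is not zero */
        }
        return 0;
}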
|
| xor_gen_sse.asm |
     31  ;;; int xor_gen_sse(int vects, int len, void **array)
     33  ;;; Generates xor parity vector from N (vects-1) sources in array of pointers
     34  ;;; (**array). Last pointer is the dest.
    164  mov   tmp2, [arg2+tmp*PS]       ;Fetch last pointer in array
    166  XLDR  xmm0, [tmp2+pos]          ;Start with end of array in last vector
    217  mov   ptr, [arg2+tmp*PS]        ;Fetch last pointer in array
    218  mov   tmp2.b, [ptr+len-1]       ;Get array n
    251  mov   ptr, [arg2+tmp*PS]        ;Fetch last pointer in array
    252  mov   tmp2, [ptr+len-PS]        ;Get array n
|
| pq_check_test.c |
     45  ref_multi_pq(int vects, int len, void **array)       in ref_multi_pq() argument
     49  unsigned char **src = (unsigned char **) array;      in ref_multi_pq()
|
| /isa-l/erasure_code/ |
| gf_6vect_mad_sse.asm |
    206  movdqu xgft5_lo, [tmp3+2*tmp]       ;Load array Ex{00}, Ex{01}, ..., Ex{0f}
    210  movdqu xgft6_lo, [tmp3+mul_array]   ;Load array Fx{00}, Fx{01}, ..., Fx{0f}
    225  movdqu xtmpl2, [tmp3+vec]           ;Load array Bx{00}, Bx{01}, Bx{02}, ...
    261  movdqa xtmph1, xgft4_hi             ;Reload const array registers
    262  movdqa xtmpl1, xgft4_lo             ;Reload const array registers
    263  movdqa xtmph2, xgft5_hi             ;Reload const array registers
    264  movdqa xtmpl2, xgft5_lo             ;Reload const array registers
    265  movdqa xtmph3, xgft6_hi             ;Reload const array registers
    266  movdqa xtmpl3, xgft6_lo             ;Reload const array registers
    350  movdqu xgft4_lo, [tmp3]             ;Load array Ax{00}, Ax{01}, Ax{02}, ...
    [all …]
|
| gf_6vect_mad_avx512.asm |
    202  vmovdqu xgft1_loy, [tmp]            ;Load array Ax{00}..{0f}, Ax{00}..{f0}
    203  vmovdqu xgft2_loy, [tmp+vec]        ;Load array Bx{00}..{0f}, Bx{00}..{f0}
    204  vmovdqu xgft3_loy, [tmp+2*vec]      ;Load array Cx{00}..{0f}, Cx{00}..{f0}
    205  vmovdqu xgft4_loy, [tmp+vec_i]      ;Load array Dx{00}..{0f}, Dx{00}..{f0}
    206  vmovdqu xgft5_loy, [tmp+4*vec]      ;Load array Ex{00}..{0f}, Ex{00}..{f0}
    207  vmovdqu xgft6_loy, [tmp+mul_array]  ;Load array Fx{00}..{0f}, Fx{00}..{f0}
|
| gf_5vect_mad_sse.asm |
    196  movdqu xgft4_lo, [tmp3+tmp]         ;Load array Dx{00}, Dx{01}, Dx{02}, ...
    210  movdqu xtmpl2, [tmp3+vec]           ;Load array Bx{00}, Bx{01}, Bx{02}, ...
    290  movdqu xtmpl1, [tmp3]               ;Load array Ax{00}, Ax{01}, Ax{02}, ...
    294  movdqu xtmpl3, [tmp3+2*vec]         ;Load array Cx{00}, Cx{01}, Cx{02}, ...
    295  movdqu xtmpl5, [tmp3+4*vec]         ;Load array Ex{00}, Ex{01}, ..., Ex{0f}
|
| gf_6vect_dot_prod_avx512.asm |
    255  vmovdqu8 xgft1_loy, [tmp]               ;Load array Ax{00}..{0f}, Ax{00}..{f0}
    256  vmovdqu8 xgft2_loy, [tmp+vec*(32/PS)]   ;Load array Bx{00}..{0f}, Bx{00}..{f0}
    257  vmovdqu8 xgft3_loy, [tmp+vec*(64/PS)]   ;Load array Cx{00}..{0f}, Cx{00}..{f0}
    258  vmovdqu8 xgft4_loy, [tmp+vskip3]        ;Load array Dx{00}..{0f}, Dx{00}..{f0}
    259  vmovdqu8 xgft5_loy, [tmp+vskip1*4]      ;Load array Ex{00}..{0f}, Ex{00}..{f0}
    261  vmovdqu8 xgft6_loy, [tmp+ptr]           ;Load array Fx{00}..{0f}, Fx{00}..{f0}
|
| gf_5vect_mad_avx512.asm |
    181  vmovdqu xgft1_loy, [tmp]            ;Load array Ax{00}..{0f}, Ax{00}..{f0}
    182  vmovdqu xgft2_loy, [tmp+vec]        ;Load array Bx{00}..{0f}, Bx{00}..{f0}
    183  vmovdqu xgft3_loy, [tmp+2*vec]      ;Load array Cx{00}..{0f}, Cx{00}..{f0}
    184  vmovdqu xgft5_loy, [tmp+4*vec]      ;Load array Ex{00}..{0f}, Ex{00}..{f0}
    186  vmovdqu xgft4_loy, [tmp+2*vec]      ;Load array Dx{00}..{0f}, Dx{00}..{f0}
|
| gf_5vect_mad_avx.asm |
    196  vmovdqu xgft4_lo, [tmp3+tmp]        ;Load array Dx{00}, Dx{01}, Dx{02}, ...
    210  vmovdqu xtmpl2, [tmp3+vec]          ;Load array Bx{00}, Bx{01}, Bx{02}, ...
    285  vmovdqu xtmpl1, [tmp3]              ;Load array Ax{00}, Ax{01}, Ax{02}, ...
    289  vmovdqu xtmpl3, [tmp3+2*vec]        ;Load array Cx{00}, Cx{01}, Cx{02}, ...
    290  vmovdqu xtmpl5, [tmp3+4*vec]        ;Load array Ex{00}, Ex{01}, ..., Ex{0f}
|
| gf_5vect_mad_avx2.asm |
    183  vmovdqu xgft1_lo, [tmp]             ;Load array Ax{00}, Ax{01}, ..., Ax{0f}
    187  vmovdqu xgft3_lo, [tmp+2*vec]       ;Load array Cx{00}, Cx{01}, ..., Cx{0f}
    192  vmovdqu xgft4_lo, [tmp+2*vec]       ;Load array Dx{00}, Dx{01}, ..., Dx{0f}
|
| gf_5vect_dot_prod_avx512.asm |
    251  vmovdqu8 xgft1_loy, [tmp]               ;Load array Ax{00}..{0f}, Ax{00}..{f0}
    252  vmovdqu8 xgft2_loy, [tmp+vec*(32/PS)]   ;Load array Bx{00}..{0f}, Bx{00}..{f0}
    253  vmovdqu8 xgft3_loy, [tmp+vec*(64/PS)]   ;Load array Cx{00}..{0f}, Cx{00}..{f0}
    254  vmovdqu8 xgft4_loy, [tmp+vskip3]        ;Load array Dx{00}..{0f}, Dx{00}..{f0}
    255  vmovdqu8 xgft5_loy, [tmp+vskip1*4]      ;Load array Ex{00}..{0f}, Ex{00}..{f0}
|
| gf_6vect_mad_avx.asm |
    203  vmovdqu xgft5_lo, [tmp3+2*tmp]      ;Load array Ex{00}, Ex{01}, ..., Ex{0f}
    207  vmovdqu xgft6_lo, [tmp3+mul_array]  ;Load array Fx{00}, Fx{01}, ..., Fx{0f}
    222  vmovdqu xtmpl2, [tmp3+vec]          ;Load array Bx{00}, Bx{01}, Bx{02}, ...
    339  vmovdqu xgft4_lo, [tmp3]            ;Load array Ax{00}, Ax{01}, Ax{02}, ...
    343  vmovdqu xgft6_lo, [tmp3+2*vec]      ;Load array Cx{00}, Cx{01}, Cx{02}, ...
|
| gf_4vect_mad_avx512.asm |
    173  vmovdqu xgft1_loy, [tmp]            ;Load array Ax{00}..{0f}, Ax{00}..{f0}
    174  vmovdqu xgft2_loy, [tmp+vec]        ;Load array Bx{00}..{0f}, Bx{00}..{f0}
    175  vmovdqu xgft3_loy, [tmp+2*vec]      ;Load array Cx{00}..{0f}, Cx{00}..{f0}
    177  vmovdqu xgft4_loy, [tmp+2*vec]      ;Load array Dx{00}..{0f}, Dx{00}..{f0}
|
| gf_4vect_dot_prod_avx512.asm |
    227  vmovdqu8 xgft1_loy, [tmp]               ;Load array Ax{00}..{0f}, Ax{00}..{f0}
    228  vmovdqu8 xgft2_loy, [tmp+vec*(32/PS)]   ;Load array Bx{00}..{0f}, Bx{00}..{f0}
    229  vmovdqu8 xgft3_loy, [tmp+vec*(64/PS)]   ;Load array Cx{00}..{0f}, Cx{00}..{f0}
    230  vmovdqu8 xgft4_loy, [tmp+vskip3]        ;Load array Dx{00}..{0f}, Dx{00}..{f0}
|
| gf_2vect_mad_sse.asm |
    168  movdqu xgft1_lo, [tmp]              ;Load array Ax{00}, Ax{01}, Ax{02}, ...
    183  movdqa xtmph1, xgft1_hi             ;Reload const array registers
    185  movdqa xtmph2, xgft2_hi             ;Reload const array registers
|
| gf_4vect_mad_sse.asm |
    201  movdqu xtmpl2, [tmp3+vec]           ;Load array Bx{00}, Bx{01}, Bx{02}, ...
    202  movdqu xtmpl3, [tmp3+2*vec]         ;Load array Cx{00}, Cx{01}, Cx{02}, ...
    266  movdqu xtmpl2, [tmp3+vec]           ;Load array Bx{00}, Bx{01}, Bx{02}, ...
    267  movdqu xtmpl3, [tmp3+2*vec]         ;Load array Cx{00}, Cx{01}, Cx{02}, ...
|
| gf_5vect_dot_prod_sse.asm |
    218  movdqu xgft1_lo, [tmp]              ;Load array Ax{00}, Ax{01}, ..., Ax{0f}
    222  movdqu xgft3_lo, [tmp+vskip1*2]     ;Load array Cx{00}, Cx{01}, ..., Cx{0f}
    242  movdqu xgft1_lo, [tmp+vskip1*4]     ;Load array Ex{00}, Ex{01}, ..., Ex{0f}
|
| gf_3vect_mad_avx512.asm |
    166  vmovdqu xgft1_loy, [tmp]            ;Load array Ax{00}..{0f}, Ax{00}..{f0}
    167  vmovdqu xgft2_loy, [tmp+vec]        ;Load array Bx{00}..{0f}, Bx{00}..{f0}
    168  vmovdqu xgft3_loy, [tmp+2*vec]      ;Load array Cx{00}..{0f}, Cx{00}..{f0}
|
| gf_3vect_dot_prod_sse.asm |
    293  movdqu xgft1_lo, [tmp]              ;Load array Ax{00}, Ax{01}, ..., Ax{0f}
    298  movdqu xgft3_lo, [tmp+vec*(64/PS)]  ;Load array Cx{00}, Cx{01}, ..., Cx{0f}
    326  movdqu xgft3_lo, [tmp+vec*(32/PS)]  ;Load array Cx{00}, Cx{01}, ..., Cx{0f}
|
| gf_3vect_dot_prod_avx512.asm |
    196  vmovdqu8 xgft1_loy, [tmp]               ;Load array Ax{00}..{0f}, Ax{00}..{f0}
    197  vmovdqu8 xgft2_loy, [tmp+vec*(32/PS)]   ;Load array Bx{00}..{0f}, Bx{00}..{f0}
    198  vmovdqu8 xgft3_loy, [tmp+vec*(64/PS)]   ;Load array Cx{00}..{0f}, Cx{00}..{f0}
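The recurring "Load array Ax{00}..{0f}, Ax{00}..{f0}" comments in the erasure-code hits refer to pairs of 16-entry GF(2^8) lookup tables: one holds the products of a coefficient A with the sixteen low-nibble values, the other with the sixteen high-nibble values, so a full byte multiply is two table lookups plus an XOR, which the SIMD code performs 16 or more bytes at a time with pshufb/vpshufb (the *_lo/*_hi register pairs reloaded in the SSE variants above). Below is a scalar sketch of that split; gf_mul_sketch() and build_gf_tables() are illustrative names, and the 0x1d reduction polynomial is an assumption since the field definition does not appear in these excerpts.

/* Bitwise GF(2^8) multiply, assuming the 0x11d polynomial (0x1d reduction). */
static unsigned char gf_mul_sketch(unsigned char a, unsigned char b)
{
        unsigned char p = 0;
        int i;

        for (i = 0; i < 8; i++) {
                if (b & 1)
                        p ^= a;
                a = (unsigned char)((a << 1) ^ ((a & 0x80) ? 0x1d : 0));
                b >>= 1;
        }
        return p;
}

/* Build the two tables the assembly loads as Ax{00}..{0f} and Ax{00}..{f0}. */
static void build_gf_tables(unsigned char a,
                            unsigned char lo[16], unsigned char hi[16])
{
        int n;

        for (n = 0; n < 16; n++) {
                lo[n] = gf_mul_sketch(a, (unsigned char)n);         /* A * 0x00..0x0f */
                hi[n] = gf_mul_sketch(a, (unsigned char)(n << 4));  /* A * 0x00, 0x10, ..., 0xf0 */
        }
}

/* One byte of the table-driven multiply: lo[s & 0xf] ^ hi[s >> 4] == A * s. */
static unsigned char gf_mul_tables(unsigned char s,
                                   const unsigned char lo[16],
                                   const unsigned char hi[16])
{
        return lo[s & 0x0f] ^ hi[s >> 4];
}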
|