Lines Matching full:l

// NOTE(review): this file appears to be a line-numbered search/coverage
// listing of velintrin_approx.h (VE vector intrinsics), not compilable C:
// the declarations of v2..v5 and s0, the `return` statement, and the
// closing brace of each function are missing from the listing, and tool
// residue (original line numbers, "in func()" tags) is fused into each
// line. Lines are kept verbatim; comments only are added.
//
// Approximate elementwise single-precision divide v0 / v1 over l elements.
// Pattern visible below: hardware reciprocal estimate of v1, one
// fused-multiply refinement of the reciprocal, then two refinements of the
// quotient — consistent with Newton-Raphson iteration. s0 is defined on a
// line not in view (presumably 1.0f — TODO confirm against the full header).
12 static inline __vr _vel_approx_vfdivs_vvvl(__vr v0, __vr v1, int l) {  in _vel_approx_vfdivs_vvvl()  argument
// v5 = reciprocal estimate of v1 (per the vrcps mnemonic — confirm semantics in the intrinsics manual)
15 v5 = _vel_vrcps_vvl(v1, l); in _vel_approx_vfdivs_vvvl()
// v4 = s0 - v1*v5  (residual of the reciprocal estimate)
17 v4 = _vel_vfnmsbs_vsvvl(s0, v1, v5, l); in _vel_approx_vfdivs_vvvl()
// v3 = v5 + v5*v4  (refined reciprocal)
18 v3 = _vel_vfmads_vvvvl(v5, v5, v4, l); in _vel_approx_vfdivs_vvvl()
// v2 = v0 * (refined 1/v1)  — first quotient estimate
19 v2 = _vel_vfmuls_vvvl(v0, v3, l); in _vel_approx_vfdivs_vvvl()
// v4 = v0 - v2*v1  (quotient residual), then correct v2 with the rough reciprocal v5
20 v4 = _vel_vfnmsbs_vvvvl(v0, v2, v1, l); in _vel_approx_vfdivs_vvvl()
21 v2 = _vel_vfmads_vvvvl(v2, v5, v4, l); in _vel_approx_vfdivs_vvvl()
// second residual/correction pass using the refined reciprocal v3;
// the result lands in v0 (presumably returned on a line not in view)
22 v0 = _vel_vfnmsbs_vvvvl(v0, v2, v1, l); in _vel_approx_vfdivs_vvvl()
23 v0 = _vel_vfmads_vvvvl(v2, v3, v0, l); in _vel_approx_vfdivs_vvvl()
// Packed (pv*) variant of the approximate divide: same estimate-and-refine
// sequence as _vel_approx_vfdivs_vvvl but using the packed intrinsics,
// which per the pv naming presumably operate on pairs of single-precision
// lanes — TODO confirm in the VE intrinsics manual. Declarations of
// v2..v5/s0 and the return are on lines missing from this listing.
27 static inline __vr _vel_approx_pvfdiv_vvvl(__vr v0, __vr v1, int l) { in _vel_approx_pvfdiv_vvvl() argument
// v5 = reciprocal estimate of v1
30 v5 = _vel_pvrcp_vvl(v1, l); in _vel_approx_pvfdiv_vvvl()
// v4 = s0 - v1*v5; v3 = v5 + v5*v4  (refined reciprocal)
32 v4 = _vel_pvfnmsb_vsvvl(s0, v1, v5, l); in _vel_approx_pvfdiv_vvvl()
33 v3 = _vel_pvfmad_vvvvl(v5, v5, v4, l); in _vel_approx_pvfdiv_vvvl()
// first quotient estimate and correction via the rough reciprocal v5
34 v2 = _vel_pvfmul_vvvl(v0, v3, l); in _vel_approx_pvfdiv_vvvl()
35 v4 = _vel_pvfnmsb_vvvvl(v0, v2, v1, l); in _vel_approx_pvfdiv_vvvl()
36 v2 = _vel_pvfmad_vvvvl(v2, v5, v4, l); in _vel_approx_pvfdiv_vvvl()
// second correction pass via the refined reciprocal v3; result in v0
37 v0 = _vel_pvfnmsb_vvvvl(v0, v2, v1, l); in _vel_approx_pvfdiv_vvvl()
38 v0 = _vel_pvfmad_vvvvl(v2, v3, v0, l); in _vel_approx_pvfdiv_vvvl()
// Approximate scalar-over-vector single-precision divide: s0 / v0.
// Same reciprocal-estimate + refinement scheme; here the numerator is the
// scalar s0 and s1 (defined on a missing line, presumably 1.0f — TODO
// confirm) seeds the reciprocal residual. Declarations and return are
// missing from this listing.
42 static inline __vr _vel_approx_vfdivs_vsvl(float s0, __vr v0, int l) { in _vel_approx_vfdivs_vsvl() argument
// v4 = reciprocal estimate of v0
45 v4 = _vel_vrcps_vvl(v0, l); in _vel_approx_vfdivs_vsvl()
// v2 = s1 - v0*v4, then v2 = v4 + v4*v2  (refined reciprocal)
47 v2 = _vel_vfnmsbs_vsvvl(s1, v0, v4, l); in _vel_approx_vfdivs_vsvl()
48 v2 = _vel_vfmads_vvvvl(v4, v4, v2, l); in _vel_approx_vfdivs_vsvl()
// v1 = s0 * (refined 1/v0)  — first quotient estimate
49 v1 = _vel_vfmuls_vsvl(s0, v2, l); in _vel_approx_vfdivs_vsvl()
// residual v3 = s0 - v1*v0, correct with rough reciprocal v4
50 v3 = _vel_vfnmsbs_vsvvl(s0, v1, v0, l); in _vel_approx_vfdivs_vsvl()
51 v1 = _vel_vfmads_vvvvl(v1, v4, v3, l); in _vel_approx_vfdivs_vsvl()
// second residual/correction pass with the refined reciprocal v2; result in v0
52 v3 = _vel_vfnmsbs_vsvvl(s0, v1, v0, l); in _vel_approx_vfdivs_vsvl()
53 v0 = _vel_vfmads_vvvvl(v1, v2, v3, l); in _vel_approx_vfdivs_vsvl()
// Approximate vector-over-scalar single-precision divide: v0 / s0.
// Only three instructions are visible; s1 is defined on a missing line
// (presumably a scalar reciprocal approximation of s0, i.e. s1 ~= 1/s0 —
// TODO confirm against the full header). Declarations and return are
// missing from this listing.
57 static inline __vr _vel_approx_vfdivs_vvsl(__vr v0, float s0, int l) { in _vel_approx_vfdivs_vvsl() argument
// v1 = s1 * v0  — initial quotient estimate
61 v1 = _vel_vfmuls_vsvl(s1, v0, l); in _vel_approx_vfdivs_vvsl()
// v2 = v0 - s0*v1  (residual), then v0 = v1 + s1*v2  (one correction step)
62 v2 = _vel_vfnmsbs_vvsvl(v0, s0, v1, l); in _vel_approx_vfdivs_vvsl()
63 v0 = _vel_vfmads_vvsvl(v1, s1, v2, l); in _vel_approx_vfdivs_vvsl()
// Approximate scalar-over-vector double-precision divide: s0 / v0.
// Double precision needs more refinement passes than the float variants;
// s1 is defined on a missing line (presumably 1.0 — TODO confirm).
// Declarations and return are missing from this listing.
67 static inline __vr _vel_approx_vfdivd_vsvl(double s0, __vr v0, int l) { in _vel_approx_vfdivd_vsvl() argument
// v2 = reciprocal estimate of v0
69 v2 = _vel_vrcpd_vvl(v0, l); in _vel_approx_vfdivd_vsvl()
// first refinement: v3 = s1 - v0*v2; v2 = v2 + v2*v3
71 v3 = _vel_vfnmsbd_vsvvl(s1, v0, v2, l); in _vel_approx_vfdivd_vsvl()
72 v2 = _vel_vfmadd_vvvvl(v2, v2, v3, l); in _vel_approx_vfdivd_vsvl()
// second refinement of the reciprocal into v1
73 v1 = _vel_vfnmsbd_vsvvl(s1, v0, v2, l); in _vel_approx_vfdivd_vsvl()
74 v1 = _vel_vfmadd_vvvvl(v2, v2, v1, l); in _vel_approx_vfdivd_vsvl()
// integer add of 1 to the refined reciprocal's bit pattern — looks like a
// one-ULP adjustment of the estimate; rationale not visible here, TODO
// confirm against the original header's comments
75 v1 = _vel_vaddul_vsvl(1, v1, l); in _vel_approx_vfdivd_vsvl()
// third refinement of the reciprocal into v3
76 v3 = _vel_vfnmsbd_vsvvl(s1, v0, v1, l); in _vel_approx_vfdivd_vsvl()
77 v3 = _vel_vfmadd_vvvvl(v1, v1, v3, l); in _vel_approx_vfdivd_vsvl()
// quotient estimate v1 = s0 * (1/v0), residual v0 = s0 - v1*v0,
// final correction v0 = v1 + v3*residual (result presumably returned on a
// line not in view)
78 v1 = _vel_vfmuld_vsvl(s0, v3, l); in _vel_approx_vfdivd_vsvl()
79 v0 = _vel_vfnmsbd_vsvvl(s0, v1, v0, l); in _vel_approx_vfdivd_vsvl()
80 v0 = _vel_vfmadd_vvvvl(v1, v3, v0, l); in _vel_approx_vfdivd_vsvl()
// Approximate double-precision vector square root of v0.
// Visible structure: reciprocal-square-root estimate (vrsqrtdnex), then
// sqrt(v0) ~= v0 * rsqrt(v0), with two fused-multiply refinement passes.
// Scalars s0 and s1 are defined on lines missing from this listing
// (classic Newton rsqrt would use 1.0 and 0.5, but that is an inference —
// TODO confirm against the full header). Declarations and return missing.
84 static inline __vr _vel_approx_vfsqrtd_vvl(__vr v0, int l) { in _vel_approx_vfsqrtd_vvl() argument
// v2 = rsqrt estimate of v0; v1 = v0*v2 ~= sqrt(v0)
87 v2 = _vel_vrsqrtdnex_vvl(v0, l); in _vel_approx_vfsqrtd_vvl()
88 v1 = _vel_vfmuld_vvvl(v0, v2, l); in _vel_approx_vfsqrtd_vvl()
// first refinement: v3 = s0 - v1*v2, scaled by s1, folded back into v2
91 v3 = _vel_vfnmsbd_vsvvl(s0, v1, v2, l); in _vel_approx_vfsqrtd_vvl()
92 v3 = _vel_vfmuld_vsvl(s1, v3, l); in _vel_approx_vfsqrtd_vvl()
93 v2 = _vel_vfmadd_vvvvl(v2, v2, v3, l); in _vel_approx_vfsqrtd_vvl()
// recompute sqrt estimate and apply a second refinement; result in v0
94 v1 = _vel_vfmuld_vvvl(v0, v2, l); in _vel_approx_vfsqrtd_vvl()
95 v3 = _vel_vfnmsbd_vsvvl(s0, v1, v2, l); in _vel_approx_vfsqrtd_vvl()
96 v3 = _vel_vfmuld_vsvl(s1, v3, l); in _vel_approx_vfsqrtd_vvl()
97 v0 = _vel_vfmadd_vvvvl(v1, v1, v3, l); in _vel_approx_vfsqrtd_vvl()
// Approximate single-precision vector square root: widens v0 to double
// (vcvtds), runs the same rsqrt-estimate-and-refine sequence as
// _vel_approx_vfsqrtd_vvl, then narrows back to single (vcvtsd).
// Scalars s0/s1 and all v-register declarations are on lines missing from
// this listing, and the function's tail (return, closing brace) falls past
// the last visible line — this fragment is incomplete as shown.
101 static inline __vr _vel_approx_vfsqrts_vvl(__vr v0, int l) { in _vel_approx_vfsqrts_vvl() argument
// widen input to double precision
104 v0 = _vel_vcvtds_vvl(v0, l); in _vel_approx_vfsqrts_vvl()
// v2 = rsqrt estimate; v1 = v0*v2 ~= sqrt(v0)
105 v2 = _vel_vrsqrtdnex_vvl(v0, l); in _vel_approx_vfsqrts_vvl()
106 v1 = _vel_vfmuld_vvvl(v0, v2, l); in _vel_approx_vfsqrts_vvl()
// first refinement pass on the rsqrt estimate
109 v3 = _vel_vfnmsbd_vsvvl(s0, v1, v2, l); in _vel_approx_vfsqrts_vvl()
110 v3 = _vel_vfmuld_vsvl(s1, v3, l); in _vel_approx_vfsqrts_vvl()
111 v2 = _vel_vfmadd_vvvvl(v2, v2, v3, l); in _vel_approx_vfsqrts_vvl()
// second refinement pass on the sqrt estimate
112 v1 = _vel_vfmuld_vvvl(v0, v2, l); in _vel_approx_vfsqrts_vvl()
113 v3 = _vel_vfnmsbd_vsvvl(s0, v1, v2, l); in _vel_approx_vfsqrts_vvl()
114 v3 = _vel_vfmuld_vsvl(s1, v3, l); in _vel_approx_vfsqrts_vvl()
115 v0 = _vel_vfmadd_vvvvl(v1, v1, v3, l); in _vel_approx_vfsqrts_vvl()
// narrow result back to single precision
116 v0 = _vel_vcvtsd_vvl(v0, l); in _vel_approx_vfsqrts_vvl()