/* mpn_addmul_1 -- multiply the N long limb vector pointed to by UP by VL,
   add the N least significant limbs of the product to the limb vector
   pointed to by RP.  Return the most significant limb of the product,
   adjusted for carry-out from the addition.

Copyright 1992, 1993, 1994, 1996, 2000, 2002, 2004 Free Software Foundation,
Inc.

This file is part of the GNU MP Library.

The GNU MP Library is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 3 of the License, or (at your
option) any later version.

The GNU MP Library is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public
License for more details.

You should have received a copy of the GNU Lesser General Public License
along with the GNU MP Library.  If not, see http://www.gnu.org/licenses/.  */
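
/* Usage sketch: a minimal caller, assuming hypothetical limb arrays acc[]
   and src[] of at least n limbs, with acc already initialized.  The call
   adds src * vl into the low n limbs of acc and returns the high limb that
   does not fit:

       mp_limb_t high = mpn_addmul_1 (acc, src, n, vl);

   Schoolbook multiplication (as in mpn_mul_basecase) calls this once per
   limb of the multiplier, storing the returned high limb into the next
   result limb.  */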

#include "gmp.h"
#include "gmp-impl.h"
#include "longlong.h"


#if GMP_NAIL_BITS == 0

mp_limb_t
mpn_addmul_1 (mp_ptr rp, mp_srcptr up, mp_size_t n, mp_limb_t vl)
{
  mp_limb_t ul, cl, hpl, lpl, rl;

  ASSERT (n >= 1);
  ASSERT (MPN_SAME_OR_SEPARATE_P (rp, up, n));

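  /* Plain-limb (no nail) loop: for each source limb, form the double-limb
     product hpl:lpl = ul * vl, then add the incoming carry and the existing
     rp limb into lpl, folding the carry-outs of those additions together
     with hpl into cl, the carry passed to the next iteration.  */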
  cl = 0;
  do
    {
      ul = *up++;
      umul_ppmm (hpl, lpl, ul, vl);

      lpl += cl;
      cl = (lpl < cl) + hpl;

      rl = *rp;
      lpl = rl + lpl;
      cl += lpl < rl;
      *rp++ = lpl;
    }
  while (--n != 0);

  return cl;
}

#endif

#if GMP_NAIL_BITS == 1

mp_limb_t
mpn_addmul_1 (mp_ptr rp, mp_srcptr up, mp_size_t n, mp_limb_t vl)
{
  mp_limb_t shifted_vl, ul, rl, lpl, hpl, prev_hpl, cl, xl, c1, c2, c3;

  ASSERT (n >= 1);
  ASSERT (MPN_SAME_OR_SEPARATE_P (rp, up, n));
  ASSERT_MPN (rp, n);
  ASSERT_MPN (up, n);
  ASSERT_LIMB (vl);

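  /* One nail bit: multiply by vl pre-shifted to the top of the limb so that
     umul_ppmm splits the product at the numb boundary; the high limb hpl is
     then already a proper GMP_NUMB_BITS value and lpl only needs a right
     shift.  ADDC_LIMB yields a sum limb and a separate carry bit, so the
     three carries c1, c2, c3 can simply be summed into the next carry.  */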
  shifted_vl = vl << GMP_NAIL_BITS;
  cl = 0;
  prev_hpl = 0;
  do
    {
      ul = *up++;
      rl = *rp;
      umul_ppmm (hpl, lpl, ul, shifted_vl);
      lpl >>= GMP_NAIL_BITS;
      ADDC_LIMB (c1, xl, prev_hpl, lpl);
      ADDC_LIMB (c2, xl, xl, rl);
      ADDC_LIMB (c3, xl, xl, cl);
      cl = c1 + c2 + c3;
      *rp++ = xl;
      prev_hpl = hpl;
    }
  while (--n != 0);

  return prev_hpl + cl;
}

#endif

#if GMP_NAIL_BITS >= 2

mp_limb_t
mpn_addmul_1 (mp_ptr rp, mp_srcptr up, mp_size_t n, mp_limb_t vl)
{
  mp_limb_t shifted_vl, ul, rl, lpl, hpl, prev_hpl, xw, cl, xl;

  ASSERT (n >= 1);
  ASSERT (MPN_SAME_OR_SEPARATE_P (rp, up, n));
  ASSERT_MPN (rp, n);
  ASSERT_MPN (up, n);
  ASSERT_LIMB (vl);

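  /* Two or more nail bits: the same pre-shifted multiply as above, but with
     at least two spare bits per limb the four GMP_NUMB_BITS-sized terms
     prev_hpl + lpl + rl + cl can be added in a single limb without
     overflow; the next carry is just the part of xw above GMP_NUMB_BITS.  */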
  shifted_vl = vl << GMP_NAIL_BITS;
  cl = 0;
  prev_hpl = 0;
  do
    {
      ul = *up++;
      rl = *rp;
      umul_ppmm (hpl, lpl, ul, shifted_vl);
      lpl >>= GMP_NAIL_BITS;
      xw = prev_hpl + lpl + rl + cl;
      cl = xw >> GMP_NUMB_BITS;
      xl = xw & GMP_NUMB_MASK;
      *rp++ = xl;
      prev_hpl = hpl;
    }
  while (--n != 0);

  return prev_hpl + cl;
}

#endif