/* mpf_sub -- Subtract two floats.

Copyright 1993-1996, 1999-2002, 2004, 2005, 2011, 2014 Free Software Foundation, Inc.

This file is part of the GNU MP Library.

The GNU MP Library is free software; you can redistribute it and/or modify
it under the terms of either:

  * the GNU Lesser General Public License as published by the Free
    Software Foundation; either version 3 of the License, or (at your
    option) any later version.

or

  * the GNU General Public License as published by the Free Software
    Foundation; either version 2 of the License, or (at your option) any
    later version.

or both in parallel, as here.

The GNU MP Library is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received copies of the GNU General Public License and the
GNU Lesser General Public License along with the GNU MP Library.  If not,
see https://www.gnu.org/licenses/.  */

#include "gmp-impl.h"

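/* mpf_sub sets R to U - V, working with PREC (R) + 1 limbs.  Zero operands
   and operands of opposite sign are dispatched early (the latter by
   delegating to mpf_add); otherwise the code splits on the exponent
   difference: a cancellation-aware path when the exponents differ by at
   most one limb, and a general aligned subtraction at working precision
   otherwise.  */
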
void
mpf_sub (mpf_ptr r, mpf_srcptr u, mpf_srcptr v)
{
  mp_srcptr up, vp;
  mp_ptr rp, tp;
  mp_size_t usize, vsize, rsize;
  mp_size_t prec;
  mp_exp_t exp;
  mp_size_t ediff;
  int negate;
  TMP_DECL;

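  /* SIZ is the signed limb count of an mpf operand: zero means the value is
     zero and a negative count marks a negative value, so sign and magnitude
     are captured in a single read.  */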
  usize = SIZ (u);
  vsize = SIZ (v);

  /* Handle special cases that don't work in generic code below.  */
  if (usize == 0)
    {
      mpf_neg (r, v);
      return;
    }
  if (vsize == 0)
    {
      if (r != u)
        mpf_set (r, u);
      return;
    }

  /* If signs of U and V are different, perform addition.  */
  if ((usize ^ vsize) < 0)
    {
      __mpf_struct v_negated;
      v_negated._mp_size = -vsize;
      v_negated._mp_exp = EXP (v);
      v_negated._mp_d = PTR (v);
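      /* v_negated is a shallow, stack-allocated view of V: only the sign of
         its size field is flipped and the limb array is shared through
         PTR (v), so no limbs are copied before delegating to mpf_add.  */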
      mpf_add (r, u, &v_negated);
      return;
    }

  TMP_MARK;

  /* Signs are now known to be the same.  */
  negate = usize < 0;

  /* Make U be the operand with the larger exponent.  */
  if (EXP (u) < EXP (v))
    {
      mpf_srcptr t;
      t = u; u = v; v = t;
      negate ^= 1;
      usize = SIZ (u);
      vsize = SIZ (v);
    }
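  /* Swapping the operands negates the difference, hence the toggle of negate
     above; usize and vsize are re-read because u and v now refer to the
     other operand.  */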

  usize = ABS (usize);
  vsize = ABS (vsize);
  up = PTR (u);
  vp = PTR (v);
  rp = PTR (r);
  prec = PREC (r) + 1;
  exp = EXP (u);
  ediff = exp - EXP (v);
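  /* From here on, prec is the working precision in limbs (one limb more than
     the destination's precision field), exp is the tentative result exponent
     in limb units taken from U (the operand with the larger exponent), and
     ediff >= 0 is the alignment shift between U and V, also in limbs.  */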

  /* If ediff is 0 or 1, we might have a situation where the operands are
     extremely close.  We need to scan the operands from the most significant
     end and ignore the initial parts that are equal.  */
  if (ediff <= 1)
    {
      if (ediff == 0)
        {
          /* Skip leading limbs in U and V that are equal.  */
          /* This loop normally exits immediately.  Optimize for that.  */
          while (up[usize - 1] == vp[vsize - 1])
            {
              usize--;
              vsize--;
              exp--;

              if (usize == 0)
                {
                  /* U cancels the high limbs of V; the result is the
                     rest of V.  */
                  negate ^= 1;
                cancellation:
                  /* Strip high zero limbs before truncating to prec.  */
                  while (vsize != 0 && vp[vsize - 1] == 0)
                    {
                      vsize--;
                      exp--;
                    }
                  if (vsize > prec)
                    {
                      vp += vsize - prec;
                      vsize = prec;
                    }
                  MPN_COPY_INCR (rp, vp, vsize);
                  rsize = vsize;
                  goto done;
                }
              if (vsize == 0)
                {
                  vp = up;
                  vsize = usize;
                  goto cancellation;
                }
            }

          if (up[usize - 1] < vp[vsize - 1])
            {
              /* For simplicity, swap U and V.  Since the loop above exits
                 only when up[usize - 1] and vp[vsize - 1] differ, this test
                 catches every case where U is smaller than V.  */
              MPN_SRCPTR_SWAP (up, usize, vp, vsize);
              negate ^= 1;
              /* Negating ediff is not necessary since it is 0.  */
            }

          /* Check for
             x+1 00000000 ...
              x  ffffffff ... */
          if (up[usize - 1] != vp[vsize - 1] + 1)
            goto general_case;
          usize--;
          vsize--;
          exp--;
        }
      else /* ediff == 1 */
        {
          /* Check for
             1 00000000 ...
             0 ffffffff ... */

          if (up[usize - 1] != 1 || vp[vsize - 1] != GMP_NUMB_MAX
              || (usize >= 2 && up[usize - 2] != 0))
            goto general_case;

          usize--;
          exp--;
        }

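      /* Whichever branch was taken, the high limbs consumed so far leave U
         exactly one unit ahead of V at the current top position, so the
         remaining difference carries an implicit leading 1 limb.  The loop
         below moves that unit down through 0/GMP_NUMB_MAX limb pairs, and
         the cy_limb test after the mpn calls decides whether it survives.  */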
      /* Skip sequences of 00000000/ffffffff */
      while (vsize != 0 && usize != 0 && up[usize - 1] == 0
             && vp[vsize - 1] == GMP_NUMB_MAX)
        {
          usize--;
          vsize--;
          exp--;
        }

      if (usize == 0)
        {
          while (vsize != 0 && vp[vsize - 1] == GMP_NUMB_MAX)
            {
              vsize--;
              exp--;
            }
        }
      else if (usize > prec - 1)
        {
          up += usize - (prec - 1);
          usize = prec - 1;
        }
      if (vsize > prec - 1)
        {
          vp += vsize - (prec - 1);
          vsize = prec - 1;
        }

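      /* Both operands now hold at most prec - 1 limbs, so the prec-limb
         temporary leaves room for the implicit leading 1 limb appended below
         when the subtraction produces no borrow.  */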
      tp = TMP_ALLOC_LIMBS (prec);
      {
        mp_limb_t cy_limb;
        if (vsize == 0)
          {
            MPN_COPY (tp, up, usize);
            tp[usize] = 1;
            rsize = usize + 1;
            exp++;
            goto normalized;
          }
        if (usize == 0)
          {
            cy_limb = mpn_neg (tp, vp, vsize);
            rsize = vsize;
          }
        else if (usize >= vsize)
          {
            /* uuuu     */
            /* vv       */
            mp_size_t size;
            size = usize - vsize;
            MPN_COPY (tp, up, size);
            cy_limb = mpn_sub_n (tp + size, up + size, vp, vsize);
            rsize = usize;
          }
        else /* (usize < vsize) */
          {
            /* uuuu     */
            /* vvvvvvv  */
            mp_size_t size;
            size = vsize - usize;
            cy_limb = mpn_neg (tp, vp, size);
            cy_limb = mpn_sub_nc (tp + size, up, vp + size, usize, cy_limb);
            rsize = vsize;
          }
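        /* cy_limb is the borrow out of the top of the limb subtraction: no
           borrow means the implicit leading 1 limb survives and is appended,
           restoring the exponent, while a borrow consumes it and the plain
           result only needs normalizing.  */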
        if (cy_limb == 0)
          {
            tp[rsize] = 1;
            rsize++;
            exp++;
            goto normalized;
          }
        goto normalize;
      }
    }

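/* General case: V is aligned ediff limbs below U.  Both operands are
   truncated to the working precision; unless V lies entirely below the
   retained precision, the aligned difference is computed into a temporary
   and leading zero limbs are stripped before it is copied back to R.  */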
general_case:
  /* If U extends beyond PREC, ignore the part that does.  */
  if (usize > prec)
    {
      up += usize - prec;
      usize = prec;
    }

  /* If V extends beyond PREC, ignore the part that does.
     Note that this may make vsize negative.  */
  if (vsize + ediff > prec)
    {
      vp += vsize + ediff - prec;
      vsize = prec - ediff;
    }

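  /* A negative vsize at this point means all of V lies below the retained
     precision; that implies ediff > prec, so it is handled by the branch
     just below, where V is ignored entirely.  */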
  if (ediff >= prec)
    {
      /* V completely cancelled.  */
      if (rp != up)
        MPN_COPY (rp, up, usize);
      rsize = usize;
    }
  else
    {
      /* Allocate temp space for the result.  Allocate
         just vsize + ediff later???  */
      tp = TMP_ALLOC_LIMBS (prec);

      /* Locate the least significant non-zero limb in (the needed
         parts of) U and V, to simplify the code below.  */
      for (;;)
        {
          if (vsize == 0)
            {
              MPN_COPY (rp, up, usize);
              rsize = usize;
              goto done;
            }
          if (vp[0] != 0)
            break;
          vp++, vsize--;
        }
      for (;;)
        {
          if (usize == 0)
            {
              MPN_COPY (rp, vp, vsize);
              rsize = vsize;
              negate ^= 1;
              goto done;
            }
          if (up[0] != 0)
            break;
          up++, usize--;
        }

      /* uuuu     |  uuuu     |  uuuu     |  uuuu     |  uuuu    */
      /* vvvvvvv  |  vv       |    vvvvv  |    v      |       vv */

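      /* Low zero limbs were stripped from both operands above, so each
         mpn_neg below acts on a nonzero block and must return a borrow,
         which is what the ASSERT_CARRY annotations check.  */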
      if (usize > ediff)
        {
          /* U and V partially overlap.  */
          if (ediff == 0)
            {
              /* Have to compare the leading limbs of u and v
                 to determine whether to compute u - v or v - u.  */
              if (usize >= vsize)
                {
                  /* uuuu     */
                  /* vv       */
                  mp_size_t size;
                  size = usize - vsize;
                  MPN_COPY (tp, up, size);
                  mpn_sub_n (tp + size, up + size, vp, vsize);
                  rsize = usize;
                }
              else /* (usize < vsize) */
                {
                  /* uuuu     */
                  /* vvvvvvv  */
                  mp_size_t size;
                  size = vsize - usize;
                  ASSERT_CARRY (mpn_neg (tp, vp, size));
                  mpn_sub_nc (tp + size, up, vp + size, usize, CNST_LIMB (1));
                  rsize = vsize;
                }
            }
          else
            {
              if (vsize + ediff <= usize)
                {
                  /* uuuu     */
                  /*   v      */
                  mp_size_t size;
                  size = usize - ediff - vsize;
                  MPN_COPY (tp, up, size);
                  mpn_sub (tp + size, up + size, usize - size, vp, vsize);
                  rsize = usize;
                }
              else
                {
                  /* uuuu     */
                  /*   vvvvv  */
                  mp_size_t size;
                  rsize = vsize + ediff;
                  size = rsize - usize;
                  ASSERT_CARRY (mpn_neg (tp, vp, size));
                  mpn_sub (tp + size, up, usize, vp + size, usize - ediff);
                  /* Should we use sub_nc then sub_1? */
                  MPN_DECR_U (tp + size, usize, CNST_LIMB (1));
                }
            }
        }
      else
        {
          /* uuuu     */
          /*      vv  */
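          /* The gap limbs between the negated V and U are filled with
             GMP_NUMB_MAX: they carry the borrow from negating {vp, vsize}
             up to U, where mpn_sub_1 deducts it from U's low limb; since
             up[0] is nonzero, no further borrow escapes.  */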
          mp_size_t size, i;
          size = vsize + ediff - usize;
          ASSERT_CARRY (mpn_neg (tp, vp, vsize));
          for (i = vsize; i < size; i++)
            tp[i] = GMP_NUMB_MAX;
          mpn_sub_1 (tp + size, up, usize, (mp_limb_t) 1);
          rsize = size + usize;
        }

    normalize:
      /* Full normalize.  Optimize later.  */
      while (rsize != 0 && tp[rsize - 1] == 0)
        {
          rsize--;
          exp--;
        }
    normalized:
      MPN_COPY (rp, tp, rsize);
    }

 done:
  TMP_FREE;
  if (rsize == 0) {
    SIZ (r) = 0;
    EXP (r) = 0;
  } else {
    SIZ (r) = negate ? -rsize : rsize;
    EXP (r) = exp;
  }
}
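
/* Illustrative caller-side sketch (an assumption about typical use, not part
   of this translation unit): subtracting two nearly equal values exercises
   the ediff <= 1 cancellation path above.  The precision and literals are
   arbitrary.

     mpf_t a, b, d;
     mpf_init2 (a, 256);
     mpf_init2 (b, 256);
     mpf_init2 (d, 256);
     mpf_set_str (a, "1.0000000001", 10);
     mpf_set_str (b, "1.0000000000", 10);
     mpf_sub (d, a, b);
     gmp_printf ("%.10Fe\n", d);
     mpf_clear (a);
     mpf_clear (b);
     mpf_clear (d);
*/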