xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/tree-affine.c (revision d90047b5d07facf36e6c01dcc0bded8997ce9cc2)
/* Operations with affine combinations of trees.
   Copyright (C) 2005-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pretty-print.h"
#include "fold-const.h"
#include "tree-affine.h"
#include "gimplify.h"
#include "dumpfile.h"
#include "cfgexpand.h"

/* Extends CST as appropriate for an affine combination of type TYPE.  */

widest_int
wide_int_ext_for_comb (const widest_int &cst, tree type)
{
  return wi::sext (cst, TYPE_PRECISION (type));
}

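/* For example, with an 8-bit TYPE the value 255 sign-extends to -1.
   Offsets and coefficients are kept in this canonical sign-extended
   form, so all arithmetic on them wraps in the precision of the
   combination's type.  */
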
/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  int i;
  comb->type = type;
  comb->offset = 0;
  comb->n = 0;
  for (i = 0; i < MAX_AFF_ELTS; i++)
    comb->elts[i].coef = 0;
  comb->rest = NULL_TREE;
}

/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, const widest_int &cst)
{
  aff_combination_zero (comb, type);
  comb->offset = wide_int_ext_for_comb (cst, comb->type);
}

/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = 1;
}

/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
{
  unsigned i, j;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb->type);
  if (scale == 1)
    return;

  if (scale == 0)
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb->type);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      widest_int new_coef
	= wide_int_ext_for_comb (scale * comb->elts[i].coef, comb->type);
      /* A coefficient may become zero due to overflow.  Remove the zero
	 elements.  */
      if (new_coef == 0)
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
	type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
	{
	  comb->elts[comb->n].coef = scale;
	  comb->elts[comb->n].val = comb->rest;
	  comb->rest = NULL_TREE;
	  comb->n++;
	}
      else
	comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
				  wide_int_to_tree (type, scale));
    }
}

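/* For example, scaling the combination 3 + 2*x by 3 yields 9 + 6*x.
   In an 8-bit type, scaling 128*x by 2 makes the coefficient
   256 == 0 (mod 2^8), so the element x is dropped entirely.  */
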
/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb->type);
  if (scale == 0)
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
	widest_int new_coef
	  = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb->type);
	if (new_coef != 0)
	  {
	    comb->elts[i].coef = new_coef;
	    return;
	  }

	comb->n--;
	comb->elts[i] = comb->elts[comb->n];

	if (comb->rest)
	  {
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->elts[comb->n].coef = 1;
	    comb->elts[comb->n].val = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
		       fold_convert (type, elt),
		       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
			      elt);
  else
    comb->rest = elt;
}

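/* For example, adding x with scale 3 to the combination 1 + 2*x yields
   1 + 5*x, and then adding x with scale -5 cancels the element, leaving
   just 1.  Once all MAX_AFF_ELTS slots are taken, further elements are
   folded into COMB->rest as an explicit tree.  */
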
/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, const widest_int &cst)
{
  c->offset = wide_int_ext_for_comb (c->offset + cst, c->type);
}

/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, 1);
}

/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = wide_int_ext_for_comb (comb->offset, comb->type);
  for (i = j = 0; i < comb->n; i++)
    {
      if (comb->elts[i].coef == 0)
	continue;
      comb->elts[j].coef = comb->elts[i].coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

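/* Note that a widening conversion has to round-trip through the tree
   representation, since the combination only carries the low
   TYPE_PRECISION (comb->type) bits of each value; conversions that
   keep or narrow the precision are done in place.  */
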
/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  machine_mode mode;
  int unsignedp, reversep, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, wi::to_widest (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &reversep,
				  &volatilep);
      if (bitpos % BITS_PER_UNIT != 0)
	break;
      aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
      if (TREE_CODE (core) == MEM_REF)
	{
	  aff_combination_add_cst (comb, wi::to_widest (TREE_OPERAND (core, 1)));
	  core = TREE_OPERAND (core, 0);
	}
      else
	core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, 1);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
				 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  aff_combination_elt (comb, type, expr);
	  return;
	}
      else
	aff_combination_elt (comb, type,
			     build2 (MEM_REF, TREE_TYPE (expr),
				     TREE_OPERAND (expr, 0),
				     build_int_cst
				      (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}

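/* For example, the expression a + 2*b - 3 decomposes into the
   combination with offset -3 and elements 1*a and 2*b, while an
   expression matching none of the cases above becomes a single
   element with coefficient 1.  */
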
/* Creates EXPR + ELT * SCALE in TYPE.  EXPR may be NULL, in which case
   just ELT * SCALE is built.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in)
{
  enum tree_code code;

  widest_int scale = wide_int_ext_for_comb (scale_in, type);

  elt = fold_convert (type, elt);
  if (scale == 1)
    {
      if (!expr)
	return elt;

      return fold_build2 (PLUS_EXPR, type, expr, elt);
    }

  if (scale == -1)
    {
      if (!expr)
	return fold_build1 (NEGATE_EXPR, type, elt);

      return fold_build2 (MINUS_EXPR, type, expr, elt);
    }

  if (!expr)
    return fold_build2 (MULT_EXPR, type, elt, wide_int_to_tree (type, scale));

  if (wi::neg_p (scale))
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type, elt, wide_int_to_tree (type, scale));
  return fold_build2 (code, type, expr, elt);
}

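/* The special cases for scales 1 and -1 above avoid building a
   MULT_EXPR, so that e.g. a scale of -1 yields EXPR - ELT rather
   than EXPR + ELT * -1.  */
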
/* Builds a tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type, base = NULL_TREE, expr = NULL_TREE;
  unsigned i;
  widest_int off, sgn;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  i = 0;
  if (POINTER_TYPE_P (type))
    {
      type = sizetype;
      if (comb->n > 0 && comb->elts[0].coef == 1
	  && POINTER_TYPE_P (TREE_TYPE (comb->elts[0].val)))
	{
	  base = comb->elts[0].val;
	  ++i;
	}
    }

  for (; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (wi::neg_p (comb->offset))
    {
      off = -comb->offset;
      sgn = -1;
    }
  else
    {
      off = comb->offset;
      sgn = 1;
    }
  expr = add_elt_to_tree (expr, type, wide_int_to_tree (type, off), sgn);

  if (base)
    return fold_build_pointer_plus (base, expr);
  else
    return fold_convert (comb->type, expr);
}

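/* For a pointer-typed combination such as { &a + 4 }, the leading
   pointer element with coefficient 1 is kept as the base and the
   remainder is rebuilt in sizetype, yielding a POINTER_PLUS_EXPR
   of &a and 4.  */
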
/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}

/* Removes the M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Adds C * COEF * VAL to R.  VAL may be NULL, in which case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
			     aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val, coef * c->offset);
  else
    aff_combination_add_cst (r, coef * c->offset);
}

/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, 1, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}

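/* For example, multiplying 1 + x by 2 + y distributes to
   x*y + y + 2*x + 2; the product of two non-constant elements becomes
   a single new element (or ends up in rest), since the result is no
   longer affine in both operands.  */
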
/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
	if (idx)
	  *idx = i;

	return &comb->elts[i];
      }

  return NULL;
}

/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};

/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
			hash_map<tree, name_expansion *> **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple *def;
  widest_int scale;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (CONVERT_EXPR_P (e)
	  && (TYPE_PRECISION (type)
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
	name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
	continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
	continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
	  && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	  && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
	      || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
	continue;

      /* We do not know whether the reference retains its value at the
	 place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
	continue;

      if (!*cache)
	*cache = new hash_map<tree, name_expansion *>;
      name_expansion **slot = &(*cache)->get_or_insert (e);
      exp = *slot;

      if (!exp)
	{
	  exp = XNEW (struct name_expansion);
	  exp->in_progress = 1;
	  *slot = exp;
	  /* In principle this is a generally valid folding, but
	     it is not unconditionally an optimization, so do it
	     here and not in fold_unary.  */
	  /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
	     than the type of X and overflow for the type of X is
	     undefined.  */
	  if (e != name
	      && INTEGRAL_TYPE_P (type)
	      && INTEGRAL_TYPE_P (TREE_TYPE (name))
	      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
	      && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (name))
	      && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
	      && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
	    rhs = fold_build2 (code, type,
			       fold_convert (type, gimple_assign_rhs1 (def)),
			       fold_convert (type, gimple_assign_rhs2 (def)));
	  else
	    {
	      rhs = gimple_assign_rhs_to_tree (def);
	      if (e != name)
		rhs = fold_convert (type, rhs);
	    }
	  tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
	  exp->expansion = current;
	  exp->in_progress = 0;
	}
      else
	{
	  /* Since we follow the definitions in the SSA form, we should not
	     enter a cycle unless we pass through a phi node.  */
	  gcc_assert (!exp->in_progress);
	  current = exp->expansion;
	}

      /* Accumulate the new terms to TO_ADD, so that we do not modify
	 COMB while traversing it; include the term -coef * E, to remove
	 it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}

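/* For example, if COMB contains 2*a1 and a1 is defined by
   a1 = a0 + a0, the expansion of a1 is the combination 2*a0; TO_ADD
   then accumulates 4*a0 - 2*a1, and adding it to COMB replaces the
   original element, giving 4*a0.  */
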
/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
				hash_map<tree, name_expansion *> **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}

/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   hash_map::traverse.  */

bool
free_name_expansion (tree const &, name_expansion **value, void *)
{
  free (*value);
  return true;
}

/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (hash_map<tree, name_expansion *> **cache)
{
  if (!*cache)
    return;

  (*cache)->traverse<void *, free_name_expansion> (NULL);
  delete (*cache);
  *cache = NULL;
}

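/* A typical usage pattern for the expansion interface, sketched (the
   cache starts out NULL and is shared across queries):

     hash_map<tree, name_expansion *> *cache = NULL;
     aff_tree comb;
     tree_to_aff_combination_expand (expr, type, &comb, &cache);
     ...
     free_affine_expand_cache (&cache);  */
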
/* If there is no constant CST such that VAL = CST * DIV, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST with *MULT,
   and returns false if they differ.  Finally, if neither of these two
   cases occurs, true is returned, CST is stored to *MULT and *MULT_SET
   is set to true.  */

static bool
wide_int_constant_multiple_p (const widest_int &val, const widest_int &div,
			      bool *mult_set, widest_int *mult)
{
  widest_int rem, cst;

  if (val == 0)
    {
      if (*mult_set && *mult != 0)
	return false;
      *mult_set = true;
      *mult = 0;
      return true;
    }

  if (div == 0)
    return false;

  if (!wi::multiple_of_p (val, div, SIGNED, &cst))
    return false;

  if (*mult_set && *mult != cst)
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}

/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
				     widest_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && val->offset == 0)
    {
      *mult = 0;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!wide_int_constant_multiple_p (val->offset, div->offset,
				     &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
	      = aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
	return false;
      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
					 &mult_set, mult))
	return false;
    }

  gcc_assert (mult_set);
  return true;
}

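/* For example, VAL = 9 + 6*x and DIV = 3 + 2*x succeed with *MULT = 3,
   whereas VAL = 9 + 6*x and DIV = 3 + x fail: the offsets require
   CST == 3 but the coefficients require CST == 6.  */
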
/* Prints the affine combination VAL to FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  signop sgn = TYPE_SIGN (val->type);
  if (POINTER_TYPE_P (val->type))
    sgn = SIGNED;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  print_dec (val->offset, file, sgn);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
	{
	  fprintf (file, "    [%d] = ", i);
	  print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

	  fprintf (file, " * ");
	  print_dec (val->elts[i].coef, file, sgn);
	  if (i != val->n - 1)
	    fprintf (file, ", \n");
	}
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}

/* Prints the affine combination VAL to the standard error; used for
   debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}

/* Computes the address of the reference REF and stores it to ADDR.  The
   size of the accessed location is stored to SIZE.  Returns the ultimate
   containing object to which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, widest_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  machine_mode mode;
  int uns, rev, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
				   &uns, &rev, &vol);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype, bitpos / BITS_PER_UNIT);
  aff_combination_add (addr, &tmp);

  *size = (bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT;

  return base;
}

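/* For example, for a COMPONENT_REF s.f whose field sits at a constant
   byte offset of 8, ADDR becomes the sizetype combination &s + 8 and
   *SIZE the size of the field in bytes, rounded up.  */
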
/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, const widest_int &size1,
			   const widest_int &size2)
{
  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  if (wi::neg_p (diff->offset))
    {
      /* The second object is before the first one; we succeed if the last
	 element of the second object is before the start of the first one.  */
      return wi::neg_p (diff->offset + size2 - 1);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return size1 <= diff->offset;
    }
}

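/* For example, with SIZE1 = 4 and DIFF = { offset: 4 } the regions
   [0, 4) and [4, 4 + SIZE2) are merely adjacent, so the function
   returns true; with DIFF = { offset: 3 } the regions overlap and it
   returns false.  */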
909