xref: /netbsd-src/external/gpl3/gcc/dist/gcc/ipa-modref-tree.h (revision b1e838363e3c6fc78a55519254d99869742dd33c)
1 /* Data structure for the modref pass.
2    Copyright (C) 2020-2022 Free Software Foundation, Inc.
3    Contributed by David Cepelik and Jan Hubicka
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
/* modref_tree represents a decision tree that can be used by the alias
   analysis oracle to determine whether a given memory access can be affected
   by a function call.  For every function we collect two trees, one for loads
   and the other for stores.  The tree consists of the following levels:

   1) Base: this level represents the base alias set of the access and refers
      to sons (ref nodes).  The flag all_refs means that all possible
      references are aliasing.

      Because for LTO streaming we need to stream types rather than alias sets
      modref_base_node is implemented as a template.
   2) Ref: this level represents the ref alias set and links to accesses
      unless the all_refs flag is set.
      Again ref is a template to allow LTO streaming.
   3) Access: this level represents info about individual accesses.  Presently
      we record whether the access is through a dereference of a function
      parameter and if so we record the access range.
*/
39 
40 #ifndef GCC_MODREF_TREE_H
41 #define GCC_MODREF_TREE_H
42 
43 struct ipa_modref_summary;
44 
/* Parm indexes 0 and greater are normal parms.
   Some negative values have special meaning.  */
enum modref_special_parms {
  /* Base of the access is not known to be any particular parameter.  */
  MODREF_UNKNOWN_PARM = -1,
  /* Base is the static chain of a nested function.  */
  MODREF_STATIC_CHAIN_PARM = -2,
  /* Base is the return slot of the call.  */
  MODREF_RETSLOT_PARM = -3,
  /* Used for bases that point to memory that escapes from function.  */
  MODREF_GLOBAL_MEMORY_PARM = -4,
  /* Used in modref_parm_map to take references which can be removed
     from the summary during summary update since they now point to local
     memory.  */
  MODREF_LOCAL_MEMORY_PARM = -5
};
58 
/* Modref record accesses relative to function parameters.
   This is an entry for a single access specifying its base and access range.

   Accesses can be collected to boundedly sized arrays using
   modref_access_node::insert.  */
struct GTY(()) modref_access_node
{
  /* Access range information (in bits).  */
  poly_int64 offset;
  poly_int64 size;
  poly_int64 max_size;

  /* Offset from parameter pointer to the base of the access (in bytes).  */
  poly_int64 parm_offset;

  /* Index of parameter which specifies the base of access.  -1 if base is
     not a function parameter (see enum modref_special_parms for other
     negative values with special meaning).  */
  int parm_index;
  /* True if PARM_OFFSET above is meaningful.  */
  bool parm_offset_known;
  /* Number of times interval was extended during dataflow.
     This has to be limited in order to keep dataflow finite.  */
  unsigned char adjustments;

  /* Return true if access node holds some useful info.  */
  bool useful_p () const
    {
      return parm_index != MODREF_UNKNOWN_PARM;
    }
  /* Return true if access can be used to determine a kill: the base must be
     a known offset from a real parameter or the static chain and the access
     must have an exactly known, positive size.  */
  bool useful_for_kill_p () const
    {
      return parm_offset_known && parm_index != MODREF_UNKNOWN_PARM
	     && parm_index != MODREF_GLOBAL_MEMORY_PARM
	     && parm_index != MODREF_RETSLOT_PARM && known_size_p (size)
	     && known_eq (max_size, size)
	     && known_gt (size, 0);
    }
  /* Dump range to debug OUT.  */
  void dump (FILE *out);
  /* Return true if both accesses are the same.  */
  bool operator == (modref_access_node &a) const;
  /* Return true if range info is useful.  */
  bool range_info_useful_p () const;
  /* Return tree corresponding to parameter of the range in STMT.  */
  tree get_call_arg (const gcall *stmt) const;
  /* Build ao_ref corresponding to the access and return true if successful.  */
  bool get_ao_ref (const gcall *stmt, class ao_ref *ref) const;
  /* Stream access to OB.  */
  void stream_out (struct output_block *ob) const;
  /* Stream access in from IB.  */
  static modref_access_node stream_in (struct lto_input_block *ib);
  /* Insert A into vector ACCESSES.  Limit size of vector to MAX_ACCESSES and
     if RECORD_ADJUSTMENT is true keep track of adjustment counts.
     Return 0 if nothing changed, 1 if insertion succeeded and -1 if
     failed.  */
  static int insert (vec <modref_access_node, va_gc> *&accesses,
		     modref_access_node a, size_t max_accesses,
		     bool record_adjustments);
  /* Same as insert but for kills where we are conservative the other way
     around: if information is lost, the kill is lost.  */
  static bool insert_kill (vec<modref_access_node> &kills,
			   modref_access_node &a, bool record_adjustments);
private:
  /* Return true if this access fully covers the other one.  */
  bool contains (const modref_access_node &) const;
  bool contains_for_kills (const modref_access_node &) const;
  /* Widen this access to also cover the given range.  */
  void update (poly_int64, poly_int64, poly_int64, poly_int64, bool);
  bool update_for_kills (poly_int64, poly_int64, poly_int64,
			 poly_int64, poly_int64, bool);
  /* Combine this access with another; return true if changed.  */
  bool merge (const modref_access_node &, bool);
  bool merge_for_kills (const modref_access_node &, bool);
  /* Heuristic used by try_merge_with to pick which pair of accesses
     to merge first.  */
  static bool closer_pair_p (const modref_access_node &,
			     const modref_access_node &,
			     const modref_access_node &,
			     const modref_access_node &);
  void forced_merge (const modref_access_node &, bool);
  void update2 (poly_int64, poly_int64, poly_int64, poly_int64,
		poly_int64, poly_int64, poly_int64, bool);
  bool combined_offsets (const modref_access_node &,
			 poly_int64 *, poly_int64 *, poly_int64 *) const;
  static void try_merge_with (vec <modref_access_node, va_gc> *&, size_t);
};
139 
/* Access node specifying no useful info: offset 0, size -1, max_size -1,
   parm_offset 0, parm_index MODREF_UNKNOWN_PARM, parm_offset_known false
   and adjustments 0.  */
const modref_access_node unspecified_modref_access_node
		 = {0, -1, -1, 0, MODREF_UNKNOWN_PARM, false, 0};
143 
/* Ref node of the access tree: one ref alias set (or type, for LTO
   streaming) under a base, holding the collected accesses.  */
template <typename T>
struct GTY((user)) modref_ref_node
{
  /* Ref alias set or type this node describes.  */
  T ref;
  /* True if every access through this base->ref pair may happen;
     ACCESSES is then NULL.  */
  bool every_access;
  /* Individual accesses recorded for this ref.  */
  vec <modref_access_node, va_gc> *accesses;

  modref_ref_node (T ref):
    ref (ref),
    every_access (false),
    accesses (NULL)
  {}

  /* Collapse the tree: drop all recorded accesses and assume every
     access may happen.  */
  void collapse ()
  {
    vec_free (accesses);
    accesses = NULL;
    every_access = true;
  }

  /* Insert access with OFFSET and SIZE.
     Collapse tree if it has more than MAX_ACCESSES entries.
     If RECORD_ADJUSTMENTS is true avoid too many interval extensions.
     Return true if record was changed.  */
  bool insert_access (modref_access_node a, size_t max_accesses,
		      bool record_adjustments)
  {
    /* If this base->ref pair has no access information, bail out.  */
    if (every_access)
      return false;

    /* Only the following kind of parameters needs to be tracked.
       We do not track return slots because they are seen as a direct store
       in the caller.  */
    gcc_checking_assert (a.parm_index >= 0
			 || a.parm_index == MODREF_STATIC_CHAIN_PARM
			 || a.parm_index == MODREF_GLOBAL_MEMORY_PARM
			 || a.parm_index == MODREF_UNKNOWN_PARM);

    /* An access with no useful info makes all range tracking for this
       ref pointless; collapse instead of recording it.  */
    if (!a.useful_p ())
      {
	if (!every_access)
	  {
	    collapse ();
	    return true;
	  }
	return false;
      }

    int ret = modref_access_node::insert (accesses, a, max_accesses,
					  record_adjustments);
    /* insert returns -1 when the access table is full; be conservative
       and collapse to "every access".  */
    if (ret == -1)
      {
	if (dump_file)
	  fprintf (dump_file,
		   "--param modref-max-accesses limit reached; collapsing\n");
	collapse ();
      }
    return ret != 0;
  }
};
206 
/* Base of an access.  */
template <typename T>
struct GTY((user)) modref_base_node
{
  /* Base alias set or type this node describes.  */
  T base;
  /* Ref nodes recorded under this base.  */
  vec <modref_ref_node <T> *, va_gc> *refs;
  /* True if every ref under this base may be accessed; REFS is then NULL.  */
  bool every_ref;

  modref_base_node (T base):
    base (base),
    refs (NULL),
    every_ref (false) {}

  /* Search REF; return NULL if failed.  Linear scan — REFS is bounded by
     the modref-max-refs param.  */
  modref_ref_node <T> *search (T ref)
  {
    size_t i;
    modref_ref_node <T> *n;
    FOR_EACH_VEC_SAFE_ELT (refs, i, n)
      if (n->ref == ref)
	return n;
    return NULL;
  }

  /* Insert REF; collapse tree if there are more than MAX_REFS.
     Return inserted ref and if CHANGED is non-null set it to true if
     something changed.  */
  modref_ref_node <T> *insert_ref (T ref, size_t max_refs,
				   bool *changed = NULL)
  {
    modref_ref_node <T> *ref_node;

    /* If the node is collapsed, don't do anything.  */
    if (every_ref)
      return NULL;

    /* Otherwise, insert a node for the ref of the access under the base.  */
    ref_node = search (ref);
    if (ref_node)
      return ref_node;

    /* We always allow inserting ref 0.  For non-0 refs there is upper
       limit on number of entries and if exceeded,
       drop ref conservatively to 0.  */
    if (ref && refs && refs->length () >= max_refs)
      {
	if (dump_file)
	  fprintf (dump_file, "--param modref-max-refs limit reached;"
		   " using 0\n");
	ref = 0;
	ref_node = search (ref);
	if (ref_node)
	  return ref_node;
      }

    if (changed)
      *changed = true;

    ref_node = new (ggc_alloc <modref_ref_node <T> > ())modref_ref_node <T>
								 (ref);
    vec_safe_push (refs, ref_node);
    return ref_node;
  }

  /* Drop all ref nodes and assume every ref may be accessed.  */
  void collapse ()
  {
    size_t i;
    modref_ref_node <T> *r;

    if (refs)
      {
	FOR_EACH_VEC_SAFE_ELT (refs, i, r)
	  {
	    r->collapse ();
	    ggc_free (r);
	  }
	vec_free (refs);
      }
    refs = NULL;
    every_ref = true;
  }
};
289 
290 /* Map translating parameters across function call.  */
291 
292 struct modref_parm_map
293 {
294   /* Default constructor.  */
modref_parm_mapmodref_parm_map295   modref_parm_map ()
296   : parm_index (MODREF_UNKNOWN_PARM), parm_offset_known (false), parm_offset ()
297   {}
298 
299   /* Index of parameter we translate to.
300      Values from special_params enum are permitted too.  */
301   int parm_index;
302   bool parm_offset_known;
303   poly_int64 parm_offset;
304 };
305 
/* Access tree for a single function.  */
template <typename T>
struct GTY((user)) modref_tree
{
  /* Base nodes of the tree.  */
  vec <modref_base_node <T> *, va_gc> *bases;
  /* True if every base may be accessed; BASES is then NULL.  */
  bool every_base;

  modref_tree ():
    bases (NULL),
    every_base (false) {}

  /* Insert BASE; collapse tree if there are more than MAX_BASES.
     Return inserted base and if CHANGED is non-null set it to true if
     something changed.
     If table gets full, try to insert REF instead.  */

  modref_base_node <T> *insert_base (T base, T ref,
				     unsigned int max_bases,
				     bool *changed = NULL)
  {
    modref_base_node <T> *base_node;

    /* If the node is collapsed, don't do anything.  */
    if (every_base)
      return NULL;

    /* Otherwise, insert a node for the base of the access into the tree.  */
    base_node = search (base);
    if (base_node)
      return base_node;

    /* We always allow inserting base 0.  For non-0 base there is upper
       limit on number of entries and if exceeded,
       drop base conservatively to ref and if it still does not fit to 0.  */
    if (base && bases && bases->length () >= max_bases)
      {
	base_node = search (ref);
	if (base_node)
	  {
	    if (dump_file)
	      fprintf (dump_file, "--param modref-max-bases"
		       " limit reached; using ref\n");
	    return base_node;
	  }
	if (dump_file)
	  fprintf (dump_file, "--param modref-max-bases"
		   " limit reached; using 0\n");
	base = 0;
	base_node = search (base);
	if (base_node)
	  return base_node;
      }

    if (changed)
      *changed = true;

    base_node = new (ggc_alloc <modref_base_node <T> > ())
			 modref_base_node <T> (base);
    vec_safe_push (bases, base_node);
    return base_node;
  }

  /* Insert memory access to the tree.
     Return true if something changed.  */
  bool insert (unsigned int max_bases,
	       unsigned int max_refs,
	       unsigned int max_accesses,
	       T base, T ref, modref_access_node a,
	       bool record_adjustments)
  {
    if (every_base)
      return false;

    bool changed = false;

    /* We may end up with max_size being less than size for accesses past the
       end of array.  Those are undefined and safe to ignore.  */
    if (a.range_info_useful_p ()
	&& known_size_p (a.size) && known_size_p (a.max_size)
	&& known_lt (a.max_size, a.size))
      {
	if (dump_file)
	  fprintf (dump_file,
		   "   - Paradoxical range. Ignoring\n");
	return false;
      }
    /* Zero-sized accesses touch no memory; ignore them too.  */
    if (known_size_p (a.size)
	&& known_eq (a.size, 0))
      {
	if (dump_file)
	  fprintf (dump_file,
		   "   - Zero size. Ignoring\n");
	return false;
      }
    if (known_size_p (a.max_size)
	&& known_eq (a.max_size, 0))
      {
	if (dump_file)
	  fprintf (dump_file,
		   "   - Zero max_size. Ignoring\n");
	return false;
      }
    gcc_checking_assert (!known_size_p (a.max_size)
			 || !known_le (a.max_size, 0));

    /* No useful information tracked; collapse everything.  */
    if (!base && !ref && !a.useful_p ())
      {
	collapse ();
	return true;
      }

    modref_base_node <T> *base_node
      = insert_base (base, ref, max_bases, &changed);
    base = base_node->base;
    /* If table got full we may end up with useless base.  */
    if (!base && !ref && !a.useful_p ())
      {
	collapse ();
	return true;
      }
    if (base_node->every_ref)
      return changed;
    gcc_checking_assert (search (base) != NULL);

    /* No useful ref info tracked; collapse base.  */
    if (!ref && !a.useful_p ())
      {
	base_node->collapse ();
	return true;
      }

    modref_ref_node <T> *ref_node
	    = base_node->insert_ref (ref, max_refs, &changed);
    ref = ref_node->ref;

    if (ref_node->every_access)
      return changed;
    changed |= ref_node->insert_access (a, max_accesses,
					record_adjustments);
    /* See if we failed to add useful access.  */
    if (ref_node->every_access)
      {
	/* Collapse everything if there is no useful base and ref.  */
	if (!base && !ref)
	  {
	    collapse ();
	    gcc_checking_assert (changed);
	  }
	/* Collapse base if there is no useful ref.  */
	else if (!ref)
	  {
	    base_node->collapse ();
	    gcc_checking_assert (changed);
	  }
      }
    return changed;
  }

  /* Insert memory access to the tree, taking the size limits from the
     --param values in effect for FNDECL.
     Return true if something changed.  */
  bool insert (tree fndecl,
	       T base, T ref, const modref_access_node &a,
	       bool record_adjustments)
  {
     return insert (opt_for_fn (fndecl, param_modref_max_bases),
		    opt_for_fn (fndecl, param_modref_max_refs),
		    opt_for_fn (fndecl, param_modref_max_accesses),
		    base, ref, a, record_adjustments);
  }

 /* Remove tree branches that are not useful (i.e. they will always pass).  */

 void cleanup ()
 {
   size_t i, j;
   modref_base_node <T> *base_node;
   modref_ref_node <T> *ref_node;

   if (!bases)
     return;

   for (i = 0; vec_safe_iterate (bases, i, &base_node);)
     {
       if (base_node->refs)
	 for (j = 0; vec_safe_iterate (base_node->refs, j, &ref_node);)
	   {
	     /* A ref that is not collapsed and has no accesses carries no
		information; remove it.  */
	     if (!ref_node->every_access
		 && (!ref_node->accesses
		     || !ref_node->accesses->length ()))
	       {
		 base_node->refs->unordered_remove (j);
		 vec_free (ref_node->accesses);
		 ggc_delete (ref_node);
	       }
	     else
	       j++;
	   }
       /* Likewise for a base with no refs left.  */
       if (!base_node->every_ref
	   && (!base_node->refs || !base_node->refs->length ()))
	 {
	   bases->unordered_remove (i);
	   vec_free (base_node->refs);
	   ggc_delete (base_node);
	 }
       else
	 i++;
     }
   if (bases && !bases->length ())
     {
       vec_free (bases);
       bases = NULL;
     }
 }

  /* Merge OTHER into the tree.
     PARM_MAP, if non-NULL, maps parm indexes of callee to caller.
     Similar CHAIN_MAP, if non-NULL, maps static chain of callee to caller.
     Return true if something has changed.  */
  bool merge (unsigned int max_bases,
	      unsigned int max_refs,
	      unsigned int max_accesses,
	      modref_tree <T> *other, vec <modref_parm_map> *parm_map,
	      modref_parm_map *static_chain_map,
	      bool record_accesses,
	      bool promote_unknown_to_global = false)
  {
    if (!other || every_base)
      return false;
    if (other->every_base)
      {
	collapse ();
	return true;
      }

    bool changed = false;
    size_t i, j, k;
    modref_base_node <T> *base_node, *my_base_node;
    modref_ref_node <T> *ref_node;
    modref_access_node *access_node;
    bool release = false;

    /* For self-recursive functions we may end up merging summary into itself;
       produce copy first so we do not modify summary under our own hands.  */
    if (other == this)
      {
	release = true;
	other = modref_tree<T>::create_ggc ();
	other->copy_from (this);
      }

    FOR_EACH_VEC_SAFE_ELT (other->bases, i, base_node)
      {
	if (base_node->every_ref)
	  {
	    my_base_node = insert_base (base_node->base, 0,
					max_bases, &changed);
	    if (my_base_node && !my_base_node->every_ref)
	      {
		my_base_node->collapse ();
		cleanup ();
		changed = true;
	      }
	  }
	else
	  FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
	    {
	      if (ref_node->every_access)
		{
		  changed |= insert (max_bases, max_refs, max_accesses,
				     base_node->base,
				     ref_node->ref,
				     unspecified_modref_access_node,
				     record_accesses);
		}
	      else
		FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
		  {
		    modref_access_node a = *access_node;

		    /* Translate parameter index through PARM_MAP (or the
		       static chain map) when merging a callee summary into
		       the caller.  */
		    if (a.parm_index != MODREF_UNKNOWN_PARM
			&& a.parm_index != MODREF_GLOBAL_MEMORY_PARM
			&& parm_map)
		      {
			if (a.parm_index >= (int)parm_map->length ())
			  a.parm_index = MODREF_UNKNOWN_PARM;
			else
			  {
			    modref_parm_map &m
				    = a.parm_index == MODREF_STATIC_CHAIN_PARM
				      ? *static_chain_map
				      : (*parm_map) [a.parm_index];
			    /* References to memory that became local in the
			       caller can be dropped entirely.  */
			    if (m.parm_index == MODREF_LOCAL_MEMORY_PARM)
			      continue;
			    a.parm_offset += m.parm_offset;
			    a.parm_offset_known &= m.parm_offset_known;
			    a.parm_index = m.parm_index;
			  }
		      }
		    if (a.parm_index == MODREF_UNKNOWN_PARM
			&& promote_unknown_to_global)
		      a.parm_index = MODREF_GLOBAL_MEMORY_PARM;
		    changed |= insert (max_bases, max_refs, max_accesses,
				       base_node->base, ref_node->ref,
				       a, record_accesses);
		  }
	    }
      }
    if (release)
      ggc_delete (other);
    return changed;
  }

  /* Merge OTHER into the tree, taking the size limits from the --param
     values in effect for FNDECL.
     PARM_MAP, if non-NULL, maps parm indexes of callee to caller.
     Similar CHAIN_MAP, if non-NULL, maps static chain of callee to caller.
     Return true if something has changed.  */
  bool merge (tree fndecl,
	      modref_tree <T> *other, vec <modref_parm_map> *parm_map,
	      modref_parm_map *static_chain_map,
	      bool record_accesses,
	      bool promote_unknown_to_global = false)
  {
     return merge (opt_for_fn (fndecl, param_modref_max_bases),
		   opt_for_fn (fndecl, param_modref_max_refs),
		   opt_for_fn (fndecl, param_modref_max_accesses),
		   other, parm_map, static_chain_map, record_accesses,
		   promote_unknown_to_global);
  }

  /* Copy OTHER to THIS.  Implemented as an unlimited merge with no
     parameter remapping.  */
  void copy_from (modref_tree <T> *other)
  {
    merge (INT_MAX, INT_MAX, INT_MAX, other, NULL, NULL, false);
  }

  /* Search BASE in tree; return NULL if failed.  Linear scan — BASES is
     bounded by the modref-max-bases param.  */
  modref_base_node <T> *search (T base)
  {
    size_t i;
    modref_base_node <T> *n;
    FOR_EACH_VEC_SAFE_ELT (bases, i, n)
      if (n->base == base)
	return n;
    return NULL;
  }

  /* Return true if tree contains access to global memory.  */
  bool global_access_p ()
  {
    size_t i, j, k;
    modref_base_node <T> *base_node;
    modref_ref_node <T> *ref_node;
    modref_access_node *access_node;
    if (every_base)
      return true;
    FOR_EACH_VEC_SAFE_ELT (bases, i, base_node)
      {
	if (base_node->every_ref)
	  return true;
	FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
	  {
	    if (ref_node->every_access)
	      return true;
	    FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
	      if (access_node->parm_index == MODREF_UNKNOWN_PARM
		  || access_node->parm_index == MODREF_GLOBAL_MEMORY_PARM)
		return true;
	  }
      }
    return false;
  }

  /* Return ggc allocated instance.  We explicitly call destructors via
     ggc_delete and do not want finalizers to be registered and
     called at the garbage collection time.  */
  static modref_tree<T> *create_ggc ()
  {
    return new (ggc_alloc_no_dtor<modref_tree<T>> ())
	 modref_tree<T> ();
  }

  /* Remove all records and mark tree to alias with everything.  */
  void collapse ()
  {
    size_t i;
    modref_base_node <T> *n;

    if (bases)
      {
	FOR_EACH_VEC_SAFE_ELT (bases, i, n)
	  {
	    n->collapse ();
	    ggc_free (n);
	  }
	vec_free (bases);
      }
    bases = NULL;
    every_base = true;
  }

  /* Release memory.  */
  ~modref_tree ()
  {
    collapse ();
  }

  /* Update parameter indexes in TT according to MAP.  Indexes that fall
     outside MAP become MODREF_UNKNOWN_PARM; special (negative) indexes are
     left untouched.  */
  void
  remap_params (vec <int> *map)
  {
    size_t i;
    modref_base_node <T> *base_node;
    FOR_EACH_VEC_SAFE_ELT (bases, i, base_node)
      {
	size_t j;
	modref_ref_node <T> *ref_node;
	FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
	  {
	    size_t k;
	    modref_access_node *access_node;
	    FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
	      if (access_node->parm_index >= 0)
		{
		  if (access_node->parm_index < (int)map->length ())
		    access_node->parm_index = (*map)[access_node->parm_index];
		  else
		    access_node->parm_index = MODREF_UNKNOWN_PARM;
		}
	  }
      }
  }
};
739 
/* GC and PCH marking routines for the GTY((user)) template instantiations
   above; the <int> variants are used for alias-set based trees and the
   <tree_node*> variants for type based (LTO streamed) trees.  */
void gt_ggc_mx (modref_tree <int>* const&);
void gt_ggc_mx (modref_tree <tree_node*>* const&);
void gt_pch_nx (modref_tree <int>* const&);
void gt_pch_nx (modref_tree <tree_node*>* const&);
void gt_pch_nx (modref_tree <int>* const&, gt_pointer_operator op, void *cookie);
void gt_pch_nx (modref_tree <tree_node*>* const&, gt_pointer_operator op,
		void *cookie);

void gt_ggc_mx (modref_base_node <int>*);
void gt_ggc_mx (modref_base_node <tree_node*>* &);
void gt_pch_nx (modref_base_node <int>* const&);
void gt_pch_nx (modref_base_node <tree_node*>* const&);
void gt_pch_nx (modref_base_node <int>* const&, gt_pointer_operator op,
		void *cookie);
void gt_pch_nx (modref_base_node <tree_node*>* const&, gt_pointer_operator op,
		void *cookie);

void gt_ggc_mx (modref_ref_node <int>*);
void gt_ggc_mx (modref_ref_node <tree_node*>* &);
void gt_pch_nx (modref_ref_node <int>* const&);
void gt_pch_nx (modref_ref_node <tree_node*>* const&);
void gt_pch_nx (modref_ref_node <int>* const&, gt_pointer_operator op,
		void *cookie);
void gt_pch_nx (modref_ref_node <tree_node*>* const&, gt_pointer_operator op,
		void *cookie);
765 
766 #endif
767