xref: /openbsd-src/gnu/usr.bin/gcc/gcc/ggc-common.c (revision c87b03e512fc05ed6e0222f6fb0ae86264b1d05b)
/* Simple garbage collection for the GNU compiler.
   Copyright (C) 1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* Generic garbage collection (GC) functions and data, not specific to
   any particular GC implementation.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "hashtab.h"
#include "varray.h"
#include "ggc.h"
#include "langhooks.h"
#include "params.h"
#ifdef HAVE_SYS_RESOURCE_H
# include <sys/resource.h>
#endif
#ifdef ENABLE_VALGRIND_CHECKING
#include <valgrind.h>
#else
/* Avoid #ifdef:s when we can help it.  */
#define VALGRIND_DISCARD(x)
#endif

/* Statistics about the allocation.  */
static ggc_statistics *ggc_stats;

static int ggc_htab_delete PARAMS ((void **, void *));
static double ggc_rlimit_bound PARAMS ((double));

/* Maintain global roots that are preserved during GC.  */

/* Global roots that are preserved during calls to gc.  */

struct ggc_root
{
  struct ggc_root *next;
  void *base;
  int nelt;
  int size;
  void (*cb) PARAMS ((void *));
};

static struct ggc_root *roots;

/* Add BASE as a new garbage collection root.  It is an array of
   length NELT with each element SIZE bytes long.  CB is a
   function that will be called with a pointer to each element
   of the array; it is the intention that CB call the appropriate
   routine to mark gc-able memory for that element.  */

void
ggc_add_root (base, nelt, size, cb)
     void *base;
     int nelt, size;
     void (*cb) PARAMS ((void *));
{
  struct ggc_root *x = (struct ggc_root *) xmalloc (sizeof (*x));

  x->next = roots;
  x->base = base;
  x->nelt = nelt;
  x->size = size;
  x->cb = cb;

  roots = x;
}

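/* A minimal usage sketch, not part of this file: a pass that wants to keep
   an array of GC-allocated trees live across collections could register the
   array as a root.  The names `saved_trees' and `mark_tree_elt' are
   hypothetical, and the sketch assumes the ggc_mark_tree marking routine
   from ggc.h is available in this tree.

       static tree saved_trees[16];

       static void
       mark_tree_elt (elt)
            void *elt;
       {
         ggc_mark_tree (*(tree *) elt);
       }

       ggc_add_root (saved_trees, 16, sizeof (tree), mark_tree_elt);

   During ggc_mark_roots, mark_tree_elt is then called once with the
   address of each of the 16 elements.  */
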
/* Process a slot of an htab by deleting it if it has not been marked.  */

static int
ggc_htab_delete (slot, info)
     void **slot;
     void *info;
{
  const struct ggc_cache_tab *r = (const struct ggc_cache_tab *) info;

  if (! (*r->marked_p) (*slot))
    htab_clear_slot (*r->base, slot);
  else
    (*r->cb) (*slot);

  return 1;
}

/* Iterate through all registered roots and mark each element.  */

void
ggc_mark_roots ()
{
  struct ggc_root *x;
  const struct ggc_root_tab *const *rt;
  const struct ggc_root_tab *rti;
  const struct ggc_cache_tab *const *ct;
  const struct ggc_cache_tab *cti;
  size_t i;

  for (rt = gt_ggc_deletable_rtab; *rt; rt++)
    for (rti = *rt; rti->base != NULL; rti++)
      memset (rti->base, 0, rti->stride);

  for (rt = gt_ggc_rtab; *rt; rt++)
    for (rti = *rt; rti->base != NULL; rti++)
      for (i = 0; i < rti->nelt; i++)
	(*rti->cb)(*(void **)((char *)rti->base + rti->stride * i));

  for (x = roots; x != NULL; x = x->next)
    {
      char *elt = x->base;
      int s = x->size, n = x->nelt;
      void (*cb) PARAMS ((void *)) = x->cb;
      int i;

      for (i = 0; i < n; ++i, elt += s)
	(*cb)(elt);
    }

  /* Now scan all hash tables that have objects which are to be deleted if
     they are not already marked.  */
  for (ct = gt_ggc_cache_rtab; *ct; ct++)
    for (cti = *ct; cti->base != NULL; cti++)
      if (*cti->base)
	htab_traverse (*cti->base, ggc_htab_delete, (PTR) cti);
}

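/* For reference, a sketch of what the gt_ggc_* tables consumed above look
   like.  The struct ggc_root_tab layout comes from ggc.h; the entry and the
   marker routine `gt_ggc_mx_some_type' shown here are hypothetical.

       static const struct ggc_root_tab gt_example_rtab[] = {
         { &some_global_ptr, 1, sizeof (some_global_ptr), gt_ggc_mx_some_type },
         { NULL, 0, 0, NULL }
       };

   For each entry, ggc_mark_roots hands every pointer slot, i.e.
   *(void **) ((char *) base + stride * i) for i in [0, nelt), to the cb
   hook, which marks the object that the slot points to.  */
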
/* Allocate a block of memory, then clear it.  */
void *
ggc_alloc_cleared (size)
     size_t size;
{
  void *buf = ggc_alloc (size);
  memset (buf, 0, size);
  return buf;
}

/* Resize a block of memory, possibly re-allocating it.  */
void *
ggc_realloc (x, size)
     void *x;
     size_t size;
{
  void *r;
  size_t old_size;

  if (x == NULL)
    return ggc_alloc (size);

  old_size = ggc_get_size (x);
  if (size <= old_size)
    {
      /* Mark the unwanted memory as inaccessible.  We also need to make
	 the "new" size accessible, since ggc_get_size returns the size of
	 the pool, not the size of the individually allocated object, the
	 size which was previously made accessible.  Unfortunately, we
	 don't know that previously allocated size.  Without that
	 knowledge we have to lose some initialization-tracking for the
	 old parts of the object.  An alternative is to mark the whole
	 old_size as reachable, but that would lose tracking of writes
	 after the end of the object (by small offsets).  Discard the
	 handle to avoid a handle leak.  */
      VALGRIND_DISCARD (VALGRIND_MAKE_NOACCESS ((char *) x + size,
						old_size - size));
      VALGRIND_DISCARD (VALGRIND_MAKE_READABLE (x, size));
      return x;
    }

  r = ggc_alloc (size);

  /* Since ggc_get_size returns the size of the pool, not the size of the
     individually allocated object, we'd access parts of the old object
     that were marked invalid with the memcpy below.  We lose a bit of the
     initialization-tracking since some of it may be uninitialized.  */
  VALGRIND_DISCARD (VALGRIND_MAKE_READABLE (x, old_size));

  memcpy (r, x, old_size);

  /* The old object is not supposed to be used anymore.  */
  VALGRIND_DISCARD (VALGRIND_MAKE_NOACCESS (x, old_size));

  return r;
}

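/* A usage sketch, for illustration only (the variable name is
   hypothetical): growing a GC-allocated vector.

       tree *vec = (tree *) ggc_alloc (8 * sizeof (tree));
       ...
       vec = (tree *) ggc_realloc (vec, 16 * sizeof (tree));

   Unlike xrealloc, the old block is never freed explicitly; once nothing
   references it any more it is simply reclaimed by a later collection.  */
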
/* Like ggc_alloc_cleared, but performs a multiplication.  */
void *
ggc_calloc (s1, s2)
     size_t s1, s2;
{
  return ggc_alloc_cleared (s1 * s2);
}

/* Print statistics that are independent of the collector in use.  */
#define SCALE(x) ((unsigned long) ((x) < 1024*10 \
		  ? (x) \
		  : ((x) < 1024*1024*10 \
		     ? (x) / 1024 \
		     : (x) / (1024*1024))))
#define LABEL(x) ((x) < 1024*10 ? ' ' : ((x) < 1024*1024*10 ? 'k' : 'M'))
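
/* For example, these macros render 4000 bytes as 4000 with label ' ',
   2097152 bytes as 2048 with label 'k', and 33554432 bytes as 32 with
   label 'M'.  */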

void
ggc_print_common_statistics (stream, stats)
     FILE *stream;
     ggc_statistics *stats;
{
  int code;

  /* Set the pointer so that during collection we will actually gather
     the statistics.  */
  ggc_stats = stats;

  /* Then do one collection to fill in the statistics.  */
  ggc_collect ();

  /* Total the statistics.  */
  for (code = 0; code < MAX_TREE_CODES; ++code)
    {
      stats->total_num_trees += stats->num_trees[code];
      stats->total_size_trees += stats->size_trees[code];
    }
  for (code = 0; code < NUM_RTX_CODE; ++code)
    {
      stats->total_num_rtxs += stats->num_rtxs[code];
      stats->total_size_rtxs += stats->size_rtxs[code];
    }

  /* Print the statistics for trees.  */
  fprintf (stream, "\n%-17s%10s %16s %10s\n", "Tree",
	   "Number", "Bytes", "% Total");
  for (code = 0; code < MAX_TREE_CODES; ++code)
    if (ggc_stats->num_trees[code])
      {
	fprintf (stream, "%-17s%10u%16ld%c %10.3f\n",
		 tree_code_name[code],
		 ggc_stats->num_trees[code],
		 SCALE (ggc_stats->size_trees[code]),
		 LABEL (ggc_stats->size_trees[code]),
		 (100 * ((double) ggc_stats->size_trees[code])
		  / ggc_stats->total_size_trees));
      }
  fprintf (stream,
	   "%-17s%10u%16ld%c\n", "Total",
	   ggc_stats->total_num_trees,
	   SCALE (ggc_stats->total_size_trees),
	   LABEL (ggc_stats->total_size_trees));

  /* Print the statistics for RTL.  */
  fprintf (stream, "\n%-17s%10s %16s %10s\n", "RTX",
	   "Number", "Bytes", "% Total");
  for (code = 0; code < NUM_RTX_CODE; ++code)
    if (ggc_stats->num_rtxs[code])
      {
	fprintf (stream, "%-17s%10u%16ld%c %10.3f\n",
		 rtx_name[code],
		 ggc_stats->num_rtxs[code],
		 SCALE (ggc_stats->size_rtxs[code]),
		 LABEL (ggc_stats->size_rtxs[code]),
		 (100 * ((double) ggc_stats->size_rtxs[code])
		  / ggc_stats->total_size_rtxs));
      }
  fprintf (stream,
	   "%-17s%10u%16ld%c\n", "Total",
	   ggc_stats->total_num_rtxs,
	   SCALE (ggc_stats->total_size_rtxs),
	   LABEL (ggc_stats->total_size_rtxs));

  /* Don't gather statistics any more.  */
  ggc_stats = NULL;
}

/* Modify the bound based on rlimits.  Keep the smallest number found.  */
static double
ggc_rlimit_bound (limit)
     double limit;
{
#if defined(HAVE_GETRLIMIT)
  struct rlimit rlim;
# ifdef RLIMIT_RSS
  if (getrlimit (RLIMIT_RSS, &rlim) == 0
      && rlim.rlim_cur != (rlim_t) RLIM_INFINITY
      && rlim.rlim_cur < limit)
    limit = rlim.rlim_cur;
# endif
# ifdef RLIMIT_DATA
  if (getrlimit (RLIMIT_DATA, &rlim) == 0
      && rlim.rlim_cur != (rlim_t) RLIM_INFINITY
      && rlim.rlim_cur < limit)
    limit = rlim.rlim_cur;
# endif
# ifdef RLIMIT_AS
  if (getrlimit (RLIMIT_AS, &rlim) == 0
      && rlim.rlim_cur != (rlim_t) RLIM_INFINITY
      && rlim.rlim_cur < limit)
    limit = rlim.rlim_cur;
# endif
#endif /* HAVE_GETRLIMIT */

  return limit;
}

/* Heuristic to set a default for GGC_MIN_EXPAND.  */
int
ggc_min_expand_heuristic ()
{
  double min_expand = physmem_total();

  /* Adjust for rlimits.  */
  min_expand = ggc_rlimit_bound (min_expand);

  /* The heuristic is a percentage equal to 30% + 70%*(RAM/1GB), yielding
     a lower bound of 30% and an upper bound of 100% (when RAM >= 1GB).  */
  min_expand /= 1024*1024*1024;
  min_expand *= 70;
  min_expand = MIN (min_expand, 70);
  min_expand += 30;

  return min_expand;
}

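/* Worked example for the heuristic above, assuming no tighter rlimit is in
   effect: with 512MB of RAM, min_expand = 0.5 * 70 + 30 = 65; with 4GB of
   RAM the MIN cap applies and the result is 70 + 30 = 100.  */
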
/* Heuristic to set a default for GGC_MIN_HEAPSIZE.  */
int
ggc_min_heapsize_heuristic ()
{
  double min_heap_kbytes = physmem_total();

  /* Adjust for rlimits.  */
  min_heap_kbytes = ggc_rlimit_bound (min_heap_kbytes);

  min_heap_kbytes /= 1024; /* convert to Kbytes. */

  /* The heuristic is RAM/8, with a lower bound of 4M and an upper
     bound of 128M (when RAM >= 1GB).  */
  min_heap_kbytes /= 8;
  min_heap_kbytes = MAX (min_heap_kbytes, 4 * 1024);
  min_heap_kbytes = MIN (min_heap_kbytes, 128 * 1024);

  return min_heap_kbytes;
}

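/* Worked example for the heuristic above, again assuming no tighter rlimit:
   with 512MB of RAM, min_heap_kbytes = 512 * 1024 / 8 = 65536 (64MB), which
   lies between the 4MB floor and the 128MB ceiling; with 16MB of RAM the
   floor applies and the result is 4096.  */
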
/* Set the default GC parameters from the heuristics above, unless the
   compiler was configured to collect at every opportunity
   (ENABLE_GC_ALWAYS_COLLECT), in which case the thresholds are not used.  */

void
init_ggc_heuristics ()
{
#ifndef ENABLE_GC_ALWAYS_COLLECT
  set_param_value ("ggc-min-expand", ggc_min_expand_heuristic());
  set_param_value ("ggc-min-heapsize", ggc_min_heapsize_heuristic());
#endif
}