xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/tree-object-size.c (revision f78a39098a724dc9cf9de2015260a6c3ffa0f8d7)
1 /* __builtin_object_size (ptr, object_size_type) computation
2    Copyright (C) 2004-2013 Free Software Foundation, Inc.
3    Contributed by Jakub Jelinek <jakub@redhat.com>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "diagnostic-core.h"
27 #include "gimple-pretty-print.h"
28 #include "tree-flow.h"
29 #include "tree-pass.h"
30 #include "tree-ssa-propagate.h"
31 
/* State shared by the passes that propagate object sizes over SSA
   use-def chains.  */
struct object_size_info
{
  int object_size_type;		/* __builtin_object_size mode, 0..3.  */
  bitmap visited, reexamine;	/* SSA names seen / found in dependency
				   cycles and needing re-examination.  */
  int pass;			/* 0: initial walk, 1: plus-in-loop
				   detection, 2: fixpoint iteration.  */
  bool changed;			/* Whether any size changed this pass.  */
  unsigned int *depths;		/* Per-SSA-name DFS depth (pass 1 only).  */
  unsigned int *stack, *tos;	/* DFS stack and top-of-stack (pass 1 only).  */
};
41 
/* Sentinel "size cannot be determined" value for each of the four
   __builtin_object_size modes: all-ones for the two maximum modes
   (0 and 1), zero for the two minimum modes (2 and 3), i.e. the value
   any real size would replace under the respective max/min merge.  */
static unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };

/* Forward declarations for the mutually recursive size collection
   routines below.  */
static tree compute_object_offset (const_tree, const_tree);
static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
						const_tree, int);
static unsigned HOST_WIDE_INT alloc_object_size (const_gimple, int);
static tree pass_through_call (const_gimple);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
				unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple);
static unsigned int compute_object_sizes (void);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
				       unsigned int);
60 
61 /* object_sizes[0] is upper bound for number of bytes till the end of
62    the object.
63    object_sizes[1] is upper bound for number of bytes till the end of
64    the subobject (innermost array or field with address taken).
65    object_sizes[2] is lower bound for number of bytes till the end of
66    the object and object_sizes[3] lower bound for subobject.  */
67 static vec<unsigned HOST_WIDE_INT> object_sizes[4];
68 
69 /* Bitmaps what object sizes have been computed already.  */
70 static bitmap computed[4];
71 
72 /* Maximum value of offset we consider to be addition.  */
73 static unsigned HOST_WIDE_INT offset_limit;
74 
75 
76 /* Initialize OFFSET_LIMIT variable.  */
77 static void
78 init_offset_limit (void)
79 {
80   if (host_integerp (TYPE_MAX_VALUE (sizetype), 1))
81     offset_limit = tree_low_cst (TYPE_MAX_VALUE (sizetype), 1);
82   else
83     offset_limit = -1;
84   offset_limit /= 2;
85 }
86 
87 
88 /* Compute offset of EXPR within VAR.  Return error_mark_node
89    if unknown.  */
90 
/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  Recurses through the component reference chain,
   accumulating byte offsets; ARRAY_REF with a negative constant
   index is folded as a subtraction.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      /* Field offset is DECL_FIELD_OFFSET plus the bit offset
	 converted to bytes.  */
      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
			size_int (tree_low_cst (DECL_FIELD_BIT_OFFSET (t), 1)
				  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* These do not change the offset; look through them.  */
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      /* The imaginary part sits one component size past the base.  */
      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
	{
	  /* Negative index: negate and subtract instead, to stay in
	     unsigned sizetype arithmetic.  */
	  code = MINUS_EXPR;
	  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
	}
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return double_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
152 
153 
154 /* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
155    OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
156    If unknown, return unknown[object_size_type].  */
157 
/* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  OSI may be NULL when
   called outside the SSA propagation (from compute_builtin_object_size
   directly).  */

static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info *osi, const_tree ptr,
		  int object_size_type)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  /* Strip component references to find the outermost containing
     object.  */
  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      /* For *p style bases, the size comes from the pointer p itself;
	 subobject sizes (bit 0) cannot be tracked through a pointer,
	 so fall back to the whole-object mode.  */
      if (!osi || (object_size_type & 1) != 0
	  || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
	{
	  sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
					    object_size_type & ~1);
	}
      else
	{
	  tree var = TREE_OPERAND (pt_var, 0);
	  if (osi->pass == 0)
	    collect_object_sizes_for (osi, var);
	  if (bitmap_bit_p (computed[object_size_type],
			    SSA_NAME_VERSION (var)))
	    sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
	  else
	    sz = unknown[object_size_type];
	}
      /* Subtract the MEM_REF offset; clamp negative results at zero
	 and give up if the difference does not fit a HOST_WIDE_INT.  */
      if (sz != unknown[object_size_type])
	{
	  double_int dsz = double_int::from_uhwi (sz) - mem_ref_offset (pt_var);
	  if (dsz.is_negative ())
	    sz = 0;
	  else if (dsz.fits_uhwi ())
	    sz = dsz.to_uhwi ();
	  else
	    sz = unknown[object_size_type];
	}

      if (sz != unknown[object_size_type] && sz < offset_limit)
	pt_var_size = size_int (sz);
    }
  else if (pt_var
	   && DECL_P (pt_var)
	   && host_integerp (DECL_SIZE_UNIT (pt_var), 1)
	   && (unsigned HOST_WIDE_INT)
	        tree_low_cst (DECL_SIZE_UNIT (pt_var), 1) < offset_limit)
    pt_var_size = DECL_SIZE_UNIT (pt_var);
  else if (pt_var
	   && TREE_CODE (pt_var) == STRING_CST
	   && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
	   && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
	   && (unsigned HOST_WIDE_INT)
	      tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
	      < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return unknown[object_size_type];

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
	{
	  /* Subobject mode: find the innermost accessed array or
	     field with its address taken.  */
	  var = TREE_OPERAND (ptr, 0);

	  while (var != pt_var
		 && TREE_CODE (var) != BIT_FIELD_REF
		 && TREE_CODE (var) != COMPONENT_REF
		 && TREE_CODE (var) != ARRAY_REF
		 && TREE_CODE (var) != ARRAY_RANGE_REF
		 && TREE_CODE (var) != REALPART_EXPR
		 && TREE_CODE (var) != IMAGPART_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
	    var = TREE_OPERAND (var, 0);
	  /* If the subobject's type size is unknown or larger than the
	     containing object, use the containing object instead.  */
	  if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
	      || ! host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (var)), 1)
	      || (pt_var_size
		  && tree_int_cst_lt (pt_var_size,
				      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
	    var = pt_var;
	  else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
	    {
	      tree v = var;
	      /* For &X->fld, compute object size only if fld isn't the last
		 field, as struct { int i; char c[1]; } is often used instead
		 of flexible array member.  */
	      while (v && v != pt_var)
		switch (TREE_CODE (v))
		  {
		  case ARRAY_REF:
		    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
			&& TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
		      {
			tree domain
			  = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
			if (domain
			    && TYPE_MAX_VALUE (domain)
			    && TREE_CODE (TYPE_MAX_VALUE (domain))
			       == INTEGER_CST
			    && tree_int_cst_lt (TREE_OPERAND (v, 1),
						TYPE_MAX_VALUE (domain)))
			  {
			    /* Index below the array bound: safe to use
			       the subobject size.  */
			    v = NULL_TREE;
			    break;
			  }
		      }
		    v = TREE_OPERAND (v, 0);
		    break;
		  case REALPART_EXPR:
		  case IMAGPART_EXPR:
		    v = NULL_TREE;
		    break;
		  case COMPONENT_REF:
		    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
		      {
			v = NULL_TREE;
			break;
		      }
		    /* Skip through union members, whose sizes overlap.  */
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (TREE_CODE (v) == COMPONENT_REF
			&& TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			   == RECORD_TYPE)
		      {
			/* Is there any FIELD_DECL after this one?  If so,
			   the accessed field is not the last one and the
			   subobject size can be used.  */
			tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
			for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
			  if (TREE_CODE (fld_chain) == FIELD_DECL)
			    break;

			if (fld_chain)
			  {
			    v = NULL_TREE;
			    break;
			  }
			v = TREE_OPERAND (v, 0);
		      }
		    /* Again skip unions on the way up.  */
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (v != pt_var)
		      v = NULL_TREE;
		    else
		      v = pt_var;
		    break;
		  default:
		    v = pt_var;
		    break;
		  }
	      if (v == pt_var)
		var = pt_var;
	    }
	}
      else
	var = pt_var;

      if (var != pt_var)
	var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
	return unknown[object_size_type];
      else
	var_size = pt_var_size;
      /* Remaining bytes = size of VAR minus offset of PTR within it,
	 clamped at zero.  */
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
	{
	  if (TREE_CODE (bytes) == INTEGER_CST
	      && tree_int_cst_lt (var_size, bytes))
	    bytes = size_zero_node;
	  else
	    bytes = size_binop (MINUS_EXPR, var_size, bytes);
	}
      /* For MEM_REF bases also bound by the remaining bytes within the
	 whole pointed-to object, taking the minimum.  */
      if (var != pt_var
	  && pt_var_size
	  && TREE_CODE (pt_var) == MEM_REF
	  && bytes != error_mark_node)
	{
	  tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
	  if (bytes2 != error_mark_node)
	    {
	      if (TREE_CODE (bytes2) == INTEGER_CST
		  && tree_int_cst_lt (pt_var_size, bytes2))
		bytes2 = size_zero_node;
	      else
		bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
	      bytes = size_binop (MIN_EXPR, bytes, bytes2);
	    }
	}
    }
  else if (!pt_var_size)
    return unknown[object_size_type];
  else
    bytes = pt_var_size;

  if (host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}
375 
376 
377 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
378    Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
379    argument from __builtin_object_size.  If unknown, return
380    unknown[object_size_type].  */
381 
382 static unsigned HOST_WIDE_INT
383 alloc_object_size (const_gimple call, int object_size_type)
384 {
385   tree callee, bytes = NULL_TREE;
386   tree alloc_size;
387   int arg1 = -1, arg2 = -1;
388 
389   gcc_assert (is_gimple_call (call));
390 
391   callee = gimple_call_fndecl (call);
392   if (!callee)
393     return unknown[object_size_type];
394 
395   alloc_size = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (TREE_TYPE(callee)));
396   if (alloc_size && TREE_VALUE (alloc_size))
397     {
398       tree p = TREE_VALUE (alloc_size);
399 
400       arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
401       if (TREE_CHAIN (p))
402         arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
403     }
404 
405   if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
406     switch (DECL_FUNCTION_CODE (callee))
407       {
408       case BUILT_IN_CALLOC:
409 	arg2 = 1;
410 	/* fall through */
411       case BUILT_IN_MALLOC:
412       case BUILT_IN_ALLOCA:
413       case BUILT_IN_ALLOCA_WITH_ALIGN:
414 	arg1 = 0;
415       default:
416 	break;
417       }
418 
419   if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
420       || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
421       || (arg2 >= 0
422 	  && (arg2 >= (int)gimple_call_num_args (call)
423 	      || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
424     return unknown[object_size_type];
425 
426   if (arg2 >= 0)
427     bytes = size_binop (MULT_EXPR,
428 	fold_convert (sizetype, gimple_call_arg (call, arg1)),
429 	fold_convert (sizetype, gimple_call_arg (call, arg2)));
430   else if (arg1 >= 0)
431     bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));
432 
433   if (bytes && host_integerp (bytes, 1))
434     return tree_low_cst (bytes, 1);
435 
436   return unknown[object_size_type];
437 }
438 
439 
440 /* If object size is propagated from one of function's arguments directly
441    to its return value, return that argument for GIMPLE_CALL statement CALL.
442    Otherwise return NULL.  */
443 
/* If object size is propagated from one of function's arguments directly
   to its return value, return that argument for GIMPLE_CALL statement CALL.
   Otherwise return NULL.  All builtins handled here return their first
   argument (the destination pointer).  */

static tree
pass_through_call (const_gimple call)
{
  tree callee = gimple_call_fndecl (call);

  if (callee
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMSET:
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_STPNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
      case BUILT_IN_ASSUME_ALIGNED:
	/* Guard against malformed calls with no arguments.  */
	if (gimple_call_num_args (call) >= 1)
	  return gimple_call_arg (call, 0);
	break;
      default:
	break;
      }

  return NULL_TREE;
}
478 
479 
480 /* Compute __builtin_object_size value for PTR.  OBJECT_SIZE_TYPE is the
481    second argument from __builtin_object_size.  */
482 
/* Compute __builtin_object_size value for PTR.  OBJECT_SIZE_TYPE is the
   second argument from __builtin_object_size.  Returns
   unknown[object_size_type] when the size cannot be determined.  */

unsigned HOST_WIDE_INT
compute_builtin_object_size (tree ptr, int object_size_type)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  /* Lazily initialize offset_limit on first use.  */
  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type);

  if (TREE_CODE (ptr) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (ptr))
      && computed[object_size_type] != NULL)
    {
      /* Results are cached per SSA name; only walk the use-def chains
	 if this name has not been computed yet.  */
      if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
	{
	  struct object_size_info osi;
	  bitmap_iterator bi;
	  unsigned int i;

	  if (num_ssa_names > object_sizes[object_size_type].length ())
	    object_sizes[object_size_type].safe_grow (num_ssa_names);
	  if (dump_file)
	    {
	      fprintf (dump_file, "Computing %s %sobject size for ",
		       (object_size_type & 2) ? "minimum" : "maximum",
		       (object_size_type & 1) ? "sub" : "");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, ":\n");
	    }

	  osi.visited = BITMAP_ALLOC (NULL);
	  osi.reexamine = BITMAP_ALLOC (NULL);
	  osi.object_size_type = object_size_type;
	  osi.depths = NULL;
	  osi.stack = NULL;
	  osi.tos = NULL;

	  /* First pass: walk UD chains, compute object sizes that
	     can be computed.  osi.reexamine bitmap at the end will
	     contain what variables were found in dependency cycles
	     and therefore need to be reexamined.  */
	  osi.pass = 0;
	  osi.changed = false;
	  collect_object_sizes_for (&osi, ptr);

	  /* Second pass: keep recomputing object sizes of variables
	     that need reexamination, until no object sizes are
	     increased or all object sizes are computed.  */
	  if (! bitmap_empty_p (osi.reexamine))
	    {
	      bitmap reexamine = BITMAP_ALLOC (NULL);

	      /* If looking for minimum instead of maximum object size,
		 detect cases where a pointer is increased in a loop.
		 Although even without this detection pass 2 would eventually
		 terminate, it could take a long time.  If a pointer is
		 increasing this way, we need to assume 0 object size.
		 E.g. p = &buf[0]; while (cond) p = p + 4;  */
	      if (object_size_type & 2)
		{
		  osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
		  osi.stack = XNEWVEC (unsigned int, num_ssa_names);
		  osi.tos = osi.stack;
		  osi.pass = 1;
		  /* collect_object_sizes_for is changing
		     osi.reexamine bitmap, so iterate over a copy.  */
		  bitmap_copy (reexamine, osi.reexamine);
		  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		    if (bitmap_bit_p (osi.reexamine, i))
		      check_for_plus_in_loops (&osi, ssa_name (i));

		  free (osi.depths);
		  osi.depths = NULL;
		  free (osi.stack);
		  osi.stack = NULL;
		  osi.tos = NULL;
		}

	      /* Fixpoint iteration: re-run collection for cycle members
		 until no size changes.  */
	      do
		{
		  osi.pass = 2;
		  osi.changed = false;
		  /* collect_object_sizes_for is changing
		     osi.reexamine bitmap, so iterate over a copy.  */
		  bitmap_copy (reexamine, osi.reexamine);
		  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		    if (bitmap_bit_p (osi.reexamine, i))
		      {
			collect_object_sizes_for (&osi, ssa_name (i));
			if (dump_file && (dump_flags & TDF_DETAILS))
			  {
			    fprintf (dump_file, "Reexamining ");
			    print_generic_expr (dump_file, ssa_name (i),
						dump_flags);
			    fprintf (dump_file, "\n");
			  }
		      }
		}
	      while (osi.changed);

	      BITMAP_FREE (reexamine);
	    }
	  /* Whatever is still marked for reexamination has converged;
	     record it as computed.  */
	  EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
	    bitmap_set_bit (computed[object_size_type], i);

	  /* Debugging dumps.  */
	  if (dump_file)
	    {
	      EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
		if (object_sizes[object_size_type][i]
		    != unknown[object_size_type])
		  {
		    print_generic_expr (dump_file, ssa_name (i),
					dump_flags);
		    fprintf (dump_file,
			     ": %s %sobject size "
			     HOST_WIDE_INT_PRINT_UNSIGNED "\n",
			     (object_size_type & 2) ? "minimum" : "maximum",
			     (object_size_type & 1) ? "sub" : "",
			     object_sizes[object_size_type][i]);
		  }
	    }

	  BITMAP_FREE (osi.reexamine);
	  BITMAP_FREE (osi.visited);
	}

      return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
    }

  return unknown[object_size_type];
}
617 
618 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */
619 
620 static void
621 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
622 {
623   int object_size_type = osi->object_size_type;
624   unsigned int varno = SSA_NAME_VERSION (ptr);
625   unsigned HOST_WIDE_INT bytes;
626 
627   gcc_assert (object_sizes[object_size_type][varno]
628 	      != unknown[object_size_type]);
629   gcc_assert (osi->pass == 0);
630 
631   if (TREE_CODE (value) == WITH_SIZE_EXPR)
632     value = TREE_OPERAND (value, 0);
633 
634   /* Pointer variables should have been handled by merge_object_sizes.  */
635   gcc_assert (TREE_CODE (value) != SSA_NAME
636 	      || !POINTER_TYPE_P (TREE_TYPE (value)));
637 
638   if (TREE_CODE (value) == ADDR_EXPR)
639     bytes = addr_object_size (osi, value, object_size_type);
640   else
641     bytes = unknown[object_size_type];
642 
643   if ((object_size_type & 2) == 0)
644     {
645       if (object_sizes[object_size_type][varno] < bytes)
646 	object_sizes[object_size_type][varno] = bytes;
647     }
648   else
649     {
650       if (object_sizes[object_size_type][varno] > bytes)
651 	object_sizes[object_size_type][varno] = bytes;
652     }
653 }
654 
655 
656 /* Compute object_sizes for PTR, defined to the result of a call.  */
657 
658 static void
659 call_object_size (struct object_size_info *osi, tree ptr, gimple call)
660 {
661   int object_size_type = osi->object_size_type;
662   unsigned int varno = SSA_NAME_VERSION (ptr);
663   unsigned HOST_WIDE_INT bytes;
664 
665   gcc_assert (is_gimple_call (call));
666 
667   gcc_assert (object_sizes[object_size_type][varno]
668 	      != unknown[object_size_type]);
669   gcc_assert (osi->pass == 0);
670 
671   bytes = alloc_object_size (call, object_size_type);
672 
673   if ((object_size_type & 2) == 0)
674     {
675       if (object_sizes[object_size_type][varno] < bytes)
676 	object_sizes[object_size_type][varno] = bytes;
677     }
678   else
679     {
680       if (object_sizes[object_size_type][varno] > bytes)
681 	object_sizes[object_size_type][varno] = bytes;
682     }
683 }
684 
685 
686 /* Compute object_sizes for PTR, defined to an unknown value.  */
687 
688 static void
689 unknown_object_size (struct object_size_info *osi, tree ptr)
690 {
691   int object_size_type = osi->object_size_type;
692   unsigned int varno = SSA_NAME_VERSION (ptr);
693   unsigned HOST_WIDE_INT bytes;
694 
695   gcc_assert (object_sizes[object_size_type][varno]
696 	      != unknown[object_size_type]);
697   gcc_assert (osi->pass == 0);
698 
699   bytes = unknown[object_size_type];
700 
701   if ((object_size_type & 2) == 0)
702     {
703       if (object_sizes[object_size_type][varno] < bytes)
704 	object_sizes[object_size_type][varno] = bytes;
705     }
706   else
707     {
708       if (object_sizes[object_size_type][varno] > bytes)
709 	object_sizes[object_size_type][varno] = bytes;
710     }
711 }
712 
713 
714 /* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
715    the object size might need reexamination later.  */
716 
/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later (i.e. ORIG is part
   of a dependency cycle still marked in osi->reexamine).  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
		    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  /* Already pinned at unknown: nothing can change it.  */
  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  /* Offsets at or past offset_limit are treated as subtraction, which
     makes the size unknowable.  */
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  /* Subtract OFFSET from ORIG's size, clamping at zero.  */
  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
		 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;

  /* Merge: maximum for modes 0/1, minimum for modes 2/3; record any
     change so the fixpoint loop in pass 2 keeps iterating.  */
  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
	{
	  object_sizes[object_size_type][varno] = orig_bytes;
	  osi->changed = true;
	}
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
	{
	  object_sizes[object_size_type][varno] = orig_bytes;
	  osi->changed = true;
	}
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}
759 
760 
761 /* Compute object_sizes for VAR, defined to the result of an assignment
762    with operator POINTER_PLUS_EXPR.  Return true if the object size might
763    need reexamination  later.  */
764 
/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination  later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  /* Extract pointer and offset from either a POINTER_PLUS_EXPR or an
     ADDR_EXPR wrapping a MEM_REF (&MEM[p + off]).  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
	  || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! host_integerp (op1, 1))
	bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
	/* Delegate to the merge logic, which also handles the
	   offset_limit cutoff and reexamination tracking.  */
	return merge_object_sizes (osi, var, op0, tree_low_cst (op1, 1));
      else
	{
	  unsigned HOST_WIDE_INT off = tree_low_cst (op1, 1);

          /* op0 will be ADDR_EXPR here.  */
	  bytes = addr_object_size (osi, op0, object_size_type);
	  if (bytes == unknown[object_size_type])
	    ;
	  else if (off > offset_limit)
	    /* Offsets past the limit are considered subtraction.  */
	    bytes = unknown[object_size_type];
	  else if (off > bytes)
	    bytes = 0;
	  else
	    bytes -= off;
	}
    }
  else
    bytes = unknown[object_size_type];

  /* Merge: maximum for modes 0/1, minimum for modes 2/3.  */
  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}
831 
832 
833 /* Compute object_sizes for VAR, defined at STMT, which is
834    a COND_EXPR.  Return true if the object size might need reexamination
835    later.  */
836 
837 static bool
838 cond_expr_object_size (struct object_size_info *osi, tree var, gimple stmt)
839 {
840   tree then_, else_;
841   int object_size_type = osi->object_size_type;
842   unsigned int varno = SSA_NAME_VERSION (var);
843   bool reexamine = false;
844 
845   gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);
846 
847   if (object_sizes[object_size_type][varno] == unknown[object_size_type])
848     return false;
849 
850   then_ = gimple_assign_rhs2 (stmt);
851   else_ = gimple_assign_rhs3 (stmt);
852 
853   if (TREE_CODE (then_) == SSA_NAME)
854     reexamine |= merge_object_sizes (osi, var, then_, 0);
855   else
856     expr_object_size (osi, var, then_);
857 
858   if (TREE_CODE (else_) == SSA_NAME)
859     reexamine |= merge_object_sizes (osi, var, else_, 0);
860   else
861     expr_object_size (osi, var, else_);
862 
863   return reexamine;
864 }
865 
866 /* Compute object sizes for VAR.
867    For ADDR_EXPR an object size is the number of remaining bytes
868    to the end of the object (where what is considered an object depends on
869    OSI->object_size_type).
870    For allocation GIMPLE_CALL like malloc or calloc object size is the size
871    of the allocation.
872    For POINTER_PLUS_EXPR where second operand is a constant integer,
873    object size is object size of the first operand minus the constant.
874    If the constant is bigger than the number of remaining bytes until the
875    end of the object, object size is 0, but if it is instead a pointer
876    subtraction, object size is unknown[object_size_type].
877    To differentiate addition from subtraction, ADDR_EXPR returns
878    unknown[object_size_type] for all objects bigger than half of the address
879    space, and constants less than half of the address space are considered
880    addition, while bigger constants subtraction.
881    For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
882    object size is object size of that argument.
883    Otherwise, object size is the maximum of object sizes of variables
884    that it might be set to.  */
885 
/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For allocation GIMPLE_CALL like malloc or calloc object size is the size
   of the allocation.
   For POINTER_PLUS_EXPR where second operand is a constant integer,
   object size is object size of the first operand minus the constant.
   If the constant is bigger than the number of remaining bytes until the
   end of the object, object size is 0, but if it is instead a pointer
   subtraction, object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space, and constants less than half of the address space are considered
   addition, while bigger constants subtraction.
   For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
   object size is object size of that argument.
   Otherwise, object size is the maximum of object sizes of variables
   that it might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple stmt;
  bool reexamine;

  /* Already fully computed: nothing to do.  */
  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
	{
	  /* First visit: start from the identity of the merge
	     (-1 for minimum modes, 0 for maximum modes).  */
	  object_sizes[object_size_type][varno]
	    = (object_size_type & 2) ? -1 : 0;
	}
      else
	{
	  /* Found a dependency loop.  Mark the variable for later
	     re-examination.  */
	  bitmap_set_bit (osi->reexamine, varno);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Found a dependency loop at ");
	      print_generic_expr (dump_file, var, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	  return;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  /* Dispatch on the defining statement of VAR.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	tree rhs = gimple_assign_rhs1 (stmt);
        if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	    || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
		&& TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
          reexamine = plus_stmt_object_size (osi, var, stmt);
	else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	  reexamine = cond_expr_object_size (osi, var, stmt);
        else if (gimple_assign_single_p (stmt)
                 || gimple_assign_unary_nop_p (stmt))
          {
            if (TREE_CODE (rhs) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (rhs)))
              reexamine = merge_object_sizes (osi, var, rhs, 0);
            else
              expr_object_size (osi, var, rhs);
          }
        else
          unknown_object_size (osi, var);
        break;
      }

    case GIMPLE_CALL:
      {
	/* memcpy-like builtins propagate the size of their first
	   argument; other calls are treated as allocations.  */
        tree arg = pass_through_call (stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (arg)))
              reexamine = merge_object_sizes (osi, var, arg, 0);
            else
              expr_object_size (osi, var, arg);
          }
        else
          call_object_size (osi, var, stmt);
	break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
	  && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
	expr_object_size (osi, var, SSA_NAME_VAR (var));
      else
	/* Uninitialized SSA names point nowhere.  */
	object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_PHI:
      {
	unsigned i;

	/* Merge over all PHI arguments; stop early once unknown.  */
	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (object_sizes[object_size_type][varno]
		== unknown[object_size_type])
	      break;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      reexamine |= merge_object_sizes (osi, var, rhs, 0);
	    else if (osi->pass == 0)
	      expr_object_size (osi, var, rhs);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* Mark the result either as final (computed) or as still needing
     reexamination in the fixpoint loop.  */
  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Need to reexamine ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, "\n");
	}
    }
}
1025 
1026 
1027 /* Helper function for check_for_plus_in_loops.  Called recursively
1028    to detect loops.  */
1029 
static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
			   unsigned int depth)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      /* VAR is already on the walk stack.  If it was first visited at a
	 different depth, a POINTER_PLUS_EXPR with a nonzero offset lies
	 somewhere on the cycle between the two visits.  */
      if (osi->depths[varno] != depth)
	{
	  unsigned int *sp;

	  /* Found a loop involving pointer addition.  Unwind the walk
	     stack down to VAR, giving every variable on the cycle a
	     minimum object size of 0 and marking it computed so it is
	     not examined again.  */
	  for (sp = osi->tos; sp > osi->stack; )
	    {
	      --sp;
	      bitmap_clear_bit (osi->reexamine, *sp);
	      bitmap_set_bit (computed[osi->object_size_type], *sp);
	      object_sizes[osi->object_size_type][*sp] = 0;
	      if (*sp == varno)
		break;
	    }
	}
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    /* VAR's size is already settled; it cannot be part of a loop that
       still needs fixing.  */
    return;

  /* Push VAR onto the walk stack at the current depth, then follow its
     use-def links.  The entry is popped again at the end.  */
  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
        if ((gimple_assign_single_p (stmt)
             || gimple_assign_unary_nop_p (stmt))
            && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
          {
	    /* Plain copy or no-op conversion from another SSA name:
	       recurse at the same depth.  */
            tree rhs = gimple_assign_rhs1 (stmt);

            check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
          {
            tree basevar = gimple_assign_rhs1 (stmt);
            tree cst = gimple_assign_rhs2 (stmt);

            gcc_assert (TREE_CODE (cst) == INTEGER_CST);

	    /* A nonzero offset bumps the depth, so re-reaching a
	       variable at a different depth signals a pointer that
	       grows around the loop.  */
            check_for_plus_in_loops_1 (osi, basevar,
                                       depth + !integer_zerop (cst));
          }
        else
          gcc_unreachable ();
        break;
      }

    case GIMPLE_CALL:
      {
	/* Only pass-through calls (those returning one of their
	   arguments) can keep a dependency cycle alive.  */
        tree arg = pass_through_call (stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, arg, depth);
            else
              gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_PHI:
      {
	unsigned i;

	/* Follow each PHI argument that is an SSA name; non-SSA
	   arguments have no defining statement to cycle through.  */
	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* Done with VAR: pop it from the walk stack.  */
  osi->depths[varno] = 0;
  osi->tos--;
}
1124 
1125 
1126 /* Check if some pointer we are computing object size of is being increased
1127    within a loop.  If yes, assume all the SSA variables participating in
1128    that loop have minimum object sizes 0.  */
1129 
1130 static void
1131 check_for_plus_in_loops (struct object_size_info *osi, tree var)
1132 {
1133   gimple stmt = SSA_NAME_DEF_STMT (var);
1134 
1135   /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1136      and looked for a POINTER_PLUS_EXPR in the pass-through
1137      argument, if any.  In GIMPLE, however, such an expression
1138      is not a valid call operand.  */
1139 
1140   if (is_gimple_assign (stmt)
1141       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1142     {
1143       tree basevar = gimple_assign_rhs1 (stmt);
1144       tree cst = gimple_assign_rhs2 (stmt);
1145 
1146       gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1147 
1148       if (integer_zerop (cst))
1149         return;
1150 
1151       osi->depths[SSA_NAME_VERSION (basevar)] = 1;
1152       *osi->tos++ = SSA_NAME_VERSION (basevar);
1153       check_for_plus_in_loops_1 (osi, var, 2);
1154       osi->depths[SSA_NAME_VERSION (basevar)] = 0;
1155       osi->tos--;
1156     }
1157 }
1158 
1159 
1160 /* Initialize data structures for the object size computation.  */
1161 
1162 void
1163 init_object_sizes (void)
1164 {
1165   int object_size_type;
1166 
1167   if (computed[0])
1168     return;
1169 
1170   for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1171     {
1172       object_sizes[object_size_type].safe_grow (num_ssa_names);
1173       computed[object_size_type] = BITMAP_ALLOC (NULL);
1174     }
1175 
1176   init_offset_limit ();
1177 }
1178 
1179 
1180 /* Destroy data structures after the object size computation.  */
1181 
1182 void
1183 fini_object_sizes (void)
1184 {
1185   int object_size_type;
1186 
1187   for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1188     {
1189       object_sizes[object_size_type].release ();
1190       BITMAP_FREE (computed[object_size_type]);
1191     }
1192 }
1193 
1194 
1195 /* Simple pass to optimize all __builtin_object_size () builtins.  */
1196 
static unsigned int
compute_object_sizes (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree result;
	  gimple call = gsi_stmt (i);
	  if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
	    continue;

	  /* Set up the object size machinery lazily, only once the
	     first __builtin_object_size call is actually seen.  */
	  init_object_sizes ();
	  result = fold_call_stmt (call, false);
	  if (!result)
	    {
	      /* Folding failed, so the size could not be determined.
		 Substitute the failure value the builtin specifies:
		 (size_t) -1 for types 0 and 1, (size_t) 0 for types
		 2 and 3 -- but only for a well-formed call with a
		 pointer first argument and a constant second one.  */
	      if (gimple_call_num_args (call) == 2
		  && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
		{
		  tree ost = gimple_call_arg (call, 1);

		  if (host_integerp (ost, 1))
		    {
		      unsigned HOST_WIDE_INT object_size_type
			= tree_low_cst (ost, 1);

		      if (object_size_type < 2)
			result = fold_convert (size_type_node,
					       integer_minus_one_node);
		      else if (object_size_type < 4)
			result = build_zero_cst (size_type_node);
		    }
		}

	      /* Malformed call or out-of-range type argument: leave
		 the statement alone.  */
	      if (!result)
		continue;
	    }

	  gcc_assert (TREE_CODE (result) == INTEGER_CST);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, call, 0, dump_flags);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, result, 0);
	      fprintf (dump_file, "\n");
	    }

	  /* A call whose result is unused needs no propagation.  */
	  tree lhs = gimple_call_lhs (call);
	  if (!lhs)
	    continue;

	  /* Propagate into all uses and fold those stmts.  */
	  gimple use_stmt;
	  imm_use_iterator iter;
	  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
	    {
	      use_operand_p use_p;
	      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		SET_USE (use_p, result);
	      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	      fold_stmt (&gsi);
	      update_stmt (gsi_stmt (gsi));
	    }
	}
    }

  /* Tear down the lazily-built data structures before returning.  */
  fini_object_sizes ();
  return 0;
}
1270 
/* Pass descriptor for the "objsz" lowering pass.  It has no gate, so
   it runs unconditionally, and requires the function to be in SSA form
   with a CFG built.  */
struct gimple_opt_pass pass_object_sizes =
{
 {
  GIMPLE_PASS,
  "objsz",				/* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  NULL,					/* gate */
  compute_object_sizes,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_ssa	                /* todo_flags_finish */
 }
};
1290