1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004-2022 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "gimple-fold.h"
33 #include "gimple-iterator.h"
34 #include "tree-cfg.h"
35 #include "tree-dfa.h"
36 #include "stringpool.h"
37 #include "attribs.h"
38 #include "builtins.h"
39 #include "gimplify-me.h"
40
41 struct object_size_info
42 {
43 int object_size_type;
44 unsigned char pass;
45 bool changed;
46 bitmap visited, reexamine, unknowns;
47 unsigned int *depths;
48 unsigned int *stack, *tos;
49 };
50
51 struct GTY(()) object_size
52 {
53 /* Estimate of bytes till the end of the object. */
54 tree size;
55 /* Estimate of the size of the whole object. */
56 tree wholesize;
57 };
58
59 static tree compute_object_offset (tree, const_tree);
60 static bool addr_object_size (struct object_size_info *,
61 const_tree, int, tree *, tree *t = NULL);
62 static tree alloc_object_size (const gcall *, int);
63 static tree pass_through_call (const gcall *);
64 static void collect_object_sizes_for (struct object_size_info *, tree);
65 static void expr_object_size (struct object_size_info *, tree, tree);
66 static bool merge_object_sizes (struct object_size_info *, tree, tree);
67 static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
68 static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
69 static void init_offset_limit (void);
70 static void check_for_plus_in_loops (struct object_size_info *, tree);
71 static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
72 unsigned int);
73
74 /* object_sizes[0] is upper bound for the object size and number of bytes till
75 the end of the object.
76 object_sizes[1] is upper bound for the object size and number of bytes till
77 the end of the subobject (innermost array or field with address taken).
78 object_sizes[2] is lower bound for the object size and number of bytes till
79 the end of the object and object_sizes[3] lower bound for subobject.
80
81 For static object sizes, the object size and the bytes till the end of the
82 object are both INTEGER_CST. In the dynamic case, they are ultimately either a
83 gimple variable or an INTEGER_CST. */
84 static vec<object_size> object_sizes[OST_END];
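/* An illustration (not from this file): given
     struct A { char buf[12]; int i; } a;   // sizeof (a) == 16 on a typical ABI
     char *p = &a.buf[4];
   object_sizes[0] for P is 16 - 4 == 12 (bytes to the end of A),
   object_sizes[1] is 12 - 4 == 8 (bytes to the end of the subobject BUF),
   and object_sizes[2] and [3] are the corresponding lower bounds, which
   coincide with the upper bounds here since P is known exactly.  */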
85
86 /* Bitmaps recording which object sizes have already been computed. */
87 static bitmap computed[OST_END];
88
89 /* Maximum value of offset we consider to be addition. */
90 static unsigned HOST_WIDE_INT offset_limit;
91
92 /* Return true if VAL represents an initial size for OBJECT_SIZE_TYPE. */
93
94 static inline bool
95 size_initval_p (tree val, int object_size_type)
96 {
97 return ((object_size_type & OST_MINIMUM)
98 ? integer_all_onesp (val) : integer_zerop (val));
99 }
100
101 /* Return true if VAL represents an unknown size for OBJECT_SIZE_TYPE. */
102
103 static inline bool
104 size_unknown_p (tree val, int object_size_type)
105 {
106 return ((object_size_type & OST_MINIMUM)
107 ? integer_zerop (val) : integer_all_onesp (val));
108 }
109
110 /* Return true if VAL represents a valid size for OBJECT_SIZE_TYPE. */
111
112 static inline bool
113 size_valid_p (tree val, int object_size_type)
114 {
115 return ((object_size_type & OST_DYNAMIC) || TREE_CODE (val) == INTEGER_CST);
116 }
117
118 /* Return true if VAL is usable as an object size in the object_sizes
119 vectors. */
120
121 static inline bool
122 size_usable_p (tree val)
123 {
124 return TREE_CODE (val) == SSA_NAME || TREE_CODE (val) == INTEGER_CST;
125 }
126
127 /* Return a tree with initial value for OBJECT_SIZE_TYPE. */
128
129 static inline tree
130 size_initval (int object_size_type)
131 {
132 return ((object_size_type & OST_MINIMUM)
133 ? TYPE_MAX_VALUE (sizetype) : size_zero_node);
134 }
135
136 /* Return a tree with unknown value for OBJECT_SIZE_TYPE. */
137
138 static inline tree
139 size_unknown (int object_size_type)
140 {
141 return ((object_size_type & OST_MINIMUM)
142 ? size_zero_node : TYPE_MAX_VALUE (sizetype));
143 }
144
145 /* Grow object_sizes[OBJECT_SIZE_TYPE] to num_ssa_names. */
146
147 static inline void
148 object_sizes_grow (int object_size_type)
149 {
150 if (num_ssa_names > object_sizes[object_size_type].length ())
151 object_sizes[object_size_type].safe_grow (num_ssa_names, true);
152 }
153
154 /* Release object_sizes[OBJECT_SIZE_TYPE]. */
155
156 static inline void
157 object_sizes_release (int object_size_type)
158 {
159 object_sizes[object_size_type].release ();
160 }
161
162 /* Return true if object_sizes[OBJECT_SIZE_TYPE][VARNO] is unknown. */
163
164 static inline bool
165 object_sizes_unknown_p (int object_size_type, unsigned varno)
166 {
167 return size_unknown_p (object_sizes[object_size_type][varno].size,
168 object_size_type);
169 }
170
171 /* Return the raw size expression for VARNO corresponding to OSI. This
172 returns the stored trees (possibly MODIFY_EXPRs or TREE_VECs) as is and should only be used during gimplification. */
173
174 static inline object_size
175 object_sizes_get_raw (struct object_size_info *osi, unsigned varno)
176 {
177 gcc_assert (osi->pass != 0);
178 return object_sizes[osi->object_size_type][varno];
179 }
180
181 /* Return a size tree for VARNO corresponding to OSI. If WHOLE is true, return
182 the whole object size. Use this for building size expressions based on size
183 of VARNO. */
184
185 static inline tree
186 object_sizes_get (struct object_size_info *osi, unsigned varno,
187 bool whole = false)
188 {
189 tree ret;
190 int object_size_type = osi->object_size_type;
191
192 if (whole)
193 ret = object_sizes[object_size_type][varno].wholesize;
194 else
195 ret = object_sizes[object_size_type][varno].size;
196
197 if (object_size_type & OST_DYNAMIC)
198 {
199 if (TREE_CODE (ret) == MODIFY_EXPR)
200 return TREE_OPERAND (ret, 0);
201 else if (TREE_CODE (ret) == TREE_VEC)
202 return TREE_VEC_ELT (ret, TREE_VEC_LENGTH (ret) - 1);
203 else
204 gcc_checking_assert (size_usable_p (ret));
205 }
206
207 return ret;
208 }
209
210 /* Initialize the size and whole size for VARNO corresponding to OSI to VAL and WHOLEVAL. */
211
212 static inline void
213 object_sizes_initialize (struct object_size_info *osi, unsigned varno,
214 tree val, tree wholeval)
215 {
216 int object_size_type = osi->object_size_type;
217
218 object_sizes[object_size_type][varno].size = val;
219 object_sizes[object_size_type][varno].wholesize = wholeval;
220 }
221
222 /* Bind the size expression EXPR to the SSA name NAME: return a MODIFY_EXPR
223 assigning EXPR to NAME or, for PHI nodes, store NAME as the last element of the TREE_VEC EXPR and return it. */
224
225 static tree
226 bundle_sizes (tree name, tree expr)
227 {
228 gcc_checking_assert (TREE_TYPE (name) == sizetype);
229
230 if (TREE_CODE (expr) == TREE_VEC)
231 {
232 TREE_VEC_ELT (expr, TREE_VEC_LENGTH (expr) - 1) = name;
233 return expr;
234 }
235
236 gcc_checking_assert (types_compatible_p (TREE_TYPE (expr), sizetype));
237 return build2 (MODIFY_EXPR, sizetype, name, expr);
238 }
239
240 /* Set size for VARNO corresponding to OSI to VAL if it is the new minimum or
241 maximum. For static sizes, each element of TREE_VEC is always INTEGER_CST
242 throughout the computation. For dynamic sizes, each element may either be a
243 gimple variable, a MODIFY_EXPR or a TREE_VEC. The MODIFY_EXPR is for
244 expressions that need to be gimplified. TREE_VECs are special, they're
245 emitted only for GIMPLE_PHI and the PHI result variable is the last element
246 of the vector. */
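/* A sketch of the dynamic representation (hypothetical GIMPLE): for
     p_1 = PHI <&a(2), &b(3)>
   the size is held as the TREE_VEC {sizeof (a), sizeof (b), sz_1}, where
   SZ_1 is the SSA name that becomes the result of the size PHI emitted
   during gimplification, while a size that still needs gimplification,
   say sz_1 - 4, is held as the MODIFY_EXPR sz_2 = sz_1 - 4.  */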
247
248 static bool
249 object_sizes_set (struct object_size_info *osi, unsigned varno, tree val,
250 tree wholeval)
251 {
252 int object_size_type = osi->object_size_type;
253 object_size osize = object_sizes[object_size_type][varno];
254 bool changed = true;
255
256 tree oldval = osize.size;
257 tree old_wholeval = osize.wholesize;
258
259 if (object_size_type & OST_DYNAMIC)
260 {
261 if (bitmap_bit_p (osi->reexamine, varno))
262 {
263 if (size_unknown_p (val, object_size_type))
264 {
265 oldval = object_sizes_get (osi, varno);
266 old_wholeval = object_sizes_get (osi, varno, true);
267 bitmap_set_bit (osi->unknowns, SSA_NAME_VERSION (oldval));
268 bitmap_set_bit (osi->unknowns, SSA_NAME_VERSION (old_wholeval));
269 bitmap_clear_bit (osi->reexamine, varno);
270 }
271 else
272 {
273 val = bundle_sizes (oldval, val);
274 wholeval = bundle_sizes (old_wholeval, wholeval);
275 }
276 }
277 else
278 {
279 gcc_checking_assert (size_initval_p (oldval, object_size_type));
280 gcc_checking_assert (size_initval_p (old_wholeval,
281 object_size_type));
282 /* For dynamic object sizes, all object sizes that are not gimple
283 variables will need to be gimplified. */
284 if (wholeval != val && !size_usable_p (wholeval))
285 {
286 bitmap_set_bit (osi->reexamine, varno);
287 wholeval = bundle_sizes (make_ssa_name (sizetype), wholeval);
288 }
289 if (!size_usable_p (val))
290 {
291 bitmap_set_bit (osi->reexamine, varno);
292 tree newval = bundle_sizes (make_ssa_name (sizetype), val);
293 if (val == wholeval)
294 wholeval = newval;
295 val = newval;
296 }
297 /* If the new value is a temporary variable, mark it for
298 reexamination. */
299 else if (TREE_CODE (val) == SSA_NAME && !SSA_NAME_DEF_STMT (val))
300 bitmap_set_bit (osi->reexamine, varno);
301 }
302 }
303 else
304 {
305 enum tree_code code = (object_size_type & OST_MINIMUM
306 ? MIN_EXPR : MAX_EXPR);
307
308 val = size_binop (code, val, oldval);
309 wholeval = size_binop (code, wholeval, old_wholeval);
310 changed = (tree_int_cst_compare (val, oldval) != 0
311 || tree_int_cst_compare (old_wholeval, wholeval) != 0);
312 }
313
314 object_sizes[object_size_type][varno].size = val;
315 object_sizes[object_size_type][varno].wholesize = wholeval;
316
317 return changed;
318 }
319
320 /* Set temporary SSA names for object size and whole size to resolve dependency
321 loops in dynamic size computation. */
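/* For example (hypothetical GIMPLE): with a pointer advanced in a loop,
     p_1 = PHI <&buf(2), p_2(4)>;  ...  p_2 = p_1 + 4;
   the size of P_1 depends on itself, so a fresh SSA name is installed as
   a placeholder here and only resolved once the whole cycle has been
   examined.  */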
322
323 static inline void
324 object_sizes_set_temp (struct object_size_info *osi, unsigned varno)
325 {
326 tree val = object_sizes_get (osi, varno);
327
328 if (size_initval_p (val, osi->object_size_type))
329 object_sizes_set (osi, varno,
330 make_ssa_name (sizetype),
331 make_ssa_name (sizetype));
332 }
333
334 /* Initialize OFFSET_LIMIT variable. */
335 static void
336 init_offset_limit (void)
337 {
338 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
339 offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
340 else
341 offset_limit = -1;
342 offset_limit /= 2;
343 }
344
345 /* Return the number of bytes remaining to the end of an object of size SZ
346 when accessed at offset OFFSET into it. If WHOLESIZE is not NULL_TREE, use
347 it to get the net offset of the pointer, which should always be positive and hence within OFFSET_LIMIT for valid offsets. */
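/* A worked instance of the restructuring done below: for WHOLESIZE == 16,
   SZ == 4 (i.e. the pointer is 12 bytes into the object) and OFFSET == -8,
   the net offset is WHOLESIZE + OFFSET - SZ == 16 - 8 - 4 == 4 and the
   result is 16 - 4 == 12 bytes, as expected for a pointer moved back to
   offset 4 of the object.  */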
348
349 static tree
350 size_for_offset (tree sz, tree offset, tree wholesize = NULL_TREE)
351 {
352 gcc_checking_assert (types_compatible_p (TREE_TYPE (sz), sizetype));
353
354 /* For negative offsets, if we have a distinct WHOLESIZE, use it to get a net
355 offset from the whole object. */
356 if (wholesize && wholesize != sz
357 && (TREE_CODE (sz) != INTEGER_CST
358 || TREE_CODE (wholesize) != INTEGER_CST
359 || tree_int_cst_compare (sz, wholesize)))
360 {
361 gcc_checking_assert (types_compatible_p (TREE_TYPE (wholesize),
362 sizetype));
363
364 /* Restructure SZ - OFFSET as
365 WHOLESIZE - (WHOLESIZE + OFFSET - SZ) so that the offset part, i.e.
366 WHOLESIZE + OFFSET - SZ is only allowed to be positive. */
367 tree tmp = size_binop (MAX_EXPR, wholesize, sz);
368 offset = fold_build2 (PLUS_EXPR, sizetype, tmp, offset);
369 offset = fold_build2 (MINUS_EXPR, sizetype, offset, sz);
370 sz = tmp;
371 }
372
373 /* Safe to convert now, since a valid net offset should be non-negative. */
374 if (!useless_type_conversion_p (sizetype, TREE_TYPE (offset)))
375 offset = fold_convert (sizetype, offset);
376
377 if (TREE_CODE (offset) == INTEGER_CST)
378 {
379 if (integer_zerop (offset))
380 return sz;
381
382 /* Negative or too large offset even after adjustment, cannot be within
383 bounds of an object. */
384 if (compare_tree_int (offset, offset_limit) > 0)
385 return size_zero_node;
386 }
387
388 return size_binop (MINUS_EXPR, size_binop (MAX_EXPR, sz, offset), offset);
389 }
390
391 /* Compute offset of EXPR within VAR. Return error_mark_node
392 if unknown. */
393
394 static tree
395 compute_object_offset (tree expr, const_tree var)
396 {
397 enum tree_code code = PLUS_EXPR;
398 tree base, off, t;
399
400 if (expr == var)
401 return size_zero_node;
402
403 switch (TREE_CODE (expr))
404 {
405 case COMPONENT_REF:
406 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
407 if (base == error_mark_node)
408 return base;
409
410 t = TREE_OPERAND (expr, 1);
411 off = size_binop (PLUS_EXPR,
412 component_ref_field_offset (expr),
413 size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
414 / BITS_PER_UNIT));
415 break;
416
417 case REALPART_EXPR:
418 CASE_CONVERT:
419 case VIEW_CONVERT_EXPR:
420 case NON_LVALUE_EXPR:
421 return compute_object_offset (TREE_OPERAND (expr, 0), var);
422
423 case IMAGPART_EXPR:
424 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
425 if (base == error_mark_node)
426 return base;
427
428 off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
429 break;
430
431 case ARRAY_REF:
432 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
433 if (base == error_mark_node)
434 return base;
435
436 t = TREE_OPERAND (expr, 1);
437 tree low_bound, unit_size;
438 low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
439 unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
440 if (! integer_zerop (low_bound))
441 t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
442 if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
443 {
444 code = MINUS_EXPR;
445 t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
446 }
447 t = fold_convert (sizetype, t);
448 off = size_binop (MULT_EXPR, unit_size, t);
449 break;
450
451 case MEM_REF:
452 gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
453 return wide_int_to_tree (sizetype, mem_ref_offset (expr));
454
455 default:
456 return error_mark_node;
457 }
458
459 return size_binop (code, base, off);
460 }
461
462 /* Returns the size of the object designated by DECL considering its
463 initializer if it either has one or if it would not affect its size,
464 otherwise the size of the object without the initializer when MIN
465 is true, else null. An object's initializer affects the object's
466 size if it's a struct type with a flexible array member. */
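/* For example (an illustrative declaration, not from this file):
     struct S { int n; char a[]; } s = { 1, { 'a', 'b', 'c', 'd' } };
   The initializer extends S beyond TYPE_SIZE_UNIT (struct S), so the
   returned size is byte_position (a) plus whatever component_ref_size
   computes for the initialized flexible array member.  */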
467
468 tree
469 decl_init_size (tree decl, bool min)
470 {
471 tree size = DECL_SIZE_UNIT (decl);
472 tree type = TREE_TYPE (decl);
473 if (TREE_CODE (type) != RECORD_TYPE)
474 return size;
475
476 tree last = last_field (type);
477 if (!last)
478 return size;
479
480 tree last_type = TREE_TYPE (last);
481 if (TREE_CODE (last_type) != ARRAY_TYPE
482 || TYPE_SIZE (last_type))
483 return size;
484
485 /* Use TYPE_SIZE_UNIT; DECL_SIZE_UNIT sometimes reflects the size
486 of the initializer and sometimes doesn't. */
487 size = TYPE_SIZE_UNIT (type);
488 tree ref = build3 (COMPONENT_REF, type, decl, last, NULL_TREE);
489 tree compsize = component_ref_size (ref);
490 if (!compsize)
491 return min ? size : NULL_TREE;
492
493 /* The size includes tail padding and initializer elements. */
494 tree pos = byte_position (last);
495 size = fold_build2 (PLUS_EXPR, TREE_TYPE (size), pos, compsize);
496 return size;
497 }
498
499 /* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
500 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
501 If the size cannot be determined, set *PSIZE to size_unknown (object_size_type) and return false. */
502
503 static bool
504 addr_object_size (struct object_size_info *osi, const_tree ptr,
505 int object_size_type, tree *psize, tree *pwholesize)
506 {
507 tree pt_var, pt_var_size = NULL_TREE, pt_var_wholesize = NULL_TREE;
508 tree var_size, bytes, wholebytes;
509
510 gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);
511
512 /* Set to unknown and overwrite just before returning if the size
513 could be determined. */
514 *psize = size_unknown (object_size_type);
515 if (pwholesize)
516 *pwholesize = size_unknown (object_size_type);
517
518 pt_var = TREE_OPERAND (ptr, 0);
519 while (handled_component_p (pt_var))
520 pt_var = TREE_OPERAND (pt_var, 0);
521
522 if (!pt_var)
523 return false;
524
525 if (TREE_CODE (pt_var) == MEM_REF)
526 {
527 tree sz, wholesize;
528
529 if (!osi || (object_size_type & OST_SUBOBJECT) != 0
530 || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
531 {
532 compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
533 object_size_type & ~OST_SUBOBJECT, &sz);
534 wholesize = sz;
535 }
536 else
537 {
538 tree var = TREE_OPERAND (pt_var, 0);
539 if (osi->pass == 0)
540 collect_object_sizes_for (osi, var);
541 if (bitmap_bit_p (computed[object_size_type],
542 SSA_NAME_VERSION (var)))
543 {
544 sz = object_sizes_get (osi, SSA_NAME_VERSION (var));
545 wholesize = object_sizes_get (osi, SSA_NAME_VERSION (var), true);
546 }
547 else
548 sz = wholesize = size_unknown (object_size_type);
549 }
550 if (!size_unknown_p (sz, object_size_type))
551 sz = size_for_offset (sz, TREE_OPERAND (pt_var, 1), wholesize);
552
553 if (!size_unknown_p (sz, object_size_type)
554 && (TREE_CODE (sz) != INTEGER_CST
555 || compare_tree_int (sz, offset_limit) < 0))
556 {
557 pt_var_size = sz;
558 pt_var_wholesize = wholesize;
559 }
560 }
561 else if (DECL_P (pt_var))
562 {
563 pt_var_size = pt_var_wholesize
564 = decl_init_size (pt_var, object_size_type & OST_MINIMUM);
565 if (!pt_var_size)
566 return false;
567 }
568 else if (TREE_CODE (pt_var) == STRING_CST)
569 pt_var_size = pt_var_wholesize = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
570 else
571 return false;
572
573 if (pt_var_size)
574 {
575 /* Validate the size determined above if it is a constant. */
576 if (TREE_CODE (pt_var_size) == INTEGER_CST
577 && compare_tree_int (pt_var_size, offset_limit) >= 0)
578 return false;
579 }
580
581 if (pt_var != TREE_OPERAND (ptr, 0))
582 {
583 tree var;
584
585 if (object_size_type & OST_SUBOBJECT)
586 {
587 var = TREE_OPERAND (ptr, 0);
588
589 while (var != pt_var
590 && TREE_CODE (var) != BIT_FIELD_REF
591 && TREE_CODE (var) != COMPONENT_REF
592 && TREE_CODE (var) != ARRAY_REF
593 && TREE_CODE (var) != ARRAY_RANGE_REF
594 && TREE_CODE (var) != REALPART_EXPR
595 && TREE_CODE (var) != IMAGPART_EXPR)
596 var = TREE_OPERAND (var, 0);
597 if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
598 var = TREE_OPERAND (var, 0);
599 if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
600 || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
601 || (pt_var_size && TREE_CODE (pt_var_size) == INTEGER_CST
602 && tree_int_cst_lt (pt_var_size,
603 TYPE_SIZE_UNIT (TREE_TYPE (var)))))
604 var = pt_var;
605 else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
606 {
607 tree v = var;
608 /* For &X->fld, compute object size only if fld isn't the last
609 field, as struct { int i; char c[1]; } is often used instead
610 of flexible array member. */
611 while (v && v != pt_var)
612 switch (TREE_CODE (v))
613 {
614 case ARRAY_REF:
615 if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0))))
616 {
617 tree domain
618 = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
619 if (domain && TYPE_MAX_VALUE (domain))
620 {
621 v = NULL_TREE;
622 break;
623 }
624 }
625 v = TREE_OPERAND (v, 0);
626 break;
627 case REALPART_EXPR:
628 case IMAGPART_EXPR:
629 v = NULL_TREE;
630 break;
631 case COMPONENT_REF:
632 if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
633 {
634 v = NULL_TREE;
635 break;
636 }
637 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
638 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
639 != UNION_TYPE
640 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
641 != QUAL_UNION_TYPE)
642 break;
643 else
644 v = TREE_OPERAND (v, 0);
645 if (TREE_CODE (v) == COMPONENT_REF
646 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
647 == RECORD_TYPE)
648 {
649 tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
650 for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
651 if (TREE_CODE (fld_chain) == FIELD_DECL)
652 break;
653
654 if (fld_chain)
655 {
656 v = NULL_TREE;
657 break;
658 }
659 v = TREE_OPERAND (v, 0);
660 }
661 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
662 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
663 != UNION_TYPE
664 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
665 != QUAL_UNION_TYPE)
666 break;
667 else
668 v = TREE_OPERAND (v, 0);
669 if (v != pt_var)
670 v = NULL_TREE;
671 else
672 v = pt_var;
673 break;
674 default:
675 v = pt_var;
676 break;
677 }
678 if (v == pt_var)
679 var = pt_var;
680 }
681 }
682 else
683 var = pt_var;
684
685 if (var != pt_var)
686 {
687 var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
688 if (!TREE_CONSTANT (var_size))
689 var_size = get_or_create_ssa_default_def (cfun, var_size);
690 if (!var_size)
691 return false;
692 }
693 else if (!pt_var_size)
694 return false;
695 else
696 var_size = pt_var_size;
697 bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
698 if (bytes != error_mark_node)
699 {
700 bytes = size_for_offset (var_size, bytes);
701 if (var != pt_var && pt_var_size && TREE_CODE (pt_var) == MEM_REF)
702 {
703 tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0),
704 pt_var);
705 if (bytes2 != error_mark_node)
706 {
707 bytes2 = size_for_offset (pt_var_size, bytes2);
708 bytes = size_binop (MIN_EXPR, bytes, bytes2);
709 }
710 }
711 }
712 else
713 bytes = size_unknown (object_size_type);
714
715 wholebytes
716 = object_size_type & OST_SUBOBJECT ? var_size : pt_var_wholesize;
717 }
718 else if (!pt_var_size)
719 return false;
720 else
721 {
722 bytes = pt_var_size;
723 wholebytes = pt_var_wholesize;
724 }
725
726 if (!size_unknown_p (bytes, object_size_type)
727 && size_valid_p (bytes, object_size_type)
728 && !size_unknown_p (wholebytes, object_size_type)
729 && size_valid_p (wholebytes, object_size_type))
730 {
731 *psize = bytes;
732 if (pwholesize)
733 *pwholesize = wholebytes;
734 return true;
735 }
736
737 return false;
738 }
739
740
741 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
742 Handles calls to functions declared with attribute alloc_size.
743 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
744 If unknown, return size_unknown (object_size_type). */
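/* For example (hypothetical declarations):
     void *my_alloc (size_t) __attribute__ ((alloc_size (1)));
     void *my_calloc (size_t, size_t) __attribute__ ((alloc_size (1, 2)));
   give my_alloc (n) object size N and my_calloc (n, m) object size N * M;
   the alloca-style builtins are handled below as if they carried
   alloc_size (1).  */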
745
746 static tree
747 alloc_object_size (const gcall *call, int object_size_type)
748 {
749 gcc_assert (is_gimple_call (call));
750
751 tree calltype;
752 tree callfn = gimple_call_fndecl (call);
753 if (callfn)
754 calltype = TREE_TYPE (callfn);
755 else
756 calltype = gimple_call_fntype (call);
757
758 if (!calltype)
759 return size_unknown (object_size_type);
760
761 /* Zero-based positions of the alloc_size arguments; -1 if not present. */
762 int arg1 = -1, arg2 = -1;
763 tree alloc_size = lookup_attribute ("alloc_size",
764 TYPE_ATTRIBUTES (calltype));
765 if (alloc_size && TREE_VALUE (alloc_size))
766 {
767 tree p = TREE_VALUE (alloc_size);
768
769 arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
770 if (TREE_CHAIN (p))
771 arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
772 }
773 else if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
774 && callfn
775 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callfn)))
776 arg1 = 0;
777
778 /* Non-const arguments are OK here, let the caller handle constness. */
779 if (arg1 < 0
780 || (unsigned) arg1 >= gimple_call_num_args (call)
781 || (arg2 >= 0 && (unsigned) arg2 >= gimple_call_num_args (call)))
782 return size_unknown (object_size_type);
783
784 tree targ1 = gimple_call_arg (call, arg1);
785 if (!INTEGRAL_TYPE_P (TREE_TYPE (targ1))
786 || TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (sizetype))
787 return size_unknown (object_size_type);
788 targ1 = fold_convert (sizetype, targ1);
789 tree bytes = NULL_TREE;
790 if (arg2 >= 0)
791 {
792 tree targ2 = gimple_call_arg (call, arg2);
793 if (!INTEGRAL_TYPE_P (TREE_TYPE (targ2))
794 || TYPE_PRECISION (TREE_TYPE (targ2)) > TYPE_PRECISION (sizetype))
795 return size_unknown (object_size_type);
796 targ2 = fold_convert (sizetype, targ2);
797 bytes = size_binop (MULT_EXPR, targ1, targ2);
798 }
799 else
800 bytes = targ1;
801
802 return bytes ? bytes : size_unknown (object_size_type);
803 }
804
805
806 /* If object size is propagated from one of function's arguments directly
807 to its return value, return that argument for GIMPLE_CALL statement CALL.
808 Otherwise return NULL. */
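/* For instance, memcpy returns its first argument, which
   gimple_call_return_flags reports via ERF_RETURNS_ARG, so the object
   size of its result is simply that of the argument; as noted below,
   __builtin_assume_aligned gets the same treatment explicitly.  */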
809
810 static tree
811 pass_through_call (const gcall *call)
812 {
813 unsigned rf = gimple_call_return_flags (call);
814 if (rf & ERF_RETURNS_ARG)
815 {
816 unsigned argnum = rf & ERF_RETURN_ARG_MASK;
817 if (argnum < gimple_call_num_args (call))
818 return gimple_call_arg (call, argnum);
819 }
820
821 /* __builtin_assume_aligned is intentionally not marked RET1. */
822 if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED))
823 return gimple_call_arg (call, 0);
824
825 return NULL_TREE;
826 }
827
828 /* Emit PHI nodes for the size expressions SIZE and WHOLESIZE of the PHI statement STMT. */
829
830 static void
831 emit_phi_nodes (gimple *stmt, tree size, tree wholesize)
832 {
833 tree phires;
834 gphi *wholephi = NULL;
835
836 if (wholesize != size)
837 {
838 phires = TREE_VEC_ELT (wholesize, TREE_VEC_LENGTH (wholesize) - 1);
839 wholephi = create_phi_node (phires, gimple_bb (stmt));
840 }
841
842 phires = TREE_VEC_ELT (size, TREE_VEC_LENGTH (size) - 1);
843 gphi *phi = create_phi_node (phires, gimple_bb (stmt));
844 gphi *obj_phi = as_a <gphi *> (stmt);
845
846 gcc_checking_assert (TREE_CODE (wholesize) == TREE_VEC);
847 gcc_checking_assert (TREE_CODE (size) == TREE_VEC);
848
849 for (unsigned i = 0; i < gimple_phi_num_args (stmt); i++)
850 {
851 gimple_seq seq = NULL;
852 tree wsz = TREE_VEC_ELT (wholesize, i);
853 tree sz = TREE_VEC_ELT (size, i);
854
855 /* If we built an expression, we will need to build statements
856 and insert them on the edge right away. */
857 if (TREE_CODE (wsz) != SSA_NAME)
858 wsz = force_gimple_operand (wsz, &seq, true, NULL);
859 if (TREE_CODE (sz) != SSA_NAME)
860 {
861 gimple_seq s;
862 sz = force_gimple_operand (sz, &s, true, NULL);
863 gimple_seq_add_seq (&seq, s);
864 }
865
866 if (seq)
867 gsi_insert_seq_on_edge (gimple_phi_arg_edge (obj_phi, i), seq);
868
869 if (wholephi)
870 add_phi_arg (wholephi, wsz,
871 gimple_phi_arg_edge (obj_phi, i),
872 gimple_phi_arg_location (obj_phi, i));
873
874 add_phi_arg (phi, sz,
875 gimple_phi_arg_edge (obj_phi, i),
876 gimple_phi_arg_location (obj_phi, i));
877 }
878 }
879
880 /* Descend through EXPR and return size_unknown if it uses any SSA variable
881 generated by object_sizes_set or object_sizes_set_temp that turned out to
882 be size_unknown, as noted in OSI->unknowns. */
883
884 static tree
885 propagate_unknowns (object_size_info *osi, tree expr)
886 {
887 int object_size_type = osi->object_size_type;
888
889 switch (TREE_CODE (expr))
890 {
891 case SSA_NAME:
892 if (bitmap_bit_p (osi->unknowns, SSA_NAME_VERSION (expr)))
893 return size_unknown (object_size_type);
894 return expr;
895
896 case MIN_EXPR:
897 case MAX_EXPR:
898 {
899 tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 0));
900 if (size_unknown_p (res, object_size_type))
901 return res;
902
903 res = propagate_unknowns (osi, TREE_OPERAND (expr, 1));
904 if (size_unknown_p (res, object_size_type))
905 return res;
906
907 return expr;
908 }
909 case MODIFY_EXPR:
910 {
911 tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 1));
912 if (size_unknown_p (res, object_size_type))
913 return res;
914 return expr;
915 }
916 case TREE_VEC:
917 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
918 {
919 tree res = propagate_unknowns (osi, TREE_VEC_ELT (expr, i));
920 if (size_unknown_p (res, object_size_type))
921 return res;
922 }
923 return expr;
924 case PLUS_EXPR:
925 case MINUS_EXPR:
926 {
927 tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 0));
928 if (size_unknown_p (res, object_size_type))
929 return res;
930
931 return expr;
932 }
933 default:
934 return expr;
935 }
936 }
937
938 /* Walk through size expressions that need reexamination and generate
939 statements for them. */
940
941 static void
942 gimplify_size_expressions (object_size_info *osi)
943 {
944 int object_size_type = osi->object_size_type;
945 bitmap_iterator bi;
946 unsigned int i;
947 bool changed;
948
949 /* Step 1: Propagate unknowns into expressions. */
950 bitmap reexamine = BITMAP_ALLOC (NULL);
951 bitmap_copy (reexamine, osi->reexamine);
952 do
953 {
954 changed = false;
955 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
956 {
957 object_size cur = object_sizes_get_raw (osi, i);
958
959 if (size_unknown_p (propagate_unknowns (osi, cur.size),
960 object_size_type)
961 || size_unknown_p (propagate_unknowns (osi, cur.wholesize),
962 object_size_type))
963 {
964 object_sizes_set (osi, i,
965 size_unknown (object_size_type),
966 size_unknown (object_size_type));
967 changed = true;
968 }
969 }
970 bitmap_copy (reexamine, osi->reexamine);
971 }
972 while (changed);
973
974 /* Release all unknowns. */
975 EXECUTE_IF_SET_IN_BITMAP (osi->unknowns, 0, i, bi)
976 release_ssa_name (ssa_name (i));
977
978 /* Expand all size expressions to put their definitions close to the objects
979 for which size is being computed. */
980 EXECUTE_IF_SET_IN_BITMAP (osi->reexamine, 0, i, bi)
981 {
982 gimple_seq seq = NULL;
983 object_size osize = object_sizes_get_raw (osi, i);
984
985 gimple *stmt = SSA_NAME_DEF_STMT (ssa_name (i));
986 enum gimple_code code = gimple_code (stmt);
987
988 /* PHI nodes need special attention. */
989 if (code == GIMPLE_PHI)
990 emit_phi_nodes (stmt, osize.size, osize.wholesize);
991 else
992 {
993 tree size_expr = NULL_TREE;
994
995 /* Bundle wholesize in with the size to gimplify if needed. */
996 if (osize.wholesize != osize.size
997 && !size_usable_p (osize.wholesize))
998 size_expr = size_binop (COMPOUND_EXPR,
999 osize.wholesize,
1000 osize.size);
1001 else if (!size_usable_p (osize.size))
1002 size_expr = osize.size;
1003
1004 if (size_expr)
1005 {
1006 gimple_stmt_iterator gsi;
1007 if (code == GIMPLE_NOP)
1008 gsi = gsi_start_bb (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
1009 else
1010 gsi = gsi_for_stmt (stmt);
1011
1012 force_gimple_operand (size_expr, &seq, true, NULL);
1013 gsi_insert_seq_before (&gsi, seq, GSI_CONTINUE_LINKING);
1014 }
1015 }
1016
1017 /* We're done, so replace the MODIFY_EXPRs with the SSA names. */
1018 object_sizes_initialize (osi, i,
1019 object_sizes_get (osi, i),
1020 object_sizes_get (osi, i, true));
1021 }
1022 }
1023
1024 /* Compute __builtin_object_size value for PTR and set *PSIZE to
1025 the resulting value. OBJECT_SIZE_TYPE is the second argument
1026 to __builtin_object_size.
1028 Returns true on success and false when the object size could not
1029 be determined. */
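/* A minimal usage sketch (USE_SIZE is a hypothetical consumer):

     tree size;
     if (compute_builtin_object_size (ptr, 1, &size))
       use_size (size);   // bytes to the end of the subobject PTR points to

   OBJECT_SIZE_TYPE 1 here selects the maximum subobject size, matching
   __builtin_object_size (ptr, 1).  */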
1030
1031 bool
1032 compute_builtin_object_size (tree ptr, int object_size_type,
1033 tree *psize)
1034 {
1035 gcc_assert (object_size_type >= 0 && object_size_type < OST_END);
1036
1037 /* Set to unknown and overwrite just before returning if the size
1038 could be determined. */
1039 *psize = size_unknown (object_size_type);
1040
1041 if (! offset_limit)
1042 init_offset_limit ();
1043
1044 if (TREE_CODE (ptr) == ADDR_EXPR)
1045 return addr_object_size (NULL, ptr, object_size_type, psize);
1046
1047 if (TREE_CODE (ptr) != SSA_NAME
1048 || !POINTER_TYPE_P (TREE_TYPE (ptr)))
1049 return false;
1050
1051 if (computed[object_size_type] == NULL)
1052 {
1053 if (optimize || object_size_type & OST_SUBOBJECT)
1054 return false;
1055
1056 /* When not optimizing, rather than failing, make a small effort
1057 to determine the object size without the full benefit of
1058 the (costly) computation below. */
1059 gimple *def = SSA_NAME_DEF_STMT (ptr);
1060 if (gimple_code (def) == GIMPLE_ASSIGN)
1061 {
1062 tree_code code = gimple_assign_rhs_code (def);
1063 if (code == POINTER_PLUS_EXPR)
1064 {
1065 tree offset = gimple_assign_rhs2 (def);
1066 ptr = gimple_assign_rhs1 (def);
1067
1068 if (((object_size_type & OST_DYNAMIC)
1069 || (tree_fits_shwi_p (offset)
1070 && compare_tree_int (offset, offset_limit) <= 0))
1071 && compute_builtin_object_size (ptr, object_size_type,
1072 psize))
1073 {
1074 *psize = size_for_offset (*psize, offset);
1075 return true;
1076 }
1077 }
1078 }
1079 return false;
1080 }
1081
1082 struct object_size_info osi;
1083 osi.object_size_type = object_size_type;
1084 if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
1085 {
1086 bitmap_iterator bi;
1087 unsigned int i;
1088
1089 object_sizes_grow (object_size_type);
1090 if (dump_file)
1091 {
1092 fprintf (dump_file, "Computing %s %s%sobject size for ",
1093 (object_size_type & OST_MINIMUM) ? "minimum" : "maximum",
1094 (object_size_type & OST_DYNAMIC) ? "dynamic " : "",
1095 (object_size_type & OST_SUBOBJECT) ? "sub" : "");
1096 print_generic_expr (dump_file, ptr, dump_flags);
1097 fprintf (dump_file, ":\n");
1098 }
1099
1100 osi.visited = BITMAP_ALLOC (NULL);
1101 osi.reexamine = BITMAP_ALLOC (NULL);
1102
1103 if (object_size_type & OST_DYNAMIC)
1104 osi.unknowns = BITMAP_ALLOC (NULL);
1105 else
1106 {
1107 osi.depths = NULL;
1108 osi.stack = NULL;
1109 osi.tos = NULL;
1110 }
1111
1112 /* First pass: walk UD chains, compute object sizes that
1113 can be computed. osi.reexamine bitmap at the end will
1114 contain what variables were found in dependency cycles
1115 and therefore need to be reexamined. */
1116 osi.pass = 0;
1117 osi.changed = false;
1118 collect_object_sizes_for (&osi, ptr);
1119
1120 if (object_size_type & OST_DYNAMIC)
1121 {
1122 osi.pass = 1;
1123 gimplify_size_expressions (&osi);
1124 BITMAP_FREE (osi.unknowns);
1125 bitmap_clear (osi.reexamine);
1126 }
1127
1128 /* Second pass: keep recomputing object sizes of variables
1129 that need reexamination, until no object sizes are
1130 increased or all object sizes are computed. */
1131 if (! bitmap_empty_p (osi.reexamine))
1132 {
1133 bitmap reexamine = BITMAP_ALLOC (NULL);
1134
1135 /* If looking for minimum instead of maximum object size,
1136 detect cases where a pointer is increased in a loop.
1137 Although even without this detection pass 2 would eventually
1138 terminate, it could take a long time. If a pointer is
1139 increasing this way, we need to assume 0 object size.
1140 E.g. p = &buf[0]; while (cond) p = p + 4; */
1141 if (object_size_type & OST_MINIMUM)
1142 {
1143 osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
1144 osi.stack = XNEWVEC (unsigned int, num_ssa_names);
1145 osi.tos = osi.stack;
1146 osi.pass = 1;
1147 /* collect_object_sizes_for is changing
1148 osi.reexamine bitmap, so iterate over a copy. */
1149 bitmap_copy (reexamine, osi.reexamine);
1150 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
1151 if (bitmap_bit_p (osi.reexamine, i))
1152 check_for_plus_in_loops (&osi, ssa_name (i));
1153
1154 free (osi.depths);
1155 osi.depths = NULL;
1156 free (osi.stack);
1157 osi.stack = NULL;
1158 osi.tos = NULL;
1159 }
1160
1161 do
1162 {
1163 osi.pass = 2;
1164 osi.changed = false;
1165 /* collect_object_sizes_for is changing
1166 osi.reexamine bitmap, so iterate over a copy. */
1167 bitmap_copy (reexamine, osi.reexamine);
1168 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
1169 if (bitmap_bit_p (osi.reexamine, i))
1170 {
1171 collect_object_sizes_for (&osi, ssa_name (i));
1172 if (dump_file && (dump_flags & TDF_DETAILS))
1173 {
1174 fprintf (dump_file, "Reexamining ");
1175 print_generic_expr (dump_file, ssa_name (i),
1176 dump_flags);
1177 fprintf (dump_file, "\n");
1178 }
1179 }
1180 }
1181 while (osi.changed);
1182
1183 BITMAP_FREE (reexamine);
1184 }
1185 EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
1186 bitmap_set_bit (computed[object_size_type], i);
1187
1188 /* Debugging dumps. */
1189 if (dump_file)
1190 {
1191 EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
1192 if (!object_sizes_unknown_p (object_size_type, i))
1193 {
1194 print_generic_expr (dump_file, ssa_name (i),
1195 dump_flags);
1196 fprintf (dump_file,
1197 ": %s %s%sobject size ",
1198 ((object_size_type & OST_MINIMUM) ? "minimum"
1199 : "maximum"),
1200 (object_size_type & OST_DYNAMIC) ? "dynamic " : "",
1201 (object_size_type & OST_SUBOBJECT) ? "sub" : "");
1202 print_generic_expr (dump_file, object_sizes_get (&osi, i),
1203 dump_flags);
1204 fprintf (dump_file, "\n");
1205 }
1206 }
1207
1208 BITMAP_FREE (osi.reexamine);
1209 BITMAP_FREE (osi.visited);
1210 }
1211
1212 *psize = object_sizes_get (&osi, SSA_NAME_VERSION (ptr));
1213 return !size_unknown_p (*psize, object_size_type);
1214 }
1215
1216 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
1217
1218 static void
1219 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
1220 {
1221 int object_size_type = osi->object_size_type;
1222 unsigned int varno = SSA_NAME_VERSION (ptr);
1223 tree bytes, wholesize;
1224
1225 gcc_assert (!object_sizes_unknown_p (object_size_type, varno));
1226 gcc_assert (osi->pass == 0);
1227
1228 if (TREE_CODE (value) == WITH_SIZE_EXPR)
1229 value = TREE_OPERAND (value, 0);
1230
1231 /* Pointer variables should have been handled by merge_object_sizes. */
1232 gcc_assert (TREE_CODE (value) != SSA_NAME
1233 || !POINTER_TYPE_P (TREE_TYPE (value)));
1234
1235 if (TREE_CODE (value) == ADDR_EXPR)
1236 addr_object_size (osi, value, object_size_type, &bytes, &wholesize);
1237 else
1238 bytes = wholesize = size_unknown (object_size_type);
1239
1240 object_sizes_set (osi, varno, bytes, wholesize);
1241 }
1242
1243
1244 /* Compute object_sizes for PTR, defined to the result of a call. */
1245
1246 static void
1247 call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
1248 {
1249 int object_size_type = osi->object_size_type;
1250 unsigned int varno = SSA_NAME_VERSION (ptr);
1251
1252 gcc_assert (is_gimple_call (call));
1253
1254 gcc_assert (!object_sizes_unknown_p (object_size_type, varno));
1255 gcc_assert (osi->pass == 0);
1256 tree bytes = alloc_object_size (call, object_size_type);
1257
1258 if (!size_valid_p (bytes, object_size_type))
1259 bytes = size_unknown (object_size_type);
1260
1261 object_sizes_set (osi, varno, bytes, bytes);
1262 }
1263
1264
1265 /* Compute object_sizes for PTR, defined to an unknown value. */
1266
1267 static void
1268 unknown_object_size (struct object_size_info *osi, tree ptr)
1269 {
1270 int object_size_type = osi->object_size_type;
1271 unsigned int varno = SSA_NAME_VERSION (ptr);
1272
1273 gcc_checking_assert (!object_sizes_unknown_p (object_size_type, varno));
1274 gcc_checking_assert (osi->pass == 0);
1275 tree bytes = size_unknown (object_size_type);
1276
1277 object_sizes_set (osi, varno, bytes, bytes);
1278 }
1279
1280
1281 /* Merge object sizes of ORIG + OFFSET into DEST. Return true if
1282 the object size might need reexamination later. */
1283
1284 static bool
1285 merge_object_sizes (struct object_size_info *osi, tree dest, tree orig)
1286 {
1287 int object_size_type = osi->object_size_type;
1288 unsigned int varno = SSA_NAME_VERSION (dest);
1289 tree orig_bytes, wholesize;
1290
1291 if (object_sizes_unknown_p (object_size_type, varno))
1292 return false;
1293
1294 if (osi->pass == 0)
1295 collect_object_sizes_for (osi, orig);
1296
1297 orig_bytes = object_sizes_get (osi, SSA_NAME_VERSION (orig));
1298 wholesize = object_sizes_get (osi, SSA_NAME_VERSION (orig), true);
1299
1300 if (object_sizes_set (osi, varno, orig_bytes, wholesize))
1301 osi->changed = true;
1302
1303 return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
1304 }
1305
1306
1307 /* Compute object_sizes for VAR, defined to the result of an assignment
1308 with operator POINTER_PLUS_EXPR. Return true if the object size might
1309 need reexamination later. */
1310
1311 static bool
1312 plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
1313 {
1314 int object_size_type = osi->object_size_type;
1315 unsigned int varno = SSA_NAME_VERSION (var);
1316 tree bytes, wholesize;
1317 tree op0, op1;
1318 bool reexamine = false;
1319
1320 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1321 {
1322 op0 = gimple_assign_rhs1 (stmt);
1323 op1 = gimple_assign_rhs2 (stmt);
1324 }
1325 else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
1326 {
1327 tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
1328 gcc_assert (TREE_CODE (rhs) == MEM_REF);
1329 op0 = TREE_OPERAND (rhs, 0);
1330 op1 = TREE_OPERAND (rhs, 1);
1331 }
1332 else
1333 gcc_unreachable ();
1334
1335 if (object_sizes_unknown_p (object_size_type, varno))
1336 return false;
1337
1338 /* Handle PTR + OFFSET here. */
1339 if (size_valid_p (op1, object_size_type)
1340 && (TREE_CODE (op0) == SSA_NAME || TREE_CODE (op0) == ADDR_EXPR))
1341 {
1342 if (TREE_CODE (op0) == SSA_NAME)
1343 {
1344 if (osi->pass == 0)
1345 collect_object_sizes_for (osi, op0);
1346
1347 bytes = object_sizes_get (osi, SSA_NAME_VERSION (op0));
1348 wholesize = object_sizes_get (osi, SSA_NAME_VERSION (op0), true);
1349 reexamine = bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (op0));
1350 }
1351 else
1352 {
1353 /* op0 will be ADDR_EXPR here. We should never come here during
1354 reexamination. */
1355 gcc_checking_assert (osi->pass == 0);
1356 addr_object_size (osi, op0, object_size_type, &bytes, &wholesize);
1357 }
1358
1359 /* size_for_offset doesn't make sense for -1 size, but it does for size 0
1360 since the wholesize could be non-zero and a negative offset could give
1361 a non-zero size. */
1362 if (size_unknown_p (bytes, 0))
1363 ;
1364 else if ((object_size_type & OST_DYNAMIC)
1365 || compare_tree_int (op1, offset_limit) <= 0)
1366 bytes = size_for_offset (bytes, op1, wholesize);
1367 /* In the static case, with a negative offset, the best estimate for
1368 minimum size is size_unknown but for maximum size, the wholesize is a
1369 better estimate than size_unknown. */
1370 else if (object_size_type & OST_MINIMUM)
1371 bytes = size_unknown (object_size_type);
1372 else
1373 bytes = wholesize;
1374 }
1375 else
1376 bytes = wholesize = size_unknown (object_size_type);
1377
1378 if (!size_valid_p (bytes, object_size_type)
1379 || !size_valid_p (wholesize, object_size_type))
1380 bytes = wholesize = size_unknown (object_size_type);
1381
1382 if (object_sizes_set (osi, varno, bytes, wholesize))
1383 osi->changed = true;
1384 return reexamine;
1385 }
1386
1387 /* Compute the dynamic object size for VAR. Return the result in SIZE and
1388 WHOLESIZE. */
1389
1390 static void
1391 dynamic_object_size (struct object_size_info *osi, tree var,
1392 tree *size, tree *wholesize)
1393 {
1394 int object_size_type = osi->object_size_type;
1395
1396 if (TREE_CODE (var) == SSA_NAME)
1397 {
1398 unsigned varno = SSA_NAME_VERSION (var);
1399
1400 collect_object_sizes_for (osi, var);
1401 *size = object_sizes_get (osi, varno);
1402 *wholesize = object_sizes_get (osi, varno, true);
1403 }
1404 else if (TREE_CODE (var) == ADDR_EXPR)
1405 addr_object_size (osi, var, object_size_type, size, wholesize);
1406 else
1407 *size = *wholesize = size_unknown (object_size_type);
1408 }
1409
1410 /* Compute object_sizes for VAR, defined at STMT, which is
1411 a COND_EXPR. Return true if the object size might need reexamination
1412 later. */
1413
1414 static bool
1415 cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
1416 {
1417 tree then_, else_;
1418 int object_size_type = osi->object_size_type;
1419 unsigned int varno = SSA_NAME_VERSION (var);
1420 bool reexamine = false;
1421
1422 gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);
1423
1424 if (object_sizes_unknown_p (object_size_type, varno))
1425 return false;
1426
1427 then_ = gimple_assign_rhs2 (stmt);
1428 else_ = gimple_assign_rhs3 (stmt);
1429
1430 if (object_size_type & OST_DYNAMIC)
1431 {
1432 tree then_size, then_wholesize, else_size, else_wholesize;
1433
1434 dynamic_object_size (osi, then_, &then_size, &then_wholesize);
1435 if (!size_unknown_p (then_size, object_size_type))
1436 dynamic_object_size (osi, else_, &else_size, &else_wholesize);
1437
1438 tree cond_size, cond_wholesize;
1439 if (size_unknown_p (then_size, object_size_type)
1440 || size_unknown_p (else_size, object_size_type))
1441 cond_size = cond_wholesize = size_unknown (object_size_type);
1442 else
1443 {
1444 cond_size = fold_build3 (COND_EXPR, sizetype,
1445 gimple_assign_rhs1 (stmt),
1446 then_size, else_size);
1447 cond_wholesize = fold_build3 (COND_EXPR, sizetype,
1448 gimple_assign_rhs1 (stmt),
1449 then_wholesize, else_wholesize);
1450 }
1451
1452 object_sizes_set (osi, varno, cond_size, cond_wholesize);
1453
1454 return false;
1455 }
1456
1457 if (TREE_CODE (then_) == SSA_NAME)
1458 reexamine |= merge_object_sizes (osi, var, then_);
1459 else
1460 expr_object_size (osi, var, then_);
1461
1462 if (object_sizes_unknown_p (object_size_type, varno))
1463 return reexamine;
1464
1465 if (TREE_CODE (else_) == SSA_NAME)
1466 reexamine |= merge_object_sizes (osi, var, else_);
1467 else
1468 expr_object_size (osi, var, else_);
1469
1470 return reexamine;
1471 }
1472
1473 /* Find size of an object passed as a parameter to the function. */
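/* For instance (a hypothetical prototype):
     __attribute__ ((access (write_only, 1, 2)))
     void fill (char *buf, size_t n);
   Within FILL, the dynamic object size of BUF is taken to be N, scaled by
   the pointee type size for complete non-void pointee types.  */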
1474
1475 static void
1476 parm_object_size (struct object_size_info *osi, tree var)
1477 {
1478 int object_size_type = osi->object_size_type;
1479 tree parm = SSA_NAME_VAR (var);
1480
1481 if (!(object_size_type & OST_DYNAMIC) || !POINTER_TYPE_P (TREE_TYPE (parm)))
1482 {
1483 expr_object_size (osi, var, parm);
1484 return;
1485 }
1486
1487 /* Look for access attribute. */
1488 rdwr_map rdwr_idx;
1489
1490 tree fndecl = cfun->decl;
1491 const attr_access *access = get_parm_access (rdwr_idx, parm, fndecl);
1492 tree typesize = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (parm)));
1493 tree sz = NULL_TREE;
1494
1495 /* If we have an explicit access attribute with a usable size argument... */
1496 if (access && access->sizarg != UINT_MAX && !access->internal_p
1497 /* ... and either PARM is void * or has a type that is complete and has a
1498 constant size... */
1499 && ((typesize && poly_int_tree_p (typesize))
1500 || (!typesize && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))))
1501 {
1502 tree fnargs = DECL_ARGUMENTS (fndecl);
1503 tree arg = NULL_TREE;
1504 unsigned argpos = 0;
1505
1506 /* ... then walk through the parameters to pick the size parameter and
1507 safely scale it by the type size if needed. */
1508 for (arg = fnargs; arg; arg = TREE_CHAIN (arg), ++argpos)
1509 if (argpos == access->sizarg && INTEGRAL_TYPE_P (TREE_TYPE (arg)))
1510 {
1511 sz = get_or_create_ssa_default_def (cfun, arg);
1512 if (sz != NULL_TREE)
1513 {
1514 sz = fold_convert (sizetype, sz);
1515 if (typesize)
1516 sz = size_binop (MULT_EXPR, sz, typesize);
1517 }
1518 break;
1519 }
1520 }
1521 if (!sz)
1522 sz = size_unknown (object_size_type);
1523
1524 object_sizes_set (osi, SSA_NAME_VERSION (var), sz, sz);
1525 }
1526
1527 /* Compute an object size expression for VAR, which is the result of a PHI
1528 node. */
1529
1530 static void
1531 phi_dynamic_object_size (struct object_size_info *osi, tree var)
1532 {
1533 int object_size_type = osi->object_size_type;
1534 unsigned int varno = SSA_NAME_VERSION (var);
1535 gimple *stmt = SSA_NAME_DEF_STMT (var);
1536 unsigned i, num_args = gimple_phi_num_args (stmt);
1537 bool wholesize_needed = false;
1538
1539 /* The extra space is for the PHI result at the end, which object_sizes_set
1540 sets for us. */
1541 tree sizes = make_tree_vec (num_args + 1);
1542 tree wholesizes = make_tree_vec (num_args + 1);
1543
1544 /* Bail out if the size of any of the PHI arguments cannot be
1545 determined. */
1546 for (i = 0; i < num_args; i++)
1547 {
1548 edge e = gimple_phi_arg_edge (as_a <gphi *> (stmt), i);
1549 if (e->flags & EDGE_COMPLEX)
1550 break;
1551
1552 tree rhs = gimple_phi_arg_def (stmt, i);
1553 tree size, wholesize;
1554
1555 dynamic_object_size (osi, rhs, &size, &wholesize);
1556
1557 if (size_unknown_p (size, object_size_type))
1558 break;
1559
1560 if (size != wholesize)
1561 wholesize_needed = true;
1562
1563 TREE_VEC_ELT (sizes, i) = size;
1564 TREE_VEC_ELT (wholesizes, i) = wholesize;
1565 }
1566
1567 if (i < num_args)
1568 {
1569 ggc_free (sizes);
1570 ggc_free (wholesizes);
1571 sizes = wholesizes = size_unknown (object_size_type);
1572 }
1573
1574 /* Point to the same TREE_VEC so that we can avoid emitting two PHI
1575 nodes. */
1576 else if (!wholesize_needed)
1577 {
1578 ggc_free (wholesizes);
1579 wholesizes = sizes;
1580 }
1581
1582 object_sizes_set (osi, varno, sizes, wholesizes);
1583 }
1584
1585 /* Compute object sizes for VAR.
1586 For ADDR_EXPR an object size is the number of remaining bytes
1587 to the end of the object (where what is considered an object depends on
1588 OSI->object_size_type).
1589 For allocation GIMPLE_CALL like malloc or calloc object size is the size
1590 of the allocation.
1591 For POINTER_PLUS_EXPR where second operand is a constant integer,
1592 object size is object size of the first operand minus the constant.
1593 If the constant is bigger than the number of remaining bytes until the
1594 end of the object, object size is 0, but if it is instead a pointer
1595 subtraction, object size is size_unknown (object_size_type).
1596 To differentiate addition from subtraction, ADDR_EXPR returns
1597 size_unknown (object_size_type) for all objects bigger than half of the
1598 address space, and constants less than half of the address space are
1599 considered addition, while bigger constants are treated as subtraction.
1600 For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
1601 object size is object size of that argument.
1602 Otherwise, object size is the maximum of object sizes of variables
1603 that it might be set to. */
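/* For example (hypothetical GIMPLE): with
     char buf[16];
     p_1 = &buf;  p_2 = p_1 + 4;
   the object size of P_2 is 16 - 4 == 12, whereas q_2 = p_1 + 24 would
   get maximum object size 0, the constant exceeding the remaining
   bytes.  */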
1604
1605 static void
1606 collect_object_sizes_for (struct object_size_info *osi, tree var)
1607 {
1608 int object_size_type = osi->object_size_type;
1609 unsigned int varno = SSA_NAME_VERSION (var);
1610 gimple *stmt;
1611 bool reexamine;
1612
1613 if (bitmap_bit_p (computed[object_size_type], varno))
1614 return;
1615
1616 if (osi->pass == 0)
1617 {
1618 if (bitmap_set_bit (osi->visited, varno))
1619 {
1620 /* Initialize to 0 for maximum size and M1U for minimum size so that
1621 it gets immediately overridden. */
1622 object_sizes_initialize (osi, varno,
1623 size_initval (object_size_type),
1624 size_initval (object_size_type));
1625 }
1626 else
1627 {
1628 /* Found a dependency loop. Mark the variable for later
1629 re-examination. */
1630 if (object_size_type & OST_DYNAMIC)
1631 object_sizes_set_temp (osi, varno);
1632
1633 bitmap_set_bit (osi->reexamine, varno);
1634 if (dump_file && (dump_flags & TDF_DETAILS))
1635 {
1636 fprintf (dump_file, "Found a dependency loop at ");
1637 print_generic_expr (dump_file, var, dump_flags);
1638 fprintf (dump_file, "\n");
1639 }
1640 return;
1641 }
1642 }
1643
1644 if (dump_file && (dump_flags & TDF_DETAILS))
1645 {
1646 fprintf (dump_file, "Visiting use-def links for ");
1647 print_generic_expr (dump_file, var, dump_flags);
1648 fprintf (dump_file, "\n");
1649 }
1650
1651 stmt = SSA_NAME_DEF_STMT (var);
1652 reexamine = false;
1653
1654 switch (gimple_code (stmt))
1655 {
1656 case GIMPLE_ASSIGN:
1657 {
1658 tree rhs = gimple_assign_rhs1 (stmt);
1659 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
1660 || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
1661 && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
1662 reexamine = plus_stmt_object_size (osi, var, stmt);
1663 else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
1664 reexamine = cond_expr_object_size (osi, var, stmt);
1665 else if (gimple_assign_single_p (stmt)
1666 || gimple_assign_unary_nop_p (stmt))
1667 {
1668 if (TREE_CODE (rhs) == SSA_NAME
1669 && POINTER_TYPE_P (TREE_TYPE (rhs)))
1670 reexamine = merge_object_sizes (osi, var, rhs);
1671 else
1672 expr_object_size (osi, var, rhs);
1673 }
1674 else
1675 unknown_object_size (osi, var);
1676 break;
1677 }
1678
1679 case GIMPLE_CALL:
1680 {
1681 gcall *call_stmt = as_a <gcall *> (stmt);
1682 tree arg = pass_through_call (call_stmt);
1683 if (arg)
1684 {
1685 if (TREE_CODE (arg) == SSA_NAME
1686 && POINTER_TYPE_P (TREE_TYPE (arg)))
1687 reexamine = merge_object_sizes (osi, var, arg);
1688 else
1689 expr_object_size (osi, var, arg);
1690 }
1691 else
1692 call_object_size (osi, var, call_stmt);
1693 break;
1694 }
1695
1696 case GIMPLE_ASM:
1697 /* Pointers defined by __asm__ statements can point anywhere. */
1698 unknown_object_size (osi, var);
1699 break;
1700
1701 case GIMPLE_NOP:
1702 if (SSA_NAME_VAR (var)
1703 && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
1704 parm_object_size (osi, var);
1705 else
1706 /* Uninitialized SSA names point nowhere. */
1707 unknown_object_size (osi, var);
1708 break;
1709
1710 case GIMPLE_PHI:
1711 {
1712 unsigned i;
1713
1714 if (object_size_type & OST_DYNAMIC)
1715 {
1716 phi_dynamic_object_size (osi, var);
1717 break;
1718 }
1719
1720 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1721 {
1722 tree rhs = gimple_phi_arg (stmt, i)->def;
1723
1724 if (object_sizes_unknown_p (object_size_type, varno))
1725 break;
1726
1727 if (TREE_CODE (rhs) == SSA_NAME)
1728 reexamine |= merge_object_sizes (osi, var, rhs);
1729 else if (osi->pass == 0)
1730 expr_object_size (osi, var, rhs);
1731 }
1732 break;
1733 }
1734
1735 default:
1736 gcc_unreachable ();
1737 }
1738
1739 if (! reexamine || object_sizes_unknown_p (object_size_type, varno))
1740 {
1741 bitmap_set_bit (computed[object_size_type], varno);
1742 if (!(object_size_type & OST_DYNAMIC))
1743 bitmap_clear_bit (osi->reexamine, varno);
1744 }
1745 else
1746 {
1747 bitmap_set_bit (osi->reexamine, varno);
1748 if (dump_file && (dump_flags & TDF_DETAILS))
1749 {
1750 fprintf (dump_file, "Need to reexamine ");
1751 print_generic_expr (dump_file, var, dump_flags);
1752 fprintf (dump_file, "\n");
1753 }
1754 }
1755 }
1756
1757
1758 /* Helper function for check_for_plus_in_loops. Called recursively
1759 to detect loops. */
1760
1761 static void
1762 check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
1763 unsigned int depth)
1764 {
1765 gimple *stmt = SSA_NAME_DEF_STMT (var);
1766 unsigned int varno = SSA_NAME_VERSION (var);
1767
1768 if (osi->depths[varno])
1769 {
1770 if (osi->depths[varno] != depth)
1771 {
1772 unsigned int *sp;
1773
1774 /* Found a loop involving pointer addition. */
1775 for (sp = osi->tos; sp > osi->stack; )
1776 {
1777 --sp;
1778 bitmap_clear_bit (osi->reexamine, *sp);
1779 bitmap_set_bit (computed[osi->object_size_type], *sp);
1780 object_sizes_set (osi, *sp, size_zero_node,
1781 object_sizes_get (osi, *sp, true));
1782 if (*sp == varno)
1783 break;
1784 }
1785 }
1786 return;
1787 }
1788 else if (! bitmap_bit_p (osi->reexamine, varno))
1789 return;
1790
1791 osi->depths[varno] = depth;
1792 *osi->tos++ = varno;
1793
1794 switch (gimple_code (stmt))
1795 {
1796
1797 case GIMPLE_ASSIGN:
1798 {
1799 if ((gimple_assign_single_p (stmt)
1800 || gimple_assign_unary_nop_p (stmt))
1801 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
1802 {
1803 tree rhs = gimple_assign_rhs1 (stmt);
1804
1805 check_for_plus_in_loops_1 (osi, rhs, depth);
1806 }
1807 else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1808 {
1809 tree basevar = gimple_assign_rhs1 (stmt);
1810 tree cst = gimple_assign_rhs2 (stmt);
1811
1812 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1813
1814 check_for_plus_in_loops_1 (osi, basevar,
1815 depth + !integer_zerop (cst));
1816 }
1817 else
1818 gcc_unreachable ();
1819 break;
1820 }
1821
1822 case GIMPLE_CALL:
1823 {
1824 gcall *call_stmt = as_a <gcall *> (stmt);
1825 tree arg = pass_through_call (call_stmt);
1826 if (arg)
1827 {
1828 if (TREE_CODE (arg) == SSA_NAME)
1829 check_for_plus_in_loops_1 (osi, arg, depth);
1830 else
1831 gcc_unreachable ();
1832 }
1833 break;
1834 }
1835
1836 case GIMPLE_PHI:
1837 {
1838 unsigned i;
1839
1840 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1841 {
1842 tree rhs = gimple_phi_arg (stmt, i)->def;
1843
1844 if (TREE_CODE (rhs) == SSA_NAME)
1845 check_for_plus_in_loops_1 (osi, rhs, depth);
1846 }
1847 break;
1848 }
1849
1850 default:
1851 gcc_unreachable ();
1852 }
1853
1854 osi->depths[varno] = 0;
1855 osi->tos--;
1856 }
1857
1858
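/* A sketch of the depth bookkeeping (SSA names hypothetical).  For

     b_2 = a_1 + 4;
     a_1 = PHI <b_2(3), p_0(2)>

   the caller seeds a_1 at depth 1 and starts the walk from b_2 at
   depth 2; copies and zero offsets keep the depth, while the nonzero
   POINTER_PLUS_EXPR bumps it, so a_1 is reached again at a different
   depth.  The mismatch proves the pointer grows around the loop, and
   every name still on the stack down to a_1 is given size 0.  */
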
/* Check if some pointer we are computing object size of is being increased
   within a loop.  If so, assume all the SSA variables participating in
   that loop have minimum object sizes 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      /* Skip non-positive offsets: zero, or unsigned values above
	 offset_limit, which represent negative offsets.  */
      if (integer_zerop (cst) || compare_tree_int (cst, offset_limit) > 0)
	return;

      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}


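/* Illustrative C input (hypothetical, not taken from this file):

     char buf[16];
     char *p = buf;
     while (*p)
       p++;		// p_2 = PHI <&buf[0], p_3>;  p_3 = p_2 + 1;

   The PHI and the POINTER_PLUS_EXPR form a cycle with a nonzero
   increment, so the minimum object size of every SSA name in the
   cycle is conservatively taken to be 0.  */
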
/* Initialize data structures for the object size computation.  */

void
init_object_sizes (void)
{
  int object_size_type;

  if (computed[0])
    return;

  for (object_size_type = 0; object_size_type < OST_END; object_size_type++)
    {
      object_sizes_grow (object_size_type);
      computed[object_size_type] = BITMAP_ALLOC (NULL);
    }

  init_offset_limit ();
}


/* Destroy data structures after the object size computation.  */

void
fini_object_sizes (void)
{
  int object_size_type;

  for (object_size_type = 0; object_size_type < OST_END; object_size_type++)
    {
      object_sizes_release (object_size_type);
      BITMAP_FREE (computed[object_size_type]);
    }
}

/* Dummy valueize function: return T unchanged, so that
   gimple_fold_stmt_to_constant folds the call using its SSA operands
   as they are.  */

static tree
do_valueize (tree t)
{
  return t;
}

/* Process a __builtin_object_size or __builtin_dynamic_object_size call in
   CALL early for subobjects before any object information is lost due to
   optimization.  Insert a MIN or MAX expression of the result and
   __builtin_object_size at I so that it may be processed in the second pass.
   __builtin_dynamic_object_size is treated like __builtin_object_size here
   since we're only looking for constant bounds.  */

static void
early_object_sizes_execute_one (gimple_stmt_iterator *i, gimple *call)
{
  tree ost = gimple_call_arg (call, 1);
  tree lhs = gimple_call_lhs (call);
  gcc_assert (lhs != NULL_TREE);

  if (!tree_fits_uhwi_p (ost))
    return;

  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
  tree ptr = gimple_call_arg (call, 0);

  if (object_size_type != 1 && object_size_type != 3)
    return;

  if (TREE_CODE (ptr) != ADDR_EXPR && TREE_CODE (ptr) != SSA_NAME)
    return;

  tree type = TREE_TYPE (lhs);
  tree bytes;
  if (!compute_builtin_object_size (ptr, object_size_type, &bytes)
      || !int_fits_type_p (bytes, type))
    return;

  tree tem = make_ssa_name (type);
  gimple_call_set_lhs (call, tem);
  enum tree_code code = object_size_type & OST_MINIMUM ? MAX_EXPR : MIN_EXPR;
  tree cst = fold_convert (type, bytes);
  gimple *g = gimple_build_assign (lhs, code, tem, cst);
  gsi_insert_after (i, g, GSI_NEW_STMT);
  update_stmt (call);
}

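/* Sketch of the rewrite (SSA names and the constant are illustrative).
   A subobject query

     sz_1 = __builtin_object_size (&a.buf[2], 1);

   for which the early computation yields the constant 14 has its
   result moved into a fresh temporary and clamped:

     tmp_2 = __builtin_object_size (&a.buf[2], 1);
     sz_1 = MIN_EXPR <tmp_2, 14>;

   For the minimum-size variant (OST_MINIMUM set, i.e. type 3) a
   MAX_EXPR is built instead; the remaining call is folded later by the
   second objsz pass.  */
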
/* Attempt to fold one __builtin_dynamic_object_size call in CALL into an
   expression and insert it at I.  Return true if it succeeds.  */

static bool
dynamic_object_sizes_execute_one (gimple_stmt_iterator *i, gimple *call)
{
  gcc_assert (gimple_call_num_args (call) == 2);

  tree args[2];
  args[0] = gimple_call_arg (call, 0);
  args[1] = gimple_call_arg (call, 1);

  location_t loc = EXPR_LOC_OR_LOC (args[0], input_location);
  tree result_type = gimple_call_return_type (as_a <gcall *> (call));
  tree result = fold_builtin_call_array (loc, result_type,
					 gimple_call_fn (call), 2, args);

  if (!result)
    return false;

  /* fold_builtin_call_array may wrap the result inside a
     NOP_EXPR.  */
  STRIP_NOPS (result);
  gimplify_and_update_call_from_tree (i, result);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Simplified (dynamic)\n  ");
      print_gimple_stmt (dump_file, call, 0, dump_flags);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, result);
      fprintf (dump_file, "\n");
    }
  return true;
}

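/* For example (hypothetical input), given p_2 = malloc (n_1), the call

     sz_3 = __builtin_dynamic_object_size (p_2, 0);

   may fold to an expression based on n_1, which is then gimplified in
   place of the call; when no such expression can be found, the caller
   below falls back to __builtin_object_size instead.  */
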
static unsigned int
object_sizes_execute (function *fun, bool early)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree result;
	  bool dynamic = false;

	  gimple *call = gsi_stmt (i);
	  if (gimple_call_builtin_p (call, BUILT_IN_DYNAMIC_OBJECT_SIZE))
	    dynamic = true;
	  else if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
	    continue;

	  tree lhs = gimple_call_lhs (call);
	  if (!lhs)
	    continue;

	  init_object_sizes ();

	  /* If early, only attempt to fold
	     __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
	     and rather than folding the builtin to the constant if any,
	     create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
	     call result and the computed constant.  Do the same for
	     __builtin_dynamic_object_size too.  */
	  if (early)
	    {
	      early_object_sizes_execute_one (&i, call);
	      continue;
	    }

	  if (dynamic)
	    {
	      if (dynamic_object_sizes_execute_one (&i, call))
		continue;
	      else
		{
		  /* If we could not find a suitable size expression, lower to
		     __builtin_object_size so that we may at least get a
		     constant lower or higher estimate.  */
		  tree bosfn = builtin_decl_implicit (BUILT_IN_OBJECT_SIZE);
		  gimple_call_set_fndecl (call, bosfn);
		  update_stmt (call);

		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      print_generic_expr (dump_file, gimple_call_arg (call, 0),
					  dump_flags);
		      fprintf (dump_file,
			       ": Retrying as __builtin_object_size\n");
		    }
		}
	    }

	  result = gimple_fold_stmt_to_constant (call, do_valueize);
	  if (!result)
	    {
	      tree ost = gimple_call_arg (call, 1);

	      if (tree_fits_uhwi_p (ost))
		{
		  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);

		  if (object_size_type & OST_MINIMUM)
		    result = build_zero_cst (size_type_node);
		  else if (object_size_type < OST_END)
		    result = fold_convert (size_type_node,
					   integer_minus_one_node);
		}

	      if (!result)
		continue;
	    }

	  gcc_assert (TREE_CODE (result) == INTEGER_CST);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, call, 0, dump_flags);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, result);
	      fprintf (dump_file, "\n");
	    }

	  /* Propagate into all uses and fold those stmts.  */
	  if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	    replace_uses_by (lhs, result);
	  else
	    replace_call_with_value (&i, result);
	}
    }

  fini_object_sizes ();
  return 0;
}

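/* The fallback constants above match the documented semantics of the
   builtin, e.g. (illustrative):

     __builtin_object_size (p, 0)  ->  (size_t) -1	when nothing is known
     __builtin_object_size (p, 2)  ->  0		when nothing is known

   i.e. unknown maximum sizes degrade to -1 and unknown minimum sizes
   to 0, so code guarded by these values stays conservative.  */
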
/* Simple pass to optimize all __builtin_object_size () builtins.  */

namespace {

const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_objsz, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_object_sizes : public gimple_opt_pass
{
public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
  virtual unsigned int execute (function *fun)
  {
    return object_sizes_execute (fun, false);
  }
}; // class pass_object_sizes

} // anon namespace

gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}

/* Early version of pass to optimize all __builtin_object_size () builtins.  */

namespace {

const pass_data pass_data_early_object_sizes =
{
  GIMPLE_PASS, /* type */
  "early_objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_early_object_sizes : public gimple_opt_pass
{
public:
  pass_early_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_early_object_sizes, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *fun)
  {
    return object_sizes_execute (fun, true);
  }
}; // class pass_early_object_sizes

} // anon namespace

gimple_opt_pass *
make_pass_early_object_sizes (gcc::context *ctxt)
{
  return new pass_early_object_sizes (ctxt);
}