xref: /netbsd-src/external/gpl3/gcc/dist/gcc/pointer-query.cc (revision b1e838363e3c6fc78a55519254d99869742dd33c)
1 /* Definitions of the pointer_query and related classes.
2 
3    Copyright (C) 2020-2022 Free Software Foundation, Inc.
4 
5    This file is part of GCC.
6 
7    GCC is free software; you can redistribute it and/or modify it under
8    the terms of the GNU General Public License as published by the Free
9    Software Foundation; either version 3, or (at your option) any later
10    version.
11 
12    GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13    WARRANTY; without even the implied warranty of MERCHANTABILITY or
14    FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15    for more details.
16 
17    You should have received a copy of the GNU General Public License
18    along with GCC; see the file COPYING3.  If not see
19    <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "stringpool.h"
28 #include "tree-vrp.h"
29 #include "diagnostic-core.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "tree-ssa-strlen.h"
33 #include "langhooks.h"
34 #include "stringpool.h"
35 #include "attribs.h"
36 #include "gimple-fold.h"
37 #include "gimple-ssa.h"
38 #include "intl.h"
39 #include "attr-fnspec.h"
40 #include "gimple-range.h"
41 #include "pointer-query.h"
42 #include "tree-pretty-print.h"
43 #include "tree-ssanames.h"
44 #include "target.h"
45 
46 static bool compute_objsize_r (tree, gimple *, bool, int, access_ref *,
47 			       ssa_name_limit_t &, pointer_query *);
48 
49 /* Wrapper around the wide_int overload of get_range that accepts
50    offset_int instead.  For middle-end expressions it returns the same
51    result.  For a subset of nonconstant expressions emitted by the front
52    end it determines a more precise range than would be possible otherwise.  */
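/* For example, for the front-end expression LEN + 8 where LEN is
   a signed int parameter (neither an INTEGER_CST nor an SSA_NAME;
   the names here are purely illustrative), ADD is set to 8 and R[]
   to [INT_MIN + 8, INT_MAX + 8] rather than to the full range of
   sizetype.  */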
53 
54 static bool
55 get_offset_range (tree x, gimple *stmt, offset_int r[2], range_query *rvals)
56 {
57   offset_int add = 0;
58   if (TREE_CODE (x) == PLUS_EXPR)
59     {
60       /* Handle constant offsets in pointer addition expressions seen
61 	 in the front end IL.  */
62       tree op = TREE_OPERAND (x, 1);
63       if (TREE_CODE (op) == INTEGER_CST)
64 	{
65 	  op = fold_convert (signed_type_for (TREE_TYPE (op)), op);
66 	  add = wi::to_offset (op);
67 	  x = TREE_OPERAND (x, 0);
68 	}
69     }
70 
71   if (TREE_CODE (x) == NOP_EXPR)
72     /* Also handle conversions to sizetype seen in the front end IL.  */
73     x = TREE_OPERAND (x, 0);
74 
75   tree type = TREE_TYPE (x);
76   if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
77     return false;
78 
79   if (TREE_CODE (x) != INTEGER_CST
80       && TREE_CODE (x) != SSA_NAME)
81     {
82       if (TYPE_UNSIGNED (type)
83 	  && TYPE_PRECISION (type) == TYPE_PRECISION (sizetype))
84 	type = signed_type_for (type);
85 
86       r[0] = wi::to_offset (TYPE_MIN_VALUE (type)) + add;
87       r[1] = wi::to_offset (TYPE_MAX_VALUE (type)) + add;
88       return x;
89     }
90 
91   wide_int wr[2];
92   if (!get_range (x, stmt, wr, rvals))
93     return false;
94 
95   signop sgn = SIGNED;
96   /* Only convert signed integers or unsigned sizetype to a signed
97      offset and avoid converting large positive values in narrower
98      types to negative offsets.  */
99   if (TYPE_UNSIGNED (type)
100       && wr[0].get_precision () < TYPE_PRECISION (sizetype))
101     sgn = UNSIGNED;
102 
103   r[0] = offset_int::from (wr[0], sgn);
104   r[1] = offset_int::from (wr[1], sgn);
105   return true;
106 }
107 
108 /* Return the argument that the call STMT to a built-in function returns
109    or null if it doesn't.  On success, set OFFRNG[] to the range of offsets
110    from the argument reflected in the value returned by the built-in if it
111    can be determined, otherwise to 0 and HWI_M1U respectively.  Set
112    *PAST_END for functions like mempcpy that might return a past-the-end
113    pointer (most functions return a dereferenceable pointer to an existing
114    element of an array).  */
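/* For example, for a call such as
     p = __builtin_mempcpy (d, s, n);
   the function returns D, sets OFFRNG[] to the range of N (with its
   upper bound further constrained by the size of the object S points
   to when N's range isn't a singleton), and sets *PAST_END since the
   result may point just past the end of D.  */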
115 
116 static tree
117 gimple_call_return_array (gimple *stmt, offset_int offrng[2], bool *past_end,
118 			  ssa_name_limit_t &snlim, pointer_query *qry)
119 {
120   /* Clear and set below for the rare function(s) that might return
121      a past-the-end pointer.  */
122   *past_end = false;
123 
124   {
125     /* Check for attribute fn spec to see if the function returns one
126        of its arguments.  */
127     attr_fnspec fnspec = gimple_call_fnspec (as_a <gcall *>(stmt));
128     unsigned int argno;
129     if (fnspec.returns_arg (&argno))
130       {
131 	/* The function returns its argument ARGNO exactly (not a range).  */
132 	offrng[0] = offrng[1] = 0;
133 	return gimple_call_arg (stmt, argno);
134       }
135   }
136 
137   if (gimple_call_num_args (stmt) < 1)
138     return NULL_TREE;
139 
140   tree fn = gimple_call_fndecl (stmt);
141   if (!gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
142     {
143       /* See if this is a call to placement new.  */
144       if (!fn
145 	  || !DECL_IS_OPERATOR_NEW_P (fn)
146 	  || DECL_IS_REPLACEABLE_OPERATOR_NEW_P (fn))
147 	return NULL_TREE;
148 
149       /* Check the mangling, keeping in mind that operator new takes
150 	 a size_t which could be unsigned int or unsigned long.  */
151       tree fname = DECL_ASSEMBLER_NAME (fn);
152       if (!id_equal (fname, "_ZnwjPv")       // ordinary form
153 	  && !id_equal (fname, "_ZnwmPv")    // ordinary form
154 	  && !id_equal (fname, "_ZnajPv")    // array form
155 	  && !id_equal (fname, "_ZnamPv"))   // array form
156 	return NULL_TREE;
157 
158       if (gimple_call_num_args (stmt) != 2)
159 	return NULL_TREE;
160 
161       /* Allocation functions return a pointer to the beginning.  */
162       offrng[0] = offrng[1] = 0;
163       return gimple_call_arg (stmt, 1);
164     }
165 
166   switch (DECL_FUNCTION_CODE (fn))
167     {
168     case BUILT_IN_MEMCPY:
169     case BUILT_IN_MEMCPY_CHK:
170     case BUILT_IN_MEMMOVE:
171     case BUILT_IN_MEMMOVE_CHK:
172     case BUILT_IN_MEMSET:
173     case BUILT_IN_STRCAT:
174     case BUILT_IN_STRCAT_CHK:
175     case BUILT_IN_STRCPY:
176     case BUILT_IN_STRCPY_CHK:
177     case BUILT_IN_STRNCAT:
178     case BUILT_IN_STRNCAT_CHK:
179     case BUILT_IN_STRNCPY:
180     case BUILT_IN_STRNCPY_CHK:
181       /* Functions return the first argument (not a range).  */
182       offrng[0] = offrng[1] = 0;
183       return gimple_call_arg (stmt, 0);
184 
185     case BUILT_IN_MEMPCPY:
186     case BUILT_IN_MEMPCPY_CHK:
187       {
188 	/* The returned pointer is in a range constrained by the smaller
189 	   of the upper bound of the size argument and the source object
190 	   size.  */
191 	offrng[0] = 0;
192 	offrng[1] = HOST_WIDE_INT_M1U;
193 	tree off = gimple_call_arg (stmt, 2);
194 	bool off_valid = get_offset_range (off, stmt, offrng, qry->rvals);
195 	if (!off_valid || offrng[0] != offrng[1])
196 	  {
197 	    /* If the offset is either indeterminate or in some range,
198 	       try to constrain its upper bound to at most the size
199 	       of the source object.  */
200 	    access_ref aref;
201 	    tree src = gimple_call_arg (stmt, 1);
202 	    if (compute_objsize_r (src, stmt, false, 1, &aref, snlim, qry)
203 		&& aref.sizrng[1] < offrng[1])
204 	      offrng[1] = aref.sizrng[1];
205 	  }
206 
207 	/* Mempcpy may return a past-the-end pointer.  */
208 	*past_end = true;
209 	return gimple_call_arg (stmt, 0);
210       }
211 
212     case BUILT_IN_MEMCHR:
213       {
214 	tree off = gimple_call_arg (stmt, 2);
215 	if (get_offset_range (off, stmt, offrng, qry->rvals))
216 	  offrng[1] -= 1;
217 	else
218 	  offrng[1] = HOST_WIDE_INT_M1U;
219 
220 	offrng[0] = 0;
221 	return gimple_call_arg (stmt, 0);
222       }
223 
224     case BUILT_IN_STRCHR:
225     case BUILT_IN_STRRCHR:
226     case BUILT_IN_STRSTR:
227       offrng[0] = 0;
228       offrng[1] = HOST_WIDE_INT_M1U;
229       return gimple_call_arg (stmt, 0);
230 
231     case BUILT_IN_STPCPY:
232     case BUILT_IN_STPCPY_CHK:
233       {
234 	access_ref aref;
235 	tree src = gimple_call_arg (stmt, 1);
236 	if (compute_objsize_r (src, stmt, false, 1, &aref, snlim, qry))
237 	  offrng[1] = aref.sizrng[1] - 1;
238 	else
239 	  offrng[1] = HOST_WIDE_INT_M1U;
240 
241 	offrng[0] = 0;
242 	return gimple_call_arg (stmt, 0);
243       }
244 
245     case BUILT_IN_STPNCPY:
246     case BUILT_IN_STPNCPY_CHK:
247       {
248 	/* The returned pointer is in a range between the first argument
249 	   and it plus the smaller of the upper bound of the size argument
250 	   and the source object size.  */
251 	offrng[1] = HOST_WIDE_INT_M1U;
252 	tree off = gimple_call_arg (stmt, 2);
253 	if (!get_offset_range (off, stmt, offrng, qry->rvals)
254 	    || offrng[0] != offrng[1])
255 	  {
256 	    /* If the offset is either indeterminate or in some range,
257 	       try to constrain its upper bound to at most the size
258 	       of the source object.  */
259 	    access_ref aref;
260 	    tree src = gimple_call_arg (stmt, 1);
261 	    if (compute_objsize_r (src, stmt, false, 1, &aref, snlim, qry)
262 		&& aref.sizrng[1] < offrng[1])
263 	      offrng[1] = aref.sizrng[1];
264 	  }
265 
266 	/* When the source is the empty string the returned pointer is
267 	   a copy of the argument.  Otherwise stpncpy can also return
268 	   a past-the-end pointer.  */
269 	offrng[0] = 0;
270 	*past_end = true;
271 	return gimple_call_arg (stmt, 0);
272       }
273 
274     default:
275       break;
276     }
277 
278   return NULL_TREE;
279 }
280 
281 /* Return true when EXP's range can be determined and set RANGE[] to it
282    after adjusting it if necessary to make EXP represent a valid size
283    of an object, or a valid size argument to an allocation function declared
284    with attribute alloc_size (whose argument may be signed), or to a string
285    manipulation function like memset.
286    When ALLOW_ZERO is set in FLAGS, allow returning a range of [0, 0] for
287    a size in an anti-range [1, N] where N > PTRDIFF_MAX.  A zero range is
288    a (nearly) invalid argument to allocation functions like malloc but it
289    is a valid argument to functions like memset.
290    When USE_LARGEST is set in FLAGS set RANGE to the largest valid subrange
291    in a multi-range, otherwise to the smallest valid subrange.  */
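/* For example, if EXP is a signed int in the anti-range ~[-1, 7]
   (i.e., EXP < -1 || EXP > 7), the negative subrange isn't a valid
   size, so RANGE is set to [8, INT_MAX].  */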
292 
293 bool
294 get_size_range (range_query *query, tree exp, gimple *stmt, tree range[2],
295 		int flags /* = 0 */)
296 {
297   if (!exp)
298     return false;
299 
300   if (tree_fits_uhwi_p (exp))
301     {
302       /* EXP is a constant.  */
303       range[0] = range[1] = exp;
304       return true;
305     }
306 
307   tree exptype = TREE_TYPE (exp);
308   bool integral = INTEGRAL_TYPE_P (exptype);
309 
310   wide_int min, max;
311   enum value_range_kind range_type;
312 
313   if (!query)
314     query = get_range_query (cfun);
315 
316   if (integral)
317     {
318       value_range vr;
319 
320       query->range_of_expr (vr, exp, stmt);
321 
322       if (vr.undefined_p ())
323 	vr.set_varying (TREE_TYPE (exp));
324       range_type = vr.kind ();
325       min = wi::to_wide (vr.min ());
326       max = wi::to_wide (vr.max ());
327     }
328   else
329     range_type = VR_VARYING;
330 
331   if (range_type == VR_VARYING)
332     {
333       if (integral)
334 	{
335 	  /* Use the full range of the type of the expression when
336 	     no value range information is available.  */
337 	  range[0] = TYPE_MIN_VALUE (exptype);
338 	  range[1] = TYPE_MAX_VALUE (exptype);
339 	  return true;
340 	}
341 
342       range[0] = NULL_TREE;
343       range[1] = NULL_TREE;
344       return false;
345     }
346 
347   unsigned expprec = TYPE_PRECISION (exptype);
348 
349   bool signed_p = !TYPE_UNSIGNED (exptype);
350 
351   if (range_type == VR_ANTI_RANGE)
352     {
353       if (signed_p)
354 	{
355 	  if (wi::les_p (max, 0))
356 	    {
357 	      /* EXP is not in a strictly negative range.  That means
358 		 it must be in some (not necessarily strictly) positive
359 		 range which includes zero.  Since in signed to unsigned
360 		 conversions negative values end up converted to large
361 		 positive values, and otherwise they are not valid sizes,
362 		 the resulting range is in both cases [0, TYPE_MAX].  */
363 	      min = wi::zero (expprec);
364 	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
365 	    }
366 	  else if (wi::les_p (min - 1, 0))
367 	    {
368 	      /* EXP is not in a negative-positive range.  That means EXP
369 		 is either negative, or greater than max.  Since negative
370 		 sizes are invalid make the range [MAX + 1, TYPE_MAX].  */
371 	      min = max + 1;
372 	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
373 	    }
374 	  else
375 	    {
376 	      max = min - 1;
377 	      min = wi::zero (expprec);
378 	    }
379 	}
380       else
381 	{
382 	  wide_int maxsize = wi::to_wide (max_object_size ());
383 	  min = wide_int::from (min, maxsize.get_precision (), UNSIGNED);
384 	  max = wide_int::from (max, maxsize.get_precision (), UNSIGNED);
385 	  if (wi::eq_p (0, min - 1))
386 	    {
387 	      /* EXP is unsigned and not in the range [1, MAX].  That means
388 		 it's either zero or greater than MAX.  Even though 0 would
389 		 normally be detected by -Walloc-zero, unless ALLOW_ZERO
390 		 is set, set the range to [MAX, TYPE_MAX] so that when MAX
391 		 is greater than the limit the whole range is diagnosed.  */
392 	      wide_int maxsize = wi::to_wide (max_object_size ());
393 	      if (flags & SR_ALLOW_ZERO)
394 		{
395 		  if (wi::leu_p (maxsize, max + 1)
396 		      || !(flags & SR_USE_LARGEST))
397 		    min = max = wi::zero (expprec);
398 		  else
399 		    {
400 		      min = max + 1;
401 		      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
402 		    }
403 		}
404 	      else
405 		{
406 		  min = max + 1;
407 		  max = wi::to_wide (TYPE_MAX_VALUE (exptype));
408 		}
409 	    }
410 	  else if ((flags & SR_USE_LARGEST)
411 		   && wi::ltu_p (max + 1, maxsize))
412 	    {
413 	      /* When USE_LARGEST is set and the larger of the two subranges
414 		 is a valid size, use it...  */
415 	      min = max + 1;
416 	      max = maxsize;
417 	    }
418 	  else
419 	    {
420 	      /* ...otherwise use the smaller subrange.  */
421 	      max = min - 1;
422 	      min = wi::zero (expprec);
423 	    }
424 	}
425     }
426 
427   range[0] = wide_int_to_tree (exptype, min);
428   range[1] = wide_int_to_tree (exptype, max);
429 
430   return true;
431 }
432 
433 bool
434 get_size_range (tree exp, tree range[2], int flags /* = 0 */)
435 {
436   return get_size_range (/*query=*/NULL, exp, /*stmt=*/NULL, range, flags);
437 }
438 
439 /* If STMT is a call to an allocation function, returns the constant
440    maximum size of the object allocated by the call represented as
441    sizetype.  If nonnull, sets RNG1[] to the range of the size.
442    When nonnull, uses RVALS for range information, otherwise gets global
443    range info.
444    Returns null when STMT is not a call to a valid allocation function.  */
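/* For example, for a call such as
     void *p = __builtin_calloc (n, m);
   with N in [2, 4] and M in [8, 16], calloc being declared with
   attribute alloc_size (1, 2), the function sets RNG1 to [16, 64]
   (the products of the bounds) and returns 64 as sizetype.  */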
445 
446 tree
447 gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
448 			range_query *qry /* = NULL */)
449 {
450   if (!stmt || !is_gimple_call (stmt))
451     return NULL_TREE;
452 
453   tree allocfntype;
454   if (tree fndecl = gimple_call_fndecl (stmt))
455     allocfntype = TREE_TYPE (fndecl);
456   else
457     allocfntype = gimple_call_fntype (stmt);
458 
459   if (!allocfntype)
460     return NULL_TREE;
461 
462   unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
463   tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
464   if (!at)
465     {
466       if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
467 	return NULL_TREE;
468 
469       argidx1 = 0;
470     }
471 
472   unsigned nargs = gimple_call_num_args (stmt);
473 
474   if (argidx1 == UINT_MAX)
475     {
476       tree atval = TREE_VALUE (at);
477       if (!atval)
478 	return NULL_TREE;
479 
480       argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
481       if (nargs <= argidx1)
482 	return NULL_TREE;
483 
484       atval = TREE_CHAIN (atval);
485       if (atval)
486 	{
487 	  argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
488 	  if (nargs <= argidx2)
489 	    return NULL_TREE;
490 	}
491     }
492 
493   tree size = gimple_call_arg (stmt, argidx1);
494 
495   wide_int rng1_buf[2];
496   /* If RNG1 is not set, use the buffer.  */
497   if (!rng1)
498     rng1 = rng1_buf;
499 
500   /* Use maximum precision to avoid overflow below.  */
501   const int prec = ADDR_MAX_PRECISION;
502 
503   {
504     tree r[2];
505     /* Determine the largest valid range size, including zero.  */
506     if (!get_size_range (qry, size, stmt, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
507       return NULL_TREE;
508     rng1[0] = wi::to_wide (r[0], prec);
509     rng1[1] = wi::to_wide (r[1], prec);
510   }
511 
512   if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
513     return fold_convert (sizetype, size);
514 
515   /* To handle ranges do the math in wide_int and return the product
516      of the upper bounds as a constant.  Ignore anti-ranges.  */
517   tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
518   wide_int rng2[2];
519   {
520     tree r[2];
521       /* As above, use the full non-negative range on failure.  */
522     if (!get_size_range (qry, n, stmt, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
523       return NULL_TREE;
524     rng2[0] = wi::to_wide (r[0], prec);
525     rng2[1] = wi::to_wide (r[1], prec);
526   }
527 
528   /* Compute products of both bounds for the caller but return the lesser
529      of SIZE_MAX and the product of the upper bounds as a constant.  */
530   rng1[0] = rng1[0] * rng2[0];
531   rng1[1] = rng1[1] * rng2[1];
532 
533   const tree size_max = TYPE_MAX_VALUE (sizetype);
534   if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
535     {
536       rng1[1] = wi::to_wide (size_max, prec);
537       return size_max;
538     }
539 
540   return wide_int_to_tree (sizetype, rng1[1]);
541 }
542 
543 /* For an access to an object referenced by the function parameter PTR
544    of pointer type, set RNG[] to the range of sizes of the object
545    obtained from the attribute access specification for the current function.
546    Set STATIC_ARRAY if the array parameter has been declared [static].
547    Return the function parameter on success and null otherwise.  */
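/* For example, for the parameter of
     void f (int a[static 8]);
   the function sets RNG to [0, 8 * sizeof (int)], sets *STATIC_ARRAY
   (when nonnull), and returns the PARM_DECL for A.  */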
548 
549 static tree
550 gimple_parm_array_size (tree ptr, wide_int rng[2],
551 			bool *static_array /* = NULL */)
552 {
553   /* For a function argument try to determine the byte size of the array
554      from the current function declaration (e.g., attribute access or
555      related).  */
556   tree var = SSA_NAME_VAR (ptr);
557   if (TREE_CODE (var) != PARM_DECL || !POINTER_TYPE_P (TREE_TYPE (var)))
558     return NULL_TREE;
559 
560   const unsigned prec = TYPE_PRECISION (sizetype);
561 
562   rdwr_map rdwr_idx;
563   attr_access *access = get_parm_access (rdwr_idx, var);
564   if (!access)
565     return NULL_TREE;
566 
567   if (access->sizarg != UINT_MAX)
568     {
569       /* TODO: Try to extract the range from the argument based on
570 	 those of subsequent assertions or based on known calls to
571 	 the current function.  */
572       return NULL_TREE;
573     }
574 
575   if (!access->minsize)
576     return NULL_TREE;
577 
578   /* Only consider ordinary array bound at level 2 (or above if it's
579      ever added).  */
580   if (warn_array_parameter < 2 && !access->static_p)
581     return NULL_TREE;
582 
583   if (static_array)
584     *static_array = access->static_p;
585 
586   rng[0] = wi::zero (prec);
587   rng[1] = wi::uhwi (access->minsize, prec);
588   /* Multiply the array bound encoded in the attribute by the size
589      of what the pointer argument to which it decays points to.  */
590   tree eltype = TREE_TYPE (TREE_TYPE (ptr));
591   tree size = TYPE_SIZE_UNIT (eltype);
592   if (!size || TREE_CODE (size) != INTEGER_CST)
593     return NULL_TREE;
594 
595   rng[1] *= wi::to_wide (size, prec);
596   return var;
597 }
598 
599 /* Initialize the object.  */
600 
601 access_ref::access_ref ()
602   : ref (), eval ([](tree x){ return x; }), deref (), trail1special (true),
603     base0 (true), parmarray ()
604 {
605   /* Set to valid.  */
606   offrng[0] = offrng[1] = 0;
607   offmax[0] = offmax[1] = 0;
608   /* Invalidate.   */
609   sizrng[0] = sizrng[1] = -1;
610 }
611 
612 /* Return the PHI node REF refers to or null if it doesn't.  */
613 
614 gphi *
615 access_ref::phi () const
616 {
617   if (!ref || TREE_CODE (ref) != SSA_NAME)
618     return NULL;
619 
620   gimple *def_stmt = SSA_NAME_DEF_STMT (ref);
621   if (!def_stmt || gimple_code (def_stmt) != GIMPLE_PHI)
622     return NULL;
623 
624   return as_a <gphi *> (def_stmt);
625 }
626 
627 /* Determine the size and offset for ARG, append it to ALL_REFS, and
628    merge the result with *THIS.  Ignore ARG if SKIP_NULL is set and
629    ARG refers to the null pointer.  On failure to determine the size
630    of ARG, set *THIS to the most permissive (conservative) result.  */
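/* For example, merging into *THIS, referring to char A[4] at offset
   [0, 1], an ARG that resolves to char B[8] at offset 2 yields the
   conservative size range [4, 8] and offset range [0, 2].  */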
631 
632 void
633 access_ref::merge_ref (vec<access_ref> *all_refs, tree arg, gimple *stmt,
634 		       int ostype, bool skip_null,
635 		       ssa_name_limit_t &snlim, pointer_query &qry)
636 {
637   access_ref aref;
638   if (!compute_objsize_r (arg, stmt, false, ostype, &aref, snlim, &qry)
639       || aref.sizrng[0] < 0)
640     {
641       /* This may be a PHI with all null pointer arguments.  Handle it
642 	 conservatively by setting all properties to the most permissive
643 	 values. */
644       base0 = false;
645       offrng[0] = offrng[1] = 0;
646       add_max_offset ();
647       set_max_size_range ();
648       return;
649     }
650 
651   if (all_refs)
652     {
653       access_ref dummy_ref;
654       aref.get_ref (all_refs, &dummy_ref, ostype, &snlim, &qry);
655     }
656 
657   if (TREE_CODE (arg) == SSA_NAME)
658     qry.put_ref (arg, aref, ostype);
659 
660   if (all_refs)
661     all_refs->safe_push (aref);
662 
663   aref.deref += deref;
664 
665   bool merged_parmarray = aref.parmarray;
666 
667   const bool nullp = skip_null && integer_zerop (arg);
668   const offset_int maxobjsize = wi::to_offset (max_object_size ());
669   offset_int minsize = sizrng[0];
670 
671   if (sizrng[0] < 0)
672     {
673       /* If *THIS doesn't contain a meaningful result yet set it to AREF
674 	 unless the argument is null and it's okay to ignore it.  */
675       if (!nullp)
676 	*this = aref;
677 
678       /* Set if the current argument refers to one or more objects of
679 	 known size (or range of sizes), as opposed to referring to
680 	 one or more unknown object(s).  */
681       const bool arg_known_size = (aref.sizrng[0] != 0
682 				   || aref.sizrng[1] != maxobjsize);
683       if (arg_known_size)
684 	sizrng[0] = aref.sizrng[0];
685 
686       return;
687     }
688 
689   /* Disregard null pointers in PHIs with two or more arguments.
690      TODO: Handle this better!  */
691   if (nullp)
692     return;
693 
694   const bool known_size = (sizrng[0] != 0 || sizrng[1] != maxobjsize);
695 
696   if (known_size && aref.sizrng[0] < minsize)
697     minsize = aref.sizrng[0];
698 
699   /* Extend the size and offset of *THIS to account for AREF.  The result
700      can be cached but results in false negatives.  */
701 
702   offset_int orng[2];
703   if (sizrng[1] < aref.sizrng[1])
704     {
705       orng[0] = offrng[0];
706       orng[1] = offrng[1];
707       *this = aref;
708     }
709   else
710     {
711       orng[0] = aref.offrng[0];
712       orng[1] = aref.offrng[1];
713     }
714 
715   if (orng[0] < offrng[0])
716     offrng[0] = orng[0];
717   if (offrng[1] < orng[1])
718     offrng[1] = orng[1];
719 
720   /* Reset the PHI's BASE0 flag if any of the nonnull arguments
721      refers to an object at an unknown offset.  */
722   if (!aref.base0)
723     base0 = false;
724 
725   sizrng[0] = minsize;
726   parmarray = merged_parmarray;
727 
728   return;
729 }
730 
731 /* Determine and return the largest object to which *THIS refers.  If
732    *THIS refers to a PHI and PREF is nonnull, fill *PREF with the details
733    of the object determined by compute_objsize(ARG, OSTYPE) for each PHI
734    argument ARG.  */
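/* For example, if *THIS refers to the result of
     p_1 = PHI <&a[2], &b[4]>
   with char A[4] and char B[8], the function returns B (the larger
   object) while *PREF keeps the PHI's SSA_NAME in its REF member and
   reflects the offset and size ranges merged from both arguments.  */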
735 
736 tree
737 access_ref::get_ref (vec<access_ref> *all_refs,
738 		     access_ref *pref /* = NULL */,
739 		     int ostype /* = 1 */,
740 		     ssa_name_limit_t *psnlim /* = NULL */,
741 		     pointer_query *qry /* = NULL */) const
742 {
743   if (!ref || TREE_CODE (ref) != SSA_NAME)
744     return NULL;
745 
746   /* FIXME: Calling get_ref() with a null PSNLIM is dangerous and might
747      cause unbounded recursion.  */
748   ssa_name_limit_t snlim_buf;
749   if (!psnlim)
750     psnlim = &snlim_buf;
751 
752   pointer_query empty_qry;
753   if (!qry)
754     qry = &empty_qry;
755 
756   if (gimple *def_stmt = SSA_NAME_DEF_STMT (ref))
757     {
758       if (is_gimple_assign (def_stmt))
759 	{
760 	  tree_code code = gimple_assign_rhs_code (def_stmt);
761 	  if (code != MIN_EXPR && code != MAX_EXPR)
762 	    return NULL_TREE;
763 
764 	  access_ref aref;
765 	  tree arg1 = gimple_assign_rhs1 (def_stmt);
766 	  aref.merge_ref (all_refs, arg1, def_stmt, ostype, false,
767 			  *psnlim, *qry);
768 
769 	  tree arg2 = gimple_assign_rhs2 (def_stmt);
770 	  aref.merge_ref (all_refs, arg2, def_stmt, ostype, false,
771 			  *psnlim, *qry);
772 
773 	  if (pref && pref != this)
774 	    {
775 	      tree ref = pref->ref;
776 	      *pref = aref;
777 	      pref->ref = ref;
778 	    }
779 
780 	  return aref.ref;
781 	}
782     }
783   else
784     return NULL_TREE;
785 
786   gphi *phi_stmt = this->phi ();
787   if (!phi_stmt)
788     return ref;
789 
790   if (!psnlim->visit_phi (ref))
791     return NULL_TREE;
792 
793   /* The conservative result of the PHI reflecting the offset and size
794      of the largest PHI argument, regardless of whether or not they all
795      refer to the same object.  */
796   access_ref phi_ref;
797   if (pref)
798     {
799       /* The identity of the object has not been determined yet but
800 	 PREF->REF is set by the caller to the PHI for convenience.
801 	 The size is negative/invalid and the offset is zero (it's
802 	 updated only after the identity of the object has been
803 	 established).  */
804       gcc_assert (pref->sizrng[0] < 0);
805       gcc_assert (pref->offrng[0] == 0 && pref->offrng[1] == 0);
806 
807       phi_ref = *pref;
808     }
809 
810   const offset_int maxobjsize = wi::to_offset (max_object_size ());
811   const unsigned nargs = gimple_phi_num_args (phi_stmt);
812   for (unsigned i = 0; i < nargs; ++i)
813     {
814       access_ref phi_arg_ref;
815       bool skip_null = i || i + 1 < nargs;
816       tree arg = gimple_phi_arg_def (phi_stmt, i);
817       phi_ref.merge_ref (all_refs, arg, phi_stmt, ostype, skip_null,
818 			 *psnlim, *qry);
819 
820       if (!phi_ref.base0
821 	  && phi_ref.sizrng[0] == 0
822 	  && phi_ref.sizrng[1] >= maxobjsize)
823 	/* When an argument results in the most permissive result,
824 	   the remaining arguments cannot constrain it.  Short-circuit
825 	   the evaluation.  */
826 	break;
827     }
828 
829   if (phi_ref.sizrng[0] < 0)
830     {
831       /* Fail if none of the PHI's arguments resulted in updating PHI_REF
832 	 (perhaps because they have all been already visited by prior
833 	 recursive calls).  */
834       psnlim->leave_phi (ref);
835       return NULL_TREE;
836     }
837 
838   /* Avoid changing *THIS.  */
839   if (pref && pref != this)
840     {
841       /* Keep the SSA_NAME of the PHI unchanged so that all PHI arguments
842 	 can be referred to later if necessary.  This is useful even if
843 	 they all refer to the same object.  */
844       tree ref = pref->ref;
845       *pref = phi_ref;
846       pref->ref = ref;
847     }
848 
849   psnlim->leave_phi (ref);
850 
851   return phi_ref.ref;
852 }
853 
854 /* Return the maximum amount of space remaining and, if PMIN is nonnull,
855    set *PMIN to the minimum.  */
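/* For example, for a BASE0 reference with SIZRNG [4, 8] and OFFRNG
   [2, 2], *PMIN is set to 2 (i.e., 4 - 2) and 6 (8 - 2) is
   returned.  */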
856 
857 offset_int
858 access_ref::size_remaining (offset_int *pmin /* = NULL */) const
859 {
860   offset_int minbuf;
861   if (!pmin)
862     pmin = &minbuf;
863 
864   if (sizrng[0] < 0)
865     {
866       /* If the identity of the object hasn't been determined return
867 	 the maximum size range.  */
868       *pmin = 0;
869       return wi::to_offset (max_object_size ());
870     }
871 
872   /* add_offset() ensures the offset range isn't inverted.  */
873   gcc_checking_assert (offrng[0] <= offrng[1]);
874 
875   if (base0)
876     {
877       /* The offset into referenced object is zero-based (i.e., it's
878 	 not referenced by a pointer into middle of some unknown object).  */
879       if (offrng[0] < 0 && offrng[1] < 0)
880 	{
881 	  /* If the offset is negative the remaining size is zero.  */
882 	  *pmin = 0;
883 	  return 0;
884 	}
885 
886       if (sizrng[1] <= offrng[0])
887 	{
888 	  /* If the starting offset is greater than or equal to the upper
889 	     bound on the size of the object, the space remaining is zero.
890 	     As a special case, if it's equal, set *PMIN to -1 to let
891 	     the caller know the offset is valid and just past the end.  */
892 	  *pmin = sizrng[1] == offrng[0] ? -1 : 0;
893 	  return 0;
894 	}
895 
896       /* Otherwise return the size minus the lower bound of the offset.  */
897       offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];
898 
899       *pmin = sizrng[0] - or0;
900       return sizrng[1] - or0;
901     }
902 
903   /* The offset to the referenced object isn't zero-based (i.e., it may
904      refer to a byte other than the first).  The size of such an object
905      is constrained only by the size of the address space (the result
906      of max_object_size()).  */
907   if (sizrng[1] <= offrng[0])
908     {
909       *pmin = 0;
910       return 0;
911     }
912 
913   offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];
914 
915   *pmin = sizrng[0] - or0;
916   return sizrng[1] - or0;
917 }
918 
919 /* Return true if the offset and object size are in range for SIZE.  */
920 
921 bool
922 access_ref::offset_in_range (const offset_int &size) const
923 {
924   if (size_remaining () < size)
925     return false;
926 
927   if (base0)
928     return offmax[0] >= 0 && offmax[1] <= sizrng[1];
929 
930   offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
931   return offmax[0] > -maxoff && offmax[1] < maxoff;
932 }
933 
934 /* Add the range [MIN, MAX] to the offset range.  For known objects (with
935    zero-based offsets) at least one of whose offset's bounds is in range,
936    constrain the other (or both) to the bounds of the object (i.e., zero
937    and the upper bound of its size).  This improves the quality of
938    diagnostics.  */
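/* For example, for a BASE0 reference to char A[8] with OFFRNG [0, 0],
   add_offset (2, 10) first extends OFFRNG to [2, 10] and then clamps
   its upper bound to the size of the object, leaving [2, 8].  */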
939 
940 void access_ref::add_offset (const offset_int &min, const offset_int &max)
941 {
942   if (min <= max)
943     {
944       /* To add an ordinary range just add it to the bounds.  */
945       offrng[0] += min;
946       offrng[1] += max;
947     }
948   else if (!base0)
949     {
950       /* To add an inverted range to an offset to an unknown object
951 	 expand it to the maximum.  */
952       add_max_offset ();
953       return;
954     }
955   else
956     {
957       /* To add an inverted range to an offset to a known object set
958 	 the upper bound to the maximum representable offset value
959 	 (which may be greater than MAX_OBJECT_SIZE).
960 	 The lower bound is either the sum of the current offset and
961 	 MIN when abs(MAX) is greater than the former, or zero otherwise.
962 	 Zero because then the inverted range includes the negative of
963 	 the lower bound.  */
964       offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
965       offrng[1] = maxoff;
966 
967       if (max >= 0)
968 	{
969 	  offrng[0] = 0;
970 	  if (offmax[0] > 0)
971 	    offmax[0] = 0;
972 	  return;
973 	}
974 
975       offset_int absmax = wi::abs (max);
976       if (offrng[0] < absmax)
977 	{
978 	  offrng[0] += min;
979 	  /* Cap the lower bound at the upper (set to MAXOFF above)
980 	     to avoid inadvertently recreating an inverted range.  */
981 	  if (offrng[1] < offrng[0])
982 	    offrng[0] = offrng[1];
983 	}
984       else
985 	offrng[0] = 0;
986     }
987 
988   /* Update the minimum and maximum offsets computed so far.  */
989   if (offrng[1] < 0 && offrng[1] < offmax[0])
990     offmax[0] = offrng[1];
991   if (offrng[0] > 0 && offrng[0] > offmax[1])
992     offmax[1] = offrng[0];
993 
994   if (!base0)
995     return;
996 
997   /* When referencing a known object check to see if the offset computed
998      so far is in bounds... */
999   offset_int remrng[2];
1000   remrng[1] = size_remaining (remrng);
1001   if (remrng[1] > 0 || remrng[0] < 0)
1002     {
1003       /* ...if so, constrain it so that neither bound exceeds the size of
1004 	 the object.  Out of bounds offsets are left unchanged, and, for
1005 	 better or worse, become in bounds later.  They should be detected
1006 	 and diagnosed at the point they first become invalid by
1007 	 -Warray-bounds.  */
1008       if (offrng[0] < 0)
1009 	offrng[0] = 0;
1010       if (offrng[1] > sizrng[1])
1011 	offrng[1] = sizrng[1];
1012     }
1013 }
1014 
1015 /* Issue one inform message describing each target of an access REF.
1016    MODE indicates whether the access is a read, a write, or both.  */
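/* For example, a note issued for a write access might read
     note: at offset [4, 8] into destination object 'buf' of size 16
   with the location pointing either at the DECL or at the statement
   that allocated the object.  */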
1017 
1018 void
1019 access_ref::inform_access (access_mode mode, int ostype /* = 1 */) const
1020 {
1021   const access_ref &aref = *this;
1022   if (!aref.ref)
1023     return;
1024 
1025   if (phi ())
1026     {
1027       /* Set MAXREF to refer to the largest object and fill ALL_REFS
1028 	 with data for all objects referenced by the PHI arguments.  */
1029       access_ref maxref;
1030       auto_vec<access_ref> all_refs;
1031       if (!get_ref (&all_refs, &maxref, ostype))
1032 	return;
1033 
1034       if (all_refs.length ())
1035 	{
1036 	  /* Except for MAXREF, the rest of the arguments' offsets need not
1037 	     reflect one added to the PHI itself.  Determine the latter from
1038 	     MAXREF on which the result is based.  */
1039 	  const offset_int orng[] =
1040 	    {
1041 	     offrng[0] - maxref.offrng[0],
1042 	     wi::smax (offrng[1] - maxref.offrng[1], offrng[0]),
1043 	    };
1044 
1045 	  /* Add the final PHI's offset to that of each of the arguments
1046 	     and recurse to issue an inform message for it.  */
1047 	  for (unsigned i = 0; i != all_refs.length (); ++i)
1048 	    {
1049 	      /* Skip any PHIs; those could lead to infinite recursion.  */
1050 	      if (all_refs[i].phi ())
1051 		continue;
1052 
1053 	      all_refs[i].add_offset (orng[0], orng[1]);
1054 	      all_refs[i].inform_access (mode, ostype);
1055 	    }
1056 	  return;
1057 	}
1058     }
1059 
1060   /* Convert offset range and avoid including a zero range since it
1061      isn't necessarily meaningful.  */
1062   HOST_WIDE_INT diff_min = tree_to_shwi (TYPE_MIN_VALUE (ptrdiff_type_node));
1063   HOST_WIDE_INT diff_max = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
1064   HOST_WIDE_INT minoff;
1065   HOST_WIDE_INT maxoff = diff_max;
1066   if (wi::fits_shwi_p (aref.offrng[0]))
1067     minoff = aref.offrng[0].to_shwi ();
1068   else
1069     minoff = aref.offrng[0] < 0 ? diff_min : diff_max;
1070 
1071   if (wi::fits_shwi_p (aref.offrng[1]))
1072     maxoff = aref.offrng[1].to_shwi ();
1073 
1074   if (maxoff <= diff_min || maxoff >= diff_max)
1075     /* Avoid mentioning an upper bound that's equal to or in excess
1076        of the maximum of ptrdiff_t.  */
1077     maxoff = minoff;
1078 
1079   /* Convert size range and always include it since all sizes are
1080      meaningful. */
1081   unsigned long long minsize = 0, maxsize = 0;
1082   if (wi::fits_shwi_p (aref.sizrng[0])
1083       && wi::fits_shwi_p (aref.sizrng[1]))
1084     {
1085       minsize = aref.sizrng[0].to_shwi ();
1086       maxsize = aref.sizrng[1].to_shwi ();
1087     }
1088 
1089   /* SIZRNG doesn't necessarily have the same range as the allocation
1090      size determined by gimple_call_alloc_size ().  */
1091   char sizestr[80];
1092   if (minsize == maxsize)
1093     sprintf (sizestr, "%llu", minsize);
1094   else
1095     sprintf (sizestr, "[%llu, %llu]", minsize, maxsize);
1096 
1097   char offstr[80];
1098   if (minoff == 0
1099       && (maxoff == 0 || aref.sizrng[1] <= maxoff))
1100     offstr[0] = '\0';
1101   else if (minoff == maxoff)
1102     sprintf (offstr, "%lli", (long long) minoff);
1103   else
1104     sprintf (offstr, "[%lli, %lli]", (long long) minoff, (long long) maxoff);
1105 
1106   location_t loc = UNKNOWN_LOCATION;
1107 
1108   tree ref = this->ref;
1109   tree allocfn = NULL_TREE;
1110   if (TREE_CODE (ref) == SSA_NAME)
1111     {
1112       gimple *stmt = SSA_NAME_DEF_STMT (ref);
1113       if (!stmt)
1114 	return;
1115 
1116       if (is_gimple_call (stmt))
1117 	{
1118 	  loc = gimple_location (stmt);
1119 	  if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
1120 	    {
1121 	      /* Strip the SSA_NAME suffix from the variable name and
1122 		 recreate an identifier with the VLA's original name.  */
1123 	      ref = gimple_call_lhs (stmt);
1124 	      if (SSA_NAME_IDENTIFIER (ref))
1125 		{
1126 		  ref = SSA_NAME_IDENTIFIER (ref);
1127 		  const char *id = IDENTIFIER_POINTER (ref);
1128 		  size_t len = strcspn (id, ".$");
1129 		  if (!len)
1130 		    len = strlen (id);
1131 		  ref = get_identifier_with_length (id, len);
1132 		}
1133 	    }
1134 	  else
1135 	    {
1136 	      /* Except for VLAs, retrieve the allocation function.  */
1137 	      allocfn = gimple_call_fndecl (stmt);
1138 	      if (!allocfn)
1139 		allocfn = gimple_call_fn (stmt);
1140 	      if (TREE_CODE (allocfn) == SSA_NAME)
1141 		{
1142 		  /* For an ALLOC_CALL via a function pointer make a small
1143 		     effort to determine the destination of the pointer.  */
1144 		  gimple *def = SSA_NAME_DEF_STMT (allocfn);
1145 		  if (gimple_assign_single_p (def))
1146 		    {
1147 		      tree rhs = gimple_assign_rhs1 (def);
1148 		      if (DECL_P (rhs))
1149 			allocfn = rhs;
1150 		      else if (TREE_CODE (rhs) == COMPONENT_REF)
1151 			allocfn = TREE_OPERAND (rhs, 1);
1152 		    }
1153 		}
1154 	    }
1155 	}
1156       else if (gimple_nop_p (stmt))
1157 	/* Handle DECL_PARM below.  */
1158 	ref = SSA_NAME_VAR (ref);
1159       else if (is_gimple_assign (stmt)
1160 	       && (gimple_assign_rhs_code (stmt) == MIN_EXPR
1161 		   || gimple_assign_rhs_code (stmt) == MAX_EXPR))
1162 	{
1163 	  /* MIN or MAX_EXPR here implies a reference to a known object
1164 	     and either an unknown or distinct one (the latter being
1165 	     the result of an invalid relational expression).  Determine
1166 	     the identity of the former and point to it in the note.
1167 	     TODO: Consider merging with PHI handling.  */
1168 	  access_ref arg_ref[2];
1169 	  tree arg = gimple_assign_rhs1 (stmt);
1170 	  compute_objsize (arg, /* ostype = */ 1 , &arg_ref[0]);
1171 	  arg = gimple_assign_rhs2 (stmt);
1172 	  compute_objsize (arg, /* ostype = */ 1 , &arg_ref[1]);
1173 
1174 	  /* Use the argument that references a known object with more
1175 	     space remaining.  */
1176 	  const bool idx
1177 	    = (!arg_ref[0].ref || !arg_ref[0].base0
1178 	       || (arg_ref[0].base0 && arg_ref[1].base0
1179 		   && (arg_ref[0].size_remaining ()
1180 		       < arg_ref[1].size_remaining ())));
1181 
1182 	  arg_ref[idx].offrng[0] = offrng[0];
1183 	  arg_ref[idx].offrng[1] = offrng[1];
1184 	  arg_ref[idx].inform_access (mode);
1185 	  return;
1186 	}
1187     }
1188 
1189   if (DECL_P (ref))
1190     loc = DECL_SOURCE_LOCATION (ref);
1191   else if (EXPR_P (ref) && EXPR_HAS_LOCATION (ref))
1192     loc = EXPR_LOCATION (ref);
1193   else if (TREE_CODE (ref) != IDENTIFIER_NODE
1194 	   && TREE_CODE (ref) != SSA_NAME)
1195     return;
1196 
1197   if (mode == access_read_write || mode == access_write_only)
1198     {
1199       if (allocfn == NULL_TREE)
1200 	{
1201 	  if (*offstr)
1202 	    inform (loc, "at offset %s into destination object %qE of size %s",
1203 		    offstr, ref, sizestr);
1204 	  else
1205 	    inform (loc, "destination object %qE of size %s", ref, sizestr);
1206 	  return;
1207 	}
1208 
1209       if (*offstr)
1210 	inform (loc,
1211 		"at offset %s into destination object of size %s "
1212 		"allocated by %qE", offstr, sizestr, allocfn);
1213       else
1214 	inform (loc, "destination object of size %s allocated by %qE",
1215 		sizestr, allocfn);
1216       return;
1217     }
1218 
1219   if (mode == access_read_only)
1220     {
1221       if (allocfn == NULL_TREE)
1222 	{
1223 	  if (*offstr)
1224 	    inform (loc, "at offset %s into source object %qE of size %s",
1225 		    offstr, ref, sizestr);
1226 	  else
1227 	    inform (loc, "source object %qE of size %s", ref, sizestr);
1228 
1229 	  return;
1230 	}
1231 
1232       if (*offstr)
1233 	inform (loc,
1234 		"at offset %s into source object of size %s allocated by %qE",
1235 		offstr, sizestr, allocfn);
1236       else
1237 	inform (loc, "source object of size %s allocated by %qE",
1238 		sizestr, allocfn);
1239       return;
1240     }
1241 
1242   if (allocfn == NULL_TREE)
1243     {
1244       if (*offstr)
1245 	inform (loc, "at offset %s into object %qE of size %s",
1246 		offstr, ref, sizestr);
1247       else
1248 	inform (loc, "object %qE of size %s", ref, sizestr);
1249 
1250       return;
1251     }
1252 
1253   if (*offstr)
1254     inform (loc,
1255 	    "at offset %s into object of size %s allocated by %qE",
1256 	    offstr, sizestr, allocfn);
1257   else
1258     inform (loc, "object of size %s allocated by %qE",
1259 	    sizestr, allocfn);
1260 }
1261 
1262 /* Dump *THIS to FILE.  */
1263 
1264 void
1265 access_ref::dump (FILE *file) const
1266 {
1267   for (int i = deref; i < 0; ++i)
1268     fputc ('&', file);
1269 
1270   for (int i = 0; i < deref; ++i)
1271     fputc ('*', file);
1272 
1273   if (gphi *phi_stmt = phi ())
1274     {
1275       fputs ("PHI <", file);
1276       unsigned nargs = gimple_phi_num_args (phi_stmt);
1277       for (unsigned i = 0; i != nargs; ++i)
1278 	{
1279 	  tree arg = gimple_phi_arg_def (phi_stmt, i);
1280 	  print_generic_expr (file, arg);
1281 	  if (i + 1 < nargs)
1282 	    fputs (", ", file);
1283 	}
1284       fputc ('>', file);
1285     }
1286   else
1287     print_generic_expr (file, ref);
1288 
1289   if (offrng[0] != offrng[1])
1290     fprintf (file, " + [%lli, %lli]",
1291 	     (long long) offrng[0].to_shwi (),
1292 	     (long long) offrng[1].to_shwi ());
1293   else if (offrng[0] != 0)
1294     fprintf (file, " %c %lli",
1295 	     offrng[0] < 0 ? '-' : '+',
1296 	     (long long) offrng[0].to_shwi ());
1297 
1298   if (base0)
1299     fputs (" (base0)", file);
1300 
1301   fputs ("; size: ", file);
1302   if (sizrng[0] != sizrng[1])
1303     {
1304       offset_int maxsize = wi::to_offset (max_object_size ());
1305       if (sizrng[0] == 0 && sizrng[1] >= maxsize)
1306 	fputs ("unknown", file);
1307       else
1308 	fprintf (file, "[%llu, %llu]",
1309 		 (unsigned long long) sizrng[0].to_uhwi (),
1310 		 (unsigned long long) sizrng[1].to_uhwi ());
1311     }
1312   else if (sizrng[0] != 0)
1313     fprintf (file, "%llu",
1314 	     (unsigned long long) sizrng[0].to_uhwi ());
1315 
1316   fputc ('\n', file);
1317 }
1318 
1319 /* Set the access to at most MAXWRITE and MAXREAD bytes, and at least 1
1320    when MINWRITE or MINREAD, respectively, is set.  */
1321 access_data::access_data (range_query *query, gimple *stmt, access_mode mode,
1322 			  tree maxwrite /* = NULL_TREE */,
1323 			  bool minwrite /* = false */,
1324 			  tree maxread /* = NULL_TREE */,
1325 			  bool minread /* = false */)
1326   : stmt (stmt), call (), dst (), src (), mode (mode), ostype ()
1327 {
1328   set_bound (dst_bndrng, maxwrite, minwrite, query, stmt);
1329   set_bound (src_bndrng, maxread, minread, query, stmt);
1330 }
1331 
1332 /* Set the access to at most MAXWRITE and MAXREAD bytes, and at least 1
1333    when MINWRITE or MINREAD, respectively, is set.  */
1334 access_data::access_data (range_query *query, tree expr, access_mode mode,
1335 			  tree maxwrite /* = NULL_TREE */,
1336 			  bool minwrite /* = false */,
1337 			  tree maxread /* = NULL_TREE */,
1338 			  bool minread /* = false */)
1339   : stmt (), call (expr), dst (), src (), mode (mode), ostype ()
1340 {
1341   set_bound (dst_bndrng, maxwrite, minwrite, query, stmt);
1342   set_bound (src_bndrng, maxread, minread, query, stmt);
1343 }
1344 
1345 /* Set BNDRNG to the range of BOUND for the statement STMT.  */
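/* For example, for BOUND with the range [0, 32], BNDRNG is set to
   [0, 32] even when MINACCESS is set, since a zero bound implies the
   access may be empty; when no range can be determined for BOUND,
   BNDRNG defaults to [1, HWI_M1U] with MINACCESS set and to
   [0, HWI_M1U] otherwise.  */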
1346 
1347 void
1348 access_data::set_bound (offset_int bndrng[2], tree bound, bool minaccess,
1349 			range_query *query, gimple *stmt)
1350 {
1351   /* Set the default bounds of the access and adjust below.  */
1352   bndrng[0] = minaccess ? 1 : 0;
1353   bndrng[1] = HOST_WIDE_INT_M1U;
1354 
1355   /* When BOUND is nonnull and a range can be extracted from it,
1356      set the bounds of the access to reflect both it and MINACCESS.
1357      BNDRNG[0] is the size of the minimum access.  */
1358   tree rng[2];
1359   if (bound && get_size_range (query, bound, stmt, rng, SR_ALLOW_ZERO))
1360     {
1361       bndrng[0] = wi::to_offset (rng[0]);
1362       bndrng[1] = wi::to_offset (rng[1]);
1363       bndrng[0] = bndrng[0] > 0 && minaccess ? 1 : 0;
1364     }
1365 }
1366 
1367 /* Set a bit for the PHI in VISITED and return true if it wasn't
1368    already set.  */
1369 
1370 bool
1371 ssa_name_limit_t::visit_phi (tree ssa_name)
1372 {
1373   if (!visited)
1374     visited = BITMAP_ALLOC (NULL);
1375 
1376   /* Return false if SSA_NAME has already been visited.  */
1377   return bitmap_set_bit (visited, SSA_NAME_VERSION (ssa_name));
1378 }
1379 
1380 /* Clear a bit for the PHI in VISITED.  */
1381 
1382 void
1383 ssa_name_limit_t::leave_phi (tree ssa_name)
1384 {
1385   /* Clear the bit so SSA_NAME can be visited again along another path.  */
1386   bitmap_clear_bit (visited, SSA_NAME_VERSION (ssa_name));
1387 }
1388 
1389 /* Return false if the SSA_NAME chain length limit has been reached,
1390    otherwise decrement the remaining count and return true.  */
1391 
1392 bool
1393 ssa_name_limit_t::next ()
1394 {
1395   /* Return false to let caller avoid recursing beyond
1396      the specified limit.  */
1397   if (ssa_def_max == 0)
1398     return false;
1399 
1400   --ssa_def_max;
1401   return true;
1402 }
1403 
1404 /* If the SSA_NAME has already been "seen" return a positive value.
1405    Otherwise add it to VISITED.  If the SSA_NAME limit has been
1406    reached, return a negative value.  Otherwise return zero.  */
1407 
1408 int
1409 ssa_name_limit_t::next_phi (tree ssa_name)
1410 {
1411   {
1412     gimple *def_stmt = SSA_NAME_DEF_STMT (ssa_name);
1413     /* Return a positive value if the PHI has already been visited.  */
1414     if (gimple_code (def_stmt) == GIMPLE_PHI
1415 	&& !visit_phi (ssa_name))
1416       return 1;
1417   }
1418 
1419   /* Return a negative value to let caller avoid recursing beyond
1420      the specified limit.  */
1421   if (ssa_def_max == 0)
1422     return -1;
1423 
1424   --ssa_def_max;
1425 
1426   return 0;
1427 }
1428 
1429 ssa_name_limit_t::~ssa_name_limit_t ()
1430 {
1431   if (visited)
1432     BITMAP_FREE (visited);
1433 }
1434 
1435 /* Default ctor.  Initialize the object with a pointer to the range_query
1436    instance to use, or null.  */
1437 
1438 pointer_query::pointer_query (range_query *qry /* = NULL */)
1439   : rvals (qry), hits (), misses (), failures (), depth (), max_depth (),
1440     var_cache ()
1441 {
1442   /* No op.  */
1443 }
1444 
1445 /* Return a pointer to the cached access_ref instance for the SSA_NAME
1446    PTR if it's there or null otherwise.  */
1447 
1448 const access_ref *
1449 pointer_query::get_ref (tree ptr, int ostype /* = 1 */) const
1450 {
1451   unsigned version = SSA_NAME_VERSION (ptr);
1452   unsigned idx = version << 1 | (ostype & 1);
1453   if (var_cache.indices.length () <= idx)
1454     {
1455       ++misses;
1456       return NULL;
1457     }
1458 
1459   unsigned cache_idx = var_cache.indices[idx];
1460   if (var_cache.access_refs.length () <= cache_idx)
1461     {
1462       ++misses;
1463       return NULL;
1464     }
1465 
1466   const access_ref &cache_ref = var_cache.access_refs[cache_idx];
1467   if (cache_ref.ref)
1468     {
1469       ++hits;
1470       return &cache_ref;
1471     }
1472 
1473   ++misses;
1474   return NULL;
1475 }
1476 
1477 /* Retrieve the access_ref instance for a variable from the cache if it's
1478    there or compute it and insert it into the cache if it's nonnull.  */
1479 
1480 bool
1481 pointer_query::get_ref (tree ptr, gimple *stmt, access_ref *pref,
1482 			int ostype /* = 1 */)
1483 {
1484   const unsigned version
1485     = TREE_CODE (ptr) == SSA_NAME ? SSA_NAME_VERSION (ptr) : 0;
1486 
1487   if (version)
1488     {
1489       unsigned idx = version << 1 | (ostype & 1);
1490       if (idx < var_cache.indices.length ())
1491 	{
1492 	  unsigned cache_idx = var_cache.indices[idx] - 1;
1493 	  if (cache_idx < var_cache.access_refs.length ()
1494 	      && var_cache.access_refs[cache_idx].ref)
1495 	    {
1496 	      ++hits;
1497 	      *pref = var_cache.access_refs[cache_idx];
1498 	      return true;
1499 	    }
1500 	}
1501 
1502       ++misses;
1503     }
1504 
1505   if (!compute_objsize (ptr, stmt, ostype, pref, this))
1506     {
1507       ++failures;
1508       return false;
1509     }
1510 
1511   return true;
1512 }
1513 
1514 /* Add a copy of the access_ref REF for the SSA_NAME to the cache if it's
1515    nonnull.  */
1516 
1517 void
1518 pointer_query::put_ref (tree ptr, const access_ref &ref, int ostype /* = 1 */)
1519 {
1520   /* Only add populated/valid entries.  */
1521   if (!ref.ref || ref.sizrng[0] < 0)
1522     return;
1523 
1524   /* Add REF to the two-level cache.  */
1525   unsigned version = SSA_NAME_VERSION (ptr);
1526   unsigned idx = version << 1 | (ostype & 1);
1527 
1528   /* Grow INDICES if necessary.  An index is valid if it's nonzero.
1529      Its value minus one is the index into ACCESS_REFS.  Not all
1530      entries are valid.  */
1531   if (var_cache.indices.length () <= idx)
1532     var_cache.indices.safe_grow_cleared (idx + 1);
1533 
1534   if (!var_cache.indices[idx])
1535     var_cache.indices[idx] = var_cache.access_refs.length () + 1;
1536 
1537   /* Grow ACCESS_REF cache if necessary.  An entry is valid if its
1538      REF member is nonnull.  All entries except for the last two
1539      are valid.  Once nonnull, the REF value must stay unchanged.  */
1540   unsigned cache_idx = var_cache.indices[idx];
1541   if (var_cache.access_refs.length () <= cache_idx)
1542     var_cache.access_refs.safe_grow_cleared (cache_idx + 1);
1543 
1544   access_ref &cache_ref = var_cache.access_refs[cache_idx];
1545   if (cache_ref.ref)
1546     {
1547       gcc_checking_assert (cache_ref.ref == ref.ref);
1548       return;
1549     }
1550 
1551   cache_ref = ref;
1552 }
1553 
1554 /* Flush the cache.  */
1555 
1556 void
1557 pointer_query::flush_cache ()
1558 {
1559   var_cache.indices.release ();
1560   var_cache.access_refs.release ();
1561 }
1562 
1563 /* Dump statistics and, optionally, cache contents to DUMP_FILE.  */
1564 
1565 void
1566 pointer_query::dump (FILE *dump_file, bool contents /* = false */)
1567 {
1568   unsigned nused = 0, nrefs = 0;
1569   unsigned nidxs = var_cache.indices.length ();
1570   for (unsigned i = 0; i != nidxs; ++i)
1571     {
1572       unsigned ari = var_cache.indices[i];
1573       if (!ari)
1574 	continue;
1575 
1576       ++nused;
1577 
1578       const access_ref &aref = var_cache.access_refs[ari];
1579       if (!aref.ref)
1580 	continue;
1581 
1582       ++nrefs;
1583     }
1584 
1585   fprintf (dump_file, "pointer_query counters:\n"
1586 	   "  index cache size:   %u\n"
1587 	   "  index entries:      %u\n"
1588 	   "  access cache size:  %u\n"
1589 	   "  access entries:     %u\n"
1590 	   "  hits:               %u\n"
1591 	   "  misses:             %u\n"
1592 	   "  failures:           %u\n"
1593 	   "  max_depth:          %u\n",
1594 	   nidxs, nused,
1595 	   var_cache.access_refs.length (), nrefs,
1596 	   hits, misses, failures, max_depth);
1597 
1598   if (!contents || !nidxs)
1599     return;
1600 
1601   fputs ("\npointer_query cache contents:\n", dump_file);
1602 
1603   for (unsigned i = 0; i != nidxs; ++i)
1604     {
1605       unsigned ari = var_cache.indices[i];
1606       if (!ari)
1607 	continue;
1608 
1609       const access_ref &aref = var_cache.access_refs[ari];
1610       if (!aref.ref)
1611 	continue;
1612 
1613       /* The level-1 cache index corresponds to the SSA_NAME_VERSION
1614 	 shifted left by one and ORed with the Object Size Type in
1615 	 the lowest bit.  Print the two separately.  */
1616       unsigned ver = i >> 1;
1617       unsigned ost = i & 1;
1618 
1619       fprintf (dump_file, "  %u.%u[%u]: ", ver, ost, ari);
1620       if (tree name = ssa_name (ver))
1621 	{
1622 	  print_generic_expr (dump_file, name);
1623 	  fputs (" = ", dump_file);
1624 	}
1625       else
1626 	fprintf (dump_file, "  _%u = ", ver);
1627 
1628       aref.dump (dump_file);
1629     }
1630 
1631   fputc ('\n', dump_file);
1632 }
1633 
1634 /* A helper of compute_objsize_r() to determine the size from an assignment
1635    statement STMT with the RHS of either MIN_EXPR or MAX_EXPR.  On success
1636    set PREF->REF to the operand with more or less space remaining,
1637    respectively, if both refer to the same (sub)object, or to PTR if they
1638    might not, and return true.  Otherwise, if the identity of neither
1639    operand can be determined, return false.  */
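/* For example, for
     q_1 = MIN_EXPR <p1_2, p2_3>
   where P1 points to char A[8] at offset 2 and P2 to char B[8] at
   offset 4, *PREF is set from the operand with more space remaining
   (P1, with 6 bytes) and PREF->REF to PTR itself since the operands
   refer to distinct objects.  */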
1640 
1641 static bool
1642 handle_min_max_size (tree ptr, int ostype, access_ref *pref,
1643 		     ssa_name_limit_t &snlim, pointer_query *qry)
1644 {
1645   gimple *stmt = SSA_NAME_DEF_STMT (ptr);
1646   const tree_code code = gimple_assign_rhs_code (stmt);
1647 
1648   /* In a valid MAX_/MIN_EXPR both operands must refer to the same array.
1649      Determine the size/offset of each and use the one with more or less
1650      space remaining, respectively.  If either fails, use the information
1651      determined from the other instead, adjusted up or down as appropriate
1652      for the expression.  */
1653   access_ref aref[2] = { *pref, *pref };
1654   tree arg1 = gimple_assign_rhs1 (stmt);
1655   if (!compute_objsize_r (arg1, stmt, false, ostype, &aref[0], snlim, qry))
1656     {
1657       aref[0].base0 = false;
1658       aref[0].offrng[0] = aref[0].offrng[1] = 0;
1659       aref[0].add_max_offset ();
1660       aref[0].set_max_size_range ();
1661     }
1662 
1663   tree arg2 = gimple_assign_rhs2 (stmt);
1664   if (!compute_objsize_r (arg2, stmt, false, ostype, &aref[1], snlim, qry))
1665     {
1666       aref[1].base0 = false;
1667       aref[1].offrng[0] = aref[1].offrng[1] = 0;
1668       aref[1].add_max_offset ();
1669       aref[1].set_max_size_range ();
1670     }
1671 
1672   if (!aref[0].ref && !aref[1].ref)
1673     /* Fail if the identity of neither argument could be determined.  */
1674     return false;
1675 
1676   bool i0 = false;
1677   if (aref[0].ref && aref[0].base0)
1678     {
1679       if (aref[1].ref && aref[1].base0)
1680 	{
1681 	  /* If the object referenced by both arguments has been determined
1682 	     set *PREF to the one with more or less space remaining, whichever
1683 	     is appropriate for CODE.
1684 	     TODO: Indicate when the objects are distinct so it can be
1685 	     diagnosed.  */
1686 	  i0 = code == MAX_EXPR;
1687 	  const bool i1 = !i0;
1688 
1689 	  if (aref[i0].size_remaining () < aref[i1].size_remaining ())
1690 	    *pref = aref[i1];
1691 	  else
1692 	    *pref = aref[i0];
1693 
1694 	  if (aref[i0].ref != aref[i1].ref)
1695 	    /* If the operands don't refer to the same (sub)object set
1696 	       PREF->REF to the SSA_NAME from which STMT was obtained
1697 	       so that both can be identified in a diagnostic.  */
1698 	    pref->ref = ptr;
1699 
1700 	  return true;
1701 	}
1702 
1703       /* If only the object referenced by one of the arguments could be
1704 	 determined, use it and...  */
1705       *pref = aref[0];
1706       i0 = true;
1707     }
1708   else
1709     *pref = aref[1];
1710 
1711   const bool i1 = !i0;
1712   /* ...see if the offset obtained from the other pointer can be used
1713      to tighten up the bound on the offset obtained from the first.  */
1714   if ((code == MAX_EXPR && aref[i1].offrng[1] < aref[i0].offrng[0])
1715       || (code == MIN_EXPR && aref[i0].offrng[0] < aref[i1].offrng[1]))
1716     {
1717       pref->offrng[0] = aref[i0].offrng[0];
1718       pref->offrng[1] = aref[i0].offrng[1];
1719     }
1720 
1721   /* Replace PTR->REF with the SSA_NAME to indicate the expression
1722      might not refer to the same (sub)object.  */
1723   pref->ref = ptr;
1724   return true;
1725 }
1726 
1727 /* A helper of compute_objsize_r() to determine the size of a DECL.
1728    Return true on success and (possibly in the future) false on failure.  */
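
/* For instance (a hedged illustration, not from this file): for
     char buf[32];
   decl_init_size below yields the constant 32, so *PREF gets the
   size range [32, 32] with a zero-based offset.  */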
1729 
1730 static bool
1731 handle_decl (tree decl, bool addr, access_ref *pref)
1732 {
1733   tree decl_type = TREE_TYPE (decl);
1734 
1735   pref->ref = decl;
1736 
1737   /* Reset the offset in case it was set by a prior call and not
1738      cleared by the caller.  The offset is only adjusted after
1739      the identity of the object has been determined.  */
1740   pref->offrng[0] = pref->offrng[1] = 0;
1741 
1742   if (!addr && POINTER_TYPE_P (decl_type))
1743     {
1744       /* Set the maximum size if the reference is to the pointer
1745 	 itself (as opposed to what it points to), and clear
1746 	 BASE0 since the offset isn't necessarily zero-based.  */
1747       pref->set_max_size_range ();
1748       pref->base0 = false;
1749       return true;
1750     }
1751 
1752   /* Valid offsets into the object are nonnegative.  */
1753   pref->base0 = true;
1754 
1755   if (tree size = decl_init_size (decl, false))
1756     if (TREE_CODE (size) == INTEGER_CST)
1757       {
1758 	pref->sizrng[0] = wi::to_offset (size);
1759 	pref->sizrng[1] = pref->sizrng[0];
1760 	return true;
1761       }
1762 
1763   pref->set_max_size_range ();
1764   return true;
1765 }
1766 
1767 /* A helper of compute_objsize_r() to determine the size from ARRAY_REF
1768    AREF.  ADDR is true if PTR is the operand of ADDR_EXPR.  Return true
1769    on success and false on failure.  */
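
/* As a worked example (illustrative): for
     int a[8];  ... = a[i];   with i in [2, 5]
   the index range is scaled by the 4-byte element size below,
   yielding the byte offset range [8, 20] added to *PREF.  */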
1770 
1771 static bool
1772 handle_array_ref (tree aref, gimple *stmt, bool addr, int ostype,
1773 		  access_ref *pref, ssa_name_limit_t &snlim,
1774 		  pointer_query *qry)
1775 {
1776   gcc_assert (TREE_CODE (aref) == ARRAY_REF);
1777 
1778   tree arefop = TREE_OPERAND (aref, 0);
1779   tree reftype = TREE_TYPE (arefop);
1780   if (!addr && TREE_CODE (TREE_TYPE (reftype)) == POINTER_TYPE)
1781     /* Avoid arrays of pointers.  FIXME: Handle pointers to arrays
1782        of known bound.  */
1783     return false;
1784 
1785   if (!compute_objsize_r (arefop, stmt, addr, ostype, pref, snlim, qry))
1786     return false;
1787 
1788   offset_int orng[2];
1789   tree off = pref->eval (TREE_OPERAND (aref, 1));
1790   range_query *const rvals = qry ? qry->rvals : NULL;
1791   if (!get_offset_range (off, stmt, orng, rvals))
1792     {
1793       /* Set ORNG to the maximum offset representable in ptrdiff_t.  */
1794       orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
1795       orng[0] = -orng[1] - 1;
1796     }
1797 
1798   /* Convert the array index range determined above to a byte
1799      offset.  */
1800   tree lowbnd = array_ref_low_bound (aref);
1801   if (!integer_zerop (lowbnd) && tree_fits_uhwi_p (lowbnd))
1802     {
1803       /* Adjust the index by the low bound of the array domain
1804 	 (normally zero but 1 in Fortran).  */
1805       unsigned HOST_WIDE_INT lb = tree_to_uhwi (lowbnd);
1806       orng[0] -= lb;
1807       orng[1] -= lb;
1808     }
1809 
1810   tree eltype = TREE_TYPE (aref);
1811   tree tpsize = TYPE_SIZE_UNIT (eltype);
1812   if (!tpsize || TREE_CODE (tpsize) != INTEGER_CST)
1813     {
1814       pref->add_max_offset ();
1815       return true;
1816     }
1817 
1818   offset_int sz = wi::to_offset (tpsize);
1819   orng[0] *= sz;
1820   orng[1] *= sz;
1821 
1822   if (ostype && TREE_CODE (eltype) == ARRAY_TYPE)
1823     {
1824       /* Except for the permissive raw memory functions which use
1825 	 the size of the whole object determined above, use the size
1826 	 of the referenced array.  Because the overall offset is from
1827 	 the beginning of the complete array object, add this overall
1828 	 offset to the size of the array.  */
1829       offset_int sizrng[2] =
1830 	{
1831 	 pref->offrng[0] + orng[0] + sz,
1832 	 pref->offrng[1] + orng[1] + sz
1833 	};
1834       if (sizrng[1] < sizrng[0])
1835 	std::swap (sizrng[0], sizrng[1]);
1836       if (sizrng[0] >= 0 && sizrng[0] <= pref->sizrng[0])
1837 	pref->sizrng[0] = sizrng[0];
1838       if (sizrng[1] >= 0 && sizrng[1] <= pref->sizrng[1])
1839 	pref->sizrng[1] = sizrng[1];
1840     }
1841 
1842   pref->add_offset (orng[0], orng[1]);
1843   return true;
1844 }
1845 
1846 /* Given a COMPONENT_REF CREF, set *PREF size to the size of the referenced
1847    member.  */
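
/* For a flexible array member such as (an illustrative sketch)
     struct S { int n; char data[]; };
   component_ref_size returns no constant size, so the fallback
   below assumes the range [0, PTRDIFF_MAX - sizeof (struct S)].  */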
1848 
1849 static void
1850 set_component_ref_size (tree cref, access_ref *pref)
1851 {
1852   const tree base = TREE_OPERAND (cref, 0);
1853   const tree base_type = TREE_TYPE (base);
1854 
1855   /* SAM is set for array members that might need special treatment.  */
1856   special_array_member sam;
1857   tree size = component_ref_size (cref, &sam);
1858   if (sam == special_array_member::int_0)
1859     pref->sizrng[0] = pref->sizrng[1] = 0;
1860   else if (!pref->trail1special && sam == special_array_member::trail_1)
1861     pref->sizrng[0] = pref->sizrng[1] = 1;
1862   else if (size && TREE_CODE (size) == INTEGER_CST)
1863     pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
1864   else
1865     {
1866       /* When the size of the member is unknown it's either a flexible
1867 	 array member or a trailing special array member (either zero
1868 	 length or one-element).  Set the size to the maximum minus
1869 	 the constant size of the base object's type.  */
1870       pref->sizrng[0] = 0;
1871       pref->sizrng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
1872       if (tree base_size = TYPE_SIZE_UNIT (base_type))
1873 	if (TREE_CODE (base_size) == INTEGER_CST)
1874 	  pref->sizrng[1] -= wi::to_offset (base_size);
1875     }
1876 }
1877 
1878 /* A helper of compute_objsize_r() to determine the size from COMPONENT_REF
1879    CREF.  Return true on success and false on failure.  */
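
/* To illustrate the OSTYPE distinction (a hypothetical example):
     struct A { char a[4]; char b[4]; } x;
   For &x.a, OSTYPE 0 (raw memory functions) yields the size of
   the whole object, 8, while OSTYPE 1 yields the member size, 4.  */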
1880 
1881 static bool
1882 handle_component_ref (tree cref, gimple *stmt, bool addr, int ostype,
1883 		      access_ref *pref, ssa_name_limit_t &snlim,
1884 		      pointer_query *qry)
1885 {
1886   gcc_assert (TREE_CODE (cref) == COMPONENT_REF);
1887 
1888   const tree base = TREE_OPERAND (cref, 0);
1889   const tree field = TREE_OPERAND (cref, 1);
1890   access_ref base_ref = *pref;
1891 
1892   /* Unconditionally determine the size of the base object (it could
1893      be smaller than the referenced member when the object is stored
1894      in a buffer with an insufficient size).  */
1895   if (!compute_objsize_r (base, stmt, addr, 0, &base_ref, snlim, qry))
1896     return false;
1897 
1898   /* Add the offset of the member to the offset into the object computed
1899      so far.  */
1900   tree offset = byte_position (field);
1901   if (TREE_CODE (offset) == INTEGER_CST)
1902     base_ref.add_offset (wi::to_offset (offset));
1903   else
1904     base_ref.add_max_offset ();
1905 
1906   if (!base_ref.ref)
1907     /* PREF->REF may have been already set to an SSA_NAME earlier
1908        to provide better context for diagnostics.  In that case,
1909        leave it unchanged.  */
1910     base_ref.ref = base;
1911 
1912   const tree base_type = TREE_TYPE (base);
1913   if (TREE_CODE (base_type) == UNION_TYPE)
1914     /* In accesses through union types consider the entire union
1915        rather than just its members.  */
1916     ostype = 0;
1917 
1918   if (ostype == 0)
1919     {
1920       /* In OSTYPE zero (for raw memory functions like memcpy), use
1921 	 the maximum size instead if the identity of the enclosing
1922 	 object cannot be determined.  */
1923       *pref = base_ref;
1924       return true;
1925     }
1926 
1927   pref->ref = field;
1928 
1929   if (!addr && POINTER_TYPE_P (TREE_TYPE (field)))
1930     {
1931       /* Set maximum size if the reference is to the pointer member
1932 	 itself (as opposed to what it points to).  */
1933       pref->set_max_size_range ();
1934       return true;
1935     }
1936 
1937   set_component_ref_size (cref, pref);
1938 
1939   if (base_ref.size_remaining () < pref->size_remaining ())
1940     /* Use the base object if it's smaller than the member.  */
1941     *pref = base_ref;
1942 
1943   return true;
1944 }
1945 
1946 /* A helper of compute_objsize_r() to determine the size from MEM_REF
1947    MREF.  Return true on success and false on failure.  */
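
/* For example (illustrative): for a MEM_REF of P with constant
   offset 4 (roughly MEM[p_1 + 4B] in GIMPLE dumps), the code below
   sizes what P points to, increments PREF->DEREF, and adds the
   offset range [4, 4].  */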
1948 
1949 static bool
1950 handle_mem_ref (tree mref, gimple *stmt, int ostype, access_ref *pref,
1951 		ssa_name_limit_t &snlim, pointer_query *qry)
1952 {
1953   gcc_assert (TREE_CODE (mref) == MEM_REF);
1954 
1955   tree mreftype = TYPE_MAIN_VARIANT (TREE_TYPE (mref));
1956   if (VECTOR_TYPE_P (mreftype))
1957       {
1958     {
1959 	 objects; those may be synthesized from multiple assignments
1960 	 to consecutive data members (see PR 93200 and 96963).
1961 	 FIXME: Vectorized assignments should only be present after
1962 	 vectorization so this hack is only necessary after it has
1963 	 run and could be avoided in calls from prior passes (e.g.,
1964 	 tree-ssa-strlen.cc).
1965 	 FIXME: Deal with this more generally, e.g., by marking up
1966 	 such MEM_REFs at the time they're created.  */
1967       ostype = 0;
1968     }
1969 
1970   tree mrefop = TREE_OPERAND (mref, 0);
1971   if (!compute_objsize_r (mrefop, stmt, false, ostype, pref, snlim, qry))
1972     return false;
1973 
1974   ++pref->deref;
1975 
1976   offset_int orng[2];
1977   tree off = pref->eval (TREE_OPERAND (mref, 1));
1978   range_query *const rvals = qry ? qry->rvals : NULL;
1979   if (!get_offset_range (off, stmt, orng, rvals))
1980     {
1981       /* Set ORNG to the maximum offset representable in ptrdiff_t.  */
1982       orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
1983       orng[0] = -orng[1] - 1;
1984     }
1985 
1986   pref->add_offset (orng[0], orng[1]);
1987   return true;
1988 }
1989 
1990 /* A helper of compute_objsize_r() to determine the size from SSA_NAME
1991    PTR.  Return true on success and false on failure.  */
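
/* For an allocation call such as (an illustrative sketch)
     void *p = __builtin_malloc (n);
   gimple_call_alloc_size below yields N's value range as the size
   of *P, capped at max_object_size ().  */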
1992 
1993 static bool
1994 handle_ssa_name (tree ptr, bool addr, int ostype,
1995 		 access_ref *pref, ssa_name_limit_t &snlim,
1996 		 pointer_query *qry)
1997 {
1998   if (!snlim.next ())
1999     return false;
2000 
2001   /* Only process an SSA_NAME if the recursion limit has not yet
2002      been reached.  */
2003   if (qry)
2004     {
2005       if (++qry->depth > qry->max_depth)
2006 	qry->max_depth = qry->depth;
2007       if (const access_ref *cache_ref = qry->get_ref (ptr, ostype))
2008 	{
2009 	  /* Add the number of DEREFerences accumulated so far.  */
2010 	  const int deref = pref->deref;
2011 	  *pref = *cache_ref;
2012 	  pref->deref += deref;
2013 	  return true;
2014 	}
2015     }
2016 
2017   gimple *stmt = SSA_NAME_DEF_STMT (ptr);
2018   if (is_gimple_call (stmt))
2019     {
2020       /* If STMT is a call to an allocation function get the size
2021 	 from its argument(s).  If successful, also set *PREF->REF
2022 	 to PTR for the caller to include in diagnostics.  */
2023       wide_int wr[2];
2024       range_query *const rvals = qry ? qry->rvals : NULL;
2025       if (gimple_call_alloc_size (stmt, wr, rvals))
2026 	{
2027 	  pref->ref = ptr;
2028 	  pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
2029 	  pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
2030 	  /* Constrain both bounds to a valid size.  */
2031 	  offset_int maxsize = wi::to_offset (max_object_size ());
2032 	  if (pref->sizrng[0] > maxsize)
2033 	    pref->sizrng[0] = maxsize;
2034 	  if (pref->sizrng[1] > maxsize)
2035 	    pref->sizrng[1] = maxsize;
2036 	}
2037       else
2038 	{
2039 	  /* For functions known to return one of their pointer arguments
2040 	     try to determine what the returned pointer points to, and on
2041 	     success add OFFRNG which was set to the offset added by
2042 	     the function (e.g., memchr) to the overall offset.  */
2043 	  bool past_end;
2044 	  offset_int offrng[2];
2045 	  if (tree ret = gimple_call_return_array (stmt, offrng, &past_end,
2046 						   snlim, qry))
2047 	    {
2048 	      if (!compute_objsize_r (ret, stmt, addr, ostype, pref, snlim, qry))
2049 		return false;
2050 
2051 	      /* Cap OFFRNG[1] to at most the remaining size of
2052 		 the object.  */
2053 	      offset_int remrng[2];
2054 	      remrng[1] = pref->size_remaining (remrng);
2055 	      if (remrng[1] != 0 && !past_end)
2056 		/* Decrement the size for functions that never return
2057 		   a past-the-end pointer.  */
2058 		remrng[1] -= 1;
2059 
2060 	      if (remrng[1] < offrng[1])
2061 		offrng[1] = remrng[1];
2062 	      pref->add_offset (offrng[0], offrng[1]);
2063 	    }
2064 	  else
2065 	    {
2066 	      /* For other calls that might return arbitrary pointers
2067 		 including into the middle of objects set the size
2068 		 range to maximum, clear PREF->BASE0, and also set
2069 		 PREF->REF to PTR to include it in diagnostics.  */
2070 	      pref->set_max_size_range ();
2071 	      pref->base0 = false;
2072 	      pref->ref = ptr;
2073 	    }
2074 	}
2075       qry->put_ref (ptr, *pref, ostype);
2076       return true;
2077     }
2078 
2079   if (gimple_nop_p (stmt))
2080     {
2081       /* For a function argument try to determine the byte size
2082 	 of the array from the current function declaration
2083 	 (e.g., attribute access or related).  */
2084       wide_int wr[2];
2085       bool static_array = false;
2086       if (tree ref = gimple_parm_array_size (ptr, wr, &static_array))
2087 	{
2088 	  pref->parmarray = !static_array;
2089 	  pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
2090 	  pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
2091 	  pref->ref = ref;
2092 	  qry->put_ref (ptr, *pref, ostype);
2093 	  return true;
2094 	}
2095 
2096       pref->set_max_size_range ();
2097       pref->base0 = false;
2098       pref->ref = ptr;
2099       qry->put_ref (ptr, *pref, ostype);
2100       return true;
2101     }
2102 
2103   if (gimple_code (stmt) == GIMPLE_PHI)
2104     {
2105       /* Pass PTR to get_ref() via PREF.  If all PHI arguments refer
2106 	 to the same object the function will replace PTR with that object.  */
2107       pref->ref = ptr;
2108       access_ref phi_ref = *pref;
2109       if (!pref->get_ref (NULL, &phi_ref, ostype, &snlim, qry))
2110 	return false;
2111       *pref = phi_ref;
2112       qry->put_ref (ptr, *pref, ostype);
2113       return true;
2114     }
2115 
2116   if (!is_gimple_assign (stmt))
2117     {
2118       /* Clear BASE0 since the assigned pointer might point into
2119 	 the middle of the object, set the maximum size range and,
2120 	 if the SSA_NAME refers to a function argument, set
2121 	 PREF->REF to it.  */
2122       pref->base0 = false;
2123       pref->set_max_size_range ();
2124       pref->ref = ptr;
2125       return true;
2126     }
2127 
2128   tree_code code = gimple_assign_rhs_code (stmt);
2129 
2130   if (code == MAX_EXPR || code == MIN_EXPR)
2131     {
2132       if (!handle_min_max_size (ptr, ostype, pref, snlim, qry))
2133 	return false;
2134 
2135       qry->put_ref (ptr, *pref, ostype);
2136       return true;
2137     }
2138 
2139   tree rhs = gimple_assign_rhs1 (stmt);
2140 
2141   if (code == ASSERT_EXPR)
2142     {
2143       rhs = TREE_OPERAND (rhs, 0);
2144       return compute_objsize_r (rhs, stmt, addr, ostype, pref, snlim, qry);
2145     }
2146 
2147   if (code == POINTER_PLUS_EXPR
2148       && TREE_CODE (TREE_TYPE (rhs)) == POINTER_TYPE)
2149     {
2150       /* Compute the size of the object first.  */
2151       if (!compute_objsize_r (rhs, stmt, addr, ostype, pref, snlim, qry))
2152 	return false;
2153 
2154       offset_int orng[2];
2155       tree off = gimple_assign_rhs2 (stmt);
2156       range_query *const rvals = qry ? qry->rvals : NULL;
2157       if (get_offset_range (off, stmt, orng, rvals))
2158 	pref->add_offset (orng[0], orng[1]);
2159       else
2160 	pref->add_max_offset ();
2161 
2162       qry->put_ref (ptr, *pref, ostype);
2163       return true;
2164     }
2165 
2166   if (code == ADDR_EXPR || code == SSA_NAME)
2167     {
2168       if (!compute_objsize_r (rhs, stmt, addr, ostype, pref, snlim, qry))
2169 	return false;
2170       qry->put_ref (ptr, *pref, ostype);
2171       return true;
2172     }
2173 
2174   if (ostype > 1 && POINTER_TYPE_P (TREE_TYPE (rhs)))
2175     {
2176       /* When determining the qualifiers follow the pointer but
2177 	 avoid caching the result.  As the pointer is added to
2178 	 and/or dereferenced the computed size and offset need
2179 	 not be meaningful for other queries involving the same
2180 	 pointer.  */
2181       if (!compute_objsize_r (rhs, stmt, addr, ostype, pref, snlim, qry))
2182 	return false;
2183 
2184       rhs = pref->ref;
2185     }
2186 
2187   /* (This could also be an assignment from a nonlocal pointer.)  Save
2188      PTR to mention in diagnostics but otherwise treat it as a pointer
2189      to an unknown object.  */
2190   pref->ref = rhs;
2191   pref->base0 = false;
2192   pref->set_max_size_range ();
2193   return true;
2194 }
2195 
2196 /* Helper to compute the size of the object referenced by the PTR
2197    expression which must have pointer type, using Object Size type
2198    OSTYPE (only the least significant 2 bits are used).
2199    On success, sets PREF->REF to the DECL of the referenced object
2200    if it's unique, otherwise to null, PREF->OFFRNG to the range of
2201    offsets into it, and PREF->SIZRNG to the range of sizes of
2202    the object(s).
2203    ADDR is true for an enclosing ADDR_EXPR.
2204    SNLIM is used to avoid visiting the same PHI operand multiple
2205    times, and, when nonnull, RVALS to determine range information.
2206    Returns true on success, false when a meaningful size (or range)
2207    cannot be determined.
2208 
2209    The function is intended for diagnostics and should not be used
2210    to influence code generation or optimization.  */
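
/* A note on OSTYPE (hedged; see tree-object-size.cc for the full
   semantics): the values follow __builtin_object_size, where even
   types refer to the complete enclosing object and odd types to
   the closest enclosing subobject.  */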
2211 
2212 static bool
2213 compute_objsize_r (tree ptr, gimple *stmt, bool addr, int ostype,
2214 		   access_ref *pref, ssa_name_limit_t &snlim,
2215 		   pointer_query *qry)
2216 {
2217   STRIP_NOPS (ptr);
2218 
2219   if (DECL_P (ptr))
2220     return handle_decl (ptr, addr, pref);
2221 
2222   switch (TREE_CODE (ptr))
2223     {
2224     case ADDR_EXPR:
2225       {
2226 	tree ref = TREE_OPERAND (ptr, 0);
2227 	if (!compute_objsize_r (ref, stmt, true, ostype, pref, snlim, qry))
2228 	  return false;
2229 
2230 	--pref->deref;
2231 	return true;
2232       }
2233 
2234     case BIT_FIELD_REF:
2235       {
2236 	tree ref = TREE_OPERAND (ptr, 0);
2237 	if (!compute_objsize_r (ref, stmt, addr, ostype, pref, snlim, qry))
2238 	  return false;
2239 
2240 	offset_int off = wi::to_offset (pref->eval (TREE_OPERAND (ptr, 2)));
2241 	pref->add_offset (off / BITS_PER_UNIT);
2242 	return true;
2243       }
2244 
2245     case ARRAY_REF:
2246       return handle_array_ref (ptr, stmt, addr, ostype, pref, snlim, qry);
2247 
2248     case COMPONENT_REF:
2249       return handle_component_ref (ptr, stmt, addr, ostype, pref, snlim, qry);
2250 
2251     case MEM_REF:
2252       return handle_mem_ref (ptr, stmt, ostype, pref, snlim, qry);
2253 
2254     case TARGET_MEM_REF:
2255       {
2256 	tree ref = TREE_OPERAND (ptr, 0);
2257 	if (!compute_objsize_r (ref, stmt, addr, ostype, pref, snlim, qry))
2258 	  return false;
2259 
2260 	/* TODO: Handle remaining operands.  Until then, add maximum offset.  */
2261 	pref->ref = ptr;
2262 	pref->add_max_offset ();
2263 	return true;
2264       }
2265 
2266     case INTEGER_CST:
2267       /* Nonnull pointer constants smaller than param_min_pagesize might
2268 	 be the result of erroneous null pointer addition/subtraction;
2269 	 unless zero is a valid address, set their size to zero.  For null
2270 	 pointers, set the size to the maximum for now since those may be
2271 	 the result of jump threading.  Do the same for values greater than
2272 	 or equal to param_min_pagesize to support (type *) 0x7cdeab00.  */
2273       if (integer_zerop (ptr)
2274 	  || wi::to_widest (ptr) >= param_min_pagesize)
2275 	pref->set_max_size_range ();
2276       else if (POINTER_TYPE_P (TREE_TYPE (ptr)))
2277 	{
2278 	  tree deref_type = TREE_TYPE (TREE_TYPE (ptr));
2279 	  addr_space_t as = TYPE_ADDR_SPACE (deref_type);
2280 	  if (targetm.addr_space.zero_address_valid (as))
2281 	    pref->set_max_size_range ();
2282 	  else
2283 	    pref->sizrng[0] = pref->sizrng[1] = 0;
2284 	}
2285       else
2286 	pref->sizrng[0] = pref->sizrng[1] = 0;
2287 
2288       pref->ref = ptr;
2289       return true;
2290 
2291     case STRING_CST:
2292       pref->sizrng[0] = pref->sizrng[1] = TREE_STRING_LENGTH (ptr);
2293       pref->ref = ptr;
2294       return true;
2295 
2296     case POINTER_PLUS_EXPR:
2297     {
2298       tree ref = TREE_OPERAND (ptr, 0);
2299       if (!compute_objsize_r (ref, stmt, addr, ostype, pref, snlim, qry))
2300 	return false;
2301 
2302       /* The below only makes sense if the offset is being applied to the
2303 	 address of the object.  */
2304       if (pref->deref != -1)
2305 	return false;
2306 
2307       offset_int orng[2];
2308       tree off = pref->eval (TREE_OPERAND (ptr, 1));
2309       if (get_offset_range (off, stmt, orng, qry->rvals))
2310 	pref->add_offset (orng[0], orng[1]);
2311       else
2312 	pref->add_max_offset ();
2313       return true;
2314     }
2315 
2316     case VIEW_CONVERT_EXPR:
2317       ptr = TREE_OPERAND (ptr, 0);
2318       return compute_objsize_r (ptr, stmt, addr, ostype, pref, snlim, qry);
2319 
2320     case SSA_NAME:
2321       return handle_ssa_name (ptr, addr, ostype, pref, snlim, qry);
2322 
2323     default:
2324       break;
2325     }
2326 
2327   /* Assume all other expressions point into an unknown object
2328      of the maximum valid size.  */
2329   pref->ref = ptr;
2330   pref->base0 = false;
2331   pref->set_max_size_range ();
2332   if (TREE_CODE (ptr) == SSA_NAME)
2333     qry->put_ref (ptr, *pref);
2334   return true;
2335 }
2336 
2337 /* A "public" wrapper around the above.  Clients should use this overload
2338    instead.  */
2339 
2340 tree
2341 compute_objsize (tree ptr, gimple *stmt, int ostype, access_ref *pref,
2342 		 pointer_query *ptr_qry)
2343 {
2344   pointer_query qry;
2345   if (ptr_qry)
2346     ptr_qry->depth = 0;
2347   else
2348     ptr_qry = &qry;
2349 
2350   /* Clear and invalidate in case *PREF is being reused.  */
2351   pref->offrng[0] = pref->offrng[1] = 0;
2352   pref->sizrng[0] = pref->sizrng[1] = -1;
2353 
2354   ssa_name_limit_t snlim;
2355   if (!compute_objsize_r (ptr, stmt, false, ostype, pref, snlim, ptr_qry))
2356     return NULL_TREE;
2357 
2358   offset_int maxsize = pref->size_remaining ();
2359   if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
2360     pref->offrng[0] = 0;
2361   return wide_int_to_tree (sizetype, maxsize);
2362 }
2363 
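/* A minimal usage sketch of the wrapper above (hypothetical caller,
   not part of this file):

     access_ref aref;
     pointer_query qry;
     if (tree size = compute_objsize (ptr, stmt, 1, &aref, &qry))
       // SIZE is an upper bound on the space remaining at PTR.

   Passing a persistent pointer_query lets repeated queries share
   the SSA_NAME cache whose counters are dumped above.  */
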
2364 /* Transitional wrapper.  The function should be removed once callers
2365    transition to the pointer_query API.  */
2366 
2367 tree
2368 compute_objsize (tree ptr, gimple *stmt, int ostype, access_ref *pref,
2369 		 range_query *rvals /* = NULL */)
2370 {
2371   pointer_query qry;
2372   qry.rvals = rvals;
2373   return compute_objsize (ptr, stmt, ostype, pref, &qry);
2374 }
2375 
2376 /* Legacy wrapper around the above.  The function should be removed
2377    once callers transition to one of the two above.  */
2378 
2379 tree
2380 compute_objsize (tree ptr, gimple *stmt, int ostype, tree *pdecl /* = NULL */,
2381 		 tree *poff /* = NULL */, range_query *rvals /* = NULL */)
2382 {
2383   /* Set the initial offsets to zero and size to negative to indicate
2384      none has been computed yet.  */
2385   access_ref ref;
2386   tree size = compute_objsize (ptr, stmt, ostype, &ref, rvals);
2387   if (!size || !ref.base0)
2388     return NULL_TREE;
2389 
2390   if (pdecl)
2391     *pdecl = ref.ref;
2392 
2393   if (poff)
2394     *poff = wide_int_to_tree (ptrdiff_type_node, ref.offrng[ref.offrng[0] < 0]);
2395 
2396   return size;
2397 }
2398 
2399 /* Determine the offset *FLDOFF of the first byte of a struct member
2400    of TYPE (possibly recursively) into which the byte offset OFF points,
2401    starting after the field START_AFTER if it's non-null.  On success,
2402    if nonnull, set *FLDOFF to the offset of the first byte, and return
2403    the field decl.  If nonnull, set *NEXTOFF to the offset of the next
2404    field (which reflects any padding between the returned field and
2405    the next).  Otherwise, if no such member can be found, return null.  */
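
/* For example (illustrative): given
     struct P { int i; int j; };
   a query with OFF 5 returns the FIELD_DECL for J and sets *FLDOFF
   to 4, since byte 5 falls within J's interval [4, 8).  */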
2406 
2407 tree
2408 field_at_offset (tree type, tree start_after, HOST_WIDE_INT off,
2409 		 HOST_WIDE_INT *fldoff /* = nullptr */,
2410 		 HOST_WIDE_INT *nextoff /* = nullptr */)
2411 {
2412   tree first_fld = TYPE_FIELDS (type);
2413 
2414   HOST_WIDE_INT offbuf = 0, nextbuf = 0;
2415   if (!fldoff)
2416     fldoff = &offbuf;
2417   if (!nextoff)
2418     nextoff = &nextbuf;
2419 
2420   *nextoff = 0;
2421 
2422   /* The field to return.  */
2423   tree last_fld = NULL_TREE;
2424   /* The next field to advance to.  */
2425   tree next_fld = NULL_TREE;
2426 
2427   /* NEXT_FLD's cached offset.  */
2428   HOST_WIDE_INT next_pos = -1;
2429 
2430   for (tree fld = first_fld; fld; fld = next_fld)
2431     {
2432       next_fld = fld;
2433       do
2434 	/* Advance to the next relevant data member.  */
2435 	next_fld = TREE_CHAIN (next_fld);
2436       while (next_fld
2437 	     && (TREE_CODE (next_fld) != FIELD_DECL
2438 		 || DECL_ARTIFICIAL (next_fld)));
2439 
2440       if (TREE_CODE (fld) != FIELD_DECL || DECL_ARTIFICIAL (fld))
2441 	continue;
2442 
2443       if (fld == start_after)
2444 	continue;
2445 
2446       tree fldtype = TREE_TYPE (fld);
2447       /* The offset of FLD within its immediately enclosing structure.  */
2448       HOST_WIDE_INT fldpos = next_pos < 0 ? int_byte_position (fld) : next_pos;
2449 
2450       tree typesize = TYPE_SIZE_UNIT (fldtype);
2451       if (typesize && TREE_CODE (typesize) != INTEGER_CST)
2452 	/* Bail if FLD is a variable length member.  */
2453 	return NULL_TREE;
2454 
2455       /* If the size is not available the field is a flexible array
2456 	 member.  Treat this case as success.  */
2457       HOST_WIDE_INT fldsize = (tree_fits_uhwi_p (typesize)
2458 			       ? tree_to_uhwi (typesize)
2459 			       : off);
2460 
2461       /* If OFF is beyond the end of the current field continue.  */
2462       HOST_WIDE_INT fldend = fldpos + fldsize;
2463       if (fldend < off)
2464 	continue;
2465 
2466       if (next_fld)
2467 	{
2468 	  /* If OFF is equal to the offset of the next field continue
2469 	     to it and skip the array/struct business below.  */
2470 	  tree pos = byte_position (next_fld);
2471 	  if (!tree_fits_shwi_p (pos))
2472 	    /* Bail if NEXT_FLD is a variable length member.  */
2473 	    return NULL_TREE;
2474 	  next_pos = tree_to_shwi (pos);
2475 	  *nextoff = *fldoff + next_pos;
2476 	  if (*nextoff == off && TREE_CODE (type) != UNION_TYPE)
2477 	    continue;
2478 	}
2479       else
2480 	*nextoff = HOST_WIDE_INT_MAX;
2481 
2482       /* OFF refers somewhere into the current field or just past its end,
2483 	 which could mean it refers to the next field.  */
2484       if (TREE_CODE (fldtype) == ARRAY_TYPE)
2485 	{
2486 	  /* Will be set to the offset of the first byte of the array
2487 	     element (which may be an array) of FLDTYPE into which
2488 	     OFF - FLDPOS points (which may be past ELTOFF).  */
2489 	  HOST_WIDE_INT eltoff = 0;
2490 	  if (tree ft = array_elt_at_offset (fldtype, off - fldpos, &eltoff))
2491 	    fldtype = ft;
2492 	  else
2493 	    continue;
2494 
2495 	  /* Advance the position to include the array element above.
2496 	     If OFF - FLDPOS refers to a member of FLDTYPE, the member
2497 	     will be determined below.  */
2498 	  fldpos += eltoff;
2499 	}
2500 
2501       *fldoff += fldpos;
2502 
2503       if (TREE_CODE (fldtype) == RECORD_TYPE)
2504 	/* Drill down into the current field if it's a struct.  */
2505 	fld = field_at_offset (fldtype, start_after, off - fldpos,
2506 			       fldoff, nextoff);
2507 
2508       last_fld = fld;
2509 
2510       /* Unless the offset is just past the end of the field return it.
2511 	 Otherwise save it and return it only if the offset of the next
2512 	 field is greater (i.e., there is padding between the two)
2513 	 or if there is no next field.  */
2514       if (off < fldend)
2515 	break;
2516     }
2517 
2518   if (*nextoff == HOST_WIDE_INT_MAX && next_fld)
2519     *nextoff = next_pos;
2520 
2521   return last_fld;
2522 }
2523 
2524 /* Determine the offset *ELTOFF of the first byte of the array element
2525    of array ARTYPE into which the byte offset OFF points.  On success
2526    set *ELTOFF to the offset of the first byte and return its type.
2527    Otherwise, if no such element can be found, return null.  */
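
/* For example (illustrative): for ARTYPE int[10] and OFF 13, the
   element size is 4, so *ELTOFF is set to (13 / 4) * 4 == 12 and
   the element type int is returned.  */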
2528 
2529 tree
2530 array_elt_at_offset (tree artype, HOST_WIDE_INT off,
2531 		     HOST_WIDE_INT *eltoff /* = nullptr */,
2532 		     HOST_WIDE_INT *subar_size /* = nullptr */)
2533 {
2534   gcc_assert (TREE_CODE (artype) == ARRAY_TYPE);
2535 
2536   HOST_WIDE_INT dummy;
2537   if (!eltoff)
2538     eltoff = &dummy;
2539   if (!subar_size)
2540     subar_size = &dummy;
2541 
2542   tree eltype = artype;
2543   while (TREE_CODE (TREE_TYPE (eltype)) == ARRAY_TYPE)
2544     eltype = TREE_TYPE (eltype);
2545 
2546   tree subartype = eltype;
2547   if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (eltype))
2548       || TYPE_MODE (TREE_TYPE (eltype)) != TYPE_MODE (char_type_node))
2549     eltype = TREE_TYPE (eltype);
2550 
2551   *subar_size = int_size_in_bytes (subartype);
2552 
2553   if (eltype == artype)
2554     {
2555       *eltoff = 0;
2556       return artype;
2557     }
2558 
2559   HOST_WIDE_INT artype_size = int_size_in_bytes (artype);
2560   HOST_WIDE_INT eltype_size = int_size_in_bytes (eltype);
2561 
2562   if (off < artype_size)
2563     {
2564       *eltoff = (off / eltype_size) * eltype_size;
2565       return TREE_CODE (eltype) == ARRAY_TYPE ? TREE_TYPE (eltype) : eltype;
2566     }
2567 
2568   return NULL_TREE;
2569 }
2570 
2571 /* Wrapper around build_array_type_nelts that makes sure the array
2572    can be created at all and handles zero-sized arrays specially.  */
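
/* E.g. (illustrative): build_printable_array_type (char_type_node, 8)
   yields char[8], NELTS of zero yields a zero-sized array type, and
   an excessive NELTS an array of unknown bound.  */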
2573 
2574 tree
2575 build_printable_array_type (tree eltype, unsigned HOST_WIDE_INT nelts)
2576 {
2577   if (TYPE_SIZE_UNIT (eltype)
2578       && TREE_CODE (TYPE_SIZE_UNIT (eltype)) == INTEGER_CST
2579       && !integer_zerop (TYPE_SIZE_UNIT (eltype))
2580       && TYPE_ALIGN_UNIT (eltype) > 1
2581       && wi::zext (wi::to_wide (TYPE_SIZE_UNIT (eltype)),
2582 		   ffs_hwi (TYPE_ALIGN_UNIT (eltype)) - 1) != 0)
2583     eltype = TYPE_MAIN_VARIANT (eltype);
2584 
2585   /* Consider excessive NELTS an array of unknown bound.  */
2586   tree idxtype = NULL_TREE;
2587   if (nelts < HOST_WIDE_INT_MAX)
2588     {
2589       if (nelts)
2590 	return build_array_type_nelts (eltype, nelts);
2591       idxtype = build_range_type (sizetype, size_zero_node, NULL_TREE);
2592     }
2593 
2594   tree arrtype = build_array_type (eltype, idxtype);
2595   arrtype = build_distinct_type_copy (TYPE_MAIN_VARIANT (arrtype));
2596   TYPE_SIZE (arrtype) = bitsize_zero_node;
2597   TYPE_SIZE_UNIT (arrtype) = size_zero_node;
2598   return arrtype;
2599 }
2600