/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2022 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.cc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "gimple-ssa-warn-access.h"
#include "gimple-ssa-warn-restrict.h"
#include "fold-const.h"
#include "stmt.h"
#include "expr.h"
#include "stor-layout.h"
#include "dumpfile.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-object-size.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "dbgcnt.h"
#include "builtins.h"
#include "tree-eh.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "tree-cfg.h"
#include "fold-const-call.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "calls.h"
#include "tree-vector-builder.h"
#include "tree-ssa-strlen.h"
#include "varasm.h"
#include "internal-fn.h"

enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};
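
/* For example (an illustrative sketch, based on the array-size fallback
   in get_range_strlen_tree below): for a string stored in

     char buf[8];	-- contents unknown at compile time

   SRK_STRLEN finds no constant length, while SRK_LENRANGE falls back on
   the size of the enclosing array and records the range [0, 7], with
   BUF as the largest character array the string may refer to.  */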

static bool
get_range_strlen (tree, bitmap, strlen_range_kind, c_strlen_data *, unsigned);

/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the variable from whose constructor
   DECL was taken.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to another compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to a reference
	to a method that was partitioned elsewhere.
	In this case we have a static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred to by external vtables that
        we devirtualize only during the final compilation stage.
        At this point we have already decided that we will not output
        the function body and thus we can't reference the symbol
        directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred to only if they are defined and not
     optimized out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
	return false;
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var or a var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to another compilation unit.  The other
     compilation unit may be in a separate DSO and the symbol may be
     hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have a COMDAT.  We are going to check if we still have a definition
     or if the definition is going to be output in another partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in the units where they are used, and when the other unit was compiled
     with LTO it is possible that the vtable was kept public while the
     function itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}

/* Create a temporary for TYPE for a statement STMT.  If the current function
   is in SSA form, an SSA name is created.  Otherwise a temporary register
   is made.  */

tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}

/* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to
   transform it into a form acceptable to is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	/* ???  We should be able to assert that TREE_ADDRESSABLE is set,
	   but since the use can be in a debug stmt we can't.  */
	;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
	cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}
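
/* For instance (a hypothetical initializer, not from the sources):

     static int arr[4];
     static int *p = &arr[0] + 2;

   can reach this function as a POINTER_PLUS_EXPR of &arr and 8, which
   the code above rewrites into the address of a MEM_REF at offset 8,
   roughly &MEM[&arr, 8], a form is_gimple_min_invariant accepts.  */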

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val
	      && is_gimple_min_invariant (val)
	      && useless_type_conversion_p (TREE_TYPE (sym), TREE_TYPE (val)))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
          && is_gimple_reg_type (TREE_TYPE (sym)))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
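
/* For example (a sketch): given

     static const int forty_two = 42;

   this returns the INTEGER_CST 42 for FORTY_TWO, and a 'const'
   variable of register type without an initializer that cannot be
   overridden at link or run time folds to zero via build_zero_cst.  */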



/* Subroutine of fold_stmt.  We perform constant folding of the
   memory reference tree EXPR.  */

static tree
maybe_fold_reference (tree expr)
{
  tree result = NULL_TREE;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    result = fold_unary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    result = fold_ternary_loc (EXPR_LOCATION (expr),
			       TREE_CODE (expr),
			       TREE_TYPE (expr),
			       TREE_OPERAND (expr, 0),
			       TREE_OPERAND (expr, 1),
			       TREE_OPERAND (expr, 2));
  else
    result = fold_const_aggregate_ref (expr);

  if (result && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
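
/* For instance (illustrative): __real__ of a COMPLEX_CST folds to its
   constant real component via fold_unary_loc, and a BIT_FIELD_REF of
   a VECTOR_CST folds to the selected element via fold_ternary_loc;
   anything that does not fold to a gimple invariant yields
   NULL_TREE.  */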

/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.cc
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning any
   attempt to fold if the usual folding turns out to be too
   aggressive.  */

bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
	return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_comparison:
      /* GENERIC allows comparisons with non-boolean types, reject
	 those for GIMPLE.  Let vector-typed comparisons pass - rules
	 for GENERIC and GIMPLE are the same here.  */
      if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
	    && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
		|| TYPE_PRECISION (TREE_TYPE (expr)) == 1))
	  && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
	return false;

      /* Fallthru.  */
    case tcc_binary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
	return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
	return false;
      break;

    case tcc_expression:
      switch (code)
	{
	case ADDR_EXPR:
	  {
	    tree t;
	    if (is_gimple_min_invariant (expr))
	      return true;
	    t = TREE_OPERAND (expr, 0);
	    while (handled_component_p (t))
	      {
		/* ??? More checks needed, see the GIMPLE verifier.  */
		if ((TREE_CODE (t) == ARRAY_REF
		     || TREE_CODE (t) == ARRAY_RANGE_REF)
		    && !is_gimple_val (TREE_OPERAND (t, 1)))
		  return false;
		t = TREE_OPERAND (t, 0);
	      }
	    if (!is_gimple_id (t))
	      return false;
	  }
	  break;

	default:
	  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
	    {
	      if ((code == COND_EXPR
		   ? !is_gimple_condexpr (TREE_OPERAND (expr, 0))
		   : !is_gimple_val (TREE_OPERAND (expr, 0)))
		  || !is_gimple_val (TREE_OPERAND (expr, 1))
		  || !is_gimple_val (TREE_OPERAND (expr, 2)))
		return false;
	      break;
	    }
	  return false;
	}
      break;

    case tcc_vl_exp:
      return false;

    case tcc_exceptional:
      if (code == CONSTRUCTOR)
	{
	  unsigned i;
	  tree elt;
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
	    if (!is_gimple_val (elt))
	      return false;
	  return true;
	}
      if (code != SSA_NAME)
	return false;
      break;

    case tcc_reference:
      if (code == BIT_FIELD_REF)
	return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}
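
/* For example (illustrative): the address &a[j_1] with J_1 an SSA name
   is a valid GIMPLE rhs, while &a[i + 1], which fold-const.cc can
   produce, embeds the non-value index i + 1 in the ARRAY_REF and is
   rejected here, so callers must gimplify such a result first.  */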


/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have its address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    if (TREE_CODE (ref) == MEM_REF
		&& integer_zerop (TREE_OPERAND (ref, 1)))
	      {
		result = TREE_OPERAND (ref, 0);
		if (!useless_type_conversion_p (TREE_TYPE (rhs),
						TREE_TYPE (result)))
		  result = build1 (NOP_EXPR, TREE_TYPE (rhs), result);
		return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs)
		 && is_gimple_reg_type (TREE_TYPE (rhs)))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
	    return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
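
/* As a concrete illustration of the GIMPLE_SINGLE_RHS cases above
   (a sketch in GIMPLE dump notation): an assignment

     ptr_1 = &MEM[p_2, 0];

   simplifies to ptr_1 = p_2 (wrapped in a NOP_EXPR if the pointer
   types differ), and an OBJ_TYPE_REF with a single possible target is
   replaced by the address of that target's FUNCTION_DECL.  */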


/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts' location and virtual operands.
   If the statement has an lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with the exact
	 SSA name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store, release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
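
/* For example (a sketch): when a statement with virtual operands
   VUSE .MEM_2 / VDEF .MEM_3 is replaced by a sequence of two stores,
   the two loops above wire the virtual operands as

     store1: VUSE .MEM_2, VDEF .MEM_4  -- fresh SSA name
     store2: VUSE .MEM_4, VDEF .MEM_3  -- the original VDEF

   so the virtual SSA web outside the replaced statement is
   unchanged.  */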

/* Helper function for update_gimple_call and
   gimplify_and_update_call_from_tree.  A GIMPLE_CALL STMT is being replaced
   with GIMPLE_CALL NEW_STMT.  */

static void
finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
			   gimple *stmt)
{
  tree lhs = gimple_call_lhs (stmt);
  gimple_call_set_lhs (new_stmt, lhs);
  if (lhs && TREE_CODE (lhs) == SSA_NAME)
    SSA_NAME_DEF_STMT (lhs) = new_stmt;
  gimple_move_vops (new_stmt, stmt);
  gimple_set_location (new_stmt, gimple_location (stmt));
  if (gimple_block (new_stmt) == NULL_TREE)
    gimple_set_block (new_stmt, gimple_block (stmt));
  gsi_replace (si_p, new_stmt, false);
}

/* Update a GIMPLE_CALL statement at iterator *SI_P to a call to FN
   with NARGS arguments; the arguments, already in GIMPLE form, follow
   the NARGS argument.  */

bool
update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
{
  va_list ap;
  gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));

  gcc_assert (is_gimple_call (stmt));
  va_start (ap, nargs);
  new_stmt = gimple_build_call_valist (fn, nargs, ap);
  finish_update_gimple_call (si_p, new_stmt, stmt);
  va_end (ap);
  return true;
}

/* Return true if EXPR is a CALL_EXPR suitable for representation
   as a single GIMPLE_CALL statement.  If the arguments require
   further gimplification, return false.  */

static bool
valid_gimple_call_p (tree expr)
{
  unsigned i, nargs;

  if (TREE_CODE (expr) != CALL_EXPR)
    return false;

  nargs = call_expr_nargs (expr);
  for (i = 0; i < nargs; i++)
    {
      tree arg = CALL_EXPR_ARG (expr, i);
      if (is_gimple_reg_type (TREE_TYPE (arg)))
	{
	  if (!is_gimple_val (arg))
	    return false;
	}
      else
	if (!is_gimple_lvalue (arg))
	  return false;
    }

  return true;
}
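
/* For example (illustrative): a CALL_EXPR foo (x_1, 42) can be
   represented directly as a GIMPLE_CALL, while foo (a + b) cannot,
   since a + b is not a gimple value and needs a separate statement
   to evaluate it.  */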

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      vec<tree> args = vNULL;
      gcall *new_stmt;

      if (nargs > 0)
	{
	  args.create (nargs);
	  args.safe_grow_cleared (nargs, true);

	  for (i = 0; i < nargs; i++)
	    args[i] = CALL_EXPR_ARG (expr, i);
	}

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      args.release ();
      return;
    }

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      push_gimplify_context (gimple_in_ssa_p (cfun));
      gimplify_and_add (expr, &stmts);
      pop_gimplify_context (NULL);

      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  gsi_replace_with_seq_vops (si_p, stmts);
}


/* Replace the call at *GSI with the gimple value VAL.  */

void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}

/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  gimple_move_vops (repl, stmt);
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}

/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return (DECL_P (inner)
	  || (TREE_CODE (inner) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
}

/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  value_range valid_range (build_int_cst (type, 0),
			   wide_int_to_tree (type, ssize_max));
  value_range vr;
  if (cfun)
    get_range_query (cfun)->range_of_expr (vr, size);
  else
    get_global_range_query ()->range_of_expr (vr, size);
  if (vr.undefined_p ())
    vr.set_varying (TREE_TYPE (size));
  vr.intersect (&valid_range);
  return vr.zero_p ();
}
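
/* For example (a sketch): if a signed ssize_t value known to be in
   [-3, 0] is converted to size_t and used as a length,

     memcpy (d, s, (size_t) sn_1);

   the converted range is {0} union [SIZE_MAX - 2, SIZE_MAX], and
   intersecting it with the valid range [0, SSIZE_MAX] leaves only
   zero, so callers may fold the call as if the length were 0.  */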

/* Fold a function call to the builtin mem{{,p}cpy,move}.  Try to detect
   and diagnose (otherwise undefined) overlapping copies without preventing
   folding.  When folded, GCC guarantees that overlapping memcpy has
   the same semantics as memmove.  A call to the library memcpy need not
   provide the same guarantee.  Return false if no simplification can
   be made.  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, enum built_in_function code)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is a constant zero or in a range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
	 It's safe and may even be emitted by GCC itself (see bug
	 32667).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      goto done;
    }
  else
    {
      /* We cannot (easily) change the type of the copy if it is a storage
	 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
	 modify the storage order of objects (see storage_order_barrier_p).  */
      tree srctype
	= POINTER_TYPE_P (TREE_TYPE (src))
	  ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
      tree desttype
	= POINTER_TYPE_P (TREE_TYPE (dest))
	  ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
      tree destvar, srcvar, srcoff;
      unsigned int src_align, dest_align;
      unsigned HOST_WIDE_INT tmp_len;
      const char *tmp_str;

      /* Build accesses at offset zero with a ref-all character type.  */
      tree off0
	= build_int_cst (build_pointer_type_for_mode (char_type_node,
						      ptr_mode, true), 0);

      /* If we can perform the copy efficiently by first doing all loads
	 and then all stores, inline it that way.  Currently "efficiently"
	 means that we can load all the memory into a single integer
	 register, which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* FIXME: Don't transform copies from strings with known length.
	     Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
	     from being handled, and the case was XFAILed for that reason.
	     Now that it is handled and the XFAIL removed, as soon as other
	     strlenopt tests that rely on it for passing are adjusted, this
	     hack can be removed.  */
	  && !c_strlen (src, 1)
	  && !((tmp_str = getbyterep (src, &tmp_len)) != NULL
	       && memchr (tmp_str, 0, tmp_len) == NULL)
	  && !(srctype
	       && AGGREGATE_TYPE_P (srctype)
	       && TYPE_REVERSE_STORAGE_ORDER (srctype))
	  && !(desttype
	       && AGGREGATE_TYPE_P (desttype)
	       && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (pow2p_hwi (ilen))
	    {
	      /* Detect out-of-bounds accesses without issuing warnings.
		 Avoid folding out-of-bounds copies, but to avoid false
		 positives for unreachable code defer the warning until
		 after DCE has worked its magic.
		 -Wrestrict is still diagnosed.  */
	      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
							 dest, src, len, len,
							 false, false))
		if (warning != OPT_Wrestrict)
		  return false;

	      scalar_int_mode mode;
	      if (int_mode_for_size (ilen * 8, 0).exists (&mode)
		  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (mode)
		      || !targetm.slow_unaligned_access (mode, dest_align)
		      || (optab_handler (movmisalign_optab, mode)
			  != CODE_FOR_nothing)))
		{
		  tree type = build_nonstandard_integer_type (ilen * 8, 1);
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (mode))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (mode)
			   && targetm.slow_unaligned_access (mode, src_align)
			   && (optab_handler (movmisalign_optab, mode)
			       == CODE_FOR_nothing))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  srcmem
			    = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
							  new_stmt);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gimple_set_location (new_stmt, loc);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (mode))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_move_vops (new_stmt, stmt);
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gimple_set_location (new_stmt, loc);
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (code == BUILT_IN_MEMMOVE)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what the old code did.  Is the testing for pointer
	     types really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      poly_int64 src_offset = 0, dest_offset = 0;
	      poly_uint64 maxsize;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
	      if (src_base == NULL)
		src_base = srcvar;
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_addr_base_and_unit_offset (destvar,
							 &dest_offset);
	      if (dest_base == NULL)
		dest_base = destvar;
	      if (!poly_int_tree_p (len, &maxsize))
		maxsize = -1;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_maybe_overlap_p (src_offset, maxsize,
						 dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  poly_offset_int full_src_offset
		    = mem_ref_offset (src_base) + src_offset;
		  poly_offset_int full_dest_offset
		    = mem_ref_offset (dest_base) + dest_offset;
		  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
					      full_dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias, optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      if (!srctype
	  || (AGGREGATE_TYPE_P (srctype)
	      && TYPE_REVERSE_STORAGE_ORDER (srctype)))
	return false;
      if (!desttype
	  || (AGGREGATE_TYPE_P (desttype)
	      && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	srctype = TREE_TYPE (srctype);
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);

      /* Choose between src and destination type for the access based
	 on alignment, whether the access constitutes a register access
	 and whether it may actually expose a declaration for SSA rewrite
	 or SRA decomposition.  Also try to expose a string constant, we
	 might be able to concatenate several of them later into a single
	 string store.  */
      destvar = NULL_TREE;
      srcvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (dest, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
	  && dest_align >= TYPE_ALIGN (desttype)
	  && (is_gimple_reg_type (desttype)
	      || src_align >= TYPE_ALIGN (desttype)))
	destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      else if (TREE_CODE (src) == ADDR_EXPR
	       && var_decl_component_p (TREE_OPERAND (src, 0))
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
	       && src_align >= TYPE_ALIGN (srctype)
	       && (is_gimple_reg_type (srctype)
		   || dest_align >= TYPE_ALIGN (srctype)))
	srcvar = fold_build2 (MEM_REF, srctype, src, off0);
      /* FIXME: Don't transform copies from strings with known original
	 length.  As soon as strlenopt tests that rely on it for passing
	 are adjusted, this hack can be removed.  */
      else if (gimple_call_alloca_for_var_p (stmt)
	       && (srcvar = string_constant (src, &srcoff, NULL, NULL))
	       && integer_zerop (srcoff)
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
	       && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
	srctype = TREE_TYPE (srcvar);
      else
	return false;

      /* Now that we have chosen an access type, express the other side in
	 terms of it if the target allows that with respect to alignment
	 constraints.  */
      if (srcvar == NULL_TREE)
	{
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      enum machine_mode mode = TYPE_MODE (desttype);
	      if ((mode == BLKmode && STRICT_ALIGNMENT)
		  || (targetm.slow_unaligned_access (mode, src_align)
		      && (optab_handler (movmisalign_optab, mode)
			  == CODE_FOR_nothing)))
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      enum machine_mode mode = TYPE_MODE (srctype);
	      if ((mode == BLKmode && STRICT_ALIGNMENT)
		  || (targetm.slow_unaligned_access (mode, dest_align)
		      && (optab_handler (movmisalign_optab, mode)
			  == CODE_FOR_nothing)))
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      /* Same as above, detect out-of-bounds accesses without issuing
	 warnings.  Avoid folding out-of-bounds copies, but to avoid
	 false positives for unreachable code defer the warning until
	 after DCE has worked its magic.
	 -Wrestrict is still diagnosed.  */
      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
						 dest, src, len, len,
						 false, false))
	if (warning != OPT_Wrestrict)
	  return false;

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  tree tem = fold_const_aggregate_ref (srcvar);
	  if (tem)
	    srcvar = tem;
	  if (! is_gimple_min_invariant (srcvar))
	    {
	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	      srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
						   new_stmt);
	      gimple_assign_set_lhs (new_stmt, srcvar);
	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	      gimple_set_location (new_stmt, loc);
	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	    }
	  new_stmt = gimple_build_assign (destvar, srcvar);
	  goto set_vop_and_replace;
	}

      /* We get an aggregate copy.  If the source is a STRING_CST, then
	 directly use its type to perform the copy.  */
      if (TREE_CODE (srcvar) == STRING_CST)
	desttype = srctype;

      /* Or else, use an unsigned char[] type to perform the copy in order
	 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
	 types or float modes behavior on copying.  */
      else
	{
	  desttype = build_array_type_nelts (unsigned_char_type_node,
					     tree_to_uhwi (len));
	  srctype = desttype;
	  if (src_align > TYPE_ALIGN (srctype))
	    srctype = build_aligned_type (srctype, src_align);
	  srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	}

      if (dest_align > TYPE_ALIGN (desttype))
	desttype = build_aligned_type (desttype, dest_align);
      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      new_stmt = gimple_build_assign (destvar, srcvar);

set_vop_and_replace:
      gimple_move_vops (new_stmt, stmt);
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gimple_set_location (new_stmt, loc);
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  gimple_seq stmts = NULL;
  if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
    len = NULL_TREE;
  else if (code == BUILT_IN_MEMPCPY)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
    }
  else
    gcc_unreachable ();

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
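
/* Taken together (an illustrative sketch): on a target where
   MOVE_MAX >= 4 and 4-byte accesses are cheap,

     memcpy (&a, &b, 4);	-- a, b suitably aligned

   becomes a single load/store pair through a 32-bit integer type, and
   for mempcpy the result is dest + 4, computed above with a
   POINTER_PLUS_EXPR, rather than dest itself.  */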

/* Transform a call to built-in bcmp (a, b, len) at *GSI into one
   to built-in memcmp (a, b, len).  */

static bool
gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);

  if (!fn)
    return false;

  /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree a = gimple_call_arg (stmt, 0);
  tree b = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, a, b, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7, but Issue 6 specifies
     that it's equivalent to memmove (not memcpy).  Transform bcopy (src,
     dest, len) into memmove (dest, src, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bzero (dest, len) at *GSI into one
   to built-in memset (dest, 0, len).  */

static bool
gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);

  if (!fn)
    return false;

  /* Transform bzero (dest, len) into memset (dest, 0, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree len = gimple_call_arg (stmt, 1);

  gimple_seq seq = NULL;
  gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
  gimple_seq_add_stmt_without_update (&seq, repl);
  gsi_replace_with_seq_vops (gsi, seq);
  fold_stmt (gsi);

  return true;
}

/* Fold a function call to builtin memset or bzero at *GSI, setting the
   memory of size LEN to the value C.  Return whether a simplification
   was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
      || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
	  != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (!type_has_mode_precision_p (etype))
    etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
					    TYPE_UNSIGNED (etype));

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_move_vops (store, stmt);
  gimple_set_location (store, gimple_location (stmt));
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}
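
/* For example (a sketch): for a suitably aligned 4-byte unsigned int I,

     memset (&i, 0xab, 4);

   is folded by the code above into the single store i = 0xabababab.
   The byte value is replicated through CVAL by the shift/or chain; the
   final (cval << 31) << 1 step replicates into the upper half without
   ever shifting by the full width of the type, which would be
   undefined if HOST_WIDE_INT were only 32 bits wide.  */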
1533 
1534 /* Helper of get_range_strlen for ARG that is not an SSA_NAME.  */
1535 
1536 static bool
get_range_strlen_tree(tree arg,bitmap visited,strlen_range_kind rkind,c_strlen_data * pdata,unsigned eltsize)1537 get_range_strlen_tree (tree arg, bitmap visited, strlen_range_kind rkind,
1538 		       c_strlen_data *pdata, unsigned eltsize)
1539 {
1540   gcc_assert (TREE_CODE (arg) != SSA_NAME);
1541 
1542   /* The length computed by this invocation of the function.  */
1543   tree val = NULL_TREE;
1544 
1545   /* True if VAL is an optimistic (tight) bound determined from
1546      the size of the character array in which the string may be
1547      stored.  In that case, the computed VAL is used to set
1548      PDATA->MAXBOUND.  */
1549   bool tight_bound = false;
1550 
1551   /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
1552   if (TREE_CODE (arg) == ADDR_EXPR
1553       && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
1554     {
1555       tree op = TREE_OPERAND (arg, 0);
1556       if (integer_zerop (TREE_OPERAND (op, 1)))
1557 	{
1558 	  tree aop0 = TREE_OPERAND (op, 0);
1559 	  if (TREE_CODE (aop0) == INDIRECT_REF
1560 	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1561 	    return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
1562 				     pdata, eltsize);
1563 	}
1564       else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
1565 	       && rkind == SRK_LENRANGE)
1566 	{
1567 	  /* Fail if an array is the last member of a struct object
1568 	     since it could be treated as a (fake) flexible array
1569 	     member.  */
1570 	  tree idx = TREE_OPERAND (op, 1);
1571 
1572 	  arg = TREE_OPERAND (op, 0);
1573 	  tree optype = TREE_TYPE (arg);
1574 	  if (tree dom = TYPE_DOMAIN (optype))
1575 	    if (tree bound = TYPE_MAX_VALUE (dom))
1576 	      if (TREE_CODE (bound) == INTEGER_CST
1577 		  && TREE_CODE (idx) == INTEGER_CST
1578 		  && tree_int_cst_lt (bound, idx))
1579 		return false;
1580 	}
1581     }
1582 
1583   if (rkind == SRK_INT_VALUE)
1584     {
1585       /* We are computing the maximum value (not string length).  */
1586       val = arg;
1587       if (TREE_CODE (val) != INTEGER_CST
1588 	  || tree_int_cst_sgn (val) < 0)
1589 	return false;
1590     }
1591   else
1592     {
1593       c_strlen_data lendata = { };
1594       val = c_strlen (arg, 1, &lendata, eltsize);
1595 
1596       if (!val && lendata.decl)
1597 	{
1598 	  /* ARG refers to the unterminated const character array
1599 	     LENDATA.DECL; use LENDATA.MINLEN as its length.  */
1600 	  val = lendata.minlen;
1601 	  pdata->decl = lendata.decl;
1602 	}
1603     }
1604 
1605   /* Set if VAL represents the maximum length based on array size (set
1606      when exact length cannot be determined).  */
1607   bool maxbound = false;
1608 
1609   if (!val && rkind == SRK_LENRANGE)
1610     {
1611       if (TREE_CODE (arg) == ADDR_EXPR)
1612 	return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
1613 				 pdata, eltsize);
1614 
1615       if (TREE_CODE (arg) == ARRAY_REF)
1616 	{
1617 	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));
1618 
1619 	  /* Determine the "innermost" array type.  */
1620 	  while (TREE_CODE (optype) == ARRAY_TYPE
1621 		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1622 	    optype = TREE_TYPE (optype);
1623 
1624 	  /* Avoid arrays of pointers.  */
1625 	  tree eltype = TREE_TYPE (optype);
1626 	  if (TREE_CODE (optype) != ARRAY_TYPE
1627 	      || !INTEGRAL_TYPE_P (eltype))
1628 	    return false;
1629 
1630 	  /* Fail when the array bound is unknown or zero.  */
1631 	  val = TYPE_SIZE_UNIT (optype);
1632 	  if (!val
1633 	      || TREE_CODE (val) != INTEGER_CST
1634 	      || integer_zerop (val))
1635 	    return false;
1636 
1637 	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1638 			      integer_one_node);
1639 
1640 	  /* Set the minimum size to zero since the string in
1641 	     the array could have zero length.  */
1642 	  pdata->minlen = ssize_int (0);
1643 
1644 	  tight_bound = true;
1645 	}
1646       else if (TREE_CODE (arg) == COMPONENT_REF
1647 	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
1648 		   == ARRAY_TYPE))
1649 	{
1650 	  /* Use the type of the member array to determine the upper
1651 	     bound on the length of the array.  This may be overly
1652 	     optimistic if the array itself isn't NUL-terminated and
1653 	     the caller relies on the subsequent member to contain
1654 	     the NUL but that would only be considered valid if
1655 	     the array were the last member of a struct.  */
1656 
1657 	  tree fld = TREE_OPERAND (arg, 1);
1658 
1659 	  tree optype = TREE_TYPE (fld);
1660 
1661 	  /* Determine the "innermost" array type.  */
1662 	  while (TREE_CODE (optype) == ARRAY_TYPE
1663 		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1664 	    optype = TREE_TYPE (optype);
1665 
1666 	  /* Fail when the array bound is unknown or zero.  */
1667 	  val = TYPE_SIZE_UNIT (optype);
1668 	  if (!val
1669 	      || TREE_CODE (val) != INTEGER_CST
1670 	      || integer_zerop (val))
1671 	    return false;
1672 	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1673 			     integer_one_node);
1674 
1675 	  /* Set the minimum size to zero since the string in
1676 	     the array could have zero length.  */
1677 	  pdata->minlen = ssize_int (0);
1678 
1679 	  /* The array size determined above is an optimistic bound
1680 	     on the length.  If the array isn't nul-terminated the
1681 	     length computed by the library function would be greater.
1682 	     Even though using strlen to cross the subobject boundary
1683 	     is undefined, avoid drawing conclusions from the member
1684 	     type about the length here.  */
1685 	  tight_bound = true;
1686 	}
1687       else if (TREE_CODE (arg) == MEM_REF
1688 	       && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
1689 	       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
1690 	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
1691 	{
1692 	  /* Handle a MEM_REF into a DECL accessing an array of integers,
1693 	     being conservative about references to extern structures with
1694 	     flexible array members that can be initialized to arbitrary
1695 	     numbers of elements as an extension (static structs are okay).
1696 	     FIXME: Make this less conservative -- see
1697 	     component_ref_size in tree.cc.  */
1698 	  tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1699 	  if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
1700 	      && (decl_binds_to_current_def_p (ref)
1701 		  || !array_at_struct_end_p (arg)))
1702 	    {
1703 	      /* Fail if the offset is out of bounds.  Such accesses
1704 		 should be diagnosed at some point.  */
1705 	      val = DECL_SIZE_UNIT (ref);
1706 	      if (!val
1707 		  || TREE_CODE (val) != INTEGER_CST
1708 		  || integer_zerop (val))
1709 		return false;
1710 
1711 	      poly_offset_int psiz = wi::to_offset (val);
1712 	      poly_offset_int poff = mem_ref_offset (arg);
1713 	      if (known_le (psiz, poff))
1714 		return false;
1715 
1716 	      pdata->minlen = ssize_int (0);
1717 
1718 	      /* Subtract the offset and one for the terminating nul.  */
1719 	      psiz -= poff;
1720 	      psiz -= 1;
1721 	      val = wide_int_to_tree (TREE_TYPE (val), psiz);
1722 	      /* Since VAL reflects the size of a declared object
1723 		 rather than the type of the access it is not a tight bound.  */
1724 	    }
1725 	}
1726       else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
1727 	{
1728 	  /* Avoid handling pointers to arrays.  GCC might misuse
1729 	     a pointer to an array of one bound to point to an array
1730 	     object of a greater bound.  */
1731 	  tree argtype = TREE_TYPE (arg);
1732 	  if (TREE_CODE (argtype) == ARRAY_TYPE)
1733 	    {
1734 	      val = TYPE_SIZE_UNIT (argtype);
1735 	      if (!val
1736 		  || TREE_CODE (val) != INTEGER_CST
1737 		  || integer_zerop (val))
1738 		return false;
1739 	      val = wide_int_to_tree (TREE_TYPE (val),
1740 				      wi::sub (wi::to_wide (val), 1));
1741 
1742 	      /* Set the minimum size to zero since the string in
1743 		 the array could have zero length.  */
1744 	      pdata->minlen = ssize_int (0);
1745 	    }
1746 	}
1747       maxbound = true;
1748     }
1749 
1750   if (!val)
1751     return false;
1752 
1753   /* Adjust the lower bound on the string length as necessary.  */
1754   if (!pdata->minlen
1755       || (rkind != SRK_STRLEN
1756 	  && TREE_CODE (pdata->minlen) == INTEGER_CST
1757 	  && TREE_CODE (val) == INTEGER_CST
1758 	  && tree_int_cst_lt (val, pdata->minlen)))
1759     pdata->minlen = val;
1760 
1761   if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
1762     {
1763       /* Adjust the tighter (more optimistic) string length bound
1764 	 if necessary and proceed to adjust the more conservative
1765 	 bound.  */
1766       if (TREE_CODE (val) == INTEGER_CST)
1767 	{
1768 	  if (tree_int_cst_lt (pdata->maxbound, val))
1769 	    pdata->maxbound = val;
1770 	}
1771       else
1772 	pdata->maxbound = val;
1773     }
1774   else if (pdata->maxbound || maxbound)
1775     /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
1776        if VAL corresponds to the maximum length determined based
1777        on the type of the object.  */
1778     pdata->maxbound = val;
1779 
1780   if (tight_bound)
1781     {
1782       /* VAL computed above represents an optimistically tight bound
1783 	 on the length of the string based on the referenced object's
1784 	 or subobject's type.  Determine the conservative upper bound
1785 	 based on the enclosing object's size if possible.  */
1786       if (rkind == SRK_LENRANGE)
1787 	{
1788 	  poly_int64 offset;
1789 	  tree base = get_addr_base_and_unit_offset (arg, &offset);
1790 	  if (!base)
1791 	    {
1792 	      /* When the call above fails due to a non-constant offset
1793 		 assume the offset is zero and use the size of the whole
1794 		 enclosing object instead.  */
1795 	      base = get_base_address (arg);
1796 	      offset = 0;
1797 	    }
1798 	  /* If the base object is a pointer no upper bound on the length
1799 	     can be determined.  Otherwise the maximum length is equal to
1800 	     the size of the enclosing object minus the offset of
1801 	     the referenced subobject minus 1 (for the terminating nul).  */
1802 	  tree type = TREE_TYPE (base);
1803 	  if (TREE_CODE (type) == POINTER_TYPE
1804 	      || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
1805 	      || !(val = DECL_SIZE_UNIT (base)))
1806 	    val = build_all_ones_cst (size_type_node);
1807 	  else
1808 	    {
1809 	      val = DECL_SIZE_UNIT (base);
1810 	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1811 				 size_int (offset + 1));
1812 	    }
1813 	}
1814       else
1815 	return false;
1816     }
1817 
1818   if (pdata->maxlen)
1819     {
1820       /* Adjust the more conservative bound if possible/necessary
1821 	 and fail otherwise.  */
1822       if (rkind != SRK_STRLEN)
1823 	{
1824 	  if (TREE_CODE (pdata->maxlen) != INTEGER_CST
1825 	      || TREE_CODE (val) != INTEGER_CST)
1826 	    return false;
1827 
1828 	  if (tree_int_cst_lt (pdata->maxlen, val))
1829 	    pdata->maxlen = val;
1830 	  return true;
1831 	}
1832       else if (simple_cst_equal (val, pdata->maxlen) != 1)
1833 	{
1834 	  /* Fail if the length of this ARG is different from that
1835 	     previously determined from another ARG.  */
1836 	  return false;
1837 	}
1838     }
1839 
1840   pdata->maxlen = val;
1841   return rkind == SRK_LENRANGE || !integer_all_onesp (val);
1842 }
1843 
1844 /* For an ARG referencing one or more strings, try to obtain the range
1845    of their lengths, or the size of the largest array ARG referes to if
1846    of their lengths, or the size of the largest array ARG refers to if
1847    For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
1848    the maximum constant value.
1849    If ARG is an SSA_NAME, follow its use-def chains.  When RKIND ==
1850    SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
1851    length or if we are unable to determine the length, return false.
1852    VISITED is a bitmap of visited variables.
1853    RKIND determines the kind of value or range to obtain (see
1854    strlen_range_kind).
1855    Set PDATA->DECL if ARG refers to an unterminated constant array.
1856    On input, set ELTSIZE to 1 for normal single byte character strings,
1857    and either 2 or 4 for wide character strings (the size of wchar_t).
1858    Return true if *PDATA was successfully populated and false otherwise.  */
1859 
1860 static bool
1861 get_range_strlen (tree arg, bitmap visited,
1862 		  strlen_range_kind rkind,
1863 		  c_strlen_data *pdata, unsigned eltsize)
1864 {
1865 
1866   if (TREE_CODE (arg) != SSA_NAME)
1867     return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);
1868 
1869   /* If ARG is registered for SSA update we cannot look at its defining
1870      statement.  */
1871   if (name_registered_for_update_p (arg))
1872     return false;
1873 
1874   /* If we were already here, break the infinite cycle.  */
1875   if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
1876     return true;
1877 
1878   tree var = arg;
1879   gimple *def_stmt = SSA_NAME_DEF_STMT (var);
1880 
1881   switch (gimple_code (def_stmt))
1882     {
1883       case GIMPLE_ASSIGN:
1884 	/* The RHS of the statement defining VAR must either have a
1885 	   constant length or come from another SSA_NAME with a constant
1886 	   length.  */
1887         if (gimple_assign_single_p (def_stmt)
1888             || gimple_assign_unary_nop_p (def_stmt))
1889           {
1890 	    tree rhs = gimple_assign_rhs1 (def_stmt);
1891 	    return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
1892           }
1893 	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1894 	  {
1895 	    tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1896 			    gimple_assign_rhs3 (def_stmt) };
1897 
1898 	    for (unsigned int i = 0; i < 2; i++)
1899 	      if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
1900 		{
1901 		  if (rkind != SRK_LENRANGE)
1902 		    return false;
1903 		  /* Set the upper bound to the maximum to prevent
1904 		     it from being adjusted in the next iteration but
1905 		     leave MINLEN and the more conservative MAXBOUND
1906 		     determined so far alone (or leave them null if
1907 		     they haven't been set yet).  That the MINLEN is
1908 		     in fact zero can be determined from MAXLEN being
1909 		     unbounded but the discovered minimum is used for
1910 		     diagnostics.  */
1911 		  pdata->maxlen = build_all_ones_cst (size_type_node);
1912 		}
1913 	    return true;
1914 	  }
1915         return false;
1916 
1917       case GIMPLE_PHI:
1918 	/* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1919 	   must have a constant length.  */
1920 	for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
1921           {
1922             tree arg = gimple_phi_arg (def_stmt, i)->def;
1923 
1924             /* If this PHI has itself as an argument, we cannot
1925                determine the string length of this argument.  However,
1926                if we can find a constant string length for the other
1927                PHI args then we can still be sure that this is a
1928                constant string length.  So be optimistic and just
1929                continue with the next argument.  */
1930             if (arg == gimple_phi_result (def_stmt))
1931               continue;
1932 
1933 	    if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
1934 	      {
1935 		if (rkind != SRK_LENRANGE)
1936 		  return false;
1937 		/* Set the upper bound to the maximum to prevent
1938 		   it from being adjusted in the next iteration but
1939 		   leave MINLEN and the more conservative MAXBOUND
1940 		   determined so far alone (or leave them null if
1941 		   they haven't been set yet).  That the MINLEN is
1942 		   in fact zero can be determined from MAXLEN being
1943 		   unbounded but the discovered minimum is used for
1944 		   diagnostics.  */
1945 		pdata->maxlen = build_all_ones_cst (size_type_node);
1946 	      }
1947           }
1948         return true;
1949 
1950       default:
1951         return false;
1952     }
1953 }
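/* Editor's sketch (not part of the original source): the SSA walk
   above merges lengths across control flow, so that for

     p = cond ? "ab" : "wxyz";

   SRK_LENRANGE visits both COND_EXPR (or PHI) arguments and arrives
   at MINLEN == 2 and MAXLEN == 4 for strlen (p).  */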
1954 
1955 /* Try to obtain the range of the lengths of the string(s) referenced
1956    by ARG, or the size of the largest array ARG refers to if the range
1957    of lengths cannot be determined, and store all in *PDATA which must
1958    be zero-initialized on input except PDATA->MAXBOUND may be set to
1959    a non-null tree node other than INTEGER_CST to request to have it
1960    set to the length of the longest string in a PHI.  ELTSIZE is
1961    the expected size of the string element in bytes: 1 for char and
1962    some power of 2 for wide characters.
1963    Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1964    for optimization.  Returning false means that a nonzero PDATA->MINLEN
1965    doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1966    is -1 (in that case, the actual range is indeterminate, i.e.,
1967    [0, PTRDIFF_MAX - 2]).  */
1968 
1969 bool
1970 get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
1971 {
1972   auto_bitmap visited;
1973   tree maxbound = pdata->maxbound;
1974 
1975   if (!get_range_strlen (arg, visited, SRK_LENRANGE, pdata, eltsize))
1976     {
1977       /* On failure extend the length range to an impossible maximum
1978 	 (a valid MAXLEN must be less than PTRDIFF_MAX - 1).  Other
1979 	 members can stay unchanged regardless.  */
1980       pdata->minlen = ssize_int (0);
1981       pdata->maxlen = build_all_ones_cst (size_type_node);
1982     }
1983   else if (!pdata->minlen)
1984     pdata->minlen = ssize_int (0);
1985 
1986   /* If MAXBOUND is unchanged from its initial non-null value, set the
1987      conservative MAXBOUND to SIZE_MAX.  Otherwise leave it as is
     (it may be null).  */
1988   if (maxbound && pdata->maxbound == maxbound)
1989     pdata->maxbound = build_all_ones_cst (size_type_node);
1990 
1991   return !integer_all_onesp (pdata->maxlen);
1992 }
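/* Editor's sketch of a typical use of the entry point above (modeled
   on its callers; the variable names are illustrative):

     c_strlen_data lendata = { };
     if (get_range_strlen (arg, &lendata, 1))
       ;  // lendata.minlen and lendata.maxlen bound strlen (arg)
     else
       ;  // indeterminate: maxlen is all-ones, minlen is zero
*/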
1993 
1994 /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1995    For ARG of pointer types, NONSTR indicates if the caller is prepared
1996    to handle unterminated strings.   For integer ARG and when RKIND ==
1997    SRK_INT_VALUE, NONSTR must be null.
1998 
1999    If an unterminated array is discovered and our caller handles
2000    unterminated arrays, then bubble up the offending DECL and
2001    return the maximum size.  Otherwise return NULL.  */
2002 
2003 static tree
2004 get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
2005 {
2006   /* A non-null NONSTR is meaningless when determining the maximum
2007      value of an integer ARG.  */
2008   gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
2009   /* ARG must have an integral type when RKIND says so.  */
2010   gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
2011 
2012   auto_bitmap visited;
2013 
2014   /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
2015      is unbounded.  */
2016   c_strlen_data lendata = { };
2017   if (!get_range_strlen (arg, visited, rkind, &lendata, /* eltsize = */1))
2018     lendata.maxlen = NULL_TREE;
2019   else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
2020     lendata.maxlen = NULL_TREE;
2021 
2022   if (nonstr)
2023     {
2024       /* For callers prepared to handle unterminated arrays set
2025 	 *NONSTR to point to the declaration of the array and return
2026 	 the maximum length/size. */
2027       *nonstr = lendata.decl;
2028       return lendata.maxlen;
2029     }
2030 
2031   /* Fail if the constant array isn't nul-terminated.  */
2032   return lendata.decl ? NULL_TREE : lendata.maxlen;
2033 }
2034 
2035 /* Return true if LEN is known to be less than or equal to (or if STRICT is
2036    true, strictly less than) the lower bound of SIZE at compile time and false
2037    otherwise.  */
2038 
2039 static bool
2040 known_lower (gimple *stmt, tree len, tree size, bool strict = false)
2041 {
2042   if (len == NULL_TREE)
2043     return false;
2044 
2045   wide_int size_range[2];
2046   wide_int len_range[2];
2047   if (get_range (len, stmt, len_range) && get_range (size, stmt, size_range))
2048     {
2049       if (strict)
2050 	return wi::ltu_p (len_range[1], size_range[0]);
2051       else
2052 	return wi::leu_p (len_range[1], size_range[0]);
2053     }
2054 
2055   return false;
2056 }
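/* Editor's note: for example, if range analysis shows LEN in [0, 8]
   and SIZE in [16, 32], known_lower returns true because 8 <= 16;
   with STRICT set it tests 8 < 16 instead.  Comparing LEN's upper
   bound against SIZE's lower bound makes the answer safe for every
   value the two may take at run time.  */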
2057 
2058 /* Fold function call to builtin strcpy with arguments DEST and SRC.
2059    Return true if the call was simplified and false if no
2060    simplification could be made.  */
2061 
2062 static bool
2063 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
2064 			    tree dest, tree src)
2065 {
2066   gimple *stmt = gsi_stmt (*gsi);
2067   location_t loc = gimple_location (stmt);
2068   tree fn;
2069 
2070   /* If SRC and DEST are the same (and not volatile), return DEST.  */
2071   if (operand_equal_p (src, dest, 0))
2072     {
2073       /* Issue -Wrestrict unless the pointers are null (those do
2074 	 not point to objects and so do not indicate an overlap;
2075 	 such calls could be the result of sanitization and jump
2076 	 threading).  */
2077       if (!integer_zerop (dest) && !warning_suppressed_p (stmt, OPT_Wrestrict))
2078 	{
2079 	  tree func = gimple_call_fndecl (stmt);
2080 
2081 	  warning_at (loc, OPT_Wrestrict,
2082 		      "%qD source argument is the same as destination",
2083 		      func);
2084 	}
2085 
2086       replace_call_with_value (gsi, dest);
2087       return true;
2088     }
2089 
2090   if (optimize_function_for_size_p (cfun))
2091     return false;
2092 
2093   fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2094   if (!fn)
2095     return false;
2096 
2097   /* Set to non-null if ARG refers to an unterminated array.  */
2098   tree nonstr = NULL;
2099   tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);
2100 
2101   if (nonstr)
2102     {
2103       /* Avoid folding calls with unterminated arrays.  */
2104       if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
2105 	warn_string_no_nul (loc, stmt, "strcpy", src, nonstr);
2106       suppress_warning (stmt, OPT_Wstringop_overread);
2107       return false;
2108     }
2109 
2110   if (!len)
2111     return false;
2112 
2113   len = fold_convert_loc (loc, size_type_node, len);
2114   len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
2115   len = force_gimple_operand_gsi (gsi, len, true,
2116 				  NULL_TREE, true, GSI_SAME_STMT);
2117   gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2118   replace_call_with_call_and_fold (gsi, repl);
2119   return true;
2120 }
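/* Editor's sketch (not part of the original source): with a source of
   known length the fold above turns

     strcpy (dst, "abc");

   into

     memcpy (dst, "abc", 4);

   where 4 == strlen ("abc") + 1, so the terminating nul is copied
   along with the string.  */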
2121 
2122 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
2123    Return true if the call was simplified and false if no
2124    simplification could be made.  */
2125 
2126 static bool
2127 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
2128 			     tree dest, tree src, tree len)
2129 {
2130   gimple *stmt = gsi_stmt (*gsi);
2131   location_t loc = gimple_location (stmt);
2132   bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
2133 
2134   /* If the LEN parameter is zero, return DEST.  */
2135   if (integer_zerop (len))
2136     {
2137       /* Avoid warning if the destination refers to an array/pointer
2138 	 decorated with attribute nonstring.  */
2139       if (!nonstring)
2140 	{
2141 	  tree fndecl = gimple_call_fndecl (stmt);
2142 
2143 	  /* Warn about the lack of nul termination: the result is not
2144 	     a (nul-terminated) string.  */
2145 	  tree slen = get_maxval_strlen (src, SRK_STRLEN);
2146 	  if (slen && !integer_zerop (slen))
2147 	    warning_at (loc, OPT_Wstringop_truncation,
2148 			"%qD destination unchanged after copying no bytes "
2149 			"from a string of length %E",
2150 			fndecl, slen);
2151 	  else
2152 	    warning_at (loc, OPT_Wstringop_truncation,
2153 			"%qD destination unchanged after copying no bytes",
2154 			fndecl);
2155 	}
2156 
2157       replace_call_with_value (gsi, dest);
2158       return true;
2159     }
2160 
2161   /* We can't compare slen with len as constants below if len is not a
2162      constant.  */
2163   if (TREE_CODE (len) != INTEGER_CST)
2164     return false;
2165 
2166   /* Now, we must be passed a constant src ptr parameter.  */
2167   tree slen = get_maxval_strlen (src, SRK_STRLEN);
2168   if (!slen || TREE_CODE (slen) != INTEGER_CST)
2169     return false;
2170 
2171   /* The size of the source string including the terminating nul.  */
2172   tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
2173 
2174   /* We do not support simplification of this case, though we do
2175      support it when expanding trees into RTL.  */
2176   /* FIXME: generate a call to __builtin_memset.  */
2177   if (tree_int_cst_lt (ssize, len))
2178     return false;
2179 
2180   /* Diagnose truncation that leaves the copy unterminated.  */
2181   maybe_diag_stxncpy_trunc (*gsi, src, len);
2182 
2183   /* OK transform into builtin memcpy.  */
2184   tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2185   if (!fn)
2186     return false;
2187 
2188   len = fold_convert_loc (loc, size_type_node, len);
2189   len = force_gimple_operand_gsi (gsi, len, true,
2190 				  NULL_TREE, true, GSI_SAME_STMT);
2191   gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2192   replace_call_with_call_and_fold (gsi, repl);
2193 
2194   return true;
2195 }
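/* Editor's sketch (not part of the original source): with a constant
   bound covering the whole source string,

     strncpy (dst, "abc", 4);

   becomes

     memcpy (dst, "abc", 4);

   A smaller bound such as 2 is also folded, to a 2-byte memcpy, after
   maybe_diag_stxncpy_trunc has had a chance to warn that the result
   is truncated and not nul-terminated.  */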
2196 
2197 /* Fold function call to builtin strchr or strrchr.
2198    If both arguments are constant, evaluate and fold the result,
2199    otherwise simplify str(r)chr (str, 0) into str + strlen (str).
2200    In general strlen is significantly faster than strchr
2201    due to being a simpler operation.  */
2202 static bool
2203 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
2204 {
2205   gimple *stmt = gsi_stmt (*gsi);
2206   tree str = gimple_call_arg (stmt, 0);
2207   tree c = gimple_call_arg (stmt, 1);
2208   location_t loc = gimple_location (stmt);
2209   const char *p;
2210   char ch;
2211 
2212   if (!gimple_call_lhs (stmt))
2213     return false;
2214 
2215   /* Avoid folding if the first argument is not a nul-terminated array.
2216      Defer warning until later.  */
2217   if (!check_nul_terminated_array (NULL_TREE, str))
2218     return false;
2219 
2220   if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
2221     {
2222       const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
2223 
2224       if (p1 == NULL)
2225 	{
2226 	  replace_call_with_value (gsi, integer_zero_node);
2227 	  return true;
2228 	}
2229 
2230       tree len = build_int_cst (size_type_node, p1 - p);
2231       gimple_seq stmts = NULL;
2232       gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2233 					      POINTER_PLUS_EXPR, str, len);
2234       gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2235       gsi_replace_with_seq_vops (gsi, stmts);
2236       return true;
2237     }
2238 
2239   if (!integer_zerop (c))
2240     return false;
2241 
2242   /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
2243   if (is_strrchr && optimize_function_for_size_p (cfun))
2244     {
2245       tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2246 
2247       if (strchr_fn)
2248 	{
2249 	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
2250 	  replace_call_with_call_and_fold (gsi, repl);
2251 	  return true;
2252 	}
2253 
2254       return false;
2255     }
2256 
2257   tree len;
2258   tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2259 
2260   if (!strlen_fn)
2261     return false;
2262 
2263   /* Create len = strlen (str).  */
2264   gimple_seq stmts = NULL;
2265   gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
2266   gimple_set_location (new_stmt, loc);
2267   len = create_tmp_reg_or_ssa_name (size_type_node);
2268   gimple_call_set_lhs (new_stmt, len);
2269   gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2270 
2271   /* Create (str p+ strlen (str)).  */
2272   new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2273 				  POINTER_PLUS_EXPR, str, len);
2274   gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2275   gsi_replace_with_seq_vops (gsi, stmts);
2276   /* gsi now points at the assignment to the lhs, get a
2277      stmt iterator to the strlen.
2278      ???  We can't use gsi_for_stmt as that doesn't work when the
2279      CFG isn't built yet.  */
2280   gimple_stmt_iterator gsi2 = *gsi;
2281   gsi_prev (&gsi2);
2282   fold_stmt (&gsi2);
2283   return true;
2284 }
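/* Editor's sketch (not part of the original source): the two
   transformations above at work:

     strchr ("hello", 'l')  =>  "hello" + 2
     strchr (s, '\0')       =>  s + strlen (s)

   The second form trades a character search for a length computation,
   which is typically the cheaper primitive.  */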
2285 
2286 /* Fold function call to builtin strstr.
2287    If both arguments are constant, evaluate and fold the result,
2288    additionally fold strstr (x, "") into x and strstr (x, "c")
2289    into strchr (x, 'c').  */
2290 static bool
2291 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
2292 {
2293   gimple *stmt = gsi_stmt (*gsi);
2294   if (!gimple_call_lhs (stmt))
2295     return false;
2296 
2297   tree haystack = gimple_call_arg (stmt, 0);
2298   tree needle = gimple_call_arg (stmt, 1);
2299 
2300   /* Avoid folding if either argument is not a nul-terminated array.
2301      Defer warning until later.  */
2302   if (!check_nul_terminated_array (NULL_TREE, haystack)
2303       || !check_nul_terminated_array (NULL_TREE, needle))
2304     return false;
2305 
2306   const char *q = c_getstr (needle);
2307   if (q == NULL)
2308     return false;
2309 
2310   if (const char *p = c_getstr (haystack))
2311     {
2312       const char *r = strstr (p, q);
2313 
2314       if (r == NULL)
2315 	{
2316 	  replace_call_with_value (gsi, integer_zero_node);
2317 	  return true;
2318 	}
2319 
2320       tree len = build_int_cst (size_type_node, r - p);
2321       gimple_seq stmts = NULL;
2322       gimple *new_stmt
2323 	= gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2324 			       haystack, len);
2325       gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2326       gsi_replace_with_seq_vops (gsi, stmts);
2327       return true;
2328     }
2329 
2330   /* For strstr (x, "") return x.  */
2331   if (q[0] == '\0')
2332     {
2333       replace_call_with_value (gsi, haystack);
2334       return true;
2335     }
2336 
2337   /* Transform strstr (x, "c") into strchr (x, 'c').  */
2338   if (q[1] == '\0')
2339     {
2340       tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2341       if (strchr_fn)
2342 	{
2343 	  tree c = build_int_cst (integer_type_node, q[0]);
2344 	  gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2345 	  replace_call_with_call_and_fold (gsi, repl);
2346 	  return true;
2347 	}
2348     }
2349 
2350   return false;
2351 }
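/* Editor's sketch (not part of the original source): examples of the
   strstr folds above:

     strstr ("needle in haystack", "in")  =>  the haystack pointer + 7
     strstr (x, "")                       =>  x
     strstr (x, "c")                      =>  strchr (x, 'c')  */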
2352 
2353 /* Simplify a call to the strcat builtin.  DST and SRC are the arguments
2354    to the call.
2355 
2356    Return true if a simplification was made and false otherwise.
2357 
2358    When profitable, the call is split into strlen (DST) followed by a
2359    memcpy of SRC, including its terminating nul, to DST plus that
2360    length, so the destination string is scanned only once.  */
2370 
2371 static bool
2372 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
2373 {
2374   gimple *stmt = gsi_stmt (*gsi);
2375   location_t loc = gimple_location (stmt);
2376 
2377   const char *p = c_getstr (src);
2378 
2379   /* If the string length is zero, return the dst parameter.  */
2380   if (p && *p == '\0')
2381     {
2382       replace_call_with_value (gsi, dst);
2383       return true;
2384     }
2385 
2386   if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
2387     return false;
2388 
2389   /* See if we can store by pieces into (dst + strlen(dst)).  */
2390   tree newdst;
2391   tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2392   tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2393 
2394   if (!strlen_fn || !memcpy_fn)
2395     return false;
2396 
2397   /* If the length of the source string isn't computable don't
2398      split strcat into strlen and memcpy.  */
2399   tree len = get_maxval_strlen (src, SRK_STRLEN);
2400   if (! len)
2401     return false;
2402 
2403   /* Create strlen (dst).  */
2404   gimple_seq stmts = NULL, stmts2;
2405   gimple *repl = gimple_build_call (strlen_fn, 1, dst);
2406   gimple_set_location (repl, loc);
2407   newdst = create_tmp_reg_or_ssa_name (size_type_node);
2408   gimple_call_set_lhs (repl, newdst);
2409   gimple_seq_add_stmt_without_update (&stmts, repl);
2410 
2411   /* Create (dst p+ strlen (dst)).  */
2412   newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
2413   newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
2414   gimple_seq_add_seq_without_update (&stmts, stmts2);
2415 
2416   len = fold_convert_loc (loc, size_type_node, len);
2417   len = size_binop_loc (loc, PLUS_EXPR, len,
2418 			build_int_cst (size_type_node, 1));
2419   len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
2420   gimple_seq_add_seq_without_update (&stmts, stmts2);
2421 
2422   repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
2423   gimple_seq_add_stmt_without_update (&stmts, repl);
2424   if (gimple_call_lhs (stmt))
2425     {
2426       repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
2427       gimple_seq_add_stmt_without_update (&stmts, repl);
2428       gsi_replace_with_seq_vops (gsi, stmts);
2429       /* gsi now points at the assignment to the lhs, get a
2430          stmt iterator to the memcpy call.
2431 	 ???  We can't use gsi_for_stmt as that doesn't work when the
2432 	 CFG isn't built yet.  */
2433       gimple_stmt_iterator gsi2 = *gsi;
2434       gsi_prev (&gsi2);
2435       fold_stmt (&gsi2);
2436     }
2437   else
2438     {
2439       gsi_replace_with_seq_vops (gsi, stmts);
2440       fold_stmt (gsi);
2441     }
2442   return true;
2443 }
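/* Editor's sketch (not part of the original source): the split
   performed above rewrites

     strcat (dst, "abc");

   into the equivalent of

     size_t n = strlen (dst);
     memcpy (dst + n, "abc", 4);

   so the destination is scanned once and the copy length, including
   the nul, is a compile-time constant.  */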
2444 
2445 /* Fold a call to the __strcat_chk builtin.  DEST, SRC, and SIZE
2446    are the arguments to the call.  */
2447 
2448 static bool
2449 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2450 {
2451   gimple *stmt = gsi_stmt (*gsi);
2452   tree dest = gimple_call_arg (stmt, 0);
2453   tree src = gimple_call_arg (stmt, 1);
2454   tree size = gimple_call_arg (stmt, 2);
2455   tree fn;
2456   const char *p;
2457 
2458 
2459   p = c_getstr (src);
2460   /* If the SRC parameter is "", return DEST.  */
2461   if (p && *p == '\0')
2462     {
2463       replace_call_with_value (gsi, dest);
2464       return true;
2465     }
2466 
2467   if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2468     return false;
2469 
2470   /* If __builtin_strcat_chk is used, assume strcat is available.  */
2471   fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2472   if (!fn)
2473     return false;
2474 
2475   gimple *repl = gimple_build_call (fn, 2, dest, src);
2476   replace_call_with_call_and_fold (gsi, repl);
2477   return true;
2478 }
2479 
2480 /* Simplify a call to the strncat builtin.  */
2481 
2482 static bool
2483 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2484 {
2485   gimple *stmt = gsi_stmt (*gsi);
2486   tree dst = gimple_call_arg (stmt, 0);
2487   tree src = gimple_call_arg (stmt, 1);
2488   tree len = gimple_call_arg (stmt, 2);
2489   tree src_len = c_strlen (src, 1);
2490 
2491   /* If the requested length is zero, or the src parameter string
2492      length is zero, return the dst parameter.  */
2493   if (integer_zerop (len) || (src_len && integer_zerop (src_len)))
2494     {
2495       replace_call_with_value (gsi, dst);
2496       return true;
2497     }
2498 
2499   /* Return early if the requested len is less than the string length.
2500      Warnings will be issued elsewhere later.  */
2501   if (!src_len || known_lower (stmt, len, src_len, true))
2502     return false;
2503 
2504   /* Warn on constant LEN.  */
2505   if (TREE_CODE (len) == INTEGER_CST)
2506     {
2507       bool nowarn = warning_suppressed_p (stmt, OPT_Wstringop_overflow_);
2508       tree dstsize;
2509 
2510       if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize)
2511 	  && TREE_CODE (dstsize) == INTEGER_CST)
2512 	{
2513 	  int cmpdst = tree_int_cst_compare (len, dstsize);
2514 
2515 	  if (cmpdst >= 0)
2516 	    {
2517 	      tree fndecl = gimple_call_fndecl (stmt);
2518 
2519 	      /* Strncat copies (at most) LEN bytes and always appends
2520 		 the terminating NUL so the specified bound should never
2521 		 be equal to (or greater than) the size of the destination.
2522 		 If it is, the copy could overflow.  */
2523 	      location_t loc = gimple_location (stmt);
2524 	      nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2525 				   cmpdst == 0
2526 				   ? G_("%qD specified bound %E equals "
2527 					"destination size")
2528 				   : G_("%qD specified bound %E exceeds "
2529 					"destination size %E"),
2530 				   fndecl, len, dstsize);
2531 	      if (nowarn)
2532 		suppress_warning (stmt, OPT_Wstringop_overflow_);
2533 	    }
2534 	}
2535 
2536       if (!nowarn && TREE_CODE (src_len) == INTEGER_CST
2537 	  && tree_int_cst_compare (src_len, len) == 0)
2538 	{
2539 	  tree fndecl = gimple_call_fndecl (stmt);
2540 	  location_t loc = gimple_location (stmt);
2541 
2542 	  /* To avoid possible overflow the specified bound should also
2543 	     not be equal to the length of the source, even when the size
2544 	     of the destination is unknown (it's not an uncommon mistake
2545 	     to specify as the bound to strncat the length of the source).  */
2546 	  if (warning_at (loc, OPT_Wstringop_overflow_,
2547 			  "%qD specified bound %E equals source length",
2548 			  fndecl, len))
2549 	    suppress_warning (stmt, OPT_Wstringop_overflow_);
2550 	}
2551     }
2552 
2553   if (!known_lower (stmt, src_len, len))
2554     return false;
2555 
2556   tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2557 
2558   /* If the replacement _DECL isn't initialized, don't do the
2559      transformation.  */
2560   if (!fn)
2561     return false;
2562 
2563   /* Otherwise, emit a call to strcat.  */
2564   gcall *repl = gimple_build_call (fn, 2, dst, src);
2565   replace_call_with_call_and_fold (gsi, repl);
2566   return true;
2567 }
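/* Editor's sketch (not part of the original source): with a bound
   provably no smaller than the source length, e.g.

     strncat (dst, "abc", 8);

   the fold above emits

     strcat (dst, "abc");

   since the bound can never cause the copy to stop early.  */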
2568 
2569 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2570    LEN, and SIZE.  */
2571 
2572 static bool
2573 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2574 {
2575   gimple *stmt = gsi_stmt (*gsi);
2576   tree dest = gimple_call_arg (stmt, 0);
2577   tree src = gimple_call_arg (stmt, 1);
2578   tree len = gimple_call_arg (stmt, 2);
2579   tree size = gimple_call_arg (stmt, 3);
2580   tree fn;
2581   const char *p;
2582 
2583   p = c_getstr (src);
2584   /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
2585   if ((p && *p == '\0')
2586       || integer_zerop (len))
2587     {
2588       replace_call_with_value (gsi, dest);
2589       return true;
2590     }
2591 
2592   if (! integer_all_onesp (size))
2593     {
2594       tree src_len = c_strlen (src, 1);
2595       if (known_lower (stmt, src_len, len))
2596 	{
2597 	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
2598 	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2599 	  if (!fn)
2600 	    return false;
2601 
2602 	  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2603 	  replace_call_with_call_and_fold (gsi, repl);
2604 	  return true;
2605 	}
2606       return false;
2607     }
2608 
2609   /* If __builtin_strncat_chk is used, assume strncat is available.  */
2610   fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2611   if (!fn)
2612     return false;
2613 
2614   gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2615   replace_call_with_call_and_fold (gsi, repl);
2616   return true;
2617 }
2618 
2619 /* Build and append gimple statements to STMTS that would load the first
2620    character of the memory location identified by STR.  LOC is the
2621    location of the statement.  */
2622 
2623 static tree
2624 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2625 {
2626   tree var;
2627 
2628   tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2629   tree cst_uchar_ptr_node
2630     = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2631   tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2632 
2633   tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2634   gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2635   var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2636 
2637   gimple_assign_set_lhs (stmt, var);
2638   gimple_seq_add_stmt_without_update (stmts, stmt);
2639 
2640   return var;
2641 }
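/* Editor's note: the helper above emits GIMPLE equivalent to the C
   expression

     tmp = *(const unsigned char *) str;

   and returns TMP.  The unsigned char type mirrors how the C standard
   specifies that the string comparison functions compare bytes.  */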
2642 
2643 /* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI iterator.  */
2644 
2645 static bool
2646 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2647 {
2648   gimple *stmt = gsi_stmt (*gsi);
2649   tree callee = gimple_call_fndecl (stmt);
2650   enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2651 
2652   tree type = integer_type_node;
2653   tree str1 = gimple_call_arg (stmt, 0);
2654   tree str2 = gimple_call_arg (stmt, 1);
2655   tree lhs = gimple_call_lhs (stmt);
2656 
2657   tree bound_node = NULL_TREE;
2658   unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;
2659 
2660   /* Handle strncmp and strncasecmp functions.  */
2661   if (gimple_call_num_args (stmt) == 3)
2662     {
2663       bound_node = gimple_call_arg (stmt, 2);
2664       if (tree_fits_uhwi_p (bound_node))
2665 	bound = tree_to_uhwi (bound_node);
2666     }
2667 
2668   /* If the BOUND parameter is zero, return zero.  */
2669   if (bound == 0)
2670     {
2671       replace_call_with_value (gsi, integer_zero_node);
2672       return true;
2673     }
2674 
2675   /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
2676   if (operand_equal_p (str1, str2, 0))
2677     {
2678       replace_call_with_value (gsi, integer_zero_node);
2679       return true;
2680     }
2681 
2682   /* Initially set to the number of characters, including the terminating
2683      nul if each array has one.  LENx == strnlen (Sx, LENx) implies that
2684      the array Sx is not terminated by a nul.
2685      For nul-terminated strings, LENx is then adjusted down to the string
2686      length so that LENx == NULPOSx holds.  */
2687   unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
2688   const char *p1 = getbyterep (str1, &len1);
2689   const char *p2 = getbyterep (str2, &len2);
2690 
2691   /* The position of the terminating nul character if one exists, otherwise
2692      a value greater than LENx.  */
2693   unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;
2694 
2695   if (p1)
2696     {
2697       size_t n = strnlen (p1, len1);
2698       if (n < len1)
2699 	len1 = nulpos1 = n;
2700     }
2701 
2702   if (p2)
2703     {
2704       size_t n = strnlen (p2, len2);
2705       if (n < len2)
2706 	len2 = nulpos2 = n;
2707     }
2708 
2709   /* For known strings, return an immediate value.  */
2710   if (p1 && p2)
2711     {
2712       int r = 0;
2713       bool known_result = false;
2714 
2715       switch (fcode)
2716 	{
2717 	case BUILT_IN_STRCMP:
2718 	case BUILT_IN_STRCMP_EQ:
2719 	  if (len1 != nulpos1 || len2 != nulpos2)
2720 	    break;
2721 
2722 	  r = strcmp (p1, p2);
2723 	  known_result = true;
2724 	  break;
2725 
2726 	case BUILT_IN_STRNCMP:
2727 	case BUILT_IN_STRNCMP_EQ:
2728 	  {
2729 	    if (bound == HOST_WIDE_INT_M1U)
2730 	      break;
2731 
2732 	    /* Reduce the bound to be no more than the length
2733 	       of the shorter of the two strings, or the sizes
2734 	       of the unterminated arrays.  */
2735 	    unsigned HOST_WIDE_INT n = bound;
2736 
2737 	    if (len1 == nulpos1 && len1 < n)
2738 	      n = len1 + 1;
2739 	    if (len2 == nulpos2 && len2 < n)
2740 	      n = len2 + 1;
2741 
2742 	    if (MIN (nulpos1, nulpos2) + 1 < n)
2743 	      break;
2744 
2745 	    r = strncmp (p1, p2, n);
2746 	    known_result = true;
2747 	    break;
2748 	  }
2749 	/* The only situation we can handle is where the strings are equal
2750 	   (result 0), which is already handled by the operand_equal_p case.  */
2751 	case BUILT_IN_STRCASECMP:
2752 	  break;
2753 	case BUILT_IN_STRNCASECMP:
2754 	  {
2755 	    if (bound == HOST_WIDE_INT_M1U)
2756 	      break;
2757 	    r = strncmp (p1, p2, bound);
2758 	    if (r == 0)
2759 	      known_result = true;
2760 	    break;
2761 	  }
2762 	default:
2763 	  gcc_unreachable ();
2764 	}
2765 
2766       if (known_result)
2767 	{
2768 	  replace_call_with_value (gsi, build_cmp_result (type, r));
2769 	  return true;
2770 	}
2771     }
2772 
2773   bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
2774     || fcode == BUILT_IN_STRCMP
2775     || fcode == BUILT_IN_STRCMP_EQ
2776     || fcode == BUILT_IN_STRCASECMP;
2777 
2778   location_t loc = gimple_location (stmt);
2779 
2780   /* If the second arg is "", return *(const unsigned char*)arg1.  */
2781   if (p2 && *p2 == '\0' && nonzero_bound)
2782     {
2783       gimple_seq stmts = NULL;
2784       tree var = gimple_load_first_char (loc, str1, &stmts);
2785       if (lhs)
2786 	{
2787 	  stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2788 	  gimple_seq_add_stmt_without_update (&stmts, stmt);
2789 	}
2790 
2791       gsi_replace_with_seq_vops (gsi, stmts);
2792       return true;
2793     }
2794 
2795   /* If the first arg is "", return -*(const unsigned char*)arg2.  */
2796   if (p1 && *p1 == '\0' && nonzero_bound)
2797     {
2798       gimple_seq stmts = NULL;
2799       tree var = gimple_load_first_char (loc, str2, &stmts);
2800 
2801       if (lhs)
2802 	{
2803 	  tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2804 	  stmt = gimple_build_assign (c, NOP_EXPR, var);
2805 	  gimple_seq_add_stmt_without_update (&stmts, stmt);
2806 
2807 	  stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2808 	  gimple_seq_add_stmt_without_update (&stmts, stmt);
2809 	}
2810 
2811       gsi_replace_with_seq_vops (gsi, stmts);
2812       return true;
2813     }
2814 
2815   /* If BOUND is one, return an expression corresponding to
2816      (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
2817   if (fcode == BUILT_IN_STRNCMP && bound == 1)
2818     {
2819       gimple_seq stmts = NULL;
2820       tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2821       tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2822 
2823       if (lhs)
2824 	{
2825 	  tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2826 	  gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2827 	  gimple_seq_add_stmt_without_update (&stmts, convert1);
2828 
2829 	  tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2830 	  gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2831 	  gimple_seq_add_stmt_without_update (&stmts, convert2);
2832 
2833 	  stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2834 	  gimple_seq_add_stmt_without_update (&stmts, stmt);
2835 	}
2836 
2837       gsi_replace_with_seq_vops (gsi, stmts);
2838       return true;
2839     }
2840 
2841   /* If BOUND is greater than the length of one constant string,
2842      and the other argument is also a nul-terminated string, replace
2843      strncmp with strcmp.  */
2844   if (fcode == BUILT_IN_STRNCMP
2845       && bound > 0 && bound < HOST_WIDE_INT_M1U
2846       && ((p2 && len2 < bound && len2 == nulpos2)
2847 	  || (p1 && len1 < bound && len1 == nulpos1)))
2848     {
2849       tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2850       if (!fn)
2851         return false;
2852       gimple *repl = gimple_build_call (fn, 2, str1, str2);
2853       replace_call_with_call_and_fold (gsi, repl);
2854       return true;
2855     }
2856 
2857   return false;
2858 }
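/* Editor's sketch (not part of the original source): examples of the
   comparison folds above:

     strcmp (s, s)        =>  0
     strcmp (s, "")       =>  *(const unsigned char *) s
     strncmp (s1, s2, 1)  =>  *(const unsigned char *) s1
                              - *(const unsigned char *) s2
     strncmp (s, "ab", 9) =>  strcmp (s, "ab")  */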
2859 
2860 /* Fold a call to the memchr builtin pointed to by the GSI iterator.  */
2861 
2862 static bool
2863 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2864 {
2865   gimple *stmt = gsi_stmt (*gsi);
2866   tree lhs = gimple_call_lhs (stmt);
2867   tree arg1 = gimple_call_arg (stmt, 0);
2868   tree arg2 = gimple_call_arg (stmt, 1);
2869   tree len = gimple_call_arg (stmt, 2);
2870 
2871   /* If the LEN parameter is zero, return zero.  */
2872   if (integer_zerop (len))
2873     {
2874       replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2875       return true;
2876     }
2877 
2878   char c;
2879   if (TREE_CODE (arg2) != INTEGER_CST
2880       || !tree_fits_uhwi_p (len)
2881       || !target_char_cst_p (arg2, &c))
2882     return false;
2883 
2884   unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2885   unsigned HOST_WIDE_INT string_length;
2886   const char *p1 = getbyterep (arg1, &string_length);
2887 
2888   if (p1)
2889     {
2890       const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2891       if (r == NULL)
2892 	{
2893 	  tree mem_size, offset_node;
2894 	  byte_representation (arg1, &offset_node, &mem_size, NULL);
2895 	  unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
2896 					  ? 0 : tree_to_uhwi (offset_node);
2897 	  /* MEM_SIZE is the size of the array the string literal
2898 	     is stored in.  */
2899 	  unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
2900 	  gcc_checking_assert (string_length <= string_size);
2901 	  if (length <= string_size)
2902 	    {
2903 	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2904 	      return true;
2905 	    }
2906 	}
2907       else
2908 	{
2909 	  unsigned HOST_WIDE_INT offset = r - p1;
2910 	  gimple_seq stmts = NULL;
2911 	  if (lhs != NULL_TREE)
2912 	    {
2913 	      tree offset_cst = build_int_cst (sizetype, offset);
2914 	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2915 						   arg1, offset_cst);
2916 	      gimple_seq_add_stmt_without_update (&stmts, stmt);
2917 	    }
2918 	  else
2919 	    gimple_seq_add_stmt_without_update (&stmts,
2920 						gimple_build_nop ());
2921 
2922 	  gsi_replace_with_seq_vops (gsi, stmts);
2923 	  return true;
2924 	}
2925     }
2926 
2927   return false;
2928 }
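/* Editor's sketch (not part of the original source): examples of the
   memchr folds above:

     memchr ("hello", 'l', 5)  =>  the string pointer + 2
     memchr ("hello", 'z', 5)  =>  NULL

   The no-match case is folded only when the searched length is known
   not to extend past the underlying array.  */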
2929 
2930 /* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
2931    to the call.  UNLOCKED is true if this is actually a call to
2932    fputs_unlocked.  Return true if the call was simplified and false
2933    if no simplification was possible.  */
2936 
2937 static bool
2938 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2939 			   tree arg0, tree arg1,
2940 			   bool unlocked)
2941 {
2942   gimple *stmt = gsi_stmt (*gsi);
2943 
2944   /* If we're using an unlocked function, assume the other unlocked
2945      functions exist explicitly.  */
2946   tree const fn_fputc = (unlocked
2947 			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2948 			 : builtin_decl_implicit (BUILT_IN_FPUTC));
2949   tree const fn_fwrite = (unlocked
2950 			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2951 			  : builtin_decl_implicit (BUILT_IN_FWRITE));
2952 
2953   /* If the return value is used, don't do the transformation.  */
2954   if (gimple_call_lhs (stmt))
2955     return false;
2956 
2957   /* Get the length of the string passed to fputs.  If the length
2958      can't be determined, punt.  */
2959   tree len = get_maxval_strlen (arg0, SRK_STRLEN);
2960   if (!len
2961       || TREE_CODE (len) != INTEGER_CST)
2962     return false;
2963 
2964   switch (compare_tree_int (len, 1))
2965     {
2966     case -1: /* length is 0, delete the call entirely.  */
2967       replace_call_with_value (gsi, integer_zero_node);
2968       return true;
2969 
2970     case 0: /* length is 1, call fputc.  */
2971       {
2972 	const char *p = c_getstr (arg0);
2973 	if (p != NULL)
2974 	  {
2975 	    if (!fn_fputc)
2976 	      return false;
2977 
2978 	    gimple *repl = gimple_build_call (fn_fputc, 2,
2979 					     build_int_cst
2980 					     (integer_type_node, p[0]), arg1);
2981 	    replace_call_with_call_and_fold (gsi, repl);
2982 	    return true;
2983 	  }
2984       }
2985       /* FALLTHROUGH */
2986     case 1: /* length is greater than 1, call fwrite.  */
2987       {
2988 	/* If optimizing for size keep fputs.  */
2989 	if (optimize_function_for_size_p (cfun))
2990 	  return false;
2991 	/* New argument list transforming fputs(string, stream) to
2992 	   fwrite(string, 1, len, stream).  */
2993 	if (!fn_fwrite)
2994 	  return false;
2995 
2996 	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2997 					 size_one_node, len, arg1);
2998 	replace_call_with_call_and_fold (gsi, repl);
2999 	return true;
3000       }
3001     default:
3002       gcc_unreachable ();
3003     }
3004 }
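/* Editor's sketch (not part of the original source): with the result
   unused, the fold above rewrites, by known string length,

     fputs ("", f)     =>  (call removed)
     fputs ("x", f)    =>  fputc ('x', f)
     fputs ("abc", f)  =>  fwrite ("abc", 1, 3, f)

   where the fwrite form is emitted only when not optimizing for
   size.  */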
3005 
3006 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
3007    DEST, SRC, LEN, and SIZE are the arguments to the call.
3008    FCODE is the BUILT_IN_* code of the builtin.  Return true if the
3009    call was simplified and false otherwise.  */
3011 
3012 static bool
3013 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
3014 				tree dest, tree src, tree len, tree size,
3015 				enum built_in_function fcode)
3016 {
3017   gimple *stmt = gsi_stmt (*gsi);
3018   location_t loc = gimple_location (stmt);
3019   bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3020   tree fn;
3021 
3022   /* If SRC and DEST are the same (and not volatile), return DEST
3023      (resp. DEST+LEN for __mempcpy_chk).  */
3024   if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
3025     {
3026       if (fcode != BUILT_IN_MEMPCPY_CHK)
3027 	{
3028 	  replace_call_with_value (gsi, dest);
3029 	  return true;
3030 	}
3031       else
3032 	{
3033 	  gimple_seq stmts = NULL;
3034 	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
3035 	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
3036 				    TREE_TYPE (dest), dest, len);
3037 	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3038 	  replace_call_with_value (gsi, temp);
3039 	  return true;
3040 	}
3041     }
3042 
3043   tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3044   if (! integer_all_onesp (size)
3045       && !known_lower (stmt, len, size)
3046       && !known_lower (stmt, maxlen, size))
3047     {
3048       /* If neither MAXLEN nor LEN can be proved to be less than SIZE,
3049 	 at least try to optimize (void) __mempcpy_chk () into
3050 	 (void) __memcpy_chk ().  */
3051       if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
3052 	{
3053 	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
3054 	  if (!fn)
3055 	    return false;
3056 
3057 	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3058 	  replace_call_with_call_and_fold (gsi, repl);
3059 	  return true;
3060 	}
3061       return false;
3062     }
3063 
3064   fn = NULL_TREE;
3065   /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
3066      mem{cpy,pcpy,move,set} is available.  */
3067   switch (fcode)
3068     {
3069     case BUILT_IN_MEMCPY_CHK:
3070       fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
3071       break;
3072     case BUILT_IN_MEMPCPY_CHK:
3073       fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
3074       break;
3075     case BUILT_IN_MEMMOVE_CHK:
3076       fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
3077       break;
3078     case BUILT_IN_MEMSET_CHK:
3079       fn = builtin_decl_explicit (BUILT_IN_MEMSET);
3080       break;
3081     default:
3082       break;
3083     }
3084 
3085   if (!fn)
3086     return false;
3087 
3088   gimple *repl = gimple_build_call (fn, 3, dest, src, len);
3089   replace_call_with_call_and_fold (gsi, repl);
3090   return true;
3091 }
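/* Editor's sketch (not part of the original source): when the copied
   length is provably within the object size, the checked call loses
   its check, e.g.

     __memcpy_chk (dst, src, 8, 16)  =>  memcpy (dst, src, 8)

   Otherwise the call is kept for its runtime check, except that an
   unused __mempcpy_chk result is downgraded to __memcpy_chk.  */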
3092 
3093 /* Print a message in the dump file recording transformation of FROM to TO.  */
3094 
3095 static void
3096 dump_transformation (gcall *from, gcall *to)
3097 {
3098   if (dump_enabled_p ())
3099     dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, from, "simplified %T to %T\n",
3100 		     gimple_call_fn (from), gimple_call_fn (to));
3101 }
3102 
3103 /* Fold a call to the __st[rp]cpy_chk builtin.
3104    DEST, SRC, and SIZE are the arguments to the call.
3105    FCODE is the BUILT_IN_* code of the builtin.  Return true if the
3106    call was simplified and false otherwise.  */
3108 
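/* Illustrative examples (editor's sketch, not part of the original
   source):
     __strcpy_chk (d, "abc", 8)        => strcpy (d, "abc")
     (void) __stpcpy_chk (d, s, sz)    => __strcpy_chk (d, s, sz)
   the latter when the source length is not provably below SZ.  */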
3109 static bool
3110 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
3111 				tree dest,
3112 				tree src, tree size,
3113 				enum built_in_function fcode)
3114 {
3115   gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3116   location_t loc = gimple_location (stmt);
3117   bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3118   tree len, fn;
3119 
3120   /* If SRC and DEST are the same (and not volatile), return DEST.  */
3121   if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
3122     {
3123       /* Issue -Wrestrict unless the pointers are null (those do
3124 	 not point to objects and so do not indicate an overlap;
3125 	 such calls could be the result of sanitization and jump
3126 	 threading).  */
3127       if (!integer_zerop (dest)
3128 	  && !warning_suppressed_p (stmt, OPT_Wrestrict))
3129 	{
3130 	  tree func = gimple_call_fndecl (stmt);
3131 
3132 	  warning_at (loc, OPT_Wrestrict,
3133 		      "%qD source argument is the same as destination",
3134 		      func);
3135 	}
3136 
3137       replace_call_with_value (gsi, dest);
3138       return true;
3139     }
3140 
3141   tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
3142   if (! integer_all_onesp (size))
3143     {
3144       len = c_strlen (src, 1);
3145       if (!known_lower (stmt, len, size, true)
3146 	  && !known_lower (stmt, maxlen, size, true))
3147 	{
3148 	  if (fcode == BUILT_IN_STPCPY_CHK)
3149 	    {
3150 	      if (! ignore)
3151 		return false;
3152 
3153 	      /* If return value of __stpcpy_chk is ignored,
3154 		 optimize into __strcpy_chk.  */
3155 	      fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
3156 	      if (!fn)
3157 		return false;
3158 
3159 	      gimple *repl = gimple_build_call (fn, 3, dest, src, size);
3160 	      replace_call_with_call_and_fold (gsi, repl);
3161 	      return true;
3162 	    }
3163 
3164 	  if (! len || TREE_SIDE_EFFECTS (len))
3165 	    return false;
3166 
3167 	  /* If c_strlen returned something, but not provably less than size,
3168 	     transform __strcpy_chk into __memcpy_chk.  */
3169 	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
3170 	  if (!fn)
3171 	    return false;
3172 
3173 	  gimple_seq stmts = NULL;
3174 	  len = force_gimple_operand (len, &stmts, true, NULL_TREE);
3175 	  len = gimple_convert (&stmts, loc, size_type_node, len);
3176 	  len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
3177 			      build_int_cst (size_type_node, 1));
3178 	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3179 	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3180 	  replace_call_with_call_and_fold (gsi, repl);
3181 	  return true;
3182 	}
3183     }
3184 
3185   /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
3186   fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK && !ignore
3187 			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
3188   if (!fn)
3189     return false;
3190 
3191   gcall *repl = gimple_build_call (fn, 2, dest, src);
3192   dump_transformation (stmt, repl);
3193   replace_call_with_call_and_fold (gsi, repl);
3194   return true;
3195 }
3196 
3197 /* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
3198    are the arguments to the call.  If MAXLEN is not NULL, it is the maximum
3199    length passed as the third argument.  IGNORE is true if the return value
3200    can be ignored.  FCODE is the BUILT_IN_* code of the builtin.  */
3201 
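/* Illustrative examples (editor's sketch, not part of the original
   source):
     __strncpy_chk (d, s, n, sz), n provably within sz
					 => strncpy (d, s, n)
     (void) __stpncpy_chk (d, s, n, sz)  => __strncpy_chk (d, s, n, sz)
   the latter when n cannot be shown to fit in sz.  */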
3202 static bool
3203 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
3204 				 tree dest, tree src,
3205 				 tree len, tree size,
3206 				 enum built_in_function fcode)
3207 {
3208   gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3209   bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3210   tree fn;
3211 
3212   tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3213   if (! integer_all_onesp (size)
3214       && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
3215     {
3216       if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
3217 	{
3218 	  /* If return value of __stpncpy_chk is ignored,
3219 	     optimize into __strncpy_chk.  */
3220 	  fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
3221 	  if (fn)
3222 	    {
3223 	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3224 	      replace_call_with_call_and_fold (gsi, repl);
3225 	      return true;
3226 	    }
3227 	}
3228       return false;
3229     }
3230 
3231   /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
3232   fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK && !ignore
3233 			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
3234   if (!fn)
3235     return false;
3236 
3237   gcall *repl = gimple_build_call (fn, 3, dest, src, len);
3238   dump_transformation (stmt, repl);
3239   replace_call_with_call_and_fold (gsi, repl);
3240   return true;
3241 }
3242 
3243 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
3244    Return false if no simplification can be made.  */
3245 
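/* Illustrative example (editor's sketch, not part of the original
   source): with a constant-length source,
     p = stpcpy (d, "abc");
   becomes
     memcpy (d, "abc", 4);  p = d + 3;
   while a call with an unused result is turned into plain strcpy.  */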
3246 static bool
3247 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
3248 {
3249   gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3250   location_t loc = gimple_location (stmt);
3251   tree dest = gimple_call_arg (stmt, 0);
3252   tree src = gimple_call_arg (stmt, 1);
3253   tree fn, lenp1;
3254 
3255   /* If the result is unused, replace stpcpy with strcpy.  */
3256   if (gimple_call_lhs (stmt) == NULL_TREE)
3257     {
3258       tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3259       if (!fn)
3260 	return false;
3261       gimple_call_set_fndecl (stmt, fn);
3262       fold_stmt (gsi);
3263       return true;
3264     }
3265 
3266   /* Set to non-null if SRC refers to an unterminated array.  */
3267   c_strlen_data data = { };
3268   /* The size of the unterminated array if SRC refers to one.  */
3269   tree size;
3270   /* True if the size is exact/constant, false if it's the lower bound
3271      of a range.  */
3272   bool exact;
3273   tree len = c_strlen (src, 1, &data, 1);
3274   if (!len
3275       || TREE_CODE (len) != INTEGER_CST)
3276     {
3277       data.decl = unterminated_array (src, &size, &exact);
3278       if (!data.decl)
3279 	return false;
3280     }
3281 
3282   if (data.decl)
3283     {
3284       /* Avoid folding calls with unterminated arrays.  */
3285       if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
3286 	warn_string_no_nul (loc, stmt, "stpcpy", src, data.decl, size,
3287 			    exact);
3288       suppress_warning (stmt, OPT_Wstringop_overread);
3289       return false;
3290     }
3291 
3292   if (optimize_function_for_size_p (cfun)
3293       /* If length is zero it's small enough.  */
3294       && !integer_zerop (len))
3295     return false;
3296 
3297   /* If the source has a known length replace stpcpy with memcpy.  */
3298   fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3299   if (!fn)
3300     return false;
3301 
3302   gimple_seq stmts = NULL;
3303   tree tem = gimple_convert (&stmts, loc, size_type_node, len);
3304   lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
3305 			tem, build_int_cst (size_type_node, 1));
3306   gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3307   gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
3308   gimple_move_vops (repl, stmt);
3309   gsi_insert_before (gsi, repl, GSI_SAME_STMT);
3310   /* Replace the result with dest + len.  */
3311   stmts = NULL;
3312   tem = gimple_convert (&stmts, loc, sizetype, len);
3313   gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3314   gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
3315 				      POINTER_PLUS_EXPR, dest, tem);
3316   gsi_replace (gsi, ret, false);
3317   /* Finally fold the memcpy call.  */
3318   gimple_stmt_iterator gsi2 = *gsi;
3319   gsi_prev (&gsi2);
3320   fold_stmt (&gsi2);
3321   return true;
3322 }
3323 
3324 /* Fold a call to the __{,v}snprintf_chk builtin pointed to by GSI.
3325    Return false if a normal call should be emitted rather than
3326    expanding the function inline.  FCODE is either BUILT_IN_SNPRINTF_CHK
3327    or BUILT_IN_VSNPRINTF_CHK.  The length argument is checked against
3328    the destination size before the call is simplified.  */
3329 
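/* Illustrative example (editor's sketch, not part of the original
   source): when the length argument provably fits the destination,
     __snprintf_chk (d, n, 0, sz, "%s", s)   with n within sz
   is rewritten in place to
     snprintf (d, n, "%s", s)
   dropping the flag and size arguments.  */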
3330 static bool
3331 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
3332 				  enum built_in_function fcode)
3333 {
3334   gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3335   tree dest, size, len, fn, fmt, flag;
3336   const char *fmt_str;
3337 
3338   /* Verify the required arguments in the original call.  */
3339   if (gimple_call_num_args (stmt) < 5)
3340     return false;
3341 
3342   dest = gimple_call_arg (stmt, 0);
3343   len = gimple_call_arg (stmt, 1);
3344   flag = gimple_call_arg (stmt, 2);
3345   size = gimple_call_arg (stmt, 3);
3346   fmt = gimple_call_arg (stmt, 4);
3347 
3348   tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3349   if (! integer_all_onesp (size)
3350       && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
3351     return false;
3352 
3353   if (!init_target_chars ())
3354     return false;
3355 
3356   /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3357      or if format doesn't contain % chars or is "%s".  */
3358   if (! integer_zerop (flag))
3359     {
3360       fmt_str = c_getstr (fmt);
3361       if (fmt_str == NULL)
3362 	return false;
3363       if (strchr (fmt_str, target_percent) != NULL
3364 	  && strcmp (fmt_str, target_percent_s))
3365 	return false;
3366     }
3367 
3368   /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3369      available.  */
3370   fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
3371 			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
3372   if (!fn)
3373     return false;
3374 
3375   /* Replace the called function and the first 5 arguments with 3,
3376      retaining the trailing varargs.  */
3377   gimple_call_set_fndecl (stmt, fn);
3378   gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3379   gimple_call_set_arg (stmt, 0, dest);
3380   gimple_call_set_arg (stmt, 1, len);
3381   gimple_call_set_arg (stmt, 2, fmt);
3382   for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
3383     gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3384   gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3385   fold_stmt (gsi);
3386   return true;
3387 }
3388 
3389 /* Fold a call to the __{,v}sprintf_chk builtin pointed to by GSI.
3390    Return false if a normal call should be emitted rather than
3391    expanding the function inline.  FCODE is either BUILT_IN_SPRINTF_CHK
3392    or BUILT_IN_VSPRINTF_CHK.  */
3393 
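/* Illustrative example (editor's sketch, not part of the original
   source): with a format of known length that fits the destination,
     __sprintf_chk (d, 0, 16, "abc")
   is rewritten in place to
     sprintf (d, "abc")
   dropping the flag and size arguments.  */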
3394 static bool
3395 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
3396 				 enum built_in_function fcode)
3397 {
3398   gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3399   tree dest, size, len, fn, fmt, flag;
3400   const char *fmt_str;
3401   unsigned nargs = gimple_call_num_args (stmt);
3402 
3403   /* Verify the required arguments in the original call.  */
3404   if (nargs < 4)
3405     return false;
3406   dest = gimple_call_arg (stmt, 0);
3407   flag = gimple_call_arg (stmt, 1);
3408   size = gimple_call_arg (stmt, 2);
3409   fmt = gimple_call_arg (stmt, 3);
3410 
3411   len = NULL_TREE;
3412 
3413   if (!init_target_chars ())
3414     return false;
3415 
3416   /* Check whether the format is a literal string constant.  */
3417   fmt_str = c_getstr (fmt);
3418   if (fmt_str != NULL)
3419     {
3420       /* If the format doesn't contain % args or %%, we know the size.  */
3421       if (strchr (fmt_str, target_percent) == 0)
3422 	{
3423 	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3424 	    len = build_int_cstu (size_type_node, strlen (fmt_str));
3425 	}
3426       /* If the format is "%s" and first ... argument is a string literal,
3427 	 we know the size too.  */
3428       else if (fcode == BUILT_IN_SPRINTF_CHK
3429 	       && strcmp (fmt_str, target_percent_s) == 0)
3430 	{
3431 	  tree arg;
3432 
3433 	  if (nargs == 5)
3434 	    {
3435 	      arg = gimple_call_arg (stmt, 4);
3436 	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
3437 		len = c_strlen (arg, 1);
3438 	    }
3439 	}
3440     }
3441 
3442   if (! integer_all_onesp (size) && !known_lower (stmt, len, size, true))
3443     return false;
3444 
3445   /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3446      or if format doesn't contain % chars or is "%s".  */
3447   if (! integer_zerop (flag))
3448     {
3449       if (fmt_str == NULL)
3450 	return false;
3451       if (strchr (fmt_str, target_percent) != NULL
3452 	  && strcmp (fmt_str, target_percent_s))
3453 	return false;
3454     }
3455 
3456   /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
3457   fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3458 			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3459   if (!fn)
3460     return false;
3461 
3462   /* Replace the called function and the first 4 arguments with 2,
3463      retaining the trailing varargs.  */
3464   gimple_call_set_fndecl (stmt, fn);
3465   gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3466   gimple_call_set_arg (stmt, 0, dest);
3467   gimple_call_set_arg (stmt, 1, fmt);
3468   for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3469     gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3470   gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3471   fold_stmt (gsi);
3472   return true;
3473 }
3474 
3475 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3476    ORIG may be null if this is a 2-argument call.  We don't attempt to
3477    simplify calls with more than 3 arguments.
3478 
3479    Return true if simplification was possible, otherwise false.  */
3480 
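/* Illustrative examples (editor's sketch, not part of the original
   source):
     sprintf (buf, "hello")   => strcpy (buf, "hello"); result 5 if used
     sprintf (buf, "%s", s)   => strcpy (buf, s); result strlen (s)
   the latter only when the length of S is known if the result is used.  */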
3481 bool
3482 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
3483 {
3484   gimple *stmt = gsi_stmt (*gsi);
3485 
3486   /* Verify the required arguments in the original call.  We deal with two
3487      types of sprintf() calls: 'sprintf (str, fmt)' and
3488      'sprintf (dest, "%s", orig)'.  */
3489   if (gimple_call_num_args (stmt) > 3)
3490     return false;
3491 
3492   tree orig = NULL_TREE;
3493   if (gimple_call_num_args (stmt) == 3)
3494     orig = gimple_call_arg (stmt, 2);
3495 
3496   /* Check whether the format is a literal string constant.  */
3497   tree fmt = gimple_call_arg (stmt, 1);
3498   const char *fmt_str = c_getstr (fmt);
3499   if (fmt_str == NULL)
3500     return false;
3501 
3502   tree dest = gimple_call_arg (stmt, 0);
3503 
3504   if (!init_target_chars ())
3505     return false;
3506 
3507   tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3508   if (!fn)
3509     return false;
3510 
3511   /* If the format doesn't contain % args or %%, use strcpy.  */
3512   if (strchr (fmt_str, target_percent) == NULL)
3513     {
3514       /* Don't optimize sprintf (buf, "abc", ptr++).  */
3515       if (orig)
3516 	return false;
3517 
3518       /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3519 	 'format' is known to contain no % formats.  */
3520       gimple_seq stmts = NULL;
3521       gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3522 
3523       /* Propagate the NO_WARNING bit to avoid issuing the same
3524 	 warning more than once.  */
3525       copy_warning (repl, stmt);
3526 
3527       gimple_seq_add_stmt_without_update (&stmts, repl);
3528       if (tree lhs = gimple_call_lhs (stmt))
3529 	{
3530 	  repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
3531 							  strlen (fmt_str)));
3532 	  gimple_seq_add_stmt_without_update (&stmts, repl);
3533 	  gsi_replace_with_seq_vops (gsi, stmts);
3534 	  /* gsi now points at the assignment to the lhs, get a
3535 	     stmt iterator to the strcpy call.
3536 	     ???  We can't use gsi_for_stmt as that doesn't work when the
3537 	     CFG isn't built yet.  */
3538 	  gimple_stmt_iterator gsi2 = *gsi;
3539 	  gsi_prev (&gsi2);
3540 	  fold_stmt (&gsi2);
3541 	}
3542       else
3543 	{
3544 	  gsi_replace_with_seq_vops (gsi, stmts);
3545 	  fold_stmt (gsi);
3546 	}
3547       return true;
3548     }
3549 
3550   /* If the format is "%s", use strcpy, setting the result (if used) to the source length.  */
3551   else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3552     {
3553       /* Don't crash on sprintf (str1, "%s").  */
3554       if (!orig)
3555 	return false;
3556 
3557       /* Don't fold calls with source arguments of invalid (nonpointer)
3558 	 types.  */
3559       if (!POINTER_TYPE_P (TREE_TYPE (orig)))
3560 	return false;
3561 
3562       tree orig_len = NULL_TREE;
3563       if (gimple_call_lhs (stmt))
3564 	{
3565 	  orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3566 	  if (!orig_len)
3567 	    return false;
3568 	}
3569 
3570       /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
3571       gimple_seq stmts = NULL;
3572       gimple *repl = gimple_build_call (fn, 2, dest, orig);
3573 
3574       /* Propagate the NO_WARNING bit to avoid issuing the same
3575 	 warning more than once.  */
3576       copy_warning (repl, stmt);
3577 
3578       gimple_seq_add_stmt_without_update (&stmts, repl);
3579       if (tree lhs = gimple_call_lhs (stmt))
3580 	{
3581 	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
3582 					  TREE_TYPE (orig_len)))
3583 	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3584 	  repl = gimple_build_assign (lhs, orig_len);
3585 	  gimple_seq_add_stmt_without_update (&stmts, repl);
3586 	  gsi_replace_with_seq_vops (gsi, stmts);
3587 	  /* gsi now points at the assignment to the lhs, get a
3588 	     stmt iterator to the strcpy call.
3589 	     ???  We can't use gsi_for_stmt as that doesn't work when the
3590 	     CFG isn't built yet.  */
3591 	  gimple_stmt_iterator gsi2 = *gsi;
3592 	  gsi_prev (&gsi2);
3593 	  fold_stmt (&gsi2);
3594 	}
3595       else
3596 	{
3597 	  gsi_replace_with_seq_vops (gsi, stmts);
3598 	  fold_stmt (gsi);
3599 	}
3600       return true;
3601     }
3602   return false;
3603 }
3604 
3605 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3606    FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
3607    attempt to simplify calls with more than 4 arguments.
3608 
3609    Return true if simplification was possible, otherwise false.  */
3610 
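/* Illustrative examples (editor's sketch, not part of the original
   source): when the string provably fits the destination size,
     snprintf (buf, 8, "hello")   => strcpy (buf, "hello"); result 5
     snprintf (buf, 8, "%s", s)   => strcpy (buf, s)  if strlen (s) < 8
   otherwise the call is left alone.  */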
3611 bool
3612 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3613 {
3614   gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3615   tree dest = gimple_call_arg (stmt, 0);
3616   tree destsize = gimple_call_arg (stmt, 1);
3617   tree fmt = gimple_call_arg (stmt, 2);
3618   tree orig = NULL_TREE;
3619   const char *fmt_str = NULL;
3620 
3621   if (gimple_call_num_args (stmt) > 4)
3622     return false;
3623 
3624   if (gimple_call_num_args (stmt) == 4)
3625     orig = gimple_call_arg (stmt, 3);
3626 
3627   /* Check whether the format is a literal string constant.  */
3628   fmt_str = c_getstr (fmt);
3629   if (fmt_str == NULL)
3630     return false;
3631 
3632   if (!init_target_chars ())
3633     return false;
3634 
3635   /* If the format doesn't contain % args or %%, use strcpy.  */
3636   if (strchr (fmt_str, target_percent) == NULL)
3637     {
3638       tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3639       if (!fn)
3640 	return false;
3641 
3642       /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
3643       if (orig)
3644 	return false;
3645 
3646       tree len = build_int_cstu (TREE_TYPE (destsize), strlen (fmt_str));
3647 
3648       /* We could expand this as
3649 	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3650 	 or to
3651 	 memcpy (str, fmt_with_nul_at_cstm1, cst);
3652 	 but in the former case that might increase code size
3653 	 and in the latter case grow .rodata section too much.
3654 	 So punt for now.  */
3655       if (!known_lower (stmt, len, destsize, true))
3656 	return false;
3657 
3658       gimple_seq stmts = NULL;
3659       gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3660       gimple_seq_add_stmt_without_update (&stmts, repl);
3661       if (tree lhs = gimple_call_lhs (stmt))
3662 	{
3663 	  repl = gimple_build_assign (lhs,
3664 				      fold_convert (TREE_TYPE (lhs), len));
3665 	  gimple_seq_add_stmt_without_update (&stmts, repl);
3666 	  gsi_replace_with_seq_vops (gsi, stmts);
3667 	  /* gsi now points at the assignment to the lhs, get a
3668 	     stmt iterator to the strcpy call.
3669 	     ???  We can't use gsi_for_stmt as that doesn't work when the
3670 	     CFG isn't built yet.  */
3671 	  gimple_stmt_iterator gsi2 = *gsi;
3672 	  gsi_prev (&gsi2);
3673 	  fold_stmt (&gsi2);
3674 	}
3675       else
3676 	{
3677 	  gsi_replace_with_seq_vops (gsi, stmts);
3678 	  fold_stmt (gsi);
3679 	}
3680       return true;
3681     }
3682 
3683   /* If the format is "%s", use strcpy, setting the result (if used) to the source length.  */
3684   else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3685     {
3686       tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3687       if (!fn)
3688 	return false;
3689 
3690       /* Don't crash on snprintf (str1, cst, "%s").  */
3691       if (!orig)
3692 	return false;
3693 
3694       tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3695 
3696       /* We could expand this as
3697 	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3698 	 or to
3699 	 memcpy (str1, str2_with_nul_at_cstm1, cst);
3700 	 but in the former case that might increase code size
3701 	 and in the latter case grow .rodata section too much.
3702 	 So punt for now.  */
3703       if (!known_lower (stmt, orig_len, destsize, true))
3704 	return false;
3705 
3706       /* Convert snprintf (str1, cst, "%s", str2) into
3707 	 strcpy (str1, str2) if strlen (str2) < cst.  */
3708       gimple_seq stmts = NULL;
3709       gimple *repl = gimple_build_call (fn, 2, dest, orig);
3710       gimple_seq_add_stmt_without_update (&stmts, repl);
3711       if (tree lhs = gimple_call_lhs (stmt))
3712 	{
3713 	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
3714 					  TREE_TYPE (orig_len)))
3715 	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3716 	  repl = gimple_build_assign (lhs, orig_len);
3717 	  gimple_seq_add_stmt_without_update (&stmts, repl);
3718 	  gsi_replace_with_seq_vops (gsi, stmts);
3719 	  /* gsi now points at the assignment to the lhs, get a
3720 	     stmt iterator to the strcpy call.
3721 	     ???  We can't use gsi_for_stmt as that doesn't work when the
3722 	     CFG isn't built yet.  */
3723 	  gimple_stmt_iterator gsi2 = *gsi;
3724 	  gsi_prev (&gsi2);
3725 	  fold_stmt (&gsi2);
3726 	}
3727       else
3728 	{
3729 	  gsi_replace_with_seq_vops (gsi, stmts);
3730 	  fold_stmt (gsi);
3731 	}
3732       return true;
3733     }
3734   return false;
3735 }
3736 
3737 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
3738    FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
3739    more than 3 arguments, and ARG may be null in the 2-argument case.
3740 
3741    Return false if no simplification was possible, otherwise return
3742    true after replacing the call.  FCODE is the BUILT_IN_*
3743    code of the function to be simplified.  */
3744 
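/* Illustrative examples (editor's sketch, not part of the original
   source), valid only when the return value is unused:
     fprintf (fp, "hello")    => fputs ("hello", fp)
     fprintf (fp, "%s", s)    => fputs (s, fp)
     fprintf (fp, "%c", c)    => fputc (c, fp)  */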
3745 static bool
3746 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3747 			     tree fp, tree fmt, tree arg,
3748 			     enum built_in_function fcode)
3749 {
3750   gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3751   tree fn_fputc, fn_fputs;
3752   const char *fmt_str = NULL;
3753 
3754   /* If the return value is used, don't do the transformation.  */
3755   if (gimple_call_lhs (stmt) != NULL_TREE)
3756     return false;
3757 
3758   /* Check whether the format is a literal string constant.  */
3759   fmt_str = c_getstr (fmt);
3760   if (fmt_str == NULL)
3761     return false;
3762 
3763   if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3764     {
3765       /* If we're using an unlocked function, assume the other
3766 	 unlocked functions exist explicitly.  */
3767       fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3768       fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3769     }
3770   else
3771     {
3772       fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3773       fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3774     }
3775 
3776   if (!init_target_chars ())
3777     return false;
3778 
3779   /* If the format doesn't contain % args or %%, use strcpy.  */
3780   if (strchr (fmt_str, target_percent) == NULL)
3781     {
3782       if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3783 	  && arg)
3784 	return false;
3785 
3786       /* If the format specifier was "", fprintf does nothing.  */
3787       if (fmt_str[0] == '\0')
3788 	{
3789 	  replace_call_with_value (gsi, NULL_TREE);
3790 	  return true;
3791 	}
3792 
3793       /* When "string" doesn't contain %, replace all cases of
3794 	 fprintf (fp, string) with fputs (string, fp).  The fputs
3795 	 builtin will take care of special cases like length == 1.  */
3796       if (fn_fputs)
3797 	{
3798 	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3799 	  replace_call_with_call_and_fold (gsi, repl);
3800 	  return true;
3801 	}
3802     }
3803 
3804   /* The other optimizations can be done only on the non-va_list variants.  */
3805   else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3806     return false;
3807 
3808   /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
3809   else if (strcmp (fmt_str, target_percent_s) == 0)
3810     {
3811       if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3812 	return false;
3813       if (fn_fputs)
3814 	{
3815 	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3816 	  replace_call_with_call_and_fold (gsi, repl);
3817 	  return true;
3818 	}
3819     }
3820 
3821   /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
3822   else if (strcmp (fmt_str, target_percent_c) == 0)
3823     {
3824       if (!arg
3825 	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3826 	return false;
3827       if (fn_fputc)
3828 	{
3829 	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3830 	  replace_call_with_call_and_fold (gsi, repl);
3831 	  return true;
3832 	}
3833     }
3834 
3835   return false;
3836 }
3837 
3838 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3839    FMT and ARG are the arguments to the call; we don't fold cases with
3840    more than 2 arguments, and ARG may be null if this is a 1-argument case.
3841 
3842    Return false if no simplification was possible, otherwise return
3843    true after replacing the call.  FCODE is the BUILT_IN_*
3844    code of the function to be simplified.  */
3845 
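/* Illustrative examples (editor's sketch, not part of the original
   source), valid only when the return value is unused:
     printf ("hello\n")    => puts ("hello")
     printf ("x")          => putchar ('x')
     printf ("%s\n", s)    => puts (s)
     printf ("%c", c)      => putchar (c)  */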
3846 static bool
3847 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3848 			    tree arg, enum built_in_function fcode)
3849 {
3850   gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3851   tree fn_putchar, fn_puts, newarg;
3852   const char *fmt_str = NULL;
3853 
3854   /* If the return value is used, don't do the transformation.  */
3855   if (gimple_call_lhs (stmt) != NULL_TREE)
3856     return false;
3857 
3858   /* Check whether the format is a literal string constant.  */
3859   fmt_str = c_getstr (fmt);
3860   if (fmt_str == NULL)
3861     return false;
3862 
3863   if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3864     {
3865       /* If we're using an unlocked function, assume the other
3866 	 unlocked functions exist explicitly.  */
3867       fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3868       fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3869     }
3870   else
3871     {
3872       fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3873       fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3874     }
3875 
3876   if (!init_target_chars ())
3877     return false;
3878 
3879   if (strcmp (fmt_str, target_percent_s) == 0
3880       || strchr (fmt_str, target_percent) == NULL)
3881     {
3882       const char *str;
3883 
3884       if (strcmp (fmt_str, target_percent_s) == 0)
3885 	{
3886 	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3887 	    return false;
3888 
3889 	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3890 	    return false;
3891 
3892 	  str = c_getstr (arg);
3893 	  if (str == NULL)
3894 	    return false;
3895 	}
3896       else
3897 	{
3898 	  /* The format specifier doesn't contain any '%' characters.  */
3899 	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3900 	      && arg)
3901 	    return false;
3902 	  str = fmt_str;
3903 	}
3904 
3905       /* If the string was "", printf does nothing.  */
3906       if (str[0] == '\0')
3907 	{
3908 	  replace_call_with_value (gsi, NULL_TREE);
3909 	  return true;
3910 	}
3911 
3912       /* If the string has length of 1, call putchar.  */
3913       if (str[1] == '\0')
3914 	{
3915 	  /* Given printf ("c"), where c is any one character, convert
3916 	     "c"[0] to an int and pass that to the replacement
3917 	     function.  */
3918 	  newarg = build_int_cst (integer_type_node, str[0]);
3919 	  if (fn_putchar)
3920 	    {
3921 	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3922 	      replace_call_with_call_and_fold (gsi, repl);
3923 	      return true;
3924 	    }
3925 	}
3926       else
3927 	{
3928 	  /* If the string was "string\n", call puts("string").  */
3929 	  size_t len = strlen (str);
3930 	  if ((unsigned char)str[len - 1] == target_newline
3931 	      && (size_t) (int) len == len
3932 	      && (int) len > 0)
3933 	    {
3934 	      char *newstr;
3935 
3936 	      /* Create a NUL-terminated string that's one char shorter
3937 		 than the original, stripping off the trailing '\n'.  */
3938 	      newstr = xstrdup (str);
3939 	      newstr[len - 1] = '\0';
3940 	      newarg = build_string_literal (len, newstr);
3941 	      free (newstr);
3942 	      if (fn_puts)
3943 		{
3944 		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3945 		  replace_call_with_call_and_fold (gsi, repl);
3946 		  return true;
3947 		}
3948 	    }
3949 	  else
3950 	    /* We'd like to arrange to call fputs(string,stdout) here,
3951 	       but we need stdout and don't have a way to get it yet.  */
3952 	    return false;
3953 	}
3954     }
3955 
3956   /* The other optimizations can be done only on the non-va_list variants.  */
3957   else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3958     return false;
3959 
3960   /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
3961   else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3962     {
3963       if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3964 	return false;
3965       if (fn_puts)
3966 	{
3967 	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
3968 	  replace_call_with_call_and_fold (gsi, repl);
3969 	  return true;
3970 	}
3971     }
3972 
3973   /* If the format specifier was "%c", call __builtin_putchar(arg).  */
3974   else if (strcmp (fmt_str, target_percent_c) == 0)
3975     {
3976       if (!arg || ! useless_type_conversion_p (integer_type_node,
3977 					       TREE_TYPE (arg)))
3978 	return false;
3979       if (fn_putchar)
3980 	{
3981 	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3982 	  replace_call_with_call_and_fold (gsi, repl);
3983 	  return true;
3984 	}
3985     }
3986 
3987   return false;
3988 }
3989 
3990 
3991 
3992 /* Fold a call to __builtin_strlen with a known or bounded argument length.  */
3993 
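/* Illustrative example (editor's sketch, not part of the original
   source): strlen ("abc") folds to the constant 3; when only a
   [MIN, MAX] range can be computed, the call is kept but the range
   is recorded on its result.  */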
3994 static bool
3995 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3996 {
3997   gimple *stmt = gsi_stmt (*gsi);
3998   tree arg = gimple_call_arg (stmt, 0);
3999 
4000   wide_int minlen;
4001   wide_int maxlen;
4002 
4003   c_strlen_data lendata = { };
4004   if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
4005       && !lendata.decl
4006       && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
4007       && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
4008     {
4009       /* The range of lengths refers to either a single constant
4010 	 string or to the longest and shortest constant string
4011 	 referenced by the argument of the strlen() call, or to
4012 	 the strings that can possibly be stored in the arrays
4013 	 the argument refers to.  */
4014       minlen = wi::to_wide (lendata.minlen);
4015       maxlen = wi::to_wide (lendata.maxlen);
4016     }
4017   else
4018     {
4019       unsigned prec = TYPE_PRECISION (sizetype);
4020 
4021       minlen = wi::shwi (0, prec);
4022       maxlen = wi::to_wide (max_object_size (), prec) - 2;
4023     }
4024 
4025   if (minlen == maxlen)
4026     {
4027       /* Fold the strlen call to a constant.  */
4028       tree type = TREE_TYPE (lendata.minlen);
4029       tree len = force_gimple_operand_gsi (gsi,
4030 					   wide_int_to_tree (type, minlen),
4031 					   true, NULL, true, GSI_SAME_STMT);
4032       replace_call_with_value (gsi, len);
4033       return true;
4034     }
4035 
4036   /* Set the strlen() range to [0, MAXLEN].  */
4037   if (tree lhs = gimple_call_lhs (stmt))
4038     set_strlen_range (lhs, minlen, maxlen);
4039 
4040   return false;
4041 }
4042 
4043 /* Fold a call to __builtin_acc_on_device.  */
4044 
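/* Illustrative sketch (editor's note, not part of the original
   source): on the host compiler the call is expanded to
     host_eq = arg0 == GOMP_DEVICE_HOST;
     dev_eq  = arg0 == GOMP_DEVICE_NONE;
     result  = host_eq | dev_eq;
   with different constants under ACCEL_COMPILER.  */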
4045 static bool
4046 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
4047 {
4048   /* Defer folding until we know which compiler we're in.  */
4049   if (symtab->state != EXPANSION)
4050     return false;
4051 
4052   unsigned val_host = GOMP_DEVICE_HOST;
4053   unsigned val_dev = GOMP_DEVICE_NONE;
4054 
4055 #ifdef ACCEL_COMPILER
4056   val_host = GOMP_DEVICE_NOT_HOST;
4057   val_dev = ACCEL_COMPILER_acc_device;
4058 #endif
4059 
4060   location_t loc = gimple_location (gsi_stmt (*gsi));
4061 
4062   tree host_eq = make_ssa_name (boolean_type_node);
4063   gimple *host_ass = gimple_build_assign
4064     (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
4065   gimple_set_location (host_ass, loc);
4066   gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
4067 
4068   tree dev_eq = make_ssa_name (boolean_type_node);
4069   gimple *dev_ass = gimple_build_assign
4070     (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
4071   gimple_set_location (dev_ass, loc);
4072   gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
4073 
4074   tree result = make_ssa_name (boolean_type_node);
4075   gimple *result_ass = gimple_build_assign
4076     (result, BIT_IOR_EXPR, host_eq, dev_eq);
4077   gimple_set_location (result_ass, loc);
4078   gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
4079 
4080   replace_call_with_value (gsi, result);
4081 
4082   return true;
4083 }
4084 
4085 /* Fold realloc (0, n) -> malloc (n).  */
4086 
4087 static bool
4088 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
4089 {
4090   gimple *stmt = gsi_stmt (*gsi);
4091   tree arg = gimple_call_arg (stmt, 0);
4092   tree size = gimple_call_arg (stmt, 1);
4093 
4094   if (operand_equal_p (arg, null_pointer_node, 0))
4095     {
4096       tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
4097       if (fn_malloc)
4098 	{
4099 	  gcall *repl = gimple_build_call (fn_malloc, 1, size);
4100 	  replace_call_with_call_and_fold (gsi, repl);
4101 	  return true;
4102 	}
4103     }
4104   return false;
4105 }
4106 
4107 /* Number of bytes into which any type other than an aggregate or
4108    vector type should fit.  */
4109 static constexpr size_t clear_padding_unit
4110   = MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT;
4111 /* Buffer size on which __builtin_clear_padding folding code works.  */
4112 static const size_t clear_padding_buf_size = 32 * clear_padding_unit;
4113 
4114 /* Data passed through __builtin_clear_padding folding.  */
4115 struct clear_padding_struct {
4116   location_t loc;
4117   /* False during __builtin_clear_padding folding, true during
4118      clear_type_padding_in_mask.  In that case, instead of clearing the
4119      non-padding bits in the union_ptr array, clear the padding bits there.  */
4120   bool clear_in_mask;
4121   tree base;
4122   tree alias_type;
4123   gimple_stmt_iterator *gsi;
4124   /* Alignment of buf->base + 0.  */
4125   unsigned align;
4126   /* Offset from buf->base.  Should be always a multiple of UNITS_PER_WORD.  */
4127   HOST_WIDE_INT off;
4128   /* Number of padding bytes before buf->off that don't have padding clear
4129      code emitted yet.  */
4130   HOST_WIDE_INT padding_bytes;
4131   /* The size of the whole object.  Never emit code to touch
4132      buf->base + buf->sz or following bytes.  */
4133   HOST_WIDE_INT sz;
4134   /* Number of bytes recorded in buf->buf.  */
4135   size_t size;
4136   /* When inside a union, instead of emitting code we AND the bits
4137      into the union_ptr array.  */
4138   unsigned char *union_ptr;
4139   /* Set bits mean padding bits that need to be cleared by the builtin.  */
4140   unsigned char buf[clear_padding_buf_size + clear_padding_unit];
4141 };
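/* Illustrative example (editor's sketch, not part of the original
   source): for struct { char c; int i; } on a typical target with
   4-byte int alignment, bytes 1 through 3 are padding, so
   buf->buf[1..3] would be set to 0xff and later flushed as a
   3-byte clearing store.  */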
4142 
4143 /* Emit code to clear the padding requested in BUF->buf; set bits
4144    there stand for padding that should be cleared.  FULL is true
4145    if everything from the buffer should be flushed, otherwise
4146    it can leave up to 2 * clear_padding_unit bytes for further
4147    processing.  */
4148 
4149 static void
4150 clear_padding_flush (clear_padding_struct *buf, bool full)
4151 {
4152   gcc_assert ((clear_padding_unit % UNITS_PER_WORD) == 0);
4153   if (!full && buf->size < 2 * clear_padding_unit)
4154     return;
4155   gcc_assert ((buf->off % UNITS_PER_WORD) == 0);
4156   size_t end = buf->size;
4157   if (!full)
4158     end = ((end - clear_padding_unit - 1) / clear_padding_unit
4159 	   * clear_padding_unit);
4160   size_t padding_bytes = buf->padding_bytes;
4161   if (buf->union_ptr)
4162     {
4163       if (buf->clear_in_mask)
4164 	{
4165 	  /* During clear_type_padding_in_mask, clear the padding
4166 	     bits set in buf->buf in the buf->union_ptr mask.  */
4167 	  for (size_t i = 0; i < end; i++)
4168 	    {
4169 	      if (buf->buf[i] == (unsigned char) ~0)
4170 		padding_bytes++;
4171 	      else
4172 		{
4173 		  memset (&buf->union_ptr[buf->off + i - padding_bytes],
4174 			  0, padding_bytes);
4175 		  padding_bytes = 0;
4176 		  buf->union_ptr[buf->off + i] &= ~buf->buf[i];
4177 		}
4178 	    }
4179 	  if (full)
4180 	    {
4181 	      memset (&buf->union_ptr[buf->off + end - padding_bytes],
4182 		      0, padding_bytes);
4183 	      buf->off = 0;
4184 	      buf->size = 0;
4185 	      buf->padding_bytes = 0;
4186 	    }
4187 	  else
4188 	    {
4189 	      memmove (buf->buf, buf->buf + end, buf->size - end);
4190 	      buf->off += end;
4191 	      buf->size -= end;
4192 	      buf->padding_bytes = padding_bytes;
4193 	    }
4194 	  return;
4195 	}
4196       /* Inside of a union, instead of emitting any code, clear all
4197 	 bits in the union_ptr buffer that are clear in buf.  Whole
4198 	 padding bytes don't clear anything.  */
4199       for (size_t i = 0; i < end; i++)
4200 	{
4201 	  if (buf->buf[i] == (unsigned char) ~0)
4202 	    padding_bytes++;
4203 	  else
4204 	    {
4205 	      padding_bytes = 0;
4206 	      buf->union_ptr[buf->off + i] &= buf->buf[i];
4207 	    }
4208 	}
4209       if (full)
4210 	{
4211 	  buf->off = 0;
4212 	  buf->size = 0;
4213 	  buf->padding_bytes = 0;
4214 	}
4215       else
4216 	{
4217 	  memmove (buf->buf, buf->buf + end, buf->size - end);
4218 	  buf->off += end;
4219 	  buf->size -= end;
4220 	  buf->padding_bytes = padding_bytes;
4221 	}
4222       return;
4223     }
4224   size_t wordsize = UNITS_PER_WORD;
4225   for (size_t i = 0; i < end; i += wordsize)
4226     {
4227       size_t nonzero_first = wordsize;
4228       size_t nonzero_last = 0;
4229       size_t zero_first = wordsize;
4230       size_t zero_last = 0;
4231       bool all_ones = true, bytes_only = true;
4232       if ((unsigned HOST_WIDE_INT) (buf->off + i + wordsize)
4233 	  > (unsigned HOST_WIDE_INT) buf->sz)
4234 	{
4235 	  gcc_assert (wordsize > 1);
4236 	  wordsize /= 2;
4237 	  i -= wordsize;
4238 	  continue;
4239 	}
4240       for (size_t j = i; j < i + wordsize && j < end; j++)
4241 	{
4242 	  if (buf->buf[j])
4243 	    {
4244 	      if (nonzero_first == wordsize)
4245 		{
4246 		  nonzero_first = j - i;
4247 		  nonzero_last = j - i;
4248 		}
4249 	      if (nonzero_last != j - i)
4250 		all_ones = false;
4251 	      nonzero_last = j + 1 - i;
4252 	    }
4253 	  else
4254 	    {
4255 	      if (zero_first == wordsize)
4256 		zero_first = j - i;
4257 	      zero_last = j + 1 - i;
4258 	    }
4259 	  if (buf->buf[j] != 0 && buf->buf[j] != (unsigned char) ~0)
4260 	    {
4261 	      all_ones = false;
4262 	      bytes_only = false;
4263 	    }
4264 	}
4265       size_t padding_end = i;
4266       if (padding_bytes)
4267 	{
4268 	  if (nonzero_first == 0
4269 	      && nonzero_last == wordsize
4270 	      && all_ones)
4271 	    {
4272 	      /* All bits are padding and we had some padding
4273 		 before too.  Just extend it.  */
4274 	      padding_bytes += wordsize;
4275 	      continue;
4276 	    }
4277 	  if (all_ones && nonzero_first == 0)
4278 	    {
4279 	      padding_bytes += nonzero_last;
4280 	      padding_end += nonzero_last;
4281 	      nonzero_first = wordsize;
4282 	      nonzero_last = 0;
4283 	    }
4284 	  else if (bytes_only && nonzero_first == 0)
4285 	    {
4286 	      gcc_assert (zero_first && zero_first != wordsize);
4287 	      padding_bytes += zero_first;
4288 	      padding_end += zero_first;
4289 	    }
4290 	  tree atype, src;
4291 	  if (padding_bytes == 1)
4292 	    {
4293 	      atype = char_type_node;
4294 	      src = build_zero_cst (char_type_node);
4295 	    }
4296 	  else
4297 	    {
4298 	      atype = build_array_type_nelts (char_type_node, padding_bytes);
4299 	      src = build_constructor (atype, NULL);
4300 	    }
4301 	  tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4302 				 build_int_cst (buf->alias_type,
4303 						buf->off + padding_end
4304 						- padding_bytes));
4305 	  gimple *g = gimple_build_assign (dst, src);
4306 	  gimple_set_location (g, buf->loc);
4307 	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4308 	  padding_bytes = 0;
4309 	  buf->padding_bytes = 0;
4310 	}
4311       if (nonzero_first == wordsize)
4312 	/* All bits in a word are 0, there are no padding bits.  */
4313 	continue;
4314       if (all_ones && nonzero_last == wordsize)
4315 	{
4316 	  /* All bits between nonzero_first and end of word are padding
4317 	     bits, start counting padding_bytes.  */
4318 	  padding_bytes = nonzero_last - nonzero_first;
4319 	  continue;
4320 	}
4321       if (bytes_only)
4322 	{
4323 	  /* If bitfields aren't involved in this word, prefer storing
4324 	     individual bytes or groups of them over performing a RMW
4325 	     operation on the whole word.  */
4326 	  gcc_assert (i + zero_last <= end);
4327 	  for (size_t j = padding_end; j < i + zero_last; j++)
4328 	    {
4329 	      if (buf->buf[j])
4330 		{
4331 		  size_t k;
4332 		  for (k = j; k < i + zero_last; k++)
4333 		    if (buf->buf[k] == 0)
4334 		      break;
4335 		  HOST_WIDE_INT off = buf->off + j;
4336 		  tree atype, src;
4337 		  if (k - j == 1)
4338 		    {
4339 		      atype = char_type_node;
4340 		      src = build_zero_cst (char_type_node);
4341 		    }
4342 		  else
4343 		    {
4344 		      atype = build_array_type_nelts (char_type_node, k - j);
4345 		      src = build_constructor (atype, NULL);
4346 		    }
4347 		  tree dst = build2_loc (buf->loc, MEM_REF, atype,
4348 					 buf->base,
4349 					 build_int_cst (buf->alias_type, off));
4350 		  gimple *g = gimple_build_assign (dst, src);
4351 		  gimple_set_location (g, buf->loc);
4352 		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4353 		  j = k;
4354 		}
4355 	    }
4356 	  if (nonzero_last == wordsize)
4357 	    padding_bytes = nonzero_last - zero_last;
4358 	  continue;
4359 	}
4360       for (size_t eltsz = 1; eltsz <= wordsize; eltsz <<= 1)
4361 	{
4362 	  if (nonzero_last - nonzero_first <= eltsz
4363 	      && ((nonzero_first & ~(eltsz - 1))
4364 		  == ((nonzero_last - 1) & ~(eltsz - 1))))
4365 	    {
4366 	      tree type;
4367 	      if (eltsz == 1)
4368 		type = char_type_node;
4369 	      else
4370 		type = lang_hooks.types.type_for_size (eltsz * BITS_PER_UNIT,
4371 						       0);
4372 	      size_t start = nonzero_first & ~(eltsz - 1);
4373 	      HOST_WIDE_INT off = buf->off + i + start;
4374 	      tree atype = type;
4375 	      if (eltsz > 1 && buf->align < TYPE_ALIGN (type))
4376 		atype = build_aligned_type (type, buf->align);
4377 	      tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4378 				     build_int_cst (buf->alias_type, off));
4379 	      tree src;
4380 	      gimple *g;
4381 	      if (all_ones
4382 		  && nonzero_first == start
4383 		  && nonzero_last == start + eltsz)
4384 		src = build_zero_cst (type);
4385 	      else
4386 		{
4387 		  src = make_ssa_name (type);
4388 		  tree tmp_dst = unshare_expr (dst);
4389 		  /* The folding introduces a read from the tmp_dst, we should
4390 		     prevent uninitialized warning analysis from issuing warning
4391 		     for such fake read.  In order to suppress warning only for
4392 		     this expr, we should set the location of tmp_dst to
4393 		     UNKNOWN_LOCATION first, then suppress_warning will call
4394 		     set_no_warning_bit to set the no_warning flag only for
4395 		     tmp_dst.  */
4396 		  SET_EXPR_LOCATION (tmp_dst, UNKNOWN_LOCATION);
4397 		  suppress_warning (tmp_dst, OPT_Wuninitialized);
4398 		  g = gimple_build_assign (src, tmp_dst);
4399 		  gimple_set_location (g, buf->loc);
4400 		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4401 		  tree mask = native_interpret_expr (type,
4402 						     buf->buf + i + start,
4403 						     eltsz);
4404 		  gcc_assert (mask && TREE_CODE (mask) == INTEGER_CST);
4405 		  mask = fold_build1 (BIT_NOT_EXPR, type, mask);
4406 		  tree src_masked = make_ssa_name (type);
4407 		  g = gimple_build_assign (src_masked, BIT_AND_EXPR,
4408 					   src, mask);
4409 		  gimple_set_location (g, buf->loc);
4410 		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4411 		  src = src_masked;
4412 		}
4413 	      g = gimple_build_assign (dst, src);
4414 	      gimple_set_location (g, buf->loc);
4415 	      gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4416 	      break;
4417 	    }
4418 	}
4419     }
4420   if (full)
4421     {
4422       if (padding_bytes)
4423 	{
4424 	  tree atype, src;
4425 	  if (padding_bytes == 1)
4426 	    {
4427 	      atype = char_type_node;
4428 	      src = build_zero_cst (char_type_node);
4429 	    }
4430 	  else
4431 	    {
4432 	      atype = build_array_type_nelts (char_type_node, padding_bytes);
4433 	      src = build_constructor (atype, NULL);
4434 	    }
4435 	  tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4436 				 build_int_cst (buf->alias_type,
4437 						buf->off + end
4438 						- padding_bytes));
4439 	  gimple *g = gimple_build_assign (dst, src);
4440 	  gimple_set_location (g, buf->loc);
4441 	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4442 	}
4443       size_t end_rem = end % UNITS_PER_WORD;
4444       buf->off += end - end_rem;
4445       buf->size = end_rem;
4446       memset (buf->buf, 0, buf->size);
4447       buf->padding_bytes = 0;
4448     }
4449   else
4450     {
4451       memmove (buf->buf, buf->buf + end, buf->size - end);
4452       buf->off += end;
4453       buf->size -= end;
4454       buf->padding_bytes = padding_bytes;
4455     }
4456 }
4457 
4458 /* Append PADDING_BYTES padding bytes.  */
4459 
4460 static void
4461 clear_padding_add_padding (clear_padding_struct *buf,
4462 			   HOST_WIDE_INT padding_bytes)
4463 {
4464   if (padding_bytes == 0)
4465     return;
4466   if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
4467       > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
4468     clear_padding_flush (buf, false);
4469   if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
4470       > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
4471     {
4472       memset (buf->buf + buf->size, ~0, clear_padding_buf_size - buf->size);
4473       padding_bytes -= clear_padding_buf_size - buf->size;
4474       buf->size = clear_padding_buf_size;
4475       clear_padding_flush (buf, false);
4476       gcc_assert (buf->padding_bytes);
4477       /* At this point buf->buf[0] through buf->buf[buf->size - 1]
4478 	 is guaranteed to be all ones.  */
4479       padding_bytes += buf->size;
4480       buf->size = padding_bytes % UNITS_PER_WORD;
4481       memset (buf->buf, ~0, buf->size);
4482       buf->off += padding_bytes - buf->size;
4483       buf->padding_bytes += padding_bytes - buf->size;
4484     }
4485   else
4486     {
4487       memset (buf->buf + buf->size, ~0, padding_bytes);
4488       buf->size += padding_bytes;
4489     }
4490 }
4491 
4492 static void clear_padding_type (clear_padding_struct *, tree,
4493 				HOST_WIDE_INT, bool);
4494 
4495 /* Clear padding bits of union type TYPE.  */
4496 
4497 static void
4498 clear_padding_union (clear_padding_struct *buf, tree type,
4499 		     HOST_WIDE_INT sz, bool for_auto_init)
4500 {
4501   clear_padding_struct *union_buf;
4502   HOST_WIDE_INT start_off = 0, next_off = 0;
4503   size_t start_size = 0;
4504   if (buf->union_ptr)
4505     {
4506       start_off = buf->off + buf->size;
4507       next_off = start_off + sz;
4508       start_size = start_off % UNITS_PER_WORD;
4509       start_off -= start_size;
4510       clear_padding_flush (buf, true);
4511       union_buf = buf;
4512     }
4513   else
4514     {
4515       if (sz + buf->size > clear_padding_buf_size)
4516 	clear_padding_flush (buf, false);
4517       union_buf = XALLOCA (clear_padding_struct);
4518       union_buf->loc = buf->loc;
4519       union_buf->clear_in_mask = buf->clear_in_mask;
4520       union_buf->base = NULL_TREE;
4521       union_buf->alias_type = NULL_TREE;
4522       union_buf->gsi = NULL;
4523       union_buf->align = 0;
4524       union_buf->off = 0;
4525       union_buf->padding_bytes = 0;
4526       union_buf->sz = sz;
4527       union_buf->size = 0;
4528       if (sz + buf->size <= clear_padding_buf_size)
4529 	union_buf->union_ptr = buf->buf + buf->size;
4530       else
4531 	union_buf->union_ptr = XNEWVEC (unsigned char, sz);
4532       memset (union_buf->union_ptr, ~0, sz);
4533     }
4534 
4535   for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4536     if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
4537       {
4538 	if (DECL_SIZE_UNIT (field) == NULL_TREE)
4539 	  {
4540 	    if (TREE_TYPE (field) == error_mark_node)
4541 	      continue;
4542 	    gcc_assert (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
4543 			&& !COMPLETE_TYPE_P (TREE_TYPE (field)));
4544 	    if (!buf->clear_in_mask && !for_auto_init)
4545 	      error_at (buf->loc, "flexible array member %qD does not have "
4546 				  "well defined padding bits for %qs",
4547 			field, "__builtin_clear_padding");
4548 	    continue;
4549 	  }
4550 	HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
4551 	gcc_assert (union_buf->size == 0);
4552 	union_buf->off = start_off;
4553 	union_buf->size = start_size;
4554 	memset (union_buf->buf, ~0, start_size);
4555 	clear_padding_type (union_buf, TREE_TYPE (field), fldsz, for_auto_init);
4556 	clear_padding_add_padding (union_buf, sz - fldsz);
4557 	clear_padding_flush (union_buf, true);
4558       }
4559 
4560   if (buf == union_buf)
4561     {
4562       buf->off = next_off;
4563       buf->size = next_off % UNITS_PER_WORD;
4564       buf->off -= buf->size;
4565       memset (buf->buf, ~0, buf->size);
4566     }
4567   else if (sz + buf->size <= clear_padding_buf_size)
4568     buf->size += sz;
4569   else
4570     {
4571       unsigned char *union_ptr = union_buf->union_ptr;
4572       while (sz)
4573 	{
4574 	  clear_padding_flush (buf, false);
4575 	  HOST_WIDE_INT this_sz
4576 	    = MIN ((unsigned HOST_WIDE_INT) sz,
4577 		   clear_padding_buf_size - buf->size);
4578 	  memcpy (buf->buf + buf->size, union_ptr, this_sz);
4579 	  buf->size += this_sz;
4580 	  union_ptr += this_sz;
4581 	  sz -= this_sz;
4582 	}
4583       XDELETE (union_buf->union_ptr);
4584     }
4585 }
4586 
4587 /* The only known floating point formats with padding bits are the
4588    IEEE extended ones.  */
4589 
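/* For instance (editor's note, an assumed example): the x86 80-bit
   extended format stores 80 value bits in 12 or 16 bytes with the
   sign bit at bit 79, and the m68k extended format places it at
   bit 95; both therefore contain padding bits.  */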
4590 static bool
4591 clear_padding_real_needs_padding_p (tree type)
4592 {
4593   const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
4594   return (fmt->b == 2
4595 	  && fmt->signbit_ro == fmt->signbit_rw
4596 	  && (fmt->signbit_ro == 79 || fmt->signbit_ro == 95));
4597 }
4598 
4599 /* Return true if TYPE might contain any padding bits.  */
4600 
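/* Illustrative examples (editor's note, not part of the original
   source): any RECORD_TYPE or UNION_TYPE conservatively may have
   padding; an array of long double does iff the element format
   does; plain int never does.  */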
4601 bool
4602 clear_padding_type_may_have_padding_p (tree type)
4603 {
4604   switch (TREE_CODE (type))
4605     {
4606     case RECORD_TYPE:
4607     case UNION_TYPE:
4608       return true;
4609     case ARRAY_TYPE:
4610     case COMPLEX_TYPE:
4611     case VECTOR_TYPE:
4612       return clear_padding_type_may_have_padding_p (TREE_TYPE (type));
4613     case REAL_TYPE:
4614       return clear_padding_real_needs_padding_p (type);
4615     default:
4616       return false;
4617     }
4618 }
4619 
4620 /* Emit a runtime loop:
4621    for (; buf.base != end; buf.base += sz)
4622      __builtin_clear_padding (buf.base);  */
4623 
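/* The emitted GIMPLE has roughly this shape (editor's sketch):
     goto l2;
   l1:
     ... clear the padding of one element at buf.base ...
     buf.base = buf.base + sz;
   l2:
     if (buf.base != end) goto l1; else goto l3;
   l3:
     ;  */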
4624 static void
4625 clear_padding_emit_loop (clear_padding_struct *buf, tree type,
4626 			 tree end, bool for_auto_init)
4627 {
4628   tree l1 = create_artificial_label (buf->loc);
4629   tree l2 = create_artificial_label (buf->loc);
4630   tree l3 = create_artificial_label (buf->loc);
4631   gimple *g = gimple_build_goto (l2);
4632   gimple_set_location (g, buf->loc);
4633   gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4634   g = gimple_build_label (l1);
4635   gimple_set_location (g, buf->loc);
4636   gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4637   clear_padding_type (buf, type, buf->sz, for_auto_init);
4638   clear_padding_flush (buf, true);
4639   g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR, buf->base,
4640 			   size_int (buf->sz));
4641   gimple_set_location (g, buf->loc);
4642   gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4643   g = gimple_build_label (l2);
4644   gimple_set_location (g, buf->loc);
4645   gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4646   g = gimple_build_cond (NE_EXPR, buf->base, end, l1, l3);
4647   gimple_set_location (g, buf->loc);
4648   gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4649   g = gimple_build_label (l3);
4650   gimple_set_location (g, buf->loc);
4651   gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4652 }
4653 
4654 /* Clear padding bits for TYPE.  Called recursively from
4655    gimple_fold_builtin_clear_padding.  If FOR_AUTO_INIT is true,
4656    the __builtin_clear_padding was not called by the end user but
4657    inserted by the compiler to initialize the padding of an
4658    automatic variable; in that case we should not emit error
4659    messages for flexible array members, to avoid confusing the
4660    end user.  */
4661 
4662 static void
4663 clear_padding_type (clear_padding_struct *buf, tree type,
4664 		    HOST_WIDE_INT sz, bool for_auto_init)
4665 {
4666   switch (TREE_CODE (type))
4667     {
4668     case RECORD_TYPE:
4669       HOST_WIDE_INT cur_pos;
4670       cur_pos = 0;
4671       for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4672 	if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
4673 	  {
4674 	    tree ftype = TREE_TYPE (field);
4675 	    if (DECL_BIT_FIELD (field))
4676 	      {
4677 		HOST_WIDE_INT fldsz = TYPE_PRECISION (ftype);
4678 		if (fldsz == 0)
4679 		  continue;
4680 		HOST_WIDE_INT pos = int_byte_position (field);
4681 		if (pos >= sz)
4682 		  continue;
4683 		HOST_WIDE_INT bpos
4684 		  = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
4685 		bpos %= BITS_PER_UNIT;
4686 		HOST_WIDE_INT end
4687 		  = ROUND_UP (bpos + fldsz, BITS_PER_UNIT) / BITS_PER_UNIT;
4688 		if (pos + end > cur_pos)
4689 		  {
4690 		    clear_padding_add_padding (buf, pos + end - cur_pos);
4691 		    cur_pos = pos + end;
4692 		  }
4693 		gcc_assert (cur_pos > pos
4694 			    && ((unsigned HOST_WIDE_INT) buf->size
4695 				>= (unsigned HOST_WIDE_INT) cur_pos - pos));
4696 		unsigned char *p = buf->buf + buf->size - (cur_pos - pos);
4697 		if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
4698 		  sorry_at (buf->loc, "PDP11 bit-field handling unsupported"
4699 				      " in %qs", "__builtin_clear_padding");
4700 		else if (BYTES_BIG_ENDIAN)
4701 		  {
4702 		    /* Big endian.  */
4703 		    if (bpos + fldsz <= BITS_PER_UNIT)
4704 		      *p &= ~(((1 << fldsz) - 1)
4705 			      << (BITS_PER_UNIT - bpos - fldsz));
4706 		    else
4707 		      {
4708 			if (bpos)
4709 			  {
4710 			    *p &= ~(((1U << BITS_PER_UNIT) - 1) >> bpos);
4711 			    p++;
4712 			    fldsz -= BITS_PER_UNIT - bpos;
4713 			  }
4714 			memset (p, 0, fldsz / BITS_PER_UNIT);
4715 			p += fldsz / BITS_PER_UNIT;
4716 			fldsz %= BITS_PER_UNIT;
4717 			if (fldsz)
4718 			  *p &= ((1U << BITS_PER_UNIT) - 1) >> fldsz;
4719 		      }
4720 		  }
4721 		else
4722 		  {
4723 		    /* Little endian.  */
4724 		    if (bpos + fldsz <= BITS_PER_UNIT)
4725 		      *p &= ~(((1 << fldsz) - 1) << bpos);
4726 		    else
4727 		      {
4728 			if (bpos)
4729 			  {
4730 			    *p &= ~(((1 << BITS_PER_UNIT) - 1) << bpos);
4731 			    p++;
4732 			    fldsz -= BITS_PER_UNIT - bpos;
4733 			  }
4734 			memset (p, 0, fldsz / BITS_PER_UNIT);
4735 			p += fldsz / BITS_PER_UNIT;
4736 			fldsz %= BITS_PER_UNIT;
4737 			if (fldsz)
4738 			  *p &= ~((1 << fldsz) - 1);
4739 		      }
4740 		  }
4741 	      }
4742 	    else if (DECL_SIZE_UNIT (field) == NULL_TREE)
4743 	      {
4744 		if (ftype == error_mark_node)
4745 		  continue;
4746 		gcc_assert (TREE_CODE (ftype) == ARRAY_TYPE
4747 			    && !COMPLETE_TYPE_P (ftype));
4748 		if (!buf->clear_in_mask && !for_auto_init)
4749 		  error_at (buf->loc, "flexible array member %qD does not "
4750 				      "have well defined padding bits for %qs",
4751 			    field, "__builtin_clear_padding");
4752 	      }
4753 	    else if (is_empty_type (ftype))
4754 	      continue;
4755 	    else
4756 	      {
4757 		HOST_WIDE_INT pos = int_byte_position (field);
4758 		if (pos >= sz)
4759 		  continue;
4760 		HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
4761 		gcc_assert (pos >= 0 && fldsz >= 0 && pos >= cur_pos);
4762 		clear_padding_add_padding (buf, pos - cur_pos);
4763 		cur_pos = pos;
4764 		if (tree asbase = lang_hooks.types.classtype_as_base (field))
4765 		  ftype = asbase;
4766 		clear_padding_type (buf, ftype, fldsz, for_auto_init);
4767 		cur_pos += fldsz;
4768 	      }
4769 	  }
4770       gcc_assert (sz >= cur_pos);
4771       clear_padding_add_padding (buf, sz - cur_pos);
4772       break;
4773     case ARRAY_TYPE:
4774       HOST_WIDE_INT nelts, fldsz;
4775       fldsz = int_size_in_bytes (TREE_TYPE (type));
4776       if (fldsz == 0)
4777 	break;
4778       nelts = sz / fldsz;
4779       if (nelts > 1
4780 	  && sz > 8 * UNITS_PER_WORD
4781 	  && buf->union_ptr == NULL
4782 	  && clear_padding_type_may_have_padding_p (TREE_TYPE (type)))
4783 	{
4784 	  /* For a sufficiently large array of more than one element,
4785 	     emit a runtime loop to keep code size manageable.  */
4786 	  tree base = buf->base;
4787 	  unsigned int prev_align = buf->align;
4788 	  HOST_WIDE_INT off = buf->off + buf->size;
4789 	  HOST_WIDE_INT prev_sz = buf->sz;
4790 	  clear_padding_flush (buf, true);
4791 	  tree elttype = TREE_TYPE (type);
4792 	  buf->base = create_tmp_var (build_pointer_type (elttype));
4793 	  tree end = make_ssa_name (TREE_TYPE (buf->base));
4794 	  gimple *g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR,
4795 					   base, size_int (off));
4796 	  gimple_set_location (g, buf->loc);
4797 	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4798 	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf->base,
4799 				   size_int (sz));
4800 	  gimple_set_location (g, buf->loc);
4801 	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4802 	  buf->sz = fldsz;
4803 	  buf->align = TYPE_ALIGN (elttype);
4804 	  buf->off = 0;
4805 	  buf->size = 0;
4806 	  clear_padding_emit_loop (buf, elttype, end, for_auto_init);
4807 	  buf->base = base;
4808 	  buf->sz = prev_sz;
4809 	  buf->align = prev_align;
4810 	  buf->size = off % UNITS_PER_WORD;
4811 	  buf->off = off - buf->size;
4812 	  memset (buf->buf, 0, buf->size);
4813 	  break;
4814 	}
4815       for (HOST_WIDE_INT i = 0; i < nelts; i++)
4816 	clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
4817       break;
4818     case UNION_TYPE:
4819       clear_padding_union (buf, type, sz, for_auto_init);
4820       break;
4821     case REAL_TYPE:
4822       gcc_assert ((size_t) sz <= clear_padding_unit);
4823       if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4824 	clear_padding_flush (buf, false);
4825       if (clear_padding_real_needs_padding_p (type))
4826 	{
4827 	  /* Use native_interpret_real + native_encode_expr to figure out
4828 	     which bits are padding.  */
4829 	  memset (buf->buf + buf->size, ~0, sz);
4830 	  tree cst = native_interpret_real (type, buf->buf + buf->size, sz);
4831 	  gcc_assert (cst && TREE_CODE (cst) == REAL_CST);
4832 	  int len = native_encode_expr (cst, buf->buf + buf->size, sz);
4833 	  gcc_assert (len > 0 && (size_t) len == (size_t) sz);
4834 	  for (size_t i = 0; i < (size_t) sz; i++)
4835 	    buf->buf[buf->size + i] ^= ~0;
4836 	}
4837       else
4838 	memset (buf->buf + buf->size, 0, sz);
4839       buf->size += sz;
4840       break;
4841     case COMPLEX_TYPE:
4842       fldsz = int_size_in_bytes (TREE_TYPE (type));
4843       clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
4844       clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
4845       break;
4846     case VECTOR_TYPE:
4847       nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
4848       fldsz = int_size_in_bytes (TREE_TYPE (type));
4849       for (HOST_WIDE_INT i = 0; i < nelts; i++)
4850 	clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
4851       break;
4852     case NULLPTR_TYPE:
4853       gcc_assert ((size_t) sz <= clear_padding_unit);
4854       if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4855 	clear_padding_flush (buf, false);
4856       memset (buf->buf + buf->size, ~0, sz);
4857       buf->size += sz;
4858       break;
4859     default:
4860       gcc_assert ((size_t) sz <= clear_padding_unit);
4861       if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4862 	clear_padding_flush (buf, false);
4863       memset (buf->buf + buf->size, 0, sz);
4864       buf->size += sz;
4865       break;
4866     }
4867 }
4868 
4869 /* Clear padding bits of TYPE in MASK.  */
4870 
4871 void
4872 clear_type_padding_in_mask (tree type, unsigned char *mask)
4873 {
4874   clear_padding_struct buf;
4875   buf.loc = UNKNOWN_LOCATION;
4876   buf.clear_in_mask = true;
4877   buf.base = NULL_TREE;
4878   buf.alias_type = NULL_TREE;
4879   buf.gsi = NULL;
4880   buf.align = 0;
4881   buf.off = 0;
4882   buf.padding_bytes = 0;
4883   buf.sz = int_size_in_bytes (type);
4884   buf.size = 0;
4885   buf.union_ptr = mask;
4886   clear_padding_type (&buf, type, buf.sz, false);
4887   clear_padding_flush (&buf, true);
4888 }
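
/* Worked example (illustrative; layout is target-dependent): for
   struct S { char c; int i; } with 4-byte aligned int, starting from
   an all-ones 8-byte MASK this clears bytes 1-3, i.e. the mask ends up
   with zero bits exactly at the padding positions.  */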
4889 
4890 /* Fold __builtin_clear_padding builtin.  */
4891 
4892 static bool
4893 gimple_fold_builtin_clear_padding (gimple_stmt_iterator *gsi)
4894 {
4895   gimple *stmt = gsi_stmt (*gsi);
4896   gcc_assert (gimple_call_num_args (stmt) == 2);
4897   tree ptr = gimple_call_arg (stmt, 0);
4898   tree typearg = gimple_call_arg (stmt, 1);
4899   /* The value of the second argument of __builtin_clear_padding is used
4900      to distinguish whether this call was written by the user or inserted
4901      by the compiler for automatic variable initialization.  */
4902   bool for_auto_init = (bool) TREE_INT_CST_LOW (typearg);
4903   tree type = TREE_TYPE (TREE_TYPE (typearg));
4904   location_t loc = gimple_location (stmt);
4905   clear_padding_struct buf;
4906   gimple_stmt_iterator gsiprev = *gsi;
4907   /* This should be folded during the gimple lowering pass.  */
4908   gcc_assert (!gimple_in_ssa_p (cfun) && cfun->cfg == NULL);
4909   gcc_assert (COMPLETE_TYPE_P (type));
4910   gsi_prev (&gsiprev);
4911 
4912   buf.loc = loc;
4913   buf.clear_in_mask = false;
4914   buf.base = ptr;
4915   buf.alias_type = NULL_TREE;
4916   buf.gsi = gsi;
4917   buf.align = get_pointer_alignment (ptr);
4918   unsigned int talign = min_align_of_type (type) * BITS_PER_UNIT;
4919   buf.align = MAX (buf.align, talign);
4920   buf.off = 0;
4921   buf.padding_bytes = 0;
4922   buf.size = 0;
4923   buf.sz = int_size_in_bytes (type);
4924   buf.union_ptr = NULL;
4925   if (buf.sz < 0 && int_size_in_bytes (strip_array_types (type)) < 0)
4926     sorry_at (loc, "%s not supported for variable length aggregates",
4927 	      "__builtin_clear_padding");
4928   /* The implementation currently assumes 8-bit host and target
4929      chars, which is the case for all currently supported targets and
4930      hosts, and is required e.g. for the native_{encode,interpret}* APIs.  */
4931   else if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
4932     sorry_at (loc, "%s not supported on this target",
4933 	      "__builtin_clear_padding");
4934   else if (!clear_padding_type_may_have_padding_p (type))
4935     ;
4936   else if (TREE_CODE (type) == ARRAY_TYPE && buf.sz < 0)
4937     {
4938       tree sz = TYPE_SIZE_UNIT (type);
4939       tree elttype = type;
4940       /* Only C/C++ VLAs are supported; flatten all the VLA levels.  */
4941       while (TREE_CODE (elttype) == ARRAY_TYPE
4942 	     && int_size_in_bytes (elttype) < 0)
4943 	elttype = TREE_TYPE (elttype);
4944       HOST_WIDE_INT eltsz = int_size_in_bytes (elttype);
4945       gcc_assert (eltsz >= 0);
4946       if (eltsz)
4947 	{
4948 	  buf.base = create_tmp_var (build_pointer_type (elttype));
4949 	  tree end = make_ssa_name (TREE_TYPE (buf.base));
4950 	  gimple *g = gimple_build_assign (buf.base, ptr);
4951 	  gimple_set_location (g, loc);
4952 	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
4953 	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf.base, sz);
4954 	  gimple_set_location (g, loc);
4955 	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
4956 	  buf.sz = eltsz;
4957 	  buf.align = TYPE_ALIGN (elttype);
4958 	  buf.alias_type = build_pointer_type (elttype);
4959 	  clear_padding_emit_loop (&buf, elttype, end, for_auto_init);
4960 	}
4961     }
4962   else
4963     {
4964       if (!is_gimple_mem_ref_addr (buf.base))
4965 	{
4966 	  buf.base = make_ssa_name (TREE_TYPE (ptr));
4967 	  gimple *g = gimple_build_assign (buf.base, ptr);
4968 	  gimple_set_location (g, loc);
4969 	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
4970 	}
4971       buf.alias_type = build_pointer_type (type);
4972       clear_padding_type (&buf, type, buf.sz, for_auto_init);
4973       clear_padding_flush (&buf, true);
4974     }
4975 
4976   gimple_stmt_iterator gsiprev2 = *gsi;
4977   gsi_prev (&gsiprev2);
4978   if (gsi_stmt (gsiprev) == gsi_stmt (gsiprev2))
4979     gsi_replace (gsi, gimple_build_nop (), true);
4980   else
4981     {
4982       gsi_remove (gsi, true);
4983       *gsi = gsiprev2;
4984     }
4985   return true;
4986 }
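
/* As an illustrative example (the exact form depends on the target
   layout), for  struct S { char c; int i; } s;  a user call
     __builtin_clear_padding (&s);
   is folded into a store of zeros over the three padding bytes,
   roughly  MEM <char[3]> [(char *)&s + 1] = {};  */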
4987 
4988 /* Fold the non-target builtin at *GSI and return whether any simplification
4989    was made.  */
4990 
4991 static bool
4992 gimple_fold_builtin (gimple_stmt_iterator *gsi)
4993 {
4994   gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
4995   tree callee = gimple_call_fndecl (stmt);
4996 
4997   /* Give up for always_inline inline builtins until they are
4998      inlined.  */
4999   if (avoid_folding_inline_builtin (callee))
5000     return false;
5001 
5002   unsigned n = gimple_call_num_args (stmt);
5003   enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
5004   switch (fcode)
5005     {
5006     case BUILT_IN_BCMP:
5007       return gimple_fold_builtin_bcmp (gsi);
5008     case BUILT_IN_BCOPY:
5009       return gimple_fold_builtin_bcopy (gsi);
5010     case BUILT_IN_BZERO:
5011       return gimple_fold_builtin_bzero (gsi);
5012 
5013     case BUILT_IN_MEMSET:
5014       return gimple_fold_builtin_memset (gsi,
5015 					 gimple_call_arg (stmt, 1),
5016 					 gimple_call_arg (stmt, 2));
5017     case BUILT_IN_MEMCPY:
5018     case BUILT_IN_MEMPCPY:
5019     case BUILT_IN_MEMMOVE:
5020       return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
5021 					    gimple_call_arg (stmt, 1), fcode);
5022     case BUILT_IN_SPRINTF_CHK:
5023     case BUILT_IN_VSPRINTF_CHK:
5024       return gimple_fold_builtin_sprintf_chk (gsi, fcode);
5025     case BUILT_IN_STRCAT_CHK:
5026       return gimple_fold_builtin_strcat_chk (gsi);
5027     case BUILT_IN_STRNCAT_CHK:
5028       return gimple_fold_builtin_strncat_chk (gsi);
5029     case BUILT_IN_STRLEN:
5030       return gimple_fold_builtin_strlen (gsi);
5031     case BUILT_IN_STRCPY:
5032       return gimple_fold_builtin_strcpy (gsi,
5033 					 gimple_call_arg (stmt, 0),
5034 					 gimple_call_arg (stmt, 1));
5035     case BUILT_IN_STRNCPY:
5036       return gimple_fold_builtin_strncpy (gsi,
5037 					  gimple_call_arg (stmt, 0),
5038 					  gimple_call_arg (stmt, 1),
5039 					  gimple_call_arg (stmt, 2));
5040     case BUILT_IN_STRCAT:
5041       return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
5042 					 gimple_call_arg (stmt, 1));
5043     case BUILT_IN_STRNCAT:
5044       return gimple_fold_builtin_strncat (gsi);
5045     case BUILT_IN_INDEX:
5046     case BUILT_IN_STRCHR:
5047       return gimple_fold_builtin_strchr (gsi, false);
5048     case BUILT_IN_RINDEX:
5049     case BUILT_IN_STRRCHR:
5050       return gimple_fold_builtin_strchr (gsi, true);
5051     case BUILT_IN_STRSTR:
5052       return gimple_fold_builtin_strstr (gsi);
5053     case BUILT_IN_STRCMP:
5054     case BUILT_IN_STRCMP_EQ:
5055     case BUILT_IN_STRCASECMP:
5056     case BUILT_IN_STRNCMP:
5057     case BUILT_IN_STRNCMP_EQ:
5058     case BUILT_IN_STRNCASECMP:
5059       return gimple_fold_builtin_string_compare (gsi);
5060     case BUILT_IN_MEMCHR:
5061       return gimple_fold_builtin_memchr (gsi);
5062     case BUILT_IN_FPUTS:
5063       return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
5064 					gimple_call_arg (stmt, 1), false);
5065     case BUILT_IN_FPUTS_UNLOCKED:
5066       return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
5067 					gimple_call_arg (stmt, 1), true);
5068     case BUILT_IN_MEMCPY_CHK:
5069     case BUILT_IN_MEMPCPY_CHK:
5070     case BUILT_IN_MEMMOVE_CHK:
5071     case BUILT_IN_MEMSET_CHK:
5072       return gimple_fold_builtin_memory_chk (gsi,
5073 					     gimple_call_arg (stmt, 0),
5074 					     gimple_call_arg (stmt, 1),
5075 					     gimple_call_arg (stmt, 2),
5076 					     gimple_call_arg (stmt, 3),
5077 					     fcode);
5078     case BUILT_IN_STPCPY:
5079       return gimple_fold_builtin_stpcpy (gsi);
5080     case BUILT_IN_STRCPY_CHK:
5081     case BUILT_IN_STPCPY_CHK:
5082       return gimple_fold_builtin_stxcpy_chk (gsi,
5083 					     gimple_call_arg (stmt, 0),
5084 					     gimple_call_arg (stmt, 1),
5085 					     gimple_call_arg (stmt, 2),
5086 					     fcode);
5087     case BUILT_IN_STRNCPY_CHK:
5088     case BUILT_IN_STPNCPY_CHK:
5089       return gimple_fold_builtin_stxncpy_chk (gsi,
5090 					      gimple_call_arg (stmt, 0),
5091 					      gimple_call_arg (stmt, 1),
5092 					      gimple_call_arg (stmt, 2),
5093 					      gimple_call_arg (stmt, 3),
5094 					      fcode);
5095     case BUILT_IN_SNPRINTF_CHK:
5096     case BUILT_IN_VSNPRINTF_CHK:
5097       return gimple_fold_builtin_snprintf_chk (gsi, fcode);
5098 
5099     case BUILT_IN_FPRINTF:
5100     case BUILT_IN_FPRINTF_UNLOCKED:
5101     case BUILT_IN_VFPRINTF:
5102       if (n == 2 || n == 3)
5103 	return gimple_fold_builtin_fprintf (gsi,
5104 					    gimple_call_arg (stmt, 0),
5105 					    gimple_call_arg (stmt, 1),
5106 					    n == 3
5107 					    ? gimple_call_arg (stmt, 2)
5108 					    : NULL_TREE,
5109 					    fcode);
5110       break;
5111     case BUILT_IN_FPRINTF_CHK:
5112     case BUILT_IN_VFPRINTF_CHK:
5113       if (n == 3 || n == 4)
5114 	return gimple_fold_builtin_fprintf (gsi,
5115 					    gimple_call_arg (stmt, 0),
5116 					    gimple_call_arg (stmt, 2),
5117 					    n == 4
5118 					    ? gimple_call_arg (stmt, 3)
5119 					    : NULL_TREE,
5120 					    fcode);
5121       break;
5122     case BUILT_IN_PRINTF:
5123     case BUILT_IN_PRINTF_UNLOCKED:
5124     case BUILT_IN_VPRINTF:
5125       if (n == 1 || n == 2)
5126 	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
5127 					   n == 2
5128 					   ? gimple_call_arg (stmt, 1)
5129 					   : NULL_TREE, fcode);
5130       break;
5131     case BUILT_IN_PRINTF_CHK:
5132     case BUILT_IN_VPRINTF_CHK:
5133       if (n == 2 || n == 3)
5134 	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
5135 					   n == 3
5136 					   ? gimple_call_arg (stmt, 2)
5137 					   : NULL_TREE, fcode);
5138       break;
5139     case BUILT_IN_ACC_ON_DEVICE:
5140       return gimple_fold_builtin_acc_on_device (gsi,
5141 						gimple_call_arg (stmt, 0));
5142     case BUILT_IN_REALLOC:
5143       return gimple_fold_builtin_realloc (gsi);
5144 
5145     case BUILT_IN_CLEAR_PADDING:
5146       return gimple_fold_builtin_clear_padding (gsi);
5147 
5148     default:;
5149     }
5150 
5151   /* Try the generic builtin folder.  */
5152   bool ignore = (gimple_call_lhs (stmt) == NULL);
5153   tree result = fold_call_stmt (stmt, ignore);
5154   if (result)
5155     {
5156       if (ignore)
5157 	STRIP_NOPS (result);
5158       else
5159 	result = fold_convert (gimple_call_return_type (stmt), result);
5160       gimplify_and_update_call_from_tree (gsi, result);
5161       return true;
5162     }
5163 
5164   return false;
5165 }
5166 
5167 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
5168    function calls to constants, where possible.  */
5169 
5170 static tree
5171 fold_internal_goacc_dim (const gimple *call)
5172 {
5173   int axis = oacc_get_ifn_dim_arg (call);
5174   int size = oacc_get_fn_dim_size (current_function_decl, axis);
5175   tree result = NULL_TREE;
5176   tree type = TREE_TYPE (gimple_call_lhs (call));
5177 
5178   switch (gimple_call_internal_fn (call))
5179     {
5180     case IFN_GOACC_DIM_POS:
5181       /* If the size is 1, we know the answer.  */
5182       if (size == 1)
5183 	result = build_int_cst (type, 0);
5184       break;
5185     case IFN_GOACC_DIM_SIZE:
5186       /* If the size is not dynamic, we know the answer.  */
5187       if (size)
5188 	result = build_int_cst (type, size);
5189       break;
5190     default:
5191       break;
5192     }
5193 
5194   return result;
5195 }
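
/* For example, in an OpenACC region whose gang dimension is known at
   compile time to be 1, IFN_GOACC_DIM_POS (gang) folds to 0 and
   IFN_GOACC_DIM_SIZE (gang) folds to 1.  (Illustrative example.)  */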
5196 
5197 /* Return true if STMT is an __atomic_compare_exchange_N call that is
5198    suitable for conversion into ATOMIC_COMPARE_EXCHANGE if its second
5199    argument is &var, where var is only addressable because of such calls.  */
5200 
5201 bool
5202 optimize_atomic_compare_exchange_p (gimple *stmt)
5203 {
5204   if (gimple_call_num_args (stmt) != 6
5205       || !flag_inline_atomics
5206       || !optimize
5207       || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
5208       || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
5209       || !gimple_vdef (stmt)
5210       || !gimple_vuse (stmt))
5211     return false;
5212 
5213   tree fndecl = gimple_call_fndecl (stmt);
5214   switch (DECL_FUNCTION_CODE (fndecl))
5215     {
5216     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
5217     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
5218     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
5219     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
5220     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
5221       break;
5222     default:
5223       return false;
5224     }
5225 
5226   tree expected = gimple_call_arg (stmt, 1);
5227   if (TREE_CODE (expected) != ADDR_EXPR
5228       || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
5229     return false;
5230 
5231   tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
5232   if (!is_gimple_reg_type (etype)
5233       || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
5234       || TREE_THIS_VOLATILE (etype)
5235       || VECTOR_TYPE_P (etype)
5236       || TREE_CODE (etype) == COMPLEX_TYPE
5237       /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
5238 	 might not preserve all the bits.  See PR71716.  */
5239       || SCALAR_FLOAT_TYPE_P (etype)
5240       || maybe_ne (TYPE_PRECISION (etype),
5241 		   GET_MODE_BITSIZE (TYPE_MODE (etype))))
5242     return false;
5243 
5244   tree weak = gimple_call_arg (stmt, 3);
5245   if (!integer_zerop (weak) && !integer_onep (weak))
5246     return false;
5247 
5248   tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
5249   tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
5250   machine_mode mode = TYPE_MODE (itype);
5251 
5252   if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
5253       == CODE_FOR_nothing
5254       && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
5255     return false;
5256 
5257   if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
5258     return false;
5259 
5260   return true;
5261 }
5262 
5263 /* Fold
5264      r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
5265    into
5266      _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
5267      i = IMAGPART_EXPR <t>;
5268      r = (_Bool) i;
5269      e = REALPART_EXPR <t>;  */
5270 
5271 void
5272 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
5273 {
5274   gimple *stmt = gsi_stmt (*gsi);
5275   tree fndecl = gimple_call_fndecl (stmt);
5276   tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
5277   tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
5278   tree ctype = build_complex_type (itype);
5279   tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
5280   bool throws = false;
5281   edge e = NULL;
5282   gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
5283 				   expected);
5284   gsi_insert_before (gsi, g, GSI_SAME_STMT);
5285   gimple_stmt_iterator gsiret = gsi_for_stmt (g);
5286   if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
5287     {
5288       g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
5289 			       build1 (VIEW_CONVERT_EXPR, itype,
5290 				       gimple_assign_lhs (g)));
5291       gsi_insert_before (gsi, g, GSI_SAME_STMT);
5292     }
5293   int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
5294 	     + int_size_in_bytes (itype);
5295   g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
5296 				  gimple_call_arg (stmt, 0),
5297 				  gimple_assign_lhs (g),
5298 				  gimple_call_arg (stmt, 2),
5299 				  build_int_cst (integer_type_node, flag),
5300 				  gimple_call_arg (stmt, 4),
5301 				  gimple_call_arg (stmt, 5));
5302   tree lhs = make_ssa_name (ctype);
5303   gimple_call_set_lhs (g, lhs);
5304   gimple_move_vops (g, stmt);
5305   tree oldlhs = gimple_call_lhs (stmt);
5306   if (stmt_can_throw_internal (cfun, stmt))
5307     {
5308       throws = true;
5309       e = find_fallthru_edge (gsi_bb (*gsi)->succs);
5310     }
5311   gimple_call_set_nothrow (as_a <gcall *> (g),
5312 			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
5313   gimple_call_set_lhs (stmt, NULL_TREE);
5314   gsi_replace (gsi, g, true);
5315   if (oldlhs)
5316     {
5317       g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
5318 			       build1 (IMAGPART_EXPR, itype, lhs));
5319       if (throws)
5320 	{
5321 	  gsi_insert_on_edge_immediate (e, g);
5322 	  *gsi = gsi_for_stmt (g);
5323 	}
5324       else
5325 	gsi_insert_after (gsi, g, GSI_NEW_STMT);
5326       g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
5327       gsi_insert_after (gsi, g, GSI_NEW_STMT);
5328     }
5329   g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
5330 			   build1 (REALPART_EXPR, itype, lhs));
5331   if (throws && oldlhs == NULL_TREE)
5332     {
5333       gsi_insert_on_edge_immediate (e, g);
5334       *gsi = gsi_for_stmt (g);
5335     }
5336   else
5337     gsi_insert_after (gsi, g, GSI_NEW_STMT);
5338   if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
5339     {
5340       g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
5341 			       VIEW_CONVERT_EXPR,
5342 			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
5343 				       gimple_assign_lhs (g)));
5344       gsi_insert_after (gsi, g, GSI_NEW_STMT);
5345     }
5346   g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
5347   gsi_insert_after (gsi, g, GSI_NEW_STMT);
5348   *gsi = gsiret;
5349 }
5350 
5351 /* Return true if the result of ARG0 CODE ARG1, computed in infinite
5352    signed precision, does not fit into TYPE.  The overflow test is done
5353    regardless of -fwrapv, and even for unsigned types.  */
5354 
5355 bool
5356 arith_overflowed_p (enum tree_code code, const_tree type,
5357 		    const_tree arg0, const_tree arg1)
5358 {
5359   widest2_int warg0 = widest2_int_cst (arg0);
5360   widest2_int warg1 = widest2_int_cst (arg1);
5361   widest2_int wres;
5362   switch (code)
5363     {
5364     case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
5365     case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
5366     case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
5367     default: gcc_unreachable ();
5368     }
5369   signop sign = TYPE_SIGN (type);
5370   if (sign == UNSIGNED && wi::neg_p (wres))
5371     return true;
5372   return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
5373 }
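
/* Worked example (illustrative): for TYPE unsigned char (precision 8),
   200 + 100 computed in infinite precision is 300, which needs 9 bits
   as an unsigned value, so arith_overflowed_p (PLUS_EXPR, ...) returns
   true; 100 + 100 = 200 fits and it returns false.  Likewise 0 - 1
   yields a negative infinite-precision result, which overflows any
   unsigned TYPE.  */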
5374 
5375 /* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
5376    for the memory it references, otherwise return null.  VECTYPE is the
5377    type of the memory vector.  */
5378 
5379 static tree
5380 gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
5381 {
5382   tree ptr = gimple_call_arg (call, 0);
5383   tree alias_align = gimple_call_arg (call, 1);
5384   tree mask = gimple_call_arg (call, 2);
5385   if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
5386     return NULL_TREE;
5387 
5388   unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align);
5389   if (TYPE_ALIGN (vectype) != align)
5390     vectype = build_aligned_type (vectype, align);
5391   tree offset = build_zero_cst (TREE_TYPE (alias_align));
5392   return fold_build2 (MEM_REF, vectype, ptr, offset);
5393 }
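
/* For instance (an illustrative sketch), an unconditional
     lhs = .MASK_LOAD (ptr, align, { -1, -1, -1, -1 });
   with an all-ones mask is rewritten by the callers below into a plain
     lhs = MEM <vector(4) int> [(int *)ptr];
   and similarly for .MASK_STORE.  */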
5394 
5395 /* Try to fold IFN_MASK_LOAD call CALL.  Return true on success.  */
5396 
5397 static bool
5398 gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
5399 {
5400   tree lhs = gimple_call_lhs (call);
5401   if (!lhs)
5402     return false;
5403 
5404   if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
5405     {
5406       gassign *new_stmt = gimple_build_assign (lhs, rhs);
5407       gimple_set_location (new_stmt, gimple_location (call));
5408       gimple_move_vops (new_stmt, call);
5409       gsi_replace (gsi, new_stmt, false);
5410       return true;
5411     }
5412   return false;
5413 }
5414 
5415 /* Try to fold IFN_MASK_STORE call CALL.  Return true on success.  */
5416 
5417 static bool
5418 gimple_fold_mask_store (gimple_stmt_iterator *gsi, gcall *call)
5419 {
5420   tree rhs = gimple_call_arg (call, 3);
5421   if (tree lhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (rhs)))
5422     {
5423       gassign *new_stmt = gimple_build_assign (lhs, rhs);
5424       gimple_set_location (new_stmt, gimple_location (call));
5425       gimple_move_vops (new_stmt, call);
5426       gsi_replace (gsi, new_stmt, false);
5427       return true;
5428     }
5429   return false;
5430 }
5431 
5432 /* Attempt to fold a call statement referenced by the statement iterator GSI.
5433    The statement may be replaced by another statement, e.g., if the call
5434    simplifies to a constant value. Return true if any changes were made.
5435    It is assumed that the operands have been previously folded.  */
5436 
5437 static bool
5438 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
5439 {
5440   gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
5441   tree callee;
5442   bool changed = false;
5443 
5444   /* Check for virtual calls that became direct calls.  */
5445   callee = gimple_call_fn (stmt);
5446   if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
5447     {
5448       if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
5449 	{
5450           if (dump_file && virtual_method_call_p (callee)
5451 	      && !possible_polymorphic_call_target_p
5452 		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
5453 						     (OBJ_TYPE_REF_EXPR (callee)))))
5454 	    {
5455 	      fprintf (dump_file,
5456 		       "Type inheritance inconsistent devirtualization of ");
5457 	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5458 	      fprintf (dump_file, " to ");
5459 	      print_generic_expr (dump_file, callee, TDF_SLIM);
5460 	      fprintf (dump_file, "\n");
5461 	    }
5462 
5463 	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
5464 	  changed = true;
5465 	}
5466       else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
5467 	{
5468 	  bool final;
5469 	  vec <cgraph_node *>targets
5470 	    = possible_polymorphic_call_targets (callee, stmt, &final);
5471 	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
5472 	    {
5473 	      tree lhs = gimple_call_lhs (stmt);
5474 	      if (dump_enabled_p ())
5475 		{
5476 		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
5477 				   "folding virtual function call to %s\n",
5478 		 		   targets.length () == 1
5479 		  		   ? targets[0]->name ()
5480 		  		   : "__builtin_unreachable");
5481 		}
5482 	      if (targets.length () == 1)
5483 		{
5484 		  tree fndecl = targets[0]->decl;
5485 		  gimple_call_set_fndecl (stmt, fndecl);
5486 		  changed = true;
5487 		  /* If changing the call to __cxa_pure_virtual
5488 		     or similar noreturn function, adjust gimple_call_fntype
5489 		     too.  */
5490 		  if (gimple_call_noreturn_p (stmt)
5491 		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
5492 		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
5493 		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
5494 			  == void_type_node))
5495 		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
5496 		  /* If the call becomes noreturn, remove the lhs.  */
5497 		  if (lhs
5498 		      && gimple_call_noreturn_p (stmt)
5499 		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
5500 			  || should_remove_lhs_p (lhs)))
5501 		    {
5502 		      if (TREE_CODE (lhs) == SSA_NAME)
5503 			{
5504 			  tree var = create_tmp_var (TREE_TYPE (lhs));
5505 			  tree def = get_or_create_ssa_default_def (cfun, var);
5506 			  gimple *new_stmt = gimple_build_assign (lhs, def);
5507 			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
5508 			}
5509 		      gimple_call_set_lhs (stmt, NULL_TREE);
5510 		    }
5511 		  maybe_remove_unused_call_args (cfun, stmt);
5512 		}
5513 	      else
5514 		{
5515 		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5516 		  gimple *new_stmt = gimple_build_call (fndecl, 0);
5517 		  gimple_set_location (new_stmt, gimple_location (stmt));
5518 		  /* If the call had an SSA name as its lhs, morph that
5519 		     into an uninitialized value.  */
5520 		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
5521 		    {
5522 		      tree var = create_tmp_var (TREE_TYPE (lhs));
5523 		      SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
5524 		      SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
5525 		      set_ssa_default_def (cfun, var, lhs);
5526 		    }
5527 		  gimple_move_vops (new_stmt, stmt);
5528 		  gsi_replace (gsi, new_stmt, false);
5529 		  return true;
5530 		}
5531 	    }
5532 	}
5533     }
5534 
5535   /* Check for indirect calls that became direct calls, and then
5536      no longer require a static chain.  */
5537   if (gimple_call_chain (stmt))
5538     {
5539       tree fn = gimple_call_fndecl (stmt);
5540       if (fn && !DECL_STATIC_CHAIN (fn))
5541 	{
5542 	  gimple_call_set_chain (stmt, NULL);
5543 	  changed = true;
5544 	}
5545     }
5546 
5547   if (inplace)
5548     return changed;
5549 
5550   /* Check for builtins that CCP can handle using information not
5551      available in the generic fold routines.  */
5552   if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
5553     {
5554       if (gimple_fold_builtin (gsi))
5555         changed = true;
5556     }
5557   else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
5558     {
5559 	changed |= targetm.gimple_fold_builtin (gsi);
5560     }
5561   else if (gimple_call_internal_p (stmt))
5562     {
5563       enum tree_code subcode = ERROR_MARK;
5564       tree result = NULL_TREE;
5565       bool cplx_result = false;
5566       tree overflow = NULL_TREE;
5567       switch (gimple_call_internal_fn (stmt))
5568 	{
5569 	case IFN_BUILTIN_EXPECT:
5570 	  result = fold_builtin_expect (gimple_location (stmt),
5571 					gimple_call_arg (stmt, 0),
5572 					gimple_call_arg (stmt, 1),
5573 					gimple_call_arg (stmt, 2),
5574 					NULL_TREE);
5575 	  break;
5576 	case IFN_UBSAN_OBJECT_SIZE:
5577 	  {
5578 	    tree offset = gimple_call_arg (stmt, 1);
5579 	    tree objsize = gimple_call_arg (stmt, 2);
5580 	    if (integer_all_onesp (objsize)
5581 		|| (TREE_CODE (offset) == INTEGER_CST
5582 		    && TREE_CODE (objsize) == INTEGER_CST
5583 		    && tree_int_cst_le (offset, objsize)))
5584 	      {
5585 		replace_call_with_value (gsi, NULL_TREE);
5586 		return true;
5587 	      }
5588 	  }
5589 	  break;
5590 	case IFN_UBSAN_PTR:
5591 	  if (integer_zerop (gimple_call_arg (stmt, 1)))
5592 	    {
5593 	      replace_call_with_value (gsi, NULL_TREE);
5594 	      return true;
5595 	    }
5596 	  break;
5597 	case IFN_UBSAN_BOUNDS:
5598 	  {
5599 	    tree index = gimple_call_arg (stmt, 1);
5600 	    tree bound = gimple_call_arg (stmt, 2);
5601 	    if (TREE_CODE (index) == INTEGER_CST
5602 		&& TREE_CODE (bound) == INTEGER_CST)
5603 	      {
5604 		index = fold_convert (TREE_TYPE (bound), index);
5605 		if (TREE_CODE (index) == INTEGER_CST
5606 		    && tree_int_cst_le (index, bound))
5607 		  {
5608 		    replace_call_with_value (gsi, NULL_TREE);
5609 		    return true;
5610 		  }
5611 	      }
5612 	  }
5613 	  break;
5614 	case IFN_GOACC_DIM_SIZE:
5615 	case IFN_GOACC_DIM_POS:
5616 	  result = fold_internal_goacc_dim (stmt);
5617 	  break;
5618 	case IFN_UBSAN_CHECK_ADD:
5619 	  subcode = PLUS_EXPR;
5620 	  break;
5621 	case IFN_UBSAN_CHECK_SUB:
5622 	  subcode = MINUS_EXPR;
5623 	  break;
5624 	case IFN_UBSAN_CHECK_MUL:
5625 	  subcode = MULT_EXPR;
5626 	  break;
5627 	case IFN_ADD_OVERFLOW:
5628 	  subcode = PLUS_EXPR;
5629 	  cplx_result = true;
5630 	  break;
5631 	case IFN_SUB_OVERFLOW:
5632 	  subcode = MINUS_EXPR;
5633 	  cplx_result = true;
5634 	  break;
5635 	case IFN_MUL_OVERFLOW:
5636 	  subcode = MULT_EXPR;
5637 	  cplx_result = true;
5638 	  break;
5639 	case IFN_MASK_LOAD:
5640 	  changed |= gimple_fold_mask_load (gsi, stmt);
5641 	  break;
5642 	case IFN_MASK_STORE:
5643 	  changed |= gimple_fold_mask_store (gsi, stmt);
5644 	  break;
5645 	default:
5646 	  break;
5647 	}
5648       if (subcode != ERROR_MARK)
5649 	{
5650 	  tree arg0 = gimple_call_arg (stmt, 0);
5651 	  tree arg1 = gimple_call_arg (stmt, 1);
5652 	  tree type = TREE_TYPE (arg0);
5653 	  if (cplx_result)
5654 	    {
5655 	      tree lhs = gimple_call_lhs (stmt);
5656 	      if (lhs == NULL_TREE)
5657 		type = NULL_TREE;
5658 	      else
5659 		type = TREE_TYPE (TREE_TYPE (lhs));
5660 	    }
5661 	  if (type == NULL_TREE)
5662 	    ;
5663 	  /* x = y + 0; x = y - 0; x = y * 0; */
5664 	  else if (integer_zerop (arg1))
5665 	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
5666 	  /* x = 0 + y; x = 0 * y; */
5667 	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
5668 	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
5669 	  /* x = y - y; */
5670 	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
5671 	    result = integer_zero_node;
5672 	  /* x = y * 1; x = 1 * y; */
5673 	  else if (subcode == MULT_EXPR && integer_onep (arg1))
5674 	    result = arg0;
5675 	  else if (subcode == MULT_EXPR && integer_onep (arg0))
5676 	    result = arg1;
5677 	  else if (TREE_CODE (arg0) == INTEGER_CST
5678 		   && TREE_CODE (arg1) == INTEGER_CST)
5679 	    {
5680 	      if (cplx_result)
5681 		result = int_const_binop (subcode, fold_convert (type, arg0),
5682 					  fold_convert (type, arg1));
5683 	      else
5684 		result = int_const_binop (subcode, arg0, arg1);
5685 	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
5686 		{
5687 		  if (cplx_result)
5688 		    overflow = build_one_cst (type);
5689 		  else
5690 		    result = NULL_TREE;
5691 		}
5692 	    }
5693 	  if (result)
5694 	    {
5695 	      if (result == integer_zero_node)
5696 		result = build_zero_cst (type);
5697 	      else if (cplx_result && TREE_TYPE (result) != type)
5698 		{
5699 		  if (TREE_CODE (result) == INTEGER_CST)
5700 		    {
5701 		      if (arith_overflowed_p (PLUS_EXPR, type, result,
5702 					      integer_zero_node))
5703 			overflow = build_one_cst (type);
5704 		    }
5705 		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
5706 			    && TYPE_UNSIGNED (type))
5707 			   || (TYPE_PRECISION (type)
5708 			       < (TYPE_PRECISION (TREE_TYPE (result))
5709 				  + (TYPE_UNSIGNED (TREE_TYPE (result))
5710 				     && !TYPE_UNSIGNED (type)))))
5711 		    result = NULL_TREE;
5712 		  if (result)
5713 		    result = fold_convert (type, result);
5714 		}
5715 	    }
5716 	}
5717 
5718       if (result)
5719 	{
5720 	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
5721 	    result = drop_tree_overflow (result);
5722 	  if (cplx_result)
5723 	    {
5724 	      if (overflow == NULL_TREE)
5725 		overflow = build_zero_cst (TREE_TYPE (result));
5726 	      tree ctype = build_complex_type (TREE_TYPE (result));
5727 	      if (TREE_CODE (result) == INTEGER_CST
5728 		  && TREE_CODE (overflow) == INTEGER_CST)
5729 		result = build_complex (ctype, result, overflow);
5730 	      else
5731 		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
5732 				     ctype, result, overflow);
5733 	    }
5734 	  gimplify_and_update_call_from_tree (gsi, result);
5735 	  changed = true;
5736 	}
5737     }
5738 
5739   return changed;
5740 }
5741 
5742 
5743 /* Return true if NAME has a use on STMT.  */
5744 
5745 static bool
5746 has_use_on_stmt (tree name, gimple *stmt)
5747 {
5748   imm_use_iterator iter;
5749   use_operand_p use_p;
5750   FOR_EACH_IMM_USE_FAST (use_p, iter, name)
5751     if (USE_STMT (use_p) == stmt)
5752       return true;
5753   return false;
5754 }
5755 
5756 /* Worker for fold_stmt_1: dispatch to pattern-based folding with
5757    gimple_simplify.
5758 
5759    Replaces *GSI with the simplification result in RES_OP and
5760    the associated statements in *SEQ.  Does the replacement
5761    according to INPLACE and returns true if the operation succeeded.  */
5762 
5763 static bool
5764 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
5765 				  gimple_match_op *res_op,
5766 				  gimple_seq *seq, bool inplace)
5767 {
5768   gimple *stmt = gsi_stmt (*gsi);
5769   tree *ops = res_op->ops;
5770   unsigned int num_ops = res_op->num_ops;
5771 
5772   /* Play safe and do not allow abnormals to be mentioned in
5773      newly created statements.  See also maybe_push_res_to_seq.
5774      As an exception allow such uses if there was a use of the
5775      same SSA name on the old stmt.  */
5776   for (unsigned int i = 0; i < num_ops; ++i)
5777     if (TREE_CODE (ops[i]) == SSA_NAME
5778 	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
5779 	&& !has_use_on_stmt (ops[i], stmt))
5780       return false;
5781 
5782   if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
5783     for (unsigned int i = 0; i < 2; ++i)
5784       if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
5785 	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
5786 	  && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
5787 	return false;
5788 
5789   /* Don't insert new statements when INPLACE is true, even if we could
5790      reuse STMT for the final statement.  */
5791   if (inplace && !gimple_seq_empty_p (*seq))
5792     return false;
5793 
5794   if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
5795     {
5796       gcc_assert (res_op->code.is_tree_code ());
5797       auto code = tree_code (res_op->code);
5798       if (TREE_CODE_CLASS (code) == tcc_comparison
5799 	  /* GIMPLE_CONDs condition may not throw.  */
5800 	  && (!flag_exceptions
5801 	      || !cfun->can_throw_non_call_exceptions
5802 	      || !operation_could_trap_p (code,
5803 					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
5804 					  false, NULL_TREE)))
5805 	gimple_cond_set_condition (cond_stmt, code, ops[0], ops[1]);
5806       else if (code == SSA_NAME)
5807 	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
5808 				   build_zero_cst (TREE_TYPE (ops[0])));
5809       else if (code == INTEGER_CST)
5810 	{
5811 	  if (integer_zerop (ops[0]))
5812 	    gimple_cond_make_false (cond_stmt);
5813 	  else
5814 	    gimple_cond_make_true (cond_stmt);
5815 	}
5816       else if (!inplace)
5817 	{
5818 	  tree res = maybe_push_res_to_seq (res_op, seq);
5819 	  if (!res)
5820 	    return false;
5821 	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
5822 				     build_zero_cst (TREE_TYPE (res)));
5823 	}
5824       else
5825 	return false;
5826       if (dump_file && (dump_flags & TDF_DETAILS))
5827 	{
5828 	  fprintf (dump_file, "gimple_simplified to ");
5829 	  if (!gimple_seq_empty_p (*seq))
5830 	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5831 	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
5832 			     0, TDF_SLIM);
5833 	}
5834       gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5835       return true;
5836     }
5837   else if (is_gimple_assign (stmt)
5838 	   && res_op->code.is_tree_code ())
5839     {
5840       auto code = tree_code (res_op->code);
5841       if (!inplace
5842 	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (code))
5843 	{
5844 	  maybe_build_generic_op (res_op);
5845 	  gimple_assign_set_rhs_with_ops (gsi, code,
5846 					  res_op->op_or_null (0),
5847 					  res_op->op_or_null (1),
5848 					  res_op->op_or_null (2));
5849 	  if (dump_file && (dump_flags & TDF_DETAILS))
5850 	    {
5851 	      fprintf (dump_file, "gimple_simplified to ");
5852 	      if (!gimple_seq_empty_p (*seq))
5853 		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5854 	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
5855 				 0, TDF_SLIM);
5856 	    }
5857 	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5858 	  return true;
5859 	}
5860     }
5861   else if (res_op->code.is_fn_code ()
5862 	   && gimple_call_combined_fn (stmt) == combined_fn (res_op->code))
5863     {
5864       gcc_assert (num_ops == gimple_call_num_args (stmt));
5865       for (unsigned int i = 0; i < num_ops; ++i)
5866 	gimple_call_set_arg (stmt, i, ops[i]);
5867       if (dump_file && (dump_flags & TDF_DETAILS))
5868 	{
5869 	  fprintf (dump_file, "gimple_simplified to ");
5870 	  if (!gimple_seq_empty_p (*seq))
5871 	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5872 	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
5873 	}
5874       gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5875       return true;
5876     }
5877   else if (!inplace)
5878     {
5879       if (gimple_has_lhs (stmt))
5880 	{
5881 	  tree lhs = gimple_get_lhs (stmt);
5882 	  if (!maybe_push_res_to_seq (res_op, seq, lhs))
5883 	    return false;
5884 	  if (dump_file && (dump_flags & TDF_DETAILS))
5885 	    {
5886 	      fprintf (dump_file, "gimple_simplified to ");
5887 	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5888 	    }
5889 	  gsi_replace_with_seq_vops (gsi, *seq);
5890 	  return true;
5891 	}
5892       else
5893 	gcc_unreachable ();
5894     }
5895 
5896   return false;
5897 }
5898 
5899 /* Canonicalize a MEM_REF's invariant address operand after propagation.  */
5900 
5901 static bool
5902 maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
5903 {
5904   bool res = false;
5905   tree *orig_t = t;
5906 
5907   if (TREE_CODE (*t) == ADDR_EXPR)
5908     t = &TREE_OPERAND (*t, 0);
5909 
5910   /* The C and C++ frontends use an ARRAY_REF for indexing with their
5911      generic vector extension.  The actual vector referenced is
5912      view-converted to an array type for this purpose.  If the index
5913      is constant the canonical representation in the middle-end is a
5914      BIT_FIELD_REF so re-write the former to the latter here.  */
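  /* E.g. with  typedef int v4si __attribute__((vector_size (16)));
     v4si v;  the access  v[2]  arrives here as
     VIEW_CONVERT_EXPR<int[4]>(v)[2] and is rewritten below to
     BIT_FIELD_REF <v, 32, 64>, a 32-bit piece at bit offset 2 * 32.
     (Illustrative sketch.)  */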
5915   if (TREE_CODE (*t) == ARRAY_REF
5916       && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
5917       && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
5918       && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
5919     {
5920       tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
5921       if (VECTOR_TYPE_P (vtype))
5922 	{
5923 	  tree low = array_ref_low_bound (*t);
5924 	  if (TREE_CODE (low) == INTEGER_CST)
5925 	    {
5926 	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
5927 		{
5928 		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
5929 					    wi::to_widest (low));
5930 		  idx = wi::mul (idx, wi::to_widest
5931 					 (TYPE_SIZE (TREE_TYPE (*t))));
5932 		  widest_int ext
5933 		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
5934 		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
5935 		    {
5936 		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
5937 				       TREE_TYPE (*t),
5938 				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
5939 				       TYPE_SIZE (TREE_TYPE (*t)),
5940 				       wide_int_to_tree (bitsizetype, idx));
5941 		      res = true;
5942 		    }
5943 		}
5944 	    }
5945 	}
5946     }
5947 
5948   while (handled_component_p (*t))
5949     t = &TREE_OPERAND (*t, 0);
5950 
5951   /* Canonicalize MEM [&foo.bar, 0], which appears after propagation
5952      of invariant addresses into an SSA name MEM_REF address.  */
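  /* E.g. MEM[&a.b.c, 8] is rewritten here to MEM[&a, byte-offset-of
     (b.c) + 8], with get_addr_base_and_unit_offset recovering the base
     and the constant offset.  (Illustrative sketch.)  */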
5953   if (TREE_CODE (*t) == MEM_REF
5954       || TREE_CODE (*t) == TARGET_MEM_REF)
5955     {
5956       tree addr = TREE_OPERAND (*t, 0);
5957       if (TREE_CODE (addr) == ADDR_EXPR
5958 	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
5959 	      || handled_component_p (TREE_OPERAND (addr, 0))))
5960 	{
5961 	  tree base;
5962 	  poly_int64 coffset;
5963 	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
5964 						&coffset);
5965 	  if (!base)
5966 	    {
5967 	      if (is_debug)
5968 		return false;
5969 	      gcc_unreachable ();
5970 	    }
5971 
5972 	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
5973 	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
5974 						  TREE_OPERAND (*t, 1),
5975 						  size_int (coffset));
5976 	  res = true;
5977 	}
5978       gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
5979 			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
5980     }
5981 
5982   /* Canonicalize back MEM_REFs to plain reference trees if the object
5983      accessed is a decl that has the same access semantics as the MEM_REF.  */
5984   if (TREE_CODE (*t) == MEM_REF
5985       && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
5986       && integer_zerop (TREE_OPERAND (*t, 1))
5987       && MR_DEPENDENCE_CLIQUE (*t) == 0)
5988     {
5989       tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
5990       tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
5991       if (/* Same volatile qualification.  */
5992 	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
5993 	  /* Same TBAA behavior with -fstrict-aliasing.  */
5994 	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
5995 	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
5996 	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
5997 	  /* Same alignment.  */
5998 	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
5999 	  /* We have to be careful here not to drop a required conversion
6000 	     from the rhs to the lhs if *t appears on the lhs, or vice versa
6001 	     if it appears on the rhs.  Thus require strict type
6002 	     compatibility.  */
6003 	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
6004 	{
6005 	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
6006 	  res = true;
6007 	}
6008     }
6009 
6010   else if (TREE_CODE (*orig_t) == ADDR_EXPR
6011 	   && TREE_CODE (*t) == MEM_REF
6012 	   && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
6013     {
6014       tree base;
6015       poly_int64 coffset;
6016       base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
6017 					    &coffset);
6018       if (base)
6019 	{
6020 	  gcc_assert (TREE_CODE (base) == MEM_REF);
6021 	  poly_int64 moffset;
6022 	  if (mem_ref_offset (base).to_shwi (&moffset))
6023 	    {
6024 	      coffset += moffset;
6025 	      if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
6026 		{
6027 		  coffset += moffset;
6028 		  *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
6029 		  return true;
6030 		}
6031 	    }
6032 	}
6033     }
6034 
6035   /* Canonicalize TARGET_MEM_REF in particular with respect to
6036      the indexes becoming constant.  */
6037   else if (TREE_CODE (*t) == TARGET_MEM_REF)
6038     {
6039       tree tem = maybe_fold_tmr (*t);
6040       if (tem)
6041 	{
6042 	  *t = tem;
6043 	  if (TREE_CODE (*orig_t) == ADDR_EXPR)
6044 	    recompute_tree_invariant_for_addr_expr (*orig_t);
6045 	  res = true;
6046 	}
6047     }
6048 
6049   return res;
6050 }
6051 
6052 /* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
6053    distinguishes both cases.  */
6054 
6055 static bool
6056 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
6057 {
6058   bool changed = false;
6059   gimple *stmt = gsi_stmt (*gsi);
6060   bool nowarning = warning_suppressed_p (stmt, OPT_Wstrict_overflow);
6061   unsigned i;
6062   fold_defer_overflow_warnings ();
6063 
6064   /* First do required canonicalization of [TARGET_]MEM_REF addresses
6065      after propagation.
6066      ???  This shouldn't be done in generic folding but in the
6067      propagation helpers which also know whether an address was
6068      propagated.
6069      Also canonicalize operand order.  */
6070   switch (gimple_code (stmt))
6071     {
6072     case GIMPLE_ASSIGN:
6073       if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
6074 	{
6075 	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
6076 	  if ((REFERENCE_CLASS_P (*rhs)
6077 	       || TREE_CODE (*rhs) == ADDR_EXPR)
6078 	      && maybe_canonicalize_mem_ref_addr (rhs))
6079 	    changed = true;
6080 	  tree *lhs = gimple_assign_lhs_ptr (stmt);
6081 	  if (REFERENCE_CLASS_P (*lhs)
6082 	      && maybe_canonicalize_mem_ref_addr (lhs))
6083 	    changed = true;
6084 	  /* Canonicalize &MEM[ssa_n, CST] to ssa_n p+ CST.
6085 	     This cannot be done in maybe_canonicalize_mem_ref_addr
6086 	     because the resulting gimple has two operands rather than
6087 	     one, which is also why it cannot be done inplace.  */
6090 	  if (!inplace && TREE_CODE (*rhs) == ADDR_EXPR)
6091 	    {
6092 	      tree inner = TREE_OPERAND (*rhs, 0);
6093 	      if (TREE_CODE (inner) == MEM_REF
6094 		  && TREE_CODE (TREE_OPERAND (inner, 0)) == SSA_NAME
6095 		  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6096 		{
6097 		  tree ptr = TREE_OPERAND (inner, 0);
6098 		  tree addon = TREE_OPERAND (inner, 1);
6099 		  addon = fold_convert (sizetype, addon);
6100 		  gimple_assign_set_rhs_with_ops (gsi, POINTER_PLUS_EXPR,
6101 						  ptr, addon);
6102 		  changed = true;
6103 		  stmt = gsi_stmt (*gsi);
6104 		}
6105 	    }
6106 	}
6107       else
6108 	{
6109 	  /* Canonicalize operand order.  */
6110 	  enum tree_code code = gimple_assign_rhs_code (stmt);
6111 	  if (TREE_CODE_CLASS (code) == tcc_comparison
6112 	      || commutative_tree_code (code)
6113 	      || commutative_ternary_tree_code (code))
6114 	    {
6115 	      tree rhs1 = gimple_assign_rhs1 (stmt);
6116 	      tree rhs2 = gimple_assign_rhs2 (stmt);
6117 	      if (tree_swap_operands_p (rhs1, rhs2))
6118 		{
6119 		  gimple_assign_set_rhs1 (stmt, rhs2);
6120 		  gimple_assign_set_rhs2 (stmt, rhs1);
6121 		  if (TREE_CODE_CLASS (code) == tcc_comparison)
6122 		    gimple_assign_set_rhs_code (stmt,
6123 						swap_tree_comparison (code));
6124 		  changed = true;
6125 		}
6126 	    }
6127 	}
6128       break;
6129     case GIMPLE_CALL:
6130       {
6131 	gcall *call = as_a<gcall *> (stmt);
6132 	for (i = 0; i < gimple_call_num_args (call); ++i)
6133 	  {
6134 	    tree *arg = gimple_call_arg_ptr (call, i);
6135 	    if (REFERENCE_CLASS_P (*arg)
6136 		&& maybe_canonicalize_mem_ref_addr (arg))
6137 	      changed = true;
6138 	  }
6139 	tree *lhs = gimple_call_lhs_ptr (call);
6140 	if (*lhs
6141 	    && REFERENCE_CLASS_P (*lhs)
6142 	    && maybe_canonicalize_mem_ref_addr (lhs))
6143 	  changed = true;
6144 	if (*lhs)
6145 	  {
6146 	    combined_fn cfn = gimple_call_combined_fn (call);
6147 	    internal_fn ifn = associated_internal_fn (cfn, TREE_TYPE (*lhs));
6148 	    int opno = first_commutative_argument (ifn);
6149 	    if (opno >= 0)
6150 	      {
6151 		tree arg1 = gimple_call_arg (call, opno);
6152 		tree arg2 = gimple_call_arg (call, opno + 1);
6153 		if (tree_swap_operands_p (arg1, arg2))
6154 		  {
6155 		    gimple_call_set_arg (call, opno, arg2);
6156 		    gimple_call_set_arg (call, opno + 1, arg1);
6157 		    changed = true;
6158 		  }
6159 	      }
6160 	  }
6161 	break;
6162       }
6163     case GIMPLE_ASM:
6164       {
6165 	gasm *asm_stmt = as_a <gasm *> (stmt);
6166 	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
6167 	  {
6168 	    tree link = gimple_asm_output_op (asm_stmt, i);
6169 	    tree op = TREE_VALUE (link);
6170 	    if (REFERENCE_CLASS_P (op)
6171 		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
6172 	      changed = true;
6173 	  }
6174 	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
6175 	  {
6176 	    tree link = gimple_asm_input_op (asm_stmt, i);
6177 	    tree op = TREE_VALUE (link);
6178 	    if ((REFERENCE_CLASS_P (op)
6179 		 || TREE_CODE (op) == ADDR_EXPR)
6180 		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
6181 	      changed = true;
6182 	  }
6183       }
6184       break;
6185     case GIMPLE_DEBUG:
6186       if (gimple_debug_bind_p (stmt))
6187 	{
6188 	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
6189 	  if (*val
6190 	      && (REFERENCE_CLASS_P (*val)
6191 		  || TREE_CODE (*val) == ADDR_EXPR)
6192 	      && maybe_canonicalize_mem_ref_addr (val, true))
6193 	    changed = true;
6194 	}
6195       break;
6196     case GIMPLE_COND:
6197       {
6198 	/* Canonicalize operand order.  */
6199 	tree lhs = gimple_cond_lhs (stmt);
6200 	tree rhs = gimple_cond_rhs (stmt);
6201 	if (tree_swap_operands_p (lhs, rhs))
6202 	  {
6203 	    gcond *gc = as_a <gcond *> (stmt);
6204 	    gimple_cond_set_lhs (gc, rhs);
6205 	    gimple_cond_set_rhs (gc, lhs);
6206 	    gimple_cond_set_code (gc,
6207 				  swap_tree_comparison (gimple_cond_code (gc)));
6208 	    changed = true;
6209 	  }
6210       }
6211     default:;
6212     }
6213 
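  /* As an illustration of the canonicalizations above (example not in
     the original source): a condition written as "if (1 < x_2)" has its
     operands swapped and its code adjusted via swap_tree_comparison,
     yielding "if (x_2 > 1)", so the pattern-based folding below only
     has to consider one operand order.  */
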
6214   /* Dispatch to pattern-based folding.  */
6215   if (!inplace
6216       || is_gimple_assign (stmt)
6217       || gimple_code (stmt) == GIMPLE_COND)
6218     {
6219       gimple_seq seq = NULL;
6220       gimple_match_op res_op;
6221       if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
6222 			   valueize, valueize))
6223 	{
6224 	  if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
6225 	    changed = true;
6226 	  else
6227 	    gimple_seq_discard (seq);
6228 	}
6229     }
6230 
6231   stmt = gsi_stmt (*gsi);
6232 
6233   /* Fold the main computation performed by the statement.  */
6234   switch (gimple_code (stmt))
6235     {
6236     case GIMPLE_ASSIGN:
6237       {
6238 	/* Try to canonicalize for boolean-typed X the comparisons
6239 	   X == 0, X == 1, X != 0, and X != 1.  */
6240 	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
6241 	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
6242 	  {
6243 	    tree lhs = gimple_assign_lhs (stmt);
6244 	    tree op1 = gimple_assign_rhs1 (stmt);
6245 	    tree op2 = gimple_assign_rhs2 (stmt);
6246 	    tree type = TREE_TYPE (op1);
6247 
6248 	    /* Check whether the comparison operands are of the same boolean
6249 	       type as the result type.
6250 	       Check that the second operand is an integer constant with
6251 	       value one or zero.  */
6252 	    if (TREE_CODE (op2) == INTEGER_CST
6253 		&& (integer_zerop (op2) || integer_onep (op2))
6254 		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
6255 	      {
6256 		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
6257 		bool is_logical_not = false;
6258 
6259 		/* X == 0 and X != 1 is a logical-not of X;
6260 		   X == 1 and X != 0 is X.  */
6261 		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
6262 		    || (cmp_code == NE_EXPR && integer_onep (op2)))
6263 		  is_logical_not = true;
6264 
6265 		if (is_logical_not == false)
6266 		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
6267 		/* Only for X of one-bit precision is the transformation
6268 		   !X -> ~X valid.  */
6269 		else if (TYPE_PRECISION (type) == 1)
6270 		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
6271 		/* Otherwise we use !X -> X ^ 1.  */
6272 		else
6273 		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
6274 						  build_int_cst (type, 1));
6275 		changed = true;
6276 		break;
6277 	      }
6278 	  }
6279 
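	/* For illustration (example not in the original source): with a
	   boolean-typed x_1, "tem_2 = x_1 != 0" becomes "tem_2 = x_1",
	   while "tem_2 = x_1 == 0" becomes "tem_2 = ~x_1" for one-bit
	   precision types and "tem_2 = x_1 ^ 1" otherwise.  */
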
6280 	unsigned old_num_ops = gimple_num_ops (stmt);
6281 	tree lhs = gimple_assign_lhs (stmt);
6282 	tree new_rhs = fold_gimple_assign (gsi);
6283 	if (new_rhs
6284 	    && !useless_type_conversion_p (TREE_TYPE (lhs),
6285 					   TREE_TYPE (new_rhs)))
6286 	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
6287 	if (new_rhs
6288 	    && (!inplace
6289 		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
6290 	  {
6291 	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
6292 	    changed = true;
6293 	  }
6294 	break;
6295       }
6296 
6297     case GIMPLE_CALL:
6298       changed |= gimple_fold_call (gsi, inplace);
6299       break;
6300 
6301     case GIMPLE_DEBUG:
6302       if (gimple_debug_bind_p (stmt))
6303 	{
6304 	  tree val = gimple_debug_bind_get_value (stmt);
6305 	  if (val && REFERENCE_CLASS_P (val))
6306 	    {
6307 	      tree tem = maybe_fold_reference (val);
6308 	      if (tem)
6309 		{
6310 		  gimple_debug_bind_set_value (stmt, tem);
6311 		  changed = true;
6312 		}
6313 	    }
6314 	}
6315       break;
6316 
6317     case GIMPLE_RETURN:
6318       {
6319 	greturn *ret_stmt = as_a <greturn *> (stmt);
6320 	tree ret = gimple_return_retval (ret_stmt);
6321 
6322 	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
6323 	  {
6324 	    tree val = valueize (ret);
6325 	    if (val && val != ret
6326 		&& may_propagate_copy (ret, val))
6327 	      {
6328 		gimple_return_set_retval (ret_stmt, val);
6329 		changed = true;
6330 	      }
6331 	  }
6332       }
6333       break;
6334 
6335     default:;
6336     }
6337 
6338   stmt = gsi_stmt (*gsi);
6339 
6340   fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
6341   return changed;
6342 }
6343 
6344 /* Valueization callback that ends up not following SSA edges.  */
6345 
6346 tree
6347 no_follow_ssa_edges (tree)
6348 {
6349   return NULL_TREE;
6350 }
6351 
6352 /* Valueization callback that ends up following single-use SSA edges only.  */
6353 
6354 tree
6355 follow_single_use_edges (tree val)
6356 {
6357   if (TREE_CODE (val) == SSA_NAME
6358       && !has_single_use (val))
6359     return NULL_TREE;
6360   return val;
6361 }
6362 
6363 /* Valueization callback that follows all SSA edges.  */
6364 
6365 tree
6366 follow_all_ssa_edges (tree val)
6367 {
6368   return val;
6369 }
6370 
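/* Note (summary added for illustration): these callbacks are used as
   the VALUEIZE argument of gimple_simplify; returning NULL_TREE for an
   SSA name stops the simplifier from looking through that name's
   definition.  fold_stmt below uses no_follow_ssa_edges to fold each
   statement in isolation, while callers that can handle
   multi-statement matches pass follow_all_ssa_edges.  */
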
6371 /* Fold the statement pointed to by GSI.  In some cases, this function may
6372    replace the whole statement with a new one.  Returns true iff folding
6373    makes any changes.
6374    The statement pointed to by GSI should be in valid gimple form but may
6375    be in an unfolded state resulting from, for example, constant
6376    propagation, which can produce *&x = 0.  */
6377 
6378 bool
6379 fold_stmt (gimple_stmt_iterator *gsi)
6380 {
6381   return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
6382 }
6383 
6384 bool
6385 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
6386 {
6387   return fold_stmt_1 (gsi, false, valueize);
6388 }
6389 
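/* A minimal usage sketch (illustrative, not part of this file): a pass
   would typically fold all statements of a basic block BB with

     for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	  !gsi_end_p (gsi); gsi_next (&gsi))
       fold_stmt (&gsi);

   relying on fold_stmt to keep the iterator valid when it replaces the
   statement it points to.  */
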
6390 /* Perform the minimal folding on statement *GSI.  Only operations like
6391    *&x created by constant propagation are handled.  The statement cannot
6392    be replaced with a new one.  Return true if the statement was
6393    changed, false otherwise.
6394    The statement *GSI should be in valid gimple form but may
6395    be in an unfolded state resulting from, for example, constant
6396    propagation, which can produce *&x = 0.  */
6397 
6398 bool
6399 fold_stmt_inplace (gimple_stmt_iterator *gsi)
6400 {
6401   gimple *stmt = gsi_stmt (*gsi);
6402   bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
6403   gcc_assert (gsi_stmt (*gsi) == stmt);
6404   return changed;
6405 }
6406 
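/* For example (illustrative): after constant propagation replaces p_1
   with &a in "*p_1 = 0", the statement reads "*&a = 0" and
   fold_stmt_inplace rewrites it to "a = 0" without allocating a new
   statement.  */
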
6407 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
6408    if EXPR is null or we don't know how.
6409    If non-null, the result always has boolean type.  */
6410 
6411 static tree
6412 canonicalize_bool (tree expr, bool invert)
6413 {
6414   if (!expr)
6415     return NULL_TREE;
6416   else if (invert)
6417     {
6418       if (integer_nonzerop (expr))
6419 	return boolean_false_node;
6420       else if (integer_zerop (expr))
6421 	return boolean_true_node;
6422       else if (TREE_CODE (expr) == SSA_NAME)
6423 	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
6424 			    build_int_cst (TREE_TYPE (expr), 0));
6425       else if (COMPARISON_CLASS_P (expr))
6426 	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
6427 			    boolean_type_node,
6428 			    TREE_OPERAND (expr, 0),
6429 			    TREE_OPERAND (expr, 1));
6430       else
6431 	return NULL_TREE;
6432     }
6433   else
6434     {
6435       if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6436 	return expr;
6437       if (integer_nonzerop (expr))
6438 	return boolean_true_node;
6439       else if (integer_zerop (expr))
6440 	return boolean_false_node;
6441       else if (TREE_CODE (expr) == SSA_NAME)
6442 	return fold_build2 (NE_EXPR, boolean_type_node, expr,
6443 			    build_int_cst (TREE_TYPE (expr), 0));
6444       else if (COMPARISON_CLASS_P (expr))
6445 	return fold_build2 (TREE_CODE (expr),
6446 			    boolean_type_node,
6447 			    TREE_OPERAND (expr, 0),
6448 			    TREE_OPERAND (expr, 1));
6449       else
6450 	return NULL_TREE;
6451     }
6452 }
6453 
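/* Illustrative examples (not from the original source):
   canonicalize_bool (a < b, true) builds the inverted comparison
   a >= b; canonicalize_bool (x_1, false) for a non-boolean x_1 builds
   x_1 != 0; and an expression that already has BOOLEAN_TYPE is
   returned unchanged when INVERT is false.  */
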
6454 /* Check to see if a boolean expression EXPR is logically equivalent to the
6455    comparison (OP1 CODE OP2).  Check for various identities involving
6456    SSA_NAMEs.  */
6457 
6458 static bool
6459 same_bool_comparison_p (const_tree expr, enum tree_code code,
6460 			const_tree op1, const_tree op2)
6461 {
6462   gimple *s;
6463 
6464   /* The obvious case.  */
6465   if (TREE_CODE (expr) == code
6466       && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
6467       && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
6468     return true;
6469 
6470   /* Check for comparing (name, name != 0) and the case where expr
6471      is an SSA_NAME with a definition matching the comparison.  */
6472   if (TREE_CODE (expr) == SSA_NAME
6473       && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6474     {
6475       if (operand_equal_p (expr, op1, 0))
6476 	return ((code == NE_EXPR && integer_zerop (op2))
6477 		|| (code == EQ_EXPR && integer_nonzerop (op2)));
6478       s = SSA_NAME_DEF_STMT (expr);
6479       if (is_gimple_assign (s)
6480 	  && gimple_assign_rhs_code (s) == code
6481 	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
6482 	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
6483 	return true;
6484     }
6485 
6486   /* If op1 is of the form (name != 0) or (name == 0), and the definition
6487      of name is a comparison, recurse.  */
6488   if (TREE_CODE (op1) == SSA_NAME
6489       && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
6490     {
6491       s = SSA_NAME_DEF_STMT (op1);
6492       if (is_gimple_assign (s)
6493 	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
6494 	{
6495 	  enum tree_code c = gimple_assign_rhs_code (s);
6496 	  if ((c == NE_EXPR && integer_zerop (op2))
6497 	      || (c == EQ_EXPR && integer_nonzerop (op2)))
6498 	    return same_bool_comparison_p (expr, c,
6499 					   gimple_assign_rhs1 (s),
6500 					   gimple_assign_rhs2 (s));
6501 	  if ((c == EQ_EXPR && integer_zerop (op2))
6502 	      || (c == NE_EXPR && integer_nonzerop (op2)))
6503 	    return same_bool_comparison_p (expr,
6504 					   invert_tree_comparison (c, false),
6505 					   gimple_assign_rhs1 (s),
6506 					   gimple_assign_rhs2 (s));
6507 	}
6508     }
6509   return false;
6510 }
6511 
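/* For instance (illustrative): if t_1 is boolean and defined by
   "t_1 = a_2 < b_3", then same_bool_comparison_p (t_1, LT_EXPR, a_2,
   b_3) returns true via the SSA_NAME_DEF_STMT check above.  */
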
6512 /* Check to see if two boolean expressions OP1 and OP2 are logically
6513    equivalent.  */
6514 
6515 static bool
6516 same_bool_result_p (const_tree op1, const_tree op2)
6517 {
6518   /* Simple cases first.  */
6519   if (operand_equal_p (op1, op2, 0))
6520     return true;
6521 
6522   /* Check the cases where at least one of the operands is a comparison.
6523      These are a bit smarter than operand_equal_p in that they apply some
6524      identities on SSA_NAMEs.  */
6525   if (COMPARISON_CLASS_P (op2)
6526       && same_bool_comparison_p (op1, TREE_CODE (op2),
6527 				 TREE_OPERAND (op2, 0),
6528 				 TREE_OPERAND (op2, 1)))
6529     return true;
6530   if (COMPARISON_CLASS_P (op1)
6531       && same_bool_comparison_p (op2, TREE_CODE (op1),
6532 				 TREE_OPERAND (op1, 0),
6533 				 TREE_OPERAND (op1, 1)))
6534     return true;
6535 
6536   /* Default case.  */
6537   return false;
6538 }
6539 
6540 /* Forward declarations for some mutually recursive functions.  */
6541 
6542 static tree
6543 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6544 		   enum tree_code code2, tree op2a, tree op2b, basic_block);
6545 static tree
6546 and_var_with_comparison (tree type, tree var, bool invert,
6547 			 enum tree_code code2, tree op2a, tree op2b,
6548 			 basic_block);
6549 static tree
6550 and_var_with_comparison_1 (tree type, gimple *stmt,
6551 			   enum tree_code code2, tree op2a, tree op2b,
6552 			   basic_block);
6553 static tree
6554 or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
6555 		  enum tree_code code2, tree op2a, tree op2b,
6556 		  basic_block);
6557 static tree
6558 or_var_with_comparison (tree, tree var, bool invert,
6559 			enum tree_code code2, tree op2a, tree op2b,
6560 			basic_block);
6561 static tree
6562 or_var_with_comparison_1 (tree, gimple *stmt,
6563 			  enum tree_code code2, tree op2a, tree op2b,
6564 			  basic_block);
6565 
6566 /* Helper function for and_comparisons_1:  try to simplify the AND of the
6567    ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6568    If INVERT is true, invert the value of VAR before doing the AND.
6569    Return NULL_TREE if we can't simplify this to a single expression.  */
6570 
6571 static tree
6572 and_var_with_comparison (tree type, tree var, bool invert,
6573 			 enum tree_code code2, tree op2a, tree op2b,
6574 			 basic_block outer_cond_bb)
6575 {
6576   tree t;
6577   gimple *stmt = SSA_NAME_DEF_STMT (var);
6578 
6579   /* We can only deal with variables whose definitions are assignments.  */
6580   if (!is_gimple_assign (stmt))
6581     return NULL_TREE;
6582 
6583   /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6584      !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
6585      Then we only have to consider the simpler non-inverted cases.  */
6586   if (invert)
6587     t = or_var_with_comparison_1 (type, stmt,
6588 				  invert_tree_comparison (code2, false),
6589 				  op2a, op2b, outer_cond_bb);
6590   else
6591     t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b,
6592 				   outer_cond_bb);
6593   return canonicalize_bool (t, invert);
6594 }
6595 
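/* A worked instance of the DeMorgan rewrite above (illustrative):
   !x_1 AND (a_2 < b_3) is handled as !(x_1 OR (a_2 >= b_3)), i.e.
   or_var_with_comparison_1 is called with the inverted comparison
   GE_EXPR and the result is inverted back by canonicalize_bool.  */
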
6596 /* Try to simplify the AND of the ssa variable defined by the assignment
6597    STMT with the comparison specified by (OP2A CODE2 OP2B).
6598    Return NULL_TREE if we can't simplify this to a single expression.  */
6599 
6600 static tree
6601 and_var_with_comparison_1 (tree type, gimple *stmt,
6602 			   enum tree_code code2, tree op2a, tree op2b,
6603 			   basic_block outer_cond_bb)
6604 {
6605   tree var = gimple_assign_lhs (stmt);
6606   tree true_test_var = NULL_TREE;
6607   tree false_test_var = NULL_TREE;
6608   enum tree_code innercode = gimple_assign_rhs_code (stmt);
6609 
6610   /* Check for identities like (var AND (var == 0)) => false.  */
6611   if (TREE_CODE (op2a) == SSA_NAME
6612       && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
6613     {
6614       if ((code2 == NE_EXPR && integer_zerop (op2b))
6615 	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
6616 	{
6617 	  true_test_var = op2a;
6618 	  if (var == true_test_var)
6619 	    return var;
6620 	}
6621       else if ((code2 == EQ_EXPR && integer_zerop (op2b))
6622 	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
6623 	{
6624 	  false_test_var = op2a;
6625 	  if (var == false_test_var)
6626 	    return boolean_false_node;
6627 	}
6628     }
6629 
6630   /* If the definition is a comparison, recurse on it.  */
6631   if (TREE_CODE_CLASS (innercode) == tcc_comparison)
6632     {
6633       tree t = and_comparisons_1 (type, innercode,
6634 				  gimple_assign_rhs1 (stmt),
6635 				  gimple_assign_rhs2 (stmt),
6636 				  code2,
6637 				  op2a,
6638 				  op2b, outer_cond_bb);
6639       if (t)
6640 	return t;
6641     }
6642 
6643   /* If the definition is an AND or OR expression, we may be able to
6644      simplify by reassociating.  */
6645   if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
6646       && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
6647     {
6648       tree inner1 = gimple_assign_rhs1 (stmt);
6649       tree inner2 = gimple_assign_rhs2 (stmt);
6650       gimple *s;
6651       tree t;
6652       tree partial = NULL_TREE;
6653       bool is_and = (innercode == BIT_AND_EXPR);
6654 
6655       /* Check for boolean identities that don't require recursive examination
6656 	 of inner1/inner2:
6657 	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
6658 	 inner1 AND (inner1 OR inner2) => inner1
6659 	 !inner1 AND (inner1 AND inner2) => false
6660 	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
6661          Likewise for similar cases involving inner2.  */
6662       if (inner1 == true_test_var)
6663 	return (is_and ? var : inner1);
6664       else if (inner2 == true_test_var)
6665 	return (is_and ? var : inner2);
6666       else if (inner1 == false_test_var)
6667 	return (is_and
6668 		? boolean_false_node
6669 		: and_var_with_comparison (type, inner2, false, code2, op2a,
6670 					   op2b, outer_cond_bb));
6671       else if (inner2 == false_test_var)
6672 	return (is_and
6673 		? boolean_false_node
6674 		: and_var_with_comparison (type, inner1, false, code2, op2a,
6675 					   op2b, outer_cond_bb));
6676 
6677       /* Next, redistribute/reassociate the AND across the inner tests.
6678 	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
6679       if (TREE_CODE (inner1) == SSA_NAME
6680 	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
6681 	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6682 	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
6683 					      gimple_assign_rhs1 (s),
6684 					      gimple_assign_rhs2 (s),
6685 					      code2, op2a, op2b,
6686 					      outer_cond_bb)))
6687 	{
6688 	  /* Handle the AND case, where we are reassociating:
6689 	     (inner1 AND inner2) AND (op2a code2 op2b)
6690 	     => (t AND inner2)
6691 	     If the partial result t is a constant, we win.  Otherwise
6692 	     continue on to try reassociating with the other inner test.  */
6693 	  if (is_and)
6694 	    {
6695 	      if (integer_onep (t))
6696 		return inner2;
6697 	      else if (integer_zerop (t))
6698 		return boolean_false_node;
6699 	    }
6700 
6701 	  /* Handle the OR case, where we are redistributing:
6702 	     (inner1 OR inner2) AND (op2a code2 op2b)
6703 	     => (t OR (inner2 AND (op2a code2 op2b)))  */
6704 	  else if (integer_onep (t))
6705 	    return boolean_true_node;
6706 
6707 	  /* Save partial result for later.  */
6708 	  partial = t;
6709 	}
6710 
6711       /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
6712       if (TREE_CODE (inner2) == SSA_NAME
6713 	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
6714 	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6715 	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
6716 					      gimple_assign_rhs1 (s),
6717 					      gimple_assign_rhs2 (s),
6718 					      code2, op2a, op2b,
6719 					      outer_cond_bb)))
6720 	{
6721 	  /* Handle the AND case, where we are reassociating:
6722 	     (inner1 AND inner2) AND (op2a code2 op2b)
6723 	     => (inner1 AND t)  */
6724 	  if (is_and)
6725 	    {
6726 	      if (integer_onep (t))
6727 		return inner1;
6728 	      else if (integer_zerop (t))
6729 		return boolean_false_node;
6730 	      /* If both are the same, we can apply the identity
6731 		 (x AND x) == x.  */
6732 	      else if (partial && same_bool_result_p (t, partial))
6733 		return t;
6734 	    }
6735 
6736 	  /* Handle the OR case, where we are redistributing:
6737 	     (inner1 OR inner2) AND (op2a code2 op2b)
6738 	     => (t OR (inner1 AND (op2a code2 op2b)))
6739 	     => (t OR partial)  */
6740 	  else
6741 	    {
6742 	      if (integer_onep (t))
6743 		return boolean_true_node;
6744 	      else if (partial)
6745 		{
6746 		  /* We already got a simplification for the other
6747 		     operand to the redistributed OR expression.  The
6748 		     interesting case is when at least one is false.
6749 		     Or, if both are the same, we can apply the identity
6750 		     (x OR x) == x.  */
6751 		  if (integer_zerop (partial))
6752 		    return t;
6753 		  else if (integer_zerop (t))
6754 		    return partial;
6755 		  else if (same_bool_result_p (t, partial))
6756 		    return t;
6757 		}
6758 	    }
6759 	}
6760     }
6761   return NULL_TREE;
6762 }
6763 
6764 /* Try to simplify the AND of two comparisons defined by
6765    (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6766    If this can be done without constructing an intermediate value,
6767    return the resulting tree; otherwise NULL_TREE is returned.
6768    This function is deliberately asymmetric as it recurses on SSA_DEFs
6769    in the first comparison but not the second.  */
6770 
6771 static tree
6772 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6773 		   enum tree_code code2, tree op2a, tree op2b,
6774 		   basic_block outer_cond_bb)
6775 {
6776   tree truth_type = truth_type_for (TREE_TYPE (op1a));
6777 
6778   /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
6779   if (operand_equal_p (op1a, op2a, 0)
6780       && operand_equal_p (op1b, op2b, 0))
6781     {
6782       /* Result will be either NULL_TREE, or a combined comparison.  */
6783       tree t = combine_comparisons (UNKNOWN_LOCATION,
6784 				    TRUTH_ANDIF_EXPR, code1, code2,
6785 				    truth_type, op1a, op1b);
6786       if (t)
6787 	return t;
6788     }
6789 
6790   /* Likewise the swapped case of the above.  */
6791   if (operand_equal_p (op1a, op2b, 0)
6792       && operand_equal_p (op1b, op2a, 0))
6793     {
6794       /* Result will be either NULL_TREE, or a combined comparison.  */
6795       tree t = combine_comparisons (UNKNOWN_LOCATION,
6796 				    TRUTH_ANDIF_EXPR, code1,
6797 				    swap_tree_comparison (code2),
6798 				    truth_type, op1a, op1b);
6799       if (t)
6800 	return t;
6801     }
6802 
6803   /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6804      NAME's definition is a truth value.  See if there are any simplifications
6805      that can be done against the NAME's definition.  */
6806   if (TREE_CODE (op1a) == SSA_NAME
6807       && (code1 == NE_EXPR || code1 == EQ_EXPR)
6808       && (integer_zerop (op1b) || integer_onep (op1b)))
6809     {
6810       bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6811 		     || (code1 == NE_EXPR && integer_onep (op1b)));
6812       gimple *stmt = SSA_NAME_DEF_STMT (op1a);
6813       switch (gimple_code (stmt))
6814 	{
6815 	case GIMPLE_ASSIGN:
6816 	  /* Try to simplify by copy-propagating the definition.  */
6817 	  return and_var_with_comparison (type, op1a, invert, code2, op2a,
6818 					  op2b, outer_cond_bb);
6819 
6820 	case GIMPLE_PHI:
6821 	  /* If every argument to the PHI produces the same result when
6822 	     ANDed with the second comparison, we win.
6823 	     Do not do this unless the type is bool since we need a bool
6824 	     result here anyway.  */
6825 	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6826 	    {
6827 	      tree result = NULL_TREE;
6828 	      unsigned i;
6829 	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
6830 		{
6831 		  tree arg = gimple_phi_arg_def (stmt, i);
6832 
6833 		  /* If this PHI has itself as an argument, ignore it.
6834 		     If all the other args produce the same result,
6835 		     we're still OK.  */
6836 		  if (arg == gimple_phi_result (stmt))
6837 		    continue;
6838 		  else if (TREE_CODE (arg) == INTEGER_CST)
6839 		    {
6840 		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
6841 			{
6842 			  if (!result)
6843 			    result = boolean_false_node;
6844 			  else if (!integer_zerop (result))
6845 			    return NULL_TREE;
6846 			}
6847 		      else if (!result)
6848 			result = fold_build2 (code2, boolean_type_node,
6849 					      op2a, op2b);
6850 		      else if (!same_bool_comparison_p (result,
6851 							code2, op2a, op2b))
6852 			return NULL_TREE;
6853 		    }
6854 		  else if (TREE_CODE (arg) == SSA_NAME
6855 			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
6856 		    {
6857 		      tree temp;
6858 		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6859 		      /* In simple cases we can look through PHI nodes,
6860 			 but we have to be careful with loops.
6861 			 See PR49073.  */
6862 		      if (! dom_info_available_p (CDI_DOMINATORS)
6863 			  || gimple_bb (def_stmt) == gimple_bb (stmt)
6864 			  || dominated_by_p (CDI_DOMINATORS,
6865 					     gimple_bb (def_stmt),
6866 					     gimple_bb (stmt)))
6867 			return NULL_TREE;
6868 		      temp = and_var_with_comparison (type, arg, invert, code2,
6869 						      op2a, op2b,
6870 						      outer_cond_bb);
6871 		      if (!temp)
6872 			return NULL_TREE;
6873 		      else if (!result)
6874 			result = temp;
6875 		      else if (!same_bool_result_p (result, temp))
6876 			return NULL_TREE;
6877 		    }
6878 		  else
6879 		    return NULL_TREE;
6880 		}
6881 	      return result;
6882 	    }
6883 
6884 	default:
6885 	  break;
6886 	}
6887     }
6888   return NULL_TREE;
6889 }
6890 
6891 static basic_block fosa_bb;
6892 static tree
6893 follow_outer_ssa_edges (tree val)
6894 {
6895   if (TREE_CODE (val) == SSA_NAME
6896       && !SSA_NAME_IS_DEFAULT_DEF (val))
6897     {
6898       basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (val));
6899       if (!def_bb
6900 	  || def_bb == fosa_bb
6901 	  || (dom_info_available_p (CDI_DOMINATORS)
6902 	      && (def_bb == fosa_bb
6903 		  || dominated_by_p (CDI_DOMINATORS, fosa_bb, def_bb))))
6904 	return val;
6905       return NULL_TREE;
6906     }
6907   return val;
6908 }
6909 
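/* Note (added for illustration): follow_outer_ssa_edges only lets the
   simplifier look through SSA names that are default definitions or
   whose defining statements dominate FOSA_BB, so the folding in
   maybe_fold_comparisons_from_match_pd below never depends on a
   statement that would not be available at the outer condition.  */
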
6910 /* Helper function for maybe_fold_and_comparisons and
6911    maybe_fold_or_comparisons: try to simplify the AND/OR of the ssa
6912    variable VAR with the comparison specified by (OP2A CODE2 OP2B) from
6913    match.pd.  Return NULL_TREE if we can't simplify this to a single
6914    expression.  To keep the cost of building SSA names / gimple stmts
6915    low, we allocate them on the stack.  This makes the code a bit ugly.  */
6916 
6917 static tree
6918 maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
6919 				      enum tree_code code1,
6920 				      tree op1a, tree op1b,
6921 				      enum tree_code code2, tree op2a,
6922 				      tree op2b,
6923 				      basic_block outer_cond_bb)
6924 {
6925   /* Allocate gimple stmt1 on the stack.  */
6926   gassign *stmt1
6927     = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
6928   gimple_init (stmt1, GIMPLE_ASSIGN, 3);
6929   gimple_assign_set_rhs_code (stmt1, code1);
6930   gimple_assign_set_rhs1 (stmt1, op1a);
6931   gimple_assign_set_rhs2 (stmt1, op1b);
6932   gimple_set_bb (stmt1, NULL);
6933 
6934   /* Allocate gimple stmt2 on the stack.  */
6935   gassign *stmt2
6936     = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
6937   gimple_init (stmt2, GIMPLE_ASSIGN, 3);
6938   gimple_assign_set_rhs_code (stmt2, code2);
6939   gimple_assign_set_rhs1 (stmt2, op2a);
6940   gimple_assign_set_rhs2 (stmt2, op2b);
6941   gimple_set_bb (stmt2, NULL);
6942 
6943   /* Allocate SSA name (lhs1) on the stack.  */
6944   tree lhs1 = (tree)XALLOCA (tree_ssa_name);
6945   memset (lhs1, 0, sizeof (tree_ssa_name));
6946   TREE_SET_CODE (lhs1, SSA_NAME);
6947   TREE_TYPE (lhs1) = type;
6948   init_ssa_name_imm_use (lhs1);
6949 
6950   /* Allocate SSA name (lhs2) on the stack.  */
6951   tree lhs2 = (tree)XALLOCA (tree_ssa_name);
6952   memset (lhs2, 0, sizeof (tree_ssa_name));
6953   TREE_SET_CODE (lhs2, SSA_NAME);
6954   TREE_TYPE (lhs2) = type;
6955   init_ssa_name_imm_use (lhs2);
6956 
6957   gimple_assign_set_lhs (stmt1, lhs1);
6958   gimple_assign_set_lhs (stmt2, lhs2);
6959 
6960   gimple_match_op op (gimple_match_cond::UNCOND, code,
6961 		      type, gimple_assign_lhs (stmt1),
6962 		      gimple_assign_lhs (stmt2));
6963   fosa_bb = outer_cond_bb;
6964   if (op.resimplify (NULL, (!outer_cond_bb
6965 			    ? follow_all_ssa_edges : follow_outer_ssa_edges)))
6966     {
6967       if (gimple_simplified_result_is_gimple_val (&op))
6968 	{
6969 	  tree res = op.ops[0];
6970 	  if (res == lhs1)
6971 	    return build2 (code1, type, op1a, op1b);
6972 	  else if (res == lhs2)
6973 	    return build2 (code2, type, op2a, op2b);
6974 	  else
6975 	    return res;
6976 	}
6977       else if (op.code.is_tree_code ()
6978 	       && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
6979 	{
6980 	  tree op0 = op.ops[0];
6981 	  tree op1 = op.ops[1];
6982 	  if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
6983 	    return NULL_TREE;  /* not simple */
6984 
6985 	  return build2 ((enum tree_code)op.code, op.type, op0, op1);
6986 	}
6987     }
6988 
6989   return NULL_TREE;
6990 }
6991 
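/* Note on the scheme above (added for illustration): the two gassign
   statements and the two SSA names live entirely on the stack
   (XALLOCAVEC / XALLOCA), so when resimplify fails nothing needs to
   be released; a real tree is only built with build2 once a
   simplification has been found.  */
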
6992 /* Try to simplify the AND of two comparisons, specified by
6993    (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6994    If this can be simplified to a single expression (without requiring
6995    introducing more SSA variables to hold intermediate values),
6996    return the resulting tree.  Otherwise return NULL_TREE.
6997    If the result expression is non-null, it has boolean type.  */
6998 
6999 tree
7000 maybe_fold_and_comparisons (tree type,
7001 			    enum tree_code code1, tree op1a, tree op1b,
7002 			    enum tree_code code2, tree op2a, tree op2b,
7003 			    basic_block outer_cond_bb)
7004 {
7005   if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b,
7006 				  outer_cond_bb))
7007     return t;
7008 
7009   if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b,
7010 				  outer_cond_bb))
7011     return t;
7012 
7013   if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
7014 						     op1a, op1b, code2, op2a,
7015 						     op2b, outer_cond_bb))
7016     return t;
7017 
7018   return NULL_TREE;
7019 }
7020 
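/* Illustrative examples (not from the original source):
   maybe_fold_and_comparisons can reduce (x < y) AND (x == y) to false
   and (x <= y) AND (x >= y) to x == y for integer operands, via the
   combine_comparisons path in and_comparisons_1, without creating any
   intermediate SSA name.  */
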
7021 /* Helper function for or_comparisons_1:  try to simplify the OR of the
7022    ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
7023    If INVERT is true, invert the value of VAR before doing the OR.
7024    Return NULL_TREE if we can't simplify this to a single expression.  */
7025 
7026 static tree
7027 or_var_with_comparison (tree type, tree var, bool invert,
7028 			enum tree_code code2, tree op2a, tree op2b,
7029 			basic_block outer_cond_bb)
7030 {
7031   tree t;
7032   gimple *stmt = SSA_NAME_DEF_STMT (var);
7033 
7034   /* We can only deal with variables whose definitions are assignments.  */
7035   if (!is_gimple_assign (stmt))
7036     return NULL_TREE;
7037 
7038   /* If we have an inverted comparison, apply DeMorgan's law and rewrite
7039      !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
7040      Then we only have to consider the simpler non-inverted cases.  */
7041   if (invert)
7042     t = and_var_with_comparison_1 (type, stmt,
7043 				   invert_tree_comparison (code2, false),
7044 				   op2a, op2b, outer_cond_bb);
7045   else
7046     t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b,
7047 				  outer_cond_bb);
7048   return canonicalize_bool (t, invert);
7049 }
7050 
7051 /* Try to simplify the OR of the ssa variable defined by the assignment
7052    STMT with the comparison specified by (OP2A CODE2 OP2B).
7053    Return NULL_TREE if we can't simplify this to a single expression.  */
7054 
7055 static tree
7056 or_var_with_comparison_1 (tree type, gimple *stmt,
7057 			  enum tree_code code2, tree op2a, tree op2b,
7058 			  basic_block outer_cond_bb)
7059 {
7060   tree var = gimple_assign_lhs (stmt);
7061   tree true_test_var = NULL_TREE;
7062   tree false_test_var = NULL_TREE;
7063   enum tree_code innercode = gimple_assign_rhs_code (stmt);
7064 
7065   /* Check for identities like (var OR (var != 0)) => true.  */
7066   if (TREE_CODE (op2a) == SSA_NAME
7067       && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
7068     {
7069       if ((code2 == NE_EXPR && integer_zerop (op2b))
7070 	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
7071 	{
7072 	  true_test_var = op2a;
7073 	  if (var == true_test_var)
7074 	    return var;
7075 	}
7076       else if ((code2 == EQ_EXPR && integer_zerop (op2b))
7077 	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
7078 	{
7079 	  false_test_var = op2a;
7080 	  if (var == false_test_var)
7081 	    return boolean_true_node;
7082 	}
7083     }
7084 
7085   /* If the definition is a comparison, recurse on it.  */
7086   if (TREE_CODE_CLASS (innercode) == tcc_comparison)
7087     {
7088       tree t = or_comparisons_1 (type, innercode,
7089 				 gimple_assign_rhs1 (stmt),
7090 				 gimple_assign_rhs2 (stmt),
7091 				 code2, op2a, op2b, outer_cond_bb);
7092       if (t)
7093 	return t;
7094     }
7095 
7096   /* If the definition is an AND or OR expression, we may be able to
7097      simplify by reassociating.  */
7098   if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
7099       && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
7100     {
7101       tree inner1 = gimple_assign_rhs1 (stmt);
7102       tree inner2 = gimple_assign_rhs2 (stmt);
7103       gimple *s;
7104       tree t;
7105       tree partial = NULL_TREE;
7106       bool is_or = (innercode == BIT_IOR_EXPR);
7107 
7108       /* Check for boolean identities that don't require recursive examination
7109 	 of inner1/inner2:
7110 	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
7111 	 inner1 OR (inner1 AND inner2) => inner1
7112 	 !inner1 OR (inner1 OR inner2) => true
7113 	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
7114       */
7115       if (inner1 == true_test_var)
7116 	return (is_or ? var : inner1);
7117       else if (inner2 == true_test_var)
7118 	return (is_or ? var : inner2);
7119       else if (inner1 == false_test_var)
7120 	return (is_or
7121 		? boolean_true_node
7122 		: or_var_with_comparison (type, inner2, false, code2, op2a,
7123 					  op2b, outer_cond_bb));
7124       else if (inner2 == false_test_var)
7125 	return (is_or
7126 		? boolean_true_node
7127 		: or_var_with_comparison (type, inner1, false, code2, op2a,
7128 					  op2b, outer_cond_bb));
7129 
7130       /* Next, redistribute/reassociate the OR across the inner tests.
7131 	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
7132       if (TREE_CODE (inner1) == SSA_NAME
7133 	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
7134 	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
7135 	  && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
7136 					     gimple_assign_rhs1 (s),
7137 					     gimple_assign_rhs2 (s),
7138 					     code2, op2a, op2b,
7139 					     outer_cond_bb)))
7140 	{
7141 	  /* Handle the OR case, where we are reassociating:
7142 	     (inner1 OR inner2) OR (op2a code2 op2b)
7143 	     => (t OR inner2)
7144 	     If the partial result t is a constant, we win.  Otherwise
7145 	     continue on to try reassociating with the other inner test.  */
7146 	  if (is_or)
7147 	    {
7148 	      if (integer_onep (t))
7149 		return boolean_true_node;
7150 	      else if (integer_zerop (t))
7151 		return inner2;
7152 	    }
7153 
7154 	  /* Handle the AND case, where we are redistributing:
7155 	     (inner1 AND inner2) OR (op2a code2 op2b)
7156 	     => (t AND (inner2 OR (op2a code op2b)))  */
7157 	  else if (integer_zerop (t))
7158 	    return boolean_false_node;
7159 
7160 	  /* Save partial result for later.  */
7161 	  partial = t;
7162 	}
7163 
7164       /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
7165       if (TREE_CODE (inner2) == SSA_NAME
7166 	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
7167 	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
7168 	  && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
7169 					     gimple_assign_rhs1 (s),
7170 					     gimple_assign_rhs2 (s),
7171 					     code2, op2a, op2b,
7172 					     outer_cond_bb)))
7173 	{
7174 	  /* Handle the OR case, where we are reassociating:
7175 	     (inner1 OR inner2) OR (op2a code2 op2b)
7176 	     => (inner1 OR t)
7177 	     => (t OR partial)  */
7178 	  if (is_or)
7179 	    {
7180 	      if (integer_zerop (t))
7181 		return inner1;
7182 	      else if (integer_onep (t))
7183 		return boolean_true_node;
7184 	      /* If both are the same, we can apply the identity
7185 		 (x OR x) == x.  */
7186 	      else if (partial && same_bool_result_p (t, partial))
7187 		return t;
7188 	    }
7189 
7190 	  /* Handle the AND case, where we are redistributing:
7191 	     (inner1 AND inner2) OR (op2a code2 op2b)
7192 	     => (t AND (inner1 OR (op2a code2 op2b)))
7193 	     => (t AND partial)  */
7194 	  else
7195 	    {
7196 	      if (integer_zerop (t))
7197 		return boolean_false_node;
7198 	      else if (partial)
7199 		{
7200 		  /* We already got a simplification for the other
7201 		     operand to the redistributed AND expression.  The
7202 		     interesting case is when at least one is true.
7203 		     Or, if both are the same, we can apply the identity
7204 		     (x AND x) == x.  */
7205 		  if (integer_onep (partial))
7206 		    return t;
7207 		  else if (integer_onep (t))
7208 		    return partial;
7209 		  else if (same_bool_result_p (t, partial))
7210 		    return t;
7211 		}
7212 	    }
7213 	}
7214     }
7215   return NULL_TREE;
7216 }
7217 
7218 /* Try to simplify the OR of two comparisons defined by
7219    (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7220    If this can be done without constructing an intermediate value,
7221    return the resulting tree; otherwise NULL_TREE is returned.
7222    This function is deliberately asymmetric as it recurses on SSA_DEFs
7223    in the first comparison but not the second.  */
7224 
7225 static tree
7226 or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
7227 		  enum tree_code code2, tree op2a, tree op2b,
7228 		  basic_block outer_cond_bb)
7229 {
7230   tree truth_type = truth_type_for (TREE_TYPE (op1a));
7231 
7232   /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
7233   if (operand_equal_p (op1a, op2a, 0)
7234       && operand_equal_p (op1b, op2b, 0))
7235     {
7236       /* Result will be either NULL_TREE, or a combined comparison.  */
7237       tree t = combine_comparisons (UNKNOWN_LOCATION,
7238 				    TRUTH_ORIF_EXPR, code1, code2,
7239 				    truth_type, op1a, op1b);
7240       if (t)
7241 	return t;
7242     }
7243 
7244   /* Likewise the swapped case of the above.  */
7245   if (operand_equal_p (op1a, op2b, 0)
7246       && operand_equal_p (op1b, op2a, 0))
7247     {
7248       /* Result will be either NULL_TREE, or a combined comparison.  */
7249       tree t = combine_comparisons (UNKNOWN_LOCATION,
7250 				    TRUTH_ORIF_EXPR, code1,
7251 				    swap_tree_comparison (code2),
7252 				    truth_type, op1a, op1b);
7253       if (t)
7254 	return t;
7255     }
7256 
7257   /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
7258      NAME's definition is a truth value.  See if there are any simplifications
7259      that can be done against the NAME's definition.  */
7260   if (TREE_CODE (op1a) == SSA_NAME
7261       && (code1 == NE_EXPR || code1 == EQ_EXPR)
7262       && (integer_zerop (op1b) || integer_onep (op1b)))
7263     {
7264       bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
7265 		     || (code1 == NE_EXPR && integer_onep (op1b)));
7266       gimple *stmt = SSA_NAME_DEF_STMT (op1a);
7267       switch (gimple_code (stmt))
7268 	{
7269 	case GIMPLE_ASSIGN:
7270 	  /* Try to simplify by copy-propagating the definition.  */
7271 	  return or_var_with_comparison (type, op1a, invert, code2, op2a,
7272 					 op2b, outer_cond_bb);
7273 
7274 	case GIMPLE_PHI:
7275 	  /* If every argument to the PHI produces the same result when
7276 	     ORed with the second comparison, we win.
7277 	     Do not do this unless the type is bool since we need a bool
7278 	     result here anyway.  */
7279 	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
7280 	    {
7281 	      tree result = NULL_TREE;
7282 	      unsigned i;
7283 	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
7284 		{
7285 		  tree arg = gimple_phi_arg_def (stmt, i);
7286 
7287 		  /* If this PHI has itself as an argument, ignore it.
7288 		     If all the other args produce the same result,
7289 		     we're still OK.  */
7290 		  if (arg == gimple_phi_result (stmt))
7291 		    continue;
7292 		  else if (TREE_CODE (arg) == INTEGER_CST)
7293 		    {
7294 		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
7295 			{
7296 			  if (!result)
7297 			    result = boolean_true_node;
7298 			  else if (!integer_onep (result))
7299 			    return NULL_TREE;
7300 			}
7301 		      else if (!result)
7302 			result = fold_build2 (code2, boolean_type_node,
7303 					      op2a, op2b);
7304 		      else if (!same_bool_comparison_p (result,
7305 							code2, op2a, op2b))
7306 			return NULL_TREE;
7307 		    }
7308 		  else if (TREE_CODE (arg) == SSA_NAME
7309 			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
7310 		    {
7311 		      tree temp;
7312 		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
7313 		      /* In simple cases we can look through PHI nodes,
7314 			 but we have to be careful with loops.
7315 			 See PR49073.  */
7316 		      if (! dom_info_available_p (CDI_DOMINATORS)
7317 			  || gimple_bb (def_stmt) == gimple_bb (stmt)
7318 			  || dominated_by_p (CDI_DOMINATORS,
7319 					     gimple_bb (def_stmt),
7320 					     gimple_bb (stmt)))
7321 			return NULL_TREE;
7322 		      temp = or_var_with_comparison (type, arg, invert, code2,
7323 						     op2a, op2b, outer_cond_bb);
7324 		      if (!temp)
7325 			return NULL_TREE;
7326 		      else if (!result)
7327 			result = temp;
7328 		      else if (!same_bool_result_p (result, temp))
7329 			return NULL_TREE;
7330 		    }
7331 		  else
7332 		    return NULL_TREE;
7333 		}
7334 	      return result;
7335 	    }
7336 
7337 	default:
7338 	  break;
7339 	}
7340     }
7341   return NULL_TREE;
7342 }
7343 
7344 /* Try to simplify the OR of two comparisons, specified by
7345    (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7346    If this can be simplified to a single expression (without requiring
7347    introducing more SSA variables to hold intermediate values),
7348    return the resulting tree.  Otherwise return NULL_TREE.
7349    If the result expression is non-null, it has boolean type.  */
7350 
7351 tree
7352 maybe_fold_or_comparisons (tree type,
7353 			   enum tree_code code1, tree op1a, tree op1b,
7354 			   enum tree_code code2, tree op2a, tree op2b,
7355 			   basic_block outer_cond_bb)
7356 {
7357   if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b,
7358 				 outer_cond_bb))
7359     return t;
7360 
7361   if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b,
7362 				 outer_cond_bb))
7363     return t;
7364 
7365   if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
7366 						     op1a, op1b, code2, op2a,
7367 						     op2b, outer_cond_bb))
7368     return t;
7369 
7370   return NULL_TREE;
7371 }
7372 
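/* Illustrative examples (not from the original source):
   maybe_fold_or_comparisons can reduce (x < y) OR (x == y) to x <= y
   and (x < y) OR (x >= y) to true for integer operands, mirroring the
   AND case above.  */
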
7373 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7374 
7375    Either NULL_TREE, a simplified but non-constant expression, or a
7376    constant is returned.
7377 
7378    ???  This should go into a gimple-fold-inline.h file to be eventually
7379    privatized with the single valueize function used in the various TUs
7380    to avoid the indirect function call overhead.  */
7381 
7382 tree
7383 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
7384 				tree (*gvalueize) (tree))
7385 {
7386   gimple_match_op res_op;
7387   /* ???  The SSA propagators do not correctly deal with following SSA use-def
7388      edges if there are intermediate VARYING defs.  For this reason
7389      do not follow SSA edges here even though SCCVN can technically
7390      just deal fine with that.  */
7391   if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
7392     {
7393       tree res = NULL_TREE;
7394       if (gimple_simplified_result_is_gimple_val (&res_op))
7395 	res = res_op.ops[0];
7396       else if (mprts_hook)
7397 	res = mprts_hook (&res_op);
7398       if (res)
7399 	{
7400 	  if (dump_file && dump_flags & TDF_DETAILS)
7401 	    {
7402 	      fprintf (dump_file, "Match-and-simplified ");
7403 	      print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
7404 	      fprintf (dump_file, " to ");
7405 	      print_generic_expr (dump_file, res);
7406 	      fprintf (dump_file, "\n");
7407 	    }
7408 	  return res;
7409 	}
7410     }
7411 
7412   location_t loc = gimple_location (stmt);
7413   switch (gimple_code (stmt))
7414     {
7415     case GIMPLE_ASSIGN:
7416       {
7417         enum tree_code subcode = gimple_assign_rhs_code (stmt);
7418 
7419         switch (get_gimple_rhs_class (subcode))
7420           {
7421           case GIMPLE_SINGLE_RHS:
7422             {
7423               tree rhs = gimple_assign_rhs1 (stmt);
7424               enum tree_code_class kind = TREE_CODE_CLASS (subcode);
7425 
7426               if (TREE_CODE (rhs) == SSA_NAME)
7427                 {
7428                   /* If the RHS is an SSA_NAME, return its known constant value,
7429                      if any.  */
7430                   return (*valueize) (rhs);
7431                 }
7432 	      /* Handle propagating invariant addresses into address
7433 		 operations.  */
7434 	      else if (TREE_CODE (rhs) == ADDR_EXPR
7435 		       && !is_gimple_min_invariant (rhs))
7436 		{
7437 		  poly_int64 offset = 0;
7438 		  tree base;
7439 		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
7440 							  &offset,
7441 							  valueize);
7442 		  if (base
7443 		      && (CONSTANT_CLASS_P (base)
7444 			  || decl_address_invariant_p (base)))
7445 		    return build_invariant_address (TREE_TYPE (rhs),
7446 						    base, offset);
7447 		}
7448 	      else if (TREE_CODE (rhs) == CONSTRUCTOR
7449 		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
7450 		       && known_eq (CONSTRUCTOR_NELTS (rhs),
7451 				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
7452 		{
7453 		  unsigned i, nelts;
7454 		  tree val;
7455 
7456 		  nelts = CONSTRUCTOR_NELTS (rhs);
7457 		  tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
7458 		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
7459 		    {
7460 		      val = (*valueize) (val);
7461 		      if (TREE_CODE (val) == INTEGER_CST
7462 			  || TREE_CODE (val) == REAL_CST
7463 			  || TREE_CODE (val) == FIXED_CST)
7464 			vec.quick_push (val);
7465 		      else
7466 			return NULL_TREE;
7467 		    }
7468 
7469 		  return vec.build ();
7470 		}
7471 	      if (subcode == OBJ_TYPE_REF)
7472 		{
7473 		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
7474 		  /* If callee is constant, we can fold away the wrapper.  */
7475 		  if (is_gimple_min_invariant (val))
7476 		    return val;
7477 		}
7478 
7479               if (kind == tcc_reference)
7480 		{
7481 		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
7482 		       || TREE_CODE (rhs) == REALPART_EXPR
7483 		       || TREE_CODE (rhs) == IMAGPART_EXPR)
7484 		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7485 		    {
7486 		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7487 		      return fold_unary_loc (EXPR_LOCATION (rhs),
7488 					     TREE_CODE (rhs),
7489 					     TREE_TYPE (rhs), val);
7490 		    }
7491 		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
7492 			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7493 		    {
7494 		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7495 		      return fold_ternary_loc (EXPR_LOCATION (rhs),
7496 					       TREE_CODE (rhs),
7497 					       TREE_TYPE (rhs), val,
7498 					       TREE_OPERAND (rhs, 1),
7499 					       TREE_OPERAND (rhs, 2));
7500 		    }
7501 		  else if (TREE_CODE (rhs) == MEM_REF
7502 			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7503 		    {
7504 		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7505 		      if (TREE_CODE (val) == ADDR_EXPR
7506 			  && is_gimple_min_invariant (val))
7507 			{
7508 			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
7509 						  unshare_expr (val),
7510 						  TREE_OPERAND (rhs, 1));
7511 			  if (tem)
7512 			    rhs = tem;
7513 			}
7514 		    }
7515 		  return fold_const_aggregate_ref_1 (rhs, valueize);
7516 		}
7517               else if (kind == tcc_declaration)
7518                 return get_symbol_constant_value (rhs);
7519               return rhs;
7520             }
7521 
7522           case GIMPLE_UNARY_RHS:
7523 	    return NULL_TREE;
7524 
7525           case GIMPLE_BINARY_RHS:
7526 	    /* Translate &x + CST into an invariant form suitable for
7527 	       further propagation.  */
7528 	    if (subcode == POINTER_PLUS_EXPR)
7529 	      {
7530 		tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7531 		tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7532 		if (TREE_CODE (op0) == ADDR_EXPR
7533 		    && TREE_CODE (op1) == INTEGER_CST)
7534 		  {
7535 		    tree off = fold_convert (ptr_type_node, op1);
7536 		    return build1_loc
7537 			(loc, ADDR_EXPR, TREE_TYPE (op0),
7538 			 fold_build2 (MEM_REF,
7539 				      TREE_TYPE (TREE_TYPE (op0)),
7540 				      unshare_expr (op0), off));
7541 		  }
7542 	      }
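	    /* E.g. (illustrative): if valueization turns the operands of
	       the POINTER_PLUS_EXPR above into &a and 4, the result built
	       above is roughly &MEM[&a, 4], which is_gimple_min_invariant
	       and can be propagated further.  */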
7543 	    /* Canonicalize bool != 0 and bool == 0 appearing after
7544 	       valueization.  While gimple_simplify handles this, it can
7545 	       get confused by the ~X == 1 -> X == 0 transform, which we
7546 	       can't reduce to an SSA name or a constant (and we have no
7547 	       way to tell gimple_simplify not to consider those
7548 	       transforms in the first place).  */
7549 	    else if (subcode == EQ_EXPR
7550 		     || subcode == NE_EXPR)
7551 	      {
7552 		tree lhs = gimple_assign_lhs (stmt);
7553 		tree op0 = gimple_assign_rhs1 (stmt);
7554 		if (useless_type_conversion_p (TREE_TYPE (lhs),
7555 					       TREE_TYPE (op0)))
7556 		  {
7557 		    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7558 		    op0 = (*valueize) (op0);
7559 		    if (TREE_CODE (op0) == INTEGER_CST)
7560 		      std::swap (op0, op1);
7561 		    if (TREE_CODE (op1) == INTEGER_CST
7562 			&& ((subcode == NE_EXPR && integer_zerop (op1))
7563 			    || (subcode == EQ_EXPR && integer_onep (op1))))
7564 		      return op0;
7565 		  }
7566 	      }
7567 	    return NULL_TREE;
7568 
7569           case GIMPLE_TERNARY_RHS:
7570             {
7571               /* Handle ternary operators that can appear in GIMPLE form.  */
7572               tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7573               tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7574               tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
7575               return fold_ternary_loc (loc, subcode,
7576 				       TREE_TYPE (gimple_assign_lhs (stmt)),
7577 				       op0, op1, op2);
7578             }
7579 
7580           default:
7581             gcc_unreachable ();
7582           }
7583       }
7584 
7585     case GIMPLE_CALL:
7586       {
7587 	tree fn;
7588 	gcall *call_stmt = as_a <gcall *> (stmt);
7589 
7590 	if (gimple_call_internal_p (stmt))
7591 	  {
7592 	    enum tree_code subcode = ERROR_MARK;
7593 	    switch (gimple_call_internal_fn (stmt))
7594 	      {
7595 	      case IFN_UBSAN_CHECK_ADD:
7596 		subcode = PLUS_EXPR;
7597 		break;
7598 	      case IFN_UBSAN_CHECK_SUB:
7599 		subcode = MINUS_EXPR;
7600 		break;
7601 	      case IFN_UBSAN_CHECK_MUL:
7602 		subcode = MULT_EXPR;
7603 		break;
7604 	      case IFN_BUILTIN_EXPECT:
7605 		  {
7606 		    tree arg0 = gimple_call_arg (stmt, 0);
7607 		    tree op0 = (*valueize) (arg0);
7608 		    if (TREE_CODE (op0) == INTEGER_CST)
7609 		      return op0;
7610 		    return NULL_TREE;
7611 		  }
7612 	      default:
7613 		return NULL_TREE;
7614 	      }
7615 	    tree arg0 = gimple_call_arg (stmt, 0);
7616 	    tree arg1 = gimple_call_arg (stmt, 1);
7617 	    tree op0 = (*valueize) (arg0);
7618 	    tree op1 = (*valueize) (arg1);
7619 
7620 	    if (TREE_CODE (op0) != INTEGER_CST
7621 		|| TREE_CODE (op1) != INTEGER_CST)
7622 	      {
7623 		switch (subcode)
7624 		  {
7625 		  case MULT_EXPR:
7626 		    /* x * 0 = 0 * x = 0 without overflow.  */
7627 		    if (integer_zerop (op0) || integer_zerop (op1))
7628 		      return build_zero_cst (TREE_TYPE (arg0));
7629 		    break;
7630 		  case MINUS_EXPR:
7631 		    /* y - y = 0 without overflow.  */
7632 		    if (operand_equal_p (op0, op1, 0))
7633 		      return build_zero_cst (TREE_TYPE (arg0));
7634 		    break;
7635 		  default:
7636 		    break;
7637 		  }
7638 	      }
7639 	    tree res
7640 	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
7641 	    if (res
7642 		&& TREE_CODE (res) == INTEGER_CST
7643 		&& !TREE_OVERFLOW (res))
7644 	      return res;
7645 	    return NULL_TREE;
7646 	  }
7647 
7648 	fn = (*valueize) (gimple_call_fn (stmt));
7649 	if (TREE_CODE (fn) == ADDR_EXPR
7650 	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
7651 	    && fndecl_built_in_p (TREE_OPERAND (fn, 0))
7652 	    && gimple_builtin_call_types_compatible_p (stmt,
7653 						       TREE_OPERAND (fn, 0)))
7654 	  {
7655 	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
7656 	    tree retval;
7657 	    unsigned i;
7658 	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
7659 	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
7660 	    retval = fold_builtin_call_array (loc,
7661 					 gimple_call_return_type (call_stmt),
7662 					 fn, gimple_call_num_args (stmt), args);
7663 	    if (retval)
7664 	      {
7665 		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
7666 		STRIP_NOPS (retval);
7667 		retval = fold_convert (gimple_call_return_type (call_stmt),
7668 				       retval);
7669 	      }
7670 	    return retval;
7671 	  }
7672 	return NULL_TREE;
7673       }
7674 
7675     default:
7676       return NULL_TREE;
7677     }
7678 }
7679 
7680 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7681    Returns NULL_TREE if folding to a constant is not possible, otherwise
7682    returns a constant according to is_gimple_min_invariant.  */
7683 
7684 tree
7685 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
7686 {
7687   tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
7688   if (res && is_gimple_min_invariant (res))
7689     return res;
7690   return NULL_TREE;
7691 }
7692 
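/* For illustration: given "_3 = _1 + _2" where VALUEIZE maps _1 to 2
   and _2 to 3, gimple_fold_stmt_to_constant returns the INTEGER_CST 5;
   if instead only _1 had a known value, any simplified but
   non-invariant result is discarded and NULL_TREE is returned.  */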
7693 
7694 /* The following set of functions are supposed to fold references using
7695    their constant initializers.  */
7696 
7697 /* See if we can find a constructor defining the value of BASE.
7698    When we know the constructor at a constant offset (such as when
7699    BASE is array[40] and we do know the constructor of the array),
7700    BIT_OFFSET is adjusted accordingly.
7701 
7702    As a special case, return error_mark_node when the constructor
7703    is not explicitly available, but it is known to be zero,
7704    such as for 'static const int a;'.  */
7705 static tree
7706 get_base_constructor (tree base, poly_int64_pod *bit_offset,
7707 		      tree (*valueize)(tree))
7708 {
7709   poly_int64 bit_offset2, size, max_size;
7710   bool reverse;
7711 
7712   if (TREE_CODE (base) == MEM_REF)
7713     {
7714       poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
7715       if (!boff.to_shwi (bit_offset))
7716 	return NULL_TREE;
7717 
7718       if (valueize
7719 	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
7720 	base = valueize (TREE_OPERAND (base, 0));
7721       if (!base || TREE_CODE (base) != ADDR_EXPR)
7722         return NULL_TREE;
7723       base = TREE_OPERAND (base, 0);
7724     }
7725   else if (valueize
7726 	   && TREE_CODE (base) == SSA_NAME)
7727     base = valueize (base);
7728 
7729   /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
7730      DECL_INITIAL.  If BASE is a nested reference into another
7731      ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
7732      the inner reference.  */
7733   switch (TREE_CODE (base))
7734     {
7735     case VAR_DECL:
7736     case CONST_DECL:
7737       {
7738 	tree init = ctor_for_folding (base);
7739 
7740 	/* Our semantics are the exact opposite of ctor_for_folding's:
7741 	   NULL means unknown, while error_mark_node means zero.  */
7742 	if (init == error_mark_node)
7743 	  return NULL_TREE;
7744 	if (!init)
7745 	  return error_mark_node;
7746 	return init;
7747       }
7748 
7749     case VIEW_CONVERT_EXPR:
7750       return get_base_constructor (TREE_OPERAND (base, 0),
7751 				   bit_offset, valueize);
7752 
7753     case ARRAY_REF:
7754     case COMPONENT_REF:
7755       base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
7756 				      &reverse);
7757       if (!known_size_p (max_size) || maybe_ne (size, max_size))
7758 	return NULL_TREE;
7759       *bit_offset +=  bit_offset2;
7760       return get_base_constructor (base, bit_offset, valueize);
7761 
7762     case CONSTRUCTOR:
7763       return base;
7764 
7765     default:
7766       if (CONSTANT_CLASS_P (base))
7767 	return base;
7768 
7769       return NULL_TREE;
7770     }
7771 }
7772 
7773 /* CTOR is a CONSTRUCTOR of an array or vector type.  Fold a reference of SIZE
7774    bits to the memory at bit OFFSET.  If non-null, TYPE is the expected type of
7775    the reference; otherwise the type of the referenced element is used instead.
7776    When SIZE is zero, attempt to fold a reference to the entire element OFFSET
7777    refers to.  Increment *SUBOFF by the bit offset of the accessed element.  */
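/* A worked example (illustrative, assuming 32-bit int and
   BITS_PER_UNIT == 8): for

     static const int a[4] = { 10, 20, 30, 40 };

   a read of SIZE 32 at OFFSET 64 computes ACCESS_INDEX == 2 and folds
   to the INTEGER_CST 30, incrementing *SUBOFF by 64.  */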
7778 
7779 static tree
7780 fold_array_ctor_reference (tree type, tree ctor,
7781 			   unsigned HOST_WIDE_INT offset,
7782 			   unsigned HOST_WIDE_INT size,
7783 			   tree from_decl,
7784 			   unsigned HOST_WIDE_INT *suboff)
7785 {
7786   offset_int low_bound;
7787   offset_int elt_size;
7788   offset_int access_index;
7789   tree domain_type = NULL_TREE;
7790   HOST_WIDE_INT inner_offset;
7791 
7792   /* Compute low bound and elt size.  */
7793   if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
7794     domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
7795   if (domain_type && TYPE_MIN_VALUE (domain_type))
7796     {
7797       /* Static constructors for variably sized objects make no sense.  */
7798       if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
7799 	return NULL_TREE;
7800       low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
7801     }
7802   else
7803     low_bound = 0;
7804   /* Static constructors for variably sized objects make no sense.  */
7805   if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
7806     return NULL_TREE;
7807   elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
7808 
7809   /* When TYPE is non-null, verify that it specifies a constant-sized
7810      access of a multiple of the array element size.  Avoid division
7811      by zero below when ELT_SIZE is zero, such as with the result of
7812      an initializer for a zero-length array or an empty struct.  */
7813   if (elt_size == 0
7814       || (type
7815 	  && (!TYPE_SIZE_UNIT (type)
7816 	      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
7817     return NULL_TREE;
7818 
7819   /* Compute the array index we look for.  */
7820   access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
7821 				 elt_size);
7822   access_index += low_bound;
7823 
7824   /* And offset within the access.  */
7825   inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
7826 
7827   unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
7828   if (size > elt_sz * BITS_PER_UNIT)
7829     {
7830       /* native_encode_expr constraints.  */
7831       if (size > MAX_BITSIZE_MODE_ANY_MODE
7832 	  || size % BITS_PER_UNIT != 0
7833 	  || inner_offset % BITS_PER_UNIT != 0
7834 	  || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
7835 	return NULL_TREE;
7836 
7837       unsigned ctor_idx;
7838       tree val = get_array_ctor_element_at_index (ctor, access_index,
7839 						  &ctor_idx);
7840       if (!val && ctor_idx >= CONSTRUCTOR_NELTS  (ctor))
7841 	return build_zero_cst (type);
7842 
7843       /* native-encode adjacent ctor elements.  */
7844       unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
7845       unsigned bufoff = 0;
7846       offset_int index = 0;
7847       offset_int max_index = access_index;
7848       constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
7849       if (!val)
7850 	val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7851       else if (!CONSTANT_CLASS_P (val))
7852 	return NULL_TREE;
7853       if (!elt->index)
7854 	;
7855       else if (TREE_CODE (elt->index) == RANGE_EXPR)
7856 	{
7857 	  index = wi::to_offset (TREE_OPERAND (elt->index, 0));
7858 	  max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
7859 	}
7860       else
7861 	index = max_index = wi::to_offset (elt->index);
7862       index = wi::umax (index, access_index);
7863       do
7864 	{
7865 	  if (bufoff + elt_sz > sizeof (buf))
7866 	    elt_sz = sizeof (buf) - bufoff;
7867 	  int len = native_encode_expr (val, buf + bufoff, elt_sz,
7868 					inner_offset / BITS_PER_UNIT);
7869 	  if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
7870 	    return NULL_TREE;
7871 	  inner_offset = 0;
7872 	  bufoff += len;
7873 
7874 	  access_index += 1;
7875 	  if (wi::cmpu (access_index, index) == 0)
7876 	    val = elt->value;
7877 	  else if (wi::cmpu (access_index, max_index) > 0)
7878 	    {
7879 	      ctor_idx++;
7880 	      if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
7881 		{
7882 		  val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7883 		  ++max_index;
7884 		}
7885 	      else
7886 		{
7887 		  elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
7888 		  index = 0;
7889 		  max_index = access_index;
7890 		  if (!elt->index)
7891 		    ;
7892 		  else if (TREE_CODE (elt->index) == RANGE_EXPR)
7893 		    {
7894 		      index = wi::to_offset (TREE_OPERAND (elt->index, 0));
7895 		      max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
7896 		    }
7897 		  else
7898 		    index = max_index = wi::to_offset (elt->index);
7899 		  index = wi::umax (index, access_index);
7900 		  if (wi::cmpu (access_index, index) == 0)
7901 		    val = elt->value;
7902 		  else
7903 		    val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7904 		}
7905 	    }
7906 	}
7907       while (bufoff < size / BITS_PER_UNIT);
7908       *suboff += size;
7909       return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
7910     }
7911 
7912   if (tree val = get_array_ctor_element_at_index (ctor, access_index))
7913     {
7914       if (!size && TREE_CODE (val) != CONSTRUCTOR)
7915 	{
7916 	  /* For the final reference to the entire accessed element
7917 	     (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
7918 	     may be null) in favor of the type of the element, and set
7919 	     SIZE to the size of the accessed element.  */
7920 	  inner_offset = 0;
7921 	  type = TREE_TYPE (val);
7922 	  size = elt_sz * BITS_PER_UNIT;
7923 	}
7924       else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
7925 	       && TREE_CODE (val) == CONSTRUCTOR
7926 	       && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
7927 	/* If this isn't the last element in the CTOR, is itself a CTOR,
7928 	   and does not cover the whole object we are requesting, give up,
7929 	   since we're not set up to combine values from multiple CTORs.  */
7930 	return NULL_TREE;
7931 
7932       *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
7933       return fold_ctor_reference (type, val, inner_offset, size, from_decl,
7934 				  suboff);
7935     }
7936 
7937   /* Memory not explicitly mentioned in constructor is 0 (or
7938      the reference is out of range).  */
7939   return type ? build_zero_cst (type) : NULL_TREE;
7940 }
7941 
7942 /* CTOR is a CONSTRUCTOR of a record or union type.  Fold a reference of SIZE
7943    bits to the memory at bit OFFSET.  If non-null, TYPE is the expected type of
7944    the reference; otherwise the type of the referenced member is used instead.
7945    When SIZE is zero, attempt to fold a reference to the entire member OFFSET
7946    refers to.  Increment *SUBOFF by the bit offset of the accessed member.  */
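/* For example (illustrative, assuming 32-bit int): given

     struct S { int i; int j; };
     static const struct S s = { 1, 2 };

   a read of SIZE 32 at OFFSET 32 overlaps the field 'j' at bit range
   [32, 64), folds to the INTEGER_CST 2 and adds 32 to *SUBOFF.  */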
7947 
7948 static tree
7949 fold_nonarray_ctor_reference (tree type, tree ctor,
7950 			      unsigned HOST_WIDE_INT offset,
7951 			      unsigned HOST_WIDE_INT size,
7952 			      tree from_decl,
7953 			      unsigned HOST_WIDE_INT *suboff)
7954 {
7955   unsigned HOST_WIDE_INT cnt;
7956   tree cfield, cval;
7957 
7958   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
7959     {
7960       tree byte_offset = DECL_FIELD_OFFSET (cfield);
7961       tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
7962       tree field_size = DECL_SIZE (cfield);
7963 
7964       if (!field_size)
7965 	{
7966 	  /* Determine the size of the flexible array member from
7967 	     the size of the initializer provided for it.  */
7968 	  field_size = TYPE_SIZE (TREE_TYPE (cval));
7969 	}
7970 
7971       /* Variable sized objects in static constructors make no sense,
7972 	 but field_size can be NULL for flexible array members.  */
7973       gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
7974 		  && TREE_CODE (byte_offset) == INTEGER_CST
7975 		  && (field_size != NULL_TREE
7976 		      ? TREE_CODE (field_size) == INTEGER_CST
7977 		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
7978 
7979       /* Compute bit offset of the field.  */
7980       offset_int bitoffset
7981 	= (wi::to_offset (field_offset)
7982 	   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
7983       /* Compute bit offset where the field ends.  */
7984       offset_int bitoffset_end;
7985       if (field_size != NULL_TREE)
7986 	bitoffset_end = bitoffset + wi::to_offset (field_size);
7987       else
7988 	bitoffset_end = 0;
7989 
7990       /* Compute the bit offset of the end of the desired access.
7991 	 As a special case, if the size of the desired access is
7992 	 zero, assume the access is to the entire field (and let
7993 	 the caller make any necessary adjustments, based on the bit
7994 	 offset of the field stored in *SUBOFF).  */
7995       offset_int access_end = offset_int (offset);
7996       if (size)
7997 	access_end += size;
7998       else
7999 	access_end = bitoffset_end;
8000 
8001       /* Is there any overlap between the desired access at
8002 	 [OFFSET, OFFSET+SIZE) and the offset of the field within
8003 	 the object at [BITOFFSET, BITOFFSET_END)?  */
8004       if (wi::cmps (access_end, bitoffset) > 0
8005 	  && (field_size == NULL_TREE
8006 	      || wi::lts_p (offset, bitoffset_end)))
8007 	{
8008 	  *suboff += bitoffset.to_uhwi ();
8009 
8010 	  if (!size && TREE_CODE (cval) != CONSTRUCTOR)
8011 	    {
8012 	      /* For the final reference to the entire accessed member
8013 		 (SIZE is zero), reset OFFSET, disregard TYPE (which may
8014 		 be null) in favor of the type of the member, and set
8015 		 SIZE to the size of the accessed member.  */
8016 	      offset = bitoffset.to_uhwi ();
8017 	      type = TREE_TYPE (cval);
8018 	      size = (bitoffset_end - bitoffset).to_uhwi ();
8019 	    }
8020 
8021 	  /* We do have overlap.  Now see if the field is large enough
8022 	     to cover the access.  Give up for accesses that extend
8023 	     beyond the end of the object or that span multiple fields.  */
8024 	  if (wi::cmps (access_end, bitoffset_end) > 0)
8025 	    return NULL_TREE;
8026 	  if (offset < bitoffset)
8027 	    return NULL_TREE;
8028 
8029 	  offset_int inner_offset = offset_int (offset) - bitoffset;
8030 
8031 	  /* Integral bit-fields are left-justified on big-endian targets, so
8032 	     we must arrange for native_encode_int to start at their MSB.  */
8033 	  if (DECL_BIT_FIELD (cfield) && INTEGRAL_TYPE_P (TREE_TYPE (cfield)))
8034 	    {
8035 	      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8036 		return NULL_TREE;
8037 	      const unsigned int encoding_size
8038 		= GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (cfield)));
8039 	      if (BYTES_BIG_ENDIAN)
8040 		inner_offset += encoding_size - wi::to_offset (field_size);
8041 	    }
8042 
8043 	  return fold_ctor_reference (type, cval,
8044 				      inner_offset.to_uhwi (), size,
8045 				      from_decl, suboff);
8046 	}
8047     }
8048 
8049   if (!type)
8050     return NULL_TREE;
8051 
8052   return build_zero_cst (type);
8053 }
8054 
8055 /* CTOR is a value initializing memory.  Fold a reference of TYPE and
8056    bit size POLY_SIZE to the memory at bit POLY_OFFSET.  When POLY_SIZE
8057    is zero, attempt to fold a reference to the entire subobject
8058    which POLY_OFFSET refers to.  This is used when folding accesses to
8059    string members of aggregates.  When non-null, set *SUBOFF to
8060    the bit offset of the accessed subobject.  */
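/* A usage sketch (hedged; it mirrors the callers below and assumes a
   32-bit integer_type_node):

     unsigned HOST_WIDE_INT suboff = 0;
     tree val = fold_ctor_reference (integer_type_node, ctor,
				     0, 32, base, &suboff);

   where a NULL_TREE result means the reference could not be folded
   to a constant.  */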
8061 
8062 tree
8063 fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
8064 		     const poly_uint64 &poly_size, tree from_decl,
8065 		     unsigned HOST_WIDE_INT *suboff /* = NULL */)
8066 {
8067   tree ret;
8068 
8069   /* CTOR itself is an exact match for the access at offset zero.  */
8070   if (type
8071       && useless_type_conversion_p (type, TREE_TYPE (ctor))
8072       && known_eq (poly_offset, 0U))
8073     return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
8074 
8075   /* The remaining optimizations need a constant size and offset.  */
8076   unsigned HOST_WIDE_INT size, offset;
8077   if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
8078     return NULL_TREE;
8079 
8080   /* We are at the end of the walk; see if we can view-convert the
8081      result.  */
8082   if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
8083       /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
8084       && !compare_tree_int (TYPE_SIZE (type), size)
8085       && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
8086     {
8087       ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
8088       if (ret)
8089 	{
8090 	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
8091 	  if (ret)
8092 	    STRIP_USELESS_TYPE_CONVERSION (ret);
8093 	}
8094       return ret;
8095     }
8096 
8097   /* For constants and byte-aligned/sized reads, try to go through
8098      native_encode/interpret.  */
8099   if (CONSTANT_CLASS_P (ctor)
8100       && BITS_PER_UNIT == 8
8101       && offset % BITS_PER_UNIT == 0
8102       && offset / BITS_PER_UNIT <= INT_MAX
8103       && size % BITS_PER_UNIT == 0
8104       && size <= MAX_BITSIZE_MODE_ANY_MODE
8105       && can_native_interpret_type_p (type))
8106     {
8107       unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
8108       int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
8109 				    offset / BITS_PER_UNIT);
8110       if (len > 0)
8111 	return native_interpret_expr (type, buf, len);
8112     }
8113 
8114   /* For constructors, first try recursive local processing; in any case
8115      this requires the native storage order.  */
8116   if (TREE_CODE (ctor) == CONSTRUCTOR
8117       && !(AGGREGATE_TYPE_P (TREE_TYPE (ctor))
8118 	   && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (ctor))))
8119     {
8120       unsigned HOST_WIDE_INT dummy = 0;
8121       if (!suboff)
8122 	suboff = &dummy;
8123 
8124       tree ret;
8125       if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
8126 	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
8127 	ret = fold_array_ctor_reference (type, ctor, offset, size,
8128 					 from_decl, suboff);
8129       else
8130 	ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
8131 					    from_decl, suboff);
8132 
8133       /* Otherwise fall back to native_encode_initializer.  This may be done
8134 	 only from the outermost fold_ctor_reference call (because it itself
8135 	 recurses into CONSTRUCTORs and doesn't update suboff).  */
8136       if (ret == NULL_TREE
8137 	  && suboff == &dummy
8138 	  && BITS_PER_UNIT == 8
8139 	  && offset % BITS_PER_UNIT == 0
8140 	  && offset / BITS_PER_UNIT <= INT_MAX
8141 	  && size % BITS_PER_UNIT == 0
8142 	  && size <= MAX_BITSIZE_MODE_ANY_MODE
8143 	  && can_native_interpret_type_p (type))
8144 	{
8145 	  unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
8146 	  int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
8147 					       offset / BITS_PER_UNIT);
8148 	  if (len > 0)
8149 	    return native_interpret_expr (type, buf, len);
8150 	}
8151 
8152       return ret;
8153     }
8154 
8155   return NULL_TREE;
8156 }
8157 
8158 /* Return the tree representing the element referenced by T if T is an
8159    ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
8160    names using VALUEIZE.  Return NULL_TREE otherwise.  */
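/* For example (illustrative): with

     static const int a[2] = { 7, 9 };

   folding the ARRAY_REF a[1] yields the INTEGER_CST 9; given a
   VALUEIZE callback that maps the SSA name i_1 to 1, a[i_1] folds
   the same way.  */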
8161 
8162 tree
8163 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
8164 {
8165   tree ctor, idx, base;
8166   poly_int64 offset, size, max_size;
8167   tree tem;
8168   bool reverse;
8169 
8170   if (TREE_THIS_VOLATILE (t))
8171     return NULL_TREE;
8172 
8173   if (DECL_P (t))
8174     return get_symbol_constant_value (t);
8175 
8176   tem = fold_read_from_constant_string (t);
8177   if (tem)
8178     return tem;
8179 
8180   switch (TREE_CODE (t))
8181     {
8182     case ARRAY_REF:
8183     case ARRAY_RANGE_REF:
8184       /* Constant indexes are handled well by get_base_constructor.
8185 	 Only special case variable offsets.
8186 	 FIXME: This code can't handle nested references with variable indexes
8187 	 (they will be handled only by iteration of ccp).  Perhaps we can bring
8188 	 get_ref_base_and_extent here and make it use a valueize callback.  */
8189       if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
8190 	  && valueize
8191 	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
8192 	  && poly_int_tree_p (idx))
8193 	{
8194 	  tree low_bound, unit_size;
8195 
8196 	  /* If the resulting bit-offset is constant, track it.  */
8197 	  if ((low_bound = array_ref_low_bound (t),
8198 	       poly_int_tree_p (low_bound))
8199 	      && (unit_size = array_ref_element_size (t),
8200 		  tree_fits_uhwi_p (unit_size)))
8201 	    {
8202 	      poly_offset_int woffset
8203 		= wi::sext (wi::to_poly_offset (idx)
8204 			    - wi::to_poly_offset (low_bound),
8205 			    TYPE_PRECISION (sizetype));
8206 	      woffset *= tree_to_uhwi (unit_size);
8207 	      woffset *= BITS_PER_UNIT;
8208 	      if (woffset.to_shwi (&offset))
8209 		{
8210 		  base = TREE_OPERAND (t, 0);
8211 		  ctor = get_base_constructor (base, &offset, valueize);
8212 		  /* Empty constructor.  Always fold to 0.  */
8213 		  if (ctor == error_mark_node)
8214 		    return build_zero_cst (TREE_TYPE (t));
8215 		  /* Out of bound array access.  Value is undefined,
8216 		     but don't fold.  */
8217 		  if (maybe_lt (offset, 0))
8218 		    return NULL_TREE;
8219 		  /* We cannot determine ctor.  */
8220 		  if (!ctor)
8221 		    return NULL_TREE;
8222 		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
8223 					      tree_to_uhwi (unit_size)
8224 					      * BITS_PER_UNIT,
8225 					      base);
8226 		}
8227 	    }
8228 	}
8229       /* Fallthru.  */
8230 
8231     case COMPONENT_REF:
8232     case BIT_FIELD_REF:
8233     case TARGET_MEM_REF:
8234     case MEM_REF:
8235       base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
8236       ctor = get_base_constructor (base, &offset, valueize);
8237 
8238       /* Empty constructor.  Always fold to 0.  */
8239       if (ctor == error_mark_node)
8240 	return build_zero_cst (TREE_TYPE (t));
8241       /* We do not know precise address.  */
8242       if (!known_size_p (max_size) || maybe_ne (max_size, size))
8243 	return NULL_TREE;
8244       /* We cannot determine ctor.  */
8245       if (!ctor)
8246 	return NULL_TREE;
8247 
8248       /* Out of bound array access.  Value is undefined, but don't fold.  */
8249       if (maybe_lt (offset, 0))
8250 	return NULL_TREE;
8251 
8252       tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
8253       if (tem)
8254 	return tem;
8255 
8256       /* For bit-field reads, try to read the representative and
8257 	 adjust.  */
8258       if (TREE_CODE (t) == COMPONENT_REF
8259 	  && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
8260 	  && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
8261 	{
8262 	  HOST_WIDE_INT csize, coffset;
8263 	  tree field = TREE_OPERAND (t, 1);
8264 	  tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8265 	  if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
8266 	      && size.is_constant (&csize)
8267 	      && offset.is_constant (&coffset)
8268 	      && (coffset % BITS_PER_UNIT != 0
8269 		  || csize % BITS_PER_UNIT != 0)
8270 	      && !reverse
8271 	      && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
8272 	    {
8273 	      poly_int64 bitoffset;
8274 	      poly_uint64 field_offset, repr_offset;
8275 	      if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8276 		  && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
8277 		bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
8278 	      else
8279 		bitoffset = 0;
8280 	      bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8281 			    - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
8282 	      HOST_WIDE_INT bitoff;
8283 	      int diff = (TYPE_PRECISION (TREE_TYPE (repr))
8284 			  - TYPE_PRECISION (TREE_TYPE (field)));
8285 	      if (bitoffset.is_constant (&bitoff)
8286 		  && bitoff >= 0
8287 		  && bitoff <= diff)
8288 		{
8289 		  offset -= bitoff;
8290 		  size = tree_to_uhwi (DECL_SIZE (repr));
8291 
8292 		  tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
8293 					     size, base);
8294 		  if (tem && TREE_CODE (tem) == INTEGER_CST)
8295 		    {
8296 		      if (!BYTES_BIG_ENDIAN)
8297 			tem = wide_int_to_tree (TREE_TYPE (field),
8298 						wi::lrshift (wi::to_wide (tem),
8299 							     bitoff));
8300 		      else
8301 			tem = wide_int_to_tree (TREE_TYPE (field),
8302 						wi::lrshift (wi::to_wide (tem),
8303 							     diff - bitoff));
8304 		      return tem;
8305 		    }
8306 		}
8307 	    }
8308 	}
8309       break;
8310 
8311     case REALPART_EXPR:
8312     case IMAGPART_EXPR:
8313       {
8314 	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
8315 	if (c && TREE_CODE (c) == COMPLEX_CST)
8316 	  return fold_build1_loc (EXPR_LOCATION (t),
8317 				  TREE_CODE (t), TREE_TYPE (t), c);
8318 	break;
8319       }
8320 
8321     default:
8322       break;
8323     }
8324 
8325   return NULL_TREE;
8326 }
8327 
8328 tree
8329 fold_const_aggregate_ref (tree t)
8330 {
8331   return fold_const_aggregate_ref_1 (t, NULL);
8332 }
8333 
8334 /* Look up the virtual method with index TOKEN in a virtual table V
8335    at OFFSET.
8336    If CAN_REFER is non-NULL, set it to false when the method is not
8337    referable or when the virtual table is ill-formed (such as one
8338    rewritten by a non-C++ produced symbol); in that case return NULL.  */
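/* A worked example of the offset arithmetic below (illustrative):
   with 8-byte vtable slots (SIZE == 64 bits, ELT_SIZE == 8), TOKEN == 2
   and OFFSET == 16 bytes, we get

     offset = 16 * BITS_PER_UNIT + 2 * 64 == 256 bits

   and ACCESS_INDEX == 256 / BITS_PER_UNIT / 8 == 4.  */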
8339 
8340 tree
8341 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
8342 				   tree v,
8343 				   unsigned HOST_WIDE_INT offset,
8344 				   bool *can_refer)
8345 {
8346   tree vtable = v, init, fn;
8347   unsigned HOST_WIDE_INT size;
8348   unsigned HOST_WIDE_INT elt_size, access_index;
8349   tree domain_type;
8350 
8351   if (can_refer)
8352     *can_refer = true;
8353 
8354   /* First of all, double-check that we have a virtual table.  */
8355   if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
8356     {
8357       /* Pass down that we lost track of the target.  */
8358       if (can_refer)
8359 	*can_refer = false;
8360       return NULL_TREE;
8361     }
8362 
8363   init = ctor_for_folding (v);
8364 
8365   /* Virtual tables should always be born with constructors
8366      and we should always assume that they are available for
8367      folding.  At the moment we do not stream them in all cases,
8368      but it should never happen that the ctor seems unreachable.  */
8369   gcc_assert (init);
8370   if (init == error_mark_node)
8371     {
8372       /* Pass down that we lost track of the target.  */
8373       if (can_refer)
8374 	*can_refer = false;
8375       return NULL_TREE;
8376     }
8377   gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
8378   size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
8379   offset *= BITS_PER_UNIT;
8380   offset += token * size;
8381 
8382   /* Look up the value in the constructor, which is assumed to be an array.
8383      This is equivalent to
8384      fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
8385 			       offset, size, NULL);
8386      but in constant time.  We expect that the frontend produced a simple
8387      array without indexed initializers.  */
8388 
8389   gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
8390   domain_type = TYPE_DOMAIN (TREE_TYPE (init));
8391   gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
8392   elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
8393 
8394   access_index = offset / BITS_PER_UNIT / elt_size;
8395   gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
8396 
8397   /* The C++ FE can now produce indexed fields, and we check if the indexes
8398      match.  */
8399   if (access_index < CONSTRUCTOR_NELTS (init))
8400     {
8401       fn = CONSTRUCTOR_ELT (init, access_index)->value;
8402       tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
8403       gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
8404       STRIP_NOPS (fn);
8405     }
8406   else
8407     fn = NULL;
8408 
8409   /* For a type-inconsistent program we may end up looking up a virtual
8410      method in a virtual table that does not contain TOKEN entries.  We may
8411      overrun the virtual table and pick up a constant or RTTI info pointer.
8412      In any case the call is undefined.  */
8413   if (!fn
8414       || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
8415       || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
8416     fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
8417   else
8418     {
8419       fn = TREE_OPERAND (fn, 0);
8420 
8421       /* When the cgraph node is missing and the function is not public,
8422 	 we cannot devirtualize.  This can happen in WHOPR when the actual
8423 	 method ends up in another partition because we found the
8424 	 devirtualization opportunity too late.  */
8425       if (!can_refer_decl_in_current_unit_p (fn, vtable))
8426 	{
8427 	  if (can_refer)
8428 	    {
8429 	      *can_refer = false;
8430 	      return fn;
8431 	    }
8432 	  return NULL_TREE;
8433 	}
8434     }
8435 
8436   /* Make sure we create a cgraph node for functions we'll reference.
8437      They can be non-existent if the reference comes from an entry
8438      of an external vtable for example.  */
8439   cgraph_node::get_create (fn);
8440 
8441   return fn;
8442 }
8443 
8444 /* Return a declaration of the function which an OBJ_TYPE_REF references.
8445    TOKEN is the integer form of OBJ_TYPE_REF_TOKEN of the reference
8446    expression.  KNOWN_BINFO carries the binfo describing the true type of
8447    OBJ_TYPE_REF_OBJECT(REF).
8448    If CAN_REFER is non-NULL, set it to false when the method is not
8449    referable or when the virtual table is ill-formed (such as one
8450    rewritten by a non-C++ produced symbol); in that case return NULL.  */
8451 
8452 tree
8453 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
8454 				  bool *can_refer)
8455 {
8456   unsigned HOST_WIDE_INT offset;
8457   tree v;
8458 
8459   v = BINFO_VTABLE (known_binfo);
8460   /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
8461   if (!v)
8462     return NULL_TREE;
8463 
8464   if (!vtable_pointer_value_to_vtable (v, &v, &offset))
8465     {
8466       if (can_refer)
8467 	*can_refer = false;
8468       return NULL_TREE;
8469     }
8470   return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
8471 }
8472 
8473 /* Given a pointer value T, return a simplified version of an
8474    indirection through T, or NULL_TREE if no simplification is
8475    possible.  Note that the resulting type may differ from the
8476    pointed-to type, but only in ways that keep it compatible
8477    from the langhooks point of view.  */
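/* The folds performed below, in source terms (illustrative summary):

     *&x                  =>  x
     *(foo *)&fooarray    =>  fooarray[0]
     *(foo *)&complexfoo  =>  __real__ complexfoo
     *(foo *)&vectorfoo   =>  BIT_FIELD_REF <vectorfoo, ...>
     *(p + 4)             =>  MEM_REF <p, 4>  (for non-decl bases)  */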
8478 
8479 tree
8480 gimple_fold_indirect_ref (tree t)
8481 {
8482   tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
8483   tree sub = t;
8484   tree subtype;
8485 
8486   STRIP_NOPS (sub);
8487   subtype = TREE_TYPE (sub);
8488   if (!POINTER_TYPE_P (subtype)
8489       || TYPE_REF_CAN_ALIAS_ALL (ptype))
8490     return NULL_TREE;
8491 
8492   if (TREE_CODE (sub) == ADDR_EXPR)
8493     {
8494       tree op = TREE_OPERAND (sub, 0);
8495       tree optype = TREE_TYPE (op);
8496       /* *&p => p */
8497       if (useless_type_conversion_p (type, optype))
8498         return op;
8499 
8500       /* *(foo *)&fooarray => fooarray[0] */
8501       if (TREE_CODE (optype) == ARRAY_TYPE
8502 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
8503 	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
8504        {
8505          tree type_domain = TYPE_DOMAIN (optype);
8506          tree min_val = size_zero_node;
8507          if (type_domain && TYPE_MIN_VALUE (type_domain))
8508            min_val = TYPE_MIN_VALUE (type_domain);
8509 	 if (TREE_CODE (min_val) == INTEGER_CST)
8510 	   return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
8511        }
8512       /* *(foo *)&complexfoo => __real__ complexfoo */
8513       else if (TREE_CODE (optype) == COMPLEX_TYPE
8514                && useless_type_conversion_p (type, TREE_TYPE (optype)))
8515         return fold_build1 (REALPART_EXPR, type, op);
8516       /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
8517       else if (TREE_CODE (optype) == VECTOR_TYPE
8518                && useless_type_conversion_p (type, TREE_TYPE (optype)))
8519         {
8520           tree part_width = TYPE_SIZE (type);
8521           tree index = bitsize_int (0);
8522           return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
8523         }
8524     }
8525 
8526   /* *(p + CST) -> ...  */
8527   if (TREE_CODE (sub) == POINTER_PLUS_EXPR
8528       && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
8529     {
8530       tree addr = TREE_OPERAND (sub, 0);
8531       tree off = TREE_OPERAND (sub, 1);
8532       tree addrtype;
8533 
8534       STRIP_NOPS (addr);
8535       addrtype = TREE_TYPE (addr);
8536 
8537       /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
8538       if (TREE_CODE (addr) == ADDR_EXPR
8539 	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
8540 	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
8541 	  && tree_fits_uhwi_p (off))
8542 	{
8543           unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
8544           tree part_width = TYPE_SIZE (type);
8545           unsigned HOST_WIDE_INT part_widthi
8546             = tree_to_shwi (part_width) / BITS_PER_UNIT;
8547           unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
8548           tree index = bitsize_int (indexi);
8549 	  if (known_lt (offset / part_widthi,
8550 			TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
8551             return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
8552                                 part_width, index);
8553 	}
8554 
8555       /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
8556       if (TREE_CODE (addr) == ADDR_EXPR
8557 	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
8558 	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
8559         {
8560           tree size = TYPE_SIZE_UNIT (type);
8561           if (tree_int_cst_equal (size, off))
8562             return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
8563         }
8564 
8565       /* *(p + CST) -> MEM_REF <p, CST>.  */
8566       if (TREE_CODE (addr) != ADDR_EXPR
8567 	  || DECL_P (TREE_OPERAND (addr, 0)))
8568 	return fold_build2 (MEM_REF, type,
8569 			    addr,
8570 			    wide_int_to_tree (ptype, wi::to_wide (off)));
8571     }
8572 
8573   /* *(foo *)fooarrptr => (*fooarrptr)[0] */
8574   if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
8575       && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
8576       && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
8577     {
8578       tree type_domain;
8579       tree min_val = size_zero_node;
8580       tree osub = sub;
8581       sub = gimple_fold_indirect_ref (sub);
8582       if (! sub)
8583 	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
8584       type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
8585       if (type_domain && TYPE_MIN_VALUE (type_domain))
8586         min_val = TYPE_MIN_VALUE (type_domain);
8587       if (TREE_CODE (min_val) == INTEGER_CST)
8588 	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
8589     }
8590 
8591   return NULL_TREE;
8592 }
8593 
8594 /* Return true if CODE is an operation that, when performed on signed
8595    integer types, has undefined behavior on overflow and can be
8596    expressed with unsigned arithmetic instead.  */
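/* For instance (illustrative), PLUS_EXPR qualifies: 'a + b' on signed
   int has undefined behavior on overflow, yet the same bit pattern can
   be computed as '(int) ((unsigned int) a + (unsigned int) b)', which
   is what rewrite_to_defined_overflow below produces.  */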
8597 
8598 bool
8599 arith_code_with_undefined_signed_overflow (tree_code code)
8600 {
8601   switch (code)
8602     {
8603     case ABS_EXPR:
8604     case PLUS_EXPR:
8605     case MINUS_EXPR:
8606     case MULT_EXPR:
8607     case NEGATE_EXPR:
8608     case POINTER_PLUS_EXPR:
8609       return true;
8610     default:
8611       return false;
8612     }
8613 }
8614 
8615 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
8616    operation that can be transformed to unsigned arithmetic by converting
8617    its operand, carrying out the operation in the corresponding unsigned
8618    type and converting the result back to the original type.
8619 
8620    If IN_PLACE is true, adjust the stmt in place and return NULL.
8621    Otherwise returns a sequence of statements that replace STMT and also
8622    contain a modified form of STMT itself.  */
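/* For example (illustrative), for 32-bit int the signed addition

     x_1 = a_2 + b_3;

   is rewritten to the overflow-defined sequence

     _4 = (unsigned int) a_2;
     _5 = (unsigned int) b_3;
     _6 = _4 + _5;
     x_1 = (int) _6;

   so the rewritten statement computes in the unsigned type and the
   final conversion restores the original signed type of the lhs.  */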
8623 
8624 gimple_seq
8625 rewrite_to_defined_overflow (gimple *stmt, bool in_place /* = false */)
8626 {
8627   if (dump_file && (dump_flags & TDF_DETAILS))
8628     {
8629       fprintf (dump_file, "rewriting stmt with undefined signed "
8630 	       "overflow ");
8631       print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
8632     }
8633 
8634   tree lhs = gimple_assign_lhs (stmt);
8635   tree type = unsigned_type_for (TREE_TYPE (lhs));
8636   gimple_seq stmts = NULL;
8637   if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
8638     gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
8639   else
8640     for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
8641       {
8642 	tree op = gimple_op (stmt, i);
8643 	op = gimple_convert (&stmts, type, op);
8644 	gimple_set_op (stmt, i, op);
8645       }
8646   gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
8647   if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
8648     gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
8649   gimple_set_modified (stmt, true);
8650   if (in_place)
8651     {
8652       gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
8653       if (stmts)
8654 	gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);
8655       stmts = NULL;
8656     }
8657   else
8658     gimple_seq_add_stmt (&stmts, stmt);
8659   gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
8660   if (in_place)
8661     {
8662       gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
8663       gsi_insert_after (&gsi, cvt, GSI_SAME_STMT);
8664       update_stmt (stmt);
8665     }
8666   else
8667     gimple_seq_add_stmt (&stmts, cvt);
8668 
8669   return stmts;
8670 }
8671 
8672 
8673 /* The valueization hook we use for the gimple_build API simplification.
8674    This makes us match fold_buildN behavior by only combining with
8675    statements in the sequence(s) we are currently building.  */
8676 
8677 static tree
8678 gimple_build_valueize (tree op)
8679 {
8680   if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
8681     return op;
8682   return NULL_TREE;
8683 }
8684 
8685 /* Build the expression CODE OP0 of type TYPE with location LOC,
8686    simplifying it first if possible.  Returns the built
8687    expression value and appends statements possibly defining it
8688    to SEQ.  */
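/* A typical usage pattern (sketch; assumes the caller stands at a
   gimple_stmt_iterator GSI):

     gimple_seq seq = NULL;
     tree sum = gimple_build (&seq, loc, PLUS_EXPR, type, a, b);
     tree neg = gimple_build (&seq, loc, NEGATE_EXPR, type, sum);
     gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

   Statements needed to define SUM and NEG accumulate in SEQ; when a
   result simplifies to an existing value, nothing is appended.  */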
8689 
8690 tree
8691 gimple_build (gimple_seq *seq, location_t loc,
8692 	      enum tree_code code, tree type, tree op0)
8693 {
8694   tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
8695   if (!res)
8696     {
8697       res = create_tmp_reg_or_ssa_name (type);
8698       gimple *stmt;
8699       if (code == REALPART_EXPR
8700 	  || code == IMAGPART_EXPR
8701 	  || code == VIEW_CONVERT_EXPR)
8702 	stmt = gimple_build_assign (res, code, build1 (code, type, op0));
8703       else
8704 	stmt = gimple_build_assign (res, code, op0);
8705       gimple_set_location (stmt, loc);
8706       gimple_seq_add_stmt_without_update (seq, stmt);
8707     }
8708   return res;
8709 }
8710 
8711 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
8712    simplifying it first if possible.  Returns the built
8713    expression value and appends statements possibly defining it
8714    to SEQ.  */
8715 
8716 tree
8717 gimple_build (gimple_seq *seq, location_t loc,
8718 	      enum tree_code code, tree type, tree op0, tree op1)
8719 {
8720   tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
8721   if (!res)
8722     {
8723       res = create_tmp_reg_or_ssa_name (type);
8724       gimple *stmt = gimple_build_assign (res, code, op0, op1);
8725       gimple_set_location (stmt, loc);
8726       gimple_seq_add_stmt_without_update (seq, stmt);
8727     }
8728   return res;
8729 }
8730 
8731 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
8732    simplifying it first if possible.  Returns the built
8733    expression value and appends statements possibly defining it
8734    to SEQ.  */
8735 
8736 tree
8737 gimple_build (gimple_seq *seq, location_t loc,
8738 	      enum tree_code code, tree type, tree op0, tree op1, tree op2)
8739 {
8740   tree res = gimple_simplify (code, type, op0, op1, op2,
8741 			      seq, gimple_build_valueize);
8742   if (!res)
8743     {
8744       res = create_tmp_reg_or_ssa_name (type);
8745       gimple *stmt;
8746       if (code == BIT_FIELD_REF)
8747 	stmt = gimple_build_assign (res, code,
8748 				    build3 (code, type, op0, op1, op2));
8749       else
8750 	stmt = gimple_build_assign (res, code, op0, op1, op2);
8751       gimple_set_location (stmt, loc);
8752       gimple_seq_add_stmt_without_update (seq, stmt);
8753     }
8754   return res;
8755 }
8756 
8757 /* Build the call FN () with a result of type TYPE (or no result if TYPE is
8758    void) with a location LOC.  Returns the built expression value (or NULL_TREE
8759    if TYPE is void) and appends statements possibly defining it to SEQ.  */
8760 
8761 tree
8762 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn, tree type)
8763 {
8764   tree res = NULL_TREE;
8765   gcall *stmt;
8766   if (internal_fn_p (fn))
8767     stmt = gimple_build_call_internal (as_internal_fn (fn), 0);
8768   else
8769     {
8770       tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8771       stmt = gimple_build_call (decl, 0);
8772     }
8773   if (!VOID_TYPE_P (type))
8774     {
8775       res = create_tmp_reg_or_ssa_name (type);
8776       gimple_call_set_lhs (stmt, res);
8777     }
8778   gimple_set_location (stmt, loc);
8779   gimple_seq_add_stmt_without_update (seq, stmt);
8780   return res;
8781 }
8782 
8783 /* Build the call FN (ARG0) with a result of type TYPE
8784    (or no result if TYPE is void) with location LOC,
8785    simplifying it first if possible.  Returns the built
8786    expression value (or NULL_TREE if TYPE is void) and appends
8787    statements possibly defining it to SEQ.  */
8788 
8789 tree
8790 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8791 	      tree type, tree arg0)
8792 {
8793   tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
8794   if (!res)
8795     {
8796       gcall *stmt;
8797       if (internal_fn_p (fn))
8798 	stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
8799       else
8800 	{
8801 	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8802 	  stmt = gimple_build_call (decl, 1, arg0);
8803 	}
8804       if (!VOID_TYPE_P (type))
8805 	{
8806 	  res = create_tmp_reg_or_ssa_name (type);
8807 	  gimple_call_set_lhs (stmt, res);
8808 	}
8809       gimple_set_location (stmt, loc);
8810       gimple_seq_add_stmt_without_update (seq, stmt);
8811     }
8812   return res;
8813 }
8814 
8815 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
8816    (or no result if TYPE is void) with location LOC,
8817    simplifying it first if possible.  Returns the built
8818    expression value (or NULL_TREE if TYPE is void) and appends
8819    statements possibly defining it to SEQ.  */
8820 
8821 tree
8822 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8823 	      tree type, tree arg0, tree arg1)
8824 {
8825   tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
8826   if (!res)
8827     {
8828       gcall *stmt;
8829       if (internal_fn_p (fn))
8830 	stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
8831       else
8832 	{
8833 	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8834 	  stmt = gimple_build_call (decl, 2, arg0, arg1);
8835 	}
8836       if (!VOID_TYPE_P (type))
8837 	{
8838 	  res = create_tmp_reg_or_ssa_name (type);
8839 	  gimple_call_set_lhs (stmt, res);
8840 	}
8841       gimple_set_location (stmt, loc);
8842       gimple_seq_add_stmt_without_update (seq, stmt);
8843     }
8844   return res;
8845 }
8846 
8847 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
8848    (or no result if TYPE is void) with location LOC,
8849    simplifying it first if possible.  Returns the built
8850    expression value (or NULL_TREE if TYPE is void) and appends
8851    statements possibly defining it to SEQ.  */
8852 
8853 tree
8854 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8855 	      tree type, tree arg0, tree arg1, tree arg2)
8856 {
8857   tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
8858 			      seq, gimple_build_valueize);
8859   if (!res)
8860     {
8861       gcall *stmt;
8862       if (internal_fn_p (fn))
8863 	stmt = gimple_build_call_internal (as_internal_fn (fn),
8864 					   3, arg0, arg1, arg2);
8865       else
8866 	{
8867 	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8868 	  stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
8869 	}
8870       if (!VOID_TYPE_P (type))
8871 	{
8872 	  res = create_tmp_reg_or_ssa_name (type);
8873 	  gimple_call_set_lhs (stmt, res);
8874 	}
8875       gimple_set_location (stmt, loc);
8876       gimple_seq_add_stmt_without_update (seq, stmt);
8877     }
8878   return res;
8879 }
8880 
8881 /* Build CODE (OP0) with a result of type TYPE (or no result if TYPE is
8882    void) with location LOC, simplifying it first if possible.  Returns the
8883    built expression value (or NULL_TREE if TYPE is void) and appends
8884    statements possibly defining it to SEQ.  */
8885 
8886 tree
8887 gimple_build (gimple_seq *seq, location_t loc, code_helper code,
8888 	      tree type, tree op0)
8889 {
8890   if (code.is_tree_code ())
8891     return gimple_build (seq, loc, tree_code (code), type, op0);
8892   return gimple_build (seq, loc, combined_fn (code), type, op0);
8893 }
8894 
8895 /* Build CODE (OP0, OP1) with a result of type TYPE (or no result if TYPE is
8896    void) with location LOC, simplifying it first if possible.  Returns the
8897    built expression value (or NULL_TREE if TYPE is void) and appends
8898    statements possibly defining it to SEQ.  */
8899 
8900 tree
8901 gimple_build (gimple_seq *seq, location_t loc, code_helper code,
8902 	      tree type, tree op0, tree op1)
8903 {
8904   if (code.is_tree_code ())
8905     return gimple_build (seq, loc, tree_code (code), type, op0, op1);
8906   return gimple_build (seq, loc, combined_fn (code), type, op0, op1);
8907 }
8908 
8909 /* Build CODE (OP0, OP1, OP2) with a result of type TYPE (or no result if TYPE
8910    is void) with location LOC, simplifying it first if possible.  Returns the
8911    built expression value (or NULL_TREE if TYPE is void) and appends statements
8912    possibly defining it to SEQ.  */
8913 
8914 tree
8915 gimple_build (gimple_seq *seq, location_t loc, code_helper code,
8916 	      tree type, tree op0, tree op1, tree op2)
8917 {
8918   if (code.is_tree_code ())
8919     return gimple_build (seq, loc, tree_code (code), type, op0, op1, op2);
8920   return gimple_build (seq, loc, combined_fn (code), type, op0, op1, op2);
8921 }
8922 
8923 /* Build the conversion (TYPE) OP with a result of type TYPE
8924    with location LOC if such conversion is necessary in GIMPLE,
8925    simplifying it first.
8926    Returns the built expression value and appends
8927    statements possibly defining it to SEQ.  */
8928 
8929 tree
8930 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
8931 {
8932   if (useless_type_conversion_p (type, TREE_TYPE (op)))
8933     return op;
8934   return gimple_build (seq, loc, NOP_EXPR, type, op);
8935 }
8936 
8937 /* Build the conversion (ptrofftype) OP with a result of a type
8938    compatible with ptrofftype with location LOC if such conversion
8939    is necessary in GIMPLE, simplifying it first.
8940    Returns the built expression value and appends
8941    statements possibly defining it to SEQ.  */
8942 
8943 tree
8944 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
8945 {
8946   if (ptrofftype_p (TREE_TYPE (op)))
8947     return op;
8948   return gimple_convert (seq, loc, sizetype, op);
8949 }
8950 
8951 /* Build a vector of type TYPE in which each element has the value OP.
8952    Return a gimple value for the result, appending any new statements
8953    to SEQ.  */
8954 
8955 tree
8956 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
8957 			      tree op)
8958 {
8959   if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
8960       && !CONSTANT_CLASS_P (op))
8961     return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
8962 
8963   tree res, vec = build_vector_from_val (type, op);
8964   if (is_gimple_val (vec))
8965     return vec;
8966   if (gimple_in_ssa_p (cfun))
8967     res = make_ssa_name (type);
8968   else
8969     res = create_tmp_reg (type);
8970   gimple *stmt = gimple_build_assign (res, vec);
8971   gimple_set_location (stmt, loc);
8972   gimple_seq_add_stmt_without_update (seq, stmt);
8973   return res;
8974 }
8975 
8976 /* Build a vector from BUILDER, handling the case in which some elements
8977    are non-constant.  Return a gimple value for the result, appending any
8978    new instructions to SEQ.
8979 
8980    BUILDER must not have a stepped encoding on entry.  This is because
8981    the function is not geared up to handle the arithmetic that would
8982    be needed in the variable case, and any code building a vector that
8983    is known to be constant should use BUILDER->build () directly.  */
8984 
8985 tree
8986 gimple_build_vector (gimple_seq *seq, location_t loc,
8987 		     tree_vector_builder *builder)
8988 {
8989   gcc_assert (builder->nelts_per_pattern () <= 2);
8990   unsigned int encoded_nelts = builder->encoded_nelts ();
8991   for (unsigned int i = 0; i < encoded_nelts; ++i)
8992     if (!CONSTANT_CLASS_P ((*builder)[i]))
8993       {
8994 	tree type = builder->type ();
8995 	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
8996 	vec<constructor_elt, va_gc> *v;
8997 	vec_alloc (v, nelts);
8998 	for (i = 0; i < nelts; ++i)
8999 	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
9000 
9001 	tree res;
9002 	if (gimple_in_ssa_p (cfun))
9003 	  res = make_ssa_name (type);
9004 	else
9005 	  res = create_tmp_reg (type);
9006 	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
9007 	gimple_set_location (stmt, loc);
9008 	gimple_seq_add_stmt_without_update (seq, stmt);
9009 	return res;
9010       }
9011   return builder->build ();
9012 }
9013 
9014 /* Emit gimple statements into SEQ that take the value given in OLD_SIZE
9015    and generate a value guaranteed to be rounded up to a multiple of ALIGN.
9016 
9017    Return the tree node representing this size; it is of type TYPE.  */
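/* Worked example (illustrative): for OLD_SIZE == 13 and ALIGN == 8 the
   emitted computation is (13 + 7) & ~7 == 16.  ALIGN is expected to be
   a power of two for the mask arithmetic below to be exact.  */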
9018 
9019 tree
9020 gimple_build_round_up (gimple_seq *seq, location_t loc, tree type,
9021 		       tree old_size, unsigned HOST_WIDE_INT align)
9022 {
9023   unsigned HOST_WIDE_INT tg_mask = align - 1;
9024   /* tree new_size = (old_size + tg_mask) & ~tg_mask;  */
9025   gcc_assert (INTEGRAL_TYPE_P (type));
9026   tree tree_mask = build_int_cst (type, tg_mask);
9027   tree oversize = gimple_build (seq, loc, PLUS_EXPR, type, old_size,
9028 				tree_mask);
9029 
9030   tree mask = build_int_cst (type, -align);
9031   return gimple_build (seq, loc, BIT_AND_EXPR, type, oversize, mask);
9032 }
9033 
9034 /* Return true if the result of assignment STMT is known to be non-negative.
9035    If the return value is based on the assumption that signed overflow is
9036    undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
9037    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
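/* For example (illustrative): for a signed y_2, 'x_1 = y_2 * y_2' is
   known to be non-negative only under the assumption that signed
   overflow is undefined, in which case *STRICT_OVERFLOW_P is set to
   true by the tree_*_nonnegative_warnv_p helpers used below.  */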
9038 
9039 static bool
9040 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
9041 				   int depth)
9042 {
9043   enum tree_code code = gimple_assign_rhs_code (stmt);
9044   tree type = TREE_TYPE (gimple_assign_lhs (stmt));
9045   switch (get_gimple_rhs_class (code))
9046     {
9047     case GIMPLE_UNARY_RHS:
9048       return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
9049 					     type,
9050 					     gimple_assign_rhs1 (stmt),
9051 					     strict_overflow_p, depth);
9052     case GIMPLE_BINARY_RHS:
9053       return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
9054 					      type,
9055 					      gimple_assign_rhs1 (stmt),
9056 					      gimple_assign_rhs2 (stmt),
9057 					      strict_overflow_p, depth);
9058     case GIMPLE_TERNARY_RHS:
9059       return false;
9060     case GIMPLE_SINGLE_RHS:
9061       return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
9062 					      strict_overflow_p, depth);
9063     case GIMPLE_INVALID_RHS:
9064       break;
9065     }
9066   gcc_unreachable ();
9067 }
9068 
9069 /* Return true if return value of call STMT is known to be non-negative.
9070    If the return value is based on the assumption that signed overflow is
9071    undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
9072    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
9073 
9074 static bool
9075 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
9076 				 int depth)
9077 {
9078   tree arg0 = gimple_call_num_args (stmt) > 0 ?
9079     gimple_call_arg (stmt, 0) : NULL_TREE;
9080   tree arg1 = gimple_call_num_args (stmt) > 1 ?
9081     gimple_call_arg (stmt, 1) : NULL_TREE;
9082   tree lhs = gimple_call_lhs (stmt);
9083   return (lhs
9084 	  && tree_call_nonnegative_warnv_p (TREE_TYPE (lhs),
9085 					    gimple_call_combined_fn (stmt),
9086 					    arg0, arg1,
9087 					    strict_overflow_p, depth));
9088 }
9089 
9090 /* Return true if the result of the phi STMT is known to be non-negative.
9091    If the return value is based on the assumption that signed overflow is
9092    undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
9093    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
9094 
9095 static bool
9096 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
9097 				int depth)
9098 {
9099   for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
9100     {
9101       tree arg = gimple_phi_arg_def (stmt, i);
9102       if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
9103 	return false;
9104     }
9105   return true;
9106 }
9107 
9108 /* Return true if STMT is known to compute a non-negative value.
9109    If the return value is based on the assumption that signed overflow is
9110    undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
9111    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
9112 
9113 bool
9114 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
9115 				 int depth)
9116 {
9117   switch (gimple_code (stmt))
9118     {
9119     case GIMPLE_ASSIGN:
9120       return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
9121 						depth);
9122     case GIMPLE_CALL:
9123       return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
9124 					      depth);
9125     case GIMPLE_PHI:
9126       return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
9127 					     depth);
9128     default:
9129       return false;
9130     }
9131 }
9132 
9133 /* Return true if the floating-point value computed by assignment STMT
9134    is known to have an integer value.  We also allow +Inf, -Inf and NaN
9135    to be considered integer values. Return false for signaling NaN.
9136 
9137    DEPTH is the current nesting depth of the query.  */
9138 
9139 static bool
9140 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
9141 {
9142   enum tree_code code = gimple_assign_rhs_code (stmt);
9143   switch (get_gimple_rhs_class (code))
9144     {
9145     case GIMPLE_UNARY_RHS:
9146       return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
9147 					  gimple_assign_rhs1 (stmt), depth);
9148     case GIMPLE_BINARY_RHS:
9149       return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
9150 					   gimple_assign_rhs1 (stmt),
9151 					   gimple_assign_rhs2 (stmt), depth);
9152     case GIMPLE_TERNARY_RHS:
9153       return false;
9154     case GIMPLE_SINGLE_RHS:
9155       return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
9156     case GIMPLE_INVALID_RHS:
9157       break;
9158     }
9159   gcc_unreachable ();
9160 }
9161 
9162 /* Return true if the floating-point value computed by call STMT is known
9163    to have an integer value.  We also allow +Inf, -Inf and NaN to be
9164    considered integer values. Return false for signaling NaN.
9165 
9166    DEPTH is the current nesting depth of the query.  */
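/* For example (illustrative), 'x_1 = __builtin_trunc (y_2)' always
   produces an integer value (or an infinity/NaN), so it is accepted
   here regardless of what is known about y_2.  */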
9167 
9168 static bool
9169 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
9170 {
9171   tree arg0 = (gimple_call_num_args (stmt) > 0
9172 	       ? gimple_call_arg (stmt, 0)
9173 	       : NULL_TREE);
9174   tree arg1 = (gimple_call_num_args (stmt) > 1
9175 	       ? gimple_call_arg (stmt, 1)
9176 	       : NULL_TREE);
9177   return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
9178 				     arg0, arg1, depth);
9179 }
9180 
9181 /* Return true if the floating-point result of phi STMT is known to have
9182    an integer value.  We also allow +Inf, -Inf and NaN to be considered
9183    integer values. Return false for signaling NaN.
9184 
9185    DEPTH is the current nesting depth of the query.  */
9186 
9187 static bool
9188 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
9189 {
9190   for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
9191     {
9192       tree arg = gimple_phi_arg_def (stmt, i);
9193       if (!integer_valued_real_single_p (arg, depth + 1))
9194 	return false;
9195     }
9196   return true;
9197 }
9198 
9199 /* Return true if the floating-point value computed by STMT is known
9200    to have an integer value.  We also allow +Inf, -Inf and NaN to be
9201    considered integer values. Return false for signaling NaN.
9202 
9203    DEPTH is the current nesting depth of the query.  */
9204 
9205 bool
9206 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
9207 {
9208   switch (gimple_code (stmt))
9209     {
9210     case GIMPLE_ASSIGN:
9211       return gimple_assign_integer_valued_real_p (stmt, depth);
9212     case GIMPLE_CALL:
9213       return gimple_call_integer_valued_real_p (stmt, depth);
9214     case GIMPLE_PHI:
9215       return gimple_phi_integer_valued_real_p (stmt, depth);
9216     default:
9217       return false;
9218     }
9219 }
9220