xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/gimple-fold.c (revision 82d56013d7b633d116a93943de88e08335357a7c)
1 /* Statement simplification on GIMPLE.
2    Copyright (C) 2010-2019 Free Software Foundation, Inc.
3    Split out from tree-ssa-ccp.c.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
35 #include "stmt.h"
36 #include "expr.h"
37 #include "stor-layout.h"
38 #include "dumpfile.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
45 #include "tree-ssa.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
51 #include "dbgcnt.h"
52 #include "builtins.h"
53 #include "tree-eh.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
58 #include "tree-cfg.h"
59 #include "fold-const-call.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "asan.h"
63 #include "diagnostic-core.h"
64 #include "intl.h"
65 #include "calls.h"
66 #include "tree-vector-builder.h"
67 #include "tree-ssa-strlen.h"
68 
/* Kinds of string-length queries answered by get_range_strlen and
   friends; selects how conservative the computed result must be.  */
enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};
83 
84 static bool
85 get_range_strlen (tree, bitmap *, strlen_range_kind, c_strlen_data *, unsigned);
86 
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
        we devirtualize only during final compilation stage.
        At this time we already decided that we will not output
        the function body and thus we can't reference the symbol
        directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract decls (e.g. origins of inlined instances) never have a body
     to reference.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they are defined and not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
	return false;
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* A function fully inlined everywhere has no standalone body left
	 to reference.  */
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized. */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  /* As above: an everywhere-inlined function cannot be referenced.  */
  return !node || !node->global.inlined_to;
}
189 
190 /* Create a temporary for TYPE for a statement STMT.  If the current function
191    is in SSA form, a SSA name is created.  Otherwise a temporary register
192    is made.  */
193 
194 tree
195 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
196 {
197   if (gimple_in_ssa_p (cfun))
198     return make_ssa_name (type, stmt);
199   else
200     return create_tmp_reg (type);
201 }
202 
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.
   Returns the canonicalized value, or NULL_TREE when the referenced base
   cannot be used from the current unit.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Turn "&ptr + cst" into "&MEM[ptr + cst]" so the whole expression is
     a single invariant address.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Replace the compound literal by its underlying decl.  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Bail out when the base symbol cannot be referenced from here
	 (see can_refer_decl_in_current_unit_p above).  */
      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* Constants with TREE_OVERFLOW are not valid GIMPLE invariants.  */
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
264 
265 /* If SYM is a constant variable with known value, return the value.
266    NULL_TREE is returned otherwise.  */
267 
268 tree
269 get_symbol_constant_value (tree sym)
270 {
271   tree val = ctor_for_folding (sym);
272   if (val != error_mark_node)
273     {
274       if (val)
275 	{
276 	  val = canonicalize_constructor_val (unshare_expr (val), sym);
277 	  if (val && is_gimple_min_invariant (val))
278 	    return val;
279 	  else
280 	    return NULL_TREE;
281 	}
282       /* Variables declared 'const' without an initializer
283 	 have zero as the initializer if they may not be
284 	 overridden at link or run time.  */
285       if (!val
286           && is_gimple_reg_type (TREE_TYPE (sym)))
287 	return build_zero_cst (TREE_TYPE (sym));
288     }
289 
290   return NULL_TREE;
291 }
292 
293 
294 
295 /* Subroutine of fold_stmt.  We perform several simplifications of the
296    memory reference tree EXPR and make sure to re-gimplify them properly
297    after propagation of constant addresses.  IS_LHS is true if the
298    reference is supposed to be an lvalue.  */
299 
300 static tree
301 maybe_fold_reference (tree expr, bool is_lhs)
302 {
303   tree result;
304 
305   if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
306        || TREE_CODE (expr) == REALPART_EXPR
307        || TREE_CODE (expr) == IMAGPART_EXPR)
308       && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
309     return fold_unary_loc (EXPR_LOCATION (expr),
310 			   TREE_CODE (expr),
311 			   TREE_TYPE (expr),
312 			   TREE_OPERAND (expr, 0));
313   else if (TREE_CODE (expr) == BIT_FIELD_REF
314 	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
315     return fold_ternary_loc (EXPR_LOCATION (expr),
316 			     TREE_CODE (expr),
317 			     TREE_TYPE (expr),
318 			     TREE_OPERAND (expr, 0),
319 			     TREE_OPERAND (expr, 1),
320 			     TREE_OPERAND (expr, 2));
321 
322   if (!is_lhs
323       && (result = fold_const_aggregate_ref (expr))
324       && is_gimple_min_invariant (result))
325     return result;
326 
327   return NULL_TREE;
328 }
329 
330 
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers mark end-of-life of a variable; nothing to fold.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    /* Try to devirtualize: resolve the virtual function address
	       to a concrete target when type analysis finds at most one.  */
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    /* Fold the address operand as an lvalue reference and rebuild
	       the ADDR_EXPR around whatever comes back.  */
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

	    if (result)
	      {
		/* Strip away useless type conversions.  Both the
		   NON_LVALUE_EXPR that may have been added by fold, and
		   "useless" type conversions that might now be apparent
		   due to propagation.  */
		STRIP_USELESS_TYPE_CONVERSION (result);

		if (result != rhs && valid_gimple_rhs_p (result))
		  return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);
      }
      break;

    /* Unary and binary RHSs are handled elsewhere (e.g. by match.pd
       driven simplification); nothing to do here.  */
    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
	    return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
469 
470 
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* A statement needs a VDEF if it is a store or a call that may
	 clobber memory (not NOVOPS/PURE/CONST/NORETURN).  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence (first one seen walking
	     backward) reuses the original statement's VDEF so the
	     downstream VUSE chain stays valid; earlier stores get
	     fresh SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
542 
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produces a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  /* Gimplification below must not create SSA names unless we are in
     SSA form.  */
  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      /* No result needed: gimplify EXPR purely for its side effects.  */
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      /* Materialize EXPR as a GIMPLE operand and assign it to the
	 original call's result.  */
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  /* Splice the sequence in, rewiring virtual operands (see
     gsi_replace_with_seq_vops).  */
  gsi_replace_with_seq_vops (si_p, stmts);
}
598 
599 
600 /* Replace the call at *GSI with the gimple value VAL.  */
601 
602 void
603 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
604 {
605   gimple *stmt = gsi_stmt (*gsi);
606   tree lhs = gimple_call_lhs (stmt);
607   gimple *repl;
608   if (lhs)
609     {
610       if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
611 	val = fold_convert (TREE_TYPE (lhs), val);
612       repl = gimple_build_assign (lhs, val);
613     }
614   else
615     repl = gimple_build_nop ();
616   tree vdef = gimple_vdef (stmt);
617   if (vdef && TREE_CODE (vdef) == SSA_NAME)
618     {
619       unlink_stmt_vdef (stmt);
620       release_ssa_name (vdef);
621     }
622   gsi_replace (gsi, repl, false);
623 }
624 
625 /* Replace the call at *GSI with the new call REPL and fold that
626    again.  */
627 
628 static void
629 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
630 {
631   gimple *stmt = gsi_stmt (*gsi);
632   gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
633   gimple_set_location (repl, gimple_location (stmt));
634   if (gimple_vdef (stmt)
635       && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
636     {
637       gimple_set_vdef (repl, gimple_vdef (stmt));
638       SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
639     }
640   if (gimple_vuse (stmt))
641     gimple_set_vuse (repl, gimple_vuse (stmt));
642   gsi_replace (gsi, repl, false);
643   fold_stmt (gsi);
644 }
645 
646 /* Return true if VAR is a VAR_DECL or a component thereof.  */
647 
648 static bool
649 var_decl_component_p (tree var)
650 {
651   tree inner = var;
652   while (handled_component_p (inner))
653     inner = TREE_OPERAND (inner, 0);
654   return (DECL_P (inner)
655 	  || (TREE_CODE (inner) == MEM_REF
656 	      && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
657 }
658 
/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  /* Without an SSA name there is no range information to consult.  */
  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  /* Valid object sizes are [0, SSIZE_MAX]; intersect SIZE's known
     range with it and test whether only zero remains.  */
  value_range valid_range (VR_RANGE,
			   build_int_cst (type, 0),
			   wide_int_to_tree (type, ssize_max));
  value_range vr;
  get_range_info (size, vr);
  vr.intersect (&valid_range);
  return vr.zero_p ();
}
685 
686 /* Fold function call to builtin mem{{,p}cpy,move}.  Try to detect and
687    diagnose (otherwise undefined) overlapping copies without preventing
688    folding.  When folded, GCC guarantees that overlapping memcpy has
689    the same semantics as memmove.  Call to the library memcpy need not
690    provide the same guarantee.  Return false if no simplification can
691    be made.  */
692 
693 static bool
694 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
695 			       tree dest, tree src, int endp)
696 {
697   gimple *stmt = gsi_stmt (*gsi);
698   tree lhs = gimple_call_lhs (stmt);
699   tree len = gimple_call_arg (stmt, 2);
700   tree destvar, srcvar;
701   location_t loc = gimple_location (stmt);
702 
703   /* If the LEN parameter is a constant zero or in range where
704      the only valid value is zero, return DEST.  */
705   if (size_must_be_zero_p (len))
706     {
707       gimple *repl;
708       if (gimple_call_lhs (stmt))
709 	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
710       else
711 	repl = gimple_build_nop ();
712       tree vdef = gimple_vdef (stmt);
713       if (vdef && TREE_CODE (vdef) == SSA_NAME)
714 	{
715 	  unlink_stmt_vdef (stmt);
716 	  release_ssa_name (vdef);
717 	}
718       gsi_replace (gsi, repl, false);
719       return true;
720     }
721 
722   /* If SRC and DEST are the same (and not volatile), return
723      DEST{,+LEN,+LEN-1}.  */
724   if (operand_equal_p (src, dest, 0))
725     {
726       /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
727 	 It's safe and may even be emitted by GCC itself (see bug
728 	 32667).  */
729       unlink_stmt_vdef (stmt);
730       if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
731 	release_ssa_name (gimple_vdef (stmt));
732       if (!lhs)
733 	{
734 	  gsi_replace (gsi, gimple_build_nop (), false);
735 	  return true;
736 	}
737       goto done;
738     }
739   else
740     {
741       tree srctype, desttype;
742       unsigned int src_align, dest_align;
743       tree off0;
744       const char *tmp_str;
745       unsigned HOST_WIDE_INT tmp_len;
746 
747       /* Build accesses at offset zero with a ref-all character type.  */
748       off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
749 							 ptr_mode, true), 0);
750 
751       /* If we can perform the copy efficiently with first doing all loads
752          and then all stores inline it that way.  Currently efficiently
753 	 means that we can load all the memory into a single integer
754 	 register which is what MOVE_MAX gives us.  */
755       src_align = get_pointer_alignment (src);
756       dest_align = get_pointer_alignment (dest);
757       if (tree_fits_uhwi_p (len)
758 	  && compare_tree_int (len, MOVE_MAX) <= 0
759 	  /* ???  Don't transform copies from strings with known length this
760 	     confuses the tree-ssa-strlen.c.  This doesn't handle
761 	     the case in gcc.dg/strlenopt-8.c which is XFAILed for that
762 	     reason.  */
763 	  && !c_strlen (src, 2)
764 	  && !((tmp_str = c_getstr (src, &tmp_len)) != NULL
765 	       && memchr (tmp_str, 0, tmp_len) == NULL))
766 	{
767 	  unsigned ilen = tree_to_uhwi (len);
768 	  if (pow2p_hwi (ilen))
769 	    {
770 	      /* Detect out-of-bounds accesses without issuing warnings.
771 		 Avoid folding out-of-bounds copies but to avoid false
772 		 positives for unreachable code defer warning until after
773 		 DCE has worked its magic.
774 		 -Wrestrict is still diagnosed.  */
775 	      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
776 							 dest, src, len, len,
777 							 false, false))
778 		if (warning != OPT_Wrestrict)
779 		  return false;
780 
781 	      scalar_int_mode mode;
782 	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
783 	      if (type
784 		  && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
785 		  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
786 		  /* If the destination pointer is not aligned we must be able
787 		     to emit an unaligned store.  */
788 		  && (dest_align >= GET_MODE_ALIGNMENT (mode)
789 		      || !targetm.slow_unaligned_access (mode, dest_align)
790 		      || (optab_handler (movmisalign_optab, mode)
791 			  != CODE_FOR_nothing)))
792 		{
793 		  tree srctype = type;
794 		  tree desttype = type;
795 		  if (src_align < GET_MODE_ALIGNMENT (mode))
796 		    srctype = build_aligned_type (type, src_align);
797 		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
798 		  tree tem = fold_const_aggregate_ref (srcmem);
799 		  if (tem)
800 		    srcmem = tem;
801 		  else if (src_align < GET_MODE_ALIGNMENT (mode)
802 			   && targetm.slow_unaligned_access (mode, src_align)
803 			   && (optab_handler (movmisalign_optab, mode)
804 			       == CODE_FOR_nothing))
805 		    srcmem = NULL_TREE;
806 		  if (srcmem)
807 		    {
808 		      gimple *new_stmt;
809 		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
810 			{
811 			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
812 			  srcmem
813 			    = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
814 							  new_stmt);
815 			  gimple_assign_set_lhs (new_stmt, srcmem);
816 			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
817 			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
818 			}
819 		      if (dest_align < GET_MODE_ALIGNMENT (mode))
820 			desttype = build_aligned_type (type, dest_align);
821 		      new_stmt
822 			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
823 							    dest, off0),
824 					       srcmem);
825 		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
826 		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
827 		      if (gimple_vdef (new_stmt)
828 			  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
829 			SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
830 		      if (!lhs)
831 			{
832 			  gsi_replace (gsi, new_stmt, false);
833 			  return true;
834 			}
835 		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
836 		      goto done;
837 		    }
838 		}
839 	    }
840 	}
841 
842       if (endp == 3)
843 	{
844 	  /* Both DEST and SRC must be pointer types.
845 	     ??? This is what old code did.  Is the testing for pointer types
846 	     really mandatory?
847 
848 	     If either SRC is readonly or length is 1, we can use memcpy.  */
849 	  if (!dest_align || !src_align)
850 	    return false;
851 	  if (readonly_data_expr (src)
852 	      || (tree_fits_uhwi_p (len)
853 		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
854 		      >= tree_to_uhwi (len))))
855 	    {
856 	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
857 	      if (!fn)
858 		return false;
859 	      gimple_call_set_fndecl (stmt, fn);
860 	      gimple_call_set_arg (stmt, 0, dest);
861 	      gimple_call_set_arg (stmt, 1, src);
862 	      fold_stmt (gsi);
863 	      return true;
864 	    }
865 
866 	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
867 	  if (TREE_CODE (src) == ADDR_EXPR
868 	      && TREE_CODE (dest) == ADDR_EXPR)
869 	    {
870 	      tree src_base, dest_base, fn;
871 	      poly_int64 src_offset = 0, dest_offset = 0;
872 	      poly_uint64 maxsize;
873 
874 	      srcvar = TREE_OPERAND (src, 0);
875 	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
876 	      if (src_base == NULL)
877 		src_base = srcvar;
878 	      destvar = TREE_OPERAND (dest, 0);
879 	      dest_base = get_addr_base_and_unit_offset (destvar,
880 							 &dest_offset);
881 	      if (dest_base == NULL)
882 		dest_base = destvar;
883 	      if (!poly_int_tree_p (len, &maxsize))
884 		maxsize = -1;
885 	      if (SSA_VAR_P (src_base)
886 		  && SSA_VAR_P (dest_base))
887 		{
888 		  if (operand_equal_p (src_base, dest_base, 0)
889 		      && ranges_maybe_overlap_p (src_offset, maxsize,
890 						 dest_offset, maxsize))
891 		    return false;
892 		}
893 	      else if (TREE_CODE (src_base) == MEM_REF
894 		       && TREE_CODE (dest_base) == MEM_REF)
895 		{
896 		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
897 					 TREE_OPERAND (dest_base, 0), 0))
898 		    return false;
899 		  poly_offset_int full_src_offset
900 		    = mem_ref_offset (src_base) + src_offset;
901 		  poly_offset_int full_dest_offset
902 		    = mem_ref_offset (dest_base) + dest_offset;
903 		  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
904 					      full_dest_offset, maxsize))
905 		    return false;
906 		}
907 	      else
908 		return false;
909 
910 	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
911 	      if (!fn)
912 		return false;
913 	      gimple_call_set_fndecl (stmt, fn);
914 	      gimple_call_set_arg (stmt, 0, dest);
915 	      gimple_call_set_arg (stmt, 1, src);
916 	      fold_stmt (gsi);
917 	      return true;
918 	    }
919 
920 	  /* If the destination and source do not alias optimize into
921 	     memcpy as well.  */
922 	  if ((is_gimple_min_invariant (dest)
923 	       || TREE_CODE (dest) == SSA_NAME)
924 	      && (is_gimple_min_invariant (src)
925 		  || TREE_CODE (src) == SSA_NAME))
926 	    {
927 	      ao_ref destr, srcr;
928 	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
929 	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
930 	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
931 		{
932 		  tree fn;
933 		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
934 		  if (!fn)
935 		    return false;
936 		  gimple_call_set_fndecl (stmt, fn);
937 		  gimple_call_set_arg (stmt, 0, dest);
938 		  gimple_call_set_arg (stmt, 1, src);
939 		  fold_stmt (gsi);
940 		  return true;
941 		}
942 	    }
943 
944 	  return false;
945 	}
946 
947       if (!tree_fits_shwi_p (len))
948 	return false;
949       if (!POINTER_TYPE_P (TREE_TYPE (src))
950 	  || !POINTER_TYPE_P (TREE_TYPE (dest)))
951 	return false;
952       /* In the following try to find a type that is most natural to be
953 	 used for the memcpy source and destination and that allows
954 	 the most optimization when memcpy is turned into a plain assignment
955 	 using that type.  In theory we could always use a char[len] type
956 	 but that only gains us that the destination and source possibly
957 	 no longer will have their address taken.  */
958       srctype = TREE_TYPE (TREE_TYPE (src));
959       if (TREE_CODE (srctype) == ARRAY_TYPE
960 	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
961 	srctype = TREE_TYPE (srctype);
962       desttype = TREE_TYPE (TREE_TYPE (dest));
963       if (TREE_CODE (desttype) == ARRAY_TYPE
964 	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
965 	desttype = TREE_TYPE (desttype);
966       if (TREE_ADDRESSABLE (srctype)
967 	  || TREE_ADDRESSABLE (desttype))
968 	return false;
969 
970       /* Make sure we are not copying using a floating-point mode or
971          a type whose size possibly does not match its precision.  */
972       if (FLOAT_MODE_P (TYPE_MODE (desttype))
973 	  || TREE_CODE (desttype) == BOOLEAN_TYPE
974 	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
975 	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
976       if (FLOAT_MODE_P (TYPE_MODE (srctype))
977 	  || TREE_CODE (srctype) == BOOLEAN_TYPE
978 	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
979 	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
980       if (!srctype)
981 	srctype = desttype;
982       if (!desttype)
983 	desttype = srctype;
984       if (!srctype)
985 	return false;
986 
987       src_align = get_pointer_alignment (src);
988       dest_align = get_pointer_alignment (dest);
989       if (dest_align < TYPE_ALIGN (desttype)
990 	  || src_align < TYPE_ALIGN (srctype))
991 	return false;
992 
993       destvar = NULL_TREE;
994       if (TREE_CODE (dest) == ADDR_EXPR
995 	  && var_decl_component_p (TREE_OPERAND (dest, 0))
996 	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
997 	destvar = fold_build2 (MEM_REF, desttype, dest, off0);
998 
999       srcvar = NULL_TREE;
1000       if (TREE_CODE (src) == ADDR_EXPR
1001 	  && var_decl_component_p (TREE_OPERAND (src, 0))
1002 	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1003 	{
1004 	  if (!destvar
1005 	      || src_align >= TYPE_ALIGN (desttype))
1006 	    srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1007 				  src, off0);
1008 	  else if (!STRICT_ALIGNMENT)
1009 	    {
1010 	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1011 					    src_align);
1012 	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1013 	    }
1014 	}
1015 
1016       if (srcvar == NULL_TREE && destvar == NULL_TREE)
1017 	return false;
1018 
1019       if (srcvar == NULL_TREE)
1020 	{
1021 	  if (src_align >= TYPE_ALIGN (desttype))
1022 	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1023 	  else
1024 	    {
1025 	      if (STRICT_ALIGNMENT)
1026 		return false;
1027 	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1028 					    src_align);
1029 	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1030 	    }
1031 	}
1032       else if (destvar == NULL_TREE)
1033 	{
1034 	  if (dest_align >= TYPE_ALIGN (srctype))
1035 	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1036 	  else
1037 	    {
1038 	      if (STRICT_ALIGNMENT)
1039 		return false;
1040 	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1041 					     dest_align);
1042 	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1043 	    }
1044 	}
1045 
1046       /* Same as above, detect out-of-bounds accesses without issuing
1047 	 warnings.  Avoid folding out-of-bounds copies but to avoid
1048 	 false positives for unreachable code defer warning until
1049 	 after DCE has worked its magic.
1050 	 -Wrestrict is still diagnosed.  */
1051       if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
1052 						 dest, src, len, len,
1053 						 false, false))
1054 	if (warning != OPT_Wrestrict)
1055 	  return false;
1056 
1057       gimple *new_stmt;
1058       if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1059 	{
1060 	  tree tem = fold_const_aggregate_ref (srcvar);
1061 	  if (tem)
1062 	    srcvar = tem;
1063 	  if (! is_gimple_min_invariant (srcvar))
1064 	    {
1065 	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1066 	      srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1067 						   new_stmt);
1068 	      gimple_assign_set_lhs (new_stmt, srcvar);
1069 	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1070 	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1071 	    }
1072 	  new_stmt = gimple_build_assign (destvar, srcvar);
1073 	  goto set_vop_and_replace;
1074 	}
1075 
1076       /* We get an aggregate copy.  Use an unsigned char[] type to
1077 	 perform the copying to preserve padding and to avoid any issues
1078 	 with TREE_ADDRESSABLE types or float modes behavior on copying.  */
1079       desttype = build_array_type_nelts (unsigned_char_type_node,
1080 					 tree_to_uhwi (len));
1081       srctype = desttype;
1082       if (src_align > TYPE_ALIGN (srctype))
1083 	srctype = build_aligned_type (srctype, src_align);
1084       if (dest_align > TYPE_ALIGN (desttype))
1085 	desttype = build_aligned_type (desttype, dest_align);
1086       new_stmt
1087 	= gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1088 			       fold_build2 (MEM_REF, srctype, src, off0));
1089 set_vop_and_replace:
1090       gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1091       gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1092       if (gimple_vdef (new_stmt)
1093 	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1094 	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1095       if (!lhs)
1096 	{
1097 	  gsi_replace (gsi, new_stmt, false);
1098 	  return true;
1099 	}
1100       gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1101     }
1102 
1103 done:
1104   gimple_seq stmts = NULL;
1105   if (endp == 0 || endp == 3)
1106     len = NULL_TREE;
1107   else if (endp == 2)
1108     len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1109 			ssize_int (1));
1110   if (endp == 2 || endp == 1)
1111     {
1112       len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1113       dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1114 			   TREE_TYPE (dest), dest, len);
1115     }
1116 
1117   gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1118   gimple *repl = gimple_build_assign (lhs, dest);
1119   gsi_replace (gsi, repl, false);
1120   return true;
1121 }
1122 
1123 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1124    to built-in memcmp (a, b, len).  */
1125 
1126 static bool
1127 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1128 {
1129   tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1130 
1131   if (!fn)
1132     return false;
1133 
1134   /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */
1135 
1136   gimple *stmt = gsi_stmt (*gsi);
1137   tree a = gimple_call_arg (stmt, 0);
1138   tree b = gimple_call_arg (stmt, 1);
1139   tree len = gimple_call_arg (stmt, 2);
1140 
1141   gimple *repl = gimple_build_call (fn, 3, a, b, len);
1142   replace_call_with_call_and_fold (gsi, repl);
1143 
1144   return true;
1145 }
1146 
1147 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1148    to built-in memmove (dest, src, len).  */
1149 
1150 static bool
1151 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1152 {
1153   tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1154 
1155   if (!fn)
1156     return false;
1157 
1158   /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1159      it's quivalent to memmove (not memcpy).  Transform bcopy (src, dest,
1160      len) into memmove (dest, src, len).  */
1161 
1162   gimple *stmt = gsi_stmt (*gsi);
1163   tree src = gimple_call_arg (stmt, 0);
1164   tree dest = gimple_call_arg (stmt, 1);
1165   tree len = gimple_call_arg (stmt, 2);
1166 
1167   gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1168   gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1169   replace_call_with_call_and_fold (gsi, repl);
1170 
1171   return true;
1172 }
1173 
1174 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1175    to built-in memset (dest, 0, len).  */
1176 
1177 static bool
1178 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1179 {
1180   tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1181 
1182   if (!fn)
1183     return false;
1184 
1185   /* Transform bzero (dest, len) into memset (dest, 0, len).  */
1186 
1187   gimple *stmt = gsi_stmt (*gsi);
1188   tree dest = gimple_call_arg (stmt, 0);
1189   tree len = gimple_call_arg (stmt, 1);
1190 
1191   gimple_seq seq = NULL;
1192   gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1193   gimple_seq_add_stmt_without_update (&seq, repl);
1194   gsi_replace_with_seq_vops (gsi, seq);
1195   fold_stmt (gsi);
1196 
1197   return true;
1198 }
1199 
1200 /* Fold function call to builtin memset or bzero at *GSI setting the
1201    memory of size LEN to VAL.  Return whether a simplification was made.  */
1202 
1203 static bool
1204 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1205 {
1206   gimple *stmt = gsi_stmt (*gsi);
1207   tree etype;
1208   unsigned HOST_WIDE_INT length, cval;
1209 
1210   /* If the LEN parameter is zero, return DEST.  */
1211   if (integer_zerop (len))
1212     {
1213       replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1214       return true;
1215     }
1216 
1217   if (! tree_fits_uhwi_p (len))
1218     return false;
1219 
1220   if (TREE_CODE (c) != INTEGER_CST)
1221     return false;
1222 
1223   tree dest = gimple_call_arg (stmt, 0);
1224   tree var = dest;
1225   if (TREE_CODE (var) != ADDR_EXPR)
1226     return false;
1227 
1228   var = TREE_OPERAND (var, 0);
1229   if (TREE_THIS_VOLATILE (var))
1230     return false;
1231 
1232   etype = TREE_TYPE (var);
1233   if (TREE_CODE (etype) == ARRAY_TYPE)
1234     etype = TREE_TYPE (etype);
1235 
1236   if (!INTEGRAL_TYPE_P (etype)
1237       && !POINTER_TYPE_P (etype))
1238     return NULL_TREE;
1239 
1240   if (! var_decl_component_p (var))
1241     return NULL_TREE;
1242 
1243   length = tree_to_uhwi (len);
1244   if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1245       || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1246     return NULL_TREE;
1247 
1248   if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1249     return NULL_TREE;
1250 
1251   if (integer_zerop (c))
1252     cval = 0;
1253   else
1254     {
1255       if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1256 	return NULL_TREE;
1257 
1258       cval = TREE_INT_CST_LOW (c);
1259       cval &= 0xff;
1260       cval |= cval << 8;
1261       cval |= cval << 16;
1262       cval |= (cval << 31) << 1;
1263     }
1264 
1265   var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1266   gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1267   gimple_set_vuse (store, gimple_vuse (stmt));
1268   tree vdef = gimple_vdef (stmt);
1269   if (vdef && TREE_CODE (vdef) == SSA_NAME)
1270     {
1271       gimple_set_vdef (store, gimple_vdef (stmt));
1272       SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1273     }
1274   gsi_insert_before (gsi, store, GSI_SAME_STMT);
1275   if (gimple_call_lhs (stmt))
1276     {
1277       gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1278       gsi_replace (gsi, asgn, false);
1279     }
1280   else
1281     {
1282       gimple_stmt_iterator gsi2 = *gsi;
1283       gsi_prev (gsi);
1284       gsi_remove (&gsi2, true);
1285     }
1286 
1287   return true;
1288 }
1289 
/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  Determine
   the length, length range, or maximum integer value (depending on
   RKIND) of ARG and record it in *PDATA.  ELTSIZE is the size of the
   string's character type (1 for narrow, 2 or 4 for wide strings).
   Return true on success.  */

static bool
get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
		       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
	{
	  /* For &PTR->ARRAY[0] strip the array reference and recurse
	     on the underlying SSA pointer.  */
	  tree aop0 = TREE_OPERAND (op, 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
				     pdata, eltsize);
	}
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
	       && rkind == SRK_LENRANGE)
	{
	  /* Fail if an array is the last member of a struct object
	     since it could be treated as a (fake) flexible array
	     member.  */
	  tree idx = TREE_OPERAND (op, 1);

	  arg = TREE_OPERAND (op, 0);
	  tree optype = TREE_TYPE (arg);
	  if (tree dom = TYPE_DOMAIN (optype))
	    if (tree bound = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (bound) == INTEGER_CST
		  && TREE_CODE (idx) == INTEGER_CST
		  && tree_int_cst_lt (bound, idx))
		return false;
	}
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
	  || tree_int_cst_sgn (val) < 0)
	return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      if (!val && lendata.decl)
	{
	  /* ARG refers to an unterminated const character array.
	     DATA.DECL with size DATA.LEN.  */
	  val = lendata.minlen;
	  pdata->decl = lendata.decl;
	}
    }

  /* When no constant length could be computed, fall back on the size
     of the referenced array or declared object as an upper bound.  */
  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
	return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
				 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
	{
	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Avoid arrays of pointers.  */
	  tree eltype = TREE_TYPE (optype);
	  if (TREE_CODE (optype) != ARRAY_TYPE
	      || !INTEGRAL_TYPE_P (eltype))
	    return false;

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val || integer_zerop (val))
	    return false;

	  /* The longest string the array can hold is its size less
	     one byte for the terminating nul.  */
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			      integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		   == ARRAY_TYPE))
	{
	  /* Use the type of the member array to determine the upper
	     bound on the length of the array.  This may be overly
	     optimistic if the array itself isn't NUL-terminated and
	     the caller relies on the subsequent member to contain
	     the NUL but that would only be considered valid if
	     the array were the last member of a struct.  */

	  tree fld = TREE_OPERAND (arg, 1);

	  tree optype = TREE_TYPE (fld);

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val || integer_zerop (val))
	    return false;
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  /* The array size determined above is an optimistic bound
	     on the length.  If the array isn't nul-terminated the
	     length computed by the library function would be greater.
	     Even though using strlen to cross the subobject boundary
	     is undefined, avoid drawing conclusions from the member
	     type about the length here.  */
	  tight_bound = true;
	}
      else if (VAR_P (arg))
	{
	  /* Avoid handling pointers to arrays.  GCC might misuse
	     a pointer to an array of one bound to point to an array
	     object of a greater bound.  */
	  tree argtype = TREE_TYPE (arg);
	  if (TREE_CODE (argtype) == ARRAY_TYPE)
	    {
	      val = TYPE_SIZE_UNIT (argtype);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;
	      /* Upper bound is the array size less 1 for the nul.  */
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));

	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      pdata->minlen = ssize_int (0);
	    }
	}
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
	  && TREE_CODE (pdata->minlen) == INTEGER_CST
	  && TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound)
    {
      /* Adjust the tighter (more optimistic) string length bound
	 if necessary and proceed to adjust the more conservative
	 bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
	{
	  if (TREE_CODE (pdata->maxbound) == INTEGER_CST)
	    {
	      if (tree_int_cst_lt (pdata->maxbound, val))
		pdata->maxbound = val;
	    }
	  else
	    pdata->maxbound = build_all_ones_cst (size_type_node);
	}
      else
	pdata->maxbound = val;
    }
  else
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
	 on the length of the string based on the referenced object's
	 or subobject's type.  Determine the conservative upper bound
	 based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
	{
	  poly_int64 offset;
	  tree base = get_addr_base_and_unit_offset (arg, &offset);
	  if (!base)
	    {
	      /* When the call above fails due to a non-constant offset
		 assume the offset is zero and use the size of the whole
		 enclosing object instead.  */
	      base = get_base_address (arg);
	      offset = 0;
	    }
	  /* If the base object is a pointer no upper bound on the length
	     can be determined.  Otherwise the maximum length is equal to
	     the size of the enclosing object minus the offset of
	     the referenced subobject minus 1 (for the terminating nul).  */
	  tree type = TREE_TYPE (base);
	  if (TREE_CODE (type) == POINTER_TYPE
	      || !VAR_P (base) || !(val = DECL_SIZE_UNIT (base)))
	    val = build_all_ones_cst (size_type_node);
	  else
	    {
	      val = DECL_SIZE_UNIT (base);
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 size_int (offset + 1));
	    }
	}
      else
	return false;
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
	 and fail otherwise.  */
      if (rkind != SRK_STRLEN)
	{
	  if (TREE_CODE (pdata->maxlen) != INTEGER_CST
	      || TREE_CODE (val) != INTEGER_CST)
	    return false;

	  if (tree_int_cst_lt (pdata->maxlen, val))
	    pdata->maxlen = val;
	  return true;
	}
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
	{
	  /* Fail if the length of this ARG is different from that
	     previously determined from another ARG.  */
	  return false;
	}
    }

  pdata->maxlen = val;
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}
1552 
/* For an ARG referencing one or more strings, try to obtain the range
   of their lengths, or the size of the largest array ARG refers to if
   the range of lengths cannot be determined, and store all in *PDATA.
   For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
   the maximum constant value.
   If ARG is an SSA_NAME, follow its use-def chains.  When RKIND ==
   SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
   length or if we are unable to determine the length, return false.
   VISITED is a bitmap of visited variables.
   RKIND determines the kind of value or range to obtain (see
   strlen_range_kind).
   Set PDATA->DECL if ARG refers to an unterminated constant array.
   On input, set ELTSIZE to 1 for normal single byte character strings,
   and either 2 or 4 for wide character strings (the size of wchar_t).
   Return true if *PDATA was successfully populated and false otherwise.  */

static bool
get_range_strlen (tree arg, bitmap *visited,
		  strlen_range_kind rkind,
		  c_strlen_data *pdata, unsigned eltsize)
{

  /* Non-SSA names are handled entirely by the tree helper.  */
  if (TREE_CODE (arg) != SSA_NAME)
    return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  tree var = arg;
  gimple *def_stmt = SSA_NAME_DEF_STMT (var);

  /* Recurse through the statement defining VAR.  */
  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
        if (gimple_assign_single_p (def_stmt)
            || gimple_assign_unary_nop_p (def_stmt))
          {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
          }
	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	  {
	    /* Merge the ranges determined from both arms of the
	       conditional.  */
	    tree ops[2] = { gimple_assign_rhs2 (def_stmt),
			    gimple_assign_rhs3 (def_stmt) };

	    for (unsigned int i = 0; i < 2; i++)
	      if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
		{
		  if (rkind != SRK_LENRANGE)
		    return false;
		  /* Set the upper bound to the maximum to prevent
		     it from being adjusted in the next iteration but
		     leave MINLEN and the more conservative MAXBOUND
		     determined so far alone (or leave them null if
		     they haven't been set yet).  That the MINLEN is
		     in fact zero can be determined from MAXLEN being
		     unbounded but the discovered minimum is used for
		     diagnostics.  */
		  pdata->maxlen = build_all_ones_cst (size_type_node);
		}
	    return true;
	  }
        return false;

      case GIMPLE_PHI:
	/* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
	   must have a constant length.  */
	for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
          {
            tree arg = gimple_phi_arg (def_stmt, i)->def;

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == gimple_phi_result (def_stmt))
              continue;

	    if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
	      {
		if (rkind != SRK_LENRANGE)
		  return false;
		/* Set the upper bound to the maximum to prevent
		   it from being adjusted in the next iteration but
		   leave MINLEN and the more conservative MAXBOUND
		   determined so far alone (or leave them null if
		   they haven't been set yet).  That the MINLEN is
		   in fact zero can be determined from MAXLEN being
		   unbounded but the discovered minimum is used for
		   diagnostics.  */
		pdata->maxlen = build_all_ones_cst (size_type_node);
	      }
          }
        return true;

      default:
        return false;
    }
}
1665 
1666 /* Determine the minimum and maximum value or string length that ARG
1667    refers to and store each in the first two elements of MINMAXLEN.
1668    For expressions that point to strings of unknown lengths that are
1669    character arrays, use the upper bound of the array as the maximum
1670    length.  For example, given an expression like 'x ? array : "xyz"'
1671    and array declared as 'char array[8]', MINMAXLEN[0] will be set
1672    to 0 and MINMAXLEN[1] to 7, the longest string that could be
1673    stored in array.
1674    Return true if the range of the string lengths has been obtained
1675    from the upper bound of an array at the end of a struct.  Such
1676    an array may hold a string that's longer than its upper bound
1677    due to it being used as a poor-man's flexible array member.
1678 
1679    STRICT is true if it will handle PHIs and COND_EXPRs conservatively
1680    and false if PHIs and COND_EXPRs are to be handled optimistically,
1681    if we can determine string length minimum and maximum; it will use
1682    the minimum from the ones where it can be determined.
1683    STRICT false should be only used for warning code.
1684    When non-null, clear *NONSTR if ARG refers to a constant array
1685    that is known not be nul-terminated.  Otherwise set it to
1686    the declaration of the constant non-terminated array.
1687 
1688    ELTSIZE is 1 for normal single byte character strings, and 2 or
1689    4 for wide characer strings.  ELTSIZE is by default 1.  */
1690 
1691 bool
1692 get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
1693 {
1694   bitmap visited = NULL;
1695 
1696   if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
1697     {
1698       /* On failure extend the length range to an impossible maximum
1699 	 (a valid MAXLEN must be less than PTRDIFF_MAX - 1).  Other
1700 	 members can stay unchanged regardless.  */
1701       pdata->minlen = ssize_int (0);
1702       pdata->maxlen = build_all_ones_cst (size_type_node);
1703     }
1704   else if (!pdata->minlen)
1705     pdata->minlen = ssize_int (0);
1706 
1707   /* Unless its null, leave the more conservative MAXBOUND unchanged.  */
1708   if (!pdata->maxbound)
1709     pdata->maxbound = pdata->maxlen;
1710 
1711   if (visited)
1712     BITMAP_FREE (visited);
1713 
1714   return !integer_all_onesp (pdata->maxlen);
1715 }
1716 
1717 /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1718    For ARG of pointer types, NONSTR indicates if the caller is prepared
1719    to handle unterminated strings.   For integer ARG and when RKIND ==
1720    SRK_INT_VALUE, NONSTR must be null.
1721 
1722    If an unterminated array is discovered and our caller handles
1723    unterminated arrays, then bubble up the offending DECL and
1724    return the maximum size.  Otherwise return NULL.  */
1725 
1726 static tree
1727 get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
1728 {
1729   /* A non-null NONSTR is meaningless when determining the maximum
1730      value of an integer ARG.  */
1731   gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
1732   /* ARG must have an integral type when RKIND says so.  */
1733   gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
1734 
1735   bitmap visited = NULL;
1736 
1737   /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
1738      is unbounded.  */
1739   c_strlen_data lendata = { };
1740   if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
1741     lendata.maxlen = NULL_TREE;
1742   else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
1743     lendata.maxlen = NULL_TREE;
1744 
1745   if (visited)
1746     BITMAP_FREE (visited);
1747 
1748   if (nonstr)
1749     {
1750       /* For callers prepared to handle unterminated arrays set
1751 	 *NONSTR to point to the declaration of the array and return
1752 	 the maximum length/size. */
1753       *nonstr = lendata.decl;
1754       return lendata.maxlen;
1755     }
1756 
1757   /* Fail if the constant array isn't nul-terminated.  */
1758   return lendata.decl ? NULL_TREE : lendata.maxlen;
1759 }
1760 
1761 
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   When the length of SRC is a known constant, transform the call into
   a call to memcpy (DEST, SRC, strlen (SRC) + 1).  Return false if no
   simplification can be made.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* The memcpy transformation below may grow code size, so skip it
     when optimizing for size.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Set to non-null if ARG refers to an unterminated array.  */
  tree nonstr = NULL;
  tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);

  if (nonstr)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!gimple_no_warning_p (stmt))
	warn_string_no_nul (loc, "strcpy", src, nonstr);
      gimple_set_no_warning (stmt, true);
      return false;
    }

  /* Without a known constant length no transformation is possible.  */
  if (!len)
    return false;

  /* Build strlen (SRC) + 1 to account for the terminating nul and
     replace the strcpy call with memcpy (DEST, SRC, LEN + 1).  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1825 
/* Fold a call to the strncpy builtin with arguments DEST, SRC, and LEN
   into a call to memcpy when the whole source string (including the
   terminating nul) is known to fit within LEN bytes.  Returns true if
   the statement was simplified, false otherwise.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* True when DEST is declared with attribute nonstring.  */
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
	 decorated with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, SRK_STRLEN);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			stmt, fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes",
			stmt, fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, SRK_STRLEN);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation that leaves the copy unterminated.  */
  maybe_diag_stxncpy_trunc (*gsi, src, len);

  /* OK transform into builtin memcpy.  SSIZE >= LEN here, so copying
     LEN bytes stays within the source.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
1900 
/* Fold function call to builtin strchr or strrchr.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.  Returns true if the statement
   was simplified, false otherwise.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  /* All transformations below produce a value; without a result
     there is nothing to do.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* Both the string and the character are known: evaluate the search
     at compile time.  */
  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  /* No match: the result is a null pointer.  */
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      /* Match: fold to STR + constant offset of the match.  */
      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* Only str(r)chr (str, 0) can be simplified further: it points at
     the terminating nul, i.e. str + strlen (str).  */
  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ???  We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
1984 
1985 /* Fold function call to builtin strstr.
1986    If both arguments are constant, evaluate and fold the result,
1987    additionally fold strstr (x, "") into x and strstr (x, "c")
1988    into strchr (x, 'c').  */
1989 static bool
1990 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1991 {
1992   gimple *stmt = gsi_stmt (*gsi);
1993   tree haystack = gimple_call_arg (stmt, 0);
1994   tree needle = gimple_call_arg (stmt, 1);
1995   const char *p, *q;
1996 
1997   if (!gimple_call_lhs (stmt))
1998     return false;
1999 
2000   q = c_getstr (needle);
2001   if (q == NULL)
2002     return false;
2003 
2004   if ((p = c_getstr (haystack)))
2005     {
2006       const char *r = strstr (p, q);
2007 
2008       if (r == NULL)
2009 	{
2010 	  replace_call_with_value (gsi, integer_zero_node);
2011 	  return true;
2012 	}
2013 
2014       tree len = build_int_cst (size_type_node, r - p);
2015       gimple_seq stmts = NULL;
2016       gimple *new_stmt
2017 	= gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2018 			       haystack, len);
2019       gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2020       gsi_replace_with_seq_vops (gsi, stmts);
2021       return true;
2022     }
2023 
2024   /* For strstr (x, "") return x.  */
2025   if (q[0] == '\0')
2026     {
2027       replace_call_with_value (gsi, haystack);
2028       return true;
2029     }
2030 
2031   /* Transform strstr (x, "c") into strchr (x, 'c').  */
2032   if (q[1] == '\0')
2033     {
2034       tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2035       if (strchr_fn)
2036 	{
2037 	  tree c = build_int_cst (integer_type_node, q[0]);
2038 	  gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2039 	  replace_call_with_call_and_fold (gsi, repl);
2040 	  return true;
2041 	}
2042     }
2043 
2044   return false;
2045 }
2046 
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Folds strcat (DST, "") into DST.  Otherwise, when optimizing the
   block for speed and the length of SRC is known, splits the call into
   strlen (DST) followed by a memcpy of SRC (including its terminating
   nul) to DST + strlen (DST).

   Returns true if the statement was simplified, false otherwise.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen + memcpy split trades size for speed; keep strcat in
     cold or size-optimized blocks.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, SRK_STRLEN);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy LEN + 1 bytes so the terminating nul comes along.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns its first argument.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
         stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
2138 
2139 /* Fold a call to the __strcat_chk builtin FNDECL.  DEST, SRC, and SIZE
2140    are the arguments to the call.  */
2141 
2142 static bool
2143 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2144 {
2145   gimple *stmt = gsi_stmt (*gsi);
2146   tree dest = gimple_call_arg (stmt, 0);
2147   tree src = gimple_call_arg (stmt, 1);
2148   tree size = gimple_call_arg (stmt, 2);
2149   tree fn;
2150   const char *p;
2151 
2152 
2153   p = c_getstr (src);
2154   /* If the SRC parameter is "", return DEST.  */
2155   if (p && *p == '\0')
2156     {
2157       replace_call_with_value (gsi, dest);
2158       return true;
2159     }
2160 
2161   if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2162     return false;
2163 
2164   /* If __builtin_strcat_chk is used, assume strcat is available.  */
2165   fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2166   if (!fn)
2167     return false;
2168 
2169   gimple *repl = gimple_build_call (fn, 2, dest, src);
2170   replace_call_with_call_and_fold (gsi, repl);
2171   return true;
2172 }
2173 
/* Simplify a call to the strncat builtin.  Folds away calls that copy
   nothing, diagnoses bounds that equal or exceed the destination size
   or equal the source length (-Wstringop-overflow), and replaces the
   call with plain strcat when the bound is known to cover the whole
   source.  Returns true if the statement was simplified, false
   otherwise.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* Everything below needs both a constant bound and a known source
     string.  */
  if (TREE_CODE (len) != INTEGER_CST || !p)
    return false;

  unsigned srclen = strlen (p);

  int cmpsrc = compare_tree_int (len, srclen);

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (cmpsrc < 0)
    return false;

  unsigned HOST_WIDE_INT dstsize;

  bool nowarn = gimple_no_warning_p (stmt);

  if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
    {
      int cmpdst = compare_tree_int (len, dstsize);

      if (cmpdst >= 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Strncat copies (at most) LEN bytes and always appends
	     the terminating NUL so the specified bound should never
	     be equal to (or greater than) the size of the destination.
	     If it is, the copy could overflow.  */
	  location_t loc = gimple_location (stmt);
	  nowarn = warning_at (loc, OPT_Wstringop_overflow_,
			       cmpdst == 0
			       ? G_("%G%qD specified bound %E equals "
				    "destination size")
			       : G_("%G%qD specified bound %E exceeds "
				    "destination size %wu"),
			       stmt, fndecl, len, dstsize);
	  if (nowarn)
	    gimple_set_no_warning (stmt, true);
	}
    }

  if (!nowarn && cmpsrc == 0)
    {
      tree fndecl = gimple_call_fndecl (stmt);
      location_t loc = gimple_location (stmt);

      /* To avoid possible overflow the specified bound should also
	 not be equal to the length of the source, even when the size
	 of the destination is unknown (it's not an uncommon mistake
	 to specify as the bound to strncpy the length of the source).  */
      if (warning_at (loc, OPT_Wstringop_overflow_,
		      "%G%qD specified bound %E equals source length",
		      stmt, fndecl, len))
	gimple_set_no_warning (stmt, true);
    }

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  LEN >= strlen (SRC) here, so
     the bound never truncates and the calls are equivalent.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2262 
2263 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2264    LEN, and SIZE.  */
2265 
2266 static bool
2267 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2268 {
2269   gimple *stmt = gsi_stmt (*gsi);
2270   tree dest = gimple_call_arg (stmt, 0);
2271   tree src = gimple_call_arg (stmt, 1);
2272   tree len = gimple_call_arg (stmt, 2);
2273   tree size = gimple_call_arg (stmt, 3);
2274   tree fn;
2275   const char *p;
2276 
2277   p = c_getstr (src);
2278   /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
2279   if ((p && *p == '\0')
2280       || integer_zerop (len))
2281     {
2282       replace_call_with_value (gsi, dest);
2283       return true;
2284     }
2285 
2286   if (! tree_fits_uhwi_p (size))
2287     return false;
2288 
2289   if (! integer_all_onesp (size))
2290     {
2291       tree src_len = c_strlen (src, 1);
2292       if (src_len
2293 	  && tree_fits_uhwi_p (src_len)
2294 	  && tree_fits_uhwi_p (len)
2295 	  && ! tree_int_cst_lt (len, src_len))
2296 	{
2297 	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
2298 	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2299 	  if (!fn)
2300 	    return false;
2301 
2302 	  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2303 	  replace_call_with_call_and_fold (gsi, repl);
2304 	  return true;
2305 	}
2306       return false;
2307     }
2308 
2309   /* If __builtin_strncat_chk is used, assume strncat is available.  */
2310   fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2311   if (!fn)
2312     return false;
2313 
2314   gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2315   replace_call_with_call_and_fold (gsi, repl);
2316   return true;
2317 }
2318 
2319 /* Build and append gimple statements to STMTS that would load a first
2320    character of a memory location identified by STR.  LOC is location
2321    of the statement.  */
2322 
2323 static tree
2324 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2325 {
2326   tree var;
2327 
2328   tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2329   tree cst_uchar_ptr_node
2330     = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2331   tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2332 
2333   tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2334   gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2335   var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2336 
2337   gimple_assign_set_lhs (stmt, var);
2338   gimple_seq_add_stmt_without_update (stmts, stmt);
2339 
2340   return var;
2341 }
2342 
/* Fold a call to one of the str{n}{case}cmp builtins pointed to by the
   GSI iterator.  Returns true if the statement was simplified, false
   otherwise.  */

static bool
gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);

  tree type = integer_type_node;
  tree str1 = gimple_call_arg (stmt, 0);
  tree str2 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);
  /* Constant bound for the strn* variants; -1 when unknown.  */
  HOST_WIDE_INT length = -1;

  /* Handle strncmp and strncasecmp functions.  */
  if (gimple_call_num_args (stmt) == 3)
    {
      tree len = gimple_call_arg (stmt, 2);
      if (tree_fits_uhwi_p (len))
	length = tree_to_uhwi (len);
    }

  /* If the LEN parameter is zero, return zero.  */
  if (length == 0)
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (str1, str2, 0))
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  const char *p1 = c_getstr (str1);
  const char *p2 = c_getstr (str2);

  /* For known strings, return an immediate value.  */
  if (p1 && p2)
    {
      int r = 0;
      bool known_result = false;

      switch (fcode)
	{
	case BUILT_IN_STRCMP:
	case BUILT_IN_STRCMP_EQ:
	  {
	    r = strcmp (p1, p2);
	    known_result = true;
	    break;
	  }
	case BUILT_IN_STRNCMP:
	case BUILT_IN_STRNCMP_EQ:
	  {
	    if (length == -1)
	      break;
	    r = strncmp (p1, p2, length);
	    known_result = true;
	    break;
	  }
	/* Only handleable situation is where the string are equal (result 0),
	   which is already handled by operand_equal_p case.  */
	case BUILT_IN_STRCASECMP:
	  break;
	case BUILT_IN_STRNCASECMP:
	  {
	    if (length == -1)
	      break;
	    /* A zero strncmp result means the prefixes are
	       byte-identical and hence also compare equal
	       case-insensitively; a nonzero result proves nothing
	       about the strncasecmp outcome.  */
	    r = strncmp (p1, p2, length);
	    if (r == 0)
	      known_result = true;
	    break;
	  }
	default:
	  gcc_unreachable ();
	}

      if (known_result)
	{
	  /* Canonicalize the host library's result before folding.  */
	  replace_call_with_value (gsi, build_cmp_result (type, r));
	  return true;
	}
    }

  /* True when at least one character is compared.  */
  bool nonzero_length = length >= 1
    || fcode == BUILT_IN_STRCMP
    || fcode == BUILT_IN_STRCMP_EQ
    || fcode == BUILT_IN_STRCASECMP;

  location_t loc = gimple_location (stmt);

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0' && nonzero_length)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str1, &stmts);
      if (lhs)
	{
	  stmt = gimple_build_assign (lhs, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0' && nonzero_length)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  /* Widen the character to int before negating.  */
	  tree c = create_tmp_reg_or_ssa_name (integer_type_node);
	  stmt = gimple_build_assign (c, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);

	  stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg2 - *(const unsigned char*)arg1).  */
  if (fcode == BUILT_IN_STRNCMP && length == 1)
    {
      gimple_seq stmts = NULL;
      tree temp1 = gimple_load_first_char (loc, str1, &stmts);
      tree temp2 = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  /* Widen both characters to int, then subtract.  */
	  tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
	  gimple_seq_add_stmt_without_update (&stmts, convert1);

	  tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
	  gimple_seq_add_stmt_without_update (&stmts, convert2);

	  stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If length is larger than the length of one constant string,
     replace strncmp with corresponding strcmp */
  if (fcode == BUILT_IN_STRNCMP
      && length > 0
      && ((p2 && (size_t) length > strlen (p2))
          || (p1 && (size_t) length > strlen (p1))))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
      if (!fn)
        return false;
      gimple *repl = gimple_build_call (fn, 2, str1, str2);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
2517 
2518 /* Fold a call to the memchr pointed by GSI iterator.  */
2519 
2520 static bool
2521 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2522 {
2523   gimple *stmt = gsi_stmt (*gsi);
2524   tree lhs = gimple_call_lhs (stmt);
2525   tree arg1 = gimple_call_arg (stmt, 0);
2526   tree arg2 = gimple_call_arg (stmt, 1);
2527   tree len = gimple_call_arg (stmt, 2);
2528 
2529   /* If the LEN parameter is zero, return zero.  */
2530   if (integer_zerop (len))
2531     {
2532       replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2533       return true;
2534     }
2535 
2536   char c;
2537   if (TREE_CODE (arg2) != INTEGER_CST
2538       || !tree_fits_uhwi_p (len)
2539       || !target_char_cst_p (arg2, &c))
2540     return false;
2541 
2542   unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2543   unsigned HOST_WIDE_INT string_length;
2544   const char *p1 = c_getstr (arg1, &string_length);
2545 
2546   if (p1)
2547     {
2548       const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2549       if (r == NULL)
2550 	{
2551 	  if (length <= string_length)
2552 	    {
2553 	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2554 	      return true;
2555 	    }
2556 	}
2557       else
2558 	{
2559 	  unsigned HOST_WIDE_INT offset = r - p1;
2560 	  gimple_seq stmts = NULL;
2561 	  if (lhs != NULL_TREE)
2562 	    {
2563 	      tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2564 	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2565 						   arg1, offset_cst);
2566 	      gimple_seq_add_stmt_without_update (&stmts, stmt);
2567 	    }
2568 	  else
2569 	    gimple_seq_add_stmt_without_update (&stmts,
2570 						gimple_build_nop ());
2571 
2572 	  gsi_replace_with_seq_vops (gsi, stmts);
2573 	  return true;
2574 	}
2575     }
2576 
2577   return false;
2578 }
2579 
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call (the string and the stream).  UNLOCKED is true if this
   is actually a call to fputs_unlocked.  Depending on the known length
   of ARG0 the call is deleted, turned into fputc, or turned into
   fwrite.  Returns true if the statement was simplified, false
   otherwise.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, SRK_STRLEN);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					     build_int_cst
					     (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* The character isn't known even though the length is; fall
	 through and use fwrite instead.  */
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					 size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
2656 
/* Fold a call to a __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call and FCODE is
   the BUILT_IN_* code of the builtin.  Folds self-copies and, when the
   object-size check is known to pass (SIZE is unlimited or provably
   >= the copied length), replaces the call with the corresponding
   unchecked builtin.  Returns true if the statement was simplified,
   false otherwise.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* True when the call's result is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  /* __mempcpy_chk returns DEST + LEN; materialize that sum.  */
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is an upper bound on the copied length when LEN itself
     isn't constant.  */
  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      /* The copy may exceed the object size: keep the check.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2757 
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call; FCODE is the
   BUILT_IN_* code of the builtin.  Return true if the call at *GSI
   was folded (to the unchecked st[rp]cpy, to __memcpy_chk, or to a
   plain value), false otherwise.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* True if the call's return value is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* The destination object size must be a compile-time constant.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN, if known, is an upper bound on strlen (SRC).  */
  tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
  /* An all-ones SIZE means the object size could not be determined,
     so the check is a no-op and the call can be folded directly.  */
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* Copy LEN + 1 bytes to include the terminating nul.  */
	      gimple_seq stmts = NULL;
	      len = force_gimple_operand (len, &stmts, true, NULL_TREE);
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* The string plus its terminating nul must fit; require
	 MAXLEN < SIZE.  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2862 
2863 /* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
2864    are the arguments to the call.  If MAXLEN is not NULL, it is maximum
2865    length passed as third argument. IGNORE is true if return value can be
2866    ignored. FCODE is the BUILT_IN_* code of the builtin. */
2867 
2868 static bool
2869 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2870 				 tree dest, tree src,
2871 				 tree len, tree size,
2872 				 enum built_in_function fcode)
2873 {
2874   gimple *stmt = gsi_stmt (*gsi);
2875   bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2876   tree fn;
2877 
2878   if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2879     {
2880        /* If return value of __stpncpy_chk is ignored,
2881           optimize into __strncpy_chk.  */
2882        fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2883        if (fn)
2884 	 {
2885 	   gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2886 	   replace_call_with_call_and_fold (gsi, repl);
2887 	   return true;
2888 	 }
2889     }
2890 
2891   if (! tree_fits_uhwi_p (size))
2892     return false;
2893 
2894   tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
2895   if (! integer_all_onesp (size))
2896     {
2897       if (! tree_fits_uhwi_p (len))
2898 	{
2899 	  /* If LEN is not constant, try MAXLEN too.
2900 	     For MAXLEN only allow optimizing into non-_ocs function
2901 	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
2902 	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2903 	    return false;
2904 	}
2905       else
2906 	maxlen = len;
2907 
2908       if (tree_int_cst_lt (size, maxlen))
2909 	return false;
2910     }
2911 
2912   /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
2913   fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2914 			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2915   if (!fn)
2916     return false;
2917 
2918   gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2919   replace_call_with_call_and_fold (gsi, repl);
2920   return true;
2921 }
2922 
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return true if the call at *GSI was simplified -- into strcpy when
   the result is unused, or into memcpy followed by DEST + strlen (SRC)
   when the source length is a known constant -- and false otherwise.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  /* Set to non-null if ARG refers to an unterminated array.  */
  c_strlen_data data = { };
  tree len = c_strlen (src, 1, &data, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    {
      data.decl = unterminated_array (src);
      if (!data.decl)
	return false;
    }

  if (data.decl)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!gimple_no_warning_p (stmt))
	warn_string_no_nul (loc, "stpcpy", src, data.decl);
      /* Mark the statement so the warning is not issued again.  */
      gimple_set_no_warning (stmt, true);
      return false;
    }

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Build LEN + 1 so the terminating nul is copied as well.  */
  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  /* Transfer the virtual operands from the original call to the new
     memcpy by hand and make the new statement the definition of the
     virtual def, since the call is inserted without going through the
     usual replacement helpers.  */
  gimple_set_vuse (repl, gimple_vuse (stmt));
  gimple_set_vdef (repl, gimple_vdef (stmt));
  if (gimple_vdef (repl)
      && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
    SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
3001 
/* Fold a call to __{,v}snprintf_chk.  FCODE is either
   BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  Return true if
   the call at *GSI was replaced in place by the corresponding
   non-checking {,v}snprintf, and false if a normal call should be
   emitted instead.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  /* The call has the form
     __snprintf_chk (DEST, LEN, FLAG, SIZE, FMT, ...).  */
  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  /* The destination object size must be a compile-time constant.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  /* An all-ones SIZE means the object size could not be determined,
     so the length check is a no-op and can be skipped.  */
  if (! integer_all_onesp (size))
    {
      /* MAXLEN, if known, is an upper bound on the value of LEN.  */
      tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  /* Shift any trailing varargs down over the dropped FLAG and SIZE
     arguments, then shrink the call by two operands.  */
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
3082 
/* Fold a call to __{,v}sprintf_chk.  FCODE is either
   BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  Return true if the
   call at *GSI was replaced in place by the corresponding
   non-checking {,v}sprintf, and false if a normal call should be
   emitted instead.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  /* The call has the form __sprintf_chk (DEST, FLAG, SIZE, FMT, ...).  */
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  /* The destination object size must be a compile-time constant.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  /* Length of the output, if it can be determined below.  */
  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* Unless the object size is unknown (all-ones SIZE), the output
     plus its terminating nul must provably fit: require LEN < SIZE.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 argument by 2 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  /* Shift any trailing varargs down over the dropped FLAG and SIZE
     arguments, then shrink the call by two operands.  */
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
3178 
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      if (gimple_no_warning_p (stmt))
	gimple_set_no_warning (repl, true);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  /* sprintf returns the number of characters written, here
	     the constant length of the format string.  */
	  repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
							  strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", convert the call to strcpy; when the result
     is used, also set it to strlen (ORIG).  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* When the result is used we must know strlen (ORIG).  */
      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, SRK_STRLEN);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      if (gimple_no_warning_p (stmt))
	gimple_set_no_warning (repl, true);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  /* Convert the strlen result to the type of the lhs if
	     necessary.  */
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
3312 
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  /* The destination size must be a compile-time constant so the
     no-truncation condition below can be checked.  */
  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  /* snprintf returns the number of characters written, here
	     the constant length of the format string.  */
	  repl = gimple_build_assign (lhs,
				      build_int_cst (TREE_TYPE (lhs), len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", convert the call to strcpy; when the result
     is used, also set it to strlen (ORIG).  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      /* The length of the source string must be a known constant.  */
      tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  /* Convert the strlen result to the type of the lhs if
	     necessary.  */
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
3449 
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   FCODE is the BUILT_IN_* code of the function to be simplified.
   Return true if the call at *GSI was simplified into a call to fputc
   or fputs (or removed entirely), and false otherwise.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, fold to fputs or
     drop the call entirely.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* For the non-va_list variants an extra argument would not be
	 consumed by such a format; leave those calls alone.  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
3550 
3551 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3552    FMT and ARG are the arguments to the call; we don't fold cases with
3553    more than 2 arguments, and ARG may be null if this is a 1-argument case.
3554 
3555    Return NULL_TREE if no simplification was possible, otherwise return the
3556    simplified form of the call as a tree.  FCODE is the BUILT_IN_*
3557    code of the function to be simplified.  */
3558 
static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  /* Get the target's '%', 's', newline etc. characters; bail out if
     the target character set is unavailable.  */
  if (!init_target_chars ())
    return false;

  /* Case 1: the format is exactly "%s", or contains no '%' at all.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  /* For vprintf the trailing argument is a va_list, not the
	     string to print, so this transformation is invalid.  */
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  /* printf ("%s", str) prints STR; it must be a string constant.  */
	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	  /* No putchar decl available: fall through to return false.  */
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newstr = xstrdup (str);
	      newstr[len - 1] = '\0';
	      newarg = build_string_literal (len, newstr);
	      free (newstr);
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
3702 
3703 
3704 
/* Fold a call to __builtin_strlen at *GSI: replace it with a constant
   when the length is known exactly, otherwise record a length range
   on the result SSA name.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);

  wide_int minlen;
  wide_int maxlen;

  /* Ask the strlen range machinery for [min, max] bounds on the
     length of the string ARG points to.  Only usable when both bounds
     are INTEGER_CSTs and the range doesn't stem from a declaration
     flagged in lendata.decl.  */
  c_strlen_data lendata = { };
  if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
      && !lendata.decl
      && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
      && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
    {
      /* The range of lengths refers to either a single constant
	 string or to the longest and shortest constant string
	 referenced by the argument of the strlen() call, or to
	 the strings that can possibly be stored in the arrays
	 the argument refers to.  */
      minlen = wi::to_wide (lendata.minlen);
      maxlen = wi::to_wide (lendata.maxlen);
    }
  else
    {
      /* No usable range: fall back to [0, max_object_size () - 2].
	 Note minlen < maxlen here, so the constant-folding branch
	 below (which reads lendata.minlen) cannot be reached.  */
      unsigned prec = TYPE_PRECISION (sizetype);

      minlen = wi::shwi (0, prec);
      maxlen = wi::to_wide (max_object_size (), prec) - 2;
    }

  if (minlen == maxlen)
    {
      /* Fold the strlen call to a constant.  */
      tree type = TREE_TYPE (lendata.minlen);
      tree len = force_gimple_operand_gsi (gsi,
					   wide_int_to_tree (type, minlen),
					   true, NULL, true, GSI_SAME_STMT);
      replace_call_with_value (gsi, len);
      return true;
    }

  /* Set the strlen() range to [0, MAXLEN].  */
  if (tree lhs = gimple_call_lhs (stmt))
    set_strlen_range (lhs, maxlen);

  return false;
}
3755 
3756 /* Fold a call to __builtin_acc_on_device.  */
3757 
3758 static bool
3759 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3760 {
3761   /* Defer folding until we know which compiler we're in.  */
3762   if (symtab->state != EXPANSION)
3763     return false;
3764 
3765   unsigned val_host = GOMP_DEVICE_HOST;
3766   unsigned val_dev = GOMP_DEVICE_NONE;
3767 
3768 #ifdef ACCEL_COMPILER
3769   val_host = GOMP_DEVICE_NOT_HOST;
3770   val_dev = ACCEL_COMPILER_acc_device;
3771 #endif
3772 
3773   location_t loc = gimple_location (gsi_stmt (*gsi));
3774 
3775   tree host_eq = make_ssa_name (boolean_type_node);
3776   gimple *host_ass = gimple_build_assign
3777     (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3778   gimple_set_location (host_ass, loc);
3779   gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3780 
3781   tree dev_eq = make_ssa_name (boolean_type_node);
3782   gimple *dev_ass = gimple_build_assign
3783     (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3784   gimple_set_location (dev_ass, loc);
3785   gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3786 
3787   tree result = make_ssa_name (boolean_type_node);
3788   gimple *result_ass = gimple_build_assign
3789     (result, BIT_IOR_EXPR, host_eq, dev_eq);
3790   gimple_set_location (result_ass, loc);
3791   gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3792 
3793   replace_call_with_value (gsi, result);
3794 
3795   return true;
3796 }
3797 
3798 /* Fold realloc (0, n) -> malloc (n).  */
3799 
3800 static bool
3801 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3802 {
3803   gimple *stmt = gsi_stmt (*gsi);
3804   tree arg = gimple_call_arg (stmt, 0);
3805   tree size = gimple_call_arg (stmt, 1);
3806 
3807   if (operand_equal_p (arg, null_pointer_node, 0))
3808     {
3809       tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3810       if (fn_malloc)
3811 	{
3812 	  gcall *repl = gimple_build_call (fn_malloc, 1, size);
3813 	  replace_call_with_call_and_fold (gsi, repl);
3814 	  return true;
3815 	}
3816     }
3817   return false;
3818 }
3819 
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  /* Dispatch to the per-builtin folders.  Each returns true iff it
     replaced or simplified the call; on false we fall through to the
     generic tree-level folder below.  */
  switch (fcode)
    {
    case BUILT_IN_BCMP:
      return gimple_fold_builtin_bcmp (gsi);
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_bcopy (gsi);
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_bzero (gsi);

    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    /* The last argument distinguishes memcpy (0) / mempcpy (1) /
       memmove (3) semantics for the shared memory-op folder.  */
    case BUILT_IN_MEMCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 0);
    case BUILT_IN_MEMPCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 1);
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 3);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    /* index/rindex are the BSD names for strchr/strrchr; the bool
       selects forward (false) or reverse (true) search.  */
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_STRSTR:
      return gimple_fold_builtin_strstr (gsi);
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCMP_EQ:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCMP_EQ:
    case BUILT_IN_STRNCASECMP:
      return gimple_fold_builtin_string_compare (gsi);
    case BUILT_IN_MEMCHR:
      return gimple_fold_builtin_memchr (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);

    /* For the printf family, arguments past the format string are
       optional; N guards the argument-count-specific extraction.  */
    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      /* The _CHK variants carry an extra flag argument at index 1,
	 so the format string is at index 2.  */
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    case BUILT_IN_REALLOC:
      return gimple_fold_builtin_realloc (gsi);

    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      /* If the result is unused, strip useless conversions; otherwise
	 coerce it to the call's declared return type.  */
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
	gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
4000 
4001 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
4002    function calls to constants, where possible.  */
4003 
4004 static tree
4005 fold_internal_goacc_dim (const gimple *call)
4006 {
4007   int axis = oacc_get_ifn_dim_arg (call);
4008   int size = oacc_get_fn_dim_size (current_function_decl, axis);
4009   tree result = NULL_TREE;
4010   tree type = TREE_TYPE (gimple_call_lhs (call));
4011 
4012   switch (gimple_call_internal_fn (call))
4013     {
4014     case IFN_GOACC_DIM_POS:
4015       /* If the size is 1, we know the answer.  */
4016       if (size == 1)
4017 	result = build_int_cst (type, 0);
4018       break;
4019     case IFN_GOACC_DIM_SIZE:
4020       /* If the size is not dynamic, we know the answer.  */
4021       if (size)
4022 	result = build_int_cst (type, size);
4023       break;
4024     default:
4025       break;
4026     }
4027 
4028   return result;
4029 }
4030 
/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
   for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
   &var where var is only addressable because of such calls.  */

bool
optimize_atomic_compare_exchange_p (gimple *stmt)
{
  /* Basic requirements: exactly the 6 arguments of
     __atomic_compare_exchange_N, optimization and atomic inlining
     enabled, no TSan/ASan instrumentation, a normal builtin callee,
     and virtual operands present (the call reads and writes memory).  */
  if (gimple_call_num_args (stmt) != 6
      || !flag_inline_atomics
      || !optimize
      || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
      || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || !gimple_vdef (stmt)
      || !gimple_vuse (stmt))
    return false;

  tree fndecl = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      break;
    default:
      return false;
    }

  /* The expected-value argument must literally be the address of an
     SSA-capable variable.  */
  tree expected = gimple_call_arg (stmt, 1);
  if (TREE_CODE (expected) != ADDR_EXPR
      || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
    return false;

  /* That variable must be a local register-type scalar whose precision
     fills its mode (partial-precision types can't round-trip through
     the VIEW_CONVERT_EXPRs the transformation introduces).  */
  tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
  if (!is_gimple_reg_type (etype)
      || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
      || TREE_THIS_VOLATILE (etype)
      || VECTOR_TYPE_P (etype)
      || TREE_CODE (etype) == COMPLEX_TYPE
      /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
	 might not preserve all the bits.  See PR71716.  */
      || SCALAR_FLOAT_TYPE_P (etype)
      || maybe_ne (TYPE_PRECISION (etype),
		   GET_MODE_BITSIZE (TYPE_MODE (etype))))
    return false;

  /* The weak argument must be a literal 0 or 1 so it can be encoded
     into the internal function's flag operand.  */
  tree weak = gimple_call_arg (stmt, 3);
  if (!integer_zerop (weak) && !integer_onep (weak))
    return false;

  /* The target must implement compare-and-swap in the access mode,
     i.e. the mode of the builtin's third parameter type.  */
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  machine_mode mode = TYPE_MODE (itype);

  if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
      == CODE_FOR_nothing
      && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
    return false;

  /* The expected variable must exactly fill the access mode.  */
  if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
    return false;

  return true;
}
4096 
/* Fold
     r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
   into
     _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
     i = IMAGPART_EXPR <t>;
     r = (_Bool) i;
     e = REALPART_EXPR <t>;  */

void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  /* ITYPE is the access type (the builtin's third parameter type);
     the internal function returns a complex pair of that type.  */
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  /* Load the current value of the expected variable E ...  */
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  /* ... and view-convert it to the access type if it differs.  */
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  /* Encode weakness and access size into one operand: w * 256 + N.  */
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  /* Transfer the virtual operands from the old call to the new one.  */
  gimple_set_vdef (g, gimple_vdef (stmt));
  gimple_set_vuse (g, gimple_vuse (stmt));
  SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
  tree oldlhs = gimple_call_lhs (stmt);
  if (stmt_can_throw_internal (cfun, stmt))
    {
      /* If the original call could throw, follow-up statements must be
	 inserted on the fallthru edge after the new call.  */
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  if (oldlhs)
    {
      /* r = (_Bool) IMAGPART_EXPR <t>;  */
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
	{
	  gsi_insert_on_edge_immediate (e, g);
	  *gsi = gsi_for_stmt (g);
	}
      else
	gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  /* e = REALPART_EXPR <t>, converting back to the expected variable's
     own type if that differs from the access type.  */
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  if (throws && oldlhs == NULL_TREE)
    {
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  /* Leave the iterator at the statement that loaded the old expected
     value, so callers re-fold from there.  */
  *gsi = gsiret;
}
4186 
4187 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
4188    doesn't fit into TYPE.  The test for overflow should be regardless of
4189    -fwrapv, and even for unsigned types.  */
4190 
4191 bool
4192 arith_overflowed_p (enum tree_code code, const_tree type,
4193 		    const_tree arg0, const_tree arg1)
4194 {
4195   widest2_int warg0 = widest2_int_cst (arg0);
4196   widest2_int warg1 = widest2_int_cst (arg1);
4197   widest2_int wres;
4198   switch (code)
4199     {
4200     case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4201     case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4202     case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4203     default: gcc_unreachable ();
4204     }
4205   signop sign = TYPE_SIGN (type);
4206   if (sign == UNSIGNED && wi::neg_p (wres))
4207     return true;
4208   return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4209 }
4210 
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value. Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  /* Dump when the resolved target contradicts the type
	     inheritance analysis.  */
          if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						     (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  /* The OBJ_TYPE_REF already resolves to a known function;
	     strip the wrapper.  */
	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  /* Devirtualize when type analysis proves the complete set of
	     possible targets (FINAL) and it has at most one element.  */
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
				   "folding virtual function call to %s\n",
		 		   targets.length () == 1
		  		   ? targets[0]->name ()
		  		   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  tree fndecl = targets[0]->decl;
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		  /* If changing the call to __cxa_pure_virtual
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if (gimple_call_noreturn_p (stmt)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
			  == void_type_node))
		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs
		      && gimple_call_noreturn_p (stmt)
		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
			  || should_remove_lhs_p (lhs)))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  /* Keep the SSA name defined by assigning it an
			     uninitialized default definition.  */
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  /* Zero possible targets: the call can never happen,
		     so replace it with __builtin_unreachable.  */
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple *new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  /* If the call had a SSA name as lhs morph that into
		     an uninitialized value.  */
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
		      SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
		      set_ssa_default_def (cfun, var, lhs);
		    }
		  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		  gsi_replace (gsi, new_stmt, false);
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
      else
	{
	  /* Otherwise still fold *& in the static-chain operand.  */
	  tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
	  if (tmp)
	    {
	      gimple_call_set_chain (stmt, tmp);
	      changed = true;
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
        changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
	changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      /* Fold internal functions: __builtin_expect, the UBSan checks,
	 OpenACC dimensions and arithmetic-with-overflow.  SUBCODE is
	 the tree code of the arithmetic operation, CPLX_RESULT whether
	 the IFN returns a {value, overflow} complex pair.  */
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2),
					NULL_TREE);
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  {
	    tree offset = gimple_call_arg (stmt, 1);
	    tree objsize = gimple_call_arg (stmt, 2);
	    /* Drop the check when the object size is unknown (all-ones)
	       or the constant offset fits the constant size.  */
	    if (integer_all_onesp (objsize)
		|| (TREE_CODE (offset) == INTEGER_CST
		    && TREE_CODE (objsize) == INTEGER_CST
		    && tree_int_cst_le (offset, objsize)))
	      {
		replace_call_with_value (gsi, NULL_TREE);
		return true;
	      }
	  }
	  break;
	case IFN_UBSAN_PTR:
	  /* A zero offset can never take the pointer out of bounds.  */
	  if (integer_zerop (gimple_call_arg (stmt, 1)))
	    {
	      replace_call_with_value (gsi, NULL_TREE);
	      return true;
	    }
	  break;
	case IFN_UBSAN_BOUNDS:
	  {
	    tree index = gimple_call_arg (stmt, 1);
	    tree bound = gimple_call_arg (stmt, 2);
	    /* Drop the bounds check when a constant index is provably
	       within the constant bound.  */
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_CODE (bound) == INTEGER_CST)
	      {
		index = fold_convert (TREE_TYPE (bound), index);
		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_le (index, bound))
		  {
		    replace_call_with_value (gsi, NULL_TREE);
		    return true;
		  }
	      }
	  }
	  break;
	case IFN_GOACC_DIM_SIZE:
	case IFN_GOACC_DIM_POS:
	  result = fold_internal_goacc_dim (stmt);
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  /* TYPE is the scalar type in which the operation is checked;
	     for the complex-returning IFNs take it from the lhs.  */
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      /* Both operands constant: compute the value and record
		 overflow separately for the complex-returning IFNs.  */
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  /* The folded value has a different type than the lhs
		     element type; re-check overflow on conversion and
		     give up if the conversion could change the value.  */
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      /* Package {value, overflow flag} into the complex result
		 the IFN's callers expect.  */
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
4538 
4539 
4540 /* Return true whether NAME has a use on STMT.  */
4541 
4542 static bool
4543 has_use_on_stmt (tree name, gimple *stmt)
4544 {
4545   imm_use_iterator iter;
4546   use_operand_p use_p;
4547   FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4548     if (USE_STMT (use_p) == stmt)
4549       return true;
4550   return false;
4551 }
4552 
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  gimple_match_op *res_op,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree *ops = res_op->ops;
  unsigned int num_ops = res_op->num_ops;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
	&& !has_use_on_stmt (ops[i], stmt))
      return false;

  /* Apply the same abnormal-SSA check to the operands of a comparison
     tree wrapped in ops[0] (as produced for GIMPLE_COND results).  */
  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
	  && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
	return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      /* A GIMPLE_COND result must be a tree code: either a comparison
	 that can be put back directly, or a value compared against
	 zero, or a constant that makes the condition trivially
	 true/false.  */
      gcc_assert (res_op->code.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (res_op->code,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
      else if (res_op->code == SSA_NAME)
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (res_op->code == INTEGER_CST)
	{
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Materialize the result into SEQ and compare it against
	     zero; only allowed when we may insert statements.  */
	  tree res = maybe_push_res_to_seq (res_op, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && res_op->code.is_tree_code ())
    {
      /* For assignments, rewrite the RHS in place; when INPLACE only
	 allow this if the new RHS does not need more operand slots
	 than the old statement provides.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
	{
	  maybe_build_generic_op (res_op);
	  gimple_assign_set_rhs_with_ops (gsi, res_op->code,
					  res_op->op_or_null (0),
					  res_op->op_or_null (1),
					  res_op->op_or_null (2));
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (res_op->code.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == res_op->code)
    {
      /* Same call with possibly simplified arguments: just update the
	 argument slots in place.  */
      gcc_assert (num_ops == gimple_call_num_args (stmt));
      for (unsigned int i = 0; i < num_ops; ++i)
	gimple_call_set_arg (stmt, i, ops[i]);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      /* Last resort: push the result to SEQ targeting the old LHS and
	 replace the whole statement, preserving virtual operands.  */
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (res_op, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
4693 
/* Canonicalize MEM_REFs invariant address operand after propagation.
   T points at the tree to canonicalize; it is rewritten in place.
   Returns true if *T was changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  /* Look through an outer ADDR_EXPR and canonicalize what it wraps.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  /* Compute the bit offset of the accessed element and
		     only rewrite when the access lies fully within the
		     vector.  */
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					 (TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  /* Walk down to the innermost base object of the reference.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  poly_int64 coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  /* Fold the component offset into the constant offset
	     operand of the MEM_REF.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
4814 
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  With INPLACE the statement may not be
   replaced by a new one and no new statements may be inserted.
   VALUEIZE is used to look up SSA names during simplification.
   Returns true if the statement at *GSI was changed.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  /* Remember whether warnings were suppressed on the original stmt so
     deferred overflow warnings can be discarded accordingly below.  */
  bool nowarning = gimple_no_warning_p (stmt);
  unsigned i;
  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  /* Canonicalize memory references on both sides of the
	     assignment.  */
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  /* Swapping comparison operands requires swapping the
		     comparison code as well.  */
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	/* Canonicalize memory references in call arguments and the
	   call LHS.  */
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	/* Canonicalize memory references in asm outputs and inputs.  */
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
      /* Falls through to the empty default label; harmless.  */
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      gimple_match_op res_op;
      if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  /* Re-fetch the statement as it may have been replaced above.  */
  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not.of X
		   X == 1 and X != 0 is X  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* When INPLACE only accept the new RHS if it needs fewer
	   operand slots than the old statement provides.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    /* Inputs may only be folded to memory if the constraint
	       allows memory but not a register.  */
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold the reference under the ADDR_EXPR and re-wrap it.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a<greturn *> (stmt);
	tree ret = gimple_return_retval(ret_stmt);

	/* Propagate a known value into the return value if possible.  */
	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
  return changed;
}
5138 
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}
5146 
5147 /* Valueization callback that ends up following single-use SSA edges only.  */
5148 
5149 tree
5150 follow_single_use_edges (tree val)
5151 {
5152   if (TREE_CODE (val) == SSA_NAME
5153       && !has_single_use (val))
5154     return NULL_TREE;
5155   return val;
5156 }
5157 
/* Valueization callback that follows all SSA edges.  */

tree
follow_all_ssa_edges (tree val)
{
  return val;
}
5165 
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  /* Do not follow SSA edges during simplification.  */
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}
5178 
/* As above, but use VALUEIZE to look up SSA names during folding.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
5184 
5185 /* Perform the minimal folding on statement *GSI.  Only operations like
5186    *&x created by constant propagation are handled.  The statement cannot
5187    be replaced with a new one.  Return true if the statement was
5188    changed, false otherwise.
5189    The statement *GSI should be in valid gimple form but may
5190    be in unfolded state as resulting from for example constant propagation
5191    which can produce *&x = 0.  */
5192 
5193 bool
5194 fold_stmt_inplace (gimple_stmt_iterator *gsi)
5195 {
5196   gimple *stmt = gsi_stmt (*gsi);
5197   bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
5198   gcc_assert (gsi_stmt (*gsi) == stmt);
5199   return changed;
5200 }
5201 
5202 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5203    if EXPR is null or we don't know how.
5204    If non-null, the result always has boolean type.  */
5205 
5206 static tree
5207 canonicalize_bool (tree expr, bool invert)
5208 {
5209   if (!expr)
5210     return NULL_TREE;
5211   else if (invert)
5212     {
5213       if (integer_nonzerop (expr))
5214 	return boolean_false_node;
5215       else if (integer_zerop (expr))
5216 	return boolean_true_node;
5217       else if (TREE_CODE (expr) == SSA_NAME)
5218 	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5219 			    build_int_cst (TREE_TYPE (expr), 0));
5220       else if (COMPARISON_CLASS_P (expr))
5221 	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5222 			    boolean_type_node,
5223 			    TREE_OPERAND (expr, 0),
5224 			    TREE_OPERAND (expr, 1));
5225       else
5226 	return NULL_TREE;
5227     }
5228   else
5229     {
5230       if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5231 	return expr;
5232       if (integer_nonzerop (expr))
5233 	return boolean_true_node;
5234       else if (integer_zerop (expr))
5235 	return boolean_false_node;
5236       else if (TREE_CODE (expr) == SSA_NAME)
5237 	return fold_build2 (NE_EXPR, boolean_type_node, expr,
5238 			    build_int_cst (TREE_TYPE (expr), 0));
5239       else if (COMPARISON_CLASS_P (expr))
5240 	return fold_build2 (TREE_CODE (expr),
5241 			    boolean_type_node,
5242 			    TREE_OPERAND (expr, 0),
5243 			    TREE_OPERAND (expr, 1));
5244       else
5245 	return NULL_TREE;
5246     }
5247 }
5248 
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* NAME is equivalent to NAME != 0 and to NAME == nonzero.  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      /* Otherwise look at EXPR's defining statement.  */
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) and (name == nonzero) mean name itself.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) and (name != nonzero) mean !name, so compare
	     against the inverted comparison.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
5306 
5307 /* Check to see if two boolean expressions OP1 and OP2 are logically
5308    equivalent.  */
5309 
5310 static bool
5311 same_bool_result_p (const_tree op1, const_tree op2)
5312 {
5313   /* Simple cases first.  */
5314   if (operand_equal_p (op1, op2, 0))
5315     return true;
5316 
5317   /* Check the cases where at least one of the operands is a comparison.
5318      These are a bit smarter than operand_equal_p in that they apply some
5319      identifies on SSA_NAMEs.  */
5320   if (COMPARISON_CLASS_P (op2)
5321       && same_bool_comparison_p (op1, TREE_CODE (op2),
5322 				 TREE_OPERAND (op2, 0),
5323 				 TREE_OPERAND (op2, 1)))
5324     return true;
5325   if (COMPARISON_CLASS_P (op1)
5326       && same_bool_comparison_p (op2, TREE_CODE (op1),
5327 				 TREE_OPERAND (op1, 0),
5328 				 TREE_OPERAND (op1, 1)))
5329     return true;
5330 
5331   /* Default case.  */
5332   return false;
5333 }
5334 
/* Forward declarations for some mutually recursive functions.  These
   implement folding of ANDed/ORed comparisons and recurse through the
   *_var_with_comparison helpers on SSA definitions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b);
5355 
5356 /* Helper function for and_comparisons_1:  try to simplify the AND of the
5357    ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5358    If INVERT is true, invert the value of the VAR before doing the AND.
5359    Return NULL_EXPR if we can't simplify this to a single expression.  */
5360 
5361 static tree
5362 and_var_with_comparison (tree var, bool invert,
5363 			 enum tree_code code2, tree op2a, tree op2b)
5364 {
5365   tree t;
5366   gimple *stmt = SSA_NAME_DEF_STMT (var);
5367 
5368   /* We can only deal with variables whose definitions are assignments.  */
5369   if (!is_gimple_assign (stmt))
5370     return NULL_TREE;
5371 
5372   /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5373      !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5374      Then we only have to consider the simpler non-inverted cases.  */
5375   if (invert)
5376     t = or_var_with_comparison_1 (stmt,
5377 				  invert_tree_comparison (code2, false),
5378 				  op2a, op2b);
5379   else
5380     t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5381   return canonicalize_bool (t, invert);
5382 }
5383 
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) and (op2a == 1) both test "op2a is true".  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* (op2a == 0) and (op2a != 1) both test "op2a is false".  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  /* No simplification was possible.  */
  return NULL_TREE;
}
5546 
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* cmp is negative, zero, or positive as op1b is less than, equal
	 to, or greater than op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  /* If the second comparison already implies op1a != op1b, it
	     subsumes the NE test and is the whole answer.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges. */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* (NAME == 0) and (NAME != 1) test the inverse of NAME.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
5800 
5801 /* Try to simplify the AND of two comparisons, specified by
5802    (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5803    If this can be simplified to a single expression (without requiring
5804    introducing more SSA variables to hold intermediate values),
5805    return the resulting tree.  Otherwise return NULL_TREE.
5806    If the result expression is non-null, it has boolean type.  */
5807 
5808 tree
5809 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5810 			    enum tree_code code2, tree op2a, tree op2b)
5811 {
5812   tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5813   if (t)
5814     return t;
5815   else
5816     return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5817 }
5818 
5819 /* Helper function for or_comparisons_1:  try to simplify the OR of the
5820    ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5821    If INVERT is true, invert the value of VAR before doing the OR.
5822    Return NULL_EXPR if we can't simplify this to a single expression.  */
5823 
5824 static tree
5825 or_var_with_comparison (tree var, bool invert,
5826 			enum tree_code code2, tree op2a, tree op2b)
5827 {
5828   tree t;
5829   gimple *stmt = SSA_NAME_DEF_STMT (var);
5830 
5831   /* We can only deal with variables whose definitions are assignments.  */
5832   if (!is_gimple_assign (stmt))
5833     return NULL_TREE;
5834 
5835   /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5836      !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5837      Then we only have to consider the simpler non-inverted cases.  */
5838   if (invert)
5839     t = and_var_with_comparison_1 (stmt,
5840 				   invert_tree_comparison (code2, false),
5841 				   op2a, op2b);
5842   else
5843     t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5844   return canonicalize_bool (t, invert);
5845 }
5846 
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) and (op2a == 1) both test "op2a is true".  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* (op2a == 0) and (op2a != 1) both test "op2a is false".  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  /* No simplification was possible.  */
  return NULL_TREE;
}
6010 
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* cmp is negative, zero, or positive as op1b is less than, equal
	 to, or greater than op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  /* If the second comparison already covers op1a == op1b, it
	     subsumes the equality test and is the whole answer.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* (NAME == 0) and (NAME != 1) test the inverse of NAME.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
6264 
6265 /* Try to simplify the OR of two comparisons, specified by
6266    (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6267    If this can be simplified to a single expression (without requiring
6268    introducing more SSA variables to hold intermediate values),
6269    return the resulting tree.  Otherwise return NULL_TREE.
6270    If the result expression is non-null, it has boolean type.  */
6271 
6272 tree
6273 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
6274 			   enum tree_code code2, tree op2a, tree op2b)
6275 {
6276   tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
6277   if (t)
6278     return t;
6279   else
6280     return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
6281 }
6282 
6283 
6284 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6285 
6286    Either NULL_TREE, a simplified but non-constant or a constant
6287    is returned.
6288 
6289    ???  This should go into a gimple-fold-inline.h file to be eventually
6290    privatized with the single valueize function used in the various TUs
6291    to avoid the indirect function call overhead.  */
6292 
tree
gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  gimple_match_op res_op;
  /* ???  The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
    {
      tree res = NULL_TREE;
      /* Accept the match-and-simplify result only when it already is a
	 GIMPLE value, or when the mprts_hook can materialize one.  */
      if (gimple_simplified_result_is_gimple_val (&res_op))
	res = res_op.ops[0];
      else if (mprts_hook)
	res = mprts_hook (&res_op);
      if (res)
	{
	  if (dump_file && dump_flags & TDF_DETAILS)
	    {
	      fprintf (dump_file, "Match-and-simplified ");
	      print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, "\n");
	    }
	  return res;
	}
    }

  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant value,
                     if any.  */
                  return (*valueize) (rhs);
                }
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  poly_int64 offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && known_eq (CONSTRUCTOR_NELTS (rhs),
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i, nelts;
		  tree val;

		  /* A fully-initialized vector CONSTRUCTOR folds to a
		     VECTOR_CST when every valueized element is a
		     constant.  */
		  nelts = CONSTRUCTOR_NELTS (rhs);
		  tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec.quick_push (val);
		      else
			return NULL_TREE;
		    }

		  return vec.build ();
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

              if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      /* Replace a MEM_REF base by its constant address,
			 if known, before trying the aggregate fold.  */
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
	    return NULL_TREE;

          case GIMPLE_BINARY_RHS:
	    /* Translate &x + CST into an invariant form suitable for
	       further propagation.  */
	    if (subcode == POINTER_PLUS_EXPR)
	      {
		tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
		tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		if (TREE_CODE (op0) == ADDR_EXPR
		    && TREE_CODE (op1) == INTEGER_CST)
		  {
		    tree off = fold_convert (ptr_type_node, op1);
		    return build_fold_addr_expr_loc
			(loc,
			 fold_build2 (MEM_REF,
				      TREE_TYPE (TREE_TYPE (op0)),
				      unshare_expr (op0), off));
		  }
	      }
	    /* Canonicalize bool != 0 and bool == 0 appearing after
	       valueization.  While gimple_simplify handles this
	       it can get confused by the ~X == 1 -> X == 0 transform
	       which we can't reduce to a SSA name or a constant
	       (and we have no way to tell gimple_simplify to not
	       consider those transforms in the first place).  */
	    else if (subcode == EQ_EXPR
		     || subcode == NE_EXPR)
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree op0 = gimple_assign_rhs1 (stmt);
		if (useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (op0)))
		  {
		    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		    op0 = (*valueize) (op0);
		    /* Canonicalize a constant, if any, to the RHS.  */
		    if (TREE_CODE (op0) == INTEGER_CST)
		      std::swap (op0, op1);
		    if (TREE_CODE (op1) == INTEGER_CST
			&& ((subcode == NE_EXPR && integer_zerop (op1))
			    || (subcode == EQ_EXPR && integer_onep (op1))))
		      return op0;
		  }
	      }
	    return NULL_TREE;

          case GIMPLE_TERNARY_RHS:
            {
              /* Handle ternary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
              tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
              return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
            }

          default:
            gcc_unreachable ();
          }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      case IFN_BUILTIN_EXPECT:
		  {
		    /* __builtin_expect folds to its first argument when
		       that argument valueizes to a constant.  */
		    tree arg0 = gimple_call_arg (stmt, 0);
		    tree op0 = (*valueize) (arg0);
		    if (TREE_CODE (op0) == INTEGER_CST)
		      return op0;
		    return NULL_TREE;
		  }
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    /* Fold the UBSAN check to the plain operation, accepting
	       only results that provably did not overflow.  */
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && fndecl_built_in_p (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    /* A direct call to a built-in: valueize all arguments and
	       try constant folding the call itself.  */
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					 gimple_call_return_type (call_stmt),
					 fn, gimple_call_num_args (stmt), args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
6589 
6590 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6591    Returns NULL_TREE if folding to a constant is not possible, otherwise
6592    returns a constant according to is_gimple_min_invariant.  */
6593 
6594 tree
6595 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6596 {
6597   tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6598   if (res && is_gimple_min_invariant (res))
6599     return res;
6600   return NULL_TREE;
6601 }
6602 
6603 
6604 /* The following set of functions are supposed to fold references using
6605    their constant initializers.  */
6606 
/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, poly_int64_pod *bit_offset,
		      tree (*valueize)(tree))
{
  poly_int64 bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF's constant offset into *BIT_OFFSET and strip
	 the reference down to the (valueized) address it is based on.  */
      poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
      if (!boff.to_shwi (bit_offset))
	return NULL_TREE;

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
        return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      /* A view conversion does not move bytes; look through it.  */
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Only components with a known constant position and size can be
	 accounted for by adjusting *BIT_OFFSET and recursing.  */
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      if (!known_size_p (max_size) || maybe_ne (size, max_size))
	return NULL_TREE;
      *bit_offset +=  bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
6682 
6683 /* CTOR is CONSTRUCTOR of an array type.  Fold a reference of SIZE bits
6684    to the memory at bit OFFSET.     When non-null, TYPE is the expected
6685    type of the reference; otherwise the type of the referenced element
6686    is used instead. When SIZE is zero, attempt to fold a reference to
6687    the entire element which OFFSET refers to.  Increment *SUBOFF by
6688    the bit offset of the accessed element.  */
6689 
static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl,
			   unsigned HOST_WIDE_INT *suboff)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* When TYPE is non-null, verify that it specifies a constant-sized
     access that is not larger than the size of the array element.
     Avoid division by zero below when ELT_SIZE is zero, such as with
     the result of an initializer for a zero-length array or an empty
     struct.  */
  if (elt_size == 0
      || (type
	  && (!TYPE_SIZE_UNIT (type)
	      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
	      || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type)))))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;
  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    {
      if (!size && TREE_CODE (val) != CONSTRUCTOR)
	{
	  /* For the final reference to the entire accessed element
	     (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
	     may be null) in favor of the type of the element, and set
	     SIZE to the size of the accessed element.  */
	  inner_offset = 0;
	  type = TREE_TYPE (val);
	  size = elt_size.to_uhwi () * BITS_PER_UNIT;
	}

      /* Account for the element's position and recurse into its
	 initializer.  */
      *suboff += (access_index * elt_size * BITS_PER_UNIT).to_uhwi ();
      return fold_ctor_reference (type, val, inner_offset, size, from_decl,
				  suboff);
    }

  /* Memory not explicitly mentioned in constructor is 0 (or
     the reference is out of range).  */
  return type ? build_zero_cst (type) : NULL_TREE;
}
6765 
6766 /* CTOR is CONSTRUCTOR of an aggregate or vector.  Fold a reference
6767    of SIZE bits to the memory at bit OFFSET.   When non-null, TYPE
6768    is the expected type of the reference; otherwise the type of
6769    the referenced member is used instead.  When SIZE is zero,
6770    attempt to fold a reference to the entire member which OFFSET
6771    refers to; in this case.  Increment *SUBOFF by the bit offset
6772    of the accessed member.  */
6773 
static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl,
			      unsigned HOST_WIDE_INT *suboff)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);

      if (!field_size)
	{
	  /* Determine the size of the flexible array member from
	     the size of the initializer provided for it.  */
	  field_size = TYPE_SIZE (TREE_TYPE (cval));
	}

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      offset_int bitoffset
	= (wi::to_offset (field_offset)
	   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      offset_int bitoffset_end;
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      /* Compute the bit offset of the end of the desired access.
	 As a special case, if the size of the desired access is
	 zero, assume the access is to the entire field (and let
	 the caller make any necessary adjustments based on the
	 bit offset recorded in *SUBOFF).  */
      offset_int access_end = offset_int (offset);
      if (size)
	access_end += size;
      else
	access_end = bitoffset_end;

      /* Is there any overlap between the desired access at
	 [OFFSET, OFFSET+SIZE) and the offset of the field within
	 the object at [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  *suboff += bitoffset.to_uhwi ();

	  if (!size && TREE_CODE (cval) != CONSTRUCTOR)
	    {
	      /* For the final reference to the entire accessed member
		 (SIZE is zero), reset OFFSET, disregard TYPE (which may
		 be null) in favor of the type of the member, and set
		 SIZE to the size of the accessed member.  */
	      offset = bitoffset.to_uhwi ();
	      type = TREE_TYPE (cval);
	      size = (bitoffset_end - bitoffset).to_uhwi ();
	    }

	  /* We do have overlap.  Now see if the field is large enough
	     to cover the access.  Give up for accesses that extend
	     beyond the end of the object or that span multiple fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;

	  /* Recurse into the field's initializer with the offset
	     rebased to the start of the field.  */
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl, suboff);
	}
    }
  /* Memory not explicitly mentioned in constructor is 0.  */
  return type ? build_zero_cst (type) : NULL_TREE;
}
6865 
6866 /* CTOR is value initializing memory.  Fold a reference of TYPE and
6867    bit size POLY_SIZE to the memory at bit POLY_OFFSET.  When SIZE
6868    is zero, attempt to fold a reference to the entire subobject
6869    which OFFSET refers to.  This is used when folding accesses to
6870    string members of aggregates.  When non-null, set *SUBOFF to
6871    the bit offset of the accessed subobject.  */
6872 
tree
fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
		     const poly_uint64 &poly_size, tree from_decl,
		     unsigned HOST_WIDE_INT *suboff /* = NULL */)
{
  tree ret;

  /* We found the field with exact match.  */
  if (type
      && useless_type_conversion_p (type, TREE_TYPE (ctor))
      && known_eq (poly_offset, 0U))
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* The remaining optimizations need a constant size and offset.  */
  unsigned HOST_WIDE_INT size, offset;
  if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
    return NULL_TREE;

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
	{
	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
	  if (ret)
	    STRIP_USELESS_TYPE_CONVERSION (ret);
	}
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      /* Use a dummy accumulator when the caller does not care about
	 the accessed subobject's bit offset.  */
      unsigned HOST_WIDE_INT dummy = 0;
      if (!suboff)
	suboff = &dummy;

      /* Dispatch on the shape of the CONSTRUCTOR: array/vector
	 constructors are indexed, everything else is field-based.  */
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl, suboff);

      return fold_nonarray_ctor_reference (type, ctor, offset, size,
					   from_decl, suboff);
    }

  return NULL_TREE;
}
6938 
6939 /* Return the tree representing the element referenced by T if T is an
6940    ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
6941    names using VALUEIZE.  Return NULL_TREE otherwise.  */
6942 
tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  poly_int64 offset, size, max_size;
  tree tem;
  bool reverse;

  /* Never fold a volatile access away.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && poly_int_tree_p (idx))
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       poly_int_tree_p (low_bound))
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      /* Bit offset of element IDX from the start of the array:
		 (IDX - LOW_BOUND) * UNIT_SIZE * BITS_PER_UNIT.  */
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (idx)
			    - wi::to_poly_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));
	      woffset *= tree_to_uhwi (unit_size);
	      woffset *= BITS_PER_UNIT;
	      if (woffset.to_shwi (&offset))
		{
		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (maybe_lt (offset, 0))
		    return NULL_TREE;
		  /* We cannot determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (!known_size_p (max_size) || maybe_ne (max_size, size))
	return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold through to the complex operand and extract the part.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
			      TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
7052 
/* Like fold_const_aggregate_ref_1, but without valueizing SSA names,
   i.e. only folding references into aggregates whose constant
   initializers are directly visible.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
7058 
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol).  Otherwise just return NULL in that case.  */
7064 
tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* The C++ FE can now produce indexed fields, and we check if the indexes
     match.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
      gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
7168 
7169 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
7170    is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
7171    KNOWN_BINFO carries the binfo describing the true type of
7172    OBJ_TYPE_REF_OBJECT(REF).
7173    Set CAN_REFER if non-NULL to false if method
7174    is not referable or if the virtual table is ill-formed (such as rewriten
7175    by non-C++ produced symbol). Otherwise just return NULL in that calse.  */
7176 
7177 tree
7178 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
7179 				  bool *can_refer)
7180 {
7181   unsigned HOST_WIDE_INT offset;
7182   tree v;
7183 
7184   v = BINFO_VTABLE (known_binfo);
7185   /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
7186   if (!v)
7187     return NULL_TREE;
7188 
7189   if (!vtable_pointer_value_to_vtable (v, &v, &offset))
7190     {
7191       if (can_refer)
7192 	*can_refer = false;
7193       return NULL_TREE;
7194     }
7195   return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
7196 }
7197 
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view. */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  /* Look through no-op conversions on the pointer value.  */
  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  /* Give up on non-pointers and on ref-all pointers, which may alias
     anything.  */
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
        return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
       {
         tree type_domain = TYPE_DOMAIN (optype);
         tree min_val = size_zero_node;
         if (type_domain && TYPE_MIN_VALUE (type_domain))
           min_val = TYPE_MIN_VALUE (type_domain);
	 /* Only a constant lower bound can be used directly as the
	    ARRAY_REF index.  */
	 if (TREE_CODE (min_val) == INTEGER_CST)
	   return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
       }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
          unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
            = tree_to_shwi (part_width) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);
	  /* The accessed element must lie within the vector.  */
	  if (known_lt (offset / part_widthi,
			TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
            return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
                                part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
        {
          tree size = TYPE_SIZE_UNIT (type);
	  /* The imaginary part lives one element past the real part, so
	     the offset must equal the element size.  */
          if (tree_int_cst_equal (size, off))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
        }

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recursively simplify the inner indirection, falling back to an
	 explicit INDIRECT_REF when no simplification applies.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  /* No simplification found.  */
  return NULL_TREE;
}
7318 
7319 /* Return true if CODE is an operation that when operating on signed
7320    integer types involves undefined behavior on overflow and the
7321    operation can be expressed with unsigned arithmetic.  */
7322 
7323 bool
7324 arith_code_with_undefined_signed_overflow (tree_code code)
7325 {
7326   switch (code)
7327     {
7328     case PLUS_EXPR:
7329     case MINUS_EXPR:
7330     case MULT_EXPR:
7331     case NEGATE_EXPR:
7332     case POINTER_PLUS_EXPR:
7333       return true;
7334     default:
7335       return false;
7336     }
7337 }
7338 
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  /* The unsigned counterpart of the LHS type, in which overflow wraps
     and is therefore well-defined.  */
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each operand (ops 1 and up; op 0 is the LHS) to the
     unsigned type, queueing the conversion statements in STMTS.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  /* Give STMT a fresh unsigned LHS; the original LHS is re-established
     by the trailing conversion added below.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* Pointer arithmetic becomes plain unsigned addition.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original LHS type.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
7375 
7376 
7377 /* The valueization hook we use for the gimple_build API simplification.
7378    This makes us match fold_buildN behavior by only combining with
7379    statements in the sequence(s) we are currently building.  */
7380 
7381 static tree
7382 gimple_build_valueize (tree op)
7383 {
7384   if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7385     return op;
7386   return NULL_TREE;
7387 }
7388 
7389 /* Build the expression CODE OP0 of type TYPE with location LOC,
7390    simplifying it first if possible.  Returns the built
7391    expression value and appends statements possibly defining it
7392    to SEQ.  */
7393 
7394 tree
7395 gimple_build (gimple_seq *seq, location_t loc,
7396 	      enum tree_code code, tree type, tree op0)
7397 {
7398   tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7399   if (!res)
7400     {
7401       res = create_tmp_reg_or_ssa_name (type);
7402       gimple *stmt;
7403       if (code == REALPART_EXPR
7404 	  || code == IMAGPART_EXPR
7405 	  || code == VIEW_CONVERT_EXPR)
7406 	stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7407       else
7408 	stmt = gimple_build_assign (res, code, op0);
7409       gimple_set_location (stmt, loc);
7410       gimple_seq_add_stmt_without_update (seq, stmt);
7411     }
7412   return res;
7413 }
7414 
7415 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7416    simplifying it first if possible.  Returns the built
7417    expression value and appends statements possibly defining it
7418    to SEQ.  */
7419 
7420 tree
7421 gimple_build (gimple_seq *seq, location_t loc,
7422 	      enum tree_code code, tree type, tree op0, tree op1)
7423 {
7424   tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7425   if (!res)
7426     {
7427       res = create_tmp_reg_or_ssa_name (type);
7428       gimple *stmt = gimple_build_assign (res, code, op0, op1);
7429       gimple_set_location (stmt, loc);
7430       gimple_seq_add_stmt_without_update (seq, stmt);
7431     }
7432   return res;
7433 }
7434 
7435 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7436    simplifying it first if possible.  Returns the built
7437    expression value and appends statements possibly defining it
7438    to SEQ.  */
7439 
7440 tree
7441 gimple_build (gimple_seq *seq, location_t loc,
7442 	      enum tree_code code, tree type, tree op0, tree op1, tree op2)
7443 {
7444   tree res = gimple_simplify (code, type, op0, op1, op2,
7445 			      seq, gimple_build_valueize);
7446   if (!res)
7447     {
7448       res = create_tmp_reg_or_ssa_name (type);
7449       gimple *stmt;
7450       if (code == BIT_FIELD_REF)
7451 	stmt = gimple_build_assign (res, code,
7452 				    build3 (code, type, op0, op1, op2));
7453       else
7454 	stmt = gimple_build_assign (res, code, op0, op1, op2);
7455       gimple_set_location (stmt, loc);
7456       gimple_seq_add_stmt_without_update (seq, stmt);
7457     }
7458   return res;
7459 }
7460 
7461 /* Build the call FN (ARG0) with a result of type TYPE
7462    (or no result if TYPE is void) with location LOC,
7463    simplifying it first if possible.  Returns the built
7464    expression value (or NULL_TREE if TYPE is void) and appends
7465    statements possibly defining it to SEQ.  */
7466 
7467 tree
7468 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7469 	      tree type, tree arg0)
7470 {
7471   tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7472   if (!res)
7473     {
7474       gcall *stmt;
7475       if (internal_fn_p (fn))
7476 	stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7477       else
7478 	{
7479 	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7480 	  stmt = gimple_build_call (decl, 1, arg0);
7481 	}
7482       if (!VOID_TYPE_P (type))
7483 	{
7484 	  res = create_tmp_reg_or_ssa_name (type);
7485 	  gimple_call_set_lhs (stmt, res);
7486 	}
7487       gimple_set_location (stmt, loc);
7488       gimple_seq_add_stmt_without_update (seq, stmt);
7489     }
7490   return res;
7491 }
7492 
7493 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7494    (or no result if TYPE is void) with location LOC,
7495    simplifying it first if possible.  Returns the built
7496    expression value (or NULL_TREE if TYPE is void) and appends
7497    statements possibly defining it to SEQ.  */
7498 
7499 tree
7500 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7501 	      tree type, tree arg0, tree arg1)
7502 {
7503   tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7504   if (!res)
7505     {
7506       gcall *stmt;
7507       if (internal_fn_p (fn))
7508 	stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7509       else
7510 	{
7511 	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7512 	  stmt = gimple_build_call (decl, 2, arg0, arg1);
7513 	}
7514       if (!VOID_TYPE_P (type))
7515 	{
7516 	  res = create_tmp_reg_or_ssa_name (type);
7517 	  gimple_call_set_lhs (stmt, res);
7518 	}
7519       gimple_set_location (stmt, loc);
7520       gimple_seq_add_stmt_without_update (seq, stmt);
7521     }
7522   return res;
7523 }
7524 
7525 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7526    (or no result if TYPE is void) with location LOC,
7527    simplifying it first if possible.  Returns the built
7528    expression value (or NULL_TREE if TYPE is void) and appends
7529    statements possibly defining it to SEQ.  */
7530 
7531 tree
7532 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7533 	      tree type, tree arg0, tree arg1, tree arg2)
7534 {
7535   tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7536 			      seq, gimple_build_valueize);
7537   if (!res)
7538     {
7539       gcall *stmt;
7540       if (internal_fn_p (fn))
7541 	stmt = gimple_build_call_internal (as_internal_fn (fn),
7542 					   3, arg0, arg1, arg2);
7543       else
7544 	{
7545 	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7546 	  stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7547 	}
7548       if (!VOID_TYPE_P (type))
7549 	{
7550 	  res = create_tmp_reg_or_ssa_name (type);
7551 	  gimple_call_set_lhs (stmt, res);
7552 	}
7553       gimple_set_location (stmt, loc);
7554       gimple_seq_add_stmt_without_update (seq, stmt);
7555     }
7556   return res;
7557 }
7558 
7559 /* Build the conversion (TYPE) OP with a result of type TYPE
7560    with location LOC if such conversion is neccesary in GIMPLE,
7561    simplifying it first.
7562    Returns the built expression value and appends
7563    statements possibly defining it to SEQ.  */
7564 
7565 tree
7566 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7567 {
7568   if (useless_type_conversion_p (type, TREE_TYPE (op)))
7569     return op;
7570   return gimple_build (seq, loc, NOP_EXPR, type, op);
7571 }
7572 
7573 /* Build the conversion (ptrofftype) OP with a result of a type
7574    compatible with ptrofftype with location LOC if such conversion
7575    is neccesary in GIMPLE, simplifying it first.
7576    Returns the built expression value and appends
7577    statements possibly defining it to SEQ.  */
7578 
7579 tree
7580 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7581 {
7582   if (ptrofftype_p (TREE_TYPE (op)))
7583     return op;
7584   return gimple_convert (seq, loc, sizetype, op);
7585 }
7586 
7587 /* Build a vector of type TYPE in which each element has the value OP.
7588    Return a gimple value for the result, appending any new statements
7589    to SEQ.  */
7590 
7591 tree
7592 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7593 			      tree op)
7594 {
7595   if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7596       && !CONSTANT_CLASS_P (op))
7597     return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7598 
7599   tree res, vec = build_vector_from_val (type, op);
7600   if (is_gimple_val (vec))
7601     return vec;
7602   if (gimple_in_ssa_p (cfun))
7603     res = make_ssa_name (type);
7604   else
7605     res = create_tmp_reg (type);
7606   gimple *stmt = gimple_build_assign (res, vec);
7607   gimple_set_location (stmt, loc);
7608   gimple_seq_add_stmt_without_update (seq, stmt);
7609   return res;
7610 }
7611 
7612 /* Build a vector from BUILDER, handling the case in which some elements
7613    are non-constant.  Return a gimple value for the result, appending any
7614    new instructions to SEQ.
7615 
7616    BUILDER must not have a stepped encoding on entry.  This is because
7617    the function is not geared up to handle the arithmetic that would
7618    be needed in the variable case, and any code building a vector that
7619    is known to be constant should use BUILDER->build () directly.  */
7620 
7621 tree
7622 gimple_build_vector (gimple_seq *seq, location_t loc,
7623 		     tree_vector_builder *builder)
7624 {
7625   gcc_assert (builder->nelts_per_pattern () <= 2);
7626   unsigned int encoded_nelts = builder->encoded_nelts ();
7627   for (unsigned int i = 0; i < encoded_nelts; ++i)
7628     if (!TREE_CONSTANT ((*builder)[i]))
7629       {
7630 	tree type = builder->type ();
7631 	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
7632 	vec<constructor_elt, va_gc> *v;
7633 	vec_alloc (v, nelts);
7634 	for (i = 0; i < nelts; ++i)
7635 	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
7636 
7637 	tree res;
7638 	if (gimple_in_ssa_p (cfun))
7639 	  res = make_ssa_name (type);
7640 	else
7641 	  res = create_tmp_reg (type);
7642 	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7643 	gimple_set_location (stmt, loc);
7644 	gimple_seq_add_stmt_without_update (seq, stmt);
7645 	return res;
7646       }
7647   return builder->build ();
7648 }
7649 
7650 /* Return true if the result of assignment STMT is known to be non-negative.
7651    If the return value is based on the assumption that signed overflow is
7652    undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7653    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
7654 
7655 static bool
7656 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7657 				   int depth)
7658 {
7659   enum tree_code code = gimple_assign_rhs_code (stmt);
7660   switch (get_gimple_rhs_class (code))
7661     {
7662     case GIMPLE_UNARY_RHS:
7663       return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7664 					     gimple_expr_type (stmt),
7665 					     gimple_assign_rhs1 (stmt),
7666 					     strict_overflow_p, depth);
7667     case GIMPLE_BINARY_RHS:
7668       return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7669 					      gimple_expr_type (stmt),
7670 					      gimple_assign_rhs1 (stmt),
7671 					      gimple_assign_rhs2 (stmt),
7672 					      strict_overflow_p, depth);
7673     case GIMPLE_TERNARY_RHS:
7674       return false;
7675     case GIMPLE_SINGLE_RHS:
7676       return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7677 					      strict_overflow_p, depth);
7678     case GIMPLE_INVALID_RHS:
7679       break;
7680     }
7681   gcc_unreachable ();
7682 }
7683 
7684 /* Return true if return value of call STMT is known to be non-negative.
7685    If the return value is based on the assumption that signed overflow is
7686    undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7687    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
7688 
7689 static bool
7690 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7691 				 int depth)
7692 {
7693   tree arg0 = gimple_call_num_args (stmt) > 0 ?
7694     gimple_call_arg (stmt, 0) : NULL_TREE;
7695   tree arg1 = gimple_call_num_args (stmt) > 1 ?
7696     gimple_call_arg (stmt, 1) : NULL_TREE;
7697 
7698   return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7699 					gimple_call_combined_fn (stmt),
7700 					arg0,
7701 					arg1,
7702 					strict_overflow_p, depth);
7703 }
7704 
7705 /* Return true if return value of call STMT is known to be non-negative.
7706    If the return value is based on the assumption that signed overflow is
7707    undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7708    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
7709 
7710 static bool
7711 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7712 				int depth)
7713 {
7714   for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7715     {
7716       tree arg = gimple_phi_arg_def (stmt, i);
7717       if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7718 	return false;
7719     }
7720   return true;
7721 }
7722 
7723 /* Return true if STMT is known to compute a non-negative value.
7724    If the return value is based on the assumption that signed overflow is
7725    undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7726    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
7727 
7728 bool
7729 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7730 				 int depth)
7731 {
7732   switch (gimple_code (stmt))
7733     {
7734     case GIMPLE_ASSIGN:
7735       return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7736 						depth);
7737     case GIMPLE_CALL:
7738       return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7739 					      depth);
7740     case GIMPLE_PHI:
7741       return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7742 					     depth);
7743     default:
7744       return false;
7745     }
7746 }
7747 
7748 /* Return true if the floating-point value computed by assignment STMT
7749    is known to have an integer value.  We also allow +Inf, -Inf and NaN
7750    to be considered integer values. Return false for signaling NaN.
7751 
7752    DEPTH is the current nesting depth of the query.  */
7753 
7754 static bool
7755 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7756 {
7757   enum tree_code code = gimple_assign_rhs_code (stmt);
7758   switch (get_gimple_rhs_class (code))
7759     {
7760     case GIMPLE_UNARY_RHS:
7761       return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7762 					  gimple_assign_rhs1 (stmt), depth);
7763     case GIMPLE_BINARY_RHS:
7764       return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7765 					   gimple_assign_rhs1 (stmt),
7766 					   gimple_assign_rhs2 (stmt), depth);
7767     case GIMPLE_TERNARY_RHS:
7768       return false;
7769     case GIMPLE_SINGLE_RHS:
7770       return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7771     case GIMPLE_INVALID_RHS:
7772       break;
7773     }
7774   gcc_unreachable ();
7775 }
7776 
7777 /* Return true if the floating-point value computed by call STMT is known
7778    to have an integer value.  We also allow +Inf, -Inf and NaN to be
7779    considered integer values. Return false for signaling NaN.
7780 
7781    DEPTH is the current nesting depth of the query.  */
7782 
7783 static bool
7784 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7785 {
7786   tree arg0 = (gimple_call_num_args (stmt) > 0
7787 	       ? gimple_call_arg (stmt, 0)
7788 	       : NULL_TREE);
7789   tree arg1 = (gimple_call_num_args (stmt) > 1
7790 	       ? gimple_call_arg (stmt, 1)
7791 	       : NULL_TREE);
7792   return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7793 				     arg0, arg1, depth);
7794 }
7795 
7796 /* Return true if the floating-point result of phi STMT is known to have
7797    an integer value.  We also allow +Inf, -Inf and NaN to be considered
7798    integer values. Return false for signaling NaN.
7799 
7800    DEPTH is the current nesting depth of the query.  */
7801 
7802 static bool
7803 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7804 {
7805   for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7806     {
7807       tree arg = gimple_phi_arg_def (stmt, i);
7808       if (!integer_valued_real_single_p (arg, depth + 1))
7809 	return false;
7810     }
7811   return true;
7812 }
7813 
7814 /* Return true if the floating-point value computed by STMT is known
7815    to have an integer value.  We also allow +Inf, -Inf and NaN to be
7816    considered integer values. Return false for signaling NaN.
7817 
7818    DEPTH is the current nesting depth of the query.  */
7819 
7820 bool
7821 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7822 {
7823   switch (gimple_code (stmt))
7824     {
7825     case GIMPLE_ASSIGN:
7826       return gimple_assign_integer_valued_real_p (stmt, depth);
7827     case GIMPLE_CALL:
7828       return gimple_call_integer_valued_real_p (stmt, depth);
7829     case GIMPLE_PHI:
7830       return gimple_phi_integer_valued_real_p (stmt, depth);
7831     default:
7832       return false;
7833     }
7834 }
7835