/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "expmed.h"
#include "optabs-query.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-dom.h"
#include "builtins.h"
#include "tree-cfgcleanup.h"
#include "cfganal.h"
#include "optabs-tree.h"

/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
        if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
        if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
        if (a != 0) goto ... else goto ...

   (Assuming a is of integral type and x is a boolean, or x is of
    integral type and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or
     ptr = (type1*)&type2var;
     res = *ptr

   Will get turned into (if type1 and type2 are the same size
   and neither have volatile on them):
     res = VIEW_CONVERT_EXPR<type1>(type2var)

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

  Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

  Will get turned into:

     ptr2 = &x[index];

  Or
    ssa = (int) decl
    res = ssa & 1

  Provided that decl has known alignment >= 2, will get turned into

    res = 0

  We also propagate casts into SWITCH_EXPR and COND_EXPR conditions
  (which lets us remove the cast) and {NOT_EXPR,NEG_EXPR} into a
  subsequent {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */
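
/* As an illustrative example of the pointer-arithmetic class above
   (not taken from the original comment), assuming a 4-byte int,
   source code such as

     int a[10];
     int *p = &a[0];
     int *q = p + 1;     (GIMPLE: q = p p+ 4, a byte offset)

   has its constant byte offset divided by the element size, giving
   q = &a[1].  */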

static bool forward_propagate_addr_expr (tree, tree, bool);

/* Set to true if we delete dead edges during the optimization.  */
static bool cfg_changed;

static tree rhs_to_tree (tree type, gimple *stmt);

static bitmap to_purge;

/* Const-and-copy lattice.  */
static vec<tree> lattice;

/* Set the lattice entry for NAME to VAL.  */
static void
fwprop_set_lattice_val (tree name, tree val)
{
  if (TREE_CODE (name) == SSA_NAME)
    {
      if (SSA_NAME_VERSION (name) >= lattice.length ())
	{
	  lattice.reserve (num_ssa_names - lattice.length ());
	  lattice.quick_grow_cleared (num_ssa_names);
	}
      lattice[SSA_NAME_VERSION (name)] = val;
    }
}

/* Invalidate the lattice entry for NAME, done when releasing SSA names.  */
static void
fwprop_invalidate_lattice (tree name)
{
  if (name
      && TREE_CODE (name) == SSA_NAME
      && SSA_NAME_VERSION (name) < lattice.length ())
    lattice[SSA_NAME_VERSION (name)] = NULL_TREE;
}


215    trivial copies.  Returns the statement which defines the
216    propagation source or NULL_TREE if there is no such one.
217    If SINGLE_USE_ONLY is set considers only sources which have
218    a single use chain up to NAME.  If SINGLE_USE_P is non-null,
219    it is set to whether the chain to NAME is a single use chain
220    or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */
221 

static gimple *
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    gimple *def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
	single_use = false;
	if (single_use_only)
	  return NULL;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (!is_gimple_assign (def_stmt))
      return NULL;

    /* If def_stmt is a simple copy, continue looking.  */
    if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
      name = gimple_assign_rhs1 (def_stmt);
    else
      {
	if (!single_use_only && single_use_p)
	  *single_use_p = single_use;

	return def_stmt;
      }
  } while (1);
}
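
/* Illustrative example: given the copy chain

     a_1 = b_2 < c_3;
     d_4 = a_1;
     e_5 = d_4;

   get_prop_source_stmt (e_5, false, &single_use_p) skips the two
   trivial copies and returns the statement defining a_1, setting
   single_use_p to whether every link in the chain has a single use.  */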

/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (gimple *def_stmt)
{
  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can always be propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  if (stmt_references_abnormal_ssa_name (def_stmt))
    return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we cannot apply optimizations, as some targets require
     function pointers to be canonicalized, and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
          && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
        return false;
    }

  return true;
}

/* Remove a chain of dead statements starting at the definition of
   NAME.  The chain is linked via the first operand of the defining statements.
   If NAME was replaced in its only use then this function can be used
   to clean up dead stmts.  The function handles already released SSA
   names gracefully.
   Returns true if cleanup-cfg has to run.  */

static bool
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple *stmt;
  bool cfg_changed = false;

  do {
    basic_block bb;

    if (SSA_NAME_IN_FREE_LIST (name)
	|| SSA_NAME_IS_DEFAULT_DEF (name)
	|| !has_zero_uses (name))
      return cfg_changed;

    stmt = SSA_NAME_DEF_STMT (name);
    if (gimple_code (stmt) == GIMPLE_PHI
	|| gimple_has_side_effects (stmt))
      return cfg_changed;

    bb = gimple_bb (stmt);
    gsi = gsi_for_stmt (stmt);
    unlink_stmt_vdef (stmt);
    if (gsi_remove (&gsi, true))
      bitmap_set_bit (to_purge, bb->index);
    fwprop_invalidate_lattice (gimple_get_lhs (stmt));
    release_defs (stmt);

    name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
  } while (name && TREE_CODE (name) == SSA_NAME);

  return cfg_changed;
}
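
/* Illustrative example: once the only use of t_3 in

     a_1 = &x;
     t_3 = a_1 p+ 4;
     *t_3 = 0;

   has been rewritten to no longer mention t_3, calling
   remove_prop_source_from_use (t_3) removes the now-dead definition
   of t_3 and then follows its first operand to also remove the
   definition of a_1 once it has become unused.  */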

/* Return the rhs of a gassign *STMT in the form of a single tree,
   converted to type TYPE.

   This should disappear, but is needed so we can combine expressions and use
   the fold() interfaces.  Long term, we need to develop folding and combine
   routines that deal with gimple exclusively.  */

static tree
rhs_to_tree (tree type, gimple *stmt)
{
  location_t loc = gimple_location (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
    return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
			    gimple_assign_rhs2 (stmt),
			    gimple_assign_rhs3 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
			gimple_assign_rhs2 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    return build1 (code, type, gimple_assign_rhs1 (stmt));
  else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    return gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();
}

/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (gimple *stmt, enum tree_code code, tree type,
			tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  fold_defer_overflow_warnings ();
  t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
  if (!t)
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);

  return t;
}
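
/* A worked example (illustrative): for code == EQ_EXPR,
   op0 = a_1 + 3 and op1 = 5 with unsigned a_1, fold_binary_loc
   simplifies the combination to a_1 == 2, which
   canonicalize_cond_expr_cond accepts; with INVARIANT_ONLY set, such
   a non-constant result would be rejected and NULL_TREE returned.  */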

/* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
   of its operands.  Return a new comparison tree or NULL_TREE if there
   were no simplifying combines.  */

static tree
forward_propagate_into_comparison_1 (gimple *stmt,
				     enum tree_code code, tree type,
				     tree op0, tree op1)
{
  tree tmp = NULL_TREE;
  tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* For comparisons use the first operand, that is likely to
     simplify comparisons against constants.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple *def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
      if (def_stmt && can_propagate_from (def_stmt))
	{
	  enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
	  bool invariant_only_p = !single_use0_p;

	  rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);

	  /* Always combine comparisons or conversions from booleans.  */
	  if (TREE_CODE (op1) == INTEGER_CST
	      && ((CONVERT_EXPR_CODE_P (def_code)
		   && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs0, 0)))
		      == BOOLEAN_TYPE)
		  || TREE_CODE_CLASS (def_code) == tcc_comparison))
	    invariant_only_p = false;

	  tmp = combine_cond_expr_cond (stmt, code, type,
					rhs0, op1, invariant_only_p);
	  if (tmp)
	    return tmp;
	}
    }

  /* If that wasn't successful, try the second operand.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      gimple *def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
      if (def_stmt && can_propagate_from (def_stmt))
	{
	  rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
	  tmp = combine_cond_expr_cond (stmt, code, type,
					op0, rhs1, !single_use1_p);
	  if (tmp)
	    return tmp;
	}
    }

  /* If that wasn't successful either, try both operands.  */
  if (rhs0 != NULL_TREE
      && rhs1 != NULL_TREE)
    tmp = combine_cond_expr_cond (stmt, code, type,
				  rhs0, rhs1,
				  !(single_use0_p && single_use1_p));

  return tmp;
}
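
/* Illustrative example of the above: for

     x_1 = a_2 + 3;
     tmp_4 = x_1 == 5;

   the definition of x_1 is substituted into the comparison and the
   whole expression is folded, yielding a_2 == 2 (valid e.g. for
   unsigned a_2, where the rewrite cannot change overflow behavior).  */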

/* Propagate from the ssa name definition statements of the assignment
   from a comparison at *GSI into the conditional if that simplifies it.
   Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
   otherwise returns 0.  */

static int
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree tmp;
  bool cfg_changed = false;
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);

  /* Combine the comparison with defining statements.  */
  tmp = forward_propagate_into_comparison_1 (stmt,
					     gimple_assign_rhs_code (stmt),
					     type, rhs1, rhs2);
  if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
    {
      gimple_assign_set_rhs_from_tree (gsi, tmp);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));

      if (TREE_CODE (rhs1) == SSA_NAME)
	cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
	cfg_changed |= remove_prop_source_from_use (rhs2);
      return cfg_changed ? 2 : 1;
    }

  return 0;
}

/* Propagate from the ssa name definition statements of the condition
   of GIMPLE_COND statement STMT into that conditional if that
   simplifies it.  Returns zero if no statement was changed, one if
   there were changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_cond.  */

static int
forward_propagate_into_gimple_cond (gcond *stmt)
{
  tree tmp;
  enum tree_code code = gimple_cond_code (stmt);
  bool cfg_changed = false;
  tree rhs1 = gimple_cond_lhs (stmt);
  tree rhs2 = gimple_cond_rhs (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
    return 0;

  tmp = forward_propagate_into_comparison_1 (stmt, code,
					     boolean_type_node,
					     rhs1, rhs2);
  if (tmp)
    {
      if (dump_file)
	{
	  fprintf (dump_file, "  Replaced '");
	  print_gimple_expr (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "' with '");
	  print_generic_expr (dump_file, tmp, 0);
	  fprintf (dump_file, "'\n");
	}

      gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
      update_stmt (stmt);

      if (TREE_CODE (rhs1) == SSA_NAME)
	cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
	cfg_changed |= remove_prop_source_from_use (rhs2);
      return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
    }

  /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges.  */
  if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
       || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
	   && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
      && ((code == EQ_EXPR
	   && integer_zerop (rhs2))
	  || (code == NE_EXPR
	      && integer_onep (rhs2))))
    {
      basic_block bb = gimple_bb (stmt);
      gimple_cond_set_code (stmt, NE_EXPR);
      gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
      EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      return 1;
    }

  return 0;
}
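
/* Illustrative example of the edge-swapping canonicalization above:

     _Bool b_1;
     if (b_1 == 0) goto bb_t; else goto bb_f;

   becomes if (b_1 != 0) with the EDGE_TRUE_VALUE / EDGE_FALSE_VALUE
   flags of the two successor edges exchanged, leaving the branch
   semantics unchanged.  */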


/* Propagate from the ssa name definition statements of the COND_EXPR
   in the rhs of statement STMT into the conditional if that simplifies it.
   Returns true if the stmt was changed, false otherwise.  */

static bool
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  tree tmp = NULL_TREE;
  tree cond = gimple_assign_rhs1 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (COMPARISON_CLASS_P (cond))
    tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
					       TREE_TYPE (cond),
					       TREE_OPERAND (cond, 0),
					       TREE_OPERAND (cond, 1));
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      enum tree_code def_code;
      tree name = cond;
      gimple *def_stmt = get_prop_source_stmt (name, true, NULL);
      if (!def_stmt || !can_propagate_from (def_stmt))
	return false;

      def_code = gimple_assign_rhs_code (def_stmt);
      if (TREE_CODE_CLASS (def_code) == tcc_comparison)
	tmp = fold_build2_loc (gimple_location (def_stmt),
			       def_code,
			       TREE_TYPE (cond),
			       gimple_assign_rhs1 (def_stmt),
			       gimple_assign_rhs2 (def_stmt));
    }

  if (tmp
      && is_gimple_condexpr (tmp))
    {
      if (dump_file)
	{
	  fprintf (dump_file, "  Replaced '");
	  print_generic_expr (dump_file, cond, 0);
	  fprintf (dump_file, "' with '");
	  print_generic_expr (dump_file, tmp, 0);
	  fprintf (dump_file, "'\n");
	}

      if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
				  : integer_onep (tmp))
	gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
      else if (integer_zerop (tmp))
	gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
      else
	gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
      stmt = gsi_stmt (*gsi_p);
      update_stmt (stmt);

      return true;
    }

  return false;
}
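
/* Illustrative example: for

     c_1 = a_2 < b_3;
     x_4 = c_1 ? y_5 : z_6;

   the comparison is propagated into the COND_EXPR, giving
   x_4 = a_2 < b_3 ? y_5 : z_6.  If the condition instead folds to
   the constant 1 (all-ones for VEC_COND_EXPR), the statement becomes
   the plain copy x_4 = y_5; if it folds to 0, x_4 = z_6.  */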

/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (gimple *stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
    bitmap_set_bit (to_purge, gimple_bb (stmt)->index);

  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
}

/* NAME is a SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation may
   not be totally successful, yet things may have been changed).  */

static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
			       gimple_stmt_iterator *use_stmt_gsi,
			       bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple *use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Do not perform copy-propagation but recurse through copy chains.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == SSA_NAME)
    return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

  /* The use statement could be a conversion.  Recurse to the uses of the
     lhs as copyprop does not copy through pointer to integer to pointer
     conversions and FRE does not catch all cases either.
     Treat the case of a single-use name and
     a conversion to def_rhs type separate, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && CONVERT_EXPR_CODE_P (rhs_code))
    {
      /* If there is a point in a conversion chain where the types match,
	 so that we can remove a conversion, re-materialize the address
	 here and stop.  */
      if (single_use_p
	  && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
	{
	  gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
	  gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
	  return true;
	}

      /* Else recurse if the conversion preserves the address value.  */
      if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
	   || POINTER_TYPE_P (TREE_TYPE (lhs)))
	  && (TYPE_PRECISION (TREE_TYPE (lhs))
	      >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
	return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

      return false;
    }

  /* If this isn't a conversion chain, from here on we can only propagate
     into compatible pointer contexts.  */
  if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
    return false;

  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
         to make sure we can build a valid constant offsetted address
	 for further propagation.  Simply rely on fold building that
	 and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
				 def_rhs,
				 fold_convert (ptr_type_node,
					       gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
	  && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
	return false;
      new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
						    TREE_TYPE (rhs));

      /* Recurse.  If we could propagate into all uses of lhs do not
	 bother to replace into the current use but just pretend we did.  */
      if (TREE_CODE (new_def_rhs) == ADDR_EXPR
	  && forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
	return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
	gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
					new_def_rhs);
      else if (is_gimple_min_invariant (new_def_rhs))
	gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR, new_def_rhs);
      else
	return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
  while (handled_component_p (*lhsp))
    lhsp = &TREE_OPERAND (*lhsp, 0);
  lhs = *lhsp;

  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
							 &def_rhs_offset)))
	{
	  offset_int off = mem_ref_offset (lhs);
	  tree new_ptr;
	  off += def_rhs_offset;
	  if (TREE_CODE (def_rhs_base) == MEM_REF)
	    {
	      off += mem_ref_offset (def_rhs_base);
	      new_ptr = TREE_OPERAND (def_rhs_base, 0);
	    }
	  else
	    new_ptr = build_fold_addr_expr (def_rhs_base);
	  TREE_OPERAND (lhs, 0) = new_ptr;
	  TREE_OPERAND (lhs, 1)
	    = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
	  tidy_after_forward_propagate_addr (use_stmt);
	  /* Continue propagating into the RHS if this was not the only use.  */
	  if (single_use_p)
	    return true;
	}
      /* If the LHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
	 dereferenced address on the LHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (lhs, 1))
	       && ((gimple_assign_lhs (use_stmt) == lhs
		    && useless_type_conversion_p
		         (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
		          TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
		   || types_compatible_p (TREE_TYPE (lhs),
					  TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
	       /* Don't forward anything into clobber stmts if it would result
		  in the lhs no longer being a MEM_REF.  */
	       && (!gimple_clobber_p (use_stmt)
		   || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
	{
	  tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
	  tree new_offset, new_base, saved, new_lhs;
	  while (handled_component_p (*def_rhs_basep))
	    def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
	  saved = *def_rhs_basep;
	  if (TREE_CODE (*def_rhs_basep) == MEM_REF)
	    {
	      new_base = TREE_OPERAND (*def_rhs_basep, 0);
	      new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
					 TREE_OPERAND (*def_rhs_basep, 1));
	    }
	  else
	    {
	      new_base = build_fold_addr_expr (*def_rhs_basep);
	      new_offset = TREE_OPERAND (lhs, 1);
	    }
	  *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
				   new_base, new_offset);
	  TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
	  TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
	  TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
	  new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
	  *lhsp = new_lhs;
	  TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
	  TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
	  *def_rhs_basep = saved;
	  tidy_after_forward_propagate_addr (use_stmt);
	  /* Continue propagating into the RHS if this was not the
	     only use.  */
	  if (single_use_p)
	    return true;
	}
      else
	/* We can have a struct assignment dereferencing our name twice.
	   Note that we didn't propagate into the lhs to not falsely
	   claim we did when propagating into the rhs.  */
	res = false;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
  if (TREE_CODE (*rhsp) == ADDR_EXPR)
    rhsp = &TREE_OPERAND (*rhsp, 0);
  while (handled_component_p (*rhsp))
    rhsp = &TREE_OPERAND (*rhsp, 0);
  rhs = *rhsp;

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      HOST_WIDE_INT def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
							 &def_rhs_offset)))
	{
	  offset_int off = mem_ref_offset (rhs);
	  tree new_ptr;
	  off += def_rhs_offset;
	  if (TREE_CODE (def_rhs_base) == MEM_REF)
	    {
	      off += mem_ref_offset (def_rhs_base);
	      new_ptr = TREE_OPERAND (def_rhs_base, 0);
	    }
	  else
	    new_ptr = build_fold_addr_expr (def_rhs_base);
	  TREE_OPERAND (rhs, 0) = new_ptr;
	  TREE_OPERAND (rhs, 1)
	    = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
	  fold_stmt_inplace (use_stmt_gsi);
	  tidy_after_forward_propagate_addr (use_stmt);
	  return res;
	}
      /* If the RHS is a plain dereference and the value type is the same as
         that of the pointed-to type of the address we can put the
	 dereferenced address on the RHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (rhs, 1))
	       && ((gimple_assign_rhs1 (use_stmt) == rhs
		    && useless_type_conversion_p
		         (TREE_TYPE (gimple_assign_lhs (use_stmt)),
		          TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
		   || types_compatible_p (TREE_TYPE (rhs),
					  TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
	{
	  tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
	  tree new_offset, new_base, saved, new_rhs;
	  while (handled_component_p (*def_rhs_basep))
	    def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
	  saved = *def_rhs_basep;
	  if (TREE_CODE (*def_rhs_basep) == MEM_REF)
	    {
	      new_base = TREE_OPERAND (*def_rhs_basep, 0);
	      new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
					 TREE_OPERAND (*def_rhs_basep, 1));
	    }
	  else
	    {
	      new_base = build_fold_addr_expr (*def_rhs_basep);
	      new_offset = TREE_OPERAND (rhs, 1);
	    }
	  *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
				   new_base, new_offset);
	  TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
	  TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
	  TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
	  new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
	  *rhsp = new_rhs;
	  TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
	  TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
	  *def_rhs_basep = saved;
	  fold_stmt_inplace (use_stmt_gsi);
	  tidy_after_forward_propagate_addr (use_stmt);
	  return res;
	}
    }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2.  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = build1_loc (gimple_location (use_stmt),
				 ADDR_EXPR, TREE_TYPE (def_rhs),
				 fold_build2 (MEM_REF,
					      TREE_TYPE (TREE_TYPE (def_rhs)),
					      unshare_expr (def_rhs),
					      fold_convert (ptr_type_node,
							    rhs2)));
      gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
      use_stmt = gsi_stmt (*use_stmt_gsi);
      update_stmt (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  return false;
}
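
/* Worked example for the &x[C1] p+ C2 case above (illustrative,
   assuming a 4-byte element size):

     p_1 = &x[2];
     q_2 = p_1 p+ 8;

   is rewritten so that q_2 computes the equivalent of &x p+ 16,
   i.e. the address of x[4], with both constant offsets folded into a
   single address computation.  */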

/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   PARENT_SINGLE_USE_P tells whether, in a recursive invocation, NAME
   was the single use in the previous invocation.  Pass true when
   calling this at top level.

   Returns true if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
{
  imm_use_iterator iter;
  gimple *use_stmt;
  bool all = true;
  bool single_use_p = parent_single_use_p && has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
	 there is nothing we can do.  */
      if (!is_gimple_assign (use_stmt))
	{
	  if (!is_gimple_debug (use_stmt))
	    all = false;
	  continue;
	}

      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
      result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
					      single_use_p);
      /* If the use has moved to a different statement adjust
	 the update machinery for the old statement too.  */
      if (use_stmt != gsi_stmt (gsi))
	{
	  update_stmt (use_stmt);
	  use_stmt = gsi_stmt (gsi);
	}
      update_stmt (use_stmt);
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
	  && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
	  && TREE_CODE (use_rhs) == SSA_NAME
	  && has_zero_uses (gimple_assign_lhs (use_stmt)))
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	  fwprop_invalidate_lattice (gimple_get_lhs (use_stmt));
	  release_defs (use_stmt);
	  gsi_remove (&gsi, true);
	}
    }

  return all && has_zero_uses (name);
}


/* Helper function for simplify_gimple_switch.  Remove case labels that
   have values outside the range of the new type.  */

static void
simplify_gimple_switch_label_vec (gswitch *stmt, tree index_type)
{
  unsigned int branch_num = gimple_switch_num_labels (stmt);
  auto_vec<tree> labels (branch_num);
  unsigned int i, len;

  /* Collect the existing case labels in a VEC, and preprocess it as if
     we are gimplifying a GENERIC SWITCH_EXPR.  */
  for (i = 1; i < branch_num; i++)
    labels.quick_push (gimple_switch_label (stmt, i));
  preprocess_case_label_vec_for_gimple (labels, index_type, NULL);

  /* If any labels were removed, replace the existing case labels
     in the GIMPLE_SWITCH statement with the correct ones.
     Note that the type updates were done in-place on the case labels,
     so we only have to replace the case labels in the GIMPLE_SWITCH
     if the number of labels changed.  */
  len = labels.length ();
  if (len < branch_num - 1)
    {
      bitmap target_blocks;
      edge_iterator ei;
      edge e;

      /* Corner case: *all* case labels have been removed as being
	 out-of-range for INDEX_TYPE.  Push one label and let the
	 CFG cleanups deal with this further.  */
      if (len == 0)
	{
	  tree label, elt;

	  label = CASE_LABEL (gimple_switch_default_label (stmt));
	  elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
	  labels.quick_push (elt);
	  len = 1;
	}

      for (i = 0; i < labels.length (); i++)
	gimple_switch_set_label (stmt, i + 1, labels[i]);
      for (i++ ; i < branch_num; i++)
	gimple_switch_set_label (stmt, i, NULL_TREE);
      gimple_switch_set_num_labels (stmt, len + 1);

      /* Cleanup any edges that are now dead.  */
      target_blocks = BITMAP_ALLOC (NULL);
      for (i = 0; i < gimple_switch_num_labels (stmt); i++)
	{
	  tree elt = gimple_switch_label (stmt, i);
	  basic_block target = label_to_block (CASE_LABEL (elt));
	  bitmap_set_bit (target_blocks, target->index);
	}
      for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
	{
	  if (! bitmap_bit_p (target_blocks, e->dest->index))
	    {
	      remove_edge (e);
	      cfg_changed = true;
	      free_dominance_info (CDI_DOMINATORS);
	    }
	  else
	    ei_next (&ei);
	}
      BITMAP_FREE (target_blocks);
    }
}

/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */

static bool
simplify_gimple_switch (gswitch *stmt)
{
  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  tree cond = gimple_switch_index (stmt);
  if (TREE_CODE (cond) == SSA_NAME)
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (cond);
      if (gimple_assign_cast_p (def_stmt))
	{
	  tree def = gimple_assign_rhs1 (def_stmt);
	  if (TREE_CODE (def) != SSA_NAME)
	    return false;

	  /* If we have an extension or sign-change that preserves the
	     values we check against then we can copy the source value into
	     the switch.  */
	  tree ti = TREE_TYPE (def);
	  if (INTEGRAL_TYPE_P (ti)
	      && TYPE_PRECISION (ti) <= TYPE_PRECISION (TREE_TYPE (cond)))
	    {
	      size_t n = gimple_switch_num_labels (stmt);
	      tree min = NULL_TREE, max = NULL_TREE;
	      if (n > 1)
		{
		  min = CASE_LOW (gimple_switch_label (stmt, 1));
		  if (CASE_HIGH (gimple_switch_label (stmt, n - 1)))
		    max = CASE_HIGH (gimple_switch_label (stmt, n - 1));
		  else
		    max = CASE_LOW (gimple_switch_label (stmt, n - 1));
		}
	      if ((!min || int_fits_type_p (min, ti))
		  && (!max || int_fits_type_p (max, ti)))
		{
		  gimple_switch_set_index (stmt, def);
		  simplify_gimple_switch_label_vec (stmt, ti);
		  update_stmt (stmt);
		  return true;
		}
	    }
	}
    }

  return false;
}
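
/* Illustrative example: with

     unsigned char c_1;
     int i_2 = (int) c_1;
     switch (i_2) { case 0: ... case 9: ... default: ... }

   all case values fit in unsigned char, so the cast is removed and
   the switch operates on c_1 directly;
   simplify_gimple_switch_label_vec then acts as a safety net,
   dropping any case labels (and their edges) that cannot be
   represented in the narrower type.  */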

/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL_TREE.  */

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple *stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
	 twice, to handle ADDR_EXPR directly in p1/p2,
	 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
	 on definition's stmt RHS.  Iterate a few extra times.  */
      j = 0;
      do
	{
	  if (!POINTER_TYPE_P (TREE_TYPE (p)))
	    break;
	  if (TREE_CODE (p) == ADDR_EXPR)
	    {
	      tree q = TREE_OPERAND (p, 0);
	      HOST_WIDE_INT offset;
	      tree base = get_addr_base_and_unit_offset (q, &offset);
	      if (base)
		{
		  q = base;
		  if (offset)
		    off = size_binop (PLUS_EXPR, off, size_int (offset));
		}
	      if (TREE_CODE (q) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
		{
		  p = TREE_OPERAND (q, 0);
		  off = size_binop (PLUS_EXPR, off,
				    wide_int_to_tree (sizetype,
						      mem_ref_offset (q)));
		}
	      else
		{
		  exps[i][j] = q;
		  offs[i][j++] = off;
		  break;
		}
	    }
	  if (TREE_CODE (p) != SSA_NAME)
	    break;
	  exps[i][j] = p;
	  offs[i][j++] = off;
	  if (j == CPD_ITERATIONS)
	    break;
	  stmt = SSA_NAME_DEF_STMT (p);
	  if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
	    break;
	  code = gimple_assign_rhs_code (stmt);
	  if (code == POINTER_PLUS_EXPR)
	    {
	      if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
		break;
	      off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
	      p = gimple_assign_rhs1 (stmt);
	    }
	  else if (code == ADDR_EXPR || CONVERT_EXPR_CODE_P (code))
	    p = gimple_assign_rhs1 (stmt);
	  else
	    break;
	}
      while (1);
      cnt[i] = j;
    }

  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
	return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}
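
/* Illustrative example: for

     p_1 = &buf[0];
     p_2 = p_1 p+ 4;
     p_3 = p_2 p+ 6;

   constant_pointer_difference (p_1, p_3) walks both chains, finds the
   common base p_1 and returns the constant 10, while pointers with no
   common base within CPD_ITERATIONS steps yield NULL_TREE.  */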

/* *GSI_P is a GIMPLE_CALL to a builtin function.
   Optimize
   memcpy (p, "abcd", 4);
   memset (p + 4, ' ', 3);
   into a single
   memcpy (p, "abcd   ", 7);
   call, if the latter can be stored by pieces during expansion.  */

static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
  gimple *stmt1, *stmt2 = gsi_stmt (*gsi_p);
  tree vuse = gimple_vuse (stmt2);
  if (vuse == NULL)
    return false;
  stmt1 = SSA_NAME_DEF_STMT (vuse);

  switch (DECL_FUNCTION_CODE (callee2))
    {
    case BUILT_IN_MEMSET:
      if (gimple_call_num_args (stmt2) != 3
	  || gimple_call_lhs (stmt2)
	  || CHAR_BIT != 8
	  || BITS_PER_UNIT != 8)
	break;
      else
	{
	  tree callee1;
	  tree ptr1, src1, str1, off1, len1, lhs1;
	  tree ptr2 = gimple_call_arg (stmt2, 0);
	  tree val2 = gimple_call_arg (stmt2, 1);
	  tree len2 = gimple_call_arg (stmt2, 2);
	  tree diff, vdef, new_str_cst;
	  gimple *use_stmt;
	  unsigned int ptr1_align;
	  unsigned HOST_WIDE_INT src_len;
	  char *src_buf;
	  use_operand_p use_p;

	  if (!tree_fits_shwi_p (val2)
	      || !tree_fits_uhwi_p (len2)
	      || compare_tree_int (len2, 1024) == 1)
	    break;
	  if (is_gimple_call (stmt1))
	    {
	      /* If first stmt is a call, it needs to be memcpy
		 or mempcpy, with string literal as second argument and
		 constant length.  */
	      callee1 = gimple_call_fndecl (stmt1);
	      if (callee1 == NULL_TREE
		  || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
		  || gimple_call_num_args (stmt1) != 3)
		break;
	      if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
		  && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
		break;
	      ptr1 = gimple_call_arg (stmt1, 0);
	      src1 = gimple_call_arg (stmt1, 1);
	      len1 = gimple_call_arg (stmt1, 2);
	      lhs1 = gimple_call_lhs (stmt1);
	      if (!tree_fits_uhwi_p (len1))
		break;
	      str1 = string_constant (src1, &off1);
	      if (str1 == NULL_TREE)
		break;
	      if (!tree_fits_uhwi_p (off1)
		  || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
		  || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
					     - tree_to_uhwi (off1)) > 0
		  || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
		  || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
		     != TYPE_MODE (char_type_node))
		break;
	    }
	  else if (gimple_assign_single_p (stmt1))
	    {
	      /* Otherwise look for length 1 memcpy optimized into
		 assignment.  */
	      ptr1 = gimple_assign_lhs (stmt1);
	      src1 = gimple_assign_rhs1 (stmt1);
	      if (TREE_CODE (ptr1) != MEM_REF
		  || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
		  || !tree_fits_shwi_p (src1))
		break;
	      ptr1 = build_fold_addr_expr (ptr1);
	      callee1 = NULL_TREE;
	      len1 = size_one_node;
	      lhs1 = NULL_TREE;
	      off1 = size_zero_node;
	      str1 = NULL_TREE;
	    }
	  else
	    break;

	  diff = constant_pointer_difference (ptr1, ptr2);
	  if (diff == NULL && lhs1 != NULL)
	    {
	      diff = constant_pointer_difference (lhs1, ptr2);
	      if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
		  && diff != NULL)
		diff = size_binop (PLUS_EXPR, diff,
				   fold_convert (sizetype, len1));
	    }
	  /* If the difference between the second and first destination pointer
	     is not constant, or is bigger than the memcpy length, bail out.  */
	  if (diff == NULL
	      || !tree_fits_uhwi_p (diff)
	      || tree_int_cst_lt (len1, diff)
	      || compare_tree_int (diff, 1024) == 1)
	    break;

	  /* Use the maximum of difference plus memset length and memcpy
	     length as the new memcpy length; if it is too big, bail out.  */
	  src_len = tree_to_uhwi (diff);
	  src_len += tree_to_uhwi (len2);
	  if (src_len < tree_to_uhwi (len1))
	    src_len = tree_to_uhwi (len1);
	  if (src_len > 1024)
	    break;

	  /* If the mempcpy value is used elsewhere, bail out, as mempcpy
	     with a bigger length will return a different result.  */
	  if (lhs1 != NULL_TREE
	      && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
	      && (TREE_CODE (lhs1) != SSA_NAME
		  || !single_imm_use (lhs1, &use_p, &use_stmt)
		  || use_stmt != stmt2))
	    break;

	  /* If anything reads memory in between the memcpy and memset
	     calls, the modified memcpy call might change it.  */
	  vdef = gimple_vdef (stmt1);
	  if (vdef != NULL
	      && (!single_imm_use (vdef, &use_p, &use_stmt)
		  || use_stmt != stmt2))
	    break;

	  ptr1_align = get_pointer_alignment (ptr1);
	  /* Construct the new source string literal.  */
	  src_buf = XALLOCAVEC (char, src_len + 1);
	  if (callee1)
	    memcpy (src_buf,
		    TREE_STRING_POINTER (str1) + tree_to_uhwi (off1),
		    tree_to_uhwi (len1));
	  else
	    src_buf[0] = tree_to_shwi (src1);
	  memset (src_buf + tree_to_uhwi (diff),
		  tree_to_shwi (val2), tree_to_uhwi (len2));
	  src_buf[src_len] = '\0';
	  /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
	     handles embedded '\0's.  */
	  if (strlen (src_buf) != src_len)
	    break;
	  rtl_profile_for_bb (gimple_bb (stmt2));
	  /* If the new memcpy wouldn't be emitted by storing the literal
	     by pieces, this optimization might enlarge .rodata too much,
	     as commonly used string literals couldn't be shared any
	     longer.  */
	  if (!can_store_by_pieces (src_len,
				    builtin_strncpy_read_str,
				    src_buf, ptr1_align, false))
	    break;

	  new_str_cst = build_string_literal (src_len, src_buf);
	  if (callee1)
	    {
	      /* If STMT1 is a mem{,p}cpy call, adjust it and remove
		 the memset call.  */
	      if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
		gimple_call_set_lhs (stmt1, NULL_TREE);
	      gimple_call_set_arg (stmt1, 1, new_str_cst);
	      gimple_call_set_arg (stmt1, 2,
				   build_int_cst (TREE_TYPE (len1), src_len));
	      update_stmt (stmt1);
	      unlink_stmt_vdef (stmt2);
	      gsi_remove (gsi_p, true);
	      fwprop_invalidate_lattice (gimple_get_lhs (stmt2));
	      release_defs (stmt2);
	      if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
		{
		  fwprop_invalidate_lattice (lhs1);
		  release_ssa_name (lhs1);
		}
	      return true;
	    }
	  else
	    {
	      /* Otherwise, if STMT1 is a length 1 memcpy optimized into
		 an assignment, remove STMT1 and change the memset call
		 into a memcpy call.  */
	      gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);

	      if (!is_gimple_val (ptr1))
		ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
						 true, GSI_SAME_STMT);
	      gimple_call_set_fndecl (stmt2,
				      builtin_decl_explicit (BUILT_IN_MEMCPY));
	      gimple_call_set_arg (stmt2, 0, ptr1);
	      gimple_call_set_arg (stmt2, 1, new_str_cst);
	      gimple_call_set_arg (stmt2, 2,
				   build_int_cst (TREE_TYPE (len2), src_len));
	      unlink_stmt_vdef (stmt1);
	      gsi_remove (&gsi, true);
	      fwprop_invalidate_lattice (gimple_get_lhs (stmt1));
	      release_defs (stmt1);
	      update_stmt (stmt2);
	      return false;
	    }
	}
      break;
    default:
      break;
    }
  return false;
}

/* Given an ssa_name NAME, see if it was defined by an assignment and,
   if so, set CODE to the code of the defining statement, ARG1 to its
   first rhs operand and ARG2 to its second rhs operand.  */

static inline void
defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
{
  gimple *def;
  enum tree_code code1;
  tree arg11;
  tree arg21;
  tree arg31;
  enum gimple_rhs_class grhs_class;

  code1 = TREE_CODE (name);
  arg11 = name;
  arg21 = NULL_TREE;
  arg31 = NULL_TREE;
  grhs_class = get_gimple_rhs_class (code1);

  if (code1 == SSA_NAME)
    {
      def = SSA_NAME_DEF_STMT (name);

      if (def && is_gimple_assign (def)
	  && can_propagate_from (def))
	{
	  code1 = gimple_assign_rhs_code (def);
	  arg11 = gimple_assign_rhs1 (def);
	  arg21 = gimple_assign_rhs2 (def);
	  arg31 = gimple_assign_rhs3 (def);
	}
    }
  else if (grhs_class != GIMPLE_SINGLE_RHS)
    code1 = ERROR_MARK;

  *code = code1;
  *arg1 = arg11;
  if (arg2)
    *arg2 = arg21;
  if (arg31)
    *code = ERROR_MARK;
}
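
/* Illustrative example: if NAME is a_1 and its definition is

     a_1 = b_2 << 3;

   defcodefor_name sets *CODE to LSHIFT_EXPR, *ARG1 to b_2 and *ARG2
   to the constant 3.  For a name without a usable defining assignment
   the outputs simply describe NAME itself.  */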


/* Recognize rotation patterns.  Return true if a transformation
   was applied, otherwise return false.

   We are looking for X with unsigned type T with bitsize B, OP being
   +, | or ^, some type T2 wider than T and
   (X << CNT1) OP (X >> CNT2)				iff CNT1 + CNT2 == B
   ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2))	iff CNT1 + CNT2 == B
   (X << Y) OP (X >> (B - Y))
   (X << (int) Y) OP (X >> (int) (B - Y))
   ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
   ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
   (X << Y) | (X >> ((-Y) & (B - 1)))
   (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
   ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
   ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))

   and transform these into:
   X r<< CNT1
   X r<< Y

   Note, in the patterns with T2 type, the type of OP operands
   might even be a signed type, but should have precision B.  */
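
/* For reference, an illustrative C-level instance of the last pattern
   above (B == 32, assuming a 32-bit unsigned int):

     unsigned int
     rotl32 (unsigned int x, unsigned int n)
     {
       return (x << n) | (x >> ((-n) & 31));
     }

   matches (X << Y) | (X >> ((-Y) & (B - 1))) and is turned into a
   single x r<< n rotate; unlike (x << n) | (x >> (32 - n)), it is
   well defined for n == 0.  */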

static bool
simplify_rotate (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg[2], rtype, rotcnt = NULL_TREE;
  tree def_arg1[2], def_arg2[2];
  enum tree_code def_code[2];
  tree lhs;
  int i;
  bool swapped_p = false;
  gimple *g;

  arg[0] = gimple_assign_rhs1 (stmt);
  arg[1] = gimple_assign_rhs2 (stmt);
  rtype = TREE_TYPE (arg[0]);

  /* Only create rotates in complete modes.  Other cases are not
     expanded properly.  */
  if (!INTEGRAL_TYPE_P (rtype)
      || TYPE_PRECISION (rtype) != GET_MODE_PRECISION (TYPE_MODE (rtype)))
    return false;

  for (i = 0; i < 2; i++)
    defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);

  /* Look through narrowing conversions.  */
  if (CONVERT_EXPR_CODE_P (def_code[0])
      && CONVERT_EXPR_CODE_P (def_code[1])
      && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
      && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
      && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
	 == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
      && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) > TYPE_PRECISION (rtype)
      && has_single_use (arg[0])
      && has_single_use (arg[1]))
    {
      for (i = 0; i < 2; i++)
	{
	  arg[i] = def_arg1[i];
	  defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
	}
    }

  /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR.  */
  for (i = 0; i < 2; i++)
    if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
      return false;
    else if (!has_single_use (arg[i]))
      return false;
  if (def_code[0] == def_code[1])
    return false;

  /* If we've looked through narrowing conversions before, look through
     widening conversions from unsigned type with the same precision
     as rtype here.  */
  if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
    for (i = 0; i < 2; i++)
      {
	tree tem;
	enum tree_code code;
	defcodefor_name (def_arg1[i], &code, &tem, NULL);
	if (!CONVERT_EXPR_CODE_P (code)
	    || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
	    || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
	  return false;
	def_arg1[i] = tem;
      }
  /* Both shifts have to use the same first operand.  */
  if (TREE_CODE (def_arg1[0]) != SSA_NAME || def_arg1[0] != def_arg1[1])
    return false;
  if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
    return false;

  /* CNT1 + CNT2 == B case above.  */
  if (tree_fits_uhwi_p (def_arg2[0])
      && tree_fits_uhwi_p (def_arg2[1])
      && tree_to_uhwi (def_arg2[0])
	 + tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
    rotcnt = def_arg2[0];
  else if (TREE_CODE (def_arg2[0]) != SSA_NAME
	   || TREE_CODE (def_arg2[1]) != SSA_NAME)
    return false;
  else
    {
      tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
      enum tree_code cdef_code[2];
1599       /* Look through conversions of the shift count argument.
1600 	 The C/C++ front ends cast any shift count argument to
1601 	 integer_type_node.  The only problematic case would be a shift count
1602 	 type whose maximum value is no larger than the number of bits in rtype.  */
1603       for (i = 0; i < 2; i++)
1604 	{
1605 	  def_arg2_alt[i] = def_arg2[i];
1606 	  defcodefor_name (def_arg2[i], &cdef_code[i],
1607 			   &cdef_arg1[i], &cdef_arg2[i]);
1608 	  if (CONVERT_EXPR_CODE_P (cdef_code[i])
1609 	      && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
1610 	      && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
1611 		 > floor_log2 (TYPE_PRECISION (rtype))
1612 	      && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
1613 		 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (cdef_arg1[i]))))
1614 	    {
1615 	      def_arg2_alt[i] = cdef_arg1[i];
1616 	      defcodefor_name (def_arg2_alt[i], &cdef_code[i],
1617 			       &cdef_arg1[i], &cdef_arg2[i]);
1618 	    }
1619 	}
1620       for (i = 0; i < 2; i++)
1621 	/* Check for one shift count being Y and the other B - Y,
1622 	   with optional casts.  */
1623 	if (cdef_code[i] == MINUS_EXPR
1624 	    && tree_fits_shwi_p (cdef_arg1[i])
1625 	    && tree_to_shwi (cdef_arg1[i]) == TYPE_PRECISION (rtype)
1626 	    && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
1627 	  {
1628 	    tree tem;
1629 	    enum tree_code code;
1630 
1631 	    if (cdef_arg2[i] == def_arg2[1 - i]
1632 		|| cdef_arg2[i] == def_arg2_alt[1 - i])
1633 	      {
1634 		rotcnt = cdef_arg2[i];
1635 		break;
1636 	      }
1637 	    defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
1638 	    if (CONVERT_EXPR_CODE_P (code)
1639 		&& INTEGRAL_TYPE_P (TREE_TYPE (tem))
1640 		&& TYPE_PRECISION (TREE_TYPE (tem))
1641 		 > floor_log2 (TYPE_PRECISION (rtype))
1642 		&& TYPE_PRECISION (TREE_TYPE (tem))
1643 		 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
1644 		&& (tem == def_arg2[1 - i]
1645 		    || tem == def_arg2_alt[1 - i]))
1646 	      {
1647 		rotcnt = tem;
1648 		break;
1649 	      }
1650 	  }
1651 	/* The above sequence isn't safe when Y is 0: X >> (B - 0) shifts
1652 	   by the full bitsize, which is undefined behavior.  This
1653 	   alternative is safe even for a rotation count of 0, since one
1654 	   shift count is Y and the other, (-Y) & (B - 1), is 0 when Y is 0.  */
1655 	else if (cdef_code[i] == BIT_AND_EXPR
1656 		 && tree_fits_shwi_p (cdef_arg2[i])
1657 		 && tree_to_shwi (cdef_arg2[i])
1658 		    == TYPE_PRECISION (rtype) - 1
1659 		 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
1660 		 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
1661 	  {
1662 	    tree tem;
1663 	    enum tree_code code;
1664 
1665 	    defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
1666 	    if (CONVERT_EXPR_CODE_P (code)
1667 		&& INTEGRAL_TYPE_P (TREE_TYPE (tem))
1668 		&& TYPE_PRECISION (TREE_TYPE (tem))
1669 		 > floor_log2 (TYPE_PRECISION (rtype))
1670 		&& TYPE_PRECISION (TREE_TYPE (tem))
1671 		 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem))))
1672 	      defcodefor_name (tem, &code, &tem, NULL);
1673 
1674 	    if (code == NEGATE_EXPR)
1675 	      {
1676 		if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
1677 		  {
1678 		    rotcnt = tem;
1679 		    break;
1680 		  }
1681 		defcodefor_name (tem, &code, &tem, NULL);
1682 		if (CONVERT_EXPR_CODE_P (code)
1683 		    && INTEGRAL_TYPE_P (TREE_TYPE (tem))
1684 		    && TYPE_PRECISION (TREE_TYPE (tem))
1685 		       > floor_log2 (TYPE_PRECISION (rtype))
1686 		    && TYPE_PRECISION (TREE_TYPE (tem))
1687 		       == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (tem)))
1688 		    && (tem == def_arg2[1 - i]
1689 			|| tem == def_arg2_alt[1 - i]))
1690 		  {
1691 		    rotcnt = tem;
1692 		    break;
1693 		  }
1694 	      }
1695 	  }
1696       if (rotcnt == NULL_TREE)
1697 	return false;
1698       swapped_p = i != 1;
1699     }
1700 
1701   if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
1702 				  TREE_TYPE (rotcnt)))
1703     {
1704       g = gimple_build_assign (make_ssa_name (TREE_TYPE (def_arg2[0])),
1705 			       NOP_EXPR, rotcnt);
1706       gsi_insert_before (gsi, g, GSI_SAME_STMT);
1707       rotcnt = gimple_assign_lhs (g);
1708     }
1709   lhs = gimple_assign_lhs (stmt);
1710   if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
1711     lhs = make_ssa_name (TREE_TYPE (def_arg1[0]));
1712   g = gimple_build_assign (lhs,
1713 			   ((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
1714 			   ? LROTATE_EXPR : RROTATE_EXPR, def_arg1[0], rotcnt);
1715   if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
1716     {
1717       gsi_insert_before (gsi, g, GSI_SAME_STMT);
1718       g = gimple_build_assign (gimple_assign_lhs (stmt), NOP_EXPR, lhs);
1719     }
1720   gsi_replace (gsi, g, false);
1721   return true;
1722 }
1723 
1724 /* Combine an element access with a shuffle.  Returns true if any
1725    changes were made, otherwise false.  */
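/* A sketch of the VEC_PERM_EXPR case below (illustrative GIMPLE with
   made-up names; 4-element vectors of 32-bit elements assumed):

     v_2 = VEC_PERM_EXPR <a_1, b_1, { 4, 5, 6, 7 }>;
     x_3 = BIT_FIELD_REF <v_2, 32, 64>;

   Mask element 2 selects combined element 6, i.e. element 2 of b_1,
   so the access is rewritten to

     x_3 = BIT_FIELD_REF <b_1, 32, 64>;  */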
1726 
1727 static bool
1728 simplify_bitfield_ref (gimple_stmt_iterator *gsi)
1729 {
1730   gimple *stmt = gsi_stmt (*gsi);
1731   gimple *def_stmt;
1732   tree op, op0, op1, op2;
1733   tree elem_type;
1734   unsigned idx, n, size;
1735   enum tree_code code;
1736 
1737   op = gimple_assign_rhs1 (stmt);
1738   gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);
1739 
1740   op0 = TREE_OPERAND (op, 0);
1741   if (TREE_CODE (op0) != SSA_NAME
1742       || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
1743     return false;
1744 
1745   def_stmt = get_prop_source_stmt (op0, false, NULL);
1746   if (!def_stmt || !can_propagate_from (def_stmt))
1747     return false;
1748 
1749   op1 = TREE_OPERAND (op, 1);
1750   op2 = TREE_OPERAND (op, 2);
1751   code = gimple_assign_rhs_code (def_stmt);
1752 
1753   if (code == CONSTRUCTOR)
1754     {
1755       tree tem = fold_ternary (BIT_FIELD_REF, TREE_TYPE (op),
1756 			       gimple_assign_rhs1 (def_stmt), op1, op2);
1757       if (!tem || !valid_gimple_rhs_p (tem))
1758 	return false;
1759       gimple_assign_set_rhs_from_tree (gsi, tem);
1760       update_stmt (gsi_stmt (*gsi));
1761       return true;
1762     }
1763 
1764   elem_type = TREE_TYPE (TREE_TYPE (op0));
1765   if (TREE_TYPE (op) != elem_type)
1766     return false;
1767 
1768   size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
1769   n = TREE_INT_CST_LOW (op1) / size;
1770   if (n != 1)
1771     return false;
1772   idx = TREE_INT_CST_LOW (op2) / size;
1773 
1774   if (code == VEC_PERM_EXPR)
1775     {
1776       tree p, m, tem;
1777       unsigned nelts;
1778       m = gimple_assign_rhs3 (def_stmt);
1779       if (TREE_CODE (m) != VECTOR_CST)
1780 	return false;
1781       nelts = VECTOR_CST_NELTS (m);
1782       idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
1783       idx %= 2 * nelts;
1784       if (idx < nelts)
1785 	{
1786 	  p = gimple_assign_rhs1 (def_stmt);
1787 	}
1788       else
1789 	{
1790 	  p = gimple_assign_rhs2 (def_stmt);
1791 	  idx -= nelts;
1792 	}
1793       tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
1794 		    unshare_expr (p), op1, bitsize_int (idx * size));
1795       gimple_assign_set_rhs1 (stmt, tem);
1796       fold_stmt (gsi);
1797       update_stmt (gsi_stmt (*gsi));
1798       return true;
1799     }
1800 
1801   return false;
1802 }
1803 
1804 /* Determine whether applying the 2 permutations (mask1 then mask2)
1805    gives back one of the inputs.  */
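/* For example (exposition only), with 4-element masks: composing
   mask1 = { 4, 5, 6, 7 } with mask2 = { 0, 1, 2, 3 } selects element
   i + nelts for every i, i.e. the whole second input, so the result
   is 2.  A result of 1 means the first input comes back intact; 0
   means neither does.  */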
1806 
1807 static int
1808 is_combined_permutation_identity (tree mask1, tree mask2)
1809 {
1810   tree mask;
1811   unsigned int nelts, i, j;
1812   bool maybe_identity1 = true;
1813   bool maybe_identity2 = true;
1814 
1815   gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
1816 		       && TREE_CODE (mask2) == VECTOR_CST);
1817   mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
1818   gcc_assert (TREE_CODE (mask) == VECTOR_CST);
1819 
1820   nelts = VECTOR_CST_NELTS (mask);
1821   for (i = 0; i < nelts; i++)
1822     {
1823       tree val = VECTOR_CST_ELT (mask, i);
1824       gcc_assert (TREE_CODE (val) == INTEGER_CST);
1825       j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
1826       if (j == i)
1827 	maybe_identity2 = false;
1828       else if (j == i + nelts)
1829 	maybe_identity1 = false;
1830       else
1831 	return 0;
1832     }
1833   return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
1834 }
1835 
1836 /* Combine a shuffle with its arguments.  Returns 1 if any changes were
1837    made, 2 if cfg-cleanup needs to run, and 0 otherwise.  */
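/* A sketch of the two-consecutive-shuffles case (illustrative GIMPLE
   with made-up names; 4-element vectors assumed):

     v_2 = VEC_PERM_EXPR <a_1, b_1, { 4, 5, 6, 7 }>;
     v_3 = VEC_PERM_EXPR <v_2, v_2, { 0, 1, 2, 3 }>;

   The combined permutation is the identity on the second input, so
   the second statement is rewritten into the plain copy v_3 = b_1.  */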
1838 
1839 static int
1840 simplify_permutation (gimple_stmt_iterator *gsi)
1841 {
1842   gimple *stmt = gsi_stmt (*gsi);
1843   gimple *def_stmt;
1844   tree op0, op1, op2, op3, arg0, arg1;
1845   enum tree_code code;
1846   bool single_use_op0 = false;
1847 
1848   gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);
1849 
1850   op0 = gimple_assign_rhs1 (stmt);
1851   op1 = gimple_assign_rhs2 (stmt);
1852   op2 = gimple_assign_rhs3 (stmt);
1853 
1854   if (TREE_CODE (op2) != VECTOR_CST)
1855     return 0;
1856 
1857   if (TREE_CODE (op0) == VECTOR_CST)
1858     {
1859       code = VECTOR_CST;
1860       arg0 = op0;
1861     }
1862   else if (TREE_CODE (op0) == SSA_NAME)
1863     {
1864       def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
1865       if (!def_stmt || !can_propagate_from (def_stmt))
1866 	return 0;
1867 
1868       code = gimple_assign_rhs_code (def_stmt);
1869       arg0 = gimple_assign_rhs1 (def_stmt);
1870     }
1871   else
1872     return 0;
1873 
1874   /* Two consecutive shuffles.  */
1875   if (code == VEC_PERM_EXPR)
1876     {
1877       tree orig;
1878       int ident;
1879 
1880       if (op0 != op1)
1881 	return 0;
1882       op3 = gimple_assign_rhs3 (def_stmt);
1883       if (TREE_CODE (op3) != VECTOR_CST)
1884 	return 0;
1885       ident = is_combined_permutation_identity (op3, op2);
1886       if (!ident)
1887 	return 0;
1888       orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
1889 			  : gimple_assign_rhs2 (def_stmt);
1890       gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
1891       gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
1892       gimple_set_num_ops (stmt, 2);
1893       update_stmt (stmt);
1894       return remove_prop_source_from_use (op0) ? 2 : 1;
1895     }
1896 
1897   /* Shuffle of a constructor.  */
1898   else if (code == CONSTRUCTOR || code == VECTOR_CST)
1899     {
1900       tree opt;
1901       bool ret = false;
1902       if (op0 != op1)
1903 	{
1904 	  if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
1905 	    return 0;
1906 
1907 	  if (TREE_CODE (op1) == VECTOR_CST)
1908 	    arg1 = op1;
1909 	  else if (TREE_CODE (op1) == SSA_NAME)
1910 	    {
1911 	      enum tree_code code2;
1912 
1913 	      gimple *def_stmt2 = get_prop_source_stmt (op1, true, NULL);
1914 	      if (!def_stmt2 || !can_propagate_from (def_stmt2))
1915 		return 0;
1916 
1917 	      code2 = gimple_assign_rhs_code (def_stmt2);
1918 	      if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
1919 		return 0;
1920 	      arg1 = gimple_assign_rhs1 (def_stmt2);
1921 	    }
1922 	  else
1923 	    return 0;
1924 	}
1925       else
1926 	{
1927 	  /* Already used twice in this statement.  */
1928 	  if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
1929 	    return 0;
1930 	  arg1 = arg0;
1931 	}
1932       opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
1933       if (!opt
1934 	  || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
1935 	return 0;
1936       gimple_assign_set_rhs_from_tree (gsi, opt);
1937       update_stmt (gsi_stmt (*gsi));
1938       if (TREE_CODE (op0) == SSA_NAME)
1939 	ret = remove_prop_source_from_use (op0);
1940       if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
1941 	ret |= remove_prop_source_from_use (op1);
1942       return ret ? 2 : 1;
1943     }
1944 
1945   return 0;
1946 }
1947 
1948 /* Recognize a VEC_PERM_EXPR.  Returns true if there were any changes.  */
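/* For example (exposition only; names made up), a constructor whose
   elements are all BIT_FIELD_REFs of one source vector v_1 of four
   32-bit elements:

     x_2 = BIT_FIELD_REF <v_1, 32, 32>;
     x_3 = BIT_FIELD_REF <v_1, 32, 0>;
     r_4 = {x_2, x_3, x_2, x_3};

   yields the selection vector { 1, 0, 1, 0 } and, provided the target
   supports that permutation, becomes

     r_4 = VEC_PERM_EXPR <v_1, v_1, { 1, 0, 1, 0 }>;  */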
1949 
1950 static bool
1951 simplify_vector_constructor (gimple_stmt_iterator *gsi)
1952 {
1953   gimple *stmt = gsi_stmt (*gsi);
1954   gimple *def_stmt;
1955   tree op, op2, orig, type, elem_type;
1956   unsigned elem_size, nelts, i;
1957   enum tree_code code, conv_code;
1958   constructor_elt *elt;
1959   unsigned char *sel;
1960   bool maybe_ident;
1961 
1962   gcc_checking_assert (gimple_assign_rhs_code (stmt) == CONSTRUCTOR);
1963 
1964   op = gimple_assign_rhs1 (stmt);
1965   type = TREE_TYPE (op);
1966   gcc_checking_assert (TREE_CODE (type) == VECTOR_TYPE);
1967 
1968   nelts = TYPE_VECTOR_SUBPARTS (type);
1969   elem_type = TREE_TYPE (type);
1970   elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
1971 
1972   sel = XALLOCAVEC (unsigned char, nelts);
1973   orig = NULL;
1974   conv_code = ERROR_MARK;
1975   maybe_ident = true;
1976   FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
1977     {
1978       tree ref, op1;
1979 
1980       if (i >= nelts)
1981 	return false;
1982 
1983       if (TREE_CODE (elt->value) != SSA_NAME)
1984 	return false;
1985       def_stmt = get_prop_source_stmt (elt->value, false, NULL);
1986       if (!def_stmt)
1987 	return false;
1988       code = gimple_assign_rhs_code (def_stmt);
1989       if (code == FLOAT_EXPR
1990 	  || code == FIX_TRUNC_EXPR)
1991 	{
1992 	  op1 = gimple_assign_rhs1 (def_stmt);
1993 	  if (conv_code == ERROR_MARK)
1994 	    {
1995 	      if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (elt->value)))
1996 		  != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op1))))
1997 		return false;
1998 	      conv_code = code;
1999 	    }
2000 	  else if (conv_code != code)
2001 	    return false;
2002 	  if (TREE_CODE (op1) != SSA_NAME)
2003 	    return false;
2004 	  def_stmt = SSA_NAME_DEF_STMT (op1);
2005 	  if (! is_gimple_assign (def_stmt))
2006 	    return false;
2007 	  code = gimple_assign_rhs_code (def_stmt);
2008 	}
2009       if (code != BIT_FIELD_REF)
2010 	return false;
2011       op1 = gimple_assign_rhs1 (def_stmt);
2012       ref = TREE_OPERAND (op1, 0);
2013       if (orig)
2014 	{
2015 	  if (ref != orig)
2016 	    return false;
2017 	}
2018       else
2019 	{
2020 	  if (TREE_CODE (ref) != SSA_NAME)
2021 	    return false;
2022 	  if (! VECTOR_TYPE_P (TREE_TYPE (ref))
2023 	      || ! useless_type_conversion_p (TREE_TYPE (op1),
2024 					      TREE_TYPE (TREE_TYPE (ref))))
2025 	    return false;
2026 	  orig = ref;
2027 	}
2028       if (TREE_INT_CST_LOW (TREE_OPERAND (op1, 1)) != elem_size)
2029 	return false;
2030       sel[i] = TREE_INT_CST_LOW (TREE_OPERAND (op1, 2)) / elem_size;
2031       if (sel[i] != i)
	maybe_ident = false;
2032     }
2033   if (i < nelts)
2034     return false;
2035 
2036   if (! VECTOR_TYPE_P (TREE_TYPE (orig))
2037       || (TYPE_VECTOR_SUBPARTS (type)
2038 	  != TYPE_VECTOR_SUBPARTS (TREE_TYPE (orig))))
2039     return false;
2040 
2041   tree tem;
2042   if (conv_code != ERROR_MARK
2043       && (! supportable_convert_operation (conv_code, type, TREE_TYPE (orig),
2044 					   &tem, &conv_code)
2045 	  || conv_code == CALL_EXPR))
2046     return false;
2047 
2048   if (maybe_ident)
2049     {
2050       if (conv_code == ERROR_MARK)
2051 	gimple_assign_set_rhs_from_tree (gsi, orig);
2052       else
2053 	gimple_assign_set_rhs_with_ops (gsi, conv_code, orig,
2054 					NULL_TREE, NULL_TREE);
2055     }
2056   else
2057     {
2058       tree mask_type, *mask_elts;
2059 
2060       if (!can_vec_perm_p (TYPE_MODE (type), false, sel))
2061 	return false;
2062       mask_type
2063 	= build_vector_type (build_nonstandard_integer_type (elem_size, 1),
2064 			     nelts);
2065       if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
2066 	  || GET_MODE_SIZE (TYPE_MODE (mask_type))
2067 	     != GET_MODE_SIZE (TYPE_MODE (type)))
2068 	return false;
2069       mask_elts = XALLOCAVEC (tree, nelts);
2070       for (i = 0; i < nelts; i++)
2071 	mask_elts[i] = build_int_cst (TREE_TYPE (mask_type), sel[i]);
2072       op2 = build_vector (mask_type, mask_elts);
2073       if (conv_code == ERROR_MARK)
2074 	gimple_assign_set_rhs_with_ops (gsi, VEC_PERM_EXPR, orig, orig, op2);
2075       else
2076 	{
2077 	  gimple *perm
2078 	    = gimple_build_assign (make_ssa_name (TREE_TYPE (orig)),
2079 				   VEC_PERM_EXPR, orig, orig, op2);
2080 	  orig = gimple_assign_lhs (perm);
2081 	  gsi_insert_before (gsi, perm, GSI_SAME_STMT);
2082 	  gimple_assign_set_rhs_with_ops (gsi, conv_code, orig,
2083 					  NULL_TREE, NULL_TREE);
2084 	}
2085     }
2086   update_stmt (gsi_stmt (*gsi));
2087   return true;
2088 }
2089 
2090 
2091 /* Primitive "lattice" function for gimple_simplify.  */
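/* For instance (illustrative), if the lattice records that _3 is a
   copy of _1, a simplification attempt on _5 = _3 + _4 proceeds as if
   the statement were _5 = _1 + _4; constants recorded in the lattice
   are substituted the same way.  */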
2092 
2093 static tree
2094 fwprop_ssa_val (tree name)
2095 {
2096   /* First valueize NAME.  */
2097   if (TREE_CODE (name) == SSA_NAME
2098       && SSA_NAME_VERSION (name) < lattice.length ())
2099     {
2100       tree val = lattice[SSA_NAME_VERSION (name)];
2101       if (val)
2102 	name = val;
2103     }
2104   /* We continue matching along SSA use-def edges for SSA names
2105      that are not single-use.  Currently there are no patterns
2106      that would cause any issues with that.  */
2107   return name;
2108 }
2109 
2110 /* Main entry point for the forward propagation and statement combine
2111    optimizer.  */
2112 
2113 namespace {
2114 
2115 const pass_data pass_data_forwprop =
2116 {
2117   GIMPLE_PASS, /* type */
2118   "forwprop", /* name */
2119   OPTGROUP_NONE, /* optinfo_flags */
2120   TV_TREE_FORWPROP, /* tv_id */
2121   ( PROP_cfg | PROP_ssa ), /* properties_required */
2122   0, /* properties_provided */
2123   0, /* properties_destroyed */
2124   0, /* todo_flags_start */
2125   TODO_update_ssa, /* todo_flags_finish */
2126 };
2127 
2128 class pass_forwprop : public gimple_opt_pass
2129 {
2130 public:
2131   pass_forwprop (gcc::context *ctxt)
2132     : gimple_opt_pass (pass_data_forwprop, ctxt)
2133   {}
2134 
2135   /* opt_pass methods: */
2136   opt_pass * clone () { return new pass_forwprop (m_ctxt); }
2137   virtual bool gate (function *) { return flag_tree_forwprop; }
2138   virtual unsigned int execute (function *);
2139 
2140 }; // class pass_forwprop
2141 
2142 unsigned int
2143 pass_forwprop::execute (function *fun)
2144 {
2145   unsigned int todoflags = 0;
2146 
2147   cfg_changed = false;
2148 
2149   /* Combine stmts with the stmts defining their operands.  Do that
2150      in an order that guarantees visiting SSA defs before SSA uses.  */
2151   lattice.create (num_ssa_names);
2152   lattice.quick_grow_cleared (num_ssa_names);
2153   int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (fun));
2154   int postorder_num = pre_and_rev_post_order_compute_fn (cfun, NULL,
2155 							 postorder, false);
2156   auto_vec<gimple *, 4> to_fixup;
2157   to_purge = BITMAP_ALLOC (NULL);
2158   for (int i = 0; i < postorder_num; ++i)
2159     {
2160       gimple_stmt_iterator gsi;
2161       basic_block bb = BASIC_BLOCK_FOR_FN (fun, postorder[i]);
2162 
2163       /* Propagate into PHIs and record degenerate ones in the lattice.  */
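      /* E.g. (illustrative) x_3 = PHI <x_1, x_1> is degenerate: x_3
	 gets lattice value x_1, and later matching looks through it.  */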
2164       for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
2165 	   gsi_next (&si))
2166 	{
2167 	  gphi *phi = si.phi ();
2168 	  tree res = gimple_phi_result (phi);
2169 	  if (virtual_operand_p (res))
2170 	    continue;
2171 
2172 	  use_operand_p use_p;
2173 	  ssa_op_iter it;
2174 	  tree first = NULL_TREE;
2175 	  bool all_same = true;
2176 	  FOR_EACH_PHI_ARG (use_p, phi, it, SSA_OP_USE)
2177 	    {
2178 	      tree use = USE_FROM_PTR (use_p);
2179 	      tree tem = fwprop_ssa_val (use);
2180 	      if (! first)
2181 		first = tem;
2182 	      else if (! operand_equal_p (first, tem, 0))
2183 		all_same = false;
2184 	      if (tem != use
2185 		  && may_propagate_copy (use, tem))
2186 		propagate_value (use_p, tem);
2187 	    }
2188 	  if (all_same)
2189 	    fwprop_set_lattice_val (res, first);
2190 	}
2191 
2192       /* Apply forward propagation to all stmts in the basic-block.
2193 	 Note we update GSI within the loop as necessary.  */
2194       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
2195 	{
2196 	  gimple *stmt = gsi_stmt (gsi);
2197 	  tree lhs, rhs;
2198 	  enum tree_code code;
2199 
2200 	  if (!is_gimple_assign (stmt))
2201 	    {
2202 	      gsi_next (&gsi);
2203 	      continue;
2204 	    }
2205 
2206 	  lhs = gimple_assign_lhs (stmt);
2207 	  rhs = gimple_assign_rhs1 (stmt);
2208 	  code = gimple_assign_rhs_code (stmt);
2209 	  if (TREE_CODE (lhs) != SSA_NAME
2210 	      || has_zero_uses (lhs))
2211 	    {
2212 	      gsi_next (&gsi);
2213 	      continue;
2214 	    }
2215 
2216 	  /* If this statement sets an SSA_NAME to an address,
2217 	     try to propagate the address into the uses of the SSA_NAME.  */
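	  /* Sketch (made-up names): given ptr_1 = &a.b; *ptr_1 = x_2;
	     the store can be rewritten as a.b = x_2; and the now-dead
	     address computation removed.  */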
2218 	  if (code == ADDR_EXPR
2219 	      /* Handle pointer conversions on invariant addresses
2220 		 as well, as this is valid gimple.  */
2221 	      || (CONVERT_EXPR_CODE_P (code)
2222 		  && TREE_CODE (rhs) == ADDR_EXPR
2223 		  && POINTER_TYPE_P (TREE_TYPE (lhs))))
2224 	    {
2225 	      tree base = get_base_address (TREE_OPERAND (rhs, 0));
2226 	      if ((!base
2227 		   || !DECL_P (base)
2228 		   || decl_address_invariant_p (base))
2229 		  && !stmt_references_abnormal_ssa_name (stmt)
2230 		  && forward_propagate_addr_expr (lhs, rhs, true))
2231 		{
2232 		  fwprop_invalidate_lattice (gimple_get_lhs (stmt));
2233 		  release_defs (stmt);
2234 		  gsi_remove (&gsi, true);
2235 		}
2236 	      else
2237 		gsi_next (&gsi);
2238 	    }
2239 	  else if (code == POINTER_PLUS_EXPR)
2240 	    {
2241 	      tree off = gimple_assign_rhs2 (stmt);
2242 	      if (TREE_CODE (off) == INTEGER_CST
2243 		  && can_propagate_from (stmt)
2244 		  && !simple_iv_increment_p (stmt)
2245 		  /* ???  Better adjust the interface to that function
2246 		     instead of building new trees here.  */
2247 		  && forward_propagate_addr_expr
2248 		       (lhs,
2249 			build1_loc (gimple_location (stmt),
2250 				    ADDR_EXPR, TREE_TYPE (rhs),
2251 				    fold_build2 (MEM_REF,
2252 						 TREE_TYPE (TREE_TYPE (rhs)),
2253 						 rhs,
2254 						 fold_convert (ptr_type_node,
2255 							       off))), true))
2256 		{
2257 		  fwprop_invalidate_lattice (gimple_get_lhs (stmt));
2258 		  release_defs (stmt);
2259 		  gsi_remove (&gsi, true);
2260 		}
2261 	      else if (is_gimple_min_invariant (rhs))
2262 		{
2263 		  /* Make sure to fold &a[0] + off_1 here.  */
2264 		  fold_stmt_inplace (&gsi);
2265 		  update_stmt (stmt);
2266 		  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2267 		    gsi_next (&gsi);
2268 		}
2269 	      else
2270 		gsi_next (&gsi);
2271 	    }
2272 	  else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
2273 		   && gimple_assign_load_p (stmt)
2274 		   && !gimple_has_volatile_ops (stmt)
2275 		   && (TREE_CODE (gimple_assign_rhs1 (stmt))
2276 		       != TARGET_MEM_REF)
2277 		   && !stmt_can_throw_internal (stmt))
2278 	    {
2279 	      /* Rewrite loads used only in real/imagpart extractions to
2280 	         component-wise loads.  */
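	      /* Sketch (made-up names): t_1 = *p_2 followed by
	         r_3 = REALPART_EXPR <t_1> becomes
	         r_3 = REALPART_EXPR <*p_2>, and the whole-complex load
	         goes away.  */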
2281 	      use_operand_p use_p;
2282 	      imm_use_iterator iter;
2283 	      bool rewrite = true;
2284 	      FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
2285 		{
2286 		  gimple *use_stmt = USE_STMT (use_p);
2287 		  if (is_gimple_debug (use_stmt))
2288 		    continue;
2289 		  if (!is_gimple_assign (use_stmt)
2290 		      || (gimple_assign_rhs_code (use_stmt) != REALPART_EXPR
2291 			  && gimple_assign_rhs_code (use_stmt) != IMAGPART_EXPR))
2292 		    {
2293 		      rewrite = false;
2294 		      break;
2295 		    }
2296 		}
2297 	      if (rewrite)
2298 		{
2299 		  gimple *use_stmt;
2300 		  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
2301 		    {
2302 		      if (is_gimple_debug (use_stmt))
2303 			{
2304 			  if (gimple_debug_bind_p (use_stmt))
2305 			    {
2306 			      gimple_debug_bind_reset_value (use_stmt);
2307 			      update_stmt (use_stmt);
2308 			    }
2309 			  continue;
2310 			}
2311 
2312 		      tree new_rhs = build1 (gimple_assign_rhs_code (use_stmt),
2313 					     TREE_TYPE (TREE_TYPE (rhs)),
2314 					     unshare_expr (rhs));
2315 		      gimple *new_stmt
2316 			= gimple_build_assign (gimple_assign_lhs (use_stmt),
2317 					       new_rhs);
2318 
2319 		      location_t loc = gimple_location (use_stmt);
2320 		      gimple_set_location (new_stmt, loc);
2321 		      gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
2322 		      unlink_stmt_vdef (use_stmt);
2323 		      gsi_remove (&gsi2, true);
2324 
2325 		      gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
2326 		    }
2327 
2328 		  release_defs (stmt);
2329 		  gsi_remove (&gsi, true);
2330 		}
2331 	      else
2332 		gsi_next (&gsi);
2333 	    }
2334 	  else if (code == COMPLEX_EXPR)
2335 	    {
2336 	      /* Rewrite stores of a single-use complex build expression
2337 	         to component-wise stores.  */
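	      /* Sketch (made-up names): t_1 = COMPLEX_EXPR <a_2, b_3>
	         followed by *p_4 = t_1 becomes
	         REALPART_EXPR <*p_4> = a_2; IMAGPART_EXPR <*p_4> = b_3;  */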
2338 	      use_operand_p use_p;
2339 	      gimple *use_stmt;
2340 	      if (single_imm_use (lhs, &use_p, &use_stmt)
2341 		  && gimple_store_p (use_stmt)
2342 		  && !gimple_has_volatile_ops (use_stmt)
2343 		  && is_gimple_assign (use_stmt)
2344 		  && (TREE_CODE (gimple_assign_lhs (use_stmt))
2345 		      != TARGET_MEM_REF))
2346 		{
2347 		  tree use_lhs = gimple_assign_lhs (use_stmt);
2348 		  tree new_lhs = build1 (REALPART_EXPR,
2349 					 TREE_TYPE (TREE_TYPE (use_lhs)),
2350 					 unshare_expr (use_lhs));
2351 		  gimple *new_stmt = gimple_build_assign (new_lhs, rhs);
2352 		  location_t loc = gimple_location (use_stmt);
2353 		  gimple_set_location (new_stmt, loc);
2354 		  gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
2355 		  gimple_set_vdef (new_stmt, make_ssa_name (gimple_vop (cfun)));
2356 		  SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
2357 		  gimple_set_vuse (use_stmt, gimple_vdef (new_stmt));
2358 		  gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
2359 		  gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);
2360 
2361 		  new_lhs = build1 (IMAGPART_EXPR,
2362 				    TREE_TYPE (TREE_TYPE (use_lhs)),
2363 				    unshare_expr (use_lhs));
2364 		  gimple_assign_set_lhs (use_stmt, new_lhs);
2365 		  gimple_assign_set_rhs1 (use_stmt, gimple_assign_rhs2 (stmt));
2366 		  update_stmt (use_stmt);
2367 
2368 		  release_defs (stmt);
2369 		  gsi_remove (&gsi, true);
2370 		}
2371 	      else
2372 		gsi_next (&gsi);
2373 	    }
2374 	  else
2375 	    gsi_next (&gsi);
2376 	}
2377 
2378       /* Combine stmts with the stmts defining their operands.
2379 	 Note we update GSI within the loop as necessary.  */
2380       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
2381 	{
2382 	  gimple *stmt = gsi_stmt (gsi);
2383 	  gimple *orig_stmt = stmt;
2384 	  bool changed = false;
2385 	  bool was_noreturn = (is_gimple_call (stmt)
2386 			       && gimple_call_noreturn_p (stmt));
2387 
2388 	  /* Mark stmt as potentially needing revisiting.  */
2389 	  gimple_set_plf (stmt, GF_PLF_1, false);
2390 
2391 	  if (fold_stmt (&gsi, fwprop_ssa_val))
2392 	    {
2393 	      changed = true;
2394 	      stmt = gsi_stmt (gsi);
2395 	      if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
2396 		bitmap_set_bit (to_purge, bb->index);
2397 	      if (!was_noreturn
2398 		  && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
2399 		to_fixup.safe_push (stmt);
2400 	      /* Clean up the CFG if we simplified a condition to
2401 	         true or false.  */
2402 	      if (gcond *cond = dyn_cast <gcond *> (stmt))
2403 		if (gimple_cond_true_p (cond)
2404 		    || gimple_cond_false_p (cond))
2405 		  cfg_changed = true;
2406 	      update_stmt (stmt);
2407 	    }
2408 
2409 	  switch (gimple_code (stmt))
2410 	    {
2411 	    case GIMPLE_ASSIGN:
2412 	      {
2413 		tree rhs1 = gimple_assign_rhs1 (stmt);
2414 		enum tree_code code = gimple_assign_rhs_code (stmt);
2415 
2416 		if (code == COND_EXPR
2417 		    || code == VEC_COND_EXPR)
2418 		  {
2419 		    /* In this case the entire COND_EXPR is in rhs1. */
2420 		    if (forward_propagate_into_cond (&gsi))
2421 		      {
2422 			changed = true;
2423 			stmt = gsi_stmt (gsi);
2424 		      }
2425 		  }
2426 		else if (TREE_CODE_CLASS (code) == tcc_comparison)
2427 		  {
2428 		    int did_something;
2429 		    did_something = forward_propagate_into_comparison (&gsi);
2430 		    if (did_something == 2)
2431 		      cfg_changed = true;
2432 		    changed = did_something != 0;
2433 		  }
2434 		else if ((code == PLUS_EXPR
2435 			  || code == BIT_IOR_EXPR
2436 			  || code == BIT_XOR_EXPR)
2437 			 && simplify_rotate (&gsi))
2438 		  changed = true;
2439 		else if (code == VEC_PERM_EXPR)
2440 		  {
2441 		    int did_something = simplify_permutation (&gsi);
2442 		    if (did_something == 2)
2443 		      cfg_changed = true;
2444 		    changed = did_something != 0;
2445 		  }
2446 		else if (code == BIT_FIELD_REF)
2447 		  changed = simplify_bitfield_ref (&gsi);
2448                 else if (code == CONSTRUCTOR
2449                          && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
2450                   changed = simplify_vector_constructor (&gsi);
2451 		break;
2452 	      }
2453 
2454 	    case GIMPLE_SWITCH:
2455 	      changed = simplify_gimple_switch (as_a <gswitch *> (stmt));
2456 	      break;
2457 
2458 	    case GIMPLE_COND:
2459 	      {
2460 		int did_something
2461 		  = forward_propagate_into_gimple_cond (as_a <gcond *> (stmt));
2462 		if (did_something == 2)
2463 		  cfg_changed = true;
2464 		changed = did_something != 0;
2465 		break;
2466 	      }
2467 
2468 	    case GIMPLE_CALL:
2469 	      {
2470 		tree callee = gimple_call_fndecl (stmt);
2471 		if (callee != NULL_TREE
2472 		    && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
2473 		  changed = simplify_builtin_call (&gsi, callee);
2474 		break;
2475 	      }
2476 
2477 	    default:;
2478 	    }
2479 
2480 	  if (changed)
2481 	    {
2482 	      /* If the stmt changed then re-visit it and the statements
2483 		 inserted before it.  */
2484 	      for (; !gsi_end_p (gsi); gsi_prev (&gsi))
2485 		if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
2486 		  break;
2487 	      if (gsi_end_p (gsi))
2488 		gsi = gsi_start_bb (bb);
2489 	      else
2490 		gsi_next (&gsi);
2491 	    }
2492 	  else
2493 	    {
2494 	      /* Stmt no longer needs to be revisited.  */
2495 	      gimple_set_plf (stmt, GF_PLF_1, true);
2496 
2497 	      /* Fill up the lattice.  */
2498 	      if (gimple_assign_single_p (stmt))
2499 		{
2500 		  tree lhs = gimple_assign_lhs (stmt);
2501 		  tree rhs = gimple_assign_rhs1 (stmt);
2502 		  if (TREE_CODE (lhs) == SSA_NAME)
2503 		    {
2504 		      tree val = lhs;
2505 		      if (TREE_CODE (rhs) == SSA_NAME)
2506 			val = fwprop_ssa_val (rhs);
2507 		      else if (is_gimple_min_invariant (rhs))
2508 			val = rhs;
2509 		      fwprop_set_lattice_val (lhs, val);
2510 		    }
2511 		}
2512 
2513 	      gsi_next (&gsi);
2514 	    }
2515 	}
2516     }
2517   free (postorder);
2518   lattice.release ();
2519 
2520   /* Fix up stmts that became noreturn calls.  This may require splitting
2521      blocks and thus isn't possible during the walk.  Do this
2522      in reverse order so we don't inadvertently remove a stmt we want to
2523      fix up by visiting a dominating now-noreturn call first.  */
2524   while (!to_fixup.is_empty ())
2525     {
2526       gimple *stmt = to_fixup.pop ();
2527       if (dump_file && dump_flags & TDF_DETAILS)
2528 	{
2529 	  fprintf (dump_file, "Fixing up noreturn call ");
2530 	  print_gimple_stmt (dump_file, stmt, 0, 0);
2531 	  fprintf (dump_file, "\n");
2532 	}
2533       cfg_changed |= fixup_noreturn_call (stmt);
2534     }
2535 
2536   cfg_changed |= gimple_purge_all_dead_eh_edges (to_purge);
2537   BITMAP_FREE (to_purge);
2538 
2539   if (cfg_changed)
2540     todoflags |= TODO_cleanup_cfg;
2541 
2542   return todoflags;
2543 }
2544 
2545 } // anon namespace
2546 
2547 gimple_opt_pass *
2548 make_pass_forwprop (gcc::context *ctxt)
2549 {
2550   return new pass_forwprop (ctxt);
2551 }
2552