/* Forward propagation of expressions for single use variables.
   Copyright (C) 2004-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "expmed.h"
#include "optabs-query.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-dom.h"
#include "builtins.h"
#include "tree-cfgcleanup.h"
#include "cfganal.h"
#include "optabs-tree.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"
#include "internal-fn.h"
#include "cgraph.h"
#include "tree-ssa.h"

/* This pass propagates the RHS of assignment statements into use
   sites of the LHS of the assignment.  It's basically a specialized
   form of tree combination.  It is hoped all of this can disappear
   when we have a generalized tree combiner.

   One class of common cases we handle is forward propagating a single use
   variable into a COND_EXPR.

     bb0:
       x = a COND b;
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
       if (a COND b) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).

   Or (assuming c1 and c2 are constants):

     bb0:
       x = a + c1;
       if (x EQ/NEQ c2) goto ... else goto ...

   Will be transformed into:

     bb0:
        if (a EQ/NEQ (c2 - c1)) goto ... else goto ...

   Similarly for x = a - c1.

   Or

     bb0:
       x = !a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
        if (a == 0) goto ... else goto ...

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   Or

     bb0:
       x = (typecast) a
       if (x) goto ... else goto ...

   Will be transformed into:

     bb0:
        if (a != 0) goto ... else goto ...

   (Assuming a is an integral type and x is a boolean or x is an
    integral and a is a boolean.)

   Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
   For these cases, we propagate A into all, possibly more than one,
   COND_EXPRs that use X.

   In addition to eliminating the variable and the statement which assigns
   a value to the variable, we may be able to later thread the jump without
   adding insane complexity in the dominator optimizer.

   Also note these transformations can cascade.  We handle this by having
   a worklist of COND_EXPR statements to examine.  As we make a change to
   a statement, we put it back on the worklist to examine on the next
   iteration of the main loop.

   A second class of propagation opportunities arises for ADDR_EXPR
   nodes.

     ptr = &x->y->z;
     res = *ptr;

   Will get turned into

     res = x->y->z;

   Or
     ptr = (type1*)&type2var;
     res = *ptr

   Will get turned into (if type1 and type2 are the same size
   and neither has volatile on them):
     res = VIEW_CONVERT_EXPR<type1>(type2var)

   Or

     ptr = &x[0];
     ptr2 = ptr + <constant>;

   Will get turned into

     ptr2 = &x[constant/elementsize];

  Or

     ptr = &x[0];
     offset = index * element_size;
     offset_p = (pointer) offset;
     ptr2 = ptr + offset_p

  Will get turned into:

     ptr2 = &x[index];

  Or
    ssa = (int) decl
    res = ssa & 1

  Provided that decl has known alignment >= 2, will get turned into

    res = 0

  We also propagate casts into SWITCH_EXPR and COND_EXPR conditions,
  allowing the cast to be removed, and propagate a {NOT_EXPR,NEG_EXPR}
  into a subsequent {NOT_EXPR,NEG_EXPR}.

   This will (of course) be extended as other needs arise.  */

static bool forward_propagate_addr_expr (tree, tree, bool);

/* Set to true if we delete dead edges during the optimization.  */
static bool cfg_changed;

static tree rhs_to_tree (tree type, gimple *stmt);

static bitmap to_purge;

/* Const-and-copy lattice.  */
static vec<tree> lattice;

/* Set the lattice entry for NAME to VAL.  */
static void
fwprop_set_lattice_val (tree name, tree val)
{
  if (TREE_CODE (name) == SSA_NAME)
    {
      if (SSA_NAME_VERSION (name) >= lattice.length ())
	{
	  lattice.reserve (num_ssa_names - lattice.length ());
	  lattice.quick_grow_cleared (num_ssa_names);
	}
      lattice[SSA_NAME_VERSION (name)] = val;
    }
}

/* Invalidate the lattice entry for NAME, done when releasing SSA names.  */
static void
fwprop_invalidate_lattice (tree name)
{
  if (name
      && TREE_CODE (name) == SSA_NAME
      && SSA_NAME_VERSION (name) < lattice.length ())
    lattice[SSA_NAME_VERSION (name)] = NULL_TREE;
}


/* Get the statement we can propagate from into NAME skipping
   trivial copies.  Returns the statement which defines the
   propagation source or NULL if there is no such statement.
   If SINGLE_USE_ONLY is set considers only sources which have
   a single use chain up to NAME.  If SINGLE_USE_P is non-null,
   it is set to whether the chain to NAME is a single use chain
   or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */

static gimple *
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
  bool single_use = true;

  do {
    gimple *def_stmt = SSA_NAME_DEF_STMT (name);

    if (!has_single_use (name))
      {
	single_use = false;
	if (single_use_only)
	  return NULL;
      }

    /* If name is defined by a PHI node or is the default def, bail out.  */
    if (!is_gimple_assign (def_stmt))
      return NULL;

    /* If def_stmt is a simple copy, continue looking.  */
    if (gimple_assign_rhs_code (def_stmt) == SSA_NAME)
      name = gimple_assign_rhs1 (def_stmt);
    else
      {
	if (!single_use_only && single_use_p)
	  *single_use_p = single_use;

	return def_stmt;
      }
  } while (1);
}
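
/* For illustration only (the SSA names below are made up), given

     a_1 = b_2 + 1;
     c_3 = a_1;
     d_4 = c_3;

   get_prop_source_stmt (d_4, false, &single_use_p) walks through the
   two trivial copies and returns the statement a_1 = b_2 + 1, with
   *single_use_p recording whether every link of the copy chain had a
   single use.  */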

/* Checks if the destination ssa name in DEF_STMT can be used as
   propagation source.  Returns true if so, otherwise false.  */

static bool
can_propagate_from (gimple *def_stmt)
{
  gcc_assert (is_gimple_assign (def_stmt));

  /* If the rhs has side-effects we cannot propagate from it.  */
  if (gimple_has_volatile_ops (def_stmt))
    return false;

  /* If the rhs is a load we cannot propagate from it.  */
  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
    return false;

  /* Constants can always be propagated.  */
  if (gimple_assign_single_p (def_stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    return true;

  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
  if (stmt_references_abnormal_ssa_name (def_stmt))
    return false;

  /* If the definition is a conversion of a pointer to a function type,
     then we cannot apply optimizations as some targets require
     function pointers to be canonicalized and in this case this
     optimization could eliminate a necessary canonicalization.  */
  if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
    {
      tree rhs = gimple_assign_rhs1 (def_stmt);
      if (POINTER_TYPE_P (TREE_TYPE (rhs))
          && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
        return false;
    }

  return true;
}

/* Remove a chain of dead statements starting at the definition of
   NAME.  The chain is linked via the first operand of the defining statements.
   If NAME was replaced in its only use then this function can be used
   to clean up dead stmts.  The function handles already released SSA
   names gracefully.
   Returns true if cleanup-cfg has to run.  */

static bool
remove_prop_source_from_use (tree name)
{
  gimple_stmt_iterator gsi;
  gimple *stmt;
  bool cfg_changed = false;

  do {
    basic_block bb;

    if (SSA_NAME_IN_FREE_LIST (name)
	|| SSA_NAME_IS_DEFAULT_DEF (name)
	|| !has_zero_uses (name))
      return cfg_changed;

    stmt = SSA_NAME_DEF_STMT (name);
    if (gimple_code (stmt) == GIMPLE_PHI
	|| gimple_has_side_effects (stmt))
      return cfg_changed;

    bb = gimple_bb (stmt);
    gsi = gsi_for_stmt (stmt);
    unlink_stmt_vdef (stmt);
    if (gsi_remove (&gsi, true))
      bitmap_set_bit (to_purge, bb->index);
    fwprop_invalidate_lattice (gimple_get_lhs (stmt));
    release_defs (stmt);

    name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
  } while (name && TREE_CODE (name) == SSA_NAME);

  return cfg_changed;
}

/* Return the rhs of a gassign *STMT in the form of a single tree,
   converted to type TYPE.

   This should disappear, but is needed so we can combine expressions and use
   the fold() interfaces.  Long term, we need to develop folding and combine
   routines that deal with gimple exclusively.  */

static tree
rhs_to_tree (tree type, gimple *stmt)
{
  location_t loc = gimple_location (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_TERNARY_RHS:
      return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
			      gimple_assign_rhs2 (stmt),
			      gimple_assign_rhs3 (stmt));
    case GIMPLE_BINARY_RHS:
      return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
			      gimple_assign_rhs2 (stmt));
    case GIMPLE_UNARY_RHS:
      return build1 (code, type, gimple_assign_rhs1 (stmt));
    case GIMPLE_SINGLE_RHS:
      return gimple_assign_rhs1 (stmt);
    default:
      gcc_unreachable ();
    }
}

/* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
   the folded result in a form suitable for COND_EXPR_COND or
   NULL_TREE, if there is no suitable simplified form.  If
   INVARIANT_ONLY is true only gimple_min_invariant results are
   considered simplified.  */

static tree
combine_cond_expr_cond (gimple *stmt, enum tree_code code, tree type,
			tree op0, tree op1, bool invariant_only)
{
  tree t;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  fold_defer_overflow_warnings ();
  t = fold_binary_loc (gimple_location (stmt), code, type, op0, op1);
  if (!t)
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  /* Require that we got a boolean type out if we put one in.  */
  gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));

  /* Canonicalize the combined condition for use in a COND_EXPR.  */
  t = canonicalize_cond_expr_cond (t);

  /* Bail out if we required an invariant but didn't get one.  */
  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
    {
      fold_undefer_overflow_warnings (false, NULL, 0);
      return NULL_TREE;
    }

  fold_undefer_overflow_warnings (!gimple_no_warning_p (stmt), stmt, 0);

  return t;
}

/* Combine the comparison OP0 CODE OP1 at the location of STMT with the
   defining statements of its operands.  Return a new comparison tree or
   NULL_TREE if there were no simplifying combines.  */

static tree
forward_propagate_into_comparison_1 (gimple *stmt,
				     enum tree_code code, tree type,
				     tree op0, tree op1)
{
  tree tmp = NULL_TREE;
  tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
  bool single_use0_p = false, single_use1_p = false;

  /* For comparisons use the first operand, that is likely to
     simplify comparisons against constants.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple *def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
      if (def_stmt && can_propagate_from (def_stmt))
	{
	  enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
	  bool invariant_only_p = !single_use0_p;

	  rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);

	  /* Always combine comparisons or conversions from booleans.  */
	  if (TREE_CODE (op1) == INTEGER_CST
	      && ((CONVERT_EXPR_CODE_P (def_code)
		   && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs0, 0)))
		      == BOOLEAN_TYPE)
		  || TREE_CODE_CLASS (def_code) == tcc_comparison))
	    invariant_only_p = false;

	  tmp = combine_cond_expr_cond (stmt, code, type,
					rhs0, op1, invariant_only_p);
	  if (tmp)
	    return tmp;
	}
    }

  /* If that wasn't successful, try the second operand.  */
  if (TREE_CODE (op1) == SSA_NAME)
    {
      gimple *def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
      if (def_stmt && can_propagate_from (def_stmt))
	{
	  rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
	  tmp = combine_cond_expr_cond (stmt, code, type,
					op0, rhs1, !single_use1_p);
	  if (tmp)
	    return tmp;
	}
    }

  /* If that wasn't successful either, try both operands.  */
  if (rhs0 != NULL_TREE
      && rhs1 != NULL_TREE)
    tmp = combine_cond_expr_cond (stmt, code, type,
				  rhs0, rhs1,
				  !(single_use0_p && single_use1_p));

  return tmp;
}

/* Propagate from the ssa name definition statements of the assignment
   from a comparison at *GSI into the conditional if that simplifies it.
   Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
   otherwise returns 0.  */

static int
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree tmp;
  bool cfg_changed = false;
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);

  /* Combine the comparison with defining statements.  */
  tmp = forward_propagate_into_comparison_1 (stmt,
					     gimple_assign_rhs_code (stmt),
					     type, rhs1, rhs2);
  if (tmp && useless_type_conversion_p (type, TREE_TYPE (tmp)))
    {
      gimple_assign_set_rhs_from_tree (gsi, tmp);
      fold_stmt (gsi);
      update_stmt (gsi_stmt (*gsi));

      if (TREE_CODE (rhs1) == SSA_NAME)
	cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
	cfg_changed |= remove_prop_source_from_use (rhs2);
      return cfg_changed ? 2 : 1;
    }

  return 0;
}

/* Propagate from the ssa name definition statements of COND_EXPR
   in GIMPLE_COND statement STMT into the conditional if that simplifies it.
   Returns zero if no statement was changed, one if there were
   changes and two if cfg_cleanup needs to run.

   This must be kept in sync with forward_propagate_into_cond.  */

static int
forward_propagate_into_gimple_cond (gcond *stmt)
{
  tree tmp;
  enum tree_code code = gimple_cond_code (stmt);
  bool cfg_changed = false;
  tree rhs1 = gimple_cond_lhs (stmt);
  tree rhs2 = gimple_cond_rhs (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
    return 0;

  tmp = forward_propagate_into_comparison_1 (stmt, code,
					     boolean_type_node,
					     rhs1, rhs2);
  if (tmp
      && is_gimple_condexpr_for_cond (tmp))
    {
      if (dump_file)
	{
	  fprintf (dump_file, "  Replaced '");
	  print_gimple_expr (dump_file, stmt, 0);
	  fprintf (dump_file, "' with '");
	  print_generic_expr (dump_file, tmp);
	  fprintf (dump_file, "'\n");
	}

      gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
      update_stmt (stmt);

      if (TREE_CODE (rhs1) == SSA_NAME)
	cfg_changed |= remove_prop_source_from_use (rhs1);
      if (TREE_CODE (rhs2) == SSA_NAME)
	cfg_changed |= remove_prop_source_from_use (rhs2);
      return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
    }

  /* Canonicalize _Bool == 0 and _Bool != 1 to _Bool != 0 by swapping edges.  */
  if ((TREE_CODE (TREE_TYPE (rhs1)) == BOOLEAN_TYPE
       || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
	   && TYPE_PRECISION (TREE_TYPE (rhs1)) == 1))
      && ((code == EQ_EXPR
	   && integer_zerop (rhs2))
	  || (code == NE_EXPR
	      && integer_onep (rhs2))))
    {
      basic_block bb = gimple_bb (stmt);
      gimple_cond_set_code (stmt, NE_EXPR);
      gimple_cond_set_rhs (stmt, build_zero_cst (TREE_TYPE (rhs1)));
      EDGE_SUCC (bb, 0)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      EDGE_SUCC (bb, 1)->flags ^= (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE);
      return 1;
    }

  return 0;
}
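
/* As an illustrative sketch (the block and SSA names are made up),
   the edge-swapping canonicalization above turns

     if (b_1 == 0) goto bb1; else goto bb2;

   with _Bool b_1 into

     if (b_1 != 0) goto bb2; else goto bb1;

   by changing the condition code to NE_EXPR and toggling the
   EDGE_TRUE_VALUE/EDGE_FALSE_VALUE flags on both outgoing edges,
   which preserves the semantics.  */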


/* Propagate from the ssa name definition statements of COND_EXPR
   in the rhs of statement STMT into the conditional if that simplifies it.
   Returns true if the stmt was changed.  */

static bool
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  tree tmp = NULL_TREE;
  tree cond = gimple_assign_rhs1 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);

  /* We can do tree combining on SSA_NAME and comparison expressions.  */
  if (COMPARISON_CLASS_P (cond))
    tmp = forward_propagate_into_comparison_1 (stmt, TREE_CODE (cond),
					       TREE_TYPE (cond),
					       TREE_OPERAND (cond, 0),
					       TREE_OPERAND (cond, 1));
  else if (TREE_CODE (cond) == SSA_NAME)
    {
      enum tree_code def_code;
      tree name = cond;
      gimple *def_stmt = get_prop_source_stmt (name, true, NULL);
      if (!def_stmt || !can_propagate_from (def_stmt))
	return false;

      def_code = gimple_assign_rhs_code (def_stmt);
      if (TREE_CODE_CLASS (def_code) == tcc_comparison)
	tmp = fold_build2_loc (gimple_location (def_stmt),
			       def_code,
			       TREE_TYPE (cond),
			       gimple_assign_rhs1 (def_stmt),
			       gimple_assign_rhs2 (def_stmt));
    }

  if (tmp
      && is_gimple_condexpr (tmp))
    {
      if (dump_file)
	{
	  fprintf (dump_file, "  Replaced '");
	  print_generic_expr (dump_file, cond);
	  fprintf (dump_file, "' with '");
	  print_generic_expr (dump_file, tmp);
	  fprintf (dump_file, "'\n");
	}

      if ((code == VEC_COND_EXPR) ? integer_all_onesp (tmp)
				  : integer_onep (tmp))
	gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs2 (stmt));
      else if (integer_zerop (tmp))
	gimple_assign_set_rhs_from_tree (gsi_p, gimple_assign_rhs3 (stmt));
      else
	gimple_assign_set_rhs1 (stmt, unshare_expr (tmp));
      stmt = gsi_stmt (*gsi_p);
      update_stmt (stmt);

      return true;
    }

  return false;
}

/* We've just substituted an ADDR_EXPR into stmt.  Update all the
   relevant data structures to match.  */

static void
tidy_after_forward_propagate_addr (gimple *stmt)
{
  /* We may have turned a trapping insn into a non-trapping insn.  */
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
    bitmap_set_bit (to_purge, gimple_bb (stmt)->index);

  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
}

/* NAME is a SSA_NAME representing DEF_RHS which is of the form
   ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into the use USE_STMT.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   Return true if the propagation was successful (the propagation may
   be only partially successful, yet things may still have changed).  */

static bool
forward_propagate_addr_expr_1 (tree name, tree def_rhs,
			       gimple_stmt_iterator *use_stmt_gsi,
			       bool single_use_p)
{
  tree lhs, rhs, rhs2, array_ref;
  gimple *use_stmt = gsi_stmt (*use_stmt_gsi);
  enum tree_code rhs_code;
  bool res = true;

  gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);

  lhs = gimple_assign_lhs (use_stmt);
  rhs_code = gimple_assign_rhs_code (use_stmt);
  rhs = gimple_assign_rhs1 (use_stmt);

  /* Do not perform copy-propagation but recurse through copy chains.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == SSA_NAME)
    return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

  /* The use statement could be a conversion.  Recurse to the uses of the
     lhs as copyprop does not copy through pointer to integer to pointer
     conversions and FRE does not catch all cases either.
     Treat the case of a single-use name and
     a conversion to def_rhs type separately, though.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && CONVERT_EXPR_CODE_P (rhs_code))
    {
      /* If there is a point in a conversion chain where the types match,
	 so we can remove a conversion, re-materialize the address here
	 and stop.  */
      if (single_use_p
	  && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
	{
	  gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
	  gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
	  return true;
	}

      /* Else recurse if the conversion preserves the address value.  */
      if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
	   || POINTER_TYPE_P (TREE_TYPE (lhs)))
	  && (TYPE_PRECISION (TREE_TYPE (lhs))
	      >= TYPE_PRECISION (TREE_TYPE (def_rhs))))
	return forward_propagate_addr_expr (lhs, def_rhs, single_use_p);

      return false;
    }

  /* If this isn't a conversion chain, from here on we can only propagate
     into compatible pointer contexts.  */
  if (!types_compatible_p (TREE_TYPE (name), TREE_TYPE (def_rhs)))
    return false;

  /* Propagate through constant pointer adjustments.  */
  if (TREE_CODE (lhs) == SSA_NAME
      && rhs_code == POINTER_PLUS_EXPR
      && rhs == name
      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
    {
      tree new_def_rhs;
      /* As we come here with non-invariant addresses in def_rhs we need
	 to make sure we can build a valid constant offsetted address
	 for further propagation.  Simply rely on fold building that
	 and check after the fact.  */
      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
				 def_rhs,
				 fold_convert (ptr_type_node,
					       gimple_assign_rhs2 (use_stmt)));
      if (TREE_CODE (new_def_rhs) == MEM_REF
	  && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
	return false;
      new_def_rhs = build1 (ADDR_EXPR, TREE_TYPE (rhs), new_def_rhs);

      /* Recurse.  If we could propagate into all uses of lhs do not
	 bother to replace into the current use but just pretend we did.  */
      if (forward_propagate_addr_expr (lhs, new_def_rhs, single_use_p))
	return true;

      if (useless_type_conversion_p (TREE_TYPE (lhs),
				     TREE_TYPE (new_def_rhs)))
	gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
					new_def_rhs);
      else if (is_gimple_min_invariant (new_def_rhs))
	gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR, new_def_rhs);
      else
	return false;
      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
      update_stmt (use_stmt);
      return true;
    }

  /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
     ADDR_EXPR will not appear on the LHS.  */
  tree *lhsp = gimple_assign_lhs_ptr (use_stmt);
  while (handled_component_p (*lhsp))
    lhsp = &TREE_OPERAND (*lhsp, 0);
  lhs = *lhsp;

  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_OPERAND (lhs, 0) == name)
    {
      tree def_rhs_base;
      poly_int64 def_rhs_offset;
      /* If the address is invariant we can always fold it.  */
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
							 &def_rhs_offset)))
	{
	  poly_offset_int off = mem_ref_offset (lhs);
	  tree new_ptr;
	  off += def_rhs_offset;
	  if (TREE_CODE (def_rhs_base) == MEM_REF)
	    {
	      off += mem_ref_offset (def_rhs_base);
	      new_ptr = TREE_OPERAND (def_rhs_base, 0);
	    }
	  else
	    new_ptr = build_fold_addr_expr (def_rhs_base);
	  TREE_OPERAND (lhs, 0) = new_ptr;
	  TREE_OPERAND (lhs, 1)
	    = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
	  tidy_after_forward_propagate_addr (use_stmt);
	  /* Continue propagating into the RHS if this was not the only use.  */
	  if (single_use_p)
	    return true;
	}
      /* If the LHS is a plain dereference and the value type is the same as
	 that of the pointed-to type of the address we can put the
	 dereferenced address on the LHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (lhs, 1))
	       && ((gimple_assign_lhs (use_stmt) == lhs
		    && useless_type_conversion_p
		         (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
		          TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
		   || types_compatible_p (TREE_TYPE (lhs),
					  TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
	       /* Don't forward anything into clobber stmts if it would result
		  in the lhs no longer being a MEM_REF.  */
	       && (!gimple_clobber_p (use_stmt)
		   || TREE_CODE (TREE_OPERAND (def_rhs, 0)) == MEM_REF))
	{
	  tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
	  tree new_offset, new_base, saved, new_lhs;
	  while (handled_component_p (*def_rhs_basep))
	    def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
	  saved = *def_rhs_basep;
	  if (TREE_CODE (*def_rhs_basep) == MEM_REF)
	    {
	      new_base = TREE_OPERAND (*def_rhs_basep, 0);
	      new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (lhs, 1)),
					 TREE_OPERAND (*def_rhs_basep, 1));
	    }
	  else
	    {
	      new_base = build_fold_addr_expr (*def_rhs_basep);
	      new_offset = TREE_OPERAND (lhs, 1);
	    }
	  *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
				   new_base, new_offset);
	  TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
	  TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (lhs);
	  TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
	  new_lhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
	  *lhsp = new_lhs;
	  TREE_THIS_VOLATILE (new_lhs) = TREE_THIS_VOLATILE (lhs);
	  TREE_SIDE_EFFECTS (new_lhs) = TREE_SIDE_EFFECTS (lhs);
	  *def_rhs_basep = saved;
	  tidy_after_forward_propagate_addr (use_stmt);
	  /* Continue propagating into the RHS if this was not the
	     only use.  */
	  if (single_use_p)
	    return true;
	}
      else
	/* We can have a struct assignment dereferencing our name twice.
	   Note that we didn't propagate into the lhs to not falsely
	   claim we did when propagating into the rhs.  */
	res = false;
    }

  /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
     nodes from the RHS.  */
  tree *rhsp = gimple_assign_rhs1_ptr (use_stmt);
  if (TREE_CODE (*rhsp) == ADDR_EXPR)
    rhsp = &TREE_OPERAND (*rhsp, 0);
  while (handled_component_p (*rhsp))
    rhsp = &TREE_OPERAND (*rhsp, 0);
  rhs = *rhsp;

  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
     propagate the ADDR_EXPR into the use of NAME and fold the result.  */
  if (TREE_CODE (rhs) == MEM_REF
      && TREE_OPERAND (rhs, 0) == name)
    {
      tree def_rhs_base;
      poly_int64 def_rhs_offset;
      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
							 &def_rhs_offset)))
	{
	  poly_offset_int off = mem_ref_offset (rhs);
	  tree new_ptr;
	  off += def_rhs_offset;
	  if (TREE_CODE (def_rhs_base) == MEM_REF)
	    {
	      off += mem_ref_offset (def_rhs_base);
	      new_ptr = TREE_OPERAND (def_rhs_base, 0);
	    }
	  else
	    new_ptr = build_fold_addr_expr (def_rhs_base);
	  TREE_OPERAND (rhs, 0) = new_ptr;
	  TREE_OPERAND (rhs, 1)
	    = wide_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
	  fold_stmt_inplace (use_stmt_gsi);
	  tidy_after_forward_propagate_addr (use_stmt);
	  return res;
	}
      /* If the RHS is a plain dereference and the value type is the same as
	 that of the pointed-to type of the address we can put the
	 dereferenced address on the RHS preserving the original alias-type.  */
      else if (integer_zerop (TREE_OPERAND (rhs, 1))
	       && ((gimple_assign_rhs1 (use_stmt) == rhs
		    && useless_type_conversion_p
		         (TREE_TYPE (gimple_assign_lhs (use_stmt)),
		          TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
		   || types_compatible_p (TREE_TYPE (rhs),
					  TREE_TYPE (TREE_OPERAND (def_rhs, 0)))))
	{
	  tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
	  tree new_offset, new_base, saved, new_rhs;
	  while (handled_component_p (*def_rhs_basep))
	    def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
	  saved = *def_rhs_basep;
	  if (TREE_CODE (*def_rhs_basep) == MEM_REF)
	    {
	      new_base = TREE_OPERAND (*def_rhs_basep, 0);
	      new_offset = fold_convert (TREE_TYPE (TREE_OPERAND (rhs, 1)),
					 TREE_OPERAND (*def_rhs_basep, 1));
	    }
	  else
	    {
	      new_base = build_fold_addr_expr (*def_rhs_basep);
	      new_offset = TREE_OPERAND (rhs, 1);
	    }
	  *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
				   new_base, new_offset);
	  TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
	  TREE_SIDE_EFFECTS (*def_rhs_basep) = TREE_SIDE_EFFECTS (rhs);
	  TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
	  new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
	  *rhsp = new_rhs;
	  TREE_THIS_VOLATILE (new_rhs) = TREE_THIS_VOLATILE (rhs);
	  TREE_SIDE_EFFECTS (new_rhs) = TREE_SIDE_EFFECTS (rhs);
	  *def_rhs_basep = saved;
	  fold_stmt_inplace (use_stmt_gsi);
	  tidy_after_forward_propagate_addr (use_stmt);
	  return res;
	}
    }

  /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
     is nothing to do.  */
  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
      || gimple_assign_rhs1 (use_stmt) != name)
    return false;

  /* The remaining cases are all for turning pointer arithmetic into
     array indexing.  They only apply when we have the address of
     element zero in an array.  If that is not the case then there
     is nothing to do.  */
  array_ref = TREE_OPERAND (def_rhs, 0);
  if ((TREE_CODE (array_ref) != ARRAY_REF
       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
    return false;

  rhs2 = gimple_assign_rhs2 (use_stmt);
  /* Optimize &x[C1] p+ C2 to &x p+ C3 with C3 = C1 * element_size + C2.  */
  if (TREE_CODE (rhs2) == INTEGER_CST)
    {
      tree new_rhs = build1_loc (gimple_location (use_stmt),
				 ADDR_EXPR, TREE_TYPE (def_rhs),
				 fold_build2 (MEM_REF,
					      TREE_TYPE (TREE_TYPE (def_rhs)),
					      unshare_expr (def_rhs),
					      fold_convert (ptr_type_node,
							    rhs2)));
      gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
      use_stmt = gsi_stmt (*use_stmt_gsi);
      update_stmt (use_stmt);
      tidy_after_forward_propagate_addr (use_stmt);
      return true;
    }

  return false;
}

/* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.

   Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
   Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
   node or for recovery of array indexing from pointer arithmetic.

   PARENT_SINGLE_USE_P tells if, when in a recursive invocation, NAME was
   the single use in the previous invocation.  Pass true when calling
   this as toplevel.

   Returns true if all uses have been propagated into.  */

static bool
forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
{
  imm_use_iterator iter;
  gimple *use_stmt;
  bool all = true;
  bool single_use_p = parent_single_use_p && has_single_use (name);

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
    {
      bool result;
      tree use_rhs;

      /* If the use is not in a simple assignment statement, then
	 there is nothing we can do.  */
      if (!is_gimple_assign (use_stmt))
	{
	  if (!is_gimple_debug (use_stmt))
	    all = false;
	  continue;
	}

      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
      result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
					      single_use_p);
      /* If the use has moved to a different statement adjust
	 the update machinery for the old statement too.  */
      if (use_stmt != gsi_stmt (gsi))
	{
	  update_stmt (use_stmt);
	  use_stmt = gsi_stmt (gsi);
	}
      update_stmt (use_stmt);
      all &= result;

      /* Remove intermediate now unused copy and conversion chains.  */
      use_rhs = gimple_assign_rhs1 (use_stmt);
      if (result
	  && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
	  && TREE_CODE (use_rhs) == SSA_NAME
	  && has_zero_uses (gimple_assign_lhs (use_stmt)))
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	  fwprop_invalidate_lattice (gimple_get_lhs (use_stmt));
	  release_defs (use_stmt);
	  gsi_remove (&gsi, true);
	}
    }

  return all && has_zero_uses (name);
}


/* Helper function for simplify_gimple_switch.  Remove case labels that
   have values outside the range of the new type.  */

static void
simplify_gimple_switch_label_vec (gswitch *stmt, tree index_type)
{
  unsigned int branch_num = gimple_switch_num_labels (stmt);
  auto_vec<tree> labels (branch_num);
  unsigned int i, len;

  /* Collect the existing case labels in a VEC, and preprocess it as if
     we are gimplifying a GENERIC SWITCH_EXPR.  */
  for (i = 1; i < branch_num; i++)
    labels.quick_push (gimple_switch_label (stmt, i));
  preprocess_case_label_vec_for_gimple (labels, index_type, NULL);

  /* If any labels were removed, replace the existing case labels
     in the GIMPLE_SWITCH statement with the correct ones.
     Note that the type updates were done in-place on the case labels,
     so we only have to replace the case labels in the GIMPLE_SWITCH
     if the number of labels changed.  */
  len = labels.length ();
  if (len < branch_num - 1)
    {
      bitmap target_blocks;
      edge_iterator ei;
      edge e;

      /* Corner case: *all* case labels have been removed as being
	 out-of-range for INDEX_TYPE.  Push one label and let the
	 CFG cleanups deal with this further.  */
      if (len == 0)
	{
	  tree label, elt;

	  label = CASE_LABEL (gimple_switch_default_label (stmt));
	  elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
	  labels.quick_push (elt);
	  len = 1;
	}

      for (i = 0; i < labels.length (); i++)
	gimple_switch_set_label (stmt, i + 1, labels[i]);
      for (i++ ; i < branch_num; i++)
	gimple_switch_set_label (stmt, i, NULL_TREE);
      gimple_switch_set_num_labels (stmt, len + 1);

      /* Cleanup any edges that are now dead.  */
      target_blocks = BITMAP_ALLOC (NULL);
      for (i = 0; i < gimple_switch_num_labels (stmt); i++)
	{
	  tree elt = gimple_switch_label (stmt, i);
	  basic_block target = label_to_block (cfun, CASE_LABEL (elt));
	  bitmap_set_bit (target_blocks, target->index);
	}
      for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
	{
	  if (! bitmap_bit_p (target_blocks, e->dest->index))
	    {
	      remove_edge (e);
	      cfg_changed = true;
	      free_dominance_info (CDI_DOMINATORS);
	    }
	  else
	    ei_next (&ei);
	}
      BITMAP_FREE (target_blocks);
    }
}

/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
   the condition which we may be able to optimize better.  */

static bool
simplify_gimple_switch (gswitch *stmt)
{
  /* The optimization that we really care about is removing unnecessary
     casts.  That will let us do much better in propagating the inferred
     constant at the switch target.  */
  tree cond = gimple_switch_index (stmt);
  if (TREE_CODE (cond) == SSA_NAME)
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (cond);
      if (gimple_assign_cast_p (def_stmt))
	{
	  tree def = gimple_assign_rhs1 (def_stmt);
	  if (TREE_CODE (def) != SSA_NAME)
	    return false;

	  /* If we have an extension or sign-change that preserves the
	     values we check against then we can copy the source value into
	     the switch.  */
	  tree ti = TREE_TYPE (def);
	  if (INTEGRAL_TYPE_P (ti)
	      && TYPE_PRECISION (ti) <= TYPE_PRECISION (TREE_TYPE (cond)))
	    {
	      size_t n = gimple_switch_num_labels (stmt);
	      tree min = NULL_TREE, max = NULL_TREE;
	      if (n > 1)
		{
		  min = CASE_LOW (gimple_switch_label (stmt, 1));
		  if (CASE_HIGH (gimple_switch_label (stmt, n - 1)))
		    max = CASE_HIGH (gimple_switch_label (stmt, n - 1));
		  else
		    max = CASE_LOW (gimple_switch_label (stmt, n - 1));
		}
	      if ((!min || int_fits_type_p (min, ti))
		  && (!max || int_fits_type_p (max, ti)))
		{
		  gimple_switch_set_index (stmt, def);
		  simplify_gimple_switch_label_vec (stmt, ti);
		  update_stmt (stmt);
		  return true;
		}
	    }
	}
    }

  return false;
}
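
/* For example (illustrative only, SSA names made up), assuming c_1
   has type char:

     c_2 = (int) c_1;
     switch (c_2) <case 1: ..., case 2: ..., default: ...>

   has all case values fitting in char, so the cast is bypassed by
   setting the switch index to c_1 and narrowing the case labels via
   simplify_gimple_switch_label_vec.  */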

/* For pointers p2 and p1 return p2 - p1 if the
   difference is known and constant, otherwise return NULL_TREE.  */

static tree
constant_pointer_difference (tree p1, tree p2)
{
  int i, j;
#define CPD_ITERATIONS 5
  tree exps[2][CPD_ITERATIONS];
  tree offs[2][CPD_ITERATIONS];
  int cnt[2];

  for (i = 0; i < 2; i++)
    {
      tree p = i ? p1 : p2;
      tree off = size_zero_node;
      gimple *stmt;
      enum tree_code code;

      /* For each of p1 and p2 we need to iterate at least
	 twice, to handle ADDR_EXPR directly in p1/p2,
	 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
	 on the definition stmt's RHS.  Iterate a few extra times.  */
      j = 0;
      do
	{
	  if (!POINTER_TYPE_P (TREE_TYPE (p)))
	    break;
	  if (TREE_CODE (p) == ADDR_EXPR)
	    {
	      tree q = TREE_OPERAND (p, 0);
	      poly_int64 offset;
	      tree base = get_addr_base_and_unit_offset (q, &offset);
	      if (base)
		{
		  q = base;
		  if (maybe_ne (offset, 0))
		    off = size_binop (PLUS_EXPR, off, size_int (offset));
		}
	      if (TREE_CODE (q) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
		{
		  p = TREE_OPERAND (q, 0);
		  off = size_binop (PLUS_EXPR, off,
				    wide_int_to_tree (sizetype,
						      mem_ref_offset (q)));
		}
	      else
		{
		  exps[i][j] = q;
		  offs[i][j++] = off;
		  break;
		}
	    }
	  if (TREE_CODE (p) != SSA_NAME)
	    break;
	  exps[i][j] = p;
	  offs[i][j++] = off;
	  if (j == CPD_ITERATIONS)
	    break;
	  stmt = SSA_NAME_DEF_STMT (p);
	  if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
	    break;
	  code = gimple_assign_rhs_code (stmt);
	  if (code == POINTER_PLUS_EXPR)
	    {
	      if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
		break;
	      off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
	      p = gimple_assign_rhs1 (stmt);
	    }
	  else if (code == ADDR_EXPR || CONVERT_EXPR_CODE_P (code))
	    p = gimple_assign_rhs1 (stmt);
	  else
	    break;
	}
      while (1);
      cnt[i] = j;
    }

  for (i = 0; i < cnt[0]; i++)
    for (j = 0; j < cnt[1]; j++)
      if (exps[0][i] == exps[1][j])
	return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);

  return NULL_TREE;
}
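
/* A worked example (illustrative only, SSA names made up): for

     p1_1 = &buf[4];
     p2_2 = p1_1 + 8;

   both walks reach the common SSA name p1_1, recorded with offset 0
   for p1 and offset 8 for p2, so
   constant_pointer_difference (p1_1, p2_2) returns 8.  */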

/* *GSI_P is a GIMPLE_CALL to a builtin function.
   Optimize
   memcpy (p, "abcd", 4);
   memset (p + 4, ' ', 3);
   into
   memcpy (p, "abcd   ", 7);
   if the latter can be stored by pieces during expansion.  */

static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
  gimple *stmt1, *stmt2 = gsi_stmt (*gsi_p);
  tree vuse = gimple_vuse (stmt2);
  if (vuse == NULL)
    return false;
  stmt1 = SSA_NAME_DEF_STMT (vuse);

  switch (DECL_FUNCTION_CODE (callee2))
    {
    case BUILT_IN_MEMSET:
      if (gimple_call_num_args (stmt2) != 3
	  || gimple_call_lhs (stmt2)
	  || CHAR_BIT != 8
	  || BITS_PER_UNIT != 8)
	break;
      else
	{
	  tree callee1;
	  tree ptr1, src1, str1, off1, len1, lhs1;
	  tree ptr2 = gimple_call_arg (stmt2, 0);
	  tree val2 = gimple_call_arg (stmt2, 1);
	  tree len2 = gimple_call_arg (stmt2, 2);
	  tree diff, vdef, new_str_cst;
	  gimple *use_stmt;
	  unsigned int ptr1_align;
	  unsigned HOST_WIDE_INT src_len;
	  char *src_buf;
	  use_operand_p use_p;

	  if (!tree_fits_shwi_p (val2)
	      || !tree_fits_uhwi_p (len2)
	      || compare_tree_int (len2, 1024) == 1)
	    break;
	  if (is_gimple_call (stmt1))
	    {
	      /* If first stmt is a call, it needs to be memcpy
		 or mempcpy, with string literal as second argument and
		 constant length.  */
	      callee1 = gimple_call_fndecl (stmt1);
	      if (callee1 == NULL_TREE
		  || !fndecl_built_in_p (callee1, BUILT_IN_NORMAL)
		  || gimple_call_num_args (stmt1) != 3)
		break;
	      if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
		  && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
		break;
	      ptr1 = gimple_call_arg (stmt1, 0);
	      src1 = gimple_call_arg (stmt1, 1);
	      len1 = gimple_call_arg (stmt1, 2);
	      lhs1 = gimple_call_lhs (stmt1);
	      if (!tree_fits_uhwi_p (len1))
		break;
	      str1 = string_constant (src1, &off1, NULL, NULL);
	      if (str1 == NULL_TREE)
		break;
	      if (!tree_fits_uhwi_p (off1)
		  || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
		  || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
					     - tree_to_uhwi (off1)) > 0
		  || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
		  || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
		     != TYPE_MODE (char_type_node))
		break;
	    }
	  else if (gimple_assign_single_p (stmt1))
	    {
	      /* Otherwise look for length 1 memcpy optimized into
		 assignment.  */
	      ptr1 = gimple_assign_lhs (stmt1);
	      src1 = gimple_assign_rhs1 (stmt1);
	      if (TREE_CODE (ptr1) != MEM_REF
		  || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
		  || !tree_fits_shwi_p (src1))
		break;
	      ptr1 = build_fold_addr_expr (ptr1);
	      STRIP_USELESS_TYPE_CONVERSION (ptr1);
	      callee1 = NULL_TREE;
	      len1 = size_one_node;
	      lhs1 = NULL_TREE;
	      off1 = size_zero_node;
	      str1 = NULL_TREE;
	    }
	  else
	    break;

	  diff = constant_pointer_difference (ptr1, ptr2);
	  if (diff == NULL && lhs1 != NULL)
	    {
	      diff = constant_pointer_difference (lhs1, ptr2);
	      if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
		  && diff != NULL)
		diff = size_binop (PLUS_EXPR, diff,
				   fold_convert (sizetype, len1));
	    }
	  /* If the difference between the second and first destination pointer
	     is not constant, or is bigger than memcpy length, bail out.  */
	  if (diff == NULL
	      || !tree_fits_uhwi_p (diff)
	      || tree_int_cst_lt (len1, diff)
	      || compare_tree_int (diff, 1024) == 1)
	    break;

	  /* Use the maximum of difference plus memset length and memcpy
	     length as the new memcpy length; if it is too big, bail out.  */
	  src_len = tree_to_uhwi (diff);
	  src_len += tree_to_uhwi (len2);
	  if (src_len < tree_to_uhwi (len1))
	    src_len = tree_to_uhwi (len1);
	  if (src_len > 1024)
	    break;

	  /* If mempcpy value is used elsewhere, bail out, as mempcpy
	     with bigger length will return different result.  */
	  if (lhs1 != NULL_TREE
	      && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
	      && (TREE_CODE (lhs1) != SSA_NAME
		  || !single_imm_use (lhs1, &use_p, &use_stmt)
		  || use_stmt != stmt2))
	    break;

	  /* If anything reads memory in between memcpy and memset
	     call, the modified memcpy call might change it.  */
	  vdef = gimple_vdef (stmt1);
	  if (vdef != NULL
	      && (!single_imm_use (vdef, &use_p, &use_stmt)
		  || use_stmt != stmt2))
	    break;

	  ptr1_align = get_pointer_alignment (ptr1);
	  /* Construct the new source string literal.  */
	  src_buf = XALLOCAVEC (char, src_len + 1);
	  if (callee1)
	    memcpy (src_buf,
		    TREE_STRING_POINTER (str1) + tree_to_uhwi (off1),
		    tree_to_uhwi (len1));
	  else
	    src_buf[0] = tree_to_shwi (src1);
	  memset (src_buf + tree_to_uhwi (diff),
		  tree_to_shwi (val2), tree_to_uhwi (len2));
	  src_buf[src_len] = '\0';
	  /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
	     handle embedded '\0's.  */
	  if (strlen (src_buf) != src_len)
	    break;
	  rtl_profile_for_bb (gimple_bb (stmt2));
	  /* If the new memcpy wouldn't be emitted by storing the literal
	     by pieces, this optimization might enlarge .rodata too much,
	     as commonly used string literals couldn't be shared any
	     longer.  */
	  if (!can_store_by_pieces (src_len,
				    builtin_strncpy_read_str,
				    src_buf, ptr1_align, false))
	    break;

	  new_str_cst = build_string_literal (src_len, src_buf);
	  if (callee1)
	    {
	      /* If STMT1 is a mem{,p}cpy call, adjust it and remove
		 memset call.  */
	      if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
		gimple_call_set_lhs (stmt1, NULL_TREE);
	      gimple_call_set_arg (stmt1, 1, new_str_cst);
	      gimple_call_set_arg (stmt1, 2,
				   build_int_cst (TREE_TYPE (len1), src_len));
	      update_stmt (stmt1);
	      unlink_stmt_vdef (stmt2);
	      gsi_replace (gsi_p, gimple_build_nop (), false);
	      fwprop_invalidate_lattice (gimple_get_lhs (stmt2));
	      release_defs (stmt2);
	      if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
		{
		  fwprop_invalidate_lattice (lhs1);
		  release_ssa_name (lhs1);
		}
	      return true;
	    }
	  else
	    {
	      /* Otherwise, if STMT1 is length 1 memcpy optimized into
		 assignment, remove STMT1 and change memset call into
		 memcpy call.  */
	      gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);

	      if (!is_gimple_val (ptr1))
		ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
						 true, GSI_SAME_STMT);
	      tree fndecl = builtin_decl_explicit (BUILT_IN_MEMCPY);
	      gimple_call_set_fndecl (stmt2, fndecl);
	      gimple_call_set_fntype (as_a <gcall *> (stmt2),
				      TREE_TYPE (fndecl));
	      gimple_call_set_arg (stmt2, 0, ptr1);
	      gimple_call_set_arg (stmt2, 1, new_str_cst);
	      gimple_call_set_arg (stmt2, 2,
				   build_int_cst (TREE_TYPE (len2), src_len));
	      unlink_stmt_vdef (stmt1);
	      gsi_remove (&gsi, true);
	      fwprop_invalidate_lattice (gimple_get_lhs (stmt1));
	      release_defs (stmt1);
	      update_stmt (stmt2);
	      return false;
	    }
	}
      break;
    default:
      break;
    }
  return false;
}
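
/* For the assignment branch above, a sketch of the intended effect
   (illustrative only): a length 1 memcpy already folded into a char
   store, followed by an adjacent memset,

     MEM[(char *)p_1] = 97;
     memset (p_1 + 1, 98, 2);

   is rewritten, 97 being 'a' and 98 being 'b', into

     memcpy (p_1, "abb", 3);

   provided the combined literal can be stored by pieces.  */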

/* Given an SSA_NAME in NAME, see if it was defined by an assignment; if so,
   set CODE to the code of the defining statement's rhs, ARG1 to its first
   operand and ARG2 to its second operand.  */

static inline void
defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
{
  gimple *def;
  enum tree_code code1;
  tree arg11;
  tree arg21;
  tree arg31;
  enum gimple_rhs_class grhs_class;

  code1 = TREE_CODE (name);
  arg11 = name;
  arg21 = NULL_TREE;
  arg31 = NULL_TREE;
  grhs_class = get_gimple_rhs_class (code1);

  if (code1 == SSA_NAME)
    {
      def = SSA_NAME_DEF_STMT (name);

      if (def && is_gimple_assign (def)
	  && can_propagate_from (def))
	{
	  code1 = gimple_assign_rhs_code (def);
	  arg11 = gimple_assign_rhs1 (def);
	  arg21 = gimple_assign_rhs2 (def);
	  arg31 = gimple_assign_rhs3 (def);
	}
    }
  else if (grhs_class != GIMPLE_SINGLE_RHS)
    code1 = ERROR_MARK;

  *code = code1;
  *arg1 = arg11;
  if (arg2)
    *arg2 = arg21;
  /* Ternary rhs codes are not handled by the callers; punt on them.  */
  if (arg31)
    *code = ERROR_MARK;
}
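
/* For example (illustrative only), if name_1 is defined by

     name_1 = b_2 << 3;

   then defcodefor_name (name_1, &code, &arg1, &arg2) sets code to
   LSHIFT_EXPR, arg1 to b_2 and arg2 to 3; a ternary rhs makes it
   punt with ERROR_MARK.  */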


/* Recognize rotation patterns.  Return true if a transformation
   was applied, otherwise return false.

   We are looking for X with unsigned type T with bitsize B, OP being
   +, | or ^, some type T2 wider than T.  For:
   (X << CNT1) OP (X >> CNT2)				iff CNT1 + CNT2 == B
   ((T) ((T2) X << CNT1)) OP ((T) ((T2) X >> CNT2))	iff CNT1 + CNT2 == B

   transform these into:
   X r<< CNT1

   Or for:
   (X << Y) OP (X >> (B - Y))
   (X << (int) Y) OP (X >> (int) (B - Y))
   ((T) ((T2) X << Y)) OP ((T) ((T2) X >> (B - Y)))
   ((T) ((T2) X << (int) Y)) OP ((T) ((T2) X >> (int) (B - Y)))
   (X << Y) | (X >> ((-Y) & (B - 1)))
   (X << (int) Y) | (X >> (int) ((-Y) & (B - 1)))
   ((T) ((T2) X << Y)) | ((T) ((T2) X >> ((-Y) & (B - 1))))
   ((T) ((T2) X << (int) Y)) | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))

   transform these into:
   X r<< Y

   Or for:
   (X << (Y & (B - 1))) | (X >> ((-Y) & (B - 1)))
   (X << (int) (Y & (B - 1))) | (X >> (int) ((-Y) & (B - 1)))
   ((T) ((T2) X << (Y & (B - 1)))) | ((T) ((T2) X >> ((-Y) & (B - 1))))
   ((T) ((T2) X << (int) (Y & (B - 1)))) \
     | ((T) ((T2) X >> (int) ((-Y) & (B - 1))))

   transform these into:
   X r<< (Y & (B - 1))

   Note, in the patterns with T2 type, the type of OP operands
   might even be a signed type, but should have precision B.
   Expressions with & (B - 1) should be recognized only if B is
   a power of 2.  */

1540 static bool
simplify_rotate(gimple_stmt_iterator * gsi)1541 simplify_rotate (gimple_stmt_iterator *gsi)
1542 {
1543   gimple *stmt = gsi_stmt (*gsi);
1544   tree arg[2], rtype, rotcnt = NULL_TREE;
1545   tree def_arg1[2], def_arg2[2];
1546   enum tree_code def_code[2];
1547   tree lhs;
1548   int i;
1549   bool swapped_p = false;
1550   gimple *g;
1551 
1552   arg[0] = gimple_assign_rhs1 (stmt);
1553   arg[1] = gimple_assign_rhs2 (stmt);
1554   rtype = TREE_TYPE (arg[0]);
1555 
1556   /* Only create rotates in complete modes.  Other cases are not
1557      expanded properly.  */
1558   if (!INTEGRAL_TYPE_P (rtype)
1559       || !type_has_mode_precision_p (rtype))
1560     return false;
1561 
1562   for (i = 0; i < 2; i++)
1563     defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
1564 
1565   /* Look through narrowing (or same precision) conversions.  */
1566   if (CONVERT_EXPR_CODE_P (def_code[0])
1567       && CONVERT_EXPR_CODE_P (def_code[1])
1568       && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[0]))
1569       && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[1]))
1570       && TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
1571 	 == TYPE_PRECISION (TREE_TYPE (def_arg1[1]))
1572       && TYPE_PRECISION (TREE_TYPE (def_arg1[0])) >= TYPE_PRECISION (rtype)
1573       && has_single_use (arg[0])
1574       && has_single_use (arg[1]))
1575     {
1576       for (i = 0; i < 2; i++)
1577 	{
1578 	  arg[i] = def_arg1[i];
1579 	  defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
1580 	}
1581     }
1582   else
1583     {
1584       /* Handle signed rotate; the RSHIFT_EXPR has to be done
1585 	 in unsigned type but LSHIFT_EXPR could be signed.  */
1586       i = (def_code[0] == LSHIFT_EXPR || def_code[0] == RSHIFT_EXPR);
1587       if (CONVERT_EXPR_CODE_P (def_code[i])
1588 	  && (def_code[1 - i] == LSHIFT_EXPR || def_code[1 - i] == RSHIFT_EXPR)
1589 	  && INTEGRAL_TYPE_P (TREE_TYPE (def_arg1[i]))
1590 	  && TYPE_PRECISION (rtype) == TYPE_PRECISION (TREE_TYPE (def_arg1[i]))
1591 	  && has_single_use (arg[i]))
1592 	{
1593 	  arg[i] = def_arg1[i];
1594 	  defcodefor_name (arg[i], &def_code[i], &def_arg1[i], &def_arg2[i]);
1595 	}
1596     }
1597 
1598   /* One operand has to be LSHIFT_EXPR and one RSHIFT_EXPR.  */
1599   for (i = 0; i < 2; i++)
1600     if (def_code[i] != LSHIFT_EXPR && def_code[i] != RSHIFT_EXPR)
1601       return false;
1602     else if (!has_single_use (arg[i]))
1603       return false;
1604   if (def_code[0] == def_code[1])
1605     return false;
1606 
1607   /* If we've looked through narrowing conversions before, look through
1608      widening conversions from an unsigned type with the same precision
1609      as rtype here.  */
1610   if (TYPE_PRECISION (TREE_TYPE (def_arg1[0])) != TYPE_PRECISION (rtype))
1611     for (i = 0; i < 2; i++)
1612       {
1613 	tree tem;
1614 	enum tree_code code;
1615 	defcodefor_name (def_arg1[i], &code, &tem, NULL);
1616 	if (!CONVERT_EXPR_CODE_P (code)
1617 	    || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
1618 	    || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
1619 	  return false;
1620 	def_arg1[i] = tem;
1621       }
1622   /* Both shifts have to use the same first operand.  */
1623   if (!operand_equal_for_phi_arg_p (def_arg1[0], def_arg1[1])
1624       || !types_compatible_p (TREE_TYPE (def_arg1[0]),
1625 			      TREE_TYPE (def_arg1[1])))
1626     {
1627       if ((TYPE_PRECISION (TREE_TYPE (def_arg1[0]))
1628 	   != TYPE_PRECISION (TREE_TYPE (def_arg1[1])))
1629 	  || (TYPE_UNSIGNED (TREE_TYPE (def_arg1[0]))
1630 	      == TYPE_UNSIGNED (TREE_TYPE (def_arg1[1]))))
1631 	return false;
1632 
1633       /* Handle signed rotate; the RSHIFT_EXPR has to be done
1634 	 in unsigned type but LSHIFT_EXPR could be signed.  */
1635       i = def_code[0] != RSHIFT_EXPR;
1636       if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[i])))
1637 	return false;
1638 
1639       tree tem;
1640       enum tree_code code;
1641       defcodefor_name (def_arg1[i], &code, &tem, NULL);
1642       if (!CONVERT_EXPR_CODE_P (code)
1643 	  || !INTEGRAL_TYPE_P (TREE_TYPE (tem))
1644 	  || TYPE_PRECISION (TREE_TYPE (tem)) != TYPE_PRECISION (rtype))
1645 	return false;
1646       def_arg1[i] = tem;
1647       if (!operand_equal_for_phi_arg_p (def_arg1[0], def_arg1[1])
1648 	  || !types_compatible_p (TREE_TYPE (def_arg1[0]),
1649 				  TREE_TYPE (def_arg1[1])))
1650 	return false;
1651     }
1652   else if (!TYPE_UNSIGNED (TREE_TYPE (def_arg1[0])))
1653     return false;
1654 
1655   /* CNT1 + CNT2 == B case above.  */
1656   if (tree_fits_uhwi_p (def_arg2[0])
1657       && tree_fits_uhwi_p (def_arg2[1])
1658       && tree_to_uhwi (def_arg2[0])
1659 	 + tree_to_uhwi (def_arg2[1]) == TYPE_PRECISION (rtype))
1660     rotcnt = def_arg2[0];
1661   else if (TREE_CODE (def_arg2[0]) != SSA_NAME
1662 	   || TREE_CODE (def_arg2[1]) != SSA_NAME)
1663     return false;
1664   else
1665     {
1666       tree cdef_arg1[2], cdef_arg2[2], def_arg2_alt[2];
1667       enum tree_code cdef_code[2];
1668       /* Look through conversion of the shift count argument.
1669 	 The C/C++ FEs cast any shift count argument to integer_type_node.
1670 	 The only problem might be if the maximum value of the shift count
1671 	 type is equal to or smaller than the number of bits in rtype.  */
1672       for (i = 0; i < 2; i++)
1673 	{
1674 	  def_arg2_alt[i] = def_arg2[i];
1675 	  defcodefor_name (def_arg2[i], &cdef_code[i],
1676 			   &cdef_arg1[i], &cdef_arg2[i]);
1677 	  if (CONVERT_EXPR_CODE_P (cdef_code[i])
1678 	      && INTEGRAL_TYPE_P (TREE_TYPE (cdef_arg1[i]))
1679 	      && TYPE_PRECISION (TREE_TYPE (cdef_arg1[i]))
1680 		 > floor_log2 (TYPE_PRECISION (rtype))
1681 	      && type_has_mode_precision_p (TREE_TYPE (cdef_arg1[i])))
1682 	    {
1683 	      def_arg2_alt[i] = cdef_arg1[i];
1684 	      defcodefor_name (def_arg2_alt[i], &cdef_code[i],
1685 			       &cdef_arg1[i], &cdef_arg2[i]);
1686 	    }
1687 	}
1688       for (i = 0; i < 2; i++)
1689 	/* Check for one shift count being Y and the other B - Y,
1690 	   with optional casts.  */
1691 	if (cdef_code[i] == MINUS_EXPR
1692 	    && tree_fits_shwi_p (cdef_arg1[i])
1693 	    && tree_to_shwi (cdef_arg1[i]) == TYPE_PRECISION (rtype)
1694 	    && TREE_CODE (cdef_arg2[i]) == SSA_NAME)
1695 	  {
1696 	    tree tem;
1697 	    enum tree_code code;
1698 
1699 	    if (cdef_arg2[i] == def_arg2[1 - i]
1700 		|| cdef_arg2[i] == def_arg2_alt[1 - i])
1701 	      {
1702 		rotcnt = cdef_arg2[i];
1703 		break;
1704 	      }
1705 	    defcodefor_name (cdef_arg2[i], &code, &tem, NULL);
1706 	    if (CONVERT_EXPR_CODE_P (code)
1707 		&& INTEGRAL_TYPE_P (TREE_TYPE (tem))
1708 		&& TYPE_PRECISION (TREE_TYPE (tem))
1709 		 > floor_log2 (TYPE_PRECISION (rtype))
1710 		&& type_has_mode_precision_p (TREE_TYPE (tem))
1711 		&& (tem == def_arg2[1 - i]
1712 		    || tem == def_arg2_alt[1 - i]))
1713 	      {
1714 		rotcnt = tem;
1715 		break;
1716 	      }
1717 	  }
1718 	/* The above sequence isn't safe for Y being 0,
1719 	   because then one of the shifts triggers undefined behavior.
1720 	   This alternative is safe even for a rotation count of 0.
1721 	   One shift count is Y and the other (-Y) & (B - 1).
1722 	   Or one shift count is Y & (B - 1) and the other (-Y) & (B - 1).  */
1723 	else if (cdef_code[i] == BIT_AND_EXPR
1724 		 && pow2p_hwi (TYPE_PRECISION (rtype))
1725 		 && tree_fits_shwi_p (cdef_arg2[i])
1726 		 && tree_to_shwi (cdef_arg2[i])
1727 		    == TYPE_PRECISION (rtype) - 1
1728 		 && TREE_CODE (cdef_arg1[i]) == SSA_NAME
1729 		 && gimple_assign_rhs_code (stmt) == BIT_IOR_EXPR)
1730 	  {
1731 	    tree tem;
1732 	    enum tree_code code;
1733 
1734 	    defcodefor_name (cdef_arg1[i], &code, &tem, NULL);
1735 	    if (CONVERT_EXPR_CODE_P (code)
1736 		&& INTEGRAL_TYPE_P (TREE_TYPE (tem))
1737 		&& TYPE_PRECISION (TREE_TYPE (tem))
1738 		 > floor_log2 (TYPE_PRECISION (rtype))
1739 		&& type_has_mode_precision_p (TREE_TYPE (tem)))
1740 	      defcodefor_name (tem, &code, &tem, NULL);
1741 
1742 	    if (code == NEGATE_EXPR)
1743 	      {
1744 		if (tem == def_arg2[1 - i] || tem == def_arg2_alt[1 - i])
1745 		  {
1746 		    rotcnt = tem;
1747 		    break;
1748 		  }
1749 		tree tem2;
1750 		defcodefor_name (tem, &code, &tem2, NULL);
1751 		if (CONVERT_EXPR_CODE_P (code)
1752 		    && INTEGRAL_TYPE_P (TREE_TYPE (tem2))
1753 		    && TYPE_PRECISION (TREE_TYPE (tem2))
1754 		       > floor_log2 (TYPE_PRECISION (rtype))
1755 		    && type_has_mode_precision_p (TREE_TYPE (tem2)))
1756 		  {
1757 		    if (tem2 == def_arg2[1 - i]
1758 			|| tem2 == def_arg2_alt[1 - i])
1759 		      {
1760 			rotcnt = tem2;
1761 			break;
1762 		      }
1763 		  }
1764 		else
1765 		  tem2 = NULL_TREE;
1766 
1767 		if (cdef_code[1 - i] == BIT_AND_EXPR
1768 		    && tree_fits_shwi_p (cdef_arg2[1 - i])
1769 		    && tree_to_shwi (cdef_arg2[1 - i])
1770 		       == TYPE_PRECISION (rtype) - 1
1771 		    && TREE_CODE (cdef_arg1[1 - i]) == SSA_NAME)
1772 		  {
1773 		    if (tem == cdef_arg1[1 - i]
1774 			|| tem2 == cdef_arg1[1 - i])
1775 		      {
1776 			rotcnt = def_arg2[1 - i];
1777 			break;
1778 		      }
1779 		    tree tem3;
1780 		    defcodefor_name (cdef_arg1[1 - i], &code, &tem3, NULL);
1781 		    if (CONVERT_EXPR_CODE_P (code)
1782 			&& INTEGRAL_TYPE_P (TREE_TYPE (tem3))
1783 			&& TYPE_PRECISION (TREE_TYPE (tem3))
1784 			   > floor_log2 (TYPE_PRECISION (rtype))
1785 			&& type_has_mode_precision_p (TREE_TYPE (tem3)))
1786 		      {
1787 			if (tem == tem3 || tem2 == tem3)
1788 			  {
1789 			    rotcnt = def_arg2[1 - i];
1790 			    break;
1791 			  }
1792 		      }
1793 		  }
1794 	      }
1795 	  }
1796       if (rotcnt == NULL_TREE)
1797 	return false;
1798       swapped_p = i != 1;
1799     }
1800 
1801   if (!useless_type_conversion_p (TREE_TYPE (def_arg2[0]),
1802 				  TREE_TYPE (rotcnt)))
1803     {
1804       g = gimple_build_assign (make_ssa_name (TREE_TYPE (def_arg2[0])),
1805 			       NOP_EXPR, rotcnt);
1806       gsi_insert_before (gsi, g, GSI_SAME_STMT);
1807       rotcnt = gimple_assign_lhs (g);
1808     }
1809   lhs = gimple_assign_lhs (stmt);
1810   if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
1811     lhs = make_ssa_name (TREE_TYPE (def_arg1[0]));
1812   g = gimple_build_assign (lhs,
1813 			   ((def_code[0] == LSHIFT_EXPR) ^ swapped_p)
1814 			   ? LROTATE_EXPR : RROTATE_EXPR, def_arg1[0], rotcnt);
1815   if (!useless_type_conversion_p (rtype, TREE_TYPE (def_arg1[0])))
1816     {
1817       gsi_insert_before (gsi, g, GSI_SAME_STMT);
1818       g = gimple_build_assign (gimple_assign_lhs (stmt), NOP_EXPR, lhs);
1819     }
1820   gsi_replace (gsi, g, false);
1821   return true;
1822 }
1823 
1824 
1825 /* Check whether an array contains a valid ctz table.  */
1826 static bool
1827 check_ctz_array (tree ctor, unsigned HOST_WIDE_INT mulc,
1828 		 HOST_WIDE_INT &zero_val, unsigned shift, unsigned bits)
1829 {
1830   tree elt, idx;
1831   unsigned HOST_WIDE_INT i, mask;
1832   unsigned matched = 0;
1833 
1834   mask = ((HOST_WIDE_INT_1U << (bits - shift)) - 1) << shift;
1835 
1836   zero_val = 0;
1837 
1838   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), i, idx, elt)
1839     {
1840       if (TREE_CODE (idx) != INTEGER_CST || TREE_CODE (elt) != INTEGER_CST)
1841 	return false;
1842       if (i > bits * 2)
1843 	return false;
1844 
1845       unsigned HOST_WIDE_INT index = tree_to_shwi (idx);
1846       HOST_WIDE_INT val = tree_to_shwi (elt);
1847 
1848       if (index == 0)
1849 	{
1850 	  zero_val = val;
1851 	  matched++;
1852 	}
1853 
1854       if (val >= 0 && val < bits && (((mulc << val) & mask) >> shift) == index)
1855 	matched++;
1856 
1857       if (matched > bits)
1858 	return true;
1859     }
1860 
1861   return false;
1862 }
1863 
1864 /* Check whether a string contains a valid ctz table.  */
1865 static bool
1866 check_ctz_string (tree string, unsigned HOST_WIDE_INT mulc,
1867 		  HOST_WIDE_INT &zero_val, unsigned shift, unsigned bits)
1868 {
1869   unsigned HOST_WIDE_INT len = TREE_STRING_LENGTH (string);
1870   unsigned HOST_WIDE_INT mask;
1871   unsigned matched = 0;
1872   const unsigned char *p = (const unsigned char *) TREE_STRING_POINTER (string);
1873 
1874   if (len < bits || len > bits * 2)
1875     return false;
1876 
1877   mask = ((HOST_WIDE_INT_1U << (bits - shift)) - 1) << shift;
1878 
1879   zero_val = p[0];
1880 
1881   for (unsigned i = 0; i < len; i++)
1882     if (p[i] < bits && (((mulc << p[i]) & mask) >> shift) == i)
1883       matched++;
1884 
1885   return matched == bits;
1886 }
1887 
1888 /* Recognize count trailing zeroes idiom.
1889    The canonical form is array[((x & -x) * C) >> SHIFT] where C is a magic
1890    constant which, when multiplied by a power of 2, creates a unique value
1891    in the top 5 to 7 bits.  This is then indexed into a table which maps it
1892    to the number of trailing zeroes.  Array[0] is returned so the caller can
1893    emit an appropriate sequence depending on whether ctz (0) is defined on
1894    the target.  */
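
/* A classic instance is the 32-bit de Bruijn multiply based ctz:

     static const unsigned char tab[32] =
       {
	 0, 1, 28, 2, 29, 14, 24, 3, 30, 22, 20, 15, 25, 17, 4, 8,
	 31, 27, 13, 23, 21, 19, 16, 7, 26, 12, 18, 6, 11, 5, 10, 9
       };

     int
     ctz32 (unsigned int x)
     {
       return tab[((x & -x) * 0x077CB531u) >> 27];
     }

   Here C == 0x077CB531, SHIFT == 27 and tab[0] == 0; when the target
   has a usable ctz this can be rewritten to an internal .CTZ (x) call.  */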
1895 static bool
1896 optimize_count_trailing_zeroes (tree array_ref, tree x, tree mulc,
1897 				tree tshift, HOST_WIDE_INT &zero_val)
1898 {
1899   tree type = TREE_TYPE (array_ref);
1900   tree array = TREE_OPERAND (array_ref, 0);
1901 
1902   gcc_assert (TREE_CODE (mulc) == INTEGER_CST);
1903   gcc_assert (TREE_CODE (tshift) == INTEGER_CST);
1904 
1905   tree input_type = TREE_TYPE (x);
1906   unsigned input_bits = tree_to_shwi (TYPE_SIZE (input_type));
1907 
1908   /* Check the array element type is not wider than 32 bits and the input is
1909      an unsigned 32-bit or 64-bit type.  */
1910   if (TYPE_PRECISION (type) > 32 || !TYPE_UNSIGNED (input_type))
1911     return false;
1912   if (input_bits != 32 && input_bits != 64)
1913     return false;
1914 
1915   if (!direct_internal_fn_supported_p (IFN_CTZ, input_type, OPTIMIZE_FOR_BOTH))
1916     return false;
1917 
1918   /* Check the lower bound of the array is zero.  */
1919   tree low = array_ref_low_bound (array_ref);
1920   if (!low || !integer_zerop (low))
1921     return false;
1922 
1923   unsigned shiftval = tree_to_shwi (tshift);
1924 
1925   /* Check the shift extracts the top 5..7 bits.  */
1926   if (shiftval < input_bits - 7 || shiftval > input_bits - 5)
1927     return false;
1928 
1929   tree ctor = ctor_for_folding (array);
1930   if (!ctor)
1931     return false;
1932 
1933   unsigned HOST_WIDE_INT val = tree_to_uhwi (mulc);
1934 
1935   if (TREE_CODE (ctor) == CONSTRUCTOR)
1936     return check_ctz_array (ctor, val, zero_val, shiftval, input_bits);
1937 
1938   if (TREE_CODE (ctor) == STRING_CST
1939       && TYPE_PRECISION (type) == CHAR_TYPE_SIZE)
1940     return check_ctz_string (ctor, val, zero_val, shiftval, input_bits);
1941 
1942   return false;
1943 }
1944 
1945 /* Match.pd function to match the ctz expression.  */
1946 extern bool gimple_ctz_table_index (tree, tree *, tree (*)(tree));
1947 
1948 static bool
1949 simplify_count_trailing_zeroes (gimple_stmt_iterator *gsi)
1950 {
1951   gimple *stmt = gsi_stmt (*gsi);
1952   tree array_ref = gimple_assign_rhs1 (stmt);
1953   tree res_ops[3];
1954   HOST_WIDE_INT zero_val;
1955 
1956   gcc_checking_assert (TREE_CODE (array_ref) == ARRAY_REF);
1957 
1958   if (!gimple_ctz_table_index (TREE_OPERAND (array_ref, 1), &res_ops[0], NULL))
1959     return false;
1960 
1961   if (optimize_count_trailing_zeroes (array_ref, res_ops[0],
1962 				      res_ops[1], res_ops[2], zero_val))
1963     {
1964       tree type = TREE_TYPE (res_ops[0]);
1965       HOST_WIDE_INT ctz_val = 0;
1966       HOST_WIDE_INT type_size = tree_to_shwi (TYPE_SIZE (type));
1967       bool zero_ok
1968 	= CTZ_DEFINED_VALUE_AT_ZERO (SCALAR_INT_TYPE_MODE (type), ctz_val) == 2;
1969 
1970       /* If the input value can't be zero, don't special case ctz (0).  */
1971       if (tree_expr_nonzero_p (res_ops[0]))
1972 	{
1973 	  zero_ok = true;
1974 	  zero_val = 0;
1975 	  ctz_val = 0;
1976 	}
1977 
1978       /* Skip if there is no value defined at zero, or if we can't easily
1979 	 return the correct value for zero.  */
1980       if (!zero_ok)
1981 	return false;
1982       if (zero_val != ctz_val && !(zero_val == 0 && ctz_val == type_size))
1983 	return false;
1984 
1985       gimple_seq seq = NULL;
1986       gimple *g;
1987       gcall *call = gimple_build_call_internal (IFN_CTZ, 1, res_ops[0]);
1988       gimple_set_location (call, gimple_location (stmt));
1989       gimple_set_lhs (call, make_ssa_name (integer_type_node));
1990       gimple_seq_add_stmt (&seq, call);
1991 
1992       tree prev_lhs = gimple_call_lhs (call);
1993 
1994       /* Emit ctz (x) & 31 if ctz (0) is 32 but we need to return 0.  */
1995       if (zero_val == 0 && ctz_val == type_size)
1996 	{
1997 	  g = gimple_build_assign (make_ssa_name (integer_type_node),
1998 				   BIT_AND_EXPR, prev_lhs,
1999 				   build_int_cst (integer_type_node,
2000 						  type_size - 1));
2001 	  gimple_set_location (g, gimple_location (stmt));
2002 	  gimple_seq_add_stmt (&seq, g);
2003 	  prev_lhs = gimple_assign_lhs (g);
2004 	}
2005 
2006       g = gimple_build_assign (gimple_assign_lhs (stmt), NOP_EXPR, prev_lhs);
2007       gimple_seq_add_stmt (&seq, g);
2008       gsi_replace_with_seq (gsi, seq, true);
2009       return true;
2010     }
2011 
2012   return false;
2013 }
2014 
2015 
2016 /* Combine an element access with a shuffle.  Returns true if any changes
2017    were made, false otherwise.  */
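
/* For instance, with V4SI operands

     v_1 = VEC_PERM_EXPR <a_2, b_3, { 4, 5, 6, 7 }>;
     _4 = BIT_FIELD_REF <v_1, 32, 0>;

   lane 0 of the permutation selects element 4, i.e. element 0 of the
   second input, so the extraction can be rewritten to

     _4 = BIT_FIELD_REF <b_3, 32, 0>;  */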
2018 
2019 static bool
2020 simplify_bitfield_ref (gimple_stmt_iterator *gsi)
2021 {
2022   gimple *stmt = gsi_stmt (*gsi);
2023   gimple *def_stmt;
2024   tree op, op0, op1;
2025   tree elem_type;
2026   unsigned idx, size;
2027   enum tree_code code;
2028 
2029   op = gimple_assign_rhs1 (stmt);
2030   gcc_checking_assert (TREE_CODE (op) == BIT_FIELD_REF);
2031 
2032   op0 = TREE_OPERAND (op, 0);
2033   if (TREE_CODE (op0) != SSA_NAME
2034       || TREE_CODE (TREE_TYPE (op0)) != VECTOR_TYPE)
2035     return false;
2036 
2037   def_stmt = get_prop_source_stmt (op0, false, NULL);
2038   if (!def_stmt || !can_propagate_from (def_stmt))
2039     return false;
2040 
2041   op1 = TREE_OPERAND (op, 1);
2042   code = gimple_assign_rhs_code (def_stmt);
2043   elem_type = TREE_TYPE (TREE_TYPE (op0));
2044   if (TREE_TYPE (op) != elem_type)
2045     return false;
2046 
2047   size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
2048   if (maybe_ne (bit_field_size (op), size))
2049     return false;
2050 
2051   if (code == VEC_PERM_EXPR
2052       && constant_multiple_p (bit_field_offset (op), size, &idx))
2053     {
2054       tree p, m, tem;
2055       unsigned HOST_WIDE_INT nelts;
2056       m = gimple_assign_rhs3 (def_stmt);
2057       if (TREE_CODE (m) != VECTOR_CST
2058 	  || !VECTOR_CST_NELTS (m).is_constant (&nelts))
2059 	return false;
2060       idx = TREE_INT_CST_LOW (VECTOR_CST_ELT (m, idx));
2061       idx %= 2 * nelts;
2062       if (idx < nelts)
2063 	{
2064 	  p = gimple_assign_rhs1 (def_stmt);
2065 	}
2066       else
2067 	{
2068 	  p = gimple_assign_rhs2 (def_stmt);
2069 	  idx -= nelts;
2070 	}
2071       tem = build3 (BIT_FIELD_REF, TREE_TYPE (op),
2072 		    unshare_expr (p), op1, bitsize_int (idx * size));
2073       gimple_assign_set_rhs1 (stmt, tem);
2074       fold_stmt (gsi);
2075       update_stmt (gsi_stmt (*gsi));
2076       return true;
2077     }
2078 
2079   return false;
2080 }
2081 
2082 /* Determine whether applying the 2 permutations (mask1 then mask2) gives
2083    back one of the inputs; return 1 or 2 for which input, else 0.  */
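
/* E.g. composing mask1 = { 3, 2, 1, 0 } with mask2 = { 3, 2, 1, 0 }
   yields { 0, 1, 2, 3 }, the identity on the first input, so 1 is
   returned.  */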
2084 
2085 static int
2086 is_combined_permutation_identity (tree mask1, tree mask2)
2087 {
2088   tree mask;
2089   unsigned HOST_WIDE_INT nelts, i, j;
2090   bool maybe_identity1 = true;
2091   bool maybe_identity2 = true;
2092 
2093   gcc_checking_assert (TREE_CODE (mask1) == VECTOR_CST
2094 		       && TREE_CODE (mask2) == VECTOR_CST);
2095   mask = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (mask1), mask1, mask1, mask2);
2096   if (mask == NULL_TREE || TREE_CODE (mask) != VECTOR_CST)
2097     return 0;
2098 
2099   if (!VECTOR_CST_NELTS (mask).is_constant (&nelts))
2100     return 0;
2101   for (i = 0; i < nelts; i++)
2102     {
2103       tree val = VECTOR_CST_ELT (mask, i);
2104       gcc_assert (TREE_CODE (val) == INTEGER_CST);
2105       j = TREE_INT_CST_LOW (val) & (2 * nelts - 1);
2106       if (j == i)
2107 	maybe_identity2 = false;
2108       else if (j == i + nelts)
2109 	maybe_identity1 = false;
2110       else
2111 	return 0;
2112     }
2113   return maybe_identity1 ? 1 : maybe_identity2 ? 2 : 0;
2114 }
2115 
2116 /* Combine a shuffle with its arguments.  Returns 1 if any changes were
2117    made, 2 if cfg-cleanup needs to run, and 0 otherwise.  */
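
/* For instance, the two consecutive shuffles

     v_1 = VEC_PERM_EXPR <a_2, b_3, { 1, 0, 3, 2 }>;
     v_4 = VEC_PERM_EXPR <v_1, v_1, { 1, 0, 3, 2 }>;

   compose to the identity on the first input of the first shuffle, so
   the second statement can be simplified to v_4 = a_2.  */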
2118 
2119 static int
2120 simplify_permutation (gimple_stmt_iterator *gsi)
2121 {
2122   gimple *stmt = gsi_stmt (*gsi);
2123   gimple *def_stmt;
2124   tree op0, op1, op2, op3, arg0, arg1;
2125   enum tree_code code;
2126   bool single_use_op0 = false;
2127 
2128   gcc_checking_assert (gimple_assign_rhs_code (stmt) == VEC_PERM_EXPR);
2129 
2130   op0 = gimple_assign_rhs1 (stmt);
2131   op1 = gimple_assign_rhs2 (stmt);
2132   op2 = gimple_assign_rhs3 (stmt);
2133 
2134   if (TREE_CODE (op2) != VECTOR_CST)
2135     return 0;
2136 
2137   if (TREE_CODE (op0) == VECTOR_CST)
2138     {
2139       code = VECTOR_CST;
2140       arg0 = op0;
2141     }
2142   else if (TREE_CODE (op0) == SSA_NAME)
2143     {
2144       def_stmt = get_prop_source_stmt (op0, false, &single_use_op0);
2145       if (!def_stmt || !can_propagate_from (def_stmt))
2146 	return 0;
2147 
2148       code = gimple_assign_rhs_code (def_stmt);
2149       arg0 = gimple_assign_rhs1 (def_stmt);
2150     }
2151   else
2152     return 0;
2153 
2154   /* Two consecutive shuffles.  */
2155   if (code == VEC_PERM_EXPR)
2156     {
2157       tree orig;
2158       int ident;
2159 
2160       if (op0 != op1)
2161 	return 0;
2162       op3 = gimple_assign_rhs3 (def_stmt);
2163       if (TREE_CODE (op3) != VECTOR_CST)
2164 	return 0;
2165       ident = is_combined_permutation_identity (op3, op2);
2166       if (!ident)
2167 	return 0;
2168       orig = (ident == 1) ? gimple_assign_rhs1 (def_stmt)
2169 			  : gimple_assign_rhs2 (def_stmt);
2170       gimple_assign_set_rhs1 (stmt, unshare_expr (orig));
2171       gimple_assign_set_rhs_code (stmt, TREE_CODE (orig));
2172       gimple_set_num_ops (stmt, 2);
2173       update_stmt (stmt);
2174       return remove_prop_source_from_use (op0) ? 2 : 1;
2175     }
2176 
2177   /* Shuffle of a constructor.  */
2178   else if (code == CONSTRUCTOR || code == VECTOR_CST)
2179     {
2180       tree opt;
2181       bool ret = false;
2182       if (op0 != op1)
2183 	{
2184 	  if (TREE_CODE (op0) == SSA_NAME && !single_use_op0)
2185 	    return 0;
2186 
2187 	  if (TREE_CODE (op1) == VECTOR_CST)
2188 	    arg1 = op1;
2189 	  else if (TREE_CODE (op1) == SSA_NAME)
2190 	    {
2191 	      enum tree_code code2;
2192 
2193 	      gimple *def_stmt2 = get_prop_source_stmt (op1, true, NULL);
2194 	      if (!def_stmt2 || !can_propagate_from (def_stmt2))
2195 		return 0;
2196 
2197 	      code2 = gimple_assign_rhs_code (def_stmt2);
2198 	      if (code2 != CONSTRUCTOR && code2 != VECTOR_CST)
2199 		return 0;
2200 	      arg1 = gimple_assign_rhs1 (def_stmt2);
2201 	    }
2202 	  else
2203 	    return 0;
2204 	}
2205       else
2206 	{
2207 	  /* Already used twice in this statement.  */
2208 	  if (TREE_CODE (op0) == SSA_NAME && num_imm_uses (op0) > 2)
2209 	    return 0;
2210 	  arg1 = arg0;
2211 	}
2212       opt = fold_ternary (VEC_PERM_EXPR, TREE_TYPE (op0), arg0, arg1, op2);
2213       if (!opt
2214 	  || (TREE_CODE (opt) != CONSTRUCTOR && TREE_CODE (opt) != VECTOR_CST))
2215 	return 0;
2216       gimple_assign_set_rhs_from_tree (gsi, opt);
2217       update_stmt (gsi_stmt (*gsi));
2218       if (TREE_CODE (op0) == SSA_NAME)
2219 	ret = remove_prop_source_from_use (op0);
2220       if (op0 != op1 && TREE_CODE (op1) == SSA_NAME)
2221 	ret |= remove_prop_source_from_use (op1);
2222       return ret ? 2 : 1;
2223     }
2224 
2225   return 0;
2226 }
2227 
2228 /* Get the BIT_FIELD_REF definition of VAL, if any, looking through
2229    a conversion with code CONV_CODE, setting CONV_CODE to the conversion
2230    seen if it is still ERROR_MARK.  Return NULL_TREE if no match is found.  */
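
/* For instance, for

     _1 = BIT_FIELD_REF <v_2, 32, 64>;
     f_3 = (float) _1;

   get_bit_field_ref_def (f_3, conv_code) returns the BIT_FIELD_REF tree
   and sets conv_code to FLOAT_EXPR if it was still ERROR_MARK.  */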
2231 
2232 static tree
2233 get_bit_field_ref_def (tree val, enum tree_code &conv_code)
2234 {
2235   if (TREE_CODE (val) != SSA_NAME)
2236     return NULL_TREE;
2237   gimple *def_stmt = get_prop_source_stmt (val, false, NULL);
2238   if (!def_stmt)
2239     return NULL_TREE;
2240   enum tree_code code = gimple_assign_rhs_code (def_stmt);
2241   if (code == FLOAT_EXPR
2242       || code == FIX_TRUNC_EXPR
2243       || CONVERT_EXPR_CODE_P (code))
2244     {
2245       tree op1 = gimple_assign_rhs1 (def_stmt);
2246       if (conv_code == ERROR_MARK)
2247 	conv_code = code;
2248       else if (conv_code != code)
2249 	return NULL_TREE;
2250       if (TREE_CODE (op1) != SSA_NAME)
2251 	return NULL_TREE;
2252       def_stmt = SSA_NAME_DEF_STMT (op1);
2253       if (! is_gimple_assign (def_stmt))
2254 	return NULL_TREE;
2255       code = gimple_assign_rhs_code (def_stmt);
2256     }
2257   if (code != BIT_FIELD_REF)
2258     return NULL_TREE;
2259   return gimple_assign_rhs1 (def_stmt);
2260 }
2261 
2262 /* Recognize a VEC_PERM_EXPR.  Returns true if there were any changes.  */
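
/* For instance, the element-wise copy

     _1 = BIT_FIELD_REF <a_5, 32, 0>;
     _2 = BIT_FIELD_REF <a_5, 32, 32>;
     _3 = BIT_FIELD_REF <a_5, 32, 64>;
     _4 = BIT_FIELD_REF <a_5, 32, 96>;
     v_6 = {_1, _2, _3, _4};

   extracts every lane of a_5 in order, so the constructor is recognized
   as the identity and replaced by v_6 = a_5.  */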
2263 
2264 static bool
2265 simplify_vector_constructor (gimple_stmt_iterator *gsi)
2266 {
2267   gimple *stmt = gsi_stmt (*gsi);
2268   tree op, orig[2], type, elem_type;
2269   unsigned elem_size, i;
2270   unsigned HOST_WIDE_INT nelts;
2271   unsigned HOST_WIDE_INT refnelts;
2272   enum tree_code conv_code;
2273   constructor_elt *elt;
2274 
2275   op = gimple_assign_rhs1 (stmt);
2276   type = TREE_TYPE (op);
2277   gcc_checking_assert (TREE_CODE (op) == CONSTRUCTOR
2278 		       && TREE_CODE (type) == VECTOR_TYPE);
2279 
2280   if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
2281     return false;
2282   elem_type = TREE_TYPE (type);
2283   elem_size = TREE_INT_CST_LOW (TYPE_SIZE (elem_type));
2284 
2285   orig[0] = NULL;
2286   orig[1] = NULL;
2287   conv_code = ERROR_MARK;
2288   bool maybe_ident = true;
2289   bool maybe_blend[2] = { true, true };
2290   tree one_constant = NULL_TREE;
2291   tree one_nonconstant = NULL_TREE;
2292   auto_vec<tree> constants;
2293   constants.safe_grow_cleared (nelts);
2294   auto_vec<std::pair<unsigned, unsigned>, 64> elts;
2295   FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
2296     {
2297       tree ref, op1;
2298       unsigned int elem;
2299 
2300       if (i >= nelts)
2301 	return false;
2302 
2303       /* Look for elements extracted and possibly converted from
2304          another vector.  */
2305       op1 = get_bit_field_ref_def (elt->value, conv_code);
2306       if (op1
2307 	  && TREE_CODE ((ref = TREE_OPERAND (op1, 0))) == SSA_NAME
2308 	  && VECTOR_TYPE_P (TREE_TYPE (ref))
2309 	  && useless_type_conversion_p (TREE_TYPE (op1),
2310 					TREE_TYPE (TREE_TYPE (ref)))
2311 	  && constant_multiple_p (bit_field_offset (op1),
2312 				  bit_field_size (op1), &elem)
2313 	  && TYPE_VECTOR_SUBPARTS (TREE_TYPE (ref)).is_constant (&refnelts))
2314 	{
2315 	  unsigned int j;
2316 	  for (j = 0; j < 2; ++j)
2317 	    {
2318 	      if (!orig[j])
2319 		{
2320 		  if (j == 0
2321 		      || useless_type_conversion_p (TREE_TYPE (orig[0]),
2322 						    TREE_TYPE (ref)))
2323 		    break;
2324 		}
2325 	      else if (ref == orig[j])
2326 		break;
2327 	    }
2328 	  /* Found a suitable vector element.  */
2329 	  if (j < 2)
2330 	    {
2331 	      orig[j] = ref;
2332 	      if (elem != i || j != 0)
2333 		maybe_ident = false;
2334 	      if (elem != i)
2335 		maybe_blend[j] = false;
2336 	      elts.safe_push (std::make_pair (j, elem));
2337 	      continue;
2338 	    }
2339 	  /* Else fallthru.  */
2340 	}
2341       /* Handle elements not extracted from a vector.
2342           1. constants by permuting with a constant vector
2343 	  2. a unique non-constant element by permuting with a splat vector  */
2344       if (orig[1]
2345 	  && orig[1] != error_mark_node)
2346 	return false;
2347       orig[1] = error_mark_node;
2348       if (CONSTANT_CLASS_P (elt->value))
2349 	{
2350 	  if (one_nonconstant)
2351 	    return false;
2352 	  if (!one_constant)
2353 	    one_constant = elt->value;
2354 	  constants[i] = elt->value;
2355 	}
2356       else
2357 	{
2358 	  if (one_constant)
2359 	    return false;
2360 	  if (!one_nonconstant)
2361 	    one_nonconstant = elt->value;
2362 	  else if (!operand_equal_p (one_nonconstant, elt->value, 0))
2363 	    return false;
2364 	}
2365       elts.safe_push (std::make_pair (1, i));
2366       maybe_ident = false;
2367     }
2368   if (i < nelts)
2369     return false;
2370 
2371   if (! orig[0]
2372       || ! VECTOR_TYPE_P (TREE_TYPE (orig[0])))
2373     return false;
2374   refnelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (orig[0])).to_constant ();
2375   /* We currently do not handle larger destination vectors.  */
2376   if (refnelts < nelts)
2377     return false;
2378 
2379   if (maybe_ident)
2380     {
2381       tree conv_src_type
2382 	= (nelts != refnelts
2383 	   ? (conv_code != ERROR_MARK
2384 	      ? build_vector_type (TREE_TYPE (TREE_TYPE (orig[0])), nelts)
2385 	      : type)
2386 	   : TREE_TYPE (orig[0]));
2387       if (conv_code != ERROR_MARK
2388 	  && !supportable_convert_operation (conv_code, type, conv_src_type,
2389 					     &conv_code))
2390 	{
2391 	  /* Only a few targets implement direct conversion patterns so try
2392 	     some simple special cases via VEC_[UN]PACK[_FLOAT]_LO_EXPR.  */
2393 	  optab optab;
2394 	  tree halfvectype, dblvectype;
2395 	  enum tree_code unpack_op;
2396 
2397 	  if (!BYTES_BIG_ENDIAN)
2398 	    unpack_op = (FLOAT_TYPE_P (TREE_TYPE (type))
2399 			 ? VEC_UNPACK_FLOAT_LO_EXPR
2400 			 : VEC_UNPACK_LO_EXPR);
2401 	  else
2402 	    unpack_op = (FLOAT_TYPE_P (TREE_TYPE (type))
2403 			 ? VEC_UNPACK_FLOAT_HI_EXPR
2404 			 : VEC_UNPACK_HI_EXPR);
2405 
2406 	  /* Conversions between DFP and FP have no special tree code
2407 	     but we cannot handle those since all relevant vector conversion
2408 	     optabs only have a single mode.  */
2409 	  if (CONVERT_EXPR_CODE_P (conv_code)
2410 	      && FLOAT_TYPE_P (TREE_TYPE (type))
2411 	      && (DECIMAL_FLOAT_TYPE_P (TREE_TYPE (type))
2412 		  != DECIMAL_FLOAT_TYPE_P (TREE_TYPE (conv_src_type))))
2413 	    return false;
2414 
2415 	  if (CONVERT_EXPR_CODE_P (conv_code)
2416 	      && (2 * TYPE_PRECISION (TREE_TYPE (TREE_TYPE (orig[0])))
2417 		  == TYPE_PRECISION (TREE_TYPE (type)))
2418 	      && mode_for_vector (as_a <scalar_mode>
2419 				  (TYPE_MODE (TREE_TYPE (TREE_TYPE (orig[0])))),
2420 				  nelts * 2).exists ()
2421 	      && (dblvectype
2422 		  = build_vector_type (TREE_TYPE (TREE_TYPE (orig[0])),
2423 				       nelts * 2))
2424 	      /* Only use it for vector modes or for vector booleans
2425 		 represented as scalar bitmasks.  See PR95528.  */
2426 	      && (VECTOR_MODE_P (TYPE_MODE (dblvectype))
2427 		  || VECTOR_BOOLEAN_TYPE_P (dblvectype))
2428 	      && (optab = optab_for_tree_code (unpack_op,
2429 					       dblvectype,
2430 					       optab_default))
2431 	      && (optab_handler (optab, TYPE_MODE (dblvectype))
2432 		  != CODE_FOR_nothing))
2433 	    {
2434 	      gimple_seq stmts = NULL;
2435 	      tree dbl;
2436 	      if (refnelts == nelts)
2437 		{
2438 		  /* ???  Paradoxical subregs don't exist, so insert into
2439 		     the lower half of a wider zero vector.  */
2440 		  dbl = gimple_build (&stmts, BIT_INSERT_EXPR, dblvectype,
2441 				      build_zero_cst (dblvectype), orig[0],
2442 				      bitsize_zero_node);
2443 		}
2444 	      else if (refnelts == 2 * nelts)
2445 		dbl = orig[0];
2446 	      else
2447 		dbl = gimple_build (&stmts, BIT_FIELD_REF, dblvectype,
2448 				    orig[0], TYPE_SIZE (dblvectype),
2449 				    bitsize_zero_node);
2450 	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2451 	      gimple_assign_set_rhs_with_ops (gsi, unpack_op, dbl);
2452 	    }
2453 	  else if (CONVERT_EXPR_CODE_P (conv_code)
2454 		   && (TYPE_PRECISION (TREE_TYPE (TREE_TYPE (orig[0])))
2455 		       == 2 * TYPE_PRECISION (TREE_TYPE (type)))
2456 		   && mode_for_vector (as_a <scalar_mode>
2457 				         (TYPE_MODE
2458 					   (TREE_TYPE (TREE_TYPE (orig[0])))),
2459 				       nelts / 2).exists ()
2460 		   && (halfvectype
2461 		         = build_vector_type (TREE_TYPE (TREE_TYPE (orig[0])),
2462 					      nelts / 2))
2463 		   /* Only use it for vector modes or for vector booleans
2464 		      represented as scalar bitmasks.  See PR95528.  */
2465 		   && (VECTOR_MODE_P (TYPE_MODE (halfvectype))
2466 		       || VECTOR_BOOLEAN_TYPE_P (halfvectype))
2467 		   && (optab = optab_for_tree_code (VEC_PACK_TRUNC_EXPR,
2468 						    halfvectype,
2469 						    optab_default))
2470 		   && (optab_handler (optab, TYPE_MODE (halfvectype))
2471 		       != CODE_FOR_nothing))
2472 	    {
2473 	      gimple_seq stmts = NULL;
2474 	      tree low = gimple_build (&stmts, BIT_FIELD_REF, halfvectype,
2475 				       orig[0], TYPE_SIZE (halfvectype),
2476 				       bitsize_zero_node);
2477 	      tree hig = gimple_build (&stmts, BIT_FIELD_REF, halfvectype,
2478 				       orig[0], TYPE_SIZE (halfvectype),
2479 				       TYPE_SIZE (halfvectype));
2480 	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2481 	      gimple_assign_set_rhs_with_ops (gsi, VEC_PACK_TRUNC_EXPR,
2482 					      low, hig);
2483 	    }
2484 	  else
2485 	    return false;
2486 	  update_stmt (gsi_stmt (*gsi));
2487 	  return true;
2488 	}
2489       if (nelts != refnelts)
2490 	{
2491 	  gassign *lowpart
2492 	    = gimple_build_assign (make_ssa_name (conv_src_type),
2493 				   build3 (BIT_FIELD_REF, conv_src_type,
2494 					   orig[0], TYPE_SIZE (conv_src_type),
2495 					   bitsize_zero_node));
2496 	  gsi_insert_before (gsi, lowpart, GSI_SAME_STMT);
2497 	  orig[0] = gimple_assign_lhs (lowpart);
2498 	}
2499       if (conv_code == ERROR_MARK)
2500 	{
2501 	  tree src_type = TREE_TYPE (orig[0]);
2502 	  if (!useless_type_conversion_p (type, src_type))
2503 	    {
2504 	      gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type),
2505 				    TYPE_VECTOR_SUBPARTS (src_type))
2506 			  && useless_type_conversion_p (TREE_TYPE (type),
2507 							TREE_TYPE (src_type)));
2508 	      tree rhs = build1 (VIEW_CONVERT_EXPR, type, orig[0]);
2509 	      orig[0] = make_ssa_name (type);
2510 	      gassign *assign = gimple_build_assign (orig[0], rhs);
2511 	      gsi_insert_before (gsi, assign, GSI_SAME_STMT);
2512 	    }
2513 	  gimple_assign_set_rhs_from_tree (gsi, orig[0]);
2514 	}
2515       else
2516 	gimple_assign_set_rhs_with_ops (gsi, conv_code, orig[0],
2517 					NULL_TREE, NULL_TREE);
2518     }
2519   else
2520     {
2521       /* If we combine a vector with a non-vector, avoid cases where
2522 	 we'll obviously end up with more GIMPLE stmts, which is when
2523 	 we'll later not fold this to a single insert into the vector
2524 	 and we had a single extract originally.  See PR92819.  */
2525       if (nelts == 2
2526 	  && refnelts > 2
2527 	  && orig[1] == error_mark_node
2528 	  && !maybe_blend[0])
2529 	return false;
2530       tree mask_type, perm_type, conv_src_type;
2531       perm_type = TREE_TYPE (orig[0]);
2532       conv_src_type = (nelts == refnelts
2533 		       ? perm_type
2534 		       : build_vector_type (TREE_TYPE (perm_type), nelts));
2535       if (conv_code != ERROR_MARK
2536 	  && !supportable_convert_operation (conv_code, type, conv_src_type,
2537 					     &conv_code))
2538 	return false;
2539 
2540       /* Now that we know the number of elements of the source build the
2541 	 permute vector.
2542 	 ???  When the second vector has constant values we can shuffle
2543 	 it and its source indexes to make the permutation supported.
2544 	 For now it mimics a blend.  */
2545       vec_perm_builder sel (refnelts, refnelts, 1);
2546       bool all_same_p = true;
2547       for (i = 0; i < elts.length (); ++i)
2548 	{
2549 	  sel.quick_push (elts[i].second + elts[i].first * refnelts);
2550 	  all_same_p &= known_eq (sel[i], sel[0]);
2551 	}
2552       /* And fill the tail with "something".  Those elements are don't-cares,
2553          and ideally we'd allow VEC_PERM to have a smaller destination
2554 	 vector.  As a heuristic:
2555 
2556 	 (a) if what we have so far duplicates a single element, make the
2557 	     tail do the same
2558 
2559 	 (b) otherwise preserve a uniform orig[0].  This facilitates
2560 	     later pattern-matching of VEC_PERM_EXPR to a BIT_INSERT_EXPR.  */
2561       for (; i < refnelts; ++i)
2562 	sel.quick_push (all_same_p
2563 			? sel[0]
2564 			: (elts[0].second == 0 && elts[0].first == 0
2565 			   ? 0 : refnelts) + i);
2566       vec_perm_indices indices (sel, orig[1] ? 2 : 1, refnelts);
2567       if (!can_vec_perm_const_p (TYPE_MODE (perm_type), indices))
2568 	return false;
2569       mask_type
2570 	= build_vector_type (build_nonstandard_integer_type (elem_size, 1),
2571 			     refnelts);
2572       if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
2573 	  || maybe_ne (GET_MODE_SIZE (TYPE_MODE (mask_type)),
2574 		       GET_MODE_SIZE (TYPE_MODE (perm_type))))
2575 	return false;
2576       tree op2 = vec_perm_indices_to_tree (mask_type, indices);
2577       bool converted_orig1 = false;
2578       gimple_seq stmts = NULL;
2579       if (!orig[1])
2580 	orig[1] = orig[0];
2581       else if (orig[1] == error_mark_node
2582 	       && one_nonconstant)
2583 	{
2584 	  /* ???  We can see if we can safely convert to the original
2585 	     element type.  */
2586 	  converted_orig1 = conv_code != ERROR_MARK;
2587 	  orig[1] = gimple_build_vector_from_val (&stmts, UNKNOWN_LOCATION,
2588 						  converted_orig1
2589 						  ? type : perm_type,
2590 						  one_nonconstant);
2591 	}
2592       else if (orig[1] == error_mark_node)
2593 	{
2594 	  /* ???  See if we can convert the vector to the original type.  */
2595 	  converted_orig1 = conv_code != ERROR_MARK;
2596 	  unsigned n = converted_orig1 ? nelts : refnelts;
2597 	  tree_vector_builder vec (converted_orig1
2598 				   ? type : perm_type, n, 1);
2599 	  for (unsigned i = 0; i < n; ++i)
2600 	    if (i < nelts && constants[i])
2601 	      vec.quick_push (constants[i]);
2602 	    else
2603 	      /* ??? Push a don't-care value.  */
2604 	      vec.quick_push (one_constant);
2605 	  orig[1] = vec.build ();
2606 	}
2607       tree blend_op2 = NULL_TREE;
2608       if (converted_orig1)
2609 	{
2610 	  /* Make sure we can do a blend in the target type.  */
2611 	  vec_perm_builder sel (nelts, nelts, 1);
2612 	  for (i = 0; i < elts.length (); ++i)
2613 	    sel.quick_push (elts[i].first
2614 			    ? elts[i].second + nelts : i);
2615 	  vec_perm_indices indices (sel, 2, nelts);
2616 	  if (!can_vec_perm_const_p (TYPE_MODE (type), indices))
2617 	    return false;
2618 	  mask_type
2619 	    = build_vector_type (build_nonstandard_integer_type (elem_size, 1),
2620 				 nelts);
2621 	  if (GET_MODE_CLASS (TYPE_MODE (mask_type)) != MODE_VECTOR_INT
2622 	      || maybe_ne (GET_MODE_SIZE (TYPE_MODE (mask_type)),
2623 			   GET_MODE_SIZE (TYPE_MODE (type))))
2624 	    return false;
2625 	  blend_op2 = vec_perm_indices_to_tree (mask_type, indices);
2626 	}
2627       tree orig1_for_perm
2628 	= converted_orig1 ? build_zero_cst (perm_type) : orig[1];
2629       tree res = gimple_build (&stmts, VEC_PERM_EXPR, perm_type,
2630 			       orig[0], orig1_for_perm, op2);
2631       if (nelts != refnelts)
2632 	res = gimple_build (&stmts, BIT_FIELD_REF,
2633 			    conv_code != ERROR_MARK ? conv_src_type : type,
2634 			    res, TYPE_SIZE (type), bitsize_zero_node);
2635       if (conv_code != ERROR_MARK)
2636 	res = gimple_build (&stmts, conv_code, type, res);
2637       else if (!useless_type_conversion_p (type, TREE_TYPE (res)))
2638 	{
2639 	  gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type),
2640 				TYPE_VECTOR_SUBPARTS (perm_type))
2641 		      && useless_type_conversion_p (TREE_TYPE (type),
2642 						    TREE_TYPE (perm_type)));
2643 	  res = gimple_build (&stmts, VIEW_CONVERT_EXPR, type, res);
2644 	}
2645       /* Blend in the actual constant.  */
2646       if (converted_orig1)
2647 	res = gimple_build (&stmts, VEC_PERM_EXPR, type,
2648 			    res, orig[1], blend_op2);
2649       gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2650       gimple_assign_set_rhs_with_ops (gsi, SSA_NAME, res);
2651     }
2652   update_stmt (gsi_stmt (*gsi));
2653   return true;
2654 }
2655 
2656 
2657 /* Primitive "lattice" function for gimple_simplify.  */
2658 
2659 static tree
2660 fwprop_ssa_val (tree name)
2661 {
2662   /* First valueize NAME.  */
2663   if (TREE_CODE (name) == SSA_NAME
2664       && SSA_NAME_VERSION (name) < lattice.length ())
2665     {
2666       tree val = lattice[SSA_NAME_VERSION (name)];
2667       if (val)
2668 	name = val;
2669     }
2670   /* We continue matching along SSA use-def edges for SSA names
2671      that are not single-use.  Currently there are no patterns
2672      that would cause any issues with that.  */
2673   return name;
2674 }
2675 
2676 /* Main entry point for the forward propagation and statement combine
2677    optimizer.  */
2678 
2679 namespace {
2680 
2681 const pass_data pass_data_forwprop =
2682 {
2683   GIMPLE_PASS, /* type */
2684   "forwprop", /* name */
2685   OPTGROUP_NONE, /* optinfo_flags */
2686   TV_TREE_FORWPROP, /* tv_id */
2687   ( PROP_cfg | PROP_ssa ), /* properties_required */
2688   0, /* properties_provided */
2689   0, /* properties_destroyed */
2690   0, /* todo_flags_start */
2691   TODO_update_ssa, /* todo_flags_finish */
2692 };
2693 
2694 class pass_forwprop : public gimple_opt_pass
2695 {
2696 public:
2697   pass_forwprop (gcc::context *ctxt)
2698     : gimple_opt_pass (pass_data_forwprop, ctxt)
2699   {}
2700 
2701   /* opt_pass methods: */
2702   opt_pass * clone () { return new pass_forwprop (m_ctxt); }
2703   virtual bool gate (function *) { return flag_tree_forwprop; }
2704   virtual unsigned int execute (function *);
2705 
2706 }; // class pass_forwprop
2707 
2708 unsigned int
2709 pass_forwprop::execute (function *fun)
2710 {
2711   unsigned int todoflags = 0;
2712 
2713   cfg_changed = false;
2714 
2715   /* Combine stmts with the stmts defining their operands.  Do that
2716      in an order that guarantees visiting SSA defs before SSA uses.  */
2717   lattice.create (num_ssa_names);
2718   lattice.quick_grow_cleared (num_ssa_names);
2719   int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (fun));
2720   int postorder_num = pre_and_rev_post_order_compute_fn (cfun, NULL,
2721 							 postorder, false);
2722   auto_vec<gimple *, 4> to_fixup;
2723   auto_vec<gimple *, 32> to_remove;
2724   to_purge = BITMAP_ALLOC (NULL);
2725   for (int i = 0; i < postorder_num; ++i)
2726     {
2727       gimple_stmt_iterator gsi;
2728       basic_block bb = BASIC_BLOCK_FOR_FN (fun, postorder[i]);
2729 
2730       /* Record degenerate PHIs in the lattice.  */
2731       for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
2732 	   gsi_next (&si))
2733 	{
2734 	  gphi *phi = si.phi ();
2735 	  tree res = gimple_phi_result (phi);
2736 	  if (virtual_operand_p (res))
2737 	    continue;
2738 
2739 	  use_operand_p use_p;
2740 	  ssa_op_iter it;
2741 	  tree first = NULL_TREE;
2742 	  bool all_same = true;
2743 	  FOR_EACH_PHI_ARG (use_p, phi, it, SSA_OP_USE)
2744 	    {
2745 	      tree use = USE_FROM_PTR (use_p);
2746 	      if (! first)
2747 		first = use;
2748 	      else if (! operand_equal_p (first, use, 0))
2749 		{
2750 		  all_same = false;
2751 		  break;
2752 		}
2753 	    }
2754 	  if (all_same)
2755 	    {
2756 	      if (may_propagate_copy (res, first))
2757 		to_remove.safe_push (phi);
2758 	      fwprop_set_lattice_val (res, first);
2759 	    }
2760 	}
2761 
2762       /* Apply forward propagation to all stmts in the basic-block.
2763 	 Note we update GSI within the loop as necessary.  */
2764       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
2765 	{
2766 	  gimple *stmt = gsi_stmt (gsi);
2767 	  tree lhs, rhs;
2768 	  enum tree_code code;
2769 
2770 	  if (!is_gimple_assign (stmt))
2771 	    {
2772 	      gsi_next (&gsi);
2773 	      continue;
2774 	    }
2775 
2776 	  lhs = gimple_assign_lhs (stmt);
2777 	  rhs = gimple_assign_rhs1 (stmt);
2778 	  code = gimple_assign_rhs_code (stmt);
2779 	  if (TREE_CODE (lhs) != SSA_NAME
2780 	      || has_zero_uses (lhs))
2781 	    {
2782 	      gsi_next (&gsi);
2783 	      continue;
2784 	    }
2785 
2786 	  /* If this statement sets an SSA_NAME to an address,
2787 	     try to propagate the address into the uses of the SSA_NAME.  */
2788 	  if ((code == ADDR_EXPR
2789 	       /* Handle pointer conversions on invariant addresses
2790 		  as well, as this is valid gimple.  */
2791 	       || (CONVERT_EXPR_CODE_P (code)
2792 		   && TREE_CODE (rhs) == ADDR_EXPR
2793 		   && POINTER_TYPE_P (TREE_TYPE (lhs))))
2794 	      && TREE_CODE (TREE_OPERAND (rhs, 0)) != TARGET_MEM_REF)
2795 	    {
2796 	      tree base = get_base_address (TREE_OPERAND (rhs, 0));
2797 	      if ((!base
2798 		   || !DECL_P (base)
2799 		   || decl_address_invariant_p (base))
2800 		  && !stmt_references_abnormal_ssa_name (stmt)
2801 		  && forward_propagate_addr_expr (lhs, rhs, true))
2802 		{
2803 		  fwprop_invalidate_lattice (gimple_get_lhs (stmt));
2804 		  release_defs (stmt);
2805 		  gsi_remove (&gsi, true);
2806 		}
2807 	      else
2808 		gsi_next (&gsi);
2809 	    }
2810 	  else if (code == POINTER_PLUS_EXPR)
2811 	    {
2812 	      tree off = gimple_assign_rhs2 (stmt);
2813 	      if (TREE_CODE (off) == INTEGER_CST
2814 		  && can_propagate_from (stmt)
2815 		  && !simple_iv_increment_p (stmt)
2816 		  /* ???  Better adjust the interface to that function
2817 		     instead of building new trees here.  */
2818 		  && forward_propagate_addr_expr
2819 		       (lhs,
2820 			build1_loc (gimple_location (stmt),
2821 				    ADDR_EXPR, TREE_TYPE (rhs),
2822 				    fold_build2 (MEM_REF,
2823 						 TREE_TYPE (TREE_TYPE (rhs)),
2824 						 rhs,
2825 						 fold_convert (ptr_type_node,
2826 							       off))), true))
2827 		{
2828 		  fwprop_invalidate_lattice (gimple_get_lhs (stmt));
2829 		  release_defs (stmt);
2830 		  gsi_remove (&gsi, true);
2831 		}
2832 	      else if (is_gimple_min_invariant (rhs))
2833 		{
2834 		  /* Make sure to fold &a[0] + off_1 here.  */
2835 		  fold_stmt_inplace (&gsi);
2836 		  update_stmt (stmt);
2837 		  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2838 		    gsi_next (&gsi);
2839 		}
2840 	      else
2841 		gsi_next (&gsi);
2842 	    }
2843 	  else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
2844 		   && gimple_assign_load_p (stmt)
2845 		   && !gimple_has_volatile_ops (stmt)
2846 		   && (TREE_CODE (gimple_assign_rhs1 (stmt))
2847 		       != TARGET_MEM_REF)
2848 		   && !stmt_can_throw_internal (cfun, stmt))
2849 	    {
2850 	      /* Rewrite loads used only in real/imagpart extractions to
2851 	         component-wise loads.  */
2852 	      use_operand_p use_p;
2853 	      imm_use_iterator iter;
2854 	      bool rewrite = true;
2855 	      FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
2856 		{
2857 		  gimple *use_stmt = USE_STMT (use_p);
2858 		  if (is_gimple_debug (use_stmt))
2859 		    continue;
2860 		  if (!is_gimple_assign (use_stmt)
2861 		      || (gimple_assign_rhs_code (use_stmt) != REALPART_EXPR
2862 			  && gimple_assign_rhs_code (use_stmt) != IMAGPART_EXPR)
2863 		      || TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) != lhs)
2864 		    {
2865 		      rewrite = false;
2866 		      break;
2867 		    }
2868 		}
2869 	      if (rewrite)
2870 		{
2871 		  gimple *use_stmt;
2872 		  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
2873 		    {
2874 		      if (is_gimple_debug (use_stmt))
2875 			{
2876 			  if (gimple_debug_bind_p (use_stmt))
2877 			    {
2878 			      gimple_debug_bind_reset_value (use_stmt);
2879 			      update_stmt (use_stmt);
2880 			    }
2881 			  continue;
2882 			}
2883 
2884 		      tree new_rhs = build1 (gimple_assign_rhs_code (use_stmt),
2885 					     TREE_TYPE (TREE_TYPE (rhs)),
2886 					     unshare_expr (rhs));
2887 		      gimple *new_stmt
2888 			= gimple_build_assign (gimple_assign_lhs (use_stmt),
2889 					       new_rhs);
2890 
2891 		      location_t loc = gimple_location (use_stmt);
2892 		      gimple_set_location (new_stmt, loc);
2893 		      gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
2894 		      unlink_stmt_vdef (use_stmt);
2895 		      gsi_remove (&gsi2, true);
2896 
2897 		      gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
2898 		    }
2899 
2900 		  release_defs (stmt);
2901 		  gsi_remove (&gsi, true);
2902 		}
2903 	      else
2904 		gsi_next (&gsi);
2905 	    }
2906 	  else if (TREE_CODE (TREE_TYPE (lhs)) == VECTOR_TYPE
2907 		   && TYPE_MODE (TREE_TYPE (lhs)) == BLKmode
2908 		   && gimple_assign_load_p (stmt)
2909 		   && !gimple_has_volatile_ops (stmt)
2910 		   && (TREE_CODE (gimple_assign_rhs1 (stmt))
2911 		       != TARGET_MEM_REF)
2912 		   && !stmt_can_throw_internal (cfun, stmt))
2913 	    {
2914 	      /* Rewrite loads used only in BIT_FIELD_REF extractions to
2915 	         component-wise loads.  */
2916 	      use_operand_p use_p;
2917 	      imm_use_iterator iter;
2918 	      bool rewrite = true;
2919 	      FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
2920 		{
2921 		  gimple *use_stmt = USE_STMT (use_p);
2922 		  if (is_gimple_debug (use_stmt))
2923 		    continue;
2924 		  if (!is_gimple_assign (use_stmt)
2925 		      || gimple_assign_rhs_code (use_stmt) != BIT_FIELD_REF
2926 		      || TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) != lhs)
2927 		    {
2928 		      rewrite = false;
2929 		      break;
2930 		    }
2931 		}
2932 	      if (rewrite)
2933 		{
2934 		  gimple *use_stmt;
2935 		  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
2936 		    {
2937 		      if (is_gimple_debug (use_stmt))
2938 			{
2939 			  if (gimple_debug_bind_p (use_stmt))
2940 			    {
2941 			      gimple_debug_bind_reset_value (use_stmt);
2942 			      update_stmt (use_stmt);
2943 			    }
2944 			  continue;
2945 			}
2946 
2947 		      tree bfr = gimple_assign_rhs1 (use_stmt);
2948 		      tree new_rhs = fold_build3 (BIT_FIELD_REF,
2949 						  TREE_TYPE (bfr),
2950 						  unshare_expr (rhs),
2951 						  TREE_OPERAND (bfr, 1),
2952 						  TREE_OPERAND (bfr, 2));
2953 		      gimple *new_stmt
2954 			= gimple_build_assign (gimple_assign_lhs (use_stmt),
2955 					       new_rhs);
2956 
2957 		      location_t loc = gimple_location (use_stmt);
2958 		      gimple_set_location (new_stmt, loc);
2959 		      gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
2960 		      unlink_stmt_vdef (use_stmt);
2961 		      gsi_remove (&gsi2, true);
2962 
2963 		      gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
2964 		    }
2965 
2966 		  release_defs (stmt);
2967 		  gsi_remove (&gsi, true);
2968 		}
2969 	      else
2970 		gsi_next (&gsi);
2971 	    }
2972 
2973 	  else if (code == COMPLEX_EXPR)
2974 	    {
2975 	      /* Rewrite stores of a single-use complex build expression
2976 	         to component-wise stores.  */
2977 	      use_operand_p use_p;
2978 	      gimple *use_stmt;
2979 	      if (single_imm_use (lhs, &use_p, &use_stmt)
2980 		  && gimple_store_p (use_stmt)
2981 		  && !gimple_has_volatile_ops (use_stmt)
2982 		  && is_gimple_assign (use_stmt)
2983 		  && (TREE_CODE (gimple_assign_lhs (use_stmt))
2984 		      != TARGET_MEM_REF))
2985 		{
2986 		  tree use_lhs = gimple_assign_lhs (use_stmt);
2987 		  tree new_lhs = build1 (REALPART_EXPR,
2988 					 TREE_TYPE (TREE_TYPE (use_lhs)),
2989 					 unshare_expr (use_lhs));
2990 		  gimple *new_stmt = gimple_build_assign (new_lhs, rhs);
2991 		  location_t loc = gimple_location (use_stmt);
2992 		  gimple_set_location (new_stmt, loc);
2993 		  gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
2994 		  gimple_set_vdef (new_stmt, make_ssa_name (gimple_vop (cfun)));
2995 		  SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
2996 		  gimple_set_vuse (use_stmt, gimple_vdef (new_stmt));
2997 		  gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
2998 		  gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);
2999 
3000 		  new_lhs = build1 (IMAGPART_EXPR,
3001 				    TREE_TYPE (TREE_TYPE (use_lhs)),
3002 				    unshare_expr (use_lhs));
3003 		  gimple_assign_set_lhs (use_stmt, new_lhs);
3004 		  gimple_assign_set_rhs1 (use_stmt, gimple_assign_rhs2 (stmt));
3005 		  update_stmt (use_stmt);
3006 
3007 		  release_defs (stmt);
3008 		  gsi_remove (&gsi, true);
3009 		}
3010 	      else
3011 		gsi_next (&gsi);
3012 	    }
3013 	  else if (code == CONSTRUCTOR
3014 		   && VECTOR_TYPE_P (TREE_TYPE (rhs))
3015 		   && TYPE_MODE (TREE_TYPE (rhs)) == BLKmode
3016 		   && CONSTRUCTOR_NELTS (rhs) > 0
3017 		   && (!VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (rhs, 0)->value))
3018 		       || (TYPE_MODE (TREE_TYPE (CONSTRUCTOR_ELT (rhs, 0)->value))
3019 			   != BLKmode)))
3020 	    {
3021 	      /* Rewrite stores of a single-use vector constructor
3022 	         to component-wise stores if the mode isn't supported.  */
3023 	      use_operand_p use_p;
3024 	      gimple *use_stmt;
3025 	      if (single_imm_use (lhs, &use_p, &use_stmt)
3026 		  && gimple_store_p (use_stmt)
3027 		  && !gimple_has_volatile_ops (use_stmt)
3028 		  && !stmt_can_throw_internal (cfun, use_stmt)
3029 		  && is_gimple_assign (use_stmt)
3030 		  && (TREE_CODE (gimple_assign_lhs (use_stmt))
3031 		      != TARGET_MEM_REF))
3032 		{
3033 		  tree elt_t = TREE_TYPE (CONSTRUCTOR_ELT (rhs, 0)->value);
3034 		  unsigned HOST_WIDE_INT elt_w
3035 		    = tree_to_uhwi (TYPE_SIZE (elt_t));
3036 		  unsigned HOST_WIDE_INT n
3037 		    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs)));
3038 		  for (unsigned HOST_WIDE_INT bi = 0; bi < n; bi += elt_w)
3039 		    {
3040 		      unsigned HOST_WIDE_INT ci = bi / elt_w;
3041 		      tree new_rhs;
3042 		      if (ci < CONSTRUCTOR_NELTS (rhs))
3043 			new_rhs = CONSTRUCTOR_ELT (rhs, ci)->value;
3044 		      else
3045 			new_rhs = build_zero_cst (elt_t);
3046 		      tree use_lhs = gimple_assign_lhs (use_stmt);
3047 		      tree new_lhs = build3 (BIT_FIELD_REF,
3048 					     elt_t,
3049 					     unshare_expr (use_lhs),
3050 					     bitsize_int (elt_w),
3051 					     bitsize_int (bi));
3052 		      gimple *new_stmt = gimple_build_assign (new_lhs, new_rhs);
3053 		      location_t loc = gimple_location (use_stmt);
3054 		      gimple_set_location (new_stmt, loc);
3055 		      gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
3056 		      gimple_set_vdef (new_stmt,
3057 				       make_ssa_name (gimple_vop (cfun)));
3058 		      SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
3059 		      gimple_set_vuse (use_stmt, gimple_vdef (new_stmt));
3060 		      gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
3061 		      gsi_insert_before (&gsi2, new_stmt, GSI_SAME_STMT);
3062 		    }
3063 		  gimple_stmt_iterator gsi2 = gsi_for_stmt (use_stmt);
3064 		  unlink_stmt_vdef (use_stmt);
3065 		  release_defs (use_stmt);
3066 		  gsi_remove (&gsi2, true);
3067 		  release_defs (stmt);
3068 		  gsi_remove (&gsi, true);
3069 		}
3070 	      else
3071 		gsi_next (&gsi);
3072 	    }
3073 	  else
3074 	    gsi_next (&gsi);
3075 	}
3076 
3077       /* Combine stmts with the stmts defining their operands.
3078 	 Note we update GSI within the loop as necessary.  */
3079       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3080 	{
3081 	  gimple *stmt = gsi_stmt (gsi);
3082 
3083 	  /* Mark stmt as potentially needing revisiting.  */
3084 	  gimple_set_plf (stmt, GF_PLF_1, false);
3085 
3086 	  /* Substitute from our lattice.  We need to do so only once.  */
3087 	  bool substituted_p = false;
3088 	  use_operand_p usep;
3089 	  ssa_op_iter iter;
3090 	  FOR_EACH_SSA_USE_OPERAND (usep, stmt, iter, SSA_OP_USE)
3091 	    {
3092 	      tree use = USE_FROM_PTR (usep);
3093 	      tree val = fwprop_ssa_val (use);
3094 	      if (val && val != use && may_propagate_copy (use, val))
3095 		{
3096 		  propagate_value (usep, val);
3097 		  substituted_p = true;
3098 		}
3099 	    }
3100 	  if (substituted_p
3101 	      && is_gimple_assign (stmt)
3102 	      && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
3103 	    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
3104 
3105 	  bool changed;
3106 	  do
3107 	    {
3108 	      gimple *orig_stmt = stmt = gsi_stmt (gsi);
3109 	      bool was_noreturn = (is_gimple_call (stmt)
3110 				   && gimple_call_noreturn_p (stmt));
3111 	      changed = false;
3112 
3113 	      if (fold_stmt (&gsi, fwprop_ssa_val))
3114 		{
3115 		  changed = true;
3116 		  stmt = gsi_stmt (gsi);
3117 		  /* Clean up the CFG if we simplified a condition to
3118 		     true or false.  */
3119 		  if (gcond *cond = dyn_cast <gcond *> (stmt))
3120 		    if (gimple_cond_true_p (cond)
3121 			|| gimple_cond_false_p (cond))
3122 		      cfg_changed = true;
3123 		}
3124 
3125 	      if (changed || substituted_p)
3126 		{
3127 		  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
3128 		    bitmap_set_bit (to_purge, bb->index);
3129 		  if (!was_noreturn
3130 		      && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
3131 		    to_fixup.safe_push (stmt);
3132 		  update_stmt (stmt);
3133 		  substituted_p = false;
3134 		}
3135 
3136 	      switch (gimple_code (stmt))
3137 		{
3138 		case GIMPLE_ASSIGN:
3139 		  {
3140 		    tree rhs1 = gimple_assign_rhs1 (stmt);
3141 		    enum tree_code code = gimple_assign_rhs_code (stmt);
3142 
3143 		    if (code == COND_EXPR
3144 			|| code == VEC_COND_EXPR)
3145 		      {
3146 			/* In this case the entire COND_EXPR is in rhs1.  */
3147 			if (forward_propagate_into_cond (&gsi))
3148 			  {
3149 			    changed = true;
3150 			    stmt = gsi_stmt (gsi);
3151 			  }
3152 		      }
3153 		    else if (TREE_CODE_CLASS (code) == tcc_comparison)
3154 		      {
3155 			int did_something;
3156 			did_something = forward_propagate_into_comparison (&gsi);
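			/* As for the other combiners below, a return value
			   of 2 signals that the CFG changed, any nonzero
			   value that the stmt changed.  */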
3157 			if (maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (gsi)))
3158 			  bitmap_set_bit (to_purge, bb->index);
3159 			if (did_something == 2)
3160 			  cfg_changed = true;
3161 			changed = did_something != 0;
3162 		      }
3163 		    else if ((code == PLUS_EXPR
3164 			      || code == BIT_IOR_EXPR
3165 			      || code == BIT_XOR_EXPR)
3166 			     && simplify_rotate (&gsi))
3167 		      changed = true;
3168 		    else if (code == VEC_PERM_EXPR)
3169 		      {
3170 			int did_something = simplify_permutation (&gsi);
3171 			if (did_something == 2)
3172 			  cfg_changed = true;
3173 			changed = did_something != 0;
3174 		      }
3175 		    else if (code == BIT_FIELD_REF)
3176 		      changed = simplify_bitfield_ref (&gsi);
3177 		    else if (code == CONSTRUCTOR
3178 			     && TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
3179 		      changed = simplify_vector_constructor (&gsi);
3180 		    else if (code == ARRAY_REF)
3181 		      changed = simplify_count_trailing_zeroes (&gsi);
3182 		    break;
3183 		  }
3184 
3185 		case GIMPLE_SWITCH:
3186 		  changed = simplify_gimple_switch (as_a <gswitch *> (stmt));
3187 		  break;
3188 
3189 		case GIMPLE_COND:
3190 		  {
3191 		    int did_something = forward_propagate_into_gimple_cond
3192 							(as_a <gcond *> (stmt));
3193 		    if (did_something == 2)
3194 		      cfg_changed = true;
3195 		    changed = did_something != 0;
3196 		    break;
3197 		  }
3198 
3199 		case GIMPLE_CALL:
3200 		  {
3201 		    tree callee = gimple_call_fndecl (stmt);
3202 		    if (callee != NULL_TREE
3203 			&& fndecl_built_in_p (callee, BUILT_IN_NORMAL))
3204 		      changed = simplify_builtin_call (&gsi, callee);
3205 		    break;
3206 		  }
3207 
3208 		default:;
3209 		}
3210 
3211 	      if (changed)
3212 		{
3213 		  /* If the stmt changed, then re-visit it and the statements
3214 		     inserted before it.  */
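		  /* GF_PLF_1 is still clear on the changed stmt and on any
		     stmts folding inserted before it; walk back to the last
		     already-visited stmt and resume right after it, or at
		     the start of the block if everything before is new.  */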
3215 		  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
3216 		    if (gimple_plf (gsi_stmt (gsi), GF_PLF_1))
3217 		      break;
3218 		  if (gsi_end_p (gsi))
3219 		    gsi = gsi_start_bb (bb);
3220 		  else
3221 		    gsi_next (&gsi);
3222 		}
3223 	    }
3224 	  while (changed);
3225 
3226 	  /* Stmt no longer needs to be revisited.  */
3227 	  stmt = gsi_stmt (gsi);
3228 	  gcc_checking_assert (!gimple_plf (stmt, GF_PLF_1));
3229 	  gimple_set_plf (stmt, GF_PLF_1, true);
3230 
3231 	  /* Fill up the lattice.  */
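	  /* E.g. (a made-up example) after x_1 = y_2 the lattice value of
	     x_1 becomes the lattice value of y_2, so chains of copies
	     collapse to a single representative; invariants like constants
	     are recorded the same way.  */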
3232 	  if (gimple_assign_single_p (stmt))
3233 	    {
3234 	      tree lhs = gimple_assign_lhs (stmt);
3235 	      tree rhs = gimple_assign_rhs1 (stmt);
3236 	      if (TREE_CODE (lhs) == SSA_NAME)
3237 		{
3238 		  tree val = lhs;
3239 		  if (TREE_CODE (rhs) == SSA_NAME)
3240 		    val = fwprop_ssa_val (rhs);
3241 		  else if (is_gimple_min_invariant (rhs))
3242 		    val = rhs;
3243 		  /* If we can propagate the lattice value, mark the
3244 		     stmt for removal.  */
3245 		  if (val != lhs
3246 		      && may_propagate_copy (lhs, val))
3247 		    to_remove.safe_push (stmt);
3248 		  fwprop_set_lattice_val (lhs, val);
3249 		}
3250 	    }
3251 	  else if (gimple_nop_p (stmt))
3252 	    to_remove.safe_push (stmt);
3253 	}
3254 
3255       /* Substitute in destination PHI arguments.  */
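      /* PHI arguments are uses on the outgoing edges and thus not seen by
	 the stmt walk above; replace them from the lattice here, e.g.
	 (a made-up example) turning PHI <x_1(2)> into PHI <7(2)> when the
	 lattice knows x_1 is 7.  */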
3256       edge_iterator ei;
3257       edge e;
3258       FOR_EACH_EDGE (e, ei, bb->succs)
3259 	for (gphi_iterator gsi = gsi_start_phis (e->dest);
3260 	     !gsi_end_p (gsi); gsi_next (&gsi))
3261 	  {
3262 	    gphi *phi = gsi.phi ();
3263 	    use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
3264 	    tree arg = USE_FROM_PTR (use_p);
3265 	    if (TREE_CODE (arg) != SSA_NAME
3266 		|| virtual_operand_p (arg))
3267 	      continue;
3268 	    tree val = fwprop_ssa_val (arg);
3269 	    if (val != arg
3270 		&& may_propagate_copy (arg, val))
3271 	      propagate_value (use_p, val);
3272 	  }
3273     }
3274   free (postorder);
3275   lattice.release ();
3276 
3277   /* Remove stmts in reverse order to make debug stmt creation possible.  */
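  /* Popping visits stmts in reverse of the dominator walk, i.e. uses
     before their defs, so a debug bind generated for a removed stmt's
     LHS can still refer to the SSA operands of its RHS.  */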
3278   while (!to_remove.is_empty ())
3279     {
3280       gimple *stmt = to_remove.pop ();
3281       if (dump_file && (dump_flags & TDF_DETAILS))
3282 	{
3283 	  fprintf (dump_file, "Removing dead stmt ");
3284 	  print_gimple_stmt (dump_file, stmt, 0);
3285 	  fprintf (dump_file, "\n");
3286 	}
3287       gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
3288       if (gimple_code (stmt) == GIMPLE_PHI)
3289 	remove_phi_node (&gsi, true);
3290       else
3291 	{
3292 	  unlink_stmt_vdef (stmt);
3293 	  gsi_remove (&gsi, true);
3294 	  release_defs (stmt);
3295 	}
3296     }
3297 
3298   /* Fix up stmts that became noreturn calls.  This may require splitting
3299      blocks and thus isn't possible during the walk.  Do this
3300      in reverse order so we don't inadvertently remove a stmt we want to
3301      fix up by visiting a dominating now-noreturn call first.  */
3302   while (!to_fixup.is_empty ())
3303     {
3304       gimple *stmt = to_fixup.pop ();
3305       if (dump_file && (dump_flags & TDF_DETAILS))
3306 	{
3307 	  fprintf (dump_file, "Fixing up noreturn call ");
3308 	  print_gimple_stmt (dump_file, stmt, 0);
3309 	  fprintf (dump_file, "\n");
3310 	}
3311       cfg_changed |= fixup_noreturn_call (stmt);
3312     }
3313 
3314   cfg_changed |= gimple_purge_all_dead_eh_edges (to_purge);
3315   BITMAP_FREE (to_purge);
3316 
3317   if (cfg_changed)
3318     todoflags |= TODO_cleanup_cfg;
3319 
3320   return todoflags;
3321 }
3322 
3323 } // anon namespace
3324 
3325 gimple_opt_pass *
3326 make_pass_forwprop (gcc::context *ctxt)
3327 {
3328   return new pass_forwprop (ctxt);
3329 }
3330