/* Generic SSA value propagation engine.
   Copyright (C) 2004-2019 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by the
   Free Software Foundation; either version 3, or (at your option) any
   later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "dumpfile.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "domwalk.h"
#include "cfgloop.h"
#include "tree-cfgcleanup.h"
#include "cfganal.h"

/* This file implements a generic value propagation engine based on
   the same propagation used by the SSA-CCP algorithm [1].

   Propagation is performed by simulating the execution of every
   statement that produces the value being propagated.  Simulation
   proceeds as follows:

   1- Initially, all edges of the CFG are marked not executable and
      the CFG worklist is seeded with all the statements in the entry
      basic block (block 0).

   2- Every statement S is simulated with a call to the call-back
      function SSA_PROP_VISIT_STMT.  This evaluation may produce 3
      results:

      	SSA_PROP_NOT_INTERESTING: Statement S produces nothing of
	    interest and does not affect any of the work lists.
	    The statement may be simulated again if any of its input
	    operands change in future iterations of the simulator.

	SSA_PROP_VARYING: The value produced by S cannot be determined
	    at compile time.  Further simulation of S is not required.
	    If S is a conditional jump, all the outgoing edges for the
	    block are considered executable and added to the work
	    list.

	SSA_PROP_INTERESTING: S produces a value that can be computed
	    at compile time.  Its result can be propagated into the
	    statements that feed from S.  Furthermore, if S is a
	    conditional jump, only the edge known to be taken is added
	    to the work list.  Edges that are known not to execute are
	    never simulated.

   3- PHI nodes are simulated with a call to SSA_PROP_VISIT_PHI.  The
      return value from SSA_PROP_VISIT_PHI has the same semantics as
      described in #2.

   4- Two work lists are kept.  Statements are only added to these
      lists if they produce one of SSA_PROP_INTERESTING or
      SSA_PROP_VARYING.

   	CFG_BLOCKS contains the list of blocks to be simulated.
	    Blocks are added to this list if their incoming edges are
	    found executable.

	SSA_EDGE_WORKLIST contains the list of statements that we
	    need to revisit.

   5- Simulation terminates when both work lists are drained.

   Before calling ssa_propagate, it is important to set
   prop_simulate_again_p for all the statements in the program that
   should be simulated.  This initialization allows an implementation
   to specify which statements should never be simulated.

   It is also important to compute def-use information before calling
   ssa_propagate.

   References:

     [1] Constant propagation with conditional branches,
         Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     [2] Building an Optimizing Compiler,
	 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     [3] Advanced Compiler Design and Implementation,
	 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
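
/* Illustrative sketch only (not part of the engine): a client is
   expected to subclass ssa_propagation_engine, override the two
   virtual visit hooks, mark the statements it wants simulated, and
   then run the propagator.  The class and function names below are
   hypothetical placeholders; a real pass (e.g. CCP) would also mark
   PHI nodes and seed its own lattice.

     class my_prop_engine : public ssa_propagation_engine
     {
     public:
       enum ssa_prop_result visit_stmt (gimple *, edge *, tree *)
	 FINAL OVERRIDE;
       enum ssa_prop_result visit_phi (gphi *) FINAL OVERRIDE;
     };

     static void
     my_pass_execute (void)
     {
       basic_block bb;
       FOR_EACH_BB_FN (bb, cfun)
	 for (gimple_stmt_iterator si = gsi_start_bb (bb);
	      !gsi_end_p (si); gsi_next (&si))
	   prop_set_simulate_again (gsi_stmt (si), true);
       my_prop_engine engine;
       engine.ssa_propagate ();
     }  */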

/* Worklists of control flow edge destinations.  These contain the
   CFG order numbers of the blocks so we can iterate in CFG order by
   visiting in bit-order.  We use two worklists to first make forward
   progress before iterating.  */
static bitmap cfg_blocks;
static bitmap cfg_blocks_back;
static int *bb_to_cfg_order;
static int *cfg_order_to_bb;

/* Worklists of SSA edges which will need reexamination as their
   definition has changed.  SSA edges are def-use edges in the SSA
   web.  For each D-U edge, we store the target statement or PHI node
   UID in a bitmap.  UIDs order stmts in execution order.  We use
   two worklists to first make forward progress before iterating.  */
static bitmap ssa_edge_worklist;
static bitmap ssa_edge_worklist_back;
static vec<gimple *> uid_to_stmt;

/* Current RPO index in the iteration.  */
static int curr_order;


/* We have just defined a new value for VAR.  Add all immediate uses
   of VAR to the SSA edge worklist (or, for uses that occur before
   the current RPO position, to the backedge worklist).  */

static void
add_ssa_edge (tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      gimple *use_stmt = USE_STMT (use_p);
      if (!prop_simulate_again_p (use_stmt))
	continue;

      /* If we did not yet simulate the block, wait for this to happen
         and do not add the stmt to the SSA edge worklist.  */
      basic_block use_bb = gimple_bb (use_stmt);
      if (! (use_bb->flags & BB_VISITED))
	continue;

      /* If this is a use on a not yet executable edge do not bother to
	 queue it.  */
      if (gimple_code (use_stmt) == GIMPLE_PHI
	  && !(EDGE_PRED (use_bb, PHI_ARG_INDEX_FROM_USE (use_p))->flags
	       & EDGE_EXECUTABLE))
	continue;

      bitmap worklist;
      if (bb_to_cfg_order[gimple_bb (use_stmt)->index] < curr_order)
	worklist = ssa_edge_worklist_back;
      else
	worklist = ssa_edge_worklist;
      if (bitmap_set_bit (worklist, gimple_uid (use_stmt)))
	{
	  uid_to_stmt[gimple_uid (use_stmt)] = use_stmt;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "ssa_edge_worklist: adding SSA use in ");
	      print_gimple_stmt (dump_file, use_stmt, 0, TDF_SLIM);
	    }
	}
    }
}


/* Add edge E to the control flow worklist.  */

static void
add_control_edge (edge e)
{
  basic_block bb = e->dest;
  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;

  /* If the edge had already been marked executable, skip it.  */
  if (e->flags & EDGE_EXECUTABLE)
    return;

  e->flags |= EDGE_EXECUTABLE;

  int bb_order = bb_to_cfg_order[bb->index];
  if (bb_order < curr_order)
    bitmap_set_bit (cfg_blocks_back, bb_order);
  else
    bitmap_set_bit (cfg_blocks, bb_order);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Adding destination of edge (%d -> %d) to worklist\n",
	e->src->index, e->dest->index);
}


/* Simulate the execution of STMT and update the work lists accordingly.  */

void
ssa_propagation_engine::simulate_stmt (gimple *stmt)
{
  enum ssa_prop_result val = SSA_PROP_NOT_INTERESTING;
  edge taken_edge = NULL;
  tree output_name = NULL_TREE;

  /* Pull the stmt off the SSA edge worklist.  */
  bitmap_clear_bit (ssa_edge_worklist, gimple_uid (stmt));

  /* Don't bother visiting statements that are already
     considered varying by the propagator.  */
  if (!prop_simulate_again_p (stmt))
    return;

  if (gimple_code (stmt) == GIMPLE_PHI)
    {
      val = visit_phi (as_a <gphi *> (stmt));
      output_name = gimple_phi_result (stmt);
    }
  else
    val = visit_stmt (stmt, &taken_edge, &output_name);

  if (val == SSA_PROP_VARYING)
    {
      prop_set_simulate_again (stmt, false);

      /* If the statement produced a new varying value, add the SSA
	 edges coming out of OUTPUT_NAME.  */
      if (output_name)
	add_ssa_edge (output_name);

      /* If STMT transfers control out of its basic block, add
	 all outgoing edges to the work list.  */
      if (stmt_ends_bb_p (stmt))
	{
	  edge e;
	  edge_iterator ei;
	  basic_block bb = gimple_bb (stmt);
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    add_control_edge (e);
	}
      return;
    }
  else if (val == SSA_PROP_INTERESTING)
    {
      /* If the statement produced a new value, add the SSA edges coming
	 out of OUTPUT_NAME.  */
      if (output_name)
	add_ssa_edge (output_name);

      /* If we know which edge is going to be taken out of this block,
	 add it to the CFG work list.  */
      if (taken_edge)
	add_control_edge (taken_edge);
    }

  /* If there are no SSA uses on the stmt whose defs are simulated
     again then this stmt will never be visited again.  */
  bool has_simulate_again_uses = false;
  use_operand_p use_p;
  ssa_op_iter iter;
  if (gimple_code (stmt) == GIMPLE_PHI)
    {
      edge_iterator ei;
      edge e;
      tree arg;
      FOR_EACH_EDGE (e, ei, gimple_bb (stmt)->preds)
	if (!(e->flags & EDGE_EXECUTABLE)
	    || ((arg = PHI_ARG_DEF_FROM_EDGE (stmt, e))
		&& TREE_CODE (arg) == SSA_NAME
		&& !SSA_NAME_IS_DEFAULT_DEF (arg)
		&& prop_simulate_again_p (SSA_NAME_DEF_STMT (arg))))
	  {
	    has_simulate_again_uses = true;
	    break;
	  }
    }
  else
    FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
      {
	gimple *def_stmt = SSA_NAME_DEF_STMT (USE_FROM_PTR (use_p));
	if (!gimple_nop_p (def_stmt)
	    && prop_simulate_again_p (def_stmt))
	  {
	    has_simulate_again_uses = true;
	    break;
	  }
      }
  if (!has_simulate_again_uses)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "marking stmt to be not simulated again\n");
      prop_set_simulate_again (stmt, false);
    }
}


/* Simulate the execution of BLOCK.  Evaluate the statement associated
   with each variable reference inside the block.  */

void
ssa_propagation_engine::simulate_block (basic_block block)
{
  gimple_stmt_iterator gsi;

  /* There is nothing to do for the exit block.  */
  if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSimulating block %d\n", block->index);

  /* Always simulate PHI nodes, even if we have simulated this block
     before.  */
  for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
    simulate_stmt (gsi_stmt (gsi));

  /* If this is the first time we've simulated this block, then we
     must simulate each of its statements.  */
  if (! (block->flags & BB_VISITED))
    {
      gimple_stmt_iterator j;
      unsigned int normal_edge_count;
      edge e, normal_edge;
      edge_iterator ei;

      for (j = gsi_start_bb (block); !gsi_end_p (j); gsi_next (&j))
	simulate_stmt (gsi_stmt (j));

      /* Note that we have simulated this block.  */
      block->flags |= BB_VISITED;

      /* We cannot predict when abnormal and EH edges will be executed, so
	 once a block is considered executable, we consider any
	 outgoing abnormal edges as executable.

	 TODO: This is not exactly true.  Simplifying a statement might
	 prove it non-throwing, and a computed goto can be handled when
	 the destination is known.

	 At the same time, if this block has only one successor that is
	 reached by non-abnormal edges, then add that successor to the
	 worklist.  */
      normal_edge_count = 0;
      normal_edge = NULL;
      FOR_EACH_EDGE (e, ei, block->succs)
	{
	  if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
	    add_control_edge (e);
	  else
	    {
	      normal_edge_count++;
	      normal_edge = e;
	    }
	}

      if (normal_edge_count == 1)
	add_control_edge (normal_edge);
    }
}


/* Initialize local data structures and work lists.  */

static void
ssa_prop_init (void)
{
  edge e;
  edge_iterator ei;
  basic_block bb;

  /* Worklists of SSA edges.  */
  ssa_edge_worklist = BITMAP_ALLOC (NULL);
  ssa_edge_worklist_back = BITMAP_ALLOC (NULL);
  bitmap_tree_view (ssa_edge_worklist);
  bitmap_tree_view (ssa_edge_worklist_back);

  /* Worklist of basic-blocks.  */
  bb_to_cfg_order = XNEWVEC (int, last_basic_block_for_fn (cfun) + 1);
  cfg_order_to_bb = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
  int n = pre_and_rev_post_order_compute_fn (cfun, NULL,
					     cfg_order_to_bb, false);
  for (int i = 0; i < n; ++i)
    bb_to_cfg_order[cfg_order_to_bb[i]] = i;
  cfg_blocks = BITMAP_ALLOC (NULL);
  cfg_blocks_back = BITMAP_ALLOC (NULL);

  /* Initially assume that every edge in the CFG is not executable
     (including the edges coming out of the entry block).  Mark blocks
     as not visited; blocks not yet visited will have all their statements
     simulated once an incoming edge becomes executable.  */
  set_gimple_stmt_max_uid (cfun, 0);
  for (int i = 0; i < n; ++i)
    {
      gimple_stmt_iterator si;
      bb = BASIC_BLOCK_FOR_FN (cfun, cfg_order_to_bb[i]);

      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  gimple *stmt = gsi_stmt (si);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}

      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  gimple *stmt = gsi_stmt (si);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}

      bb->flags &= ~BB_VISITED;
      FOR_EACH_EDGE (e, ei, bb->succs)
	e->flags &= ~EDGE_EXECUTABLE;
    }
  uid_to_stmt.safe_grow (gimple_stmt_max_uid (cfun));

  /* Seed the algorithm by adding the successors of the entry block to the
     edge worklist.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    {
      e->flags &= ~EDGE_EXECUTABLE;
      add_control_edge (e);
    }
}


/* Free allocated storage.  */

static void
ssa_prop_fini (void)
{
  BITMAP_FREE (cfg_blocks);
  BITMAP_FREE (cfg_blocks_back);
  free (bb_to_cfg_order);
  free (cfg_order_to_bb);
  BITMAP_FREE (ssa_edge_worklist);
  BITMAP_FREE (ssa_edge_worklist_back);
  uid_to_stmt.release ();
}


/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.c
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning any
   attempt to fold if the usual folding turns out to be too
   aggressive.  */

bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
	return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_comparison:
      /* GENERIC allows comparisons with non-boolean types; reject
	 those for GIMPLE.  Let vector-typed comparisons pass, as the
	 rules for GENERIC and GIMPLE are the same here.  */
      if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
	    && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
		|| TYPE_PRECISION (TREE_TYPE (expr)) == 1))
	  && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
	return false;

      /* Fallthru.  */
    case tcc_binary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
	return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
	return false;
      break;

    case tcc_expression:
      switch (code)
        {
        case ADDR_EXPR:
          {
	    tree t;
	    if (is_gimple_min_invariant (expr))
	      return true;
            t = TREE_OPERAND (expr, 0);
            while (handled_component_p (t))
              {
                /* ??? More checks needed, see the GIMPLE verifier.  */
                if ((TREE_CODE (t) == ARRAY_REF
                     || TREE_CODE (t) == ARRAY_RANGE_REF)
                    && !is_gimple_val (TREE_OPERAND (t, 1)))
                  return false;
                t = TREE_OPERAND (t, 0);
              }
            if (!is_gimple_id (t))
              return false;
          }
          break;

	default:
	  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
	    {
	      if (((code == VEC_COND_EXPR || code == COND_EXPR)
		   ? !is_gimple_condexpr (TREE_OPERAND (expr, 0))
		   : !is_gimple_val (TREE_OPERAND (expr, 0)))
		  || !is_gimple_val (TREE_OPERAND (expr, 1))
		  || !is_gimple_val (TREE_OPERAND (expr, 2)))
		return false;
	      break;
	    }
	  return false;
	}
      break;

    case tcc_vl_exp:
      return false;

    case tcc_exceptional:
      if (code == CONSTRUCTOR)
	{
	  unsigned i;
	  tree elt;
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
	    if (!is_gimple_val (elt))
	      return false;
	  return true;
	}
      if (code != SSA_NAME)
        return false;
      break;

    case tcc_reference:
      if (code == BIT_FIELD_REF)
	return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}
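
/* For illustration (these examples are not exercised here): with
   SSA names a_1, b_2, c_3 and i_1, "a_1 + b_2" is a valid GIMPLE RHS,
   while "a_1 + b_2 * c_3" is not, because the multiplication is an
   embedded operand that would need its own statement.  Likewise
   "&x[i_1 + 1]" is rejected because the array index is not a GIMPLE
   value, which is exactly the fold-const.c situation mentioned in the
   comment above.  */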


/* Return true if EXPR is a CALL_EXPR suitable for representation
   as a single GIMPLE_CALL statement.  If the arguments require
   further gimplification, return false.  */

static bool
valid_gimple_call_p (tree expr)
{
  unsigned i, nargs;

  if (TREE_CODE (expr) != CALL_EXPR)
    return false;

  nargs = call_expr_nargs (expr);
  for (i = 0; i < nargs; i++)
    {
      tree arg = CALL_EXPR_ARG (expr, i);
      if (is_gimple_reg_type (TREE_TYPE (arg)))
	{
	  if (!is_gimple_val (arg))
	    return false;
	}
      else
	if (!is_gimple_lvalue (arg))
	  return false;
    }

  return true;
}


/* Make SSA names defined by OLD_STMT point to NEW_STMT
   as their defining statement.  */

void
move_ssa_defining_stmt_for_defs (gimple *new_stmt, gimple *old_stmt)
{
  tree var;
  ssa_op_iter iter;

  if (gimple_in_ssa_p (cfun))
    {
      /* Make defined SSA_NAMEs point to the new
         statement as their definition.  */
      FOR_EACH_SSA_TREE_OPERAND (var, old_stmt, iter, SSA_OP_ALL_DEFS)
        {
          if (TREE_CODE (var) == SSA_NAME)
            SSA_NAME_DEF_STMT (var) = new_stmt;
        }
    }
}

/* Helper function for update_gimple_call and update_call_from_tree.
   A GIMPLE_CALL STMT is being replaced with GIMPLE_CALL NEW_STMT.  */

static void
finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
			   gimple *stmt)
{
  gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
  move_ssa_defining_stmt_for_defs (new_stmt, stmt);
  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
  gimple_set_location (new_stmt, gimple_location (stmt));
  if (gimple_block (new_stmt) == NULL_TREE)
    gimple_set_block (new_stmt, gimple_block (stmt));
  gsi_replace (si_p, new_stmt, false);
}

/* Update a GIMPLE_CALL statement at iterator *SI_P to a call to FN
   with NARGS arguments.  The arguments, which must be in valid GIMPLE
   form, follow the NARGS argument.  */

bool
update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
{
  va_list ap;
  gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));

  gcc_assert (is_gimple_call (stmt));
  va_start (ap, nargs);
  new_stmt = gimple_build_call_valist (fn, nargs, ap);
  finish_update_gimple_call (si_p, new_stmt, stmt);
  va_end (ap);
  return true;
}

/* Update a GIMPLE_CALL statement at iterator *SI_P to reflect the
   value of EXPR, which is expected to be the result of folding the
   call.  This can only be done if EXPR is a CALL_EXPR with valid
   GIMPLE operands as arguments, or if it is a suitable RHS expression
   for a GIMPLE_ASSIGN.  More complex expressions will require
   gimplification, which will introduce additional statements.  In this
   event, no update is performed, and the function returns false.
   Note that we cannot mutate a GIMPLE_CALL in-place, so we always
   replace the statement at *SI_P with an entirely new statement.
   The new statement need not be a call, e.g., if the original call
   folded to a constant.  */

bool
update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      vec<tree> args = vNULL;
      gcall *new_stmt;

      if (nargs > 0)
        {
          args.create (nargs);
          args.safe_grow_cleared (nargs);

          for (i = 0; i < nargs; i++)
            args[i] = CALL_EXPR_ARG (expr, i);
        }

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      args.release ();

      return true;
    }
  else if (valid_gimple_rhs_p (expr))
    {
      tree lhs = gimple_call_lhs (stmt);
      gimple *new_stmt;

      /* The call has simplified to an expression
         that cannot be represented as a GIMPLE_CALL.  */
      if (lhs)
        {
          /* A value is expected.
             Introduce a new GIMPLE_ASSIGN statement.  */
          STRIP_USELESS_TYPE_CONVERSION (expr);
          new_stmt = gimple_build_assign (lhs, expr);
          move_ssa_defining_stmt_for_defs (new_stmt, stmt);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
        }
      else if (!TREE_SIDE_EFFECTS (expr))
        {
          /* No value is expected, and EXPR has no effect.
             Replace it with an empty statement.  */
          new_stmt = gimple_build_nop ();
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
        }
      else
        {
          /* No value is expected, but EXPR has an effect,
             e.g., it could be a reference to a volatile
             variable.  Create an assignment statement
             with a dummy (unused) lhs variable.  */
          STRIP_USELESS_TYPE_CONVERSION (expr);
	  if (gimple_in_ssa_p (cfun))
	    lhs = make_ssa_name (TREE_TYPE (expr));
	  else
	    lhs = create_tmp_var (TREE_TYPE (expr));
          new_stmt = gimple_build_assign (lhs, expr);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
          move_ssa_defining_stmt_for_defs (new_stmt, stmt);
        }
      gimple_set_location (new_stmt, gimple_location (stmt));
      gsi_replace (si_p, new_stmt, false);
      return true;
    }
  else
    /* The call simplified to an expression that is
       not a valid GIMPLE RHS.  */
    return false;
}
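
/* Usage sketch (illustrative, not called here): if constant folding
   determines that the call in "x_1 = __builtin_popcount (4);" has the
   value 1, a pass positioned at the call could do roughly

     tree val = build_int_cst (integer_type_node, 1);
     bool ok = update_call_from_tree (&gsi, val);

   after which the GIMPLE_CALL has been replaced by the assignment
   "x_1 = 1;".  The integer constant is a valid GIMPLE RHS, so OK is
   true; an expression needing gimplification would leave the
   statement untouched and return false.  */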

/* Entry point to the propagation engine.

   The VISIT_STMT virtual function is called for every statement
   visited and the VISIT_PHI virtual function is called for every PHI
   node visited.  */

void
ssa_propagation_engine::ssa_propagate (void)
{
  ssa_prop_init ();

  curr_order = 0;

  /* Iterate until the worklists are empty.  We iterate both blocks
     and stmts in RPO order, using sets of two worklists to first
     complete the current iteration before iterating over backedges.  */
  while (1)
    {
      int next_block_order = (bitmap_empty_p (cfg_blocks)
			      ? -1 : bitmap_first_set_bit (cfg_blocks));
      int next_stmt_uid = (bitmap_empty_p (ssa_edge_worklist)
			   ? -1 : bitmap_first_set_bit (ssa_edge_worklist));
      if (next_block_order == -1 && next_stmt_uid == -1)
	{
	  if (bitmap_empty_p (cfg_blocks_back)
	      && bitmap_empty_p (ssa_edge_worklist_back))
	    break;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Regular worklists empty, now processing "
		     "backedge destinations\n");
	  std::swap (cfg_blocks, cfg_blocks_back);
	  std::swap (ssa_edge_worklist, ssa_edge_worklist_back);
	  continue;
	}

      int next_stmt_bb_order = -1;
      gimple *next_stmt = NULL;
      if (next_stmt_uid != -1)
	{
	  next_stmt = uid_to_stmt[next_stmt_uid];
	  next_stmt_bb_order = bb_to_cfg_order[gimple_bb (next_stmt)->index];
	}

      /* Pull the next block to simulate off the worklist if it comes first.  */
      if (next_block_order != -1
	  && (next_stmt_bb_order == -1
	      || next_block_order <= next_stmt_bb_order))
	{
	  curr_order = next_block_order;
	  bitmap_clear_bit (cfg_blocks, next_block_order);
	  basic_block bb
	    = BASIC_BLOCK_FOR_FN (cfun, cfg_order_to_bb [next_block_order]);
	  simulate_block (bb);
	}
      /* Else simulate from the SSA edge worklist.  */
      else
	{
	  curr_order = next_stmt_bb_order;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\nSimulating statement: ");
	      print_gimple_stmt (dump_file, next_stmt, 0, dump_flags);
	    }
	  simulate_stmt (next_stmt);
	}
    }

  ssa_prop_fini ();
}
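
/* Worked example of the two-worklist scheme (illustrative only): for
   a loop

     bb2 -> bb3 -> bb4 -> bb3   (the last edge is the backedge)

   the RPO order is bb2, bb3, bb4.  While simulating bb4 we have
   curr_order == 2; the backedge destination bb3 has order 1 < 2, so
   add_control_edge queues it on cfg_blocks_back rather than
   cfg_blocks.  Only when both regular worklists drain do the
   std::swap calls above promote the backedge work, so each RPO sweep
   completes before the next iteration starts.  */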


/* Return true if STMT is of the form 'mem_ref = RHS', where 'mem_ref'
   is a non-volatile pointer dereference, a structure reference or a
   reference to a single _DECL.  Ignore volatile memory references
   because they are not interesting for the optimizers.  */

bool
stmt_makes_single_store (gimple *stmt)
{
  tree lhs;

  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_CALL)
    return false;

  if (!gimple_vdef (stmt))
    return false;

  lhs = gimple_get_lhs (stmt);

  /* A call statement may have a null LHS.  */
  if (!lhs)
    return false;

  return (!TREE_THIS_VOLATILE (lhs)
          && (DECL_P (lhs)
	      || REFERENCE_CLASS_P (lhs)));
}
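
/* For example (illustrative): "*p_1 = x_2" and "a.f = x_2" are single
   stores (the statement has a virtual definition and a memory LHS),
   while a register assignment "x_1 = y_2" has no VDEF and a volatile
   store such as "*q_1 ={v} x_2" is deliberately ignored.  */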


/* Propagation statistics.  */
struct prop_stats_d
{
  long num_const_prop;
  long num_copy_prop;
  long num_stmts_folded;
  long num_dce;
};

static struct prop_stats_d prop_stats;

/* Replace USE references in statement STMT with the values returned
   by get_value.  Return true if at least one reference was replaced.  */

bool
substitute_and_fold_engine::replace_uses_in (gimple *stmt)
{
  bool replaced = false;
  use_operand_p use;
  ssa_op_iter iter;

  FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      tree tuse = USE_FROM_PTR (use);
      tree val = get_value (tuse);

      if (val == tuse || val == NULL_TREE)
	continue;

      if (gimple_code (stmt) == GIMPLE_ASM
	  && !may_propagate_copy_into_asm (tuse))
	continue;

      if (!may_propagate_copy (tuse, val))
	continue;

      if (TREE_CODE (val) != SSA_NAME)
	prop_stats.num_const_prop++;
      else
	prop_stats.num_copy_prop++;

      propagate_value (use, val);

      replaced = true;
    }

  return replaced;
}


/* Replace the arguments of PHI with the values returned by get_value.
   Return true if any argument was replaced.  */

bool
substitute_and_fold_engine::replace_phi_args_in (gphi *phi)
{
  size_t i;
  bool replaced = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Folding PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);

      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree val = get_value (arg);

	  if (val && val != arg && may_propagate_copy (arg, val))
	    {
	      edge e = gimple_phi_arg_edge (phi, i);

	      if (TREE_CODE (val) != SSA_NAME)
		prop_stats.num_const_prop++;
	      else
		prop_stats.num_copy_prop++;

	      propagate_value (PHI_ARG_DEF_PTR (phi, i), val);
	      replaced = true;

	      /* If we propagated a copy and this argument flows
		 through an abnormal edge, update the replacement
		 accordingly.  */
	      if (TREE_CODE (val) == SSA_NAME
		  && e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (val));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (!replaced)
	fprintf (dump_file, "No folding possible\n");
      else
	{
	  fprintf (dump_file, "Folded into: ");
	  print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }

  return replaced;
}


class substitute_and_fold_dom_walker : public dom_walker
{
public:
    substitute_and_fold_dom_walker (cdi_direction direction,
				    class substitute_and_fold_engine *engine)
	: dom_walker (direction),
          something_changed (false),
	  substitute_and_fold_engine (engine)
    {
      stmts_to_remove.create (0);
      stmts_to_fixup.create (0);
      need_eh_cleanup = BITMAP_ALLOC (NULL);
    }
    ~substitute_and_fold_dom_walker ()
    {
      stmts_to_remove.release ();
      stmts_to_fixup.release ();
      BITMAP_FREE (need_eh_cleanup);
    }

    virtual edge before_dom_children (basic_block);
    virtual void after_dom_children (basic_block) {}

    bool something_changed;
    vec<gimple *> stmts_to_remove;
    vec<gimple *> stmts_to_fixup;
    bitmap need_eh_cleanup;

    class substitute_and_fold_engine *substitute_and_fold_engine;
};

edge
substitute_and_fold_dom_walker::before_dom_children (basic_block bb)
{
  /* Propagate known values into PHI nodes.  */
  for (gphi_iterator i = gsi_start_phis (bb);
       !gsi_end_p (i);
       gsi_next (&i))
    {
      gphi *phi = i.phi ();
      tree res = gimple_phi_result (phi);
      if (virtual_operand_p (res))
	continue;
      if (res && TREE_CODE (res) == SSA_NAME)
	{
	  tree sprime = substitute_and_fold_engine->get_value (res);
	  if (sprime
	      && sprime != res
	      && may_propagate_copy (res, sprime))
	    {
	      stmts_to_remove.safe_push (phi);
	      continue;
	    }
	}
      something_changed |= substitute_and_fold_engine->replace_phi_args_in (phi);
    }

  /* Propagate known values into stmts.  In some cases it exposes
     more trivially deletable stmts to walk backward.  */
  for (gimple_stmt_iterator i = gsi_start_bb (bb);
       !gsi_end_p (i);
       gsi_next (&i))
    {
      bool did_replace;
      gimple *stmt = gsi_stmt (i);

      /* There is no point propagating into a stmt whose LHS we have a
	 value for that we can propagate into all uses.  Mark it for
	 removal instead.  */
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && TREE_CODE (lhs) == SSA_NAME)
	{
	  tree sprime = substitute_and_fold_engine->get_value (lhs);
	  if (sprime
	      && sprime != lhs
	      && may_propagate_copy (lhs, sprime)
	      && !stmt_could_throw_p (cfun, stmt)
	      && !gimple_has_side_effects (stmt)
	      /* We have to leave ASSERT_EXPRs around for jump-threading.  */
	      && (!is_gimple_assign (stmt)
		  || gimple_assign_rhs_code (stmt) != ASSERT_EXPR))
	    {
	      stmts_to_remove.safe_push (stmt);
	      continue;
	    }
	}

      /* Replace the statement with its folded version and mark it
	 folded.  */
      did_replace = false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Folding statement: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	}

      gimple *old_stmt = stmt;
      bool was_noreturn = (is_gimple_call (stmt)
			   && gimple_call_noreturn_p (stmt));

      /* Replace real uses in the statement.  */
      did_replace |= substitute_and_fold_engine->replace_uses_in (stmt);

      /* If we made a replacement, fold the statement.  */
      if (did_replace)
	{
	  fold_stmt (&i, follow_single_use_edges);
	  stmt = gsi_stmt (i);
	  gimple_set_modified (stmt, true);
	}

      /* Some statements may be simplified using propagator
	 specific information.  Do this before propagating
	 into the stmt to not disturb pass specific information.  */
      update_stmt_if_modified (stmt);
      if (substitute_and_fold_engine->fold_stmt (&i))
	{
	  did_replace = true;
	  prop_stats.num_stmts_folded++;
	  stmt = gsi_stmt (i);
	  gimple_set_modified (stmt, true);
	}

      /* If this is a control statement on which the propagator left
	 some edges unexecuted, force the condition in a way consistent
	 with that.  See PR66945 for cases where the propagator can end
	 up with a different idea of a taken edge than folding
	 (once undefined behavior is involved).  */
      if (gimple_code (stmt) == GIMPLE_COND)
	{
	  if ((EDGE_SUCC (bb, 0)->flags & EDGE_EXECUTABLE)
	      ^ (EDGE_SUCC (bb, 1)->flags & EDGE_EXECUTABLE))
	    {
	      if (((EDGE_SUCC (bb, 0)->flags & EDGE_TRUE_VALUE) != 0)
		  == ((EDGE_SUCC (bb, 0)->flags & EDGE_EXECUTABLE) != 0))
		gimple_cond_make_true (as_a <gcond *> (stmt));
	      else
		gimple_cond_make_false (as_a <gcond *> (stmt));
	      gimple_set_modified (stmt, true);
	      did_replace = true;
	    }
	}

      /* Now cleanup.  */
      if (did_replace)
	{
	  /* If we cleaned up EH information from the statement,
	     remove EH edges.  */
	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
	    bitmap_set_bit (need_eh_cleanup, bb->index);

	  /* If we turned a call that was not noreturn into a noreturn
	     one, schedule it for fixup.  */
	  if (!was_noreturn
	      && is_gimple_call (stmt)
	      && gimple_call_noreturn_p (stmt))
	    stmts_to_fixup.safe_push (stmt);

	  if (gimple_assign_single_p (stmt))
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);

	      if (TREE_CODE (rhs) == ADDR_EXPR)
		recompute_tree_invariant_for_addr_expr (rhs);
	    }

	  /* Determine what needs to be done to update the SSA form.  */
	  update_stmt_if_modified (stmt);
	  if (!is_gimple_debug (stmt))
	    something_changed = true;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  if (did_replace)
	    {
	      fprintf (dump_file, "Folded into: ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }
	  else
	    fprintf (dump_file, "Not folded\n");
	}
    }
  return NULL;
}



/* Perform final substitution and folding of propagated values.
   Process the whole function if BLOCK is null, otherwise only
   process the blocks that BLOCK dominates.  In the latter case,
   it is the caller's responsibility to ensure that dominator
   information is available and up-to-date.

   Uses of SSA names are replaced with the values returned by the
   engine's get_value method, the engine's fold_stmt hook is invoked
   on the statements for pass specific simplification, and statements
   that become trivially dead are removed.

   Return TRUE when something changed.  */

bool
substitute_and_fold_engine::substitute_and_fold (basic_block block)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSubstituting values and folding statements\n\n");

  memset (&prop_stats, 0, sizeof (prop_stats));

  /* Don't call calculate_dominance_info when iterating over a subgraph.
     Callers that are using the interface this way are likely to want to
     iterate over several disjoint subgraphs, and it would be expensive
     in enable-checking builds to revalidate the whole dominance tree
     each time.  */
  if (block)
    gcc_assert (dom_info_state (CDI_DOMINATORS));
  else
    calculate_dominance_info (CDI_DOMINATORS);
  substitute_and_fold_dom_walker walker (CDI_DOMINATORS, this);
  walker.walk (block ? block : ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* We cannot remove stmts during the BB walk, especially not release
     SSA names there as that destroys the lattice of our callers.
     Remove stmts in reverse order to make debug stmt creation possible.  */
  while (!walker.stmts_to_remove.is_empty ())
    {
      gimple *stmt = walker.stmts_to_remove.pop ();
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Removing dead stmt ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}
      prop_stats.num_dce++;
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
	remove_phi_node (&gsi, true);
      else
	{
	  unlink_stmt_vdef (stmt);
	  gsi_remove (&gsi, true);
	  release_defs (stmt);
	}
    }

  if (!bitmap_empty_p (walker.need_eh_cleanup))
    gimple_purge_all_dead_eh_edges (walker.need_eh_cleanup);

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!walker.stmts_to_fixup.is_empty ())
    {
      gimple *stmt = walker.stmts_to_fixup.pop ();
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Fixing up noreturn call ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}
      fixup_noreturn_call (stmt);
    }

  statistics_counter_event (cfun, "Constants propagated",
			    prop_stats.num_const_prop);
  statistics_counter_event (cfun, "Copies propagated",
			    prop_stats.num_copy_prop);
  statistics_counter_event (cfun, "Statements folded",
			    prop_stats.num_stmts_folded);
  statistics_counter_event (cfun, "Statements deleted",
			    prop_stats.num_dce);

  return walker.something_changed;
}


/* Return true if we may propagate ORIG into DEST, false otherwise.  */

bool
may_propagate_copy (tree dest, tree orig)
{
  tree type_d = TREE_TYPE (dest);
  tree type_o = TREE_TYPE (orig);

  /* If ORIG is a default definition which flows in from an abnormal edge
     then the copy can be propagated.  It is important that we do so to avoid
     uninitialized copies.  */
  if (TREE_CODE (orig) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig)
      && SSA_NAME_IS_DEFAULT_DEF (orig)
      && (SSA_NAME_VAR (orig) == NULL_TREE
	  || TREE_CODE (SSA_NAME_VAR (orig)) == VAR_DECL))
    ;
  /* Otherwise if ORIG just flows in from an abnormal edge then the copy cannot
     be propagated.  */
  else if (TREE_CODE (orig) == SSA_NAME
	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
    return false;
  /* Similarly if DEST flows in from an abnormal edge then the copy cannot be
     propagated.  */
  else if (TREE_CODE (dest) == SSA_NAME
	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (dest))
    return false;

  /* Do not copy between types for which we *do* need a conversion.  */
  if (!useless_type_conversion_p (type_d, type_o))
    return false;

  /* Generally propagating virtual operands is not ok as that may
     create overlapping life-ranges.  */
  if (TREE_CODE (dest) == SSA_NAME && virtual_operand_p (dest))
    return false;

  /* Anything else is OK.  */
  return true;
}
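
/* Example (illustrative): given the copy "x_3 = y_2;" where y_2
   appears in a PHI node reached through an abnormal edge (say, from a
   setjmp receiver) and is not a default definition,
   may_propagate_copy (x_3, y_2) returns false: replacing uses of x_3
   by y_2 could extend y_2's live range across the abnormal edge,
   where its value is not well defined.  */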

/* Like may_propagate_copy, but use as the destination expression
   the principal expression (typically, the RHS) contained in
   statement DEST.  This is more efficient when working with the
   gimple tuples representation.  */

bool
may_propagate_copy_into_stmt (gimple *dest, tree orig)
{
  tree type_d;
  tree type_o;

  /* If the statement is a switch or a single-rhs assignment,
     then the expression to be replaced by the propagation may
     be an SSA_NAME.  Fortunately, there is an explicit tree
     for the expression, so we delegate to may_propagate_copy.  */

  if (gimple_assign_single_p (dest))
    return may_propagate_copy (gimple_assign_rhs1 (dest), orig);
  else if (gswitch *dest_swtch = dyn_cast <gswitch *> (dest))
    return may_propagate_copy (gimple_switch_index (dest_swtch), orig);

  /* In other cases, the expression is not materialized, so there
     is no destination to pass to may_propagate_copy.  On the other
     hand, the expression cannot be an SSA_NAME, so the analysis
     is much simpler.  */

  if (TREE_CODE (orig) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
    return false;

  if (is_gimple_assign (dest))
    type_d = TREE_TYPE (gimple_assign_lhs (dest));
  else if (gimple_code (dest) == GIMPLE_COND)
    type_d = boolean_type_node;
  else if (is_gimple_call (dest)
           && gimple_call_lhs (dest) != NULL_TREE)
    type_d = TREE_TYPE (gimple_call_lhs (dest));
  else
    gcc_unreachable ();

  type_o = TREE_TYPE (orig);

  if (!useless_type_conversion_p (type_d, type_o))
    return false;

  return true;
}

/* Similarly, but we know that we're propagating into an ASM_EXPR.  */

bool
may_propagate_copy_into_asm (tree dest ATTRIBUTE_UNUSED)
{
  return true;
}


/* Common code for propagate_value and replace_exp.

   Replace use operand OP_P with VAL.  FOR_PROPAGATION indicates if the
   replacement is done to propagate a value or not.  */

static void
replace_exp_1 (use_operand_p op_p, tree val,
	       bool for_propagation ATTRIBUTE_UNUSED)
{
  if (flag_checking)
    {
      tree op = USE_FROM_PTR (op_p);
      gcc_assert (!(for_propagation
		  && TREE_CODE (op) == SSA_NAME
		  && TREE_CODE (val) == SSA_NAME
		  && !may_propagate_copy (op, val)));
    }

  if (TREE_CODE (val) == SSA_NAME)
    SET_USE (op_p, val);
  else
    SET_USE (op_p, unshare_expr (val));
}


/* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
   into the operand pointed to by OP_P.

   Use this version for const/copy propagation as it will perform additional
   checks to ensure validity of the const/copy propagation.  */

void
propagate_value (use_operand_p op_p, tree val)
{
  replace_exp_1 (op_p, val, true);
}

/* Replace *OP_P with value VAL (assumed to be a constant or another SSA_NAME).

   Use this version when not const/copy propagating values.  For example,
   PRE uses this version when building expressions as they would appear
   in specific blocks taking into account actions of PHI nodes.

   The statement in which an expression has been replaced should be
   folded using fold_stmt_inplace.  */

void
replace_exp (use_operand_p op_p, tree val)
{
  replace_exp_1 (op_p, val, false);
}


/* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
   into the tree pointed to by OP_P.

   Use this version for const/copy propagation when SSA operands are not
   available.  It will perform the additional checks to ensure validity of
   the const/copy propagation, but will not update any operand information.
   Be sure to mark the stmt as modified.  */

void
propagate_tree_value (tree *op_p, tree val)
{
  if (TREE_CODE (val) == SSA_NAME)
    *op_p = val;
  else
    *op_p = unshare_expr (val);
}


/* Like propagate_tree_value, but use as the operand to replace
   the principal expression (typically, the RHS) contained in the
   statement referenced by iterator GSI.  Note that it is not
   always possible to update the statement in-place, so a new
   statement may be created to replace the original.  */

void
propagate_tree_value_into_stmt (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);

  if (is_gimple_assign (stmt))
    {
      tree expr = NULL_TREE;
      if (gimple_assign_single_p (stmt))
        expr = gimple_assign_rhs1 (stmt);
      propagate_tree_value (&expr, val);
      gimple_assign_set_rhs_from_tree (gsi, expr);
    }
  else if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      tree lhs = NULL_TREE;
      tree rhs = build_zero_cst (TREE_TYPE (val));
      propagate_tree_value (&lhs, val);
      gimple_cond_set_code (cond_stmt, NE_EXPR);
      gimple_cond_set_lhs (cond_stmt, lhs);
      gimple_cond_set_rhs (cond_stmt, rhs);
    }
  else if (is_gimple_call (stmt)
           && gimple_call_lhs (stmt) != NULL_TREE)
    {
      tree expr = NULL_TREE;
      bool res;
      propagate_tree_value (&expr, val);
      res = update_call_from_tree (gsi, expr);
      gcc_assert (res);
    }
  else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
    propagate_tree_value (gimple_switch_index_ptr (swtch_stmt), val);
  else
    gcc_unreachable ();
}
1463