/* Generic SSA value propagation engine.
   Copyright (C) 2004-2017 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by the
   Free Software Foundation; either version 3, or (at your option) any
   later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "dumpfile.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "domwalk.h"
#include "cfgloop.h"
#include "tree-cfgcleanup.h"
#include "cfganal.h"

/* This file implements a generic value propagation engine based on
   the same propagation used by the SSA-CCP algorithm [1].

   Propagation is performed by simulating the execution of every
   statement that produces the value being propagated.  Simulation
   proceeds as follows:

   1- Initially, all edges of the CFG are marked not executable and
      the CFG worklist is seeded with all the statements in the entry
      basic block (block 0).

   2- Every statement S is simulated with a call to the call-back
      function SSA_PROP_VISIT_STMT.  This evaluation may produce 3
      results:

	SSA_PROP_NOT_INTERESTING: Statement S produces nothing of
	    interest and does not affect any of the work lists.
	    The statement may be simulated again if any of its input
	    operands change in future iterations of the simulator.

	SSA_PROP_VARYING: The value produced by S cannot be determined
	    at compile time.  Further simulation of S is not required.
	    If S is a conditional jump, all the outgoing edges for the
	    block are considered executable and added to the work
	    list.

	SSA_PROP_INTERESTING: S produces a value that can be computed
	    at compile time.  Its result can be propagated into the
	    statements that feed from S.  Furthermore, if S is a
	    conditional jump, only the edge known to be taken is added
	    to the work list.  Edges that are known not to execute are
	    never simulated.

   3- PHI nodes are simulated with a call to SSA_PROP_VISIT_PHI.  The
      return value from SSA_PROP_VISIT_PHI has the same semantics as
      described in #2.

   4- Two work lists are kept.  Statements are only added to these
      lists if they produce one of SSA_PROP_INTERESTING or
      SSA_PROP_VARYING.

	CFG_BLOCKS contains the list of blocks to be simulated.
	    Blocks are added to this list if their incoming edges are
	    found executable.

	SSA_EDGE_WORKLIST contains the list of statements that we
	    need to revisit.

   5- Simulation terminates when both work lists are drained.

   Before calling ssa_propagate, it is important to initialize
   prop_simulate_again_p for every statement in the program: set it
   for statements that should be simulated and clear it for those
   that should never be simulated.

   It is also important to compute def-use information before calling
   ssa_propagate.

   References:

     [1] Constant propagation with conditional branches,
	 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     [2] Building an Optimizing Compiler,
	 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     [3] Advanced Compiler Design and Implementation,
	 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */

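/* Illustrative skeleton (a hypothetical pass, not part of this file)
   of how a client wires the engine up.  Real clients such as CCP
   follow this shape.

     static enum ssa_prop_result
     example_visit_stmt (gimple *stmt, edge *taken_edge_p, tree *output_p)
     {
       // Evaluate STMT in the pass-specific lattice.  On
       // SSA_PROP_INTERESTING, set *OUTPUT_P to the SSA name whose
       // value changed and, for conditionals, *TAKEN_EDGE_P to the
       // single outgoing edge known to be taken.
       return SSA_PROP_NOT_INTERESTING;
     }

     static enum ssa_prop_result
     example_visit_phi (gphi *phi)
     {
       // Merge the lattice values of the arguments on executable edges.
       return SSA_PROP_NOT_INTERESTING;
     }

   After initializing prop_simulate_again_p on every statement, the
   pass would run ssa_propagate (example_visit_stmt, example_visit_phi).  */
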
/* Function pointers used to parameterize the propagation engine.  */
static ssa_prop_visit_stmt_fn ssa_prop_visit_stmt;
static ssa_prop_visit_phi_fn ssa_prop_visit_phi;

/* Worklist of control flow edge destinations.  This contains
   the CFG order number of the blocks so we can iterate in CFG
   order by visiting in bit-order.  */
static bitmap cfg_blocks;
static int *bb_to_cfg_order;
static int *cfg_order_to_bb;

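/* Illustrative example (an assumed CFG, not taken from this file): if
   blocks 5, 3 and 7 receive reverse post-order numbers 1, 2 and 4,
   then after cfg_blocks_add on all three, repeated calls to
   bitmap_first_set_bit return 1, 2, 4, so cfg_blocks_get hands back
   blocks 5, 3 and 7 in CFG order rather than in block-index order.  */
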
/* Worklist of SSA edges which will need reexamination as their
   definition has changed.  SSA edges are def-use edges in the SSA
   web.  For each D-U edge, we store the target statement or PHI node
   UID in a bitmap.  UIDs order stmts in execution order.  */
static bitmap ssa_edge_worklist;
static vec<gimple *> uid_to_stmt;

/* Return true if the block worklist is empty.  */

static inline bool
cfg_blocks_empty_p (void)
{
  return bitmap_empty_p (cfg_blocks);
}


/* Add a basic block to the worklist.  The block must not be the ENTRY
   or EXIT block.  */

static void
cfg_blocks_add (basic_block bb)
{
  gcc_assert (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	      && bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
  bitmap_set_bit (cfg_blocks, bb_to_cfg_order[bb->index]);
}


/* Remove the first block in CFG order from the worklist and return it.  */

static basic_block
cfg_blocks_get (void)
{
  gcc_assert (!cfg_blocks_empty_p ());
  int order_index = bitmap_first_set_bit (cfg_blocks);
  bitmap_clear_bit (cfg_blocks, order_index);
  return BASIC_BLOCK_FOR_FN (cfun, cfg_order_to_bb [order_index]);
}


/* We have just defined a new value for VAR.  Add all immediate uses
   of VAR to the SSA edge worklist.  */

static void
add_ssa_edge (tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      gimple *use_stmt = USE_STMT (use_p);

      /* If we have not yet simulated the block, wait for this to
	 happen and do not add the stmt to the SSA edge worklist.  */
      if (! (gimple_bb (use_stmt)->flags & BB_VISITED))
	continue;

      if (prop_simulate_again_p (use_stmt)
	  && bitmap_set_bit (ssa_edge_worklist, gimple_uid (use_stmt)))
	{
	  uid_to_stmt[gimple_uid (use_stmt)] = use_stmt;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "ssa_edge_worklist: adding SSA use in ");
	      print_gimple_stmt (dump_file, use_stmt, 0, TDF_SLIM);
	    }
	}
    }
}


/* Add edge E to the control flow worklist.  */

static void
add_control_edge (edge e)
{
  basic_block bb = e->dest;
  if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;

  /* If the edge had already been executed, skip it.  */
  if (e->flags & EDGE_EXECUTABLE)
    return;

  e->flags |= EDGE_EXECUTABLE;

  cfg_blocks_add (bb);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Adding destination of edge (%d -> %d) to worklist\n",
	e->src->index, e->dest->index);
}


/* Simulate the execution of STMT and update the work lists accordingly.  */

static void
simulate_stmt (gimple *stmt)
{
  enum ssa_prop_result val = SSA_PROP_NOT_INTERESTING;
  edge taken_edge = NULL;
  tree output_name = NULL_TREE;

  /* Pull the stmt off the SSA edge worklist.  */
  bitmap_clear_bit (ssa_edge_worklist, gimple_uid (stmt));

  /* Don't bother visiting statements that are already
     considered varying by the propagator.  */
  if (!prop_simulate_again_p (stmt))
    return;

  if (gimple_code (stmt) == GIMPLE_PHI)
    {
      val = ssa_prop_visit_phi (as_a <gphi *> (stmt));
      output_name = gimple_phi_result (stmt);
    }
  else
    val = ssa_prop_visit_stmt (stmt, &taken_edge, &output_name);

  if (val == SSA_PROP_VARYING)
    {
      prop_set_simulate_again (stmt, false);

      /* If the statement produced a new varying value, add the SSA
	 edges coming out of OUTPUT_NAME.  */
      if (output_name)
	add_ssa_edge (output_name);

      /* If STMT transfers control out of its basic block, add
	 all outgoing edges to the work list.  */
      if (stmt_ends_bb_p (stmt))
	{
	  edge e;
	  edge_iterator ei;
	  basic_block bb = gimple_bb (stmt);
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    add_control_edge (e);
	}
      return;
    }
  else if (val == SSA_PROP_INTERESTING)
    {
      /* If the statement produced a new value, add the SSA edges coming
	 out of OUTPUT_NAME.  */
      if (output_name)
	add_ssa_edge (output_name);

      /* If we know which edge is going to be taken out of this block,
	 add it to the CFG work list.  */
      if (taken_edge)
	add_control_edge (taken_edge);
    }

  /* If there are no SSA uses on the stmt whose defs are simulated
     again then this stmt will never be visited again.  */
  bool has_simulate_again_uses = false;
  use_operand_p use_p;
  ssa_op_iter iter;
  if (gimple_code (stmt) == GIMPLE_PHI)
    {
      edge_iterator ei;
      edge e;
      tree arg;
      FOR_EACH_EDGE (e, ei, gimple_bb (stmt)->preds)
	if (!(e->flags & EDGE_EXECUTABLE)
	    || ((arg = PHI_ARG_DEF_FROM_EDGE (stmt, e))
		&& TREE_CODE (arg) == SSA_NAME
		&& !SSA_NAME_IS_DEFAULT_DEF (arg)
		&& prop_simulate_again_p (SSA_NAME_DEF_STMT (arg))))
	  {
	    has_simulate_again_uses = true;
	    break;
	  }
    }
  else
    FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
      {
	gimple *def_stmt = SSA_NAME_DEF_STMT (USE_FROM_PTR (use_p));
	if (!gimple_nop_p (def_stmt)
	    && prop_simulate_again_p (def_stmt))
	  {
	    has_simulate_again_uses = true;
	    break;
	  }
      }
  if (!has_simulate_again_uses)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "marking stmt to be not simulated again\n");
      prop_set_simulate_again (stmt, false);
    }
}

/* Process one entry from the SSA edge worklist: pop the statement
   with the lowest UID (i.e. the one that comes earliest in execution
   order) off SSA_EDGE_WORKLIST and simulate it.  The caller must
   ensure the worklist is not empty.  */

static void
process_ssa_edge_worklist ()
{
  /* Process the next entry from the worklist.  */
  unsigned stmt_uid = bitmap_first_set_bit (ssa_edge_worklist);
  bitmap_clear_bit (ssa_edge_worklist, stmt_uid);
  gimple *stmt = uid_to_stmt[stmt_uid];

  /* We should not have stmts in not yet simulated BBs on the worklist.  */
  gcc_assert (gimple_bb (stmt)->flags & BB_VISITED);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nSimulating statement: ");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  simulate_stmt (stmt);
}


/* Simulate the execution of BLOCK.  Evaluate the statement associated
   with each variable reference inside the block.  */

static void
simulate_block (basic_block block)
{
  gimple_stmt_iterator gsi;

  /* There is nothing to do for the exit block.  */
  if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSimulating block %d\n", block->index);

  /* Always simulate PHI nodes, even if we have simulated this block
     before.  */
  for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
    simulate_stmt (gsi_stmt (gsi));

  /* If this is the first time we've simulated this block, then we
     must simulate each of its statements.  */
  if (! (block->flags & BB_VISITED))
    {
      gimple_stmt_iterator j;
      unsigned int normal_edge_count;
      edge e, normal_edge;
      edge_iterator ei;

      for (j = gsi_start_bb (block); !gsi_end_p (j); gsi_next (&j))
	simulate_stmt (gsi_stmt (j));

      /* Note that we have simulated this block.  */
      block->flags |= BB_VISITED;

      /* We cannot predict when abnormal and EH edges will be executed, so
	 once a block is considered executable, we consider any
	 outgoing abnormal edges as executable.

	 TODO: This is not exactly true.  Simplifying a statement might
	 prove it non-throwing, and a computed goto can be handled when
	 its destination is known.

	 At the same time, if this block has only one successor that is
	 reached by non-abnormal edges, then add that successor to the
	 worklist.  */
      normal_edge_count = 0;
      normal_edge = NULL;
      FOR_EACH_EDGE (e, ei, block->succs)
	{
	  if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
	    add_control_edge (e);
	  else
	    {
	      normal_edge_count++;
	      normal_edge = e;
	    }
	}

      if (normal_edge_count == 1)
	add_control_edge (normal_edge);
    }
}


/* Initialize local data structures and work lists.  */

static void
ssa_prop_init (void)
{
  edge e;
  edge_iterator ei;
  basic_block bb;

  /* Worklists of SSA edges.  */
  ssa_edge_worklist = BITMAP_ALLOC (NULL);

  /* Worklist of basic-blocks.  */
  bb_to_cfg_order = XNEWVEC (int, last_basic_block_for_fn (cfun) + 1);
  cfg_order_to_bb = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
  int n = pre_and_rev_post_order_compute_fn (cfun, NULL,
					     cfg_order_to_bb, false);
  for (int i = 0; i < n; ++i)
    bb_to_cfg_order[cfg_order_to_bb[i]] = i;
  cfg_blocks = BITMAP_ALLOC (NULL);

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_immediate_uses (dump_file);

  /* Initially assume that every edge in the CFG is not executable,
     including the edges coming out of the entry block.  Mark blocks
     as not visited; blocks not yet visited will have all their statements
     simulated once an incoming edge becomes executable.  */
  set_gimple_stmt_max_uid (cfun, 0);
  for (int i = 0; i < n; ++i)
    {
      gimple_stmt_iterator si;
      bb = BASIC_BLOCK_FOR_FN (cfun, cfg_order_to_bb[i]);

      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  gimple *stmt = gsi_stmt (si);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}

      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  gimple *stmt = gsi_stmt (si);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	}

      bb->flags &= ~BB_VISITED;
      FOR_EACH_EDGE (e, ei, bb->succs)
	e->flags &= ~EDGE_EXECUTABLE;
    }
  uid_to_stmt.safe_grow (gimple_stmt_max_uid (cfun));

  /* Seed the algorithm by adding the successors of the entry block to the
     edge worklist.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    {
      e->flags &= ~EDGE_EXECUTABLE;
      add_control_edge (e);
    }
}


/* Free allocated storage.  */

static void
ssa_prop_fini (void)
{
  BITMAP_FREE (cfg_blocks);
  free (bb_to_cfg_order);
  free (cfg_order_to_bb);
  BITMAP_FREE (ssa_edge_worklist);
  uid_to_stmt.release ();
}


/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.c
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning any
   attempt to fold if the usual folding turns out to be too
   aggressive.  */

bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
	return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_comparison:
      /* GENERIC allows comparisons with non-boolean types; reject
	 those for GIMPLE.  Let vector-typed comparisons pass - rules
	 for GENERIC and GIMPLE are the same here.  */
      if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
	    && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
		|| TYPE_PRECISION (TREE_TYPE (expr)) == 1))
	  && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
	return false;

      /* Fallthru.  */
    case tcc_binary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
	return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
	return false;
      break;

    case tcc_expression:
      switch (code)
	{
	case ADDR_EXPR:
	  {
	    tree t;
	    if (is_gimple_min_invariant (expr))
	      return true;
	    t = TREE_OPERAND (expr, 0);
	    while (handled_component_p (t))
	      {
		/* ??? More checks needed, see the GIMPLE verifier.  */
		if ((TREE_CODE (t) == ARRAY_REF
		     || TREE_CODE (t) == ARRAY_RANGE_REF)
		    && !is_gimple_val (TREE_OPERAND (t, 1)))
		  return false;
		t = TREE_OPERAND (t, 0);
	      }
	    if (!is_gimple_id (t))
	      return false;
	  }
	  break;

	default:
	  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
	    {
	      if (((code == VEC_COND_EXPR || code == COND_EXPR)
		   ? !is_gimple_condexpr (TREE_OPERAND (expr, 0))
		   : !is_gimple_val (TREE_OPERAND (expr, 0)))
		  || !is_gimple_val (TREE_OPERAND (expr, 1))
		  || !is_gimple_val (TREE_OPERAND (expr, 2)))
		return false;
	      break;
	    }
	  return false;
	}
      break;

    case tcc_vl_exp:
      return false;

    case tcc_exceptional:
      if (code == CONSTRUCTOR)
	{
	  unsigned i;
	  tree elt;
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
	    if (!is_gimple_val (elt))
	      return false;
	  return true;
	}
      if (code != SSA_NAME)
	return false;
      break;

    case tcc_reference:
      if (code == BIT_FIELD_REF)
	return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}

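/* Illustrative example (hypothetical trees, not from the sources):
   fold-const.c may hand back &a[i_1 + 1], an ADDR_EXPR whose ARRAY_REF
   index is the compound expression i_1 + 1.  Since i_1 + 1 is not a
   gimple value, the ADDR_EXPR case above rejects it; the caller must
   gimplify the index into a separate statement or give up on that
   folding.  */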

/* Return true if EXPR is a CALL_EXPR suitable for representation
   as a single GIMPLE_CALL statement.  If the arguments require
   further gimplification, return false.  */

static bool
valid_gimple_call_p (tree expr)
{
  unsigned i, nargs;

  if (TREE_CODE (expr) != CALL_EXPR)
    return false;

  nargs = call_expr_nargs (expr);
  for (i = 0; i < nargs; i++)
    {
      tree arg = CALL_EXPR_ARG (expr, i);
      if (is_gimple_reg_type (TREE_TYPE (arg)))
	{
	  if (!is_gimple_val (arg))
	    return false;
	}
      else
	if (!is_gimple_lvalue (arg))
	  return false;
    }

  return true;
}


/* Make SSA names defined by OLD_STMT point to NEW_STMT
   as their defining statement.  */

void
move_ssa_defining_stmt_for_defs (gimple *new_stmt, gimple *old_stmt)
{
  tree var;
  ssa_op_iter iter;

  if (gimple_in_ssa_p (cfun))
    {
      /* Make defined SSA_NAMEs point to the new
	 statement as their definition.  */
      FOR_EACH_SSA_TREE_OPERAND (var, old_stmt, iter, SSA_OP_ALL_DEFS)
	{
	  if (TREE_CODE (var) == SSA_NAME)
	    SSA_NAME_DEF_STMT (var) = new_stmt;
	}
    }
}

/* Helper function for update_gimple_call and update_call_from_tree.
   A GIMPLE_CALL STMT is being replaced with GIMPLE_CALL NEW_STMT.  */

static void
finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
			   gimple *stmt)
{
  gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
  move_ssa_defining_stmt_for_defs (new_stmt, stmt);
  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
  gimple_set_location (new_stmt, gimple_location (stmt));
  if (gimple_block (new_stmt) == NULL_TREE)
    gimple_set_block (new_stmt, gimple_block (stmt));
  gsi_replace (si_p, new_stmt, false);
}

/* Update a GIMPLE_CALL statement at iterator *SI_P to call FN with
   NARGS arguments.  The actual arguments, in GIMPLE form, follow the
   NARGS argument.  */

bool
update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
{
  va_list ap;
  gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));

  gcc_assert (is_gimple_call (stmt));
  va_start (ap, nargs);
  new_stmt = gimple_build_call_valist (fn, nargs, ap);
  finish_update_gimple_call (si_p, new_stmt, stmt);
  va_end (ap);
  return true;
}

/* Update a GIMPLE_CALL statement at iterator *SI_P to reflect the
   value of EXPR, which is expected to be the result of folding the
   call.  This can only be done if EXPR is a CALL_EXPR with valid
   GIMPLE operands as arguments, or if it is a suitable RHS expression
   for a GIMPLE_ASSIGN.  More complex expressions will require
   gimplification, which will introduce additional statements.  In this
   event, no update is performed, and the function returns false.
   Note that we cannot mutate a GIMPLE_CALL in-place, so we always
   replace the statement at *SI_P with an entirely new statement.
   The new statement need not be a call, e.g., if the original call
   folded to a constant.  */

bool
update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      vec<tree> args = vNULL;
      gcall *new_stmt;

      if (nargs > 0)
	{
	  args.create (nargs);
	  args.safe_grow_cleared (nargs);

	  for (i = 0; i < nargs; i++)
	    args[i] = CALL_EXPR_ARG (expr, i);
	}

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      args.release ();

      return true;
    }
  else if (valid_gimple_rhs_p (expr))
    {
      tree lhs = gimple_call_lhs (stmt);
      gimple *new_stmt;

      /* The call has simplified to an expression
	 that cannot be represented as a GIMPLE_CALL.  */
      if (lhs)
	{
	  /* A value is expected.
	     Introduce a new GIMPLE_ASSIGN statement.  */
	  STRIP_USELESS_TYPE_CONVERSION (expr);
	  new_stmt = gimple_build_assign (lhs, expr);
	  move_ssa_defining_stmt_for_defs (new_stmt, stmt);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	}
      else if (!TREE_SIDE_EFFECTS (expr))
	{
	  /* No value is expected, and EXPR has no effect.
	     Replace it with an empty statement.  */
	  new_stmt = gimple_build_nop ();
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	}
      else
	{
	  /* No value is expected, but EXPR has an effect,
	     e.g., it could be a reference to a volatile
	     variable.  Create an assignment statement
	     with a dummy (unused) lhs variable.  */
	  STRIP_USELESS_TYPE_CONVERSION (expr);
	  if (gimple_in_ssa_p (cfun))
	    lhs = make_ssa_name (TREE_TYPE (expr));
	  else
	    lhs = create_tmp_var (TREE_TYPE (expr));
	  new_stmt = gimple_build_assign (lhs, expr);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  move_ssa_defining_stmt_for_defs (new_stmt, stmt);
	}
      gimple_set_location (new_stmt, gimple_location (stmt));
      gsi_replace (si_p, new_stmt, false);
      return true;
    }
  else
    /* The call simplified to an expression that is
       not a valid GIMPLE RHS.  */
    return false;
}

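/* Illustrative use (hypothetical, not from this file): a pass that has
   folded a call down to the constant 5 could update the statement via

     tree expr = build_int_cst (integer_type_node, 5);
     if (!update_call_from_tree (&gsi, expr))
       ;  // EXPR would need gimplification; handle it another way.

   which replaces the GIMPLE_CALL with "lhs = 5" when the call has an
   LHS.  */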

/* Entry point to the propagation engine.

   VISIT_STMT is called for every statement visited.
   VISIT_PHI is called for every PHI node visited.  */

void
ssa_propagate (ssa_prop_visit_stmt_fn visit_stmt,
	       ssa_prop_visit_phi_fn visit_phi)
{
  ssa_prop_visit_stmt = visit_stmt;
  ssa_prop_visit_phi = visit_phi;

  ssa_prop_init ();

  /* Iterate until the worklists are empty.  */
  while (! cfg_blocks_empty_p ()
	 || ! bitmap_empty_p (ssa_edge_worklist))
    {
      /* First simulate whole blocks.  */
      if (! cfg_blocks_empty_p ())
	{
	  /* Pull the next block to simulate off the worklist.  */
	  basic_block dest_block = cfg_blocks_get ();
	  simulate_block (dest_block);
	  continue;
	}

      /* Then simulate from the SSA edge worklist.  */
      process_ssa_edge_worklist ();
    }

  ssa_prop_fini ();
}

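/* For reference, a concrete client in this tree: tree-ssa-ccp.c drives
   the engine with ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node)
   after ccp_initialize has set up the lattice and marked
   prop_simulate_again_p on the statements of interest.  */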

/* Return true if STMT is of the form 'mem_ref = RHS', where 'mem_ref'
   is a non-volatile pointer dereference, a structure reference or a
   reference to a single _DECL.  Ignore volatile memory references
   because they are not interesting for the optimizers.  */

bool
stmt_makes_single_store (gimple *stmt)
{
  tree lhs;

  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_CALL)
    return false;

  if (!gimple_vdef (stmt))
    return false;

  lhs = gimple_get_lhs (stmt);

  /* A call statement may have a null LHS.  */
  if (!lhs)
    return false;

  return (!TREE_THIS_VOLATILE (lhs)
	  && (DECL_P (lhs)
	      || REFERENCE_CLASS_P (lhs)));
}


/* Propagation statistics.  */
struct prop_stats_d
{
  long num_const_prop;
  long num_copy_prop;
  long num_stmts_folded;
  long num_dce;
};

static struct prop_stats_d prop_stats;

/* Replace USE references in statement STMT with the values returned
   by GET_VALUE.  Return true if at least one reference was replaced.  */

bool
replace_uses_in (gimple *stmt, ssa_prop_get_value_fn get_value)
{
  bool replaced = false;
  use_operand_p use;
  ssa_op_iter iter;

  FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      tree tuse = USE_FROM_PTR (use);
      tree val = (*get_value) (tuse);

      if (val == tuse || val == NULL_TREE)
	continue;

      if (gimple_code (stmt) == GIMPLE_ASM
	  && !may_propagate_copy_into_asm (tuse))
	continue;

      if (!may_propagate_copy (tuse, val))
	continue;

      if (TREE_CODE (val) != SSA_NAME)
	prop_stats.num_const_prop++;
      else
	prop_stats.num_copy_prop++;

      propagate_value (use, val);

      replaced = true;
    }

  return replaced;
}


/* Propagate values into all the arguments of PHI using the values
   returned by GET_VALUE.  Return true if at least one argument was
   replaced.  */

static bool
replace_phi_args_in (gphi *phi, ssa_prop_get_value_fn get_value)
{
  size_t i;
  bool replaced = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Folding PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree arg = gimple_phi_arg_def (phi, i);

      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree val = (*get_value) (arg);

	  if (val && val != arg && may_propagate_copy (arg, val))
	    {
	      edge e = gimple_phi_arg_edge (phi, i);

	      if (TREE_CODE (val) != SSA_NAME)
		prop_stats.num_const_prop++;
	      else
		prop_stats.num_copy_prop++;

	      propagate_value (PHI_ARG_DEF_PTR (phi, i), val);
	      replaced = true;

	      /* If we propagated a copy and this argument flows
		 through an abnormal edge, update the replacement
		 accordingly.  */
	      if (TREE_CODE (val) == SSA_NAME
		  && e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (val));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (!replaced)
	fprintf (dump_file, "No folding possible\n");
      else
	{
	  fprintf (dump_file, "Folded into: ");
	  print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }

  return replaced;
}


class substitute_and_fold_dom_walker : public dom_walker
{
public:
    substitute_and_fold_dom_walker (cdi_direction direction,
				    ssa_prop_get_value_fn get_value_fn_,
				    ssa_prop_fold_stmt_fn fold_fn_)
	: dom_walker (direction), get_value_fn (get_value_fn_),
      fold_fn (fold_fn_), something_changed (false)
    {
      stmts_to_remove.create (0);
      stmts_to_fixup.create (0);
      need_eh_cleanup = BITMAP_ALLOC (NULL);
    }
    ~substitute_and_fold_dom_walker ()
    {
      stmts_to_remove.release ();
      stmts_to_fixup.release ();
      BITMAP_FREE (need_eh_cleanup);
    }

    virtual edge before_dom_children (basic_block);
    virtual void after_dom_children (basic_block) {}

    ssa_prop_get_value_fn get_value_fn;
    ssa_prop_fold_stmt_fn fold_fn;
    bool something_changed;
    vec<gimple *> stmts_to_remove;
    vec<gimple *> stmts_to_fixup;
    bitmap need_eh_cleanup;
};

edge
substitute_and_fold_dom_walker::before_dom_children (basic_block bb)
{
  /* Propagate known values into PHI nodes.  */
  for (gphi_iterator i = gsi_start_phis (bb);
       !gsi_end_p (i);
       gsi_next (&i))
    {
      gphi *phi = i.phi ();
      tree res = gimple_phi_result (phi);
      if (virtual_operand_p (res))
	continue;
      if (res && TREE_CODE (res) == SSA_NAME)
	{
	  tree sprime = get_value_fn (res);
	  if (sprime
	      && sprime != res
	      && may_propagate_copy (res, sprime))
	    {
	      stmts_to_remove.safe_push (phi);
	      continue;
	    }
	}
      something_changed |= replace_phi_args_in (phi, get_value_fn);
    }

  /* Propagate known values into stmts.  In some cases this exposes
     more trivially deletable stmts to walk backward.  */
  for (gimple_stmt_iterator i = gsi_start_bb (bb);
       !gsi_end_p (i);
       gsi_next (&i))
    {
      bool did_replace;
      gimple *stmt = gsi_stmt (i);

      /* There is no point propagating into a stmt whose LHS we have a
	 value for, since we can propagate that value into all of its
	 uses.  Mark the stmt for removal instead.  */
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && TREE_CODE (lhs) == SSA_NAME)
	{
	  tree sprime = get_value_fn (lhs);
	  if (sprime
	      && sprime != lhs
	      && may_propagate_copy (lhs, sprime)
	      && !stmt_could_throw_p (stmt)
	      && !gimple_has_side_effects (stmt)
	      /* We have to leave ASSERT_EXPRs around for jump-threading.  */
	      && (!is_gimple_assign (stmt)
		  || gimple_assign_rhs_code (stmt) != ASSERT_EXPR))
	    {
	      stmts_to_remove.safe_push (stmt);
	      continue;
	    }
	}

      /* Replace the statement with its folded version and mark it
	 folded.  */
      did_replace = false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Folding statement: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	}

      gimple *old_stmt = stmt;
      bool was_noreturn = (is_gimple_call (stmt)
			   && gimple_call_noreturn_p (stmt));

      /* Replace real uses in the statement.  */
      did_replace |= replace_uses_in (stmt, get_value_fn);

      /* If we made a replacement, fold the statement.  */
      if (did_replace)
	{
	  fold_stmt (&i, follow_single_use_edges);
	  stmt = gsi_stmt (i);
	  gimple_set_modified (stmt, true);
	}

      /* Some statements may be simplified using propagator
	 specific information.  */
      if (fold_fn)
	{
	  update_stmt_if_modified (stmt);
	  if ((*fold_fn) (&i))
	    {
	      did_replace = true;
	      prop_stats.num_stmts_folded++;
	      stmt = gsi_stmt (i);
	      gimple_set_modified (stmt, true);
	    }
	}

      /* If this is a control statement on which the propagator left
	 some edges unexecuted, force the condition in a way consistent
	 with that.  See PR66945 for cases where the propagator can end
	 up with a different idea of a taken edge than folding
	 (once undefined behavior is involved).  */
      if (gimple_code (stmt) == GIMPLE_COND)
	{
	  if ((EDGE_SUCC (bb, 0)->flags & EDGE_EXECUTABLE)
	      ^ (EDGE_SUCC (bb, 1)->flags & EDGE_EXECUTABLE))
	    {
	      if (((EDGE_SUCC (bb, 0)->flags & EDGE_TRUE_VALUE) != 0)
		  == ((EDGE_SUCC (bb, 0)->flags & EDGE_EXECUTABLE) != 0))
		gimple_cond_make_true (as_a <gcond *> (stmt));
	      else
		gimple_cond_make_false (as_a <gcond *> (stmt));
	      gimple_set_modified (stmt, true);
	      did_replace = true;
	    }
	}

      /* Now cleanup.  */
      if (did_replace)
	{
	  /* If we cleaned up EH information from the statement,
	     remove EH edges.  */
	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
	    bitmap_set_bit (need_eh_cleanup, bb->index);

	  /* If we turned a non-noreturn call into a noreturn one,
	     schedule it for fixup.  */
	  if (!was_noreturn
	      && is_gimple_call (stmt)
	      && gimple_call_noreturn_p (stmt))
	    stmts_to_fixup.safe_push (stmt);

	  if (gimple_assign_single_p (stmt))
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);

	      if (TREE_CODE (rhs) == ADDR_EXPR)
		recompute_tree_invariant_for_addr_expr (rhs);
	    }

	  /* Determine what needs to be done to update the SSA form.  */
	  update_stmt_if_modified (stmt);
	  if (!is_gimple_debug (stmt))
	    something_changed = true;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  if (did_replace)
	    {
	      fprintf (dump_file, "Folded into: ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }
	  else
	    fprintf (dump_file, "Not folded\n");
	}
    }
  return NULL;
}


/* Perform final substitution and folding of propagated values.

   GET_VALUE_FN returns, for each SSA name N, the single value that
   should be substituted at every use of N, or NULL_TREE if N should
   be left alone.

   If FOLD_FN is non-NULL it is invoked on statements to perform
   pass-specific simplification.

   Statements whose LHS has a value that can be propagated into every
   use are queued during the walk and removed afterwards.

   Return TRUE when something changed.  */

bool
substitute_and_fold (ssa_prop_get_value_fn get_value_fn,
		     ssa_prop_fold_stmt_fn fold_fn)
{
  gcc_assert (get_value_fn);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nSubstituting values and folding statements\n\n");

  memset (&prop_stats, 0, sizeof (prop_stats));

  calculate_dominance_info (CDI_DOMINATORS);
  substitute_and_fold_dom_walker walker (CDI_DOMINATORS,
					 get_value_fn, fold_fn);
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* We cannot remove stmts during the BB walk, especially not release
     SSA names there as that destroys the lattice of our callers.
     Remove stmts in reverse order to make debug stmt creation possible.  */
  while (!walker.stmts_to_remove.is_empty ())
    {
      gimple *stmt = walker.stmts_to_remove.pop ();
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Removing dead stmt ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "\n");
	}
      prop_stats.num_dce++;
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
	remove_phi_node (&gsi, true);
      else
	{
	  unlink_stmt_vdef (stmt);
	  gsi_remove (&gsi, true);
	  release_defs (stmt);
	}
    }

  if (!bitmap_empty_p (walker.need_eh_cleanup))
    gimple_purge_all_dead_eh_edges (walker.need_eh_cleanup);

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!walker.stmts_to_fixup.is_empty ())
    {
      gimple *stmt = walker.stmts_to_fixup.pop ();
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Fixing up noreturn call ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "\n");
	}
      fixup_noreturn_call (stmt);
    }

  statistics_counter_event (cfun, "Constants propagated",
			    prop_stats.num_const_prop);
  statistics_counter_event (cfun, "Copies propagated",
			    prop_stats.num_copy_prop);
  statistics_counter_event (cfun, "Statements folded",
			    prop_stats.num_stmts_folded);
  statistics_counter_event (cfun, "Statements deleted",
			    prop_stats.num_dce);

  return walker.something_changed;
}

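/* For reference, a concrete client in this tree: once propagation has
   finished, tree-ssa-ccp.c finalizes with
   substitute_and_fold (get_constant_value, ccp_fold_stmt), letting the
   engine substitute lattice constants and run CCP's own folder.  */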

/* Return true if we may propagate ORIG into DEST, false otherwise.  */

bool
may_propagate_copy (tree dest, tree orig)
{
  tree type_d = TREE_TYPE (dest);
  tree type_o = TREE_TYPE (orig);

  /* If ORIG is a default definition which flows in from an abnormal edge
     then the copy can be propagated.  It is important that we do so to avoid
     uninitialized copies.  */
  if (TREE_CODE (orig) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig)
      && SSA_NAME_IS_DEFAULT_DEF (orig)
      && (SSA_NAME_VAR (orig) == NULL_TREE
	  || TREE_CODE (SSA_NAME_VAR (orig)) == VAR_DECL))
    ;
  /* Otherwise if ORIG just flows in from an abnormal edge then the copy cannot
     be propagated.  */
  else if (TREE_CODE (orig) == SSA_NAME
	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
    return false;
  /* Similarly if DEST flows in from an abnormal edge then the copy cannot be
     propagated.  */
  else if (TREE_CODE (dest) == SSA_NAME
	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (dest))
    return false;

  /* Do not copy between types for which we *do* need a conversion.  */
  if (!useless_type_conversion_p (type_d, type_o))
    return false;

  /* Generally propagating virtual operands is not ok as that may
     create overlapping life-ranges.  */
  if (TREE_CODE (dest) == SSA_NAME && virtual_operand_p (dest))
    return false;

  /* Anything else is OK.  */
  return true;
}

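/* Illustrative case (hypothetical SSA names, not from the sources):
   if x_3 appears as the argument or result of a PHI node reached by an
   abnormal edge (e.g. from a setjmp receiver) and is not a default
   definition, SSA_NAME_OCCURS_IN_ABNORMAL_PHI (x_3) is set and
   may_propagate_copy refuses to substitute x_3 for another name,
   because names live across abnormal edges must remain coalescable
   with their base variable when going out of SSA.  */
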
/* Like may_propagate_copy, but use as the destination expression
   the principal expression (typically, the RHS) contained in
   statement DEST.  This is more efficient when working with the
   gimple tuples representation.  */

bool
may_propagate_copy_into_stmt (gimple *dest, tree orig)
{
  tree type_d;
  tree type_o;

  /* If the statement is a switch or a single-rhs assignment,
     then the expression to be replaced by the propagation may
     be an SSA_NAME.  Fortunately, there is an explicit tree
     for the expression, so we delegate to may_propagate_copy.  */

  if (gimple_assign_single_p (dest))
    return may_propagate_copy (gimple_assign_rhs1 (dest), orig);
  else if (gswitch *dest_swtch = dyn_cast <gswitch *> (dest))
    return may_propagate_copy (gimple_switch_index (dest_swtch), orig);

  /* In other cases, the expression is not materialized, so there
     is no destination to pass to may_propagate_copy.  On the other
     hand, the expression cannot be an SSA_NAME, so the analysis
     is much simpler.  */

  if (TREE_CODE (orig) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
    return false;

  if (is_gimple_assign (dest))
    type_d = TREE_TYPE (gimple_assign_lhs (dest));
  else if (gimple_code (dest) == GIMPLE_COND)
    type_d = boolean_type_node;
  else if (is_gimple_call (dest)
	   && gimple_call_lhs (dest) != NULL_TREE)
    type_d = TREE_TYPE (gimple_call_lhs (dest));
  else
    gcc_unreachable ();

  type_o = TREE_TYPE (orig);

  if (!useless_type_conversion_p (type_d, type_o))
    return false;

  return true;
}

/* Similarly, but we know that we're propagating into an ASM_EXPR.  */

bool
may_propagate_copy_into_asm (tree dest ATTRIBUTE_UNUSED)
{
  return true;
}


/* Common code for propagate_value and replace_exp.

   Replace use operand OP_P with VAL.  FOR_PROPAGATION indicates if the
   replacement is done to propagate a value or not.  */

static void
replace_exp_1 (use_operand_p op_p, tree val,
	       bool for_propagation ATTRIBUTE_UNUSED)
{
  if (flag_checking)
    {
      tree op = USE_FROM_PTR (op_p);
      gcc_assert (!(for_propagation
		    && TREE_CODE (op) == SSA_NAME
		    && TREE_CODE (val) == SSA_NAME
		    && !may_propagate_copy (op, val)));
    }

  if (TREE_CODE (val) == SSA_NAME)
    SET_USE (op_p, val);
  else
    SET_USE (op_p, unshare_expr (val));
}


/* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
   into the operand pointed to by OP_P.

   Use this version for const/copy propagation as it will perform additional
   checks to ensure validity of the const/copy propagation.  */

void
propagate_value (use_operand_p op_p, tree val)
{
  replace_exp_1 (op_p, val, true);
}

/* Replace *OP_P with value VAL (assumed to be a constant or another SSA_NAME).

   Use this version when not const/copy propagating values.  For example,
   PRE uses this version when building expressions as they would appear
   in specific blocks taking into account actions of PHI nodes.

   The statement in which an expression has been replaced should be
   folded using fold_stmt_inplace.  */

void
replace_exp (use_operand_p op_p, tree val)
{
  replace_exp_1 (op_p, val, false);
}


/* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
   into the tree pointed to by OP_P.

   Use this version for const/copy propagation when SSA operands are not
   available.  It will perform the additional checks to ensure validity of
   the const/copy propagation, but will not update any operand information.
   Be sure to mark the stmt as modified.  */

void
propagate_tree_value (tree *op_p, tree val)
{
  if (TREE_CODE (val) == SSA_NAME)
    *op_p = val;
  else
    *op_p = unshare_expr (val);
}


/* Like propagate_tree_value, but use as the operand to replace
   the principal expression (typically, the RHS) contained in the
   statement referenced by iterator GSI.  Note that it is not
   always possible to update the statement in-place, so a new
   statement may be created to replace the original.  */

void
propagate_tree_value_into_stmt (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);

  if (is_gimple_assign (stmt))
    {
      tree expr = NULL_TREE;
      if (gimple_assign_single_p (stmt))
	expr = gimple_assign_rhs1 (stmt);
      propagate_tree_value (&expr, val);
      gimple_assign_set_rhs_from_tree (gsi, expr);
    }
  else if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      tree lhs = NULL_TREE;
      tree rhs = build_zero_cst (TREE_TYPE (val));
      propagate_tree_value (&lhs, val);
      gimple_cond_set_code (cond_stmt, NE_EXPR);
      gimple_cond_set_lhs (cond_stmt, lhs);
      gimple_cond_set_rhs (cond_stmt, rhs);
    }
  else if (is_gimple_call (stmt)
	   && gimple_call_lhs (stmt) != NULL_TREE)
    {
      tree expr = NULL_TREE;
      bool res;
      propagate_tree_value (&expr, val);
      res = update_call_from_tree (gsi, expr);
      gcc_assert (res);
    }
  else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
    propagate_tree_value (gimple_switch_index_ptr (swtch_stmt), val);
  else
    gcc_unreachable ();
}
1457