xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/tree-outof-ssa.c (revision 53b02e147d4ed531c0d2a5ca9b3e8026ba3e99b5)
1 /* Convert a program in SSA form into Normal form.
2    Copyright (C) 2004-2019 Free Software Foundation, Inc.
3    Contributed by Andrew Macleod <amacleod@redhat.com>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "cfghooks.h"
29 #include "ssa.h"
30 #include "tree-ssa.h"
31 #include "memmodel.h"
32 #include "emit-rtl.h"
33 #include "gimple-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "tree-dfa.h"
36 #include "stor-layout.h"
37 #include "cfgrtl.h"
38 #include "cfganal.h"
39 #include "tree-eh.h"
40 #include "gimple-iterator.h"
41 #include "tree-cfg.h"
42 #include "dumpfile.h"
43 #include "tree-ssa-live.h"
44 #include "tree-ssa-ter.h"
45 #include "tree-ssa-coalesce.h"
46 #include "tree-outof-ssa.h"
47 #include "dojump.h"
48 
49 /* FIXME: A lot of code here deals with expanding to RTL.  All that code
50    should be in cfgexpand.c.  */
51 #include "explow.h"
52 #include "expr.h"
53 
54 /* Return TRUE if expression STMT is suitable for replacement.  */
55 
56 bool
57 ssa_is_replaceable_p (gimple *stmt)
58 {
59   use_operand_p use_p;
60   tree def;
61   gimple *use_stmt;
62 
63   /* Only consider modify stmts.  */
64   if (!is_gimple_assign (stmt))
65     return false;
66 
67   /* If the statement may throw an exception, it cannot be replaced.  */
68   if (stmt_could_throw_p (cfun, stmt))
69     return false;
70 
71   /* Punt if there is more than 1 def.  */
72   def = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_DEF);
73   if (!def)
74     return false;
75 
76   /* Only consider definitions which have a single use.  */
77   if (!single_imm_use (def, &use_p, &use_stmt))
78     return false;
79 
80   /* Used in this block, but at the TOP of the block, not the end.  */
81   if (gimple_code (use_stmt) == GIMPLE_PHI)
82     return false;
83 
84   /* There must be no VDEFs.  */
85   if (gimple_vdef (stmt))
86     return false;
87 
88   /* Float expressions must go through memory if float-store is on.  */
89   if (flag_float_store
90       && FLOAT_TYPE_P (gimple_expr_type (stmt)))
91     return false;
92 
93   /* An assignment with a register variable on the RHS is not
94      replaceable.  */
95   if (gimple_assign_rhs_code (stmt) == VAR_DECL
96       && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt)))
97     return false;
98 
99   /* No function calls can be replaced.  */
100   if (is_gimple_call (stmt))
101     return false;
102 
103   /* Leave any stmt with volatile operands alone as well.  */
104   if (gimple_has_volatile_ops (stmt))
105     return false;
106 
107   return true;
108 }
109 
110 
111 /* Used to hold all the components required to do SSA PHI elimination.
112    The node and pred/succ list is a simple linear list of nodes and
113    edges represented as pairs of nodes.
114 
115    The predecessor and successor list:  Nodes are entered in pairs, where
116    [0] ->PRED, [1]->SUCC.  All the even indexes in the array represent
117    predecessors, all the odd elements are successors.
118 
119    Rationale:
120    When implemented as bitmaps, very large programs SSA->Normal times were
121    being dominated by clearing the interference graph.
122 
123    Typically this list of edges is extremely small since it only includes
124    PHI results and uses from a single edge which have not coalesced with
125    each other.  This means that no virtual PHI nodes are included, and
126    empirical evidence suggests that the number of edges rarely exceed
127    3, and in a bootstrap of GCC, the maximum size encountered was 7.
128    This also limits the number of possible nodes that are involved to
129    rarely more than 6, and in the bootstrap of gcc, the maximum number
130    of nodes encountered was 12.  */
131 
struct elim_graph
{
  elim_graph (var_map map);

  /* Size of the elimination vectors.  */
  int size;

  /* List of nodes in the elimination graph.  Each entry is a partition
     number; elim_graph_add_node never adds duplicates.  */
  auto_vec<int> nodes;

  /*  The predecessor and successor edge list.  Entries come in pairs:
      element [2i] is the predecessor partition, [2i + 1] the successor.  */
  auto_vec<int> edge_list;

  /* Source locus on each edge, indexed by edge pair (edge_list index / 2).  */
  auto_vec<location_t> edge_locus;

  /* Visited vector.  One bit per partition, reused by the forward and
     backward elimination walks.  */
  auto_sbitmap visited;

  /* Stack for visited nodes.  */
  auto_vec<int> stack;

  /* The variable partition map.  */
  var_map map;

  /* Edge being eliminated by this graph.  */
  edge e;

  /* List of constant copies to emit.  These are pushed on in pairs.  */
  auto_vec<int> const_dests;
  auto_vec<tree> const_copies;

  /* Source locations for any constant copies.  */
  auto_vec<location_t> copy_locus;
};
167 
168 
169 /* For an edge E find out a good source location to associate with
170    instructions inserted on edge E.  If E has an implicit goto set,
171    use its location.  Otherwise search instructions in predecessors
172    of E for a location, and use that one.  That makes sense because
173    we insert on edges for PHI nodes, and effects of PHIs happen on
174    the end of the predecessor conceptually.  */
175 
176 static void
177 set_location_for_edge (edge e)
178 {
179   if (e->goto_locus)
180     {
181       set_curr_insn_location (e->goto_locus);
182     }
183   else
184     {
185       basic_block bb = e->src;
186       gimple_stmt_iterator gsi;
187 
188       do
189 	{
190 	  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
191 	    {
192 	      gimple *stmt = gsi_stmt (gsi);
193 	      if (is_gimple_debug (stmt))
194 		continue;
195 	      if (gimple_has_location (stmt) || gimple_block (stmt))
196 		{
197 		  set_curr_insn_location (gimple_location (stmt));
198 		  return;
199 		}
200 	    }
201 	  /* Nothing found in this basic block.  Make a half-assed attempt
202 	     to continue with another block.  */
203 	  if (single_pred_p (bb))
204 	    bb = single_pred (bb);
205 	  else
206 	    bb = e->src;
207 	}
208       while (bb != e->src);
209     }
210 }
211 
212 /* Emit insns to copy SRC into DEST converting SRC if necessary.  As
213    SRC/DEST might be BLKmode memory locations SIZEEXP is a tree from
214    which we deduce the size to copy in that case.  */
215 
216 static inline rtx_insn *
217 emit_partition_copy (rtx dest, rtx src, int unsignedsrcp, tree sizeexp)
218 {
219   start_sequence ();
220 
221   if (GET_MODE (src) != VOIDmode && GET_MODE (src) != GET_MODE (dest))
222     src = convert_to_mode (GET_MODE (dest), src, unsignedsrcp);
223   if (GET_MODE (src) == BLKmode)
224     {
225       gcc_assert (GET_MODE (dest) == BLKmode);
226       emit_block_move (dest, src, expr_size (sizeexp), BLOCK_OP_NORMAL);
227     }
228   else
229     emit_move_insn (dest, src);
230   do_pending_stack_adjust ();
231 
232   rtx_insn *seq = get_insns ();
233   end_sequence ();
234 
235   return seq;
236 }
237 
238 /* Insert a copy instruction from partition SRC to DEST onto edge E.  */
239 
240 static void
241 insert_partition_copy_on_edge (edge e, int dest, int src, location_t locus)
242 {
243   tree var;
244   if (dump_file && (dump_flags & TDF_DETAILS))
245     {
246       fprintf (dump_file,
247 	       "Inserting a partition copy on edge BB%d->BB%d : "
248 	       "PART.%d = PART.%d",
249 	       e->src->index,
250 	       e->dest->index, dest, src);
251       fprintf (dump_file, "\n");
252     }
253 
254   gcc_assert (SA.partition_to_pseudo[dest]);
255   gcc_assert (SA.partition_to_pseudo[src]);
256 
257   set_location_for_edge (e);
258   /* If a locus is provided, override the default.  */
259   if (locus)
260     set_curr_insn_location (locus);
261 
262   var = partition_to_var (SA.map, src);
263   rtx_insn *seq = emit_partition_copy (copy_rtx (SA.partition_to_pseudo[dest]),
264 				       copy_rtx (SA.partition_to_pseudo[src]),
265 				       TYPE_UNSIGNED (TREE_TYPE (var)),
266 				       var);
267 
268   insert_insn_on_edge (seq, e);
269 }
270 
271 /* Insert a copy instruction from expression SRC to partition DEST
272    onto edge E.  */
273 
274 static void
275 insert_value_copy_on_edge (edge e, int dest, tree src, location_t locus)
276 {
277   rtx dest_rtx, seq, x;
278   machine_mode dest_mode, src_mode;
279   int unsignedp;
280 
281   if (dump_file && (dump_flags & TDF_DETAILS))
282     {
283       fprintf (dump_file,
284 	       "Inserting a value copy on edge BB%d->BB%d : PART.%d = ",
285 	       e->src->index,
286 	       e->dest->index, dest);
287       print_generic_expr (dump_file, src, TDF_SLIM);
288       fprintf (dump_file, "\n");
289     }
290 
291   dest_rtx = copy_rtx (SA.partition_to_pseudo[dest]);
292   gcc_assert (dest_rtx);
293 
294   set_location_for_edge (e);
295   /* If a locus is provided, override the default.  */
296   if (locus)
297     set_curr_insn_location (locus);
298 
299   start_sequence ();
300 
301   tree name = partition_to_var (SA.map, dest);
302   src_mode = TYPE_MODE (TREE_TYPE (src));
303   dest_mode = GET_MODE (dest_rtx);
304   gcc_assert (src_mode == TYPE_MODE (TREE_TYPE (name)));
305   gcc_assert (!REG_P (dest_rtx)
306 	      || dest_mode == promote_ssa_mode (name, &unsignedp));
307 
308   if (src_mode != dest_mode)
309     {
310       x = expand_expr (src, NULL, src_mode, EXPAND_NORMAL);
311       x = convert_modes (dest_mode, src_mode, x, unsignedp);
312     }
313   else if (src_mode == BLKmode)
314     {
315       x = dest_rtx;
316       store_expr (src, x, 0, false, false);
317     }
318   else
319     x = expand_expr (src, dest_rtx, dest_mode, EXPAND_NORMAL);
320 
321   if (x != dest_rtx)
322     emit_move_insn (dest_rtx, x);
323   do_pending_stack_adjust ();
324 
325   seq = get_insns ();
326   end_sequence ();
327 
328   insert_insn_on_edge (seq, e);
329 }
330 
331 /* Insert a copy instruction from RTL expression SRC to partition DEST
332    onto edge E.  */
333 
334 static void
335 insert_rtx_to_part_on_edge (edge e, int dest, rtx src, int unsignedsrcp,
336 			    location_t locus)
337 {
338   if (dump_file && (dump_flags & TDF_DETAILS))
339     {
340       fprintf (dump_file,
341 	       "Inserting a temp copy on edge BB%d->BB%d : PART.%d = ",
342 	       e->src->index,
343 	       e->dest->index, dest);
344       print_simple_rtl (dump_file, src);
345       fprintf (dump_file, "\n");
346     }
347 
348   gcc_assert (SA.partition_to_pseudo[dest]);
349 
350   set_location_for_edge (e);
351   /* If a locus is provided, override the default.  */
352   if (locus)
353     set_curr_insn_location (locus);
354 
355   /* We give the destination as sizeexp in case src/dest are BLKmode
356      mems.  Usually we give the source.  As we result from SSA names
357      the left and right size should be the same (and no WITH_SIZE_EXPR
358      involved), so it doesn't matter.  */
359   rtx_insn *seq = emit_partition_copy (copy_rtx (SA.partition_to_pseudo[dest]),
360 				       src, unsignedsrcp,
361 				       partition_to_var (SA.map, dest));
362 
363   insert_insn_on_edge (seq, e);
364 }
365 
366 /* Insert a copy instruction from partition SRC to RTL lvalue DEST
367    onto edge E.  */
368 
369 static void
370 insert_part_to_rtx_on_edge (edge e, rtx dest, int src, location_t locus)
371 {
372   tree var;
373   if (dump_file && (dump_flags & TDF_DETAILS))
374     {
375       fprintf (dump_file,
376 	       "Inserting a temp copy on edge BB%d->BB%d : ",
377 	       e->src->index,
378 	       e->dest->index);
379       print_simple_rtl (dump_file, dest);
380       fprintf (dump_file, "= PART.%d\n", src);
381     }
382 
383   gcc_assert (SA.partition_to_pseudo[src]);
384 
385   set_location_for_edge (e);
386   /* If a locus is provided, override the default.  */
387   if (locus)
388     set_curr_insn_location (locus);
389 
390   var = partition_to_var (SA.map, src);
391   rtx_insn *seq = emit_partition_copy (dest,
392 				       copy_rtx (SA.partition_to_pseudo[src]),
393 				       TYPE_UNSIGNED (TREE_TYPE (var)),
394 				       var);
395 
396   insert_insn_on_edge (seq, e);
397 }
398 
399 
400 /* Create an elimination graph for map.  */
401 
elim_graph::elim_graph (var_map map) :
  /* Initial capacities are heuristics (the graphs are tiny in
     practice); the vectors grow on demand if exceeded.  */
  nodes (30), edge_list (20), edge_locus (10), visited (map->num_partitions),
  stack (30), map (map), const_dests (20), const_copies (20), copy_locus (10)
{
}
407 
408 
409 /* Empty elimination graph G.  */
410 
411 static inline void
412 clear_elim_graph (elim_graph *g)
413 {
414   g->nodes.truncate (0);
415   g->edge_list.truncate (0);
416   g->edge_locus.truncate (0);
417 }
418 
419 
420 /* Return the number of nodes in graph G.  */
421 
422 static inline int
423 elim_graph_size (elim_graph *g)
424 {
425   return g->nodes.length ();
426 }
427 
428 
429 /* Add NODE to graph G, if it doesn't exist already.  */
430 
431 static inline void
432 elim_graph_add_node (elim_graph *g, int node)
433 {
434   int x;
435   int t;
436 
437   FOR_EACH_VEC_ELT (g->nodes, x, t)
438     if (t == node)
439       return;
440   g->nodes.safe_push (node);
441 }
442 
443 
444 /* Add the edge PRED->SUCC to graph G.  */
445 
446 static inline void
447 elim_graph_add_edge (elim_graph *g, int pred, int succ, location_t locus)
448 {
449   g->edge_list.safe_push (pred);
450   g->edge_list.safe_push (succ);
451   g->edge_locus.safe_push (locus);
452 }
453 
454 
455 /* Remove an edge from graph G for which NODE is the predecessor, and
456    return the successor node.  -1 is returned if there is no such edge.  */
457 
458 static inline int
459 elim_graph_remove_succ_edge (elim_graph *g, int node, location_t *locus)
460 {
461   int y;
462   unsigned x;
463   for (x = 0; x < g->edge_list.length (); x += 2)
464     if (g->edge_list[x] == node)
465       {
466         g->edge_list[x] = -1;
467 	y = g->edge_list[x + 1];
468 	g->edge_list[x + 1] = -1;
469 	*locus = g->edge_locus[x / 2];
470 	g->edge_locus[x / 2] = UNKNOWN_LOCATION;
471 	return y;
472       }
473   *locus = UNKNOWN_LOCATION;
474   return -1;
475 }
476 
477 
/* Find all the nodes in GRAPH which are successors to NODE in the
   edge list.  VAR will hold the partition number found, and LOCUS the
   source location recorded for that edge.  CODE is the code fragment
   executed for every node found.  */

#define FOR_EACH_ELIM_GRAPH_SUCC(GRAPH, NODE, VAR, LOCUS, CODE)		\
do {									\
  unsigned x_;								\
  int y_;								\
  for (x_ = 0; x_ < (GRAPH)->edge_list.length (); x_ += 2)	\
    {									\
      y_ = (GRAPH)->edge_list[x_];					\
      if (y_ != (NODE))							\
        continue;							\
      (void) ((VAR) = (GRAPH)->edge_list[x_ + 1]);			\
      (void) ((LOCUS) = (GRAPH)->edge_locus[x_ / 2]);			\
      CODE;								\
    }									\
} while (0)
496 
497 
/* Find all the nodes which are predecessors of NODE in the edge list for
   GRAPH.  VAR will hold the partition number found, and LOCUS the
   source location recorded for that edge.  CODE is the code fragment
   executed for every node found.  */

#define FOR_EACH_ELIM_GRAPH_PRED(GRAPH, NODE, VAR, LOCUS, CODE)		\
do {									\
  unsigned x_;								\
  int y_;								\
  for (x_ = 0; x_ < (GRAPH)->edge_list.length (); x_ += 2)	\
    {									\
      y_ = (GRAPH)->edge_list[x_ + 1];					\
      if (y_ != (NODE))							\
        continue;							\
      (void) ((VAR) = (GRAPH)->edge_list[x_]);				\
      (void) ((LOCUS) = (GRAPH)->edge_locus[x_ / 2]);			\
      CODE;								\
    }									\
} while (0)
516 
517 
518 /* Add T to elimination graph G.  */
519 
static inline void
eliminate_name (elim_graph *g, int T)
{
  /* Partition T participates in this edge's parallel copy; record it
     as a graph node (duplicates are filtered by elim_graph_add_node).  */
  elim_graph_add_node (g, T);
}
525 
526 /* Return true if this phi argument T should have a copy queued when using
527    var_map MAP.  PHI nodes should contain only ssa_names and invariants.  A
528    test for ssa_name is definitely simpler, but don't let invalid contents
529    slip through in the meantime.  */
530 
531 static inline bool
532 queue_phi_copy_p (var_map map, tree t)
533 {
534   if (TREE_CODE (t) == SSA_NAME)
535     {
536       if (var_to_partition (map, t) == NO_PARTITION)
537         return true;
538       return false;
539     }
540   gcc_checking_assert (is_gimple_min_invariant (t));
541   return true;
542 }
543 
544 /* Build elimination graph G for basic block BB on incoming PHI edge
545    G->e.  */
546 
static void
eliminate_build (elim_graph *g)
{
  tree Ti;
  int p0, pi;
  gphi_iterator gsi;

  clear_elim_graph (g);

  /* Each PHI in the destination block of G->e contributes one copy
     "result = incoming arg" to perform on the edge.  */
  for (gsi = gsi_start_phis (g->e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      location_t locus;

      p0 = var_to_partition (g->map, gimple_phi_result (phi));
      /* Ignore results which are not in partitions.  */
      if (p0 == NO_PARTITION)
	continue;

      /* The argument flowing in through this specific edge.  */
      Ti = PHI_ARG_DEF (phi, g->e->dest_idx);
      locus = gimple_phi_arg_location_from_edge (phi, g->e);

      /* If this argument is a constant, or a SSA_NAME which is being
	 left in SSA form, just queue a copy to be emitted on this
	 edge.  */
      if (queue_phi_copy_p (g->map, Ti))
        {
	  /* Save constant copies until all other copies have been emitted
	     on this edge.  */
	  g->const_dests.safe_push (p0);
	  g->const_copies.safe_push (Ti);
	  g->copy_locus.safe_push (locus);
	}
      else
        {
	  pi = var_to_partition (g->map, Ti);
	  /* A copy between two distinct partitions becomes a graph
	     edge; a copy within one partition is a no-op and dropped.  */
	  if (p0 != pi)
	    {
	      eliminate_name (g, p0);
	      eliminate_name (g, pi);
	      elim_graph_add_edge (g, p0, pi, locus);
	    }
	}
    }
}
592 
593 
594 /* Push successors of T onto the elimination stack for G.  */
595 
static void
elim_forward (elim_graph *g, int T)
{
  int S;
  location_t locus;

  bitmap_set_bit (g->visited, T);
  /* Depth-first walk over successors: T is pushed only after all of
     its successors, so G->stack ends up in an order suitable for the
     copy-emitting pass in eliminate_phi.  */
  FOR_EACH_ELIM_GRAPH_SUCC (g, T, S, locus,
    {
      if (!bitmap_bit_p (g->visited, S))
        elim_forward (g, S);
    });
  g->stack.safe_push (T);
}
610 
611 
612 /* Return 1 if there unvisited predecessors of T in graph G.  */
613 
static int
elim_unvisited_predecessor (elim_graph *g, int T)
{
  int P;
  location_t locus;

  FOR_EACH_ELIM_GRAPH_PRED (g, T, P, locus,
    {
      /* Note: this return exits the whole function, not just the
	 FOR_EACH macro body.  */
      if (!bitmap_bit_p (g->visited, P))
        return 1;
    });
  return 0;
}
627 
628 /* Process predecessors first, and insert a copy.  */
629 
static void
elim_backward (elim_graph *g, int T)
{
  int P;
  location_t locus;

  bitmap_set_bit (g->visited, T);
  /* Recurse into unvisited predecessors first so each partition's
     value is copied onward before it gets overwritten.  */
  FOR_EACH_ELIM_GRAPH_PRED (g, T, P, locus,
    {
      if (!bitmap_bit_p (g->visited, P))
        {
	  elim_backward (g, P);
	  insert_partition_copy_on_edge (g->e, P, T, locus);
	}
    });
}
646 
647 /* Allocate a new pseudo register usable for storing values sitting
648    in NAME (a decl or SSA name), i.e. with matching mode and attributes.  */
649 
650 static rtx
651 get_temp_reg (tree name)
652 {
653   tree type = TREE_TYPE (name);
654   int unsignedp;
655   machine_mode reg_mode = promote_ssa_mode (name, &unsignedp);
656   if (reg_mode == BLKmode)
657     return assign_temp (type, 0, 0);
658   rtx x = gen_reg_rtx (reg_mode);
659   if (POINTER_TYPE_P (type))
660     mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
661   return x;
662 }
663 
664 /* Insert required copies for T in graph G.  Check for a strongly connected
665    region, and create a temporary to break the cycle if one is found.  */
666 
static void
elim_create (elim_graph *g, int T)
{
  int P, S;
  location_t locus;

  if (elim_unvisited_predecessor (g, T))
    {
      /* T still has pending readers: preserve its current value in a
	 fresh temporary before it is overwritten, then satisfy the
	 predecessors from that temporary, breaking the cycle.  */
      tree var = partition_to_var (g->map, T);
      rtx U = get_temp_reg (var);
      int unsignedsrcp = TYPE_UNSIGNED (TREE_TYPE (var));

      insert_part_to_rtx_on_edge (g->e, U, T, UNKNOWN_LOCATION);
      FOR_EACH_ELIM_GRAPH_PRED (g, T, P, locus,
	{
	  if (!bitmap_bit_p (g->visited, P))
	    {
	      elim_backward (g, P);
	      insert_rtx_to_part_on_edge (g->e, P, U, unsignedsrcp, locus);
	    }
	});
    }
  else
    {
      /* No pending reader: T can simply be overwritten with its
	 successor's value, if it has one.  */
      S = elim_graph_remove_succ_edge (g, T, &locus);
      if (S != -1)
	{
	  bitmap_set_bit (g->visited, T);
	  insert_partition_copy_on_edge (g->e, T, S, locus);
	}
    }
}
699 
700 
701 /* Eliminate all the phi nodes on edge E in graph G.  */
702 
static void
eliminate_phi (edge e, elim_graph *g)
{
  int x;

  /* Both constant-copy queues must have been fully drained by the
     previous invocation.  */
  gcc_assert (g->const_copies.length () == 0);
  gcc_assert (g->copy_locus.length () == 0);

  /* Abnormal edges already have everything coalesced.  */
  if (e->flags & EDGE_ABNORMAL)
    return;

  g->e = e;

  eliminate_build (g);

  if (elim_graph_size (g) != 0)
    {
      int part;

      bitmap_clear (g->visited);
      g->stack.truncate (0);

      /* Pass 1: depth-first walk orders nodes on the stack so that
	 copies can be emitted without clobbering values still needed.  */
      FOR_EACH_VEC_ELT (g->nodes, x, part)
        {
	  if (!bitmap_bit_p (g->visited, part))
	    elim_forward (g, part);
	}

      /* Pass 2: pop nodes and emit the copies, using temporaries to
	 break cycles (see elim_create).  */
      bitmap_clear (g->visited);
      while (g->stack.length () > 0)
	{
	  x = g->stack.pop ();
	  if (!bitmap_bit_p (g->visited, x))
	    elim_create (g, x);
	}
    }

  /* If there are any pending constant copies, issue them now.  */
  while (g->const_copies.length () > 0)
    {
      int dest;
      tree src;
      location_t locus;

      src = g->const_copies.pop ();
      dest = g->const_dests.pop ();
      locus = g->copy_locus.pop ();
      insert_value_copy_on_edge (e, dest, src, locus);
    }
}
754 
755 
756 /* Remove each argument from PHI.  If an arg was the last use of an SSA_NAME,
757    check to see if this allows another PHI node to be removed.  */
758 
static void
remove_gimple_phi_args (gphi *phi)
{
  use_operand_p arg_p;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Removing Dead PHI definition: ");
      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
    }

  FOR_EACH_PHI_ARG (arg_p, phi, iter, SSA_OP_USE)
    {
      tree arg = USE_FROM_PTR (arg_p);
      if (TREE_CODE (arg) == SSA_NAME)
        {
	  /* Remove the reference to the existing argument.  */
	  SET_USE (arg_p, NULL_TREE);
	  if (has_zero_uses (arg))
	    {
	      gimple *stmt;
	      gimple_stmt_iterator gsi;

	      stmt = SSA_NAME_DEF_STMT (arg);

	      /* Also remove the def if it is a PHI node.  */
	      if (gimple_code (stmt) == GIMPLE_PHI)
		{
		  /* Recurse first: dropping this argument may have
		     made the defining PHI dead as well.  */
		  remove_gimple_phi_args (as_a <gphi *> (stmt));
		  gsi = gsi_for_stmt (stmt);
		  remove_phi_node (&gsi, true);
		}

	    }
	}
    }
}
797 
798 /* Remove any PHI node which is a virtual PHI, or a PHI with no uses.  */
799 
static void
eliminate_useless_phis (void)
{
  basic_block bb;
  gphi_iterator gsi;
  tree result;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* GSI is only advanced explicitly when nothing was removed;
	 remove_phi_node moves the iterator to the next PHI itself.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
        {
	  gphi *phi = gsi.phi ();
	  result = gimple_phi_result (phi);
	  /* Virtual PHIs are not needed once we go out of SSA.  */
	  if (virtual_operand_p (result))
	    remove_phi_node (&gsi, true);
          else
	    {
	      /* Also remove real PHIs with no uses.  */
	      if (has_zero_uses (result))
	        {
		  remove_gimple_phi_args (phi);
		  remove_phi_node (&gsi, true);
		}
	      else
		gsi_next (&gsi);
	    }
	}
    }
}
829 
830 
/* Consistency check for the variable mapping found in MAP: verify
   that no PHI node has an argument assigned to a partition while its
   result is not.  The actual rewriting of the program out of SSA form
   happens during expansion; this function only performs the sanity
   check (and only in checking-enabled builds).  */
836 
static void
rewrite_trees (var_map map)
{
  /* This is a consistency check only (the actual rewriting of the IL
     happens during expansion); skip it in non-checking builds.  */
  if (!flag_checking)
    return;

  basic_block bb;
  /* Search for PHIs where the destination has no partition, but one
     or more arguments has a partition.  This should not happen and can
     create incorrect code.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gphi_iterator gsi;
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  tree T0 = var_to_partition_to_var (map, gimple_phi_result (phi));
	  if (T0 == NULL_TREE)
	    {
	      size_t i;
	      for (i = 0; i < gimple_phi_num_args (phi); i++)
		{
		  tree arg = PHI_ARG_DEF (phi, i);

		  if (TREE_CODE (arg) == SSA_NAME
		      && var_to_partition (map, arg) != NO_PARTITION)
		    {
		      fprintf (stderr, "Argument of PHI is in a partition :(");
		      print_generic_expr (stderr, arg, TDF_SLIM);
		      fprintf (stderr, "), but the result is not :");
		      print_gimple_stmt (stderr, phi, 0, TDF_SLIM);
		      internal_error ("SSA corruption");
		    }
		}
	    }
	}
    }
}
875 
876 /* Create a default def for VAR.  */
877 
878 static void
879 create_default_def (tree var, void *arg ATTRIBUTE_UNUSED)
880 {
881   if (!is_gimple_reg (var))
882     return;
883 
884   tree ssa = get_or_create_ssa_default_def (cfun, var);
885   gcc_assert (ssa);
886 }
887 
888 /* Call CALLBACK for all PARM_DECLs and RESULT_DECLs for which
889    assign_parms may ask for a default partition.  */
890 
891 static void
892 for_all_parms (void (*callback)(tree var, void *arg), void *arg)
893 {
894   for (tree var = DECL_ARGUMENTS (current_function_decl); var;
895        var = DECL_CHAIN (var))
896     callback (var, arg);
897   if (!VOID_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
898     callback (DECL_RESULT (current_function_decl), arg);
899   if (cfun->static_chain_decl)
900     callback (cfun->static_chain_decl, arg);
901 }
902 
903 /* We need to pass two arguments to set_parm_default_def_partition,
904    but for_all_parms only supports one.  Use a pair.  */
905 
906 typedef std::pair<var_map, bitmap> parm_default_def_partition_arg;
907 
908 /* Set in ARG's PARTS bitmap the bit corresponding to the partition in
909    ARG's MAP containing VAR's default def.  */
910 
911 static void
912 set_parm_default_def_partition (tree var, void *arg_)
913 {
914   parm_default_def_partition_arg *arg = (parm_default_def_partition_arg *)arg_;
915   var_map map = arg->first;
916   bitmap parts = arg->second;
917 
918   if (!is_gimple_reg (var))
919     return;
920 
921   tree ssa = ssa_default_def (cfun, var);
922   gcc_assert (ssa);
923 
924   int version = var_to_partition (map, ssa);
925   gcc_assert (version != NO_PARTITION);
926 
927   bool changed = bitmap_set_bit (parts, version);
928   gcc_assert (changed);
929 }
930 
931 /* Allocate and return a bitmap that has a bit set for each partition
932    that contains a default def for a parameter.  */
933 
934 static bitmap
935 get_parm_default_def_partitions (var_map map)
936 {
937   bitmap parm_default_def_parts = BITMAP_ALLOC (NULL);
938 
939   parm_default_def_partition_arg
940     arg = std::make_pair (map, parm_default_def_parts);
941 
942   for_all_parms (set_parm_default_def_partition, &arg);
943 
944   return parm_default_def_parts;
945 }
946 
947 /* Allocate and return a bitmap that has a bit set for each partition
948    that contains an undefined value.  */
949 
950 static bitmap
951 get_undefined_value_partitions (var_map map)
952 {
953   bitmap undefined_value_parts = BITMAP_ALLOC (NULL);
954 
955   for (unsigned int i = 1; i < num_ssa_names; i++)
956     {
957       tree var = ssa_name (i);
958       if (var
959 	  && !virtual_operand_p (var)
960 	  && !has_zero_uses (var)
961 	  && ssa_undefined_value_p (var))
962 	{
963 	  const int p = var_to_partition (map, var);
964 	  if (p != NO_PARTITION)
965 	    bitmap_set_bit (undefined_value_parts, p);
966 	}
967     }
968 
969   return undefined_value_parts;
970 }
971 
972 /* Given the out-of-ssa info object SA (with prepared partitions)
973    eliminate all phi nodes in all basic blocks.  Afterwards no
974    basic block will have phi nodes anymore and there are possibly
975    some RTL instructions inserted on edges.  */
976 
void
expand_phi_nodes (struct ssaexpand *sa)
{
  basic_block bb;
  elim_graph g (sa->map);

  /* Walk every real basic block (entry and exit have no PHIs).  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb,
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    if (!gimple_seq_empty_p (phi_nodes (bb)))
      {
	edge e;
	edge_iterator ei;
	/* Turn each incoming edge's PHI arguments into edge copies.  */
	FOR_EACH_EDGE (e, ei, bb->preds)
	  eliminate_phi (e, &g);
	set_phi_nodes (bb, NULL);
	/* We can't redirect EH edges in RTL land, so we need to do this
	   here.  Redirection happens only when splitting is necessary,
	   which it is only for critical edges, normally.  For EH edges
	   it might also be necessary when the successor has more than
	   one predecessor.  In that case the edge is either required to
	   be fallthru (which EH edges aren't), or the predecessor needs
	   to end with a jump (which again, isn't the case with EH edges).
	   Hence, split all EH edges on which we inserted instructions
	   and whose successor has multiple predecessors.  */
	for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
	  {
	    if (e->insns.r && (e->flags & EDGE_EH)
		&& !single_pred_p (e->dest))
	      {
		rtx_insn *insns = e->insns.r;
		/* This BB shadows the function-scope BB and names the
		   new block created by splitting E.  Splitting removes
		   E, so the iterator is not advanced on this path.  */
		basic_block bb;
		e->insns.r = NULL;
		bb = split_edge (e);
		single_pred_edge (bb)->insns.r = insns;
	      }
	    else
	      ei_next (&ei);
	  }
      }
}
1017 
1018 
1019 /* Remove the ssa-names in the current function and translate them into normal
1020    compiler variables.  PERFORM_TER is true if Temporary Expression Replacement
1021    should also be used.  */
1022 
static void
remove_ssa_form (bool perform_ter, struct ssaexpand *sa)
{
  bitmap values = NULL;
  var_map map;

  /* Ensure parameters, the result and the static chain all have SSA
     default definitions before partitioning.  */
  for_all_parms (create_default_def, NULL);
  map = init_var_map (num_ssa_names);
  coalesce_ssa_name (map);

  /* Return to viewing the variable list as just all reference variables after
     coalescing has been performed.  */
  partition_view_normal (map);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "After Coalescing:\n");
      dump_var_map (dump_file, map);
    }

  if (perform_ter)
    {
      /* Temporary Expression Replacement: VALUES records which SSA
	 versions will have their defining expression substituted
	 directly at the use site during expansion.  */
      values = find_replaceable_exprs (map);
      if (values && dump_file && (dump_flags & TDF_DETAILS))
	dump_replaceable_exprs (dump_file, values);
    }

  /* Sanity-check the partitioning (checking builds only).  */
  rewrite_trees (map);

  sa->map = map;
  sa->values = values;
  sa->partitions_for_parm_default_defs = get_parm_default_def_partitions (map);
  sa->partitions_for_undefined_values = get_undefined_value_partitions (map);
}
1057 
1058 
1059 /* If not already done so for basic block BB, assign increasing uids
1060    to each of its instructions.  */
1061 
1062 static void
1063 maybe_renumber_stmts_bb (basic_block bb)
1064 {
1065   unsigned i = 0;
1066   gimple_stmt_iterator gsi;
1067 
1068   if (!bb->aux)
1069     return;
1070   bb->aux = NULL;
1071   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1072     {
1073       gimple *stmt = gsi_stmt (gsi);
1074       gimple_set_uid (stmt, i);
1075       i++;
1076     }
1077 }
1078 
1079 
1080 /* Return true if we can determine that the SSA_NAMEs RESULT (a result
1081    of a PHI node) and ARG (one of its arguments) conflict.  Return false
1082    otherwise, also when we simply aren't sure.  */
1083 
1084 static bool
1085 trivially_conflicts_p (basic_block bb, tree result, tree arg)
1086 {
1087   use_operand_p use;
1088   imm_use_iterator imm_iter;
1089   gimple *defa = SSA_NAME_DEF_STMT (arg);
1090 
1091   /* If ARG isn't defined in the same block it's too complicated for
1092      our little mind.  */
1093   if (gimple_bb (defa) != bb)
1094     return false;
1095 
1096   FOR_EACH_IMM_USE_FAST (use, imm_iter, result)
1097     {
1098       gimple *use_stmt = USE_STMT (use);
1099       if (is_gimple_debug (use_stmt))
1100 	continue;
1101       /* Now, if there's a use of RESULT that lies outside this basic block,
1102 	 then there surely is a conflict with ARG.  */
1103       if (gimple_bb (use_stmt) != bb)
1104 	return true;
1105       if (gimple_code (use_stmt) == GIMPLE_PHI)
1106 	continue;
1107       /* The use now is in a real stmt of BB, so if ARG was defined
1108          in a PHI node (like RESULT) both conflict.  */
1109       if (gimple_code (defa) == GIMPLE_PHI)
1110 	return true;
1111       maybe_renumber_stmts_bb (bb);
1112       /* If the use of RESULT occurs after the definition of ARG,
1113          the two conflict too.  */
1114       if (gimple_uid (defa) < gimple_uid (use_stmt))
1115 	return true;
1116     }
1117 
1118   return false;
1119 }
1120 
1121 
/* Search every PHI node for arguments associated with backedges which
   we can trivially determine will need a copy (the argument is either
   not an SSA_NAME or the argument has a different underlying variable
   than the PHI result).

   Insert a copy from the PHI argument to a new destination at the
   end of the block with the backedge to the top of the loop.  Update
   the PHI argument to reference this new destination.  */

static void
insert_backedge_copies (void)
{
  basic_block bb;
  gphi_iterator gsi;

  /* Compute EDGE_DFS_BACK flags, which are tested below to recognize
     backedges.  */
  mark_dfs_back_edges ();

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* Mark block as possibly needing calculation of UIDs.  */
      bb->aux = &bb->aux;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  tree result = gimple_phi_result (phi);
	  size_t i;

	  if (virtual_operand_p (result))
	    continue;

	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree arg = gimple_phi_arg_def (phi, i);
	      edge e = gimple_phi_arg_edge (phi, i);
	      /* We are only interested in copies emitted on critical
                 backedges.  */
	      if (!(e->flags & EDGE_DFS_BACK)
		  || !EDGE_CRITICAL_P (e))
		continue;

	      /* If the argument is not an SSA_NAME, then we will need a
		 constant initialization.  If the argument is an SSA_NAME then
		 a copy statement may be needed.  First handle the case
		 where we cannot insert before the argument definition.  */
	      if (TREE_CODE (arg) != SSA_NAME
		  || (gimple_code (SSA_NAME_DEF_STMT (arg)) == GIMPLE_PHI
		      && trivially_conflicts_p (bb, result, arg)))
		{
		  tree name;
		  gassign *stmt;
		  gimple *last = NULL;
		  gimple_stmt_iterator gsi2;

		  /* LAST is the last real statement of the edge's source
		     block, if any; it determines the insertion point.  */
		  gsi2 = gsi_last_bb (gimple_phi_arg_edge (phi, i)->src);
		  if (!gsi_end_p (gsi2))
		    last = gsi_stmt (gsi2);

		  /* In theory the only way we ought to get back to the
		     start of a loop should be with a COND_EXPR or GOTO_EXPR.
		     However, better safe than sorry.
		     If the block ends with a control statement or
		     something that might throw, then we have to
		     insert this assignment before the last
		     statement.  Else insert it after the last statement.  */
		  if (last && stmt_ends_bb_p (last))
		    {
		      /* If the last statement in the block is the definition
			 site of the PHI argument, then we can't insert
			 anything after it.  */
		      if (TREE_CODE (arg) == SSA_NAME
			  && SSA_NAME_DEF_STMT (arg) == last)
			continue;
		    }

		  /* Create a new instance of the underlying variable of the
		     PHI result.  */
		  name = copy_ssa_name (result);
		  stmt = gimple_build_assign (name,
					      gimple_phi_arg_def (phi, i));

		  /* copy location if present.  */
		  if (gimple_phi_arg_has_location (phi, i))
		    gimple_set_location (stmt,
					 gimple_phi_arg_location (phi, i));

		  /* Insert the new statement into the block and update
		     the PHI node.  */
		  if (last && stmt_ends_bb_p (last))
		    gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
		  else
		    gsi_insert_after (&gsi2, stmt, GSI_NEW_STMT);
		  SET_PHI_ARG_DEF (phi, i, name);
		}
	      /* Insert a copy before the definition of the backedge value
		 and adjust all conflicting uses.  */
	      else if (trivially_conflicts_p (bb, result, arg))
		{
		  gimple *def = SSA_NAME_DEF_STMT (arg);
		  /* Can't insert before a nop or a PHI definition.  */
		  if (gimple_nop_p (def)
		      || gimple_code (def) == GIMPLE_PHI)
		    continue;
		  tree name = copy_ssa_name (result);
		  gimple *stmt = gimple_build_assign (name, result);
		  imm_use_iterator imm_iter;
		  gimple *use_stmt;
		  /* The following matches trivially_conflicts_p.  Redirect
		     each conflicting use of RESULT to the new copy NAME.  */
		  FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, result)
		    {
		      if (gimple_bb (use_stmt) != bb
			  || (gimple_code (use_stmt) != GIMPLE_PHI
			      && (maybe_renumber_stmts_bb (bb), true)
			      && gimple_uid (use_stmt) > gimple_uid (def)))
			{
			  use_operand_p use;
			  FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
			    SET_USE (use, name);
			}
		    }
		  gimple_stmt_iterator gsi = gsi_for_stmt (def);
		  gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
		}
	    }
	}

      /* Unmark this block again.  */
      bb->aux = NULL;
    }
}
1251 
1252 /* Free all memory associated with going out of SSA form.  SA is
1253    the outof-SSA info object.  */
1254 
1255 void
1256 finish_out_of_ssa (struct ssaexpand *sa)
1257 {
1258   free (sa->partition_to_pseudo);
1259   if (sa->values)
1260     BITMAP_FREE (sa->values);
1261   delete_var_map (sa->map);
1262   BITMAP_FREE (sa->partitions_for_parm_default_defs);
1263   BITMAP_FREE (sa->partitions_for_undefined_values);
1264   memset (sa, 0, sizeof *sa);
1265 }
1266 
1267 /* Take the current function out of SSA form, translating PHIs as described in
1268    R. Morgan, ``Building an Optimizing Compiler'',
1269    Butterworth-Heinemann, Boston, MA, 1998. pp 176-186.  */
1270 
1271 unsigned int
1272 rewrite_out_of_ssa (struct ssaexpand *sa)
1273 {
1274   /* If elimination of a PHI requires inserting a copy on a backedge,
1275      then we will have to split the backedge which has numerous
1276      undesirable performance effects.
1277 
1278      A significant number of such cases can be handled here by inserting
1279      copies into the loop itself.  */
1280   insert_backedge_copies ();
1281 
1282 
1283   /* Eliminate PHIs which are of no use, such as virtual or dead phis.  */
1284   eliminate_useless_phis ();
1285 
1286   if (dump_file && (dump_flags & TDF_DETAILS))
1287     gimple_dump_cfg (dump_file, dump_flags & ~TDF_DETAILS);
1288 
1289   remove_ssa_form (flag_tree_ter, sa);
1290 
1291   if (dump_file && (dump_flags & TDF_DETAILS))
1292     gimple_dump_cfg (dump_file, dump_flags & ~TDF_DETAILS);
1293 
1294   return 0;
1295 }
1296