xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/tree-cfg.c (revision b7b7574d3bf8eeb51a1fa3977b59142ec6434a55)
1 /* Control flow functions for trees.
2    Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
3    2010  Free Software Foundation, Inc.
4    Contributed by Diego Novillo <dnovillo@redhat.com>
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12 
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16 GNU General Public License for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3.  If not see
20 <http://www.gnu.org/licenses/>.  */
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "output.h"
32 #include "flags.h"
33 #include "function.h"
34 #include "expr.h"
35 #include "ggc.h"
36 #include "langhooks.h"
37 #include "diagnostic.h"
38 #include "tree-flow.h"
39 #include "timevar.h"
40 #include "tree-dump.h"
41 #include "tree-pass.h"
42 #include "toplev.h"
43 #include "except.h"
44 #include "cfgloop.h"
45 #include "cfglayout.h"
46 #include "tree-ssa-propagate.h"
47 #include "value-prof.h"
48 #include "pointer-set.h"
49 #include "tree-inline.h"
50 
51 /* This file contains functions for building the Control Flow Graph (CFG)
52    for a function tree.  */
53 
54 /* Local declarations.  */
55 
/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their TREE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static struct pointer_map_t *edge_to_cases;

/* CFG statistics.  */
struct cfg_stats_d
{
  /* Number of labels merged into a single block (see cleanup paths).  */
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.
   Set by make_blocks, consumed by build_gimple_cfg to decide whether
   to run factor_computed_gotos.  */
static bool found_computed_goto;

/* Hash table to store last discriminator assigned for each locus.
   A discriminator distinguishes distinct basic blocks sharing one
   source location, for sample-based profiling.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};
static htab_t discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);
static void factor_computed_gotos (void);

/* Edges.  */
static void make_edges (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static unsigned int locus_map_hash (const void *);
static int locus_map_eq (const void *, const void *);
static void assign_discriminator (location_t, basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
static unsigned int split_critical_edges (void);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static void gimple_cfg2vcg (FILE *);
static gimple first_non_label_stmt (basic_block);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);
125 
/* Reset FN's CFG to the minimal state: just the fixed ENTRY and EXIT
   blocks, linked to each other, with empty (but pre-sized) block and
   label-to-block arrays and no profile information.  */

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  basic_block_info_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
			 basic_block_info_for_function (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  label_to_block_map_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
			 label_to_block_map_for_function (fn),
			 initial_cfg_capacity);

  /* Install the two fixed blocks in the block array...  */
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
				ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
		   EXIT_BLOCK_PTR_FOR_FUNCTION (fn));

  /* ... and chain them so ENTRY immediately precedes EXIT.  */
  ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
  EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
}
157 
/* Convenience wrapper: reset the CFG of the current function (cfun).  */

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}
163 
164 /*---------------------------------------------------------------------------
165 			      Create basic blocks
166 ---------------------------------------------------------------------------*/
167 
168 /* Entry point to the CFG builder for trees.  SEQ is the sequence of
169    statements to be added to the flowgraph.  */
170 
static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  /* make_blocks sets found_computed_goto when it sees one.  */
  found_computed_goto = 0;
  make_blocks (seq);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR);

  /* Adjust the size of the array.  */
  if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
    VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  The discriminator table only
     lives across make_edges; it records per-locus counters used by
     assign_discriminator.  */
  discriminator_per_locus = htab_create (13, locus_map_hash, locus_map_eq,
                                         free);
  make_edges ();
  cleanup_dead_labels ();
  htab_delete (discriminator_per_locus);

  /* Debugging dumps.  */

  /* Write the flowgraph to a VCG file.  */
  {
    int local_dump_flags;
    FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
    if (vcg_file)
      {
	gimple_cfg2vcg (vcg_file);
	dump_end (TDI_vcg, vcg_file);
      }
  }

#ifdef ENABLE_CHECKING
  verify_stmts ();
#endif
}
232 
233 static unsigned int
234 execute_build_cfg (void)
235 {
236   gimple_seq body = gimple_body (current_function_decl);
237 
238   build_gimple_cfg (body);
239   gimple_set_body (current_function_decl, NULL);
240   if (dump_file && (dump_flags & TDF_DETAILS))
241     {
242       fprintf (dump_file, "Scope blocks:\n");
243       dump_scope_blocks (dump_file, dump_flags);
244     }
245   return 0;
246 }
247 
/* Pass descriptor for the CFG-building pass ("cfg" dump suffix).
   Requires lowered EH (PROP_gimple_leh) and provides PROP_cfg.  */
struct gimple_opt_pass pass_build_cfg =
{
 {
  GIMPLE_PASS,
  "cfg",				/* name */
  NULL,					/* gate */
  execute_build_cfg,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CFG,				/* tv_id */
  PROP_gimple_leh, 			/* properties_required */
  PROP_cfg,				/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_stmts | TODO_cleanup_cfg
  | TODO_dump_func			/* todo_flags_finish */
 }
};
267 
268 
269 /* Return true if T is a computed goto.  */
270 
271 static bool
272 computed_goto_p (gimple t)
273 {
274   return (gimple_code (t) == GIMPLE_GOTO
275 	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
276 }
277 
278 
279 /* Search the CFG for any computed gotos.  If found, factor them to a
280    common computed goto site.  Also record the location of that site so
281    that we can un-factor the gotos after we have converted back to
282    normal form.  */
283 
static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  /* Shared destination variable; each original goto stores into it.  */
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  /* The single factored goto; created lazily on first hit.  */
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
	continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
	 computed gotos.  */
      if (last == factored_computed_goto)
	continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
	{
	  gimple assignment;

	  /* The first time we find a computed goto we need to create
	     the factored goto block and the variable each original
	     computed goto will use for their goto destination.  */
	  if (!factored_computed_goto)
	    {
	      basic_block new_bb = create_empty_bb (bb);
	      gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

	      /* Create the destination of the factored goto.  Each original
		 computed goto will put its desired destination into this
		 variable and jump to the label we create immediately
		 below.  */
	      var = create_tmp_var (ptr_type_node, "gotovar");

	      /* Build a label for the new block which will contain the
		 factored computed goto.  */
	      factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
	      factored_computed_goto_label
		= gimple_build_label (factored_label_decl);
	      gsi_insert_after (&new_gsi, factored_computed_goto_label,
				GSI_NEW_STMT);

	      /* Build our new computed goto.  */
	      factored_computed_goto = gimple_build_goto (var);
	      gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
	    }

	  /* Copy the original computed goto's destination into VAR.  */
	  assignment = gimple_build_assign (var, gimple_goto_dest (last));
	  gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	  /* And re-vector the computed goto to the new destination.  */
	  gimple_goto_set_dest (last, factored_label_decl);
	}
    }
}
353 
354 
355 /* Build a flowgraph for the sequence of stmts SEQ.  */
356 
static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    seq = gsi_split_seq_before (&i);
	  bb = create_basic_block (seq, NULL, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* Remember whether we saw a computed goto; build_gimple_cfg will
	 run factor_computed_gotos if so.  */
      if (computed_goto_p (stmt))
	found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
	      gimple s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      /* Complex and vector temporaries may still be GIMPLE
		 registers.  */
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}
422 
423 
424 /* Create and return a new empty basic block after bb AFTER.  */
425 
static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  /* The gimple CFG hook never passes an edge here.  */
  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     ggc_alloc_cleared to allocate a basic block, we do not have to
     clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  bb->il.gimple = GGC_CNEW (struct gimple_bb_info);
  /* H, when given, is the gimple_seq that forms the block body.  */
  set_bb_seq (bb, h ? (gimple_seq) h : gimple_seq_alloc ());

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed (by an extra ~25%).  */
  if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks++;
  last_basic_block++;

  return bb;
}
461 
462 
463 /*---------------------------------------------------------------------------
464 				 Edge creation
465 ---------------------------------------------------------------------------*/
466 
467 /* Fold COND_EXPR_COND of each COND_EXPR.  */
468 
void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
	{
	  location_t loc = gimple_location (stmt);
	  tree cond;
	  bool zerop, onep;

	  /* Defer overflow warnings; only emit them if the fold
	     actually decided the condition (see undefer below).  */
	  fold_defer_overflow_warnings ();
	  cond = fold_binary_loc (loc, gimple_cond_code (stmt), boolean_type_node,
			      gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
	  if (cond)
	    {
	      zerop = integer_zerop (cond);
	      onep = integer_onep (cond);
	    }
	  else
	    zerop = onep = false;

	  fold_undefer_overflow_warnings (zerop || onep,
					  stmt,
					  WARN_STRICT_OVERFLOW_CONDITIONAL);
	  /* Rewrite the condition as constant false/true in place.  */
	  if (zerop)
	    gimple_cond_make_false (stmt);
	  else if (onep)
	    gimple_cond_make_true (stmt);
	}
    }
}
505 
506 /* Join all the blocks in the flowgraph.  */
507 
static void
make_edges (void)
{
  basic_block bb;
  /* Innermost open OMP region while scanning; used to pair
     GIMPLE_OMP_RETURN/CONTINUE with their region entry.  */
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
	{
	  enum gimple_code code = gimple_code (last);
	  switch (code)
	    {
	    case GIMPLE_GOTO:
	      make_goto_expr_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_RETURN:
	      make_edge (bb, EXIT_BLOCK_PTR, 0);
	      fallthru = false;
	      break;
	    case GIMPLE_COND:
	      make_cond_expr_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_SWITCH:
	      make_gimple_switch_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_RESX:
	      make_eh_edges (last);
	      fallthru = false;
	      break;
	    case GIMPLE_EH_DISPATCH:
	      fallthru = make_eh_dispatch_edges (last);
	      break;

	    case GIMPLE_CALL:
	      /* If this function receives a nonlocal goto, then we need to
		 make edges from this call site to all the nonlocal goto
		 handlers.  */
	      if (stmt_can_make_abnormal_goto (last))
		make_abnormal_goto_edges (bb, true);

	      /* If this statement has reachable exception handlers, then
		 create abnormal edges to them.  */
	      make_eh_edges (last);

	      /* Some calls are known not to return.  */
	      fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
	      break;

	    case GIMPLE_ASSIGN:
	       /* A GIMPLE_ASSIGN may throw internally and thus be considered
		  control-altering. */
	      if (is_ctrl_altering_stmt (last))
		make_eh_edges (last);
	      fallthru = true;
	      break;

	    case GIMPLE_ASM:
	      make_gimple_asm_edges (bb);
	      fallthru = true;
	      break;

	    case GIMPLE_OMP_PARALLEL:
	    case GIMPLE_OMP_TASK:
	    case GIMPLE_OMP_FOR:
	    case GIMPLE_OMP_SINGLE:
	    case GIMPLE_OMP_MASTER:
	    case GIMPLE_OMP_ORDERED:
	    case GIMPLE_OMP_CRITICAL:
	    case GIMPLE_OMP_SECTION:
	      cur_region = new_omp_region (bb, code, cur_region);
	      fallthru = true;
	      break;

	    case GIMPLE_OMP_SECTIONS:
	      cur_region = new_omp_region (bb, code, cur_region);
	      fallthru = true;
	      break;

	    case GIMPLE_OMP_SECTIONS_SWITCH:
	      fallthru = false;
	      break;

            case GIMPLE_OMP_ATOMIC_LOAD:
            case GIMPLE_OMP_ATOMIC_STORE:
               fallthru = true;
               break;

	    case GIMPLE_OMP_RETURN:
	      /* In the case of a GIMPLE_OMP_SECTION, the edge will go
		 somewhere other than the next block.  This will be
		 created later.  */
	      cur_region->exit = bb;
	      fallthru = cur_region->type != GIMPLE_OMP_SECTION;
	      cur_region = cur_region->outer;
	      break;

	    case GIMPLE_OMP_CONTINUE:
	      cur_region->cont = bb;
	      switch (cur_region->type)
		{
		case GIMPLE_OMP_FOR:
		  /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
		     succs edges as abnormal to prevent splitting
		     them.  */
		  single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
		  /* Make the loopback edge.  */
		  make_edge (bb, single_succ (cur_region->entry),
			     EDGE_ABNORMAL);

		  /* Create an edge from GIMPLE_OMP_FOR to exit, which
		     corresponds to the case that the body of the loop
		     is not executed at all.  */
		  make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
		  make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
		  fallthru = false;
		  break;

		case GIMPLE_OMP_SECTIONS:
		  /* Wire up the edges into and out of the nested sections.  */
		  {
		    basic_block switch_bb = single_succ (cur_region->entry);

		    struct omp_region *i;
		    for (i = cur_region->inner; i ; i = i->next)
		      {
			gcc_assert (i->type == GIMPLE_OMP_SECTION);
			make_edge (switch_bb, i->entry, 0);
			make_edge (i->exit, bb, EDGE_FALLTHRU);
		      }

		    /* Make the loopback edge to the block with
		       GIMPLE_OMP_SECTIONS_SWITCH.  */
		    make_edge (bb, switch_bb, 0);

		    /* Make the edge from the switch to exit.  */
		    make_edge (switch_bb, bb->next_bb, 0);
		    fallthru = false;
		  }
		  break;

		default:
		  gcc_unreachable ();
		}
	      break;

	    default:
	      gcc_assert (!stmt_ends_bb_p (last));
	      fallthru = true;
	    }
	}
      else
	fallthru = true;

      if (fallthru)
        {
	  make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
	  if (last)
            assign_discriminator (gimple_location (last), bb->next_bb);
	}
    }

  if (root_omp_region)
    free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}
687 
688 /* Trivial hash function for a location_t.  ITEM is a pointer to
689    a hash table entry that maps a location_t to a discriminator.  */
690 
691 static unsigned int
692 locus_map_hash (const void *item)
693 {
694   return ((const struct locus_discrim_map *) item)->locus;
695 }
696 
697 /* Equality function for the locus-to-discriminator map.  VA and VB
698    point to the two hash table entries to compare.  */
699 
700 static int
701 locus_map_eq (const void *va, const void *vb)
702 {
703   const struct locus_discrim_map *a = (const struct locus_discrim_map *) va;
704   const struct locus_discrim_map *b = (const struct locus_discrim_map *) vb;
705   return a->locus == b->locus;
706 }
707 
708 /* Find the next available discriminator value for LOCUS.  The
709    discriminator distinguishes among several basic blocks that
710    share a common locus, allowing for more accurate sample-based
711    profiling.  */
712 
static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  /* Stack-allocated key used only for the lookup.  */
  item.locus = locus;
  item.discriminator = 0;
  slot = (struct locus_discrim_map **)
      htab_find_slot_with_hash (discriminator_per_locus, (void *) &item,
                                (hashval_t) locus, INSERT);
  gcc_assert (slot);
  /* First time we see this locus: allocate and zero a real entry
     (freed by the table's free() callback on htab_delete).  */
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  /* Pre-increment: the first discriminator handed out is 1.  */
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
735 
736 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */
737 
738 static bool
739 same_line_p (location_t locus1, location_t locus2)
740 {
741   expanded_location from, to;
742 
743   if (locus1 == locus2)
744     return true;
745 
746   from = expand_location (locus1);
747   to = expand_location (locus2);
748 
749   if (from.line != to.line)
750     return false;
751   if (from.file == to.file)
752     return true;
753   return (from.file != NULL
754           && to.file != NULL
755           && strcmp (from.file, to.file) == 0);
756 }
757 
758 /* Assign a unique discriminator value to block BB if it begins at the same
759    LOCUS as its predecessor block.  */
760 
761 static void
762 assign_discriminator (location_t locus, basic_block bb)
763 {
764   gimple first_in_to_bb, last_in_to_bb;
765 
766   if (locus == 0 || bb->discriminator != 0)
767     return;
768 
769   first_in_to_bb = first_non_label_stmt (bb);
770   last_in_to_bb = last_stmt (bb);
771   if ((first_in_to_bb && same_line_p (locus, gimple_location (first_in_to_bb)))
772       || (last_in_to_bb && same_line_p (locus, gimple_location (last_in_to_bb))))
773     bb->discriminator = next_discriminator_for_locus (locus);
774 }
775 
776 /* Create the edges for a GIMPLE_COND starting at block BB.  */
777 
static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;
  location_t entry_locus;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  entry_locus = gimple_location (entry);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  assign_discriminator (entry_locus, then_bb);
  e->goto_locus = gimple_location (then_stmt);
  if (e->goto_locus)
    e->goto_block = gimple_block (then_stmt);
  /* NOTE(review): only the false edge is NULL-checked here (make_edge
     can return NULL if the edge already exists); matches upstream.  */
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    {
      assign_discriminator (entry_locus, else_bb);
      e->goto_locus = gimple_location (else_stmt);
      if (e->goto_locus)
	e->goto_block = gimple_block (else_stmt);
    }

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
819 
820 
821 /* Called for each element in the hash table (P) as we delete the
822    edge to cases hash table.
823 
824    Clear all the TREE_CHAINs to prevent problems with copying of
825    SWITCH_EXPRs and structure sharing rules, then free the hash table
826    element.  */
827 
828 static bool
829 edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
830 		       void *data ATTRIBUTE_UNUSED)
831 {
832   tree t, next;
833 
834   for (t = (tree) *value; t; t = next)
835     {
836       next = TREE_CHAIN (t);
837       TREE_CHAIN (t) = NULL;
838     }
839 
840   *value = NULL;
841   return false;
842 }
843 
844 /* Start recording information mapping edges to case labels.  */
845 
/* Start recording information mapping edges to case labels.  Must not
   already be recording (edge_to_cases acts as the "active" flag).  */
void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
}
852 
853 /* Return nonzero if we are recording information for case labels.  */
854 
/* Return nonzero if we are recording information for case labels,
   i.e. the edge-to-cases map has been created and not yet destroyed.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}
860 
861 /* Stop recording information mapping edges to case labels and
862    remove any information we have recorded.  */
/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  The TREE_CHAINs used to
   link the cases are cleared before the map is destroyed.  */
void
end_recording_case_labels (void)
{
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
}
870 
871 /* If we are inside a {start,end}_recording_cases block, then return
872    a chain of CASE_LABEL_EXPRs from T which reference E.
873 
874    Otherwise return NULL.  */
875 
static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      TREE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  /* Every outgoing edge of E->src now has an entry, including E.  */
  return (tree) *pointer_map_contains (edge_to_cases, e);
}
912 
913 /* Create the edges for a GIMPLE_SWITCH starting at block BB.  */
914 
915 static void
916 make_gimple_switch_edges (basic_block bb)
917 {
918   gimple entry = last_stmt (bb);
919   location_t entry_locus;
920   size_t i, n;
921 
922   entry_locus = gimple_location (entry);
923 
924   n = gimple_switch_num_labels (entry);
925 
926   for (i = 0; i < n; ++i)
927     {
928       tree lab = CASE_LABEL (gimple_switch_label (entry, i));
929       basic_block label_bb = label_to_block (lab);
930       make_edge (bb, label_bb, 0);
931       assign_discriminator (entry_locus, label_bb);
932     }
933 }
934 
935 
936 /* Return the basic block holding label DEST.  */
937 
basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if ((errorcount || sorrycount) && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      /* gsi_insert assigns a UID to the label as a side effect.  */
      uid = LABEL_DECL_UID (dest);
    }
  /* Return NULL for labels the map does not know about.  */
  if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
      <= (unsigned int) uid)
    return NULL;
  return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
}
960 
961 /* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
962    is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.  */
963 
964 void
965 make_abnormal_goto_edges (basic_block bb, bool for_call)
966 {
967   basic_block target_bb;
968   gimple_stmt_iterator gsi;
969 
970   FOR_EACH_BB (target_bb)
971     for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
972       {
973 	gimple label_stmt = gsi_stmt (gsi);
974 	tree target;
975 
976 	if (gimple_code (label_stmt) != GIMPLE_LABEL)
977 	  break;
978 
979 	target = gimple_label_label (label_stmt);
980 
981 	/* Make an edge to every label block that has been marked as a
982 	   potential target for a computed goto or a non-local goto.  */
983 	if ((FORCED_LABEL (target) && !for_call)
984 	    || (DECL_NONLOCAL (target) && for_call))
985 	  {
986 	    make_edge (bb, target_bb, EDGE_ABNORMAL);
987 	    break;
988 	  }
989       }
990 }
991 
992 /* Create edges for a goto statement at block BB.  */
993 
994 static void
995 make_goto_expr_edges (basic_block bb)
996 {
997   gimple_stmt_iterator last = gsi_last_bb (bb);
998   gimple goto_t = gsi_stmt (last);
999 
1000   /* A simple GOTO creates normal edges.  */
1001   if (simple_goto_p (goto_t))
1002     {
1003       tree dest = gimple_goto_dest (goto_t);
1004       basic_block label_bb = label_to_block (dest);
1005       edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1006       e->goto_locus = gimple_location (goto_t);
1007       assign_discriminator (e->goto_locus, label_bb);
1008       if (e->goto_locus)
1009 	e->goto_block = gimple_block (goto_t);
1010       gsi_remove (&last, true);
1011       return;
1012     }
1013 
1014   /* A computed GOTO creates abnormal edges.  */
1015   make_abnormal_goto_edges (bb, false);
1016 }
1017 
1018 /* Create edges for an asm statement with labels at block BB.  */
1019 
1020 static void
1021 make_gimple_asm_edges (basic_block bb)
1022 {
1023   gimple stmt = last_stmt (bb);
1024   location_t stmt_loc = gimple_location (stmt);
1025   int i, n = gimple_asm_nlabels (stmt);
1026 
1027   for (i = 0; i < n; ++i)
1028     {
1029       tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1030       basic_block label_bb = label_to_block (label);
1031       make_edge (bb, label_bb, 0);
1032       assign_discriminator (stmt_loc, label_bb);
1033     }
1034 }
1035 
1036 /*---------------------------------------------------------------------------
1037 			       Flowgraph analysis
1038 ---------------------------------------------------------------------------*/
1039 
1040 /* Cleanup useless labels in basic blocks.  This is something we wish
1041    to do early because it allows us to group case labels before creating
1042    the edges for the CFG, and it speeds up block statement iterators in
1043    all passes later on.
1044    We rerun this pass after CFG is created, to get rid of the labels that
1045    are no longer referenced.  After then we do not run it any more, since
1046    (almost) no new labels should be created.  */
1047 
1048 /* A map from basic block index to the leading label of that block.  */
/* Allocated (XCNEWVEC) and freed by cleanup_dead_labels; indexed by
   bb->index.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;
1057 
1058 /* Given LABEL return the first label in the same basic block.  */
1059 
1060 static tree
1061 main_block_label (tree label)
1062 {
1063   basic_block bb = label_to_block (label);
1064   tree main_label = label_for_bb[bb->index].label;
1065 
1066   /* label_to_block possibly inserted undefined label into the chain.  */
1067   if (!main_label)
1068     {
1069       label_for_bb[bb->index].label = label;
1070       main_label = label;
1071     }
1072 
1073   label_for_bb[bb->index].used = true;
1074   return main_label;
1075 }
1076 
1077 /* Clean up redundant labels within the exception tree.  */
1078 
static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  /* Nothing to do if this function has no exception handling data.  */
  if (cfun->eh == NULL)
    return;

  /* Redirect each post-landing-pad label to the leading label of its
     block, transferring the landing-pad number along with it.  */
  for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  /* Do the same for the labels recorded in the EH region tree.  */
  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	/* These region kinds carry no label.  */
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}
1127 
1128 
1129 /* Cleanup redundant labels.  This is a three-step process:
1130      1) Find the leading label for each block.
1131      2) Redirect all references to labels to the leading labels.
1132      3) Cleanup all useless labels.  */
1133 
void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  gimple stmt = gsi_stmt (i);

	  /* Labels only appear at the start of a block.  */
	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    tree true_label = gimple_cond_true_label (stmt);
	    tree false_label = gimple_cond_false_label (stmt);

	    if (true_label)
	      gimple_cond_set_true_label (stmt, main_block_label (true_label));
	    if (false_label)
	      gimple_cond_set_false_label (stmt, main_block_label (false_label));
	    break;
	  }

	case GIMPLE_SWITCH:
	  {
	    size_t i, n = gimple_switch_num_labels (stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (stmt, i);
		tree label = main_block_label (CASE_LABEL (case_label));
		CASE_LABEL (case_label) = label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    int i, n = gimple_asm_nlabels (stmt);

	    /* Redirect the label operands of an asm goto.  */
	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
	    {
	      tree new_dest = main_block_label (gimple_goto_dest (stmt));
	      gimple_goto_set_dest (stmt, new_dest);
	    }
	  break;

	default:
	  break;
      }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  gimple stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (stmt);

	  /* Keep the leading label, user labels, and labels that must
	     survive because of non-local gotos or address-taking.  */
	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
1280 
1281 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1282    and scan the sorted vector of cases.  Combine the ones jumping to the
1283    same label.
1284    Eg. three separate entries 1: 2: 3: become one entry 1..3:  */
1285 
void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	{
	  int old_size = gimple_switch_num_labels (stmt);
	  int i, j, new_size = old_size;
	  tree default_case = NULL_TREE;
	  tree default_label = NULL_TREE;
	  bool has_default;

	  /* The default label is always the first case in a switch
	     statement after gimplification if it was not optimized
	     away */
	  if (!CASE_LOW (gimple_switch_default_label (stmt))
	      && !CASE_HIGH (gimple_switch_default_label (stmt)))
	    {
	      default_case = gimple_switch_default_label (stmt);
	      default_label = CASE_LABEL (default_case);
	      has_default = true;
	    }
	  else
	    has_default = false;

	  /* Look for possible opportunities to merge cases.  */
	  if (has_default)
	    i = 1;
	  else
	    i = 0;
	  while (i < old_size)
	    {
	      tree base_case, base_label, base_high;
	      base_case = gimple_switch_label (stmt, i);

	      gcc_assert (base_case);
	      base_label = CASE_LABEL (base_case);

	      /* Discard cases that have the same destination as the
		 default case.  */
	      if (base_label == default_label)
		{
		  gimple_switch_set_label (stmt, i, NULL_TREE);
		  i++;
		  new_size--;
		  continue;
		}

	      base_high = CASE_HIGH (base_case)
			  ? CASE_HIGH (base_case)
			  : CASE_LOW (base_case);
	      i++;

	      /* Try to merge case labels.  Break out when we reach the end
		 of the label vector or when we cannot merge the next case
		 label with the current one.  */
	      while (i < old_size)
		{
		  tree merge_case = gimple_switch_label (stmt, i);
	          tree merge_label = CASE_LABEL (merge_case);
		  /* T is the value one past the end of BASE_CASE's range.  */
		  tree t = int_const_binop (PLUS_EXPR, base_high,
					    integer_one_node, 1);

		  /* Merge the cases if they jump to the same place,
		     and their ranges are consecutive.  */
		  if (merge_label == base_label
		      && tree_int_cst_equal (CASE_LOW (merge_case), t))
		    {
		      base_high = CASE_HIGH (merge_case) ?
			CASE_HIGH (merge_case) : CASE_LOW (merge_case);
		      CASE_HIGH (base_case) = base_high;
		      gimple_switch_set_label (stmt, i, NULL_TREE);
		      new_size--;
		      i++;
		    }
		  else
		    break;
		}
	    }

	  /* Compress the case labels in the label vector, and adjust the
	     length of the vector.  Merged/discarded slots were nulled out
	     above; surviving labels are shifted down over them.  */
	  for (i = 0, j = 0; i < new_size; i++)
	    {
	      while (! gimple_switch_label (stmt, j))
		j++;
	      gimple_switch_set_label (stmt, i,
				       gimple_switch_label (stmt, j++));
	    }

	  gcc_assert (new_size <= old_size);
	  gimple_switch_set_num_labels (stmt, new_size);
	}
    }
}
1385 
1386 /* Checks whether we can merge block B into block A.  */
1387 
static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;
  gimple_seq phis;

  /* A must have exactly one successor...  */
  if (!single_succ_p (a))
    return false;

  /* ...reached by a normal (non-abnormal, non-EH) edge...  */
  if (single_succ_edge (a)->flags & (EDGE_ABNORMAL | EDGE_EH))
    return false;

  /* ...and that successor must be B.  */
  if (single_succ (a) != b)
    return false;

  /* B must have A as its only predecessor.  */
  if (!single_pred_p (b))
    return false;

  /* The exit block cannot be merged into anything.  */
  if (b == EXIT_BLOCK_PTR)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
	break;
      lab = gimple_label_label (stmt);

      /* Do not remove user labels.  */
      if (!DECL_ARTIFICIAL (lab))
	return false;
    }

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  phis = phi_nodes (b);
  if (!gimple_seq_empty_p (phis)
      && name_mappings_registered_p ())
    return false;

  return true;
}
1450 
/* Return true if the variable whose chain of uses starts at HEAD has no
   nondebug uses.  */
1453 bool
1454 has_zero_uses_1 (const ssa_use_operand_t *head)
1455 {
1456   const ssa_use_operand_t *ptr;
1457 
1458   for (ptr = head->next; ptr != head; ptr = ptr->next)
1459     if (!is_gimple_debug (USE_STMT (ptr)))
1460       return false;
1461 
1462   return true;
1463 }
1464 
/* Return true if the variable whose chain of uses starts at HEAD has a
   single nondebug use.  Set USE_P and STMT to that single nondebug
   use, if so, or to NULL otherwise.  */
1468 bool
1469 single_imm_use_1 (const ssa_use_operand_t *head,
1470 		  use_operand_p *use_p, gimple *stmt)
1471 {
1472   ssa_use_operand_t *ptr, *single_use = 0;
1473 
1474   for (ptr = head->next; ptr != head; ptr = ptr->next)
1475     if (!is_gimple_debug (USE_STMT (ptr)))
1476       {
1477 	if (single_use)
1478 	  {
1479 	    single_use = NULL;
1480 	    break;
1481 	  }
1482 	single_use = ptr;
1483       }
1484 
1485   if (use_p)
1486     *use_p = single_use;
1487 
1488   if (stmt)
1489     *stmt = single_use ? single_use->loc.stmt : NULL;
1490 
1491   return !!single_use;
1492 }
1493 
1494 /* Replaces all uses of NAME by VAL.  */
1495 
void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Rewrite every use of NAME within STMT.  */
      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL)
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		     would prevent replacement.  */
		  gcc_assert (!is_gimple_reg (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      /* Non-PHI statements may need re-folding and EH cleanup after
	 the replacement.  */
      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  size_t i;

	  fold_stmt_inplace (stmt);
	  if (cfgcleanup_altered_bbs && !is_gimple_debug (stmt))
	    bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

	  /* FIXME.  This should go in update_stmt.  */
	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    {
	      tree op = gimple_op (stmt, i);
              /* Operands may be empty here.  For example, the labels
                 of a GIMPLE_COND are nulled out following the creation
                 of the corresponding CFG edges.  */
	      if (op && TREE_CODE (op) == ADDR_EXPR)
		recompute_tree_invariant_for_addr_expr (op);
	    }

	  maybe_clean_or_replace_eh_stmt (stmt, stmt);
	  update_stmt (stmt);
	}
    }

  /* Every use of NAME must have been rewritten above.  */
  gcc_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;
      loop_iterator li;

      FOR_EACH_LOOP (li, loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
1562 
1563 /* Merge block B into block A.  */
1564 
static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;
  gimple_seq phis = phi_nodes (b);

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start (phis); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = !is_gimple_reg (def)
			      || may_propagate_copy (def, use);

      /* In case we maintain loop closed ssa form, do not propagate arguments
	 of loop exit phi nodes.  */
      if (current_loops
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
	  && is_gimple_reg (def)
	  && TREE_CODE (use) == SSA_NAME
	  && a->loop_father != b->loop_father)
	may_replace_uses = false;

      if (!may_replace_uses)
	{
	  gcc_assert (is_gimple_reg (def));

	  /* Note that just emitting the copies is fine -- there is no problem
	     with ordering of phi nodes.  This is because A is the single
	     predecessor of B, therefore results of the phi nodes cannot
	     appear as arguments of the phi nodes.  */
	  copy = gimple_build_assign (def, use);
	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
          remove_phi_node (&psi, false);
	}
      else
        {
	  /* If we deal with a PHI for virtual operands, we can simply
	     propagate these without fussing with folding or updating
	     the stmt.  */
	  if (!is_gimple_reg (def))
	    {
	      imm_use_iterator iter;
	      use_operand_p use_p;
	      gimple stmt;

	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, use);

	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
	    }
	  else
            replace_uses_by (def, use);

          remove_phi_node (&psi, true);
        }
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (stmt);
	  int lp_nr;

	  gsi_remove (&gsi, false);

	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos).  So we can not just delete the
	     label.  Instead we move the label to the start of block A.  */
	  if (FORCED_LABEL (label))
	    {
	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
	      gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
	    }

	  /* If the label was an EH landing-pad target, sever the
	     association now that the label is gone from the IL.  */
	  lp_nr = EH_LANDING_PAD_NR (label);
	  if (lp_nr)
	    {
	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
	      lp->post_landing_pad = NULL;
	    }
	}
      else
	{
	  gimple_set_bb (stmt, a);
	  gsi_next (&gsi);
	}
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  /* Record that A changed so cfgcleanup revisits it.  */
  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}
1682 
1683 
1684 /* Return the one of two successors of BB that is not reachable by a
1685    complex edge, if there is one.  Else, return BB.  We use
1686    this in optimizations that use post-dominators for their heuristics,
1687    to catch the cases in C++ where function calls are involved.  */
1688 
1689 basic_block
1690 single_noncomplex_succ (basic_block bb)
1691 {
1692   edge e0, e1;
1693   if (EDGE_COUNT (bb->succs) != 2)
1694     return bb;
1695 
1696   e0 = EDGE_SUCC (bb, 0);
1697   e1 = EDGE_SUCC (bb, 1);
1698   if (e0->flags & EDGE_COMPLEX)
1699     return e1->dest;
1700   if (e1->flags & EDGE_COMPLEX)
1701     return e0->dest;
1702 
1703   return bb;
1704 }
1705 
/* CALL is a GIMPLE call statement.  Set the current function's
   calls_alloca / calls_setjmp flags accordingly.  */
1707 
1708 void
1709 notice_special_calls (gimple call)
1710 {
1711   int flags = gimple_call_flags (call);
1712 
1713   if (flags & ECF_MAY_BE_ALLOCA)
1714     cfun->calls_alloca = true;
1715   if (flags & ECF_RETURNS_TWICE)
1716     cfun->calls_setjmp = true;
1717 }
1718 
1719 
1720 /* Clear flags set by notice_special_calls.  Used by dead code removal
1721    to update the flags.  */
1722 
1723 void
1724 clear_special_calls (void)
1725 {
1726   cfun->calls_alloca = false;
1727   cfun->calls_setjmp = false;
1728 }
1729 
1730 /* Remove PHI nodes associated with basic block BB and all edges out of BB.  */
1731 
1732 static void
1733 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1734 {
1735   /* Since this block is no longer reachable, we can just delete all
1736      of its PHI nodes.  */
1737   remove_phi_nodes (bb);
1738 
1739   /* Remove edges to BB's successors.  */
1740   while (EDGE_COUNT (bb->succs) > 0)
1741     remove_edge (EDGE_SUCC (bb, 0));
1742 }
1743 
1744 
1745 /* Remove statements of basic block BB.  */
1746 
static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
	{
	  dump_bb (bb, dump_file, 0);
	  fprintf (dump_file, "\n");
	}
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
	 with it.  */
      if (loop->latch == bb
	  || loop->header == bb)
	free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
	{
	  gimple stmt = gsi_stmt (i);
	  if (gimple_code (stmt) == GIMPLE_LABEL
	      && (FORCED_LABEL (gimple_label_label (stmt))
		  || DECL_NONLOCAL (gimple_label_label (stmt))))
	    {
	      basic_block new_bb;
	      gimple_stmt_iterator new_gsi;

	      /* A non-reachable non-local label may still be referenced.
		 But it no longer needs to carry the extra semantics of
		 non-locality.  */
	      if (DECL_NONLOCAL (gimple_label_label (stmt)))
		{
		  DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
		  FORCED_LABEL (gimple_label_label (stmt)) = 1;
		}

	      /* Relocate the label to the preceding block so any
		 remaining references stay valid.  */
	      new_bb = bb->prev_bb;
	      new_gsi = gsi_start_bb (new_bb);
	      gsi_remove (&i, false);
	      gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      /* Release SSA definitions if we are in SSA.  Note that we
		 may be called when not in SSA.  For example,
		 final_cleanup calls this function via
		 cleanup_tree_cfg.  */
	      if (gimple_in_ssa_p (cfun))
		release_defs (stmt);

	      gsi_remove (&i, true);
	    }

	  /* Removal may have invalidated the iterator; resynchronize to
	     the (new) last statement when that happens, otherwise keep
	     walking backwards.  */
	  if (gsi_end_p (i))
	    i = gsi_last_bb (bb);
	  else
	    gsi_prev (&i);
	}
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple = NULL;
}
1826 
1827 
1828 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
1829    predicate VAL, return the edge that will be taken out of the block.
1830    If VAL does not match a unique edge, NULL is returned.  */
1831 
1832 edge
1833 find_taken_edge (basic_block bb, tree val)
1834 {
1835   gimple stmt;
1836 
1837   stmt = last_stmt (bb);
1838 
1839   gcc_assert (stmt);
1840   gcc_assert (is_ctrl_stmt (stmt));
1841 
1842   if (val == NULL)
1843     return NULL;
1844 
1845   if (!is_gimple_min_invariant (val))
1846     return NULL;
1847 
1848   if (gimple_code (stmt) == GIMPLE_COND)
1849     return find_taken_edge_cond_expr (bb, val);
1850 
1851   if (gimple_code (stmt) == GIMPLE_SWITCH)
1852     return find_taken_edge_switch_expr (bb, val);
1853 
1854   if (computed_goto_p (stmt))
1855     {
1856       /* Only optimize if the argument is a label, if the argument is
1857 	 not a label then we can not construct a proper CFG.
1858 
1859          It may be the case that we only need to allow the LABEL_REF to
1860          appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
1861          appear inside a LABEL_EXPR just to be safe.  */
1862       if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
1863 	  && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
1864 	return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
1865       return NULL;
1866     }
1867 
1868   gcc_unreachable ();
1869 }
1870 
1871 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1872    statement, determine which of the outgoing edges will be taken out of the
1873    block.  Return NULL if either edge may be taken.  */
1874 
1875 static edge
1876 find_taken_edge_computed_goto (basic_block bb, tree val)
1877 {
1878   basic_block dest;
1879   edge e = NULL;
1880 
1881   dest = label_to_block (val);
1882   if (dest)
1883     {
1884       e = find_edge (bb, dest);
1885       gcc_assert (e != NULL);
1886     }
1887 
1888   return e;
1889 }
1890 
1891 /* Given a constant value VAL and the entry block BB to a COND_EXPR
1892    statement, determine which of the two edges will be taken out of the
1893    block.  Return NULL if either edge may be taken.  */
1894 
1895 static edge
1896 find_taken_edge_cond_expr (basic_block bb, tree val)
1897 {
1898   edge true_edge, false_edge;
1899 
1900   extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1901 
1902   gcc_assert (TREE_CODE (val) == INTEGER_CST);
1903   return (integer_zerop (val) ? false_edge : true_edge);
1904 }
1905 
1906 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
1907    statement, determine which edge will be taken out of the block.  Return
1908    NULL if any edge may be taken.  */
1909 
1910 static edge
1911 find_taken_edge_switch_expr (basic_block bb, tree val)
1912 {
1913   basic_block dest_bb;
1914   edge e;
1915   gimple switch_stmt;
1916   tree taken_case;
1917 
1918   switch_stmt = last_stmt (bb);
1919   taken_case = find_case_label_for_value (switch_stmt, val);
1920   dest_bb = label_to_block (CASE_LABEL (taken_case));
1921 
1922   e = find_edge (bb, dest_bb);
1923   gcc_assert (e);
1924   return e;
1925 }
1926 
1927 
1928 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
1929    We can make optimal use here of the fact that the case labels are
1930    sorted: We can do a binary search for a case matching VAL.  */
1931 
static tree
find_case_label_for_value (gimple switch_stmt, tree val)
{
  size_t low, high, n = gimple_switch_num_labels (switch_stmt);
  tree default_case = gimple_switch_default_label (switch_stmt);

  /* Binary search over the sorted case labels; LOW/HIGH bracket the
     candidate range.  */
  for (low = 0, high = n; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = gimple_switch_label (switch_stmt, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
	high = i;
      else
	low = i;

      if (CASE_HIGH (t) == NULL)
	{
	  /* A single-valued case label.  */
	  if (cmp == 0)
	    return t;
	}
      else
	{
	  /* A case range.  We can only handle integer ranges.  */
	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
	    return t;
	}
    }

  /* No case matched VAL; the switch takes its default label.  */
  return default_case;
}
1968 
1969 
1970 /* Dump a basic block on stderr.  */
1971 
1972 void
1973 gimple_debug_bb (basic_block bb)
1974 {
1975   gimple_dump_bb (bb, stderr, 0, TDF_VOPS|TDF_MEMSYMS);
1976 }
1977 
1978 
1979 /* Dump basic block with index N on stderr.  */
1980 
1981 basic_block
1982 gimple_debug_bb_n (int n)
1983 {
1984   gimple_debug_bb (BASIC_BLOCK (n));
1985   return BASIC_BLOCK (n);
1986 }
1987 
1988 
1989 /* Dump the CFG on stderr.
1990 
1991    FLAGS are the same used by the tree dumping functions
1992    (see TDF_* in tree-pass.h).  */
1993 
1994 void
1995 gimple_debug_cfg (int flags)
1996 {
1997   gimple_dump_cfg (stderr, flags);
1998 }
1999 
2000 
2001 /* Dump the program showing basic block boundaries on the given FILE.
2002 
2003    FLAGS are the same used by the tree dumping functions (see TDF_* in
2004    tree.h).  */
2005 
void
gimple_dump_cfg (FILE *file, int flags)
{
  if (flags & TDF_DETAILS)
    {
      const char *funcname
	= lang_hooks.decl_printable_name (current_function_decl, 2);

      /* Header: function name followed by global CFG counts.  */
      fputc ('\n', file);
      fprintf (file, ";; Function %s\n\n", funcname);
      fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
	       n_basic_blocks, n_edges, last_basic_block);

      brief_dump_cfg (file);
      fprintf (file, "\n");
    }

  if (flags & TDF_STATS)
    dump_cfg_stats (file);

  /* Always dump the function body itself, with basic block
     boundaries marked.  */
  dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
}
2028 
2029 
2030 /* Dump CFG statistics on FILE.  */
2031 
void
dump_cfg_stats (FILE *file)
{
  /* High-water mark of merged labels, kept across calls so repeated
     dumps report the maximum seen so far.  */
  static long max_num_merged_labels = 0;
  unsigned long size, total = 0;
  long num_edges;
  basic_block bb;
  /* Column formats for the table rows below.  */
  const char * const fmt_str   = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
  const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);


  fprintf (file, "\nCFG Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", "  Number of  ", "Memory");
  fprintf (file, fmt_str, "", "  instances  ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  /* Memory used by the basic block structures themselves.  */
  size = n_basic_blocks * sizeof (struct basic_block_def);
  total += size;
  fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
	   SCALE (size), LABEL (size));

  /* Count edges by summing successor counts over all blocks.  */
  num_edges = 0;
  FOR_EACH_BB (bb)
    num_edges += EDGE_COUNT (bb->succs);
  size = num_edges * sizeof (struct edge_def);
  total += size;
  fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
	   LABEL (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  /* Track the largest number of coalesced labels observed; the counter
     itself is bumped in stmt_starts_bb_p when labels share a block.  */
  if (cfg_stats.num_merged_labels > max_num_merged_labels)
    max_num_merged_labels = cfg_stats.num_merged_labels;

  fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
	   cfg_stats.num_merged_labels, max_num_merged_labels);

  fprintf (file, "\n");
}
2080 
2081 
2082 /* Dump CFG statistics on stderr.  Keep extern so that it's always
2083    linked in the final executable.  */
2084 
void
debug_cfg_stats (void)
{
  /* Debugger entry point: dump the CFG statistics to stderr.  */
  dump_cfg_stats (stderr);
}
2090 
2091 
2092 /* Dump the flowgraph to a .vcg FILE.  */
2093 
static void
gimple_cfg2vcg (FILE *file)
{
  edge e;
  edge_iterator ei;
  basic_block bb;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);

  /* Write the file header.  */
  fprintf (file, "graph: { title: \"%s\"\n", funcname);
  fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
  fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");

  /* Write blocks and edges.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    {
      fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
	       e->dest->index);

      /* Fake edges are drawn dotted with a lower priority.  */
      if (e->flags & EDGE_FAKE)
	fprintf (file, " linestyle: dotted priority: 10");
      else
	fprintf (file, " linestyle: solid priority: 100");

      fprintf (file, " }\n");
    }
  fputc ('\n', file);

  FOR_EACH_BB (bb)
    {
      enum gimple_code head_code, end_code;
      const char *head_name, *end_name;
      int head_line = 0;
      int end_line = 0;
      gimple first = first_stmt (bb);
      gimple last = last_stmt (bb);

      /* Label each node with the GIMPLE code and line number of the
	 block's first and last statements, when present.  */
      if (first)
	{
	  head_code = gimple_code (first);
	  head_name = gimple_code_name[head_code];
	  head_line = get_lineno (first);
	}
      else
	head_name = "no-statement";

      if (last)
	{
	  end_code = gimple_code (last);
	  end_name = gimple_code_name[end_code];
	  end_line = get_lineno (last);
	}
      else
	end_name = "no-statement";

      fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
	       bb->index, bb->index, head_name, head_line, end_name,
	       end_line);

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  /* The exit block has the fixed node name "EXIT".  */
	  if (e->dest == EXIT_BLOCK_PTR)
	    fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
	  else
	    fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);

	  if (e->flags & EDGE_FAKE)
	    fprintf (file, " priority: 10 linestyle: dotted");
	  else
	    fprintf (file, " priority: 100 linestyle: solid");

	  fprintf (file, " }\n");
	}

      if (bb->next_bb != EXIT_BLOCK_PTR)
	fputc ('\n', file);
    }

  fputs ("}\n\n", file);
}
2175 
2176 
2177 
2178 /*---------------------------------------------------------------------------
2179 			     Miscellaneous helpers
2180 ---------------------------------------------------------------------------*/
2181 
2182 /* Return true if T represents a stmt that always transfers control.  */
2183 
2184 bool
2185 is_ctrl_stmt (gimple t)
2186 {
2187   switch (gimple_code (t))
2188     {
2189     case GIMPLE_COND:
2190     case GIMPLE_SWITCH:
2191     case GIMPLE_GOTO:
2192     case GIMPLE_RETURN:
2193     case GIMPLE_RESX:
2194       return true;
2195     default:
2196       return false;
2197     }
2198 }
2199 
2200 
2201 /* Return true if T is a statement that may alter the flow of control
2202    (e.g., a call to a non-returning function).  */
2203 
bool
is_ctrl_altering_stmt (gimple t)
{
  gcc_assert (t);

  switch (gimple_code (t))
    {
    case GIMPLE_CALL:
      {
	int flags = gimple_call_flags (t);

	/* A non-pure/const call alters flow control if the current
	   function has nonlocal labels.  */
	if (!(flags & (ECF_CONST | ECF_PURE)) && cfun->has_nonlocal_label)
	  return true;

	/* A call also alters control flow if it does not return.  */
	if (flags & ECF_NORETURN)
	  return true;
      }
      /* Ordinary calls fall through to the can-throw check below.  */
      break;

    case GIMPLE_EH_DISPATCH:
      /* EH_DISPATCH branches to the individual catch handlers at
	 this level of a try or allowed-exceptions region.  It can
	 fallthru to the next statement as well.  */
      return true;

    case GIMPLE_ASM:
      /* An asm statement with labels may branch to any of them.  */
      if (gimple_asm_nlabels (t) > 0)
	return true;
      break;

    CASE_GIMPLE_OMP:
      /* OpenMP directives alter control flow.  */
      return true;

    default:
      break;
    }

  /* If a statement can throw, it alters control flow.  */
  return stmt_can_throw_internal (t);
}
2248 
2249 
2250 /* Return true if T is a simple local goto.  */
2251 
2252 bool
2253 simple_goto_p (gimple t)
2254 {
2255   return (gimple_code (t) == GIMPLE_GOTO
2256 	  && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2257 }
2258 
2259 
2260 /* Return true if T can make an abnormal transfer of control flow.
2261    Transfers of control flow associated with EH are excluded.  */
2262 
2263 bool
2264 stmt_can_make_abnormal_goto (gimple t)
2265 {
2266   if (computed_goto_p (t))
2267     return true;
2268   if (is_gimple_call (t))
2269     return gimple_has_side_effects (t) && cfun->has_nonlocal_label;
2270   return false;
2271 }
2272 
2273 
2274 /* Return true if STMT should start a new basic block.  PREV_STMT is
2275    the statement preceding STMT.  It is used when STMT is a label or a
2276    case label.  Labels should only start a new basic block if their
2277    previous statement wasn't a label.  Otherwise, sequence of labels
2278    would generate unnecessary basic blocks that only contain a single
2279    label.  */
2280 
static inline bool
stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
{
  if (stmt == NULL)
    return false;

  /* Labels start a new basic block only if the preceding statement
     wasn't a label of the same type.  This prevents the creation of
     consecutive blocks that have nothing but a single label.  */
  if (gimple_code (stmt) == GIMPLE_LABEL)
    {
      /* Nonlocal and computed GOTO targets always start a new block.  */
      if (DECL_NONLOCAL (gimple_label_label (stmt))
	  || FORCED_LABEL (gimple_label_label (stmt)))
	return true;

      if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
	{
	  /* A nonlocal label may not share a block with a following
	     ordinary label.  */
	  if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
	    return true;

	  /* Consecutive ordinary labels are merged into one block;
	     record the merge for the CFG statistics dump.  */
	  cfg_stats.num_merged_labels++;
	  return false;
	}
      else
	return true;
    }

  return false;
}
2311 
2312 
2313 /* Return true if T should end a basic block.  */
2314 
2315 bool
2316 stmt_ends_bb_p (gimple t)
2317 {
2318   return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2319 }
2320 
2321 /* Remove block annotations and other data structures.  */
2322 
void
delete_tree_cfg_annotations (void)
{
  /* Drop the label -> block map; it is garbage collected once
     unreferenced.  */
  label_to_block_map = NULL;
}
2328 
2329 
2330 /* Return the first statement in basic block BB.  */
2331 
2332 gimple
2333 first_stmt (basic_block bb)
2334 {
2335   gimple_stmt_iterator i = gsi_start_bb (bb);
2336   gimple stmt = NULL;
2337 
2338   while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2339     {
2340       gsi_next (&i);
2341       stmt = NULL;
2342     }
2343   return stmt;
2344 }
2345 
2346 /* Return the first non-label statement in basic block BB.  */
2347 
2348 static gimple
2349 first_non_label_stmt (basic_block bb)
2350 {
2351   gimple_stmt_iterator i = gsi_start_bb (bb);
2352   while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2353     gsi_next (&i);
2354   return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2355 }
2356 
2357 /* Return the last statement in basic block BB.  */
2358 
2359 gimple
2360 last_stmt (basic_block bb)
2361 {
2362   gimple_stmt_iterator i = gsi_last_bb (bb);
2363   gimple stmt = NULL;
2364 
2365   while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2366     {
2367       gsi_prev (&i);
2368       stmt = NULL;
2369     }
2370   return stmt;
2371 }
2372 
2373 /* Return the last statement of an otherwise empty block.  Return NULL
2374    if the block is totally empty, or if it contains more than one
2375    statement.  */
2376 
2377 gimple
2378 last_and_only_stmt (basic_block bb)
2379 {
2380   gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2381   gimple last, prev;
2382 
2383   if (gsi_end_p (i))
2384     return NULL;
2385 
2386   last = gsi_stmt (i);
2387   gsi_prev_nondebug (&i);
2388   if (gsi_end_p (i))
2389     return last;
2390 
2391   /* Empty statements should no longer appear in the instruction stream.
2392      Everything that might have appeared before should be deleted by
2393      remove_useless_stmts, and the optimizers should just gsi_remove
2394      instead of smashing with build_empty_stmt.
2395 
2396      Thus the only thing that should appear here in a block containing
2397      one executable statement is a label.  */
2398   prev = gsi_stmt (i);
2399   if (gimple_code (prev) == GIMPLE_LABEL)
2400     return last;
2401   else
2402     return NULL;
2403 }
2404 
2405 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE.  */
2406 
static void
reinstall_phi_args (edge new_edge, edge old_edge)
{
  edge_var_map_vector v;
  edge_var_map *vm;
  int i;
  gimple_stmt_iterator phis;

  /* The PHI arguments stripped when OLD_EDGE was redirected were saved
     in its edge var map; nothing to do if none were recorded.  */
  v = redirect_edge_var_map_vector (old_edge);
  if (!v)
    return;

  /* Walk the saved (result, def) pairs and the PHI nodes of the new
     destination in lockstep, re-adding each argument on NEW_EDGE.  */
  for (i = 0, phis = gsi_start_phis (new_edge->dest);
       VEC_iterate (edge_var_map, v, i, vm) && !gsi_end_p (phis);
       i++, gsi_next (&phis))
    {
      gimple phi = gsi_stmt (phis);
      tree result = redirect_edge_var_map_result (vm);
      tree arg = redirect_edge_var_map_def (vm);

      /* The saved entries must correspond 1:1 to the PHI nodes.  */
      gcc_assert (result == gimple_phi_result (phi));

      add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
    }

  /* Discard the saved map entries for OLD_EDGE.  */
  redirect_edge_var_map_clear (old_edge);
}
2434 
2435 /* Returns the basic block after which the new basic block created
2436    by splitting edge EDGE_IN should be placed.  Tries to keep the new block
2437    near its "logical" location.  This is of most help to humans looking
2438    at debugging dumps.  */
2439 
2440 static basic_block
2441 split_edge_bb_loc (edge edge_in)
2442 {
2443   basic_block dest = edge_in->dest;
2444   basic_block dest_prev = dest->prev_bb;
2445 
2446   if (dest_prev)
2447     {
2448       edge e = find_edge (dest_prev, dest);
2449       if (e && !(e->flags & EDGE_COMPLEX))
2450 	return edge_in->src;
2451     }
2452   return dest_prev;
2453 }
2454 
2455 /* Split a (typically critical) edge EDGE_IN.  Return the new block.
2456    Abort on abnormal edges.  */
2457 
static basic_block
gimple_split_edge (edge edge_in)
{
  basic_block new_bb, after_bb, dest;
  edge new_edge, e;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  dest = edge_in->dest;

  after_bb = split_edge_bb_loc (edge_in);

  /* The new block inherits the frequency and count of the edge being
     split and falls through to DEST unconditionally.  */
  new_bb = create_empty_bb (after_bb);
  new_bb->frequency = EDGE_FREQUENCY (edge_in);
  new_bb->count = edge_in->count;
  new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = edge_in->count;

  /* Redirect EDGE_IN into the new block, then restore on NEW_EDGE the
     PHI arguments the redirection queued away.  */
  e = redirect_edge_and_branch (edge_in, new_bb);
  gcc_assert (e == edge_in);
  reinstall_phi_args (new_edge, e);

  return new_bb;
}
2484 
2485 /* Callback for walk_tree, check that all elements with address taken are
2486    properly noticed as such.  The DATA is an int* that is 1 if TP was seen
2487    inside a PHI node.  */
2488 
static tree
verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp, x;

  /* Do not walk into types; there is nothing to verify below them
     here.  */
  if (TYPE_P (t))
    *walk_subtrees = 0;

  /* Check operand N for being valid GIMPLE and give error MSG if not.  */
#define CHECK_OP(N, MSG) \
  do { if (!is_gimple_val (TREE_OPERAND (t, N)))		\
       { error (MSG); return TREE_OPERAND (t, N); }} while (0)

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      if (SSA_NAME_IN_FREE_LIST (t))
	{
	  error ("SSA name in freelist but still referenced");
	  return *tp;
	}
      break;

    case INDIRECT_REF:
      /* The dereferenced pointer must itself be a GIMPLE register or
	 invariant.  */
      x = TREE_OPERAND (t, 0);
      if (!is_gimple_reg (x) && !is_gimple_min_invariant (x))
	{
	  error ("Indirect reference's operand is not a register or a constant.");
	  return x;
	}
      break;

    case ASSERT_EXPR:
      x = fold (ASSERT_EXPR_COND (t));
      if (x == boolean_false_node)
	{
	  error ("ASSERT_EXPR with an always-false condition");
	  return *tp;
	}
      break;

    case MODIFY_EXPR:
      /* With tuples, assignments are GIMPLE_ASSIGN statements; a
	 MODIFY_EXPR tree must never survive into GIMPLE.  */
      error ("MODIFY_EXPR not expected while having tuples.");
      return *tp;

    case ADDR_EXPR:
      {
	bool old_constant;
	bool old_side_effects;
	bool new_constant;
	bool new_side_effects;

	gcc_assert (is_gimple_address (t));

	/* Verify that the TREE_CONSTANT and TREE_SIDE_EFFECTS flags
	   were kept up to date: recompute them and compare.  */
	old_constant = TREE_CONSTANT (t);
	old_side_effects = TREE_SIDE_EFFECTS (t);

	recompute_tree_invariant_for_addr_expr (t);
	new_side_effects = TREE_SIDE_EFFECTS (t);
	new_constant = TREE_CONSTANT (t);

        if (old_constant != new_constant)
	  {
	    error ("constant not recomputed when ADDR_EXPR changed");
	    return t;
	  }
	if (old_side_effects != new_side_effects)
	  {
	    error ("side effects not recomputed when ADDR_EXPR changed");
	    return t;
	  }

	/* Skip any references (they will be checked when we recurse down the
	   tree) and ensure that any variable used as a prefix is marked
	   addressable.  */
	for (x = TREE_OPERAND (t, 0);
	     handled_component_p (x);
	     x = TREE_OPERAND (x, 0))
	  ;

	if (!(TREE_CODE (x) == VAR_DECL
	      || TREE_CODE (x) == PARM_DECL
	      || TREE_CODE (x) == RESULT_DECL))
	  return NULL;
	if (!TREE_ADDRESSABLE (x))
	  {
	    error ("address taken, but ADDRESSABLE bit not set");
	    return x;
	  }
	if (DECL_GIMPLE_REG_P (x))
	  {
	    error ("DECL_GIMPLE_REG_P set on a variable with address taken");
	    return x;
	  }

	break;
      }

    case COND_EXPR:
      x = COND_EXPR_COND (t);
      if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
	{
	  error ("non-integral used in condition");
	  return x;
	}
      if (!is_gimple_condexpr (x))
        {
	  error ("invalid conditional operand");
	  return x;
	}
      break;

    case NON_LVALUE_EXPR:
	/* NON_LVALUE_EXPRs are stripped during gimplification and must
	   never appear in GIMPLE.  */
	gcc_unreachable ();

    CASE_CONVERT:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
      CHECK_OP (0, "invalid operand to unary operator");
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
      /* We have a nest of references.  Verify that each of the operands
	 that determine where to reference is either a constant or a variable,
	 verify that the base is valid, and then show we've already checked
	 the subtrees.  */
      while (handled_component_p (t))
	{
	  if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
	    CHECK_OP (2, "invalid COMPONENT_REF offset operator");
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      CHECK_OP (1, "invalid array index");
	      if (TREE_OPERAND (t, 2))
		CHECK_OP (2, "invalid array lower bound");
	      if (TREE_OPERAND (t, 3))
		CHECK_OP (3, "invalid array stride");
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF)
	    {
	      /* Operands 1 and 2 (field size and position) must be
		 host integer constants, and the result type's
		 precision must agree with the field size.  */
	      if (!host_integerp (TREE_OPERAND (t, 1), 1)
		  || !host_integerp (TREE_OPERAND (t, 2), 1))
		{
		  error ("invalid position or size operand to BIT_FIELD_REF");
		  return t;
		}
	      else if (INTEGRAL_TYPE_P (TREE_TYPE (t))
		       && (TYPE_PRECISION (TREE_TYPE (t))
			   != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
		{
		  error ("integral result type precision does not match "
			 "field size of BIT_FIELD_REF");
		  return t;
		}
	      if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
		  && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
		      != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
		{
		  error ("mode precision of non-integral result does not "
			 "match field size of BIT_FIELD_REF");
		  return t;
		}
	    }

	  t = TREE_OPERAND (t, 0);
	}

      /* After peeling the reference chain, the base must be an
	 invariant or a valid lvalue.  */
      if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
	{
	  error ("invalid reference prefix");
	  return t;
	}
      *walk_subtrees = 0;
      break;
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
	 POINTER_PLUS_EXPR. */
      if (POINTER_TYPE_P (TREE_TYPE (t)))
	{
	  error ("invalid operand to plus/minus, type is a pointer");
	  return t;
	}
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case POINTER_PLUS_EXPR:
      /* Check to make sure the first operand is a pointer or reference type. */
      if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
	{
	  error ("invalid operand to pointer plus, first operand is not a pointer");
	  return t;
	}
      /* Check to make sure the second operand is an integer with type of
	 sizetype.  */
      if (!useless_type_conversion_p (sizetype,
				     TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid operand to pointer plus, second operand is not an "
		 "integer with type of sizetype.");
	  return t;
	}
      /* FALLTHROUGH */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case CONSTRUCTOR:
      /* Constant vector constructors need no element-wise checking.  */
      if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	*walk_subtrees = 0;
      break;

    default:
      break;
    }
  return NULL;

#undef CHECK_OP
}
2754 
2755 
2756 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2757    Returns true if there is an error, otherwise false.  */
2758 
static bool
verify_types_in_gimple_min_lval (tree expr)
{
  tree op;

  /* Plain declarations and SSA names are valid as-is.  */
  if (is_gimple_id (expr))
    return false;

  if (!INDIRECT_REF_P (expr)
      && TREE_CODE (expr) != TARGET_MEM_REF)
    {
      error ("invalid expression for min lvalue");
      return true;
    }

  /* TARGET_MEM_REFs are strange beasts.  */
  if (TREE_CODE (expr) == TARGET_MEM_REF)
    return false;

  /* For an indirect reference the operand must be a GIMPLE value and
     the pointed-to type must match the reference's type.  */
  op = TREE_OPERAND (expr, 0);
  if (!is_gimple_val (op))
    {
      error ("invalid operand in indirect reference");
      debug_generic_stmt (op);
      return true;
    }
  if (!useless_type_conversion_p (TREE_TYPE (expr),
				  TREE_TYPE (TREE_TYPE (op))))
    {
      error ("type mismatch in indirect reference");
      debug_generic_stmt (TREE_TYPE (expr));
      debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
      return true;
    }

  return false;
}
2796 
2797 /* Verify if EXPR is a valid GIMPLE reference expression.  If
2798    REQUIRE_LVALUE is true verifies it is an lvalue.  Returns true
2799    if there is an error, otherwise false.  */
2800 
static bool
verify_types_in_gimple_reference (tree expr, bool require_lvalue)
{
  /* Peel the reference chain from the outside in, checking each
     component, then validate whatever base remains.  */
  while (handled_component_p (expr))
    {
      tree op = TREE_OPERAND (expr, 0);

      if (TREE_CODE (expr) == ARRAY_REF
	  || TREE_CODE (expr) == ARRAY_RANGE_REF)
	{
	  /* Index, lower bound and stride (when present) must all be
	     GIMPLE values.  */
	  if (!is_gimple_val (TREE_OPERAND (expr, 1))
	      || (TREE_OPERAND (expr, 2)
		  && !is_gimple_val (TREE_OPERAND (expr, 2)))
	      || (TREE_OPERAND (expr, 3)
		  && !is_gimple_val (TREE_OPERAND (expr, 3))))
	    {
	      error ("invalid operands to array reference");
	      debug_generic_stmt (expr);
	      return true;
	    }
	}

      /* Verify if the reference array element types are compatible.  */
      if (TREE_CODE (expr) == ARRAY_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}
      if (TREE_CODE (expr) == ARRAY_RANGE_REF
	  && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array range reference");
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      if ((TREE_CODE (expr) == REALPART_EXPR
	   || TREE_CODE (expr) == IMAGPART_EXPR)
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in real/imagpart reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      if (TREE_CODE (expr) == COMPONENT_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("type mismatch in component reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
	  return true;
	}

      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
	{
	  /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
	     that their operand is not an SSA name or an invariant when
	     requiring an lvalue (this usually means there is a SRA or IPA-SRA
	     bug).  Otherwise there is nothing to verify, gross mismatches at
	     most invoke undefined behavior.  */
	  if (require_lvalue
	      && (TREE_CODE (op) == SSA_NAME
		  || is_gimple_min_invariant (op)))
	    {
	      error ("Conversion of an SSA_NAME on the left hand side.");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (!handled_component_p (op))
	    return false;
	}

      expr = op;
    }

  /* The remaining base: an invariant is fine for rvalues; otherwise it
     must be a valid minimal lvalue.  */
  return ((require_lvalue || !is_gimple_min_invariant (expr))
	  && verify_types_in_gimple_min_lval (expr));
}
2889 
2890 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
2891    list of pointer-to types that is trivially convertible to DEST.  */
2892 
2893 static bool
2894 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
2895 {
2896   tree src;
2897 
2898   if (!TYPE_POINTER_TO (src_obj))
2899     return true;
2900 
2901   for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
2902     if (useless_type_conversion_p (dest, src))
2903       return true;
2904 
2905   return false;
2906 }
2907 
2908 /* Return true if TYPE1 is a fixed-point type and if conversions to and
2909    from TYPE2 can be handled by FIXED_CONVERT_EXPR.  */
2910 
2911 static bool
2912 valid_fixed_convert_types_p (tree type1, tree type2)
2913 {
2914   return (FIXED_POINT_TYPE_P (type1)
2915 	  && (INTEGRAL_TYPE_P (type2)
2916 	      || SCALAR_FLOAT_TYPE_P (type2)
2917 	      || FIXED_POINT_TYPE_P (type2)));
2918 }
2919 
2920 /* Verify the contents of a GIMPLE_CALL STMT.  Returns true when there
2921    is a problem, otherwise false.  */
2922 
2923 static bool
2924 verify_gimple_call (gimple stmt)
2925 {
2926   tree fn = gimple_call_fn (stmt);
2927   tree fntype;
2928   unsigned i;
2929 
2930   if (TREE_CODE (fn) != OBJ_TYPE_REF
2931       && !is_gimple_val (fn))
2932     {
2933       error ("invalid function in gimple call");
2934       debug_generic_stmt (fn);
2935       return true;
2936     }
2937 
2938   if (!POINTER_TYPE_P (TREE_TYPE  (fn))
2939       || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
2940 	  && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE))
2941     {
2942       error ("non-function in gimple call");
2943       return true;
2944     }
2945 
2946   if (gimple_call_lhs (stmt)
2947       && (!is_gimple_lvalue (gimple_call_lhs (stmt))
2948 	  || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
2949     {
2950       error ("invalid LHS in gimple call");
2951       return true;
2952     }
2953 
2954   if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
2955     {
2956       error ("LHS in noreturn call");
2957       return true;
2958     }
2959 
2960   fntype = TREE_TYPE (TREE_TYPE (fn));
2961   if (gimple_call_lhs (stmt)
2962       && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
2963 				     TREE_TYPE (fntype))
2964       /* ???  At least C++ misses conversions at assignments from
2965 	 void * call results.
2966 	 ???  Java is completely off.  Especially with functions
2967 	 returning java.lang.Object.
2968 	 For now simply allow arbitrary pointer type conversions.  */
2969       && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
2970 	   && POINTER_TYPE_P (TREE_TYPE (fntype))))
2971     {
2972       error ("invalid conversion in gimple call");
2973       debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
2974       debug_generic_stmt (TREE_TYPE (fntype));
2975       return true;
2976     }
2977 
2978   if (gimple_call_chain (stmt)
2979       && !is_gimple_val (gimple_call_chain (stmt)))
2980     {
2981       error ("invalid static chain in gimple call");
2982       debug_generic_stmt (gimple_call_chain (stmt));
2983       return true;
2984     }
2985 
2986   /* If there is a static chain argument, this should not be an indirect
2987      call, and the decl should have DECL_STATIC_CHAIN set.  */
2988   if (gimple_call_chain (stmt))
2989     {
2990       if (TREE_CODE (fn) != ADDR_EXPR
2991 	  || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
2992 	{
2993 	  error ("static chain in indirect gimple call");
2994 	  return true;
2995 	}
2996       fn = TREE_OPERAND (fn, 0);
2997 
2998       if (!DECL_STATIC_CHAIN (fn))
2999 	{
3000 	  error ("static chain with function that doesn't use one");
3001 	  return true;
3002 	}
3003     }
3004 
3005   /* ???  The C frontend passes unpromoted arguments in case it
3006      didn't see a function declaration before the call.  So for now
3007      leave the call arguments mostly unverified.  Once we gimplify
3008      unit-at-a-time we have a chance to fix this.  */
3009 
3010   for (i = 0; i < gimple_call_num_args (stmt); ++i)
3011     {
3012       tree arg = gimple_call_arg (stmt, i);
3013       if (!is_gimple_operand (arg))
3014 	{
3015 	  error ("invalid argument to gimple call");
3016 	  debug_generic_expr (arg);
3017 	}
3018     }
3019 
3020   return false;
3021 }
3022 
3023 /* Verifies the gimple comparison with the result type TYPE and
3024    the operands OP0 and OP1.  */
3025 
static bool
verify_gimple_comparison (tree type, tree op0, tree op1)
{
  tree op0_type = TREE_TYPE (op0);
  tree op1_type = TREE_TYPE (op1);

  /* Both operands must be GIMPLE values.  */
  if (!is_gimple_val (op0) || !is_gimple_val (op1))
    {
      error ("invalid operands in gimple comparison");
      return true;
    }

  /* For comparisons we do not have the operations type as the
     effective type the comparison is carried out in.  Instead
     we require that either the first operand is trivially
     convertible into the second, or the other way around.
     The resulting type of a comparison may be any integral type.
     Because we special-case pointers to void we allow
     comparisons of pointers with the same mode as well.  */
  if ((!useless_type_conversion_p (op0_type, op1_type)
       && !useless_type_conversion_p (op1_type, op0_type)
       && (!POINTER_TYPE_P (op0_type)
	   || !POINTER_TYPE_P (op1_type)
	   || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
      || !INTEGRAL_TYPE_P (type))
    {
      error ("type mismatch in comparison expression");
      debug_generic_expr (type);
      debug_generic_expr (op0_type);
      debug_generic_expr (op1_type);
      return true;
    }

  return false;
}
3061 
/* Verify a gimple assignment statement STMT with an unary rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_unary (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);

  /* The LHS must be a register, except that at -O0 complex-typed
     assignments are allowed to go to memory.  */
  if (!is_gimple_reg (lhs)
      && !(optimize == 0
	   && TREE_CODE (lhs_type) == COMPLEX_TYPE))
    {
      error ("non-register as LHS of unary operation");
      return true;
    }

  if (!is_gimple_val (rhs1))
    {
      error ("invalid operand in unary operation");
      return true;
    }

  /* First handle conversions.  */
  switch (rhs_code)
    {
    CASE_CONVERT:
      {
	/* Allow conversions between integral types and pointers only if
	   there is no sign or zero extension involved.
	   For targets where the precision of sizetype doesn't match that
	   of pointers we need to allow arbitrary conversions from and
	   to sizetype.  */
	if ((POINTER_TYPE_P (lhs_type)
	     && INTEGRAL_TYPE_P (rhs1_type)
	     && (TYPE_PRECISION (lhs_type) >= TYPE_PRECISION (rhs1_type)
		 || rhs1_type == sizetype))
	    || (POINTER_TYPE_P (rhs1_type)
		&& INTEGRAL_TYPE_P (lhs_type)
		&& (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
		    || lhs_type == sizetype)))
	  return false;

	/* Allow conversion from integer to offset type and vice versa.  */
	if ((TREE_CODE (lhs_type) == OFFSET_TYPE
	     && TREE_CODE (rhs1_type) == INTEGER_TYPE)
	    || (TREE_CODE (lhs_type) == INTEGER_TYPE
		&& TREE_CODE (rhs1_type) == OFFSET_TYPE))
	  return false;

	/* Otherwise assert we are converting between types of the
	   same kind.  */
	if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
	  {
	    error ("invalid types in nop conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case ADDR_SPACE_CONVERT_EXPR:
      {
	/* Both sides must be pointers, and the conversion must actually
	   change the address space of the pointed-to type.  */
	if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
	    || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
		== TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
	  {
	    error ("invalid types in address space conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FIXED_CONVERT_EXPR:
      {
	/* Fixed-point conversions must be valid in at least one
	   direction between the two types.  */
	if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
	    && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
	  {
	    error ("invalid types in fixed-point conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FLOAT_EXPR:
      {
	/* Integer to floating-point conversion.  */
	if (!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
	  {
	    error ("invalid types in conversion to floating point");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

        return false;
      }

    case FIX_TRUNC_EXPR:
      {
	/* Floating-point to integer conversion.  */
	if (!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
	  {
	    error ("invalid types in conversion to integer");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

        return false;
      }

    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
      /* FIXME.  */
      return false;

    case TRUTH_NOT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case PAREN_EXPR:
    case NON_LVALUE_EXPR:
    case CONJ_EXPR:
      /* These are checked by the generic no-conversion rule below.  */
      break;

    default:
      gcc_unreachable ();
    }

  /* For the remaining codes assert there is no conversion involved.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type))
    {
      error ("non-trivial conversion in unary operation");
      debug_generic_expr (lhs_type);
      debug_generic_expr (rhs1_type);
      return true;
    }

  return false;
}
3217 
/* Verify a gimple assignment statement STMT with a binary rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_binary (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  tree rhs2_type = TREE_TYPE (rhs2);

  /* The LHS must be a register, except that at -O0 complex-typed
     assignments are allowed to go to memory.  */
  if (!is_gimple_reg (lhs)
      && !(optimize == 0
	   && TREE_CODE (lhs_type) == COMPLEX_TYPE))
    {
      error ("non-register as LHS of binary operation");
      return true;
    }

  if (!is_gimple_val (rhs1)
      || !is_gimple_val (rhs2))
    {
      error ("invalid operands in binary operation");
      return true;
    }

  /* First handle operations that involve different types.  */
  switch (rhs_code)
    {
    case COMPLEX_EXPR:
      {
	/* Builds a complex value from two scalar components.  */
	if (TREE_CODE (lhs_type) != COMPLEX_TYPE
	    || !(INTEGRAL_TYPE_P (rhs1_type)
	         || SCALAR_FLOAT_TYPE_P (rhs1_type))
	    || !(INTEGRAL_TYPE_P (rhs2_type)
	         || SCALAR_FLOAT_TYPE_P (rhs2_type)))
	  {
	    error ("type mismatch in complex expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      {
	/* Shifts and rotates are ok on integral types, fixed point
	   types and integer vector types.  */
	if ((!INTEGRAL_TYPE_P (rhs1_type)
	     && !FIXED_POINT_TYPE_P (rhs1_type)
	     && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
		  && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
	    || (!INTEGRAL_TYPE_P (rhs2_type)
		/* Vector shifts of vectors are also ok.  */
		&& !(TREE_CODE (rhs1_type) == VECTOR_TYPE
		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
		     && TREE_CODE (rhs2_type) == VECTOR_TYPE
		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
	    || !useless_type_conversion_p (lhs_type, rhs1_type))
	  {
	    error ("type mismatch in shift expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:
      {
	/* Whole-vector shifts: rhs1 is the vector, rhs2 the bit count.  */
	if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	    || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
		 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
		 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
		 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
	    || (!INTEGRAL_TYPE_P (rhs2_type)
		&& (TREE_CODE (rhs2_type) != VECTOR_TYPE
		    || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
	    || !useless_type_conversion_p (lhs_type, rhs1_type))
	  {
	    error ("type mismatch in vector shift expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }
	/* For shifting a vector of floating point components we
	   only allow shifting by a constant multiple of the element size.  */
	if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
	    && (TREE_CODE (rhs2) != INTEGER_CST
		|| !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
					   TYPE_SIZE (TREE_TYPE (rhs1_type)))))
	  {
	    error ("non-element sized vector shift of floating point vector");
	    return true;
	  }

	return false;
      }

    case PLUS_EXPR:
      {
	/* We use regular PLUS_EXPR for vectors.
	   ???  This just makes the checker happy and may not be what is
	   intended.  */
	if (TREE_CODE (lhs_type) == VECTOR_TYPE
	    && POINTER_TYPE_P (TREE_TYPE (lhs_type)))
	  {
	    if (TREE_CODE (rhs1_type) != VECTOR_TYPE
		|| TREE_CODE (rhs2_type) != VECTOR_TYPE)
	      {
		error ("invalid non-vector operands to vector valued plus");
		return true;
	      }
	    /* Re-check the element types as a pointer-plus below.  */
	    lhs_type = TREE_TYPE (lhs_type);
	    rhs1_type = TREE_TYPE (rhs1_type);
	    rhs2_type = TREE_TYPE (rhs2_type);
	    /* PLUS_EXPR is commutative, so we might end up canonicalizing
	       the pointer to 2nd place.  */
	    if (POINTER_TYPE_P (rhs2_type))
	      {
		tree tem = rhs1_type;
		rhs1_type = rhs2_type;
		rhs2_type = tem;
	      }
	    goto do_pointer_plus_expr_check;
	  }
      }
    /* Fallthru.  */
    case MINUS_EXPR:
      {
	/* Pointer arithmetic must use POINTER_PLUS_EXPR, never
	   PLUS_EXPR/MINUS_EXPR.  */
	if (POINTER_TYPE_P (lhs_type)
	    || POINTER_TYPE_P (rhs1_type)
	    || POINTER_TYPE_P (rhs2_type))
	  {
	    error ("invalid (pointer) operands to plus/minus");
	    return true;
	  }

	/* Continue with generic binary expression handling.  */
	break;
      }

    case POINTER_PLUS_EXPR:
      {
do_pointer_plus_expr_check:
	/* rhs1 is the pointer, rhs2 the sizetype offset; the result
	   keeps the pointer type.  */
	if (!POINTER_TYPE_P (rhs1_type)
	    || !useless_type_conversion_p (lhs_type, rhs1_type)
	    || !useless_type_conversion_p (sizetype, rhs2_type))
	  {
	    error ("type mismatch in pointer plus expression");
	    debug_generic_stmt (lhs_type);
	    debug_generic_stmt (rhs1_type);
	    debug_generic_stmt (rhs2_type);
	    return true;
	  }

	return false;
      }

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Short-circuit forms never survive gimplification.  */
      gcc_unreachable ();

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      {
	/* We allow any kind of integral typed argument and result.  */
	if (!INTEGRAL_TYPE_P (rhs1_type)
	    || !INTEGRAL_TYPE_P (rhs2_type)
	    || !INTEGRAL_TYPE_P (lhs_type))
	  {
	    error ("type mismatch in binary truth expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      /* Comparisons are also binary, but the result type is not
	 connected to the operand types.  */
      return verify_gimple_comparison (lhs_type, rhs1, rhs2);

    case WIDEN_SUM_EXPR:
    case WIDEN_MULT_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_EXTRACT_EVEN_EXPR:
    case VEC_EXTRACT_ODD_EXPR:
    case VEC_INTERLEAVE_HIGH_EXPR:
    case VEC_INTERLEAVE_LOW_EXPR:
      /* FIXME.  */
      return false;

    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      /* Continue with generic binary expression handling.  */
      break;

    default:
      gcc_unreachable ();
    }

  /* Generic rule: both operand types must match the result type.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type)
      || !useless_type_conversion_p (lhs_type, rhs2_type))
    {
      error ("type mismatch in binary expression");
      debug_generic_stmt (lhs_type);
      debug_generic_stmt (rhs1_type);
      debug_generic_stmt (rhs2_type);
      return true;
    }

  return false;
}
3480 
/* Verify a gimple assignment statement STMT with a single rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_single (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  bool res = false;

  /* Plain copies must not involve any conversion.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type))
    {
      error ("non-trivial conversion at assignment");
      debug_generic_expr (lhs_type);
      debug_generic_expr (rhs1_type);
      return true;
    }

  if (handled_component_p (lhs))
    res |= verify_types_in_gimple_reference (lhs, true);

  /* Special codes we cannot handle via their class.  */
  switch (rhs_code)
    {
    case ADDR_EXPR:
      {
	tree op = TREE_OPERAND (rhs1, 0);
	if (!is_gimple_addressable (op))
	  {
	    error ("invalid operand in unary expression");
	    return true;
	  }

	/* The type of the address must be compatible with the
	   pointed-to type of the taken object, modulo a useless
	   pointer conversion in one direction.  */
	if (!types_compatible_p (TREE_TYPE (op), TREE_TYPE (TREE_TYPE (rhs1)))
	    && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
							  TREE_TYPE (op)))
	  {
	    error ("type mismatch in address expression");
	    debug_generic_stmt (TREE_TYPE (rhs1));
	    debug_generic_stmt (TREE_TYPE (op));
	    return true;
	  }

	return verify_types_in_gimple_reference (op, true);
      }

    /* tcc_reference  */
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case TARGET_MEM_REF:
      /* A store to memory of register type must take its value from
	 a register, not from another memory reference.  */
      if (!is_gimple_reg (lhs)
	  && is_gimple_reg_type (TREE_TYPE (lhs)))
	{
	  error ("invalid rhs for gimple memory store");
	  debug_generic_stmt (lhs);
	  debug_generic_stmt (rhs1);
	  return true;
	}
      return res || verify_types_in_gimple_reference (rhs1, false);

    /* tcc_constant  */
    case SSA_NAME:
    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case STRING_CST:
      return res;

    /* tcc_declaration  */
    case CONST_DECL:
      return res;
    case VAR_DECL:
    case PARM_DECL:
      /* A decl-to-decl copy where the LHS is a memory store of
	 register type requires the RHS to be a register.  */
      if (!is_gimple_reg (lhs)
	  && !is_gimple_reg (rhs1)
	  && is_gimple_reg_type (TREE_TYPE (lhs)))
	{
	  error ("invalid rhs for gimple memory store");
	  debug_generic_stmt (lhs);
	  debug_generic_stmt (rhs1);
	  return true;
	}
      return res;

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case WITH_SIZE_EXPR:
    case POLYNOMIAL_CHREC:
    case DOT_PROD_EXPR:
    case VEC_COND_EXPR:
    case REALIGN_LOAD_EXPR:
      /* FIXME.  */
      return res;

    default:;
    }

  return res;
}
3595 
3596 /* Verify the contents of a GIMPLE_ASSIGN STMT.  Returns true when there
3597    is a problem, otherwise false.  */
3598 
3599 static bool
3600 verify_gimple_assign (gimple stmt)
3601 {
3602   switch (gimple_assign_rhs_class (stmt))
3603     {
3604     case GIMPLE_SINGLE_RHS:
3605       return verify_gimple_assign_single (stmt);
3606 
3607     case GIMPLE_UNARY_RHS:
3608       return verify_gimple_assign_unary (stmt);
3609 
3610     case GIMPLE_BINARY_RHS:
3611       return verify_gimple_assign_binary (stmt);
3612 
3613     default:
3614       gcc_unreachable ();
3615     }
3616 }
3617 
3618 /* Verify the contents of a GIMPLE_RETURN STMT.  Returns true when there
3619    is a problem, otherwise false.  */
3620 
3621 static bool
3622 verify_gimple_return (gimple stmt)
3623 {
3624   tree op = gimple_return_retval (stmt);
3625   tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
3626 
3627   /* We cannot test for present return values as we do not fix up missing
3628      return values from the original source.  */
3629   if (op == NULL)
3630     return false;
3631 
3632   if (!is_gimple_val (op)
3633       && TREE_CODE (op) != RESULT_DECL)
3634     {
3635       error ("invalid operand in return statement");
3636       debug_generic_stmt (op);
3637       return true;
3638     }
3639 
3640   if (!useless_type_conversion_p (restype, TREE_TYPE (op))
3641       /* ???  With C++ we can have the situation that the result
3642 	 decl is a reference type while the return type is an aggregate.  */
3643       && !(TREE_CODE (op) == RESULT_DECL
3644 	   && TREE_CODE (TREE_TYPE (op)) == REFERENCE_TYPE
3645 	   && useless_type_conversion_p (restype, TREE_TYPE (TREE_TYPE (op)))))
3646     {
3647       error ("invalid conversion in return statement");
3648       debug_generic_stmt (restype);
3649       debug_generic_stmt (TREE_TYPE (op));
3650       return true;
3651     }
3652 
3653   return false;
3654 }
3655 
3656 
3657 /* Verify the contents of a GIMPLE_GOTO STMT.  Returns true when there
3658    is a problem, otherwise false.  */
3659 
3660 static bool
3661 verify_gimple_goto (gimple stmt)
3662 {
3663   tree dest = gimple_goto_dest (stmt);
3664 
3665   /* ???  We have two canonical forms of direct goto destinations, a
3666      bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL.  */
3667   if (TREE_CODE (dest) != LABEL_DECL
3668       && (!is_gimple_val (dest)
3669 	  || !POINTER_TYPE_P (TREE_TYPE (dest))))
3670     {
3671       error ("goto destination is neither a label nor a pointer");
3672       return true;
3673     }
3674 
3675   return false;
3676 }
3677 
3678 /* Verify the contents of a GIMPLE_SWITCH STMT.  Returns true when there
3679    is a problem, otherwise false.  */
3680 
3681 static bool
3682 verify_gimple_switch (gimple stmt)
3683 {
3684   if (!is_gimple_val (gimple_switch_index (stmt)))
3685     {
3686       error ("invalid operand to switch statement");
3687       debug_generic_stmt (gimple_switch_index (stmt));
3688       return true;
3689     }
3690 
3691   return false;
3692 }
3693 
3694 
3695 /* Verify the contents of a GIMPLE_PHI.  Returns true if there is a problem,
3696    and false otherwise.  */
3697 
3698 static bool
3699 verify_gimple_phi (gimple stmt)
3700 {
3701   tree type = TREE_TYPE (gimple_phi_result (stmt));
3702   unsigned i;
3703 
3704   if (TREE_CODE (gimple_phi_result (stmt)) != SSA_NAME)
3705     {
3706       error ("Invalid PHI result");
3707       return true;
3708     }
3709 
3710   for (i = 0; i < gimple_phi_num_args (stmt); i++)
3711     {
3712       tree arg = gimple_phi_arg_def (stmt, i);
3713       if ((is_gimple_reg (gimple_phi_result (stmt))
3714 	   && !is_gimple_val (arg))
3715 	  || (!is_gimple_reg (gimple_phi_result (stmt))
3716 	      && !is_gimple_addressable (arg)))
3717 	{
3718 	  error ("Invalid PHI argument");
3719 	  debug_generic_stmt (arg);
3720 	  return true;
3721 	}
3722       if (!useless_type_conversion_p (type, TREE_TYPE (arg)))
3723 	{
3724 	  error ("Incompatible types in PHI argument %u", i);
3725 	  debug_generic_stmt (type);
3726 	  debug_generic_stmt (TREE_TYPE (arg));
3727 	  return true;
3728 	}
3729     }
3730 
3731   return false;
3732 }
3733 
3734 
3735 /* Verify a gimple debug statement STMT.
3736    Returns true if anything is wrong.  */
3737 
3738 static bool
3739 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
3740 {
3741   /* There isn't much that could be wrong in a gimple debug stmt.  A
3742      gimple debug bind stmt, for example, maps a tree, that's usually
3743      a VAR_DECL or a PARM_DECL, but that could also be some scalarized
3744      component or member of an aggregate type, to another tree, that
3745      can be an arbitrary expression.  These stmts expand into debug
3746      insns, and are converted to debug notes by var-tracking.c.  */
3747   return false;
3748 }
3749 
3750 
/* Verify the GIMPLE statement STMT.  Returns true if there is an
   error, otherwise false.  Dispatches to the per-code verifiers
   above.  */

static bool
verify_types_in_gimple_stmt (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return verify_gimple_assign (stmt);

    case GIMPLE_LABEL:
      /* The only operand of a label is the LABEL_DECL it defines.  */
      return TREE_CODE (gimple_label_label (stmt)) != LABEL_DECL;

    case GIMPLE_CALL:
      return verify_gimple_call (stmt);

    case GIMPLE_COND:
      if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
	{
	  error ("invalid comparison code in gimple cond");
	  return true;
	}
      /* The true/false labels, when present, must be LABEL_DECLs.  */
      if (!(!gimple_cond_true_label (stmt)
	    || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
	  || !(!gimple_cond_false_label (stmt)
	       || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
	{
	  error ("invalid labels in gimple cond");
	  return true;
	}

      return verify_gimple_comparison (boolean_type_node,
				       gimple_cond_lhs (stmt),
				       gimple_cond_rhs (stmt));

    case GIMPLE_GOTO:
      return verify_gimple_goto (stmt);

    case GIMPLE_SWITCH:
      return verify_gimple_switch (stmt);

    case GIMPLE_RETURN:
      return verify_gimple_return (stmt);

    case GIMPLE_ASM:
      return false;

    case GIMPLE_PHI:
      return verify_gimple_phi (stmt);

    /* Tuples that do not have tree operands.  */
    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
    case GIMPLE_RESX:
    case GIMPLE_EH_DISPATCH:
    case GIMPLE_EH_MUST_NOT_THROW:
      return false;

    CASE_GIMPLE_OMP:
      /* OpenMP directives are validated by the FE and never operated
	 on by the optimizers.  Furthermore, GIMPLE_OMP_FOR may contain
	 non-gimple expressions when the main index variable has had
	 its address taken.  This does not affect the loop itself
	 because the header of an GIMPLE_OMP_FOR is merely used to determine
	 how to setup the parallel iteration.  */
      return false;

    case GIMPLE_DEBUG:
      return verify_gimple_debug (stmt);

    default:
      gcc_unreachable ();
    }
}
3826 
3827 /* Verify the GIMPLE statements inside the sequence STMTS.  */
3828 
3829 static bool
3830 verify_types_in_gimple_seq_2 (gimple_seq stmts)
3831 {
3832   gimple_stmt_iterator ittr;
3833   bool err = false;
3834 
3835   for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
3836     {
3837       gimple stmt = gsi_stmt (ittr);
3838 
3839       switch (gimple_code (stmt))
3840         {
3841 	case GIMPLE_BIND:
3842 	  err |= verify_types_in_gimple_seq_2 (gimple_bind_body (stmt));
3843 	  break;
3844 
3845 	case GIMPLE_TRY:
3846 	  err |= verify_types_in_gimple_seq_2 (gimple_try_eval (stmt));
3847 	  err |= verify_types_in_gimple_seq_2 (gimple_try_cleanup (stmt));
3848 	  break;
3849 
3850 	case GIMPLE_EH_FILTER:
3851 	  err |= verify_types_in_gimple_seq_2 (gimple_eh_filter_failure (stmt));
3852 	  break;
3853 
3854 	case GIMPLE_CATCH:
3855 	  err |= verify_types_in_gimple_seq_2 (gimple_catch_handler (stmt));
3856 	  break;
3857 
3858 	default:
3859 	  {
3860 	    bool err2 = verify_types_in_gimple_stmt (stmt);
3861 	    if (err2)
3862 	      debug_gimple_stmt (stmt);
3863 	    err |= err2;
3864 	  }
3865 	}
3866     }
3867 
3868   return err;
3869 }
3870 
3871 
3872 /* Verify the GIMPLE statements inside the statement list STMTS.  */
3873 
3874 void
3875 verify_types_in_gimple_seq (gimple_seq stmts)
3876 {
3877   if (verify_types_in_gimple_seq_2 (stmts))
3878     internal_error ("verify_gimple failed");
3879 }
3880 
3881 
/* Verify STMT, return true if STMT is not in GIMPLE form.
   TODO: Implement type checking.  */

static bool
verify_stmt (gimple_stmt_iterator *gsi)
{
  tree addr;
  struct walk_stmt_info wi;
  bool last_in_block = gsi_one_before_end_p (*gsi);
  gimple stmt = gsi_stmt (*gsi);
  int lp_nr;

  if (is_gimple_omp (stmt))
    {
      /* OpenMP directives are validated by the FE and never operated
	 on by the optimizers.  Furthermore, GIMPLE_OMP_FOR may contain
	 non-gimple expressions when the main index variable has had
	 its address taken.  This does not affect the loop itself
	 because the header of an GIMPLE_OMP_FOR is merely used to determine
	 how to setup the parallel iteration.  */
      return false;
    }

  /* FIXME.  The C frontend passes unpromoted arguments in case it
     didn't see a function declaration before the call.  */
  if (is_gimple_call (stmt))
    {
      tree decl;

      if (!is_gimple_call_addr (gimple_call_fn (stmt)))
	{
	  error ("invalid function in call statement");
	  return true;
	}

      /* A looping-const-or-pure function must also be const or pure;
	 check the flags are consistent.  */
      decl = gimple_call_fndecl (stmt);
      if (decl
	  && TREE_CODE (decl) == FUNCTION_DECL
	  && DECL_LOOPING_CONST_OR_PURE_P (decl)
	  && (!DECL_PURE_P (decl))
	  && (!TREE_READONLY (decl)))
	{
	  error ("invalid pure const state for function");
	  return true;
	}
    }

  /* Debug stmts may contain arbitrary expressions; skip operand
     verification for them.  */
  if (is_gimple_debug (stmt))
    return false;

  /* Walk all operands of STMT through verify_expr; a non-NULL return
     value is the offending sub-tree.  */
  memset (&wi, 0, sizeof (wi));
  addr = walk_gimple_op (gsi_stmt (*gsi), verify_expr, &wi);
  if (addr)
    {
      debug_generic_expr (addr);
      inform (gimple_location (gsi_stmt (*gsi)), "in statement");
      debug_gimple_stmt (stmt);
      return true;
    }

  /* If the statement is marked as part of an EH region, then it is
     expected that the statement could throw.  Verify that when we
     have optimizations that simplify statements such that we prove
     that they cannot throw, that we update other data structures
     to match.  */
  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr != 0)
    {
      if (!stmt_could_throw_p (stmt))
	{
	  /* During IPA passes, ipa-pure-const sets nothrow flags on calls
	     and they are updated on statements only after fixup_cfg
	     is executed at beginning of expansion stage.  */
	  if (cgraph_state != CGRAPH_STATE_IPA_SSA)
	    {
	      error ("statement marked for throw, but doesn%'t");
	      goto fail;
	    }
	}
      else if (lp_nr > 0 && !last_in_block && stmt_can_throw_internal (stmt))
	{
	  /* A statement that can throw internally must end its block.  */
	  error ("statement marked for throw in middle of block");
	  goto fail;
	}
    }

  return false;

 fail:
  debug_gimple_stmt (stmt);
  return true;
}
3974 
3975 
3976 /* Return true when the T can be shared.  */
3977 
3978 bool
3979 tree_node_can_be_shared (tree t)
3980 {
3981   if (IS_TYPE_OR_DECL_P (t)
3982       || is_gimple_min_invariant (t)
3983       || TREE_CODE (t) == SSA_NAME
3984       || t == error_mark_node
3985       || TREE_CODE (t) == IDENTIFIER_NODE)
3986     return true;
3987 
3988   if (TREE_CODE (t) == CASE_LABEL_EXPR)
3989     return true;
3990 
3991   while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3992 	   && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
3993 	 || TREE_CODE (t) == COMPONENT_REF
3994 	 || TREE_CODE (t) == REALPART_EXPR
3995 	 || TREE_CODE (t) == IMAGPART_EXPR)
3996     t = TREE_OPERAND (t, 0);
3997 
3998   if (DECL_P (t))
3999     return true;
4000 
4001   return false;
4002 }
4003 
4004 
4005 /* Called via walk_gimple_stmt.  Verify tree sharing.  */
4006 
4007 static tree
4008 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4009 {
4010   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4011   struct pointer_set_t *visited = (struct pointer_set_t *) wi->info;
4012 
4013   if (tree_node_can_be_shared (*tp))
4014     {
4015       *walk_subtrees = false;
4016       return NULL;
4017     }
4018 
4019   if (pointer_set_insert (visited, *tp))
4020     return *tp;
4021 
4022   return NULL;
4023 }
4024 
4025 
4026 static bool eh_error_found;
4027 static int
4028 verify_eh_throw_stmt_node (void **slot, void *data)
4029 {
4030   struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4031   struct pointer_set_t *visited = (struct pointer_set_t *) data;
4032 
4033   if (!pointer_set_contains (visited, node->stmt))
4034     {
4035       error ("Dead STMT in EH table");
4036       debug_gimple_stmt (node->stmt);
4037       eh_error_found = true;
4038     }
4039   return 1;
4040 }
4041 
4042 
/* Verify the GIMPLE statements in every basic block.  Checks tree
   sharing, statement-to-block back pointers, the label map, EH
   landing pad numbers and the EH throw table; aborts via
   internal_error on any failure.  */

void
verify_stmts (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  bool err = false;
  /* VISITED tracks tree nodes for the sharing check; VISITED_STMTS
     records every statement seen so stale EH-table entries can be
     detected afterwards.  */
  struct pointer_set_t *visited, *visited_stmts;
  tree addr;
  struct walk_stmt_info wi;

  timevar_push (TV_TREE_STMT_VERIFY);
  visited = pointer_set_create ();
  visited_stmts = pointer_set_create ();

  memset (&wi, 0, sizeof (wi));
  wi.info = (void *) visited;

  FOR_EACH_BB (bb)
    {
      gimple phi;
      size_t i;

      /* First check the PHI nodes of BB.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  phi = gsi_stmt (gsi);
	  pointer_set_insert (visited_stmts, phi);
	  if (gimple_bb (phi) != bb)
	    {
	      error ("gimple_bb (phi) is set to a wrong basic block");
	      err |= true;
	    }

	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree t = gimple_phi_arg_def (phi, i);
	      tree addr;

	      if (!t)
		{
		  error ("missing PHI def");
		  debug_gimple_stmt (phi);
		  err |= true;
		  continue;
		}
	      /* Addressable variables do have SSA_NAMEs but they
		 are not considered gimple values.  */
	      else if (TREE_CODE (t) != SSA_NAME
		       && TREE_CODE (t) != FUNCTION_DECL
		       && !is_gimple_min_invariant (t))
		{
		  error ("PHI argument is not a GIMPLE value");
		  debug_gimple_stmt (phi);
		  debug_generic_expr (t);
		  err |= true;
		}

	      /* PHI arguments must not share unshareable tree nodes
		 with other statements.  */
	      addr = walk_tree (&t, verify_node_sharing, visited, NULL);
	      if (addr)
		{
		  error ("incorrect sharing of tree nodes");
		  debug_gimple_stmt (phi);
		  debug_generic_expr (addr);
		  err |= true;
		}
	    }

#ifdef ENABLE_TYPES_CHECKING
	  if (verify_gimple_phi (phi))
	    {
	      debug_gimple_stmt (phi);
	      err |= true;
	    }
#endif
	}

      /* Then check the ordinary statements of BB.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
	{
	  gimple stmt = gsi_stmt (gsi);

	  /* These statement codes only exist before lowering and must
	     never appear once the CFG has been built.  */
	  if (gimple_code (stmt) == GIMPLE_WITH_CLEANUP_EXPR
	      || gimple_code (stmt) == GIMPLE_BIND)
	    {
	      error ("invalid GIMPLE statement");
	      debug_gimple_stmt (stmt);
	      err |= true;
	    }

	  pointer_set_insert (visited_stmts, stmt);

	  if (gimple_bb (stmt) != bb)
	    {
	      error ("gimple_bb (stmt) is set to a wrong basic block");
	      debug_gimple_stmt (stmt);
	      err |= true;
	    }

	  if (gimple_code (stmt) == GIMPLE_LABEL)
	    {
	      tree decl = gimple_label_label (stmt);
	      int uid = LABEL_DECL_UID (decl);

	      /* The label must map back to this very block ...  */
	      if (uid == -1
		  || VEC_index (basic_block, label_to_block_map, uid) != bb)
		{
		  error ("incorrect entry in label_to_block_map");
		  err |= true;
		}

	      /* ... and a landing-pad label must agree with its EH
		 landing pad's post_landing_pad.  */
	      uid = EH_LANDING_PAD_NR (decl);
	      if (uid)
		{
		  eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
		  if (decl != lp->post_landing_pad)
		    {
		      error ("incorrect setting of landing pad number");
		      err |= true;
		    }
		}
	    }

	  err |= verify_stmt (&gsi);

#ifdef ENABLE_TYPES_CHECKING
	  if (verify_types_in_gimple_stmt (gsi_stmt (gsi)))
	    {
	      debug_gimple_stmt (stmt);
	      err |= true;
	    }
#endif
	  addr = walk_gimple_op (gsi_stmt (gsi), verify_node_sharing, &wi);
	  if (addr)
	    {
	      error ("incorrect sharing of tree nodes");
	      debug_gimple_stmt (stmt);
	      debug_generic_expr (addr);
	      err |= true;
	    }
	  /* verify_stmt above takes the iterator by address, so the
	     iterator is advanced manually here rather than in the
	     for-header.  */
	  gsi_next (&gsi);
	}
    }

  /* Every statement recorded in the EH throw table must still be
     present in the IL (see verify_eh_throw_stmt_node).  */
  eh_error_found = false;
  if (get_eh_throw_stmt_table (cfun))
    htab_traverse (get_eh_throw_stmt_table (cfun),
		   verify_eh_throw_stmt_node,
		   visited_stmts);

  if (err | eh_error_found)
    internal_error ("verify_stmts failed");

  pointer_set_destroy (visited);
  pointer_set_destroy (visited_stmts);
  verify_histograms ();
  timevar_pop (TV_TREE_STMT_VERIFY);
}
4200 
4201 
/* Verifies that the flow information is OK: label placement, absence
   of control flow inside blocks, and consistency between the last
   statement of each block and its outgoing edge flags.  Returns
   nonzero if an inconsistency was found.  */

static int
gimple_verify_flow_info (void)
{
  int err = 0;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple stmt;
  edge e;
  edge_iterator ei;

  /* The entry and exit blocks never carry statements.  */
  if (ENTRY_BLOCK_PTR->il.gimple)
    {
      error ("ENTRY_BLOCK has IL associated with it");
      err = 1;
    }

  if (EXIT_BLOCK_PTR->il.gimple)
    {
      error ("EXIT_BLOCK has IL associated with it");
      err = 1;
    }

  /* No block falls through into the exit block.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
	error ("fallthru to exit from bb %d", e->src->index);
	err = 1;
      }

  FOR_EACH_BB (bb)
    {
      bool found_ctrl_stmt = false;

      stmt = NULL;

      /* Skip labels on the start of basic block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  tree label;
	  gimple prev_stmt = stmt;

	  stmt = gsi_stmt (gsi);

	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (stmt);
	  /* Nonlocal labels and EH landing pad labels must be the
	     first label of their block.  */
	  if (prev_stmt && DECL_NONLOCAL (label))
	    {
	      error ("nonlocal label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
		       bb->index);
	      err = 1;
	    }

	  if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
	    {
	      error ("EH landing pad label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
		       bb->index);
	      err = 1;
	    }

	  /* The label map must point back to this block ...  */
	  if (label_to_block (label) != bb)
	    {
	      error ("label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " to block does not match in bb %d",
		       bb->index);
	      err = 1;
	    }

	  /* ... and the label must belong to the current function.  */
	  if (decl_function_context (label) != current_function_decl)
	    {
	      error ("label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " has incorrect context in bb %d",
		       bb->index);
	      err = 1;
	    }
	}

      /* Verify that body of basic block BB is free of control flow.  */
      for (; !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  /* A block-ending statement followed by more statements means
	     control flow in the middle of the block.  */
	  if (found_ctrl_stmt)
	    {
	      error ("control flow in the middle of basic block %d",
		     bb->index);
	      err = 1;
	    }

	  if (stmt_ends_bb_p (stmt))
	    found_ctrl_stmt = true;

	  /* The label-skipping loop above only covers leading labels;
	     any label seen here is misplaced.  */
	  if (gimple_code (stmt) == GIMPLE_LABEL)
	    {
	      error ("label ");
	      print_generic_expr (stderr, gimple_label_label (stmt), 0);
	      fprintf (stderr, " in the middle of basic block %d", bb->index);
	      err = 1;
	    }
	}

      /* Now check the outgoing edges against the last statement.  */
      gsi = gsi_last_bb (bb);
      if (gsi_end_p (gsi))
	continue;

      stmt = gsi_stmt (gsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      err |= verify_eh_edges (stmt);

      /* A control statement must not have a fallthru successor.  */
      if (is_ctrl_stmt (stmt))
	{
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & EDGE_FALLTHRU)
	      {
		error ("fallthru edge after a control statement in bb %d",
		       bb->index);
		err = 1;
	      }
	}

      if (gimple_code (stmt) != GIMPLE_COND)
	{
	  /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
	     after anything else but if statement.  */
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
	      {
		error ("true/false edge after a non-GIMPLE_COND in bb %d",
		       bb->index);
		err = 1;
	      }
	}

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    edge true_edge;
	    edge false_edge;

	    /* A conditional must have exactly one true and one false
	       successor, neither fallthru nor abnormal.  */
	    extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

	    if (!true_edge
		|| !false_edge
		|| !(true_edge->flags & EDGE_TRUE_VALUE)
		|| !(false_edge->flags & EDGE_FALSE_VALUE)
		|| (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| EDGE_COUNT (bb->succs) >= 3)
	      {
		error ("wrong outgoing edge flags at end of bb %d",
		       bb->index);
		err = 1;
	      }
	  }
	  break;

	case GIMPLE_GOTO:
	  /* Simple gotos are represented implicitly by edges, so an
	     explicit one surviving here is an error.  */
	  if (simple_goto_p (stmt))
	    {
	      error ("explicit goto at end of bb %d", bb->index);
	      err = 1;
	    }
	  else
	    {
	      /* FIXME.  We should double check that the labels in the
		 destination blocks have their address taken.  */
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
				 | EDGE_FALSE_VALUE))
		    || !(e->flags & EDGE_ABNORMAL))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = 1;
		  }
	    }
	  break;

	case GIMPLE_RETURN:
	  /* A return has a single successor: the exit block.  */
	  if (!single_succ_p (bb)
	      || (single_succ_edge (bb)->flags
		  & (EDGE_FALLTHRU | EDGE_ABNORMAL
		     | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
	    {
	      error ("wrong outgoing edge flags at end of bb %d", bb->index);
	      err = 1;
	    }
	  if (single_succ (bb) != EXIT_BLOCK_PTR)
	    {
	      error ("return edge does not point to exit in bb %d",
		     bb->index);
	      err = 1;
	    }
	  break;

	case GIMPLE_SWITCH:
	  {
	    tree prev;
	    edge e;
	    size_t i, n;

	    n = gimple_switch_num_labels (stmt);

	    /* Mark all the destination basic blocks.  Uses bb->aux
	       as a scratch marker: 1 = case target, 2 = reached by an
	       edge; cleared again below.  */
	    for (i = 0; i < n; ++i)
	      {
		tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
		basic_block label_bb = label_to_block (lab);
		gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
		label_bb->aux = (void *)1;
	      }

	    /* Verify that the case labels are sorted.  */
	    prev = gimple_switch_label (stmt, 0);
	    for (i = 1; i < n; ++i)
	      {
		tree c = gimple_switch_label (stmt, i);
		if (!CASE_LOW (c))
		  {
		    error ("found default case not at the start of "
			   "case vector");
		    err = 1;
		    continue;
		  }
		if (CASE_LOW (prev)
		    && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
		  {
		    error ("case labels not sorted: ");
		    print_generic_expr (stderr, prev, 0);
		    fprintf (stderr," is greater than ");
		    print_generic_expr (stderr, c, 0);
		    fprintf (stderr," but comes before it.\n");
		    err = 1;
		  }
		prev = c;
	      }
	    /* VRP will remove the default case if it can prove it will
	       never be executed.  So do not verify there always exists
	       a default case here.  */

	    /* Every successor edge must lead to one of the marked
	       case-label blocks.  */
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      {
		if (!e->dest->aux)
		  {
		    error ("extra outgoing edge %d->%d",
			   bb->index, e->dest->index);
		    err = 1;
		  }

		e->dest->aux = (void *)2;
		if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
				 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = 1;
		  }
	      }

	    /* Check that we have all of them.  */
	    for (i = 0; i < n; ++i)
	      {
		tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
		basic_block label_bb = label_to_block (lab);

		if (label_bb->aux != (void *)2)
		  {
		    error ("missing edge %i->%i", bb->index, label_bb->index);
		    err = 1;
		  }
	      }

	    /* Clear the aux markers again.  */
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      e->dest->aux = (void *)0;
	  }
	  break;

	case GIMPLE_EH_DISPATCH:
	  err |= verify_eh_dispatch_edge (stmt);
	  break;

	default:
	  break;
	}
    }

  if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
    verify_dominators (CDI_DOMINATORS);

  return err;
}
4506 
4507 
/* Updates phi nodes after creating a forwarder block joined
   by edge FALLTHRU.  FALLTHRU->src is the forwarder (DUMMY) that took
   over the PHI nodes; FALLTHRU->dest is the original block (BB).  */

static void
gimple_make_forwarder_block (edge fallthru)
{
  edge e;
  edge_iterator ei;
  basic_block dummy, bb;
  tree var;
  gimple_stmt_iterator gsi;

  dummy = fallthru->src;
  bb = fallthru->dest;

  /* With a single predecessor there are no PHI arguments to
     redistribute.  */
  if (single_pred_p (bb))
    return;

  /* If we redirected a branch we must create new PHI nodes at the
     start of BB.  */
  for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi, new_phi;

      /* For each PHI in DUMMY: create a counterpart in BB defining the
	 old result VAR, give the DUMMY PHI a fresh SSA name instead,
	 and feed that fresh name into the new PHI along FALLTHRU.  */
      phi = gsi_stmt (gsi);
      var = gimple_phi_result (phi);
      new_phi = create_phi_node (var, bb);
      SSA_NAME_DEF_STMT (var) = new_phi;
      gimple_phi_set_result (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
      add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
		   UNKNOWN_LOCATION);
    }

  /* Add the arguments we have stored on edges.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e == fallthru)
	continue;

      flush_pending_stmts (e);
    }
}
4550 
4551 
4552 /* Return a non-special label in the head of basic block BLOCK.
4553    Create one if it doesn't exist.  */
4554 
4555 tree
4556 gimple_block_label (basic_block bb)
4557 {
4558   gimple_stmt_iterator i, s = gsi_start_bb (bb);
4559   bool first = true;
4560   tree label;
4561   gimple stmt;
4562 
4563   for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
4564     {
4565       stmt = gsi_stmt (i);
4566       if (gimple_code (stmt) != GIMPLE_LABEL)
4567 	break;
4568       label = gimple_label_label (stmt);
4569       if (!DECL_NONLOCAL (label))
4570 	{
4571 	  if (!first)
4572 	    gsi_move_before (&i, &s);
4573 	  return label;
4574 	}
4575     }
4576 
4577   label = create_artificial_label (UNKNOWN_LOCATION);
4578   stmt = gimple_build_label (label);
4579   gsi_insert_before (&s, stmt, GSI_NEW_STMT);
4580   return label;
4581 }
4582 
4583 
4584 /* Attempt to perform edge redirection by replacing a possibly complex
4585    jump instruction by a goto or by removing the jump completely.
4586    This can apply only if all edges now point to the same block.  The
4587    parameters and return values are equivalent to
4588    redirect_edge_and_branch.  */
4589 
4590 static edge
4591 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
4592 {
4593   basic_block src = e->src;
4594   gimple_stmt_iterator i;
4595   gimple stmt;
4596 
4597   /* We can replace or remove a complex jump only when we have exactly
4598      two edges.  */
4599   if (EDGE_COUNT (src->succs) != 2
4600       /* Verify that all targets will be TARGET.  Specifically, the
4601 	 edge that is not E must also go to TARGET.  */
4602       || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4603     return NULL;
4604 
4605   i = gsi_last_bb (src);
4606   if (gsi_end_p (i))
4607     return NULL;
4608 
4609   stmt = gsi_stmt (i);
4610 
4611   if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
4612     {
4613       gsi_remove (&i, true);
4614       e = ssa_redirect_edge (e, target);
4615       e->flags = EDGE_FALLTHRU;
4616       return e;
4617     }
4618 
4619   return NULL;
4620 }
4621 
4622 
/* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
   edge representing the redirected branch.  */

static edge
gimple_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block bb = e->src;
  gimple_stmt_iterator gsi;
  edge ret;
  gimple stmt;

  /* Abnormal edges cannot be redirected.  */
  if (e->flags & EDGE_ABNORMAL)
    return NULL;

  /* Nothing to do if the edge already reaches DEST.  */
  if (e->dest == dest)
    return NULL;

  /* EH edges have their own redirection machinery.  */
  if (e->flags & EDGE_EH)
    return redirect_eh_edge (e, dest);

  /* First try removing the jump entirely, which works when all edges
     of the source block end up at DEST.  */
  if (e->src != ENTRY_BLOCK_PTR)
    {
      ret = gimple_try_redirect_by_replacing_jump (e, dest);
      if (ret)
	return ret;
    }

  /* Otherwise the last statement of BB determines what extra
     bookkeeping the redirection needs.  */
  gsi = gsi_last_bb (bb);
  stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);

  switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
    {
    case GIMPLE_COND:
      /* For COND_EXPR, we only need to redirect the edge.  */
      break;

    case GIMPLE_GOTO:
      /* No non-abnormal edges should lead from a non-simple goto, and
	 simple ones should be represented implicitly.  */
      gcc_unreachable ();

    case GIMPLE_SWITCH:
      {
	tree label = gimple_block_label (dest);
        tree cases = get_cases_for_edge (e, stmt);

	/* If we have a list of cases associated with E, then use it
	   as it's a lot faster than walking the entire case vector.  */
	if (cases)
	  {
	    edge e2 = find_edge (e->src, dest);
	    tree last, first;

	    /* Retarget every case on E's list at DEST's label.  */
	    first = cases;
	    while (cases)
	      {
		last = cases;
		CASE_LABEL (cases) = label;
		cases = TREE_CHAIN (cases);
	      }

	    /* If there was already an edge in the CFG, then we need
	       to move all the cases associated with E to E2.  */
	    if (e2)
	      {
		tree cases2 = get_cases_for_edge (e2, stmt);

		/* Splice E's case list into E2's chain.  */
		TREE_CHAIN (last) = TREE_CHAIN (cases2);
		TREE_CHAIN (cases2) = first;
	      }
	  }
	else
	  {
	    /* No cached case list: scan the whole label vector for
	       cases that pointed at E's old destination.  */
	    size_t i, n = gimple_switch_num_labels (stmt);

	    for (i = 0; i < n; i++)
	      {
		tree elt = gimple_switch_label (stmt, i);
		if (label_to_block (CASE_LABEL (elt)) == e->dest)
		  CASE_LABEL (elt) = label;
	      }
	  }
      }
      break;

    case GIMPLE_ASM:
      {
	/* Retarget any asm goto labels that pointed at E's old
	   destination.  */
	int i, n = gimple_asm_nlabels (stmt);
	tree label = NULL;

	for (i = 0; i < n; ++i)
	  {
	    tree cons = gimple_asm_label_op (stmt, i);
	    if (label_to_block (TREE_VALUE (cons)) == e->dest)
	      {
		if (!label)
		  label = gimple_block_label (dest);
		TREE_VALUE (cons) = label;
	      }
	  }

	/* If we didn't find any label matching the former edge in the
	   asm labels, we must be redirecting the fallthrough
	   edge.  */
	gcc_assert (label || (e->flags & EDGE_FALLTHRU));
      }
      break;

    case GIMPLE_RETURN:
      /* Drop the return; the redirected edge becomes a fallthru.  */
      gsi_remove (&gsi, true);
      e->flags |= EDGE_FALLTHRU;
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_FOR:
      /* The edges from OMP constructs can be simply redirected.  */
      break;

    case GIMPLE_EH_DISPATCH:
      if (!(e->flags & EDGE_FALLTHRU))
	redirect_eh_dispatch_edge (stmt, e, dest);
      break;

    default:
      /* Otherwise it must be a fallthru edge, and we don't need to
	 do anything besides redirecting it.  */
      gcc_assert (e->flags & EDGE_FALLTHRU);
      break;
    }

  /* Update/insert PHI nodes as necessary.  */

  /* Now update the edges in the CFG.  */
  e = ssa_redirect_edge (e, dest);

  return e;
}
4761 }
4762 
4763 /* Returns true if it is possible to remove edge E by redirecting
4764    it to the destination of the other edge from E->src.  */
4765 
4766 static bool
4767 gimple_can_remove_branch_p (const_edge e)
4768 {
4769   if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
4770     return false;
4771 
4772   return true;
4773 }
4774 
4775 /* Simple wrapper, as we can always redirect fallthru edges.  */
4776 
4777 static basic_block
4778 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
4779 {
4780   e = gimple_redirect_edge_and_branch (e, dest);
4781   gcc_assert (e);
4782 
4783   return NULL;
4784 }
4785 
4786 
/* Splits basic block BB after statement STMT (but at least after the
   labels).  If STMT is NULL, BB is split just after the labels.
   Returns the newly created block, which receives the statements
   after the split point and all of BB's former outgoing edges.  */

static basic_block
gimple_split_block (basic_block bb, void *stmt)
{
  gimple_stmt_iterator gsi;
  gimple_stmt_iterator gsi_tgt;
  gimple act;
  gimple_seq list;
  basic_block new_bb;
  edge e;
  edge_iterator ei;

  new_bb = create_empty_bb (bb);

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* Splitting at a label degenerates to splitting after the labels.  */
  if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
    stmt = NULL;

  /* Move everything from GSI to the new basic block.  Position GSI at
     the first statement to move: the first non-label when STMT is
     NULL, otherwise the statement following STMT.  */
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      act = gsi_stmt (gsi);
      if (gimple_code (act) == GIMPLE_LABEL)
	continue;

      if (!stmt)
	break;

      if (stmt == act)
	{
	  gsi_next (&gsi);
	  break;
	}
    }

  /* Nothing follows the split point; the new block stays empty.  */
  if (gsi_end_p (gsi))
    return new_bb;

  /* Split the statement list - avoid re-creating new containers as this
     brings ugly quadratic memory consumption in the inliner.
     (We are still quadratic since we need to update stmt BB pointers,
     sadly.)  */
  list = gsi_split_seq_before (&gsi);
  set_bb_seq (new_bb, list);
  for (gsi_tgt = gsi_start (list);
       !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
    gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);

  return new_bb;
}
4843 }
4844 
4845 
4846 /* Moves basic block BB after block AFTER.  */
4847 
4848 static bool
4849 gimple_move_block_after (basic_block bb, basic_block after)
4850 {
4851   if (bb->prev_bb == after)
4852     return true;
4853 
4854   unlink_block (bb);
4855   link_block (bb, after);
4856 
4857   return true;
4858 }
4859 
4860 
/* Return true if basic_block can be duplicated.  */

static bool
gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
{
  /* Any GIMPLE basic block may be duplicated.  */
  return true;
}
4868 
/* Create a duplicate of the basic block BB.  NOTE: This does not
   preserve SSA form.  The copy is created just before the exit
   block; edges are not copied and PHI arguments are left for the
   caller to fill in.  */

static basic_block
gimple_duplicate_bb (basic_block bb)
{
  basic_block new_bb;
  gimple_stmt_iterator gsi, gsi_tgt;
  gimple_seq phis = phi_nodes (bb);
  gimple phi, stmt, copy;

  new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);

  /* Copy the PHI nodes.  We ignore PHI node arguments here because
     the incoming edges have not been setup yet.  */
  for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      phi = gsi_stmt (gsi);
      copy = create_phi_node (gimple_phi_result (phi), new_bb);
      /* Register a fresh SSA name for the copied PHI's result.  */
      create_new_def_for (gimple_phi_result (copy), copy,
			  gimple_phi_result_ptr (copy));
    }

  gsi_tgt = gsi_start_bb (new_bb);
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      def_operand_p def_p;
      ssa_op_iter op_iter;

      stmt = gsi_stmt (gsi);
      /* Labels are not copied into the duplicate block.  */
      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      /* Create a new copy of STMT and duplicate STMT's virtual
	 operands.  */
      copy = gimple_copy (stmt);
      gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);

      /* Keep EH region membership and profile histograms in sync with
	 the original statement.  */
      maybe_duplicate_eh_stmt (copy, stmt);
      gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);

      /* Create new names for all the definitions created by COPY and
	 add replacement mappings for each new name.  */
      FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
	create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
    }

  return new_bb;
}
4918 
/* Adds phi node arguments for edge E_COPY after basic block duplication.
   The argument values are taken from the corresponding edge between the
   original blocks; duplicated blocks carry the BB_DUPLICATED flag.  */

static void
add_phi_args_after_copy_edge (edge e_copy)
{
  basic_block bb, bb_copy = e_copy->src, dest;
  edge e;
  edge_iterator ei;
  gimple phi, phi_copy;
  tree def;
  gimple_stmt_iterator psi, psi_copy;

  /* Nothing to do when the destination carries no PHI nodes.  */
  if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
    return;

  /* Map E_COPY's source and destination back to the blocks they were
     duplicated from (if they are duplicates at all).  */
  bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;

  if (e_copy->dest->flags & BB_DUPLICATED)
    dest = get_bb_original (e_copy->dest);
  else
    dest = e_copy->dest;

  e = find_edge (bb, dest);
  if (!e)
    {
      /* During loop unrolling the target of the latch edge is copied.
	 In this case we are not looking for edge to dest, but to
	 duplicated block whose original was dest.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if ((e->dest->flags & BB_DUPLICATED)
	      && get_bb_original (e->dest) == dest)
	    break;
	}

      gcc_assert (e != NULL);
    }

  /* Walk the PHI nodes of the two destinations in lockstep, copying
     each argument for E onto E_COPY.  */
  for (psi = gsi_start_phis (e->dest),
       psi_copy = gsi_start_phis (e_copy->dest);
       !gsi_end_p (psi);
       gsi_next (&psi), gsi_next (&psi_copy))
    {
      phi = gsi_stmt (psi);
      phi_copy = gsi_stmt (psi_copy);
      def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      add_phi_arg (phi_copy, def, e_copy,
		   gimple_phi_arg_location_from_edge (phi, e));
    }
}
4968 }
4969 
4970 
4971 /* Basic block BB_COPY was created by code duplication.  Add phi node
4972    arguments for edges going out of BB_COPY.  The blocks that were
4973    duplicated have BB_DUPLICATED set.  */
4974 
4975 void
4976 add_phi_args_after_copy_bb (basic_block bb_copy)
4977 {
4978   edge e_copy;
4979   edge_iterator ei;
4980 
4981   FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
4982     {
4983       add_phi_args_after_copy_edge (e_copy);
4984     }
4985 }
4986 
4987 /* Blocks in REGION_COPY array of length N_REGION were created by
4988    duplication of basic blocks.  Add phi node arguments for edges
4989    going from these blocks.  If E_COPY is not NULL, also add
4990    phi node arguments for its destination.*/
4991 
4992 void
4993 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
4994 			 edge e_copy)
4995 {
4996   unsigned i;
4997 
4998   for (i = 0; i < n_region; i++)
4999     region_copy[i]->flags |= BB_DUPLICATED;
5000 
5001   for (i = 0; i < n_region; i++)
5002     add_phi_args_after_copy_bb (region_copy[i]);
5003   if (e_copy)
5004     add_phi_args_after_copy_edge (e_copy);
5005 
5006   for (i = 0; i < n_region; i++)
5007     region_copy[i]->flags &= ~BB_DUPLICATED;
5008 }
5009 
/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
   important exit edge EXIT.  By important we mean that no SSA name defined
   inside region is live over the other exit edges of the region.  All entry
   edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
   to the duplicate of the region.  SSA form, dominance and loop information
   is updated.  The new basic blocks are stored to REGION_COPY in the same
   order as they had in REGION, provided that REGION_COPY is not NULL.
   The function returns false if it is unable to copy the region,
   true otherwise.  */

bool
gimple_duplicate_sese_region (edge entry, edge exit,
			    basic_block *region, unsigned n_region,
			    basic_block *region_copy)
{
  unsigned i;
  bool free_region_copy = false, copying_header = false;
  struct loop *loop = entry->dest->loop_father;
  edge exit_copy;
  VEC (basic_block, heap) *doms;
  edge redirected;
  int total_freq = 0, entry_freq = 0;
  gcov_type total_count = 0, entry_count = 0;

  if (!can_copy_bbs_p (region, n_region))
    return false;

  /* Some sanity checking.  Note that we do not check for all possible
     misuses of the functions.  I.e. if you ask to copy something weird,
     it will work, but the state of structures probably will not be
     correct.  */
  for (i = 0; i < n_region; i++)
    {
      /* We do not handle subloops, i.e. all the blocks must belong to the
	 same loop.  */
      if (region[i]->loop_father != loop)
	return false;

      /* The loop header may only appear as the entry block of the
	 region.  */
      if (region[i] != entry->dest
	  && region[i] == loop->header)
	return false;
    }

  set_loop_copy (loop, loop);

  /* In case the function is used for loop header copying (which is the primary
     use), ensure that EXIT and its copy will be new latch and entry edges.  */
  if (loop->header == entry->dest)
    {
      copying_header = true;
      set_loop_copy (loop, loop_outer (loop));

      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
	return false;

      for (i = 0; i < n_region; i++)
	if (region[i] != exit->src
	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
	  return false;
    }

  /* Allocate a scratch copy array if the caller did not provide one.  */
  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  gcc_assert (!need_ssa_update_p (cfun));

  /* Record blocks outside the region that are dominated by something
     inside.  */
  doms = NULL;
  initialize_original_copy_tables ();

  doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);

  /* Gather either profile counts or frequencies (whichever is
     available) so the region and its copy can be rescaled below.  */
  if (entry->dest->count)
    {
      total_count = entry->dest->count;
      entry_count = entry->count;
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (entry_count > total_count)
	entry_count = total_count;
    }
  else
    {
      total_freq = entry->dest->frequency;
      entry_freq = EDGE_FREQUENCY (entry);
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (total_freq == 0)
	total_freq = 1;
      else if (entry_freq > total_freq)
	entry_freq = total_freq;
    }

  copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
	    split_edge_bb_loc (entry));
  /* Split the profile between the original region and the copy in
     proportion to the entry edge.  */
  if (total_count)
    {
      scale_bbs_frequencies_gcov_type (region, n_region,
				       total_count - entry_count,
				       total_count);
      scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
				       total_count);
    }
  else
    {
      scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
				 total_freq);
      scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
    }

  if (copying_header)
    {
      loop->header = exit->dest;
      loop->latch = exit->src;
    }

  /* Redirect the entry and add the phi node arguments.  */
  redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
  gcc_assert (redirected != NULL);
  flush_pending_stmts (entry);

  /* Concerning updating of dominators:  We must recount dominators
     for entry block and its copy.  Anything that is outside of the
     region, but was dominated by something inside needs recounting as
     well.  */
  set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
  VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
  iterate_fix_dominators (CDI_DOMINATORS, doms, false);
  VEC_free (basic_block, heap, doms);

  /* Add the other PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, NULL);

  /* Update the SSA web.  */
  update_ssa (TODO_update_ssa);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
5155 }
5156 
5157 /* Duplicates REGION consisting of N_REGION blocks.  The new blocks
5158    are stored to REGION_COPY in the same order in that they appear
5159    in REGION, if REGION_COPY is not NULL.  ENTRY is the entry to
5160    the region, EXIT an exit from it.  The condition guarding EXIT
5161    is moved to ENTRY.  Returns true if duplication succeeds, false
5162    otherwise.
5163 
5164    For example,
5165 
5166    some_code;
5167    if (cond)
5168      A;
5169    else
5170      B;
5171 
5172    is transformed to
5173 
5174    if (cond)
5175      {
5176        some_code;
5177        A;
5178      }
5179    else
5180      {
5181        some_code;
5182        B;
5183      }
5184 */
5185 
bool
gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
			  basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
			  basic_block *region_copy ATTRIBUTE_UNUSED)
{
  unsigned i;
  bool free_region_copy = false;
  struct loop *loop = exit->dest->loop_father;
  struct loop *orig_loop = entry->dest->loop_father;
  basic_block switch_bb, entry_bb, nentry_bb;
  VEC (basic_block, heap) *doms;
  int total_freq = 0, exit_freq = 0;
  gcov_type total_count = 0, exit_count = 0;
  edge exits[2], nexits[2], e;
  gimple_stmt_iterator gsi,gsi1;
  gimple cond_stmt;
  edge sorig, snew;
  basic_block exit_bb;
  basic_block iters_bb;
  tree new_rhs;
  gimple_stmt_iterator psi;
  gimple phi;
  tree def;

  /* NOTE(review): all five parameters are marked ATTRIBUTE_UNUSED yet every
     one of them is used below; the markers appear stale.  */

  /* EXIT's source must end in a two-way conditional; EXITS[0] is the edge
     leaving the region, EXITS[1] its sibling (staying in the loop).  */
  gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
  exits[0] = exit;
  exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);

  if (!can_copy_bbs_p (region, n_region))
    return false;

  initialize_original_copy_tables ();
  set_loop_copy (orig_loop, loop);
  duplicate_subloops (orig_loop, loop);

  /* If the caller did not supply storage for the copied blocks, allocate
     it here and remember to free it before returning.  */
  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  gcc_assert (!need_ssa_update_p (cfun));

  /* Record blocks outside the region that are dominated by something
     inside.  */
  doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);

  /* Collect profile data from EXIT's source so the original and the copy
     can be scaled to split the execution count between them.  Counts are
     used when available, frequencies otherwise.  */
  if (exit->src->count)
    {
      total_count = exit->src->count;
      exit_count = exit->count;
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (exit_count > total_count)
	exit_count = total_count;
    }
  else
    {
      total_freq = exit->src->frequency;
      exit_freq = EDGE_FREQUENCY (exit);
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (total_freq == 0)
	total_freq = 1;
      if (exit_freq > total_freq)
	exit_freq = total_freq;
    }

  copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
	    split_edge_bb_loc (exit));
  if (total_count)
    {
      scale_bbs_frequencies_gcov_type (region, n_region,
				       total_count - exit_count,
				       total_count);
      scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
				       total_count);
    }
  else
    {
      scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
				 total_freq);
      scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
    }

  /* Create the switch block, and put the exit condition to it.  */
  entry_bb = entry->dest;
  nentry_bb = get_bb_copy (entry_bb);
  if (!last_stmt (entry->src)
      || !stmt_ends_bb_p (last_stmt (entry->src)))
    switch_bb = entry->src;
  else
    switch_bb = split_edge (entry);
  set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);

  gsi = gsi_last_bb (switch_bb);
  cond_stmt = last_stmt (exit->src);
  gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
  cond_stmt = gimple_copy (cond_stmt);

 /* If the block consisting of the exit condition has the latch as
    successor, then the body of the loop is executed before
    the exit condition is tested.  In such case, moving the
    condition to the entry, causes that the loop will iterate
    one less iteration (which is the wanted outcome, since we
    peel out the last iteration).  If the body is executed after
    the condition, moving the condition to the entry requires
    decrementing one iteration.  */
  if (exits[1]->dest == orig_loop->latch)
    new_rhs = gimple_cond_rhs (cond_stmt);
  else
  {
    new_rhs = fold_build2 (MINUS_EXPR, TREE_TYPE (gimple_cond_rhs (cond_stmt)),
			   gimple_cond_rhs (cond_stmt),
			   build_int_cst (TREE_TYPE (gimple_cond_rhs (cond_stmt)), 1));

    if (TREE_CODE (gimple_cond_rhs (cond_stmt)) == SSA_NAME)
      {
	/* Insert the decremented bound right after the definition of the
	   original bound, so that it dominates all its uses.  */
	iters_bb = gimple_bb (SSA_NAME_DEF_STMT (gimple_cond_rhs (cond_stmt)));
	for (gsi1 = gsi_start_bb (iters_bb); !gsi_end_p (gsi1); gsi_next (&gsi1))
	  if (gsi_stmt (gsi1) == SSA_NAME_DEF_STMT (gimple_cond_rhs (cond_stmt)))
	    break;

	new_rhs = force_gimple_operand_gsi (&gsi1, new_rhs, true,
					    NULL_TREE,false,GSI_CONTINUE_LINKING);
      }
  }
  gimple_cond_set_rhs (cond_stmt, unshare_expr (new_rhs));
  gimple_cond_set_lhs (cond_stmt, unshare_expr (gimple_cond_lhs (cond_stmt)));
  gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);

  /* SWITCH_BB now branches either into the loop (original edge flags of
     the in-loop exit sibling) or to the peeled copy (flags of the exit
     edge).  */
  sorig = single_succ_edge (switch_bb);
  sorig->flags = exits[1]->flags;
  snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);

  /* Register the new edge from SWITCH_BB in loop exit lists.  */
  rescan_loop_exit (snew, true, false);

  /* Add the PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, snew);

  /* Get rid of now superfluous conditions and associated edges (and phi node
     arguments).  */
  exit_bb = exit->dest;

  e = redirect_edge_and_branch (exits[0], exits[1]->dest);
  PENDING_STMT (e) = NULL;

  /* The latch of ORIG_LOOP was copied, and so was the backedge
     to the original header.  We redirect this backedge to EXIT_BB.  */
  for (i = 0; i < n_region; i++)
    if (get_bb_original (region_copy[i]) == orig_loop->latch)
      {
	gcc_assert (single_succ_edge (region_copy[i]));
	e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
	PENDING_STMT (e) = NULL;
	/* Give the redirected backedge the same PHI arguments in EXIT_BB
	   as the copied exit edge already has.  */
	for (psi = gsi_start_phis (exit_bb);
	     !gsi_end_p (psi);
	     gsi_next (&psi))
	  {
	    phi = gsi_stmt (psi);
	    def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
	    add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
	  }
      }
  e = redirect_edge_and_branch (nexits[0], nexits[1]->dest);
  PENDING_STMT (e) = NULL;

  /* Anything that is outside of the region, but was dominated by something
     inside needs to update dominance info.  */
  iterate_fix_dominators (CDI_DOMINATORS, doms, false);
  VEC_free (basic_block, heap, doms);
  /* Update the SSA web.  */
  update_ssa (TODO_update_ssa);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
5367 
5368 /* Add all the blocks dominated by ENTRY to the array BBS_P.  Stop
5369    adding blocks when the dominator traversal reaches EXIT.  This
5370    function silently assumes that ENTRY strictly dominates EXIT.  */
5371 
5372 void
5373 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5374 			      VEC(basic_block,heap) **bbs_p)
5375 {
5376   basic_block son;
5377 
5378   for (son = first_dom_son (CDI_DOMINATORS, entry);
5379        son;
5380        son = next_dom_son (CDI_DOMINATORS, son))
5381     {
5382       VEC_safe_push (basic_block, heap, *bbs_p, son);
5383       if (son != exit)
5384 	gather_blocks_in_sese_region (son, exit, bbs_p);
5385     }
5386 }
5387 
5388 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5389    The duplicates are recorded in VARS_MAP.  */
5390 
5391 static void
5392 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5393 			   tree to_context)
5394 {
5395   tree t = *tp, new_t;
5396   struct function *f = DECL_STRUCT_FUNCTION (to_context);
5397   void **loc;
5398 
5399   if (DECL_CONTEXT (t) == to_context)
5400     return;
5401 
5402   loc = pointer_map_contains (vars_map, t);
5403 
5404   if (!loc)
5405     {
5406       loc = pointer_map_insert (vars_map, t);
5407 
5408       if (SSA_VAR_P (t))
5409 	{
5410 	  new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
5411 	  f->local_decls = tree_cons (NULL_TREE, new_t, f->local_decls);
5412 	}
5413       else
5414 	{
5415 	  gcc_assert (TREE_CODE (t) == CONST_DECL);
5416 	  new_t = copy_node (t);
5417 	}
5418       DECL_CONTEXT (new_t) = to_context;
5419 
5420       *loc = new_t;
5421     }
5422   else
5423     new_t = (tree) *loc;
5424 
5425   *tp = new_t;
5426 }
5427 
5428 
5429 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
5430    VARS_MAP maps old ssa names and var_decls to the new ones.  */
5431 
5432 static tree
5433 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
5434 		  tree to_context)
5435 {
5436   void **loc;
5437   tree new_name, decl = SSA_NAME_VAR (name);
5438 
5439   gcc_assert (is_gimple_reg (name));
5440 
5441   loc = pointer_map_contains (vars_map, name);
5442 
5443   if (!loc)
5444     {
5445       replace_by_duplicate_decl (&decl, vars_map, to_context);
5446 
5447       push_cfun (DECL_STRUCT_FUNCTION (to_context));
5448       if (gimple_in_ssa_p (cfun))
5449 	add_referenced_var (decl);
5450 
5451       new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
5452       if (SSA_NAME_IS_DEFAULT_DEF (name))
5453 	set_default_def (decl, new_name);
5454       pop_cfun ();
5455 
5456       loc = pointer_map_insert (vars_map, name);
5457       *loc = new_name;
5458     }
5459   else
5460     new_name = (tree) *loc;
5461 
5462   return new_name;
5463 }
5464 
/* State threaded through move_stmt_r/move_stmt_op (via
   walk_stmt_info.info) while moving statements between functions.  */

struct move_stmt_d
{
  tree orig_block;		/* BLOCK statements originally belonged to.  */
  tree new_block;		/* BLOCK to assign to moved statements.  */
  tree from_context;		/* FUNCTION_DECL statements move out of.  */
  tree to_context;		/* FUNCTION_DECL statements move into.  */
  struct pointer_map_t *vars_map; /* Maps old decls/SSA names to duplicates.  */
  htab_t new_label_map;		/* Maps old labels to their duplicates.  */
  struct pointer_map_t *eh_map;	/* Maps old EH regions to duplicates.  */
  bool remap_decls_p;		/* False inside OMP directives, where
				   variables stay in the parent function.  */
};
5476 
5477 /* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
5478    contained in *TP if it has been ORIG_BLOCK previously and change the
5479    DECL_CONTEXT of every local variable referenced in *TP.  */
5480 
static tree
move_stmt_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  tree t = *tp;

  if (EXPR_P (t))
    /* We should never have TREE_BLOCK set on non-statements.  */
    gcc_assert (!TREE_BLOCK (t));

  else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
    {
      /* SSA names get a duplicate in the destination function.  */
      if (TREE_CODE (t) == SSA_NAME)
	*tp = replace_ssa_name (t, p->vars_map, p->to_context);
      else if (TREE_CODE (t) == LABEL_DECL)
	{
	  /* Labels are not duplicated via VARS_MAP; they have their own
	     map (filled in by new_label_mapper) and only need their
	     DECL_CONTEXT moved.  */
	  if (p->new_label_map)
	    {
	      struct tree_map in, *out;
	      in.base.from = t;
	      out = (struct tree_map *)
		htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
	      if (out)
		*tp = t = out->to;
	    }

	  DECL_CONTEXT (t) = p->to_context;
	}
      else if (p->remap_decls_p)
	{
	  /* Replace T with its duplicate.  T should no longer appear in the
	     parent function, so this looks wasteful; however, it may appear
	     in referenced_vars, and more importantly, as virtual operands of
	     statements, and in alias lists of other variables.  It would be
	     quite difficult to expunge it from all those places.  ??? It might
	     suffice to do this for addressable variables.  */
	  if ((TREE_CODE (t) == VAR_DECL
	       && !is_global_var (t))
	      || TREE_CODE (t) == CONST_DECL)
	    replace_by_duplicate_decl (tp, p->vars_map, p->to_context);

	  /* Make the (possibly replaced) variable known to the
	     destination function's SSA machinery.  */
	  if (SSA_VAR_P (t)
	      && gimple_in_ssa_p (cfun))
	    {
	      push_cfun (DECL_STRUCT_FUNCTION (p->to_context));
	      add_referenced_var (*tp);
	      pop_cfun ();
	    }
	}
      *walk_subtrees = 0;
    }
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL_TREE;
}
5538 
/* Helper for move_stmt_r.  Given an EH region number for the source
   function, map that to the duplicate EH region number in the dest.  */
5541 
5542 static int
5543 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
5544 {
5545   eh_region old_r, new_r;
5546   void **slot;
5547 
5548   old_r = get_eh_region_from_number (old_nr);
5549   slot = pointer_map_contains (p->eh_map, old_r);
5550   new_r = (eh_region) *slot;
5551 
5552   return new_r->index;
5553 }
5554 
5555 /* Similar, but operate on INTEGER_CSTs.  */
5556 
5557 static tree
5558 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
5559 {
5560   int old_nr, new_nr;
5561 
5562   old_nr = tree_low_cst (old_t_nr, 0);
5563   new_nr = move_stmt_eh_region_nr (old_nr, p);
5564 
5565   return build_int_cst (NULL, new_nr);
5566 }
5567 
5568 /* Like move_stmt_op, but for gimple statements.
5569 
5570    Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
5571    contained in the current statement in *GSI_P and change the
5572    DECL_CONTEXT of every local variable referenced in the current
5573    statement.  */
5574 
static tree
move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	     struct walk_stmt_info *wi)
{
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  gimple stmt = gsi_stmt (*gsi_p);
  tree block = gimple_block (stmt);

  /* Retarget the statement's BLOCK; statements from ORIG_BLOCK (or with
     no block) move to NEW_BLOCK.  */
  if (p->orig_block == NULL_TREE
      || block == p->orig_block
      || block == NULL_TREE)
    gimple_set_block (stmt, p->new_block);
#ifdef ENABLE_CHECKING
  else if (block != p->new_block)
    {
      /* Any other block must be nested somewhere under ORIG_BLOCK.  */
      while (block && block != p->orig_block)
	block = BLOCK_SUPERCONTEXT (block);
      gcc_assert (block);
    }
#endif

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Remap the region numbers for __builtin_eh_{pointer,filter}.  */
      {
	tree r, fndecl = gimple_call_fndecl (stmt);
	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_EH_COPY_VALUES:
	      /* Argument 1 is a second region number; remap it, then fall
		 through to handle argument 0 like the other builtins.  */
	      r = gimple_call_arg (stmt, 1);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 1, r);
	      /* FALLTHRU */

	    case BUILT_IN_EH_POINTER:
	    case BUILT_IN_EH_FILTER:
	      r = gimple_call_arg (stmt, 0);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 0, r);
	      break;

	    default:
	      break;
	    }
      }
      break;

    case GIMPLE_RESX:
      {
	int r = gimple_resx_region (stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_resx_set_region (stmt, r);
      }
      break;

    case GIMPLE_EH_DISPATCH:
      {
	int r = gimple_eh_dispatch_region (stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_eh_dispatch_set_region (stmt, r);
      }
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
      break;
    default:
      if (is_gimple_omp (stmt))
	{
	  /* Do not remap variables inside OMP directives.  Variables
	     referenced in clauses and directive header belong to the
	     parent function and should not be moved into the child
	     function.  */
	  bool save_remap_decls_p = p->remap_decls_p;
	  p->remap_decls_p = false;
	  *handled_ops_p = true;

	  walk_gimple_seq (gimple_omp_body (stmt), move_stmt_r,
			   move_stmt_op, wi);

	  p->remap_decls_p = save_remap_decls_p;
	}
      break;
    }

  return NULL_TREE;
}
5664 
5665 /* Move basic block BB from function CFUN to function DEST_FN.  The
5666    block is moved out of the original linked list and placed after
5667    block AFTER in the new list.  Also, the block is removed from the
5668    original array of blocks and placed in DEST_FN's array of blocks.
5669    If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
5670    updated to reflect the moved edges.
5671 
5672    The local variables are remapped to new instances, VARS_MAP is used
5673    to record the mapping.  */
5674 
static void
move_block_to_fn (struct function *dest_cfun, basic_block bb,
		  basic_block after, bool update_edge_count_p,
		  struct move_stmt_d *d)
{
  struct control_flow_graph *cfg;
  edge_iterator ei;
  edge e;
  gimple_stmt_iterator si;
  unsigned old_len, new_len;

  /* Remove BB from dominance structures.  */
  delete_from_dominance_info (CDI_DOMINATORS, bb);
  if (current_loops)
    remove_bb_from_loops (bb);

  /* Link BB to the new linked list.  */
  move_block_after (bb, after);

  /* Update the edge count in the corresponding flowgraphs.  */
  if (update_edge_count_p)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	cfun->cfg->x_n_edges--;
	dest_cfun->cfg->x_n_edges++;
      }

  /* Remove BB from the original basic block array.  */
  VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
  cfun->cfg->x_n_basic_blocks--;

  /* Grow DEST_CFUN's basic block array if needed.  BB keeps its old
     index, so the destination array must cover it.  */
  cfg = dest_cfun->cfg;
  cfg->x_n_basic_blocks++;
  if (bb->index >= cfg->x_last_basic_block)
    cfg->x_last_basic_block = bb->index + 1;

  old_len = VEC_length (basic_block, cfg->x_basic_block_info);
  if ((unsigned) cfg->x_last_basic_block >= old_len)
    {
      /* Grow by about 25% to amortize repeated insertions.  */
      new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
      VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
			     new_len);
    }

  VEC_replace (basic_block, cfg->x_basic_block_info,
               bb->index, bb);

  /* Remap the variables in phi nodes.  */
  for (si = gsi_start_phis (bb); !gsi_end_p (si); )
    {
      gimple phi = gsi_stmt (si);
      use_operand_p use;
      tree op = PHI_RESULT (phi);
      ssa_op_iter oi;

      if (!is_gimple_reg (op))
	{
	  /* Remove the phi nodes for virtual operands (alias analysis will be
	     run for the new function, anyway).  */
          remove_phi_node (&si, true);
	  continue;
	}

      /* Replace the result and each register argument by SSA names
	 belonging to the destination function.  */
      SET_PHI_RESULT (phi,
		      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
      FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
	{
	  op = USE_FROM_PTR (use);
	  if (TREE_CODE (op) == SSA_NAME)
	    SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
	}

      gsi_next (&si);
    }

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple stmt = gsi_stmt (si);
      struct walk_stmt_info wi;

      /* Remap decls, SSA names, blocks and EH regions inside the
	 statement (see move_stmt_r/move_stmt_op).  */
      memset (&wi, 0, sizeof (wi));
      wi.info = d;
      walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (stmt);
	  int uid = LABEL_DECL_UID (label);

	  gcc_assert (uid > -1);

	  /* Move the label's uid->block mapping from the source CFG to
	     the destination CFG, growing the destination map if needed.  */
	  old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
	  if (old_len <= (unsigned) uid)
	    {
	      new_len = 3 * uid / 2 + 1;
	      VEC_safe_grow_cleared (basic_block, gc,
				     cfg->x_label_to_block_map, new_len);
	    }

	  VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
	  VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);

	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);

	  if (uid >= dest_cfun->cfg->last_label_uid)
	    dest_cfun->cfg->last_label_uid = uid + 1;
	}

      /* Transfer EH landing pad and value-profile histogram data for
	 STMT from the source function to the destination function.  */
      maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
      remove_stmt_from_eh_lp_fn (cfun, stmt);

      gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
      gimple_remove_stmt_histograms (cfun, stmt);

      /* We cannot leave any operands allocated from the operand caches of
	 the current function.  */
      free_stmt_operands (stmt);
      push_cfun (dest_cfun);
      update_stmt (stmt);
      pop_cfun ();
    }

  /* Retarget the goto_block of outgoing edges, mirroring the statement
     BLOCK handling in move_stmt_r.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->goto_locus)
      {
	tree block = e->goto_block;
	if (d->orig_block == NULL_TREE
	    || block == d->orig_block)
	  e->goto_block = d->new_block;
#ifdef ENABLE_CHECKING
	else if (block != d->new_block)
	  {
	    while (block && block != d->orig_block)
	      block = BLOCK_SUPERCONTEXT (block);
	    gcc_assert (block);
	  }
#endif
      }
}
5815 
5816 /* Examine the statements in BB (which is in SRC_CFUN); find and return
5817    the outermost EH region.  Use REGION as the incoming base EH region.  */
5818 
5819 static eh_region
5820 find_outermost_region_in_block (struct function *src_cfun,
5821 				basic_block bb, eh_region region)
5822 {
5823   gimple_stmt_iterator si;
5824 
5825   for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5826     {
5827       gimple stmt = gsi_stmt (si);
5828       eh_region stmt_region;
5829       int lp_nr;
5830 
5831       lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
5832       stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
5833       if (stmt_region)
5834 	{
5835 	  if (region == NULL)
5836 	    region = stmt_region;
5837 	  else if (stmt_region != region)
5838 	    {
5839 	      region = eh_region_outermost (src_cfun, stmt_region, region);
5840 	      gcc_assert (region != NULL);
5841 	    }
5842 	}
5843     }
5844 
5845   return region;
5846 }
5847 
5848 static tree
5849 new_label_mapper (tree decl, void *data)
5850 {
5851   htab_t hash = (htab_t) data;
5852   struct tree_map *m;
5853   void **slot;
5854 
5855   gcc_assert (TREE_CODE (decl) == LABEL_DECL);
5856 
5857   m = XNEW (struct tree_map);
5858   m->hash = DECL_UID (decl);
5859   m->base.from = decl;
5860   m->to = create_artificial_label (UNKNOWN_LOCATION);
5861   LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
5862   if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
5863     cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
5864 
5865   slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
5866   gcc_assert (*slot == NULL);
5867 
5868   *slot = m;
5869 
5870   return m->to;
5871 }
5872 
5873 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
5874    subblocks.  */
5875 
static void
replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
				  tree to_context)
{
  tree *tp, t;

  /* Walk BLOCK_VARS through the chain pointers so a replacement can be
     spliced into the list in place.  */
  for (tp = &BLOCK_VARS (block); *tp; tp = &TREE_CHAIN (*tp))
    {
      t = *tp;
      if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
	continue;
      replace_by_duplicate_decl (&t, vars_map, to_context);
      if (t != *tp)
	{
	  /* Carry over any DECL_VALUE_EXPR to the duplicate.  */
	  if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
	    {
	      SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
	      DECL_HAS_VALUE_EXPR_P (t) = 1;
	    }
	  /* Splice the duplicate into the chain in place of *TP.  */
	  TREE_CHAIN (t) = TREE_CHAIN (*tp);
	  *tp = t;
	}
    }

  /* Recurse into nested blocks.  */
  for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
    replace_block_vars_by_duplicates (block, vars_map, to_context);
}
5903 
5904 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
5905    EXIT_BB to function DEST_CFUN.  The whole region is replaced by a
5906    single basic block in the original CFG and the new basic block is
5907    returned.  DEST_CFUN must not have a CFG yet.
5908 
5909    Note that the region need not be a pure SESE region.  Blocks inside
5910    the region may contain calls to abort/exit.  The only restriction
5911    is that ENTRY_BB should be the only entry point and it must
5912    dominate EXIT_BB.
5913 
   Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
   function's outermost BLOCK, move all subblocks of ORIG_BLOCK
5916    to the new function.
5917 
5918    All local variables referenced in the region are assumed to be in
5919    the corresponding BLOCK_VARS and unexpanded variable lists
5920    associated with DEST_CFUN.  */
5921 
basic_block
move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
		        basic_block exit_bb, tree orig_block)
{
  VEC(basic_block,heap) *bbs, *dom_bbs;
  basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
  basic_block after, bb, *entry_pred, *exit_succ, abb;
  struct function *saved_cfun = cfun;
  int *entry_flag, *exit_flag;
  unsigned *entry_prob, *exit_prob;
  unsigned i, num_entry_edges, num_exit_edges;
  edge e;
  edge_iterator ei;
  htab_t new_label_map;
  struct pointer_map_t *vars_map, *eh_map;
  struct loop *loop = entry_bb->loop_father;
  struct move_stmt_d d;

  /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
     region.  */
  gcc_assert (entry_bb != exit_bb
              && (!exit_bb
		  || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));

  /* Collect all the blocks in the region.  Manually add ENTRY_BB
     because it won't be added by dfs_enumerate_from.  */
  bbs = NULL;
  VEC_safe_push (basic_block, heap, bbs, entry_bb);
  gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);

  /* The blocks that used to be dominated by something in BBS will now be
     dominated by the new block.  */
  dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
				     VEC_address (basic_block, bbs),
				     VEC_length (basic_block, bbs));

  /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG.  We need to remember
     the predecessor edges to ENTRY_BB and the successor edges to
     EXIT_BB so that we can re-attach them to the new basic block that
     will replace the region.  */
  num_entry_edges = EDGE_COUNT (entry_bb->preds);
  entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
  entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
  entry_prob = XNEWVEC (unsigned, num_entry_edges);
  i = 0;
  for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
    {
      entry_prob[i] = e->probability;
      entry_flag[i] = e->flags;
      entry_pred[i++] = e->src;
      remove_edge (e);
    }

  if (exit_bb)
    {
      /* Same bookkeeping for the successor edges of EXIT_BB.  */
      num_exit_edges = EDGE_COUNT (exit_bb->succs);
      exit_succ = (basic_block *) xcalloc (num_exit_edges,
					   sizeof (basic_block));
      exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
      exit_prob = XNEWVEC (unsigned, num_exit_edges);
      i = 0;
      for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
	{
	  exit_prob[i] = e->probability;
	  exit_flag[i] = e->flags;
	  exit_succ[i++] = e->dest;
	  remove_edge (e);
	}
    }
  else
    {
      num_exit_edges = 0;
      exit_succ = NULL;
      exit_flag = NULL;
      exit_prob = NULL;
    }

  /* Switch context to the child function to initialize DEST_FN's CFG.  */
  gcc_assert (dest_cfun->cfg == NULL);
  push_cfun (dest_cfun);

  init_empty_tree_cfg ();

  /* Initialize EH information for the new function.  */
  eh_map = NULL;
  new_label_map = NULL;
  if (saved_cfun->eh)
    {
      eh_region region = NULL;

      /* Find the outermost EH region used by the moved blocks, so only
	 the relevant part of the EH tree is duplicated.  */
      for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
	region = find_outermost_region_in_block (saved_cfun, bb, region);

      init_eh_for_function ();
      if (region != NULL)
	{
	  new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
	  eh_map = duplicate_eh_regions (saved_cfun, region, 0,
					 new_label_mapper, new_label_map);
	}
    }

  pop_cfun ();

  /* Move blocks from BBS into DEST_CFUN.  */
  gcc_assert (VEC_length (basic_block, bbs) >= 2);
  after = dest_cfun->cfg->x_entry_block_ptr;
  vars_map = pointer_map_create ();

  /* Set up the remapping context shared by all moved statements.  */
  memset (&d, 0, sizeof (d));
  d.orig_block = orig_block;
  d.new_block = DECL_INITIAL (dest_cfun->decl);
  d.from_context = cfun->decl;
  d.to_context = dest_cfun->decl;
  d.vars_map = vars_map;
  d.new_label_map = new_label_map;
  d.eh_map = eh_map;
  d.remap_decls_p = true;

  for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
    {
      /* No need to update edge counts on the last block.  It has
	 already been updated earlier when we detached the region from
	 the original CFG.  */
      move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
      after = bb;
    }

  /* Rewire BLOCK_SUBBLOCKS of orig_block.  */
  if (orig_block)
    {
      tree block;
      gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
		  == NULL_TREE);
      BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
	= BLOCK_SUBBLOCKS (orig_block);
      for (block = BLOCK_SUBBLOCKS (orig_block);
	   block; block = BLOCK_CHAIN (block))
	BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
      BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
    }

  replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
				    vars_map, dest_cfun->decl);

  if (new_label_map)
    htab_delete (new_label_map);
  if (eh_map)
    pointer_map_destroy (eh_map);
  pointer_map_destroy (vars_map);

  /* Rewire the entry and exit blocks.  The successor to the entry
     block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
     the child function.  Similarly, the predecessor of DEST_FN's
     EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR.  We
     need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
     various CFG manipulation function get to the right CFG.

     FIXME, this is silly.  The CFG ought to become a parameter to
     these helpers.  */
  push_cfun (dest_cfun);
  make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
  if (exit_bb)
    make_edge (exit_bb,  EXIT_BLOCK_PTR, 0);
  pop_cfun ();

  /* Back in the original function, the SESE region has disappeared,
     create a new basic block in its place.  */
  bb = create_empty_bb (entry_pred[0]);
  if (current_loops)
    add_bb_to_loop (bb, loop);
  for (i = 0; i < num_entry_edges; i++)
    {
      e = make_edge (entry_pred[i], bb, entry_flag[i]);
      e->probability = entry_prob[i];
    }

  for (i = 0; i < num_exit_edges; i++)
    {
      e = make_edge (bb, exit_succ[i], exit_flag[i]);
      e->probability = exit_prob[i];
    }

  /* The replacement block takes over dominance of everything the moved
     region used to dominate.  */
  set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
  for (i = 0; VEC_iterate (basic_block, dom_bbs, i, abb); i++)
    set_immediate_dominator (CDI_DOMINATORS, abb, bb);
  VEC_free (basic_block, heap, dom_bbs);

  if (exit_bb)
    {
      free (exit_prob);
      free (exit_flag);
      free (exit_succ);
    }
  free (entry_prob);
  free (entry_flag);
  free (entry_pred);
  VEC_free (basic_block, heap, bbs);

  return bb;
}
6123 
6124 
6125 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree-pass.h)
6126    */
6127 
void
dump_function_to_file (tree fn, FILE *file, int flags)
{
  tree arg, vars, var;
  struct function *dsf;
  bool ignore_topmost_bind = false, any_var = false;
  basic_block bb;
  tree chain;

  /* Print the signature: "<name> (<type> <arg>, ...)".  */
  fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));

  arg = DECL_ARGUMENTS (fn);
  while (arg)
    {
      print_generic_expr (file, TREE_TYPE (arg), dump_flags);
      fprintf (file, " ");
      print_generic_expr (file, arg, dump_flags);
      if (flags & TDF_VERBOSE)
	print_node (file, "", arg, 4);
      if (TREE_CHAIN (arg))
	fprintf (file, ", ");
      arg = TREE_CHAIN (arg);
    }
  fprintf (file, ")\n");

  if (flags & TDF_VERBOSE)
    print_node (file, "", fn, 2);

  /* Dump the EH region tree when requested and FN has a struct function.  */
  dsf = DECL_STRUCT_FUNCTION (fn);
  if (dsf && (flags & TDF_EH))
    dump_eh_tree (file, dsf);

  /* With TDF_RAW and no GIMPLE body, fall back to a raw node dump.  */
  if (flags & TDF_RAW && !gimple_has_body_p (fn))
    {
      dump_node (fn, TDF_SLIM | flags, file);
      return;
    }

  /* Switch CFUN to point to FN.  */
  push_cfun (DECL_STRUCT_FUNCTION (fn));

  /* When GIMPLE is lowered, the variables are no longer available in
     BIND_EXPRs, so display them separately.  */
  if (cfun && cfun->decl == fn && cfun->local_decls)
    {
      ignore_topmost_bind = true;

      fprintf (file, "{\n");
      for (vars = cfun->local_decls; vars; vars = TREE_CHAIN (vars))
	{
	  var = TREE_VALUE (vars);

	  print_generic_decl (file, var, flags);
	  if (flags & TDF_VERBOSE)
	    print_node (file, "", var, 4);
	  fprintf (file, "\n");

	  any_var = true;
	}
    }

  if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
    {
      /* If the CFG has been built, emit a CFG-based dump.  */
      check_bb_profile (ENTRY_BLOCK_PTR, file);
      if (!ignore_topmost_bind)
	fprintf (file, "{\n");

      if (any_var && n_basic_blocks)
	fprintf (file, "\n");

      FOR_EACH_BB (bb)
	gimple_dump_bb (bb, file, 2, flags);

      fprintf (file, "}\n");
      check_bb_profile (EXIT_BLOCK_PTR, file);
    }
  else if (DECL_SAVED_TREE (fn) == NULL)
    {
      /* The function is now in GIMPLE form but the CFG has not been
	 built yet.  Emit the single sequence of GIMPLE statements
	 that make up its body.  */
      gimple_seq body = gimple_body (fn);

      /* A body that is a lone GIMPLE_BIND prints its own braces.  */
      if (gimple_seq_first_stmt (body)
	  && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
	  && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
	print_gimple_seq (file, body, 0, flags);
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");

	  if (any_var)
	    fprintf (file, "\n");

	  print_gimple_seq (file, body, 2, flags);
	  fprintf (file, "}\n");
	}
    }
  else
    {
      int indent;

      /* Make a tree based dump.  */
      chain = DECL_SAVED_TREE (fn);

      if (chain && TREE_CODE (chain) == BIND_EXPR)
	{
	  if (ignore_topmost_bind)
	    {
	      /* We already printed our own "{"; skip the bind's.  */
	      chain = BIND_EXPR_BODY (chain);
	      indent = 2;
	    }
	  else
	    indent = 0;
	}
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");
	  indent = 2;
	}

      if (any_var)
	fprintf (file, "\n");

      print_generic_stmt_indented (file, chain, flags, indent);
      if (ignore_topmost_bind)
	fprintf (file, "}\n");
    }

  fprintf (file, "\n\n");

  /* Restore CFUN.  */
  pop_cfun ();
}
6265 
6266 
6267 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h)  */
6268 
6269 void
6270 debug_function (tree fn, int flags)
6271 {
6272   dump_function_to_file (fn, stderr, flags);
6273 }
6274 
6275 
6276 /* Print on FILE the indexes for the predecessors of basic_block BB.  */
6277 
6278 static void
6279 print_pred_bbs (FILE *file, basic_block bb)
6280 {
6281   edge e;
6282   edge_iterator ei;
6283 
6284   FOR_EACH_EDGE (e, ei, bb->preds)
6285     fprintf (file, "bb_%d ", e->src->index);
6286 }
6287 
6288 
6289 /* Print on FILE the indexes for the successors of basic_block BB.  */
6290 
6291 static void
6292 print_succ_bbs (FILE *file, basic_block bb)
6293 {
6294   edge e;
6295   edge_iterator ei;
6296 
6297   FOR_EACH_EDGE (e, ei, bb->succs)
6298     fprintf (file, "bb_%d ", e->dest->index);
6299 }
6300 
6301 /* Print to FILE the basic block BB following the VERBOSITY level.  */
6302 
6303 void
6304 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
6305 {
6306   char *s_indent = (char *) alloca ((size_t) indent + 1);
6307   memset ((void *) s_indent, ' ', (size_t) indent);
6308   s_indent[indent] = '\0';
6309 
6310   /* Print basic_block's header.  */
6311   if (verbosity >= 2)
6312     {
6313       fprintf (file, "%s  bb_%d (preds = {", s_indent, bb->index);
6314       print_pred_bbs (file, bb);
6315       fprintf (file, "}, succs = {");
6316       print_succ_bbs (file, bb);
6317       fprintf (file, "})\n");
6318     }
6319 
6320   /* Print basic_block's body.  */
6321   if (verbosity >= 3)
6322     {
6323       fprintf (file, "%s  {\n", s_indent);
6324       gimple_dump_bb (bb, file, indent + 4, TDF_VOPS|TDF_MEMSYMS);
6325       fprintf (file, "%s  }\n", s_indent);
6326     }
6327 }
6328 
6329 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
6330 
6331 /* Pretty print LOOP on FILE, indented INDENT spaces.  Following
6332    VERBOSITY level this outputs the contents of the loop, or just its
6333    structure.  */
6334 
6335 static void
6336 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
6337 {
6338   char *s_indent;
6339   basic_block bb;
6340 
6341   if (loop == NULL)
6342     return;
6343 
6344   s_indent = (char *) alloca ((size_t) indent + 1);
6345   memset ((void *) s_indent, ' ', (size_t) indent);
6346   s_indent[indent] = '\0';
6347 
6348   /* Print loop's header.  */
6349   fprintf (file, "%sloop_%d (header = %d, latch = %d", s_indent,
6350 	   loop->num, loop->header->index, loop->latch->index);
6351   fprintf (file, ", niter = ");
6352   print_generic_expr (file, loop->nb_iterations, 0);
6353 
6354   if (loop->any_upper_bound)
6355     {
6356       fprintf (file, ", upper_bound = ");
6357       dump_double_int (file, loop->nb_iterations_upper_bound, true);
6358     }
6359 
6360   if (loop->any_estimate)
6361     {
6362       fprintf (file, ", estimate = ");
6363       dump_double_int (file, loop->nb_iterations_estimate, true);
6364     }
6365   fprintf (file, ")\n");
6366 
6367   /* Print loop's body.  */
6368   if (verbosity >= 1)
6369     {
6370       fprintf (file, "%s{\n", s_indent);
6371       FOR_EACH_BB (bb)
6372 	if (bb->loop_father == loop)
6373 	  print_loops_bb (file, bb, indent, verbosity);
6374 
6375       print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
6376       fprintf (file, "%s}\n", s_indent);
6377     }
6378 }
6379 
6380 /* Print the LOOP and its sibling loops on FILE, indented INDENT
6381    spaces.  Following VERBOSITY level this outputs the contents of the
6382    loop, or just its structure.  */
6383 
6384 static void
6385 print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
6386 {
6387   if (loop == NULL)
6388     return;
6389 
6390   print_loop (file, loop, indent, verbosity);
6391   print_loop_and_siblings (file, loop->next, indent, verbosity);
6392 }
6393 
6394 /* Follow a CFG edge from the entry point of the program, and on entry
6395    of a loop, pretty print the loop structure on FILE.  */
6396 
6397 void
6398 print_loops (FILE *file, int verbosity)
6399 {
6400   basic_block bb;
6401 
6402   bb = ENTRY_BLOCK_PTR;
6403   if (bb && bb->loop_father)
6404     print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
6405 }
6406 
6407 
6408 /* Debugging loops structure at tree level, at some VERBOSITY level.  */
6409 
void
debug_loops (int verbosity)
{
  /* Route the whole loop-tree dump to the standard error stream.  */
  FILE *out = stderr;

  print_loops (out, verbosity);
}
6415 
6416 /* Print on stderr the code of LOOP, at some VERBOSITY level.  */
6417 
void
debug_loop (struct loop *loop, int verbosity)
{
  /* Top-level dump: indentation starts at column zero.  */
  const int indent = 0;

  print_loop (stderr, loop, indent, verbosity);
}
6423 
6424 /* Print on stderr the code of loop number NUM, at some VERBOSITY
6425    level.  */
6426 
void
debug_loop_num (unsigned num, int verbosity)
{
  /* Look the loop up by number, then delegate to debug_loop.  */
  struct loop *loop = get_loop (num);

  debug_loop (loop, verbosity);
}
6432 
6433 /* Return true if BB ends with a call, possibly followed by some
6434    instructions that must stay with the call.  Return false,
6435    otherwise.  */
6436 
6437 static bool
6438 gimple_block_ends_with_call_p (basic_block bb)
6439 {
6440   gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6441   return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
6442 }
6443 
6444 
6445 /* Return true if BB ends with a conditional branch.  Return false,
6446    otherwise.  */
6447 
6448 static bool
6449 gimple_block_ends_with_condjump_p (const_basic_block bb)
6450 {
6451   gimple stmt = last_stmt (CONST_CAST_BB (bb));
6452   return (stmt && gimple_code (stmt) == GIMPLE_COND);
6453 }
6454 
6455 
6456 /* Return true if we need to add fake edge to exit at statement T.
6457    Helper function for gimple_flow_call_edges_add.  */
6458 
6459 static bool
6460 need_fake_edge_p (gimple t)
6461 {
6462   tree fndecl = NULL_TREE;
6463   int call_flags = 0;
6464 
6465   /* NORETURN and LONGJMP calls already have an edge to exit.
6466      CONST and PURE calls do not need one.
6467      We don't currently check for CONST and PURE here, although
6468      it would be a good idea, because those attributes are
6469      figured out from the RTL in mark_constant_function, and
6470      the counter incrementation code from -fprofile-arcs
6471      leads to different results from -fbranch-probabilities.  */
6472   if (is_gimple_call (t))
6473     {
6474       fndecl = gimple_call_fndecl (t);
6475       call_flags = gimple_call_flags (t);
6476     }
6477 
6478   if (is_gimple_call (t)
6479       && fndecl
6480       && DECL_BUILT_IN (fndecl)
6481       && (call_flags & ECF_NOTHROW)
6482       && !(call_flags & ECF_RETURNS_TWICE)
6483       /* fork() doesn't really return twice, but the effect of
6484          wrapping it in __gcov_fork() which calls __gcov_flush()
6485 	 and clears the counters before forking has the same
6486 	 effect as returning twice.  Force a fake edge.  */
6487       && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6488 	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
6489     return false;
6490 
6491   if (is_gimple_call (t)
6492       && !(call_flags & ECF_NORETURN))
6493     return true;
6494 
6495   if (gimple_code (t) == GIMPLE_ASM
6496        && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
6497     return true;
6498 
6499   return false;
6500 }
6501 
6502 
6503 /* Add fake edges to the function exit for any non constant and non
6504    noreturn calls, volatile inline assembly in the bitmap of blocks
6505    specified by BLOCKS or to the whole CFG if BLOCKS is zero.  Return
6506    the number of blocks that were split.
6507 
6508    The goal is to expose cases in which entering a basic block does
6509    not imply that all subsequent instructions must be executed.  */
6510 
static int
gimple_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block;
  bool check_last_block = false;

  /* An empty function (only the fixed entry/exit blocks) needs no work.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    return 0;

  /* Decide whether the block just before EXIT needs the special
     treatment described below.  */
  if (! blocks)
    check_last_block = true;
  else
    check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple t = NULL;

      if (!gsi_end_p (gsi))
	t = gsi_stmt (gsi);

      if (t && need_fake_edge_p (t))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR);
	  if (e)
	    {
	      /* Inserting the nop on the edge splits it, so the call is
		 no longer the last statement before EXIT.  */
	      gsi_insert_on_edge (e, gimple_build_nop ());
	      gsi_commit_edge_inserts ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */
  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      gimple_stmt_iterator gsi;
      gimple stmt, last_stmt;

      if (!bb)
	continue;

      if (blocks && !TEST_BIT (blocks, i))
	continue;

      /* Walk the statements backwards so that splitting BB below does
	 not disturb the part of the block already processed.  */
      gsi = gsi_last_bb (bb);
      if (!gsi_end_p (gsi))
	{
	  last_stmt = gsi_stmt (gsi);
	  do
	    {
	      stmt = gsi_stmt (gsi);
	      if (need_fake_edge_p (stmt))
		{
		  edge e;

		  /* The handling above of the final block before the
		     epilogue should be enough to verify that there is
		     no edge to the exit block in CFG already.
		     Calling make_edge in such case would cause us to
		     mark that edge as fake and remove it later.  */
#ifdef ENABLE_CHECKING
		  if (stmt == last_stmt)
		    {
		      e = find_edge (bb, EXIT_BLOCK_PTR);
		      gcc_assert (e == NULL);
		    }
#endif

		  /* Note that the following may create a new basic block
		     and renumber the existing basic blocks.  */
		  if (stmt != last_stmt)
		    {
		      e = split_block (bb, stmt);
		      if (e)
			blocks_split++;
		    }
		  make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
		}
	      gsi_prev (&gsi);
	    }
	  while (!gsi_end_p (gsi));
	}
    }

  /* Only re-verify when something actually changed.  */
  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
6621 
6622 /* Purge dead abnormal call edges from basic block BB.  */
6623 
6624 bool
6625 gimple_purge_dead_abnormal_call_edges (basic_block bb)
6626 {
6627   bool changed = gimple_purge_dead_eh_edges (bb);
6628 
6629   if (cfun->has_nonlocal_label)
6630     {
6631       gimple stmt = last_stmt (bb);
6632       edge_iterator ei;
6633       edge e;
6634 
6635       if (!(stmt && stmt_can_make_abnormal_goto (stmt)))
6636 	for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6637 	  {
6638 	    if (e->flags & EDGE_ABNORMAL)
6639 	      {
6640 		remove_edge (e);
6641 		changed = true;
6642 	      }
6643 	    else
6644 	      ei_next (&ei);
6645 	  }
6646 
6647       /* See gimple_purge_dead_eh_edges below.  */
6648       if (changed)
6649 	free_dominance_info (CDI_DOMINATORS);
6650     }
6651 
6652   return changed;
6653 }
6654 
6655 /* Removes edge E and all the blocks dominated by it, and updates dominance
6656    information.  The IL in E->src needs to be updated separately.
6657    If dominance info is not available, only the edge E is removed.*/
6658 
void
remove_edge_and_dominated_blocks (edge e)
{
  VEC (basic_block, heap) *bbs_to_remove = NULL;
  VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
  bitmap df, df_idom;
  edge f;
  edge_iterator ei;
  bool none_removed = false;
  unsigned i;
  basic_block bb, dbb;
  bitmap_iterator bi;

  /* Without dominance info only the edge itself can be removed.  */
  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      remove_edge (e);
      return;
    }

  /* No updating is needed for edges to exit.  */
  if (e->dest == EXIT_BLOCK_PTR)
    {
      if (cfgcleanup_altered_bbs)
	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      remove_edge (e);
      return;
    }

  /* First, we find the basic blocks to remove.  If E->dest has a predecessor
     that is not dominated by E->dest, then this set is empty.  Otherwise,
     all the basic blocks dominated by E->dest are removed.

     Also, to DF_IDOM we store the immediate dominators of the blocks in
     the dominance frontier of E (i.e., of the successors of the
     removed blocks, if there are any, and of E->dest otherwise).  */
  FOR_EACH_EDGE (f, ei, e->dest->preds)
    {
      if (f == e)
	continue;

      if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
	{
	  none_removed = true;
	  break;
	}
    }

  df = BITMAP_ALLOC (NULL);
  df_idom = BITMAP_ALLOC (NULL);

  if (none_removed)
    bitmap_set_bit (df_idom,
		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
  else
    {
      bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
      /* DF collects the surviving successors of the removed blocks;
	 these form the dominance frontier of E.  */
      for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
	{
	  FOR_EACH_EDGE (f, ei, bb->succs)
	    {
	      if (f->dest != EXIT_BLOCK_PTR)
		bitmap_set_bit (df, f->dest->index);
	    }
	}
      for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
	bitmap_clear_bit (df, bb->index);

      EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
	{
	  bb = BASIC_BLOCK (i);
	  bitmap_set_bit (df_idom,
			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
	}
    }

  if (cfgcleanup_altered_bbs)
    {
      /* Record the set of the altered basic blocks.  */
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      bitmap_ior_into (cfgcleanup_altered_bbs, df);
    }

  /* Remove E and the cancelled blocks.  */
  if (none_removed)
    remove_edge (e);
  else
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = VEC_length (basic_block, bbs_to_remove); i-- > 0; )
	delete_basic_block (VEC_index (basic_block, bbs_to_remove, i));
    }

  /* Update the dominance information.  The immediate dominator may change only
     for blocks whose immediate dominator belongs to DF_IDOM:

     Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
     removal.  Let Z the arbitrary block such that idom(Z) = Y and
     Z dominates X after the removal.  Before removal, there exists a path P
     from Y to X that avoids Z.  Let F be the last edge on P that is
     removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
     dominates W, and because of P, Z does not dominate W), and W belongs to
     the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
  EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
    {
      bb = BASIC_BLOCK (i);
      for (dbb = first_dom_son (CDI_DOMINATORS, bb);
	   dbb;
	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
	VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
    }

  iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);

  BITMAP_FREE (df);
  BITMAP_FREE (df_idom);
  VEC_free (basic_block, heap, bbs_to_remove);
  VEC_free (basic_block, heap, bbs_to_fix_dom);
}
6780 
6781 /* Purge dead EH edges from basic block BB.  */
6782 
6783 bool
6784 gimple_purge_dead_eh_edges (basic_block bb)
6785 {
6786   bool changed = false;
6787   edge e;
6788   edge_iterator ei;
6789   gimple stmt = last_stmt (bb);
6790 
6791   if (stmt && stmt_can_throw_internal (stmt))
6792     return false;
6793 
6794   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6795     {
6796       if (e->flags & EDGE_EH)
6797 	{
6798 	  remove_edge_and_dominated_blocks (e);
6799 	  changed = true;
6800 	}
6801       else
6802 	ei_next (&ei);
6803     }
6804 
6805   return changed;
6806 }
6807 
6808 bool
6809 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
6810 {
6811   bool changed = false;
6812   unsigned i;
6813   bitmap_iterator bi;
6814 
6815   EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
6816     {
6817       basic_block bb = BASIC_BLOCK (i);
6818 
6819       /* Earlier gimple_purge_dead_eh_edges could have removed
6820 	 this basic block already.  */
6821       gcc_assert (bb || changed);
6822       if (bb != NULL)
6823 	changed |= gimple_purge_dead_eh_edges (bb);
6824     }
6825 
6826   return changed;
6827 }
6828 
6829 /* This function is called whenever a new edge is created or
6830    redirected.  */
6831 
6832 static void
6833 gimple_execute_on_growing_pred (edge e)
6834 {
6835   basic_block bb = e->dest;
6836 
6837   if (!gimple_seq_empty_p (phi_nodes (bb)))
6838     reserve_phi_args_for_new_edge (bb);
6839 }
6840 
6841 /* This function is called immediately before edge E is removed from
6842    the edge vector E->dest->preds.  */
6843 
6844 static void
6845 gimple_execute_on_shrinking_pred (edge e)
6846 {
6847   if (!gimple_seq_empty_p (phi_nodes (e->dest)))
6848     remove_phi_args (e);
6849 }
6850 
6851 /*---------------------------------------------------------------------------
6852   Helper functions for Loop versioning
6853   ---------------------------------------------------------------------------*/
6854 
6855 /* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
6856    of 'first'. Both of them are dominated by 'new_head' basic block. When
6857    'new_head' was created by 'second's incoming edge it received phi arguments
6858    on the edge by split_edge(). Later, additional edge 'e' was created to
6859    connect 'new_head' and 'first'. Now this routine adds phi args on this
6860    additional edge 'e' that new_head to second edge received as part of edge
6861    splitting.  */
6862 
6863 static void
6864 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
6865 				  basic_block new_head, edge e)
6866 {
6867   gimple phi1, phi2;
6868   gimple_stmt_iterator psi1, psi2;
6869   tree def;
6870   edge e2 = find_edge (new_head, second);
6871 
6872   /* Because NEW_HEAD has been created by splitting SECOND's incoming
6873      edge, we should always have an edge from NEW_HEAD to SECOND.  */
6874   gcc_assert (e2 != NULL);
6875 
6876   /* Browse all 'second' basic block phi nodes and add phi args to
6877      edge 'e' for 'first' head. PHI args are always in correct order.  */
6878 
6879   for (psi2 = gsi_start_phis (second),
6880        psi1 = gsi_start_phis (first);
6881        !gsi_end_p (psi2) && !gsi_end_p (psi1);
6882        gsi_next (&psi2),  gsi_next (&psi1))
6883     {
6884       phi1 = gsi_stmt (psi1);
6885       phi2 = gsi_stmt (psi2);
6886       def = PHI_ARG_DEF (phi2, e2->dest_idx);
6887       add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
6888     }
6889 }
6890 
6891 
6892 /* Adds a if else statement to COND_BB with condition COND_EXPR.
6893    SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
6894    the destination of the ELSE part.  */
6895 
6896 static void
6897 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
6898 			       basic_block second_head ATTRIBUTE_UNUSED,
6899 			       basic_block cond_bb, void *cond_e)
6900 {
6901   gimple_stmt_iterator gsi;
6902   gimple new_cond_expr;
6903   tree cond_expr = (tree) cond_e;
6904   edge e0;
6905 
6906   /* Build new conditional expr */
6907   new_cond_expr = gimple_build_cond_from_tree (cond_expr,
6908 					       NULL_TREE, NULL_TREE);
6909 
6910   /* Add new cond in cond_bb.  */
6911   gsi = gsi_last_bb (cond_bb);
6912   gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
6913 
6914   /* Adjust edges appropriately to connect new head with first head
6915      as well as second head.  */
6916   e0 = single_succ_edge (cond_bb);
6917   e0->flags &= ~EDGE_FALLTHRU;
6918   e0->flags |= EDGE_FALSE_VALUE;
6919 }
6920 
/* The set of CFG hook implementations used while the IL is in GIMPLE
   form.  */
struct cfg_hooks gimple_cfg_hooks = {
  "gimple",			/* name  */
  gimple_verify_flow_info,	/* verify_flow_info  */
  gimple_dump_bb,		/* dump_bb  */
  create_bb,			/* create_basic_block  */
  gimple_redirect_edge_and_branch, /* redirect_edge_and_branch  */
  gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force  */
  gimple_can_remove_branch_p,	/* can_remove_branch_p  */
  remove_bb,			/* delete_basic_block  */
  gimple_split_block,		/* split_block  */
  gimple_move_block_after,	/* move_block_after  */
  gimple_can_merge_blocks_p,	/* can_merge_blocks_p  */
  gimple_merge_blocks,		/* merge_blocks  */
  gimple_predict_edge,		/* predict_edge  */
  gimple_predicted_by_p,		/* predicted_by_p  */
  gimple_can_duplicate_bb_p,	/* can_duplicate_block_p  */
  gimple_duplicate_bb,		/* duplicate_block  */
  gimple_split_edge,		/* split_edge  */
  gimple_make_forwarder_block,	/* make_forward_block  */
  NULL,				/* tidy_fallthru_edge  */
  gimple_block_ends_with_call_p,/* block_ends_with_call_p */
  gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
  gimple_flow_call_edges_add,     /* flow_call_edges_add */
  gimple_execute_on_growing_pred,	/* execute_on_growing_pred */
  gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
  gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
  gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
  extract_true_false_edges_from_block, /* extract_cond_bb_edges */
  flush_pending_stmts		/* flush_pending_stmts */
};
6952 
6953 
6954 /* Split all critical edges.  */
6955 
static unsigned int
split_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  /* Walk every block (including ENTRY and EXIT) and split each
     critical, non-abnormal successor edge.  */
  FOR_ALL_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
	  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
	    split_edge (e);
	  /* PRE inserts statements to edges and expects that
	     since split_critical_edges was done beforehand, committing edge
	     insertions will not split more edges.  In addition to critical
	     edges we must split edges that have multiple successors and
	     end by control flow statements, such as RESX.
	     Go ahead and split them too.  This matches the logic in
	     gimple_find_edge_insert_loc.  */
	  else if ((!single_pred_p (e->dest)
	            || !gimple_seq_empty_p (phi_nodes (e->dest))
	            || e->dest == EXIT_BLOCK_PTR)
		   && e->src != ENTRY_BLOCK_PTR
	           && !(e->flags & EDGE_ABNORMAL))
	    {
	      gimple_stmt_iterator gsi;

	      gsi = gsi_last_bb (e->src);
	      if (!gsi_end_p (gsi)
		  && stmt_ends_bb_p (gsi_stmt (gsi))
		  && gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN)
		split_edge (e);
	    }
	}
    }
  end_recording_case_labels ();
  /* Pass execute functions return extra TODO flags; none are needed.  */
  return 0;
}
6999 
/* Pass descriptor for split_critical_edges; requires a CFG and
   provides PROP_no_crit_edges.  */
struct gimple_opt_pass pass_split_crit_edges =
{
 {
  GIMPLE_PASS,
  "crited",                          /* name */
  NULL,                          /* gate */
  split_critical_edges,          /* execute */
  NULL,                          /* sub */
  NULL,                          /* next */
  0,                             /* static_pass_number */
  TV_TREE_SPLIT_EDGES,           /* tv_id */
  PROP_cfg,                      /* properties_required */
  PROP_no_crit_edges,            /* properties_provided */
  0,                             /* properties_destroyed */
  0,                             /* todo_flags_start */
  TODO_dump_func | TODO_verify_flow  /* todo_flags_finish */
 }
};
7018 
7019 
7020 /* Build a ternary operation and gimplify it.  Emit code before GSI.
7021    Return the gimple_val holding the result.  */
7022 
7023 tree
7024 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
7025 		 tree type, tree a, tree b, tree c)
7026 {
7027   tree ret;
7028   location_t loc = gimple_location (gsi_stmt (*gsi));
7029 
7030   ret = fold_build3_loc (loc, code, type, a, b, c);
7031   STRIP_NOPS (ret);
7032 
7033   return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7034                                    GSI_SAME_STMT);
7035 }
7036 
7037 /* Build a binary operation and gimplify it.  Emit code before GSI.
7038    Return the gimple_val holding the result.  */
7039 
7040 tree
7041 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
7042 		 tree type, tree a, tree b)
7043 {
7044   tree ret;
7045 
7046   ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
7047   STRIP_NOPS (ret);
7048 
7049   return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7050                                    GSI_SAME_STMT);
7051 }
7052 
7053 /* Build a unary operation and gimplify it.  Emit code before GSI.
7054    Return the gimple_val holding the result.  */
7055 
7056 tree
7057 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
7058 		 tree a)
7059 {
7060   tree ret;
7061 
7062   ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
7063   STRIP_NOPS (ret);
7064 
7065   return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7066                                    GSI_SAME_STMT);
7067 }
7068 
7069 
7070 
/* Emit return warnings: warn when a function marked noreturn can in
   fact return, and when control can reach the end of a non-void
   function without a value (-Wreturn-type).  */

static unsigned int
execute_warn_function_return (void)
{
  source_location location;
  gimple last;
  edge e;
  edge_iterator ei;

  /* If we have a path to EXIT, then we do return.  */
  /* TREE_THIS_VOLATILE on a FUNCTION_DECL records the noreturn
     attribute; any predecessor edge of the exit block means the
     function can return despite it.  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
    {
      location = UNKNOWN_LOCATION;
      /* Point the warning at the first GIMPLE_RETURN that carries a
	 real location; note LOCATION is assigned inside the
	 condition.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  last = last_stmt (e->src);
	  if (gimple_code (last) == GIMPLE_RETURN
	      && (location = gimple_location (last)) != UNKNOWN_LOCATION)
	    break;
	}
      /* Fall back to the closing brace of the function.  */
      if (location == UNKNOWN_LOCATION)
	location = cfun->function_end_locus;
      if (warn_missing_noreturn)
        warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
	   && !TREE_NO_WARNING (cfun->decl)
	   && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  gimple last = last_stmt (e->src);
	  /* Warn on the first value-less return that has not been
	     individually suppressed.  */
	  if (gimple_code (last) == GIMPLE_RETURN
	      && gimple_return_retval (last) == NULL
	      && !gimple_no_warning_p (last))
	    {
	      location = gimple_location (last);
	      if (location == UNKNOWN_LOCATION)
		  location = cfun->function_end_locus;
	      warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
	      /* Suppress further -Wreturn-type warnings for this
		 function.  */
	      TREE_NO_WARNING (cfun->decl) = 1;
	      break;
	    }
	}
    }
  return 0;
}
7124 
7125 
7126 /* Given a basic block B which ends with a conditional and has
7127    precisely two successors, determine which of the edges is taken if
7128    the conditional is true and which is taken if the conditional is
7129    false.  Set TRUE_EDGE and FALSE_EDGE appropriately.  */
7130 
7131 void
7132 extract_true_false_edges_from_block (basic_block b,
7133 				     edge *true_edge,
7134 				     edge *false_edge)
7135 {
7136   edge e = EDGE_SUCC (b, 0);
7137 
7138   if (e->flags & EDGE_TRUE_VALUE)
7139     {
7140       *true_edge = e;
7141       *false_edge = EDGE_SUCC (b, 1);
7142     }
7143   else
7144     {
7145       *false_edge = e;
7146       *true_edge = EDGE_SUCC (b, 1);
7147     }
7148 }
7149 
/* Pass descriptor for execute_warn_function_return.  The leading '*'
   in the name keeps it out of -fdump-tree-all; the NULL gate makes it
   run unconditionally.  */
struct gimple_opt_pass pass_warn_function_return =
{
 {
  GIMPLE_PASS,
  "*warn_function_return",		/* name */
  NULL,					/* gate */
  execute_warn_function_return,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};
7168 
7169 /* Emit noreturn warnings.  */
7170 
7171 static unsigned int
7172 execute_warn_function_noreturn (void)
7173 {
7174   if (warn_missing_noreturn
7175       && !TREE_THIS_VOLATILE (cfun->decl)
7176       && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
7177       && !lang_hooks.missing_noreturn_ok_p (cfun->decl))
7178     warning_at (DECL_SOURCE_LOCATION (cfun->decl), OPT_Wmissing_noreturn,
7179 		"function might be possible candidate "
7180 		"for attribute %<noreturn%>");
7181   return 0;
7182 }
7183 
/* Pass descriptor for execute_warn_function_noreturn.  The leading
   '*' in the name keeps it out of -fdump-tree-all; the NULL gate
   makes it run unconditionally.  */
struct gimple_opt_pass pass_warn_function_noreturn =
{
 {
  GIMPLE_PASS,
  "*warn_function_noreturn",		/* name */
  NULL,					/* gate */
  execute_warn_function_noreturn,	/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};
7202 
7203 
7204 /* Walk a gimplified function and warn for functions whose return value is
7205    ignored and attribute((warn_unused_result)) is set.  This is done before
7206    inlining, so we don't have to worry about that.  */
7207 
7208 static void
7209 do_warn_unused_result (gimple_seq seq)
7210 {
7211   tree fdecl, ftype;
7212   gimple_stmt_iterator i;
7213 
7214   for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
7215     {
7216       gimple g = gsi_stmt (i);
7217 
7218       switch (gimple_code (g))
7219 	{
7220 	case GIMPLE_BIND:
7221 	  do_warn_unused_result (gimple_bind_body (g));
7222 	  break;
7223 	case GIMPLE_TRY:
7224 	  do_warn_unused_result (gimple_try_eval (g));
7225 	  do_warn_unused_result (gimple_try_cleanup (g));
7226 	  break;
7227 	case GIMPLE_CATCH:
7228 	  do_warn_unused_result (gimple_catch_handler (g));
7229 	  break;
7230 	case GIMPLE_EH_FILTER:
7231 	  do_warn_unused_result (gimple_eh_filter_failure (g));
7232 	  break;
7233 
7234 	case GIMPLE_CALL:
7235 	  if (gimple_call_lhs (g))
7236 	    break;
7237 
7238 	  /* This is a naked call, as opposed to a GIMPLE_CALL with an
7239 	     LHS.  All calls whose value is ignored should be
7240 	     represented like this.  Look for the attribute.  */
7241 	  fdecl = gimple_call_fndecl (g);
7242 	  ftype = TREE_TYPE (TREE_TYPE (gimple_call_fn (g)));
7243 
7244 	  if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
7245 	    {
7246 	      location_t loc = gimple_location (g);
7247 
7248 	      if (fdecl)
7249 		warning_at (loc, OPT_Wunused_result,
7250 			    "ignoring return value of %qD, "
7251 			    "declared with attribute warn_unused_result",
7252 			    fdecl);
7253 	      else
7254 		warning_at (loc, OPT_Wunused_result,
7255 			    "ignoring return value of function "
7256 			    "declared with attribute warn_unused_result");
7257 	    }
7258 	  break;
7259 
7260 	default:
7261 	  /* Not a container, not a call, or a call whose value is used.  */
7262 	  break;
7263 	}
7264     }
7265 }
7266 
7267 static unsigned int
7268 run_warn_unused_result (void)
7269 {
7270   do_warn_unused_result (gimple_body (current_function_decl));
7271   return 0;
7272 }
7273 
7274 static bool
7275 gate_warn_unused_result (void)
7276 {
7277   return flag_warn_unused_result;
7278 }
7279 
/* Pass descriptor for run_warn_unused_result; gated on
   -Wunused-result and run on the gimplified body (PROP_gimple_any)
   before inlining.  */
struct gimple_opt_pass pass_warn_unused_result =
{
  {
    GIMPLE_PASS,
    "*warn_unused_result",		/* name */
    gate_warn_unused_result,		/* gate */
    run_warn_unused_result,		/* execute */
    NULL,				/* sub */
    NULL,				/* next */
    0,					/* static_pass_number */
    TV_NONE,				/* tv_id */
    PROP_gimple_any,			/* properties_required */
    0,					/* properties_provided */
    0,					/* properties_destroyed */
    0,					/* todo_flags_start */
    0,					/* todo_flags_finish */
  }
};
7298