xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/tree-cfg.c (revision 946379e7b37692fc43f68eb0d1c10daa0a7f3b6c)
1 /* Control flow functions for trees.
2    Copyright (C) 2001-2013 Free Software Foundation, Inc.
3    Contributed by Diego Novillo <dnovillo@redhat.com>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "tm_p.h"
27 #include "basic-block.h"
28 #include "flags.h"
29 #include "function.h"
30 #include "ggc.h"
31 #include "gimple-pretty-print.h"
32 #include "tree-flow.h"
33 #include "tree-dump.h"
34 #include "tree-pass.h"
35 #include "diagnostic-core.h"
36 #include "except.h"
37 #include "cfgloop.h"
38 #include "tree-ssa-propagate.h"
39 #include "value-prof.h"
40 #include "pointer-set.h"
41 #include "tree-inline.h"
42 #include "target.h"
43 
44 /* This file contains functions for building the Control Flow Graph (CFG)
45    for a function tree.  */
46 
/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static struct pointer_map_t *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;	/* Number of labels merged into their
				   predecessor block.  */
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;

/* Hash table entry: the last discriminator assigned for each locus.
   Discriminators let sample-based profiling tell apart multiple basic
   blocks that share the same source location.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};
static htab_t discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);
static void factor_computed_gotos (void);

/* Edges.  */
static void make_edges (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static unsigned int locus_map_hash (const void *);
static int locus_map_eq (const void *, const void *);
static void assign_discriminator (location_t, basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);
123 
/* Initialize the CFG of function FN to its minimal "empty" state:
   just the two fixed blocks (ENTRY and EXIT) linked to each other,
   a pre-sized basic-block array, an empty label-to-block map, and
   no profile information.  */

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_function (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_function (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_function (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_function (fn),
			 initial_cfg_capacity);

  /* Install the two fixed blocks at their reserved indices.  */
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
				ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
		   EXIT_BLOCK_PTR_FOR_FUNCTION (fn));

  /* Link ENTRY and EXIT into the doubly-linked chain of blocks.  */
  ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
  EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
}
151 
/* Convenience wrapper: initialize an empty CFG for the current
   function (cfun).  */

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}
157 
158 /*---------------------------------------------------------------------------
159 			      Create basic blocks
160 ---------------------------------------------------------------------------*/
161 
162 /* Entry point to the CFG builder for trees.  SEQ is the sequence of
163    statements to be added to the flowgraph.  */
164 
165 static void
166 build_gimple_cfg (gimple_seq seq)
167 {
168   /* Register specific gimple functions.  */
169   gimple_register_cfg_hooks ();
170 
171   memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
172 
173   init_empty_tree_cfg ();
174 
175   found_computed_goto = 0;
176   make_blocks (seq);
177 
178   /* Computed gotos are hell to deal with, especially if there are
179      lots of them with a large number of destinations.  So we factor
180      them to a common computed goto location before we build the
181      edge list.  After we convert back to normal form, we will un-factor
182      the computed gotos since factoring introduces an unwanted jump.  */
183   if (found_computed_goto)
184     factor_computed_gotos ();
185 
186   /* Make sure there is always at least one block, even if it's empty.  */
187   if (n_basic_blocks == NUM_FIXED_BLOCKS)
188     create_empty_bb (ENTRY_BLOCK_PTR);
189 
190   /* Adjust the size of the array.  */
191   if (basic_block_info->length () < (size_t) n_basic_blocks)
192     vec_safe_grow_cleared (basic_block_info, n_basic_blocks);
193 
194   /* To speed up statement iterator walks, we first purge dead labels.  */
195   cleanup_dead_labels ();
196 
197   /* Group case nodes to reduce the number of edges.
198      We do this after cleaning up dead labels because otherwise we miss
199      a lot of obvious case merging opportunities.  */
200   group_case_labels ();
201 
202   /* Create the edges of the flowgraph.  */
203   discriminator_per_locus = htab_create (13, locus_map_hash, locus_map_eq,
204                                          free);
205   make_edges ();
206   cleanup_dead_labels ();
207   htab_delete (discriminator_per_locus);
208 }
209 
/* Pass execute function: build the CFG for the current function and
   detach the flat gimple body (the statements now live in the basic
   blocks).  Returns 0 (no additional TODO flags).  */

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  /* The statement sequence is now owned by the CFG; clear the body
     so it is not walked twice.  */
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  return 0;
}
224 
/* Pass descriptor for the CFG construction pass ("cfg").  Requires a
   gimple body with lowered EH (PROP_gimple_leh) and provides
   PROP_cfg.  NOTE: initializers are positional; keep them in the
   declared field order of struct opt_pass.  */

struct gimple_opt_pass pass_build_cfg =
{
 {
  GIMPLE_PASS,
  "cfg",				/* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  NULL,					/* gate */
  execute_build_cfg,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CFG,				/* tv_id */
  PROP_gimple_leh, 			/* properties_required */
  PROP_cfg,				/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_stmts | TODO_cleanup_cfg  /* todo_flags_finish */
 }
};
244 
245 
246 /* Return true if T is a computed goto.  */
247 
248 static bool
249 computed_goto_p (gimple t)
250 {
251   return (gimple_code (t) == GIMPLE_GOTO
252 	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
253 }
254 
255 
/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.

   After this pass each original computed goto becomes an assignment
   of its destination into a shared variable followed by a jump to a
   single factored block that performs the one remaining computed
   goto.  */

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;	/* Label of the factored block.  */
  tree var = NULL;			/* Shared destination variable.  */
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;	/* The one goto we keep.  */

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
	continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
	 computed gotos.  */
      if (last == factored_computed_goto)
	continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
	{
	  gimple assignment;

	  /* The first time we find a computed goto we need to create
	     the factored goto block and the variable each original
	     computed goto will use for their goto destination.  */
	  if (!factored_computed_goto)
	    {
	      basic_block new_bb = create_empty_bb (bb);
	      gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

	      /* Create the destination of the factored goto.  Each original
		 computed goto will put its desired destination into this
		 variable and jump to the label we create immediately
		 below.  */
	      var = create_tmp_var (ptr_type_node, "gotovar");

	      /* Build a label for the new block which will contain the
		 factored computed goto.  */
	      factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
	      factored_computed_goto_label
		= gimple_build_label (factored_label_decl);
	      gsi_insert_after (&new_gsi, factored_computed_goto_label,
				GSI_NEW_STMT);

	      /* Build our new computed goto.  */
	      factored_computed_goto = gimple_build_goto (var);
	      gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
	    }

	  /* Copy the original computed goto's destination into VAR.  */
	  assignment = gimple_build_assign (var, gimple_goto_dest (last));
	  gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	  /* And re-vector the computed goto to the new destination.  */
	  gimple_goto_set_dest (last, factored_label_decl);
	}
    }
}
330 
331 
/* Build a flowgraph for the sequence of stmts SEQ.

   Walks SEQ once, splitting it into basic blocks at statements that
   either start a block (labels, etc. -- see stmt_starts_bb_p) or end
   one (control transfers -- see stmt_ends_bb_p).  Also records
   whether any computed goto was seen (found_computed_goto).  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, NULL, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
	found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
	      gimple s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      /* Complex/vector temporaries must be marked as gimple
		 registers explicitly.  */
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}
399 
400 
/* Create and return a new empty basic block after bb AFTER.

   H is the gimple_seq the block should hold (may be NULL); E must be
   NULL (this hook never receives an edge).  The new block is linked
   into the block chain, registered in the basic_block array (growing
   it by ~25%% when full), and the global block counters are bumped.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block == basic_block_info->length ())
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      vec_safe_grow_cleared (basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks++;
  last_basic_block++;

  return bb;
}
437 
438 
439 /*---------------------------------------------------------------------------
440 				 Edge creation
441 ---------------------------------------------------------------------------*/
442 
/* Fold COND_EXPR_COND of each COND_EXPR.

   For every block ending in a GIMPLE_COND, try to fold the condition
   to a constant; rewrite the statement to an unconditional true/false
   form when that succeeds.  Overflow warnings are deferred around the
   fold and only emitted when the fold actually decided the branch.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
	{
	  location_t loc = gimple_location (stmt);
	  tree cond;
	  bool zerop, onep;

	  fold_defer_overflow_warnings ();
	  cond = fold_binary_loc (loc, gimple_cond_code (stmt), boolean_type_node,
			      gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
	  if (cond)
	    {
	      zerop = integer_zerop (cond);
	      onep = integer_onep (cond);
	    }
	  else
	    zerop = onep = false;

	  /* Only emit deferred warnings when the fold mattered.  */
	  fold_undefer_overflow_warnings (zerop || onep,
					  stmt,
					  WARN_STRICT_OVERFLOW_CONDITIONAL);
	  if (zerop)
	    gimple_cond_make_false (stmt);
	  else if (onep)
	    gimple_cond_make_true (stmt);
	}
    }
}
481 
/* Join all the blocks in the flowgraph.

   Dispatches on the last statement of each block to create its
   outgoing edges.  CUR_REGION tracks the innermost open OpenMP
   region so that OMP_RETURN/OMP_CONTINUE statements can be wired to
   their matching region entry.  Fall-through edges also get a source
   discriminator assigned for their destination block.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
	{
	  enum gimple_code code = gimple_code (last);
	  switch (code)
	    {
	    case GIMPLE_GOTO:
	      make_goto_expr_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_RETURN:
	      make_edge (bb, EXIT_BLOCK_PTR, 0);
	      fallthru = false;
	      break;
	    case GIMPLE_COND:
	      make_cond_expr_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_SWITCH:
	      make_gimple_switch_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_RESX:
	      make_eh_edges (last);
	      fallthru = false;
	      break;
	    case GIMPLE_EH_DISPATCH:
	      fallthru = make_eh_dispatch_edges (last);
	      break;

	    case GIMPLE_CALL:
	      /* If this function receives a nonlocal goto, then we need to
		 make edges from this call site to all the nonlocal goto
		 handlers.  */
	      if (stmt_can_make_abnormal_goto (last))
		make_abnormal_goto_edges (bb, true);

	      /* If this statement has reachable exception handlers, then
		 create abnormal edges to them.  */
	      make_eh_edges (last);

	      /* BUILTIN_RETURN is really a return statement.  */
	      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
		make_edge (bb, EXIT_BLOCK_PTR, 0), fallthru = false;
	      /* Some calls are known not to return.  */
	      else
	        fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
	      break;

	    case GIMPLE_ASSIGN:
	       /* A GIMPLE_ASSIGN may throw internally and thus be considered
		  control-altering. */
	      if (is_ctrl_altering_stmt (last))
		make_eh_edges (last);
	      fallthru = true;
	      break;

	    case GIMPLE_ASM:
	      make_gimple_asm_edges (bb);
	      fallthru = true;
	      break;

	    /* Directives that open a new OMP region: push it on the
	       region stack.  */
	    case GIMPLE_OMP_PARALLEL:
	    case GIMPLE_OMP_TASK:
	    case GIMPLE_OMP_FOR:
	    case GIMPLE_OMP_SINGLE:
	    case GIMPLE_OMP_MASTER:
	    case GIMPLE_OMP_ORDERED:
	    case GIMPLE_OMP_CRITICAL:
	    case GIMPLE_OMP_SECTION:
	      cur_region = new_omp_region (bb, code, cur_region);
	      fallthru = true;
	      break;

	    case GIMPLE_OMP_SECTIONS:
	      cur_region = new_omp_region (bb, code, cur_region);
	      fallthru = true;
	      break;

	    case GIMPLE_OMP_SECTIONS_SWITCH:
	      fallthru = false;
	      break;

            case GIMPLE_OMP_ATOMIC_LOAD:
            case GIMPLE_OMP_ATOMIC_STORE:
               fallthru = true;
               break;

	    case GIMPLE_OMP_RETURN:
	      /* In the case of a GIMPLE_OMP_SECTION, the edge will go
		 somewhere other than the next block.  This will be
		 created later.  */
	      cur_region->exit = bb;
	      if (cur_region->type == GIMPLE_OMP_TASK)
		/* Add an edge corresponding to not scheduling the task
		   immediately.  */
		make_edge (cur_region->entry, bb, EDGE_ABNORMAL);
	      fallthru = cur_region->type != GIMPLE_OMP_SECTION;
	      /* Pop the finished region.  */
	      cur_region = cur_region->outer;
	      break;

	    case GIMPLE_OMP_CONTINUE:
	      cur_region->cont = bb;
	      switch (cur_region->type)
		{
		case GIMPLE_OMP_FOR:
		  /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
		     succs edges as abnormal to prevent splitting
		     them.  */
		  single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
		  /* Make the loopback edge.  */
		  make_edge (bb, single_succ (cur_region->entry),
			     EDGE_ABNORMAL);

		  /* Create an edge from GIMPLE_OMP_FOR to exit, which
		     corresponds to the case that the body of the loop
		     is not executed at all.  */
		  make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
		  make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
		  fallthru = false;
		  break;

		case GIMPLE_OMP_SECTIONS:
		  /* Wire up the edges into and out of the nested sections.  */
		  {
		    basic_block switch_bb = single_succ (cur_region->entry);

		    struct omp_region *i;
		    for (i = cur_region->inner; i ; i = i->next)
		      {
			gcc_assert (i->type == GIMPLE_OMP_SECTION);
			make_edge (switch_bb, i->entry, 0);
			make_edge (i->exit, bb, EDGE_FALLTHRU);
		      }

		    /* Make the loopback edge to the block with
		       GIMPLE_OMP_SECTIONS_SWITCH.  */
		    make_edge (bb, switch_bb, 0);

		    /* Make the edge from the switch to exit.  */
		    make_edge (switch_bb, bb->next_bb, 0);
		    fallthru = false;
		  }
		  break;

		case GIMPLE_OMP_TASK:
		  fallthru = true;
		  break;

		default:
		  gcc_unreachable ();
		}
	      break;

	    case GIMPLE_TRANSACTION:
	      {
		tree abort_label = gimple_transaction_label (last);
		if (abort_label)
		  make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
		fallthru = true;
	      }
	      break;

	    default:
	      gcc_assert (!stmt_ends_bb_p (last));
	      fallthru = true;
	    }
	}
      else
	fallthru = true;

      if (fallthru)
        {
	  make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
	  if (last)
            assign_discriminator (gimple_location (last), bb->next_bb);
	}
    }

  if (root_omp_region)
    free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}
684 
685 /* Trivial hash function for a location_t.  ITEM is a pointer to
686    a hash table entry that maps a location_t to a discriminator.  */
687 
688 static unsigned int
689 locus_map_hash (const void *item)
690 {
691   return ((const struct locus_discrim_map *) item)->locus;
692 }
693 
694 /* Equality function for the locus-to-discriminator map.  VA and VB
695    point to the two hash table entries to compare.  */
696 
697 static int
698 locus_map_eq (const void *va, const void *vb)
699 {
700   const struct locus_discrim_map *a = (const struct locus_discrim_map *) va;
701   const struct locus_discrim_map *b = (const struct locus_discrim_map *) vb;
702   return a->locus == b->locus;
703 }
704 
/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.

   Lazily creates the table entry for LOCUS on first use; each call
   returns a strictly increasing value (starting at 1).  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  /* Stack-allocated key used only for the lookup.  */
  item.locus = locus;
  item.discriminator = 0;
  slot = (struct locus_discrim_map **)
      htab_find_slot_with_hash (discriminator_per_locus, (void *) &item,
                                (hashval_t) locus, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      /* First time we see this locus: allocate a persistent entry.  */
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
732 
733 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */
734 
735 static bool
736 same_line_p (location_t locus1, location_t locus2)
737 {
738   expanded_location from, to;
739 
740   if (locus1 == locus2)
741     return true;
742 
743   from = expand_location (locus1);
744   to = expand_location (locus2);
745 
746   if (from.line != to.line)
747     return false;
748   if (from.file == to.file)
749     return true;
750   return (from.file != NULL
751           && to.file != NULL
752           && filename_cmp (from.file, to.file) == 0);
753 }
754 
/* Assign a unique discriminator value to block BB if it begins at the same
   LOCUS as its predecessor block.

   No-op for an unknown locus or if BB already has a discriminator.  */

static void
assign_discriminator (location_t locus, basic_block bb)
{
  gimple first_in_to_bb, last_in_to_bb;

  if (locus == 0 || bb->discriminator != 0)
    return;

  /* Only assign one if BB actually shares LOCUS's line with either
     its first real statement or its last statement.  */
  first_in_to_bb = first_non_label_stmt (bb);
  last_in_to_bb = last_stmt (bb);
  if ((first_in_to_bb && same_line_p (locus, gimple_location (first_in_to_bb)))
      || (last_in_to_bb && same_line_p (locus, gimple_location (last_in_to_bb))))
    bb->discriminator = next_discriminator_for_locus (locus);
}
772 
/* Create the edges for a GIMPLE_COND starting at block BB.

   Adds the EDGE_TRUE_VALUE and EDGE_FALSE_VALUE edges to the blocks
   holding the then/else labels, records goto loci and discriminators,
   and clears the labels from the statement (they are no longer
   needed once the edges exist).  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;
  location_t entry_locus;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  entry_locus = gimple_location (entry);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  assign_discriminator (entry_locus, then_bb);
  e->goto_locus = gimple_location (then_stmt);
  /* NOTE(review): only the second make_edge result is NULL-checked --
     presumably it can fail when then_bb == else_bb and the edge
     already exists, while the first edge cannot; confirm before
     changing.  */
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    {
      assign_discriminator (entry_locus, else_bb);
      e->goto_locus = gimple_location (else_stmt);
    }

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
812 
813 
/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  Always returns true so traversal continues.  */

static bool
edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
		       void *data ATTRIBUTE_UNUSED)
{
  tree t, next;

  /* Walk the CASE_CHAIN list, unlinking each node; NEXT is saved
     before the link is cleared.  */
  for (t = (tree) *value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  *value = NULL;
  return true;
}
836 
/* Start recording information mapping edges to case labels.
   Must not be called while recording is already active (asserted).
   Paired with end_recording_case_labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}
846 
847 /* Return nonzero if we are recording information for case labels.  */
848 
849 static bool
850 recording_case_labels_p (void)
851 {
852   return (edge_to_cases != NULL);
853 }
854 
/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.

   Also re-groups the case labels of every GIMPLE_SWITCH whose block
   was touched by edge manipulation while recording was active.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  /* Tear the map down first: cleanup clears the CASE_CHAIN links.  */
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK (i);
      /* The block may have been removed since it was recorded.  */
      if (bb)
	{
	  gimple stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (stmt);
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}
877 
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.

   The map is populated lazily: the first query for a given switch T
   enters all of its cases, then the lookup for E is retried.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      CASE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  return (tree) *pointer_map_contains (edge_to_cases, e);
}
919 
/* Create the edges for a GIMPLE_SWITCH starting at block BB.

   One edge per case label; make_edge deduplicates when several cases
   share a destination block.  Each target block also gets a
   discriminator for the switch's locus.  */

static void
make_gimple_switch_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  location_t entry_locus;
  size_t i, n;

  entry_locus = gimple_location (entry);

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
      assign_discriminator (entry_locus, label_bb);
    }
}
941 
942 
/* Return the basic block holding label DEST in function IFUN, or
   NULL if the label's uid is out of range of the label map.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      /* Inserting the label assigns DEST a valid uid.  */
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}
966 
/* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.

   Scans the label prefix of every block and adds an EDGE_ABNORMAL to
   each block headed by a suitable target label: FORCED_LABEL for
   computed gotos, DECL_NONLOCAL for nonlocal gotos from calls.  */

void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (target_bb)
    for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	gimple label_stmt = gsi_stmt (gsi);
	tree target;

	/* Labels only appear at the start of a block; stop at the
	   first non-label statement.  */
	if (gimple_code (label_stmt) != GIMPLE_LABEL)
	  break;

	target = gimple_label_label (label_stmt);

	/* Make an edge to every label block that has been marked as a
	   potential target for a computed goto or a non-local goto.  */
	if ((FORCED_LABEL (target) && !for_call)
	    || (DECL_NONLOCAL (target) && for_call))
	  {
	    make_edge (bb, target_bb, EDGE_ABNORMAL);
	    break;
	  }
      }
}
997 
998 /* Create edges for a goto statement at block BB.  */
999 
1000 static void
1001 make_goto_expr_edges (basic_block bb)
1002 {
1003   gimple_stmt_iterator last = gsi_last_bb (bb);
1004   gimple goto_t = gsi_stmt (last);
1005 
1006   /* A simple GOTO creates normal edges.  */
1007   if (simple_goto_p (goto_t))
1008     {
1009       tree dest = gimple_goto_dest (goto_t);
1010       basic_block label_bb = label_to_block (dest);
1011       edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1012       e->goto_locus = gimple_location (goto_t);
1013       assign_discriminator (e->goto_locus, label_bb);
1014       gsi_remove (&last, true);
1015       return;
1016     }
1017 
1018   /* A computed GOTO creates abnormal edges.  */
1019   make_abnormal_goto_edges (bb, false);
1020 }
1021 
1022 /* Create edges for an asm statement with labels at block BB.  */
1023 
1024 static void
1025 make_gimple_asm_edges (basic_block bb)
1026 {
1027   gimple stmt = last_stmt (bb);
1028   location_t stmt_loc = gimple_location (stmt);
1029   int i, n = gimple_asm_nlabels (stmt);
1030 
1031   for (i = 0; i < n; ++i)
1032     {
1033       tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1034       basic_block label_bb = label_to_block (label);
1035       make_edge (bb, label_bb, 0);
1036       assign_discriminator (stmt_loc, label_bb);
1037     }
1038 }
1039 
1040 /*---------------------------------------------------------------------------
1041 			       Flowgraph analysis
1042 ---------------------------------------------------------------------------*/
1043 
1044 /* Cleanup useless labels in basic blocks.  This is something we wish
1045    to do early because it allows us to group case labels before creating
1046    the edges for the CFG, and it speeds up block statement iterators in
1047    all passes later on.
1048    We rerun this pass after CFG is created, to get rid of the labels that
1049    are no longer referenced.  After then we do not run it any more, since
1050    (almost) no new labels should be created.  */
1051 
1052 /* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;  /* Array indexed by basic block index; allocated at the
		     start of cleanup_dead_labels and freed at its end.  */
1061 
1062 /* Given LABEL return the first label in the same basic block.  */
1063 
1064 static tree
1065 main_block_label (tree label)
1066 {
1067   basic_block bb = label_to_block (label);
1068   tree main_label = label_for_bb[bb->index].label;
1069 
1070   /* label_to_block possibly inserted undefined label into the chain.  */
1071   if (!main_label)
1072     {
1073       label_for_bb[bb->index].label = label;
1074       main_label = label;
1075     }
1076 
1077   label_for_bb[bb->index].used = true;
1078   return main_label;
1079 }
1080 
1081 /* Clean up redundant labels within the exception tree.  */
1082 
static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  /* No EH information: nothing to clean up.  */
  if (cfun->eh == NULL)
    return;

  /* Redirect each landing pad's post_landing_pad label to the leading
     label of its block, keeping EH_LANDING_PAD_NR consistent: the old
     label loses its pad number, the new one takes over lp->index.  */
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  /* Likewise redirect the labels recorded in the EH region tree.  */
  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	/* These region kinds carry no label of their own.  */
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}
1131 
1132 
1133 /* Cleanup redundant labels.  This is a three-step process:
1134      1) Find the leading label for each block.
1135      2) Redirect all references to labels to the leading labels.
1136      3) Cleanup all useless labels.  */
1137 
void
cleanup_dead_labels (void)
{
  basic_block bb;
  /* Per-block leading-label records, indexed by bb->index.  */
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Phase 1: Find a suitable label for each block.  We use the first
     user-defined label if there is one, or otherwise just the first
     label we see.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  gimple stmt = gsi_stmt (i);

	  /* Labels only appear at the start of a block.  */
	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Phase 2: Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  /* Redirect both arms of the conditional, if present.  */
	  label = gimple_cond_true_label (stmt);
	  if (label)
	    {
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_cond_set_true_label (stmt, new_label);
	    }

	  label = gimple_cond_false_label (stmt);
	  if (label)
	    {
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_cond_set_false_label (stmt, new_label);
	    }
	  break;

	case GIMPLE_SWITCH:
	  {
	    size_t i, n = gimple_switch_num_labels (stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    int i, n = gimple_asm_nlabels (stmt);

	    /* Redirect the label operands of an asm goto.  */
	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      label = gimple_goto_dest (stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    /* Transactions carry an optional over-label.  */
	    tree label = gimple_transaction_label (stmt);
	    if (label)
	      {
		tree new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label (stmt, new_label);
	      }
	  }
	  break;

	default:
	  break;
      }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Phase 3: Finally, purge dead labels.  All user-defined labels and
     labels that can be the target of non-local gotos and labels which
     have their address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  gimple stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (stmt);

	  /* Keep the block's chosen label, user labels, and labels
	     with special semantics; delete everything else.  */
	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
1308 
1309 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1310    the ones jumping to the same label.
1311    Eg. three separate entries 1: 2: 3: become one entry 1..3:  */
1312 
void
group_case_labels_stmt (gimple stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  Case labels are
     sorted, and index 0 holds the default, so start at 1.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
	 default case.  A NULL_TREE slot marks the case as deleted;
	 the compression loop below squeezes them out.  */
      if (base_bb == default_bb)
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      /* BASE_HIGH is the upper bound of the range so far (CASE_LOW
	 for a single-valued case).  */
      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  /* BHP1 = base_high + 1: the value the next case's low bound
	     must equal for the ranges to be consecutive.  */
	  double_int bhp1 = tree_to_double_int (base_high) + double_int_one;

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && tree_to_double_int (CASE_LOW (merge_case)) == bhp1)
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      /* Skip slots NULLed out above.  */
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
			       gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
1388 
1389 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1390    and scan the sorted vector of cases.  Combine the ones jumping to the
1391    same label.  */
1392 
1393 void
1394 group_case_labels (void)
1395 {
1396   basic_block bb;
1397 
1398   FOR_EACH_BB (bb)
1399     {
1400       gimple stmt = last_stmt (bb);
1401       if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1402 	group_case_labels_stmt (stmt);
1403     }
1404 }
1405 
1406 /* Checks whether we can merge block B into block A.  */
1407 
static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;

  /* A must flow into B and nowhere else, over an ordinary edge.  */
  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  /* B must be reachable only from A.  */
  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
	break;
      lab = gimple_label_label (stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gsi = gsi_start_phis (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  The
     locus survives only if an adjacent statement already carries it.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
1489 
1490 /* Return true if the var whose chain of uses starts at PTR has no
1491    nondebug uses.  */
1492 bool
1493 has_zero_uses_1 (const ssa_use_operand_t *head)
1494 {
1495   const ssa_use_operand_t *ptr;
1496 
1497   for (ptr = head->next; ptr != head; ptr = ptr->next)
1498     if (!is_gimple_debug (USE_STMT (ptr)))
1499       return false;
1500 
1501   return true;
1502 }
1503 
1504 /* Return true if the var whose chain of uses starts at PTR has a
1505    single nondebug use.  Set USE_P and STMT to that single nondebug
1506    use, if so, or to NULL otherwise.  */
1507 bool
1508 single_imm_use_1 (const ssa_use_operand_t *head,
1509 		  use_operand_p *use_p, gimple *stmt)
1510 {
1511   ssa_use_operand_t *ptr, *single_use = 0;
1512 
1513   for (ptr = head->next; ptr != head; ptr = ptr->next)
1514     if (!is_gimple_debug (USE_STMT (ptr)))
1515       {
1516 	if (single_use)
1517 	  {
1518 	    single_use = NULL;
1519 	    break;
1520 	  }
1521 	single_use = ptr;
1522       }
1523 
1524   if (use_p)
1525     *use_p = single_use;
1526 
1527   if (stmt)
1528     *stmt = single_use ? single_use->loc.stmt : NULL;
1529 
1530   return !!single_use;
1531 }
1532 
1533 /* Replaces all uses of NAME by VAL.  */
1534 
void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
	  && stmt_ends_bb_p (stmt))
	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL)
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      /* Non-PHI statements may need folding and EH cleanup after the
	 operand replacement.  */
      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple orig_stmt = stmt;
	  size_t i;

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  /* Folding may replace the statement; refresh STMT so the
	     EH check and update below see the current one.  */
	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  /* Every use of NAME must have been rewritten above.  */
  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;
      loop_iterator li;

      FOR_EACH_LOOP (li, loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
1613 
1614 /* Merge block B into block A.  */
1615 
static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  /* NOTE(review): relies on A being B's single predecessor (as
     checked by gimple_can_merge_blocks_p) — the PHI handling below
     depends on that.  */
  gimple_stmt_iterator last, gsi, psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = (virtual_operand_p (def)
			       || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
	 of loop exit phi nodes.  */
      if (current_loops
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
	  && !virtual_operand_p (def)
	  && TREE_CODE (use) == SSA_NAME
	  && a->loop_father != b->loop_father)
	may_replace_uses = false;

      if (!may_replace_uses)
	{
	  gcc_assert (!virtual_operand_p (def));

	  /* Note that just emitting the copies is fine -- there is no problem
	     with ordering of phi nodes.  This is because A is the single
	     predecessor of B, therefore results of the phi nodes cannot
	     appear as arguments of the phi nodes.  */
	  copy = gimple_build_assign (def, use);
	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
          remove_phi_node (&psi, false);
	}
      else
        {
	  /* If we deal with a PHI for virtual operands, we can simply
	     propagate these without fussing with folding or updating
	     the stmt.  */
	  if (virtual_operand_p (def))
	    {
	      imm_use_iterator iter;
	      use_operand_p use_p;
	      gimple stmt;

	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, use);

	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
	    }
	  else
            replace_uses_by (def, use);

          remove_phi_node (&psi, true);
        }
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (stmt);
	  int lp_nr;

	  gsi_remove (&gsi, false);

	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos).  So we can not just delete the
	     label.  Instead we move the label to the start of block A.  */
	  if (FORCED_LABEL (label))
	    {
	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
	      gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
	    }
	  /* Other user labels keep around in a form of a debug stmt.  */
	  else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
	    {
	      gimple dbg = gimple_build_debug_bind (label,
						    integer_zero_node,
						    stmt);
	      gimple_debug_bind_reset_value (dbg);
	      gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
	    }

	  /* If the label was a landing pad, dissociate it from the
	     pad — the label statement itself is gone from B now.  */
	  lp_nr = EH_LANDING_PAD_NR (label);
	  if (lp_nr)
	    {
	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
	      lp->post_landing_pad = NULL;
	    }
	}
      else
	{
	  /* Non-label statements migrate to A wholesale.  */
	  gimple_set_bb (stmt, a);
	  gsi_next (&gsi);
	}
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}
1741 
1742 
1743 /* Return the one of two successors of BB that is not reachable by a
1744    complex edge, if there is one.  Else, return BB.  We use
1745    this in optimizations that use post-dominators for their heuristics,
1746    to catch the cases in C++ where function calls are involved.  */
1747 
1748 basic_block
1749 single_noncomplex_succ (basic_block bb)
1750 {
1751   edge e0, e1;
1752   if (EDGE_COUNT (bb->succs) != 2)
1753     return bb;
1754 
1755   e0 = EDGE_SUCC (bb, 0);
1756   e1 = EDGE_SUCC (bb, 1);
1757   if (e0->flags & EDGE_COMPLEX)
1758     return e1->dest;
1759   if (e1->flags & EDGE_COMPLEX)
1760     return e0->dest;
1761 
1762   return bb;
1763 }
1764 
/* CALL is a GIMPLE_CALL statement.  Set the cfun->calls_* flags it implies.  */
1766 
1767 void
1768 notice_special_calls (gimple call)
1769 {
1770   int flags = gimple_call_flags (call);
1771 
1772   if (flags & ECF_MAY_BE_ALLOCA)
1773     cfun->calls_alloca = true;
1774   if (flags & ECF_RETURNS_TWICE)
1775     cfun->calls_setjmp = true;
1776 }
1777 
1778 
1779 /* Clear flags set by notice_special_calls.  Used by dead code removal
1780    to update the flags.  */
1781 
1782 void
1783 clear_special_calls (void)
1784 {
1785   cfun->calls_alloca = false;
1786   cfun->calls_setjmp = false;
1787 }
1788 
1789 /* Remove PHI nodes associated with basic block BB and all edges out of BB.  */
1790 
1791 static void
1792 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1793 {
1794   /* Since this block is no longer reachable, we can just delete all
1795      of its PHI nodes.  */
1796   remove_phi_nodes (bb);
1797 
1798   /* Remove edges to BB's successors.  */
1799   while (EDGE_COUNT (bb->succs) > 0)
1800     remove_edge (EDGE_SUCC (bb, 0));
1801 }
1802 
1803 
1804 /* Remove statements of basic block BB.  */
1805 
static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
	{
	  dump_bb (dump_file, bb, 0, dump_flags);
	  fprintf (dump_file, "\n");
	}
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
	 with it.  */
      if (loop->latch == bb
	  || loop->header == bb)
	free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
	{
	  gimple stmt = gsi_stmt (i);
	  /* Forced and non-local labels must survive the removal, as
	     they may still be referenced; rescue them into the
	     preceding block.  */
	  if (gimple_code (stmt) == GIMPLE_LABEL
	      && (FORCED_LABEL (gimple_label_label (stmt))
		  || DECL_NONLOCAL (gimple_label_label (stmt))))
	    {
	      basic_block new_bb;
	      gimple_stmt_iterator new_gsi;

	      /* A non-reachable non-local label may still be referenced.
		 But it no longer needs to carry the extra semantics of
		 non-locality.  */
	      if (DECL_NONLOCAL (gimple_label_label (stmt)))
		{
		  DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
		  FORCED_LABEL (gimple_label_label (stmt)) = 1;
		}

	      new_bb = bb->prev_bb;
	      new_gsi = gsi_start_bb (new_bb);
	      gsi_remove (&i, false);
	      gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      /* Release SSA definitions if we are in SSA.  Note that we
		 may be called when not in SSA.  For example,
		 final_cleanup calls this function via
		 cleanup_tree_cfg.  */
	      if (gimple_in_ssa_p (cfun))
		release_defs (stmt);

	      gsi_remove (&i, true);
	    }

	  /* After gsi_remove the iterator points past the removed
	     statement; restart from the (new) last statement when it
	     has run off the end, otherwise step backwards.  */
	  if (gsi_end_p (i))
	    i = gsi_last_bb (bb);
	  else
	    gsi_prev (&i);
	}
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  /* Clear the per-block IL pointers now that everything is gone.  */
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}
1886 
1887 
1888 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
1889    predicate VAL, return the edge that will be taken out of the block.
1890    If VAL does not match a unique edge, NULL is returned.  */
1891 
1892 edge
1893 find_taken_edge (basic_block bb, tree val)
1894 {
1895   gimple stmt;
1896 
1897   stmt = last_stmt (bb);
1898 
1899   gcc_assert (stmt);
1900   gcc_assert (is_ctrl_stmt (stmt));
1901 
1902   if (val == NULL)
1903     return NULL;
1904 
1905   if (!is_gimple_min_invariant (val))
1906     return NULL;
1907 
1908   if (gimple_code (stmt) == GIMPLE_COND)
1909     return find_taken_edge_cond_expr (bb, val);
1910 
1911   if (gimple_code (stmt) == GIMPLE_SWITCH)
1912     return find_taken_edge_switch_expr (bb, val);
1913 
1914   if (computed_goto_p (stmt))
1915     {
1916       /* Only optimize if the argument is a label, if the argument is
1917 	 not a label then we can not construct a proper CFG.
1918 
1919          It may be the case that we only need to allow the LABEL_REF to
1920          appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
1921          appear inside a LABEL_EXPR just to be safe.  */
1922       if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
1923 	  && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
1924 	return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
1925       return NULL;
1926     }
1927 
1928   gcc_unreachable ();
1929 }
1930 
1931 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1932    statement, determine which of the outgoing edges will be taken out of the
1933    block.  Return NULL if either edge may be taken.  */
1934 
1935 static edge
1936 find_taken_edge_computed_goto (basic_block bb, tree val)
1937 {
1938   basic_block dest;
1939   edge e = NULL;
1940 
1941   dest = label_to_block (val);
1942   if (dest)
1943     {
1944       e = find_edge (bb, dest);
1945       gcc_assert (e != NULL);
1946     }
1947 
1948   return e;
1949 }
1950 
1951 /* Given a constant value VAL and the entry block BB to a COND_EXPR
1952    statement, determine which of the two edges will be taken out of the
1953    block.  Return NULL if either edge may be taken.  */
1954 
1955 static edge
1956 find_taken_edge_cond_expr (basic_block bb, tree val)
1957 {
1958   edge true_edge, false_edge;
1959 
1960   extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1961 
1962   gcc_assert (TREE_CODE (val) == INTEGER_CST);
1963   return (integer_zerop (val) ? false_edge : true_edge);
1964 }
1965 
1966 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
1967    statement, determine which edge will be taken out of the block.  Return
1968    NULL if any edge may be taken.  */
1969 
1970 static edge
1971 find_taken_edge_switch_expr (basic_block bb, tree val)
1972 {
1973   basic_block dest_bb;
1974   edge e;
1975   gimple switch_stmt;
1976   tree taken_case;
1977 
1978   switch_stmt = last_stmt (bb);
1979   taken_case = find_case_label_for_value (switch_stmt, val);
1980   dest_bb = label_to_block (CASE_LABEL (taken_case));
1981 
1982   e = find_edge (bb, dest_bb);
1983   gcc_assert (e);
1984   return e;
1985 }
1986 
1987 
1988 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
1989    We can make optimal use here of the fact that the case labels are
1990    sorted: We can do a binary search for a case matching VAL.  */
1991 
static tree
find_case_label_for_value (gimple switch_stmt, tree val)
{
  size_t low, high, n = gimple_switch_num_labels (switch_stmt);
  tree default_case = gimple_switch_default_label (switch_stmt);

  /* Binary search over the sorted case labels.  Index 0 holds the
     default label (no CASE_LOW), so the search window starts at
     [0, n) and narrows until only one candidate remains.  */
  for (low = 0, high = n; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = gimple_switch_label (switch_stmt, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      /* Narrow the window: labels with CASE_LOW > VAL lie above,
	 everything else at or below index I.  */
      if (cmp > 0)
	high = i;
      else
	low = i;

      if (CASE_HIGH (t) == NULL)
	{
	  /* A single-valued case label.  */
	  if (cmp == 0)
	    return t;
	}
      else
	{
	  /* A case range.  We can only handle integer ranges.  */
	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
	    return t;
	}
    }

  /* No label matched VAL; the default label handles it.  */
  return default_case;
}
2028 
2029 
2030 /* Dump a basic block on stderr.  */
2031 
2032 void
2033 gimple_debug_bb (basic_block bb)
2034 {
2035   dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2036 }
2037 
2038 
2039 /* Dump basic block with index N on stderr.  */
2040 
2041 basic_block
2042 gimple_debug_bb_n (int n)
2043 {
2044   gimple_debug_bb (BASIC_BLOCK (n));
2045   return BASIC_BLOCK (n);
2046 }
2047 
2048 
2049 /* Dump the CFG on stderr.
2050 
2051    FLAGS are the same used by the tree dumping functions
2052    (see TDF_* in dumpfile.h).  */
2053 
2054 void
2055 gimple_debug_cfg (int flags)
2056 {
2057   gimple_dump_cfg (stderr, flags);
2058 }
2059 
2060 
2061 /* Dump the program showing basic block boundaries on the given FILE.
2062 
2063    FLAGS are the same used by the tree dumping functions (see TDF_* in
2064    tree.h).  */
2065 
2066 void
2067 gimple_dump_cfg (FILE *file, int flags)
2068 {
2069   if (flags & TDF_DETAILS)
2070     {
2071       dump_function_header (file, current_function_decl, flags);
2072       fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2073 	       n_basic_blocks, n_edges, last_basic_block);
2074 
2075       brief_dump_cfg (file, flags | TDF_COMMENT);
2076       fprintf (file, "\n");
2077     }
2078 
2079   if (flags & TDF_STATS)
2080     dump_cfg_stats (file);
2081 
2082   dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2083 }
2084 
2085 
2086 /* Dump CFG statistics on FILE.  */
2087 
void
dump_cfg_stats (FILE *file)
{
  /* High-water mark of merged labels seen so far; static so it
     persists across calls (and thus across functions).  */
  static long max_num_merged_labels = 0;
  unsigned long size, total = 0;
  long num_edges;
  basic_block bb;
  const char * const fmt_str   = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
  const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
  const char *funcname = current_function_name ();

  fprintf (file, "\nCFG Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", "  Number of  ", "Memory");
  fprintf (file, fmt_str, "", "  instances  ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  /* Memory consumed by the basic block structures.  */
  size = n_basic_blocks * sizeof (struct basic_block_def);
  total += size;
  fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
	   SCALE (size), LABEL (size));

  /* Count successor edges only, so each edge is counted exactly once.  */
  num_edges = 0;
  FOR_EACH_BB (bb)
    num_edges += EDGE_COUNT (bb->succs);
  size = num_edges * sizeof (struct edge_def);
  total += size;
  fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
	   LABEL (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  if (cfg_stats.num_merged_labels > max_num_merged_labels)
    max_num_merged_labels = cfg_stats.num_merged_labels;

  fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
	   cfg_stats.num_merged_labels, max_num_merged_labels);

  fprintf (file, "\n");
}
2134 
2135 
2136 /* Dump CFG statistics on stderr.  Keep extern so that it's always
2137    linked in the final executable.  */
2138 
2139 DEBUG_FUNCTION void
2140 debug_cfg_stats (void)
2141 {
2142   dump_cfg_stats (stderr);
2143 }
2144 
2145 /*---------------------------------------------------------------------------
2146 			     Miscellaneous helpers
2147 ---------------------------------------------------------------------------*/
2148 
2149 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2150    flow.  Transfers of control flow associated with EH are excluded.  */
2151 
2152 static bool
2153 call_can_make_abnormal_goto (gimple t)
2154 {
2155   /* If the function has no non-local labels, then a call cannot make an
2156      abnormal transfer of control.  */
2157   if (!cfun->has_nonlocal_label)
2158    return false;
2159 
2160   /* Likewise if the call has no side effects.  */
2161   if (!gimple_has_side_effects (t))
2162     return false;
2163 
2164   /* Likewise if the called function is leaf.  */
2165   if (gimple_call_flags (t) & ECF_LEAF)
2166     return false;
2167 
2168   return true;
2169 }
2170 
2171 
2172 /* Return true if T can make an abnormal transfer of control flow.
2173    Transfers of control flow associated with EH are excluded.  */
2174 
2175 bool
2176 stmt_can_make_abnormal_goto (gimple t)
2177 {
2178   if (computed_goto_p (t))
2179     return true;
2180   if (is_gimple_call (t))
2181     return call_can_make_abnormal_goto (t);
2182   return false;
2183 }
2184 
2185 
2186 /* Return true if T represents a stmt that always transfers control.  */
2187 
2188 bool
2189 is_ctrl_stmt (gimple t)
2190 {
2191   switch (gimple_code (t))
2192     {
2193     case GIMPLE_COND:
2194     case GIMPLE_SWITCH:
2195     case GIMPLE_GOTO:
2196     case GIMPLE_RETURN:
2197     case GIMPLE_RESX:
2198       return true;
2199     default:
2200       return false;
2201     }
2202 }
2203 
2204 
2205 /* Return true if T is a statement that may alter the flow of control
2206    (e.g., a call to a non-returning function).  */
2207 
bool
is_ctrl_altering_stmt (gimple t)
{
  gcc_assert (t);

  switch (gimple_code (t))
    {
    case GIMPLE_CALL:
      {
	int flags = gimple_call_flags (t);

	/* A call alters control flow if it can make an abnormal goto.  */
	if (call_can_make_abnormal_goto (t))
	  return true;

	/* A call also alters control flow if it does not return.  */
	if (flags & ECF_NORETURN)
	  return true;

	/* TM ending statements have backedges out of the transaction.
	   Return true so we split the basic block containing them.
	   Note that the TM_BUILTIN test is merely an optimization.  */
	if ((flags & ECF_TM_BUILTIN)
	    && is_tm_ending_fndecl (gimple_call_fndecl (t)))
	  return true;

	/* BUILT_IN_RETURN call is same as return statement.  */
	if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
	  return true;
      }
      /* An ordinary call falls through; it may still end the block if
	 it can throw (checked below).  */
      break;

    case GIMPLE_EH_DISPATCH:
      /* EH_DISPATCH branches to the individual catch handlers at
	 this level of a try or allowed-exceptions region.  It can
	 fallthru to the next statement as well.  */
      return true;

    case GIMPLE_ASM:
      /* An asm with labels (asm goto) may branch to any of them.  */
      if (gimple_asm_nlabels (t) > 0)
	return true;
      break;

    CASE_GIMPLE_OMP:
      /* OpenMP directives alter control flow.  */
      return true;

    case GIMPLE_TRANSACTION:
      /* A transaction start alters control flow.  */
      return true;

    default:
      break;
    }

  /* If a statement can throw, it alters control flow.  */
  return stmt_can_throw_internal (t);
}
2266 
2267 
2268 /* Return true if T is a simple local goto.  */
2269 
2270 bool
2271 simple_goto_p (gimple t)
2272 {
2273   return (gimple_code (t) == GIMPLE_GOTO
2274 	  && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2275 }
2276 
2277 
2278 /* Return true if STMT should start a new basic block.  PREV_STMT is
2279    the statement preceding STMT.  It is used when STMT is a label or a
2280    case label.  Labels should only start a new basic block if their
2281    previous statement wasn't a label.  Otherwise, sequence of labels
2282    would generate unnecessary basic blocks that only contain a single
2283    label.  */
2284 
2285 static inline bool
2286 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2287 {
2288   if (stmt == NULL)
2289     return false;
2290 
2291   /* Labels start a new basic block only if the preceding statement
2292      wasn't a label of the same type.  This prevents the creation of
2293      consecutive blocks that have nothing but a single label.  */
2294   if (gimple_code (stmt) == GIMPLE_LABEL)
2295     {
2296       /* Nonlocal and computed GOTO targets always start a new block.  */
2297       if (DECL_NONLOCAL (gimple_label_label (stmt))
2298 	  || FORCED_LABEL (gimple_label_label (stmt)))
2299 	return true;
2300 
2301       if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2302 	{
2303 	  if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2304 	    return true;
2305 
2306 	  cfg_stats.num_merged_labels++;
2307 	  return false;
2308 	}
2309       else
2310 	return true;
2311     }
2312 
2313   return false;
2314 }
2315 
2316 
2317 /* Return true if T should end a basic block.  */
2318 
2319 bool
2320 stmt_ends_bb_p (gimple t)
2321 {
2322   return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2323 }
2324 
2325 /* Remove block annotations and other data structures.  */
2326 
2327 void
2328 delete_tree_cfg_annotations (void)
2329 {
2330   vec_free (label_to_block_map);
2331 }
2332 
2333 
2334 /* Return the first statement in basic block BB.  */
2335 
2336 gimple
2337 first_stmt (basic_block bb)
2338 {
2339   gimple_stmt_iterator i = gsi_start_bb (bb);
2340   gimple stmt = NULL;
2341 
2342   while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2343     {
2344       gsi_next (&i);
2345       stmt = NULL;
2346     }
2347   return stmt;
2348 }
2349 
2350 /* Return the first non-label statement in basic block BB.  */
2351 
2352 static gimple
2353 first_non_label_stmt (basic_block bb)
2354 {
2355   gimple_stmt_iterator i = gsi_start_bb (bb);
2356   while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2357     gsi_next (&i);
2358   return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2359 }
2360 
2361 /* Return the last statement in basic block BB.  */
2362 
2363 gimple
2364 last_stmt (basic_block bb)
2365 {
2366   gimple_stmt_iterator i = gsi_last_bb (bb);
2367   gimple stmt = NULL;
2368 
2369   while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2370     {
2371       gsi_prev (&i);
2372       stmt = NULL;
2373     }
2374   return stmt;
2375 }
2376 
2377 /* Return the last statement of an otherwise empty block.  Return NULL
2378    if the block is totally empty, or if it contains more than one
2379    statement.  */
2380 
2381 gimple
2382 last_and_only_stmt (basic_block bb)
2383 {
2384   gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2385   gimple last, prev;
2386 
2387   if (gsi_end_p (i))
2388     return NULL;
2389 
2390   last = gsi_stmt (i);
2391   gsi_prev_nondebug (&i);
2392   if (gsi_end_p (i))
2393     return last;
2394 
2395   /* Empty statements should no longer appear in the instruction stream.
2396      Everything that might have appeared before should be deleted by
2397      remove_useless_stmts, and the optimizers should just gsi_remove
2398      instead of smashing with build_empty_stmt.
2399 
2400      Thus the only thing that should appear here in a block containing
2401      one executable statement is a label.  */
2402   prev = gsi_stmt (i);
2403   if (gimple_code (prev) == GIMPLE_LABEL)
2404     return last;
2405   else
2406     return NULL;
2407 }
2408 
2409 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE.  */
2410 
static void
reinstall_phi_args (edge new_edge, edge old_edge)
{
  edge_var_map_vector *v;
  edge_var_map *vm;
  int i;
  gimple_stmt_iterator phis;

  /* Fetch the (result, def, location) triples that were queued on
     OLD_EDGE; nothing to do if no PHI arguments were queued.  */
  v = redirect_edge_var_map_vector (old_edge);
  if (!v)
    return;

  /* Walk the queued entries and the PHI nodes of NEW_EDGE's
     destination in lock step, re-adding each argument on NEW_EDGE.
     The entries are assumed to be in the same order as the PHIs
     (checked by the assert below).  */
  for (i = 0, phis = gsi_start_phis (new_edge->dest);
       v->iterate (i, &vm) && !gsi_end_p (phis);
       i++, gsi_next (&phis))
    {
      gimple phi = gsi_stmt (phis);
      tree result = redirect_edge_var_map_result (vm);
      tree arg = redirect_edge_var_map_def (vm);

      /* Each queued entry must correspond to the PHI at the same
	 position.  */
      gcc_assert (result == gimple_phi_result (phi));

      add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
    }

  /* The queued data has been consumed; drop it.  */
  redirect_edge_var_map_clear (old_edge);
}
2438 
2439 /* Returns the basic block after which the new basic block created
2440    by splitting edge EDGE_IN should be placed.  Tries to keep the new block
2441    near its "logical" location.  This is of most help to humans looking
2442    at debugging dumps.  */
2443 
2444 static basic_block
2445 split_edge_bb_loc (edge edge_in)
2446 {
2447   basic_block dest = edge_in->dest;
2448   basic_block dest_prev = dest->prev_bb;
2449 
2450   if (dest_prev)
2451     {
2452       edge e = find_edge (dest_prev, dest);
2453       if (e && !(e->flags & EDGE_COMPLEX))
2454 	return edge_in->src;
2455     }
2456   return dest_prev;
2457 }
2458 
2459 /* Split a (typically critical) edge EDGE_IN.  Return the new block.
2460    Abort on abnormal edges.  */
2461 
static basic_block
gimple_split_edge (edge edge_in)
{
  basic_block new_bb, after_bb, dest;
  edge new_edge, e;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  dest = edge_in->dest;

  /* Pick a chain position for the new block that keeps dumps
     readable.  */
  after_bb = split_edge_bb_loc (edge_in);

  new_bb = create_empty_bb (after_bb);
  /* The new block inherits the execution profile of the edge being
     split.  */
  new_bb->frequency = EDGE_FREQUENCY (edge_in);
  new_bb->count = edge_in->count;
  /* The new block unconditionally falls through to the original
     destination.  */
  new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = edge_in->count;

  /* Redirect EDGE_IN to the new block.  This queues the PHI
     arguments formerly on EDGE_IN, which are then reinstated on the
     fallthru edge so DEST's PHIs stay correct.  */
  e = redirect_edge_and_branch (edge_in, new_bb);
  gcc_assert (e == edge_in);
  reinstall_phi_args (new_edge, e);

  return new_bb;
}
2488 
2489 
2490 /* Verify properties of the address expression T with base object BASE.  */
2491 
2492 static tree
2493 verify_address (tree t, tree base)
2494 {
2495   bool old_constant;
2496   bool old_side_effects;
2497   bool new_constant;
2498   bool new_side_effects;
2499 
2500   old_constant = TREE_CONSTANT (t);
2501   old_side_effects = TREE_SIDE_EFFECTS (t);
2502 
2503   recompute_tree_invariant_for_addr_expr (t);
2504   new_side_effects = TREE_SIDE_EFFECTS (t);
2505   new_constant = TREE_CONSTANT (t);
2506 
2507   if (old_constant != new_constant)
2508     {
2509       error ("constant not recomputed when ADDR_EXPR changed");
2510       return t;
2511     }
2512   if (old_side_effects != new_side_effects)
2513     {
2514       error ("side effects not recomputed when ADDR_EXPR changed");
2515       return t;
2516     }
2517 
2518   if (!(TREE_CODE (base) == VAR_DECL
2519 	|| TREE_CODE (base) == PARM_DECL
2520 	|| TREE_CODE (base) == RESULT_DECL))
2521     return NULL_TREE;
2522 
2523   if (DECL_GIMPLE_REG_P (base))
2524     {
2525       error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2526       return base;
2527     }
2528 
2529   return NULL_TREE;
2530 }
2531 
2532 /* Callback for walk_tree, check that all elements with address taken are
2533    properly noticed as such.  The DATA is an int* that is 1 if TP was seen
2534    inside a PHI node.  */
2535 
static tree
verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp, x;

  /* Types need no operand checking; prune the walk.  */
  if (TYPE_P (t))
    *walk_subtrees = 0;

  /* Check operand N for being valid GIMPLE and give error MSG if not.  */
#define CHECK_OP(N, MSG) \
  do { if (!is_gimple_val (TREE_OPERAND (t, N)))		\
       { error (MSG); return TREE_OPERAND (t, N); }} while (0)

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      if (SSA_NAME_IN_FREE_LIST (t))
	{
	  error ("SSA name in freelist but still referenced");
	  return *tp;
	}
      break;

    case INDIRECT_REF:
      /* All indirect loads/stores must be MEM_REFs in gimple.  */
      error ("INDIRECT_REF in gimple IL");
      return t;

    case MEM_REF:
      x = TREE_OPERAND (t, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (x))
	  || !is_gimple_mem_ref_addr (x))
	{
	  error ("invalid first operand of MEM_REF");
	  return x;
	}
      if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid offset operand of MEM_REF");
	  return TREE_OPERAND (t, 1);
	}
      if (TREE_CODE (x) == ADDR_EXPR
	  && (x = verify_address (x, TREE_OPERAND (x, 0))))
	return x;
      *walk_subtrees = 0;
      break;

    case ASSERT_EXPR:
      x = fold (ASSERT_EXPR_COND (t));
      if (x == boolean_false_node)
	{
	  error ("ASSERT_EXPR with an always-false condition");
	  return *tp;
	}
      break;

    case MODIFY_EXPR:
      error ("MODIFY_EXPR not expected while having tuples");
      return *tp;

    case ADDR_EXPR:
      {
	tree tem;

	gcc_assert (is_gimple_address (t));

	/* Skip any references (they will be checked when we recurse down the
	   tree) and ensure that any variable used as a prefix is marked
	   addressable.  */
	for (x = TREE_OPERAND (t, 0);
	     handled_component_p (x);
	     x = TREE_OPERAND (x, 0))
	  ;

	if ((tem = verify_address (t, x)))
	  return tem;

	if (!(TREE_CODE (x) == VAR_DECL
	      || TREE_CODE (x) == PARM_DECL
	      || TREE_CODE (x) == RESULT_DECL))
	  return NULL;

	if (!TREE_ADDRESSABLE (x))
	  {
	    error ("address taken, but ADDRESSABLE bit not set");
	    return x;
	  }

	break;
      }

    case COND_EXPR:
      x = COND_EXPR_COND (t);
      if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
	{
	  error ("non-integral used in condition");
	  return x;
	}
      if (!is_gimple_condexpr (x))
        {
	  error ("invalid conditional operand");
	  return x;
	}
      break;

    case NON_LVALUE_EXPR:
    case TRUTH_NOT_EXPR:
      /* These codes are eliminated during gimplification and must
	 never appear in the gimple IL.  */
      gcc_unreachable ();

    CASE_CONVERT:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
      CHECK_OP (0, "invalid operand to unary operator");
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
      /* We have a nest of references.  Verify that each of the operands
	 that determine where to reference is either a constant or a variable,
	 verify that the base is valid, and then show we've already checked
	 the subtrees.  */
      while (handled_component_p (t))
	{
	  if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
	    CHECK_OP (2, "invalid COMPONENT_REF offset operator");
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      CHECK_OP (1, "invalid array index");
	      if (TREE_OPERAND (t, 2))
		CHECK_OP (2, "invalid array lower bound");
	      if (TREE_OPERAND (t, 3))
		CHECK_OP (3, "invalid array stride");
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF)
	    {
	      if (!host_integerp (TREE_OPERAND (t, 1), 1)
		  || !host_integerp (TREE_OPERAND (t, 2), 1))
		{
		  error ("invalid position or size operand to BIT_FIELD_REF");
		  return t;
		}
	      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
		  && (TYPE_PRECISION (TREE_TYPE (t))
		      != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
		{
		  error ("integral result type precision does not match "
			 "field size of BIT_FIELD_REF");
		  return t;
		}
	      else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
		       && !AGGREGATE_TYPE_P (TREE_TYPE (t))
		       && TYPE_MODE (TREE_TYPE (t)) != BLKmode
		       && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
			   != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
		{
		  error ("mode precision of non-integral result does not "
			 "match field size of BIT_FIELD_REF");
		  return t;
		}
	    }

	  t = TREE_OPERAND (t, 0);
	}

      /* After stripping the reference chain, T is the base object.  */
      if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
	{
	  error ("invalid reference prefix");
	  return t;
	}
      *walk_subtrees = 0;
      break;
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
	 POINTER_PLUS_EXPR. */
      if (POINTER_TYPE_P (TREE_TYPE (t)))
	{
	  error ("invalid operand to plus/minus, type is a pointer");
	  return t;
	}
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case POINTER_PLUS_EXPR:
      /* Check to make sure the first operand is a pointer or reference type. */
      if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
	{
	  error ("invalid operand to pointer plus, first operand is not a pointer");
	  return t;
	}
      /* Check to make sure the second operand is a ptrofftype.  */
      if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid operand to pointer plus, second operand is not an "
		 "integer type of appropriate width");
	  return t;
	}
      /* FALLTHROUGH */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case CONSTRUCTOR:
      /* Constant vector constructors are leaves; skip their elements.  */
      if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	*walk_subtrees = 0;
      break;

    case CASE_LABEL_EXPR:
      /* CASE_CHAIN is cleared after CFG construction (see the file
	 header comment about the edge-to-cases hash table).  */
      if (CASE_CHAIN (t))
	{
	  error ("invalid CASE_CHAIN");
	  return t;
	}
      break;

    default:
      break;
    }
  return NULL;

#undef CHECK_OP
}
2802 
2803 
2804 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2805    Returns true if there is an error, otherwise false.  */
2806 
2807 static bool
2808 verify_types_in_gimple_min_lval (tree expr)
2809 {
2810   tree op;
2811 
2812   if (is_gimple_id (expr))
2813     return false;
2814 
2815   if (TREE_CODE (expr) != TARGET_MEM_REF
2816       && TREE_CODE (expr) != MEM_REF)
2817     {
2818       error ("invalid expression for min lvalue");
2819       return true;
2820     }
2821 
2822   /* TARGET_MEM_REFs are strange beasts.  */
2823   if (TREE_CODE (expr) == TARGET_MEM_REF)
2824     return false;
2825 
2826   op = TREE_OPERAND (expr, 0);
2827   if (!is_gimple_val (op))
2828     {
2829       error ("invalid operand in indirect reference");
2830       debug_generic_stmt (op);
2831       return true;
2832     }
2833   /* Memory references now generally can involve a value conversion.  */
2834 
2835   return false;
2836 }
2837 
2838 /* Verify if EXPR is a valid GIMPLE reference expression.  If
2839    REQUIRE_LVALUE is true verifies it is an lvalue.  Returns true
2840    if there is an error, otherwise false.  */
2841 
static bool
verify_types_in_gimple_reference (tree expr, bool require_lvalue)
{
  /* Walk down the reference chain, checking each component level,
     until we reach the base object.  */
  while (handled_component_p (expr))
    {
      tree op = TREE_OPERAND (expr, 0);

      if (TREE_CODE (expr) == ARRAY_REF
	  || TREE_CODE (expr) == ARRAY_RANGE_REF)
	{
	  /* Index, lower bound and stride (when present) must all be
	     gimple values.  */
	  if (!is_gimple_val (TREE_OPERAND (expr, 1))
	      || (TREE_OPERAND (expr, 2)
		  && !is_gimple_val (TREE_OPERAND (expr, 2)))
	      || (TREE_OPERAND (expr, 3)
		  && !is_gimple_val (TREE_OPERAND (expr, 3))))
	    {
	      error ("invalid operands to array reference");
	      debug_generic_stmt (expr);
	      return true;
	    }
	}

      /* Verify if the reference array element types are compatible.  */
      if (TREE_CODE (expr) == ARRAY_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}
      if (TREE_CODE (expr) == ARRAY_RANGE_REF
	  && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array range reference");
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      if ((TREE_CODE (expr) == REALPART_EXPR
	   || TREE_CODE (expr) == IMAGPART_EXPR)
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in real/imagpart reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      if (TREE_CODE (expr) == COMPONENT_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("type mismatch in component reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
	  return true;
	}

      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
	{
	  /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
	     that their operand is not an SSA name or an invariant when
	     requiring an lvalue (this usually means there is a SRA or IPA-SRA
	     bug).  Otherwise there is nothing to verify, gross mismatches at
	     most invoke undefined behavior.  */
	  if (require_lvalue
	      && (TREE_CODE (op) == SSA_NAME
		  || is_gimple_min_invariant (op)))
	    {
	      error ("conversion of an SSA_NAME on the left hand side");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (TREE_CODE (op) == SSA_NAME
		   && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
	    {
	      error ("conversion of register to a different size");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (!handled_component_p (op))
	    return false;
	}

      expr = op;
    }

  /* EXPR is now the base of the reference; check the two memory
     reference forms it may take.  */
  if (TREE_CODE (expr) == MEM_REF)
    {
      if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
	{
	  error ("invalid address operand in MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
      if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("invalid offset operand in MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
    }
  else if (TREE_CODE (expr) == TARGET_MEM_REF)
    {
      if (!TMR_BASE (expr)
	  || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
	{
	  error ("invalid address operand in TARGET_MEM_REF");
	  return true;
	}
      if (!TMR_OFFSET (expr)
	  || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
	{
	  error ("invalid offset operand in TARGET_MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
    }

  /* An invariant base needs no lvalue check unless one was requested.  */
  return ((require_lvalue || !is_gimple_min_invariant (expr))
	  && verify_types_in_gimple_min_lval (expr));
}
2971 
2972 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
2973    list of pointer-to types that is trivially convertible to DEST.  */
2974 
2975 static bool
2976 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
2977 {
2978   tree src;
2979 
2980   if (!TYPE_POINTER_TO (src_obj))
2981     return true;
2982 
2983   for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
2984     if (useless_type_conversion_p (dest, src))
2985       return true;
2986 
2987   return false;
2988 }
2989 
2990 /* Return true if TYPE1 is a fixed-point type and if conversions to and
2991    from TYPE2 can be handled by FIXED_CONVERT_EXPR.  */
2992 
2993 static bool
2994 valid_fixed_convert_types_p (tree type1, tree type2)
2995 {
2996   return (FIXED_POINT_TYPE_P (type1)
2997 	  && (INTEGRAL_TYPE_P (type2)
2998 	      || SCALAR_FLOAT_TYPE_P (type2)
2999 	      || FIXED_POINT_TYPE_P (type2)));
3000 }
3001 
/* Verify the contents of a GIMPLE_CALL STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_call (gimple stmt)
{
  tree fn = gimple_call_fn (stmt);
  tree fntype, fndecl;
  unsigned i;

  /* An internal call is dispatched on an internal function code and
     must not also carry a call target; any other call needs one.  */
  if (gimple_call_internal_p (stmt))
    {
      if (fn)
	{
	  error ("gimple call has two targets");
	  debug_generic_stmt (fn);
	  return true;
	}
    }
  else
    {
      if (!fn)
	{
	  error ("gimple call has no target");
	  return true;
	}
    }

  /* The target, when present, must be a valid gimple call address.  */
  if (fn && !is_gimple_call_addr (fn))
    {
      error ("invalid function in gimple call");
      debug_generic_stmt (fn);
      return true;
    }

  /* ... and its type must be a pointer to a function or method type.  */
  if (fn
      && (!POINTER_TYPE_P (TREE_TYPE (fn))
	  || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
    {
      error ("non-function in gimple call");
      return true;
    }

  /* A function marked "looping const or pure" must also be marked
     const (TREE_READONLY) or pure.  */
   fndecl = gimple_call_fndecl (stmt);
   if (fndecl
       && TREE_CODE (fndecl) == FUNCTION_DECL
       && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
       && !DECL_PURE_P (fndecl)
       && !TREE_READONLY (fndecl))
     {
       error ("invalid pure const state for function");
       return true;
     }

  /* The LHS, if any, must be an lvalue with verified reference trees.  */
  if (gimple_call_lhs (stmt)
      && (!is_gimple_lvalue (gimple_call_lhs (stmt))
	  || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
    {
      error ("invalid LHS in gimple call");
      return true;
    }

  /* A call that never returns has no value to store.  */
  if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
    {
      error ("LHS in noreturn call");
      return true;
    }

  /* The stored value must trivially convert from the call's return
     type, modulo the pointer exceptions noted below.  */
  fntype = gimple_call_fntype (stmt);
  if (fntype
      && gimple_call_lhs (stmt)
      && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
				     TREE_TYPE (fntype))
      /* ???  At least C++ misses conversions at assignments from
	 void * call results.
	 ???  Java is completely off.  Especially with functions
	 returning java.lang.Object.
	 For now simply allow arbitrary pointer type conversions.  */
      && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
	   && POINTER_TYPE_P (TREE_TYPE (fntype))))
    {
      error ("invalid conversion in gimple call");
      debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
      debug_generic_stmt (TREE_TYPE (fntype));
      return true;
    }

  /* The static chain, if any, must be a gimple value.  */
  if (gimple_call_chain (stmt)
      && !is_gimple_val (gimple_call_chain (stmt)))
    {
      error ("invalid static chain in gimple call");
      debug_generic_stmt (gimple_call_chain (stmt));
      return true;
    }

  /* If there is a static chain argument, this should not be an indirect
     call, and the decl should have DECL_STATIC_CHAIN set.  */
  if (gimple_call_chain (stmt))
    {
      if (!gimple_call_fndecl (stmt))
	{
	  error ("static chain in indirect gimple call");
	  return true;
	}
      /* FN is an ADDR_EXPR here since the call has a fndecl; strip it
	 to get at the FUNCTION_DECL.  */
      fn = TREE_OPERAND (fn, 0);

      if (!DECL_STATIC_CHAIN (fn))
	{
	  error ("static chain with function that doesn%'t use one");
	  return true;
	}
    }

  /* ???  The C frontend passes unpromoted arguments in case it
     didn't see a function declaration before the call.  So for now
     leave the call arguments mostly unverified.  Once we gimplify
     unit-at-a-time we have a chance to fix this.  */

  /* Register-type arguments must be gimple values; aggregate arguments
     must at least be lvalues.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      if ((is_gimple_reg_type (TREE_TYPE (arg))
	   && !is_gimple_val (arg))
	  || (!is_gimple_reg_type (TREE_TYPE (arg))
	      && !is_gimple_lvalue (arg)))
	{
	  error ("invalid argument to gimple call");
	  debug_generic_expr (arg);
	  return true;
	}
    }

  return false;
}
3137 
3138 /* Verifies the gimple comparison with the result type TYPE and
3139    the operands OP0 and OP1.  */
3140 
3141 static bool
3142 verify_gimple_comparison (tree type, tree op0, tree op1)
3143 {
3144   tree op0_type = TREE_TYPE (op0);
3145   tree op1_type = TREE_TYPE (op1);
3146 
3147   if (!is_gimple_val (op0) || !is_gimple_val (op1))
3148     {
3149       error ("invalid operands in gimple comparison");
3150       return true;
3151     }
3152 
3153   /* For comparisons we do not have the operations type as the
3154      effective type the comparison is carried out in.  Instead
3155      we require that either the first operand is trivially
3156      convertible into the second, or the other way around.
3157      Because we special-case pointers to void we allow
3158      comparisons of pointers with the same mode as well.  */
3159   if (!useless_type_conversion_p (op0_type, op1_type)
3160       && !useless_type_conversion_p (op1_type, op0_type)
3161       && (!POINTER_TYPE_P (op0_type)
3162 	  || !POINTER_TYPE_P (op1_type)
3163 	  || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3164     {
3165       error ("mismatching comparison operand types");
3166       debug_generic_expr (op0_type);
3167       debug_generic_expr (op1_type);
3168       return true;
3169     }
3170 
3171   /* The resulting type of a comparison may be an effective boolean type.  */
3172   if (INTEGRAL_TYPE_P (type)
3173       && (TREE_CODE (type) == BOOLEAN_TYPE
3174 	  || TYPE_PRECISION (type) == 1))
3175     {
3176       if (TREE_CODE (op0_type) == VECTOR_TYPE
3177 	  || TREE_CODE (op1_type) == VECTOR_TYPE)
3178         {
3179           error ("vector comparison returning a boolean");
3180           debug_generic_expr (op0_type);
3181           debug_generic_expr (op1_type);
3182           return true;
3183         }
3184     }
3185   /* Or an integer vector type with the same size and element count
3186      as the comparison operand types.  */
3187   else if (TREE_CODE (type) == VECTOR_TYPE
3188 	   && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3189     {
3190       if (TREE_CODE (op0_type) != VECTOR_TYPE
3191 	  || TREE_CODE (op1_type) != VECTOR_TYPE)
3192         {
3193           error ("non-vector operands in vector comparison");
3194           debug_generic_expr (op0_type);
3195           debug_generic_expr (op1_type);
3196           return true;
3197         }
3198 
3199       if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3200 	  || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3201 	      != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type)))))
3202         {
3203           error ("invalid vector comparison resulting type");
3204           debug_generic_expr (type);
3205           return true;
3206         }
3207     }
3208   else
3209     {
3210       error ("bogus comparison result type");
3211       debug_generic_expr (type);
3212       return true;
3213     }
3214 
3215   return false;
3216 }
3217 
/* Verify a gimple assignment statement STMT with an unary rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_unary (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);

  /* Unary operations always produce register values.  */
  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of unary operation");
      return true;
    }

  /* The single operand must be a gimple value.  */
  if (!is_gimple_val (rhs1))
    {
      error ("invalid operand in unary operation");
      return true;
    }

  /* First handle conversions.  */
  switch (rhs_code)
    {
    CASE_CONVERT:
      {
	/* Allow conversions from pointer type to integral type only if
	   there is no sign or zero extension involved.
	   For targets were the precision of ptrofftype doesn't match that
	   of pointers we need to allow arbitrary conversions to ptrofftype.  */
	if ((POINTER_TYPE_P (lhs_type)
	     && INTEGRAL_TYPE_P (rhs1_type))
	    || (POINTER_TYPE_P (rhs1_type)
		&& INTEGRAL_TYPE_P (lhs_type)
		&& (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
		    || ptrofftype_p (sizetype))))
	  return false;

	/* Allow conversion from integral to offset type and vice versa.  */
	if ((TREE_CODE (lhs_type) == OFFSET_TYPE
	     && INTEGRAL_TYPE_P (rhs1_type))
	    || (INTEGRAL_TYPE_P (lhs_type)
		&& TREE_CODE (rhs1_type) == OFFSET_TYPE))
	  return false;

	/* Otherwise assert we are converting between types of the
	   same kind.  */
	if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
	  {
	    error ("invalid types in nop conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case ADDR_SPACE_CONVERT_EXPR:
      {
	/* Both sides must be pointers and the address spaces must
	   actually differ, otherwise a NOP conversion should be used.  */
	if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
	    || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
		== TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
	  {
	    error ("invalid types in address space conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FIXED_CONVERT_EXPR:
      {
	/* At least one direction must be a valid fixed-point
	   conversion pair.  */
	if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
	    && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
	  {
	    error ("invalid types in fixed-point conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FLOAT_EXPR:
      {
	/* Integer-to-float, scalar or elementwise on vectors.  */
	if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
	    && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
	        || !VECTOR_FLOAT_TYPE_P(lhs_type)))
	  {
	    error ("invalid types in conversion to floating point");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

        return false;
      }

    case FIX_TRUNC_EXPR:
      {
	/* Float-to-integer, scalar or elementwise on vectors.  */
        if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
            && (!VECTOR_INTEGER_TYPE_P (lhs_type)
                || !VECTOR_FLOAT_TYPE_P(rhs1_type)))
	  {
	    error ("invalid types in conversion to integer");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

        return false;
      }

    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
      /* FIXME.  */
      return false;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case PAREN_EXPR:
    case NON_LVALUE_EXPR:
    case CONJ_EXPR:
      /* These fall through to the generic no-conversion check below.  */
      break;

    default:
      gcc_unreachable ();
    }

  /* For the remaining codes assert there is no conversion involved.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type))
    {
      error ("non-trivial conversion in unary operation");
      debug_generic_expr (lhs_type);
      debug_generic_expr (rhs1_type);
      return true;
    }

  return false;
}
3371 
/* Verify a gimple assignment statement STMT with a binary rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_binary (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  tree rhs2_type = TREE_TYPE (rhs2);

  /* Binary operations always produce register values.  */
  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of binary operation");
      return true;
    }

  /* Both operands must be gimple values.  */
  if (!is_gimple_val (rhs1)
      || !is_gimple_val (rhs2))
    {
      error ("invalid operands in binary operation");
      return true;
    }

  /* First handle operations that involve different types.  */
  switch (rhs_code)
    {
    case COMPLEX_EXPR:
      {
	/* Build a complex value from two scalar parts.  */
	if (TREE_CODE (lhs_type) != COMPLEX_TYPE
	    || !(INTEGRAL_TYPE_P (rhs1_type)
	         || SCALAR_FLOAT_TYPE_P (rhs1_type))
	    || !(INTEGRAL_TYPE_P (rhs2_type)
	         || SCALAR_FLOAT_TYPE_P (rhs2_type)))
	  {
	    error ("type mismatch in complex expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      {
	/* Shifts and rotates are ok on integral types, fixed point
	   types and integer vector types.  */
	if ((!INTEGRAL_TYPE_P (rhs1_type)
	     && !FIXED_POINT_TYPE_P (rhs1_type)
	     && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
		  && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
	    || (!INTEGRAL_TYPE_P (rhs2_type)
		/* Vector shifts of vectors are also ok.  */
		&& !(TREE_CODE (rhs1_type) == VECTOR_TYPE
		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
		     && TREE_CODE (rhs2_type) == VECTOR_TYPE
		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
	    || !useless_type_conversion_p (lhs_type, rhs1_type))
	  {
	    error ("type mismatch in shift expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:
      {
	/* Whole-vector shifts: the first operand is a vector of an
	   elementary type, the shift count a scalar or integer vector.  */
	if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	    || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
		 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
		 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
		 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
	    || (!INTEGRAL_TYPE_P (rhs2_type)
		&& (TREE_CODE (rhs2_type) != VECTOR_TYPE
		    || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
	    || !useless_type_conversion_p (lhs_type, rhs1_type))
	  {
	    error ("type mismatch in vector shift expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }
	/* For shifting a vector of non-integral components we
	   only allow shifting by a constant multiple of the element size.  */
	if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
	    && (TREE_CODE (rhs2) != INTEGER_CST
		|| !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
					   TYPE_SIZE (TREE_TYPE (rhs1_type)))))
	  {
	    error ("non-element sized vector shift of floating point vector");
	    return true;
	  }

	return false;
      }

    case WIDEN_LSHIFT_EXPR:
      {
	/* A widening shift must leave room: the result must be at
	   least twice as wide as the shifted operand.  */
        if (!INTEGRAL_TYPE_P (lhs_type)
            || !INTEGRAL_TYPE_P (rhs1_type)
            || TREE_CODE (rhs2) != INTEGER_CST
            || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
          {
            error ("type mismatch in widening vector shift expression");
            debug_generic_expr (lhs_type);
            debug_generic_expr (rhs1_type);
            debug_generic_expr (rhs2_type);
            return true;
          }

        return false;
      }

    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      {
	/* Same widening constraint as above, applied elementwise.  */
        if (TREE_CODE (rhs1_type) != VECTOR_TYPE
            || TREE_CODE (lhs_type) != VECTOR_TYPE
            || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
            || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
            || TREE_CODE (rhs2) != INTEGER_CST
            || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
                > TYPE_PRECISION (TREE_TYPE (lhs_type))))
          {
            error ("type mismatch in widening vector shift expression");
            debug_generic_expr (lhs_type);
            debug_generic_expr (rhs1_type);
            debug_generic_expr (rhs2_type);
            return true;
          }

        return false;
      }

    case PLUS_EXPR:
    case MINUS_EXPR:
      {
	tree lhs_etype = lhs_type;
	tree rhs1_etype = rhs1_type;
	tree rhs2_etype = rhs2_type;
	/* For vector operands check the element types instead.  */
	if (TREE_CODE (lhs_type) == VECTOR_TYPE)
	  {
	    if (TREE_CODE (rhs1_type) != VECTOR_TYPE
		|| TREE_CODE (rhs2_type) != VECTOR_TYPE)
	      {
		error ("invalid non-vector operands to vector valued plus");
		return true;
	      }
	    lhs_etype = TREE_TYPE (lhs_type);
	    rhs1_etype = TREE_TYPE (rhs1_type);
	    rhs2_etype = TREE_TYPE (rhs2_type);
	  }
	/* Pointer arithmetic must use POINTER_PLUS_EXPR instead.  */
	if (POINTER_TYPE_P (lhs_etype)
	    || POINTER_TYPE_P (rhs1_etype)
	    || POINTER_TYPE_P (rhs2_etype))
	  {
	    error ("invalid (pointer) operands to plus/minus");
	    return true;
	  }

	/* Continue with generic binary expression handling.  */
	break;
      }

    case POINTER_PLUS_EXPR:
      {
	/* Pointer plus a ptrofftype offset, yielding the same pointer
	   type.  */
	if (!POINTER_TYPE_P (rhs1_type)
	    || !useless_type_conversion_p (lhs_type, rhs1_type)
	    || !ptrofftype_p (rhs2_type))
	  {
	    error ("type mismatch in pointer plus expression");
	    debug_generic_stmt (lhs_type);
	    debug_generic_stmt (rhs1_type);
	    debug_generic_stmt (rhs2_type);
	    return true;
	  }

	return false;
      }

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      /* Truth codes do not exist in gimple; they are lowered during
	 gimplification.  */

      gcc_unreachable ();

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      /* Comparisons are also binary, but the result type is not
	 connected to the operand types.  */
      return verify_gimple_comparison (lhs_type, rhs1, rhs2);

    case WIDEN_MULT_EXPR:
      /* NOTE: unlike the other cases this signals failure without
	 calling error () first, so the caller reports only a generic
	 verification failure for these mismatches.  */
      if (TREE_CODE (lhs_type) != INTEGER_TYPE)
	return true;
      return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
	      || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));

    case WIDEN_SUM_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      /* FIXME.  */
      return false;

    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      /* Continue with generic binary expression handling.  */
      break;

    default:
      gcc_unreachable ();
    }

  /* Generic check: all three types must agree trivially.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type)
      || !useless_type_conversion_p (lhs_type, rhs2_type))
    {
      error ("type mismatch in binary expression");
      debug_generic_stmt (lhs_type);
      debug_generic_stmt (rhs1_type);
      debug_generic_stmt (rhs2_type);
      return true;
    }

  return false;
}
3645 
/* Verify a gimple assignment statement STMT with a ternary rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_ternary (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  tree rhs2_type = TREE_TYPE (rhs2);
  tree rhs3 = gimple_assign_rhs3 (stmt);
  tree rhs3_type = TREE_TYPE (rhs3);

  /* Ternary operations always produce register values.  */
  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of ternary operation");
      return true;
    }

  /* For conditional codes the first operand is a condition expression,
     otherwise all three operands are gimple values.  */
  if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
       ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
      || !is_gimple_val (rhs2)
      || !is_gimple_val (rhs3))
    {
      error ("invalid operands in ternary operation");
      return true;
    }

  /* First handle operations that involve different types.  */
  switch (rhs_code)
    {
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      /* lhs = rhs1 * rhs2 +/- rhs3, where the multiply widens.  */
      if ((!INTEGRAL_TYPE_P (rhs1_type)
	   && !FIXED_POINT_TYPE_P (rhs1_type))
	  || !useless_type_conversion_p (rhs1_type, rhs2_type)
	  || !useless_type_conversion_p (lhs_type, rhs3_type)
	  || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
	  || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
	{
	  error ("type mismatch in widening multiply-accumulate expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}
      break;

    case FMA_EXPR:
      /* Fused multiply-add: all types must agree trivially.  */
      if (!useless_type_conversion_p (lhs_type, rhs1_type)
	  || !useless_type_conversion_p (lhs_type, rhs2_type)
	  || !useless_type_conversion_p (lhs_type, rhs3_type))
	{
	  error ("type mismatch in fused multiply-add expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}
      break;

    case COND_EXPR:
    case VEC_COND_EXPR:
      /* Both selected values must match the result type; rhs1 (the
	 condition) was checked above.  */
      if (!useless_type_conversion_p (lhs_type, rhs2_type)
	  || !useless_type_conversion_p (lhs_type, rhs3_type))
	{
	  error ("type mismatch in conditional expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}
      break;

    case VEC_PERM_EXPR:
      /* Permute rhs1/rhs2 elements as selected by the rhs3 mask.  */
      if (!useless_type_conversion_p (lhs_type, rhs1_type)
	  || !useless_type_conversion_p (lhs_type, rhs2_type))
	{
	  error ("type mismatch in vector permute expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	  || TREE_CODE (rhs2_type) != VECTOR_TYPE
	  || TREE_CODE (rhs3_type) != VECTOR_TYPE)
	{
	  error ("vector types expected in vector permute expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      /* All four vectors must have the same element count.  */
      if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
	  || TYPE_VECTOR_SUBPARTS (rhs2_type)
	     != TYPE_VECTOR_SUBPARTS (rhs3_type)
	  || TYPE_VECTOR_SUBPARTS (rhs3_type)
	     != TYPE_VECTOR_SUBPARTS (lhs_type))
	{
	  error ("vectors with different element number found "
		 "in vector permute expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      /* The mask is an integer vector with element size matching the
	 data vectors' element size.  */
      if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
	  || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
	     != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
	{
	  error ("invalid mask type in vector permute expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      return false;

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
      /* FIXME.  */
      return false;

    default:
      gcc_unreachable ();
    }
  return false;
}
3788 
/* Verify a gimple assignment statement STMT with a single rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_single (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  bool res = false;

  /* A plain copy requires trivially convertible types.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type))
    {
      error ("non-trivial conversion at assignment");
      debug_generic_expr (lhs_type);
      debug_generic_expr (rhs1_type);
      return true;
    }

  /* Clobbers mark the end of a decl's lifetime and thus only apply
     to whole decls.  */
  if (gimple_clobber_p (stmt)
      && !DECL_P (lhs))
    {
      error ("non-decl LHS in clobber statement");
      debug_generic_expr (lhs);
      return true;
    }

  if (handled_component_p (lhs))
    res |= verify_types_in_gimple_reference (lhs, true);

  /* Special codes we cannot handle via their class.  */
  switch (rhs_code)
    {
    case ADDR_EXPR:
      {
	tree op = TREE_OPERAND (rhs1, 0);
	if (!is_gimple_addressable (op))
	  {
	    error ("invalid operand in unary expression");
	    return true;
	  }

	/* Technically there is no longer a need for matching types, but
	   gimple hygiene asks for this check.  In LTO we can end up
	   combining incompatible units and thus end up with addresses
	   of globals that change their type to a common one.  */
	if (!in_lto_p
	    && !types_compatible_p (TREE_TYPE (op),
				    TREE_TYPE (TREE_TYPE (rhs1)))
	    && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
							  TREE_TYPE (op)))
	  {
	    error ("type mismatch in address expression");
	    debug_generic_stmt (TREE_TYPE (rhs1));
	    debug_generic_stmt (TREE_TYPE (op));
	    return true;
	  }

	return verify_types_in_gimple_reference (op, true);
      }

    /* tcc_reference  */
    case INDIRECT_REF:
      /* Gimple only uses MEM_REF / TARGET_MEM_REF for dereferences.  */
      error ("INDIRECT_REF in gimple IL");
      return true;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case TARGET_MEM_REF:
    case MEM_REF:
      /* A store of register type must have a register LHS; a memory
	 reference on the rhs then makes this a load-store pair, which
	 gimple does not allow for register types.  */
      if (!is_gimple_reg (lhs)
	  && is_gimple_reg_type (TREE_TYPE (lhs)))
	{
	  error ("invalid rhs for gimple memory store");
	  debug_generic_stmt (lhs);
	  debug_generic_stmt (rhs1);
	  return true;
	}
      return res || verify_types_in_gimple_reference (rhs1, false);

    /* tcc_constant  */
    case SSA_NAME:
    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case STRING_CST:
      return res;

    /* tcc_declaration  */
    case CONST_DECL:
      return res;
    case VAR_DECL:
    case PARM_DECL:
      /* Same register-type store restriction as for references above.  */
      if (!is_gimple_reg (lhs)
	  && !is_gimple_reg (rhs1)
	  && is_gimple_reg_type (TREE_TYPE (lhs)))
	{
	  error ("invalid rhs for gimple memory store");
	  debug_generic_stmt (lhs);
	  debug_generic_stmt (rhs1);
	  return true;
	}
      return res;

    case CONSTRUCTOR:
      if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
	{
	  unsigned int i;
	  tree elt_i, elt_v, elt_t = NULL_TREE;

	  if (CONSTRUCTOR_NELTS (rhs1) == 0)
	    return res;
	  /* For vector CONSTRUCTORs we require that either it is empty
	     CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
	     (then the element count must be correct to cover the whole
	     outer vector and index must be NULL on all elements, or it is
	     a CONSTRUCTOR of scalar elements, where we as an exception allow
	     smaller number of elements (assuming zero filling) and
	     consecutive indexes as compared to NULL indexes (such
	     CONSTRUCTORs can appear in the IL from FEs).  */
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
	    {
	      /* The first element determines the expected element type;
		 later iterations only compare against it.  */
	      if (elt_t == NULL_TREE)
		{
		  elt_t = TREE_TYPE (elt_v);
		  if (TREE_CODE (elt_t) == VECTOR_TYPE)
		    {
		      /* NOTE(review): this inner elt_t shadows the outer
			 one with the same value, so behavior is unchanged,
			 but the shadowing looks unintentional.  */
		      tree elt_t = TREE_TYPE (elt_v);
		      if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
						      TREE_TYPE (elt_t)))
			{
			  error ("incorrect type of vector CONSTRUCTOR"
				 " elements");
			  debug_generic_stmt (rhs1);
			  return true;
			}
		      else if (CONSTRUCTOR_NELTS (rhs1)
			       * TYPE_VECTOR_SUBPARTS (elt_t)
			       != TYPE_VECTOR_SUBPARTS (rhs1_type))
			{
			  error ("incorrect number of vector CONSTRUCTOR"
				 " elements");
			  debug_generic_stmt (rhs1);
			  return true;
			}
		    }
		  else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
						       elt_t))
		    {
		      error ("incorrect type of vector CONSTRUCTOR elements");
		      debug_generic_stmt (rhs1);
		      return true;
		    }
		  else if (CONSTRUCTOR_NELTS (rhs1)
			   > TYPE_VECTOR_SUBPARTS (rhs1_type))
		    {
		      error ("incorrect number of vector CONSTRUCTOR elements");
		      debug_generic_stmt (rhs1);
		      return true;
		    }
		}
	      else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
		{
		  error ("incorrect type of vector CONSTRUCTOR elements");
		  debug_generic_stmt (rhs1);
		  return true;
		}
	      /* Indexes must be NULL or consecutive from zero (scalar
		 elements only).  */
	      if (elt_i != NULL_TREE
		  && (TREE_CODE (elt_t) == VECTOR_TYPE
		      || TREE_CODE (elt_i) != INTEGER_CST
		      || compare_tree_int (elt_i, i) != 0))
		{
		  error ("vector CONSTRUCTOR with non-NULL element index");
		  debug_generic_stmt (rhs1);
		  return true;
		}
	    }
	}
      return res;
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case WITH_SIZE_EXPR:
      /* FIXME.  */
      return res;

    default:;
    }

  return res;
}
3988 
3989 /* Verify the contents of a GIMPLE_ASSIGN STMT.  Returns true when there
3990    is a problem, otherwise false.  */
3991 
3992 static bool
3993 verify_gimple_assign (gimple stmt)
3994 {
3995   switch (gimple_assign_rhs_class (stmt))
3996     {
3997     case GIMPLE_SINGLE_RHS:
3998       return verify_gimple_assign_single (stmt);
3999 
4000     case GIMPLE_UNARY_RHS:
4001       return verify_gimple_assign_unary (stmt);
4002 
4003     case GIMPLE_BINARY_RHS:
4004       return verify_gimple_assign_binary (stmt);
4005 
4006     case GIMPLE_TERNARY_RHS:
4007       return verify_gimple_assign_ternary (stmt);
4008 
4009     default:
4010       gcc_unreachable ();
4011     }
4012 }
4013 
4014 /* Verify the contents of a GIMPLE_RETURN STMT.  Returns true when there
4015    is a problem, otherwise false.  */
4016 
4017 static bool
4018 verify_gimple_return (gimple stmt)
4019 {
4020   tree op = gimple_return_retval (stmt);
4021   tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4022 
4023   /* We cannot test for present return values as we do not fix up missing
4024      return values from the original source.  */
4025   if (op == NULL)
4026     return false;
4027 
4028   if (!is_gimple_val (op)
4029       && TREE_CODE (op) != RESULT_DECL)
4030     {
4031       error ("invalid operand in return statement");
4032       debug_generic_stmt (op);
4033       return true;
4034     }
4035 
4036   if ((TREE_CODE (op) == RESULT_DECL
4037        && DECL_BY_REFERENCE (op))
4038       || (TREE_CODE (op) == SSA_NAME
4039 	  && SSA_NAME_VAR (op)
4040 	  && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4041 	  && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4042     op = TREE_TYPE (op);
4043 
4044   if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4045     {
4046       error ("invalid conversion in return statement");
4047       debug_generic_stmt (restype);
4048       debug_generic_stmt (TREE_TYPE (op));
4049       return true;
4050     }
4051 
4052   return false;
4053 }
4054 
4055 
4056 /* Verify the contents of a GIMPLE_GOTO STMT.  Returns true when there
4057    is a problem, otherwise false.  */
4058 
4059 static bool
4060 verify_gimple_goto (gimple stmt)
4061 {
4062   tree dest = gimple_goto_dest (stmt);
4063 
4064   /* ???  We have two canonical forms of direct goto destinations, a
4065      bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL.  */
4066   if (TREE_CODE (dest) != LABEL_DECL
4067       && (!is_gimple_val (dest)
4068 	  || !POINTER_TYPE_P (TREE_TYPE (dest))))
4069     {
4070       error ("goto destination is neither a label nor a pointer");
4071       return true;
4072     }
4073 
4074   return false;
4075 }
4076 
/* Verify the contents of a GIMPLE_SWITCH STMT.  Returns true when there
   is a problem, otherwise false.  Checks that the index is an integral
   gimple value, that label 0 is a proper default case, and that the
   remaining case labels are well-formed, of a common type no wider than
   the index type, sorted in ascending order and non-overlapping.  */

static bool
verify_gimple_switch (gimple stmt)
{
  unsigned int i, n;
  tree elt, prev_upper_bound = NULL_TREE;
  tree index_type, elt_type = NULL_TREE;

  /* The switch index must be a gimple value (register or constant).  */
  if (!is_gimple_val (gimple_switch_index (stmt)))
    {
      error ("invalid operand to switch statement");
      debug_generic_stmt (gimple_switch_index (stmt));
      return true;
    }

  /* ... of integral type.  */
  index_type = TREE_TYPE (gimple_switch_index (stmt));
  if (! INTEGRAL_TYPE_P (index_type))
    {
      error ("non-integral type switch statement");
      debug_generic_expr (index_type);
      return true;
    }

  /* Label 0 is the default case; it must carry neither a low nor a
     high bound.  */
  elt = gimple_switch_label (stmt, 0);
  if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
    {
      error ("invalid default case label in switch statement");
      debug_generic_expr (elt);
      return true;
    }

  n = gimple_switch_num_labels (stmt);
  for (i = 1; i < n; i++)
    {
      elt = gimple_switch_label (stmt, i);

      /* A non-default label needs at least a low bound.  */
      if (! CASE_LOW (elt))
	{
	  error ("invalid case label in switch statement");
	  debug_generic_expr (elt);
	  return true;
	}
      /* A range label must have LOW strictly below HIGH.  */
      if (CASE_HIGH (elt)
	  && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
	{
	  error ("invalid case range in switch statement");
	  debug_generic_expr (elt);
	  return true;
	}

      if (elt_type)
	{
	  /* All labels must share the type of the first non-default
	     label (pointer identity of the type node).  */
	  if (TREE_TYPE (CASE_LOW (elt)) != elt_type
	      || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
	    {
	      error ("type mismatch for case label in switch statement");
	      debug_generic_expr (elt);
	      return true;
	    }
	}
      else
	{
	  /* First non-default label: remember its type and check it is
	     not wider than the switch index.  */
	  elt_type = TREE_TYPE (CASE_LOW (elt));
	  if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
	    {
	      error ("type precision mismatch in switch statement");
	      return true;
	    }
	}

      /* Labels must be sorted and must not overlap: each label's low
	 bound has to lie strictly above the previous upper bound.  */
      if (prev_upper_bound)
	{
	  if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
	    {
	      error ("case labels not sorted in switch statement");
	      return true;
	    }
	}

      /* For a plain (non-range) label the low bound doubles as the
	 upper bound.  */
      prev_upper_bound = CASE_HIGH (elt);
      if (! prev_upper_bound)
	prev_upper_bound = CASE_LOW (elt);
    }

  return false;
}
4165 
4166 /* Verify a gimple debug statement STMT.
4167    Returns true if anything is wrong.  */
4168 
4169 static bool
4170 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4171 {
4172   /* There isn't much that could be wrong in a gimple debug stmt.  A
4173      gimple debug bind stmt, for example, maps a tree, that's usually
4174      a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4175      component or member of an aggregate type, to another tree, that
4176      can be an arbitrary expression.  These stmts expand into debug
4177      insns, and are converted to debug notes by var-tracking.c.  */
4178   return false;
4179 }
4180 
4181 /* Verify a gimple label statement STMT.
4182    Returns true if anything is wrong.  */
4183 
4184 static bool
4185 verify_gimple_label (gimple stmt)
4186 {
4187   tree decl = gimple_label_label (stmt);
4188   int uid;
4189   bool err = false;
4190 
4191   if (TREE_CODE (decl) != LABEL_DECL)
4192     return true;
4193 
4194   uid = LABEL_DECL_UID (decl);
4195   if (cfun->cfg
4196       && (uid == -1 || (*label_to_block_map)[uid] != gimple_bb (stmt)))
4197     {
4198       error ("incorrect entry in label_to_block_map");
4199       err |= true;
4200     }
4201 
4202   uid = EH_LANDING_PAD_NR (decl);
4203   if (uid)
4204     {
4205       eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4206       if (decl != lp->post_landing_pad)
4207 	{
4208 	  error ("incorrect setting of landing pad number");
4209 	  err |= true;
4210 	}
4211     }
4212 
4213   return err;
4214 }
4215 
/* Verify the GIMPLE statement STMT.  Returns true if there is an
   error, otherwise false.  Statement kinds without tree operands are
   accepted as-is; the rest are dispatched to per-code checkers.  */

static bool
verify_gimple_stmt (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return verify_gimple_assign (stmt);

    case GIMPLE_LABEL:
      return verify_gimple_label (stmt);

    case GIMPLE_CALL:
      return verify_gimple_call (stmt);

    case GIMPLE_COND:
      /* The condition of a GIMPLE_COND is always a comparison code,
	 never a bare boolean operand.  */
      if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
	{
	  error ("invalid comparison code in gimple cond");
	  return true;
	}
      /* The true/false destination labels, when present, must be
	 LABEL_DECLs.  */
      if (!(!gimple_cond_true_label (stmt)
	    || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
	  || !(!gimple_cond_false_label (stmt)
	       || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
	{
	  error ("invalid labels in gimple cond");
	  return true;
	}

      /* The comparison itself must be valid at boolean type.  */
      return verify_gimple_comparison (boolean_type_node,
				       gimple_cond_lhs (stmt),
				       gimple_cond_rhs (stmt));

    case GIMPLE_GOTO:
      return verify_gimple_goto (stmt);

    case GIMPLE_SWITCH:
      return verify_gimple_switch (stmt);

    case GIMPLE_RETURN:
      return verify_gimple_return (stmt);

    case GIMPLE_ASM:
      /* Inline asm operands are not verified here.  */
      return false;

    case GIMPLE_TRANSACTION:
      return verify_gimple_transaction (stmt);

    /* Tuples that do not have tree operands.  */
    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
    case GIMPLE_RESX:
    case GIMPLE_EH_DISPATCH:
    case GIMPLE_EH_MUST_NOT_THROW:
      return false;

    CASE_GIMPLE_OMP:
      /* OpenMP directives are validated by the FE and never operated
	 on by the optimizers.  Furthermore, GIMPLE_OMP_FOR may contain
	 non-gimple expressions when the main index variable has had
	 its address taken.  This does not affect the loop itself
	 because the header of an GIMPLE_OMP_FOR is merely used to determine
	 how to setup the parallel iteration.  */
      return false;

    case GIMPLE_DEBUG:
      return verify_gimple_debug (stmt);

    default:
      gcc_unreachable ();
    }
}
4291 
/* Verify the contents of a GIMPLE_PHI.  Returns true if there is a problem,
   and false otherwise.  Checks that the result is an SSA name, that all
   arguments are present and agree with the result on being virtual or
   real operands, and (under ENABLE_TYPES_CHECKING) that argument types
   are trivially convertible to the result type.  */

static bool
verify_gimple_phi (gimple phi)
{
  bool err = false;
  unsigned i;
  tree phi_result = gimple_phi_result (phi);
  bool virtual_p;

  if (!phi_result)
    {
      error ("invalid PHI result");
      return true;
    }

  /* The result must be an SSA name; a virtual PHI must be based on
     the function's single virtual operand.  */
  virtual_p = virtual_operand_p (phi_result);
  if (TREE_CODE (phi_result) != SSA_NAME
      || (virtual_p
	  && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
    {
      error ("invalid PHI result");
      err = true;
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree t = gimple_phi_arg_def (phi, i);

      if (!t)
	{
	  error ("missing PHI def");
	  err |= true;
	  continue;
	}
      /* Addressable variables do have SSA_NAMEs but they
	 are not considered gimple values.  */
      else if ((TREE_CODE (t) == SSA_NAME
		&& virtual_p != virtual_operand_p (t))
	       || (virtual_p
		   && (TREE_CODE (t) != SSA_NAME
		       || SSA_NAME_VAR (t) != gimple_vop (cfun)))
	       || (!virtual_p
		   && !is_gimple_val (t)))
	{
	  error ("invalid PHI argument");
	  debug_generic_expr (t);
	  err |= true;
	}
#ifdef ENABLE_TYPES_CHECKING
      /* Each argument type must be trivially convertible to the PHI
	 result type.  */
      if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
	{
	  error ("incompatible types in PHI argument %u", i);
	  debug_generic_stmt (TREE_TYPE (phi_result));
	  debug_generic_stmt (TREE_TYPE (t));
	  err |= true;
	}
#endif
    }

  return err;
}
4355 
4356 /* Verify the GIMPLE statements inside the sequence STMTS.  */
4357 
4358 static bool
4359 verify_gimple_in_seq_2 (gimple_seq stmts)
4360 {
4361   gimple_stmt_iterator ittr;
4362   bool err = false;
4363 
4364   for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4365     {
4366       gimple stmt = gsi_stmt (ittr);
4367 
4368       switch (gimple_code (stmt))
4369         {
4370 	case GIMPLE_BIND:
4371 	  err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
4372 	  break;
4373 
4374 	case GIMPLE_TRY:
4375 	  err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4376 	  err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4377 	  break;
4378 
4379 	case GIMPLE_EH_FILTER:
4380 	  err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4381 	  break;
4382 
4383 	case GIMPLE_EH_ELSE:
4384 	  err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
4385 	  err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
4386 	  break;
4387 
4388 	case GIMPLE_CATCH:
4389 	  err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
4390 	  break;
4391 
4392 	case GIMPLE_TRANSACTION:
4393 	  err |= verify_gimple_transaction (stmt);
4394 	  break;
4395 
4396 	default:
4397 	  {
4398 	    bool err2 = verify_gimple_stmt (stmt);
4399 	    if (err2)
4400 	      debug_gimple_stmt (stmt);
4401 	    err |= err2;
4402 	  }
4403 	}
4404     }
4405 
4406   return err;
4407 }
4408 
4409 /* Verify the contents of a GIMPLE_TRANSACTION.  Returns true if there
4410    is a problem, otherwise false.  */
4411 
4412 static bool
4413 verify_gimple_transaction (gimple stmt)
4414 {
4415   tree lab = gimple_transaction_label (stmt);
4416   if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4417     return true;
4418   return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4419 }
4420 
4421 
4422 /* Verify the GIMPLE statements inside the statement list STMTS.  */
4423 
4424 DEBUG_FUNCTION void
4425 verify_gimple_in_seq (gimple_seq stmts)
4426 {
4427   timevar_push (TV_TREE_STMT_VERIFY);
4428   if (verify_gimple_in_seq_2 (stmts))
4429     internal_error ("verify_gimple failed");
4430   timevar_pop (TV_TREE_STMT_VERIFY);
4431 }
4432 
4433 /* Return true when the T can be shared.  */
4434 
4435 bool
4436 tree_node_can_be_shared (tree t)
4437 {
4438   if (IS_TYPE_OR_DECL_P (t)
4439       || is_gimple_min_invariant (t)
4440       || TREE_CODE (t) == SSA_NAME
4441       || t == error_mark_node
4442       || TREE_CODE (t) == IDENTIFIER_NODE)
4443     return true;
4444 
4445   if (TREE_CODE (t) == CASE_LABEL_EXPR)
4446     return true;
4447 
4448   if (DECL_P (t))
4449     return true;
4450 
4451   return false;
4452 }
4453 
4454 /* Called via walk_tree.  Verify tree sharing.  */
4455 
4456 static tree
4457 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4458 {
4459   struct pointer_set_t *visited = (struct pointer_set_t *) data;
4460 
4461   if (tree_node_can_be_shared (*tp))
4462     {
4463       *walk_subtrees = false;
4464       return NULL;
4465     }
4466 
4467   if (pointer_set_insert (visited, *tp))
4468     return *tp;
4469 
4470   return NULL;
4471 }
4472 
4473 /* Called via walk_gimple_stmt.  Verify tree sharing.  */
4474 
4475 static tree
4476 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4477 {
4478   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4479   return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4480 }
4481 
4482 static bool eh_error_found;
4483 static int
4484 verify_eh_throw_stmt_node (void **slot, void *data)
4485 {
4486   struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4487   struct pointer_set_t *visited = (struct pointer_set_t *) data;
4488 
4489   if (!pointer_set_contains (visited, node->stmt))
4490     {
4491       error ("dead STMT in EH table");
4492       debug_gimple_stmt (node->stmt);
4493       eh_error_found = true;
4494     }
4495   return 1;
4496 }
4497 
4498 /* Verify if the location LOCs block is in BLOCKS.  */
4499 
4500 static bool
4501 verify_location (pointer_set_t *blocks, location_t loc)
4502 {
4503   tree block = LOCATION_BLOCK (loc);
4504   if (block != NULL_TREE
4505       && !pointer_set_contains (blocks, block))
4506     {
4507       error ("location references block not in block tree");
4508       return true;
4509     }
4510   if (block != NULL_TREE)
4511     return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4512   return false;
4513 }
4514 
4515 /* Called via walk_tree.  Verify locations of expressions.  */
4516 
4517 static tree
4518 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4519 {
4520   struct pointer_set_t *blocks = (struct pointer_set_t *) data;
4521 
4522   if (TREE_CODE (*tp) == VAR_DECL
4523       && DECL_DEBUG_EXPR_IS_FROM (*tp))
4524     {
4525       tree t = DECL_DEBUG_EXPR (*tp);
4526       tree addr = walk_tree (&t, verify_expr_location_1, blocks, NULL);
4527       if (addr)
4528 	return addr;
4529     }
4530 
4531   if (!EXPR_P (*tp))
4532     {
4533       *walk_subtrees = false;
4534       return NULL;
4535     }
4536 
4537   location_t loc = EXPR_LOCATION (*tp);
4538   if (verify_location (blocks, loc))
4539     return *tp;
4540 
4541   return NULL;
4542 }
4543 
4544 /* Called via walk_gimple_op.  Verify locations of expressions.  */
4545 
4546 static tree
4547 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4548 {
4549   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4550   return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4551 }
4552 
4553 /* Insert all subblocks of BLOCK into BLOCKS and recurse.  */
4554 
4555 static void
4556 collect_subblocks (pointer_set_t *blocks, tree block)
4557 {
4558   tree t;
4559   for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4560     {
4561       pointer_set_insert (blocks, t);
4562       collect_subblocks (blocks, t);
4563     }
4564 }
4565 
/* Verify the GIMPLE statements in the CFG of FN: per-statement and
   per-PHI well-formedness, tree-node sharing, source locations against
   the BLOCK tree, and the EH throw table.  Aborts compilation via
   internal_error when any check fails.  */

DEBUG_FUNCTION void
verify_gimple_in_cfg (struct function *fn)
{
  basic_block bb;
  bool err = false;
  struct pointer_set_t *visited, *visited_stmts, *blocks;

  timevar_push (TV_TREE_STMT_VERIFY);
  visited = pointer_set_create ();
  visited_stmts = pointer_set_create ();

  /* Collect all BLOCKs referenced by the BLOCK tree of FN.  */
  blocks = pointer_set_create ();
  if (DECL_INITIAL (fn->decl))
    {
      pointer_set_insert (blocks, DECL_INITIAL (fn->decl));
      collect_subblocks (blocks, DECL_INITIAL (fn->decl));
    }

  FOR_EACH_BB_FN (bb, fn)
    {
      gimple_stmt_iterator gsi;

      /* First pass over this block: verify the PHI nodes.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple phi = gsi_stmt (gsi);
	  bool err2 = false;
	  unsigned i;

	  pointer_set_insert (visited_stmts, phi);

	  if (gimple_bb (phi) != bb)
	    {
	      error ("gimple_bb (phi) is set to a wrong basic block");
	      err2 = true;
	    }

	  err2 |= verify_gimple_phi (phi);

	  /* Only PHI arguments have locations.  */
	  if (gimple_location (phi) != UNKNOWN_LOCATION)
	    {
	      error ("PHI node with location");
	      err2 = true;
	    }

	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree arg = gimple_phi_arg_def (phi, i);
	      tree addr = walk_tree (&arg, verify_node_sharing_1,
				     visited, NULL);
	      if (addr)
		{
		  error ("incorrect sharing of tree nodes");
		  debug_generic_expr (addr);
		  err2 |= true;
		}
	      location_t loc = gimple_phi_arg_location (phi, i);
	      if (virtual_operand_p (gimple_phi_result (phi))
		  && loc != UNKNOWN_LOCATION)
		{
		  error ("virtual PHI with argument locations");
		  err2 = true;
		}
	      addr = walk_tree (&arg, verify_expr_location_1, blocks, NULL);
	      if (addr)
		{
		  debug_generic_expr (addr);
		  err2 = true;
		}
	      err2 |= verify_location (blocks, loc);
	    }

	  if (err2)
	    debug_gimple_stmt (phi);
	  err |= err2;
	}

      /* Second pass: verify the ordinary statements.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  bool err2 = false;
	  struct walk_stmt_info wi;
	  tree addr;
	  int lp_nr;

	  pointer_set_insert (visited_stmts, stmt);

	  if (gimple_bb (stmt) != bb)
	    {
	      error ("gimple_bb (stmt) is set to a wrong basic block");
	      err2 = true;
	    }

	  err2 |= verify_gimple_stmt (stmt);
	  err2 |= verify_location (blocks, gimple_location (stmt));

	  /* Check tree-node sharing across the statement's operands.  */
	  memset (&wi, 0, sizeof (wi));
	  wi.info = (void *) visited;
	  addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
	  if (addr)
	    {
	      error ("incorrect sharing of tree nodes");
	      debug_generic_expr (addr);
	      err2 |= true;
	    }

	  /* Check expression locations against the collected BLOCKs.  */
	  memset (&wi, 0, sizeof (wi));
	  wi.info = (void *) blocks;
	  addr = walk_gimple_op (stmt, verify_expr_location, &wi);
	  if (addr)
	    {
	      debug_generic_expr (addr);
	      err2 |= true;
	    }

	  /* ???  Instead of not checking these stmts at all the walker
	     should know its context via wi.  */
	  if (!is_gimple_debug (stmt)
	      && !is_gimple_omp (stmt))
	    {
	      memset (&wi, 0, sizeof (wi));
	      addr = walk_gimple_op (stmt, verify_expr, &wi);
	      if (addr)
		{
		  debug_generic_expr (addr);
		  inform (gimple_location (stmt), "in statement");
		  err2 |= true;
		}
	    }

	  /* If the statement is marked as part of an EH region, then it is
	     expected that the statement could throw.  Verify that when we
	     have optimizations that simplify statements such that we prove
	     that they cannot throw, that we update other data structures
	     to match.  */
	  lp_nr = lookup_stmt_eh_lp (stmt);
	  if (lp_nr != 0)
	    {
	      if (!stmt_could_throw_p (stmt))
		{
		  error ("statement marked for throw, but doesn%'t");
		  err2 |= true;
		}
	      else if (lp_nr > 0
		       && !gsi_one_before_end_p (gsi)
		       && stmt_can_throw_internal (stmt))
		{
		  error ("statement marked for throw in middle of block");
		  err2 |= true;
		}
	    }

	  if (err2)
	    debug_gimple_stmt (stmt);
	  err |= err2;
	}
    }

  /* Every statement recorded in the EH throw table must have been seen
     above; verify_eh_throw_stmt_node sets eh_error_found otherwise.  */
  eh_error_found = false;
  if (get_eh_throw_stmt_table (cfun))
    htab_traverse (get_eh_throw_stmt_table (cfun),
		   verify_eh_throw_stmt_node,
		   visited_stmts);

  if (err || eh_error_found)
    internal_error ("verify_gimple failed");

  pointer_set_destroy (visited);
  pointer_set_destroy (visited_stmts);
  pointer_set_destroy (blocks);
  verify_histograms ();
  timevar_pop (TV_TREE_STMT_VERIFY);
}
4742 
4743 
/* Verifies that the flow information is OK: labels sit at block heads,
   no control flow occurs mid-block, and the outgoing edge flags of each
   block match its final statement.  Returns nonzero if any error was
   found.  */

static int
gimple_verify_flow_info (void)
{
  int err = 0;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple stmt;
  edge e;
  edge_iterator ei;

  /* The artificial entry and exit blocks never carry statements.  */
  if (ENTRY_BLOCK_PTR->il.gimple.seq || ENTRY_BLOCK_PTR->il.gimple.phi_nodes)
    {
      error ("ENTRY_BLOCK has IL associated with it");
      err = 1;
    }

  if (EXIT_BLOCK_PTR->il.gimple.seq || EXIT_BLOCK_PTR->il.gimple.phi_nodes)
    {
      error ("EXIT_BLOCK has IL associated with it");
      err = 1;
    }

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
	error ("fallthru to exit from bb %d", e->src->index);
	err = 1;
      }

  FOR_EACH_BB (bb)
    {
      bool found_ctrl_stmt = false;

      stmt = NULL;

      /* Skip labels on the start of basic block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  tree label;
	  gimple prev_stmt = stmt;

	  stmt = gsi_stmt (gsi);

	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  /* Nonlocal and EH landing-pad labels must be the very first
	     label of their block.  */
	  label = gimple_label_label (stmt);
	  if (prev_stmt && DECL_NONLOCAL (label))
	    {
	      error ("nonlocal label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
		       bb->index);
	      err = 1;
	    }

	  if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
	    {
	      error ("EH landing pad label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
		       bb->index);
	      err = 1;
	    }

	  if (label_to_block (label) != bb)
	    {
	      error ("label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " to block does not match in bb %d",
		       bb->index);
	      err = 1;
	    }

	  if (decl_function_context (label) != current_function_decl)
	    {
	      error ("label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " has incorrect context in bb %d",
		       bb->index);
	      err = 1;
	    }
	}

      /* Verify that body of basic block BB is free of control flow.  */
      for (; !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  if (found_ctrl_stmt)
	    {
	      error ("control flow in the middle of basic block %d",
		     bb->index);
	      err = 1;
	    }

	  if (stmt_ends_bb_p (stmt))
	    found_ctrl_stmt = true;

	  if (gimple_code (stmt) == GIMPLE_LABEL)
	    {
	      error ("label ");
	      print_generic_expr (stderr, gimple_label_label (stmt), 0);
	      fprintf (stderr, " in the middle of basic block %d", bb->index);
	      err = 1;
	    }
	}

      /* Now check the outgoing edge flags against the last statement.  */
      gsi = gsi_last_bb (bb);
      if (gsi_end_p (gsi))
	continue;

      stmt = gsi_stmt (gsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      err |= verify_eh_edges (stmt);

      if (is_ctrl_stmt (stmt))
	{
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & EDGE_FALLTHRU)
	      {
		error ("fallthru edge after a control statement in bb %d",
		       bb->index);
		err = 1;
	      }
	}

      if (gimple_code (stmt) != GIMPLE_COND)
	{
	  /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
	     after anything else but if statement.  */
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
	      {
		error ("true/false edge after a non-GIMPLE_COND in bb %d",
		       bb->index);
		err = 1;
	      }
	}

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    edge true_edge;
	    edge false_edge;

	    /* A condition must end the block with exactly one true and
	       one false edge and nothing else.  */
	    extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

	    if (!true_edge
		|| !false_edge
		|| !(true_edge->flags & EDGE_TRUE_VALUE)
		|| !(false_edge->flags & EDGE_FALSE_VALUE)
		|| (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| EDGE_COUNT (bb->succs) >= 3)
	      {
		error ("wrong outgoing edge flags at end of bb %d",
		       bb->index);
		err = 1;
	      }
	  }
	  break;

	case GIMPLE_GOTO:
	  /* Simple gotos are removed when the CFG is built; only
	     computed gotos may remain, and all their edges must be
	     abnormal.  */
	  if (simple_goto_p (stmt))
	    {
	      error ("explicit goto at end of bb %d", bb->index);
	      err = 1;
	    }
	  else
	    {
	      /* FIXME.  We should double check that the labels in the
		 destination blocks have their address taken.  */
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
				 | EDGE_FALSE_VALUE))
		    || !(e->flags & EDGE_ABNORMAL))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = 1;
		  }
	    }
	  break;

	case GIMPLE_CALL:
	  /* Only __builtin_return calls act like returns here.  */
	  if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
	    break;
	  /* ... fallthru ... */
	case GIMPLE_RETURN:
	  /* A return must have a single plain edge to the exit block.  */
	  if (!single_succ_p (bb)
	      || (single_succ_edge (bb)->flags
		  & (EDGE_FALLTHRU | EDGE_ABNORMAL
		     | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
	    {
	      error ("wrong outgoing edge flags at end of bb %d", bb->index);
	      err = 1;
	    }
	  if (single_succ (bb) != EXIT_BLOCK_PTR)
	    {
	      error ("return edge does not point to exit in bb %d",
		     bb->index);
	      err = 1;
	    }
	  break;

	case GIMPLE_SWITCH:
	  {
	    tree prev;
	    edge e;
	    size_t i, n;

	    n = gimple_switch_num_labels (stmt);

	    /* Mark all the destination basic blocks.  The aux field is
	       used as scratch: 1 = label target, 2 = edge seen; it is
	       cleared again before leaving this case.  */
	    for (i = 0; i < n; ++i)
	      {
		tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
		basic_block label_bb = label_to_block (lab);
		gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
		label_bb->aux = (void *)1;
	      }

	    /* Verify that the case labels are sorted.  */
	    prev = gimple_switch_label (stmt, 0);
	    for (i = 1; i < n; ++i)
	      {
		tree c = gimple_switch_label (stmt, i);
		if (!CASE_LOW (c))
		  {
		    error ("found default case not at the start of "
			   "case vector");
		    err = 1;
		    continue;
		  }
		if (CASE_LOW (prev)
		    && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
		  {
		    error ("case labels not sorted: ");
		    print_generic_expr (stderr, prev, 0);
		    fprintf (stderr," is greater than ");
		    print_generic_expr (stderr, c, 0);
		    fprintf (stderr," but comes before it.\n");
		    err = 1;
		  }
		prev = c;
	      }
	    /* VRP will remove the default case if it can prove it will
	       never be executed.  So do not verify there always exists
	       a default case here.  */

	    /* Every outgoing edge must lead to some case label's block.  */
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      {
		if (!e->dest->aux)
		  {
		    error ("extra outgoing edge %d->%d",
			   bb->index, e->dest->index);
		    err = 1;
		  }

		e->dest->aux = (void *)2;
		if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
				 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = 1;
		  }
	      }

	    /* Check that we have all of them.  */
	    for (i = 0; i < n; ++i)
	      {
		tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
		basic_block label_bb = label_to_block (lab);

		if (label_bb->aux != (void *)2)
		  {
		    error ("missing edge %i->%i", bb->index, label_bb->index);
		    err = 1;
		  }
	      }

	    /* Clear the scratch marks again.  */
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      e->dest->aux = (void *)0;
	  }
	  break;

	case GIMPLE_EH_DISPATCH:
	  err |= verify_eh_dispatch_edge (stmt);
	  break;

	default:
	  break;
	}
    }

  if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
    verify_dominators (CDI_DOMINATORS);

  return err;
}
5052 
5053 
/* Updates phi nodes after creating a forwarder block joined
   by edge FALLTHRU.  The forwarder (FALLTHRU->src) took over the PHI
   nodes of the original block (FALLTHRU->dest); new PHIs are created
   in the destination so redirected predecessor edges keep working.  */

static void
gimple_make_forwarder_block (edge fallthru)
{
  edge e;
  edge_iterator ei;
  basic_block dummy, bb;
  tree var;
  gimple_stmt_iterator gsi;

  dummy = fallthru->src;
  bb = fallthru->dest;

  /* With a single predecessor the existing PHIs need no adjustment.  */
  if (single_pred_p (bb))
    return;

  /* If we redirected a branch we must create new PHI nodes at the
     start of BB.  */
  for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi, new_phi;

      phi = gsi_stmt (gsi);
      var = gimple_phi_result (phi);
      /* The new PHI in BB takes over the old result VAR; the PHI in
	 the forwarder gets a fresh SSA name, which in turn feeds the
	 new PHI along FALLTHRU.  Note the order: the new result is
	 installed on PHI before its (old) result is used as the
	 FALLTHRU argument.  */
      new_phi = create_phi_node (var, bb);
      gimple_phi_set_result (phi, copy_ssa_name (var, phi));
      add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
		   UNKNOWN_LOCATION);
    }

  /* Add the arguments we have stored on edges.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e == fallthru)
	continue;

      flush_pending_stmts (e);
    }
}
5095 
5096 
/* Return a non-special label in the head of basic block BLOCK.
   Create one if it doesn't exist.  A usable label found later in the
   label sequence is moved to the front so subsequent lookups find it
   immediately.  */

tree
gimple_block_label (basic_block bb)
{
  gimple_stmt_iterator i, s = gsi_start_bb (bb);
  bool first = true;
  tree label;
  gimple stmt;

  /* Scan the leading run of GIMPLE_LABELs for one that is not a
     nonlocal label.  */
  for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) != GIMPLE_LABEL)
	break;
      label = gimple_label_label (stmt);
      if (!DECL_NONLOCAL (label))
	{
	  /* Hoist the label to the block head unless it already is
	     the first statement.  */
	  if (!first)
	    gsi_move_before (&i, &s);
	  return label;
	}
    }

  /* No usable label present: create an artificial one and insert it
     at the head of the block.  */
  label = create_artificial_label (UNKNOWN_LOCATION);
  stmt = gimple_build_label (label);
  gsi_insert_before (&s, stmt, GSI_NEW_STMT);
  return label;
}
5127 
5128 
/* Attempt to perform edge redirection by replacing a possibly complex
   jump instruction by a goto or by removing the jump completely.
   This can apply only if all edges now point to the same block.  The
   parameters and return values are equivalent to
   redirect_edge_and_branch.  */

static edge
gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
{
  basic_block src = e->src;
  gimple_stmt_iterator i;
  gimple stmt;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  */
  if (EDGE_COUNT (src->succs) != 2
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.  */
      || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
    return NULL;

  /* An empty source block has no jump to replace.  */
  i = gsi_last_bb (src);
  if (gsi_end_p (i))
    return NULL;

  stmt = gsi_stmt (i);

  /* Only conditional jumps and switches can be deleted and replaced
     by a single fallthru edge.  */
  if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
    {
      gsi_remove (&i, true);
      e = ssa_redirect_edge (e, target);
      e->flags = EDGE_FALLTHRU;
      return e;
    }

  return NULL;
}
5166 
5167 
/* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
   edge representing the redirected branch.  Adjusts the control
   statement ending E->src (cond, switch, asm goto, return, ...) so it
   transfers to DEST, then performs the CFG-level redirection.  */

static edge
gimple_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block bb = e->src;
  gimple_stmt_iterator gsi;
  edge ret;
  gimple stmt;

  /* Abnormal edges cannot be redirected.  */
  if (e->flags & EDGE_ABNORMAL)
    return NULL;

  /* Redirecting to the current destination is a no-op; report failure
     per the redirect_edge_and_branch contract.  */
  if (e->dest == dest)
    return NULL;

  /* EH edges have dedicated redirection machinery.  */
  if (e->flags & EDGE_EH)
    return redirect_eh_edge (e, dest);

  if (e->src != ENTRY_BLOCK_PTR)
    {
      /* First try the cheap route: drop the control statement entirely
	 when every successor of BB would end up in DEST.  */
      ret = gimple_try_redirect_by_replacing_jump (e, dest);
      if (ret)
	return ret;
    }

  gsi = gsi_last_bb (bb);
  stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);

  switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
    {
    case GIMPLE_COND:
      /* For COND_EXPR, we only need to redirect the edge.  */
      break;

    case GIMPLE_GOTO:
      /* No non-abnormal edges should lead from a non-simple goto, and
	 simple ones should be represented implicitly.  */
      gcc_unreachable ();

    case GIMPLE_SWITCH:
      {
	tree label = gimple_block_label (dest);
        tree cases = get_cases_for_edge (e, stmt);

	/* If we have a list of cases associated with E, then use it
	   as it's a lot faster than walking the entire case vector.  */
	if (cases)
	  {
	    edge e2 = find_edge (e->src, dest);
	    tree last, first;

	    /* Retarget every case on E's chain to DEST's label.  */
	    first = cases;
	    while (cases)
	      {
		last = cases;
		CASE_LABEL (cases) = label;
		cases = CASE_CHAIN (cases);
	      }

	    /* If there was already an edge in the CFG, then we need
	       to move all the cases associated with E to E2.  */
	    if (e2)
	      {
		tree cases2 = get_cases_for_edge (e2, stmt);

		/* Splice E's whole chain onto the front of E2's.  */
		CASE_CHAIN (last) = CASE_CHAIN (cases2);
		CASE_CHAIN (cases2) = first;
	      }
	    bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
	  }
	else
	  {
	    size_t i, n = gimple_switch_num_labels (stmt);

	    /* No cached cases; walk the whole label vector and retarget
	       each case that pointed at the old destination.  */
	    for (i = 0; i < n; i++)
	      {
		tree elt = gimple_switch_label (stmt, i);
		if (label_to_block (CASE_LABEL (elt)) == e->dest)
		  CASE_LABEL (elt) = label;
	      }
	  }
      }
      break;

    case GIMPLE_ASM:
      {
	int i, n = gimple_asm_nlabels (stmt);
	tree label = NULL;

	/* Retarget every asm-goto label operand that jumped to the old
	   destination; create DEST's label lazily on first match.  */
	for (i = 0; i < n; ++i)
	  {
	    tree cons = gimple_asm_label_op (stmt, i);
	    if (label_to_block (TREE_VALUE (cons)) == e->dest)
	      {
		if (!label)
		  label = gimple_block_label (dest);
		TREE_VALUE (cons) = label;
	      }
	  }

	/* If we didn't find any label matching the former edge in the
	   asm labels, we must be redirecting the fallthrough
	   edge.  */
	gcc_assert (label || (e->flags & EDGE_FALLTHRU));
      }
      break;

    case GIMPLE_RETURN:
      /* Drop the return statement; the edge becomes a plain fallthru
	 into DEST.  */
      gsi_remove (&gsi, true);
      e->flags |= EDGE_FALLTHRU;
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_FOR:
      /* The edges from OMP constructs can be simply redirected.  */
      break;

    case GIMPLE_EH_DISPATCH:
      if (!(e->flags & EDGE_FALLTHRU))
	redirect_eh_dispatch_edge (stmt, e, dest);
      break;

    case GIMPLE_TRANSACTION:
      /* The ABORT edge has a stored label associated with it, otherwise
	 the edges are simply redirectable.  */
      if (e->flags == 0)
	gimple_transaction_set_label (stmt, gimple_block_label (dest));
      break;

    default:
      /* Otherwise it must be a fallthru edge, and we don't need to
	 do anything besides redirecting it.  */
      gcc_assert (e->flags & EDGE_FALLTHRU);
      break;
    }

  /* Update/insert PHI nodes as necessary.  */

  /* Now update the edges in the CFG.  */
  e = ssa_redirect_edge (e, dest);

  return e;
}
5315 
5316 /* Returns true if it is possible to remove edge E by redirecting
5317    it to the destination of the other edge from E->src.  */
5318 
5319 static bool
5320 gimple_can_remove_branch_p (const_edge e)
5321 {
5322   if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5323     return false;
5324 
5325   return true;
5326 }
5327 
5328 /* Simple wrapper, as we can always redirect fallthru edges.  */
5329 
5330 static basic_block
5331 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5332 {
5333   e = gimple_redirect_edge_and_branch (e, dest);
5334   gcc_assert (e);
5335 
5336   return NULL;
5337 }
5338 
5339 
/* Splits basic block BB after statement STMT (but at least after the
   labels).  If STMT is NULL, BB is split just after the labels.
   Returns the new block, which receives BB's outgoing edges and every
   statement after the split point.  */

static basic_block
gimple_split_block (basic_block bb, void *stmt)
{
  gimple_stmt_iterator gsi;
  gimple_stmt_iterator gsi_tgt;
  gimple act;
  gimple_seq list;
  basic_block new_bb;
  edge e;
  edge_iterator ei;

  new_bb = create_empty_bb (bb);

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* Splitting "after a label" means the same as splitting after the
     whole label run, i.e. just after the labels.  */
  if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
    stmt = NULL;

  /* Move everything from GSI to the new basic block.  Position GSI on
     the first statement that should move: either the first non-label
     (STMT == NULL) or the statement following STMT.  */
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      act = gsi_stmt (gsi);
      if (gimple_code (act) == GIMPLE_LABEL)
	continue;

      if (!stmt)
	break;

      if (stmt == act)
	{
	  gsi_next (&gsi);
	  break;
	}
    }

  /* Nothing after the split point; the new block stays empty.  */
  if (gsi_end_p (gsi))
    return new_bb;

  /* Split the statement list - avoid re-creating new containers as this
     brings ugly quadratic memory consumption in the inliner.
     (We are still quadratic since we need to update stmt BB pointers,
     sadly.)  */
  gsi_split_seq_before (&gsi, &list);
  set_bb_seq (new_bb, list);
  for (gsi_tgt = gsi_start (list);
       !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
    gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);

  return new_bb;
}
5397 
5398 
5399 /* Moves basic block BB after block AFTER.  */
5400 
5401 static bool
5402 gimple_move_block_after (basic_block bb, basic_block after)
5403 {
5404   if (bb->prev_bb == after)
5405     return true;
5406 
5407   unlink_block (bb);
5408   link_block (bb, after);
5409 
5410   return true;
5411 }
5412 
5413 
5414 /* Return TRUE if block BB has no executable statements, otherwise return
5415    FALSE.  */
5416 
5417 bool
5418 gimple_empty_block_p (basic_block bb)
5419 {
5420   /* BB must have no executable statements.  */
5421   gimple_stmt_iterator gsi = gsi_after_labels (bb);
5422   if (phi_nodes (bb))
5423     return false;
5424   if (gsi_end_p (gsi))
5425     return true;
5426   if (is_gimple_debug (gsi_stmt (gsi)))
5427     gsi_next_nondebug (&gsi);
5428   return gsi_end_p (gsi);
5429 }
5430 
5431 
/* Split a basic block if it ends with a conditional branch and if the
   other part of the block is not empty.  Returns the block holding the
   branch on success, NULL if BB does not end in a cond/switch.  */

static basic_block
gimple_split_block_before_cond_jump (basic_block bb)
{
  gimple last, split_point;
  gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
  if (gsi_end_p (gsi))
    return NULL;
  last = gsi_stmt (gsi);
  /* Only conditional jumps and switches are split points.  */
  if (gimple_code (last) != GIMPLE_COND
      && gimple_code (last) != GIMPLE_SWITCH)
    return NULL;
  /* Split after the statement preceding the branch.  If the branch is
     the only (non-debug) statement, SPLIT_POINT is NULL and the split
     happens just after the labels (see gimple_split_block).  */
  gsi_prev_nondebug (&gsi);
  split_point = gsi_stmt (gsi);
  return split_block (bb, split_point)->dest;
}
5450 
5451 
5452 /* Return true if basic_block can be duplicated.  */
5453 
5454 static bool
5455 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5456 {
5457   return true;
5458 }
5459 
/* Create a duplicate of the basic block BB.  NOTE: This does not
   preserve SSA form.  The copy is appended just before the exit block;
   new SSA names are created for every definition and recorded as
   replacement mappings for a later SSA update.  */

static basic_block
gimple_duplicate_bb (basic_block bb)
{
  basic_block new_bb;
  gimple_stmt_iterator gsi, gsi_tgt;
  gimple_seq phis = phi_nodes (bb);
  gimple phi, stmt, copy;

  new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);

  /* Copy the PHI nodes.  We ignore PHI node arguments here because
     the incoming edges have not been setup yet.  */
  for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      phi = gsi_stmt (gsi);
      copy = create_phi_node (NULL_TREE, new_bb);
      /* Give the copy a fresh result name mapped from the original.  */
      create_new_def_for (gimple_phi_result (phi), copy,
			  gimple_phi_result_ptr (copy));
    }

  gsi_tgt = gsi_start_bb (new_bb);
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      def_operand_p def_p;
      ssa_op_iter op_iter;
      tree lhs;

      stmt = gsi_stmt (gsi);
      /* Labels are not copied; the new block gets its own.  */
      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      /* Don't duplicate label debug stmts.  */
      if (gimple_debug_bind_p (stmt)
	  && TREE_CODE (gimple_debug_bind_get_var (stmt))
	     == LABEL_DECL)
	continue;

      /* Create a new copy of STMT and duplicate STMT's virtual
	 operands.  */
      copy = gimple_copy (stmt);
      gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);

      maybe_duplicate_eh_stmt (copy, stmt);
      gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);

      /* When copying around a stmt writing into a local non-user
	 aggregate, make sure it won't share stack slot with other
	 vars.  */
      lhs = gimple_get_lhs (stmt);
      if (lhs && TREE_CODE (lhs) != SSA_NAME)
	{
	  tree base = get_base_address (lhs);
	  if (base
	      && (TREE_CODE (base) == VAR_DECL
		  || TREE_CODE (base) == RESULT_DECL)
	      && DECL_IGNORED_P (base)
	      && !TREE_STATIC (base)
	      && !DECL_EXTERNAL (base)
	      && (TREE_CODE (base) != VAR_DECL
		  || !DECL_HAS_VALUE_EXPR_P (base)))
	    DECL_NONSHAREABLE (base) = 1;
	}

      /* Create new names for all the definitions created by COPY and
	 add replacement mappings for each new name.  */
      FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
	create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
    }

  return new_bb;
}
5534 
/* Adds phi node arguments for edge E_COPY after basic block duplication.
   The argument values are looked up on the corresponding edge E between
   the original blocks; blocks that are part of the duplicated region
   carry the BB_DUPLICATED flag.  */

static void
add_phi_args_after_copy_edge (edge e_copy)
{
  basic_block bb, bb_copy = e_copy->src, dest;
  edge e;
  edge_iterator ei;
  gimple phi, phi_copy;
  tree def;
  gimple_stmt_iterator psi, psi_copy;

  /* No PHIs in the destination means nothing to fill in.  */
  if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
    return;

  /* Map the copied source/destination back to their originals.  */
  bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;

  if (e_copy->dest->flags & BB_DUPLICATED)
    dest = get_bb_original (e_copy->dest);
  else
    dest = e_copy->dest;

  e = find_edge (bb, dest);
  if (!e)
    {
      /* During loop unrolling the target of the latch edge is copied.
	 In this case we are not looking for edge to dest, but to
	 duplicated block whose original was dest.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if ((e->dest->flags & BB_DUPLICATED)
	      && get_bb_original (e->dest) == dest)
	    break;
	}

      gcc_assert (e != NULL);
    }

  /* Walk original and copied PHIs in lockstep, transplanting the
     argument that the original PHI receives over E.  */
  for (psi = gsi_start_phis (e->dest),
       psi_copy = gsi_start_phis (e_copy->dest);
       !gsi_end_p (psi);
       gsi_next (&psi), gsi_next (&psi_copy))
    {
      phi = gsi_stmt (psi);
      phi_copy = gsi_stmt (psi_copy);
      def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      add_phi_arg (phi_copy, def, e_copy,
		   gimple_phi_arg_location_from_edge (phi, e));
    }
}
5585 
5586 
5587 /* Basic block BB_COPY was created by code duplication.  Add phi node
5588    arguments for edges going out of BB_COPY.  The blocks that were
5589    duplicated have BB_DUPLICATED set.  */
5590 
5591 void
5592 add_phi_args_after_copy_bb (basic_block bb_copy)
5593 {
5594   edge e_copy;
5595   edge_iterator ei;
5596 
5597   FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5598     {
5599       add_phi_args_after_copy_edge (e_copy);
5600     }
5601 }
5602 
5603 /* Blocks in REGION_COPY array of length N_REGION were created by
5604    duplication of basic blocks.  Add phi node arguments for edges
5605    going from these blocks.  If E_COPY is not NULL, also add
5606    phi node arguments for its destination.*/
5607 
5608 void
5609 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5610 			 edge e_copy)
5611 {
5612   unsigned i;
5613 
5614   for (i = 0; i < n_region; i++)
5615     region_copy[i]->flags |= BB_DUPLICATED;
5616 
5617   for (i = 0; i < n_region; i++)
5618     add_phi_args_after_copy_bb (region_copy[i]);
5619   if (e_copy)
5620     add_phi_args_after_copy_edge (e_copy);
5621 
5622   for (i = 0; i < n_region; i++)
5623     region_copy[i]->flags &= ~BB_DUPLICATED;
5624 }
5625 
/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
   important exit edge EXIT.  By important we mean that no SSA name defined
   inside region is live over the other exit edges of the region.  All entry
   edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
   to the duplicate of the region.  Dominance and loop information is
   updated, but not the SSA web.  The new basic blocks are stored to
   REGION_COPY in the same order as they had in REGION, provided that
   REGION_COPY is not NULL.
   The function returns false if it is unable to copy the region,
   true otherwise.  */

bool
gimple_duplicate_sese_region (edge entry, edge exit,
			    basic_block *region, unsigned n_region,
			    basic_block *region_copy)
{
  unsigned i;
  bool free_region_copy = false, copying_header = false;
  struct loop *loop = entry->dest->loop_father;
  edge exit_copy;
  vec<basic_block> doms;
  edge redirected;
  int total_freq = 0, entry_freq = 0;
  gcov_type total_count = 0, entry_count = 0;

  if (!can_copy_bbs_p (region, n_region))
    return false;

  /* Some sanity checking.  Note that we do not check for all possible
     misuses of the functions.  I.e. if you ask to copy something weird,
     it will work, but the state of structures probably will not be
     correct.  */
  for (i = 0; i < n_region; i++)
    {
      /* We do not handle subloops, i.e. all the blocks must belong to the
	 same loop.  */
      if (region[i]->loop_father != loop)
	return false;

      /* The loop header may only appear as the region entry.  */
      if (region[i] != entry->dest
	  && region[i] == loop->header)
	return false;
    }

  set_loop_copy (loop, loop);

  /* In case the function is used for loop header copying (which is the primary
     use), ensure that EXIT and its copy will be new latch and entry edges.  */
  if (loop->header == entry->dest)
    {
      copying_header = true;
      set_loop_copy (loop, loop_outer (loop));

      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
	return false;

      for (i = 0; i < n_region; i++)
	if (region[i] != exit->src
	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
	  return false;
    }

  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  /* Record blocks outside the region that are dominated by something
     inside.  */
  /* NOTE(review): this create (0) is immediately overwritten by the
     assignment from get_dominated_by_region below and looks redundant;
     confirm against vec API before removing.  */
  doms.create (0);
  initialize_original_copy_tables ();

  doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);

  /* Gather profile data so the original and copied blocks can be
     rescaled to split the incoming counts/frequencies between them.  */
  if (entry->dest->count)
    {
      total_count = entry->dest->count;
      entry_count = entry->count;
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (entry_count > total_count)
	entry_count = total_count;
    }
  else
    {
      total_freq = entry->dest->frequency;
      entry_freq = EDGE_FREQUENCY (entry);
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (total_freq == 0)
	total_freq = 1;
      else if (entry_freq > total_freq)
	entry_freq = total_freq;
    }

  copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
	    split_edge_bb_loc (entry));
  if (total_count)
    {
      scale_bbs_frequencies_gcov_type (region, n_region,
				       total_count - entry_count,
				       total_count);
      scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
				       total_count);
    }
  else
    {
      scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
				 total_freq);
      scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
    }

  if (copying_header)
    {
      loop->header = exit->dest;
      loop->latch = exit->src;
    }

  /* Redirect the entry and add the phi node arguments.  */
  redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
  gcc_assert (redirected != NULL);
  flush_pending_stmts (entry);

  /* Concerning updating of dominators:  We must recount dominators
     for entry block and its copy.  Anything that is outside of the
     region, but was dominated by something inside needs recounting as
     well.  */
  set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
  doms.safe_push (get_bb_original (entry->dest));
  iterate_fix_dominators (CDI_DOMINATORS, doms, false);
  doms.release ();

  /* Add the other PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, NULL);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
5768 
5769 /* Checks if BB is part of the region defined by N_REGION BBS.  */
5770 static bool
5771 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
5772 {
5773   unsigned int n;
5774 
5775   for (n = 0; n < n_region; n++)
5776     {
5777      if (bb == bbs[n])
5778        return true;
5779     }
5780   return false;
5781 }
5782 
5783 /* Duplicates REGION consisting of N_REGION blocks.  The new blocks
5784    are stored to REGION_COPY in the same order in that they appear
5785    in REGION, if REGION_COPY is not NULL.  ENTRY is the entry to
5786    the region, EXIT an exit from it.  The condition guarding EXIT
5787    is moved to ENTRY.  Returns true if duplication succeeds, false
5788    otherwise.
5789 
5790    For example,
5791 
5792    some_code;
5793    if (cond)
5794      A;
5795    else
5796      B;
5797 
5798    is transformed to
5799 
5800    if (cond)
5801      {
5802        some_code;
5803        A;
5804      }
5805    else
5806      {
5807        some_code;
5808        B;
5809      }
5810 */
5811 
5812 bool
5813 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
5814 			  basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
5815 			  basic_block *region_copy ATTRIBUTE_UNUSED)
5816 {
5817   unsigned i;
5818   bool free_region_copy = false;
5819   struct loop *loop = exit->dest->loop_father;
5820   struct loop *orig_loop = entry->dest->loop_father;
5821   basic_block switch_bb, entry_bb, nentry_bb;
5822   vec<basic_block> doms;
5823   int total_freq = 0, exit_freq = 0;
5824   gcov_type total_count = 0, exit_count = 0;
5825   edge exits[2], nexits[2], e;
5826   gimple_stmt_iterator gsi;
5827   gimple cond_stmt;
5828   edge sorig, snew;
5829   basic_block exit_bb;
5830   gimple_stmt_iterator psi;
5831   gimple phi;
5832   tree def;
5833   struct loop *target, *aloop, *cloop;
5834 
5835   gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5836   exits[0] = exit;
5837   exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5838 
5839   if (!can_copy_bbs_p (region, n_region))
5840     return false;
5841 
5842   initialize_original_copy_tables ();
5843   set_loop_copy (orig_loop, loop);
5844 
5845   target= loop;
5846   for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
5847     {
5848       if (bb_part_of_region_p (aloop->header, region, n_region))
5849 	{
5850 	  cloop = duplicate_loop (aloop, target);
5851 	  duplicate_subloops (aloop, cloop);
5852 	}
5853     }
5854 
5855   if (!region_copy)
5856     {
5857       region_copy = XNEWVEC (basic_block, n_region);
5858       free_region_copy = true;
5859     }
5860 
5861   gcc_assert (!need_ssa_update_p (cfun));
5862 
5863   /* Record blocks outside the region that are dominated by something
5864      inside.  */
5865   doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5866 
5867   if (exit->src->count)
5868     {
5869       total_count = exit->src->count;
5870       exit_count = exit->count;
5871       /* Fix up corner cases, to avoid division by zero or creation of negative
5872 	 frequencies.  */
5873       if (exit_count > total_count)
5874 	exit_count = total_count;
5875     }
5876   else
5877     {
5878       total_freq = exit->src->frequency;
5879       exit_freq = EDGE_FREQUENCY (exit);
5880       /* Fix up corner cases, to avoid division by zero or creation of negative
5881 	 frequencies.  */
5882       if (total_freq == 0)
5883 	total_freq = 1;
5884       if (exit_freq > total_freq)
5885 	exit_freq = total_freq;
5886     }
5887 
5888   copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5889 	    split_edge_bb_loc (exit));
5890   if (total_count)
5891     {
5892       scale_bbs_frequencies_gcov_type (region, n_region,
5893 				       total_count - exit_count,
5894 				       total_count);
5895       scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5896 				       total_count);
5897     }
5898   else
5899     {
5900       scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5901 				 total_freq);
5902       scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5903     }
5904 
5905   /* Create the switch block, and put the exit condition to it.  */
5906   entry_bb = entry->dest;
5907   nentry_bb = get_bb_copy (entry_bb);
5908   if (!last_stmt (entry->src)
5909       || !stmt_ends_bb_p (last_stmt (entry->src)))
5910     switch_bb = entry->src;
5911   else
5912     switch_bb = split_edge (entry);
5913   set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5914 
5915   gsi = gsi_last_bb (switch_bb);
5916   cond_stmt = last_stmt (exit->src);
5917   gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5918   cond_stmt = gimple_copy (cond_stmt);
5919 
5920   gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
5921 
5922   sorig = single_succ_edge (switch_bb);
5923   sorig->flags = exits[1]->flags;
5924   snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5925 
5926   /* Register the new edge from SWITCH_BB in loop exit lists.  */
5927   rescan_loop_exit (snew, true, false);
5928 
5929   /* Add the PHI node arguments.  */
5930   add_phi_args_after_copy (region_copy, n_region, snew);
5931 
5932   /* Get rid of now superfluous conditions and associated edges (and phi node
5933      arguments).  */
5934   exit_bb = exit->dest;
5935 
5936   e = redirect_edge_and_branch (exits[0], exits[1]->dest);
5937   PENDING_STMT (e) = NULL;
5938 
5939   /* The latch of ORIG_LOOP was copied, and so was the backedge
5940      to the original header.  We redirect this backedge to EXIT_BB.  */
5941   for (i = 0; i < n_region; i++)
5942     if (get_bb_original (region_copy[i]) == orig_loop->latch)
5943       {
5944 	gcc_assert (single_succ_edge (region_copy[i]));
5945 	e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
5946 	PENDING_STMT (e) = NULL;
5947 	for (psi = gsi_start_phis (exit_bb);
5948 	     !gsi_end_p (psi);
5949 	     gsi_next (&psi))
5950 	  {
5951 	    phi = gsi_stmt (psi);
5952 	    def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
5953 	    add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
5954 	  }
5955       }
5956   e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
5957   PENDING_STMT (e) = NULL;
5958 
5959   /* Anything that is outside of the region, but was dominated by something
5960      inside needs to update dominance info.  */
5961   iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5962   doms.release ();
5963   /* Update the SSA web.  */
5964   update_ssa (TODO_update_ssa);
5965 
5966   if (free_region_copy)
5967     free (region_copy);
5968 
5969   free_original_copy_tables ();
5970   return true;
5971 }
5972 
5973 /* Add all the blocks dominated by ENTRY to the array BBS_P.  Stop
5974    adding blocks when the dominator traversal reaches EXIT.  This
5975    function silently assumes that ENTRY strictly dominates EXIT.  */
5976 
5977 void
5978 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5979 			      vec<basic_block> *bbs_p)
5980 {
5981   basic_block son;
5982 
5983   for (son = first_dom_son (CDI_DOMINATORS, entry);
5984        son;
5985        son = next_dom_son (CDI_DOMINATORS, son))
5986     {
5987       bbs_p->safe_push (son);
5988       if (son != exit)
5989 	gather_blocks_in_sese_region (son, exit, bbs_p);
5990     }
5991 }
5992 
/* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
   The duplicates are recorded in VARS_MAP, so repeated calls for the
   same decl reuse the first duplicate.  */

static void
replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
			   tree to_context)
{
  tree t = *tp, new_t;
  struct function *f = DECL_STRUCT_FUNCTION (to_context);
  void **loc;

  /* Already in the destination context; nothing to do.  */
  if (DECL_CONTEXT (t) == to_context)
    return;

  loc = pointer_map_contains (vars_map, t);

  if (!loc)
    {
      /* First time we see T: create the duplicate and cache it.  */
      loc = pointer_map_insert (vars_map, t);

      if (SSA_VAR_P (t))
	{
	  new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
	  add_local_decl (f, new_t);
	}
      else
	{
	  gcc_assert (TREE_CODE (t) == CONST_DECL);
	  new_t = copy_node (t);
	}
      DECL_CONTEXT (new_t) = to_context;

      *loc = new_t;
    }
  else
    new_t = (tree) *loc;

  *tp = new_t;
}
6032 
6033 
/* Creates an ssa name in TO_CONTEXT equivalent to NAME.
   VARS_MAP maps old ssa names and var_decls to the new ones.
   Only real operands are handled; virtual operands are rejected.  */

static tree
replace_ssa_name (tree name, struct pointer_map_t *vars_map,
		  tree to_context)
{
  void **loc;
  tree new_name;

  gcc_assert (!virtual_operand_p (name));

  loc = pointer_map_contains (vars_map, name);

  if (!loc)
    {
      tree decl = SSA_NAME_VAR (name);
      if (decl)
	{
	  /* Duplicate the underlying decl first, then make an SSA name
	     for it in the destination function.  */
	  replace_by_duplicate_decl (&decl, vars_map, to_context);
	  new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
				       decl, SSA_NAME_DEF_STMT (name));
	  /* Default defs are tracked per-function; re-register.  */
	  if (SSA_NAME_IS_DEFAULT_DEF (name))
	    set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
				 decl, new_name);
	}
      else
	/* Anonymous SSA name: copy it directly into TO_CONTEXT.  */
	new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
				     name, SSA_NAME_DEF_STMT (name));

      loc = pointer_map_insert (vars_map, name);
      *loc = new_name;
    }
  else
    new_name = (tree) *loc;

  return new_name;
}
6072 
/* Bookkeeping shared by the move_stmt_* walkers when moving statements
   from one function to another.  */
struct move_stmt_d
{
  tree orig_block;	/* BLOCK the statements originally belonged to.  */
  tree new_block;	/* BLOCK to attach moved expressions to.  */
  tree from_context;	/* FUNCTION_DECL statements are moved from.  */
  tree to_context;	/* FUNCTION_DECL statements are moved to.  */
  struct pointer_map_t *vars_map;	/* Old decl/SSA name -> duplicate.  */
  htab_t new_label_map;	/* Maps old labels to their replacements.  */
  struct pointer_map_t *eh_map;		/* Old EH region -> duplicate.  */
  bool remap_decls_p;	/* Whether local decls should be duplicated.  */
};
6084 
/* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
   contained in *TP if it has been ORIG_BLOCK previously and change the
   DECL_CONTEXT of every local variable referenced in *TP.  Callback
   for walk_stmt; DATA is a walk_stmt_info whose info field points to
   a struct move_stmt_d.  */

static tree
move_stmt_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  tree t = *tp;

  if (EXPR_P (t))
    {
      tree block = TREE_BLOCK (t);
      if (block == p->orig_block
	  || (p->orig_block == NULL_TREE
	      && block != NULL_TREE))
	TREE_SET_BLOCK (t, p->new_block);
#ifdef ENABLE_CHECKING
      else if (block != NULL_TREE)
	{
	  /* Any other block must be a descendant of ORIG_BLOCK.  */
	  while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
	    block = BLOCK_SUPERCONTEXT (block);
	  gcc_assert (block == p->orig_block);
	}
#endif
    }
  else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
    {
      if (TREE_CODE (t) == SSA_NAME)
	*tp = replace_ssa_name (t, p->vars_map, p->to_context);
      else if (TREE_CODE (t) == LABEL_DECL)
	{
	  /* Labels are not duplicated; they are either remapped via
	     NEW_LABEL_MAP or simply re-homed to the new context.  */
	  if (p->new_label_map)
	    {
	      struct tree_map in, *out;
	      in.base.from = t;
	      out = (struct tree_map *)
		htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
	      if (out)
		*tp = t = out->to;
	    }

	  DECL_CONTEXT (t) = p->to_context;
	}
      else if (p->remap_decls_p)
	{
	  /* Replace T with its duplicate.  T should no longer appear in the
	     parent function, so this looks wasteful; however, it may appear
	     in referenced_vars, and more importantly, as virtual operands of
	     statements, and in alias lists of other variables.  It would be
	     quite difficult to expunge it from all those places.  ??? It might
	     suffice to do this for addressable variables.  */
	  if ((TREE_CODE (t) == VAR_DECL
	       && !is_global_var (t))
	      || TREE_CODE (t) == CONST_DECL)
	    replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
	}
      /* Decls and SSA names have no interesting subtrees to walk.  */
      *walk_subtrees = 0;
    }
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL_TREE;
}
6150 
/* Helper for move_stmt_r.  Given an EH region number for the source
   function, map that to the duplicate EH region number in the dest.
   P->eh_map must contain an entry for the region (it was populated by
   duplicate_eh_regions for every region being moved), so the lookup
   is assumed to succeed.  */

static int
move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
{
  eh_region old_r, new_r;
  void **slot;

  old_r = get_eh_region_from_number (old_nr);
  slot = pointer_map_contains (p->eh_map, old_r);
  new_r = (eh_region) *slot;

  return new_r->index;
}
6166 
6167 /* Similar, but operate on INTEGER_CSTs.  */
6168 
6169 static tree
6170 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6171 {
6172   int old_nr, new_nr;
6173 
6174   old_nr = tree_low_cst (old_t_nr, 0);
6175   new_nr = move_stmt_eh_region_nr (old_nr, p);
6176 
6177   return build_int_cst (integer_type_node, new_nr);
6178 }
6179 
/* Like move_stmt_op, but for gimple statements.

   Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
   contained in the current statement in *GSI_P and change the
   DECL_CONTEXT of every local variable referenced in the current
   statement.  Also remaps EH region numbers embedded in statements and
   in the EH builtins to the duplicated regions of the destination
   function.  WI->info points to a struct move_stmt_d.  */

static tree
move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	     struct walk_stmt_info *wi)
{
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  gimple stmt = gsi_stmt (*gsi_p);
  tree block = gimple_block (stmt);

  /* Same remapping rule as move_stmt_op: NULL ORIG_BLOCK matches any
     statement that has a block at all.  */
  if (block == p->orig_block
      || (p->orig_block == NULL_TREE
	  && block != NULL_TREE))
    gimple_set_block (stmt, p->new_block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Remap the region numbers for __builtin_eh_{pointer,filter}.  */
      {
	tree r, fndecl = gimple_call_fndecl (stmt);
	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_EH_COPY_VALUES:
	      /* __builtin_eh_copy_values takes two region numbers;
		 remap the second here, the first below.  */
	      r = gimple_call_arg (stmt, 1);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 1, r);
	      /* FALLTHRU */

	    case BUILT_IN_EH_POINTER:
	    case BUILT_IN_EH_FILTER:
	      r = gimple_call_arg (stmt, 0);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 0, r);
	      break;

	    default:
	      break;
	    }
      }
      break;

    case GIMPLE_RESX:
      {
	int r = gimple_resx_region (stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_resx_set_region (stmt, r);
      }
      break;

    case GIMPLE_EH_DISPATCH:
      {
	int r = gimple_eh_dispatch_region (stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_eh_dispatch_set_region (stmt, r);
      }
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
      break;
    default:
      if (is_gimple_omp (stmt))
	{
	  /* Do not remap variables inside OMP directives.  Variables
	     referenced in clauses and directive header belong to the
	     parent function and should not be moved into the child
	     function.  */
	  bool save_remap_decls_p = p->remap_decls_p;
	  p->remap_decls_p = false;
	  *handled_ops_p = true;

	  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
			       move_stmt_op, wi);

	  p->remap_decls_p = save_remap_decls_p;
	}
      break;
    }

  return NULL_TREE;
}
6268 
/* Move basic block BB from function CFUN to function DEST_FN.  The
   block is moved out of the original linked list and placed after
   block AFTER in the new list.  Also, the block is removed from the
   original array of blocks and placed in DEST_FN's array of blocks.
   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
   updated to reflect the moved edges.

   The local variables are remapped to new instances, VARS_MAP is used
   to record the mapping.  D carries the remaining remapping state
   (blocks, labels, EH regions); see struct move_stmt_d.  */

static void
move_block_to_fn (struct function *dest_cfun, basic_block bb,
		  basic_block after, bool update_edge_count_p,
		  struct move_stmt_d *d)
{
  struct control_flow_graph *cfg;
  edge_iterator ei;
  edge e;
  gimple_stmt_iterator si;
  unsigned old_len, new_len;

  /* Remove BB from dominance structures.  */
  delete_from_dominance_info (CDI_DOMINATORS, bb);
  if (current_loops)
    remove_bb_from_loops (bb);

  /* Link BB to the new linked list.  */
  move_block_after (bb, after);

  /* Update the edge count in the corresponding flowgraphs.  */
  if (update_edge_count_p)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	cfun->cfg->x_n_edges--;
	dest_cfun->cfg->x_n_edges++;
      }

  /* Remove BB from the original basic block array.  */
  (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
  cfun->cfg->x_n_basic_blocks--;

  /* Grow DEST_CFUN's basic block array if needed.  BB keeps its old
     index, so the array must cover at least bb->index + 1 slots.  */
  cfg = dest_cfun->cfg;
  cfg->x_n_basic_blocks++;
  if (bb->index >= cfg->x_last_basic_block)
    cfg->x_last_basic_block = bb->index + 1;

  old_len = vec_safe_length (cfg->x_basic_block_info);
  if ((unsigned) cfg->x_last_basic_block >= old_len)
    {
      /* Grow by 25% headroom to amortize repeated moves.  */
      new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
      vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
    }

  (*cfg->x_basic_block_info)[bb->index] = bb;

  /* Remap the variables in phi nodes.  */
  for (si = gsi_start_phis (bb); !gsi_end_p (si); )
    {
      gimple phi = gsi_stmt (si);
      use_operand_p use;
      tree op = PHI_RESULT (phi);
      ssa_op_iter oi;
      unsigned i;

      if (virtual_operand_p (op))
	{
	  /* Remove the phi nodes for virtual operands (alias analysis will be
	     run for the new function, anyway).  */
          remove_phi_node (&si, true);
	  continue;
	}

      SET_PHI_RESULT (phi,
		      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
      FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
	{
	  op = USE_FROM_PTR (use);
	  if (TREE_CODE (op) == SSA_NAME)
	    SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
	}

      /* Re-point the BLOCK baked into each phi argument location at the
	 destination function's block, mirroring what move_stmt_op does
	 for expressions.  */
      for (i = 0; i < EDGE_COUNT (bb->preds); i++)
	{
	  location_t locus = gimple_phi_arg_location (phi, i);
	  tree block = LOCATION_BLOCK (locus);

	  if (locus == UNKNOWN_LOCATION)
	    continue;
	  if (d->orig_block == NULL_TREE || block == d->orig_block)
	    {
	      if (d->new_block == NULL_TREE)
		locus = LOCATION_LOCUS (locus);
	      else
		locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
	      gimple_phi_arg_set_location (phi, i, locus);
	    }
	}

      gsi_next (&si);
    }

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple stmt = gsi_stmt (si);
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = d;
      walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (stmt);
	  int uid = LABEL_DECL_UID (label);

	  gcc_assert (uid > -1);

	  /* Move the label from the source function's label-to-block
	     map to the destination's, growing the latter if needed.  */
	  old_len = vec_safe_length (cfg->x_label_to_block_map);
	  if (old_len <= (unsigned) uid)
	    {
	      new_len = 3 * uid / 2 + 1;
	      vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
	    }

	  (*cfg->x_label_to_block_map)[uid] = bb;
	  (*cfun->cfg->x_label_to_block_map)[uid] = NULL;

	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);

	  if (uid >= dest_cfun->cfg->last_label_uid)
	    dest_cfun->cfg->last_label_uid = uid + 1;
	}

      maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
      remove_stmt_from_eh_lp_fn (cfun, stmt);

      gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
      gimple_remove_stmt_histograms (cfun, stmt);

      /* We cannot leave any operands allocated from the operand caches of
	 the current function.  */
      free_stmt_operands (stmt);
      /* update_stmt must run with DEST_CFUN as the current function so
	 the operands are re-allocated from its caches.  */
      push_cfun (dest_cfun);
      update_stmt (stmt);
      pop_cfun ();
    }

  /* Remap the BLOCKs carried on outgoing edge goto_locus, analogous to
     the phi-argument locations above.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->goto_locus != UNKNOWN_LOCATION)
      {
	tree block = LOCATION_BLOCK (e->goto_locus);
	if (d->orig_block == NULL_TREE
	    || block == d->orig_block)
	  e->goto_locus = d->new_block ?
	      COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
	      LOCATION_LOCUS (e->goto_locus);
      }
}
6428 
6429 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6430    the outermost EH region.  Use REGION as the incoming base EH region.  */
6431 
6432 static eh_region
6433 find_outermost_region_in_block (struct function *src_cfun,
6434 				basic_block bb, eh_region region)
6435 {
6436   gimple_stmt_iterator si;
6437 
6438   for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6439     {
6440       gimple stmt = gsi_stmt (si);
6441       eh_region stmt_region;
6442       int lp_nr;
6443 
6444       lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6445       stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6446       if (stmt_region)
6447 	{
6448 	  if (region == NULL)
6449 	    region = stmt_region;
6450 	  else if (stmt_region != region)
6451 	    {
6452 	      region = eh_region_outermost (src_cfun, stmt_region, region);
6453 	      gcc_assert (region != NULL);
6454 	    }
6455 	}
6456     }
6457 
6458   return region;
6459 }
6460 
/* Callback for duplicate_eh_regions: create a fresh artificial label
   for DECL (a LABEL_DECL used by an EH region being duplicated) and
   record the DECL -> new label mapping in the hash table passed as
   DATA.  move_stmt_op later consults this table to substitute the
   duplicates into moved statements.  Returns the new label.  */

static tree
new_label_mapper (tree decl, void *data)
{
  htab_t hash = (htab_t) data;
  struct tree_map *m;
  void **slot;

  gcc_assert (TREE_CODE (decl) == LABEL_DECL);

  m = XNEW (struct tree_map);
  m->hash = DECL_UID (decl);
  m->base.from = decl;
  m->to = create_artificial_label (UNKNOWN_LOCATION);
  /* The duplicate keeps the original's label UID, so keep
     last_label_uid in sync.  */
  LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
  if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
    cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;

  /* Each label must be mapped at most once.  */
  slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
  gcc_assert (*slot == NULL);

  *slot = m;

  return m->to;
}
6485 
/* Change DECL_CONTEXT of all BLOCK_VARS in block, including
   subblocks.  Each VAR_DECL/CONST_DECL in BLOCK's variable chain is
   replaced in place by its duplicate from VARS_MAP (creating one if
   needed) with DECL_CONTEXT set to TO_CONTEXT; other decl kinds are
   left untouched.  Recurses over BLOCK_SUBBLOCKS.  */

static void
replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
				  tree to_context)
{
  tree *tp, t;

  /* TP walks the chain by address so the duplicate can be spliced in
     at the same position.  */
  for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
    {
      t = *tp;
      if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
	continue;
      replace_by_duplicate_decl (&t, vars_map, to_context);
      if (t != *tp)
	{
	  /* Carry over any DECL_VALUE_EXPR to the duplicate before
	     replacing the chain entry.  */
	  if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
	    {
	      SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
	      DECL_HAS_VALUE_EXPR_P (t) = 1;
	    }
	  DECL_CHAIN (t) = DECL_CHAIN (*tp);
	  *tp = t;
	}
    }

  for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
    replace_block_vars_by_duplicates (block, vars_map, to_context);
}
6516 
/* Move a single-entry, single-exit region delimited by ENTRY_BB and
   EXIT_BB to function DEST_CFUN.  The whole region is replaced by a
   single basic block in the original CFG and the new basic block is
   returned.  DEST_CFUN must not have a CFG yet.

   Note that the region need not be a pure SESE region.  Blocks inside
   the region may contain calls to abort/exit.  The only restriction
   is that ENTRY_BB should be the only entry point and it must
   dominate EXIT_BB.

   Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
   functions outermost BLOCK, move all subblocks of ORIG_BLOCK
   to the new function.

   All local variables referenced in the region are assumed to be in
   the corresponding BLOCK_VARS and unexpanded variable lists
   associated with DEST_CFUN.  */

basic_block
move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
		        basic_block exit_bb, tree orig_block)
{
  vec<basic_block> bbs, dom_bbs;
  basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
  basic_block after, bb, *entry_pred, *exit_succ, abb;
  struct function *saved_cfun = cfun;
  int *entry_flag, *exit_flag;
  unsigned *entry_prob, *exit_prob;
  unsigned i, num_entry_edges, num_exit_edges;
  edge e;
  edge_iterator ei;
  htab_t new_label_map;
  struct pointer_map_t *vars_map, *eh_map;
  struct loop *loop = entry_bb->loop_father;
  struct move_stmt_d d;

  /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
     region.  */
  gcc_assert (entry_bb != exit_bb
              && (!exit_bb
		  || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));

  /* Collect all the blocks in the region.  Manually add ENTRY_BB
     because it won't be added by dfs_enumerate_from.  */
  bbs.create (0);
  bbs.safe_push (entry_bb);
  gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);

  /* The blocks that used to be dominated by something in BBS will now be
     dominated by the new block.  */
  dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
				     bbs.address (),
				     bbs.length ());

  /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG.  We need to remember
     the predecessor edges to ENTRY_BB and the successor edges to
     EXIT_BB so that we can re-attach them to the new basic block that
     will replace the region.  */
  num_entry_edges = EDGE_COUNT (entry_bb->preds);
  entry_pred = XNEWVEC (basic_block, num_entry_edges);
  entry_flag = XNEWVEC (int, num_entry_edges);
  entry_prob = XNEWVEC (unsigned, num_entry_edges);
  i = 0;
  /* No gsi_next here: remove_edge invalidates the iterator, so restart
     from the (shrinking) edge list each time.  */
  for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
    {
      entry_prob[i] = e->probability;
      entry_flag[i] = e->flags;
      entry_pred[i++] = e->src;
      remove_edge (e);
    }

  if (exit_bb)
    {
      num_exit_edges = EDGE_COUNT (exit_bb->succs);
      exit_succ = XNEWVEC (basic_block, num_exit_edges);
      exit_flag = XNEWVEC (int, num_exit_edges);
      exit_prob = XNEWVEC (unsigned, num_exit_edges);
      i = 0;
      for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
	{
	  exit_prob[i] = e->probability;
	  exit_flag[i] = e->flags;
	  exit_succ[i++] = e->dest;
	  remove_edge (e);
	}
    }
  else
    {
      num_exit_edges = 0;
      exit_succ = NULL;
      exit_flag = NULL;
      exit_prob = NULL;
    }

  /* Switch context to the child function to initialize DEST_FN's CFG.  */
  gcc_assert (dest_cfun->cfg == NULL);
  push_cfun (dest_cfun);

  init_empty_tree_cfg ();

  /* Initialize EH information for the new function.  */
  eh_map = NULL;
  new_label_map = NULL;
  if (saved_cfun->eh)
    {
      eh_region region = NULL;

      /* Find the single outermost EH region covering the whole moved
	 region, then duplicate it (and its children) into DEST_CFUN.  */
      FOR_EACH_VEC_ELT (bbs, i, bb)
	region = find_outermost_region_in_block (saved_cfun, bb, region);

      init_eh_for_function ();
      if (region != NULL)
	{
	  new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
	  eh_map = duplicate_eh_regions (saved_cfun, region, 0,
					 new_label_mapper, new_label_map);
	}
    }

  pop_cfun ();

  /* Move blocks from BBS into DEST_CFUN.  */
  gcc_assert (bbs.length () >= 2);
  after = dest_cfun->cfg->x_entry_block_ptr;
  vars_map = pointer_map_create ();

  memset (&d, 0, sizeof (d));
  d.orig_block = orig_block;
  d.new_block = DECL_INITIAL (dest_cfun->decl);
  d.from_context = cfun->decl;
  d.to_context = dest_cfun->decl;
  d.vars_map = vars_map;
  d.new_label_map = new_label_map;
  d.eh_map = eh_map;
  d.remap_decls_p = true;

  FOR_EACH_VEC_ELT (bbs, i, bb)
    {
      /* No need to update edge counts on the last block.  It has
	 already been updated earlier when we detached the region from
	 the original CFG.  */
      move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
      after = bb;
    }

  /* Rewire BLOCK_SUBBLOCKS of orig_block.  */
  if (orig_block)
    {
      tree block;
      gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
		  == NULL_TREE);
      BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
	= BLOCK_SUBBLOCKS (orig_block);
      for (block = BLOCK_SUBBLOCKS (orig_block);
	   block; block = BLOCK_CHAIN (block))
	BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
      BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
    }

  replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
				    vars_map, dest_cfun->decl);

  if (new_label_map)
    htab_delete (new_label_map);
  if (eh_map)
    pointer_map_destroy (eh_map);
  pointer_map_destroy (vars_map);

  /* Rewire the entry and exit blocks.  The successor to the entry
     block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
     the child function.  Similarly, the predecessor of DEST_FN's
     EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR.  We
     need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
     various CFG manipulation function get to the right CFG.

     FIXME, this is silly.  The CFG ought to become a parameter to
     these helpers.  */
  push_cfun (dest_cfun);
  make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
  if (exit_bb)
    make_edge (exit_bb,  EXIT_BLOCK_PTR, 0);
  pop_cfun ();

  /* Back in the original function, the SESE region has disappeared,
     create a new basic block in its place.  */
  bb = create_empty_bb (entry_pred[0]);
  if (current_loops)
    add_bb_to_loop (bb, loop);
  /* Re-attach the saved entry/exit edges to the replacement block with
     their original flags and probabilities.  */
  for (i = 0; i < num_entry_edges; i++)
    {
      e = make_edge (entry_pred[i], bb, entry_flag[i]);
      e->probability = entry_prob[i];
    }

  for (i = 0; i < num_exit_edges; i++)
    {
      e = make_edge (bb, exit_succ[i], exit_flag[i]);
      e->probability = exit_prob[i];
    }

  set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
  FOR_EACH_VEC_ELT (dom_bbs, i, abb)
    set_immediate_dominator (CDI_DOMINATORS, abb, bb);
  dom_bbs.release ();

  if (exit_bb)
    {
      free (exit_prob);
      free (exit_flag);
      free (exit_succ);
    }
  free (entry_prob);
  free (entry_flag);
  free (entry_pred);
  bbs.release ();

  return bb;
}
6735 
6736 
/* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
   dumpfile.h).  Depending on how far FNDECL has been lowered, this
   emits a CFG-based dump, a flat GIMPLE sequence, or a tree-based
   dump of DECL_SAVED_TREE.  Temporarily sets current_function_decl
   to FNDECL and restores it before returning.  */

void
dump_function_to_file (tree fndecl, FILE *file, int flags)
{
  tree arg, var, old_current_fndecl = current_function_decl;
  struct function *dsf;
  bool ignore_topmost_bind = false, any_var = false;
  basic_block bb;
  tree chain;
  bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
		  && decl_is_tm_clone (fndecl));
  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);

  current_function_decl = fndecl;
  fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");

  /* Print the parameter list: "type name, type name, ...".  */
  arg = DECL_ARGUMENTS (fndecl);
  while (arg)
    {
      print_generic_expr (file, TREE_TYPE (arg), dump_flags);
      fprintf (file, " ");
      print_generic_expr (file, arg, dump_flags);
      if (flags & TDF_VERBOSE)
	print_node (file, "", arg, 4);
      if (DECL_CHAIN (arg))
	fprintf (file, ", ");
      arg = DECL_CHAIN (arg);
    }
  fprintf (file, ")\n");

  if (flags & TDF_VERBOSE)
    print_node (file, "", fndecl, 2);

  dsf = DECL_STRUCT_FUNCTION (fndecl);
  if (dsf && (flags & TDF_EH))
    dump_eh_tree (file, dsf);

  if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
    {
      dump_node (fndecl, TDF_SLIM | flags, file);
      current_function_decl = old_current_fndecl;
      return;
    }

  /* When GIMPLE is lowered, the variables are no longer available in
     BIND_EXPRs, so display them separately.  */
  if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
    {
      unsigned ix;
      ignore_topmost_bind = true;

      fprintf (file, "{\n");
      if (!vec_safe_is_empty (fun->local_decls))
	FOR_EACH_LOCAL_DECL (fun, ix, var)
	  {
	    print_generic_decl (file, var, flags);
	    if (flags & TDF_VERBOSE)
	      print_node (file, "", var, 4);
	    fprintf (file, "\n");

	    any_var = true;
	  }
      /* Also print anonymous SSA names (those without an underlying
	 variable); SSA name 0 is unused, hence ix starts at 1.  */
      if (gimple_in_ssa_p (cfun))
	for (ix = 1; ix < num_ssa_names; ++ix)
	  {
	    tree name = ssa_name (ix);
	    if (name && !SSA_NAME_VAR (name))
	      {
		fprintf (file, "  ");
		print_generic_expr (file, TREE_TYPE (name), flags);
		fprintf (file, " ");
		print_generic_expr (file, name, flags);
		fprintf (file, ";\n");

		any_var = true;
	      }
	  }
    }

  if (fun && fun->decl == fndecl
      && fun->cfg
      && basic_block_info_for_function (fun))
    {
      /* If the CFG has been built, emit a CFG-based dump.  */
      if (!ignore_topmost_bind)
	fprintf (file, "{\n");

      if (any_var && n_basic_blocks_for_function (fun))
	fprintf (file, "\n");

      FOR_EACH_BB_FN (bb, fun)
	dump_bb (file, bb, 2, flags | TDF_COMMENT);

      fprintf (file, "}\n");
    }
  else if (DECL_SAVED_TREE (fndecl) == NULL)
    {
      /* The function is now in GIMPLE form but the CFG has not been
	 built yet.  Emit the single sequence of GIMPLE statements
	 that make up its body.  */
      gimple_seq body = gimple_body (fndecl);

      /* If the body is a single GIMPLE_BIND, print it without an
	 extra set of braces.  */
      if (gimple_seq_first_stmt (body)
	  && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
	  && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
	print_gimple_seq (file, body, 0, flags);
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");

	  if (any_var)
	    fprintf (file, "\n");

	  print_gimple_seq (file, body, 2, flags);
	  fprintf (file, "}\n");
	}
    }
  else
    {
      int indent;

      /* Make a tree based dump.  */
      chain = DECL_SAVED_TREE (fndecl);
      if (chain && TREE_CODE (chain) == BIND_EXPR)
	{
	  if (ignore_topmost_bind)
	    {
	      /* Skip the bind itself; its variables were already
		 printed above.  */
	      chain = BIND_EXPR_BODY (chain);
	      indent = 2;
	    }
	  else
	    indent = 0;
	}
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");
	  indent = 2;
	}

      if (any_var)
	fprintf (file, "\n");

      print_generic_stmt_indented (file, chain, flags, indent);
      if (ignore_topmost_bind)
	fprintf (file, "}\n");
    }

  if (flags & TDF_ENUMERATE_LOCALS)
    dump_enumerated_decls (file, flags);
  fprintf (file, "\n\n");

  current_function_decl = old_current_fndecl;
}
6894 
/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h).
   Convenience wrapper around dump_function_to_file for use from a
   debugger.  */

DEBUG_FUNCTION void
debug_function (tree fn, int flags)
{
  dump_function_to_file (fn, stderr, flags);
}
6902 
6903 
6904 /* Print on FILE the indexes for the predecessors of basic_block BB.  */
6905 
6906 static void
6907 print_pred_bbs (FILE *file, basic_block bb)
6908 {
6909   edge e;
6910   edge_iterator ei;
6911 
6912   FOR_EACH_EDGE (e, ei, bb->preds)
6913     fprintf (file, "bb_%d ", e->src->index);
6914 }
6915 
6916 
6917 /* Print on FILE the indexes for the successors of basic_block BB.  */
6918 
6919 static void
6920 print_succ_bbs (FILE *file, basic_block bb)
6921 {
6922   edge e;
6923   edge_iterator ei;
6924 
6925   FOR_EACH_EDGE (e, ei, bb->succs)
6926     fprintf (file, "bb_%d ", e->dest->index);
6927 }
6928 
6929 /* Print to FILE the basic block BB following the VERBOSITY level.  */
6930 
6931 void
6932 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
6933 {
6934   char *s_indent = (char *) alloca ((size_t) indent + 1);
6935   memset ((void *) s_indent, ' ', (size_t) indent);
6936   s_indent[indent] = '\0';
6937 
6938   /* Print basic_block's header.  */
6939   if (verbosity >= 2)
6940     {
6941       fprintf (file, "%s  bb_%d (preds = {", s_indent, bb->index);
6942       print_pred_bbs (file, bb);
6943       fprintf (file, "}, succs = {");
6944       print_succ_bbs (file, bb);
6945       fprintf (file, "})\n");
6946     }
6947 
6948   /* Print basic_block's body.  */
6949   if (verbosity >= 3)
6950     {
6951       fprintf (file, "%s  {\n", s_indent);
6952       dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
6953       fprintf (file, "%s  }\n", s_indent);
6954     }
6955 }
6956 
6957 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
6958 
/* Pretty print LOOP on FILE, indented INDENT spaces.  Following
   VERBOSITY level this outputs the contents of the loop, or just its
   structure.  A NULL LOOP prints nothing; a loop with no header is
   reported as deleted.  At verbosity >= 1 the loop's blocks and
   nested loops are printed recursively.  */

static void
print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
{
  char *s_indent;
  basic_block bb;

  if (loop == NULL)
    return;

  s_indent = (char *) alloca ((size_t) indent + 1);
  memset ((void *) s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  /* Print loop's header.  */
  fprintf (file, "%sloop_%d (", s_indent, loop->num);
  if (loop->header)
    fprintf (file, "header = %d", loop->header->index);
  else
    {
      /* No header means the loop structure was removed.  */
      fprintf (file, "deleted)\n");
      return;
    }
  if (loop->latch)
    fprintf (file, ", latch = %d", loop->latch->index);
  else
    fprintf (file, ", multiple latches");
  fprintf (file, ", niter = ");
  print_generic_expr (file, loop->nb_iterations, 0);

  if (loop->any_upper_bound)
    {
      fprintf (file, ", upper_bound = ");
      dump_double_int (file, loop->nb_iterations_upper_bound, true);
    }

  if (loop->any_estimate)
    {
      fprintf (file, ", estimate = ");
      dump_double_int (file, loop->nb_iterations_estimate, true);
    }
  fprintf (file, ")\n");

  /* Print loop's body.  */
  if (verbosity >= 1)
    {
      fprintf (file, "%s{\n", s_indent);
      /* Only blocks whose innermost enclosing loop is LOOP itself;
	 blocks of nested loops are printed by the recursive call.  */
      FOR_EACH_BB (bb)
	if (bb->loop_father == loop)
	  print_loops_bb (file, bb, indent, verbosity);

      print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
      fprintf (file, "%s}\n", s_indent);
    }
}
7017 
7018 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7019    spaces.  Following VERBOSITY level this outputs the contents of the
7020    loop, or just its structure.  */
7021 
7022 static void
7023 print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
7024 {
7025   if (loop == NULL)
7026     return;
7027 
7028   print_loop (file, loop, indent, verbosity);
7029   print_loop_and_siblings (file, loop->next, indent, verbosity);
7030 }
7031 
7032 /* Follow a CFG edge from the entry point of the program, and on entry
7033    of a loop, pretty print the loop structure on FILE.  */
7034 
7035 void
7036 print_loops (FILE *file, int verbosity)
7037 {
7038   basic_block bb;
7039 
7040   bb = ENTRY_BLOCK_PTR;
7041   if (bb && bb->loop_father)
7042     print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7043 }
7044 
7045 
7046 /* Debugging loops structure at tree level, at some VERBOSITY level.  */
7047 
7048 DEBUG_FUNCTION void
7049 debug_loops (int verbosity)
7050 {
7051   print_loops (stderr, verbosity);
7052 }
7053 
/* Print on stderr the code of LOOP, at some VERBOSITY level.
   Debugger convenience wrapper around print_loop.  */

DEBUG_FUNCTION void
debug_loop (struct loop *loop, int verbosity)
{
  print_loop (stderr, loop, 0, verbosity);
}
7061 
/* Print on stderr the code of loop number NUM, at some VERBOSITY
   level.  Looks the loop up by number and delegates to debug_loop.  */

DEBUG_FUNCTION void
debug_loop_num (unsigned num, int verbosity)
{
  debug_loop (get_loop (num), verbosity);
}
7070 
7071 /* Return true if BB ends with a call, possibly followed by some
7072    instructions that must stay with the call.  Return false,
7073    otherwise.  */
7074 
7075 static bool
7076 gimple_block_ends_with_call_p (basic_block bb)
7077 {
7078   gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7079   return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7080 }
7081 
7082 
7083 /* Return true if BB ends with a conditional branch.  Return false,
7084    otherwise.  */
7085 
7086 static bool
7087 gimple_block_ends_with_condjump_p (const_basic_block bb)
7088 {
7089   gimple stmt = last_stmt (CONST_CAST_BB (bb));
7090   return (stmt && gimple_code (stmt) == GIMPLE_COND);
7091 }
7092 
7093 
7094 /* Return true if we need to add fake edge to exit at statement T.
7095    Helper function for gimple_flow_call_edges_add.  */
7096 
7097 static bool
7098 need_fake_edge_p (gimple t)
7099 {
7100   tree fndecl = NULL_TREE;
7101   int call_flags = 0;
7102 
7103   /* NORETURN and LONGJMP calls already have an edge to exit.
7104      CONST and PURE calls do not need one.
7105      We don't currently check for CONST and PURE here, although
7106      it would be a good idea, because those attributes are
7107      figured out from the RTL in mark_constant_function, and
7108      the counter incrementation code from -fprofile-arcs
7109      leads to different results from -fbranch-probabilities.  */
7110   if (is_gimple_call (t))
7111     {
7112       fndecl = gimple_call_fndecl (t);
7113       call_flags = gimple_call_flags (t);
7114     }
7115 
7116   if (is_gimple_call (t)
7117       && fndecl
7118       && DECL_BUILT_IN (fndecl)
7119       && (call_flags & ECF_NOTHROW)
7120       && !(call_flags & ECF_RETURNS_TWICE)
7121       /* fork() doesn't really return twice, but the effect of
7122          wrapping it in __gcov_fork() which calls __gcov_flush()
7123 	 and clears the counters before forking has the same
7124 	 effect as returning twice.  Force a fake edge.  */
7125       && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7126 	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7127     return false;
7128 
7129   if (is_gimple_call (t))
7130     {
7131       edge_iterator ei;
7132       edge e;
7133       basic_block bb;
7134 
7135       if (!(call_flags & ECF_NORETURN))
7136 	return true;
7137 
7138       bb = gimple_bb (t);
7139       FOR_EACH_EDGE (e, ei, bb->succs)
7140 	if ((e->flags & EDGE_FAKE) == 0)
7141 	  return true;
7142     }
7143 
7144   if (gimple_code (t) == GIMPLE_ASM
7145        && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
7146     return true;
7147 
7148   return false;
7149 }
7150 
7151 
/* Add fake edges to the function exit for any non constant and non
   noreturn calls (or noreturn calls with EH/abnormal edges),
   volatile inline assembly in the bitmap of blocks specified by BLOCKS
   or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does
   not imply that all subsequent instructions must be executed.  */

static int
gimple_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block;
  bool check_last_block = false;

  /* Nothing to do for an empty function.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    return 0;

  /* The last real block needs the special treatment below when either
     the whole CFG is processed or when that block is in BLOCKS.  */
  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks, EXIT_BLOCK_PTR->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
      gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
      gimple t = NULL;

      if (!gsi_end_p (gsi))
	t = gsi_stmt (gsi);

      if (t && need_fake_edge_p (t))
	{
	  edge e;

	  /* Inserting a nop on the edge to EXIT and committing it
	     splits the edge, giving us a fresh last block whose
	     fallthru edge cannot be confused with the fake edge.  */
	  e = find_edge (bb, EXIT_BLOCK_PTR);
	  if (e)
	    {
	      gsi_insert_on_edge (e, gimple_build_nop ());
	      gsi_commit_edge_inserts ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */
  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      gimple_stmt_iterator gsi;
      gimple stmt, last_stmt;

      /* Blocks may have been deleted; their indices leave holes.  */
      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      /* Walk the statements of BB backwards; splitting below does not
	 disturb the part of the block we have yet to visit.  */
      gsi = gsi_last_nondebug_bb (bb);
      if (!gsi_end_p (gsi))
	{
	  last_stmt = gsi_stmt (gsi);
	  do
	    {
	      stmt = gsi_stmt (gsi);
	      if (need_fake_edge_p (stmt))
		{
		  edge e;

		  /* The handling above of the final block before the
		     epilogue should be enough to verify that there is
		     no edge to the exit block in CFG already.
		     Calling make_edge in such case would cause us to
		     mark that edge as fake and remove it later.  */
#ifdef ENABLE_CHECKING
		  if (stmt == last_stmt)
		    {
		      e = find_edge (bb, EXIT_BLOCK_PTR);
		      gcc_assert (e == NULL);
		    }
#endif

		  /* Note that the following may create a new basic block
		     and renumber the existing basic blocks.  */
		  if (stmt != last_stmt)
		    {
		      e = split_block (bb, stmt);
		      if (e)
			blocks_split++;
		    }
		  make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
		}
	      gsi_prev (&gsi);
	    }
	  while (!gsi_end_p (gsi));
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
7271 
/* Removes edge E and all the blocks dominated by it, and updates dominance
   information.  The IL in E->src needs to be updated separately.
   If dominance info is not available, only the edge E is removed.*/

void
remove_edge_and_dominated_blocks (edge e)
{
  vec<basic_block> bbs_to_remove = vNULL;
  vec<basic_block> bbs_to_fix_dom = vNULL;
  bitmap df, df_idom;
  edge f;
  edge_iterator ei;
  bool none_removed = false;
  unsigned i;
  basic_block bb, dbb;
  bitmap_iterator bi;

  /* Without dominators we cannot compute which blocks become
     unreachable; just remove the edge.  */
  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      remove_edge (e);
      return;
    }

  /* No updating is needed for edges to exit.  */
  if (e->dest == EXIT_BLOCK_PTR)
    {
      if (cfgcleanup_altered_bbs)
	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      remove_edge (e);
      return;
    }

  /* First, we find the basic blocks to remove.  If E->dest has a predecessor
     that is not dominated by E->dest, then this set is empty.  Otherwise,
     all the basic blocks dominated by E->dest are removed.

     Also, to DF_IDOM we store the immediate dominators of the blocks in
     the dominance frontier of E (i.e., of the successors of the
     removed blocks, if there are any, and of E->dest otherwise).  */
  FOR_EACH_EDGE (f, ei, e->dest->preds)
    {
      if (f == e)
	continue;

      if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
	{
	  none_removed = true;
	  break;
	}
    }

  /* DF collects the dominance frontier of the removed region; DF_IDOM
     the immediate dominators of the blocks in DF.  */
  df = BITMAP_ALLOC (NULL);
  df_idom = BITMAP_ALLOC (NULL);

  if (none_removed)
    bitmap_set_bit (df_idom,
		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
  else
    {
      bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
      /* Successors of removed blocks that survive form the dominance
	 frontier of the removed region.  */
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	{
	  FOR_EACH_EDGE (f, ei, bb->succs)
	    {
	      if (f->dest != EXIT_BLOCK_PTR)
		bitmap_set_bit (df, f->dest->index);
	    }
	}
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	bitmap_clear_bit (df, bb->index);

      EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
	{
	  bb = BASIC_BLOCK (i);
	  bitmap_set_bit (df_idom,
			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
	}
    }

  if (cfgcleanup_altered_bbs)
    {
      /* Record the set of the altered basic blocks.  */
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      bitmap_ior_into (cfgcleanup_altered_bbs, df);
    }

  /* Remove E and the cancelled blocks.  */
  if (none_removed)
    remove_edge (e);
  else
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = bbs_to_remove.length (); i-- > 0; )
	delete_basic_block (bbs_to_remove[i]);
    }

  /* Update the dominance information.  The immediate dominator may change only
     for blocks whose immediate dominator belongs to DF_IDOM:

     Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
     removal.  Let Z the arbitrary block such that idom(Z) = Y and
     Z dominates X after the removal.  Before removal, there exists a path P
     from Y to X that avoids Z.  Let F be the last edge on P that is
     removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
     dominates W, and because of P, Z does not dominate W), and W belongs to
     the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
  EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
    {
      bb = BASIC_BLOCK (i);
      for (dbb = first_dom_son (CDI_DOMINATORS, bb);
	   dbb;
	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
	bbs_to_fix_dom.safe_push (dbb);
    }

  iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);

  /* Release the scratch bitmaps and vectors allocated above.  */
  BITMAP_FREE (df);
  BITMAP_FREE (df_idom);
  bbs_to_remove.release ();
  bbs_to_fix_dom.release ();
}
7397 
7398 /* Purge dead EH edges from basic block BB.  */
7399 
7400 bool
7401 gimple_purge_dead_eh_edges (basic_block bb)
7402 {
7403   bool changed = false;
7404   edge e;
7405   edge_iterator ei;
7406   gimple stmt = last_stmt (bb);
7407 
7408   if (stmt && stmt_can_throw_internal (stmt))
7409     return false;
7410 
7411   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7412     {
7413       if (e->flags & EDGE_EH)
7414 	{
7415 	  remove_edge_and_dominated_blocks (e);
7416 	  changed = true;
7417 	}
7418       else
7419 	ei_next (&ei);
7420     }
7421 
7422   return changed;
7423 }
7424 
7425 /* Purge dead EH edges from basic block listed in BLOCKS.  */
7426 
7427 bool
7428 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7429 {
7430   bool changed = false;
7431   unsigned i;
7432   bitmap_iterator bi;
7433 
7434   EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7435     {
7436       basic_block bb = BASIC_BLOCK (i);
7437 
7438       /* Earlier gimple_purge_dead_eh_edges could have removed
7439 	 this basic block already.  */
7440       gcc_assert (bb || changed);
7441       if (bb != NULL)
7442 	changed |= gimple_purge_dead_eh_edges (bb);
7443     }
7444 
7445   return changed;
7446 }
7447 
7448 /* Purge dead abnormal call edges from basic block BB.  */
7449 
7450 bool
7451 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7452 {
7453   bool changed = false;
7454   edge e;
7455   edge_iterator ei;
7456   gimple stmt = last_stmt (bb);
7457 
7458   if (!cfun->has_nonlocal_label)
7459     return false;
7460 
7461   if (stmt && stmt_can_make_abnormal_goto (stmt))
7462     return false;
7463 
7464   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7465     {
7466       if (e->flags & EDGE_ABNORMAL)
7467 	{
7468 	  remove_edge_and_dominated_blocks (e);
7469 	  changed = true;
7470 	}
7471       else
7472 	ei_next (&ei);
7473     }
7474 
7475   return changed;
7476 }
7477 
7478 /* Purge dead abnormal call edges from basic block listed in BLOCKS.  */
7479 
7480 bool
7481 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7482 {
7483   bool changed = false;
7484   unsigned i;
7485   bitmap_iterator bi;
7486 
7487   EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7488     {
7489       basic_block bb = BASIC_BLOCK (i);
7490 
7491       /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7492 	 this basic block already.  */
7493       gcc_assert (bb || changed);
7494       if (bb != NULL)
7495 	changed |= gimple_purge_dead_abnormal_call_edges (bb);
7496     }
7497 
7498   return changed;
7499 }
7500 
7501 /* This function is called whenever a new edge is created or
7502    redirected.  */
7503 
7504 static void
7505 gimple_execute_on_growing_pred (edge e)
7506 {
7507   basic_block bb = e->dest;
7508 
7509   if (!gimple_seq_empty_p (phi_nodes (bb)))
7510     reserve_phi_args_for_new_edge (bb);
7511 }
7512 
7513 /* This function is called immediately before edge E is removed from
7514    the edge vector E->dest->preds.  */
7515 
7516 static void
7517 gimple_execute_on_shrinking_pred (edge e)
7518 {
7519   if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7520     remove_phi_args (e);
7521 }
7522 
7523 /*---------------------------------------------------------------------------
7524   Helper functions for Loop versioning
7525   ---------------------------------------------------------------------------*/
7526 
/* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
   of 'first'.  Both of them are dominated by 'new_head' basic block.  When
   'new_head' was created by splitting 'second's incoming edge, it received
   phi arguments on that edge from split_edge().  Later, an additional edge
   'e' was created to connect 'new_head' and 'first'.  This routine adds, on
   this additional edge 'e', the phi args that the new_head-to-second edge
   received as part of the edge splitting.  */

static void
gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
				  basic_block new_head, edge e)
{
  gimple phi1, phi2;
  gimple_stmt_iterator psi1, psi2;
  tree def;
  edge e2 = find_edge (new_head, second);

  /* Because NEW_HEAD has been created by splitting SECOND's incoming
     edge, we should always have an edge from NEW_HEAD to SECOND.  */
  gcc_assert (e2 != NULL);

  /* Browse all 'second' basic block phi nodes and add phi args to
     edge 'e' for 'first' head. PHI args are always in correct order.  */

  /* Walk the two PHI sequences in lockstep; corresponding PHIs occupy
     the same positions because SECOND is a copy of FIRST.  */
  for (psi2 = gsi_start_phis (second),
       psi1 = gsi_start_phis (first);
       !gsi_end_p (psi2) && !gsi_end_p (psi1);
       gsi_next (&psi2),  gsi_next (&psi1))
    {
      phi1 = gsi_stmt (psi1);
      phi2 = gsi_stmt (psi2);
      /* Copy the argument (and its location) that PHI2 receives from
	 NEW_HEAD onto the new edge E of PHI1.  */
      def = PHI_ARG_DEF (phi2, e2->dest_idx);
      add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
    }
}
7562 
7563 
7564 /* Adds a if else statement to COND_BB with condition COND_EXPR.
7565    SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
7566    the destination of the ELSE part.  */
7567 
7568 static void
7569 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7570 			       basic_block second_head ATTRIBUTE_UNUSED,
7571 			       basic_block cond_bb, void *cond_e)
7572 {
7573   gimple_stmt_iterator gsi;
7574   gimple new_cond_expr;
7575   tree cond_expr = (tree) cond_e;
7576   edge e0;
7577 
7578   /* Build new conditional expr */
7579   new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7580 					       NULL_TREE, NULL_TREE);
7581 
7582   /* Add new cond in cond_bb.  */
7583   gsi = gsi_last_bb (cond_bb);
7584   gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7585 
7586   /* Adjust edges appropriately to connect new head with first head
7587      as well as second head.  */
7588   e0 = single_succ_edge (cond_bb);
7589   e0->flags &= ~EDGE_FALLTHRU;
7590   e0->flags |= EDGE_FALSE_VALUE;
7591 }
7592 
7593 
7594 /* Do book-keeping of basic block BB for the profile consistency checker.
7595    If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
7596    then do post-pass accounting.  Store the counting in RECORD.  */
7597 static void
7598 gimple_account_profile_record (basic_block bb, int after_pass,
7599 			       struct profile_record *record)
7600 {
7601   gimple_stmt_iterator i;
7602   for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
7603     {
7604       record->size[after_pass]
7605 	+= estimate_num_insns (gsi_stmt (i), &eni_size_weights);
7606       if (profile_status == PROFILE_READ)
7607 	record->time[after_pass]
7608 	  += estimate_num_insns (gsi_stmt (i),
7609 				 &eni_time_weights) * bb->count;
7610       else if (profile_status == PROFILE_GUESSED)
7611 	record->time[after_pass]
7612 	  += estimate_num_insns (gsi_stmt (i),
7613 				 &eni_time_weights) * bb->frequency;
7614     }
7615 }
7616 
/* The cfg_hooks vector used while the IL is in GIMPLE form.  */
struct cfg_hooks gimple_cfg_hooks = {
  "gimple",			/* name  */
  gimple_verify_flow_info,	/* verify_flow_info  */
  gimple_dump_bb,		/* dump_bb  */
  gimple_dump_bb_for_graph,	/* dump_bb_for_graph  */
  create_bb,			/* create_basic_block  */
  gimple_redirect_edge_and_branch, /* redirect_edge_and_branch  */
  gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force  */
  gimple_can_remove_branch_p,	/* can_remove_branch_p  */
  remove_bb,			/* delete_basic_block  */
  gimple_split_block,		/* split_block  */
  gimple_move_block_after,	/* move_block_after  */
  gimple_can_merge_blocks_p,	/* can_merge_blocks_p  */
  gimple_merge_blocks,		/* merge_blocks  */
  gimple_predict_edge,		/* predict_edge  */
  gimple_predicted_by_p,	/* predicted_by_p  */
  gimple_can_duplicate_bb_p,	/* can_duplicate_block_p  */
  gimple_duplicate_bb,		/* duplicate_block  */
  gimple_split_edge,		/* split_edge  */
  gimple_make_forwarder_block,	/* make_forward_block  */
  NULL,				/* tidy_fallthru_edge  */
  NULL,				/* force_nonfallthru */
  gimple_block_ends_with_call_p,/* block_ends_with_call_p */
  gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
  gimple_flow_call_edges_add,   /* flow_call_edges_add */
  gimple_execute_on_growing_pred,	/* execute_on_growing_pred */
  gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
  gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
  gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
  extract_true_false_edges_from_block, /* extract_cond_bb_edges */
  flush_pending_stmts, 		/* flush_pending_stmts */
  gimple_empty_block_p,           /* block_empty_p */
  gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
  gimple_account_profile_record,/* account_profile_record */
};
7653 
7654 
/* Split all critical edges.  Also split edges that committing pending
   edge insertions would need to split (see below), so that a later
   gsi_commit_edge_inserts cannot create new blocks.  Always returns 0
   (pass-execute convention).  */

unsigned int
split_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  FOR_ALL_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
	  /* Never split abnormal edges; they cannot carry inserted
	     code.  */
	  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
	    split_edge (e);
	  /* PRE inserts statements to edges and expects that
	     since split_critical_edges was done beforehand, committing edge
	     insertions will not split more edges.  In addition to critical
	     edges we must split edges that have multiple successors and
	     end by control flow statements, such as RESX.
	     Go ahead and split them too.  This matches the logic in
	     gimple_find_edge_insert_loc.  */
	  else if ((!single_pred_p (e->dest)
	            || !gimple_seq_empty_p (phi_nodes (e->dest))
	            || e->dest == EXIT_BLOCK_PTR)
		   && e->src != ENTRY_BLOCK_PTR
	           && !(e->flags & EDGE_ABNORMAL))
	    {
	      gimple_stmt_iterator gsi;

	      /* Only split when the source block ends in a control
		 statement that is not a plain return (a return or
		 __builtin_return can take the insertion itself).  */
	      gsi = gsi_last_bb (e->src);
	      if (!gsi_end_p (gsi)
		  && stmt_ends_bb_p (gsi_stmt (gsi))
		  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
		      && !gimple_call_builtin_p (gsi_stmt (gsi),
						 BUILT_IN_RETURN)))
		split_edge (e);
	    }
	}
    }
  end_recording_case_labels ();
  return 0;
}
7702 
/* Pass descriptor for split_critical_edges; provides the
   PROP_no_crit_edges property relied upon by edge-inserting passes
   such as PRE.  */
struct gimple_opt_pass pass_split_crit_edges =
{
 {
  GIMPLE_PASS,
  "crited",                          /* name */
  OPTGROUP_NONE,                 /* optinfo_flags */
  NULL,                          /* gate */
  split_critical_edges,          /* execute */
  NULL,                          /* sub */
  NULL,                          /* next */
  0,                             /* static_pass_number */
  TV_TREE_SPLIT_EDGES,           /* tv_id */
  PROP_cfg,                      /* properties required */
  PROP_no_crit_edges,            /* properties_provided */
  0,                             /* properties_destroyed */
  0,                             /* todo_flags_start */
  TODO_verify_flow               /* todo_flags_finish */
 }
};
7722 
7723 
7724 /* Build a ternary operation and gimplify it.  Emit code before GSI.
7725    Return the gimple_val holding the result.  */
7726 
7727 tree
7728 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
7729 		 tree type, tree a, tree b, tree c)
7730 {
7731   tree ret;
7732   location_t loc = gimple_location (gsi_stmt (*gsi));
7733 
7734   ret = fold_build3_loc (loc, code, type, a, b, c);
7735   STRIP_NOPS (ret);
7736 
7737   return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7738                                    GSI_SAME_STMT);
7739 }
7740 
7741 /* Build a binary operation and gimplify it.  Emit code before GSI.
7742    Return the gimple_val holding the result.  */
7743 
7744 tree
7745 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
7746 		 tree type, tree a, tree b)
7747 {
7748   tree ret;
7749 
7750   ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
7751   STRIP_NOPS (ret);
7752 
7753   return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7754                                    GSI_SAME_STMT);
7755 }
7756 
7757 /* Build a unary operation and gimplify it.  Emit code before GSI.
7758    Return the gimple_val holding the result.  */
7759 
7760 tree
7761 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
7762 		 tree a)
7763 {
7764   tree ret;
7765 
7766   ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
7767   STRIP_NOPS (ret);
7768 
7769   return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7770                                    GSI_SAME_STMT);
7771 }
7772 
7773 
7774 
/* Emit return warnings: a (currently disabled, see "notyet") warning
   for noreturn functions that do return, and the -Wreturn-type warning
   when control can reach the end of a non-void function via a plain
   "return;".  Always returns 0 (pass-execute convention).  */

static unsigned int
execute_warn_function_return (void)
{
  source_location location;
  gimple last;
  edge e;
  edge_iterator ei;

  /* Give the target a chance to suppress these warnings for this
     function.  */
  if (!targetm.warn_func_return (cfun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
    {
      /* Prefer the location of an explicit return (or __builtin_return)
	 in some predecessor of EXIT; fall back to the end of the
	 function.  */
      location = UNKNOWN_LOCATION;
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  last = last_stmt (e->src);
	  if ((gimple_code (last) == GIMPLE_RETURN
	       || gimple_call_builtin_p (last, BUILT_IN_RETURN))
	      && (location = gimple_location (last)) != UNKNOWN_LOCATION)
	    break;
	}
      if (location == UNKNOWN_LOCATION)
	location = cfun->function_end_locus;

#ifdef notyet
      if (warn_missing_noreturn)
        warning_at (location, 0, "%<noreturn%> function does return");
#endif
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
	   && !TREE_NO_WARNING (cfun->decl)
	   && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  gimple last = last_stmt (e->src);
	  if (gimple_code (last) == GIMPLE_RETURN
	      && gimple_return_retval (last) == NULL
	      && !gimple_no_warning_p (last))
	    {
	      location = gimple_location (last);
	      if (location == UNKNOWN_LOCATION)
		  location = cfun->function_end_locus;
	      warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
	      /* Warn at most once per function.  */
	      TREE_NO_WARNING (cfun->decl) = 1;
	      break;
	    }
	}
    }
  return 0;
}
7835 
7836 
7837 /* Given a basic block B which ends with a conditional and has
7838    precisely two successors, determine which of the edges is taken if
7839    the conditional is true and which is taken if the conditional is
7840    false.  Set TRUE_EDGE and FALSE_EDGE appropriately.  */
7841 
7842 void
7843 extract_true_false_edges_from_block (basic_block b,
7844 				     edge *true_edge,
7845 				     edge *false_edge)
7846 {
7847   edge e = EDGE_SUCC (b, 0);
7848 
7849   if (e->flags & EDGE_TRUE_VALUE)
7850     {
7851       *true_edge = e;
7852       *false_edge = EDGE_SUCC (b, 1);
7853     }
7854   else
7855     {
7856       *false_edge = e;
7857       *true_edge = EDGE_SUCC (b, 1);
7858     }
7859 }
7860 
/* Pass descriptor for execute_warn_function_return; always runs (NULL
   gate), requires the CFG to inspect EXIT's predecessors.  */
struct gimple_opt_pass pass_warn_function_return =
{
 {
  GIMPLE_PASS,
  "*warn_function_return",		/* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  NULL,					/* gate */
  execute_warn_function_return,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};
7880 
7881 /* Emit noreturn warnings.  */
7882 
7883 static unsigned int
7884 execute_warn_function_noreturn (void)
7885 {
7886   if (!TREE_THIS_VOLATILE (current_function_decl)
7887       && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0)
7888     warn_function_noreturn (current_function_decl);
7889   return 0;
7890 }
7891 
7892 static bool
7893 gate_warn_function_noreturn (void)
7894 {
7895   return warn_suggest_attribute_noreturn;
7896 }
7897 
/* Pass descriptor for execute_warn_function_noreturn; gated on
   -Wsuggest-attribute=noreturn and requires the CFG.  */
struct gimple_opt_pass pass_warn_function_noreturn =
{
 {
  GIMPLE_PASS,
  "*warn_function_noreturn",		/* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  gate_warn_function_noreturn,		/* gate */
  execute_warn_function_noreturn,	/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};
7917 
7918 
/* Walk a gimplified function and warn for functions whose return value is
   ignored and attribute((warn_unused_result)) is set.  This is done before
   inlining, so we don't have to worry about that.  */

static void
do_warn_unused_result (gimple_seq seq)
{
  tree fdecl, ftype;
  gimple_stmt_iterator i;

  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
    {
      gimple g = gsi_stmt (i);

      switch (gimple_code (g))
	{
	/* Recurse into every statement container so nested calls are
	   seen too.  */
	case GIMPLE_BIND:
	  do_warn_unused_result (gimple_bind_body (g));
	  break;
	case GIMPLE_TRY:
	  do_warn_unused_result (gimple_try_eval (g));
	  do_warn_unused_result (gimple_try_cleanup (g));
	  break;
	case GIMPLE_CATCH:
	  do_warn_unused_result (gimple_catch_handler (g));
	  break;
	case GIMPLE_EH_FILTER:
	  do_warn_unused_result (gimple_eh_filter_failure (g));
	  break;

	case GIMPLE_CALL:
	  /* A call with an LHS uses its value; internal calls are
	     compiler-generated and exempt.  */
	  if (gimple_call_lhs (g))
	    break;
	  if (gimple_call_internal_p (g))
	    break;

	  /* This is a naked call, as opposed to a GIMPLE_CALL with an
	     LHS.  All calls whose value is ignored should be
	     represented like this.  Look for the attribute.  */
	  fdecl = gimple_call_fndecl (g);
	  ftype = gimple_call_fntype (g);

	  /* The attribute lives on the function type; FDECL may be
	     NULL for indirect calls, hence the two message forms.  */
	  if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
	    {
	      location_t loc = gimple_location (g);

	      if (fdecl)
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of %qD, "
			    "declared with attribute warn_unused_result",
			    fdecl);
	      else
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of function "
			    "declared with attribute warn_unused_result");
	    }
	  break;

	default:
	  /* Not a container, not a call, or a call whose value is used.  */
	  break;
	}
    }
}
7983 
7984 static unsigned int
7985 run_warn_unused_result (void)
7986 {
7987   do_warn_unused_result (gimple_body (current_function_decl));
7988   return 0;
7989 }
7990 
7991 static bool
7992 gate_warn_unused_result (void)
7993 {
7994   return flag_warn_unused_result;
7995 }
7996 
/* Pass descriptor for run_warn_unused_result; gated on
   -Wunused-result and run on the un-CFG'd gimple body
   (PROP_gimple_any), i.e. before inlining.  */
struct gimple_opt_pass pass_warn_unused_result =
{
  {
    GIMPLE_PASS,
    "*warn_unused_result",		/* name */
    OPTGROUP_NONE,                        /* optinfo_flags */
    gate_warn_unused_result,		/* gate */
    run_warn_unused_result,		/* execute */
    NULL,				/* sub */
    NULL,				/* next */
    0,					/* static_pass_number */
    TV_NONE,				/* tv_id */
    PROP_gimple_any,			/* properties_required */
    0,					/* properties_provided */
    0,					/* properties_destroyed */
    0,					/* todo_flags_start */
    0,					/* todo_flags_finish */
  }
};
8016 
8017 
8018 /* Garbage collection support for edge_def.  */
8019 
8020 extern void gt_ggc_mx (tree&);
8021 extern void gt_ggc_mx (gimple&);
8022 extern void gt_ggc_mx (rtx&);
8023 extern void gt_ggc_mx (basic_block&);
8024 
8025 void
8026 gt_ggc_mx (edge_def *e)
8027 {
8028   tree block = LOCATION_BLOCK (e->goto_locus);
8029   gt_ggc_mx (e->src);
8030   gt_ggc_mx (e->dest);
8031   if (current_ir_type () == IR_GIMPLE)
8032     gt_ggc_mx (e->insns.g);
8033   else
8034     gt_ggc_mx (e->insns.r);
8035   gt_ggc_mx (block);
8036 }
8037 
8038 /* PCH support for edge_def.  */
8039 
8040 extern void gt_pch_nx (tree&);
8041 extern void gt_pch_nx (gimple&);
8042 extern void gt_pch_nx (rtx&);
8043 extern void gt_pch_nx (basic_block&);
8044 
8045 void
8046 gt_pch_nx (edge_def *e)
8047 {
8048   tree block = LOCATION_BLOCK (e->goto_locus);
8049   gt_pch_nx (e->src);
8050   gt_pch_nx (e->dest);
8051   if (current_ir_type () == IR_GIMPLE)
8052     gt_pch_nx (e->insns.g);
8053   else
8054     gt_pch_nx (e->insns.r);
8055   gt_pch_nx (block);
8056 }
8057 
8058 void
8059 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
8060 {
8061   tree block = LOCATION_BLOCK (e->goto_locus);
8062   op (&(e->src), cookie);
8063   op (&(e->dest), cookie);
8064   if (current_ir_type () == IR_GIMPLE)
8065     op (&(e->insns.g), cookie);
8066   else
8067     op (&(e->insns.r), cookie);
8068   op (&(block), cookie);
8069 }
8070