/* Control flow functions for trees.
   Copyright (C) 2001-2017 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-general.h"
#include "omp-expand.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"
#include "selftest.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently look up all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
                            const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
                             const locus_discrim_map *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);
static void lower_phi_internal_fn ();

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
                           n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  lower_phi_internal_fn ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
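
/* For example (a sketch, not exact dump output): a loop guarded by
   "#pragma GCC ivdep" arrives here with the annotation call sitting
   immediately before the GIMPLE_COND, roughly

     _2 = .ANNOTATE (_1, annot_expr_ivdep_kind);
     if (_2 != 0) goto <body>; else goto <exit>;

   and the call is rewritten into the plain copy "_2 = _1;" after
   loop->safelen has been set to INT_MAX.  */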

/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}

/* Lower internal PHI function from GIMPLE FE.  */

static void
lower_phi_internal_fn ()
{
  basic_block bb, pred = NULL;
  gimple_stmt_iterator gsi;
  tree lhs;
  gphi *phi_node;
  gimple *stmt;

  /* After edge creation, handle __PHI function from GIMPLE FE.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi);)
	{
	  stmt = gsi_stmt (gsi);
	  if (! gimple_call_internal_p (stmt, IFN_PHI))
	    break;

	  lhs = gimple_call_lhs (stmt);
	  phi_node = create_phi_node (lhs, bb);

	  /* Add arguments to the PHI node.  */
	  for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
	    {
	      tree arg = gimple_call_arg (stmt, i);
	      if (TREE_CODE (arg) == LABEL_DECL)
		pred = label_to_block (arg);
	      else
		{
		  edge e = find_edge (pred, bb);
		  add_phi_arg (phi_node, arg, e, UNKNOWN_LOCATION);
		}
	    }

	  gsi_remove (&gsi, true);
	}
    }
}
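
/* For example (a sketch of GIMPLE frontend input):

     a_1 = __PHI (__BB2: b_3, __BB3: c_4);

   is lowered into a real PHI node for a_1, with argument b_3 on the
   edge from bb 2 and c_4 on the edge from bb 3.  */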

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
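
/* For example, the GNU C statement "goto *p;" becomes a GIMPLE_GOTO
   whose destination is not a LABEL_DECL, so it is classified as a
   computed goto.  */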

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	{
	  gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
	  gimple *stmt;

	  if (gsi_end_p (gsi))
	    return false;
	  stmt = gsi_stmt (gsi);
	  while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
	    {
	      gsi_next (&gsi);
	      if (gsi_end_p (gsi))
		return false;
	      stmt = gsi_stmt (gsi);
	    }
	  return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
	}
    }
  return false;
}


/* Initialize the GF_CALL_CTRL_ALTERING flag, which indicates that the call
   could alter control flow other than via EH.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* A BUILT_IN_RETURN call is the same as a return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
	 as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
	  && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}


/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      gimple *prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make an abnormal goto, use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
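	  /* For example (a sketch), "x = setjmp (buf);" becomes
	       tmp = setjmp (buf);
	       x = tmp;
	     so the old value of x remains available on the abnormal
	     edge out of the call.  */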
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}

/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
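      /* Grow by roughly 25%, rounded up: e.g. 20 -> 25 -> 32 -> 40.  */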
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}


/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block
   containing an IFN_ABNORMAL_DISPATCHER internal call, return
   the dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
	  return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block with
   an ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
                       basic_block for_bb, int *bb_to_omp_idx,
                       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
                                                 1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}

/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   returns 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* A BUILT_IN_RETURN call is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
        gtransaction *txn = as_a <gtransaction *> (last);
	tree label1 = gimple_transaction_label_norm (txn);
	tree label2 = gimple_transaction_label_uninst (txn);

	if (label1)
	  make_edge (bb, label_to_block (label1), EDGE_FALLTHRU);
	if (label2)
	  make_edge (bb, label_to_block (label2),
		     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

	tree label3 = gimple_transaction_label_over (txn);
	if (gimple_transaction_subcode (txn)
	    & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
	  make_edge (bb, label_to_block (label3), EDGE_TM_ABORT);

	fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
	bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                         &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
                                                BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  omp_free_regions ();
}

/* Add SEQ after GSI.  Start a new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);
      bb = bb->next_bb;
    }
  return true;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && filename_cmp (from.file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */
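
/* For example, given "if (p) f (); else g ();" written on one source
   line, the calls to f and g end up in distinct basic blocks sharing
   one locus; different discriminators let sample-based profilers
   attribute samples to each block separately.  */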

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, gimple_location (first)))
	      || (last && same_line_p (locus, gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator = next_discriminator_for_locus (locus);
	      else
		e->dest->discriminator = next_discriminator_for_locus (locus);
	    }
	}
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table as we delete the
   edge-to-cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings reasonably accurate.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Clean up useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it any more,
   since (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted an undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}


/* Clean up redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Clean up all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *txn = as_a <gtransaction *> (stmt);

	    label = gimple_transaction_label_norm (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_norm (txn, new_label);
	      }

	    label = gimple_transaction_label_uninst (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_uninst (txn, new_label);
	      }

	    label = gimple_transaction_label_over (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_over (txn, new_label);
	      }
	  }
	  break;

	default:
	  break;
      }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
	 default case.  */
      if (base_bb == default_bb)
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::add (base_high, 1);

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::eq_p (CASE_LOW (merge_case), bhp1))
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
                               gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	group_case_labels_stmt (as_a <gswitch *> (stmt));
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple *stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

1766   /* If A ends by a statement causing exceptions or something similar, we
1767      cannot merge the blocks.  */
1768   stmt = last_stmt (a);
1769   if (stmt && stmt_ends_bb_p (stmt))
1770     return false;
1771 
1772   /* Do not allow a block with only a non-local label to be merged.  */
1773   if (stmt)
1774     if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1775       if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1776 	return false;
1777 
1778   /* Examine the labels at the beginning of B.  */
1779   for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1780        gsi_next (&gsi))
1781     {
1782       tree lab;
1783       glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1784       if (!label_stmt)
1785 	break;
1786       lab = gimple_label_label (label_stmt);
1787 
1788       /* Do not remove user forced labels or for -O0 any user labels.  */
1789       /* Do not remove user-forced labels or, at -O0, any user labels.  */
1790 	return false;
1791     }
1792 
1793   /* Protect simple loop latches.  We only want to avoid merging
1794      the latch with the loop header or with a block in another
1795      loop in this case.  */
1796   if (current_loops
1797       && b->loop_father->latch == b
1798       && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1799       && (b->loop_father->header == a
1800 	  || b->loop_father != a->loop_father))
1801     return false;
1802 
1803   /* It must be possible to eliminate all phi nodes in B.  If ssa form
1804      is not up-to-date and a name-mapping is registered, we cannot eliminate
1805      any phis.  Symbols marked for renaming are never a problem though.  */
1806   for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1807        gsi_next (&gsi))
1808     {
1809       gphi *phi = gsi.phi ();
1810       /* Technically only new names matter.  */
1811       if (name_registered_for_update_p (PHI_RESULT (phi)))
1812 	return false;
1813     }
1814 
1815   /* When not optimizing, don't merge if we'd lose goto_locus.  */
1816   if (!optimize
1817       && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1818     {
1819       location_t goto_locus = single_succ_edge (a)->goto_locus;
1820       gimple_stmt_iterator prev, next;
1821       prev = gsi_last_nondebug_bb (a);
1822       next = gsi_after_labels (b);
1823       if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1824 	gsi_next_nondebug (&next);
1825       if ((gsi_end_p (prev)
1826 	   || gimple_location (gsi_stmt (prev)) != goto_locus)
1827 	  && (gsi_end_p (next)
1828 	      || gimple_location (gsi_stmt (next)) != goto_locus))
1829 	return false;
1830     }
1831 
1832   return true;
1833 }
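
/* Informal summary of the tests above: A and B are mergeable only when
   B is A's sole successor, A is B's sole predecessor, A does not end in
   a control-altering statement, none of B's labels must be preserved,
   no simple loop latch would be destroyed, and all of B's PHI nodes can
   be eliminated.  */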
1834 
1835 /* Replaces all uses of NAME by VAL.  */
1836 
1837 void
1838 replace_uses_by (tree name, tree val)
1839 {
1840   imm_use_iterator imm_iter;
1841   use_operand_p use;
1842   gimple *stmt;
1843   edge e;
1844 
1845   FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1846     {
1847       /* Mark the block if we change the last stmt in it.  */
1848       if (cfgcleanup_altered_bbs
1849 	  && stmt_ends_bb_p (stmt))
1850 	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1851 
1852       FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1853         {
1854 	  replace_exp (use, val);
1855 
1856 	  if (gimple_code (stmt) == GIMPLE_PHI)
1857 	    {
1858 	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1859 				       PHI_ARG_INDEX_FROM_USE (use));
1860 	      if (e->flags & EDGE_ABNORMAL
1861 		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1862 		{
1863 		  /* This can only occur for virtual operands, since
1864 		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1865 		     would prevent replacement.  */
1866 		  gcc_checking_assert (virtual_operand_p (name));
1867 		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1868 		}
1869 	    }
1870 	}
1871 
1872       if (gimple_code (stmt) != GIMPLE_PHI)
1873 	{
1874 	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1875 	  gimple *orig_stmt = stmt;
1876 	  size_t i;
1877 
1878 	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
1879 	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
1880 	     only change sth from non-invariant to invariant, and only
1881 	     when propagating constants.  */
1882 	  if (is_gimple_min_invariant (val))
1883 	    for (i = 0; i < gimple_num_ops (stmt); i++)
1884 	      {
1885 		tree op = gimple_op (stmt, i);
1886 		/* Operands may be empty here.  For example, the labels
1887 		   of a GIMPLE_COND are nulled out following the creation
1888 		   of the corresponding CFG edges.  */
1889 		if (op && TREE_CODE (op) == ADDR_EXPR)
1890 		  recompute_tree_invariant_for_addr_expr (op);
1891 	      }
1892 
1893 	  if (fold_stmt (&gsi))
1894 	    stmt = gsi_stmt (gsi);
1895 
1896 	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1897 	    gimple_purge_dead_eh_edges (gimple_bb (stmt));
1898 
1899 	  update_stmt (stmt);
1900 	}
1901     }
1902 
1903   gcc_checking_assert (has_zero_uses (name));
1904 
1905   /* Also update the trees stored in loop structures.  */
1906   if (current_loops)
1907     {
1908       struct loop *loop;
1909 
1910       FOR_EACH_LOOP (loop, 0)
1911 	{
1912 	  substitute_in_loop_info (loop, name, val);
1913 	}
1914     }
1915 }
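
/* A minimal usage sketch, with hypothetical SSA names: a caller that has
   proven the copy relation x_2 = x_1 can invoke

     replace_uses_by (x_2, x_1);

   which rewrites every statement and PHI argument mentioning x_2 to use
   x_1, re-folds and updates the touched statements, and leaves x_2 with
   zero uses so it can subsequently be released.  */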
1916 
1917 /* Merge block B into block A.  */
1918 
1919 static void
1920 gimple_merge_blocks (basic_block a, basic_block b)
1921 {
1922   gimple_stmt_iterator last, gsi;
1923   gphi_iterator psi;
1924 
1925   if (dump_file)
1926     fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1927 
1928   /* Remove all single-valued PHI nodes from block B of the form
1929      V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
1930   gsi = gsi_last_bb (a);
1931   for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1932     {
1933       gimple *phi = gsi_stmt (psi);
1934       tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1935       gimple *copy;
1936       bool may_replace_uses = (virtual_operand_p (def)
1937 			       || may_propagate_copy (def, use));
1938 
1939       /* In case we maintain loop closed ssa form, do not propagate arguments
1940 	 of loop exit phi nodes.  */
1941       if (current_loops
1942 	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1943 	  && !virtual_operand_p (def)
1944 	  && TREE_CODE (use) == SSA_NAME
1945 	  && a->loop_father != b->loop_father)
1946 	may_replace_uses = false;
1947 
1948       if (!may_replace_uses)
1949 	{
1950 	  gcc_assert (!virtual_operand_p (def));
1951 
1952 	  /* Note that just emitting the copies is fine -- there is no problem
1953 	     with ordering of phi nodes.  This is because A is the single
1954 	     predecessor of B, therefore results of the phi nodes cannot
1955 	     appear as arguments of the phi nodes.  */
1956 	  copy = gimple_build_assign (def, use);
1957 	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1958           remove_phi_node (&psi, false);
1959 	}
1960       else
1961         {
1962 	  /* If we deal with a PHI for virtual operands, we can simply
1963 	     propagate these without fussing with folding or updating
1964 	     the stmt.  */
1965 	  if (virtual_operand_p (def))
1966 	    {
1967 	      imm_use_iterator iter;
1968 	      use_operand_p use_p;
1969 	      gimple *stmt;
1970 
1971 	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1972 		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1973 		  SET_USE (use_p, use);
1974 
1975 	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1976 		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
1977 	    }
1978 	  else
1979             replace_uses_by (def, use);
1980 
1981           remove_phi_node (&psi, true);
1982         }
1983     }
1984 
1985   /* Ensure that B follows A.  */
1986   move_block_after (b, a);
1987 
1988   gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1989   gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1990 
1991   /* Remove labels from B and set gimple_bb to A for other statements.  */
1992   for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
1993     {
1994       gimple *stmt = gsi_stmt (gsi);
1995       if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1996 	{
1997 	  tree label = gimple_label_label (label_stmt);
1998 	  int lp_nr;
1999 
2000 	  gsi_remove (&gsi, false);
2001 
2002 	  /* Now that we can thread computed gotos, we might have
2003 	     a situation where we have a forced label in block B.
2004 	     However, the label at the start of block B might still be
2005 	     used in other ways (think about the runtime checking for
2006 	     Fortran assigned gotos).  So we cannot just delete the
2007 	     label.  Instead we move the label to the start of block A.  */
2008 	  if (FORCED_LABEL (label))
2009 	    {
2010 	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2011 	      gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2012 	    }
2013 	  /* Other user labels are kept around in the form of a debug stmt.  */
2014 	  else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
2015 	    {
2016 	      gimple *dbg = gimple_build_debug_bind (label,
2017 						     integer_zero_node,
2018 						     stmt);
2019 	      gimple_debug_bind_reset_value (dbg);
2020 	      gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2021 	    }
2022 
2023 	  lp_nr = EH_LANDING_PAD_NR (label);
2024 	  if (lp_nr)
2025 	    {
2026 	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2027 	      lp->post_landing_pad = NULL;
2028 	    }
2029 	}
2030       else
2031 	{
2032 	  gimple_set_bb (stmt, a);
2033 	  gsi_next (&gsi);
2034 	}
2035     }
2036 
2037   /* When merging two BBs, if their counts are different, the larger count
2038      is selected as the new bb count. This is to handle inconsistent
2039      profiles.  */
2040   if (a->loop_father == b->loop_father)
2041     {
2042       a->count = MAX (a->count, b->count);
2043       a->frequency = MAX (a->frequency, b->frequency);
2044     }
2045 
2046   /* Merge the sequences.  */
2047   last = gsi_last_bb (a);
2048   gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2049   set_bb_seq (b, NULL);
2050 
2051   if (cfgcleanup_altered_bbs)
2052     bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2053 }
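
/* For illustration (hypothetical names): if B contains only the
   degenerate PHI node v_3 = PHI <v_1> and a store, merging B into its
   single predecessor A propagates v_1 to all uses of v_3, removes the
   PHI, and appends the store to the end of A's statement sequence.  */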
2054 
2055 
2056 /* If BB has exactly two successors, return the one that is not reached
2057    by a complex edge, if there is one; otherwise return BB.  We use
2058    this in optimizations that use post-dominators for their heuristics,
2059    to catch the cases in C++ where function calls are involved.  */
2060 
2061 basic_block
2062 single_noncomplex_succ (basic_block bb)
2063 {
2064   edge e0, e1;
2065   if (EDGE_COUNT (bb->succs) != 2)
2066     return bb;
2067 
2068   e0 = EDGE_SUCC (bb, 0);
2069   e1 = EDGE_SUCC (bb, 1);
2070   if (e0->flags & EDGE_COMPLEX)
2071     return e1->dest;
2072   if (e1->flags & EDGE_COMPLEX)
2073     return e0->dest;
2074 
2075   return bb;
2076 }
2077 
2078 /* CALL is a GIMPLE_CALL.  Set current_function_calls_* flags.  */
2079 
2080 void
2081 notice_special_calls (gcall *call)
2082 {
2083   int flags = gimple_call_flags (call);
2084 
2085   if (flags & ECF_MAY_BE_ALLOCA)
2086     cfun->calls_alloca = true;
2087   if (flags & ECF_RETURNS_TWICE)
2088     cfun->calls_setjmp = true;
2089 }
2090 
2091 
2092 /* Clear flags set by notice_special_calls.  Used by dead code removal
2093    to update the flags.  */
2094 
2095 void
2096 clear_special_calls (void)
2097 {
2098   cfun->calls_alloca = false;
2099   cfun->calls_setjmp = false;
2100 }
2101 
2102 /* Remove PHI nodes associated with basic block BB and all edges out of BB.  */
2103 
2104 static void
2105 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2106 {
2107   /* Since this block is no longer reachable, we can just delete all
2108      of its PHI nodes.  */
2109   remove_phi_nodes (bb);
2110 
2111   /* Remove edges to BB's successors.  */
2112   while (EDGE_COUNT (bb->succs) > 0)
2113     remove_edge (EDGE_SUCC (bb, 0));
2114 }
2115 
2116 
2117 /* Remove statements of basic block BB.  */
2118 
2119 static void
2120 remove_bb (basic_block bb)
2121 {
2122   gimple_stmt_iterator i;
2123 
2124   if (dump_file)
2125     {
2126       fprintf (dump_file, "Removing basic block %d\n", bb->index);
2127       if (dump_flags & TDF_DETAILS)
2128 	{
2129 	  dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2130 	  fprintf (dump_file, "\n");
2131 	}
2132     }
2133 
2134   if (current_loops)
2135     {
2136       struct loop *loop = bb->loop_father;
2137 
2138       /* If a loop gets removed, clean up the information associated
2139 	 with it.  */
2140       if (loop->latch == bb
2141 	  || loop->header == bb)
2142 	free_numbers_of_iterations_estimates_loop (loop);
2143     }
2144 
2145   /* Remove all the instructions in the block.  */
2146   if (bb_seq (bb) != NULL)
2147     {
2148       /* Walk backwards so as to get a chance to substitute all
2149 	 released DEFs into debug stmts.  See
2150 	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2151 	 details.  */
2152       for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2153 	{
2154 	  gimple *stmt = gsi_stmt (i);
2155 	  glabel *label_stmt = dyn_cast <glabel *> (stmt);
2156 	  if (label_stmt
2157 	      && (FORCED_LABEL (gimple_label_label (label_stmt))
2158 		  || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2159 	    {
2160 	      basic_block new_bb;
2161 	      gimple_stmt_iterator new_gsi;
2162 
2163 	      /* A non-reachable non-local label may still be referenced.
2164 		 But it no longer needs to carry the extra semantics of
2165 		 non-locality.  */
2166 	      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2167 		{
2168 		  DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2169 		  FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2170 		}
2171 
2172 	      new_bb = bb->prev_bb;
2173 	      new_gsi = gsi_after_labels (new_bb);
2174 	      gsi_remove (&i, false);
2175 	      gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2176 	    }
2177 	  else
2178 	    {
2179 	      /* Release SSA definitions.  */
2180 	      release_defs (stmt);
2181 	      gsi_remove (&i, true);
2182 	    }
2183 
2184 	  if (gsi_end_p (i))
2185 	    i = gsi_last_bb (bb);
2186 	  else
2187 	    gsi_prev (&i);
2188 	}
2189     }
2190 
2191   remove_phi_nodes_and_edges_for_unreachable_block (bb);
2192   bb->il.gimple.seq = NULL;
2193   bb->il.gimple.phi_nodes = NULL;
2194 }
2195 
2196 
2197 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2198    predicate VAL, return the edge that will be taken out of the block.
2199    If VAL does not match a unique edge, NULL is returned.  */
2200 
2201 edge
2202 find_taken_edge (basic_block bb, tree val)
2203 {
2204   gimple *stmt;
2205 
2206   stmt = last_stmt (bb);
2207 
2208   gcc_assert (stmt);
2209   gcc_assert (is_ctrl_stmt (stmt));
2210 
2211   if (val == NULL)
2212     return NULL;
2213 
2214   if (!is_gimple_min_invariant (val))
2215     return NULL;
2216 
2217   if (gimple_code (stmt) == GIMPLE_COND)
2218     return find_taken_edge_cond_expr (bb, val);
2219 
2220   if (gimple_code (stmt) == GIMPLE_SWITCH)
2221     return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2222 
2223   if (computed_goto_p (stmt))
2224     {
2225       /* Only optimize if the argument is a label; if the argument is
2226 	 not a label, then we cannot construct a proper CFG.
2227 
2228          It may be the case that we only need to allow the LABEL_REF to
2229          appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2230          appear inside a LABEL_EXPR just to be safe.  */
2231       if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2232 	  && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2233 	return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2234       return NULL;
2235     }
2236 
2237   gcc_unreachable ();
2238 }
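
/* A minimal usage sketch (hypothetical): once a pass proves that the
   predicate of a block's GIMPLE_COND always evaluates to zero, calling

     edge e = find_taken_edge (bb, integer_zero_node);

   yields the false edge, and CFG cleanup can then remove the never-taken
   alternative.  */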
2239 
2240 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2241    statement, determine which of the outgoing edges will be taken out of the
2242    block.  Return NULL if any edge may be taken.  */
2243 
2244 static edge
2245 find_taken_edge_computed_goto (basic_block bb, tree val)
2246 {
2247   basic_block dest;
2248   edge e = NULL;
2249 
2250   dest = label_to_block (val);
2251   if (dest)
2252     {
2253       e = find_edge (bb, dest);
2254       gcc_assert (e != NULL);
2255     }
2256 
2257   return e;
2258 }
2259 
2260 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2261    statement, determine which of the two edges will be taken out of the
2262    block.  Return NULL if either edge may be taken.  */
2263 
2264 static edge
2265 find_taken_edge_cond_expr (basic_block bb, tree val)
2266 {
2267   edge true_edge, false_edge;
2268 
2269   extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2270 
2271   gcc_assert (TREE_CODE (val) == INTEGER_CST);
2272   return (integer_zerop (val) ? false_edge : true_edge);
2273 }
2274 
2275 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2276    statement, determine which edge will be taken out of the block.  Return
2277    NULL if any edge may be taken.  */
2278 
2279 static edge
2280 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2281 			     tree val)
2282 {
2283   basic_block dest_bb;
2284   edge e;
2285   tree taken_case;
2286 
2287   taken_case = find_case_label_for_value (switch_stmt, val);
2288   dest_bb = label_to_block (CASE_LABEL (taken_case));
2289 
2290   e = find_edge (bb, dest_bb);
2291   gcc_assert (e);
2292   return e;
2293 }
2294 
2295 
2296 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2297    We can make optimal use here of the fact that the case labels are
2298    sorted: We can do a binary search for a case matching VAL.  */
2299 
2300 static tree
2301 find_case_label_for_value (gswitch *switch_stmt, tree val)
2302 {
2303   size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2304   tree default_case = gimple_switch_default_label (switch_stmt);
2305 
2306   for (low = 0, high = n; high - low > 1; )
2307     {
2308       size_t i = (high + low) / 2;
2309       tree t = gimple_switch_label (switch_stmt, i);
2310       int cmp;
2311 
2312       /* Cache the result of comparing CASE_LOW and val.  */
2313       cmp = tree_int_cst_compare (CASE_LOW (t), val);
2314 
2315       if (cmp > 0)
2316 	high = i;
2317       else
2318 	low = i;
2319 
2320       if (CASE_HIGH (t) == NULL)
2321 	{
2322 	  /* A single-valued case label.  */
2323 	  if (cmp == 0)
2324 	    return t;
2325 	}
2326       else
2327 	{
2328 	  /* A case range.  We can only handle integer ranges.  */
2329 	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2330 	    return t;
2331 	}
2332     }
2333 
2334   return default_case;
2335 }
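
/* Worked example with hypothetical labels: for the sorted case vector
   case 1:, case 4 ... 6:, case 9: and VAL = 5, the search narrows onto
   the range label 4 ... 6 and returns it since 4 <= 5 <= 6.  For
   VAL = 7 no label matches and the default case is returned.  */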
2336 
2337 
2338 /* Dump a basic block on stderr.  */
2339 
2340 void
2341 gimple_debug_bb (basic_block bb)
2342 {
2343   dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2344 }
2345 
2346 
2347 /* Dump basic block with index N on stderr.  */
2348 
2349 basic_block
2350 gimple_debug_bb_n (int n)
2351 {
2352   gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2353   return BASIC_BLOCK_FOR_FN (cfun, n);
2354 }
2355 
2356 
2357 /* Dump the CFG on stderr.
2358 
2359    FLAGS are the same used by the tree dumping functions
2360    (see TDF_* in dumpfile.h).  */
2361 
2362 void
2363 gimple_debug_cfg (int flags)
2364 {
2365   gimple_dump_cfg (stderr, flags);
2366 }
2367 
2368 
2369 /* Dump the program showing basic block boundaries on the given FILE.
2370 
2371    FLAGS are the same used by the tree dumping functions (see TDF_* in
2372    tree.h).  */
2373 
2374 void
2375 gimple_dump_cfg (FILE *file, int flags)
2376 {
2377   if (flags & TDF_DETAILS)
2378     {
2379       dump_function_header (file, current_function_decl, flags);
2380       fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2381 	       n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2382 	       last_basic_block_for_fn (cfun));
2383 
2384       brief_dump_cfg (file, flags | TDF_COMMENT);
2385       fprintf (file, "\n");
2386     }
2387 
2388   if (flags & TDF_STATS)
2389     dump_cfg_stats (file);
2390 
2391   dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2392 }
2393 
2394 
2395 /* Dump CFG statistics on FILE.  */
2396 
2397 void
2398 dump_cfg_stats (FILE *file)
2399 {
2400   static long max_num_merged_labels = 0;
2401   unsigned long size, total = 0;
2402   long num_edges;
2403   basic_block bb;
2404   const char * const fmt_str   = "%-30s%-13s%12s\n";
2405   const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2406   const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2407   const char * const fmt_str_3 = "%-43s%11lu%c\n";
2408   const char *funcname = current_function_name ();
2409 
2410   fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2411 
2412   fprintf (file, "---------------------------------------------------------\n");
2413   fprintf (file, fmt_str, "", "  Number of  ", "Memory");
2414   fprintf (file, fmt_str, "", "  instances  ", "used ");
2415   fprintf (file, "---------------------------------------------------------\n");
2416 
2417   size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2418   total += size;
2419   fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2420 	   SCALE (size), LABEL (size));
2421 
2422   num_edges = 0;
2423   FOR_EACH_BB_FN (bb, cfun)
2424     num_edges += EDGE_COUNT (bb->succs);
2425   size = num_edges * sizeof (struct edge_def);
2426   total += size;
2427   fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2428 
2429   fprintf (file, "---------------------------------------------------------\n");
2430   fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2431 	   LABEL (total));
2432   fprintf (file, "---------------------------------------------------------\n");
2433   fprintf (file, "\n");
2434 
2435   if (cfg_stats.num_merged_labels > max_num_merged_labels)
2436     max_num_merged_labels = cfg_stats.num_merged_labels;
2437 
2438   fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2439 	   cfg_stats.num_merged_labels, max_num_merged_labels);
2440 
2441   fprintf (file, "\n");
2442 }
2443 
2444 
2445 /* Dump CFG statistics on stderr.  Keep extern so that it's always
2446    linked in the final executable.  */
2447 
2448 DEBUG_FUNCTION void
2449 debug_cfg_stats (void)
2450 {
2451   dump_cfg_stats (stderr);
2452 }
2453 
2454 /*---------------------------------------------------------------------------
2455 			     Miscellaneous helpers
2456 ---------------------------------------------------------------------------*/
2457 
2458 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2459    flow.  Transfers of control flow associated with EH are excluded.  */
2460 
2461 static bool
2462 call_can_make_abnormal_goto (gimple *t)
2463 {
2464   /* If the function has no non-local labels, then a call cannot make an
2465      abnormal transfer of control.  */
2466   if (!cfun->has_nonlocal_label
2467       && !cfun->calls_setjmp)
2468    return false;
2469 
2470   /* Likewise if the call has no side effects.  */
2471   if (!gimple_has_side_effects (t))
2472     return false;
2473 
2474   /* Likewise if the called function is leaf.  */
2475   if (gimple_call_flags (t) & ECF_LEAF)
2476     return false;
2477 
2478   return true;
2479 }
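
/* Informal restatement of the above: in a function that calls setjmp or
   contains a nonlocal label, every call that has side effects and is not
   ECF_LEAF is conservatively assumed to be able to transfer control
   abnormally; in all other functions no call can.  */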
2480 
2481 
2482 /* Return true if T can make an abnormal transfer of control flow.
2483    Transfers of control flow associated with EH are excluded.  */
2484 
2485 bool
2486 stmt_can_make_abnormal_goto (gimple *t)
2487 {
2488   if (computed_goto_p (t))
2489     return true;
2490   if (is_gimple_call (t))
2491     return call_can_make_abnormal_goto (t);
2492   return false;
2493 }
2494 
2495 
2496 /* Return true if T represents a stmt that always transfers control.  */
2497 
2498 bool
2499 is_ctrl_stmt (gimple *t)
2500 {
2501   switch (gimple_code (t))
2502     {
2503     case GIMPLE_COND:
2504     case GIMPLE_SWITCH:
2505     case GIMPLE_GOTO:
2506     case GIMPLE_RETURN:
2507     case GIMPLE_RESX:
2508       return true;
2509     default:
2510       return false;
2511     }
2512 }
2513 
2514 
2515 /* Return true if T is a statement that may alter the flow of control
2516    (e.g., a call to a non-returning function).  */
2517 
2518 bool
2519 is_ctrl_altering_stmt (gimple *t)
2520 {
2521   gcc_assert (t);
2522 
2523   switch (gimple_code (t))
2524     {
2525     case GIMPLE_CALL:
2526       /* Per stmt call flag indicates whether the call could alter
2527 	 control flow.  */
2528       if (gimple_call_ctrl_altering_p (t))
2529 	return true;
2530       break;
2531 
2532     case GIMPLE_EH_DISPATCH:
2533       /* EH_DISPATCH branches to the individual catch handlers at
2534 	 this level of a try or allowed-exceptions region.  It can
2535 	 fallthru to the next statement as well.  */
2536       return true;
2537 
2538     case GIMPLE_ASM:
2539       if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2540 	return true;
2541       break;
2542 
2543     CASE_GIMPLE_OMP:
2544       /* OpenMP directives alter control flow.  */
2545       return true;
2546 
2547     case GIMPLE_TRANSACTION:
2548       /* A transaction start alters control flow.  */
2549       return true;
2550 
2551     default:
2552       break;
2553     }
2554 
2555   /* If a statement can throw, it alters control flow.  */
2556   return stmt_can_throw_internal (t);
2557 }
2558 
2559 
2560 /* Return true if T is a simple local goto.  */
2561 
2562 bool
2563 simple_goto_p (gimple *t)
2564 {
2565   return (gimple_code (t) == GIMPLE_GOTO
2566 	  && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2567 }
2568 
2569 
2570 /* Return true if STMT should start a new basic block.  PREV_STMT is
2571    the statement preceding STMT.  It is used when STMT is a label or a
2572    case label.  Labels should only start a new basic block if their
2573    previous statement wasn't a label.  Otherwise, sequence of labels
2574    previous statement wasn't a label.  Otherwise, a sequence of labels
2575    label.  */
2576 
2577 static inline bool
2578 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2579 {
2580   if (stmt == NULL)
2581     return false;
2582 
2583   /* Labels start a new basic block only if the preceding statement
2584      wasn't a label of the same type.  This prevents the creation of
2585      consecutive blocks that have nothing but a single label.  */
2586   if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2587     {
2588       /* Nonlocal and computed GOTO targets always start a new block.  */
2589       if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2590 	  || FORCED_LABEL (gimple_label_label (label_stmt)))
2591 	return true;
2592 
2593       if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2594 	{
2595 	  if (DECL_NONLOCAL (gimple_label_label (
2596 			       as_a <glabel *> (prev_stmt))))
2597 	    return true;
2598 
2599 	  cfg_stats.num_merged_labels++;
2600 	  return false;
2601 	}
2602       else
2603 	return true;
2604     }
2605   else if (gimple_code (stmt) == GIMPLE_CALL)
2606     {
2607       if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2608 	/* setjmp acts similarly to a nonlocal GOTO target and thus should
2609 	   start a new block.  */
2610 	return true;
2611       if (gimple_call_internal_p (stmt, IFN_PHI)
2612 	  && prev_stmt
2613 	  && gimple_code (prev_stmt) != GIMPLE_LABEL
2614 	  && (gimple_code (prev_stmt) != GIMPLE_CALL
2615 	      || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2616 	/* PHI nodes start a new block unless preceded by a label
2617 	   or another PHI.  */
2618 	return true;
2619     }
2620 
2621   return false;
2622 }
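
/* For example, with hypothetical labels, in the sequence

     L1:
     L2:
     x_1 = ...;

   only L1 starts a new basic block; L2 falls into the same block and is
   counted in cfg_stats.num_merged_labels.  This merging does not happen
   when a label is nonlocal or a computed-goto target, both of which
   always force a new block.  */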
2623 
2624 
2625 /* Return true if T should end a basic block.  */
2626 
2627 bool
2628 stmt_ends_bb_p (gimple *t)
2629 {
2630   return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2631 }
2632 
2633 /* Remove block annotations and other data structures.  */
2634 
2635 void
2636 delete_tree_cfg_annotations (struct function *fn)
2637 {
2638   vec_free (label_to_block_map_for_fn (fn));
2639 }
2640 
2641 /* Return the virtual phi in BB.  */
2642 
2643 gphi *
2644 get_virtual_phi (basic_block bb)
2645 {
2646   for (gphi_iterator gsi = gsi_start_phis (bb);
2647        !gsi_end_p (gsi);
2648        gsi_next (&gsi))
2649     {
2650       gphi *phi = gsi.phi ();
2651 
2652       if (virtual_operand_p (PHI_RESULT (phi)))
2653 	return phi;
2654     }
2655 
2656   return NULL;
2657 }
2658 
2659 /* Return the first statement in basic block BB.  */
2660 
2661 gimple *
2662 first_stmt (basic_block bb)
2663 {
2664   gimple_stmt_iterator i = gsi_start_bb (bb);
2665   gimple *stmt = NULL;
2666 
2667   while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2668     {
2669       gsi_next (&i);
2670       stmt = NULL;
2671     }
2672   return stmt;
2673 }
2674 
2675 /* Return the first non-label statement in basic block BB.  */
2676 
2677 static gimple *
2678 first_non_label_stmt (basic_block bb)
2679 {
2680   gimple_stmt_iterator i = gsi_start_bb (bb);
2681   while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2682     gsi_next (&i);
2683   return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2684 }
2685 
2686 /* Return the last statement in basic block BB.  */
2687 
2688 gimple *
2689 last_stmt (basic_block bb)
2690 {
2691   gimple_stmt_iterator i = gsi_last_bb (bb);
2692   gimple *stmt = NULL;
2693 
2694   while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2695     {
2696       gsi_prev (&i);
2697       stmt = NULL;
2698     }
2699   return stmt;
2700 }
2701 
2702 /* Return the last statement of an otherwise empty block.  Return NULL
2703    if the block is totally empty, or if it contains more than one
2704    statement.  */
2705 
2706 gimple *
2707 last_and_only_stmt (basic_block bb)
2708 {
2709   gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2710   gimple *last, *prev;
2711 
2712   if (gsi_end_p (i))
2713     return NULL;
2714 
2715   last = gsi_stmt (i);
2716   gsi_prev_nondebug (&i);
2717   if (gsi_end_p (i))
2718     return last;
2719 
2720   /* Empty statements should no longer appear in the instruction stream.
2721      Everything that might have appeared before should be deleted by
2722      remove_useless_stmts, and the optimizers should just gsi_remove
2723      instead of smashing with build_empty_stmt.
2724 
2725      Thus the only thing that should appear here in a block containing
2726      one executable statement is a label.  */
2727   prev = gsi_stmt (i);
2728   if (gimple_code (prev) == GIMPLE_LABEL)
2729     return last;
2730   else
2731     return NULL;
2732 }
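
/* Example (hypothetical): for a block containing just

     L1:
     return x_1;

   last_and_only_stmt returns the GIMPLE_RETURN, since a leading label
   and any debug statements are ignored; a block with two executable
   statements yields NULL.  */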
2733 
2734 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE.  */
2735 
2736 static void
2737 reinstall_phi_args (edge new_edge, edge old_edge)
2738 {
2739   edge_var_map *vm;
2740   int i;
2741   gphi_iterator phis;
2742 
2743   vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2744   if (!v)
2745     return;
2746 
2747   for (i = 0, phis = gsi_start_phis (new_edge->dest);
2748        v->iterate (i, &vm) && !gsi_end_p (phis);
2749        i++, gsi_next (&phis))
2750     {
2751       gphi *phi = phis.phi ();
2752       tree result = redirect_edge_var_map_result (vm);
2753       tree arg = redirect_edge_var_map_def (vm);
2754 
2755       gcc_assert (result == gimple_phi_result (phi));
2756 
2757       add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2758     }
2759 
2760   redirect_edge_var_map_clear (old_edge);
2761 }
2762 
2763 /* Returns the basic block after which the new basic block created
2764    by splitting edge EDGE_IN should be placed.  Tries to keep the new block
2765    near its "logical" location.  This is of most help to humans looking
2766    at debugging dumps.  */
2767 
2768 basic_block
2769 split_edge_bb_loc (edge edge_in)
2770 {
2771   basic_block dest = edge_in->dest;
2772   basic_block dest_prev = dest->prev_bb;
2773 
2774   if (dest_prev)
2775     {
2776       edge e = find_edge (dest_prev, dest);
2777       if (e && !(e->flags & EDGE_COMPLEX))
2778 	return edge_in->src;
2779     }
2780   return dest_prev;
2781 }
2782 
2783 /* Split a (typically critical) edge EDGE_IN.  Return the new block.
2784    Abort on abnormal edges.  */
2785 
2786 static basic_block
2787 gimple_split_edge (edge edge_in)
2788 {
2789   basic_block new_bb, after_bb, dest;
2790   edge new_edge, e;
2791 
2792   /* Abnormal edges cannot be split.  */
2793   gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2794 
2795   dest = edge_in->dest;
2796 
2797   after_bb = split_edge_bb_loc (edge_in);
2798 
2799   new_bb = create_empty_bb (after_bb);
2800   new_bb->frequency = EDGE_FREQUENCY (edge_in);
2801   new_bb->count = edge_in->count;
2802   new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2803   new_edge->probability = REG_BR_PROB_BASE;
2804   new_edge->count = edge_in->count;
2805 
2806   e = redirect_edge_and_branch (edge_in, new_bb);
2807   gcc_assert (e == edge_in);
2808   reinstall_phi_args (new_edge, e);
2809 
2810   return new_bb;
2811 }
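
/* Sketch of the effect, with hypothetical block numbers: splitting the
   critical edge 2->4 in

     bb 2 -> bb 4
     bb 3 -> bb 4

   creates a new empty block N so that bb 2 -> N -> bb 4, redirects the
   branch in bb 2 to N, and reinstalls the PHI arguments queued on the
   old edge onto the new fallthrough edge N -> bb 4.  */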
2812 
2813 
2814 /* Verify properties of the address expression T with base object BASE.  */
2815 
2816 static tree
2817 verify_address (tree t, tree base)
2818 {
2819   bool old_constant;
2820   bool old_side_effects;
2821   bool new_constant;
2822   bool new_side_effects;
2823 
2824   old_constant = TREE_CONSTANT (t);
2825   old_side_effects = TREE_SIDE_EFFECTS (t);
2826 
2827   recompute_tree_invariant_for_addr_expr (t);
2828   new_side_effects = TREE_SIDE_EFFECTS (t);
2829   new_constant = TREE_CONSTANT (t);
2830 
2831   if (old_constant != new_constant)
2832     {
2833       error ("constant not recomputed when ADDR_EXPR changed");
2834       return t;
2835     }
2836   if (old_side_effects != new_side_effects)
2837     {
2838       error ("side effects not recomputed when ADDR_EXPR changed");
2839       return t;
2840     }
2841 
2842   if (!(VAR_P (base)
2843 	|| TREE_CODE (base) == PARM_DECL
2844 	|| TREE_CODE (base) == RESULT_DECL))
2845     return NULL_TREE;
2846 
2847   if (DECL_GIMPLE_REG_P (base))
2848     {
2849       error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2850       return base;
2851     }
2852 
2853   return NULL_TREE;
2854 }
2855 
2856 /* Callback for walk_tree, check that all elements with address taken are
2857    properly noticed as such.  The DATA is an int* that is 1 if TP was seen
2858    inside a PHI node.  */
2859 
2860 static tree
2861 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2862 {
2863   tree t = *tp, x;
2864 
2865   if (TYPE_P (t))
2866     *walk_subtrees = 0;
2867 
2868   /* Check operand N for being valid GIMPLE and give error MSG if not.  */
2869 #define CHECK_OP(N, MSG) \
2870   do { if (!is_gimple_val (TREE_OPERAND (t, N)))		\
2871        { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2872 
2873   switch (TREE_CODE (t))
2874     {
2875     case SSA_NAME:
2876       if (SSA_NAME_IN_FREE_LIST (t))
2877 	{
2878 	  error ("SSA name in freelist but still referenced");
2879 	  return *tp;
2880 	}
2881       break;
2882 
2883     case PARM_DECL:
2884     case VAR_DECL:
2885     case RESULT_DECL:
2886       {
2887 	tree context = decl_function_context (t);
2888 	if (context != cfun->decl
2889 	    && !SCOPE_FILE_SCOPE_P (context)
2890 	    && !TREE_STATIC (t)
2891 	    && !DECL_EXTERNAL (t))
2892 	  {
2893 	    error ("Local declaration from a different function");
2894 	    return t;
2895 	  }
2896       }
2897       break;
2898 
2899     case INDIRECT_REF:
2900       error ("INDIRECT_REF in gimple IL");
2901       return t;
2902 
2903     case MEM_REF:
2904       x = TREE_OPERAND (t, 0);
2905       if (!POINTER_TYPE_P (TREE_TYPE (x))
2906 	  || !is_gimple_mem_ref_addr (x))
2907 	{
2908 	  error ("invalid first operand of MEM_REF");
2909 	  return x;
2910 	}
2911       if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2912 	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2913 	{
2914 	  error ("invalid offset operand of MEM_REF");
2915 	  return TREE_OPERAND (t, 1);
2916 	}
2917       if (TREE_CODE (x) == ADDR_EXPR)
2918 	{
2919 	  tree va = verify_address (x, TREE_OPERAND (x, 0));
2920 	  if (va)
2921 	    return va;
2922 	  x = TREE_OPERAND (x, 0);
2923 	}
2924       walk_tree (&x, verify_expr, data, NULL);
2925       *walk_subtrees = 0;
2926       break;
2927 
2928     case ASSERT_EXPR:
2929       x = fold (ASSERT_EXPR_COND (t));
2930       if (x == boolean_false_node)
2931 	{
2932 	  error ("ASSERT_EXPR with an always-false condition");
2933 	  return *tp;
2934 	}
2935       break;
2936 
2937     case MODIFY_EXPR:
2938       error ("MODIFY_EXPR not expected while having tuples");
2939       return *tp;
2940 
2941     case ADDR_EXPR:
2942       {
2943 	tree tem;
2944 
2945 	gcc_assert (is_gimple_address (t));
2946 
2947 	/* Skip any references (they will be checked when we recurse down the
2948 	   tree) and ensure that any variable used as a prefix is marked
2949 	   addressable.  */
2950 	for (x = TREE_OPERAND (t, 0);
2951 	     handled_component_p (x);
2952 	     x = TREE_OPERAND (x, 0))
2953 	  ;
2954 
2955 	if ((tem = verify_address (t, x)))
2956 	  return tem;
2957 
2958 	if (!(VAR_P (x)
2959 	      || TREE_CODE (x) == PARM_DECL
2960 	      || TREE_CODE (x) == RESULT_DECL))
2961 	  return NULL;
2962 
2963 	if (!TREE_ADDRESSABLE (x))
2964 	  {
2965 	    error ("address taken, but ADDRESSABLE bit not set");
2966 	    return x;
2967 	  }
2968 
2969 	break;
2970       }
2971 
2972     case COND_EXPR:
2973       x = COND_EXPR_COND (t);
2974       if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2975 	{
2976 	  error ("non-integral used in condition");
2977 	  return x;
2978 	}
2979       if (!is_gimple_condexpr (x))
2980         {
2981 	  error ("invalid conditional operand");
2982 	  return x;
2983 	}
2984       break;
2985 
2986     case NON_LVALUE_EXPR:
2987     case TRUTH_NOT_EXPR:
2988       gcc_unreachable ();
2989 
2990     CASE_CONVERT:
2991     case FIX_TRUNC_EXPR:
2992     case FLOAT_EXPR:
2993     case NEGATE_EXPR:
2994     case ABS_EXPR:
2995     case BIT_NOT_EXPR:
2996       CHECK_OP (0, "invalid operand to unary operator");
2997       break;
2998 
2999     case REALPART_EXPR:
3000     case IMAGPART_EXPR:
3001     case BIT_FIELD_REF:
3002       if (!is_gimple_reg_type (TREE_TYPE (t)))
3003 	{
3004 	  error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3005 	  return t;
3006 	}
3007 
3008       if (TREE_CODE (t) == BIT_FIELD_REF)
3009 	{
3010 	  tree t0 = TREE_OPERAND (t, 0);
3011 	  tree t1 = TREE_OPERAND (t, 1);
3012 	  tree t2 = TREE_OPERAND (t, 2);
3013 	  if (!tree_fits_uhwi_p (t1)
3014 	      || !tree_fits_uhwi_p (t2))
3015 	    {
3016 	      error ("invalid position or size operand to BIT_FIELD_REF");
3017 	      return t;
3018 	    }
3019 	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
3020 	      && (TYPE_PRECISION (TREE_TYPE (t))
3021 		  != tree_to_uhwi (t1)))
3022 	    {
3023 	      error ("integral result type precision does not match "
3024 		     "field size of BIT_FIELD_REF");
3025 	      return t;
3026 	    }
3027 	  else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
3028 		   && TYPE_MODE (TREE_TYPE (t)) != BLKmode
3029 		   && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t)))
3030 		       != tree_to_uhwi (t1)))
3031 	    {
3032 	      error ("mode size of non-integral result does not "
3033 		     "match field size of BIT_FIELD_REF");
3034 	      return t;
3035 	    }
3036 	  if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
3037 	      && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
3038 		  > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
3039 	    {
3040 	      error ("position plus size exceeds size of referenced object in "
3041 		     "BIT_FIELD_REF");
3042 	      return t;
3043 	    }
3044 	}
3045       t = TREE_OPERAND (t, 0);
3046 
3047       /* Fall-through.  */
3048     case COMPONENT_REF:
3049     case ARRAY_REF:
3050     case ARRAY_RANGE_REF:
3051     case VIEW_CONVERT_EXPR:
3052       /* We have a nest of references.  Verify that each of the operands
3053 	 that determine where to reference is either a constant or a variable,
3054 	 verify that the base is valid, and then show we've already checked
3055 	 the subtrees.  */
3056       while (handled_component_p (t))
3057 	{
3058 	  if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3059 	    CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3060 	  else if (TREE_CODE (t) == ARRAY_REF
3061 		   || TREE_CODE (t) == ARRAY_RANGE_REF)
3062 	    {
3063 	      CHECK_OP (1, "invalid array index");
3064 	      if (TREE_OPERAND (t, 2))
3065 		CHECK_OP (2, "invalid array lower bound");
3066 	      if (TREE_OPERAND (t, 3))
3067 		CHECK_OP (3, "invalid array stride");
3068 	    }
3069 	  else if (TREE_CODE (t) == BIT_FIELD_REF
3070 		   || TREE_CODE (t) == REALPART_EXPR
3071 		   || TREE_CODE (t) == IMAGPART_EXPR)
3072 	    {
3073 	      error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3074 		     "REALPART_EXPR");
3075 	      return t;
3076 	    }
3077 
3078 	  t = TREE_OPERAND (t, 0);
3079 	}
3080 
3081       if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3082 	{
3083 	  error ("invalid reference prefix");
3084 	  return t;
3085 	}
3086       walk_tree (&t, verify_expr, data, NULL);
3087       *walk_subtrees = 0;
3088       break;
3089     case PLUS_EXPR:
3090     case MINUS_EXPR:
3091       /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer
3092 	 arithmetic should be done using POINTER_PLUS_EXPR.  */
3093       if (POINTER_TYPE_P (TREE_TYPE (t)))
3094 	{
3095 	  error ("invalid operand to plus/minus, type is a pointer");
3096 	  return t;
3097 	}
3098       CHECK_OP (0, "invalid operand to binary operator");
3099       CHECK_OP (1, "invalid operand to binary operator");
3100       break;
3101 
3102     case POINTER_PLUS_EXPR:
3103       /* Check to make sure the first operand is a pointer or reference type. */
3104       if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3105 	{
3106 	  error ("invalid operand to pointer plus, first operand is not a pointer");
3107 	  return t;
3108 	}
3109       /* Check to make sure the second operand is a ptrofftype.  */
3110       if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3111 	{
3112 	  error ("invalid operand to pointer plus, second operand is not an "
3113 		 "integer type of appropriate width");
3114 	  return t;
3115 	}
3116       /* FALLTHROUGH */
3117     case LT_EXPR:
3118     case LE_EXPR:
3119     case GT_EXPR:
3120     case GE_EXPR:
3121     case EQ_EXPR:
3122     case NE_EXPR:
3123     case UNORDERED_EXPR:
3124     case ORDERED_EXPR:
3125     case UNLT_EXPR:
3126     case UNLE_EXPR:
3127     case UNGT_EXPR:
3128     case UNGE_EXPR:
3129     case UNEQ_EXPR:
3130     case LTGT_EXPR:
3131     case MULT_EXPR:
3132     case TRUNC_DIV_EXPR:
3133     case CEIL_DIV_EXPR:
3134     case FLOOR_DIV_EXPR:
3135     case ROUND_DIV_EXPR:
3136     case TRUNC_MOD_EXPR:
3137     case CEIL_MOD_EXPR:
3138     case FLOOR_MOD_EXPR:
3139     case ROUND_MOD_EXPR:
3140     case RDIV_EXPR:
3141     case EXACT_DIV_EXPR:
3142     case MIN_EXPR:
3143     case MAX_EXPR:
3144     case LSHIFT_EXPR:
3145     case RSHIFT_EXPR:
3146     case LROTATE_EXPR:
3147     case RROTATE_EXPR:
3148     case BIT_IOR_EXPR:
3149     case BIT_XOR_EXPR:
3150     case BIT_AND_EXPR:
3151       CHECK_OP (0, "invalid operand to binary operator");
3152       CHECK_OP (1, "invalid operand to binary operator");
3153       break;
3154 
3155     case CONSTRUCTOR:
3156       if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3157 	*walk_subtrees = 0;
3158       break;
3159 
3160     case CASE_LABEL_EXPR:
3161       if (CASE_CHAIN (t))
3162 	{
3163 	  error ("invalid CASE_CHAIN");
3164 	  return t;
3165 	}
3166       break;
3167 
3168     default:
3169       break;
3170     }
3171   return NULL;
3172 
3173 #undef CHECK_OP
3174 }
3175 
3176 
3177 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3178    Returns true if there is an error, otherwise false.  */
3179 
3180 static bool
3181 verify_types_in_gimple_min_lval (tree expr)
3182 {
3183   tree op;
3184 
3185   if (is_gimple_id (expr))
3186     return false;
3187 
3188   if (TREE_CODE (expr) != TARGET_MEM_REF
3189       && TREE_CODE (expr) != MEM_REF)
3190     {
3191       error ("invalid expression for min lvalue");
3192       return true;
3193     }
3194 
3195   /* TARGET_MEM_REFs are strange beasts.  */
3196   if (TREE_CODE (expr) == TARGET_MEM_REF)
3197     return false;
3198 
3199   op = TREE_OPERAND (expr, 0);
3200   if (!is_gimple_val (op))
3201     {
3202       error ("invalid operand in indirect reference");
3203       debug_generic_stmt (op);
3204       return true;
3205     }
3206   /* Memory references now generally can involve a value conversion.  */
3207 
3208   return false;
3209 }
3210 
3211 /* Verify if EXPR is a valid GIMPLE reference expression.  If
3212    REQUIRE_LVALUE is true verifies it is an lvalue.  Returns true
3213    if there is an error, otherwise false.  */
3214 
3215 static bool
3216 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3217 {
3218   while (handled_component_p (expr))
3219     {
3220       tree op = TREE_OPERAND (expr, 0);
3221 
3222       if (TREE_CODE (expr) == ARRAY_REF
3223 	  || TREE_CODE (expr) == ARRAY_RANGE_REF)
3224 	{
3225 	  if (!is_gimple_val (TREE_OPERAND (expr, 1))
3226 	      || (TREE_OPERAND (expr, 2)
3227 		  && !is_gimple_val (TREE_OPERAND (expr, 2)))
3228 	      || (TREE_OPERAND (expr, 3)
3229 		  && !is_gimple_val (TREE_OPERAND (expr, 3))))
3230 	    {
3231 	      error ("invalid operands to array reference");
3232 	      debug_generic_stmt (expr);
3233 	      return true;
3234 	    }
3235 	}
3236 
3237       /* Verify if the reference array element types are compatible.  */
3238       if (TREE_CODE (expr) == ARRAY_REF
3239 	  && !useless_type_conversion_p (TREE_TYPE (expr),
3240 					 TREE_TYPE (TREE_TYPE (op))))
3241 	{
3242 	  error ("type mismatch in array reference");
3243 	  debug_generic_stmt (TREE_TYPE (expr));
3244 	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3245 	  return true;
3246 	}
3247       if (TREE_CODE (expr) == ARRAY_RANGE_REF
3248 	  && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3249 					 TREE_TYPE (TREE_TYPE (op))))
3250 	{
3251 	  error ("type mismatch in array range reference");
3252 	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3253 	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3254 	  return true;
3255 	}
3256 
3257       if ((TREE_CODE (expr) == REALPART_EXPR
3258 	   || TREE_CODE (expr) == IMAGPART_EXPR)
3259 	  && !useless_type_conversion_p (TREE_TYPE (expr),
3260 					 TREE_TYPE (TREE_TYPE (op))))
3261 	{
3262 	  error ("type mismatch in real/imagpart reference");
3263 	  debug_generic_stmt (TREE_TYPE (expr));
3264 	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3265 	  return true;
3266 	}
3267 
3268       if (TREE_CODE (expr) == COMPONENT_REF
3269 	  && !useless_type_conversion_p (TREE_TYPE (expr),
3270 					 TREE_TYPE (TREE_OPERAND (expr, 1))))
3271 	{
3272 	  error ("type mismatch in component reference");
3273 	  debug_generic_stmt (TREE_TYPE (expr));
3274 	  debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3275 	  return true;
3276 	}
3277 
3278       if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3279 	{
3280 	  /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3281 	     that their operand is not an SSA name or an invariant when
3282 	     requiring an lvalue (this usually means there is an SRA or IPA-SRA
3283 	     bug).  Otherwise there is nothing to verify, gross mismatches at
3284 	     most invoke undefined behavior.  */
3285 	  if (require_lvalue
3286 	      && (TREE_CODE (op) == SSA_NAME
3287 		  || is_gimple_min_invariant (op)))
3288 	    {
3289 	      error ("conversion of an SSA_NAME on the left hand side");
3290 	      debug_generic_stmt (expr);
3291 	      return true;
3292 	    }
3293 	  else if (TREE_CODE (op) == SSA_NAME
3294 		   && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3295 	    {
3296 	      error ("conversion of register to a different size");
3297 	      debug_generic_stmt (expr);
3298 	      return true;
3299 	    }
3300 	  else if (!handled_component_p (op))
3301 	    return false;
3302 	}
3303 
3304       expr = op;
3305     }
3306 
3307   if (TREE_CODE (expr) == MEM_REF)
3308     {
3309       if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3310 	{
3311 	  error ("invalid address operand in MEM_REF");
3312 	  debug_generic_stmt (expr);
3313 	  return true;
3314 	}
3315       if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3316 	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3317 	{
3318 	  error ("invalid offset operand in MEM_REF");
3319 	  debug_generic_stmt (expr);
3320 	  return true;
3321 	}
3322     }
3323   else if (TREE_CODE (expr) == TARGET_MEM_REF)
3324     {
3325       if (!TMR_BASE (expr)
3326 	  || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3327 	{
3328 	  error ("invalid address operand in TARGET_MEM_REF");
3329 	  return true;
3330 	}
3331       if (!TMR_OFFSET (expr)
3332 	  || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3333 	  || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3334 	{
3335 	  error ("invalid offset operand in TARGET_MEM_REF");
3336 	  debug_generic_stmt (expr);
3337 	  return true;
3338 	}
3339     }
3340 
3341   return ((require_lvalue || !is_gimple_min_invariant (expr))
3342 	  && verify_types_in_gimple_min_lval (expr));
3343 }
3344 
3345 /* Returns true if there is a pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3346    list of pointer-to types that is trivially convertible to DEST.  */
3347 
3348 static bool
3349 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3350 {
3351   tree src;
3352 
3353   if (!TYPE_POINTER_TO (src_obj))
3354     return true;
3355 
3356   for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3357     if (useless_type_conversion_p (dest, src))
3358       return true;
3359 
3360   return false;
3361 }
3362 
3363 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3364    from TYPE2 can be handled by FIXED_CONVERT_EXPR.  */
3365 
3366 static bool
3367 valid_fixed_convert_types_p (tree type1, tree type2)
3368 {
3369   return (FIXED_POINT_TYPE_P (type1)
3370 	  && (INTEGRAL_TYPE_P (type2)
3371 	      || SCALAR_FLOAT_TYPE_P (type2)
3372 	      || FIXED_POINT_TYPE_P (type2)));
3373 }
3374 
3375 /* Verify the contents of a GIMPLE_CALL STMT.  Returns true when there
3376    is a problem, otherwise false.  */
3377 
3378 static bool
3379 verify_gimple_call (gcall *stmt)
3380 {
3381   tree fn = gimple_call_fn (stmt);
3382   tree fntype, fndecl;
3383   unsigned i;
3384 
3385   if (gimple_call_internal_p (stmt))
3386     {
3387       if (fn)
3388 	{
3389 	  error ("gimple call has two targets");
3390 	  debug_generic_stmt (fn);
3391 	  return true;
3392 	}
3393       /* FIXME: allow a label as an arg to internal fn PHI from the GIMPLE FE.  */
3394       else if (gimple_call_internal_fn (stmt) == IFN_PHI)
3395 	{
3396 	  return false;
3397 	}
3398     }
3399   else
3400     {
3401       if (!fn)
3402 	{
3403 	  error ("gimple call has no target");
3404 	  return true;
3405 	}
3406     }
3407 
3408   if (fn && !is_gimple_call_addr (fn))
3409     {
3410       error ("invalid function in gimple call");
3411       debug_generic_stmt (fn);
3412       return true;
3413     }
3414 
3415   if (fn
3416       && (!POINTER_TYPE_P (TREE_TYPE (fn))
3417 	  || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3418 	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3419     {
3420       error ("non-function in gimple call");
3421       return true;
3422     }
3423 
3424    fndecl = gimple_call_fndecl (stmt);
3425    if (fndecl
3426        && TREE_CODE (fndecl) == FUNCTION_DECL
3427        && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3428        && !DECL_PURE_P (fndecl)
3429        && !TREE_READONLY (fndecl))
3430      {
3431        error ("invalid pure const state for function");
3432        return true;
3433      }
3434 
3435   tree lhs = gimple_call_lhs (stmt);
3436   if (lhs
3437       && (!is_gimple_lvalue (lhs)
3438 	  || verify_types_in_gimple_reference (lhs, true)))
3439     {
3440       error ("invalid LHS in gimple call");
3441       return true;
3442     }
3443 
3444   if (gimple_call_ctrl_altering_p (stmt)
3445       && gimple_call_noreturn_p (stmt)
3446       && should_remove_lhs_p (lhs))
3447     {
3448       error ("LHS in noreturn call");
3449       return true;
3450     }
3451 
3452   fntype = gimple_call_fntype (stmt);
3453   if (fntype
3454       && lhs
3455       && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3456       /* ???  At least C++ misses conversions at assignments from
3457 	 void * call results.
3458 	 ???  Java is completely off.  Especially with functions
3459 	 returning java.lang.Object.
3460 	 For now simply allow arbitrary pointer type conversions.  */
3461       && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3462 	   && POINTER_TYPE_P (TREE_TYPE (fntype))))
3463     {
3464       error ("invalid conversion in gimple call");
3465       debug_generic_stmt (TREE_TYPE (lhs));
3466       debug_generic_stmt (TREE_TYPE (fntype));
3467       return true;
3468     }
3469 
3470   if (gimple_call_chain (stmt)
3471       && !is_gimple_val (gimple_call_chain (stmt)))
3472     {
3473       error ("invalid static chain in gimple call");
3474       debug_generic_stmt (gimple_call_chain (stmt));
3475       return true;
3476     }
3477 
3478   /* If there is a static chain argument, the call should either be
3479      indirect, or the decl should have DECL_STATIC_CHAIN set.  */
3480   if (gimple_call_chain (stmt)
3481       && fndecl
3482       && !DECL_STATIC_CHAIN (fndecl))
3483     {
3484       error ("static chain with function that doesn%'t use one");
3485       return true;
3486     }
3487 
3488   if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3489     {
3490       switch (DECL_FUNCTION_CODE (fndecl))
3491 	{
3492 	case BUILT_IN_UNREACHABLE:
3493 	case BUILT_IN_TRAP:
3494 	  if (gimple_call_num_args (stmt) > 0)
3495 	    {
3496 	      /* Built-in unreachable with parameters might not be caught by
3497 		 undefined behavior sanitizer.  Front-ends do check that users
3498 		 do not call them that way, but we also produce calls to
3499 		 __builtin_unreachable internally, for example when IPA figures
3500 		 out a call cannot happen in a legal program.  In such cases,
3501 		 we must make sure arguments are stripped off.  */
3502 	      error ("__builtin_unreachable or __builtin_trap call with "
3503 		     "arguments");
3504 	      return true;
3505 	    }
3506 	  break;
3507 	default:
3508 	  break;
3509 	}
3510     }
3511 
3512   /* ???  The C frontend passes unpromoted arguments in case it
3513      didn't see a function declaration before the call.  So for now
3514      leave the call arguments mostly unverified.  Once we gimplify
3515      unit-at-a-time we have a chance to fix this.  */
3516 
3517   for (i = 0; i < gimple_call_num_args (stmt); ++i)
3518     {
3519       tree arg = gimple_call_arg (stmt, i);
3520       if ((is_gimple_reg_type (TREE_TYPE (arg))
3521 	   && !is_gimple_val (arg))
3522 	  || (!is_gimple_reg_type (TREE_TYPE (arg))
3523 	      && !is_gimple_lvalue (arg)))
3524 	{
3525 	  error ("invalid argument to gimple call");
3526 	  debug_generic_expr (arg);
3527 	  return true;
3528 	}
3529     }
3530 
3531   return false;
3532 }
3533 
3534 /* Verifies the gimple comparison with the result type TYPE and
3535    the operands OP0 and OP1; the comparison code is CODE.  */
3536 
3537 static bool
3538 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3539 {
3540   tree op0_type = TREE_TYPE (op0);
3541   tree op1_type = TREE_TYPE (op1);
3542 
3543   if (!is_gimple_val (op0) || !is_gimple_val (op1))
3544     {
3545       error ("invalid operands in gimple comparison");
3546       return true;
3547     }
3548 
3549   /* For comparisons we do not have the operation's type as the
3550      effective type the comparison is carried out in.  Instead
3551      we require that either the first operand is trivially
3552      convertible into the second, or the other way around.
3553      Because we special-case pointers to void we allow
3554      comparisons of pointers with the same mode as well.  */
3555   if (!useless_type_conversion_p (op0_type, op1_type)
3556       && !useless_type_conversion_p (op1_type, op0_type)
3557       && (!POINTER_TYPE_P (op0_type)
3558 	  || !POINTER_TYPE_P (op1_type)
3559 	  || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3560     {
3561       error ("mismatching comparison operand types");
3562       debug_generic_expr (op0_type);
3563       debug_generic_expr (op1_type);
3564       return true;
3565     }
3566 
3567   /* The resulting type of a comparison may be an effective boolean type.  */
3568   if (INTEGRAL_TYPE_P (type)
3569       && (TREE_CODE (type) == BOOLEAN_TYPE
3570 	  || TYPE_PRECISION (type) == 1))
3571     {
3572       if ((TREE_CODE (op0_type) == VECTOR_TYPE
3573 	   || TREE_CODE (op1_type) == VECTOR_TYPE)
3574 	  && code != EQ_EXPR && code != NE_EXPR
3575 	  && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3576 	  && !VECTOR_INTEGER_TYPE_P (op0_type))
3577 	{
3578 	  error ("unsupported operation or type for vector comparison"
3579 		 " returning a boolean");
3580 	  debug_generic_expr (op0_type);
3581 	  debug_generic_expr (op1_type);
3582 	  return true;
3583         }
3584     }
3585   /* Or a boolean vector type with the same element count
3586      as the comparison operand types.  */
3587   else if (TREE_CODE (type) == VECTOR_TYPE
3588 	   && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3589     {
3590       if (TREE_CODE (op0_type) != VECTOR_TYPE
3591 	  || TREE_CODE (op1_type) != VECTOR_TYPE)
3592         {
3593           error ("non-vector operands in vector comparison");
3594           debug_generic_expr (op0_type);
3595           debug_generic_expr (op1_type);
3596           return true;
3597         }
3598 
3599       if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type))
3600         {
3601           error ("invalid vector comparison resulting type");
3602           debug_generic_expr (type);
3603           return true;
3604         }
3605     }
3606   else
3607     {
3608       error ("bogus comparison result type");
3609       debug_generic_expr (type);
3610       return true;
3611     }
3612 
3613   return false;
3614 }
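
/* Illustrative sketch (hypothetical IL): with int *p and void *q,

     t_2 = p_1 == q_1;

   is accepted although the operand types differ, because both are
   pointer types with the same TYPE_MODE; comparing, say, an int SSA
   name against a float SSA name triggers "mismatching comparison
   operand types" instead.  */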
3615 
3616 /* Verify a gimple assignment statement STMT with a unary rhs.
3617    Returns true if anything is wrong.  */
3618 
3619 static bool
3620 verify_gimple_assign_unary (gassign *stmt)
3621 {
3622   enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3623   tree lhs = gimple_assign_lhs (stmt);
3624   tree lhs_type = TREE_TYPE (lhs);
3625   tree rhs1 = gimple_assign_rhs1 (stmt);
3626   tree rhs1_type = TREE_TYPE (rhs1);
3627 
3628   if (!is_gimple_reg (lhs))
3629     {
3630       error ("non-register as LHS of unary operation");
3631       return true;
3632     }
3633 
3634   if (!is_gimple_val (rhs1))
3635     {
3636       error ("invalid operand in unary operation");
3637       return true;
3638     }
3639 
3640   /* First handle conversions.  */
3641   switch (rhs_code)
3642     {
3643     CASE_CONVERT:
3644       {
3645 	/* Allow conversions from pointer type to integral type only if
3646 	   there is no sign or zero extension involved.
3647 	   For targets where the precision of ptrofftype doesn't match that
3648 	   of pointers we need to allow arbitrary conversions to ptrofftype.  */
3649 	if ((POINTER_TYPE_P (lhs_type)
3650 	     && INTEGRAL_TYPE_P (rhs1_type))
3651 	    || (POINTER_TYPE_P (rhs1_type)
3652 		&& INTEGRAL_TYPE_P (lhs_type)
3653 		&& (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3654 		    || ptrofftype_p (sizetype))))
3655 	  return false;
3656 
3657 	/* Allow conversion from integral to offset type and vice versa.  */
3658 	if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3659 	     && INTEGRAL_TYPE_P (rhs1_type))
3660 	    || (INTEGRAL_TYPE_P (lhs_type)
3661 		&& TREE_CODE (rhs1_type) == OFFSET_TYPE))
3662 	  return false;
3663 
3664 	/* Otherwise assert we are converting between types of the
3665 	   same kind.  */
3666 	if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3667 	  {
3668 	    error ("invalid types in nop conversion");
3669 	    debug_generic_expr (lhs_type);
3670 	    debug_generic_expr (rhs1_type);
3671 	    return true;
3672 	  }
3673 
3674 	return false;
3675       }
3676 
3677     case ADDR_SPACE_CONVERT_EXPR:
3678       {
3679 	if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3680 	    || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3681 		== TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3682 	  {
3683 	    error ("invalid types in address space conversion");
3684 	    debug_generic_expr (lhs_type);
3685 	    debug_generic_expr (rhs1_type);
3686 	    return true;
3687 	  }
3688 
3689 	return false;
3690       }
3691 
3692     case FIXED_CONVERT_EXPR:
3693       {
3694 	if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3695 	    && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3696 	  {
3697 	    error ("invalid types in fixed-point conversion");
3698 	    debug_generic_expr (lhs_type);
3699 	    debug_generic_expr (rhs1_type);
3700 	    return true;
3701 	  }
3702 
3703 	return false;
3704       }
3705 
3706     case FLOAT_EXPR:
3707       {
3708 	if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3709 	    && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3710 	        || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3711 	  {
3712 	    error ("invalid types in conversion to floating point");
3713 	    debug_generic_expr (lhs_type);
3714 	    debug_generic_expr (rhs1_type);
3715 	    return true;
3716 	  }
3717 
3718         return false;
3719       }
3720 
3721     case FIX_TRUNC_EXPR:
3722       {
3723         if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3724             && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3725                 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3726 	  {
3727 	    error ("invalid types in conversion to integer");
3728 	    debug_generic_expr (lhs_type);
3729 	    debug_generic_expr (rhs1_type);
3730 	    return true;
3731 	  }
3732 
3733         return false;
3734       }
3735     case REDUC_MAX_EXPR:
3736     case REDUC_MIN_EXPR:
3737     case REDUC_PLUS_EXPR:
3738       if (!VECTOR_TYPE_P (rhs1_type)
3739 	  || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3740         {
3741 	  error ("reduction should convert from vector to element type");
3742 	  debug_generic_expr (lhs_type);
3743 	  debug_generic_expr (rhs1_type);
3744 	  return true;
3745 	}
3746       return false;
3747 
3748     case VEC_UNPACK_HI_EXPR:
3749     case VEC_UNPACK_LO_EXPR:
3750     case VEC_UNPACK_FLOAT_HI_EXPR:
3751     case VEC_UNPACK_FLOAT_LO_EXPR:
3752       /* FIXME.  */
3753       return false;
3754 
3755     case NEGATE_EXPR:
3756     case ABS_EXPR:
3757     case BIT_NOT_EXPR:
3758     case PAREN_EXPR:
3759     case CONJ_EXPR:
3760       break;
3761 
3762     default:
3763       gcc_unreachable ();
3764     }
3765 
3766   /* For the remaining codes assert there is no conversion involved.  */
3767   if (!useless_type_conversion_p (lhs_type, rhs1_type))
3768     {
3769       error ("non-trivial conversion in unary operation");
3770       debug_generic_expr (lhs_type);
3771       debug_generic_expr (rhs1_type);
3772       return true;
3773     }
3774 
3775   return false;
3776 }
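
/* Illustrative sketch for the conversion cases above (hypothetical
   IL): truncating a pointer,

     i_2 = (int) p_1;

   is fine because no extension is involved, whereas a NOP conversion
   between an integer and a float fails the same-kind check with
   "invalid types in nop conversion"; such conversions must be
   expressed with FLOAT_EXPR or FIX_TRUNC_EXPR instead.  */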
3777 
3778 /* Verify a gimple assignment statement STMT with a binary rhs.
3779    Returns true if anything is wrong.  */
3780 
3781 static bool
3782 verify_gimple_assign_binary (gassign *stmt)
3783 {
3784   enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3785   tree lhs = gimple_assign_lhs (stmt);
3786   tree lhs_type = TREE_TYPE (lhs);
3787   tree rhs1 = gimple_assign_rhs1 (stmt);
3788   tree rhs1_type = TREE_TYPE (rhs1);
3789   tree rhs2 = gimple_assign_rhs2 (stmt);
3790   tree rhs2_type = TREE_TYPE (rhs2);
3791 
3792   if (!is_gimple_reg (lhs))
3793     {
3794       error ("non-register as LHS of binary operation");
3795       return true;
3796     }
3797 
3798   if (!is_gimple_val (rhs1)
3799       || !is_gimple_val (rhs2))
3800     {
3801       error ("invalid operands in binary operation");
3802       return true;
3803     }
3804 
3805   /* First handle operations that involve different types.  */
3806   switch (rhs_code)
3807     {
3808     case COMPLEX_EXPR:
3809       {
3810 	if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3811 	    || !(INTEGRAL_TYPE_P (rhs1_type)
3812 	         || SCALAR_FLOAT_TYPE_P (rhs1_type))
3813 	    || !(INTEGRAL_TYPE_P (rhs2_type)
3814 	         || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3815 	  {
3816 	    error ("type mismatch in complex expression");
3817 	    debug_generic_expr (lhs_type);
3818 	    debug_generic_expr (rhs1_type);
3819 	    debug_generic_expr (rhs2_type);
3820 	    return true;
3821 	  }
3822 
3823 	return false;
3824       }
3825 
3826     case LSHIFT_EXPR:
3827     case RSHIFT_EXPR:
3828     case LROTATE_EXPR:
3829     case RROTATE_EXPR:
3830       {
3831 	/* Shifts and rotates are ok on integral types, fixed point
3832 	   types and integer vector types.  */
3833 	if ((!INTEGRAL_TYPE_P (rhs1_type)
3834 	     && !FIXED_POINT_TYPE_P (rhs1_type)
3835 	     && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3836 		  && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3837 	    || (!INTEGRAL_TYPE_P (rhs2_type)
3838 		/* Vector shifts of vectors are also ok.  */
3839 		&& !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3840 		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3841 		     && TREE_CODE (rhs2_type) == VECTOR_TYPE
3842 		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3843 	    || !useless_type_conversion_p (lhs_type, rhs1_type))
3844 	  {
3845 	    error ("type mismatch in shift expression");
3846 	    debug_generic_expr (lhs_type);
3847 	    debug_generic_expr (rhs1_type);
3848 	    debug_generic_expr (rhs2_type);
3849 	    return true;
3850 	  }
3851 
3852 	return false;
3853       }
3854 
3855     case WIDEN_LSHIFT_EXPR:
3856       {
3857         if (!INTEGRAL_TYPE_P (lhs_type)
3858             || !INTEGRAL_TYPE_P (rhs1_type)
3859             || TREE_CODE (rhs2) != INTEGER_CST
3860             || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3861           {
3862             error ("type mismatch in widening vector shift expression");
3863             debug_generic_expr (lhs_type);
3864             debug_generic_expr (rhs1_type);
3865             debug_generic_expr (rhs2_type);
3866             return true;
3867           }
3868 
3869         return false;
3870       }
3871 
3872     case VEC_WIDEN_LSHIFT_HI_EXPR:
3873     case VEC_WIDEN_LSHIFT_LO_EXPR:
3874       {
3875         if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3876             || TREE_CODE (lhs_type) != VECTOR_TYPE
3877             || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3878             || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3879             || TREE_CODE (rhs2) != INTEGER_CST
3880             || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3881                 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3882           {
3883             error ("type mismatch in widening vector shift expression");
3884             debug_generic_expr (lhs_type);
3885             debug_generic_expr (rhs1_type);
3886             debug_generic_expr (rhs2_type);
3887             return true;
3888           }
3889 
3890         return false;
3891       }
3892 
3893     case PLUS_EXPR:
3894     case MINUS_EXPR:
3895       {
3896 	tree lhs_etype = lhs_type;
3897 	tree rhs1_etype = rhs1_type;
3898 	tree rhs2_etype = rhs2_type;
3899 	if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3900 	  {
3901 	    if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3902 		|| TREE_CODE (rhs2_type) != VECTOR_TYPE)
3903 	      {
3904 		error ("invalid non-vector operands to vector valued plus");
3905 		return true;
3906 	      }
3907 	    lhs_etype = TREE_TYPE (lhs_type);
3908 	    rhs1_etype = TREE_TYPE (rhs1_type);
3909 	    rhs2_etype = TREE_TYPE (rhs2_type);
3910 	  }
3911 	if (POINTER_TYPE_P (lhs_etype)
3912 	    || POINTER_TYPE_P (rhs1_etype)
3913 	    || POINTER_TYPE_P (rhs2_etype))
3914 	  {
3915 	    error ("invalid (pointer) operands to plus/minus");
3916 	    return true;
3917 	  }
3918 
3919 	/* Continue with generic binary expression handling.  */
3920 	break;
3921       }
3922 
3923     case POINTER_PLUS_EXPR:
3924       {
3925 	if (!POINTER_TYPE_P (rhs1_type)
3926 	    || !useless_type_conversion_p (lhs_type, rhs1_type)
3927 	    || !ptrofftype_p (rhs2_type))
3928 	  {
3929 	    error ("type mismatch in pointer plus expression");
3930 	    debug_generic_stmt (lhs_type);
3931 	    debug_generic_stmt (rhs1_type);
3932 	    debug_generic_stmt (rhs2_type);
3933 	    return true;
3934 	  }
3935 
3936 	return false;
3937       }
3938 
3939     case TRUTH_ANDIF_EXPR:
3940     case TRUTH_ORIF_EXPR:
3941     case TRUTH_AND_EXPR:
3942     case TRUTH_OR_EXPR:
3943     case TRUTH_XOR_EXPR:
3944 
3945       gcc_unreachable ();
3946 
3947     case LT_EXPR:
3948     case LE_EXPR:
3949     case GT_EXPR:
3950     case GE_EXPR:
3951     case EQ_EXPR:
3952     case NE_EXPR:
3953     case UNORDERED_EXPR:
3954     case ORDERED_EXPR:
3955     case UNLT_EXPR:
3956     case UNLE_EXPR:
3957     case UNGT_EXPR:
3958     case UNGE_EXPR:
3959     case UNEQ_EXPR:
3960     case LTGT_EXPR:
3961       /* Comparisons are also binary, but the result type is not
3962 	 connected to the operand types.  */
3963       return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
3964 
3965     case WIDEN_MULT_EXPR:
3966       if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3967 	return true;
3968       return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3969 	      || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3970 
3971     case WIDEN_SUM_EXPR:
3972     case VEC_WIDEN_MULT_HI_EXPR:
3973     case VEC_WIDEN_MULT_LO_EXPR:
3974     case VEC_WIDEN_MULT_EVEN_EXPR:
3975     case VEC_WIDEN_MULT_ODD_EXPR:
3976     case VEC_PACK_TRUNC_EXPR:
3977     case VEC_PACK_SAT_EXPR:
3978     case VEC_PACK_FIX_TRUNC_EXPR:
3979       /* FIXME.  */
3980       return false;
3981 
3982     case MULT_EXPR:
3983     case MULT_HIGHPART_EXPR:
3984     case TRUNC_DIV_EXPR:
3985     case CEIL_DIV_EXPR:
3986     case FLOOR_DIV_EXPR:
3987     case ROUND_DIV_EXPR:
3988     case TRUNC_MOD_EXPR:
3989     case CEIL_MOD_EXPR:
3990     case FLOOR_MOD_EXPR:
3991     case ROUND_MOD_EXPR:
3992     case RDIV_EXPR:
3993     case EXACT_DIV_EXPR:
3994     case MIN_EXPR:
3995     case MAX_EXPR:
3996     case BIT_IOR_EXPR:
3997     case BIT_XOR_EXPR:
3998     case BIT_AND_EXPR:
3999       /* Continue with generic binary expression handling.  */
4000       break;
4001 
4002     default:
4003       gcc_unreachable ();
4004     }
4005 
4006   if (!useless_type_conversion_p (lhs_type, rhs1_type)
4007       || !useless_type_conversion_p (lhs_type, rhs2_type))
4008     {
4009       error ("type mismatch in binary expression");
4010       debug_generic_stmt (lhs_type);
4011       debug_generic_stmt (rhs1_type);
4012       debug_generic_stmt (rhs2_type);
4013       return true;
4014     }
4015 
4016   return false;
4017 }
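
/* Illustrative sketch (hypothetical IL, simplified dump syntax):
   pointer arithmetic is only valid as POINTER_PLUS_EXPR with a
   ptrofftype offset, roughly

     p_2 = p_1 p+ 4;

   a plain PLUS_EXPR with a pointer operand is diagnosed above as
   "invalid (pointer) operands to plus/minus".  */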
4018 
4019 /* Verify a gimple assignment statement STMT with a ternary rhs.
4020    Returns true if anything is wrong.  */
4021 
4022 static bool
4023 verify_gimple_assign_ternary (gassign *stmt)
4024 {
4025   enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4026   tree lhs = gimple_assign_lhs (stmt);
4027   tree lhs_type = TREE_TYPE (lhs);
4028   tree rhs1 = gimple_assign_rhs1 (stmt);
4029   tree rhs1_type = TREE_TYPE (rhs1);
4030   tree rhs2 = gimple_assign_rhs2 (stmt);
4031   tree rhs2_type = TREE_TYPE (rhs2);
4032   tree rhs3 = gimple_assign_rhs3 (stmt);
4033   tree rhs3_type = TREE_TYPE (rhs3);
4034 
4035   if (!is_gimple_reg (lhs))
4036     {
4037       error ("non-register as LHS of ternary operation");
4038       return true;
4039     }
4040 
4041   if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
4042        ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4043       || !is_gimple_val (rhs2)
4044       || !is_gimple_val (rhs3))
4045     {
4046       error ("invalid operands in ternary operation");
4047       return true;
4048     }
4049 
4050   /* First handle operations that involve different types.  */
4051   switch (rhs_code)
4052     {
4053     case WIDEN_MULT_PLUS_EXPR:
4054     case WIDEN_MULT_MINUS_EXPR:
4055       if ((!INTEGRAL_TYPE_P (rhs1_type)
4056 	   && !FIXED_POINT_TYPE_P (rhs1_type))
4057 	  || !useless_type_conversion_p (rhs1_type, rhs2_type)
4058 	  || !useless_type_conversion_p (lhs_type, rhs3_type)
4059 	  || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4060 	  || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4061 	{
4062 	  error ("type mismatch in widening multiply-accumulate expression");
4063 	  debug_generic_expr (lhs_type);
4064 	  debug_generic_expr (rhs1_type);
4065 	  debug_generic_expr (rhs2_type);
4066 	  debug_generic_expr (rhs3_type);
4067 	  return true;
4068 	}
4069       break;
4070 
4071     case FMA_EXPR:
4072       if (!useless_type_conversion_p (lhs_type, rhs1_type)
4073 	  || !useless_type_conversion_p (lhs_type, rhs2_type)
4074 	  || !useless_type_conversion_p (lhs_type, rhs3_type))
4075 	{
4076 	  error ("type mismatch in fused multiply-add expression");
4077 	  debug_generic_expr (lhs_type);
4078 	  debug_generic_expr (rhs1_type);
4079 	  debug_generic_expr (rhs2_type);
4080 	  debug_generic_expr (rhs3_type);
4081 	  return true;
4082 	}
4083       break;
4084 
4085     case VEC_COND_EXPR:
4086       if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4087 	  || TYPE_VECTOR_SUBPARTS (rhs1_type)
4088 	     != TYPE_VECTOR_SUBPARTS (lhs_type))
4089 	{
4090 	  error ("the first argument of a VEC_COND_EXPR must be of a "
4091 		 "boolean vector type of the same number of elements "
4092 		 "as the result");
4093 	  debug_generic_expr (lhs_type);
4094 	  debug_generic_expr (rhs1_type);
4095 	  return true;
4096 	}
4097       /* Fallthrough.  */
4098     case COND_EXPR:
4099       if (!useless_type_conversion_p (lhs_type, rhs2_type)
4100 	  || !useless_type_conversion_p (lhs_type, rhs3_type))
4101 	{
4102 	  error ("type mismatch in conditional expression");
4103 	  debug_generic_expr (lhs_type);
4104 	  debug_generic_expr (rhs2_type);
4105 	  debug_generic_expr (rhs3_type);
4106 	  return true;
4107 	}
4108       break;
4109 
4110     case VEC_PERM_EXPR:
4111       if (!useless_type_conversion_p (lhs_type, rhs1_type)
4112 	  || !useless_type_conversion_p (lhs_type, rhs2_type))
4113 	{
4114 	  error ("type mismatch in vector permute expression");
4115 	  debug_generic_expr (lhs_type);
4116 	  debug_generic_expr (rhs1_type);
4117 	  debug_generic_expr (rhs2_type);
4118 	  debug_generic_expr (rhs3_type);
4119 	  return true;
4120 	}
4121 
4122       if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4123 	  || TREE_CODE (rhs2_type) != VECTOR_TYPE
4124 	  || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4125 	{
4126 	  error ("vector types expected in vector permute expression");
4127 	  debug_generic_expr (lhs_type);
4128 	  debug_generic_expr (rhs1_type);
4129 	  debug_generic_expr (rhs2_type);
4130 	  debug_generic_expr (rhs3_type);
4131 	  return true;
4132 	}
4133 
4134       if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4135 	  || TYPE_VECTOR_SUBPARTS (rhs2_type)
4136 	     != TYPE_VECTOR_SUBPARTS (rhs3_type)
4137 	  || TYPE_VECTOR_SUBPARTS (rhs3_type)
4138 	     != TYPE_VECTOR_SUBPARTS (lhs_type))
4139 	{
4140 	  error ("vectors with different element number found "
4141 		 "in vector permute expression");
4142 	  debug_generic_expr (lhs_type);
4143 	  debug_generic_expr (rhs1_type);
4144 	  debug_generic_expr (rhs2_type);
4145 	  debug_generic_expr (rhs3_type);
4146 	  return true;
4147 	}
4148 
4149       if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4150 	  || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4151 	     != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4152 	{
4153 	  error ("invalid mask type in vector permute expression");
4154 	  debug_generic_expr (lhs_type);
4155 	  debug_generic_expr (rhs1_type);
4156 	  debug_generic_expr (rhs2_type);
4157 	  debug_generic_expr (rhs3_type);
4158 	  return true;
4159 	}
4160 
4161       return false;
4162 
4163     case SAD_EXPR:
4164       if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4165 	  || !useless_type_conversion_p (lhs_type, rhs3_type)
4166 	  || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4167 	       > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4168 	{
4169 	  error ("type mismatch in sad expression");
4170 	  debug_generic_expr (lhs_type);
4171 	  debug_generic_expr (rhs1_type);
4172 	  debug_generic_expr (rhs2_type);
4173 	  debug_generic_expr (rhs3_type);
4174 	  return true;
4175 	}
4176 
4177       if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4178 	  || TREE_CODE (rhs2_type) != VECTOR_TYPE
4179 	  || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4180 	{
4181 	  error ("vector types expected in sad expression");
4182 	  debug_generic_expr (lhs_type);
4183 	  debug_generic_expr (rhs1_type);
4184 	  debug_generic_expr (rhs2_type);
4185 	  debug_generic_expr (rhs3_type);
4186 	  return true;
4187 	}
4188 
4189       return false;
4190 
4191     case BIT_INSERT_EXPR:
4192       if (! useless_type_conversion_p (lhs_type, rhs1_type))
4193 	{
4194 	  error ("type mismatch in BIT_INSERT_EXPR");
4195 	  debug_generic_expr (lhs_type);
4196 	  debug_generic_expr (rhs1_type);
4197 	  return true;
4198 	}
4199       if (! ((INTEGRAL_TYPE_P (rhs1_type)
4200 	      && INTEGRAL_TYPE_P (rhs2_type))
4201 	     || (VECTOR_TYPE_P (rhs1_type)
4202 		 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))))
4203 	{
4204 	  error ("not allowed type combination in BIT_INSERT_EXPR");
4205 	  debug_generic_expr (rhs1_type);
4206 	  debug_generic_expr (rhs2_type);
4207 	  return true;
4208 	}
4209       if (! tree_fits_uhwi_p (rhs3)
4210 	  || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4211 	{
4212 	  error ("invalid position or size in BIT_INSERT_EXPR");
4213 	  return true;
4214 	}
4215       if (INTEGRAL_TYPE_P (rhs1_type))
4216 	{
4217 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4218 	  if (bitpos >= TYPE_PRECISION (rhs1_type)
4219 	      || (bitpos + TYPE_PRECISION (rhs2_type)
4220 		  > TYPE_PRECISION (rhs1_type)))
4221 	    {
4222 	      error ("insertion out of range in BIT_INSERT_EXPR");
4223 	      return true;
4224 	    }
4225 	}
4226       else if (VECTOR_TYPE_P (rhs1_type))
4227 	{
4228 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4229 	  unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4230 	  if (bitpos % bitsize != 0)
4231 	    {
4232 	      error ("vector insertion not at element boundary");
4233 	      return true;
4234 	    }
4235 	}
4236       return false;
4237 
4238     case DOT_PROD_EXPR:
4239     case REALIGN_LOAD_EXPR:
4240       /* FIXME.  */
4241       return false;
4242 
4243     default:
4244       gcc_unreachable ();
4245     }
4246   return false;
4247 }
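
/* Illustrative sketch (hypothetical IL): for a COND_EXPR rhs,

     x_3 = a_1 < b_2 ? c_1 : d_2;

   c_1 and d_2 must be trivially convertible to the type of x_3; a
   VEC_COND_EXPR additionally requires its first operand to be a
   boolean vector with the same number of elements as the result.  */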
4248 
4249 /* Verify a gimple assignment statement STMT with a single rhs.
4250    Returns true if anything is wrong.  */
4251 
4252 static bool
4253 verify_gimple_assign_single (gassign *stmt)
4254 {
4255   enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4256   tree lhs = gimple_assign_lhs (stmt);
4257   tree lhs_type = TREE_TYPE (lhs);
4258   tree rhs1 = gimple_assign_rhs1 (stmt);
4259   tree rhs1_type = TREE_TYPE (rhs1);
4260   bool res = false;
4261 
4262   if (!useless_type_conversion_p (lhs_type, rhs1_type))
4263     {
4264       error ("non-trivial conversion at assignment");
4265       debug_generic_expr (lhs_type);
4266       debug_generic_expr (rhs1_type);
4267       return true;
4268     }
4269 
4270   if (gimple_clobber_p (stmt)
4271       && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4272     {
4273       error ("non-decl/MEM_REF LHS in clobber statement");
4274       debug_generic_expr (lhs);
4275       return true;
4276     }
4277 
4278   if (handled_component_p (lhs)
4279       || TREE_CODE (lhs) == MEM_REF
4280       || TREE_CODE (lhs) == TARGET_MEM_REF)
4281     res |= verify_types_in_gimple_reference (lhs, true);
4282 
4283   /* Special codes we cannot handle via their class.  */
4284   switch (rhs_code)
4285     {
4286     case ADDR_EXPR:
4287       {
4288 	tree op = TREE_OPERAND (rhs1, 0);
4289 	if (!is_gimple_addressable (op))
4290 	  {
4291 	    error ("invalid operand in unary expression");
4292 	    return true;
4293 	  }
4294 
4295 	/* Technically there is no longer a need for matching types, but
4296 	   gimple hygiene asks for this check.  In LTO we can end up
4297 	   combining incompatible units and thus end up with addresses
4298 	   of globals that change their type to a common one.  */
4299 	if (!in_lto_p
4300 	    && !types_compatible_p (TREE_TYPE (op),
4301 				    TREE_TYPE (TREE_TYPE (rhs1)))
4302 	    && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4303 							  TREE_TYPE (op)))
4304 	  {
4305 	    error ("type mismatch in address expression");
4306 	    debug_generic_stmt (TREE_TYPE (rhs1));
4307 	    debug_generic_stmt (TREE_TYPE (op));
4308 	    return true;
4309 	  }
4310 
4311 	return verify_types_in_gimple_reference (op, true);
4312       }
4313 
4314     /* tcc_reference  */
4315     case INDIRECT_REF:
4316       error ("INDIRECT_REF in gimple IL");
4317       return true;
4318 
4319     case COMPONENT_REF:
4320     case BIT_FIELD_REF:
4321     case ARRAY_REF:
4322     case ARRAY_RANGE_REF:
4323     case VIEW_CONVERT_EXPR:
4324     case REALPART_EXPR:
4325     case IMAGPART_EXPR:
4326     case TARGET_MEM_REF:
4327     case MEM_REF:
4328       if (!is_gimple_reg (lhs)
4329 	  && is_gimple_reg_type (TREE_TYPE (lhs)))
4330 	{
4331 	  error ("invalid rhs for gimple memory store");
4332 	  debug_generic_stmt (lhs);
4333 	  debug_generic_stmt (rhs1);
4334 	  return true;
4335 	}
4336       return res || verify_types_in_gimple_reference (rhs1, false);
4337 
4338     /* tcc_constant  */
4339     case SSA_NAME:
4340     case INTEGER_CST:
4341     case REAL_CST:
4342     case FIXED_CST:
4343     case COMPLEX_CST:
4344     case VECTOR_CST:
4345     case STRING_CST:
4346       return res;
4347 
4348     /* tcc_declaration  */
4349     case CONST_DECL:
4350       return res;
4351     case VAR_DECL:
4352     case PARM_DECL:
4353       if (!is_gimple_reg (lhs)
4354 	  && !is_gimple_reg (rhs1)
4355 	  && is_gimple_reg_type (TREE_TYPE (lhs)))
4356 	{
4357 	  error ("invalid rhs for gimple memory store");
4358 	  debug_generic_stmt (lhs);
4359 	  debug_generic_stmt (rhs1);
4360 	  return true;
4361 	}
4362       return res;
4363 
4364     case CONSTRUCTOR:
4365       if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4366 	{
4367 	  unsigned int i;
4368 	  tree elt_i, elt_v, elt_t = NULL_TREE;
4369 
4370 	  if (CONSTRUCTOR_NELTS (rhs1) == 0)
4371 	    return res;
4372 	  /* For vector CONSTRUCTORs we require that either it is an empty
4373 	     CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4374 	     (then the element count must be correct to cover the whole
4375 	     outer vector and the index must be NULL on all elements), or
4376 	     it is a CONSTRUCTOR of scalar elements, where as an exception
4377 	     we allow a smaller number of elements (assuming zero filling)
4378 	     and consecutive indexes instead of NULL indexes (such
4379 	     CONSTRUCTORs can appear in the IL from front ends).  */
4380 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4381 	    {
4382 	      if (elt_t == NULL_TREE)
4383 		{
4384 		  elt_t = TREE_TYPE (elt_v);
4385 		  if (TREE_CODE (elt_t) == VECTOR_TYPE)
4386 		    {
4387 		      tree elt_t = TREE_TYPE (elt_v);
4388 		      if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4389 						      TREE_TYPE (elt_t)))
4390 			{
4391 			  error ("incorrect type of vector CONSTRUCTOR"
4392 				 " elements");
4393 			  debug_generic_stmt (rhs1);
4394 			  return true;
4395 			}
4396 		      else if (CONSTRUCTOR_NELTS (rhs1)
4397 			       * TYPE_VECTOR_SUBPARTS (elt_t)
4398 			       != TYPE_VECTOR_SUBPARTS (rhs1_type))
4399 			{
4400 			  error ("incorrect number of vector CONSTRUCTOR"
4401 				 " elements");
4402 			  debug_generic_stmt (rhs1);
4403 			  return true;
4404 			}
4405 		    }
4406 		  else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4407 						       elt_t))
4408 		    {
4409 		      error ("incorrect type of vector CONSTRUCTOR elements");
4410 		      debug_generic_stmt (rhs1);
4411 		      return true;
4412 		    }
4413 		  else if (CONSTRUCTOR_NELTS (rhs1)
4414 			   > TYPE_VECTOR_SUBPARTS (rhs1_type))
4415 		    {
4416 		      error ("incorrect number of vector CONSTRUCTOR elements");
4417 		      debug_generic_stmt (rhs1);
4418 		      return true;
4419 		    }
4420 		}
4421 	      else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4422 		{
4423 		  error ("incorrect type of vector CONSTRUCTOR elements");
4424 		  debug_generic_stmt (rhs1);
4425 		  return true;
4426 		}
4427 	      if (elt_i != NULL_TREE
4428 		  && (TREE_CODE (elt_t) == VECTOR_TYPE
4429 		      || TREE_CODE (elt_i) != INTEGER_CST
4430 		      || compare_tree_int (elt_i, i) != 0))
4431 		{
4432 		  error ("vector CONSTRUCTOR with non-NULL element index");
4433 		  debug_generic_stmt (rhs1);
4434 		  return true;
4435 		}
4436 	      if (!is_gimple_val (elt_v))
4437 		{
4438 		  error ("vector CONSTRUCTOR element is not a GIMPLE value");
4439 		  debug_generic_stmt (rhs1);
4440 		  return true;
4441 		}
4442 	    }
4443 	}
4444       else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4445 	{
4446 	  error ("non-vector CONSTRUCTOR with elements");
4447 	  debug_generic_stmt (rhs1);
4448 	  return true;
4449 	}
4450       return res;
4451     case OBJ_TYPE_REF:
4452     case ASSERT_EXPR:
4453     case WITH_SIZE_EXPR:
4454       /* FIXME.  */
4455       return res;
4456 
4457     default:;
4458     }
4459 
4460   return res;
4461 }
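
/* Illustrative sketch (hypothetical IL): with a register-typed lhs,

     *p_1 = x_2;

   is a valid store, while the memory-to-memory form *p_1 = *q_2 is
   diagnosed as "invalid rhs for gimple memory store", since such an
   assignment may contain at most one memory reference.  */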
4462 
4463 /* Verify the contents of a GIMPLE_ASSIGN STMT.  Returns true when there
4464    is a problem, otherwise false.  */
4465 
4466 static bool
4467 verify_gimple_assign (gassign *stmt)
4468 {
4469   switch (gimple_assign_rhs_class (stmt))
4470     {
4471     case GIMPLE_SINGLE_RHS:
4472       return verify_gimple_assign_single (stmt);
4473 
4474     case GIMPLE_UNARY_RHS:
4475       return verify_gimple_assign_unary (stmt);
4476 
4477     case GIMPLE_BINARY_RHS:
4478       return verify_gimple_assign_binary (stmt);
4479 
4480     case GIMPLE_TERNARY_RHS:
4481       return verify_gimple_assign_ternary (stmt);
4482 
4483     default:
4484       gcc_unreachable ();
4485     }
4486 }
4487 
4488 /* Verify the contents of a GIMPLE_RETURN STMT.  Returns true when there
4489    is a problem, otherwise false.  */
4490 
4491 static bool
4492 verify_gimple_return (greturn *stmt)
4493 {
4494   tree op = gimple_return_retval (stmt);
4495   tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4496 
4497   /* We cannot test for the presence of return values, as we do not fix
4498      up missing return values from the original source.  */
4499   if (op == NULL)
4500     return false;
4501 
4502   if (!is_gimple_val (op)
4503       && TREE_CODE (op) != RESULT_DECL)
4504     {
4505       error ("invalid operand in return statement");
4506       debug_generic_stmt (op);
4507       return true;
4508     }
4509 
4510   if ((TREE_CODE (op) == RESULT_DECL
4511        && DECL_BY_REFERENCE (op))
4512       || (TREE_CODE (op) == SSA_NAME
4513 	  && SSA_NAME_VAR (op)
4514 	  && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4515 	  && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4516     op = TREE_TYPE (op);
4517 
4518   if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4519     {
4520       error ("invalid conversion in return statement");
4521       debug_generic_stmt (restype);
4522       debug_generic_stmt (TREE_TYPE (op));
4523       return true;
4524     }
4525 
4526   return false;
4527 }
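
/* Illustrative sketch: in

     int foo (void) { ... return i_1; }

   the type of i_1 must be trivially convertible to foo's declared
   return type; returning a float SSA name here would be diagnosed as
   "invalid conversion in return statement".  */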
4528 
4529 
4530 /* Verify the contents of a GIMPLE_GOTO STMT.  Returns true when there
4531    is a problem, otherwise false.  */
4532 
4533 static bool
4534 verify_gimple_goto (ggoto *stmt)
4535 {
4536   tree dest = gimple_goto_dest (stmt);
4537 
4538   /* ???  We have two canonical forms of direct goto destinations, a
4539      bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL.  */
4540   if (TREE_CODE (dest) != LABEL_DECL
4541       && (!is_gimple_val (dest)
4542 	  || !POINTER_TYPE_P (TREE_TYPE (dest))))
4543     {
4544       error ("goto destination is neither a label nor a pointer");
4545       return true;
4546     }
4547 
4548   return false;
4549 }
4550 
4551 /* Verify the contents of a GIMPLE_SWITCH STMT.  Returns true when there
4552    is a problem, otherwise false.  */
4553 
4554 static bool
4555 verify_gimple_switch (gswitch *stmt)
4556 {
4557   unsigned int i, n;
4558   tree elt, prev_upper_bound = NULL_TREE;
4559   tree index_type, elt_type = NULL_TREE;
4560 
4561   if (!is_gimple_val (gimple_switch_index (stmt)))
4562     {
4563       error ("invalid operand to switch statement");
4564       debug_generic_stmt (gimple_switch_index (stmt));
4565       return true;
4566     }
4567 
4568   index_type = TREE_TYPE (gimple_switch_index (stmt));
4569   if (! INTEGRAL_TYPE_P (index_type))
4570     {
4571       error ("non-integral type switch statement");
4572       debug_generic_expr (index_type);
4573       return true;
4574     }
4575 
4576   elt = gimple_switch_label (stmt, 0);
4577   if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4578     {
4579       error ("invalid default case label in switch statement");
4580       debug_generic_expr (elt);
4581       return true;
4582     }
4583 
4584   n = gimple_switch_num_labels (stmt);
4585   for (i = 1; i < n; i++)
4586     {
4587       elt = gimple_switch_label (stmt, i);
4588 
4589       if (! CASE_LOW (elt))
4590 	{
4591 	  error ("invalid case label in switch statement");
4592 	  debug_generic_expr (elt);
4593 	  return true;
4594 	}
4595       if (CASE_HIGH (elt)
4596 	  && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4597 	{
4598 	  error ("invalid case range in switch statement");
4599 	  debug_generic_expr (elt);
4600 	  return true;
4601 	}
4602 
4603       if (elt_type)
4604 	{
4605 	  if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4606 	      || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4607 	    {
4608 	      error ("type mismatch for case label in switch statement");
4609 	      debug_generic_expr (elt);
4610 	      return true;
4611 	    }
4612 	}
4613       else
4614 	{
4615 	  elt_type = TREE_TYPE (CASE_LOW (elt));
4616 	  if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4617 	    {
4618 	      error ("type precision mismatch in switch statement");
4619 	      return true;
4620 	    }
4621 	}
4622 
4623       if (prev_upper_bound)
4624 	{
4625 	  if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4626 	    {
4627 	      error ("case labels not sorted in switch statement");
4628 	      return true;
4629 	    }
4630 	}
4631 
4632       prev_upper_bound = CASE_HIGH (elt);
4633       if (! prev_upper_bound)
4634 	prev_upper_bound = CASE_LOW (elt);
4635     }
4636 
4637   return false;
4638 }
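
/* Illustrative sketch of a well-formed lowered switch (simplified
   dump syntax): the first label is the default and carries no
   CASE_LOW/CASE_HIGH, and the remaining labels are sorted, disjoint
   and share one type:

     switch (i_1) <default: L3, case 1: L1, case 5 ... 7: L2>

   overlapping or unordered labels are diagnosed as "case labels not
   sorted in switch statement".  */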
4639 
4640 /* Verify a gimple debug statement STMT.
4641    Returns true if anything is wrong.  */
4642 
4643 static bool
4644 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4645 {
4646   /* There isn't much that could be wrong in a gimple debug stmt.  A
4647      gimple debug bind stmt, for example, maps a tree (usually a
4648      VAR_DECL or a PARM_DECL, but possibly some scalarized component
4649      or member of an aggregate type) to another tree, which can be an
4650      arbitrary expression.  These stmts expand into debug insns, and
4651      are converted to debug notes by var-tracking.c.  */
4652   return false;
4653 }
4654 
4655 /* Verify a gimple label statement STMT.
4656    Returns true if anything is wrong.  */
4657 
4658 static bool
4659 verify_gimple_label (glabel *stmt)
4660 {
4661   tree decl = gimple_label_label (stmt);
4662   int uid;
4663   bool err = false;
4664 
4665   if (TREE_CODE (decl) != LABEL_DECL)
4666     return true;
4667   if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4668       && DECL_CONTEXT (decl) != current_function_decl)
4669     {
4670       error ("label's context is not the current function decl");
4671       err |= true;
4672     }
4673 
4674   uid = LABEL_DECL_UID (decl);
4675   if (cfun->cfg
4676       && (uid == -1
4677 	  || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4678     {
4679       error ("incorrect entry in label_to_block_map");
4680       err |= true;
4681     }
4682 
4683   uid = EH_LANDING_PAD_NR (decl);
4684   if (uid)
4685     {
4686       eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4687       if (decl != lp->post_landing_pad)
4688 	{
4689 	  error ("incorrect setting of landing pad number");
4690 	  err |= true;
4691 	}
4692     }
4693 
4694   return err;
4695 }
4696 
4697 /* Verify a gimple cond statement STMT.
4698    Returns true if anything is wrong.  */
4699 
4700 static bool
4701 verify_gimple_cond (gcond *stmt)
4702 {
4703   if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4704     {
4705       error ("invalid comparison code in gimple cond");
4706       return true;
4707     }
4708   if (!(!gimple_cond_true_label (stmt)
4709 	|| TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4710       || !(!gimple_cond_false_label (stmt)
4711 	   || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4712     {
4713       error ("invalid labels in gimple cond");
4714       return true;
4715     }
4716 
4717   return verify_gimple_comparison (boolean_type_node,
4718 				   gimple_cond_lhs (stmt),
4719 				   gimple_cond_rhs (stmt),
4720 				   gimple_cond_code (stmt));
4721 }
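
/* Illustrative sketch: a GIMPLE_COND such as

     if (a_1 < b_2) goto <L1>; else goto <L2>;

   is simply a comparison with an implicit boolean result, hence the
   delegation to verify_gimple_comparison with boolean_type_node.  */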
4722 
4723 /* Verify the GIMPLE statement STMT.  Returns true if there is an
4724    error, otherwise false.  */
4725 
4726 static bool
4727 verify_gimple_stmt (gimple *stmt)
4728 {
4729   switch (gimple_code (stmt))
4730     {
4731     case GIMPLE_ASSIGN:
4732       return verify_gimple_assign (as_a <gassign *> (stmt));
4733 
4734     case GIMPLE_LABEL:
4735       return verify_gimple_label (as_a <glabel *> (stmt));
4736 
4737     case GIMPLE_CALL:
4738       return verify_gimple_call (as_a <gcall *> (stmt));
4739 
4740     case GIMPLE_COND:
4741       return verify_gimple_cond (as_a <gcond *> (stmt));
4742 
4743     case GIMPLE_GOTO:
4744       return verify_gimple_goto (as_a <ggoto *> (stmt));
4745 
4746     case GIMPLE_SWITCH:
4747       return verify_gimple_switch (as_a <gswitch *> (stmt));
4748 
4749     case GIMPLE_RETURN:
4750       return verify_gimple_return (as_a <greturn *> (stmt));
4751 
4752     case GIMPLE_ASM:
4753       return false;
4754 
4755     case GIMPLE_TRANSACTION:
4756       return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4757 
4758     /* Tuples that do not have tree operands.  */
4759     case GIMPLE_NOP:
4760     case GIMPLE_PREDICT:
4761     case GIMPLE_RESX:
4762     case GIMPLE_EH_DISPATCH:
4763     case GIMPLE_EH_MUST_NOT_THROW:
4764       return false;
4765 
4766     CASE_GIMPLE_OMP:
4767       /* OpenMP directives are validated by the FE and never operated
4768 	 on by the optimizers.  Furthermore, GIMPLE_OMP_FOR may contain
4769 	 non-gimple expressions when the main index variable has had
4770 	 its address taken.  This does not affect the loop itself
4771 	 because the header of a GIMPLE_OMP_FOR is merely used to determine
4772 	 how to set up the parallel iteration.  */
4773       return false;
4774 
4775     case GIMPLE_DEBUG:
4776       return verify_gimple_debug (stmt);
4777 
4778     default:
4779       gcc_unreachable ();
4780     }
4781 }
4782 
4783 /* Verify the contents of a GIMPLE_PHI.  Returns true if there is a problem,
4784    and false otherwise.  */
4785 
4786 static bool
4787 verify_gimple_phi (gimple *phi)
4788 {
4789   bool err = false;
4790   unsigned i;
4791   tree phi_result = gimple_phi_result (phi);
4792   bool virtual_p;
4793 
4794   if (!phi_result)
4795     {
4796       error ("invalid PHI result");
4797       return true;
4798     }
4799 
4800   virtual_p = virtual_operand_p (phi_result);
4801   if (TREE_CODE (phi_result) != SSA_NAME
4802       || (virtual_p
4803 	  && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4804     {
4805       error ("invalid PHI result");
4806       err = true;
4807     }
4808 
4809   for (i = 0; i < gimple_phi_num_args (phi); i++)
4810     {
4811       tree t = gimple_phi_arg_def (phi, i);
4812 
4813       if (!t)
4814 	{
4815 	  error ("missing PHI def");
4816 	  err |= true;
4817 	  continue;
4818 	}
4819       /* Addressable variables do have SSA_NAMEs but they
4820 	 are not considered gimple values.  */
4821       else if ((TREE_CODE (t) == SSA_NAME
4822 		&& virtual_p != virtual_operand_p (t))
4823 	       || (virtual_p
4824 		   && (TREE_CODE (t) != SSA_NAME
4825 		       || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4826 	       || (!virtual_p
4827 		   && !is_gimple_val (t)))
4828 	{
4829 	  error ("invalid PHI argument");
4830 	  debug_generic_expr (t);
4831 	  err |= true;
4832 	}
4833 #ifdef ENABLE_TYPES_CHECKING
4834       if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4835 	{
4836 	  error ("incompatible types in PHI argument %u", i);
4837 	  debug_generic_stmt (TREE_TYPE (phi_result));
4838 	  debug_generic_stmt (TREE_TYPE (t));
4839 	  err |= true;
4840 	}
4841 #endif
4842     }
4843 
4844   return err;
4845 }
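
/* Illustrative sketch (hypothetical IL): a PHI merging two reaching
   definitions,

     # x_3 = PHI <x_1(2), x_2(3)>

   must produce an SSA name, and each argument must be an SSA name or
   invariant of matching (virtual or non-virtual) kind; virtual PHIs
   must use the function's single virtual operand throughout.  */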
4846 
4847 /* Verify the GIMPLE statements inside the sequence STMTS.  */
4848 
4849 static bool
4850 verify_gimple_in_seq_2 (gimple_seq stmts)
4851 {
4852   gimple_stmt_iterator ittr;
4853   bool err = false;
4854 
4855   for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4856     {
4857       gimple *stmt = gsi_stmt (ittr);
4858 
4859       switch (gimple_code (stmt))
4860         {
4861 	case GIMPLE_BIND:
4862 	  err |= verify_gimple_in_seq_2 (
4863                    gimple_bind_body (as_a <gbind *> (stmt)));
4864 	  break;
4865 
4866 	case GIMPLE_TRY:
4867 	  err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4868 	  err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4869 	  break;
4870 
4871 	case GIMPLE_EH_FILTER:
4872 	  err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4873 	  break;
4874 
4875 	case GIMPLE_EH_ELSE:
4876 	  {
4877 	    geh_else *eh_else = as_a <geh_else *> (stmt);
4878 	    err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4879 	    err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4880 	  }
4881 	  break;
4882 
4883 	case GIMPLE_CATCH:
4884 	  err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4885 					   as_a <gcatch *> (stmt)));
4886 	  break;
4887 
4888 	case GIMPLE_TRANSACTION:
4889 	  err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4890 	  break;
4891 
4892 	default:
4893 	  {
4894 	    bool err2 = verify_gimple_stmt (stmt);
4895 	    if (err2)
4896 	      debug_gimple_stmt (stmt);
4897 	    err |= err2;
4898 	  }
4899 	}
4900     }
4901 
4902   return err;
4903 }
4904 
4905 /* Verify the contents of a GIMPLE_TRANSACTION.  Returns true if there
4906    is a problem, otherwise false.  */
4907 
4908 static bool
4909 verify_gimple_transaction (gtransaction *stmt)
4910 {
4911   tree lab;
4912 
4913   lab = gimple_transaction_label_norm (stmt);
4914   if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4915     return true;
4916   lab = gimple_transaction_label_uninst (stmt);
4917   if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4918     return true;
4919   lab = gimple_transaction_label_over (stmt);
4920   if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4921     return true;
4922 
4923   return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4924 }
4925 
4926 
4927 /* Verify the GIMPLE statements inside the statement list STMTS.  */
4928 
4929 DEBUG_FUNCTION void
4930 verify_gimple_in_seq (gimple_seq stmts)
4931 {
4932   timevar_push (TV_TREE_STMT_VERIFY);
4933   if (verify_gimple_in_seq_2 (stmts))
4934     internal_error ("verify_gimple failed");
4935   timevar_pop (TV_TREE_STMT_VERIFY);
4936 }
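
/* Usage sketch (assuming the usual accessor): before a CFG exists
   this can be run on a function's lowered body, e.g.

     verify_gimple_in_seq (gimple_body (current_function_decl));

   a failure prints the offending statement and aborts via
   internal_error.  */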
4937 
4938 /* Return true when T can be shared.  */
4939 
4940 static bool
4941 tree_node_can_be_shared (tree t)
4942 {
4943   if (IS_TYPE_OR_DECL_P (t)
4944       || is_gimple_min_invariant (t)
4945       || TREE_CODE (t) == SSA_NAME
4946       || t == error_mark_node
4947       || TREE_CODE (t) == IDENTIFIER_NODE)
4948     return true;
4949 
4950   if (TREE_CODE (t) == CASE_LABEL_EXPR)
4951     return true;
4952 
4953   if (DECL_P (t))
4954     return true;
4955 
4956   return false;
4957 }
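
/* Illustrative sketch: types, decls, SSA names and invariants such as
   the constant 42 or &global_var may legitimately appear in many
   statements at once; any other expression node, e.g. a MEM_REF, must
   be unshared, and the walkers below report a node reached twice.  */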
4958 
4959 /* Called via walk_tree.  Verify tree sharing.  */
4960 
4961 static tree
4962 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4963 {
4964   hash_set<void *> *visited = (hash_set<void *> *) data;
4965 
4966   if (tree_node_can_be_shared (*tp))
4967     {
4968       *walk_subtrees = false;
4969       return NULL;
4970     }
4971 
4972   if (visited->add (*tp))
4973     return *tp;
4974 
4975   return NULL;
4976 }
4977 
4978 /* Called via walk_gimple_stmt.  Verify tree sharing.  */
4979 
4980 static tree
4981 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4982 {
4983   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4984   return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4985 }
4986 
4987 static bool eh_error_found;
4988 bool
4989 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
4990 			   hash_set<gimple *> *visited)
4991 {
4992   if (!visited->contains (stmt))
4993     {
4994       error ("dead STMT in EH table");
4995       debug_gimple_stmt (stmt);
4996       eh_error_found = true;
4997     }
4998   return true;
4999 }
5000 
5001 /* Verify that the block of location LOC is in BLOCKS.  */
5002 
5003 static bool
5004 verify_location (hash_set<tree> *blocks, location_t loc)
5005 {
5006   tree block = LOCATION_BLOCK (loc);
5007   if (block != NULL_TREE
5008       && !blocks->contains (block))
5009     {
5010       error ("location references block not in block tree");
5011       return true;
5012     }
5013   if (block != NULL_TREE)
5014     return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5015   return false;
5016 }
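
/* Illustrative sketch: a statement location may carry the BLOCK of
   its lexical scope; the check above requires that BLOCK, and
   recursively the blocks of each BLOCK_SOURCE_LOCATION in the chain,
   be present in the block tree collected from DECL_INITIAL (see
   verify_gimple_in_cfg below).  */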
5017 
5018 /* Called via walk_tree.  Verify that expressions have no blocks.  */
5019 
5020 static tree
5021 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5022 {
5023   if (!EXPR_P (*tp))
5024     {
5025       *walk_subtrees = false;
5026       return NULL;
5027     }
5028 
5029   location_t loc = EXPR_LOCATION (*tp);
5030   if (LOCATION_BLOCK (loc) != NULL)
5031     return *tp;
5032 
5033   return NULL;
5034 }
5035 
5036 /* Called via walk_tree.  Verify locations of expressions.  */
5037 
5038 static tree
5039 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5040 {
5041   hash_set<tree> *blocks = (hash_set<tree> *) data;
5042 
5043   if (VAR_P (*tp) && DECL_HAS_DEBUG_EXPR_P (*tp))
5044     {
5045       tree t = DECL_DEBUG_EXPR (*tp);
5046       tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5047       if (addr)
5048 	return addr;
5049     }
5050   if ((VAR_P (*tp)
5051        || TREE_CODE (*tp) == PARM_DECL
5052        || TREE_CODE (*tp) == RESULT_DECL)
5053       && DECL_HAS_VALUE_EXPR_P (*tp))
5054     {
5055       tree t = DECL_VALUE_EXPR (*tp);
5056       tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5057       if (addr)
5058 	return addr;
5059     }
5060 
5061   if (!EXPR_P (*tp))
5062     {
5063       *walk_subtrees = false;
5064       return NULL;
5065     }
5066 
5067   location_t loc = EXPR_LOCATION (*tp);
5068   if (verify_location (blocks, loc))
5069     return *tp;
5070 
5071   return NULL;
5072 }
5073 
5074 /* Called via walk_gimple_op.  Verify locations of expressions.  */
5075 
5076 static tree
5077 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5078 {
5079   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5080   return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5081 }
5082 
5083 /* Insert all subblocks of BLOCK into BLOCKS and recurse.  */
5084 
5085 static void
5086 collect_subblocks (hash_set<tree> *blocks, tree block)
5087 {
5088   tree t;
5089   for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5090     {
5091       blocks->add (t);
5092       collect_subblocks (blocks, t);
5093     }
5094 }
5095 
5096 /* Verify the GIMPLE statements in the CFG of FN.  */
5097 
5098 DEBUG_FUNCTION void
5099 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5100 {
5101   basic_block bb;
5102   bool err = false;
5103 
5104   timevar_push (TV_TREE_STMT_VERIFY);
5105   hash_set<void *> visited;
5106   hash_set<gimple *> visited_stmts;
5107 
5108   /* Collect all BLOCKs referenced by the BLOCK tree of FN.  */
5109   hash_set<tree> blocks;
5110   if (DECL_INITIAL (fn->decl))
5111     {
5112       blocks.add (DECL_INITIAL (fn->decl));
5113       collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5114     }
5115 
5116   FOR_EACH_BB_FN (bb, fn)
5117     {
5118       gimple_stmt_iterator gsi;
5119 
5120       for (gphi_iterator gpi = gsi_start_phis (bb);
5121 	   !gsi_end_p (gpi);
5122 	   gsi_next (&gpi))
5123 	{
5124 	  gphi *phi = gpi.phi ();
5125 	  bool err2 = false;
5126 	  unsigned i;
5127 
5128 	  visited_stmts.add (phi);
5129 
5130 	  if (gimple_bb (phi) != bb)
5131 	    {
5132 	      error ("gimple_bb (phi) is set to a wrong basic block");
5133 	      err2 = true;
5134 	    }
5135 
5136 	  err2 |= verify_gimple_phi (phi);
5137 
5138 	  /* PHI nodes themselves carry no location; only their arguments do.  */
5139 	  if (gimple_location (phi) != UNKNOWN_LOCATION)
5140 	    {
5141 	      error ("PHI node with location");
5142 	      err2 = true;
5143 	    }
5144 
5145 	  for (i = 0; i < gimple_phi_num_args (phi); i++)
5146 	    {
5147 	      tree arg = gimple_phi_arg_def (phi, i);
5148 	      tree addr = walk_tree (&arg, verify_node_sharing_1,
5149 				     &visited, NULL);
5150 	      if (addr)
5151 		{
5152 		  error ("incorrect sharing of tree nodes");
5153 		  debug_generic_expr (addr);
5154 		  err2 |= true;
5155 		}
5156 	      location_t loc = gimple_phi_arg_location (phi, i);
5157 	      if (virtual_operand_p (gimple_phi_result (phi))
5158 		  && loc != UNKNOWN_LOCATION)
5159 		{
5160 		  error ("virtual PHI with argument locations");
5161 		  err2 = true;
5162 		}
5163 	      addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5164 	      if (addr)
5165 		{
5166 		  debug_generic_expr (addr);
5167 		  err2 = true;
5168 		}
5169 	      err2 |= verify_location (&blocks, loc);
5170 	    }
5171 
5172 	  if (err2)
5173 	    debug_gimple_stmt (phi);
5174 	  err |= err2;
5175 	}
5176 
5177       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5178 	{
5179 	  gimple *stmt = gsi_stmt (gsi);
5180 	  bool err2 = false;
5181 	  struct walk_stmt_info wi;
5182 	  tree addr;
5183 	  int lp_nr;
5184 
5185 	  visited_stmts.add (stmt);
5186 
5187 	  if (gimple_bb (stmt) != bb)
5188 	    {
5189 	      error ("gimple_bb (stmt) is set to a wrong basic block");
5190 	      err2 = true;
5191 	    }
5192 
5193 	  err2 |= verify_gimple_stmt (stmt);
5194 	  err2 |= verify_location (&blocks, gimple_location (stmt));
5195 
5196 	  memset (&wi, 0, sizeof (wi));
5197 	  wi.info = (void *) &visited;
5198 	  addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5199 	  if (addr)
5200 	    {
5201 	      error ("incorrect sharing of tree nodes");
5202 	      debug_generic_expr (addr);
5203 	      err2 |= true;
5204 	    }
5205 
5206 	  memset (&wi, 0, sizeof (wi));
5207 	  wi.info = (void *) &blocks;
5208 	  addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5209 	  if (addr)
5210 	    {
5211 	      debug_generic_expr (addr);
5212 	      err2 |= true;
5213 	    }
5214 
5215 	  /* ???  Instead of not checking these stmts at all, the walker
5216 	     should know its context via wi.  */
5217 	  if (!is_gimple_debug (stmt)
5218 	      && !is_gimple_omp (stmt))
5219 	    {
5220 	      memset (&wi, 0, sizeof (wi));
5221 	      addr = walk_gimple_op (stmt, verify_expr, &wi);
5222 	      if (addr)
5223 		{
5224 		  debug_generic_expr (addr);
5225 		  inform (gimple_location (stmt), "in statement");
5226 		  err2 |= true;
5227 		}
5228 	    }
5229 
5230 	  /* If the statement is marked as part of an EH region, then it is
5231 	     expected that the statement could throw.  Verify that when an
5232 	     optimization simplifies a statement such that we can prove it
5233 	     cannot throw, we also update the other data structures to
5234 	     match.  */
5235 	  lp_nr = lookup_stmt_eh_lp (stmt);
5236 	  if (lp_nr > 0)
5237 	    {
5238 	      if (!stmt_could_throw_p (stmt))
5239 		{
5240 		  if (verify_nothrow)
5241 		    {
5242 		      error ("statement marked for throw, but doesn%'t");
5243 		      err2 |= true;
5244 		    }
5245 		}
5246 	      else if (!gsi_one_before_end_p (gsi))
5247 		{
5248 		  error ("statement marked for throw in middle of block");
5249 		  err2 |= true;
5250 		}
5251 	    }
5252 
5253 	  if (err2)
5254 	    debug_gimple_stmt (stmt);
5255 	  err |= err2;
5256 	}
5257     }
5258 
5259   eh_error_found = false;
5260   hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5261   if (eh_table)
5262     eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5263       (&visited_stmts);
5264 
5265   if (err || eh_error_found)
5266     internal_error ("verify_gimple failed");
5267 
5268   verify_histograms ();
5269   timevar_pop (TV_TREE_STMT_VERIFY);
5270 }
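
/* Usage sketch: this is a debugging helper, typically called from
   checking builds or from a debugger session as

     verify_gimple_in_cfg (cfun, true);

   passing true for the second argument additionally reports
   statements that are marked as throwing but provably cannot.  */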
5271 
5272 
5273 /* Verifies that the flow information is OK.  */
5274 
5275 static int
5276 gimple_verify_flow_info (void)
5277 {
5278   int err = 0;
5279   basic_block bb;
5280   gimple_stmt_iterator gsi;
5281   gimple *stmt;
5282   edge e;
5283   edge_iterator ei;
5284 
5285   if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5286       || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5287     {
5288       error ("ENTRY_BLOCK has IL associated with it");
5289       err = 1;
5290     }
5291 
5292   if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5293       || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5294     {
5295       error ("EXIT_BLOCK has IL associated with it");
5296       err = 1;
5297     }
5298 
5299   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5300     if (e->flags & EDGE_FALLTHRU)
5301       {
5302 	error ("fallthru to exit from bb %d", e->src->index);
5303 	err = 1;
5304       }
5305 
5306   FOR_EACH_BB_FN (bb, cfun)
5307     {
5308       bool found_ctrl_stmt = false;
5309 
5310       stmt = NULL;
5311 
5312       /* Skip labels at the start of the basic block.  */
5313       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5314 	{
5315 	  tree label;
5316 	  gimple *prev_stmt = stmt;
5317 
5318 	  stmt = gsi_stmt (gsi);
5319 
5320 	  if (gimple_code (stmt) != GIMPLE_LABEL)
5321 	    break;
5322 
5323 	  label = gimple_label_label (as_a <glabel *> (stmt));
5324 	  if (prev_stmt && DECL_NONLOCAL (label))
5325 	    {
5326 	      error ("nonlocal label ");
5327 	      print_generic_expr (stderr, label, 0);
5328 	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
5329 		       bb->index);
5330 	      err = 1;
5331 	    }
5332 
5333 	  if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5334 	    {
5335 	      error ("EH landing pad label ");
5336 	      print_generic_expr (stderr, label, 0);
5337 	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
5338 		       bb->index);
5339 	      err = 1;
5340 	    }
5341 
5342 	  if (label_to_block (label) != bb)
5343 	    {
5344 	      error ("label ");
5345 	      print_generic_expr (stderr, label, 0);
5346 	      fprintf (stderr, " to block does not match in bb %d",
5347 		       bb->index);
5348 	      err = 1;
5349 	    }
5350 
5351 	  if (decl_function_context (label) != current_function_decl)
5352 	    {
5353 	      error ("label ");
5354 	      print_generic_expr (stderr, label, 0);
5355 	      fprintf (stderr, " has incorrect context in bb %d",
5356 		       bb->index);
5357 	      err = 1;
5358 	    }
5359 	}
5360 
5361       /* Verify that the body of basic block BB is free of control flow.  */
5362       for (; !gsi_end_p (gsi); gsi_next (&gsi))
5363 	{
5364 	  gimple *stmt = gsi_stmt (gsi);
5365 
5366 	  if (found_ctrl_stmt)
5367 	    {
5368 	      error ("control flow in the middle of basic block %d",
5369 		     bb->index);
5370 	      err = 1;
5371 	    }
5372 
5373 	  if (stmt_ends_bb_p (stmt))
5374 	    found_ctrl_stmt = true;
5375 
5376 	  if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5377 	    {
5378 	      error ("label ");
5379 	      print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
5380 	      fprintf (stderr, " in the middle of basic block %d", bb->index);
5381 	      err = 1;
5382 	    }
5383 	}
5384 
5385       gsi = gsi_last_bb (bb);
5386       if (gsi_end_p (gsi))
5387 	continue;
5388 
5389       stmt = gsi_stmt (gsi);
5390 
5391       if (gimple_code (stmt) == GIMPLE_LABEL)
5392 	continue;
5393 
5394       err |= verify_eh_edges (stmt);
5395 
5396       if (is_ctrl_stmt (stmt))
5397 	{
5398 	  FOR_EACH_EDGE (e, ei, bb->succs)
5399 	    if (e->flags & EDGE_FALLTHRU)
5400 	      {
5401 		error ("fallthru edge after a control statement in bb %d",
5402 		       bb->index);
5403 		err = 1;
5404 	      }
5405 	}
5406 
5407       if (gimple_code (stmt) != GIMPLE_COND)
5408 	{
5409 	  /* Verify that there are no edges with EDGE_TRUE/FALSE_VALUE set
5410 	     after anything other than a GIMPLE_COND statement.  */
5411 	  FOR_EACH_EDGE (e, ei, bb->succs)
5412 	    if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5413 	      {
5414 		error ("true/false edge after a non-GIMPLE_COND in bb %d",
5415 		       bb->index);
5416 		err = 1;
5417 	      }
5418 	}
5419 
5420       switch (gimple_code (stmt))
5421 	{
5422 	case GIMPLE_COND:
5423 	  {
5424 	    edge true_edge;
5425 	    edge false_edge;
5426 
5427 	    extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5428 
5429 	    if (!true_edge
5430 		|| !false_edge
5431 		|| !(true_edge->flags & EDGE_TRUE_VALUE)
5432 		|| !(false_edge->flags & EDGE_FALSE_VALUE)
5433 		|| (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5434 		|| (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5435 		|| EDGE_COUNT (bb->succs) >= 3)
5436 	      {
5437 		error ("wrong outgoing edge flags at end of bb %d",
5438 		       bb->index);
5439 		err = 1;
5440 	      }
5441 	  }
5442 	  break;
5443 
5444 	case GIMPLE_GOTO:
5445 	  if (simple_goto_p (stmt))
5446 	    {
5447 	      error ("explicit goto at end of bb %d", bb->index);
5448 	      err = 1;
5449 	    }
5450 	  else
5451 	    {
5452 	      /* FIXME.  We should double check that the labels in the
5453 		 destination blocks have their address taken.  */
5454 	      FOR_EACH_EDGE (e, ei, bb->succs)
5455 		if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5456 				 | EDGE_FALSE_VALUE))
5457 		    || !(e->flags & EDGE_ABNORMAL))
5458 		  {
5459 		    error ("wrong outgoing edge flags at end of bb %d",
5460 			   bb->index);
5461 		    err = 1;
5462 		  }
5463 	    }
5464 	  break;
5465 
5466 	case GIMPLE_CALL:
5467 	  if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5468 	    break;
5469 	  /* fallthru */
5470 	case GIMPLE_RETURN:
5471 	  if (!single_succ_p (bb)
5472 	      || (single_succ_edge (bb)->flags
5473 		  & (EDGE_FALLTHRU | EDGE_ABNORMAL
5474 		     | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5475 	    {
5476 	      error ("wrong outgoing edge flags at end of bb %d", bb->index);
5477 	      err = 1;
5478 	    }
5479 	  if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5480 	    {
5481 	      error ("return edge does not point to exit in bb %d",
5482 		     bb->index);
5483 	      err = 1;
5484 	    }
5485 	  break;
5486 
5487 	case GIMPLE_SWITCH:
5488 	  {
5489 	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
5490 	    tree prev;
5491 	    edge e;
5492 	    size_t i, n;
5493 
5494 	    n = gimple_switch_num_labels (switch_stmt);
5495 
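	    /* The checks below use BB->aux as a marker: 1 marks a case
	       destination, 2 marks a destination for which a matching
	       outgoing edge has been seen.  */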
5496 	    /* Mark all the destination basic blocks.  */
5497 	    for (i = 0; i < n; ++i)
5498 	      {
5499 		tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5500 		basic_block label_bb = label_to_block (lab);
5501 		gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5502 		label_bb->aux = (void *)1;
5503 	      }
5504 
5505 	    /* Verify that the case labels are sorted.  */
5506 	    prev = gimple_switch_label (switch_stmt, 0);
5507 	    for (i = 1; i < n; ++i)
5508 	      {
5509 		tree c = gimple_switch_label (switch_stmt, i);
5510 		if (!CASE_LOW (c))
5511 		  {
5512 		    error ("found default case not at the start of "
5513 			   "case vector");
5514 		    err = 1;
5515 		    continue;
5516 		  }
5517 		if (CASE_LOW (prev)
5518 		    && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5519 		  {
5520 		    error ("case labels not sorted: ");
5521 		    print_generic_expr (stderr, prev, 0);
5522 		    fprintf (stderr, " is greater than ");
5523 		    print_generic_expr (stderr, c, 0);
5524 		    fprintf (stderr, " but comes before it.\n");
5525 		    err = 1;
5526 		  }
5527 		prev = c;
5528 	      }
5529 	    /* VRP will remove the default case if it can prove it will
5530 	       never be executed.  So do not verify there always exists
5531 	       a default case here.  */
5532 
5533 	    FOR_EACH_EDGE (e, ei, bb->succs)
5534 	      {
5535 		if (!e->dest->aux)
5536 		  {
5537 		    error ("extra outgoing edge %d->%d",
5538 			   bb->index, e->dest->index);
5539 		    err = 1;
5540 		  }
5541 
5542 		e->dest->aux = (void *)2;
5543 		if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5544 				 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5545 		  {
5546 		    error ("wrong outgoing edge flags at end of bb %d",
5547 			   bb->index);
5548 		    err = 1;
5549 		  }
5550 	      }
5551 
5552 	    /* Check that an edge was found for every case destination.  */
5553 	    for (i = 0; i < n; ++i)
5554 	      {
5555 		tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5556 		basic_block label_bb = label_to_block (lab);
5557 
5558 		if (label_bb->aux != (void *)2)
5559 		  {
5560 		    error ("missing edge %i->%i", bb->index, label_bb->index);
5561 		    err = 1;
5562 		  }
5563 	      }
5564 
5565 	    FOR_EACH_EDGE (e, ei, bb->succs)
5566 	      e->dest->aux = (void *)0;
5567 	  }
5568 	  break;
5569 
5570 	case GIMPLE_EH_DISPATCH:
5571 	  err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5572 	  break;
5573 
5574 	default:
5575 	  break;
5576 	}
5577     }
5578 
5579   if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5580     verify_dominators (CDI_DOMINATORS);
5581 
5582   return err;
5583 }
5584 
5585 
5586 /* Updates phi nodes after creating a forwarder block joined
5587    by edge FALLTHRU.  */
5588 
5589 static void
5590 gimple_make_forwarder_block (edge fallthru)
5591 {
5592   edge e;
5593   edge_iterator ei;
5594   basic_block dummy, bb;
5595   tree var;
5596   gphi_iterator gsi;
5597 
5598   dummy = fallthru->src;
5599   bb = fallthru->dest;
5600 
5601   if (single_pred_p (bb))
5602     return;
5603 
5604   /* If we redirected a branch we must create new PHI nodes at the
5605      start of BB.  */
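  /* Each PHI in DUMMY is given a fresh result name; the original name is
     then defined by a new PHI in BB whose argument on FALLTHRU is that
     fresh name, so existing uses dominated by BB remain valid.  */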
5606   for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5607     {
5608       gphi *phi, *new_phi;
5609 
5610       phi = gsi.phi ();
5611       var = gimple_phi_result (phi);
5612       new_phi = create_phi_node (var, bb);
5613       gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5614       add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5615 		   UNKNOWN_LOCATION);
5616     }
5617 
5618   /* Add the arguments we have stored on edges.  */
5619   FOR_EACH_EDGE (e, ei, bb->preds)
5620     {
5621       if (e == fallthru)
5622 	continue;
5623 
5624       flush_pending_stmts (e);
5625     }
5626 }
5627 
5628 
5629 /* Return a non-special label at the head of basic block BB.
5630    Create one if it doesn't exist.  */
5631 
5632 tree
5633 gimple_block_label (basic_block bb)
5634 {
5635   gimple_stmt_iterator i, s = gsi_start_bb (bb);
5636   bool first = true;
5637   tree label;
5638   glabel *stmt;
5639 
5640   for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5641     {
5642       stmt = dyn_cast <glabel *> (gsi_stmt (i));
5643       if (!stmt)
5644 	break;
5645       label = gimple_label_label (stmt);
5646       if (!DECL_NONLOCAL (label))
5647 	{
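	  /* Canonicalize by moving the label to the head of the block so
	     the next lookup finds it immediately.  */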
5648 	  if (!first)
5649 	    gsi_move_before (&i, &s);
5650 	  return label;
5651 	}
5652     }
5653 
5654   label = create_artificial_label (UNKNOWN_LOCATION);
5655   stmt = gimple_build_label (label);
5656   gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5657   return label;
5658 }
5659 
5660 
5661 /* Attempt to perform edge redirection by replacing a possibly complex
5662    jump instruction by a goto or by removing the jump completely.
5663    This can apply only if all edges now point to the same block.  The
5664    parameters and return values are equivalent to
5665    redirect_edge_and_branch.  */
5666 
5667 static edge
5668 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5669 {
5670   basic_block src = e->src;
5671   gimple_stmt_iterator i;
5672   gimple *stmt;
5673 
5674   /* We can replace or remove a complex jump only when we have exactly
5675      two edges.  */
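  /* Note the idiom below: EDGE_SUCC (src, 0) == e evaluates to 1 exactly
     when edge 0 is E, so it indexes the other successor edge.  */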
5676   if (EDGE_COUNT (src->succs) != 2
5677       /* Verify that all targets will be TARGET.  Specifically, the
5678 	 edge that is not E must also go to TARGET.  */
5679       || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5680     return NULL;
5681 
5682   i = gsi_last_bb (src);
5683   if (gsi_end_p (i))
5684     return NULL;
5685 
5686   stmt = gsi_stmt (i);
5687 
5688   if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5689     {
5690       gsi_remove (&i, true);
5691       e = ssa_redirect_edge (e, target);
5692       e->flags = EDGE_FALLTHRU;
5693       return e;
5694     }
5695 
5696   return NULL;
5697 }
5698 
5699 
5700 /* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
5701    edge representing the redirected branch.  */
5702 
5703 static edge
5704 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5705 {
5706   basic_block bb = e->src;
5707   gimple_stmt_iterator gsi;
5708   edge ret;
5709   gimple *stmt;
5710 
5711   if (e->flags & EDGE_ABNORMAL)
5712     return NULL;
5713 
5714   if (e->dest == dest)
5715     return NULL;
5716 
5717   if (e->flags & EDGE_EH)
5718     return redirect_eh_edge (e, dest);
5719 
5720   if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5721     {
5722       ret = gimple_try_redirect_by_replacing_jump (e, dest);
5723       if (ret)
5724 	return ret;
5725     }
5726 
5727   gsi = gsi_last_bb (bb);
5728   stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5729 
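  /* An empty block has no control statement; GIMPLE_ERROR_MARK acts as a
     sentinel so the default (fallthru) case below handles it.  */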
5730   switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5731     {
5732     case GIMPLE_COND:
5733       /* For COND_EXPR, we only need to redirect the edge.  */
5734       break;
5735 
5736     case GIMPLE_GOTO:
5737       /* No non-abnormal edges should lead from a non-simple goto, and
5738 	 simple ones should be represented implicitly.  */
5739       gcc_unreachable ();
5740 
5741     case GIMPLE_SWITCH:
5742       {
5743 	gswitch *switch_stmt = as_a <gswitch *> (stmt);
5744 	tree label = gimple_block_label (dest);
5745         tree cases = get_cases_for_edge (e, switch_stmt);
5746 
5747 	/* If we have a list of cases associated with E, then use it
5748 	   as it's a lot faster than walking the entire case vector.  */
5749 	if (cases)
5750 	  {
5751 	    edge e2 = find_edge (e->src, dest);
5752 	    tree last, first;
5753 
5754 	    first = cases;
5755 	    while (cases)
5756 	      {
5757 		last = cases;
5758 		CASE_LABEL (cases) = label;
5759 		cases = CASE_CHAIN (cases);
5760 	      }
5761 
5762 	    /* If there was already an edge in the CFG, then we need
5763 	       to move all the cases associated with E to E2.  */
5764 	    if (e2)
5765 	      {
5766 		tree cases2 = get_cases_for_edge (e2, switch_stmt);
5767 
5768 		CASE_CHAIN (last) = CASE_CHAIN (cases2);
5769 		CASE_CHAIN (cases2) = first;
5770 	      }
5771 	    bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5772 	  }
5773 	else
5774 	  {
5775 	    size_t i, n = gimple_switch_num_labels (switch_stmt);
5776 
5777 	    for (i = 0; i < n; i++)
5778 	      {
5779 		tree elt = gimple_switch_label (switch_stmt, i);
5780 		if (label_to_block (CASE_LABEL (elt)) == e->dest)
5781 		  CASE_LABEL (elt) = label;
5782 	      }
5783 	  }
5784       }
5785       break;
5786 
5787     case GIMPLE_ASM:
5788       {
5789 	gasm *asm_stmt = as_a <gasm *> (stmt);
5790 	int i, n = gimple_asm_nlabels (asm_stmt);
5791 	tree label = NULL;
5792 
5793 	for (i = 0; i < n; ++i)
5794 	  {
5795 	    tree cons = gimple_asm_label_op (asm_stmt, i);
5796 	    if (label_to_block (TREE_VALUE (cons)) == e->dest)
5797 	      {
5798 		if (!label)
5799 		  label = gimple_block_label (dest);
5800 		TREE_VALUE (cons) = label;
5801 	      }
5802 	  }
5803 
5804 	/* If we didn't find any label matching the former edge in the
5805 	   asm labels, we must be redirecting the fallthrough
5806 	   edge.  */
5807 	gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5808       }
5809       break;
5810 
5811     case GIMPLE_RETURN:
5812       gsi_remove (&gsi, true);
5813       e->flags |= EDGE_FALLTHRU;
5814       break;
5815 
5816     case GIMPLE_OMP_RETURN:
5817     case GIMPLE_OMP_CONTINUE:
5818     case GIMPLE_OMP_SECTIONS_SWITCH:
5819     case GIMPLE_OMP_FOR:
5820       /* The edges from OMP constructs can be simply redirected.  */
5821       break;
5822 
5823     case GIMPLE_EH_DISPATCH:
5824       if (!(e->flags & EDGE_FALLTHRU))
5825 	redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5826       break;
5827 
5828     case GIMPLE_TRANSACTION:
5829       if (e->flags & EDGE_TM_ABORT)
5830 	gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
5831 				           gimple_block_label (dest));
5832       else if (e->flags & EDGE_TM_UNINSTRUMENTED)
5833 	gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
5834 				             gimple_block_label (dest));
5835       else
5836 	gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
5837 				           gimple_block_label (dest));
5838       break;
5839 
5840     default:
5841       /* Otherwise it must be a fallthru edge, and we don't need to
5842 	 do anything besides redirecting it.  */
5843       gcc_assert (e->flags & EDGE_FALLTHRU);
5844       break;
5845     }
5846 
5847   /* Now update the edges in the CFG.  ssa_redirect_edge also records
5848      the PHI arguments of E as pending statements to be flushed onto
5849      the new destination.  */
5850   e = ssa_redirect_edge (e, dest);
5851 
5852   return e;
5853 }
5854 
5855 /* Returns true if it is possible to remove edge E by redirecting
5856    it to the destination of the other edge from E->src.  */
5857 
5858 static bool
5859 gimple_can_remove_branch_p (const_edge e)
5860 {
5861   if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5862     return false;
5863 
5864   return true;
5865 }
5866 
5867 /* Simple wrapper, as we can always redirect fallthru edges.  */
5868 
5869 static basic_block
5870 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5871 {
5872   e = gimple_redirect_edge_and_branch (e, dest);
5873   gcc_assert (e);
5874 
5875   return NULL;
5876 }
5877 
5878 
5879 /* Splits basic block BB after statement STMT (but at least after the
5880    labels).  If STMT is NULL, BB is split just after the labels.  */
5881 
5882 static basic_block
5883 gimple_split_block (basic_block bb, void *stmt)
5884 {
5885   gimple_stmt_iterator gsi;
5886   gimple_stmt_iterator gsi_tgt;
5887   gimple_seq list;
5888   basic_block new_bb;
5889   edge e;
5890   edge_iterator ei;
5891 
5892   new_bb = create_empty_bb (bb);
5893 
5894   /* Redirect the outgoing edges.  */
5895   new_bb->succs = bb->succs;
5896   bb->succs = NULL;
5897   FOR_EACH_EDGE (e, ei, new_bb->succs)
5898     e->src = new_bb;
5899 
5900   /* Get a stmt iterator pointing to the first stmt to move.  */
5901   if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
5902     gsi = gsi_after_labels (bb);
5903   else
5904     {
5905       gsi = gsi_for_stmt ((gimple *) stmt);
5906       gsi_next (&gsi);
5907     }
5908 
5909   /* Move everything from GSI to the new basic block.  */
5910   if (gsi_end_p (gsi))
5911     return new_bb;
5912 
5913   /* Split the statement list - avoid re-creating new containers as this
5914      brings ugly quadratic memory consumption in the inliner.
5915      (We are still quadratic since we need to update stmt BB pointers,
5916      sadly.)  */
5917   gsi_split_seq_before (&gsi, &list);
5918   set_bb_seq (new_bb, list);
5919   for (gsi_tgt = gsi_start (list);
5920        !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5921     gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5922 
5923   return new_bb;
5924 }
5925 
5926 
5927 /* Moves basic block BB after block AFTER.  */
5928 
5929 static bool
5930 gimple_move_block_after (basic_block bb, basic_block after)
5931 {
5932   if (bb->prev_bb == after)
5933     return true;
5934 
5935   unlink_block (bb);
5936   link_block (bb, after);
5937 
5938   return true;
5939 }
5940 
5941 
5942 /* Return TRUE if block BB has no executable statements, otherwise return
5943    FALSE.  */
5944 
5945 static bool
5946 gimple_empty_block_p (basic_block bb)
5947 {
5948   /* BB must have no executable statements.  */
5949   gimple_stmt_iterator gsi = gsi_after_labels (bb);
5950   if (phi_nodes (bb))
5951     return false;
5952   if (gsi_end_p (gsi))
5953     return true;
5954   if (is_gimple_debug (gsi_stmt (gsi)))
5955     gsi_next_nondebug (&gsi);
5956   return gsi_end_p (gsi);
5957 }
5958 
5959 
5960 /* Split a basic block if it ends with a conditional branch and if the
5961    other part of the block is not empty.  */
5962 
5963 static basic_block
5964 gimple_split_block_before_cond_jump (basic_block bb)
5965 {
5966   gimple *last, *split_point;
5967   gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5968   if (gsi_end_p (gsi))
5969     return NULL;
5970   last = gsi_stmt (gsi);
5971   if (gimple_code (last) != GIMPLE_COND
5972       && gimple_code (last) != GIMPLE_SWITCH)
5973     return NULL;
5974   gsi_prev (&gsi);
5975   split_point = gsi_stmt (gsi);
5976   return split_block (bb, split_point)->dest;
5977 }
5978 
5979 
5980 /* Return true if basic_block can be duplicated.  */
5981 
5982 static bool
5983 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5984 {
5985   return true;
5986 }
5987 
5988 /* Create a duplicate of the basic block BB.  NOTE: This does not
5989    preserve SSA form.  */
5990 
5991 static basic_block
5992 gimple_duplicate_bb (basic_block bb, copy_bb_data *id)
5993 {
5994   basic_block new_bb;
5995   gimple_stmt_iterator gsi_tgt;
5996 
5997   new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5998 
5999   /* Copy the PHI nodes.  We ignore PHI node arguments here because
6000      the incoming edges have not been set up yet.  */
6001   for (gphi_iterator gpi = gsi_start_phis (bb);
6002        !gsi_end_p (gpi);
6003        gsi_next (&gpi))
6004     {
6005       gphi *phi, *copy;
6006       phi = gpi.phi ();
6007       copy = create_phi_node (NULL_TREE, new_bb);
6008       create_new_def_for (gimple_phi_result (phi), copy,
6009 			  gimple_phi_result_ptr (copy));
6010       gimple_set_uid (copy, gimple_uid (phi));
6011     }
6012 
6013   gsi_tgt = gsi_start_bb (new_bb);
6014   for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6015        !gsi_end_p (gsi);
6016        gsi_next (&gsi))
6017     {
6018       def_operand_p def_p;
6019       ssa_op_iter op_iter;
6020       tree lhs;
6021       gimple *stmt, *copy;
6022 
6023       stmt = gsi_stmt (gsi);
6024       if (gimple_code (stmt) == GIMPLE_LABEL)
6025 	continue;
6026 
6027       /* Don't duplicate label debug stmts.  */
6028       if (gimple_debug_bind_p (stmt)
6029 	  && TREE_CODE (gimple_debug_bind_get_var (stmt))
6030 	     == LABEL_DECL)
6031 	continue;
6032 
6033       /* Create a new copy of STMT and duplicate STMT's virtual
6034 	 operands.  */
6035       copy = gimple_copy (stmt);
6036       gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6037 
6038       maybe_duplicate_eh_stmt (copy, stmt);
6039       gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6040 
6041       /* When copying around a stmt writing into a local non-user
6042 	 aggregate, make sure it won't share a stack slot with other
6043 	 vars.  */
6044       lhs = gimple_get_lhs (stmt);
6045       if (lhs && TREE_CODE (lhs) != SSA_NAME)
6046 	{
6047 	  tree base = get_base_address (lhs);
6048 	  if (base
6049 	      && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6050 	      && DECL_IGNORED_P (base)
6051 	      && !TREE_STATIC (base)
6052 	      && !DECL_EXTERNAL (base)
6053 	      && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6054 	    DECL_NONSHAREABLE (base) = 1;
6055 	}
6056 
6057       /* If requested, remap the dependence info of cliques brought in
6058          via inlining, allocating one fresh clique number per original.  */
6059       if (id)
6060 	for (unsigned i = 0; i < gimple_num_ops (copy); ++i)
6061 	  {
6062 	    tree op = gimple_op (copy, i);
6063 	    if (!op)
6064 	      continue;
6065 	    if (TREE_CODE (op) == ADDR_EXPR
6066 		|| TREE_CODE (op) == WITH_SIZE_EXPR)
6067 	      op = TREE_OPERAND (op, 0);
6068 	    while (handled_component_p (op))
6069 	      op = TREE_OPERAND (op, 0);
6070 	    if ((TREE_CODE (op) == MEM_REF
6071 		 || TREE_CODE (op) == TARGET_MEM_REF)
6072 		&& MR_DEPENDENCE_CLIQUE (op) > 1
6073 		&& MR_DEPENDENCE_CLIQUE (op) != bb->loop_father->owned_clique)
6074 	      {
6075 		if (!id->dependence_map)
6076 		  id->dependence_map = new hash_map<dependence_hash,
6077 						    unsigned short>;
6078 		bool existed;
6079 		unsigned short &newc = id->dependence_map->get_or_insert
6080 		    (MR_DEPENDENCE_CLIQUE (op), &existed);
6081 		if (!existed)
6082 		  {
6083 		    gcc_assert (MR_DEPENDENCE_CLIQUE (op) <= cfun->last_clique);
6084 		    newc = ++cfun->last_clique;
6085 		  }
6086 		MR_DEPENDENCE_CLIQUE (op) = newc;
6087 	      }
6088 	  }
6089 
6090       /* Create new names for all the definitions created by COPY and
6091 	 add replacement mappings for each new name.  */
6092       FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6093 	create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6094     }
6095 
6096   return new_bb;
6097 }
6098 
6099 /* Adds phi node arguments for edge E_COPY after basic block duplication.  */
6100 
6101 static void
6102 add_phi_args_after_copy_edge (edge e_copy)
6103 {
6104   basic_block bb, bb_copy = e_copy->src, dest;
6105   edge e;
6106   edge_iterator ei;
6107   gphi *phi, *phi_copy;
6108   tree def;
6109   gphi_iterator psi, psi_copy;
6110 
6111   if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6112     return;
6113 
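  /* Blocks created by the duplication carry BB_DUPLICATED; map them back
     to their originals so the corresponding original edge can be found.  */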
6114   bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6115 
6116   if (e_copy->dest->flags & BB_DUPLICATED)
6117     dest = get_bb_original (e_copy->dest);
6118   else
6119     dest = e_copy->dest;
6120 
6121   e = find_edge (bb, dest);
6122   if (!e)
6123     {
6124       /* During loop unrolling the target of the latch edge is copied.
6125 	 In this case we are not looking for the edge to DEST, but for
6126 	 the edge to the duplicated block whose original was DEST.  */
6127       FOR_EACH_EDGE (e, ei, bb->succs)
6128 	{
6129 	  if ((e->dest->flags & BB_DUPLICATED)
6130 	      && get_bb_original (e->dest) == dest)
6131 	    break;
6132 	}
6133 
6134       gcc_assert (e != NULL);
6135     }
6136 
6137   for (psi = gsi_start_phis (e->dest),
6138        psi_copy = gsi_start_phis (e_copy->dest);
6139        !gsi_end_p (psi);
6140        gsi_next (&psi), gsi_next (&psi_copy))
6141     {
6142       phi = psi.phi ();
6143       phi_copy = psi_copy.phi ();
6144       def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6145       add_phi_arg (phi_copy, def, e_copy,
6146 		   gimple_phi_arg_location_from_edge (phi, e));
6147     }
6148 }
6149 
6150 
6151 /* Basic block BB_COPY was created by code duplication.  Add phi node
6152    arguments for edges going out of BB_COPY.  The blocks that were
6153    duplicated have BB_DUPLICATED set.  */
6154 
6155 void
6156 add_phi_args_after_copy_bb (basic_block bb_copy)
6157 {
6158   edge e_copy;
6159   edge_iterator ei;
6160 
6161   FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6162     {
6163       add_phi_args_after_copy_edge (e_copy);
6164     }
6165 }
6166 
6167 /* Blocks in REGION_COPY array of length N_REGION were created by
6168    duplication of basic blocks.  Add phi node arguments for edges
6169    going from these blocks.  If E_COPY is not NULL, also add
6170    phi node arguments for its destination.  */
6171 
6172 void
6173 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6174 			 edge e_copy)
6175 {
6176   unsigned i;
6177 
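  /* Temporarily mark the duplicated blocks so add_phi_args_after_copy_edge
     can map copies back to their originals.  */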
6178   for (i = 0; i < n_region; i++)
6179     region_copy[i]->flags |= BB_DUPLICATED;
6180 
6181   for (i = 0; i < n_region; i++)
6182     add_phi_args_after_copy_bb (region_copy[i]);
6183   if (e_copy)
6184     add_phi_args_after_copy_edge (e_copy);
6185 
6186   for (i = 0; i < n_region; i++)
6187     region_copy[i]->flags &= ~BB_DUPLICATED;
6188 }
6189 
6190 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6191    important exit edge EXIT.  By important we mean that no SSA name defined
6192    inside the region is live over the other exit edges of the region.  All entry
6193    edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
6194    to the duplicate of the region.  Dominance and loop information is
6195    updated if UPDATE_DOMINANCE is true, but not the SSA web.  If
6196    UPDATE_DOMINANCE is false then we assume that the caller will update the
6197    dominance information after calling this function.  The new basic
6198    blocks are stored to REGION_COPY in the same order as they appear in REGION,
6199    provided that REGION_COPY is not NULL.
6200    The function returns false if it is unable to copy the region,
6201    true otherwise.  */
6202 
6203 bool
6204 gimple_duplicate_sese_region (edge entry, edge exit,
6205 			    basic_block *region, unsigned n_region,
6206 			    basic_block *region_copy,
6207 			    bool update_dominance)
6208 {
6209   unsigned i;
6210   bool free_region_copy = false, copying_header = false;
6211   struct loop *loop = entry->dest->loop_father;
6212   edge exit_copy;
6213   vec<basic_block> doms;
6214   edge redirected;
6215   int total_freq = 0, entry_freq = 0;
6216   gcov_type total_count = 0, entry_count = 0;
6217 
6218   if (!can_copy_bbs_p (region, n_region))
6219     return false;
6220 
6221   /* Some sanity checking.  Note that we do not check for all possible
6222      misuses of the functions.  That is, if you ask to copy something weird,
6223      it will work, but the state of structures probably will not be
6224      correct.  */
6225   for (i = 0; i < n_region; i++)
6226     {
6227       /* We do not handle subloops, i.e. all the blocks must belong to the
6228 	 same loop.  */
6229       if (region[i]->loop_father != loop)
6230 	return false;
6231 
6232       if (region[i] != entry->dest
6233 	  && region[i] == loop->header)
6234 	return false;
6235     }
6236 
6237   /* In case the function is used for loop header copying (which is the primary
6238      use), ensure that EXIT and its copy will be the new latch and entry edges.  */
6239   if (loop->header == entry->dest)
6240     {
6241       copying_header = true;
6242 
6243       if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6244 	return false;
6245 
6246       for (i = 0; i < n_region; i++)
6247 	if (region[i] != exit->src
6248 	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6249 	  return false;
6250     }
6251 
6252   initialize_original_copy_tables ();
6253 
6254   if (copying_header)
6255     set_loop_copy (loop, loop_outer (loop));
6256   else
6257     set_loop_copy (loop, loop);
6258 
6259   if (!region_copy)
6260     {
6261       region_copy = XNEWVEC (basic_block, n_region);
6262       free_region_copy = true;
6263     }
6264 
6265   /* Record blocks outside the region that are dominated by something
6266      inside.  */
6267   if (update_dominance)
6268     {
6269       doms.create (0);
6270       doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6271     }
6272 
6273   if (entry->dest->count)
6274     {
6275       total_count = entry->dest->count;
6276       entry_count = entry->count;
6277       /* Fix up corner cases, to avoid division by zero or creation of negative
6278 	 frequencies.  */
6279       if (entry_count > total_count)
6280 	entry_count = total_count;
6281     }
6282   else
6283     {
6284       total_freq = entry->dest->frequency;
6285       entry_freq = EDGE_FREQUENCY (entry);
6286       /* Fix up corner cases, to avoid division by zero or creation of negative
6287 	 frequencies.  */
6288       if (total_freq == 0)
6289 	total_freq = 1;
6290       else if (entry_freq > total_freq)
6291 	entry_freq = total_freq;
6292     }
6293 
6294   copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6295 	    split_edge_bb_loc (entry), update_dominance);
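  /* Split the profile between the original region, which keeps the share
     not entering through ENTRY, and the copy, which receives ENTRY's
     share.  */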
6296   if (total_count)
6297     {
6298       scale_bbs_frequencies_gcov_type (region, n_region,
6299 				       total_count - entry_count,
6300 				       total_count);
6301       scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6302 				       total_count);
6303     }
6304   else
6305     {
6306       scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6307 				 total_freq);
6308       scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6309     }
6310 
6311   if (copying_header)
6312     {
6313       loop->header = exit->dest;
6314       loop->latch = exit->src;
6315     }
6316 
6317   /* Redirect the entry and add the phi node arguments.  */
6318   redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6319   gcc_assert (redirected != NULL);
6320   flush_pending_stmts (entry);
6321 
6322   /* Concerning updating of dominators:  We must recount dominators
6323      for the entry block and its copy.  Anything outside the region
6324      that was dominated by something inside needs recounting as
6325      well.  */
6326   if (update_dominance)
6327     {
6328       set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6329       doms.safe_push (get_bb_original (entry->dest));
6330       iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6331       doms.release ();
6332     }
6333 
6334   /* Add the other PHI node arguments.  */
6335   add_phi_args_after_copy (region_copy, n_region, NULL);
6336 
6337   if (free_region_copy)
6338     free (region_copy);
6339 
6340   free_original_copy_tables ();
6341   return true;
6342 }
6343 
6344 /* Checks whether BB is one of the N_REGION blocks in BBS.  */
6345 static bool
6346 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6347 {
6348   unsigned int n;
6349 
6350   for (n = 0; n < n_region; n++)
6351     {
6352       if (bb == bbs[n])
6353 	return true;
6354     }
6355   return false;
6356 }
6357 
6358 /* Duplicates REGION consisting of N_REGION blocks.  The new blocks
6359    are stored to REGION_COPY in the same order as they appear
6360    in REGION, if REGION_COPY is not NULL.  ENTRY is the entry to
6361    the region, EXIT an exit from it.  The condition guarding EXIT
6362    is moved to ENTRY.  Returns true if duplication succeeds, false
6363    otherwise.
6364 
6365    For example,
6366 
6367    some_code;
6368    if (cond)
6369      A;
6370    else
6371      B;
6372 
6373    is transformed to
6374 
6375    if (cond)
6376      {
6377        some_code;
6378        A;
6379      }
6380    else
6381      {
6382        some_code;
6383        B;
6384      }
6385 */
6386 
6387 bool
6388 gimple_duplicate_sese_tail (edge entry, edge exit,
6389 			  basic_block *region, unsigned n_region,
6390 			  basic_block *region_copy)
6391 {
6392   unsigned i;
6393   bool free_region_copy = false;
6394   struct loop *loop = exit->dest->loop_father;
6395   struct loop *orig_loop = entry->dest->loop_father;
6396   basic_block switch_bb, entry_bb, nentry_bb;
6397   vec<basic_block> doms;
6398   int total_freq = 0, exit_freq = 0;
6399   gcov_type total_count = 0, exit_count = 0;
6400   edge exits[2], nexits[2], e;
6401   gimple_stmt_iterator gsi;
6402   gimple *cond_stmt;
6403   edge sorig, snew;
6404   basic_block exit_bb;
6405   gphi_iterator psi;
6406   gphi *phi;
6407   tree def;
6408   struct loop *target, *aloop, *cloop;
6409 
6410   gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6411   exits[0] = exit;
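  /* EDGE_SUCC (exit->src, 0) == exit is 1 exactly when edge 0 is EXIT,
     so EXITS[1] becomes the other successor edge.  */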
6412   exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6413 
6414   if (!can_copy_bbs_p (region, n_region))
6415     return false;
6416 
6417   initialize_original_copy_tables ();
6418   set_loop_copy (orig_loop, loop);
6419 
6420   target = loop;
6421   for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6422     {
6423       if (bb_part_of_region_p (aloop->header, region, n_region))
6424 	{
6425 	  cloop = duplicate_loop (aloop, target);
6426 	  duplicate_subloops (aloop, cloop);
6427 	}
6428     }
6429 
6430   if (!region_copy)
6431     {
6432       region_copy = XNEWVEC (basic_block, n_region);
6433       free_region_copy = true;
6434     }
6435 
6436   gcc_assert (!need_ssa_update_p (cfun));
6437 
6438   /* Record blocks outside the region that are dominated by something
6439      inside.  */
6440   doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6441 
6442   if (exit->src->count)
6443     {
6444       total_count = exit->src->count;
6445       exit_count = exit->count;
6446       /* Fix up corner cases, to avoid division by zero or creation of negative
6447 	 frequencies.  */
6448       if (exit_count > total_count)
6449 	exit_count = total_count;
6450     }
6451   else
6452     {
6453       total_freq = exit->src->frequency;
6454       exit_freq = EDGE_FREQUENCY (exit);
6455       /* Fix up corner cases, to avoid division by zero or creation of negative
6456 	 frequencies.  */
6457       if (total_freq == 0)
6458 	total_freq = 1;
6459       if (exit_freq > total_freq)
6460 	exit_freq = total_freq;
6461     }
6462 
6463   copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6464 	    split_edge_bb_loc (exit), true);
6465   if (total_count)
6466     {
6467       scale_bbs_frequencies_gcov_type (region, n_region,
6468 				       total_count - exit_count,
6469 				       total_count);
6470       scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6471 				       total_count);
6472     }
6473   else
6474     {
6475       scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6476 				 total_freq);
6477       scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6478     }
6479 
6480   /* Create the switch block, and put the exit condition into it.  */
6481   entry_bb = entry->dest;
6482   nentry_bb = get_bb_copy (entry_bb);
6483   if (!last_stmt (entry->src)
6484       || !stmt_ends_bb_p (last_stmt (entry->src)))
6485     switch_bb = entry->src;
6486   else
6487     switch_bb = split_edge (entry);
6488   set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6489 
6490   gsi = gsi_last_bb (switch_bb);
6491   cond_stmt = last_stmt (exit->src);
6492   gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6493   cond_stmt = gimple_copy (cond_stmt);
6494 
6495   gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6496 
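  /* Wire up SWITCH_BB: its existing successor edge, leading into the
     original region, inherits the flags of the in-region exit, while a
     new edge into the copied region takes the flags of EXIT itself.  */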
6497   sorig = single_succ_edge (switch_bb);
6498   sorig->flags = exits[1]->flags;
6499   snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6500 
6501   /* Register the new edge from SWITCH_BB in loop exit lists.  */
6502   rescan_loop_exit (snew, true, false);
6503 
6504   /* Add the PHI node arguments.  */
6505   add_phi_args_after_copy (region_copy, n_region, snew);
6506 
6507   /* Get rid of now superfluous conditions and associated edges (and phi node
6508      arguments).  */
6509   exit_bb = exit->dest;
6510 
6511   e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6512   PENDING_STMT (e) = NULL;
6513 
6514   /* The latch of ORIG_LOOP was copied, and so was the backedge
6515      to the original header.  We redirect this backedge to EXIT_BB.  */
6516   for (i = 0; i < n_region; i++)
6517     if (get_bb_original (region_copy[i]) == orig_loop->latch)
6518       {
6519 	gcc_assert (single_succ_edge (region_copy[i]));
6520 	e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6521 	PENDING_STMT (e) = NULL;
6522 	for (psi = gsi_start_phis (exit_bb);
6523 	     !gsi_end_p (psi);
6524 	     gsi_next (&psi))
6525 	  {
6526 	    phi = psi.phi ();
6527 	    def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6528 	    add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6529 	  }
6530       }
6531   e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6532   PENDING_STMT (e) = NULL;
6533 
6534   /* Anything that is outside of the region, but was dominated by something
6535      inside needs to update dominance info.  */
6536   iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6537   doms.release ();
6538   /* Update the SSA web.  */
6539   update_ssa (TODO_update_ssa);
6540 
6541   if (free_region_copy)
6542     free (region_copy);
6543 
6544   free_original_copy_tables ();
6545   return true;
6546 }
6547 
6548 /* Add all the blocks dominated by ENTRY to the array BBS_P.  Stop
6549    adding blocks when the dominator traversal reaches EXIT.  This
6550    function silently assumes that ENTRY strictly dominates EXIT.  */
6551 
6552 void
6553 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6554 			      vec<basic_block> *bbs_p)
6555 {
6556   basic_block son;
6557 
6558   for (son = first_dom_son (CDI_DOMINATORS, entry);
6559        son;
6560        son = next_dom_son (CDI_DOMINATORS, son))
6561     {
6562       bbs_p->safe_push (son);
6563       if (son != exit)
6564 	gather_blocks_in_sese_region (son, exit, bbs_p);
6565     }
6566 }
6567 
6568 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6569    The duplicates are recorded in VARS_MAP.  */
6570 
6571 static void
6572 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6573 			   tree to_context)
6574 {
6575   tree t = *tp, new_t;
6576   struct function *f = DECL_STRUCT_FUNCTION (to_context);
6577 
6578   if (DECL_CONTEXT (t) == to_context)
6579     return;
6580 
6581   bool existed;
6582   tree &loc = vars_map->get_or_insert (t, &existed);
6583 
6584   if (!existed)
6585     {
6586       if (SSA_VAR_P (t))
6587 	{
6588 	  new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6589 	  add_local_decl (f, new_t);
6590 	}
6591       else
6592 	{
6593 	  gcc_assert (TREE_CODE (t) == CONST_DECL);
6594 	  new_t = copy_node (t);
6595 	}
6596       DECL_CONTEXT (new_t) = to_context;
6597 
6598       loc = new_t;
6599     }
6600   else
6601     new_t = loc;
6602 
6603   *tp = new_t;
6604 }
6605 
6606 
6607 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6608    VARS_MAP maps old ssa names and var_decls to the new ones.  */
6609 
6610 static tree
6611 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6612 		  tree to_context)
6613 {
6614   tree new_name;
6615 
6616   gcc_assert (!virtual_operand_p (name));
6617 
6618   tree *loc = vars_map->get (name);
6619 
6620   if (!loc)
6621     {
6622       tree decl = SSA_NAME_VAR (name);
6623       if (decl)
6624 	{
6625 	  gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6626 	  replace_by_duplicate_decl (&decl, vars_map, to_context);
6627 	  new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6628 				       decl, SSA_NAME_DEF_STMT (name));
6629 	}
6630       else
6631 	new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6632 				     name, SSA_NAME_DEF_STMT (name));
6633 
6634       /* Now that we've used the def stmt to define new_name, make sure it
6635 	 doesn't define name anymore.  */
6636       SSA_NAME_DEF_STMT (name) = NULL;
6637 
6638       vars_map->put (name, new_name);
6639     }
6640   else
6641     new_name = *loc;
6642 
6643   return new_name;
6644 }
6645 
6646 struct move_stmt_d
6647 {
6648   tree orig_block;
6649   tree new_block;
6650   tree from_context;
6651   tree to_context;
6652   hash_map<tree, tree> *vars_map;
6653   htab_t new_label_map;
6654   hash_map<void *, void *> *eh_map;
6655   bool remap_decls_p;
6656 };
6657 
6658 /* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
6659    contained in *TP if it was previously set to ORIG_BLOCK, and change
6660    the DECL_CONTEXT of every local variable referenced in *TP.  */
6661 
6662 static tree
6663 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6664 {
6665   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6666   struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6667   tree t = *tp;
6668 
6669   if (EXPR_P (t))
6670     {
6671       tree block = TREE_BLOCK (t);
6672       if (block == NULL_TREE)
6673 	;
6674       else if (block == p->orig_block
6675 	       || p->orig_block == NULL_TREE)
6676 	{
6677 	  /* tree_node_can_be_shared says we can share invariant
6678 	     addresses but unshare_expr copies them anyway.  Make sure
6679 	     to unshare before adjusting the block in place - we do not
6680 	     always see a copy here.  */
6681 	  if (TREE_CODE (t) == ADDR_EXPR
6682 	      && is_gimple_min_invariant (t))
6683 	    *tp = t = unshare_expr (t);
6684 	  TREE_SET_BLOCK (t, p->new_block);
6685 	}
6686       else if (flag_checking)
6687 	{
6688 	  while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6689 	    block = BLOCK_SUPERCONTEXT (block);
6690 	  gcc_assert (block == p->orig_block);
6691 	}
6692     }
6693   else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6694     {
6695       if (TREE_CODE (t) == SSA_NAME)
6696 	*tp = replace_ssa_name (t, p->vars_map, p->to_context);
6697       else if (TREE_CODE (t) == PARM_DECL
6698 	       && gimple_in_ssa_p (cfun))
6699 	*tp = *(p->vars_map->get (t));
6700       else if (TREE_CODE (t) == LABEL_DECL)
6701 	{
6702 	  if (p->new_label_map)
6703 	    {
6704 	      struct tree_map in, *out;
6705 	      in.base.from = t;
6706 	      out = (struct tree_map *)
6707 		htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6708 	      if (out)
6709 		*tp = t = out->to;
6710 	    }
6711 
6712 	  /* For FORCED_LABELs we can end up with references from other
6713 	     functions if some SESE regions are outlined.  It is UB to
6714 	     jump in between them, but they could be used just for printing
6715 	     addresses etc.  In that case, DECL_CONTEXT on the label should
6716 	     be the function containing the glabel stmt with that LABEL_DECL,
6717 	     rather than whichever function a reference to the label was
6718 	     last seen in.  */
6719 	  if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
6720 	    DECL_CONTEXT (t) = p->to_context;
6721 	}
6722       else if (p->remap_decls_p)
6723 	{
6724 	  /* Replace T with its duplicate.  T should no longer appear in the
6725 	     parent function, so this looks wasteful; however, it may appear
6726 	     in referenced_vars, and more importantly, as virtual operands of
6727 	     statements, and in alias lists of other variables.  It would be
6728 	     quite difficult to expunge it from all those places.  ??? It might
6729 	     suffice to do this for addressable variables.  */
6730 	  if ((VAR_P (t) && !is_global_var (t))
6731 	      || TREE_CODE (t) == CONST_DECL)
6732 	    replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6733 	}
6734       *walk_subtrees = 0;
6735     }
6736   else if (TYPE_P (t))
6737     *walk_subtrees = 0;
6738 
6739   return NULL_TREE;
6740 }
6741 
6742 /* Helper for move_stmt_r.  Given an EH region number for the source
6743    function, map that to the duplicate EH region number in the dest.  */
6744 
6745 static int
6746 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6747 {
6748   eh_region old_r, new_r;
6749 
6750   old_r = get_eh_region_from_number (old_nr);
6751   new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6752 
6753   return new_r->index;
6754 }
6755 
6756 /* Similar, but operate on INTEGER_CSTs.  */
6757 
6758 static tree
6759 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6760 {
6761   int old_nr, new_nr;
6762 
6763   old_nr = tree_to_shwi (old_t_nr);
6764   new_nr = move_stmt_eh_region_nr (old_nr, p);
6765 
6766   return build_int_cst (integer_type_node, new_nr);
6767 }
6768 
6769 /* Like move_stmt_op, but for gimple statements.
6770 
6771    Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
6772    contained in the current statement in *GSI_P and change the
6773    DECL_CONTEXT of every local variable referenced in the current
6774    statement.  */
6775 
6776 static tree
6777 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6778 	     struct walk_stmt_info *wi)
6779 {
6780   struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6781   gimple *stmt = gsi_stmt (*gsi_p);
6782   tree block = gimple_block (stmt);
6783 
6784   if (block == p->orig_block
6785       || (p->orig_block == NULL_TREE
6786 	  && block != NULL_TREE))
6787     gimple_set_block (stmt, p->new_block);
6788 
6789   switch (gimple_code (stmt))
6790     {
6791     case GIMPLE_CALL:
6792       /* Remap the region numbers for __builtin_eh_{pointer,filter}.  */
6793       {
6794 	tree r, fndecl = gimple_call_fndecl (stmt);
6795 	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6796 	  switch (DECL_FUNCTION_CODE (fndecl))
6797 	    {
6798 	    case BUILT_IN_EH_COPY_VALUES:
6799 	      r = gimple_call_arg (stmt, 1);
6800 	      r = move_stmt_eh_region_tree_nr (r, p);
6801 	      gimple_call_set_arg (stmt, 1, r);
6802 	      /* FALLTHRU */
6803 
6804 	    case BUILT_IN_EH_POINTER:
6805 	    case BUILT_IN_EH_FILTER:
6806 	      r = gimple_call_arg (stmt, 0);
6807 	      r = move_stmt_eh_region_tree_nr (r, p);
6808 	      gimple_call_set_arg (stmt, 0, r);
6809 	      break;
6810 
6811 	    default:
6812 	      break;
6813 	    }
6814       }
6815       break;
6816 
6817     case GIMPLE_RESX:
6818       {
6819 	gresx *resx_stmt = as_a <gresx *> (stmt);
6820 	int r = gimple_resx_region (resx_stmt);
6821 	r = move_stmt_eh_region_nr (r, p);
6822 	gimple_resx_set_region (resx_stmt, r);
6823       }
6824       break;
6825 
6826     case GIMPLE_EH_DISPATCH:
6827       {
6828 	geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6829 	int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6830 	r = move_stmt_eh_region_nr (r, p);
6831 	gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6832       }
6833       break;
6834 
6835     case GIMPLE_OMP_RETURN:
6836     case GIMPLE_OMP_CONTINUE:
6837       break;
6838 
6839     case GIMPLE_LABEL:
6840       {
6841 	/* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
6842 	   so that such labels can be referenced from other regions.
6843 	   Make sure to update it when seeing a GIMPLE_LABEL though,
6844 	   that is the owner of the label.  */
6845 	walk_gimple_op (stmt, move_stmt_op, wi);
6846 	*handled_ops_p = true;
6847 	tree label = gimple_label_label (as_a <glabel *> (stmt));
6848 	if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
6849 	  DECL_CONTEXT (label) = p->to_context;
6850       }
6851       break;
6852 
6853     default:
6854       if (is_gimple_omp (stmt))
6855 	{
6856 	  /* Do not remap variables inside OMP directives.  Variables
6857 	     referenced in clauses and directive header belong to the
6858 	     parent function and should not be moved into the child
6859 	     function.  */
6860 	  bool save_remap_decls_p = p->remap_decls_p;
6861 	  p->remap_decls_p = false;
6862 	  *handled_ops_p = true;
6863 
6864 	  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6865 			       move_stmt_op, wi);
6866 
6867 	  p->remap_decls_p = save_remap_decls_p;
6868 	}
6869       break;
6870     }
6871 
6872   return NULL_TREE;
6873 }
6874 
6875 /* Move basic block BB from function CFUN to function DEST_FN.  The
6876    block is moved out of the original linked list and placed after
6877    block AFTER in the new list.  Also, the block is removed from the
6878    original array of blocks and placed in DEST_FN's array of blocks.
6879    If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
6880    If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6881 
6882    The local variables are remapped to new instances, VARS_MAP is used
6883    to record the mapping.  */
6884 
6885 static void
6886 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6887 		  basic_block after, bool update_edge_count_p,
6888 		  struct move_stmt_d *d)
6889 {
6890   struct control_flow_graph *cfg;
6891   edge_iterator ei;
6892   edge e;
6893   gimple_stmt_iterator si;
6894   unsigned old_len, new_len;
6895 
6896   /* Remove BB from dominance structures.  */
6897   delete_from_dominance_info (CDI_DOMINATORS, bb);
6898 
6899   /* Move BB from its current loop to the copy in the new function.  */
6900   if (current_loops)
6901     {
6902       struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6903       if (new_loop)
6904 	bb->loop_father = new_loop;
6905     }
6906 
6907   /* Link BB to the new linked list.  */
6908   move_block_after (bb, after);
6909 
6910   /* Update the edge count in the corresponding flowgraphs.  */
6911   if (update_edge_count_p)
6912     FOR_EACH_EDGE (e, ei, bb->succs)
6913       {
6914 	cfun->cfg->x_n_edges--;
6915 	dest_cfun->cfg->x_n_edges++;
6916       }
6917 
6918   /* Remove BB from the original basic block array.  */
6919   (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6920   cfun->cfg->x_n_basic_blocks--;
6921 
6922   /* Grow DEST_CFUN's basic block array if needed.  */
6923   cfg = dest_cfun->cfg;
6924   cfg->x_n_basic_blocks++;
6925   if (bb->index >= cfg->x_last_basic_block)
6926     cfg->x_last_basic_block = bb->index + 1;
6927 
6928   old_len = vec_safe_length (cfg->x_basic_block_info);
6929   if ((unsigned) cfg->x_last_basic_block >= old_len)
6930     {
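      /* Grow by an extra 25% so that repeated block moves do not
	 reallocate the array each time.  */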
6931       new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6932       vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6933     }
6934 
6935   (*cfg->x_basic_block_info)[bb->index] = bb;
6936 
6937   /* Remap the variables in phi nodes.  */
6938   for (gphi_iterator psi = gsi_start_phis (bb);
6939        !gsi_end_p (psi); )
6940     {
6941       gphi *phi = psi.phi ();
6942       use_operand_p use;
6943       tree op = PHI_RESULT (phi);
6944       ssa_op_iter oi;
6945       unsigned i;
6946 
6947       if (virtual_operand_p (op))
6948 	{
6949 	  /* Remove the phi nodes for virtual operands (alias analysis will be
6950 	     run for the new function, anyway).  But replace all uses that
6951 	     might be outside of the region we move.  */
6952 	  use_operand_p use_p;
6953 	  imm_use_iterator iter;
6954 	  gimple *use_stmt;
6955 	  FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
6956 	    FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
6957 	      SET_USE (use_p, SSA_NAME_VAR (op));
6958 	  remove_phi_node (&psi, true);
6959 	  continue;
6960 	}
6961 
6962       SET_PHI_RESULT (phi,
6963 		      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6964       FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6965 	{
6966 	  op = USE_FROM_PTR (use);
6967 	  if (TREE_CODE (op) == SSA_NAME)
6968 	    SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6969 	}
6970 
6971       for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6972 	{
6973 	  location_t locus = gimple_phi_arg_location (phi, i);
6974 	  tree block = LOCATION_BLOCK (locus);
6975 
6976 	  if (locus == UNKNOWN_LOCATION)
6977 	    continue;
6978 	  if (d->orig_block == NULL_TREE || block == d->orig_block)
6979 	    {
6980 	      locus = set_block (locus, d->new_block);
6981 	      gimple_phi_arg_set_location (phi, i, locus);
6982 	    }
6983 	}
6984 
6985       gsi_next (&psi);
6986     }
6987 
6988   for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6989     {
6990       gimple *stmt = gsi_stmt (si);
6991       struct walk_stmt_info wi;
6992 
6993       memset (&wi, 0, sizeof (wi));
6994       wi.info = d;
6995       walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6996 
6997       if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
6998 	{
6999 	  tree label = gimple_label_label (label_stmt);
7000 	  int uid = LABEL_DECL_UID (label);
7001 
7002 	  gcc_assert (uid > -1);
7003 
7004 	  old_len = vec_safe_length (cfg->x_label_to_block_map);
7005 	  if (old_len <= (unsigned) uid)
7006 	    {
7007 	      new_len = 3 * uid / 2 + 1;
7008 	      vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
7009 	    }
7010 
7011 	  (*cfg->x_label_to_block_map)[uid] = bb;
7012 	  (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
7013 
7014 	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
7015 
7016 	  if (uid >= dest_cfun->cfg->last_label_uid)
7017 	    dest_cfun->cfg->last_label_uid = uid + 1;
7018 	}
7019 
7020       maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
7021       remove_stmt_from_eh_lp_fn (cfun, stmt);
7022 
7023       gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
7024       gimple_remove_stmt_histograms (cfun, stmt);
7025 
7026       /* We cannot leave any operands allocated from the operand caches of
7027 	 the current function.  */
7028       free_stmt_operands (cfun, stmt);
7029       push_cfun (dest_cfun);
7030       update_stmt (stmt);
7031       pop_cfun ();
7032     }
7033 
7034   FOR_EACH_EDGE (e, ei, bb->succs)
7035     if (e->goto_locus != UNKNOWN_LOCATION)
7036       {
7037 	tree block = LOCATION_BLOCK (e->goto_locus);
7038 	if (d->orig_block == NULL_TREE
7039 	    || block == d->orig_block)
7040 	  e->goto_locus = set_block (e->goto_locus, d->new_block);
7041       }
7042 }
7043 
7044 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7045    the outermost EH region.  Use REGION as the incoming base EH region.
7046    If there is no single outermost region, return NULL and set *ALL to
7047    true.  */
7048 
7049 static eh_region
7050 find_outermost_region_in_block (struct function *src_cfun,
7051 				basic_block bb, eh_region region,
7052 				bool *all)
7053 {
7054   gimple_stmt_iterator si;
7055 
7056   for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7057     {
7058       gimple *stmt = gsi_stmt (si);
7059       eh_region stmt_region;
7060       int lp_nr;
7061 
7062       lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7063       stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7064       if (stmt_region)
7065 	{
7066 	  if (region == NULL)
7067 	    region = stmt_region;
7068 	  else if (stmt_region != region)
7069 	    {
7070 	      region = eh_region_outermost (src_cfun, stmt_region, region);
7071 	      if (region == NULL)
7072 		{
7073 		  *all = true;
7074 		  return NULL;
7075 		}
7076 	    }
7077 	}
7078     }
7079 
7080   return region;
7081 }
7082 
7083 static tree
7084 new_label_mapper (tree decl, void *data)
7085 {
7086   htab_t hash = (htab_t) data;
7087   struct tree_map *m;
7088   void **slot;
7089 
7090   gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7091 
7092   m = XNEW (struct tree_map);
7093   m->hash = DECL_UID (decl);
7094   m->base.from = decl;
7095   m->to = create_artificial_label (UNKNOWN_LOCATION);
7096   LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7097   if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7098     cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7099 
7100   slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7101   gcc_assert (*slot == NULL);
7102 
7103   *slot = m;
7104 
7105   return m->to;
7106 }
7107 
7108 /* Tree walker to replace the decls used inside value expressions by
7109    duplicates.  */
7110 
7111 static tree
7112 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7113 {
7114   struct replace_decls_d *rd = (struct replace_decls_d *)data;
7115 
7116   switch (TREE_CODE (*tp))
7117     {
7118     case VAR_DECL:
7119     case PARM_DECL:
7120     case RESULT_DECL:
7121       replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7122       break;
7123     default:
7124       break;
7125     }
7126 
7127   if (IS_TYPE_OR_DECL_P (*tp))
7128     *walk_subtrees = false;
7129 
7130   return NULL;
7131 }
7132 
7133 /* Change DECL_CONTEXT of all BLOCK_VARS in BLOCK, including
7134    subblocks.  */
7135 
7136 static void
7137 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7138 				  tree to_context)
7139 {
7140   tree *tp, t;
7141 
7142   for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7143     {
7144       t = *tp;
7145       if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7146 	continue;
7147       replace_by_duplicate_decl (&t, vars_map, to_context);
7148       if (t != *tp)
7149 	{
7150 	  if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7151 	    {
7152 	      tree x = DECL_VALUE_EXPR (*tp);
7153 	      struct replace_decls_d rd = { vars_map, to_context };
	      /* Unshare X before the walk below mutates it in place.  */
	      x = unshare_expr (x);
7155 	      walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7156 	      SET_DECL_VALUE_EXPR (t, x);
7157 	      DECL_HAS_VALUE_EXPR_P (t) = 1;
7158 	    }
7159 	  DECL_CHAIN (t) = DECL_CHAIN (*tp);
7160 	  *tp = t;
7161 	}
7162     }
7163 
7164   for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7165     replace_block_vars_by_duplicates (block, vars_map, to_context);
7166 }
7167 
7168 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7169    from FN1 to FN2.  */
7170 
7171 static void
7172 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7173 			      struct loop *loop)
7174 {
7175   /* Discard it from the old loop array.  */
7176   (*get_loops (fn1))[loop->num] = NULL;
7177 
7178   /* Place it in the new loop array, assigning it a new number.  */
7179   loop->num = number_of_loops (fn2);
7180   vec_safe_push (loops_for_fn (fn2)->larray, loop);
7181 
7182   /* Recurse to children.  */
7183   for (loop = loop->inner; loop; loop = loop->next)
7184     fixup_loop_arrays_after_move (fn1, fn2, loop);
7185 }
7186 
7187 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
   delimited by ENTRY and EXIT, possibly containing noreturn blocks.  */
7189 
7190 DEBUG_FUNCTION void
7191 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7192 {
7193   basic_block bb;
7194   edge_iterator ei;
7195   edge e;
7196   bitmap bbs = BITMAP_ALLOC (NULL);
7197   int i;
7198 
7199   gcc_assert (entry != NULL);
7200   gcc_assert (entry != exit);
7201   gcc_assert (bbs_p != NULL);
7202 
7203   gcc_assert (bbs_p->length () > 0);
7204 
7205   FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7206     bitmap_set_bit (bbs, bb->index);
7207 
7208   gcc_assert (bitmap_bit_p (bbs, entry->index));
7209   gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7210 
7211   FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7212     {
7213       if (bb == entry)
7214 	{
7215 	  gcc_assert (single_pred_p (entry));
7216 	  gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7217 	}
7218       else
7219 	for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7220 	  {
7221 	    e = ei_edge (ei);
7222 	    gcc_assert (bitmap_bit_p (bbs, e->src->index));
7223 	  }
7224 
7225       if (bb == exit)
7226 	{
7227 	  gcc_assert (single_succ_p (exit));
7228 	  gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7229 	}
7230       else
7231 	for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7232 	  {
7233 	    e = ei_edge (ei);
7234 	    gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7235 	  }
7236     }
7237 
7238   BITMAP_FREE (bbs);
7239 }
7240 
7241 /* If FROM is an SSA_NAME, mark the version in bitmap DATA.  */
7242 
7243 bool
7244 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7245 {
7246   bitmap release_names = (bitmap)data;
7247 
7248   if (TREE_CODE (from) != SSA_NAME)
7249     return true;
7250 
7251   bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7252   return true;
7253 }
7254 
7255 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7256    EXIT_BB to function DEST_CFUN.  The whole region is replaced by a
7257    single basic block in the original CFG and the new basic block is
7258    returned.  DEST_CFUN must not have a CFG yet.
7259 
7260    Note that the region need not be a pure SESE region.  Blocks inside
7261    the region may contain calls to abort/exit.  The only restriction
7262    is that ENTRY_BB should be the only entry point and it must
7263    dominate EXIT_BB.
7264 
   Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
   function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
   to the new function.
7268 
7269    All local variables referenced in the region are assumed to be in
7270    the corresponding BLOCK_VARS and unexpanded variable lists
7271    associated with DEST_CFUN.
7272 
7273    TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7274    reimplement move_sese_region_to_fn by duplicating the region rather than
7275    moving it.  */
7276 
7277 basic_block
7278 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7279 		        basic_block exit_bb, tree orig_block)
7280 {
7281   vec<basic_block> bbs, dom_bbs;
7282   basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7283   basic_block after, bb, *entry_pred, *exit_succ, abb;
7284   struct function *saved_cfun = cfun;
7285   int *entry_flag, *exit_flag;
7286   unsigned *entry_prob, *exit_prob;
7287   unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7288   edge e;
7289   edge_iterator ei;
7290   htab_t new_label_map;
7291   hash_map<void *, void *> *eh_map;
7292   struct loop *loop = entry_bb->loop_father;
7293   struct loop *loop0 = get_loop (saved_cfun, 0);
7294   struct move_stmt_d d;
7295 
7296   /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7297      region.  */
7298   gcc_assert (entry_bb != exit_bb
7299               && (!exit_bb
7300 		  || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7301 
7302   /* Collect all the blocks in the region.  Manually add ENTRY_BB
7303      because it won't be added by dfs_enumerate_from.  */
7304   bbs.create (0);
7305   bbs.safe_push (entry_bb);
7306   gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7307 
7308   if (flag_checking)
7309     verify_sese (entry_bb, exit_bb, &bbs);
7310 
7311   /* The blocks that used to be dominated by something in BBS will now be
7312      dominated by the new block.  */
7313   dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7314 				     bbs.address (),
7315 				     bbs.length ());
7316 
7317   /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG.  We need to remember
7318      the predecessor edges to ENTRY_BB and the successor edges to
7319      EXIT_BB so that we can re-attach them to the new basic block that
7320      will replace the region.  */
7321   num_entry_edges = EDGE_COUNT (entry_bb->preds);
7322   entry_pred = XNEWVEC (basic_block, num_entry_edges);
7323   entry_flag = XNEWVEC (int, num_entry_edges);
7324   entry_prob = XNEWVEC (unsigned, num_entry_edges);
7325   i = 0;
7326   for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7327     {
7328       entry_prob[i] = e->probability;
7329       entry_flag[i] = e->flags;
7330       entry_pred[i++] = e->src;
7331       remove_edge (e);
7332     }
7333 
7334   if (exit_bb)
7335     {
7336       num_exit_edges = EDGE_COUNT (exit_bb->succs);
7337       exit_succ = XNEWVEC (basic_block, num_exit_edges);
7338       exit_flag = XNEWVEC (int, num_exit_edges);
7339       exit_prob = XNEWVEC (unsigned, num_exit_edges);
7340       i = 0;
7341       for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7342 	{
7343 	  exit_prob[i] = e->probability;
7344 	  exit_flag[i] = e->flags;
7345 	  exit_succ[i++] = e->dest;
7346 	  remove_edge (e);
7347 	}
7348     }
7349   else
7350     {
7351       num_exit_edges = 0;
7352       exit_succ = NULL;
7353       exit_flag = NULL;
7354       exit_prob = NULL;
7355     }
7356 
7357   /* Switch context to the child function to initialize DEST_FN's CFG.  */
7358   gcc_assert (dest_cfun->cfg == NULL);
7359   push_cfun (dest_cfun);
7360 
7361   init_empty_tree_cfg ();
7362 
7363   /* Initialize EH information for the new function.  */
7364   eh_map = NULL;
7365   new_label_map = NULL;
7366   if (saved_cfun->eh)
7367     {
7368       eh_region region = NULL;
7369       bool all = false;
7370 
7371       FOR_EACH_VEC_ELT (bbs, i, bb)
7372 	{
7373 	  region = find_outermost_region_in_block (saved_cfun, bb, region, &all);
7374 	  if (all)
7375 	    break;
7376 	}
7377 
7378       init_eh_for_function ();
7379       if (region != NULL || all)
7380 	{
7381 	  new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7382 	  eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7383 					 new_label_mapper, new_label_map);
7384 	}
7385     }
7386 
7387   /* Initialize an empty loop tree.  */
7388   struct loops *loops = ggc_cleared_alloc<struct loops> ();
7389   init_loops_structure (dest_cfun, loops, 1);
7390   loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7391   set_loops_for_fn (dest_cfun, loops);
7392 
7393   /* Move the outlined loop tree part.  */
7394   num_nodes = bbs.length ();
7395   FOR_EACH_VEC_ELT (bbs, i, bb)
7396     {
7397       if (bb->loop_father->header == bb)
7398 	{
7399 	  struct loop *this_loop = bb->loop_father;
7400 	  struct loop *outer = loop_outer (this_loop);
7401 	  if (outer == loop
7402 	      /* If the SESE region contains some bbs ending with
7403 		 a noreturn call, those are considered to belong
7404 		 to the outermost loop in saved_cfun, rather than
7405 		 the entry_bb's loop_father.  */
7406 	      || outer == loop0)
7407 	    {
7408 	      if (outer != loop)
7409 		num_nodes -= this_loop->num_nodes;
7410 	      flow_loop_tree_node_remove (bb->loop_father);
7411 	      flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7412 	      fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7413 	    }
7414 	}
7415       else if (bb->loop_father == loop0 && loop0 != loop)
7416 	num_nodes--;
7417 
7418       /* Remove loop exits from the outlined region.  */
7419       if (loops_for_fn (saved_cfun)->exits)
7420 	FOR_EACH_EDGE (e, ei, bb->succs)
7421 	  {
7422 	    struct loops *l = loops_for_fn (saved_cfun);
7423 	    loop_exit **slot
7424 	      = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7425 					       NO_INSERT);
7426 	    if (slot)
7427 	      l->exits->clear_slot (slot);
7428 	  }
7429     }
7430 
7431 
7432   /* Adjust the number of blocks in the tree root of the outlined part.  */
7433   get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7434 
  /* Set up a mapping to be used by move_block_to_fn.  */
7436   loop->aux = current_loops->tree_root;
7437   loop0->aux = current_loops->tree_root;
7438 
7439   pop_cfun ();
7440 
7441   /* Move blocks from BBS into DEST_CFUN.  */
7442   gcc_assert (bbs.length () >= 2);
7443   after = dest_cfun->cfg->x_entry_block_ptr;
7444   hash_map<tree, tree> vars_map;
7445 
7446   memset (&d, 0, sizeof (d));
7447   d.orig_block = orig_block;
7448   d.new_block = DECL_INITIAL (dest_cfun->decl);
7449   d.from_context = cfun->decl;
7450   d.to_context = dest_cfun->decl;
7451   d.vars_map = &vars_map;
7452   d.new_label_map = new_label_map;
7453   d.eh_map = eh_map;
7454   d.remap_decls_p = true;
7455 
7456   if (gimple_in_ssa_p (cfun))
7457     for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7458       {
7459 	tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7460 	set_ssa_default_def (dest_cfun, arg, narg);
7461 	vars_map.put (arg, narg);
7462       }
7463 
7464   FOR_EACH_VEC_ELT (bbs, i, bb)
7465     {
7466       /* No need to update edge counts on the last block.  It has
7467 	 already been updated earlier when we detached the region from
7468 	 the original CFG.  */
7469       move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7470       after = bb;
7471     }
7472 
7473   loop->aux = NULL;
7474   loop0->aux = NULL;
7475   /* Loop sizes are no longer correct, fix them up.  */
7476   loop->num_nodes -= num_nodes;
7477   for (struct loop *outer = loop_outer (loop);
7478        outer; outer = loop_outer (outer))
7479     outer->num_nodes -= num_nodes;
7480   loop0->num_nodes -= bbs.length () - num_nodes;
7481 
7482   if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7483     {
7484       struct loop *aloop;
7485       for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7486 	if (aloop != NULL)
7487 	  {
7488 	    if (aloop->simduid)
7489 	      {
7490 		replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7491 					   d.to_context);
7492 		dest_cfun->has_simduid_loops = true;
7493 	      }
7494 	    if (aloop->force_vectorize)
7495 	      dest_cfun->has_force_vectorize_loops = true;
7496 	  }
7497     }
7498 
7499   /* Rewire BLOCK_SUBBLOCKS of orig_block.  */
7500   if (orig_block)
7501     {
7502       tree block;
7503       gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7504 		  == NULL_TREE);
7505       BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7506 	= BLOCK_SUBBLOCKS (orig_block);
7507       for (block = BLOCK_SUBBLOCKS (orig_block);
7508 	   block; block = BLOCK_CHAIN (block))
7509 	BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7510       BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7511     }
7512 
7513   replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7514 				    &vars_map, dest_cfun->decl);
7515 
7516   if (new_label_map)
7517     htab_delete (new_label_map);
7518   if (eh_map)
7519     delete eh_map;
7520 
7521   if (gimple_in_ssa_p (cfun))
7522     {
7523       /* We need to release ssa-names in a defined order, so first find them,
7524 	 and then iterate in ascending version order.  */
7525       bitmap release_names = BITMAP_ALLOC (NULL);
7526       vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7527       bitmap_iterator bi;
7528       unsigned i;
7529       EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7530 	release_ssa_name (ssa_name (i));
7531       BITMAP_FREE (release_names);
7532     }
7533 
  /* Rewire the entry and exit blocks.  ENTRY_BB becomes the successor
     of DEST_FN's ENTRY_BLOCK_PTR in the child function, and EXIT_BB
     becomes the predecessor of DEST_FN's EXIT_BLOCK_PTR.  We need to
     switch CFUN between DEST_CFUN and SAVED_CFUN so that the various
     CFG manipulation functions get to the right CFG.
7540 
7541      FIXME, this is silly.  The CFG ought to become a parameter to
7542      these helpers.  */
7543   push_cfun (dest_cfun);
7544   make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7545   if (exit_bb)
7546     make_edge (exit_bb,  EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7547   pop_cfun ();
7548 
  /* Back in the original function, the SESE region has disappeared;
     create a new basic block in its place.  */
7551   bb = create_empty_bb (entry_pred[0]);
7552   if (current_loops)
7553     add_bb_to_loop (bb, loop);
7554   for (i = 0; i < num_entry_edges; i++)
7555     {
7556       e = make_edge (entry_pred[i], bb, entry_flag[i]);
7557       e->probability = entry_prob[i];
7558     }
7559 
7560   for (i = 0; i < num_exit_edges; i++)
7561     {
7562       e = make_edge (bb, exit_succ[i], exit_flag[i]);
7563       e->probability = exit_prob[i];
7564     }
7565 
7566   set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7567   FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7568     set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7569   dom_bbs.release ();
7570 
7571   if (exit_bb)
7572     {
7573       free (exit_prob);
7574       free (exit_flag);
7575       free (exit_succ);
7576     }
7577   free (entry_prob);
7578   free (entry_flag);
7579   free (entry_pred);
7580   bbs.release ();
7581 
7582   return bb;
7583 }
7584 
7585 /* Dump default def DEF to file FILE using FLAGS and indentation
7586    SPC.  */
7587 
7588 static void
7589 dump_default_def (FILE *file, tree def, int spc, int flags)
7590 {
7591   for (int i = 0; i < spc; ++i)
7592     fprintf (file, " ");
7593   dump_ssaname_info_to_file (file, def, spc);
7594 
7595   print_generic_expr (file, TREE_TYPE (def), flags);
7596   fprintf (file, " ");
7597   print_generic_expr (file, def, flags);
7598   fprintf (file, " = ");
7599   print_generic_expr (file, SSA_NAME_VAR (def), flags);
7600   fprintf (file, ";\n");
7601 }
7602 
/* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
   dumpfile.h).  */
7605 
7606 void
7607 dump_function_to_file (tree fndecl, FILE *file, int flags)
7608 {
7609   tree arg, var, old_current_fndecl = current_function_decl;
7610   struct function *dsf;
7611   bool ignore_topmost_bind = false, any_var = false;
7612   basic_block bb;
7613   tree chain;
7614   bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7615 		  && decl_is_tm_clone (fndecl));
7616   struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7617 
7618   if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7619     {
7620       fprintf (file, "__attribute__((");
7621 
7622       bool first = true;
7623       tree chain;
7624       for (chain = DECL_ATTRIBUTES (fndecl); chain;
7625 	   first = false, chain = TREE_CHAIN (chain))
7626 	{
7627 	  if (!first)
7628 	    fprintf (file, ", ");
7629 
7630 	  print_generic_expr (file, get_attribute_name (chain), dump_flags);
7631 	  if (TREE_VALUE (chain) != NULL_TREE)
7632 	    {
7633 	      fprintf (file, " (");
7634 	      print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7635 	      fprintf (file, ")");
7636 	    }
7637 	}
7638 
7639       fprintf (file, "))\n");
7640     }
7641 
7642   current_function_decl = fndecl;
7643   if (flags & TDF_GIMPLE)
7644     {
7645       print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
7646 			  dump_flags | TDF_SLIM);
7647       fprintf (file, " __GIMPLE ()\n%s (", function_name (fun));
7648     }
7649   else
7650     fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7651 
7652   arg = DECL_ARGUMENTS (fndecl);
7653   while (arg)
7654     {
7655       print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7656       fprintf (file, " ");
7657       print_generic_expr (file, arg, dump_flags);
7658       if (flags & TDF_VERBOSE)
7659 	print_node (file, "", arg, 4);
7660       if (DECL_CHAIN (arg))
7661 	fprintf (file, ", ");
7662       arg = DECL_CHAIN (arg);
7663     }
7664   fprintf (file, ")\n");
7665 
7666   if (flags & TDF_VERBOSE)
7667     print_node (file, "", fndecl, 2);
7668 
7669   dsf = DECL_STRUCT_FUNCTION (fndecl);
7670   if (dsf && (flags & TDF_EH))
7671     dump_eh_tree (file, dsf);
7672 
7673   if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7674     {
7675       dump_node (fndecl, TDF_SLIM | flags, file);
7676       current_function_decl = old_current_fndecl;
7677       return;
7678     }
7679 
7680   /* When GIMPLE is lowered, the variables are no longer available in
7681      BIND_EXPRs, so display them separately.  */
7682   if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7683     {
7684       unsigned ix;
7685       ignore_topmost_bind = true;
7686 
7687       fprintf (file, "{\n");
7688       if (gimple_in_ssa_p (fun)
7689 	  && (flags & TDF_ALIAS))
7690 	{
7691 	  for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
7692 	       arg = DECL_CHAIN (arg))
7693 	    {
7694 	      tree def = ssa_default_def (fun, arg);
7695 	      if (def)
7696 		dump_default_def (file, def, 2, flags);
7697 	    }
7698 
7699 	  tree res = DECL_RESULT (fun->decl);
7700 	  if (res != NULL_TREE
7701 	      && DECL_BY_REFERENCE (res))
7702 	    {
7703 	      tree def = ssa_default_def (fun, res);
7704 	      if (def)
7705 		dump_default_def (file, def, 2, flags);
7706 	    }
7707 
7708 	  tree static_chain = fun->static_chain_decl;
7709 	  if (static_chain != NULL_TREE)
7710 	    {
7711 	      tree def = ssa_default_def (fun, static_chain);
7712 	      if (def)
7713 		dump_default_def (file, def, 2, flags);
7714 	    }
7715 	}
7716 
7717       if (!vec_safe_is_empty (fun->local_decls))
7718 	FOR_EACH_LOCAL_DECL (fun, ix, var)
7719 	  {
7720 	    print_generic_decl (file, var, flags);
7721 	    if (flags & TDF_VERBOSE)
7722 	      print_node (file, "", var, 4);
7723 	    fprintf (file, "\n");
7724 
7725 	    any_var = true;
7726 	  }
7727 
7728       tree name;
7729 
7730       if (gimple_in_ssa_p (cfun))
7731 	FOR_EACH_SSA_NAME (ix, name, cfun)
7732 	  {
7733 	    if (!SSA_NAME_VAR (name))
7734 	      {
7735 		fprintf (file, "  ");
7736 		print_generic_expr (file, TREE_TYPE (name), flags);
7737 		fprintf (file, " ");
7738 		print_generic_expr (file, name, flags);
7739 		fprintf (file, ";\n");
7740 
7741 		any_var = true;
7742 	      }
7743 	  }
7744     }
7745 
7746   if (fun && fun->decl == fndecl
7747       && fun->cfg
7748       && basic_block_info_for_fn (fun))
7749     {
7750       /* If the CFG has been built, emit a CFG-based dump.  */
7751       if (!ignore_topmost_bind)
7752 	fprintf (file, "{\n");
7753 
7754       if (any_var && n_basic_blocks_for_fn (fun))
7755 	fprintf (file, "\n");
7756 
7757       FOR_EACH_BB_FN (bb, fun)
7758 	dump_bb (file, bb, 2, flags | TDF_COMMENT);
7759 
7760       fprintf (file, "}\n");
7761     }
7762   else if (fun->curr_properties & PROP_gimple_any)
7763     {
7764       /* The function is now in GIMPLE form but the CFG has not been
7765 	 built yet.  Emit the single sequence of GIMPLE statements
7766 	 that make up its body.  */
7767       gimple_seq body = gimple_body (fndecl);
7768 
7769       if (gimple_seq_first_stmt (body)
7770 	  && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7771 	  && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7772 	print_gimple_seq (file, body, 0, flags);
7773       else
7774 	{
7775 	  if (!ignore_topmost_bind)
7776 	    fprintf (file, "{\n");
7777 
7778 	  if (any_var)
7779 	    fprintf (file, "\n");
7780 
7781 	  print_gimple_seq (file, body, 2, flags);
7782 	  fprintf (file, "}\n");
7783 	}
7784     }
7785   else
7786     {
7787       int indent;
7788 
7789       /* Make a tree based dump.  */
7790       chain = DECL_SAVED_TREE (fndecl);
7791       if (chain && TREE_CODE (chain) == BIND_EXPR)
7792 	{
7793 	  if (ignore_topmost_bind)
7794 	    {
7795 	      chain = BIND_EXPR_BODY (chain);
7796 	      indent = 2;
7797 	    }
7798 	  else
7799 	    indent = 0;
7800 	}
7801       else
7802 	{
7803 	  if (!ignore_topmost_bind)
7804 	    {
7805 	      fprintf (file, "{\n");
7806 	      /* No topmost bind, pretend it's ignored for later.  */
7807 	      ignore_topmost_bind = true;
7808 	    }
7809 	  indent = 2;
7810 	}
7811 
7812       if (any_var)
7813 	fprintf (file, "\n");
7814 
7815       print_generic_stmt_indented (file, chain, flags, indent);
7816       if (ignore_topmost_bind)
7817 	fprintf (file, "}\n");
7818     }
7819 
7820   if (flags & TDF_ENUMERATE_LOCALS)
7821     dump_enumerated_decls (file, flags);
7822   fprintf (file, "\n\n");
7823 
7824   current_function_decl = old_current_fndecl;
7825 }
7826 
/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h).  */
7828 
7829 DEBUG_FUNCTION void
7830 debug_function (tree fn, int flags)
7831 {
7832   dump_function_to_file (fn, stderr, flags);
7833 }
7834 
7835 
7836 /* Print on FILE the indexes for the predecessors of basic_block BB.  */
7837 
7838 static void
7839 print_pred_bbs (FILE *file, basic_block bb)
7840 {
7841   edge e;
7842   edge_iterator ei;
7843 
7844   FOR_EACH_EDGE (e, ei, bb->preds)
7845     fprintf (file, "bb_%d ", e->src->index);
7846 }
7847 
7848 
7849 /* Print on FILE the indexes for the successors of basic_block BB.  */
7850 
7851 static void
7852 print_succ_bbs (FILE *file, basic_block bb)
7853 {
7854   edge e;
7855   edge_iterator ei;
7856 
7857   FOR_EACH_EDGE (e, ei, bb->succs)
7858     fprintf (file, "bb_%d ", e->dest->index);
7859 }
7860 
/* Print to FILE the basic block BB according to the VERBOSITY level.  */
7862 
7863 void
7864 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7865 {
7866   char *s_indent = (char *) alloca ((size_t) indent + 1);
7867   memset ((void *) s_indent, ' ', (size_t) indent);
7868   s_indent[indent] = '\0';
7869 
7870   /* Print basic_block's header.  */
7871   if (verbosity >= 2)
7872     {
7873       fprintf (file, "%s  bb_%d (preds = {", s_indent, bb->index);
7874       print_pred_bbs (file, bb);
7875       fprintf (file, "}, succs = {");
7876       print_succ_bbs (file, bb);
7877       fprintf (file, "})\n");
7878     }
7879 
7880   /* Print basic_block's body.  */
7881   if (verbosity >= 3)
7882     {
7883       fprintf (file, "%s  {\n", s_indent);
7884       dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7885       fprintf (file, "%s  }\n", s_indent);
7886     }
7887 }
7888 
7889 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7890 
/* Pretty print LOOP on FILE, indented INDENT spaces.  Depending on
   the VERBOSITY level, this outputs the contents of the loop, or just
   its structure.  */
7894 
7895 static void
7896 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7897 {
7898   char *s_indent;
7899   basic_block bb;
7900 
7901   if (loop == NULL)
7902     return;
7903 
7904   s_indent = (char *) alloca ((size_t) indent + 1);
7905   memset ((void *) s_indent, ' ', (size_t) indent);
7906   s_indent[indent] = '\0';
7907 
7908   /* Print loop's header.  */
7909   fprintf (file, "%sloop_%d (", s_indent, loop->num);
7910   if (loop->header)
7911     fprintf (file, "header = %d", loop->header->index);
7912   else
7913     {
7914       fprintf (file, "deleted)\n");
7915       return;
7916     }
7917   if (loop->latch)
7918     fprintf (file, ", latch = %d", loop->latch->index);
7919   else
7920     fprintf (file, ", multiple latches");
7921   fprintf (file, ", niter = ");
7922   print_generic_expr (file, loop->nb_iterations, 0);
7923 
7924   if (loop->any_upper_bound)
7925     {
7926       fprintf (file, ", upper_bound = ");
7927       print_decu (loop->nb_iterations_upper_bound, file);
7928     }
7929   if (loop->any_likely_upper_bound)
7930     {
7931       fprintf (file, ", likely_upper_bound = ");
7932       print_decu (loop->nb_iterations_likely_upper_bound, file);
7933     }
7934 
7935   if (loop->any_estimate)
7936     {
7937       fprintf (file, ", estimate = ");
7938       print_decu (loop->nb_iterations_estimate, file);
7939     }
7940   fprintf (file, ")\n");
7941 
7942   /* Print loop's body.  */
7943   if (verbosity >= 1)
7944     {
7945       fprintf (file, "%s{\n", s_indent);
7946       FOR_EACH_BB_FN (bb, cfun)
7947 	if (bb->loop_father == loop)
7948 	  print_loops_bb (file, bb, indent, verbosity);
7949 
7950       print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7951       fprintf (file, "%s}\n", s_indent);
7952     }
7953 }
7954 
/* Print the LOOP and its sibling loops on FILE, indented INDENT
   spaces.  Depending on the VERBOSITY level, this outputs the contents
   of the loops, or just their structure.  */
7958 
7959 static void
7960 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7961 			 int verbosity)
7962 {
7963   if (loop == NULL)
7964     return;
7965 
7966   print_loop (file, loop, indent, verbosity);
7967   print_loop_and_siblings (file, loop->next, indent, verbosity);
7968 }
7969 
/* Starting from the entry block of the current function, pretty print
   on FILE the structure of its loops at the given VERBOSITY level.  */
7972 
7973 void
7974 print_loops (FILE *file, int verbosity)
7975 {
7976   basic_block bb;
7977 
7978   bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7979   fprintf (file, "\nLoops in function: %s\n", current_function_name ());
7980   if (bb && bb->loop_father)
7981     print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7982 }
7983 
7984 /* Dump a loop.  */
7985 
7986 DEBUG_FUNCTION void
7987 debug (struct loop &ref)
7988 {
7989   print_loop (stderr, &ref, 0, /*verbosity*/0);
7990 }
7991 
7992 DEBUG_FUNCTION void
7993 debug (struct loop *ptr)
7994 {
7995   if (ptr)
7996     debug (*ptr);
7997   else
7998     fprintf (stderr, "<nil>\n");
7999 }
8000 
8001 /* Dump a loop verbosely.  */
8002 
8003 DEBUG_FUNCTION void
8004 debug_verbose (struct loop &ref)
8005 {
8006   print_loop (stderr, &ref, 0, /*verbosity*/3);
8007 }
8008 
8009 DEBUG_FUNCTION void
8010 debug_verbose (struct loop *ptr)
8011 {
8012   if (ptr)
8013     debug (*ptr);
8014   else
8015     fprintf (stderr, "<nil>\n");
8016 }
8017 
8018 
8019 /* Debugging loops structure at tree level, at some VERBOSITY level.  */
8020 
8021 DEBUG_FUNCTION void
8022 debug_loops (int verbosity)
8023 {
8024   print_loops (stderr, verbosity);
8025 }
8026 
8027 /* Print on stderr the code of LOOP, at some VERBOSITY level.  */
8028 
8029 DEBUG_FUNCTION void
8030 debug_loop (struct loop *loop, int verbosity)
8031 {
8032   print_loop (stderr, loop, 0, verbosity);
8033 }
8034 
8035 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8036    level.  */
8037 
8038 DEBUG_FUNCTION void
8039 debug_loop_num (unsigned num, int verbosity)
8040 {
8041   debug_loop (get_loop (cfun, num), verbosity);
8042 }
8043 
/* Return true if BB ends with a call, possibly followed by some
   instructions that must stay with the call.  Return false
   otherwise.  */
8047 
8048 static bool
8049 gimple_block_ends_with_call_p (basic_block bb)
8050 {
8051   gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8052   return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8053 }
8054 
8055 
/* Return true if BB ends with a conditional branch.  Return false
   otherwise.  */
8058 
8059 static bool
8060 gimple_block_ends_with_condjump_p (const_basic_block bb)
8061 {
8062   gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8063   return (stmt && gimple_code (stmt) == GIMPLE_COND);
8064 }
8065 
8066 
/* Return true if statement T may terminate execution of BB in ways not
   explicitly represented in the CFG.  */
8069 
8070 bool
8071 stmt_can_terminate_bb_p (gimple *t)
8072 {
8073   tree fndecl = NULL_TREE;
8074   int call_flags = 0;
8075 
  /* An EH exception not handled internally terminates execution of the
     whole function.  */
8078   if (stmt_can_throw_external (t))
8079     return true;
8080 
8081   /* NORETURN and LONGJMP calls already have an edge to exit.
8082      CONST and PURE calls do not need one.
8083      We don't currently check for CONST and PURE here, although
8084      it would be a good idea, because those attributes are
8085      figured out from the RTL in mark_constant_function, and
8086      the counter incrementation code from -fprofile-arcs
8087      leads to different results from -fbranch-probabilities.  */
8088   if (is_gimple_call (t))
8089     {
8090       fndecl = gimple_call_fndecl (t);
8091       call_flags = gimple_call_flags (t);
8092     }
8093 
8094   if (is_gimple_call (t)
8095       && fndecl
8096       && DECL_BUILT_IN (fndecl)
8097       && (call_flags & ECF_NOTHROW)
8098       && !(call_flags & ECF_RETURNS_TWICE)
8099       /* fork() doesn't really return twice, but the effect of
8100          wrapping it in __gcov_fork() which calls __gcov_flush()
8101 	 and clears the counters before forking has the same
8102 	 effect as returning twice.  Force a fake edge.  */
8103       && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
8104 	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
8105     return false;
8106 
8107   if (is_gimple_call (t))
8108     {
8109       edge_iterator ei;
8110       edge e;
8111       basic_block bb;
8112 
8113       if (call_flags & (ECF_PURE | ECF_CONST)
8114 	  && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8115 	return false;
8116 
      /* A function call may longjmp, terminate the program or do other
	 things.  Special-case noreturn calls that have non-abnormal edges
	 out, as in that case the fact is sufficiently represented by the
	 lack of edges out of T.  */
8120       if (!(call_flags & ECF_NORETURN))
8121 	return true;
8122 
8123       bb = gimple_bb (t);
8124       FOR_EACH_EDGE (e, ei, bb->succs)
8125 	if ((e->flags & EDGE_FAKE) == 0)
8126 	  return true;
8127     }
8128 
8129   if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8130     if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8131       return true;
8132 
8133   return false;
8134 }
8135 
8136 
/* Add fake edges to the function exit for any non-constant and
   non-noreturn calls (or noreturn calls with EH/abnormal edges), and
   for volatile inline assembly, in the bitmap of blocks specified by
   BLOCKS, or in the whole CFG if BLOCKS is zero.  Return the number of
   blocks that were split.
8142 
8143    The goal is to expose cases in which entering a basic block does
8144    not imply that all subsequent instructions must be executed.  */
8145 
8146 static int
8147 gimple_flow_call_edges_add (sbitmap blocks)
8148 {
8149   int i;
8150   int blocks_split = 0;
8151   int last_bb = last_basic_block_for_fn (cfun);
8152   bool check_last_block = false;
8153 
8154   if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8155     return 0;
8156 
8157   if (! blocks)
8158     check_last_block = true;
8159   else
8160     check_last_block = bitmap_bit_p (blocks,
8161 				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8162 
8163   /* In the last basic block, before epilogue generation, there will be
8164      a fallthru edge to EXIT.  Special care is required if the last insn
8165      of the last basic block is a call because make_edge folds duplicate
8166      edges, which would result in the fallthru edge also being marked
8167      fake, which would result in the fallthru edge being removed by
8168      remove_fake_edges, which would result in an invalid CFG.
8169 
8170      Moreover, we can't elide the outgoing fake edge, since the block
8171      profiler needs to take this into account in order to solve the minimal
8172      spanning tree in the case that the call doesn't return.
8173 
8174      Handle this by adding a dummy instruction in a new last basic block.  */
8175   if (check_last_block)
8176     {
8177       basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8178       gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8179       gimple *t = NULL;
8180 
8181       if (!gsi_end_p (gsi))
8182 	t = gsi_stmt (gsi);
8183 
8184       if (t && stmt_can_terminate_bb_p (t))
8185 	{
8186 	  edge e;
8187 
8188 	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8189 	  if (e)
8190 	    {
8191 	      gsi_insert_on_edge (e, gimple_build_nop ());
8192 	      gsi_commit_edge_inserts ();
8193 	    }
8194 	}
8195     }
8196 
  /* Now add fake edges to the function exit for any non-constant
     calls, since there is no way that we can determine if they will
     return or not...  */
8200   for (i = 0; i < last_bb; i++)
8201     {
8202       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8203       gimple_stmt_iterator gsi;
8204       gimple *stmt, *last_stmt;
8205 
8206       if (!bb)
8207 	continue;
8208 
8209       if (blocks && !bitmap_bit_p (blocks, i))
8210 	continue;
8211 
8212       gsi = gsi_last_nondebug_bb (bb);
8213       if (!gsi_end_p (gsi))
8214 	{
8215 	  last_stmt = gsi_stmt (gsi);
8216 	  do
8217 	    {
8218 	      stmt = gsi_stmt (gsi);
8219 	      if (stmt_can_terminate_bb_p (stmt))
8220 		{
8221 		  edge e;
8222 
8223 		  /* The handling above of the final block before the
8224 		     epilogue should be enough to verify that there is
8225 		     no edge to the exit block in CFG already.
8226 		     Calling make_edge in such case would cause us to
8227 		     mark that edge as fake and remove it later.  */
8228 		  if (flag_checking && stmt == last_stmt)
8229 		    {
8230 		      e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8231 		      gcc_assert (e == NULL);
8232 		    }
8233 
8234 		  /* Note that the following may create a new basic block
8235 		     and renumber the existing basic blocks.  */
8236 		  if (stmt != last_stmt)
8237 		    {
8238 		      e = split_block (bb, stmt);
8239 		      if (e)
8240 			blocks_split++;
8241 		    }
8242 		  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8243 		}
8244 	      gsi_prev (&gsi);
8245 	    }
8246 	  while (!gsi_end_p (gsi));
8247 	}
8248     }
8249 
8250   if (blocks_split)
8251     verify_flow_info ();
8252 
8253   return blocks_split;
8254 }
8255 
/* Removes edge E and all the blocks dominated by it, and updates dominance
   information.  The IL in E->src needs to be updated separately.
   If dominance info is not available, only the edge E is removed.  */
8259 
8260 void
8261 remove_edge_and_dominated_blocks (edge e)
8262 {
8263   vec<basic_block> bbs_to_remove = vNULL;
8264   vec<basic_block> bbs_to_fix_dom = vNULL;
8265   bitmap df, df_idom;
8266   edge f;
8267   edge_iterator ei;
8268   bool none_removed = false;
8269   unsigned i;
8270   basic_block bb, dbb;
8271   bitmap_iterator bi;
8272 
  /* If we are removing a path inside a non-root loop, that may change
     loop ownership of blocks or remove loops; mark loops for fixup.  */
8275   if (current_loops
8276       && loop_outer (e->src->loop_father) != NULL
8277       && e->src->loop_father == e->dest->loop_father)
8278     loops_state_set (LOOPS_NEED_FIXUP);
8279 
8280   if (!dom_info_available_p (CDI_DOMINATORS))
8281     {
8282       remove_edge (e);
8283       return;
8284     }
8285 
8286   /* No updating is needed for edges to exit.  */
8287   if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8288     {
8289       if (cfgcleanup_altered_bbs)
8290 	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8291       remove_edge (e);
8292       return;
8293     }
8294 
8295   /* First, we find the basic blocks to remove.  If E->dest has a predecessor
8296      that is not dominated by E->dest, then this set is empty.  Otherwise,
8297      all the basic blocks dominated by E->dest are removed.
8298 
8299      Also, to DF_IDOM we store the immediate dominators of the blocks in
8300      the dominance frontier of E (i.e., of the successors of the
8301      removed blocks, if there are any, and of E->dest otherwise).  */
8302   FOR_EACH_EDGE (f, ei, e->dest->preds)
8303     {
8304       if (f == e)
8305 	continue;
8306 
8307       if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8308 	{
8309 	  none_removed = true;
8310 	  break;
8311 	}
8312     }
8313 
8314   df = BITMAP_ALLOC (NULL);
8315   df_idom = BITMAP_ALLOC (NULL);
8316 
8317   if (none_removed)
8318     bitmap_set_bit (df_idom,
8319 		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8320   else
8321     {
8322       bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8323       FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8324 	{
8325 	  FOR_EACH_EDGE (f, ei, bb->succs)
8326 	    {
8327 	      if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8328 		bitmap_set_bit (df, f->dest->index);
8329 	    }
8330 	}
8331       FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8332 	bitmap_clear_bit (df, bb->index);
8333 
8334       EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8335 	{
8336 	  bb = BASIC_BLOCK_FOR_FN (cfun, i);
8337 	  bitmap_set_bit (df_idom,
8338 			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8339 	}
8340     }
8341 
8342   if (cfgcleanup_altered_bbs)
8343     {
8344       /* Record the set of the altered basic blocks.  */
8345       bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8346       bitmap_ior_into (cfgcleanup_altered_bbs, df);
8347     }
8348 
8349   /* Remove E and the cancelled blocks.  */
8350   if (none_removed)
8351     remove_edge (e);
8352   else
8353     {
8354       /* Walk backwards so as to get a chance to substitute all
8355 	 released DEFs into debug stmts.  See
8356 	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8357 	 details.  */
8358       for (i = bbs_to_remove.length (); i-- > 0; )
8359 	delete_basic_block (bbs_to_remove[i]);
8360     }
8361 
8362   /* Update the dominance information.  The immediate dominator may change only
8363      for blocks whose immediate dominator belongs to DF_IDOM:
8364 
8365      Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
     removal.  Let Z be an arbitrary block such that idom(Z) = Y and
8367      Z dominates X after the removal.  Before removal, there exists a path P
8368      from Y to X that avoids Z.  Let F be the last edge on P that is
8369      removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
8370      dominates W, and because of P, Z does not dominate W), and W belongs to
8371      the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
8372   EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8373     {
8374       bb = BASIC_BLOCK_FOR_FN (cfun, i);
8375       for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8376 	   dbb;
8377 	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
8378 	bbs_to_fix_dom.safe_push (dbb);
8379     }
8380 
8381   iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8382 
8383   BITMAP_FREE (df);
8384   BITMAP_FREE (df_idom);
8385   bbs_to_remove.release ();
8386   bbs_to_fix_dom.release ();
8387 }
8388 
8389 /* Purge dead EH edges from basic block BB.  */
8390 
8391 bool
8392 gimple_purge_dead_eh_edges (basic_block bb)
8393 {
8394   bool changed = false;
8395   edge e;
8396   edge_iterator ei;
8397   gimple *stmt = last_stmt (bb);
8398 
8399   if (stmt && stmt_can_throw_internal (stmt))
8400     return false;
8401 
8402   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8403     {
8404       if (e->flags & EDGE_EH)
8405 	{
8406 	  remove_edge_and_dominated_blocks (e);
8407 	  changed = true;
8408 	}
8409       else
8410 	ei_next (&ei);
8411     }
8412 
8413   return changed;
8414 }
8415 
/* Purge dead EH edges from the basic blocks listed in BLOCKS.  */
8417 
8418 bool
8419 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8420 {
8421   bool changed = false;
8422   unsigned i;
8423   bitmap_iterator bi;
8424 
8425   EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8426     {
8427       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8428 
8429       /* Earlier gimple_purge_dead_eh_edges could have removed
8430 	 this basic block already.  */
8431       gcc_assert (bb || changed);
8432       if (bb != NULL)
8433 	changed |= gimple_purge_dead_eh_edges (bb);
8434     }
8435 
8436   return changed;
8437 }
8438 
8439 /* Purge dead abnormal call edges from basic block BB.  */
8440 
8441 bool
8442 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8443 {
8444   bool changed = false;
8445   edge e;
8446   edge_iterator ei;
8447   gimple *stmt = last_stmt (bb);
8448 
8449   if (!cfun->has_nonlocal_label
8450       && !cfun->calls_setjmp)
8451     return false;
8452 
8453   if (stmt && stmt_can_make_abnormal_goto (stmt))
8454     return false;
8455 
8456   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8457     {
8458       if (e->flags & EDGE_ABNORMAL)
8459 	{
8460 	  if (e->flags & EDGE_FALLTHRU)
8461 	    e->flags &= ~EDGE_ABNORMAL;
8462 	  else
8463 	    remove_edge_and_dominated_blocks (e);
8464 	  changed = true;
8465 	}
8466       else
8467 	ei_next (&ei);
8468     }
8469 
8470   return changed;
8471 }
8472 
/* Purge dead abnormal call edges from the basic blocks listed in BLOCKS.  */
8474 
8475 bool
8476 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8477 {
8478   bool changed = false;
8479   unsigned i;
8480   bitmap_iterator bi;
8481 
8482   EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8483     {
8484       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8485 
8486       /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8487 	 this basic block already.  */
8488       gcc_assert (bb || changed);
8489       if (bb != NULL)
8490 	changed |= gimple_purge_dead_abnormal_call_edges (bb);
8491     }
8492 
8493   return changed;
8494 }
8495 
8496 /* This function is called whenever a new edge is created or
8497    redirected.  */
8498 
8499 static void
8500 gimple_execute_on_growing_pred (edge e)
8501 {
8502   basic_block bb = e->dest;
8503 
8504   if (!gimple_seq_empty_p (phi_nodes (bb)))
8505     reserve_phi_args_for_new_edge (bb);
8506 }
8507 
8508 /* This function is called immediately before edge E is removed from
8509    the edge vector E->dest->preds.  */
8510 
8511 static void
8512 gimple_execute_on_shrinking_pred (edge e)
8513 {
8514   if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8515     remove_phi_args (e);
8516 }
8517 
8518 /*---------------------------------------------------------------------------
8519   Helper functions for Loop versioning
8520   ---------------------------------------------------------------------------*/
8521 
/* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
   of 'first'.  Both of them are dominated by 'new_head' basic block.  When
   'new_head' was created by splitting 'second's incoming edge, it received
   phi arguments on that edge from split_edge().  Later, an additional edge
   'e' was created to connect 'new_head' and 'first'.  This routine now adds
   to 'e' the phi args that the edge from 'new_head' to 'second' received as
   part of the edge splitting.  */
8529 
8530 static void
8531 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8532 				  basic_block new_head, edge e)
8533 {
8534   gphi *phi1, *phi2;
8535   gphi_iterator psi1, psi2;
8536   tree def;
8537   edge e2 = find_edge (new_head, second);
8538 
8539   /* Because NEW_HEAD has been created by splitting SECOND's incoming
8540      edge, we should always have an edge from NEW_HEAD to SECOND.  */
8541   gcc_assert (e2 != NULL);
8542 
  /* Browse all phi nodes of the 'second' basic block and add phi args to
     edge 'e' for the 'first' head.  PHI args are always in correct order.  */
8545 
8546   for (psi2 = gsi_start_phis (second),
8547        psi1 = gsi_start_phis (first);
8548        !gsi_end_p (psi2) && !gsi_end_p (psi1);
8549        gsi_next (&psi2),  gsi_next (&psi1))
8550     {
8551       phi1 = psi1.phi ();
8552       phi2 = psi2.phi ();
8553       def = PHI_ARG_DEF (phi2, e2->dest_idx);
8554       add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8555     }
8556 }
8557 
8558 
/* Adds an if-else statement to COND_BB with condition COND_EXPR.
   SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
   the destination of the ELSE part.  */
8562 
8563 static void
8564 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8565 			       basic_block second_head ATTRIBUTE_UNUSED,
8566 			       basic_block cond_bb, void *cond_e)
8567 {
8568   gimple_stmt_iterator gsi;
8569   gimple *new_cond_expr;
8570   tree cond_expr = (tree) cond_e;
8571   edge e0;
8572 
  /* Build the new conditional expression.  */
8574   new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8575 					       NULL_TREE, NULL_TREE);
8576 
8577   /* Add new cond in cond_bb.  */
8578   gsi = gsi_last_bb (cond_bb);
8579   gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8580 
8581   /* Adjust edges appropriately to connect new head with first head
8582      as well as second head.  */
8583   e0 = single_succ_edge (cond_bb);
8584   e0->flags &= ~EDGE_FALLTHRU;
8585   e0->flags |= EDGE_FALSE_VALUE;
8586 }
8587 
8588 
/* Do book-keeping of basic block BB for the profile consistency checker.
   If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1, do
   post-pass accounting.  Store the counts in RECORD.  */

8592 static void
8593 gimple_account_profile_record (basic_block bb, int after_pass,
8594 			       struct profile_record *record)
8595 {
8596   gimple_stmt_iterator i;
8597   for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8598     {
8599       record->size[after_pass]
8600 	+= estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8601       if (profile_status_for_fn (cfun) == PROFILE_READ)
8602 	record->time[after_pass]
8603 	  += estimate_num_insns (gsi_stmt (i),
8604 				 &eni_time_weights) * bb->count;
8605       else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8606 	record->time[after_pass]
8607 	  += estimate_num_insns (gsi_stmt (i),
8608 				 &eni_time_weights) * bb->frequency;
8609     }
8610 }
8611 
8612 struct cfg_hooks gimple_cfg_hooks = {
8613   "gimple",
8614   gimple_verify_flow_info,
8615   gimple_dump_bb,		/* dump_bb  */
8616   gimple_dump_bb_for_graph,	/* dump_bb_for_graph  */
8617   create_bb,			/* create_basic_block  */
8618   gimple_redirect_edge_and_branch, /* redirect_edge_and_branch  */
8619   gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force  */
8620   gimple_can_remove_branch_p,	/* can_remove_branch_p  */
8621   remove_bb,			/* delete_basic_block  */
8622   gimple_split_block,		/* split_block  */
8623   gimple_move_block_after,	/* move_block_after  */
8624   gimple_can_merge_blocks_p,	/* can_merge_blocks_p  */
8625   gimple_merge_blocks,		/* merge_blocks  */
8626   gimple_predict_edge,		/* predict_edge  */
8627   gimple_predicted_by_p,	/* predicted_by_p  */
8628   gimple_can_duplicate_bb_p,	/* can_duplicate_block_p  */
8629   gimple_duplicate_bb,		/* duplicate_block  */
8630   gimple_split_edge,		/* split_edge  */
  gimple_make_forwarder_block,	/* make_forwarder_block  */
8632   NULL,				/* tidy_fallthru_edge  */
8633   NULL,				/* force_nonfallthru */
8634   gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8635   gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8636   gimple_flow_call_edges_add,   /* flow_call_edges_add */
8637   gimple_execute_on_growing_pred,	/* execute_on_growing_pred */
8638   gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8639   gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8640   gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8641   gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
8642   extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8643   flush_pending_stmts, 		/* flush_pending_stmts */
8644   gimple_empty_block_p,           /* block_empty_p */
8645   gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8646   gimple_account_profile_record,
8647 };
8648 
8649 
8650 /* Split all critical edges.  */
8651 
8652 unsigned int
8653 split_critical_edges (void)
8654 {
8655   basic_block bb;
8656   edge e;
8657   edge_iterator ei;
8658 
8659   /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8660      expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
8661      mappings around the calls to split_edge.  */
8662   start_recording_case_labels ();
8663   FOR_ALL_BB_FN (bb, cfun)
8664     {
8665       FOR_EACH_EDGE (e, ei, bb->succs)
8666         {
8667 	  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8668 	    split_edge (e);
	  /* PRE inserts statements on edges and expects that, since
	     split_critical_edges was done beforehand, committing edge
	     insertions will not split more edges.  In addition to
	     critical edges we must split edges whose source has multiple
	     successors and ends with a control flow statement, such as
	     RESX.  Go ahead and split them too.  This matches the logic
	     in gimple_find_edge_insert_loc.  */
8676 	  else if ((!single_pred_p (e->dest)
8677 	            || !gimple_seq_empty_p (phi_nodes (e->dest))
8678 		    || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8679 		   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8680 	           && !(e->flags & EDGE_ABNORMAL))
8681 	    {
8682 	      gimple_stmt_iterator gsi;
8683 
8684 	      gsi = gsi_last_bb (e->src);
8685 	      if (!gsi_end_p (gsi)
8686 		  && stmt_ends_bb_p (gsi_stmt (gsi))
8687 		  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8688 		      && !gimple_call_builtin_p (gsi_stmt (gsi),
8689 						 BUILT_IN_RETURN)))
8690 		split_edge (e);
8691 	    }
8692 	}
8693     }
8694   end_recording_case_labels ();
8695   return 0;
8696 }
8697 
8698 namespace {
8699 
8700 const pass_data pass_data_split_crit_edges =
8701 {
8702   GIMPLE_PASS, /* type */
8703   "crited", /* name */
8704   OPTGROUP_NONE, /* optinfo_flags */
8705   TV_TREE_SPLIT_EDGES, /* tv_id */
8706   PROP_cfg, /* properties_required */
8707   PROP_no_crit_edges, /* properties_provided */
8708   0, /* properties_destroyed */
8709   0, /* todo_flags_start */
8710   0, /* todo_flags_finish */
8711 };
8712 
8713 class pass_split_crit_edges : public gimple_opt_pass
8714 {
8715 public:
8716   pass_split_crit_edges (gcc::context *ctxt)
8717     : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8718   {}
8719 
8720   /* opt_pass methods: */
8721   virtual unsigned int execute (function *) { return split_critical_edges (); }
8722 
8723   opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8724 }; // class pass_split_crit_edges
8725 
8726 } // anon namespace
8727 
8728 gimple_opt_pass *
8729 make_pass_split_crit_edges (gcc::context *ctxt)
8730 {
8731   return new pass_split_crit_edges (ctxt);
8732 }
8733 
8734 
/* Insert COND expression, which is a GIMPLE_COND, after STMT
   in basic block BB, splitting the block as appropriate and creating
   a new conditionally executed basic block.  Return the created
   basic block.  */

8739 basic_block
8740 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond)
8741 {
8742   edge fall = split_block (bb, stmt);
8743   gimple_stmt_iterator iter = gsi_last_bb (bb);
8744   basic_block new_bb;
8745 
8746   /* Insert cond statement.  */
8747   gcc_assert (gimple_code (cond) == GIMPLE_COND);
8748   if (gsi_end_p (iter))
8749     gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8750   else
8751     gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8752 
8753   /* Create conditionally executed block.  */
8754   new_bb = create_empty_bb (bb);
8755   make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8756   make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8757 
8758   /* Fix edge for split bb.  */
8759   fall->flags = EDGE_FALSE_VALUE;
8760 
8761   /* Update dominance info.  */
8762   if (dom_info_available_p (CDI_DOMINATORS))
8763     {
8764       set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8765       set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8766     }
8767 
8768   /* Update loop info.  */
8769   if (current_loops)
8770     add_bb_to_loop (new_bb, bb->loop_father);
8771 
8772   return new_bb;
8773 }
8774 
8775 /* Build a ternary operation and gimplify it.  Emit code before GSI.
8776    Return the gimple_val holding the result.  */
8777 
8778 tree
8779 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8780 		 tree type, tree a, tree b, tree c)
8781 {
8782   tree ret;
8783   location_t loc = gimple_location (gsi_stmt (*gsi));
8784 
8785   ret = fold_build3_loc (loc, code, type, a, b, c);
8786   STRIP_NOPS (ret);
8787 
8788   return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8789                                    GSI_SAME_STMT);
8790 }
8791 
8792 /* Build a binary operation and gimplify it.  Emit code before GSI.
8793    Return the gimple_val holding the result.  */
8794 
8795 tree
8796 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8797 		 tree type, tree a, tree b)
8798 {
8799   tree ret;
8800 
8801   ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8802   STRIP_NOPS (ret);
8803 
8804   return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8805                                    GSI_SAME_STMT);
8806 }
8807 
8808 /* Build a unary operation and gimplify it.  Emit code before GSI.
8809    Return the gimple_val holding the result.  */
8810 
8811 tree
8812 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8813 		 tree a)
8814 {
8815   tree ret;
8816 
8817   ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8818   STRIP_NOPS (ret);
8819 
8820   return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8821                                    GSI_SAME_STMT);
8822 }
8823 
8824 
8825 
8826 /* Given a basic block B which ends with a conditional and has
8827    precisely two successors, determine which of the edges is taken if
8828    the conditional is true and which is taken if the conditional is
8829    false.  Set TRUE_EDGE and FALSE_EDGE appropriately.  */
8830 
8831 void
8832 extract_true_false_edges_from_block (basic_block b,
8833 				     edge *true_edge,
8834 				     edge *false_edge)
8835 {
8836   edge e = EDGE_SUCC (b, 0);
8837 
8838   if (e->flags & EDGE_TRUE_VALUE)
8839     {
8840       *true_edge = e;
8841       *false_edge = EDGE_SUCC (b, 1);
8842     }
8843   else
8844     {
8845       *false_edge = e;
8846       *true_edge = EDGE_SUCC (b, 1);
8847     }
8848 }
8849 
8850 
8851 /* From a controlling predicate in the immediate dominator DOM of
8852    PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
8853    predicate evaluates to true and false and store them to
8854    *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
8855    they are non-NULL.  Returns true if the edges can be determined,
8856    else return false.  */
8857 
8858 bool
8859 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
8860 				     edge *true_controlled_edge,
8861 				     edge *false_controlled_edge)
8862 {
8863   basic_block bb = phiblock;
8864   edge true_edge, false_edge, tem;
8865   edge e0 = NULL, e1 = NULL;
8866 
8867   /* We have to verify that one edge into the PHI node is dominated
8868      by the true edge of the predicate block and the other edge
8869      dominated by the false edge.  This ensures that the PHI argument
8870      we are going to take is completely determined by the path we
8871      take from the predicate block.
8872      We can only use BB dominance checks below if the destinations of
8873      the true/false edges are dominated by their respective edges, and
8874      thus have a single predecessor.  */
8875   extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
8876   tem = EDGE_PRED (bb, 0);
8877   if (tem == true_edge
8878       || (single_pred_p (true_edge->dest)
8879 	  && (tem->src == true_edge->dest
8880 	      || dominated_by_p (CDI_DOMINATORS,
8881 				 tem->src, true_edge->dest))))
8882     e0 = tem;
8883   else if (tem == false_edge
8884 	   || (single_pred_p (false_edge->dest)
8885 	       && (tem->src == false_edge->dest
8886 		   || dominated_by_p (CDI_DOMINATORS,
8887 				      tem->src, false_edge->dest))))
8888     e1 = tem;
8889   else
8890     return false;
8891   tem = EDGE_PRED (bb, 1);
8892   if (tem == true_edge
8893       || (single_pred_p (true_edge->dest)
8894 	  && (tem->src == true_edge->dest
8895 	      || dominated_by_p (CDI_DOMINATORS,
8896 				 tem->src, true_edge->dest))))
8897     e0 = tem;
8898   else if (tem == false_edge
8899 	   || (single_pred_p (false_edge->dest)
8900 	       && (tem->src == false_edge->dest
8901 		   || dominated_by_p (CDI_DOMINATORS,
8902 				      tem->src, false_edge->dest))))
8903     e1 = tem;
8904   else
8905     return false;
8906   if (!e0 || !e1)
8907     return false;
8908 
8909   if (true_controlled_edge)
8910     *true_controlled_edge = e0;
8911   if (false_controlled_edge)
8912     *false_controlled_edge = e1;
8913 
8914   return true;
8915 }
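
/* The shape this function recognizes, as a sketch (block names are
   illustrative):

           DOM
         if (...)
         t/    \f
         /      \
       BB1      BB2
         \      /
         PHIBLOCK

   Here *TRUE_CONTROLLED_EDGE would be BB1->PHIBLOCK and
   *FALSE_CONTROLLED_EDGE would be BB2->PHIBLOCK.  BB1 and BB2 may also
   be absent, with the true/false edges entering PHIBLOCK directly.  */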
8916 
8917 
8918 
8919 /* Emit return warnings.  */
8920 
8921 namespace {
8922 
8923 const pass_data pass_data_warn_function_return =
8924 {
8925   GIMPLE_PASS, /* type */
8926   "*warn_function_return", /* name */
8927   OPTGROUP_NONE, /* optinfo_flags */
8928   TV_NONE, /* tv_id */
8929   PROP_cfg, /* properties_required */
8930   0, /* properties_provided */
8931   0, /* properties_destroyed */
8932   0, /* todo_flags_start */
8933   0, /* todo_flags_finish */
8934 };
8935 
8936 class pass_warn_function_return : public gimple_opt_pass
8937 {
8938 public:
8939   pass_warn_function_return (gcc::context *ctxt)
8940     : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8941   {}
8942 
8943   /* opt_pass methods: */
8944   virtual unsigned int execute (function *);
8945 
8946 }; // class pass_warn_function_return
8947 
8948 unsigned int
8949 pass_warn_function_return::execute (function *fun)
8950 {
8951   source_location location;
8952   gimple *last;
8953   edge e;
8954   edge_iterator ei;
8955 
8956   if (!targetm.warn_func_return (fun->decl))
8957     return 0;
8958 
8959   /* If we have a path to EXIT, then we do return.  */
8960   if (TREE_THIS_VOLATILE (fun->decl)
8961       && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8962     {
8963       location = UNKNOWN_LOCATION;
8964       FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8965 	{
8966 	  last = last_stmt (e->src);
8967 	  if ((gimple_code (last) == GIMPLE_RETURN
8968 	       || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8969 	      && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8970 	    break;
8971 	}
8972       if (location == UNKNOWN_LOCATION)
8973 	location = cfun->function_end_locus;
8974 
8975 #ifdef notyet
8976       if (warn_missing_noreturn)
8977         warning_at (location, 0, "%<noreturn%> function does return");
8978 #endif
8979     }
8980 
8981   /* If we see "return;" in some basic block, then we do reach the end
8982      without returning a value.  */
8983   else if (warn_return_type
8984 	   && !TREE_NO_WARNING (fun->decl)
8985 	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8986     {
8987       FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8988 	{
8989 	  gimple *last = last_stmt (e->src);
8990 	  greturn *return_stmt = dyn_cast <greturn *> (last);
8991 	  if (return_stmt
8992 	      && gimple_return_retval (return_stmt) == NULL
8993 	      && !gimple_no_warning_p (last))
8994 	    {
8995 	      location = gimple_location (last);
8996 	      if (location == UNKNOWN_LOCATION)
8997 		location = fun->function_end_locus;
8998 	      warning_at (location, OPT_Wreturn_type,
8999 			  "control reaches end of non-void function");
9000 	      TREE_NO_WARNING (fun->decl) = 1;
9001 	      break;
9002 	    }
9003 	}
9004       /* -fsanitize=return turns falling off the end of a non-void
9005 	 function into a call to __builtin___ubsan_handle_missing_return ().
9006 	 Recognize those calls too.  */
9007       basic_block bb;
9008       if (!TREE_NO_WARNING (fun->decl) && (flag_sanitize & SANITIZE_RETURN))
9009 	FOR_EACH_BB_FN (bb, fun)
9010 	  if (EDGE_COUNT (bb->succs) == 0)
9011 	    {
9012 	      gimple *last = last_stmt (bb);
9013 	      const enum built_in_function ubsan_missing_ret
9014 		= BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
9015 	      if (last && gimple_call_builtin_p (last, ubsan_missing_ret))
9016 		{
9017 		  gimple_stmt_iterator gsi = gsi_for_stmt (last);
9018 		  gsi_prev_nondebug (&gsi);
9019 		  gimple *prev = gsi_stmt (gsi);
9020 		  if (prev == NULL)
9021 		    location = UNKNOWN_LOCATION;
9022 		  else
9023 		    location = gimple_location (prev);
9024 		  if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9025 		    location = fun->function_end_locus;
9026 		  warning_at (location, OPT_Wreturn_type,
9027 			      "control reaches end of non-void function");
9028 		  TREE_NO_WARNING (fun->decl) = 1;
9029 		  break;
9030 		}
9031 	    }
9032     }
9033   return 0;
9034 }
9035 
9036 } // anon namespace
9037 
9038 gimple_opt_pass *
9039 make_pass_warn_function_return (gcc::context *ctxt)
9040 {
9041   return new pass_warn_function_return (ctxt);
9042 }
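
/* As an illustration (a sketch, not from the sources), with
   -Wreturn-type the pass above warns for

     int
     sign (int x)
     {
       if (x > 0)
	 return 1;
       if (x < 0)
	 return -1;
       // Falls off the end when x == 0, reaching an implicit "return;".
     }

   because a value-less return is reachable in a non-void function.  */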
9043 
9044 /* Walk a gimplified function and warn about calls whose return value is
9045    ignored when attribute((warn_unused_result)) is set on the callee.  This
9046    is done before inlining, so we need not worry about inlined call sites.  */
9047 
9048 static void
9049 do_warn_unused_result (gimple_seq seq)
9050 {
9051   tree fdecl, ftype;
9052   gimple_stmt_iterator i;
9053 
9054   for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
9055     {
9056       gimple *g = gsi_stmt (i);
9057 
9058       switch (gimple_code (g))
9059 	{
9060 	case GIMPLE_BIND:
9061 	  do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
9062 	  break;
9063 	case GIMPLE_TRY:
9064 	  do_warn_unused_result (gimple_try_eval (g));
9065 	  do_warn_unused_result (gimple_try_cleanup (g));
9066 	  break;
9067 	case GIMPLE_CATCH:
9068 	  do_warn_unused_result (gimple_catch_handler (
9069 				   as_a <gcatch *> (g)));
9070 	  break;
9071 	case GIMPLE_EH_FILTER:
9072 	  do_warn_unused_result (gimple_eh_filter_failure (g));
9073 	  break;
9074 
9075 	case GIMPLE_CALL:
9076 	  if (gimple_call_lhs (g))
9077 	    break;
9078 	  if (gimple_call_internal_p (g))
9079 	    break;
9080 
9081 	  /* This is a naked call, as opposed to a GIMPLE_CALL with an
9082 	     LHS.  All calls whose value is ignored should be
9083 	     represented like this.  Look for the attribute.  */
9084 	  fdecl = gimple_call_fndecl (g);
9085 	  ftype = gimple_call_fntype (g);
9086 
9087 	  if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
9088 	    {
9089 	      location_t loc = gimple_location (g);
9090 
9091 	      if (fdecl)
9092 		warning_at (loc, OPT_Wunused_result,
9093 			    "ignoring return value of %qD, "
9094 			    "declared with attribute warn_unused_result",
9095 			    fdecl);
9096 	      else
9097 		warning_at (loc, OPT_Wunused_result,
9098 			    "ignoring return value of function "
9099 			    "declared with attribute warn_unused_result");
9100 	    }
9101 	  break;
9102 
9103 	default:
9104 	  /* Not a container, not a call, or a call whose value is used.  */
9105 	  break;
9106 	}
9107     }
9108 }
9109 
9110 namespace {
9111 
9112 const pass_data pass_data_warn_unused_result =
9113 {
9114   GIMPLE_PASS, /* type */
9115   "*warn_unused_result", /* name */
9116   OPTGROUP_NONE, /* optinfo_flags */
9117   TV_NONE, /* tv_id */
9118   PROP_gimple_any, /* properties_required */
9119   0, /* properties_provided */
9120   0, /* properties_destroyed */
9121   0, /* todo_flags_start */
9122   0, /* todo_flags_finish */
9123 };
9124 
9125 class pass_warn_unused_result : public gimple_opt_pass
9126 {
9127 public:
9128   pass_warn_unused_result (gcc::context *ctxt)
9129     : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9130   {}
9131 
9132   /* opt_pass methods: */
9133   virtual bool gate (function *) { return flag_warn_unused_result; }
9134   virtual unsigned int execute (function *)
9135     {
9136       do_warn_unused_result (gimple_body (current_function_decl));
9137       return 0;
9138     }
9139 
9140 }; // class pass_warn_unused_result
9141 
9142 } // anon namespace
9143 
9144 gimple_opt_pass *
9145 make_pass_warn_unused_result (gcc::context *ctxt)
9146 {
9147   return new pass_warn_unused_result (ctxt);
9148 }
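
/* For illustration (a sketch, not from the sources):

     __attribute__ ((warn_unused_result)) int must_check (void);

     void
     f (void)
     {
       must_check ();   // Result discarded: -Wunused-result warns here.
     }

   The call gimplifies to a GIMPLE_CALL without an LHS, which the walk
   above reports.  */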
9149 
9150 /* IPA passes, compilation of earlier functions or inlining
9151    might have changed some properties, such as marking functions nothrow,
9152    pure, const or noreturn.
9153    Remove redundant edges and basic blocks, and create new ones if necessary.
9154 
9155    This pass can't be executed as a standalone pass from the pass manager,
9156    because between inlining and this fixup verify_flow_info would fail.  */
9157 
9158 unsigned int
9159 execute_fixup_cfg (void)
9160 {
9161   basic_block bb;
9162   gimple_stmt_iterator gsi;
9163   int todo = 0;
9164   gcov_type count_scale;
9165   edge e;
9166   edge_iterator ei;
9167   cgraph_node *node = cgraph_node::get (current_function_decl);
9168 
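  /* A sketch of the arithmetic (my reading of the profile helpers, not
     spelled out in this file): count_scale is the fixed-point ratio
     node->count * REG_BR_PROB_BASE / entry-block-count, and apply_scale
     multiplies by it and divides back by REG_BR_PROB_BASE, so the counts
     below end up rescaled to match NODE's count.  */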
9169   count_scale
9170     = GCOV_COMPUTE_SCALE (node->count, ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
9171 
9172   ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9173   EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9174     = apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count, count_scale);
9175 
9176   FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
9177     e->count = apply_scale (e->count, count_scale);
9178 
9179   FOR_EACH_BB_FN (bb, cfun)
9180     {
9181       bb->count = apply_scale (bb->count, count_scale);
9182       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9183 	{
9184 	  gimple *stmt = gsi_stmt (gsi);
9185 	  tree decl = is_gimple_call (stmt)
9186 		      ? gimple_call_fndecl (stmt)
9187 		      : NULL;
9188 	  if (decl)
9189 	    {
9190 	      int flags = gimple_call_flags (stmt);
9191 	      if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9192 		{
9193 		  if (gimple_purge_dead_abnormal_call_edges (bb))
9194 		    todo |= TODO_cleanup_cfg;
9195 
9196 		  if (gimple_in_ssa_p (cfun))
9197 		    {
9198 		      todo |= TODO_update_ssa | TODO_cleanup_cfg;
9199 		      update_stmt (stmt);
9200 		    }
9201 		}
9202 
9203 	      if (flags & ECF_NORETURN
9204 		  && fixup_noreturn_call (stmt))
9205 		todo |= TODO_cleanup_cfg;
9206 	     }
9207 
9208 	  /* Remove stores to variables we marked write-only.
9209 	     Keep the access when the store has a side effect, i.e. when
9210 	     the source is volatile.  */
9211 	  if (gimple_store_p (stmt)
9212 	      && !gimple_has_side_effects (stmt))
9213 	    {
9214 	      tree lhs = get_base_address (gimple_get_lhs (stmt));
9215 
9216 	      if (VAR_P (lhs)
9217 		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9218 		  && varpool_node::get (lhs)->writeonly)
9219 		{
9220 		  unlink_stmt_vdef (stmt);
9221 		  gsi_remove (&gsi, true);
9222 		  release_defs (stmt);
9223 	          todo |= TODO_update_ssa | TODO_cleanup_cfg;
9224 	          continue;
9225 		}
9226 	    }
9227 	  /* For calls we can simply remove the LHS when it is known
9228 	     to be write-only.  */
9229 	  if (is_gimple_call (stmt)
9230 	      && gimple_get_lhs (stmt))
9231 	    {
9232 	      tree lhs = get_base_address (gimple_get_lhs (stmt));
9233 
9234 	      if (VAR_P (lhs)
9235 		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9236 		  && varpool_node::get (lhs)->writeonly)
9237 		{
9238 		  gimple_call_set_lhs (stmt, NULL);
9239 		  update_stmt (stmt);
9240 	          todo |= TODO_update_ssa | TODO_cleanup_cfg;
9241 		}
9242 	    }
9243 
9244 	  if (maybe_clean_eh_stmt (stmt)
9245 	      && gimple_purge_dead_eh_edges (bb))
9246 	    todo |= TODO_cleanup_cfg;
9247 	  gsi_next (&gsi);
9248 	}
9249 
9250       FOR_EACH_EDGE (e, ei, bb->succs)
9251         e->count = apply_scale (e->count, count_scale);
9252 
9253       /* If we have a basic block with no successors that does not
9254 	 end with a control statement or a noreturn call, end it with
9255 	 a call to __builtin_unreachable.  This situation can occur
9256 	 when inlining a noreturn call that does in fact return.  */
9257       if (EDGE_COUNT (bb->succs) == 0)
9258 	{
9259 	  gimple *stmt = last_stmt (bb);
9260 	  if (!stmt
9261 	      || (!is_ctrl_stmt (stmt)
9262 		  && (!is_gimple_call (stmt)
9263 		      || !gimple_call_noreturn_p (stmt))))
9264 	    {
9265 	      if (stmt && is_gimple_call (stmt))
9266 		gimple_call_set_ctrl_altering (stmt, false);
9267 	      tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9268 	      stmt = gimple_build_call (fndecl, 0);
9269 	      gimple_stmt_iterator gsi = gsi_last_bb (bb);
9270 	      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9271 	      if (!cfun->after_inlining)
9272 		{
9273 		  gcall *call_stmt = dyn_cast <gcall *> (stmt);
9274 		  int freq
9275 		    = compute_call_stmt_bb_frequency (current_function_decl,
9276 						      bb);
9277 		  node->create_edge (cgraph_node::get_create (fndecl),
9278 				     call_stmt, bb->count, freq);
9279 		}
9280 	    }
9281 	}
9282     }
9283   if (count_scale != REG_BR_PROB_BASE)
9284     compute_function_frequency ();
9285 
9286   if (current_loops
9287       && (todo & TODO_cleanup_cfg))
9288     loops_state_set (LOOPS_NEED_FIXUP);
9289 
9290   return todo;
9291 }
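
/* For example (illustrative, not from the sources): inlining

     __attribute__ ((noreturn)) void
     die (void)
     {
       // Buggy: declared noreturn, but control can fall off the end.
     }

   can leave the caller with a block that has no successors yet does not
   end in a control statement; the loop above caps such a block with a
   call to __builtin_unreachable.  */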
9292 
9293 namespace {
9294 
9295 const pass_data pass_data_fixup_cfg =
9296 {
9297   GIMPLE_PASS, /* type */
9298   "fixup_cfg", /* name */
9299   OPTGROUP_NONE, /* optinfo_flags */
9300   TV_NONE, /* tv_id */
9301   PROP_cfg, /* properties_required */
9302   0, /* properties_provided */
9303   0, /* properties_destroyed */
9304   0, /* todo_flags_start */
9305   0, /* todo_flags_finish */
9306 };
9307 
9308 class pass_fixup_cfg : public gimple_opt_pass
9309 {
9310 public:
9311   pass_fixup_cfg (gcc::context *ctxt)
9312     : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
9313   {}
9314 
9315   /* opt_pass methods: */
9316   opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
9317   virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
9318 
9319 }; // class pass_fixup_cfg
9320 
9321 } // anon namespace
9322 
9323 gimple_opt_pass *
9324 make_pass_fixup_cfg (gcc::context *ctxt)
9325 {
9326   return new pass_fixup_cfg (ctxt);
9327 }
9328 
9329 /* Garbage collection support for edge_def.  */
9330 
9331 extern void gt_ggc_mx (tree&);
9332 extern void gt_ggc_mx (gimple *&);
9333 extern void gt_ggc_mx (rtx&);
9334 extern void gt_ggc_mx (basic_block&);
9335 
9336 static void
9337 gt_ggc_mx (rtx_insn *& x)
9338 {
9339   if (x)
9340     gt_ggc_mx_rtx_def ((void *) x);
9341 }
9342 
9343 void
9344 gt_ggc_mx (edge_def *e)
9345 {
9346   tree block = LOCATION_BLOCK (e->goto_locus);
9347   gt_ggc_mx (e->src);
9348   gt_ggc_mx (e->dest);
9349   if (current_ir_type () == IR_GIMPLE)
9350     gt_ggc_mx (e->insns.g);
9351   else
9352     gt_ggc_mx (e->insns.r);
9353   gt_ggc_mx (block);
9354 }
9355 
9356 /* PCH support for edge_def.  */
9357 
9358 extern void gt_pch_nx (tree&);
9359 extern void gt_pch_nx (gimple *&);
9360 extern void gt_pch_nx (rtx&);
9361 extern void gt_pch_nx (basic_block&);
9362 
9363 static void
9364 gt_pch_nx (rtx_insn *& x)
9365 {
9366   if (x)
9367     gt_pch_nx_rtx_def ((void *) x);
9368 }
9369 
9370 void
9371 gt_pch_nx (edge_def *e)
9372 {
9373   tree block = LOCATION_BLOCK (e->goto_locus);
9374   gt_pch_nx (e->src);
9375   gt_pch_nx (e->dest);
9376   if (current_ir_type () == IR_GIMPLE)
9377     gt_pch_nx (e->insns.g);
9378   else
9379     gt_pch_nx (e->insns.r);
9380   gt_pch_nx (block);
9381 }
9382 
9383 void
9384 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9385 {
9386   tree block = LOCATION_BLOCK (e->goto_locus);
9387   op (&(e->src), cookie);
9388   op (&(e->dest), cookie);
9389   if (current_ir_type () == IR_GIMPLE)
9390     op (&(e->insns.g), cookie);
9391   else
9392     op (&(e->insns.r), cookie);
9393   op (&(block), cookie);
9394 }
9395 
9396 #if CHECKING_P
9397 
9398 namespace selftest {
9399 
9400 /* Helper function for CFG selftests: create a dummy function decl
9401    and push it as cfun.  */
9402 
9403 static tree
9404 push_fndecl (const char *name)
9405 {
9406   tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
9407   /* FIXME: this uses input_location: */
9408   tree fndecl = build_fn_decl (name, fn_type);
9409   tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
9410 			    NULL_TREE, integer_type_node);
9411   DECL_RESULT (fndecl) = retval;
9412   push_struct_function (fndecl);
9413   function *fun = DECL_STRUCT_FUNCTION (fndecl);
9414   ASSERT_TRUE (fun != NULL);
9415   init_empty_tree_cfg_for_function (fun);
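  /* A fresh, empty CFG always contains the ENTRY and EXIT blocks, hence
     the count of 2 below.  */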
9416   ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
9417   ASSERT_EQ (0, n_edges_for_fn (fun));
9418   return fndecl;
9419 }
9420 
9421 /* These tests directly create CFGs.
9422    Compare with the static fns within tree-cfg.c:
9423      - build_gimple_cfg
9424      - make_blocks: calls create_basic_block (seq, bb);
9425      - make_edges.   */
9426 
9427 /* Verify a simple cfg of the form:
9428      ENTRY -> A -> B -> C -> EXIT.  */
9429 
9430 static void
9431 test_linear_chain ()
9432 {
9433   gimple_register_cfg_hooks ();
9434 
9435   tree fndecl = push_fndecl ("cfg_test_linear_chain");
9436   function *fun = DECL_STRUCT_FUNCTION (fndecl);
9437 
9438   /* Create some empty blocks.  */
9439   basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9440   basic_block bb_b = create_empty_bb (bb_a);
9441   basic_block bb_c = create_empty_bb (bb_b);
9442 
9443   ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
9444   ASSERT_EQ (0, n_edges_for_fn (fun));
9445 
9446   /* Create some edges: a simple linear chain of BBs.  */
9447   make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9448   make_edge (bb_a, bb_b, 0);
9449   make_edge (bb_b, bb_c, 0);
9450   make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9451 
9452   /* Verify the edges.  */
9453   ASSERT_EQ (4, n_edges_for_fn (fun));
9454   ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
9455   ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
9456   ASSERT_EQ (1, bb_a->preds->length ());
9457   ASSERT_EQ (1, bb_a->succs->length ());
9458   ASSERT_EQ (1, bb_b->preds->length ());
9459   ASSERT_EQ (1, bb_b->succs->length ());
9460   ASSERT_EQ (1, bb_c->preds->length ());
9461   ASSERT_EQ (1, bb_c->succs->length ());
9462   ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
9463   ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
9464 
9465   /* Verify the dominance information.
9466      Each BB in our simple chain should be dominated by the one before
9467      it.  */
9468   calculate_dominance_info (CDI_DOMINATORS);
9469   ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9470   ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9471   vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9472   ASSERT_EQ (1, dom_by_b.length ());
9473   ASSERT_EQ (bb_c, dom_by_b[0]);
9474   free_dominance_info (CDI_DOMINATORS);
9475   dom_by_b.release ();
9476 
9477   /* Similarly for post-dominance: each BB in our chain is post-dominated
9478      by the one after it.  */
9479   calculate_dominance_info (CDI_POST_DOMINATORS);
9480   ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9481   ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9482   vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9483   ASSERT_EQ (1, postdom_by_b.length ());
9484   ASSERT_EQ (bb_a, postdom_by_b[0]);
9485   free_dominance_info (CDI_POST_DOMINATORS);
9486   postdom_by_b.release ();
9487 
9488   pop_cfun ();
9489 }
9490 
9491 /* Verify a simple CFG of the form:
9492      ENTRY
9493        |
9494        A
9495       / \
9496      /t  \f
9497     B     C
9498      \   /
9499       \ /
9500        D
9501        |
9502       EXIT.  */
9503 
9504 static void
9505 test_diamond ()
9506 {
9507   gimple_register_cfg_hooks ();
9508 
9509   tree fndecl = push_fndecl ("cfg_test_diamond");
9510   function *fun = DECL_STRUCT_FUNCTION (fndecl);
9511 
9512   /* Create some empty blocks.  */
9513   basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9514   basic_block bb_b = create_empty_bb (bb_a);
9515   basic_block bb_c = create_empty_bb (bb_a);
9516   basic_block bb_d = create_empty_bb (bb_b);
9517 
9518   ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
9519   ASSERT_EQ (0, n_edges_for_fn (fun));
9520 
9521   /* Create the edges.  */
9522   make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9523   make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
9524   make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
9525   make_edge (bb_b, bb_d, 0);
9526   make_edge (bb_c, bb_d, 0);
9527   make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9528 
9529   /* Verify the edges.  */
9530   ASSERT_EQ (6, n_edges_for_fn (fun));
9531   ASSERT_EQ (1, bb_a->preds->length ());
9532   ASSERT_EQ (2, bb_a->succs->length ());
9533   ASSERT_EQ (1, bb_b->preds->length ());
9534   ASSERT_EQ (1, bb_b->succs->length ());
9535   ASSERT_EQ (1, bb_c->preds->length ());
9536   ASSERT_EQ (1, bb_c->succs->length ());
9537   ASSERT_EQ (2, bb_d->preds->length ());
9538   ASSERT_EQ (1, bb_d->succs->length ());
9539 
9540   /* Verify the dominance information.  */
9541   calculate_dominance_info (CDI_DOMINATORS);
9542   ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9543   ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9544   ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
9545   vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
9546   ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order.  */
9547   dom_by_a.release ();
9548   vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9549   ASSERT_EQ (0, dom_by_b.length ());
9550   dom_by_b.release ();
9551   free_dominance_info (CDI_DOMINATORS);
9552 
9553   /* Similarly for post-dominance.  */
9554   calculate_dominance_info (CDI_POST_DOMINATORS);
9555   ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9556   ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9557   ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
9558   vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
9559   ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order.  */
9560   postdom_by_d.release ();
9561   vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9562   ASSERT_EQ (0, postdom_by_b.length ());
9563   postdom_by_b.release ();
9564   free_dominance_info (CDI_POST_DOMINATORS);
9565 
9566   pop_cfun ();
9567 }
9568 
9569 /* Verify that we can handle a CFG containing a "complete" (i.e.
9570    fully-connected) subgraph, where A, B, C and D below all have edges
9571    pointing to every other node, and also to themselves.
9572    e.g.:
9573      ENTRY  EXIT
9574        |    ^
9575        |   /
9576        |  /
9577        | /
9578        V/
9579        A<--->B
9580        ^^   ^^
9581        | \ / |
9582        |  X  |
9583        | / \ |
9584        VV   VV
9585        C<--->D
9586 */
9587 
9588 static void
9589 test_fully_connected ()
9590 {
9591   gimple_register_cfg_hooks ();
9592 
9593   tree fndecl = push_fndecl ("cfg_fully_connected");
9594   function *fun = DECL_STRUCT_FUNCTION (fndecl);
9595 
9596   const int n = 4;
9597 
9598   /* Create some empty blocks.  */
9599   auto_vec <basic_block> subgraph_nodes;
9600   for (int i = 0; i < n; i++)
9601     subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
9602 
9603   ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
9604   ASSERT_EQ (0, n_edges_for_fn (fun));
9605 
9606   /* Create the edges.  */
9607   make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
9608   make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9609   for (int i = 0; i < n; i++)
9610     for (int j = 0; j < n; j++)
9611       make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
9612 
9613   /* Verify the edges.  */
9614   ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
9615   /* The first one is linked to ENTRY/EXIT as well as itself and
9616      everything else.  */
9617   ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
9618   ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
9619   /* The other ones in the subgraph are linked to everything in
9620      the subgraph (including themselves).  */
9621   for (int i = 1; i < n; i++)
9622     {
9623       ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
9624       ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
9625     }
9626 
9627   /* Verify the dominance information.  */
9628   calculate_dominance_info (CDI_DOMINATORS);
9629   /* The initial block in the subgraph should be dominated by ENTRY.  */
9630   ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
9631 	     get_immediate_dominator (CDI_DOMINATORS,
9632 				      subgraph_nodes[0]));
9633   /* Every other block in the subgraph should be dominated by the
9634      initial block.  */
9635   for (int i = 1; i < n; i++)
9636     ASSERT_EQ (subgraph_nodes[0],
9637 	       get_immediate_dominator (CDI_DOMINATORS,
9638 					subgraph_nodes[i]));
9639   free_dominance_info (CDI_DOMINATORS);
9640 
9641   /* Similarly for post-dominance.  */
9642   calculate_dominance_info (CDI_POST_DOMINATORS);
9643   /* The initial block in the subgraph should be postdominated by EXIT.  */
9644   ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
9645 	     get_immediate_dominator (CDI_POST_DOMINATORS,
9646 				      subgraph_nodes[0]));
9647   /* Every other block in the subgraph should be postdominated by the
9648      initial block, since that leads to EXIT.  */
9649   for (int i = 1; i < n; i++)
9650     ASSERT_EQ (subgraph_nodes[0],
9651 	       get_immediate_dominator (CDI_POST_DOMINATORS,
9652 					subgraph_nodes[i]));
9653   free_dominance_info (CDI_POST_DOMINATORS);
9654 
9655   pop_cfun ();
9656 }
9657 
9658 /* Run all of the selftests within this file.  */
9659 
9660 void
9661 tree_cfg_c_tests ()
9662 {
9663   test_linear_chain ();
9664   test_diamond ();
9665   test_fully_connected ();
9666 }
9667 
9668 } // namespace selftest
9669 
9670 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
9671    - loop
9672    - nested loops
9673    - switch statement (a block with many out-edges)
9674    - something that jumps to itself
9675    - etc  */
9676 
9677 #endif /* CHECKING_P */
9678