/* Control flow functions for trees.
   Copyright (C) 2001-2017 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-general.h"
#include "omp-expand.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"
#include "selftest.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently look up all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);
static void lower_phi_internal_fn ();

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}
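
/* An editorial note for orientation: NUM_FIXED_BLOCKS counts the two blocks
   every function always has, the ENTRY block (index 0) and the EXIT block
   (index 1) wired up above, so a freshly initialized CFG contains only
   those two blocks, chained directly to each other.  */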

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  lower_phi_internal_fn ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
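
/* For illustration, a loop preceded by "#pragma GCC ivdep" reaches this
   point with an internal call along the lines of

     _1 = ANNOTATE (_0, annot_expr_ivdep_kind);

   immediately before the GIMPLE_COND; the loop above rewrites the call into
   a plain copy and records the hint by setting loop->safelen to INT_MAX.
   (The textual form of the call is a sketch; the second argument carries
   the annotation kind.)  */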

/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}

/* Lower internal PHI function from GIMPLE FE.  */

static void
lower_phi_internal_fn ()
{
  basic_block bb, pred = NULL;
  gimple_stmt_iterator gsi;
  tree lhs;
  gphi *phi_node;
  gimple *stmt;

  /* After edge creation, handle __PHI function from GIMPLE FE.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi);)
	{
	  stmt = gsi_stmt (gsi);
	  if (! gimple_call_internal_p (stmt, IFN_PHI))
	    break;

	  lhs = gimple_call_lhs (stmt);
	  phi_node = create_phi_node (lhs, bb);

	  /* Add arguments to the PHI node.  */
	  for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
	    {
	      tree arg = gimple_call_arg (stmt, i);
	      if (TREE_CODE (arg) == LABEL_DECL)
		pred = label_to_block (arg);
	      else
		{
		  edge e = find_edge (pred, bb);
		  add_phi_arg (phi_node, arg, e, UNKNOWN_LOCATION);
		}
	    }

	  gsi_remove (&gsi, true);
	}
    }
}
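
/* As an illustrative sketch, the GIMPLE frontend spells such PHIs as calls
   of the form

     a_1 = __PHI (__BB2: b_2, __BB3: c_3);

   i.e. the arguments alternate between a predecessor block label and the
   value flowing in from that predecessor, which is the shape the loop
   above decodes: a LABEL_DECL argument selects PRED, and the following
   argument becomes the PHI argument on the PRED -> BB edge.  */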

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
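
/* For example, with the GNU C labels-as-values extension:

     void *p = &&lab;
     goto *p;        <-- computed goto: destination is a pointer value
   lab:
     goto done;      <-- plain goto: destination is a LABEL_DECL

   only the first goto satisfies computed_goto_p.  */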

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	{
	  gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
	  gimple *stmt;

	  if (gsi_end_p (gsi))
	    return false;
	  stmt = gsi_stmt (gsi);
	  while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
	    {
	      gsi_next (&gsi);
	      if (gsi_end_p (gsi))
		return false;
	      stmt = gsi_stmt (gsi);
	    }
	  return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
	}
    }
  return false;
}


/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via EH.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* A BUILT_IN_RETURN call is the same as a return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
	 as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
	  && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}


/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      gimple *prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make an abnormal goto, use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
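	      /* For instance, a returns-twice call "res_1 = setjmp (&buf);"
		 has now been split into "tmp = setjmp (&buf);" followed by
		 the copy "res_1 = tmp;", so RES_1 keeps its old value along
		 the abnormal edge taken on setjmp's second return.  (An
		 illustrative sketch; setjmp is one such call.)  */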
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}

/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }
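
  /* The new length works out to last + ceil (last / 4), i.e. roughly 25%
     growth each time: a full array of 20 entries grows to 25, then 32,
     40, 50, ... amortizing the cost of reallocations.  */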

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}


/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   the dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
	  return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block with
   an ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						 1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
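
/* To illustrate the factoring: after this transformation each original
   computed goto

     goto *p_1;

   has been rewritten as

     gotovar = p_1;
     goto <factored label>;

   and only the single dispatcher block carries abnormal edges to the
   potential targets, instead of every computed goto carrying an edge to
   every label whose address was taken.  (A sketch of the resulting
   GIMPLE; the variable and label names are the artificial ones created
   above.)  */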

/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   returns 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* A BUILT_IN_RETURN call is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
        gtransaction *txn = as_a <gtransaction *> (last);
	tree label1 = gimple_transaction_label_norm (txn);
	tree label2 = gimple_transaction_label_uninst (txn);

	if (label1)
	  make_edge (bb, label_to_block (label1), EDGE_FALLTHRU);
	if (label2)
	  make_edge (bb, label_to_block (label2),
		     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

	tree label3 = gimple_transaction_label_over (txn);
	if (gimple_transaction_subcode (txn)
	    & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
	  make_edge (bb, label_to_block (label3), EDGE_TM_ABORT);

	fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
	bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with an ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  omp_free_regions ();
}

/* Add SEQ after GSI.  Start new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);
      bb = bb->next_bb;
    }
  return true;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && filename_cmp (from.file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, gimple_location (first)))
	      || (last && same_line_p (locus, gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator = next_discriminator_for_locus (locus);
	      else
		e->dest->discriminator = next_discriminator_for_locus (locus);
	    }
	}
    }
}
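
/* As an example of why discriminators matter for sample-based profiling:
   in

     if (x) foo (); else bar ();

   the condition, the call to foo and the call to bar all share one source
   line, so their basic blocks can only be told apart by discriminator;
   without one, samples from the three blocks would be indistinguishable
   when attributed back to the source line.  */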

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}
/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it any more,
   since (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted an undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *txn = as_a <gtransaction *> (stmt);

	    label = gimple_transaction_label_norm (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_norm (txn, new_label);
	      }

	    label = gimple_transaction_label_uninst (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_uninst (txn, new_label);
	      }

	    label = gimple_transaction_label_over (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_over (txn, new_label);
	      }
	  }
	  break;

	default:
	  break;
      }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
	 default case.  */
      if (base_bb == default_bb)
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::add (base_high, 1);

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::eq_p (CASE_LOW (merge_case), bhp1))
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
			       gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
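
/* For example, given

     switch (x) { case 1: case 2: case 3: foo (); break; default: bar (); }

   the three consecutive cases all jump to the block calling foo, so they
   are merged into the single entry "case 1 ... 3:".  A case whose target
   is the same block as the default's is simply dropped, since the default
   edge already covers it.  */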
1725 
1726 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1727    and scan the sorted vector of cases.  Combine the ones jumping to the
1728    same label.  */
1729 
1730 void
1731 group_case_labels (void)
1732 {
1733   basic_block bb;
1734 
1735   FOR_EACH_BB_FN (bb, cfun)
1736     {
1737       gimple *stmt = last_stmt (bb);
1738       if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1739 	group_case_labels_stmt (as_a <gswitch *> (stmt));
1740     }
1741 }
1742 
1743 /* Checks whether we can merge block B into block A.  */
1744 
1745 static bool
1746 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1747 {
1748   gimple *stmt;
1749 
1750   if (!single_succ_p (a))
1751     return false;
1752 
1753   if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1754     return false;
1755 
1756   if (single_succ (a) != b)
1757     return false;
1758 
1759   if (!single_pred_p (b))
1760     return false;
1761 
1762   if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1763       || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1764     return false;
1765 
1766   /* If A ends by a statement causing exceptions or something similar, we
1767      cannot merge the blocks.  */
1768   stmt = last_stmt (a);
1769   if (stmt && stmt_ends_bb_p (stmt))
1770     return false;
1771 
1772   /* Do not allow a block with only a non-local label to be merged.  */
1773   if (stmt)
1774     if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1775       if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1776 	return false;
1777 
1778   /* Examine the labels at the beginning of B.  */
1779   for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1780        gsi_next (&gsi))
1781     {
1782       tree lab;
1783       glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1784       if (!label_stmt)
1785 	break;
1786       lab = gimple_label_label (label_stmt);
1787 
1788       /* Do not remove user-forced labels, nor any user labels at -O0.  */
1789       if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1790 	return false;
1791     }
1792 
1793   /* Protect simple loop latches.  We only want to avoid merging
1794      the latch with the loop header or with a block in another
1795      loop in this case.  */
1796   if (current_loops
1797       && b->loop_father->latch == b
1798       && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1799       && (b->loop_father->header == a
1800 	  || b->loop_father != a->loop_father))
1801     return false;
1802 
1803   /* It must be possible to eliminate all phi nodes in B.  If ssa form
1804      is not up-to-date and a name-mapping is registered, we cannot eliminate
1805      any phis.  Symbols marked for renaming are never a problem though.  */
1806   for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1807        gsi_next (&gsi))
1808     {
1809       gphi *phi = gsi.phi ();
1810       /* Technically only new names matter.  */
1811       if (name_registered_for_update_p (PHI_RESULT (phi)))
1812 	return false;
1813     }
1814 
1815   /* When not optimizing, don't merge if we'd lose goto_locus.  */
1816   if (!optimize
1817       && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1818     {
1819       location_t goto_locus = single_succ_edge (a)->goto_locus;
1820       gimple_stmt_iterator prev, next;
1821       prev = gsi_last_nondebug_bb (a);
1822       next = gsi_after_labels (b);
1823       if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1824 	gsi_next_nondebug (&next);
1825       if ((gsi_end_p (prev)
1826 	   || gimple_location (gsi_stmt (prev)) != goto_locus)
1827 	  && (gsi_end_p (next)
1828 	      || gimple_location (gsi_stmt (next)) != goto_locus))
1829 	return false;
1830     }
1831 
1832   return true;
1833 }
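
/* A minimal usage sketch, with BB an assumed basic block in CFUN:
   cleanup code typically pairs this predicate with the merge_blocks
   hook from cfghooks.h, along the lines of

     if (single_succ_p (bb)
         && gimple_can_merge_blocks_p (bb, single_succ (bb)))
       merge_blocks (bb, single_succ (bb));

   The predicate checks every structural precondition, so the hook
   implementation below may assume A and B are mergeable.  */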
1834 
1835 /* Replaces all uses of NAME by VAL.  */
1836 
1837 void
1838 replace_uses_by (tree name, tree val)
1839 {
1840   imm_use_iterator imm_iter;
1841   use_operand_p use;
1842   gimple *stmt;
1843   edge e;
1844 
1845   FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1846     {
1847       /* Mark the block if we change the last stmt in it.  */
1848       if (cfgcleanup_altered_bbs
1849 	  && stmt_ends_bb_p (stmt))
1850 	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1851 
1852       FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1853         {
1854 	  replace_exp (use, val);
1855 
1856 	  if (gimple_code (stmt) == GIMPLE_PHI)
1857 	    {
1858 	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1859 				       PHI_ARG_INDEX_FROM_USE (use));
1860 	      if (e->flags & EDGE_ABNORMAL
1861 		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1862 		{
1863 		  /* This can only occur for virtual operands, since
1864 		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
1865 		     would prevent replacement.  */
1866 		  gcc_checking_assert (virtual_operand_p (name));
1867 		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1868 		}
1869 	    }
1870 	}
1871 
1872       if (gimple_code (stmt) != GIMPLE_PHI)
1873 	{
1874 	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1875 	  gimple *orig_stmt = stmt;
1876 	  size_t i;
1877 
1878 	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
1879 	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
1880 	     only change something from non-invariant to invariant, and only
1881 	     when propagating constants.  */
1882 	  if (is_gimple_min_invariant (val))
1883 	    for (i = 0; i < gimple_num_ops (stmt); i++)
1884 	      {
1885 		tree op = gimple_op (stmt, i);
1886 		/* Operands may be empty here.  For example, the labels
1887 		   of a GIMPLE_COND are nulled out following the creation
1888 		   of the corresponding CFG edges.  */
1889 		if (op && TREE_CODE (op) == ADDR_EXPR)
1890 		  recompute_tree_invariant_for_addr_expr (op);
1891 	      }
1892 
1893 	  if (fold_stmt (&gsi))
1894 	    stmt = gsi_stmt (gsi);
1895 
1896 	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1897 	    gimple_purge_dead_eh_edges (gimple_bb (stmt));
1898 
1899 	  update_stmt (stmt);
1900 	}
1901     }
1902 
1903   gcc_checking_assert (has_zero_uses (name));
1904 
1905   /* Also update the trees stored in loop structures.  */
1906   if (current_loops)
1907     {
1908       struct loop *loop;
1909 
1910       FOR_EACH_LOOP (loop, 0)
1911 	{
1912 	  substitute_in_loop_info (loop, name, val);
1913 	}
1914     }
1915 }
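
/* A hedged usage sketch (the setup is assumed, not code from this
   file): a propagation pass that has proved the SSA name NAME to be
   zero could write

     tree zero = build_int_cst (TREE_TYPE (name), 0);
     replace_uses_by (name, zero);

   after which NAME has no uses left, as asserted above, and any
   statement folded along the way has had dead EH edges purged.  */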
1916 
1917 /* Merge block B into block A.  */
1918 
1919 static void
1920 gimple_merge_blocks (basic_block a, basic_block b)
1921 {
1922   gimple_stmt_iterator last, gsi;
1923   gphi_iterator psi;
1924 
1925   if (dump_file)
1926     fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1927 
1928   /* Remove all single-valued PHI nodes from block B of the form
1929      V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
1930   gsi = gsi_last_bb (a);
1931   for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1932     {
1933       gimple *phi = gsi_stmt (psi);
1934       tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1935       gimple *copy;
1936       bool may_replace_uses = (virtual_operand_p (def)
1937 			       || may_propagate_copy (def, use));
1938 
1939       /* In case we maintain loop closed ssa form, do not propagate arguments
1940 	 of loop exit phi nodes.  */
1941       if (current_loops
1942 	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1943 	  && !virtual_operand_p (def)
1944 	  && TREE_CODE (use) == SSA_NAME
1945 	  && a->loop_father != b->loop_father)
1946 	may_replace_uses = false;
1947 
1948       if (!may_replace_uses)
1949 	{
1950 	  gcc_assert (!virtual_operand_p (def));
1951 
1952 	  /* Note that just emitting the copies is fine -- there is no problem
1953 	     with ordering of phi nodes.  This is because A is the single
1954 	     predecessor of B, therefore results of the phi nodes cannot
1955 	     appear as arguments of the phi nodes.  */
1956 	  copy = gimple_build_assign (def, use);
1957 	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1958           remove_phi_node (&psi, false);
1959 	}
1960       else
1961         {
1962 	  /* If we deal with a PHI for virtual operands, we can simply
1963 	     propagate these without fussing with folding or updating
1964 	     the stmt.  */
1965 	  if (virtual_operand_p (def))
1966 	    {
1967 	      imm_use_iterator iter;
1968 	      use_operand_p use_p;
1969 	      gimple *stmt;
1970 
1971 	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1972 		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1973 		  SET_USE (use_p, use);
1974 
1975 	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1976 		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
1977 	    }
1978 	  else
1979             replace_uses_by (def, use);
1980 
1981           remove_phi_node (&psi, true);
1982         }
1983     }
1984 
1985   /* Ensure that B follows A.  */
1986   move_block_after (b, a);
1987 
1988   gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1989   gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1990 
1991   /* Remove labels from B and set gimple_bb to A for other statements.  */
1992   for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
1993     {
1994       gimple *stmt = gsi_stmt (gsi);
1995       if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1996 	{
1997 	  tree label = gimple_label_label (label_stmt);
1998 	  int lp_nr;
1999 
2000 	  gsi_remove (&gsi, false);
2001 
2002 	  /* Now that we can thread computed gotos, we might have
2003 	     a situation where we have a forced label in block B.
2004 	     However, the label at the start of block B might still be
2005 	     used in other ways (think about the runtime checking for
2006 	     Fortran assigned gotos).  So we cannot just delete the
2007 	     label.  Instead we move the label to the start of block A.  */
2008 	  if (FORCED_LABEL (label))
2009 	    {
2010 	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2011 	      gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2012 	    }
2013 	  /* Other user labels are kept around in the form of a debug stmt.  */
2014 	  else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
2015 	    {
2016 	      gimple *dbg = gimple_build_debug_bind (label,
2017 						     integer_zero_node,
2018 						     stmt);
2019 	      gimple_debug_bind_reset_value (dbg);
2020 	      gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2021 	    }
2022 
2023 	  lp_nr = EH_LANDING_PAD_NR (label);
2024 	  if (lp_nr)
2025 	    {
2026 	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2027 	      lp->post_landing_pad = NULL;
2028 	    }
2029 	}
2030       else
2031 	{
2032 	  gimple_set_bb (stmt, a);
2033 	  gsi_next (&gsi);
2034 	}
2035     }
2036 
2037   /* When merging two BBs, if their counts are different, the larger count
2038      is selected as the new bb count. This is to handle inconsistent
2039      profiles.  */
2040   if (a->loop_father == b->loop_father)
2041     {
2042       a->count = MAX (a->count, b->count);
2043       a->frequency = MAX (a->frequency, b->frequency);
2044     }
2045 
2046   /* Merge the sequences.  */
2047   last = gsi_last_bb (a);
2048   gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2049   set_bb_seq (b, NULL);
2050 
2051   if (cfgcleanup_altered_bbs)
2052     bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2053 }
2054 
2055 
2056 /* Of the two successors of BB, return the one that is not reachable
2057    through a complex edge, if there is one.  Else, return BB.  We use
2058    this in optimizations that use post-dominators for their heuristics,
2059    to catch the cases in C++ where function calls are involved.  */
2060 
2061 basic_block
2062 single_noncomplex_succ (basic_block bb)
2063 {
2064   edge e0, e1;
2065   if (EDGE_COUNT (bb->succs) != 2)
2066     return bb;
2067 
2068   e0 = EDGE_SUCC (bb, 0);
2069   e1 = EDGE_SUCC (bb, 1);
2070   if (e0->flags & EDGE_COMPLEX)
2071     return e1->dest;
2072   if (e1->flags & EDGE_COMPLEX)
2073     return e0->dest;
2074 
2075   return bb;
2076 }
2077 
2078 /* CALL is a GIMPLE_CALL.  Set the current_function_calls_* flags.  */
2079 
2080 void
2081 notice_special_calls (gcall *call)
2082 {
2083   int flags = gimple_call_flags (call);
2084 
2085   if (flags & ECF_MAY_BE_ALLOCA)
2086     cfun->calls_alloca = true;
2087   if (flags & ECF_RETURNS_TWICE)
2088     cfun->calls_setjmp = true;
2089 }
2090 
2091 
2092 /* Clear flags set by notice_special_calls.  Used by dead code removal
2093    to update the flags.  */
2094 
2095 void
2096 clear_special_calls (void)
2097 {
2098   cfun->calls_alloca = false;
2099   cfun->calls_setjmp = false;
2100 }
2101 
2102 /* Remove PHI nodes associated with basic block BB and all edges out of BB.  */
2103 
2104 static void
2105 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2106 {
2107   /* Since this block is no longer reachable, we can just delete all
2108      of its PHI nodes.  */
2109   remove_phi_nodes (bb);
2110 
2111   /* Remove edges to BB's successors.  */
2112   while (EDGE_COUNT (bb->succs) > 0)
2113     remove_edge (EDGE_SUCC (bb, 0));
2114 }
2115 
2116 
2117 /* Remove statements of basic block BB.  */
2118 
2119 static void
2120 remove_bb (basic_block bb)
2121 {
2122   gimple_stmt_iterator i;
2123 
2124   if (dump_file)
2125     {
2126       fprintf (dump_file, "Removing basic block %d\n", bb->index);
2127       if (dump_flags & TDF_DETAILS)
2128 	{
2129 	  dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2130 	  fprintf (dump_file, "\n");
2131 	}
2132     }
2133 
2134   if (current_loops)
2135     {
2136       struct loop *loop = bb->loop_father;
2137 
2138       /* If a loop gets removed, clean up the information associated
2139 	 with it.  */
2140       if (loop->latch == bb
2141 	  || loop->header == bb)
2142 	free_numbers_of_iterations_estimates_loop (loop);
2143     }
2144 
2145   /* Remove all the instructions in the block.  */
2146   if (bb_seq (bb) != NULL)
2147     {
2148       /* Walk backwards so as to get a chance to substitute all
2149 	 released DEFs into debug stmts.  See
2150 	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2151 	 details.  */
2152       for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2153 	{
2154 	  gimple *stmt = gsi_stmt (i);
2155 	  glabel *label_stmt = dyn_cast <glabel *> (stmt);
2156 	  if (label_stmt
2157 	      && (FORCED_LABEL (gimple_label_label (label_stmt))
2158 		  || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2159 	    {
2160 	      basic_block new_bb;
2161 	      gimple_stmt_iterator new_gsi;
2162 
2163 	      /* A non-reachable non-local label may still be referenced.
2164 		 But it no longer needs to carry the extra semantics of
2165 		 non-locality.  */
2166 	      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2167 		{
2168 		  DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2169 		  FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2170 		}
2171 
2172 	      new_bb = bb->prev_bb;
2173 	      new_gsi = gsi_start_bb (new_bb);
2174 	      gsi_remove (&i, false);
2175 	      gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2176 	    }
2177 	  else
2178 	    {
2179 	      /* Release SSA definitions.  */
2180 	      release_defs (stmt);
2181 	      gsi_remove (&i, true);
2182 	    }
2183 
2184 	  if (gsi_end_p (i))
2185 	    i = gsi_last_bb (bb);
2186 	  else
2187 	    gsi_prev (&i);
2188 	}
2189     }
2190 
2191   remove_phi_nodes_and_edges_for_unreachable_block (bb);
2192   bb->il.gimple.seq = NULL;
2193   bb->il.gimple.phi_nodes = NULL;
2194 }
2195 
2196 
2197 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2198    predicate VAL, return the edge that will be taken out of the block.
2199    If VAL does not match a unique edge, NULL is returned.  */
2200 
2201 edge
2202 find_taken_edge (basic_block bb, tree val)
2203 {
2204   gimple *stmt;
2205 
2206   stmt = last_stmt (bb);
2207 
2208   gcc_assert (stmt);
2209   gcc_assert (is_ctrl_stmt (stmt));
2210 
2211   if (val == NULL)
2212     return NULL;
2213 
2214   if (!is_gimple_min_invariant (val))
2215     return NULL;
2216 
2217   if (gimple_code (stmt) == GIMPLE_COND)
2218     return find_taken_edge_cond_expr (bb, val);
2219 
2220   if (gimple_code (stmt) == GIMPLE_SWITCH)
2221     return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2222 
2223   if (computed_goto_p (stmt))
2224     {
2225       /* Only optimize if the argument is a label; if the argument is
2226 	 not a label then we cannot construct a proper CFG.
2227 
2228          It may be the case that we only need to allow the LABEL_REF to
2229          appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2230          appear inside a LABEL_EXPR just to be safe.  */
2231       if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2232 	  && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2233 	return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2234       return NULL;
2235     }
2236 
2237   gcc_unreachable ();
2238 }
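
/* A small usage sketch, with BB assumed to end in a GIMPLE_COND whose
   predicate a client has folded to a constant:

     edge taken = find_taken_edge (bb, boolean_false_node);

   yields the false edge; a NULL result means VAL does not select a
   unique successor (e.g. it is not a usable constant).  */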
2239 
2240 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2241    statement, determine which of the outgoing edges will be taken out of the
2242    block.  Return NULL if either edge may be taken.  */
2243 
2244 static edge
2245 find_taken_edge_computed_goto (basic_block bb, tree val)
2246 {
2247   basic_block dest;
2248   edge e = NULL;
2249 
2250   dest = label_to_block (val);
2251   if (dest)
2252     {
2253       e = find_edge (bb, dest);
2254       gcc_assert (e != NULL);
2255     }
2256 
2257   return e;
2258 }
2259 
2260 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2261    statement, determine which of the two edges will be taken out of the
2262    block.  Return NULL if either edge may be taken.  */
2263 
2264 static edge
2265 find_taken_edge_cond_expr (basic_block bb, tree val)
2266 {
2267   edge true_edge, false_edge;
2268 
2269   extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2270 
2271   gcc_assert (TREE_CODE (val) == INTEGER_CST);
2272   return (integer_zerop (val) ? false_edge : true_edge);
2273 }
2274 
2275 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2276    statement, determine which edge will be taken out of the block.  Return
2277    NULL if any edge may be taken.  */
2278 
2279 static edge
2280 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2281 			     tree val)
2282 {
2283   basic_block dest_bb;
2284   edge e;
2285   tree taken_case;
2286 
2287   taken_case = find_case_label_for_value (switch_stmt, val);
2288   dest_bb = label_to_block (CASE_LABEL (taken_case));
2289 
2290   e = find_edge (bb, dest_bb);
2291   gcc_assert (e);
2292   return e;
2293 }
2294 
2295 
2296 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2297    We can make optimal use here of the fact that the case labels are
2298    sorted: We can do a binary search for a case matching VAL.  */
2299 
2300 static tree
2301 find_case_label_for_value (gswitch *switch_stmt, tree val)
2302 {
2303   size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2304   tree default_case = gimple_switch_default_label (switch_stmt);
2305 
2306   for (low = 0, high = n; high - low > 1; )
2307     {
2308       size_t i = (high + low) / 2;
2309       tree t = gimple_switch_label (switch_stmt, i);
2310       int cmp;
2311 
2312       /* Cache the result of comparing CASE_LOW and val.  */
2313       cmp = tree_int_cst_compare (CASE_LOW (t), val);
2314 
2315       if (cmp > 0)
2316 	high = i;
2317       else
2318 	low = i;
2319 
2320       if (CASE_HIGH (t) == NULL)
2321 	{
2322 	  /* A single-valued case label.  */
2323 	  if (cmp == 0)
2324 	    return t;
2325 	}
2326       else
2327 	{
2328 	  /* A case range.  We can only handle integer ranges.  */
2329 	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2330 	    return t;
2331 	}
2332     }
2333 
2334   return default_case;
2335 }
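
/* A worked example (illustrative only): with the sorted label vector

     index 0: default:   index 1: case 1 ... 3:   index 2: case 7:

   a query for VAL == 2 probes index 1 and the range test
   1 <= 2 <= 3 succeeds; a query for VAL == 5 fails that range test,
   then probes index 2, exhausts the interval and falls back to the
   default case.  */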
2336 
2337 
2338 /* Dump a basic block on stderr.  */
2339 
2340 void
2341 gimple_debug_bb (basic_block bb)
2342 {
2343   dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2344 }
2345 
2346 
2347 /* Dump basic block with index N on stderr.  */
2348 
2349 basic_block
2350 gimple_debug_bb_n (int n)
2351 {
2352   gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2353   return BASIC_BLOCK_FOR_FN (cfun, n);
2354 }
2355 
2356 
2357 /* Dump the CFG on stderr.
2358 
2359    FLAGS are the same as those used by the tree dumping functions
2360    (see TDF_* in dumpfile.h).  */
2361 
2362 void
2363 gimple_debug_cfg (int flags)
2364 {
2365   gimple_dump_cfg (stderr, flags);
2366 }
2367 
2368 
2369 /* Dump the program showing basic block boundaries on the given FILE.
2370 
2371    FLAGS are the same as those used by the tree dumping functions (see
2372    TDF_* in dumpfile.h).  */
2373 
2374 void
2375 gimple_dump_cfg (FILE *file, int flags)
2376 {
2377   if (flags & TDF_DETAILS)
2378     {
2379       dump_function_header (file, current_function_decl, flags);
2380       fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2381 	       n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2382 	       last_basic_block_for_fn (cfun));
2383 
2384       brief_dump_cfg (file, flags | TDF_COMMENT);
2385       fprintf (file, "\n");
2386     }
2387 
2388   if (flags & TDF_STATS)
2389     dump_cfg_stats (file);
2390 
2391   dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2392 }
2393 
2394 
2395 /* Dump CFG statistics on FILE.  */
2396 
2397 void
2398 dump_cfg_stats (FILE *file)
2399 {
2400   static long max_num_merged_labels = 0;
2401   unsigned long size, total = 0;
2402   long num_edges;
2403   basic_block bb;
2404   const char * const fmt_str   = "%-30s%-13s%12s\n";
2405   const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2406   const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2407   const char * const fmt_str_3 = "%-43s%11lu%c\n";
2408   const char *funcname = current_function_name ();
2409 
2410   fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2411 
2412   fprintf (file, "---------------------------------------------------------\n");
2413   fprintf (file, fmt_str, "", "  Number of  ", "Memory");
2414   fprintf (file, fmt_str, "", "  instances  ", "used ");
2415   fprintf (file, "---------------------------------------------------------\n");
2416 
2417   size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2418   total += size;
2419   fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2420 	   SCALE (size), LABEL (size));
2421 
2422   num_edges = 0;
2423   FOR_EACH_BB_FN (bb, cfun)
2424     num_edges += EDGE_COUNT (bb->succs);
2425   size = num_edges * sizeof (struct edge_def);
2426   total += size;
2427   fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2428 
2429   fprintf (file, "---------------------------------------------------------\n");
2430   fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2431 	   LABEL (total));
2432   fprintf (file, "---------------------------------------------------------\n");
2433   fprintf (file, "\n");
2434 
2435   if (cfg_stats.num_merged_labels > max_num_merged_labels)
2436     max_num_merged_labels = cfg_stats.num_merged_labels;
2437 
2438   fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2439 	   cfg_stats.num_merged_labels, max_num_merged_labels);
2440 
2441   fprintf (file, "\n");
2442 }
2443 
2444 
2445 /* Dump CFG statistics on stderr.  Keep extern so that it's always
2446    linked in the final executable.  */
2447 
2448 DEBUG_FUNCTION void
2449 debug_cfg_stats (void)
2450 {
2451   dump_cfg_stats (stderr);
2452 }
2453 
2454 /*---------------------------------------------------------------------------
2455 			     Miscellaneous helpers
2456 ---------------------------------------------------------------------------*/
2457 
2458 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2459    flow.  Transfers of control flow associated with EH are excluded.  */
2460 
2461 static bool
2462 call_can_make_abnormal_goto (gimple *t)
2463 {
2464   /* If the function has no non-local labels, then a call cannot make an
2465      abnormal transfer of control.  */
2466   if (!cfun->has_nonlocal_label
2467       && !cfun->calls_setjmp)
2468     return false;
2469 
2470   /* Likewise if the call has no side effects.  */
2471   if (!gimple_has_side_effects (t))
2472     return false;
2473 
2474   /* Likewise if the called function is leaf.  */
2475   if (gimple_call_flags (t) & ECF_LEAF)
2476     return false;
2477 
2478   return true;
2479 }
2480 
2481 
2482 /* Return true if T can make an abnormal transfer of control flow.
2483    Transfers of control flow associated with EH are excluded.  */
2484 
2485 bool
2486 stmt_can_make_abnormal_goto (gimple *t)
2487 {
2488   if (computed_goto_p (t))
2489     return true;
2490   if (is_gimple_call (t))
2491     return call_can_make_abnormal_goto (t);
2492   return false;
2493 }
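
/* For instance (illustrative only): in a function that calls setjmp,
   any call with side effects that is not ECF_LEAF answers true here
   and therefore receives an abnormal edge, while a computed goto
   (a GIMPLE_GOTO whose destination is not a LABEL_DECL) answers true
   unconditionally.  */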
2494 
2495 
2496 /* Return true if T represents a stmt that always transfers control.  */
2497 
2498 bool
2499 is_ctrl_stmt (gimple *t)
2500 {
2501   switch (gimple_code (t))
2502     {
2503     case GIMPLE_COND:
2504     case GIMPLE_SWITCH:
2505     case GIMPLE_GOTO:
2506     case GIMPLE_RETURN:
2507     case GIMPLE_RESX:
2508       return true;
2509     default:
2510       return false;
2511     }
2512 }
2513 
2514 
2515 /* Return true if T is a statement that may alter the flow of control
2516    (e.g., a call to a non-returning function).  */
2517 
2518 bool
2519 is_ctrl_altering_stmt (gimple *t)
2520 {
2521   gcc_assert (t);
2522 
2523   switch (gimple_code (t))
2524     {
2525     case GIMPLE_CALL:
2526       /* The per-stmt call flag indicates whether the call could alter
2527 	 control flow.  */
2528       if (gimple_call_ctrl_altering_p (t))
2529 	return true;
2530       break;
2531 
2532     case GIMPLE_EH_DISPATCH:
2533       /* EH_DISPATCH branches to the individual catch handlers at
2534 	 this level of a try or allowed-exceptions region.  It can
2535 	 fallthru to the next statement as well.  */
2536       return true;
2537 
2538     case GIMPLE_ASM:
2539       if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2540 	return true;
2541       break;
2542 
2543     CASE_GIMPLE_OMP:
2544       /* OpenMP directives alter control flow.  */
2545       return true;
2546 
2547     case GIMPLE_TRANSACTION:
2548       /* A transaction start alters control flow.  */
2549       return true;
2550 
2551     default:
2552       break;
2553     }
2554 
2555   /* If a statement can throw, it alters control flow.  */
2556   return stmt_can_throw_internal (t);
2557 }
2558 
2559 
2560 /* Return true if T is a simple local goto.  */
2561 
2562 bool
2563 simple_goto_p (gimple *t)
2564 {
2565   return (gimple_code (t) == GIMPLE_GOTO
2566 	  && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2567 }
2568 
2569 
2570 /* Return true if STMT should start a new basic block.  PREV_STMT is
2571    the statement preceding STMT.  It is used when STMT is a label or a
2572    case label.  Labels should only start a new basic block if their
2573    previous statement wasn't a label.  Otherwise, sequences of labels
2574    would generate unnecessary basic blocks that contain only a single
2575    label.  */
2576 
2577 static inline bool
2578 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2579 {
2580   if (stmt == NULL)
2581     return false;
2582 
2583   /* Labels start a new basic block only if the preceding statement
2584      wasn't a label of the same type.  This prevents the creation of
2585      consecutive blocks that have nothing but a single label.  */
2586   if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2587     {
2588       /* Nonlocal and computed GOTO targets always start a new block.  */
2589       if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2590 	  || FORCED_LABEL (gimple_label_label (label_stmt)))
2591 	return true;
2592 
2593       if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2594 	{
2595 	  if (DECL_NONLOCAL (gimple_label_label (
2596 			       as_a <glabel *> (prev_stmt))))
2597 	    return true;
2598 
2599 	  cfg_stats.num_merged_labels++;
2600 	  return false;
2601 	}
2602       else
2603 	return true;
2604     }
2605   else if (gimple_code (stmt) == GIMPLE_CALL)
2606     {
2607       if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2608 	/* setjmp acts similarly to a nonlocal GOTO target and thus should
2609 	   start a new block.  */
2610 	return true;
2611       if (gimple_call_internal_p (stmt, IFN_PHI)
2612 	  && prev_stmt
2613 	  && gimple_code (prev_stmt) != GIMPLE_LABEL
2614 	  && (gimple_code (prev_stmt) != GIMPLE_CALL
2615 	      || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2616 	/* PHI nodes start a new block unless preceded by a label
2617 	   or another PHI.  */
2618 	return true;
2619     }
2620 
2621   return false;
2622 }
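
/* For example (illustrative only), in a statement sequence such as

     a: b: c: x_1 = 1;

   only the label 'a' starts a new basic block: 'b' and 'c' directly
   follow another label, so they are merged into the same block and
   counted in cfg_stats.num_merged_labels.  */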
2623 
2624 
2625 /* Return true if T should end a basic block.  */
2626 
2627 bool
2628 stmt_ends_bb_p (gimple *t)
2629 {
2630   return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2631 }
2632 
2633 /* Remove block annotations and other data structures.  */
2634 
2635 void
2636 delete_tree_cfg_annotations (struct function *fn)
2637 {
2638   vec_free (label_to_block_map_for_fn (fn));
2639 }
2640 
2641 /* Return the virtual phi in BB.  */
2642 
2643 gphi *
2644 get_virtual_phi (basic_block bb)
2645 {
2646   for (gphi_iterator gsi = gsi_start_phis (bb);
2647        !gsi_end_p (gsi);
2648        gsi_next (&gsi))
2649     {
2650       gphi *phi = gsi.phi ();
2651 
2652       if (virtual_operand_p (PHI_RESULT (phi)))
2653 	return phi;
2654     }
2655 
2656   return NULL;
2657 }
2658 
2659 /* Return the first statement in basic block BB.  */
2660 
2661 gimple *
2662 first_stmt (basic_block bb)
2663 {
2664   gimple_stmt_iterator i = gsi_start_bb (bb);
2665   gimple *stmt = NULL;
2666 
2667   while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2668     {
2669       gsi_next (&i);
2670       stmt = NULL;
2671     }
2672   return stmt;
2673 }
2674 
2675 /* Return the first non-label statement in basic block BB.  */
2676 
2677 static gimple *
2678 first_non_label_stmt (basic_block bb)
2679 {
2680   gimple_stmt_iterator i = gsi_start_bb (bb);
2681   while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2682     gsi_next (&i);
2683   return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2684 }
2685 
2686 /* Return the last statement in basic block BB.  */
2687 
2688 gimple *
2689 last_stmt (basic_block bb)
2690 {
2691   gimple_stmt_iterator i = gsi_last_bb (bb);
2692   gimple *stmt = NULL;
2693 
2694   while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2695     {
2696       gsi_prev (&i);
2697       stmt = NULL;
2698     }
2699   return stmt;
2700 }
2701 
2702 /* Return the last statement of an otherwise empty block.  Return NULL
2703    if the block is totally empty, or if it contains more than one
2704    statement.  */
2705 
2706 gimple *
2707 last_and_only_stmt (basic_block bb)
2708 {
2709   gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2710   gimple *last, *prev;
2711 
2712   if (gsi_end_p (i))
2713     return NULL;
2714 
2715   last = gsi_stmt (i);
2716   gsi_prev_nondebug (&i);
2717   if (gsi_end_p (i))
2718     return last;
2719 
2720   /* Empty statements should no longer appear in the instruction stream.
2721      Everything that might have appeared before should be deleted by
2722      remove_useless_stmts, and the optimizers should just gsi_remove
2723      instead of smashing with build_empty_stmt.
2724 
2725      Thus the only thing that should appear here in a block containing
2726      one executable statement is a label.  */
2727   prev = gsi_stmt (i);
2728   if (gimple_code (prev) == GIMPLE_LABEL)
2729     return last;
2730   else
2731     return NULL;
2732 }
2733 
2734 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE.  */
2735 
2736 static void
2737 reinstall_phi_args (edge new_edge, edge old_edge)
2738 {
2739   edge_var_map *vm;
2740   int i;
2741   gphi_iterator phis;
2742 
2743   vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2744   if (!v)
2745     return;
2746 
2747   for (i = 0, phis = gsi_start_phis (new_edge->dest);
2748        v->iterate (i, &vm) && !gsi_end_p (phis);
2749        i++, gsi_next (&phis))
2750     {
2751       gphi *phi = phis.phi ();
2752       tree result = redirect_edge_var_map_result (vm);
2753       tree arg = redirect_edge_var_map_def (vm);
2754 
2755       gcc_assert (result == gimple_phi_result (phi));
2756 
2757       add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2758     }
2759 
2760   redirect_edge_var_map_clear (old_edge);
2761 }
2762 
2763 /* Returns the basic block after which the new basic block created
2764    by splitting edge EDGE_IN should be placed.  Tries to keep the new block
2765    near its "logical" location.  This is of most help to humans looking
2766    at debugging dumps.  */
2767 
2768 basic_block
2769 split_edge_bb_loc (edge edge_in)
2770 {
2771   basic_block dest = edge_in->dest;
2772   basic_block dest_prev = dest->prev_bb;
2773 
2774   if (dest_prev)
2775     {
2776       edge e = find_edge (dest_prev, dest);
2777       if (e && !(e->flags & EDGE_COMPLEX))
2778 	return edge_in->src;
2779     }
2780   return dest_prev;
2781 }
2782 
2783 /* Split a (typically critical) edge EDGE_IN.  Return the new block.
2784    Abort on abnormal edges.  */
2785 
2786 static basic_block
2787 gimple_split_edge (edge edge_in)
2788 {
2789   basic_block new_bb, after_bb, dest;
2790   edge new_edge, e;
2791 
2792   /* Abnormal edges cannot be split.  */
2793   gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2794 
2795   dest = edge_in->dest;
2796 
2797   after_bb = split_edge_bb_loc (edge_in);
2798 
2799   new_bb = create_empty_bb (after_bb);
2800   new_bb->frequency = EDGE_FREQUENCY (edge_in);
2801   new_bb->count = edge_in->count;
2802   new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2803   new_edge->probability = REG_BR_PROB_BASE;
2804   new_edge->count = edge_in->count;
2805 
2806   e = redirect_edge_and_branch (edge_in, new_bb);
2807   gcc_assert (e == edge_in);
2808   reinstall_phi_args (new_edge, e);
2809 
2810   return new_bb;
2811 }
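
/* A minimal usage sketch: passes normally reach this function through
   the split_edge hook from cfghooks.h, e.g.

     if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
       bb = split_edge (e);

   where E is an assumed edge; the new block inherits the count and
   frequency of E and falls through to the old destination.  */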
2812 
2813 
2814 /* Verify properties of the address expression T with base object BASE.  */
2815 
2816 static tree
2817 verify_address (tree t, tree base)
2818 {
2819   bool old_constant;
2820   bool old_side_effects;
2821   bool new_constant;
2822   bool new_side_effects;
2823 
2824   old_constant = TREE_CONSTANT (t);
2825   old_side_effects = TREE_SIDE_EFFECTS (t);
2826 
2827   recompute_tree_invariant_for_addr_expr (t);
2828   new_side_effects = TREE_SIDE_EFFECTS (t);
2829   new_constant = TREE_CONSTANT (t);
2830 
2831   if (old_constant != new_constant)
2832     {
2833       error ("constant not recomputed when ADDR_EXPR changed");
2834       return t;
2835     }
2836   if (old_side_effects != new_side_effects)
2837     {
2838       error ("side effects not recomputed when ADDR_EXPR changed");
2839       return t;
2840     }
2841 
2842   if (!(VAR_P (base)
2843 	|| TREE_CODE (base) == PARM_DECL
2844 	|| TREE_CODE (base) == RESULT_DECL))
2845     return NULL_TREE;
2846 
2847   if (DECL_GIMPLE_REG_P (base))
2848     {
2849       error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2850       return base;
2851     }
2852 
2853   return NULL_TREE;
2854 }
2855 
2856 /* Callback for walk_tree; check that all elements with address taken are
2857    properly noticed as such.  The DATA is an int* that is 1 if TP was seen
2858    inside a PHI node.  */
2859 
2860 static tree
2861 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2862 {
2863   tree t = *tp, x;
2864 
2865   if (TYPE_P (t))
2866     *walk_subtrees = 0;
2867 
2868   /* Check operand N for being valid GIMPLE and give error MSG if not.  */
2869 #define CHECK_OP(N, MSG) \
2870   do { if (!is_gimple_val (TREE_OPERAND (t, N)))		\
2871        { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2872 
2873   switch (TREE_CODE (t))
2874     {
2875     case SSA_NAME:
2876       if (SSA_NAME_IN_FREE_LIST (t))
2877 	{
2878 	  error ("SSA name in freelist but still referenced");
2879 	  return *tp;
2880 	}
2881       break;
2882 
2883     case PARM_DECL:
2884     case VAR_DECL:
2885     case RESULT_DECL:
2886       {
2887 	tree context = decl_function_context (t);
2888 	if (context != cfun->decl
2889 	    && !SCOPE_FILE_SCOPE_P (context)
2890 	    && !TREE_STATIC (t)
2891 	    && !DECL_EXTERNAL (t))
2892 	  {
2893 	    error ("Local declaration from a different function");
2894 	    return t;
2895 	  }
2896       }
2897       break;
2898 
2899     case INDIRECT_REF:
2900       error ("INDIRECT_REF in gimple IL");
2901       return t;
2902 
2903     case MEM_REF:
2904       x = TREE_OPERAND (t, 0);
2905       if (!POINTER_TYPE_P (TREE_TYPE (x))
2906 	  || !is_gimple_mem_ref_addr (x))
2907 	{
2908 	  error ("invalid first operand of MEM_REF");
2909 	  return x;
2910 	}
2911       if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2912 	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2913 	{
2914 	  error ("invalid offset operand of MEM_REF");
2915 	  return TREE_OPERAND (t, 1);
2916 	}
2917       if (TREE_CODE (x) == ADDR_EXPR)
2918 	{
2919 	  tree va = verify_address (x, TREE_OPERAND (x, 0));
2920 	  if (va)
2921 	    return va;
2922 	  x = TREE_OPERAND (x, 0);
2923 	}
2924       walk_tree (&x, verify_expr, data, NULL);
2925       *walk_subtrees = 0;
2926       break;
2927 
2928     case ASSERT_EXPR:
2929       x = fold (ASSERT_EXPR_COND (t));
2930       if (x == boolean_false_node)
2931 	{
2932 	  error ("ASSERT_EXPR with an always-false condition");
2933 	  return *tp;
2934 	}
2935       break;
2936 
2937     case MODIFY_EXPR:
2938       error ("MODIFY_EXPR not expected while having tuples");
2939       return *tp;
2940 
2941     case ADDR_EXPR:
2942       {
2943 	tree tem;
2944 
2945 	gcc_assert (is_gimple_address (t));
2946 
2947 	/* Skip any references (they will be checked when we recurse down the
2948 	   tree) and ensure that any variable used as a prefix is marked
2949 	   addressable.  */
2950 	for (x = TREE_OPERAND (t, 0);
2951 	     handled_component_p (x);
2952 	     x = TREE_OPERAND (x, 0))
2953 	  ;
2954 
2955 	if ((tem = verify_address (t, x)))
2956 	  return tem;
2957 
2958 	if (!(VAR_P (x)
2959 	      || TREE_CODE (x) == PARM_DECL
2960 	      || TREE_CODE (x) == RESULT_DECL))
2961 	  return NULL;
2962 
2963 	if (!TREE_ADDRESSABLE (x))
2964 	  {
2965 	    error ("address taken, but ADDRESSABLE bit not set");
2966 	    return x;
2967 	  }
2968 
2969 	break;
2970       }
2971 
2972     case COND_EXPR:
2973       x = COND_EXPR_COND (t);
2974       if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2975 	{
2976 	  error ("non-integral used in condition");
2977 	  return x;
2978 	}
2979       if (!is_gimple_condexpr (x))
2980         {
2981 	  error ("invalid conditional operand");
2982 	  return x;
2983 	}
2984       break;
2985 
2986     case NON_LVALUE_EXPR:
2987     case TRUTH_NOT_EXPR:
2988       gcc_unreachable ();
2989 
2990     CASE_CONVERT:
2991     case FIX_TRUNC_EXPR:
2992     case FLOAT_EXPR:
2993     case NEGATE_EXPR:
2994     case ABS_EXPR:
2995     case BIT_NOT_EXPR:
2996       CHECK_OP (0, "invalid operand to unary operator");
2997       break;
2998 
2999     case REALPART_EXPR:
3000     case IMAGPART_EXPR:
3001     case BIT_FIELD_REF:
3002       if (!is_gimple_reg_type (TREE_TYPE (t)))
3003 	{
3004 	  error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3005 	  return t;
3006 	}
3007 
3008       if (TREE_CODE (t) == BIT_FIELD_REF)
3009 	{
3010 	  tree t0 = TREE_OPERAND (t, 0);
3011 	  tree t1 = TREE_OPERAND (t, 1);
3012 	  tree t2 = TREE_OPERAND (t, 2);
3013 	  if (!tree_fits_uhwi_p (t1)
3014 	      || !tree_fits_uhwi_p (t2))
3015 	    {
3016 	      error ("invalid position or size operand to BIT_FIELD_REF");
3017 	      return t;
3018 	    }
3019 	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
3020 	      && (TYPE_PRECISION (TREE_TYPE (t))
3021 		  != tree_to_uhwi (t1)))
3022 	    {
3023 	      error ("integral result type precision does not match "
3024 		     "field size of BIT_FIELD_REF");
3025 	      return t;
3026 	    }
3027 	  else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
3028 		   && TYPE_MODE (TREE_TYPE (t)) != BLKmode
3029 		   && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t)))
3030 		       != tree_to_uhwi (t1)))
3031 	    {
3032 	      error ("mode size of non-integral result does not "
3033 		     "match field size of BIT_FIELD_REF");
3034 	      return t;
3035 	    }
3036 	  if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
3037 	      && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
3038 		  > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
3039 	    {
3040 	      error ("position plus size exceeds size of referenced object in "
3041 		     "BIT_FIELD_REF");
3042 	      return t;
3043 	    }
3044 	}
3045       t = TREE_OPERAND (t, 0);
3046 
3047       /* Fall-through.  */
3048     case COMPONENT_REF:
3049     case ARRAY_REF:
3050     case ARRAY_RANGE_REF:
3051     case VIEW_CONVERT_EXPR:
3052       /* We have a nest of references.  Verify that each of the operands
3053 	 that determine where to reference is either a constant or a variable,
3054 	 verify that the base is valid, and then show we've already checked
3055 	 the subtrees.  */
3056       while (handled_component_p (t))
3057 	{
3058 	  if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3059 	    CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3060 	  else if (TREE_CODE (t) == ARRAY_REF
3061 		   || TREE_CODE (t) == ARRAY_RANGE_REF)
3062 	    {
3063 	      CHECK_OP (1, "invalid array index");
3064 	      if (TREE_OPERAND (t, 2))
3065 		CHECK_OP (2, "invalid array lower bound");
3066 	      if (TREE_OPERAND (t, 3))
3067 		CHECK_OP (3, "invalid array stride");
3068 	    }
3069 	  else if (TREE_CODE (t) == BIT_FIELD_REF
3070 		   || TREE_CODE (t) == REALPART_EXPR
3071 		   || TREE_CODE (t) == IMAGPART_EXPR)
3072 	    {
3073 	      error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3074 		     "REALPART_EXPR");
3075 	      return t;
3076 	    }
3077 
3078 	  t = TREE_OPERAND (t, 0);
3079 	}
3080 
3081       if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3082 	{
3083 	  error ("invalid reference prefix");
3084 	  return t;
3085 	}
3086       walk_tree (&t, verify_expr, data, NULL);
3087       *walk_subtrees = 0;
3088       break;
3089     case PLUS_EXPR:
3090     case MINUS_EXPR:
3091       /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer
3092 	 arithmetic should be done using POINTER_PLUS_EXPR.  */
3093       if (POINTER_TYPE_P (TREE_TYPE (t)))
3094 	{
3095 	  error ("invalid operand to plus/minus, type is a pointer");
3096 	  return t;
3097 	}
3098       CHECK_OP (0, "invalid operand to binary operator");
3099       CHECK_OP (1, "invalid operand to binary operator");
3100       break;
3101 
3102     case POINTER_PLUS_EXPR:
3103       /* Check to make sure the first operand is a pointer or reference type. */
3104       if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3105 	{
3106 	  error ("invalid operand to pointer plus, first operand is not a pointer");
3107 	  return t;
3108 	}
3109       /* Check to make sure the second operand is a ptrofftype.  */
3110       if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3111 	{
3112 	  error ("invalid operand to pointer plus, second operand is not an "
3113 		 "integer type of appropriate width");
3114 	  return t;
3115 	}
3116       /* FALLTHROUGH */
3117     case LT_EXPR:
3118     case LE_EXPR:
3119     case GT_EXPR:
3120     case GE_EXPR:
3121     case EQ_EXPR:
3122     case NE_EXPR:
3123     case UNORDERED_EXPR:
3124     case ORDERED_EXPR:
3125     case UNLT_EXPR:
3126     case UNLE_EXPR:
3127     case UNGT_EXPR:
3128     case UNGE_EXPR:
3129     case UNEQ_EXPR:
3130     case LTGT_EXPR:
3131     case MULT_EXPR:
3132     case TRUNC_DIV_EXPR:
3133     case CEIL_DIV_EXPR:
3134     case FLOOR_DIV_EXPR:
3135     case ROUND_DIV_EXPR:
3136     case TRUNC_MOD_EXPR:
3137     case CEIL_MOD_EXPR:
3138     case FLOOR_MOD_EXPR:
3139     case ROUND_MOD_EXPR:
3140     case RDIV_EXPR:
3141     case EXACT_DIV_EXPR:
3142     case MIN_EXPR:
3143     case MAX_EXPR:
3144     case LSHIFT_EXPR:
3145     case RSHIFT_EXPR:
3146     case LROTATE_EXPR:
3147     case RROTATE_EXPR:
3148     case BIT_IOR_EXPR:
3149     case BIT_XOR_EXPR:
3150     case BIT_AND_EXPR:
3151       CHECK_OP (0, "invalid operand to binary operator");
3152       CHECK_OP (1, "invalid operand to binary operator");
3153       break;
3154 
3155     case CONSTRUCTOR:
3156       if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3157 	*walk_subtrees = 0;
3158       break;
3159 
3160     case CASE_LABEL_EXPR:
3161       if (CASE_CHAIN (t))
3162 	{
3163 	  error ("invalid CASE_CHAIN");
3164 	  return t;
3165 	}
3166       break;
3167 
3168     default:
3169       break;
3170     }
3171   return NULL;
3172 
3173 #undef CHECK_OP
3174 }
3175 
3176 
3177 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3178    Returns true if there is an error, otherwise false.  */
3179 
3180 static bool
3181 verify_types_in_gimple_min_lval (tree expr)
3182 {
3183   tree op;
3184 
3185   if (is_gimple_id (expr))
3186     return false;
3187 
3188   if (TREE_CODE (expr) != TARGET_MEM_REF
3189       && TREE_CODE (expr) != MEM_REF)
3190     {
3191       error ("invalid expression for min lvalue");
3192       return true;
3193     }
3194 
3195   /* TARGET_MEM_REFs are strange beasts.  */
3196   if (TREE_CODE (expr) == TARGET_MEM_REF)
3197     return false;
3198 
3199   op = TREE_OPERAND (expr, 0);
3200   if (!is_gimple_val (op))
3201     {
3202       error ("invalid operand in indirect reference");
3203       debug_generic_stmt (op);
3204       return true;
3205     }
3206   /* Memory references now generally can involve a value conversion.  */
3207 
3208   return false;
3209 }
3210 
3211 /* Verify if EXPR is a valid GIMPLE reference expression.  If
3212    REQUIRE_LVALUE is true verifies it is an lvalue.  Returns true
3213    if there is an error, otherwise false.  */
3214 
3215 static bool
3216 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3217 {
3218   while (handled_component_p (expr))
3219     {
3220       tree op = TREE_OPERAND (expr, 0);
3221 
3222       if (TREE_CODE (expr) == ARRAY_REF
3223 	  || TREE_CODE (expr) == ARRAY_RANGE_REF)
3224 	{
3225 	  if (!is_gimple_val (TREE_OPERAND (expr, 1))
3226 	      || (TREE_OPERAND (expr, 2)
3227 		  && !is_gimple_val (TREE_OPERAND (expr, 2)))
3228 	      || (TREE_OPERAND (expr, 3)
3229 		  && !is_gimple_val (TREE_OPERAND (expr, 3))))
3230 	    {
3231 	      error ("invalid operands to array reference");
3232 	      debug_generic_stmt (expr);
3233 	      return true;
3234 	    }
3235 	}
3236 
3237       /* Verify if the reference array element types are compatible.  */
3238       if (TREE_CODE (expr) == ARRAY_REF
3239 	  && !useless_type_conversion_p (TREE_TYPE (expr),
3240 					 TREE_TYPE (TREE_TYPE (op))))
3241 	{
3242 	  error ("type mismatch in array reference");
3243 	  debug_generic_stmt (TREE_TYPE (expr));
3244 	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3245 	  return true;
3246 	}
3247       if (TREE_CODE (expr) == ARRAY_RANGE_REF
3248 	  && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3249 					 TREE_TYPE (TREE_TYPE (op))))
3250 	{
3251 	  error ("type mismatch in array range reference");
3252 	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3253 	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3254 	  return true;
3255 	}
3256 
3257       if ((TREE_CODE (expr) == REALPART_EXPR
3258 	   || TREE_CODE (expr) == IMAGPART_EXPR)
3259 	  && !useless_type_conversion_p (TREE_TYPE (expr),
3260 					 TREE_TYPE (TREE_TYPE (op))))
3261 	{
3262 	  error ("type mismatch in real/imagpart reference");
3263 	  debug_generic_stmt (TREE_TYPE (expr));
3264 	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3265 	  return true;
3266 	}
3267 
3268       if (TREE_CODE (expr) == COMPONENT_REF
3269 	  && !useless_type_conversion_p (TREE_TYPE (expr),
3270 					 TREE_TYPE (TREE_OPERAND (expr, 1))))
3271 	{
3272 	  error ("type mismatch in component reference");
3273 	  debug_generic_stmt (TREE_TYPE (expr));
3274 	  debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3275 	  return true;
3276 	}
3277 
3278       if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3279 	{
3280 	  /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3281 	     that their operand is not an SSA name or an invariant when
3282 	     requiring an lvalue (this usually means there is a SRA or IPA-SRA
3283 	     bug).  Otherwise there is nothing to verify, gross mismatches at
3284 	     most invoke undefined behavior.  */
3285 	  if (require_lvalue
3286 	      && (TREE_CODE (op) == SSA_NAME
3287 		  || is_gimple_min_invariant (op)))
3288 	    {
3289 	      error ("conversion of an SSA_NAME on the left hand side");
3290 	      debug_generic_stmt (expr);
3291 	      return true;
3292 	    }
3293 	  else if (TREE_CODE (op) == SSA_NAME
3294 		   && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3295 	    {
3296 	      error ("conversion of register to a different size");
3297 	      debug_generic_stmt (expr);
3298 	      return true;
3299 	    }
3300 	  else if (!handled_component_p (op))
3301 	    return false;
3302 	}
3303 
3304       expr = op;
3305     }
3306 
3307   if (TREE_CODE (expr) == MEM_REF)
3308     {
3309       if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3310 	{
3311 	  error ("invalid address operand in MEM_REF");
3312 	  debug_generic_stmt (expr);
3313 	  return true;
3314 	}
3315       if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3316 	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3317 	{
3318 	  error ("invalid offset operand in MEM_REF");
3319 	  debug_generic_stmt (expr);
3320 	  return true;
3321 	}
3322     }
3323   else if (TREE_CODE (expr) == TARGET_MEM_REF)
3324     {
3325       if (!TMR_BASE (expr)
3326 	  || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3327 	{
3328 	  error ("invalid address operand in TARGET_MEM_REF");
3329 	  return true;
3330 	}
3331       if (!TMR_OFFSET (expr)
3332 	  || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3333 	  || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3334 	{
3335 	  error ("invalid offset operand in TARGET_MEM_REF");
3336 	  debug_generic_stmt (expr);
3337 	  return true;
3338 	}
3339     }
3340 
3341   return ((require_lvalue || !is_gimple_min_invariant (expr))
3342 	  && verify_types_in_gimple_min_lval (expr));
3343 }
3344 
3345 /* Return true if the TYPE_POINTER_TO (SRC_OBJ) list of pointer-to types
3346    is empty or contains one that is trivially convertible to DEST.  */
3347 
3348 static bool
3349 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3350 {
3351   tree src;
3352 
3353   if (!TYPE_POINTER_TO (src_obj))
3354     return true;
3355 
3356   for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3357     if (useless_type_conversion_p (dest, src))
3358       return true;
3359 
3360   return false;
3361 }
3362 
3363 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3364    from TYPE2 can be handled by FIXED_CONVERT_EXPR.  */
3365 
3366 static bool
3367 valid_fixed_convert_types_p (tree type1, tree type2)
3368 {
3369   return (FIXED_POINT_TYPE_P (type1)
3370 	  && (INTEGRAL_TYPE_P (type2)
3371 	      || SCALAR_FLOAT_TYPE_P (type2)
3372 	      || FIXED_POINT_TYPE_P (type2)));
3373 }
3374 
3375 /* Verify the contents of a GIMPLE_CALL STMT.  Returns true when there
3376    is a problem, otherwise false.  */
3377 
3378 static bool
3379 verify_gimple_call (gcall *stmt)
3380 {
3381   tree fn = gimple_call_fn (stmt);
3382   tree fntype, fndecl;
3383   unsigned i;
3384 
3385   if (gimple_call_internal_p (stmt))
3386     {
3387       if (fn)
3388 	{
3389 	  error ("gimple call has two targets");
3390 	  debug_generic_stmt (fn);
3391 	  return true;
3392 	}
3393       /* FIXME: allow a label as an arg of the internal fn PHI (GIMPLE FE).  */
3394       else if (gimple_call_internal_fn (stmt) == IFN_PHI)
3395 	{
3396 	  return false;
3397 	}
3398     }
3399   else
3400     {
3401       if (!fn)
3402 	{
3403 	  error ("gimple call has no target");
3404 	  return true;
3405 	}
3406     }
3407 
3408   if (fn && !is_gimple_call_addr (fn))
3409     {
3410       error ("invalid function in gimple call");
3411       debug_generic_stmt (fn);
3412       return true;
3413     }
3414 
3415   if (fn
3416       && (!POINTER_TYPE_P (TREE_TYPE (fn))
3417 	  || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3418 	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3419     {
3420       error ("non-function in gimple call");
3421       return true;
3422     }
3423 
3424    fndecl = gimple_call_fndecl (stmt);
3425    if (fndecl
3426        && TREE_CODE (fndecl) == FUNCTION_DECL
3427        && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3428        && !DECL_PURE_P (fndecl)
3429        && !TREE_READONLY (fndecl))
3430      {
3431        error ("invalid pure const state for function");
3432        return true;
3433      }
3434 
3435   tree lhs = gimple_call_lhs (stmt);
3436   if (lhs
3437       && (!is_gimple_lvalue (lhs)
3438 	  || verify_types_in_gimple_reference (lhs, true)))
3439     {
3440       error ("invalid LHS in gimple call");
3441       return true;
3442     }
3443 
3444   if (gimple_call_ctrl_altering_p (stmt)
3445       && gimple_call_noreturn_p (stmt)
3446       && should_remove_lhs_p (lhs))
3447     {
3448       error ("LHS in noreturn call");
3449       return true;
3450     }
3451 
3452   fntype = gimple_call_fntype (stmt);
3453   if (fntype
3454       && lhs
3455       && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3456       /* ???  At least C++ misses conversions at assignments from
3457 	 void * call results.
3458 	 ???  Java is completely off.  Especially with functions
3459 	 returning java.lang.Object.
3460 	 For now simply allow arbitrary pointer type conversions.  */
3461       && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3462 	   && POINTER_TYPE_P (TREE_TYPE (fntype))))
3463     {
3464       error ("invalid conversion in gimple call");
3465       debug_generic_stmt (TREE_TYPE (lhs));
3466       debug_generic_stmt (TREE_TYPE (fntype));
3467       return true;
3468     }
3469 
3470   if (gimple_call_chain (stmt)
3471       && !is_gimple_val (gimple_call_chain (stmt)))
3472     {
3473       error ("invalid static chain in gimple call");
3474       debug_generic_stmt (gimple_call_chain (stmt));
3475       return true;
3476     }
3477 
3478   /* If there is a static chain argument, the call should either be
3479      indirect, or the decl should have DECL_STATIC_CHAIN set.  */
3480   if (gimple_call_chain (stmt)
3481       && fndecl
3482       && !DECL_STATIC_CHAIN (fndecl))
3483     {
3484       error ("static chain with function that doesn%'t use one");
3485       return true;
3486     }
3487 
3488   if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3489     {
3490       switch (DECL_FUNCTION_CODE (fndecl))
3491 	{
3492 	case BUILT_IN_UNREACHABLE:
3493 	case BUILT_IN_TRAP:
3494 	  if (gimple_call_num_args (stmt) > 0)
3495 	    {
3496 	      /* Built-in unreachable with parameters might not be caught by
3497 		 undefined behavior sanitizer.  Front-ends do check that users
3498 		 do not call them that way, but we also produce calls to
3499 		 __builtin_unreachable internally, for example when IPA figures
3500 		 out a call cannot happen in a legal program.  In such cases,
3501 		 we must make sure arguments are stripped off.  */
3502 	      error ("__builtin_unreachable or __builtin_trap call with "
3503 		     "arguments");
3504 	      return true;
3505 	    }
3506 	  break;
3507 	default:
3508 	  break;
3509 	}
3510     }
3511 
3512   /* ???  The C frontend passes unpromoted arguments in case it
3513      didn't see a function declaration before the call.  So for now
3514      leave the call arguments mostly unverified.  Once we gimplify
3515      unit-at-a-time we have a chance to fix this.  */
3516 
3517   for (i = 0; i < gimple_call_num_args (stmt); ++i)
3518     {
3519       tree arg = gimple_call_arg (stmt, i);
3520       if ((is_gimple_reg_type (TREE_TYPE (arg))
3521 	   && !is_gimple_val (arg))
3522 	  || (!is_gimple_reg_type (TREE_TYPE (arg))
3523 	      && !is_gimple_lvalue (arg)))
3524 	{
3525 	  error ("invalid argument to gimple call");
3526 	  debug_generic_expr (arg);
3527 	  return true;
3528 	}
3529     }
3530 
3531   return false;
3532 }
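
/* Some illustrative cases the checks above reject: an internal call
   that also carries a function pointer, a noreturn ctrl-altering call
   that kept a removable LHS, and a __builtin_unreachable call with
   arguments.  A plain call whose LHS type matches the fntype return
   type passes through.  */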
3533 
3534 /* Verifies the gimple comparison with the result type TYPE and
3535    the operands OP0 and OP1, comparison code is CODE.  */
3536 
3537 static bool
3538 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3539 {
3540   tree op0_type = TREE_TYPE (op0);
3541   tree op1_type = TREE_TYPE (op1);
3542 
3543   if (!is_gimple_val (op0) || !is_gimple_val (op1))
3544     {
3545       error ("invalid operands in gimple comparison");
3546       return true;
3547     }
3548 
3549   /* For comparisons we do not have the operation's type as the
3550      effective type the comparison is carried out in.  Instead
3551      we require that either the first operand is trivially
3552      convertible into the second, or the other way around.
3553      Because we special-case pointers to void we allow
3554      comparisons of pointers with the same mode as well.  */
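  /* For example, comparing an 'int' SSA name against an 'unsigned
     int' one is rejected here because neither conversion is trivial,
     while comparing a 'void *' against a 'struct foo *' is accepted
     since both are pointers of the same mode (illustrative; modes
     are target dependent).  */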
3555   if (!useless_type_conversion_p (op0_type, op1_type)
3556       && !useless_type_conversion_p (op1_type, op0_type)
3557       && (!POINTER_TYPE_P (op0_type)
3558 	  || !POINTER_TYPE_P (op1_type)
3559 	  || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3560     {
3561       error ("mismatching comparison operand types");
3562       debug_generic_expr (op0_type);
3563       debug_generic_expr (op1_type);
3564       return true;
3565     }
3566 
3567   /* The resulting type of a comparison may be an effective boolean type.  */
3568   if (INTEGRAL_TYPE_P (type)
3569       && (TREE_CODE (type) == BOOLEAN_TYPE
3570 	  || TYPE_PRECISION (type) == 1))
3571     {
3572       if ((TREE_CODE (op0_type) == VECTOR_TYPE
3573 	   || TREE_CODE (op1_type) == VECTOR_TYPE)
3574 	  && code != EQ_EXPR && code != NE_EXPR
3575 	  && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3576 	  && !VECTOR_INTEGER_TYPE_P (op0_type))
3577 	{
3578 	  error ("unsupported operation or type for vector comparison"
3579 		 " returning a boolean");
3580 	  debug_generic_expr (op0_type);
3581 	  debug_generic_expr (op1_type);
3582 	  return true;
3583         }
3584     }
3585   /* Or a boolean vector type with the same element count
3586      as the comparison operand types.  */
3587   else if (TREE_CODE (type) == VECTOR_TYPE
3588 	   && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3589     {
3590       if (TREE_CODE (op0_type) != VECTOR_TYPE
3591 	  || TREE_CODE (op1_type) != VECTOR_TYPE)
3592         {
3593           error ("non-vector operands in vector comparison");
3594           debug_generic_expr (op0_type);
3595           debug_generic_expr (op1_type);
3596           return true;
3597         }
3598 
3599       if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type))
3600         {
3601           error ("invalid vector comparison resulting type");
3602           debug_generic_expr (type);
3603           return true;
3604         }
3605     }
3606   else
3607     {
3608       error ("bogus comparison result type");
3609       debug_generic_expr (type);
3610       return true;
3611     }
3612 
3613   return false;
3614 }
3615 
3616 /* Verify a gimple assignment statement STMT with a unary rhs.
3617    Returns true if anything is wrong.  */
3618 
3619 static bool
3620 verify_gimple_assign_unary (gassign *stmt)
3621 {
3622   enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3623   tree lhs = gimple_assign_lhs (stmt);
3624   tree lhs_type = TREE_TYPE (lhs);
3625   tree rhs1 = gimple_assign_rhs1 (stmt);
3626   tree rhs1_type = TREE_TYPE (rhs1);
3627 
3628   if (!is_gimple_reg (lhs))
3629     {
3630       error ("non-register as LHS of unary operation");
3631       return true;
3632     }
3633 
3634   if (!is_gimple_val (rhs1))
3635     {
3636       error ("invalid operand in unary operation");
3637       return true;
3638     }
3639 
3640   /* First handle conversions.  */
3641   switch (rhs_code)
3642     {
3643     CASE_CONVERT:
3644       {
3645 	/* Allow conversions from pointer type to integral type only if
3646 	   there is no sign or zero extension involved.
3647 	   For targets where the precision of ptrofftype doesn't match that
3648 	   of pointers we need to allow arbitrary conversions to ptrofftype.  */
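	/* E.g. truncating a 64-bit pointer into a 32-bit integer is
	   accepted below, while widening a 32-bit pointer into a
	   64-bit integer involves an extension and is not, unless
	   the ptrofftype exception applies (an illustrative sketch;
	   the precisions are target dependent).  */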
3649 	if ((POINTER_TYPE_P (lhs_type)
3650 	     && INTEGRAL_TYPE_P (rhs1_type))
3651 	    || (POINTER_TYPE_P (rhs1_type)
3652 		&& INTEGRAL_TYPE_P (lhs_type)
3653 		&& (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3654 		    || ptrofftype_p (sizetype))))
3655 	  return false;
3656 
3657 	/* Allow conversion from integral to offset type and vice versa.  */
3658 	if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3659 	     && INTEGRAL_TYPE_P (rhs1_type))
3660 	    || (INTEGRAL_TYPE_P (lhs_type)
3661 		&& TREE_CODE (rhs1_type) == OFFSET_TYPE))
3662 	  return false;
3663 
3664 	/* Otherwise assert we are converting between types of the
3665 	   same kind.  */
3666 	if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3667 	  {
3668 	    error ("invalid types in nop conversion");
3669 	    debug_generic_expr (lhs_type);
3670 	    debug_generic_expr (rhs1_type);
3671 	    return true;
3672 	  }
3673 
3674 	return false;
3675       }
3676 
3677     case ADDR_SPACE_CONVERT_EXPR:
3678       {
3679 	if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3680 	    || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3681 		== TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3682 	  {
3683 	    error ("invalid types in address space conversion");
3684 	    debug_generic_expr (lhs_type);
3685 	    debug_generic_expr (rhs1_type);
3686 	    return true;
3687 	  }
3688 
3689 	return false;
3690       }
3691 
3692     case FIXED_CONVERT_EXPR:
3693       {
3694 	if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3695 	    && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3696 	  {
3697 	    error ("invalid types in fixed-point conversion");
3698 	    debug_generic_expr (lhs_type);
3699 	    debug_generic_expr (rhs1_type);
3700 	    return true;
3701 	  }
3702 
3703 	return false;
3704       }
3705 
3706     case FLOAT_EXPR:
3707       {
3708 	if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3709 	    && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3710 	        || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3711 	  {
3712 	    error ("invalid types in conversion to floating point");
3713 	    debug_generic_expr (lhs_type);
3714 	    debug_generic_expr (rhs1_type);
3715 	    return true;
3716 	  }
3717 
3718         return false;
3719       }
3720 
3721     case FIX_TRUNC_EXPR:
3722       {
3723         if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3724             && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3725                 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3726 	  {
3727 	    error ("invalid types in conversion to integer");
3728 	    debug_generic_expr (lhs_type);
3729 	    debug_generic_expr (rhs1_type);
3730 	    return true;
3731 	  }
3732 
3733         return false;
3734       }
3735     case REDUC_MAX_EXPR:
3736     case REDUC_MIN_EXPR:
3737     case REDUC_PLUS_EXPR:
3738       if (!VECTOR_TYPE_P (rhs1_type)
3739 	  || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3740         {
3741 	  error ("reduction should convert from vector to element type");
3742 	  debug_generic_expr (lhs_type);
3743 	  debug_generic_expr (rhs1_type);
3744 	  return true;
3745 	}
3746       return false;
3747 
3748     case VEC_UNPACK_HI_EXPR:
3749     case VEC_UNPACK_LO_EXPR:
3750     case VEC_UNPACK_FLOAT_HI_EXPR:
3751     case VEC_UNPACK_FLOAT_LO_EXPR:
3752       /* FIXME.  */
3753       return false;
3754 
3755     case NEGATE_EXPR:
3756     case ABS_EXPR:
3757     case BIT_NOT_EXPR:
3758     case PAREN_EXPR:
3759     case CONJ_EXPR:
3760       break;
3761 
3762     default:
3763       gcc_unreachable ();
3764     }
3765 
3766   /* For the remaining codes assert there is no conversion involved.  */
3767   if (!useless_type_conversion_p (lhs_type, rhs1_type))
3768     {
3769       error ("non-trivial conversion in unary operation");
3770       debug_generic_expr (lhs_type);
3771       debug_generic_expr (rhs1_type);
3772       return true;
3773     }
3774 
3775   return false;
3776 }
3777 
3778 /* Verify a gimple assignment statement STMT with a binary rhs.
3779    Returns true if anything is wrong.  */
3780 
3781 static bool
3782 verify_gimple_assign_binary (gassign *stmt)
3783 {
3784   enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3785   tree lhs = gimple_assign_lhs (stmt);
3786   tree lhs_type = TREE_TYPE (lhs);
3787   tree rhs1 = gimple_assign_rhs1 (stmt);
3788   tree rhs1_type = TREE_TYPE (rhs1);
3789   tree rhs2 = gimple_assign_rhs2 (stmt);
3790   tree rhs2_type = TREE_TYPE (rhs2);
3791 
3792   if (!is_gimple_reg (lhs))
3793     {
3794       error ("non-register as LHS of binary operation");
3795       return true;
3796     }
3797 
3798   if (!is_gimple_val (rhs1)
3799       || !is_gimple_val (rhs2))
3800     {
3801       error ("invalid operands in binary operation");
3802       return true;
3803     }
3804 
3805   /* First handle operations that involve different types.  */
3806   switch (rhs_code)
3807     {
3808     case COMPLEX_EXPR:
3809       {
3810 	if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3811 	    || !(INTEGRAL_TYPE_P (rhs1_type)
3812 	         || SCALAR_FLOAT_TYPE_P (rhs1_type))
3813 	    || !(INTEGRAL_TYPE_P (rhs2_type)
3814 	         || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3815 	  {
3816 	    error ("type mismatch in complex expression");
3817 	    debug_generic_expr (lhs_type);
3818 	    debug_generic_expr (rhs1_type);
3819 	    debug_generic_expr (rhs2_type);
3820 	    return true;
3821 	  }
3822 
3823 	return false;
3824       }
3825 
3826     case LSHIFT_EXPR:
3827     case RSHIFT_EXPR:
3828     case LROTATE_EXPR:
3829     case RROTATE_EXPR:
3830       {
3831 	/* Shifts and rotates are ok on integral types, fixed point
3832 	   types and integer vector types.  */
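	/* E.g. 'x_1 = y_2 << 3' on 'int' operands is fine, as is a
	   vector-by-vector shift of integer vectors, while shifting
	   a 'float' operand is rejected (illustrative sketch).  */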
3833 	if ((!INTEGRAL_TYPE_P (rhs1_type)
3834 	     && !FIXED_POINT_TYPE_P (rhs1_type)
3835 	     && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3836 		  && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3837 	    || (!INTEGRAL_TYPE_P (rhs2_type)
3838 		/* Vector shifts of vectors are also ok.  */
3839 		&& !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3840 		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3841 		     && TREE_CODE (rhs2_type) == VECTOR_TYPE
3842 		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3843 	    || !useless_type_conversion_p (lhs_type, rhs1_type))
3844 	  {
3845 	    error ("type mismatch in shift expression");
3846 	    debug_generic_expr (lhs_type);
3847 	    debug_generic_expr (rhs1_type);
3848 	    debug_generic_expr (rhs2_type);
3849 	    return true;
3850 	  }
3851 
3852 	return false;
3853       }
3854 
3855     case WIDEN_LSHIFT_EXPR:
3856       {
3857         if (!INTEGRAL_TYPE_P (lhs_type)
3858             || !INTEGRAL_TYPE_P (rhs1_type)
3859             || TREE_CODE (rhs2) != INTEGER_CST
3860             || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3861           {
3862             error ("type mismatch in widening vector shift expression");
3863             debug_generic_expr (lhs_type);
3864             debug_generic_expr (rhs1_type);
3865             debug_generic_expr (rhs2_type);
3866             return true;
3867           }
3868 
3869         return false;
3870       }
3871 
3872     case VEC_WIDEN_LSHIFT_HI_EXPR:
3873     case VEC_WIDEN_LSHIFT_LO_EXPR:
3874       {
3875         if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3876             || TREE_CODE (lhs_type) != VECTOR_TYPE
3877             || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3878             || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3879             || TREE_CODE (rhs2) != INTEGER_CST
3880             || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3881                 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3882           {
3883             error ("type mismatch in widening vector shift expression");
3884             debug_generic_expr (lhs_type);
3885             debug_generic_expr (rhs1_type);
3886             debug_generic_expr (rhs2_type);
3887             return true;
3888           }
3889 
3890         return false;
3891       }
3892 
3893     case PLUS_EXPR:
3894     case MINUS_EXPR:
3895       {
3896 	tree lhs_etype = lhs_type;
3897 	tree rhs1_etype = rhs1_type;
3898 	tree rhs2_etype = rhs2_type;
3899 	if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3900 	  {
3901 	    if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3902 		|| TREE_CODE (rhs2_type) != VECTOR_TYPE)
3903 	      {
3904 		error ("invalid non-vector operands to vector valued plus");
3905 		return true;
3906 	      }
3907 	    lhs_etype = TREE_TYPE (lhs_type);
3908 	    rhs1_etype = TREE_TYPE (rhs1_type);
3909 	    rhs2_etype = TREE_TYPE (rhs2_type);
3910 	  }
3911 	if (POINTER_TYPE_P (lhs_etype)
3912 	    || POINTER_TYPE_P (rhs1_etype)
3913 	    || POINTER_TYPE_P (rhs2_etype))
3914 	  {
3915 	    error ("invalid (pointer) operands to plus/minus");
3916 	    return true;
3917 	  }
3918 
3919 	/* Continue with generic binary expression handling.  */
3920 	break;
3921       }
3922 
3923     case POINTER_PLUS_EXPR:
3924       {
3925 	if (!POINTER_TYPE_P (rhs1_type)
3926 	    || !useless_type_conversion_p (lhs_type, rhs1_type)
3927 	    || !ptrofftype_p (rhs2_type))
3928 	  {
3929 	    error ("type mismatch in pointer plus expression");
3930 	    debug_generic_stmt (lhs_type);
3931 	    debug_generic_stmt (rhs1_type);
3932 	    debug_generic_stmt (rhs2_type);
3933 	    return true;
3934 	  }
3935 
3936 	return false;
3937       }
3938 
3939     case TRUTH_ANDIF_EXPR:
3940     case TRUTH_ORIF_EXPR:
3941     case TRUTH_AND_EXPR:
3942     case TRUTH_OR_EXPR:
3943     case TRUTH_XOR_EXPR:
3944 
3945       gcc_unreachable ();
3946 
3947     case LT_EXPR:
3948     case LE_EXPR:
3949     case GT_EXPR:
3950     case GE_EXPR:
3951     case EQ_EXPR:
3952     case NE_EXPR:
3953     case UNORDERED_EXPR:
3954     case ORDERED_EXPR:
3955     case UNLT_EXPR:
3956     case UNLE_EXPR:
3957     case UNGT_EXPR:
3958     case UNGE_EXPR:
3959     case UNEQ_EXPR:
3960     case LTGT_EXPR:
3961       /* Comparisons are also binary, but the result type is not
3962 	 connected to the operand types.  */
3963       return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
3964 
3965     case WIDEN_MULT_EXPR:
3966       if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3967 	return true;
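      /* A valid widening multiply takes two operands of equal
	 precision and produces a result at least twice as wide,
	 e.g. two 16-bit inputs into a 32-bit result; in dump form
	 (an illustrative sketch):  _1 = a_2 w* b_3;  */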
3968       return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3969 	      || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3970 
3971     case WIDEN_SUM_EXPR:
3972     case VEC_WIDEN_MULT_HI_EXPR:
3973     case VEC_WIDEN_MULT_LO_EXPR:
3974     case VEC_WIDEN_MULT_EVEN_EXPR:
3975     case VEC_WIDEN_MULT_ODD_EXPR:
3976     case VEC_PACK_TRUNC_EXPR:
3977     case VEC_PACK_SAT_EXPR:
3978     case VEC_PACK_FIX_TRUNC_EXPR:
3979       /* FIXME.  */
3980       return false;
3981 
3982     case MULT_EXPR:
3983     case MULT_HIGHPART_EXPR:
3984     case TRUNC_DIV_EXPR:
3985     case CEIL_DIV_EXPR:
3986     case FLOOR_DIV_EXPR:
3987     case ROUND_DIV_EXPR:
3988     case TRUNC_MOD_EXPR:
3989     case CEIL_MOD_EXPR:
3990     case FLOOR_MOD_EXPR:
3991     case ROUND_MOD_EXPR:
3992     case RDIV_EXPR:
3993     case EXACT_DIV_EXPR:
3994     case MIN_EXPR:
3995     case MAX_EXPR:
3996     case BIT_IOR_EXPR:
3997     case BIT_XOR_EXPR:
3998     case BIT_AND_EXPR:
3999       /* Continue with generic binary expression handling.  */
4000       break;
4001 
4002     default:
4003       gcc_unreachable ();
4004     }
4005 
4006   if (!useless_type_conversion_p (lhs_type, rhs1_type)
4007       || !useless_type_conversion_p (lhs_type, rhs2_type))
4008     {
4009       error ("type mismatch in binary expression");
4010       debug_generic_stmt (lhs_type);
4011       debug_generic_stmt (rhs1_type);
4012       debug_generic_stmt (rhs2_type);
4013       return true;
4014     }
4015 
4016   return false;
4017 }
4018 
4019 /* Verify a gimple assignment statement STMT with a ternary rhs.
4020    Returns true if anything is wrong.  */
4021 
4022 static bool
4023 verify_gimple_assign_ternary (gassign *stmt)
4024 {
4025   enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4026   tree lhs = gimple_assign_lhs (stmt);
4027   tree lhs_type = TREE_TYPE (lhs);
4028   tree rhs1 = gimple_assign_rhs1 (stmt);
4029   tree rhs1_type = TREE_TYPE (rhs1);
4030   tree rhs2 = gimple_assign_rhs2 (stmt);
4031   tree rhs2_type = TREE_TYPE (rhs2);
4032   tree rhs3 = gimple_assign_rhs3 (stmt);
4033   tree rhs3_type = TREE_TYPE (rhs3);
4034 
4035   if (!is_gimple_reg (lhs))
4036     {
4037       error ("non-register as LHS of ternary operation");
4038       return true;
4039     }
4040 
4041   if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
4042        ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4043       || !is_gimple_val (rhs2)
4044       || !is_gimple_val (rhs3))
4045     {
4046       error ("invalid operands in ternary operation");
4047       return true;
4048     }
4049 
4050   /* First handle operations that involve different types.  */
4051   switch (rhs_code)
4052     {
4053     case WIDEN_MULT_PLUS_EXPR:
4054     case WIDEN_MULT_MINUS_EXPR:
4055       if ((!INTEGRAL_TYPE_P (rhs1_type)
4056 	   && !FIXED_POINT_TYPE_P (rhs1_type))
4057 	  || !useless_type_conversion_p (rhs1_type, rhs2_type)
4058 	  || !useless_type_conversion_p (lhs_type, rhs3_type)
4059 	  || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4060 	  || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4061 	{
4062 	  error ("type mismatch in widening multiply-accumulate expression");
4063 	  debug_generic_expr (lhs_type);
4064 	  debug_generic_expr (rhs1_type);
4065 	  debug_generic_expr (rhs2_type);
4066 	  debug_generic_expr (rhs3_type);
4067 	  return true;
4068 	}
4069       break;
4070 
4071     case FMA_EXPR:
4072       if (!useless_type_conversion_p (lhs_type, rhs1_type)
4073 	  || !useless_type_conversion_p (lhs_type, rhs2_type)
4074 	  || !useless_type_conversion_p (lhs_type, rhs3_type))
4075 	{
4076 	  error ("type mismatch in fused multiply-add expression");
4077 	  debug_generic_expr (lhs_type);
4078 	  debug_generic_expr (rhs1_type);
4079 	  debug_generic_expr (rhs2_type);
4080 	  debug_generic_expr (rhs3_type);
4081 	  return true;
4082 	}
4083       break;
4084 
4085     case VEC_COND_EXPR:
4086       if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4087 	  || TYPE_VECTOR_SUBPARTS (rhs1_type)
4088 	     != TYPE_VECTOR_SUBPARTS (lhs_type))
4089 	{
4090 	  error ("the first argument of a VEC_COND_EXPR must be of a "
4091 		 "boolean vector type of the same number of elements "
4092 		 "as the result");
4093 	  debug_generic_expr (lhs_type);
4094 	  debug_generic_expr (rhs1_type);
4095 	  return true;
4096 	}
4097       /* Fallthrough.  */
4098     case COND_EXPR:
4099       if (!useless_type_conversion_p (lhs_type, rhs2_type)
4100 	  || !useless_type_conversion_p (lhs_type, rhs3_type))
4101 	{
4102 	  error ("type mismatch in conditional expression");
4103 	  debug_generic_expr (lhs_type);
4104 	  debug_generic_expr (rhs2_type);
4105 	  debug_generic_expr (rhs3_type);
4106 	  return true;
4107 	}
4108       break;
4109 
4110     case VEC_PERM_EXPR:
4111       if (!useless_type_conversion_p (lhs_type, rhs1_type)
4112 	  || !useless_type_conversion_p (lhs_type, rhs2_type))
4113 	{
4114 	  error ("type mismatch in vector permute expression");
4115 	  debug_generic_expr (lhs_type);
4116 	  debug_generic_expr (rhs1_type);
4117 	  debug_generic_expr (rhs2_type);
4118 	  debug_generic_expr (rhs3_type);
4119 	  return true;
4120 	}
4121 
4122       if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4123 	  || TREE_CODE (rhs2_type) != VECTOR_TYPE
4124 	  || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4125 	{
4126 	  error ("vector types expected in vector permute expression");
4127 	  debug_generic_expr (lhs_type);
4128 	  debug_generic_expr (rhs1_type);
4129 	  debug_generic_expr (rhs2_type);
4130 	  debug_generic_expr (rhs3_type);
4131 	  return true;
4132 	}
4133 
4134       if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4135 	  || TYPE_VECTOR_SUBPARTS (rhs2_type)
4136 	     != TYPE_VECTOR_SUBPARTS (rhs3_type)
4137 	  || TYPE_VECTOR_SUBPARTS (rhs3_type)
4138 	     != TYPE_VECTOR_SUBPARTS (lhs_type))
4139 	{
4140 	  error ("vectors with different element number found "
4141 		 "in vector permute expression");
4142 	  debug_generic_expr (lhs_type);
4143 	  debug_generic_expr (rhs1_type);
4144 	  debug_generic_expr (rhs2_type);
4145 	  debug_generic_expr (rhs3_type);
4146 	  return true;
4147 	}
4148 
4149       if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4150 	  || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4151 	     != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4152 	{
4153 	  error ("invalid mask type in vector permute expression");
4154 	  debug_generic_expr (lhs_type);
4155 	  debug_generic_expr (rhs1_type);
4156 	  debug_generic_expr (rhs2_type);
4157 	  debug_generic_expr (rhs3_type);
4158 	  return true;
4159 	}
4160 
4161       return false;
4162 
4163     case SAD_EXPR:
4164       if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4165 	  || !useless_type_conversion_p (lhs_type, rhs3_type)
4166 	  || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4167 	       > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4168 	{
4169 	  error ("type mismatch in sad expression");
4170 	  debug_generic_expr (lhs_type);
4171 	  debug_generic_expr (rhs1_type);
4172 	  debug_generic_expr (rhs2_type);
4173 	  debug_generic_expr (rhs3_type);
4174 	  return true;
4175 	}
4176 
4177       if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4178 	  || TREE_CODE (rhs2_type) != VECTOR_TYPE
4179 	  || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4180 	{
4181 	  error ("vector types expected in sad expression");
4182 	  debug_generic_expr (lhs_type);
4183 	  debug_generic_expr (rhs1_type);
4184 	  debug_generic_expr (rhs2_type);
4185 	  debug_generic_expr (rhs3_type);
4186 	  return true;
4187 	}
4188 
4189       return false;
4190 
4191     case BIT_INSERT_EXPR:
4192       if (! useless_type_conversion_p (lhs_type, rhs1_type))
4193 	{
4194 	  error ("type mismatch in BIT_INSERT_EXPR");
4195 	  debug_generic_expr (lhs_type);
4196 	  debug_generic_expr (rhs1_type);
4197 	  return true;
4198 	}
4199       if (! ((INTEGRAL_TYPE_P (rhs1_type)
4200 	      && INTEGRAL_TYPE_P (rhs2_type))
4201 	     || (VECTOR_TYPE_P (rhs1_type)
4202 		 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))))
4203 	{
4204 	  error ("not allowed type combination in BIT_INSERT_EXPR");
4205 	  debug_generic_expr (rhs1_type);
4206 	  debug_generic_expr (rhs2_type);
4207 	  return true;
4208 	}
4209       if (! tree_fits_uhwi_p (rhs3)
4210 	  || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4211 	{
4212 	  error ("invalid position or size in BIT_INSERT_EXPR");
4213 	  return true;
4214 	}
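      /* A sketch of the range rules checked next: for a 32-bit
	 integer destination, inserting a 16-bit value at bit
	 position 16 is in range, while position 24 would overflow;
	 for vectors the position must fall on an element
	 boundary.  */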
4215       if (INTEGRAL_TYPE_P (rhs1_type))
4216 	{
4217 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4218 	  if (bitpos >= TYPE_PRECISION (rhs1_type)
4219 	      || (bitpos + TYPE_PRECISION (rhs2_type)
4220 		  > TYPE_PRECISION (rhs1_type)))
4221 	    {
4222 	      error ("insertion out of range in BIT_INSERT_EXPR");
4223 	      return true;
4224 	    }
4225 	}
4226       else if (VECTOR_TYPE_P (rhs1_type))
4227 	{
4228 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4229 	  unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4230 	  if (bitpos % bitsize != 0)
4231 	    {
4232 	      error ("vector insertion not at element boundary");
4233 	      return true;
4234 	    }
4235 	}
4236       return false;
4237 
4238     case DOT_PROD_EXPR:
4239     case REALIGN_LOAD_EXPR:
4240       /* FIXME.  */
4241       return false;
4242 
4243     default:
4244       gcc_unreachable ();
4245     }
4246   return false;
4247 }
4248 
4249 /* Verify a gimple assignment statement STMT with a single rhs.
4250    Returns true if anything is wrong.  */
4251 
4252 static bool
4253 verify_gimple_assign_single (gassign *stmt)
4254 {
4255   enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4256   tree lhs = gimple_assign_lhs (stmt);
4257   tree lhs_type = TREE_TYPE (lhs);
4258   tree rhs1 = gimple_assign_rhs1 (stmt);
4259   tree rhs1_type = TREE_TYPE (rhs1);
4260   bool res = false;
4261 
4262   if (!useless_type_conversion_p (lhs_type, rhs1_type))
4263     {
4264       error ("non-trivial conversion at assignment");
4265       debug_generic_expr (lhs_type);
4266       debug_generic_expr (rhs1_type);
4267       return true;
4268     }
4269 
4270   if (gimple_clobber_p (stmt)
4271       && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4272     {
4273       error ("non-decl/MEM_REF LHS in clobber statement");
4274       debug_generic_expr (lhs);
4275       return true;
4276     }
4277 
4278   if (handled_component_p (lhs)
4279       || TREE_CODE (lhs) == MEM_REF
4280       || TREE_CODE (lhs) == TARGET_MEM_REF)
4281     res |= verify_types_in_gimple_reference (lhs, true);
4282 
4283   /* Special codes we cannot handle via their class.  */
4284   switch (rhs_code)
4285     {
4286     case ADDR_EXPR:
4287       {
4288 	tree op = TREE_OPERAND (rhs1, 0);
4289 	if (!is_gimple_addressable (op))
4290 	  {
4291 	    error ("invalid operand in unary expression");
4292 	    return true;
4293 	  }
4294 
4295 	/* Technically there is no longer a need for matching types, but
4296 	   gimple hygiene asks for this check.  In LTO we can end up
4297 	   combining incompatible units and thus end up with addresses
4298 	   of globals that change their type to a common one.  */
4299 	if (!in_lto_p
4300 	    && !types_compatible_p (TREE_TYPE (op),
4301 				    TREE_TYPE (TREE_TYPE (rhs1)))
4302 	    && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4303 							  TREE_TYPE (op)))
4304 	  {
4305 	    error ("type mismatch in address expression");
4306 	    debug_generic_stmt (TREE_TYPE (rhs1));
4307 	    debug_generic_stmt (TREE_TYPE (op));
4308 	    return true;
4309 	  }
4310 
4311 	return verify_types_in_gimple_reference (op, true);
4312       }
4313 
4314     /* tcc_reference  */
4315     case INDIRECT_REF:
4316       error ("INDIRECT_REF in gimple IL");
4317       return true;
4318 
4319     case COMPONENT_REF:
4320     case BIT_FIELD_REF:
4321     case ARRAY_REF:
4322     case ARRAY_RANGE_REF:
4323     case VIEW_CONVERT_EXPR:
4324     case REALPART_EXPR:
4325     case IMAGPART_EXPR:
4326     case TARGET_MEM_REF:
4327     case MEM_REF:
4328       if (!is_gimple_reg (lhs)
4329 	  && is_gimple_reg_type (TREE_TYPE (lhs)))
4330 	{
4331 	  error ("invalid rhs for gimple memory store");
4332 	  debug_generic_stmt (lhs);
4333 	  debug_generic_stmt (rhs1);
4334 	  return true;
4335 	}
4336       return res || verify_types_in_gimple_reference (rhs1, false);
4337 
4338     /* tcc_constant  */
4339     case SSA_NAME:
4340     case INTEGER_CST:
4341     case REAL_CST:
4342     case FIXED_CST:
4343     case COMPLEX_CST:
4344     case VECTOR_CST:
4345     case STRING_CST:
4346       return res;
4347 
4348     /* tcc_declaration  */
4349     case CONST_DECL:
4350       return res;
4351     case VAR_DECL:
4352     case PARM_DECL:
4353       if (!is_gimple_reg (lhs)
4354 	  && !is_gimple_reg (rhs1)
4355 	  && is_gimple_reg_type (TREE_TYPE (lhs)))
4356 	{
4357 	  error ("invalid rhs for gimple memory store");
4358 	  debug_generic_stmt (lhs);
4359 	  debug_generic_stmt (rhs1);
4360 	  return true;
4361 	}
4362       return res;
4363 
4364     case CONSTRUCTOR:
4365       if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4366 	{
4367 	  unsigned int i;
4368 	  tree elt_i, elt_v, elt_t = NULL_TREE;
4369 
4370 	  if (CONSTRUCTOR_NELTS (rhs1) == 0)
4371 	    return res;
4372 	  /* For vector CONSTRUCTORs we require that either it is an empty
4373 	     CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4374 	     (then the element count must be correct to cover the whole
4375 	     outer vector and the index must be NULL on all elements), or
4376 	     it is a CONSTRUCTOR of scalar elements, where as an exception
4377 	     we allow a smaller number of elements (assuming zero filling)
4378 	     and consecutive indexes as an alternative to NULL indexes
4379 	     (such CONSTRUCTORs can appear in the IL from FEs).  */
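	  /* E.g. for a four-element integer vector, both a
	     CONSTRUCTOR of two two-element integer vectors covering
	     all four lanes and a CONSTRUCTOR of two scalars with the
	     upper lanes implicitly zero are accepted (an
	     illustrative sketch).  */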
4380 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4381 	    {
4382 	      if (elt_t == NULL_TREE)
4383 		{
4384 		  elt_t = TREE_TYPE (elt_v);
4385 		  if (TREE_CODE (elt_t) == VECTOR_TYPE)
4386 		    {
4388 		      if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4389 						      TREE_TYPE (elt_t)))
4390 			{
4391 			  error ("incorrect type of vector CONSTRUCTOR"
4392 				 " elements");
4393 			  debug_generic_stmt (rhs1);
4394 			  return true;
4395 			}
4396 		      else if (CONSTRUCTOR_NELTS (rhs1)
4397 			       * TYPE_VECTOR_SUBPARTS (elt_t)
4398 			       != TYPE_VECTOR_SUBPARTS (rhs1_type))
4399 			{
4400 			  error ("incorrect number of vector CONSTRUCTOR"
4401 				 " elements");
4402 			  debug_generic_stmt (rhs1);
4403 			  return true;
4404 			}
4405 		    }
4406 		  else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4407 						       elt_t))
4408 		    {
4409 		      error ("incorrect type of vector CONSTRUCTOR elements");
4410 		      debug_generic_stmt (rhs1);
4411 		      return true;
4412 		    }
4413 		  else if (CONSTRUCTOR_NELTS (rhs1)
4414 			   > TYPE_VECTOR_SUBPARTS (rhs1_type))
4415 		    {
4416 		      error ("incorrect number of vector CONSTRUCTOR elements");
4417 		      debug_generic_stmt (rhs1);
4418 		      return true;
4419 		    }
4420 		}
4421 	      else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4422 		{
4423 		  error ("incorrect type of vector CONSTRUCTOR elements");
4424 		  debug_generic_stmt (rhs1);
4425 		  return true;
4426 		}
4427 	      if (elt_i != NULL_TREE
4428 		  && (TREE_CODE (elt_t) == VECTOR_TYPE
4429 		      || TREE_CODE (elt_i) != INTEGER_CST
4430 		      || compare_tree_int (elt_i, i) != 0))
4431 		{
4432 		  error ("vector CONSTRUCTOR with non-NULL element index");
4433 		  debug_generic_stmt (rhs1);
4434 		  return true;
4435 		}
4436 	      if (!is_gimple_val (elt_v))
4437 		{
4438 		  error ("vector CONSTRUCTOR element is not a GIMPLE value");
4439 		  debug_generic_stmt (rhs1);
4440 		  return true;
4441 		}
4442 	    }
4443 	}
4444       else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4445 	{
4446 	  error ("non-vector CONSTRUCTOR with elements");
4447 	  debug_generic_stmt (rhs1);
4448 	  return true;
4449 	}
4450       return res;
4451     case OBJ_TYPE_REF:
4452     case ASSERT_EXPR:
4453     case WITH_SIZE_EXPR:
4454       /* FIXME.  */
4455       return res;
4456 
4457     default:;
4458     }
4459 
4460   return res;
4461 }
4462 
4463 /* Verify the contents of a GIMPLE_ASSIGN STMT.  Returns true when there
4464    is a problem, otherwise false.  */
4465 
4466 static bool
4467 verify_gimple_assign (gassign *stmt)
4468 {
4469   switch (gimple_assign_rhs_class (stmt))
4470     {
4471     case GIMPLE_SINGLE_RHS:
4472       return verify_gimple_assign_single (stmt);
4473 
4474     case GIMPLE_UNARY_RHS:
4475       return verify_gimple_assign_unary (stmt);
4476 
4477     case GIMPLE_BINARY_RHS:
4478       return verify_gimple_assign_binary (stmt);
4479 
4480     case GIMPLE_TERNARY_RHS:
4481       return verify_gimple_assign_ternary (stmt);
4482 
4483     default:
4484       gcc_unreachable ();
4485     }
4486 }
4487 
4488 /* Verify the contents of a GIMPLE_RETURN STMT.  Returns true when there
4489    is a problem, otherwise false.  */
4490 
4491 static bool
4492 verify_gimple_return (greturn *stmt)
4493 {
4494   tree op = gimple_return_retval (stmt);
4495   tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4496 
4497   /* We cannot test for the presence of return values as we do not fix
4498      up missing return values from the original source.  */
4499   if (op == NULL)
4500     return false;
4501 
4502   if (!is_gimple_val (op)
4503       && TREE_CODE (op) != RESULT_DECL)
4504     {
4505       error ("invalid operand in return statement");
4506       debug_generic_stmt (op);
4507       return true;
4508     }
4509 
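  /* If the result is returned by invisible reference, OP denotes a
     pointer to the actual result, so unwrap one level and compare
     the pointed-to type against RESTYPE below.  */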
4510   if ((TREE_CODE (op) == RESULT_DECL
4511        && DECL_BY_REFERENCE (op))
4512       || (TREE_CODE (op) == SSA_NAME
4513 	  && SSA_NAME_VAR (op)
4514 	  && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4515 	  && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4516     op = TREE_TYPE (op);
4517 
4518   if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4519     {
4520       error ("invalid conversion in return statement");
4521       debug_generic_stmt (restype);
4522       debug_generic_stmt (TREE_TYPE (op));
4523       return true;
4524     }
4525 
4526   return false;
4527 }
4528 
4529 
4530 /* Verify the contents of a GIMPLE_GOTO STMT.  Returns true when there
4531    is a problem, otherwise false.  */
4532 
4533 static bool
4534 verify_gimple_goto (ggoto *stmt)
4535 {
4536   tree dest = gimple_goto_dest (stmt);
4537 
4538   /* ???  We have two canonical forms of direct goto destinations, a
4539      bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL.  */
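  /* I.e. both a direct  goto lab;  (a bare LABEL_DECL) and a
     computed  goto *ptr_1;  (a pointer-valued GIMPLE value) are
     accepted here (illustrative).  */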
4540   if (TREE_CODE (dest) != LABEL_DECL
4541       && (!is_gimple_val (dest)
4542 	  || !POINTER_TYPE_P (TREE_TYPE (dest))))
4543     {
4544       error ("goto destination is neither a label nor a pointer");
4545       return true;
4546     }
4547 
4548   return false;
4549 }
4550 
4551 /* Verify the contents of a GIMPLE_SWITCH STMT.  Returns true when there
4552    is a problem, otherwise false.  */
4553 
4554 static bool
4555 verify_gimple_switch (gswitch *stmt)
4556 {
4557   unsigned int i, n;
4558   tree elt, prev_upper_bound = NULL_TREE;
4559   tree index_type, elt_type = NULL_TREE;
4560 
4561   if (!is_gimple_val (gimple_switch_index (stmt)))
4562     {
4563       error ("invalid operand to switch statement");
4564       debug_generic_stmt (gimple_switch_index (stmt));
4565       return true;
4566     }
4567 
4568   index_type = TREE_TYPE (gimple_switch_index (stmt));
4569   if (! INTEGRAL_TYPE_P (index_type))
4570     {
4571       error ("non-integral type switch statement");
4572       debug_generic_expr (index_type);
4573       return true;
4574     }
4575 
4576   elt = gimple_switch_label (stmt, 0);
4577   if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4578     {
4579       error ("invalid default case label in switch statement");
4580       debug_generic_expr (elt);
4581       return true;
4582     }
4583 
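  /* The remaining labels must have CASE_LOW set, share one type,
     and form sorted, non-overlapping ranges, e.g.
     case 1:  case 3 ... 5:  case 9:  (an illustrative sketch of
     what the loop below enforces).  */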
4584   n = gimple_switch_num_labels (stmt);
4585   for (i = 1; i < n; i++)
4586     {
4587       elt = gimple_switch_label (stmt, i);
4588 
4589       if (! CASE_LOW (elt))
4590 	{
4591 	  error ("invalid case label in switch statement");
4592 	  debug_generic_expr (elt);
4593 	  return true;
4594 	}
4595       if (CASE_HIGH (elt)
4596 	  && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4597 	{
4598 	  error ("invalid case range in switch statement");
4599 	  debug_generic_expr (elt);
4600 	  return true;
4601 	}
4602 
4603       if (elt_type)
4604 	{
4605 	  if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4606 	      || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4607 	    {
4608 	      error ("type mismatch for case label in switch statement");
4609 	      debug_generic_expr (elt);
4610 	      return true;
4611 	    }
4612 	}
4613       else
4614 	{
4615 	  elt_type = TREE_TYPE (CASE_LOW (elt));
4616 	  if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4617 	    {
4618 	      error ("type precision mismatch in switch statement");
4619 	      return true;
4620 	    }
4621 	}
4622 
4623       if (prev_upper_bound)
4624 	{
4625 	  if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4626 	    {
4627 	      error ("case labels not sorted in switch statement");
4628 	      return true;
4629 	    }
4630 	}
4631 
4632       prev_upper_bound = CASE_HIGH (elt);
4633       if (! prev_upper_bound)
4634 	prev_upper_bound = CASE_LOW (elt);
4635     }
4636 
4637   return false;
4638 }
4639 
4640 /* Verify a gimple debug statement STMT.
4641    Returns true if anything is wrong.  */
4642 
4643 static bool
4644 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4645 {
4646   /* There isn't much that could be wrong in a gimple debug stmt.  A
4647      gimple debug bind stmt, for example, maps a tree that is usually
4648      a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4649      component or member of an aggregate type, to another tree that
4650      can be an arbitrary expression.  These stmts expand into debug
4651      insns, and are converted to debug notes by var-tracking.c.  */
4652   return false;
4653 }
4654 
4655 /* Verify a gimple label statement STMT.
4656    Returns true if anything is wrong.  */
4657 
4658 static bool
4659 verify_gimple_label (glabel *stmt)
4660 {
4661   tree decl = gimple_label_label (stmt);
4662   int uid;
4663   bool err = false;
4664 
4665   if (TREE_CODE (decl) != LABEL_DECL)
4666     return true;
4667   if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4668       && DECL_CONTEXT (decl) != current_function_decl)
4669     {
4670       error ("label's context is not the current function decl");
4671       err |= true;
4672     }
4673 
4674   uid = LABEL_DECL_UID (decl);
4675   if (cfun->cfg
4676       && (uid == -1
4677 	  || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4678     {
4679       error ("incorrect entry in label_to_block_map");
4680       err |= true;
4681     }
4682 
4683   uid = EH_LANDING_PAD_NR (decl);
4684   if (uid)
4685     {
4686       eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4687       if (decl != lp->post_landing_pad)
4688 	{
4689 	  error ("incorrect setting of landing pad number");
4690 	  err |= true;
4691 	}
4692     }
4693 
4694   return err;
4695 }
4696 
4697 /* Verify a gimple cond statement STMT.
4698    Returns true if anything is wrong.  */
4699 
4700 static bool
4701 verify_gimple_cond (gcond *stmt)
4702 {
4703   if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4704     {
4705       error ("invalid comparison code in gimple cond");
4706       return true;
4707     }
4708   if (!(!gimple_cond_true_label (stmt)
4709 	|| TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4710       || !(!gimple_cond_false_label (stmt)
4711 	   || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4712     {
4713       error ("invalid labels in gimple cond");
4714       return true;
4715     }
4716 
4717   return verify_gimple_comparison (boolean_type_node,
4718 				   gimple_cond_lhs (stmt),
4719 				   gimple_cond_rhs (stmt),
4720 				   gimple_cond_code (stmt));
4721 }
4722 
4723 /* Verify the GIMPLE statement STMT.  Returns true if there is an
4724    error, otherwise false.  */
4725 
4726 static bool
4727 verify_gimple_stmt (gimple *stmt)
4728 {
4729   switch (gimple_code (stmt))
4730     {
4731     case GIMPLE_ASSIGN:
4732       return verify_gimple_assign (as_a <gassign *> (stmt));
4733 
4734     case GIMPLE_LABEL:
4735       return verify_gimple_label (as_a <glabel *> (stmt));
4736 
4737     case GIMPLE_CALL:
4738       return verify_gimple_call (as_a <gcall *> (stmt));
4739 
4740     case GIMPLE_COND:
4741       return verify_gimple_cond (as_a <gcond *> (stmt));
4742 
4743     case GIMPLE_GOTO:
4744       return verify_gimple_goto (as_a <ggoto *> (stmt));
4745 
4746     case GIMPLE_SWITCH:
4747       return verify_gimple_switch (as_a <gswitch *> (stmt));
4748 
4749     case GIMPLE_RETURN:
4750       return verify_gimple_return (as_a <greturn *> (stmt));
4751 
4752     case GIMPLE_ASM:
4753       return false;
4754 
4755     case GIMPLE_TRANSACTION:
4756       return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4757 
4758     /* Tuples that do not have tree operands.  */
4759     case GIMPLE_NOP:
4760     case GIMPLE_PREDICT:
4761     case GIMPLE_RESX:
4762     case GIMPLE_EH_DISPATCH:
4763     case GIMPLE_EH_MUST_NOT_THROW:
4764       return false;
4765 
4766     CASE_GIMPLE_OMP:
4767       /* OpenMP directives are validated by the FE and never operated
4768 	 on by the optimizers.  Furthermore, GIMPLE_OMP_FOR may contain
4769 	 non-gimple expressions when the main index variable has had
4770 	 its address taken.  This does not affect the loop itself
4771 	 because the header of a GIMPLE_OMP_FOR is merely used to determine
4772 	 how to set up the parallel iteration.  */
4773       return false;
4774 
4775     case GIMPLE_DEBUG:
4776       return verify_gimple_debug (stmt);
4777 
4778     default:
4779       gcc_unreachable ();
4780     }
4781 }
4782 
4783 /* Verify the contents of a GIMPLE_PHI.  Returns true if there is a problem,
4784    and false otherwise.  */
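/* For illustration, a well-formed PHI looks like
     x_3 = PHI <x_1(2), 5(3)>
   where every argument is a GIMPLE value with a type compatible
   with the result; virtual PHIs instead use the single virtual
   operand returned by gimple_vop.  */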
4785 
4786 static bool
4787 verify_gimple_phi (gimple *phi)
4788 {
4789   bool err = false;
4790   unsigned i;
4791   tree phi_result = gimple_phi_result (phi);
4792   bool virtual_p;
4793 
4794   if (!phi_result)
4795     {
4796       error ("invalid PHI result");
4797       return true;
4798     }
4799 
4800   virtual_p = virtual_operand_p (phi_result);
4801   if (TREE_CODE (phi_result) != SSA_NAME
4802       || (virtual_p
4803 	  && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4804     {
4805       error ("invalid PHI result");
4806       err = true;
4807     }
4808 
4809   for (i = 0; i < gimple_phi_num_args (phi); i++)
4810     {
4811       tree t = gimple_phi_arg_def (phi, i);
4812 
4813       if (!t)
4814 	{
4815 	  error ("missing PHI def");
4816 	  err |= true;
4817 	  continue;
4818 	}
4819       /* Addressable variables do have SSA_NAMEs but they
4820 	 are not considered gimple values.  */
4821       else if ((TREE_CODE (t) == SSA_NAME
4822 		&& virtual_p != virtual_operand_p (t))
4823 	       || (virtual_p
4824 		   && (TREE_CODE (t) != SSA_NAME
4825 		       || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4826 	       || (!virtual_p
4827 		   && !is_gimple_val (t)))
4828 	{
4829 	  error ("invalid PHI argument");
4830 	  debug_generic_expr (t);
4831 	  err |= true;
4832 	}
4833 #ifdef ENABLE_TYPES_CHECKING
4834       if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4835 	{
4836 	  error ("incompatible types in PHI argument %u", i);
4837 	  debug_generic_stmt (TREE_TYPE (phi_result));
4838 	  debug_generic_stmt (TREE_TYPE (t));
4839 	  err |= true;
4840 	}
4841 #endif
4842     }
4843 
4844   return err;
4845 }
4846 
4847 /* Verify the GIMPLE statements inside the sequence STMTS.  */
4848 
4849 static bool
4850 verify_gimple_in_seq_2 (gimple_seq stmts)
4851 {
4852   gimple_stmt_iterator ittr;
4853   bool err = false;
4854 
4855   for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4856     {
4857       gimple *stmt = gsi_stmt (ittr);
4858 
4859       switch (gimple_code (stmt))
4860         {
4861 	case GIMPLE_BIND:
4862 	  err |= verify_gimple_in_seq_2 (
4863                    gimple_bind_body (as_a <gbind *> (stmt)));
4864 	  break;
4865 
4866 	case GIMPLE_TRY:
4867 	  err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4868 	  err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4869 	  break;
4870 
4871 	case GIMPLE_EH_FILTER:
4872 	  err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4873 	  break;
4874 
4875 	case GIMPLE_EH_ELSE:
4876 	  {
4877 	    geh_else *eh_else = as_a <geh_else *> (stmt);
4878 	    err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4879 	    err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4880 	  }
4881 	  break;
4882 
4883 	case GIMPLE_CATCH:
4884 	  err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4885 					   as_a <gcatch *> (stmt)));
4886 	  break;
4887 
4888 	case GIMPLE_TRANSACTION:
4889 	  err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4890 	  break;
4891 
4892 	default:
4893 	  {
4894 	    bool err2 = verify_gimple_stmt (stmt);
4895 	    if (err2)
4896 	      debug_gimple_stmt (stmt);
4897 	    err |= err2;
4898 	  }
4899 	}
4900     }
4901 
4902   return err;
4903 }
4904 
4905 /* Verify the contents of a GIMPLE_TRANSACTION.  Returns true if there
4906    is a problem, otherwise false.  */
4907 
4908 static bool
4909 verify_gimple_transaction (gtransaction *stmt)
4910 {
4911   tree lab;
4912 
4913   lab = gimple_transaction_label_norm (stmt);
4914   if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4915     return true;
4916   lab = gimple_transaction_label_uninst (stmt);
4917   if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4918     return true;
4919   lab = gimple_transaction_label_over (stmt);
4920   if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4921     return true;
4922 
4923   return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4924 }
4925 
4926 
4927 /* Verify the GIMPLE statements inside the statement list STMTS.  */
4928 
4929 DEBUG_FUNCTION void
4930 verify_gimple_in_seq (gimple_seq stmts)
4931 {
4932   timevar_push (TV_TREE_STMT_VERIFY);
4933   if (verify_gimple_in_seq_2 (stmts))
4934     internal_error ("verify_gimple failed");
4935   timevar_pop (TV_TREE_STMT_VERIFY);
4936 }
4937 
4938 /* Return true when T can be shared.  */
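/* Types, decls, SSA names and constants may be referenced from many
   statements at once, while reference trees such as MEM_REF must be
   unshared so that each statement owns its operands (an illustrative
   summary of the checks below).  */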
4939 
4940 static bool
4941 tree_node_can_be_shared (tree t)
4942 {
4943   if (IS_TYPE_OR_DECL_P (t)
4944       || is_gimple_min_invariant (t)
4945       || TREE_CODE (t) == SSA_NAME
4946       || t == error_mark_node
4947       || TREE_CODE (t) == IDENTIFIER_NODE)
4948     return true;
4949 
4950   if (TREE_CODE (t) == CASE_LABEL_EXPR)
4951     return true;
4952 
4953   if (DECL_P (t))
4954     return true;
4955 
4956   return false;
4957 }
4958 
4959 /* Called via walk_tree.  Verify tree sharing.  */
4960 
4961 static tree
4962 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4963 {
4964   hash_set<void *> *visited = (hash_set<void *> *) data;
4965 
4966   if (tree_node_can_be_shared (*tp))
4967     {
4968       *walk_subtrees = false;
4969       return NULL;
4970     }
4971 
4972   if (visited->add (*tp))
4973     return *tp;
4974 
4975   return NULL;
4976 }
4977 
4978 /* Called via walk_gimple_stmt.  Verify tree sharing.  */
4979 
4980 static tree
4981 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4982 {
4983   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4984   return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4985 }
4986 
4987 static bool eh_error_found;
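/* Callback for the EH table traversal in verify_gimple_in_cfg.
   Complain about STMT if it is recorded in the EH throw table but
   was not visited in the IL, i.e. it is dead there.  */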
4988 bool
4989 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
4990 			   hash_set<gimple *> *visited)
4991 {
4992   if (!visited->contains (stmt))
4993     {
4994       error ("dead STMT in EH table");
4995       debug_gimple_stmt (stmt);
4996       eh_error_found = true;
4997     }
4998   return true;
4999 }
5000 
5001 /* Verify that the block of location LOC is in BLOCKS.  */
5002 
5003 static bool
5004 verify_location (hash_set<tree> *blocks, location_t loc)
5005 {
5006   tree block = LOCATION_BLOCK (loc);
5007   if (block != NULL_TREE
5008       && !blocks->contains (block))
5009     {
5010       error ("location references block not in block tree");
5011       return true;
5012     }
5013   if (block != NULL_TREE)
5014     return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5015   return false;
5016 }
5017 
5018 /* Called via walk_tree.  Verify that expressions have no blocks.  */
5019 
5020 static tree
5021 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5022 {
5023   if (!EXPR_P (*tp))
5024     {
5025       *walk_subtrees = false;
5026       return NULL;
5027     }
5028 
5029   location_t loc = EXPR_LOCATION (*tp);
5030   if (LOCATION_BLOCK (loc) != NULL)
5031     return *tp;
5032 
5033   return NULL;
5034 }
5035 
5036 /* Called via walk_tree.  Verify locations of expressions.  */
5037 
5038 static tree
5039 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5040 {
5041   hash_set<tree> *blocks = (hash_set<tree> *) data;
5042 
5043   if (VAR_P (*tp) && DECL_HAS_DEBUG_EXPR_P (*tp))
5044     {
5045       tree t = DECL_DEBUG_EXPR (*tp);
5046       tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5047       if (addr)
5048 	return addr;
5049     }
5050   if ((VAR_P (*tp)
5051        || TREE_CODE (*tp) == PARM_DECL
5052        || TREE_CODE (*tp) == RESULT_DECL)
5053       && DECL_HAS_VALUE_EXPR_P (*tp))
5054     {
5055       tree t = DECL_VALUE_EXPR (*tp);
5056       tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5057       if (addr)
5058 	return addr;
5059     }
5060 
5061   if (!EXPR_P (*tp))
5062     {
5063       *walk_subtrees = false;
5064       return NULL;
5065     }
5066 
5067   location_t loc = EXPR_LOCATION (*tp);
5068   if (verify_location (blocks, loc))
5069     return *tp;
5070 
5071   return NULL;
5072 }
5073 
5074 /* Called via walk_gimple_op.  Verify locations of expressions.  */
5075 
5076 static tree
5077 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5078 {
5079   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5080   return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5081 }
5082 
5083 /* Insert all subblocks of BLOCK into BLOCKS and recurse.  */
5084 
5085 static void
5086 collect_subblocks (hash_set<tree> *blocks, tree block)
5087 {
5088   tree t;
5089   for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5090     {
5091       blocks->add (t);
5092       collect_subblocks (blocks, t);
5093     }
5094 }
5095 
5096 /* Verify the GIMPLE statements in the CFG of FN.  */
5097 
5098 DEBUG_FUNCTION void
5099 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5100 {
5101   basic_block bb;
5102   bool err = false;
5103 
5104   timevar_push (TV_TREE_STMT_VERIFY);
5105   hash_set<void *> visited;
5106   hash_set<gimple *> visited_stmts;
5107 
5108   /* Collect all BLOCKs referenced by the BLOCK tree of FN.  */
5109   hash_set<tree> blocks;
5110   if (DECL_INITIAL (fn->decl))
5111     {
5112       blocks.add (DECL_INITIAL (fn->decl));
5113       collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5114     }
5115 
5116   FOR_EACH_BB_FN (bb, fn)
5117     {
5118       gimple_stmt_iterator gsi;
5119 
5120       for (gphi_iterator gpi = gsi_start_phis (bb);
5121 	   !gsi_end_p (gpi);
5122 	   gsi_next (&gpi))
5123 	{
5124 	  gphi *phi = gpi.phi ();
5125 	  bool err2 = false;
5126 	  unsigned i;
5127 
5128 	  visited_stmts.add (phi);
5129 
5130 	  if (gimple_bb (phi) != bb)
5131 	    {
5132 	      error ("gimple_bb (phi) is set to a wrong basic block");
5133 	      err2 = true;
5134 	    }
5135 
5136 	  err2 |= verify_gimple_phi (phi);
5137 
5138 	  /* Only PHI arguments have locations.  */
5139 	  if (gimple_location (phi) != UNKNOWN_LOCATION)
5140 	    {
5141 	      error ("PHI node with location");
5142 	      err2 = true;
5143 	    }
5144 
5145 	  for (i = 0; i < gimple_phi_num_args (phi); i++)
5146 	    {
5147 	      tree arg = gimple_phi_arg_def (phi, i);
5148 	      tree addr = walk_tree (&arg, verify_node_sharing_1,
5149 				     &visited, NULL);
5150 	      if (addr)
5151 		{
5152 		  error ("incorrect sharing of tree nodes");
5153 		  debug_generic_expr (addr);
5154 		  err2 |= true;
5155 		}
5156 	      location_t loc = gimple_phi_arg_location (phi, i);
5157 	      if (virtual_operand_p (gimple_phi_result (phi))
5158 		  && loc != UNKNOWN_LOCATION)
5159 		{
5160 		  error ("virtual PHI with argument locations");
5161 		  err2 = true;
5162 		}
5163 	      addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5164 	      if (addr)
5165 		{
5166 		  debug_generic_expr (addr);
5167 		  err2 = true;
5168 		}
5169 	      err2 |= verify_location (&blocks, loc);
5170 	    }
5171 
5172 	  if (err2)
5173 	    debug_gimple_stmt (phi);
5174 	  err |= err2;
5175 	}
5176 
5177       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5178 	{
5179 	  gimple *stmt = gsi_stmt (gsi);
5180 	  bool err2 = false;
5181 	  struct walk_stmt_info wi;
5182 	  tree addr;
5183 	  int lp_nr;
5184 
5185 	  visited_stmts.add (stmt);
5186 
5187 	  if (gimple_bb (stmt) != bb)
5188 	    {
5189 	      error ("gimple_bb (stmt) is set to a wrong basic block");
5190 	      err2 = true;
5191 	    }
5192 
5193 	  err2 |= verify_gimple_stmt (stmt);
5194 	  err2 |= verify_location (&blocks, gimple_location (stmt));
5195 
5196 	  memset (&wi, 0, sizeof (wi));
5197 	  wi.info = (void *) &visited;
5198 	  addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5199 	  if (addr)
5200 	    {
5201 	      error ("incorrect sharing of tree nodes");
5202 	      debug_generic_expr (addr);
5203 	      err2 |= true;
5204 	    }
5205 
5206 	  memset (&wi, 0, sizeof (wi));
5207 	  wi.info = (void *) &blocks;
5208 	  addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5209 	  if (addr)
5210 	    {
5211 	      debug_generic_expr (addr);
5212 	      err2 |= true;
5213 	    }
5214 
5215 	  /* ???  Instead of not checking these stmts at all, the walker
5216 	     should know its context via wi.  */
5217 	  if (!is_gimple_debug (stmt)
5218 	      && !is_gimple_omp (stmt))
5219 	    {
5220 	      memset (&wi, 0, sizeof (wi));
5221 	      addr = walk_gimple_op (stmt, verify_expr, &wi);
5222 	      if (addr)
5223 		{
5224 		  debug_generic_expr (addr);
5225 		  inform (gimple_location (stmt), "in statement");
5226 		  err2 |= true;
5227 		}
5228 	    }
5229 
5230 	  /* If the statement is marked as part of an EH region, then
5231 	     it is expected that the statement could throw.  Verify
5232 	     that when an optimization simplifies a statement such
5233 	     that we prove it cannot throw, the other data structures
5234 	     are updated to match.  */
5235 	  lp_nr = lookup_stmt_eh_lp (stmt);
5236 	  if (lp_nr > 0)
5237 	    {
5238 	      if (!stmt_could_throw_p (stmt))
5239 		{
5240 		  if (verify_nothrow)
5241 		    {
5242 		      error ("statement marked for throw, but doesn%'t");
5243 		      err2 |= true;
5244 		    }
5245 		}
5246 	      else if (!gsi_one_before_end_p (gsi))
5247 		{
5248 		  error ("statement marked for throw in middle of block");
5249 		  err2 |= true;
5250 		}
5251 	    }
5252 
5253 	  if (err2)
5254 	    debug_gimple_stmt (stmt);
5255 	  err |= err2;
5256 	}
5257     }
5258 
5259   eh_error_found = false;
5260   hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5261   if (eh_table)
5262     eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5263       (&visited_stmts);
5264 
5265   if (err || eh_error_found)
5266     internal_error ("verify_gimple failed");
5267 
5268   verify_histograms ();
5269   timevar_pop (TV_TREE_STMT_VERIFY);
5270 }
5271 
5272 
5273 /* Verify that the flow information is OK.  */
5274 
5275 static int
5276 gimple_verify_flow_info (void)
5277 {
5278   int err = 0;
5279   basic_block bb;
5280   gimple_stmt_iterator gsi;
5281   gimple *stmt;
5282   edge e;
5283   edge_iterator ei;
5284 
5285   if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5286       || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5287     {
5288       error ("ENTRY_BLOCK has IL associated with it");
5289       err = 1;
5290     }
5291 
5292   if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5293       || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5294     {
5295       error ("EXIT_BLOCK has IL associated with it");
5296       err = 1;
5297     }
5298 
5299   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5300     if (e->flags & EDGE_FALLTHRU)
5301       {
5302 	error ("fallthru to exit from bb %d", e->src->index);
5303 	err = 1;
5304       }
5305 
5306   FOR_EACH_BB_FN (bb, cfun)
5307     {
5308       bool found_ctrl_stmt = false;
5309 
5310       stmt = NULL;
5311 
5312       /* Skip the labels at the start of the basic block.  */
5313       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5314 	{
5315 	  tree label;
5316 	  gimple *prev_stmt = stmt;
5317 
5318 	  stmt = gsi_stmt (gsi);
5319 
5320 	  if (gimple_code (stmt) != GIMPLE_LABEL)
5321 	    break;
5322 
5323 	  label = gimple_label_label (as_a <glabel *> (stmt));
5324 	  if (prev_stmt && DECL_NONLOCAL (label))
5325 	    {
5326 	      error ("nonlocal label ");
5327 	      print_generic_expr (stderr, label, 0);
5328 	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
5329 		       bb->index);
5330 	      err = 1;
5331 	    }
5332 
5333 	  if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5334 	    {
5335 	      error ("EH landing pad label ");
5336 	      print_generic_expr (stderr, label, 0);
5337 	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
5338 		       bb->index);
5339 	      err = 1;
5340 	    }
5341 
5342 	  if (label_to_block (label) != bb)
5343 	    {
5344 	      error ("label ");
5345 	      print_generic_expr (stderr, label, 0);
5346 	      fprintf (stderr, " to block does not match in bb %d",
5347 		       bb->index);
5348 	      err = 1;
5349 	    }
5350 
5351 	  if (decl_function_context (label) != current_function_decl)
5352 	    {
5353 	      error ("label ");
5354 	      print_generic_expr (stderr, label, 0);
5355 	      fprintf (stderr, " has incorrect context in bb %d",
5356 		       bb->index);
5357 	      err = 1;
5358 	    }
5359 	}
5360 
5361       /* Verify that the body of basic block BB is free of control flow.  */
5362       for (; !gsi_end_p (gsi); gsi_next (&gsi))
5363 	{
5364 	  gimple *stmt = gsi_stmt (gsi);
5365 
5366 	  if (found_ctrl_stmt)
5367 	    {
5368 	      error ("control flow in the middle of basic block %d",
5369 		     bb->index);
5370 	      err = 1;
5371 	    }
5372 
5373 	  if (stmt_ends_bb_p (stmt))
5374 	    found_ctrl_stmt = true;
5375 
5376 	  if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5377 	    {
5378 	      error ("label ");
5379 	      print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
5380 	      fprintf (stderr, " in the middle of basic block %d", bb->index);
5381 	      err = 1;
5382 	    }
5383 	}
5384 
5385       gsi = gsi_last_bb (bb);
5386       if (gsi_end_p (gsi))
5387 	continue;
5388 
5389       stmt = gsi_stmt (gsi);
5390 
5391       if (gimple_code (stmt) == GIMPLE_LABEL)
5392 	continue;
5393 
5394       err |= verify_eh_edges (stmt);
5395 
5396       if (is_ctrl_stmt (stmt))
5397 	{
5398 	  FOR_EACH_EDGE (e, ei, bb->succs)
5399 	    if (e->flags & EDGE_FALLTHRU)
5400 	      {
5401 		error ("fallthru edge after a control statement in bb %d",
5402 		       bb->index);
5403 		err = 1;
5404 	      }
5405 	}
5406 
5407       if (gimple_code (stmt) != GIMPLE_COND)
5408 	{
5409 	  /* Verify that there are no edges with EDGE_TRUE_VALUE or
5410 	     EDGE_FALSE_VALUE set after anything other than a GIMPLE_COND.  */
5411 	  FOR_EACH_EDGE (e, ei, bb->succs)
5412 	    if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5413 	      {
5414 		error ("true/false edge after a non-GIMPLE_COND in bb %d",
5415 		       bb->index);
5416 		err = 1;
5417 	      }
5418 	}
5419 
5420       switch (gimple_code (stmt))
5421 	{
5422 	case GIMPLE_COND:
5423 	  {
5424 	    edge true_edge;
5425 	    edge false_edge;
5426 
5427 	    extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5428 
5429 	    if (!true_edge
5430 		|| !false_edge
5431 		|| !(true_edge->flags & EDGE_TRUE_VALUE)
5432 		|| !(false_edge->flags & EDGE_FALSE_VALUE)
5433 		|| (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5434 		|| (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5435 		|| EDGE_COUNT (bb->succs) >= 3)
5436 	      {
5437 		error ("wrong outgoing edge flags at end of bb %d",
5438 		       bb->index);
5439 		err = 1;
5440 	      }
5441 	  }
5442 	  break;
5443 
5444 	case GIMPLE_GOTO:
5445 	  if (simple_goto_p (stmt))
5446 	    {
5447 	      error ("explicit goto at end of bb %d", bb->index);
5448 	      err = 1;
5449 	    }
5450 	  else
5451 	    {
5452 	      /* FIXME.  We should double check that the labels in the
5453 		 destination blocks have their address taken.  */
5454 	      FOR_EACH_EDGE (e, ei, bb->succs)
5455 		if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5456 				 | EDGE_FALSE_VALUE))
5457 		    || !(e->flags & EDGE_ABNORMAL))
5458 		  {
5459 		    error ("wrong outgoing edge flags at end of bb %d",
5460 			   bb->index);
5461 		    err = 1;
5462 		  }
5463 	    }
5464 	  break;
5465 
5466 	case GIMPLE_CALL:
5467 	  if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5468 	    break;
5469 	  /* fallthru */
5470 	case GIMPLE_RETURN:
5471 	  if (!single_succ_p (bb)
5472 	      || (single_succ_edge (bb)->flags
5473 		  & (EDGE_FALLTHRU | EDGE_ABNORMAL
5474 		     | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5475 	    {
5476 	      error ("wrong outgoing edge flags at end of bb %d", bb->index);
5477 	      err = 1;
5478 	    }
5479 	  if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5480 	    {
5481 	      error ("return edge does not point to exit in bb %d",
5482 		     bb->index);
5483 	      err = 1;
5484 	    }
5485 	  break;
5486 
5487 	case GIMPLE_SWITCH:
5488 	  {
5489 	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
5490 	    tree prev;
5491 	    edge e;
5492 	    size_t i, n;
5493 
5494 	    n = gimple_switch_num_labels (switch_stmt);
5495 
5496 	    /* Mark all the destination basic blocks.  */
5497 	    for (i = 0; i < n; ++i)
5498 	      {
5499 		tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5500 		basic_block label_bb = label_to_block (lab);
5501 		gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5502 		label_bb->aux = (void *)1;
5503 	      }
5504 
5505 	    /* Verify that the case labels are sorted.  */
5506 	    prev = gimple_switch_label (switch_stmt, 0);
5507 	    for (i = 1; i < n; ++i)
5508 	      {
5509 		tree c = gimple_switch_label (switch_stmt, i);
5510 		if (!CASE_LOW (c))
5511 		  {
5512 		    error ("found default case not at the start of "
5513 			   "case vector");
5514 		    err = 1;
5515 		    continue;
5516 		  }
5517 		if (CASE_LOW (prev)
5518 		    && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5519 		  {
5520 		    error ("case labels not sorted: ");
5521 		    print_generic_expr (stderr, prev, 0);
5522 		    fprintf (stderr, " is greater than ");
5523 		    print_generic_expr (stderr, c, 0);
5524 		    fprintf (stderr, " but comes before it.\n");
5525 		    err = 1;
5526 		  }
5527 		prev = c;
5528 	      }
5529 	    /* VRP will remove the default case if it can prove it will
5530 	       never be executed.  So do not verify there always exists
5531 	       a default case here.  */
5532 
5533 	    FOR_EACH_EDGE (e, ei, bb->succs)
5534 	      {
5535 		if (!e->dest->aux)
5536 		  {
5537 		    error ("extra outgoing edge %d->%d",
5538 			   bb->index, e->dest->index);
5539 		    err = 1;
5540 		  }
5541 
5542 		e->dest->aux = (void *)2;
5543 		if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5544 				 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5545 		  {
5546 		    error ("wrong outgoing edge flags at end of bb %d",
5547 			   bb->index);
5548 		    err = 1;
5549 		  }
5550 	      }
5551 
5552 	    /* Check that we have all of them.  */
5553 	    for (i = 0; i < n; ++i)
5554 	      {
5555 		tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5556 		basic_block label_bb = label_to_block (lab);
5557 
5558 		if (label_bb->aux != (void *)2)
5559 		  {
5560 		    error ("missing edge %i->%i", bb->index, label_bb->index);
5561 		    err = 1;
5562 		  }
5563 	      }
5564 
5565 	    FOR_EACH_EDGE (e, ei, bb->succs)
5566 	      e->dest->aux = (void *)0;
5567 	  }
5568 	  break;
5569 
5570 	case GIMPLE_EH_DISPATCH:
5571 	  err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5572 	  break;
5573 
5574 	default:
5575 	  break;
5576 	}
5577     }
5578 
5579   if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5580     verify_dominators (CDI_DOMINATORS);
5581 
5582   return err;
5583 }
5584 
5585 
5586 /* Updates phi nodes after creating a forwarder block joined
5587    by edge FALLTHRU.  */
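
/* For instance (block and SSA names purely illustrative): suppose BB
   originally had

     x_1 = PHI <x_2 (A), x_3 (B)>

   and a forwarder DUMMY was split off, keeping the PHI node and the
   predecessor A, while the edge from B was redirected straight to BB
   with its argument stored as a pending statement.  Afterwards DUMMY
   computes

     x_5 = PHI <x_2 (A)>

   and BB is given the replacement node

     x_1 = PHI <x_5 (FALLTHRU), x_3 (B)>

   where B's argument is restored by the flush_pending_stmts calls
   below.  */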
5588 
5589 static void
5590 gimple_make_forwarder_block (edge fallthru)
5591 {
5592   edge e;
5593   edge_iterator ei;
5594   basic_block dummy, bb;
5595   tree var;
5596   gphi_iterator gsi;
5597 
5598   dummy = fallthru->src;
5599   bb = fallthru->dest;
5600 
5601   if (single_pred_p (bb))
5602     return;
5603 
5604   /* If we redirected a branch we must create new PHI nodes at the
5605      start of BB.  */
5606   for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5607     {
5608       gphi *phi, *new_phi;
5609 
5610       phi = gsi.phi ();
5611       var = gimple_phi_result (phi);
5612       new_phi = create_phi_node (var, bb);
5613       gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5614       add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5615 		   UNKNOWN_LOCATION);
5616     }
5617 
5618   /* Add the arguments we have stored on edges.  */
5619   FOR_EACH_EDGE (e, ei, bb->preds)
5620     {
5621       if (e == fallthru)
5622 	continue;
5623 
5624       flush_pending_stmts (e);
5625     }
5626 }
5627 
5628 
5629 /* Return a non-special label at the head of basic block BB.
5630    Create one if it doesn't exist.  */
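
/* A typical use (sketched here with hypothetical variables) is fetching
   a jump target while retargeting an edge, as the GIMPLE_SWITCH handling
   in gimple_redirect_edge_and_branch below does:

     tree label = gimple_block_label (dest);
     CASE_LABEL (elt) = label;  */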
5631 
5632 tree
5633 gimple_block_label (basic_block bb)
5634 {
5635   gimple_stmt_iterator i, s = gsi_start_bb (bb);
5636   bool first = true;
5637   tree label;
5638   glabel *stmt;
5639 
5640   for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5641     {
5642       stmt = dyn_cast <glabel *> (gsi_stmt (i));
5643       if (!stmt)
5644 	break;
5645       label = gimple_label_label (stmt);
5646       if (!DECL_NONLOCAL (label))
5647 	{
5648 	  if (!first)
5649 	    gsi_move_before (&i, &s);
5650 	  return label;
5651 	}
5652     }
5653 
5654   label = create_artificial_label (UNKNOWN_LOCATION);
5655   stmt = gimple_build_label (label);
5656   gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5657   return label;
5658 }
5659 
5660 
5661 /* Attempt to perform edge redirection by replacing a possibly complex
5662    jump instruction by a goto or by removing the jump completely.
5663    This can apply only if all edges now point to the same block.  The
5664    parameters and return values are equivalent to
5665    redirect_edge_and_branch.  */
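
/* As an example (condition hypothetical), if SRC ends in

     if (a_1 > 0) goto <L1>; else goto <L2>;

   and both outgoing edges have come to point at TARGET, the GIMPLE_COND
   is removed and E is turned into a plain fallthru edge to TARGET.  */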
5666 
5667 static edge
5668 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5669 {
5670   basic_block src = e->src;
5671   gimple_stmt_iterator i;
5672   gimple *stmt;
5673 
5674   /* We can replace or remove a complex jump only when we have exactly
5675      two edges.  */
5676   if (EDGE_COUNT (src->succs) != 2
5677       /* Verify that all targets will be TARGET.  Specifically, the
5678 	 edge that is not E must also go to TARGET.  */
5679       || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5680     return NULL;
5681 
5682   i = gsi_last_bb (src);
5683   if (gsi_end_p (i))
5684     return NULL;
5685 
5686   stmt = gsi_stmt (i);
5687 
5688   if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5689     {
5690       gsi_remove (&i, true);
5691       e = ssa_redirect_edge (e, target);
5692       e->flags = EDGE_FALLTHRU;
5693       return e;
5694     }
5695 
5696   return NULL;
5697 }
5698 
5699 
5700 /* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
5701    edge representing the redirected branch.  */
5702 
5703 static edge
5704 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5705 {
5706   basic_block bb = e->src;
5707   gimple_stmt_iterator gsi;
5708   edge ret;
5709   gimple *stmt;
5710 
5711   if (e->flags & EDGE_ABNORMAL)
5712     return NULL;
5713 
5714   if (e->dest == dest)
5715     return NULL;
5716 
5717   if (e->flags & EDGE_EH)
5718     return redirect_eh_edge (e, dest);
5719 
5720   if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5721     {
5722       ret = gimple_try_redirect_by_replacing_jump (e, dest);
5723       if (ret)
5724 	return ret;
5725     }
5726 
5727   gsi = gsi_last_bb (bb);
5728   stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5729 
5730   switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5731     {
5732     case GIMPLE_COND:
5733       /* For COND_EXPR, we only need to redirect the edge.  */
5734       break;
5735 
5736     case GIMPLE_GOTO:
5737       /* No non-abnormal edges should lead from a non-simple goto, and
5738 	 simple ones should be represented implicitly.  */
5739       gcc_unreachable ();
5740 
5741     case GIMPLE_SWITCH:
5742       {
5743 	gswitch *switch_stmt = as_a <gswitch *> (stmt);
5744 	tree label = gimple_block_label (dest);
5745         tree cases = get_cases_for_edge (e, switch_stmt);
5746 
5747 	/* If we have a list of cases associated with E, then use it
5748 	   as it's a lot faster than walking the entire case vector.  */
5749 	if (cases)
5750 	  {
5751 	    edge e2 = find_edge (e->src, dest);
5752 	    tree last, first;
5753 
5754 	    first = cases;
5755 	    while (cases)
5756 	      {
5757 		last = cases;
5758 		CASE_LABEL (cases) = label;
5759 		cases = CASE_CHAIN (cases);
5760 	      }
5761 
5762 	    /* If there was already an edge in the CFG, then we need
5763 	       to move all the cases associated with E to E2.  */
5764 	    if (e2)
5765 	      {
5766 		tree cases2 = get_cases_for_edge (e2, switch_stmt);
5767 
5768 		CASE_CHAIN (last) = CASE_CHAIN (cases2);
5769 		CASE_CHAIN (cases2) = first;
5770 	      }
5771 	    bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5772 	  }
5773 	else
5774 	  {
5775 	    size_t i, n = gimple_switch_num_labels (switch_stmt);
5776 
5777 	    for (i = 0; i < n; i++)
5778 	      {
5779 		tree elt = gimple_switch_label (switch_stmt, i);
5780 		if (label_to_block (CASE_LABEL (elt)) == e->dest)
5781 		  CASE_LABEL (elt) = label;
5782 	      }
5783 	  }
5784       }
5785       break;
5786 
5787     case GIMPLE_ASM:
5788       {
5789 	gasm *asm_stmt = as_a <gasm *> (stmt);
5790 	int i, n = gimple_asm_nlabels (asm_stmt);
5791 	tree label = NULL;
5792 
5793 	for (i = 0; i < n; ++i)
5794 	  {
5795 	    tree cons = gimple_asm_label_op (asm_stmt, i);
5796 	    if (label_to_block (TREE_VALUE (cons)) == e->dest)
5797 	      {
5798 		if (!label)
5799 		  label = gimple_block_label (dest);
5800 		TREE_VALUE (cons) = label;
5801 	      }
5802 	  }
5803 
5804 	/* If we didn't find any label matching the former edge in the
5805 	   asm labels, we must be redirecting the fallthrough
5806 	   edge.  */
5807 	gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5808       }
5809       break;
5810 
5811     case GIMPLE_RETURN:
5812       gsi_remove (&gsi, true);
5813       e->flags |= EDGE_FALLTHRU;
5814       break;
5815 
5816     case GIMPLE_OMP_RETURN:
5817     case GIMPLE_OMP_CONTINUE:
5818     case GIMPLE_OMP_SECTIONS_SWITCH:
5819     case GIMPLE_OMP_FOR:
5820       /* The edges from OMP constructs can be simply redirected.  */
5821       break;
5822 
5823     case GIMPLE_EH_DISPATCH:
5824       if (!(e->flags & EDGE_FALLTHRU))
5825 	redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5826       break;
5827 
5828     case GIMPLE_TRANSACTION:
5829       if (e->flags & EDGE_TM_ABORT)
5830 	gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
5831 				           gimple_block_label (dest));
5832       else if (e->flags & EDGE_TM_UNINSTRUMENTED)
5833 	gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
5834 				             gimple_block_label (dest));
5835       else
5836 	gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
5837 				           gimple_block_label (dest));
5838       break;
5839 
5840     default:
5841       /* Otherwise it must be a fallthru edge, and we don't need to
5842 	 do anything besides redirecting it.  */
5843       gcc_assert (e->flags & EDGE_FALLTHRU);
5844       break;
5845     }
5846 
5847   /* Update/insert PHI nodes as necessary.  */
5848 
5849   /* Now update the edges in the CFG.  */
5850   e = ssa_redirect_edge (e, dest);
5851 
5852   return e;
5853 }
5854 
5855 /* Returns true if it is possible to remove edge E by redirecting
5856    it to the destination of the other edge from E->src.  */
5857 
5858 static bool
5859 gimple_can_remove_branch_p (const_edge e)
5860 {
5861   if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5862     return false;
5863 
5864   return true;
5865 }
5866 
5867 /* Simple wrapper, as we can always redirect fallthru edges.  */
5868 
5869 static basic_block
5870 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5871 {
5872   e = gimple_redirect_edge_and_branch (e, dest);
5873   gcc_assert (e);
5874 
5875   return NULL;
5876 }
5877 
5878 
5879 /* Splits basic block BB after statement STMT (but at least after the
5880    labels).  If STMT is NULL, BB is split just after the labels.  */
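
/* For instance (statements hypothetical), splitting

     <bb 2>: a = b; c = d; return c;

   after `a = b' leaves `a = b' in BB and moves `c = d; return c' into
   the new block that is returned.  */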
5881 
5882 static basic_block
5883 gimple_split_block (basic_block bb, void *stmt)
5884 {
5885   gimple_stmt_iterator gsi;
5886   gimple_stmt_iterator gsi_tgt;
5887   gimple_seq list;
5888   basic_block new_bb;
5889   edge e;
5890   edge_iterator ei;
5891 
5892   new_bb = create_empty_bb (bb);
5893 
5894   /* Redirect the outgoing edges.  */
5895   new_bb->succs = bb->succs;
5896   bb->succs = NULL;
5897   FOR_EACH_EDGE (e, ei, new_bb->succs)
5898     e->src = new_bb;
5899 
5900   /* Get a stmt iterator pointing to the first stmt to move.  */
5901   if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
5902     gsi = gsi_after_labels (bb);
5903   else
5904     {
5905       gsi = gsi_for_stmt ((gimple *) stmt);
5906       gsi_next (&gsi);
5907     }
5908 
5909   /* Move everything from GSI to the new basic block.  */
5910   if (gsi_end_p (gsi))
5911     return new_bb;
5912 
5913   /* Split the statement list - avoid re-creating new containers as this
5914      brings ugly quadratic memory consumption in the inliner.
5915      (We are still quadratic since we need to update stmt BB pointers,
5916      sadly.)  */
5917   gsi_split_seq_before (&gsi, &list);
5918   set_bb_seq (new_bb, list);
5919   for (gsi_tgt = gsi_start (list);
5920        !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5921     gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5922 
5923   return new_bb;
5924 }
5925 
5926 
5927 /* Moves basic block BB after block AFTER.  */
5928 
5929 static bool
5930 gimple_move_block_after (basic_block bb, basic_block after)
5931 {
5932   if (bb->prev_bb == after)
5933     return true;
5934 
5935   unlink_block (bb);
5936   link_block (bb, after);
5937 
5938   return true;
5939 }
5940 
5941 
5942 /* Return TRUE if block BB has no executable statements, otherwise return
5943    FALSE.  */
5944 
5945 static bool
5946 gimple_empty_block_p (basic_block bb)
5947 {
5948   /* BB must have no executable statements.  */
5949   gimple_stmt_iterator gsi = gsi_after_labels (bb);
5950   if (phi_nodes (bb))
5951     return false;
5952   if (gsi_end_p (gsi))
5953     return true;
5954   if (is_gimple_debug (gsi_stmt (gsi)))
5955     gsi_next_nondebug (&gsi);
5956   return gsi_end_p (gsi);
5957 }
5958 
5959 
5960 /* Split a basic block if it ends with a conditional branch and if the
5961    other part of the block is not empty.  */
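
/* E.g. a block ending in `a_1 = x_2 + 1; if (a_1 > 0) ...' (statements
   hypothetical) is split so that the GIMPLE_COND is left alone in the
   returned block, with `a_1 = x_2 + 1' staying behind in BB.  */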
5962 
5963 static basic_block
5964 gimple_split_block_before_cond_jump (basic_block bb)
5965 {
5966   gimple *last, *split_point;
5967   gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5968   if (gsi_end_p (gsi))
5969     return NULL;
5970   last = gsi_stmt (gsi);
5971   if (gimple_code (last) != GIMPLE_COND
5972       && gimple_code (last) != GIMPLE_SWITCH)
5973     return NULL;
5974   gsi_prev (&gsi);
5975   split_point = gsi_stmt (gsi);
5976   return split_block (bb, split_point)->dest;
5977 }
5978 
5979 
5980 /* Return true if basic_block can be duplicated.  */
5981 
5982 static bool
5983 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5984 {
5985   return true;
5986 }
5987 
5988 /* Create a duplicate of the basic block BB.  NOTE: This does not
5989    preserve SSA form.  */
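
/* Callers are expected to repair the SSA web themselves; a rough sketch
   of the usual sequence (simplified from the region-copying code in
   this file) is

     basic_block copy = duplicate_block (bb, e, NULL);
     add_phi_args_after_copy_bb (copy);
     update_ssa (TODO_update_ssa);  */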
5990 
5991 static basic_block
5992 gimple_duplicate_bb (basic_block bb)
5993 {
5994   basic_block new_bb;
5995   gimple_stmt_iterator gsi_tgt;
5996 
5997   new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5998 
5999   /* Copy the PHI nodes.  We ignore PHI node arguments here because
6000      the incoming edges have not been set up yet.  */
6001   for (gphi_iterator gpi = gsi_start_phis (bb);
6002        !gsi_end_p (gpi);
6003        gsi_next (&gpi))
6004     {
6005       gphi *phi, *copy;
6006       phi = gpi.phi ();
6007       copy = create_phi_node (NULL_TREE, new_bb);
6008       create_new_def_for (gimple_phi_result (phi), copy,
6009 			  gimple_phi_result_ptr (copy));
6010       gimple_set_uid (copy, gimple_uid (phi));
6011     }
6012 
6013   gsi_tgt = gsi_start_bb (new_bb);
6014   for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6015        !gsi_end_p (gsi);
6016        gsi_next (&gsi))
6017     {
6018       def_operand_p def_p;
6019       ssa_op_iter op_iter;
6020       tree lhs;
6021       gimple *stmt, *copy;
6022 
6023       stmt = gsi_stmt (gsi);
6024       if (gimple_code (stmt) == GIMPLE_LABEL)
6025 	continue;
6026 
6027       /* Don't duplicate label debug stmts.  */
6028       if (gimple_debug_bind_p (stmt)
6029 	  && TREE_CODE (gimple_debug_bind_get_var (stmt))
6030 	     == LABEL_DECL)
6031 	continue;
6032 
6033       /* Create a new copy of STMT and duplicate STMT's virtual
6034 	 operands.  */
6035       copy = gimple_copy (stmt);
6036       gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6037 
6038       maybe_duplicate_eh_stmt (copy, stmt);
6039       gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6040 
6041       /* When copying around a stmt writing into a local non-user
6042 	 aggregate, make sure it won't share stack slot with other
6043 	 vars.  */
6044       lhs = gimple_get_lhs (stmt);
6045       if (lhs && TREE_CODE (lhs) != SSA_NAME)
6046 	{
6047 	  tree base = get_base_address (lhs);
6048 	  if (base
6049 	      && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6050 	      && DECL_IGNORED_P (base)
6051 	      && !TREE_STATIC (base)
6052 	      && !DECL_EXTERNAL (base)
6053 	      && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6054 	    DECL_NONSHAREABLE (base) = 1;
6055 	}
6056 
6057       /* Create new names for all the definitions created by COPY and
6058 	 add replacement mappings for each new name.  */
6059       FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6060 	create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6061     }
6062 
6063   return new_bb;
6064 }
6065 
6066 /* Adds phi node arguments for edge E_COPY after basic block duplication.  */
6067 
6068 static void
6069 add_phi_args_after_copy_edge (edge e_copy)
6070 {
6071   basic_block bb, bb_copy = e_copy->src, dest;
6072   edge e;
6073   edge_iterator ei;
6074   gphi *phi, *phi_copy;
6075   tree def;
6076   gphi_iterator psi, psi_copy;
6077 
6078   if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6079     return;
6080 
6081   bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6082 
6083   if (e_copy->dest->flags & BB_DUPLICATED)
6084     dest = get_bb_original (e_copy->dest);
6085   else
6086     dest = e_copy->dest;
6087 
6088   e = find_edge (bb, dest);
6089   if (!e)
6090     {
6091       /* During loop unrolling the target of the latch edge is copied.
6092 	 In this case we are not looking for the edge to DEST, but for
6093 	 the edge to the duplicated block whose original was DEST.  */
6094       FOR_EACH_EDGE (e, ei, bb->succs)
6095 	{
6096 	  if ((e->dest->flags & BB_DUPLICATED)
6097 	      && get_bb_original (e->dest) == dest)
6098 	    break;
6099 	}
6100 
6101       gcc_assert (e != NULL);
6102     }
6103 
6104   for (psi = gsi_start_phis (e->dest),
6105        psi_copy = gsi_start_phis (e_copy->dest);
6106        !gsi_end_p (psi);
6107        gsi_next (&psi), gsi_next (&psi_copy))
6108     {
6109       phi = psi.phi ();
6110       phi_copy = psi_copy.phi ();
6111       def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6112       add_phi_arg (phi_copy, def, e_copy,
6113 		   gimple_phi_arg_location_from_edge (phi, e));
6114     }
6115 }
6116 
6117 
6118 /* Basic block BB_COPY was created by code duplication.  Add phi node
6119    arguments for edges going out of BB_COPY.  The blocks that were
6120    duplicated have BB_DUPLICATED set.  */
6121 
6122 void
6123 add_phi_args_after_copy_bb (basic_block bb_copy)
6124 {
6125   edge e_copy;
6126   edge_iterator ei;
6127 
6128   FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6129     {
6130       add_phi_args_after_copy_edge (e_copy);
6131     }
6132 }
6133 
6134 /* Blocks in REGION_COPY array of length N_REGION were created by
6135    duplication of basic blocks.  Add phi node arguments for edges
6136    going from these blocks.  If E_COPY is not NULL, also add
6137    phi node arguments for its destination.  */
6138 
6139 void
6140 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6141 			 edge e_copy)
6142 {
6143   unsigned i;
6144 
6145   for (i = 0; i < n_region; i++)
6146     region_copy[i]->flags |= BB_DUPLICATED;
6147 
6148   for (i = 0; i < n_region; i++)
6149     add_phi_args_after_copy_bb (region_copy[i]);
6150   if (e_copy)
6151     add_phi_args_after_copy_edge (e_copy);
6152 
6153   for (i = 0; i < n_region; i++)
6154     region_copy[i]->flags &= ~BB_DUPLICATED;
6155 }
6156 
6157 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6158    important exit edge EXIT.  By important we mean that no SSA name defined
6159    inside the region is live over the other exit edges of the region.  All entry
6160    edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
6161    to the duplicate of the region.  Dominance and loop information is
6162    updated if UPDATE_DOMINANCE is true, but not the SSA web.  If
6163    UPDATE_DOMINANCE is false then we assume that the caller will update the
6164    dominance information after calling this function.  The new basic
6165    blocks are stored to REGION_COPY in the same order as they appear in REGION,
6166    provided that REGION_COPY is not NULL.
6167    The function returns false if it is unable to copy the region,
6168    true otherwise.  */
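
/* In the loop-header-copying use, ENTRY is the preheader edge and EXIT
   the edge leaving the header's exit test, so that, roughly (source
   form hypothetical),

     while (cond) body;

   becomes

     if (cond) do body; while (cond);

   i.e. the copied header acts as a guard in front of a do-while loop.  */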
6169 
6170 bool
6171 gimple_duplicate_sese_region (edge entry, edge exit,
6172 			    basic_block *region, unsigned n_region,
6173 			    basic_block *region_copy,
6174 			    bool update_dominance)
6175 {
6176   unsigned i;
6177   bool free_region_copy = false, copying_header = false;
6178   struct loop *loop = entry->dest->loop_father;
6179   edge exit_copy;
6180   vec<basic_block> doms;
6181   edge redirected;
6182   int total_freq = 0, entry_freq = 0;
6183   gcov_type total_count = 0, entry_count = 0;
6184 
6185   if (!can_copy_bbs_p (region, n_region))
6186     return false;
6187 
6188   /* Some sanity checking.  Note that we do not check for all possible
6189      misuses of the function.  I.e. if you ask to copy something weird,
6190      it will work, but the state of structures probably will not be
6191      correct.  */
6192   for (i = 0; i < n_region; i++)
6193     {
6194       /* We do not handle subloops, i.e. all the blocks must belong to the
6195 	 same loop.  */
6196       if (region[i]->loop_father != loop)
6197 	return false;
6198 
6199       if (region[i] != entry->dest
6200 	  && region[i] == loop->header)
6201 	return false;
6202     }
6203 
6204   /* In case the function is used for loop header copying (which is the primary
6205      use), ensure that EXIT and its copy will be the new latch and entry edges.  */
6206   if (loop->header == entry->dest)
6207     {
6208       copying_header = true;
6209 
6210       if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6211 	return false;
6212 
6213       for (i = 0; i < n_region; i++)
6214 	if (region[i] != exit->src
6215 	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6216 	  return false;
6217     }
6218 
6219   initialize_original_copy_tables ();
6220 
6221   if (copying_header)
6222     set_loop_copy (loop, loop_outer (loop));
6223   else
6224     set_loop_copy (loop, loop);
6225 
6226   if (!region_copy)
6227     {
6228       region_copy = XNEWVEC (basic_block, n_region);
6229       free_region_copy = true;
6230     }
6231 
6232   /* Record blocks outside the region that are dominated by something
6233      inside.  */
6234   if (update_dominance)
6235     {
6236       doms.create (0);
6237       doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6238     }
6239 
6240   if (entry->dest->count)
6241     {
6242       total_count = entry->dest->count;
6243       entry_count = entry->count;
6244       /* Fix up corner cases, to avoid division by zero or creation of negative
6245 	 frequencies.  */
6246       if (entry_count > total_count)
6247 	entry_count = total_count;
6248     }
6249   else
6250     {
6251       total_freq = entry->dest->frequency;
6252       entry_freq = EDGE_FREQUENCY (entry);
6253       /* Fix up corner cases, to avoid division by zero or creation of negative
6254 	 frequencies.  */
6255       if (total_freq == 0)
6256 	total_freq = 1;
6257       else if (entry_freq > total_freq)
6258 	entry_freq = total_freq;
6259     }
6260 
6261   copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6262 	    split_edge_bb_loc (entry), update_dominance);
6263   if (total_count)
6264     {
6265       scale_bbs_frequencies_gcov_type (region, n_region,
6266 				       total_count - entry_count,
6267 				       total_count);
6268       scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6269 				       total_count);
6270     }
6271   else
6272     {
6273       scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6274 				 total_freq);
6275       scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6276     }
6277 
6278   if (copying_header)
6279     {
6280       loop->header = exit->dest;
6281       loop->latch = exit->src;
6282     }
6283 
6284   /* Redirect the entry and add the phi node arguments.  */
6285   redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6286   gcc_assert (redirected != NULL);
6287   flush_pending_stmts (entry);
6288 
6289   /* Concerning updating of dominators:  We must recount dominators
6290      for the entry block and its copy.  Anything that is outside of the
6291      region, but was dominated by something inside needs recounting as
6292      well.  */
6293   if (update_dominance)
6294     {
6295       set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6296       doms.safe_push (get_bb_original (entry->dest));
6297       iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6298       doms.release ();
6299     }
6300 
6301   /* Add the other PHI node arguments.  */
6302   add_phi_args_after_copy (region_copy, n_region, NULL);
6303 
6304   if (free_region_copy)
6305     free (region_copy);
6306 
6307   free_original_copy_tables ();
6308   return true;
6309 }
6310 
6311 /* Checks if BB is part of the region defined by N_REGION BBS.  */
6312 static bool
6313 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6314 {
6315   unsigned int n;
6316 
6317   for (n = 0; n < n_region; n++)
6318     if (bb == bbs[n])
6319       return true;
6322   return false;
6323 }
6324 
6325 /* Duplicates REGION consisting of N_REGION blocks.  The new blocks
6326    are stored to REGION_COPY in the same order in which they appear
6327    in REGION, if REGION_COPY is not NULL.  ENTRY is the entry to
6328    the region, EXIT an exit from it.  The condition guarding EXIT
6329    is moved to ENTRY.  Returns true if duplication succeeds, false
6330    otherwise.
6331 
6332    For example,
6333 
6334    some_code;
6335    if (cond)
6336      A;
6337    else
6338      B;
6339 
6340    is transformed to
6341 
6342    if (cond)
6343      {
6344        some_code;
6345        A;
6346      }
6347    else
6348      {
6349        some_code;
6350        B;
6351      }
6352 */
6353 
6354 bool
6355 gimple_duplicate_sese_tail (edge entry, edge exit,
6356 			    basic_block *region, unsigned n_region,
6357 			    basic_block *region_copy)
6358 {
6359   unsigned i;
6360   bool free_region_copy = false;
6361   struct loop *loop = exit->dest->loop_father;
6362   struct loop *orig_loop = entry->dest->loop_father;
6363   basic_block switch_bb, entry_bb, nentry_bb;
6364   vec<basic_block> doms;
6365   int total_freq = 0, exit_freq = 0;
6366   gcov_type total_count = 0, exit_count = 0;
6367   edge exits[2], nexits[2], e;
6368   gimple_stmt_iterator gsi;
6369   gimple *cond_stmt;
6370   edge sorig, snew;
6371   basic_block exit_bb;
6372   gphi_iterator psi;
6373   gphi *phi;
6374   tree def;
6375   struct loop *target, *aloop, *cloop;
6376 
6377   gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6378   exits[0] = exit;
6379   exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6380 
6381   if (!can_copy_bbs_p (region, n_region))
6382     return false;
6383 
6384   initialize_original_copy_tables ();
6385   set_loop_copy (orig_loop, loop);
6386 
6387   target = loop;
6388   for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6389     {
6390       if (bb_part_of_region_p (aloop->header, region, n_region))
6391 	{
6392 	  cloop = duplicate_loop (aloop, target);
6393 	  duplicate_subloops (aloop, cloop);
6394 	}
6395     }
6396 
6397   if (!region_copy)
6398     {
6399       region_copy = XNEWVEC (basic_block, n_region);
6400       free_region_copy = true;
6401     }
6402 
6403   gcc_assert (!need_ssa_update_p (cfun));
6404 
6405   /* Record blocks outside the region that are dominated by something
6406      inside.  */
6407   doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6408 
6409   if (exit->src->count)
6410     {
6411       total_count = exit->src->count;
6412       exit_count = exit->count;
6413       /* Fix up corner cases, to avoid division by zero or creation of negative
6414 	 frequencies.  */
6415       if (exit_count > total_count)
6416 	exit_count = total_count;
6417     }
6418   else
6419     {
6420       total_freq = exit->src->frequency;
6421       exit_freq = EDGE_FREQUENCY (exit);
6422       /* Fix up corner cases, to avoid division by zero or creation of negative
6423 	 frequencies.  */
6424       if (total_freq == 0)
6425 	total_freq = 1;
6426       if (exit_freq > total_freq)
6427 	exit_freq = total_freq;
6428     }
6429 
6430   copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6431 	    split_edge_bb_loc (exit), true);
6432   if (total_count)
6433     {
6434       scale_bbs_frequencies_gcov_type (region, n_region,
6435 				       total_count - exit_count,
6436 				       total_count);
6437       scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6438 				       total_count);
6439     }
6440   else
6441     {
6442       scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6443 				 total_freq);
6444       scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6445     }
6446 
6447   /* Create the switch block, and put the exit condition into it.  */
6448   entry_bb = entry->dest;
6449   nentry_bb = get_bb_copy (entry_bb);
6450   if (!last_stmt (entry->src)
6451       || !stmt_ends_bb_p (last_stmt (entry->src)))
6452     switch_bb = entry->src;
6453   else
6454     switch_bb = split_edge (entry);
6455   set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6456 
6457   gsi = gsi_last_bb (switch_bb);
6458   cond_stmt = last_stmt (exit->src);
6459   gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6460   cond_stmt = gimple_copy (cond_stmt);
6461 
6462   gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6463 
6464   sorig = single_succ_edge (switch_bb);
6465   sorig->flags = exits[1]->flags;
6466   snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6467 
6468   /* Register the new edge from SWITCH_BB in loop exit lists.  */
6469   rescan_loop_exit (snew, true, false);
6470 
6471   /* Add the PHI node arguments.  */
6472   add_phi_args_after_copy (region_copy, n_region, snew);
6473 
6474   /* Get rid of now superfluous conditions and associated edges (and phi node
6475      arguments).  */
6476   exit_bb = exit->dest;
6477 
6478   e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6479   PENDING_STMT (e) = NULL;
6480 
6481   /* The latch of ORIG_LOOP was copied, and so was the backedge
6482      to the original header.  We redirect this backedge to EXIT_BB.  */
6483   for (i = 0; i < n_region; i++)
6484     if (get_bb_original (region_copy[i]) == orig_loop->latch)
6485       {
6486 	gcc_assert (single_succ_edge (region_copy[i]));
6487 	e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6488 	PENDING_STMT (e) = NULL;
6489 	for (psi = gsi_start_phis (exit_bb);
6490 	     !gsi_end_p (psi);
6491 	     gsi_next (&psi))
6492 	  {
6493 	    phi = psi.phi ();
6494 	    def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6495 	    add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6496 	  }
6497       }
6498   e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6499   PENDING_STMT (e) = NULL;
6500 
6501   /* Anything that is outside of the region, but was dominated by something
6502      inside, needs its dominance info updated.  */
6503   iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6504   doms.release ();
6505   /* Update the SSA web.  */
6506   update_ssa (TODO_update_ssa);
6507 
6508   if (free_region_copy)
6509     free (region_copy);
6510 
6511   free_original_copy_tables ();
6512   return true;
6513 }
6514 
6515 /* Add all the blocks dominated by ENTRY to the array BBS_P.  Stop
6516    adding blocks when the dominator traversal reaches EXIT.  This
6517    function silently assumes that ENTRY strictly dominates EXIT.  */
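
/* move_sese_region_to_fn below relies on this to collect the blocks of
   the region being outlined, in dominator order.  */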
6518 
6519 void
6520 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6521 			      vec<basic_block> *bbs_p)
6522 {
6523   basic_block son;
6524 
6525   for (son = first_dom_son (CDI_DOMINATORS, entry);
6526        son;
6527        son = next_dom_son (CDI_DOMINATORS, son))
6528     {
6529       bbs_p->safe_push (son);
6530       if (son != exit)
6531 	gather_blocks_in_sese_region (son, exit, bbs_p);
6532     }
6533 }
6534 
6535 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6536    The duplicates are recorded in VARS_MAP.  */
6537 
6538 static void
6539 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6540 			   tree to_context)
6541 {
6542   tree t = *tp, new_t;
6543   struct function *f = DECL_STRUCT_FUNCTION (to_context);
6544 
6545   if (DECL_CONTEXT (t) == to_context)
6546     return;
6547 
6548   bool existed;
6549   tree &loc = vars_map->get_or_insert (t, &existed);
6550 
6551   if (!existed)
6552     {
6553       if (SSA_VAR_P (t))
6554 	{
6555 	  new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6556 	  add_local_decl (f, new_t);
6557 	}
6558       else
6559 	{
6560 	  gcc_assert (TREE_CODE (t) == CONST_DECL);
6561 	  new_t = copy_node (t);
6562 	}
6563       DECL_CONTEXT (new_t) = to_context;
6564 
6565       loc = new_t;
6566     }
6567   else
6568     new_t = loc;
6569 
6570   *tp = new_t;
6571 }
6572 
6573 
6574 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6575    VARS_MAP maps old ssa names and var_decls to the new ones.  */
6576 
6577 static tree
6578 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6579 		  tree to_context)
6580 {
6581   tree new_name;
6582 
6583   gcc_assert (!virtual_operand_p (name));
6584 
6585   tree *loc = vars_map->get (name);
6586 
6587   if (!loc)
6588     {
6589       tree decl = SSA_NAME_VAR (name);
6590       if (decl)
6591 	{
6592 	  gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6593 	  replace_by_duplicate_decl (&decl, vars_map, to_context);
6594 	  new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6595 				       decl, SSA_NAME_DEF_STMT (name));
6596 	}
6597       else
6598 	new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6599 				     name, SSA_NAME_DEF_STMT (name));
6600 
6601       /* Now that we've used the def stmt to define new_name, make sure it
6602 	 doesn't define name anymore.  */
6603       SSA_NAME_DEF_STMT (name) = NULL;
6604 
6605       vars_map->put (name, new_name);
6606     }
6607   else
6608     new_name = *loc;
6609 
6610   return new_name;
6611 }
6612 
6613 struct move_stmt_d
6614 {
6615   tree orig_block;
6616   tree new_block;
6617   tree from_context;
6618   tree to_context;
6619   hash_map<tree, tree> *vars_map;
6620   htab_t new_label_map;
6621   hash_map<void *, void *> *eh_map;
6622   bool remap_decls_p;
6623 };
6624 
6625 /* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
6626    contained in *TP to the new block if it was previously ORIG_BLOCK, and
6627    change the DECL_CONTEXT of every local variable referenced in *TP.  */
6628 
6629 static tree
6630 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6631 {
6632   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6633   struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6634   tree t = *tp;
6635 
6636   if (EXPR_P (t))
6637     {
6638       tree block = TREE_BLOCK (t);
6639       if (block == NULL_TREE)
6640 	;
6641       else if (block == p->orig_block
6642 	       || p->orig_block == NULL_TREE)
6643 	{
6644 	  /* tree_node_can_be_shared says we can share invariant
6645 	     addresses but unshare_expr copies them anyway.  Make sure
6646 	     to unshare before adjusting the block in place - we do not
6647 	     always see a copy here.  */
6648 	  if (TREE_CODE (t) == ADDR_EXPR
6649 	      && is_gimple_min_invariant (t))
6650 	    *tp = t = unshare_expr (t);
6651 	  TREE_SET_BLOCK (t, p->new_block);
6652 	}
6653       else if (flag_checking)
6654 	{
6655 	  while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6656 	    block = BLOCK_SUPERCONTEXT (block);
6657 	  gcc_assert (block == p->orig_block);
6658 	}
6659     }
6660   else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6661     {
6662       if (TREE_CODE (t) == SSA_NAME)
6663 	*tp = replace_ssa_name (t, p->vars_map, p->to_context);
6664       else if (TREE_CODE (t) == PARM_DECL
6665 	       && gimple_in_ssa_p (cfun))
6666 	*tp = *(p->vars_map->get (t));
6667       else if (TREE_CODE (t) == LABEL_DECL)
6668 	{
6669 	  if (p->new_label_map)
6670 	    {
6671 	      struct tree_map in, *out;
6672 	      in.base.from = t;
6673 	      out = (struct tree_map *)
6674 		htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6675 	      if (out)
6676 		*tp = t = out->to;
6677 	    }
6678 
6679 	  /* For FORCED_LABELs we can end up with references from other
6680 	     functions if some SESE regions are outlined.  It is UB to
6681 	     jump in between them, but they could be used just for printing
6682 	     addresses etc.  In that case, DECL_CONTEXT on the label should
6683 	     be the function containing the glabel stmt with that LABEL_DECL,
6684 	     rather than whichever function a reference to the label was
6685 	     last seen in.  */
6686 	  if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
6687 	    DECL_CONTEXT (t) = p->to_context;
6688 	}
6689       else if (p->remap_decls_p)
6690 	{
6691 	  /* Replace T with its duplicate.  T should no longer appear in the
6692 	     parent function, so this looks wasteful; however, it may appear
6693 	     in referenced_vars, and more importantly, as virtual operands of
6694 	     statements, and in alias lists of other variables.  It would be
6695 	     quite difficult to expunge it from all those places.  ??? It might
6696 	     suffice to do this for addressable variables.  */
6697 	  if ((VAR_P (t) && !is_global_var (t))
6698 	      || TREE_CODE (t) == CONST_DECL)
6699 	    replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6700 	}
6701       *walk_subtrees = 0;
6702     }
6703   else if (TYPE_P (t))
6704     *walk_subtrees = 0;
6705 
6706   return NULL_TREE;
6707 }
6708 
6709 /* Helper for move_stmt_r.  Given an EH region number for the source
6710    function, map that to the duplicate EH region number in the dest.  */
6711 
6712 static int
6713 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6714 {
6715   eh_region old_r, new_r;
6716 
6717   old_r = get_eh_region_from_number (old_nr);
6718   new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6719 
6720   return new_r->index;
6721 }
6722 
6723 /* Similar, but operate on INTEGER_CSTs.  */
6724 
6725 static tree
6726 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6727 {
6728   int old_nr, new_nr;
6729 
6730   old_nr = tree_to_shwi (old_t_nr);
6731   new_nr = move_stmt_eh_region_nr (old_nr, p);
6732 
6733   return build_int_cst (integer_type_node, new_nr);
6734 }
6735 
6736 /* Like move_stmt_op, but for gimple statements.
6737 
6738    Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
6739    contained in the current statement in *GSI_P and change the
6740    DECL_CONTEXT of every local variable referenced in the current
6741    statement.  */
6742 
6743 static tree
6744 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6745 	     struct walk_stmt_info *wi)
6746 {
6747   struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6748   gimple *stmt = gsi_stmt (*gsi_p);
6749   tree block = gimple_block (stmt);
6750 
6751   if (block == p->orig_block
6752       || (p->orig_block == NULL_TREE
6753 	  && block != NULL_TREE))
6754     gimple_set_block (stmt, p->new_block);
6755 
6756   switch (gimple_code (stmt))
6757     {
6758     case GIMPLE_CALL:
6759       /* Remap the region numbers for __builtin_eh_{pointer,filter}.  */
6760       {
6761 	tree r, fndecl = gimple_call_fndecl (stmt);
6762 	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6763 	  switch (DECL_FUNCTION_CODE (fndecl))
6764 	    {
6765 	    case BUILT_IN_EH_COPY_VALUES:
6766 	      r = gimple_call_arg (stmt, 1);
6767 	      r = move_stmt_eh_region_tree_nr (r, p);
6768 	      gimple_call_set_arg (stmt, 1, r);
6769 	      /* FALLTHRU */
6770 
6771 	    case BUILT_IN_EH_POINTER:
6772 	    case BUILT_IN_EH_FILTER:
6773 	      r = gimple_call_arg (stmt, 0);
6774 	      r = move_stmt_eh_region_tree_nr (r, p);
6775 	      gimple_call_set_arg (stmt, 0, r);
6776 	      break;
6777 
6778 	    default:
6779 	      break;
6780 	    }
6781       }
6782       break;
6783 
6784     case GIMPLE_RESX:
6785       {
6786 	gresx *resx_stmt = as_a <gresx *> (stmt);
6787 	int r = gimple_resx_region (resx_stmt);
6788 	r = move_stmt_eh_region_nr (r, p);
6789 	gimple_resx_set_region (resx_stmt, r);
6790       }
6791       break;
6792 
6793     case GIMPLE_EH_DISPATCH:
6794       {
6795 	geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6796 	int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6797 	r = move_stmt_eh_region_nr (r, p);
6798 	gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6799       }
6800       break;
6801 
6802     case GIMPLE_OMP_RETURN:
6803     case GIMPLE_OMP_CONTINUE:
6804       break;
6805 
6806     case GIMPLE_LABEL:
6807       {
6808 	/* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
6809 	   so that such labels can be referenced from other regions.
6810 	   Make sure to update it when seeing a GIMPLE_LABEL though,
6811 	   that is the owner of the label.  */
6812 	walk_gimple_op (stmt, move_stmt_op, wi);
6813 	*handled_ops_p = true;
6814 	tree label = gimple_label_label (as_a <glabel *> (stmt));
6815 	if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
6816 	  DECL_CONTEXT (label) = p->to_context;
6817       }
6818       break;
6819 
6820     default:
6821       if (is_gimple_omp (stmt))
6822 	{
6823 	  /* Do not remap variables inside OMP directives.  Variables
6824 	     referenced in clauses and directive header belong to the
6825 	     parent function and should not be moved into the child
6826 	     function.  */
6827 	  bool save_remap_decls_p = p->remap_decls_p;
6828 	  p->remap_decls_p = false;
6829 	  *handled_ops_p = true;
6830 
6831 	  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6832 			       move_stmt_op, wi);
6833 
6834 	  p->remap_decls_p = save_remap_decls_p;
6835 	}
6836       break;
6837     }
6838 
6839   return NULL_TREE;
6840 }
6841 
6842 /* Move basic block BB from function CFUN to function DEST_FN.  The
6843    block is moved out of the original linked list and placed after
6844    block AFTER in the new list.  Also, the block is removed from the
6845    original array of blocks and placed in DEST_FN's array of blocks.
6846    If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6847    updated to reflect the moved edges.
6848 
6849    The local variables are remapped to new instances, VARS_MAP is used
6850    to record the mapping.  */
6851 
6852 static void
6853 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6854 		  basic_block after, bool update_edge_count_p,
6855 		  struct move_stmt_d *d)
6856 {
6857   struct control_flow_graph *cfg;
6858   edge_iterator ei;
6859   edge e;
6860   gimple_stmt_iterator si;
6861   unsigned old_len, new_len;
6862 
6863   /* Remove BB from dominance structures.  */
6864   delete_from_dominance_info (CDI_DOMINATORS, bb);
6865 
6866   /* Move BB from its current loop to the copy in the new function.  */
6867   if (current_loops)
6868     {
6869       struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6870       if (new_loop)
6871 	bb->loop_father = new_loop;
6872     }
6873 
6874   /* Link BB to the new linked list.  */
6875   move_block_after (bb, after);
6876 
6877   /* Update the edge count in the corresponding flowgraphs.  */
6878   if (update_edge_count_p)
6879     FOR_EACH_EDGE (e, ei, bb->succs)
6880       {
6881 	cfun->cfg->x_n_edges--;
6882 	dest_cfun->cfg->x_n_edges++;
6883       }
6884 
6885   /* Remove BB from the original basic block array.  */
6886   (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6887   cfun->cfg->x_n_basic_blocks--;
6888 
6889   /* Grow DEST_CFUN's basic block array if needed.  */
6890   cfg = dest_cfun->cfg;
6891   cfg->x_n_basic_blocks++;
6892   if (bb->index >= cfg->x_last_basic_block)
6893     cfg->x_last_basic_block = bb->index + 1;
6894 
6895   old_len = vec_safe_length (cfg->x_basic_block_info);
6896   if ((unsigned) cfg->x_last_basic_block >= old_len)
6897     {
6898       new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6899       vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6900     }
6901 
6902   (*cfg->x_basic_block_info)[bb->index] = bb;
6903 
6904   /* Remap the variables in phi nodes.  */
6905   for (gphi_iterator psi = gsi_start_phis (bb);
6906        !gsi_end_p (psi); )
6907     {
6908       gphi *phi = psi.phi ();
6909       use_operand_p use;
6910       tree op = PHI_RESULT (phi);
6911       ssa_op_iter oi;
6912       unsigned i;
6913 
6914       if (virtual_operand_p (op))
6915 	{
6916 	  /* Remove the phi nodes for virtual operands (alias analysis will be
6917 	     run for the new function, anyway).  */
6918           remove_phi_node (&psi, true);
6919 	  continue;
6920 	}
6921 
6922       SET_PHI_RESULT (phi,
6923 		      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6924       FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6925 	{
6926 	  op = USE_FROM_PTR (use);
6927 	  if (TREE_CODE (op) == SSA_NAME)
6928 	    SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6929 	}
6930 
6931       for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6932 	{
6933 	  location_t locus = gimple_phi_arg_location (phi, i);
6934 	  tree block = LOCATION_BLOCK (locus);
6935 
6936 	  if (locus == UNKNOWN_LOCATION)
6937 	    continue;
6938 	  if (d->orig_block == NULL_TREE || block == d->orig_block)
6939 	    {
6940 	      locus = set_block (locus, d->new_block);
6941 	      gimple_phi_arg_set_location (phi, i, locus);
6942 	    }
6943 	}
6944 
6945       gsi_next (&psi);
6946     }
6947 
6948   for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6949     {
6950       gimple *stmt = gsi_stmt (si);
6951       struct walk_stmt_info wi;
6952 
6953       memset (&wi, 0, sizeof (wi));
6954       wi.info = d;
6955       walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6956 
6957       if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
6958 	{
6959 	  tree label = gimple_label_label (label_stmt);
6960 	  int uid = LABEL_DECL_UID (label);
6961 
6962 	  gcc_assert (uid > -1);
6963 
6964 	  old_len = vec_safe_length (cfg->x_label_to_block_map);
6965 	  if (old_len <= (unsigned) uid)
6966 	    {
6967 	      new_len = 3 * uid / 2 + 1;
6968 	      vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6969 	    }
6970 
6971 	  (*cfg->x_label_to_block_map)[uid] = bb;
6972 	  (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6973 
6974 	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6975 
6976 	  if (uid >= dest_cfun->cfg->last_label_uid)
6977 	    dest_cfun->cfg->last_label_uid = uid + 1;
6978 	}
6979 
6980       maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6981       remove_stmt_from_eh_lp_fn (cfun, stmt);
6982 
6983       gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6984       gimple_remove_stmt_histograms (cfun, stmt);
6985 
6986       /* We cannot leave any operands allocated from the operand caches of
6987 	 the current function.  */
6988       free_stmt_operands (cfun, stmt);
6989       push_cfun (dest_cfun);
6990       update_stmt (stmt);
6991       pop_cfun ();
6992     }
6993 
6994   FOR_EACH_EDGE (e, ei, bb->succs)
6995     if (e->goto_locus != UNKNOWN_LOCATION)
6996       {
6997 	tree block = LOCATION_BLOCK (e->goto_locus);
6998 	if (d->orig_block == NULL_TREE
6999 	    || block == d->orig_block)
7000 	  e->goto_locus = set_block (e->goto_locus, d->new_block);
7001       }
7002 }
7003 
7004 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7005    the outermost EH region.  Use REGION as the incoming base EH region.  */
7006 
7007 static eh_region
7008 find_outermost_region_in_block (struct function *src_cfun,
7009 				basic_block bb, eh_region region)
7010 {
7011   gimple_stmt_iterator si;
7012 
7013   for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7014     {
7015       gimple *stmt = gsi_stmt (si);
7016       eh_region stmt_region;
7017       int lp_nr;
7018 
7019       lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7020       stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7021       if (stmt_region)
7022 	{
7023 	  if (region == NULL)
7024 	    region = stmt_region;
7025 	  else if (stmt_region != region)
7026 	    {
7027 	      region = eh_region_outermost (src_cfun, stmt_region, region);
7028 	      gcc_assert (region != NULL);
7029 	    }
7030 	}
7031     }
7032 
7033   return region;
7034 }
7035 
7036 static tree
7037 new_label_mapper (tree decl, void *data)
7038 {
7039   htab_t hash = (htab_t) data;
7040   struct tree_map *m;
7041   void **slot;
7042 
7043   gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7044 
7045   m = XNEW (struct tree_map);
7046   m->hash = DECL_UID (decl);
7047   m->base.from = decl;
7048   m->to = create_artificial_label (UNKNOWN_LOCATION);
7049   LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7050   if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7051     cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7052 
7053   slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7054   gcc_assert (*slot == NULL);
7055 
7056   *slot = m;
7057 
7058   return m->to;
7059 }
7060 
7061 /* Tree walker to replace the decls used inside value expressions by
7062    duplicates.  */
7063 
7064 static tree
7065 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7066 {
7067   struct replace_decls_d *rd = (struct replace_decls_d *)data;
7068 
7069   switch (TREE_CODE (*tp))
7070     {
7071     case VAR_DECL:
7072     case PARM_DECL:
7073     case RESULT_DECL:
7074       replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7075       break;
7076     default:
7077       break;
7078     }
7079 
7080   if (IS_TYPE_OR_DECL_P (*tp))
7081     *walk_subtrees = false;
7082 
7083   return NULL;
7084 }
7085 
7086 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7087    subblocks.  */
7088 
7089 static void
7090 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7091 				  tree to_context)
7092 {
7093   tree *tp, t;
7094 
7095   for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7096     {
7097       t = *tp;
7098       if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7099 	continue;
7100       replace_by_duplicate_decl (&t, vars_map, to_context);
7101       if (t != *tp)
7102 	{
7103 	  if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7104 	    {
7105 	      tree x = DECL_VALUE_EXPR (*tp);
7106 	      struct replace_decls_d rd = { vars_map, to_context };
7107 	      x = unshare_expr (x);
7108 	      walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7109 	      SET_DECL_VALUE_EXPR (t, x);
7110 	      DECL_HAS_VALUE_EXPR_P (t) = 1;
7111 	    }
7112 	  DECL_CHAIN (t) = DECL_CHAIN (*tp);
7113 	  *tp = t;
7114 	}
7115     }
7116 
7117   for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7118     replace_block_vars_by_duplicates (block, vars_map, to_context);
7119 }
7120 
7121 /* Fix up the loop arrays and numbers after moving LOOP and its subloops
7122    from FN1 to FN2.  */
7123 
7124 static void
7125 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7126 			      struct loop *loop)
7127 {
7128   /* Discard it from the old loop array.  */
7129   (*get_loops (fn1))[loop->num] = NULL;
7130 
7131   /* Place it in the new loop array, assigning it a new number.  */
7132   loop->num = number_of_loops (fn2);
7133   vec_safe_push (loops_for_fn (fn2)->larray, loop);
7134 
7135   /* Recurse to children.  */
7136   for (loop = loop->inner; loop; loop = loop->next)
7137     fixup_loop_arrays_after_move (fn1, fn2, loop);
7138 }
7139 
7140 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7141    delimited by ENTRY and EXIT, possibly containing noreturn blocks.  */
7142 
7143 DEBUG_FUNCTION void
7144 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7145 {
7146   basic_block bb;
7147   edge_iterator ei;
7148   edge e;
7149   bitmap bbs = BITMAP_ALLOC (NULL);
7150   int i;
7151 
7152   gcc_assert (entry != NULL);
7153   gcc_assert (entry != exit);
7154   gcc_assert (bbs_p != NULL);
7155 
7156   gcc_assert (bbs_p->length () > 0);
7157 
7158   FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7159     bitmap_set_bit (bbs, bb->index);
7160 
7161   gcc_assert (bitmap_bit_p (bbs, entry->index));
7162   gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7163 
7164   FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7165     {
7166       if (bb == entry)
7167 	{
7168 	  gcc_assert (single_pred_p (entry));
7169 	  gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7170 	}
7171       else
7172 	for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7173 	  {
7174 	    e = ei_edge (ei);
7175 	    gcc_assert (bitmap_bit_p (bbs, e->src->index));
7176 	  }
7177 
7178       if (bb == exit)
7179 	{
7180 	  gcc_assert (single_succ_p (exit));
7181 	  gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7182 	}
7183       else
7184 	for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7185 	  {
7186 	    e = ei_edge (ei);
7187 	    gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7188 	  }
7189     }
7190 
7191   BITMAP_FREE (bbs);
7192 }
7193 
7194 /* If FROM is an SSA_NAME, mark its version in bitmap DATA; used as a hash_map::traverse callback.  */
7195 
7196 bool
7197 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7198 {
7199   bitmap release_names = (bitmap)data;
7200 
7201   if (TREE_CODE (from) != SSA_NAME)
7202     return true;
7203 
7204   bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7205   return true;
7206 }
7207 
7208 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7209    EXIT_BB to function DEST_CFUN.  The whole region is replaced by a
7210    single basic block in the original CFG and the new basic block is
7211    returned.  DEST_CFUN must not have a CFG yet.
7212 
7213    Note that the region need not be a pure SESE region.  Blocks inside
7214    the region may contain calls to abort/exit.  The only restriction
7215    is that ENTRY_BB should be the only entry point and it must
7216    dominate EXIT_BB.
7217 
7218    Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7219    function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7220    to the new function.
7221 
7222    All local variables referenced in the region are assumed to be in
7223    the corresponding BLOCK_VARS and unexpanded variable lists
7224    associated with DEST_CFUN.
7225 
7226    TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7227    reimplement move_sese_region_to_fn by duplicating the region rather than
7228    moving it.  */
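
/* For illustration (a sketch only, not lifted from a particular
   caller): an outliner such as the OMP expander typically creates
   DEST_CFUN around a fresh FUNCTION_DECL, collects the region between
   ENTRY_BB and EXIT_BB, and then does

       bb = move_sese_region_to_fn (dest_cfun, entry_bb, exit_bb, block);

   after which BB is the single block left behind in the original
   function, ready to receive a call to the outlined function.  */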
7229 
7230 basic_block
7231 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7232 		        basic_block exit_bb, tree orig_block)
7233 {
7234   vec<basic_block> bbs, dom_bbs;
7235   basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7236   basic_block after, bb, *entry_pred, *exit_succ, abb;
7237   struct function *saved_cfun = cfun;
7238   int *entry_flag, *exit_flag;
7239   unsigned *entry_prob, *exit_prob;
7240   unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7241   edge e;
7242   edge_iterator ei;
7243   htab_t new_label_map;
7244   hash_map<void *, void *> *eh_map;
7245   struct loop *loop = entry_bb->loop_father;
7246   struct loop *loop0 = get_loop (saved_cfun, 0);
7247   struct move_stmt_d d;
7248 
7249   /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7250      region.  */
7251   gcc_assert (entry_bb != exit_bb
7252               && (!exit_bb
7253 		  || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7254 
7255   /* Collect all the blocks in the region.  Manually add ENTRY_BB
7256      because it won't be added by dfs_enumerate_from.  */
7257   bbs.create (0);
7258   bbs.safe_push (entry_bb);
7259   gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7260 
7261   if (flag_checking)
7262     verify_sese (entry_bb, exit_bb, &bbs);
7263 
7264   /* The blocks that used to be dominated by something in BBS will now be
7265      dominated by the new block.  */
7266   dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7267 				     bbs.address (),
7268 				     bbs.length ());
7269 
7270   /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG.  We need to remember
7271      the predecessor edges to ENTRY_BB and the successor edges to
7272      EXIT_BB so that we can re-attach them to the new basic block that
7273      will replace the region.  */
7274   num_entry_edges = EDGE_COUNT (entry_bb->preds);
7275   entry_pred = XNEWVEC (basic_block, num_entry_edges);
7276   entry_flag = XNEWVEC (int, num_entry_edges);
7277   entry_prob = XNEWVEC (unsigned, num_entry_edges);
7278   i = 0;
7279   for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7280     {
7281       entry_prob[i] = e->probability;
7282       entry_flag[i] = e->flags;
7283       entry_pred[i++] = e->src;
7284       remove_edge (e);
7285     }
7286 
7287   if (exit_bb)
7288     {
7289       num_exit_edges = EDGE_COUNT (exit_bb->succs);
7290       exit_succ = XNEWVEC (basic_block, num_exit_edges);
7291       exit_flag = XNEWVEC (int, num_exit_edges);
7292       exit_prob = XNEWVEC (unsigned, num_exit_edges);
7293       i = 0;
7294       for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7295 	{
7296 	  exit_prob[i] = e->probability;
7297 	  exit_flag[i] = e->flags;
7298 	  exit_succ[i++] = e->dest;
7299 	  remove_edge (e);
7300 	}
7301     }
7302   else
7303     {
7304       num_exit_edges = 0;
7305       exit_succ = NULL;
7306       exit_flag = NULL;
7307       exit_prob = NULL;
7308     }
7309 
7310   /* Switch context to the child function to initialize DEST_FN's CFG.  */
7311   gcc_assert (dest_cfun->cfg == NULL);
7312   push_cfun (dest_cfun);
7313 
7314   init_empty_tree_cfg ();
7315 
7316   /* Initialize EH information for the new function.  */
7317   eh_map = NULL;
7318   new_label_map = NULL;
7319   if (saved_cfun->eh)
7320     {
7321       eh_region region = NULL;
7322 
7323       FOR_EACH_VEC_ELT (bbs, i, bb)
7324 	region = find_outermost_region_in_block (saved_cfun, bb, region);
7325 
7326       init_eh_for_function ();
7327       if (region != NULL)
7328 	{
7329 	  new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7330 	  eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7331 					 new_label_mapper, new_label_map);
7332 	}
7333     }
7334 
7335   /* Initialize an empty loop tree.  */
7336   struct loops *loops = ggc_cleared_alloc<struct loops> ();
7337   init_loops_structure (dest_cfun, loops, 1);
7338   loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7339   set_loops_for_fn (dest_cfun, loops);
7340 
7341   /* Move the outlined loop tree part.  */
7342   num_nodes = bbs.length ();
7343   FOR_EACH_VEC_ELT (bbs, i, bb)
7344     {
7345       if (bb->loop_father->header == bb)
7346 	{
7347 	  struct loop *this_loop = bb->loop_father;
7348 	  struct loop *outer = loop_outer (this_loop);
7349 	  if (outer == loop
7350 	      /* If the SESE region contains some bbs ending with
7351 		 a noreturn call, those are considered to belong
7352 		 to the outermost loop in saved_cfun, rather than
7353 		 the entry_bb's loop_father.  */
7354 	      || outer == loop0)
7355 	    {
7356 	      if (outer != loop)
7357 		num_nodes -= this_loop->num_nodes;
7358 	      flow_loop_tree_node_remove (bb->loop_father);
7359 	      flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7360 	      fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7361 	    }
7362 	}
7363       else if (bb->loop_father == loop0 && loop0 != loop)
7364 	num_nodes--;
7365 
7366       /* Remove loop exits from the outlined region.  */
7367       if (loops_for_fn (saved_cfun)->exits)
7368 	FOR_EACH_EDGE (e, ei, bb->succs)
7369 	  {
7370 	    struct loops *l = loops_for_fn (saved_cfun);
7371 	    loop_exit **slot
7372 	      = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7373 					       NO_INSERT);
7374 	    if (slot)
7375 	      l->exits->clear_slot (slot);
7376 	  }
7377     }
7378 
7379 
7380   /* Adjust the number of blocks in the tree root of the outlined part.  */
7381   get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7382 
7383   /* Setup a mapping to be used by move_block_to_fn.  */
7384   loop->aux = current_loops->tree_root;
7385   loop0->aux = current_loops->tree_root;
7386 
7387   pop_cfun ();
7388 
7389   /* Move blocks from BBS into DEST_CFUN.  */
7390   gcc_assert (bbs.length () >= 2);
7391   after = dest_cfun->cfg->x_entry_block_ptr;
7392   hash_map<tree, tree> vars_map;
7393 
7394   memset (&d, 0, sizeof (d));
7395   d.orig_block = orig_block;
7396   d.new_block = DECL_INITIAL (dest_cfun->decl);
7397   d.from_context = cfun->decl;
7398   d.to_context = dest_cfun->decl;
7399   d.vars_map = &vars_map;
7400   d.new_label_map = new_label_map;
7401   d.eh_map = eh_map;
7402   d.remap_decls_p = true;
7403 
7404   if (gimple_in_ssa_p (cfun))
7405     for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7406       {
7407 	tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7408 	set_ssa_default_def (dest_cfun, arg, narg);
7409 	vars_map.put (arg, narg);
7410       }
7411 
7412   FOR_EACH_VEC_ELT (bbs, i, bb)
7413     {
7414       /* No need to update edge counts on the last block.  It has
7415 	 already been updated earlier when we detached the region from
7416 	 the original CFG.  */
7417       move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7418       after = bb;
7419     }
7420 
7421   loop->aux = NULL;
7422   loop0->aux = NULL;
7423   /* Loop sizes are no longer correct, fix them up.  */
7424   loop->num_nodes -= num_nodes;
7425   for (struct loop *outer = loop_outer (loop);
7426        outer; outer = loop_outer (outer))
7427     outer->num_nodes -= num_nodes;
7428   loop0->num_nodes -= bbs.length () - num_nodes;
7429 
7430   if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7431     {
7432       struct loop *aloop;
7433       for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7434 	if (aloop != NULL)
7435 	  {
7436 	    if (aloop->simduid)
7437 	      {
7438 		replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7439 					   d.to_context);
7440 		dest_cfun->has_simduid_loops = true;
7441 	      }
7442 	    if (aloop->force_vectorize)
7443 	      dest_cfun->has_force_vectorize_loops = true;
7444 	  }
7445     }
7446 
7447   /* Rewire BLOCK_SUBBLOCKS of orig_block.  */
7448   if (orig_block)
7449     {
7450       tree block;
7451       gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7452 		  == NULL_TREE);
7453       BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7454 	= BLOCK_SUBBLOCKS (orig_block);
7455       for (block = BLOCK_SUBBLOCKS (orig_block);
7456 	   block; block = BLOCK_CHAIN (block))
7457 	BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7458       BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7459     }
7460 
7461   replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7462 				    &vars_map, dest_cfun->decl);
7463 
7464   if (new_label_map)
7465     htab_delete (new_label_map);
7466   if (eh_map)
7467     delete eh_map;
7468 
7469   if (gimple_in_ssa_p (cfun))
7470     {
7471       /* We need to release ssa-names in a defined order, so first find them,
7472 	 and then iterate in ascending version order.  */
7473       bitmap release_names = BITMAP_ALLOC (NULL);
7474       vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7475       bitmap_iterator bi;
7476       unsigned i;
7477       EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7478 	release_ssa_name (ssa_name (i));
7479       BITMAP_FREE (release_names);
7480     }
7481 
7482   /* Rewire the entry and exit blocks.  ENTRY_BB becomes the
7483      successor of DEST_FN's ENTRY_BLOCK_PTR in the child function,
7484      and EXIT_BB becomes the predecessor of DEST_FN's
7485      EXIT_BLOCK_PTR.  We need to switch CFUN between DEST_CFUN and
7486      SAVED_CFUN so that the various CFG manipulation functions get
7487      to the right CFG.
7488 
7489      FIXME, this is silly.  The CFG ought to become a parameter to
7490      these helpers.  */
7491   push_cfun (dest_cfun);
7492   make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7493   if (exit_bb)
7494     make_edge (exit_bb,  EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7495   pop_cfun ();
7496 
7497   /* Back in the original function, the SESE region has disappeared,
7498      create a new basic block in its place.  */
7499   bb = create_empty_bb (entry_pred[0]);
7500   if (current_loops)
7501     add_bb_to_loop (bb, loop);
7502   for (i = 0; i < num_entry_edges; i++)
7503     {
7504       e = make_edge (entry_pred[i], bb, entry_flag[i]);
7505       e->probability = entry_prob[i];
7506     }
7507 
7508   for (i = 0; i < num_exit_edges; i++)
7509     {
7510       e = make_edge (bb, exit_succ[i], exit_flag[i]);
7511       e->probability = exit_prob[i];
7512     }
7513 
7514   set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7515   FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7516     set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7517   dom_bbs.release ();
7518 
7519   if (exit_bb)
7520     {
7521       free (exit_prob);
7522       free (exit_flag);
7523       free (exit_succ);
7524     }
7525   free (entry_prob);
7526   free (entry_flag);
7527   free (entry_pred);
7528   bbs.release ();
7529 
7530   return bb;
7531 }
7532 
7533 /* Dump default def DEF to file FILE using FLAGS and indentation
7534    SPC.  */
7535 
7536 static void
7537 dump_default_def (FILE *file, tree def, int spc, int flags)
7538 {
7539   for (int i = 0; i < spc; ++i)
7540     fprintf (file, " ");
7541   dump_ssaname_info_to_file (file, def, spc);
7542 
7543   print_generic_expr (file, TREE_TYPE (def), flags);
7544   fprintf (file, " ");
7545   print_generic_expr (file, def, flags);
7546   fprintf (file, " = ");
7547   print_generic_expr (file, SSA_NAME_VAR (def), flags);
7548   fprintf (file, ";\n");
7549 }
7550 
7551 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in
7552    dumpfile.h).  */
7553 
7554 void
7555 dump_function_to_file (tree fndecl, FILE *file, int flags)
7556 {
7557   tree arg, var, old_current_fndecl = current_function_decl;
7558   struct function *dsf;
7559   bool ignore_topmost_bind = false, any_var = false;
7560   basic_block bb;
7561   tree chain;
7562   bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7563 		  && decl_is_tm_clone (fndecl));
7564   struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7565 
7566   if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7567     {
7568       fprintf (file, "__attribute__((");
7569 
7570       bool first = true;
7571       tree chain;
7572       for (chain = DECL_ATTRIBUTES (fndecl); chain;
7573 	   first = false, chain = TREE_CHAIN (chain))
7574 	{
7575 	  if (!first)
7576 	    fprintf (file, ", ");
7577 
7578 	  print_generic_expr (file, get_attribute_name (chain), dump_flags);
7579 	  if (TREE_VALUE (chain) != NULL_TREE)
7580 	    {
7581 	      fprintf (file, " (");
7582 	      print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7583 	      fprintf (file, ")");
7584 	    }
7585 	}
7586 
7587       fprintf (file, "))\n");
7588     }
7589 
7590   current_function_decl = fndecl;
7591   if (flags & TDF_GIMPLE)
7592     {
7593       print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
7594 			  dump_flags | TDF_SLIM);
7595       fprintf (file, " __GIMPLE ()\n%s (", function_name (fun));
7596     }
7597   else
7598     fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7599 
7600   arg = DECL_ARGUMENTS (fndecl);
7601   while (arg)
7602     {
7603       print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7604       fprintf (file, " ");
7605       print_generic_expr (file, arg, dump_flags);
7606       if (flags & TDF_VERBOSE)
7607 	print_node (file, "", arg, 4);
7608       if (DECL_CHAIN (arg))
7609 	fprintf (file, ", ");
7610       arg = DECL_CHAIN (arg);
7611     }
7612   fprintf (file, ")\n");
7613 
7614   if (flags & TDF_VERBOSE)
7615     print_node (file, "", fndecl, 2);
7616 
7617   dsf = DECL_STRUCT_FUNCTION (fndecl);
7618   if (dsf && (flags & TDF_EH))
7619     dump_eh_tree (file, dsf);
7620 
7621   if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7622     {
7623       dump_node (fndecl, TDF_SLIM | flags, file);
7624       current_function_decl = old_current_fndecl;
7625       return;
7626     }
7627 
7628   /* When GIMPLE is lowered, the variables are no longer available in
7629      BIND_EXPRs, so display them separately.  */
7630   if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7631     {
7632       unsigned ix;
7633       ignore_topmost_bind = true;
7634 
7635       fprintf (file, "{\n");
7636       if (gimple_in_ssa_p (fun)
7637 	  && (flags & TDF_ALIAS))
7638 	{
7639 	  for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
7640 	       arg = DECL_CHAIN (arg))
7641 	    {
7642 	      tree def = ssa_default_def (fun, arg);
7643 	      if (def)
7644 		dump_default_def (file, def, 2, flags);
7645 	    }
7646 
7647 	  tree res = DECL_RESULT (fun->decl);
7648 	  if (res != NULL_TREE
7649 	      && DECL_BY_REFERENCE (res))
7650 	    {
7651 	      tree def = ssa_default_def (fun, res);
7652 	      if (def)
7653 		dump_default_def (file, def, 2, flags);
7654 	    }
7655 
7656 	  tree static_chain = fun->static_chain_decl;
7657 	  if (static_chain != NULL_TREE)
7658 	    {
7659 	      tree def = ssa_default_def (fun, static_chain);
7660 	      if (def)
7661 		dump_default_def (file, def, 2, flags);
7662 	    }
7663 	}
7664 
7665       if (!vec_safe_is_empty (fun->local_decls))
7666 	FOR_EACH_LOCAL_DECL (fun, ix, var)
7667 	  {
7668 	    print_generic_decl (file, var, flags);
7669 	    if (flags & TDF_VERBOSE)
7670 	      print_node (file, "", var, 4);
7671 	    fprintf (file, "\n");
7672 
7673 	    any_var = true;
7674 	  }
7675 
7676       tree name;
7677 
7678       if (gimple_in_ssa_p (cfun))
7679 	FOR_EACH_SSA_NAME (ix, name, cfun)
7680 	  {
7681 	    if (!SSA_NAME_VAR (name))
7682 	      {
7683 		fprintf (file, "  ");
7684 		print_generic_expr (file, TREE_TYPE (name), flags);
7685 		fprintf (file, " ");
7686 		print_generic_expr (file, name, flags);
7687 		fprintf (file, ";\n");
7688 
7689 		any_var = true;
7690 	      }
7691 	  }
7692     }
7693 
7694   if (fun && fun->decl == fndecl
7695       && fun->cfg
7696       && basic_block_info_for_fn (fun))
7697     {
7698       /* If the CFG has been built, emit a CFG-based dump.  */
7699       if (!ignore_topmost_bind)
7700 	fprintf (file, "{\n");
7701 
7702       if (any_var && n_basic_blocks_for_fn (fun))
7703 	fprintf (file, "\n");
7704 
7705       FOR_EACH_BB_FN (bb, fun)
7706 	dump_bb (file, bb, 2, flags | TDF_COMMENT);
7707 
7708       fprintf (file, "}\n");
7709     }
7710   else if (fun->curr_properties & PROP_gimple_any)
7711     {
7712       /* The function is now in GIMPLE form but the CFG has not been
7713 	 built yet.  Emit the single sequence of GIMPLE statements
7714 	 that make up its body.  */
7715       gimple_seq body = gimple_body (fndecl);
7716 
7717       if (gimple_seq_first_stmt (body)
7718 	  && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7719 	  && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7720 	print_gimple_seq (file, body, 0, flags);
7721       else
7722 	{
7723 	  if (!ignore_topmost_bind)
7724 	    fprintf (file, "{\n");
7725 
7726 	  if (any_var)
7727 	    fprintf (file, "\n");
7728 
7729 	  print_gimple_seq (file, body, 2, flags);
7730 	  fprintf (file, "}\n");
7731 	}
7732     }
7733   else
7734     {
7735       int indent;
7736 
7737       /* Make a tree based dump.  */
7738       chain = DECL_SAVED_TREE (fndecl);
7739       if (chain && TREE_CODE (chain) == BIND_EXPR)
7740 	{
7741 	  if (ignore_topmost_bind)
7742 	    {
7743 	      chain = BIND_EXPR_BODY (chain);
7744 	      indent = 2;
7745 	    }
7746 	  else
7747 	    indent = 0;
7748 	}
7749       else
7750 	{
7751 	  if (!ignore_topmost_bind)
7752 	    {
7753 	      fprintf (file, "{\n");
7754 	      /* No topmost bind, pretend it's ignored for later.  */
7755 	      ignore_topmost_bind = true;
7756 	    }
7757 	  indent = 2;
7758 	}
7759 
7760       if (any_var)
7761 	fprintf (file, "\n");
7762 
7763       print_generic_stmt_indented (file, chain, flags, indent);
7764       if (ignore_topmost_bind)
7765 	fprintf (file, "}\n");
7766     }
7767 
7768   if (flags & TDF_ENUMERATE_LOCALS)
7769     dump_enumerated_decls (file, flags);
7770   fprintf (file, "\n\n");
7771 
7772   current_function_decl = old_current_fndecl;
7773 }
7774 
7775 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h).  */
7776 
7777 DEBUG_FUNCTION void
7778 debug_function (tree fn, int flags)
7779 {
7780   dump_function_to_file (fn, stderr, flags);
7781 }
7782 
7783 
7784 /* Print on FILE the indexes for the predecessors of basic_block BB.  */
7785 
7786 static void
7787 print_pred_bbs (FILE *file, basic_block bb)
7788 {
7789   edge e;
7790   edge_iterator ei;
7791 
7792   FOR_EACH_EDGE (e, ei, bb->preds)
7793     fprintf (file, "bb_%d ", e->src->index);
7794 }
7795 
7796 
7797 /* Print on FILE the indexes for the successors of basic_block BB.  */
7798 
7799 static void
7800 print_succ_bbs (FILE *file, basic_block bb)
7801 {
7802   edge e;
7803   edge_iterator ei;
7804 
7805   FOR_EACH_EDGE (e, ei, bb->succs)
7806     fprintf (file, "bb_%d ", e->dest->index);
7807 }
7808 
7809 /* Print to FILE the basic block BB according to the VERBOSITY level.  */
7810 
7811 void
7812 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7813 {
7814   char *s_indent = (char *) alloca ((size_t) indent + 1);
7815   memset ((void *) s_indent, ' ', (size_t) indent);
7816   s_indent[indent] = '\0';
7817 
7818   /* Print basic_block's header.  */
7819   if (verbosity >= 2)
7820     {
7821       fprintf (file, "%s  bb_%d (preds = {", s_indent, bb->index);
7822       print_pred_bbs (file, bb);
7823       fprintf (file, "}, succs = {");
7824       print_succ_bbs (file, bb);
7825       fprintf (file, "})\n");
7826     }
7827 
7828   /* Print basic_block's body.  */
7829   if (verbosity >= 3)
7830     {
7831       fprintf (file, "%s  {\n", s_indent);
7832       dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7833       fprintf (file, "%s  }\n", s_indent);
7834     }
7835 }
7836 
7837 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7838 
7839 /* Pretty print LOOP on FILE, indented INDENT spaces.  Depending on
7840    the VERBOSITY level, this outputs the contents of the loop or just
7841    its structure.  */
7842 
7843 static void
7844 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7845 {
7846   char *s_indent;
7847   basic_block bb;
7848 
7849   if (loop == NULL)
7850     return;
7851 
7852   s_indent = (char *) alloca ((size_t) indent + 1);
7853   memset ((void *) s_indent, ' ', (size_t) indent);
7854   s_indent[indent] = '\0';
7855 
7856   /* Print loop's header.  */
7857   fprintf (file, "%sloop_%d (", s_indent, loop->num);
7858   if (loop->header)
7859     fprintf (file, "header = %d", loop->header->index);
7860   else
7861     {
7862       fprintf (file, "deleted)\n");
7863       return;
7864     }
7865   if (loop->latch)
7866     fprintf (file, ", latch = %d", loop->latch->index);
7867   else
7868     fprintf (file, ", multiple latches");
7869   fprintf (file, ", niter = ");
7870   print_generic_expr (file, loop->nb_iterations, 0);
7871 
7872   if (loop->any_upper_bound)
7873     {
7874       fprintf (file, ", upper_bound = ");
7875       print_decu (loop->nb_iterations_upper_bound, file);
7876     }
7877   if (loop->any_likely_upper_bound)
7878     {
7879       fprintf (file, ", likely_upper_bound = ");
7880       print_decu (loop->nb_iterations_likely_upper_bound, file);
7881     }
7882 
7883   if (loop->any_estimate)
7884     {
7885       fprintf (file, ", estimate = ");
7886       print_decu (loop->nb_iterations_estimate, file);
7887     }
7888   fprintf (file, ")\n");
7889 
7890   /* Print loop's body.  */
7891   if (verbosity >= 1)
7892     {
7893       fprintf (file, "%s{\n", s_indent);
7894       FOR_EACH_BB_FN (bb, cfun)
7895 	if (bb->loop_father == loop)
7896 	  print_loops_bb (file, bb, indent, verbosity);
7897 
7898       print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7899       fprintf (file, "%s}\n", s_indent);
7900     }
7901 }
7902 
7903 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7904    spaces.  Depending on the VERBOSITY level, this outputs the
7905    contents of each loop or just its structure.  */
7906 
7907 static void
7908 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7909 			 int verbosity)
7910 {
7911   if (loop == NULL)
7912     return;
7913 
7914   print_loop (file, loop, indent, verbosity);
7915   print_loop_and_siblings (file, loop->next, indent, verbosity);
7916 }
7917 
7918 /* Starting at the entry block of the current function, pretty print
7919    on FILE the structure of its loops, at the given VERBOSITY.  */
7920 
7921 void
7922 print_loops (FILE *file, int verbosity)
7923 {
7924   basic_block bb;
7925 
7926   bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7927   fprintf (file, "\nLoops in function: %s\n", current_function_name ());
7928   if (bb && bb->loop_father)
7929     print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7930 }
7931 
7932 /* Dump a loop.  */
7933 
7934 DEBUG_FUNCTION void
7935 debug (struct loop &ref)
7936 {
7937   print_loop (stderr, &ref, 0, /*verbosity*/0);
7938 }
7939 
7940 DEBUG_FUNCTION void
7941 debug (struct loop *ptr)
7942 {
7943   if (ptr)
7944     debug (*ptr);
7945   else
7946     fprintf (stderr, "<nil>\n");
7947 }
7948 
7949 /* Dump a loop verbosely.  */
7950 
7951 DEBUG_FUNCTION void
7952 debug_verbose (struct loop &ref)
7953 {
7954   print_loop (stderr, &ref, 0, /*verbosity*/3);
7955 }
7956 
7957 DEBUG_FUNCTION void
7958 debug_verbose (struct loop *ptr)
7959 {
7960   if (ptr)
7961     debug_verbose (*ptr);
7962   else
7963     fprintf (stderr, "<nil>\n");
7964 }
7965 
7966 
7967 /* Debug the loop structure at tree level, at some VERBOSITY level.  */
7968 
7969 DEBUG_FUNCTION void
7970 debug_loops (int verbosity)
7971 {
7972   print_loops (stderr, verbosity);
7973 }
7974 
7975 /* Print on stderr the code of LOOP, at some VERBOSITY level.  */
7976 
7977 DEBUG_FUNCTION void
7978 debug_loop (struct loop *loop, int verbosity)
7979 {
7980   print_loop (stderr, loop, 0, verbosity);
7981 }
7982 
7983 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7984    level.  */
7985 
7986 DEBUG_FUNCTION void
7987 debug_loop_num (unsigned num, int verbosity)
7988 {
7989   debug_loop (get_loop (cfun, num), verbosity);
7990 }
7991 
7992 /* Return true if BB ends with a call, possibly followed by some
7993    instructions that must stay with the call.  Return false,
7994    otherwise.  */
7995 
7996 static bool
7997 gimple_block_ends_with_call_p (basic_block bb)
7998 {
7999   gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8000   return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8001 }
8002 
8003 
8004 /* Return true if BB ends with a conditional branch.  Return false,
8005    otherwise.  */
8006 
8007 static bool
8008 gimple_block_ends_with_condjump_p (const_basic_block bb)
8009 {
8010   gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8011   return (stmt && gimple_code (stmt) == GIMPLE_COND);
8012 }
8013 
8014 
8015 /* Return true if statement T may terminate execution of BB in ways not
8016    explicitly represented in the CFG.  */
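/* For example, a call to an arbitrary external function may terminate
   the block: the callee can call exit (), longjmp () past this frame,
   or propagate an exception that is not handled internally, none of
   which appears as an explicit outgoing edge of BB.  */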
8017 
8018 bool
8019 stmt_can_terminate_bb_p (gimple *t)
8020 {
8021   tree fndecl = NULL_TREE;
8022   int call_flags = 0;
8023 
8024   /* An EH exception not handled internally terminates execution of the
8025      whole function.  */
8026   if (stmt_can_throw_external (t))
8027     return true;
8028 
8029   /* NORETURN and LONGJMP calls already have an edge to exit.
8030      CONST and PURE calls do not need one.
8031      We don't currently check for CONST and PURE here, although
8032      it would be a good idea, because those attributes are
8033      figured out from the RTL in mark_constant_function, and
8034      the counter incrementation code from -fprofile-arcs
8035      leads to different results from -fbranch-probabilities.  */
8036   if (is_gimple_call (t))
8037     {
8038       fndecl = gimple_call_fndecl (t);
8039       call_flags = gimple_call_flags (t);
8040     }
8041 
8042   if (is_gimple_call (t)
8043       && fndecl
8044       && DECL_BUILT_IN (fndecl)
8045       && (call_flags & ECF_NOTHROW)
8046       && !(call_flags & ECF_RETURNS_TWICE)
8047       /* fork() doesn't really return twice, but the effect of
8048          wrapping it in __gcov_fork() which calls __gcov_flush()
8049 	 and clears the counters before forking has the same
8050 	 effect as returning twice.  Force a fake edge.  */
8051       && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
8052 	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
8053     return false;
8054 
8055   if (is_gimple_call (t))
8056     {
8057       edge_iterator ei;
8058       edge e;
8059       basic_block bb;
8060 
8061       if (call_flags & (ECF_PURE | ECF_CONST)
8062 	  && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8063 	return false;
8064 
8065       /* A function call may do a longjmp, terminate the program or other
8066 	 things.  Special-case noreturn calls with non-abnormal edges out, as
8067 	 there the fact is sufficiently represented by lack of edges out of T.  */
8068       if (!(call_flags & ECF_NORETURN))
8069 	return true;
8070 
8071       bb = gimple_bb (t);
8072       FOR_EACH_EDGE (e, ei, bb->succs)
8073 	if ((e->flags & EDGE_FAKE) == 0)
8074 	  return true;
8075     }
8076 
8077   if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8078     if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8079       return true;
8080 
8081   return false;
8082 }
8083 
8084 
8085 /* Add fake edges to the function exit for any non-constant and
8086    non-noreturn calls (or noreturn calls with EH/abnormal edges), and
8087    for volatile inline assembly, in the bitmap of blocks specified by
8088    BLOCKS or in the whole CFG if BLOCKS is zero.  Return the number of
8089    blocks that were split.
8090 
8091    The goal is to expose cases in which entering a basic block does
8092    not imply that all subsequent instructions must be executed.  */
8093 
8094 static int
8095 gimple_flow_call_edges_add (sbitmap blocks)
8096 {
8097   int i;
8098   int blocks_split = 0;
8099   int last_bb = last_basic_block_for_fn (cfun);
8100   bool check_last_block = false;
8101 
8102   if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8103     return 0;
8104 
8105   if (! blocks)
8106     check_last_block = true;
8107   else
8108     check_last_block = bitmap_bit_p (blocks,
8109 				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8110 
8111   /* In the last basic block, before epilogue generation, there will be
8112      a fallthru edge to EXIT.  Special care is required if the last insn
8113      of the last basic block is a call because make_edge folds duplicate
8114      edges, which would result in the fallthru edge also being marked
8115      fake, which would result in the fallthru edge being removed by
8116      remove_fake_edges, which would result in an invalid CFG.
8117 
8118      Moreover, we can't elide the outgoing fake edge, since the block
8119      profiler needs to take this into account in order to solve the minimal
8120      spanning tree in the case that the call doesn't return.
8121 
8122      Handle this by adding a dummy instruction in a new last basic block.  */
8123   if (check_last_block)
8124     {
8125       basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8126       gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8127       gimple *t = NULL;
8128 
8129       if (!gsi_end_p (gsi))
8130 	t = gsi_stmt (gsi);
8131 
8132       if (t && stmt_can_terminate_bb_p (t))
8133 	{
8134 	  edge e;
8135 
8136 	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8137 	  if (e)
8138 	    {
8139 	      gsi_insert_on_edge (e, gimple_build_nop ());
8140 	      gsi_commit_edge_inserts ();
8141 	    }
8142 	}
8143     }
8144 
8145   /* Now add fake edges to the function exit for any non-constant
8146      calls since there is no way that we can determine if they will
8147      return or not...  */
8148   for (i = 0; i < last_bb; i++)
8149     {
8150       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8151       gimple_stmt_iterator gsi;
8152       gimple *stmt, *last_stmt;
8153 
8154       if (!bb)
8155 	continue;
8156 
8157       if (blocks && !bitmap_bit_p (blocks, i))
8158 	continue;
8159 
8160       gsi = gsi_last_nondebug_bb (bb);
8161       if (!gsi_end_p (gsi))
8162 	{
8163 	  last_stmt = gsi_stmt (gsi);
8164 	  do
8165 	    {
8166 	      stmt = gsi_stmt (gsi);
8167 	      if (stmt_can_terminate_bb_p (stmt))
8168 		{
8169 		  edge e;
8170 
8171 		  /* The handling above of the final block before the
8172 		     epilogue should be enough to verify that there is
8173 		     no edge to the exit block in the CFG already.
8174 		     Calling make_edge in such case would cause us to
8175 		     mark that edge as fake and remove it later.  */
8176 		  if (flag_checking && stmt == last_stmt)
8177 		    {
8178 		      e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8179 		      gcc_assert (e == NULL);
8180 		    }
8181 
8182 		  /* Note that the following may create a new basic block
8183 		     and renumber the existing basic blocks.  */
8184 		  if (stmt != last_stmt)
8185 		    {
8186 		      e = split_block (bb, stmt);
8187 		      if (e)
8188 			blocks_split++;
8189 		    }
8190 		  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8191 		}
8192 	      gsi_prev (&gsi);
8193 	    }
8194 	  while (!gsi_end_p (gsi));
8195 	}
8196     }
8197 
8198   if (blocks_split)
8199     verify_flow_info ();
8200 
8201   return blocks_split;
8202 }
8203 
8204 /* Removes edge E and all the blocks dominated by it, and updates dominance
8205    information.  The IL in E->src needs to be updated separately.
8206    If dominance info is not available, only the edge E is removed.  */
8207 
8208 void
8209 remove_edge_and_dominated_blocks (edge e)
8210 {
8211   vec<basic_block> bbs_to_remove = vNULL;
8212   vec<basic_block> bbs_to_fix_dom = vNULL;
8213   bitmap df, df_idom;
8214   edge f;
8215   edge_iterator ei;
8216   bool none_removed = false;
8217   unsigned i;
8218   basic_block bb, dbb;
8219   bitmap_iterator bi;
8220 
8221   /* If we are removing a path inside a non-root loop, that may change
8222      loop ownership of blocks or remove loops; mark loops for fixup.  */
8223   if (current_loops
8224       && loop_outer (e->src->loop_father) != NULL
8225       && e->src->loop_father == e->dest->loop_father)
8226     loops_state_set (LOOPS_NEED_FIXUP);
8227 
8228   if (!dom_info_available_p (CDI_DOMINATORS))
8229     {
8230       remove_edge (e);
8231       return;
8232     }
8233 
8234   /* No updating is needed for edges to exit.  */
8235   if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8236     {
8237       if (cfgcleanup_altered_bbs)
8238 	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8239       remove_edge (e);
8240       return;
8241     }
8242 
8243   /* First, we find the basic blocks to remove.  If E->dest has a predecessor
8244      that is not dominated by E->dest, then this set is empty.  Otherwise,
8245      all the basic blocks dominated by E->dest are removed.
8246 
8247      Also, to DF_IDOM we store the immediate dominators of the blocks in
8248      the dominance frontier of E (i.e., of the successors of the
8249      removed blocks, if there are any, and of E->dest otherwise).  */
8250   FOR_EACH_EDGE (f, ei, e->dest->preds)
8251     {
8252       if (f == e)
8253 	continue;
8254 
8255       if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8256 	{
8257 	  none_removed = true;
8258 	  break;
8259 	}
8260     }
8261 
8262   df = BITMAP_ALLOC (NULL);
8263   df_idom = BITMAP_ALLOC (NULL);
8264 
8265   if (none_removed)
8266     bitmap_set_bit (df_idom,
8267 		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8268   else
8269     {
8270       bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8271       FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8272 	{
8273 	  FOR_EACH_EDGE (f, ei, bb->succs)
8274 	    {
8275 	      if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8276 		bitmap_set_bit (df, f->dest->index);
8277 	    }
8278 	}
8279       FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8280 	bitmap_clear_bit (df, bb->index);
8281 
8282       EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8283 	{
8284 	  bb = BASIC_BLOCK_FOR_FN (cfun, i);
8285 	  bitmap_set_bit (df_idom,
8286 			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8287 	}
8288     }
8289 
8290   if (cfgcleanup_altered_bbs)
8291     {
8292       /* Record the set of the altered basic blocks.  */
8293       bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8294       bitmap_ior_into (cfgcleanup_altered_bbs, df);
8295     }
8296 
8297   /* Remove E and the cancelled blocks.  */
8298   if (none_removed)
8299     remove_edge (e);
8300   else
8301     {
8302       /* Walk backwards so as to get a chance to substitute all
8303 	 released DEFs into debug stmts.  See
8304 	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8305 	 details.  */
8306       for (i = bbs_to_remove.length (); i-- > 0; )
8307 	delete_basic_block (bbs_to_remove[i]);
8308     }
8309 
8310   /* Update the dominance information.  The immediate dominator may change only
8311      for blocks whose immediate dominator belongs to DF_IDOM:
8312 
8313      Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8314      removal.  Let Z be a block such that idom(Z) = Y and
8315      Z dominates X after the removal.  Before removal, there exists a path P
8316      from Y to X that avoids Z.  Let F be the last edge on P that is
8317      removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
8318      dominates W, and because of P, Z does not dominate W), and W belongs to
8319      the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
8320   EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8321     {
8322       bb = BASIC_BLOCK_FOR_FN (cfun, i);
8323       for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8324 	   dbb;
8325 	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
8326 	bbs_to_fix_dom.safe_push (dbb);
8327     }
8328 
8329   iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8330 
8331   BITMAP_FREE (df);
8332   BITMAP_FREE (df_idom);
8333   bbs_to_remove.release ();
8334   bbs_to_fix_dom.release ();
8335 }
8336 
8337 /* Purge dead EH edges from basic block BB.  */
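/* A dead EH edge typically arises when, e.g., folding replaces the
   last statement of BB with one that can no longer throw internally;
   the early return below keeps the edges of blocks whose last
   statement can still throw.  */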
8338 
8339 bool
8340 gimple_purge_dead_eh_edges (basic_block bb)
8341 {
8342   bool changed = false;
8343   edge e;
8344   edge_iterator ei;
8345   gimple *stmt = last_stmt (bb);
8346 
8347   if (stmt && stmt_can_throw_internal (stmt))
8348     return false;
8349 
8350   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8351     {
8352       if (e->flags & EDGE_EH)
8353 	{
8354 	  remove_edge_and_dominated_blocks (e);
8355 	  changed = true;
8356 	}
8357       else
8358 	ei_next (&ei);
8359     }
8360 
8361   return changed;
8362 }
8363 
8364 /* Purge dead EH edges from the basic blocks listed in BLOCKS.  */
8365 
8366 bool
8367 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8368 {
8369   bool changed = false;
8370   unsigned i;
8371   bitmap_iterator bi;
8372 
8373   EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8374     {
8375       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8376 
8377       /* Earlier gimple_purge_dead_eh_edges could have removed
8378 	 this basic block already.  */
8379       gcc_assert (bb || changed);
8380       if (bb != NULL)
8381 	changed |= gimple_purge_dead_eh_edges (bb);
8382     }
8383 
8384   return changed;
8385 }
8386 
8387 /* Purge dead abnormal call edges from basic block BB.  */
8388 
8389 bool
8390 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8391 {
8392   bool changed = false;
8393   edge e;
8394   edge_iterator ei;
8395   gimple *stmt = last_stmt (bb);
8396 
8397   if (!cfun->has_nonlocal_label
8398       && !cfun->calls_setjmp)
8399     return false;
8400 
8401   if (stmt && stmt_can_make_abnormal_goto (stmt))
8402     return false;
8403 
8404   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8405     {
8406       if (e->flags & EDGE_ABNORMAL)
8407 	{
8408 	  if (e->flags & EDGE_FALLTHRU)
8409 	    e->flags &= ~EDGE_ABNORMAL;
8410 	  else
8411 	    remove_edge_and_dominated_blocks (e);
8412 	  changed = true;
8413 	}
8414       else
8415 	ei_next (&ei);
8416     }
8417 
8418   return changed;
8419 }
8420 
8421 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS.  */
8422 
8423 bool
8424 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8425 {
8426   bool changed = false;
8427   unsigned i;
8428   bitmap_iterator bi;
8429 
8430   EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8431     {
8432       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8433 
8434       /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8435 	 this basic block already.  */
8436       gcc_assert (bb || changed);
8437       if (bb != NULL)
8438 	changed |= gimple_purge_dead_abnormal_call_edges (bb);
8439     }
8440 
8441   return changed;
8442 }
8443 
8444 /* This function is called whenever a new edge is created or
8445    redirected.  */
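/* Reserving a PHI argument slot here keeps each PHI node's argument
   vector in sync with the destination block's predecessor edge vector,
   so the new edge's dest_idx has a slot before any argument is set.  */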
8446 
8447 static void
8448 gimple_execute_on_growing_pred (edge e)
8449 {
8450   basic_block bb = e->dest;
8451 
8452   if (!gimple_seq_empty_p (phi_nodes (bb)))
8453     reserve_phi_args_for_new_edge (bb);
8454 }
8455 
8456 /* This function is called immediately before edge E is removed from
8457    the edge vector E->dest->preds.  */
8458 
8459 static void
8460 gimple_execute_on_shrinking_pred (edge e)
8461 {
8462   if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8463     remove_phi_args (e);
8464 }
8465 
8466 /*---------------------------------------------------------------------------
8467   Helper functions for Loop versioning
8468   ---------------------------------------------------------------------------*/
8469 
8470 /* Adjust phi nodes for 'first' basic block.  'second' basic block is
8471    a copy of 'first'.  Both of them are dominated by 'new_head' basic
8472    block.  When 'new_head' was created by splitting 'second's incoming
8473    edge, the phi arguments on that edge were set up by split_edge ().
8474    Later, an additional edge 'e' was created to connect 'new_head' and
8475    'first'.  This routine now adds to edge 'e' the phi args that the
8476    edge from 'new_head' to 'second' received as part of the edge splitting.  */
8477 
8478 static void
8479 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8480 				  basic_block new_head, edge e)
8481 {
8482   gphi *phi1, *phi2;
8483   gphi_iterator psi1, psi2;
8484   tree def;
8485   edge e2 = find_edge (new_head, second);
8486 
8487   /* Because NEW_HEAD has been created by splitting SECOND's incoming
8488      edge, we should always have an edge from NEW_HEAD to SECOND.  */
8489   gcc_assert (e2 != NULL);
8490 
8491   /* Browse all 'second' basic block phi nodes and add phi args to
8492      edge 'e' for 'first' head. PHI args are always in correct order.  */
8493 
8494   for (psi2 = gsi_start_phis (second),
8495        psi1 = gsi_start_phis (first);
8496        !gsi_end_p (psi2) && !gsi_end_p (psi1);
8497        gsi_next (&psi2),  gsi_next (&psi1))
8498     {
8499       phi1 = psi1.phi ();
8500       phi2 = psi2.phi ();
8501       def = PHI_ARG_DEF (phi2, e2->dest_idx);
8502       add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8503     }
8504 }
8505 
8506 
8507 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8508    SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
8509    the destination of the ELSE part.  */
8510 
8511 static void
8512 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8513 			       basic_block second_head ATTRIBUTE_UNUSED,
8514 			       basic_block cond_bb, void *cond_e)
8515 {
8516   gimple_stmt_iterator gsi;
8517   gimple *new_cond_expr;
8518   tree cond_expr = (tree) cond_e;
8519   edge e0;
8520 
8521   /* Build the new conditional expr.  */
8522   new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8523 					       NULL_TREE, NULL_TREE);
8524 
8525   /* Add new cond in cond_bb.  */
8526   gsi = gsi_last_bb (cond_bb);
8527   gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8528 
8529   /* Adjust edges appropriately to connect new head with first head
8530      as well as second head.  */
8531   e0 = single_succ_edge (cond_bb);
8532   e0->flags &= ~EDGE_FALLTHRU;
8533   e0->flags |= EDGE_FALSE_VALUE;
8534 }
8535 
8536 
8537 /* Do book-keeping of basic block BB for the profile consistency checker.
8538    If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
8539    do post-pass accounting.  Store the counts in RECORD.  */
8540 static void
8541 gimple_account_profile_record (basic_block bb, int after_pass,
8542 			       struct profile_record *record)
8543 {
8544   gimple_stmt_iterator i;
8545   for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8546     {
8547       record->size[after_pass]
8548 	+= estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8549       if (profile_status_for_fn (cfun) == PROFILE_READ)
8550 	record->time[after_pass]
8551 	  += estimate_num_insns (gsi_stmt (i),
8552 				 &eni_time_weights) * bb->count;
8553       else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8554 	record->time[after_pass]
8555 	  += estimate_num_insns (gsi_stmt (i),
8556 				 &eni_time_weights) * bb->frequency;
8557     }
8558 }
8559 
8560 struct cfg_hooks gimple_cfg_hooks = {
8561   "gimple",
8562   gimple_verify_flow_info,
8563   gimple_dump_bb,		/* dump_bb  */
8564   gimple_dump_bb_for_graph,	/* dump_bb_for_graph  */
8565   create_bb,			/* create_basic_block  */
8566   gimple_redirect_edge_and_branch, /* redirect_edge_and_branch  */
8567   gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force  */
8568   gimple_can_remove_branch_p,	/* can_remove_branch_p  */
8569   remove_bb,			/* delete_basic_block  */
8570   gimple_split_block,		/* split_block  */
8571   gimple_move_block_after,	/* move_block_after  */
8572   gimple_can_merge_blocks_p,	/* can_merge_blocks_p  */
8573   gimple_merge_blocks,		/* merge_blocks  */
8574   gimple_predict_edge,		/* predict_edge  */
8575   gimple_predicted_by_p,	/* predicted_by_p  */
8576   gimple_can_duplicate_bb_p,	/* can_duplicate_block_p  */
8577   gimple_duplicate_bb,		/* duplicate_block  */
8578   gimple_split_edge,		/* split_edge  */
8579   gimple_make_forwarder_block,	/* make_forwarder_block  */
8580   NULL,				/* tidy_fallthru_edge  */
8581   NULL,				/* force_nonfallthru */
8582   gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8583   gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8584   gimple_flow_call_edges_add,   /* flow_call_edges_add */
8585   gimple_execute_on_growing_pred,	/* execute_on_growing_pred */
8586   gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8587   gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8588   gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8589   gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi  */
8590   extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8591   flush_pending_stmts, 		/* flush_pending_stmts */
8592   gimple_empty_block_p,           /* block_empty_p */
8593   gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8594   gimple_account_profile_record,
8595 };
8596 
8597 
8598 /* Split all critical edges.  */
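/* A critical edge is an edge whose source block has multiple
   successors and whose destination block has multiple predecessors;
   splitting such an edge inserts a new block on it, giving passes a
   place where code destined for the edge can be put.  */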
8599 
8600 unsigned int
8601 split_critical_edges (void)
8602 {
8603   basic_block bb;
8604   edge e;
8605   edge_iterator ei;
8606 
8607   /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8608      expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
8609      mappings around the calls to split_edge.  */
8610   start_recording_case_labels ();
8611   FOR_ALL_BB_FN (bb, cfun)
8612     {
8613       FOR_EACH_EDGE (e, ei, bb->succs)
8614         {
8615 	  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8616 	    split_edge (e);
8617 	  /* PRE inserts statements on edges and expects that, since
8618 	     split_critical_edges was done beforehand, committing edge
8619 	     insertions will not split more edges.  In addition to critical
8620 	     edges we must split non-critical edges whose source block ends
8621 	     in a control flow statement, such as RESX.
8622 	     Go ahead and split them too.  This matches the logic in
8623 	     gimple_find_edge_insert_loc.  */
8624 	  else if ((!single_pred_p (e->dest)
8625 	            || !gimple_seq_empty_p (phi_nodes (e->dest))
8626 		    || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8627 		   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8628 	           && !(e->flags & EDGE_ABNORMAL))
8629 	    {
8630 	      gimple_stmt_iterator gsi;
8631 
8632 	      gsi = gsi_last_bb (e->src);
8633 	      if (!gsi_end_p (gsi)
8634 		  && stmt_ends_bb_p (gsi_stmt (gsi))
8635 		  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8636 		      && !gimple_call_builtin_p (gsi_stmt (gsi),
8637 						 BUILT_IN_RETURN)))
8638 		split_edge (e);
8639 	    }
8640 	}
8641     }
8642   end_recording_case_labels ();
8643   return 0;
8644 }
8645 
8646 namespace {
8647 
8648 const pass_data pass_data_split_crit_edges =
8649 {
8650   GIMPLE_PASS, /* type */
8651   "crited", /* name */
8652   OPTGROUP_NONE, /* optinfo_flags */
8653   TV_TREE_SPLIT_EDGES, /* tv_id */
8654   PROP_cfg, /* properties_required */
8655   PROP_no_crit_edges, /* properties_provided */
8656   0, /* properties_destroyed */
8657   0, /* todo_flags_start */
8658   0, /* todo_flags_finish */
8659 };
8660 
8661 class pass_split_crit_edges : public gimple_opt_pass
8662 {
8663 public:
8664   pass_split_crit_edges (gcc::context *ctxt)
8665     : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8666   {}
8667 
8668   /* opt_pass methods: */
8669   virtual unsigned int execute (function *) { return split_critical_edges (); }
8670 
8671   opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8672 }; // class pass_split_crit_edges
8673 
8674 } // anon namespace
8675 
8676 gimple_opt_pass *
8677 make_pass_split_crit_edges (gcc::context *ctxt)
8678 {
8679   return new pass_split_crit_edges (ctxt);
8680 }
8681 
8682 
8683 /* Insert COND expression which is GIMPLE_COND after STMT
8684    in basic block BB with appropriate basic block split
8685    and creation of a new conditionally executed basic block.
8686    Return the created basic block.  */
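/* A sketch of the resulting shape: BB is split after STMT, COND
   becomes BB's last statement, and

       BB:      ...; STMT; if (COND) goto NEW_BB; else goto REST;
       NEW_BB:  empty, conditionally executed, falls through to REST;
       REST:    the statements that followed STMT in BB;

   where NEW_BB, the returned block, is left for the caller to fill.  */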
8687 basic_block
8688 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond)
8689 {
8690   edge fall = split_block (bb, stmt);
8691   gimple_stmt_iterator iter = gsi_last_bb (bb);
8692   basic_block new_bb;
8693 
8694   /* Insert cond statement.  */
8695   gcc_assert (gimple_code (cond) == GIMPLE_COND);
8696   if (gsi_end_p (iter))
8697     gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8698   else
8699     gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8700 
8701   /* Create conditionally executed block.  */
8702   new_bb = create_empty_bb (bb);
8703   make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8704   make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8705 
8706   /* Fix edge for split bb.  */
8707   fall->flags = EDGE_FALSE_VALUE;
8708 
8709   /* Update dominance info.  */
8710   if (dom_info_available_p (CDI_DOMINATORS))
8711     {
8712       set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8713       set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8714     }
8715 
8716   /* Update loop info.  */
8717   if (current_loops)
8718     add_bb_to_loop (new_bb, bb->loop_father);
8719 
8720   return new_bb;
8721 }
8722 
8723 /* Build a ternary operation and gimplify it.  Emit code before GSI.
8724    Return the gimple_val holding the result.  */
8725 
8726 tree
8727 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8728 		 tree type, tree a, tree b, tree c)
8729 {
8730   tree ret;
8731   location_t loc = gimple_location (gsi_stmt (*gsi));
8732 
8733   ret = fold_build3_loc (loc, code, type, a, b, c);
8734   STRIP_NOPS (ret);
8735 
8736   return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8737                                    GSI_SAME_STMT);
8738 }
8739 
8740 /* Build a binary operation and gimplify it.  Emit code before GSI.
8741    Return the gimple_val holding the result.  */
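/* For example (a sketch): to emit T = A + B before the statement GSI
   points to, one would write

       tree t = gimplify_build2 (gsi, PLUS_EXPR, type, a, b);

   the folded expression is gimplified, any statements it requires are
   inserted before GSI, and a gimple value holding the result is
   returned.  */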
8742 
8743 tree
8744 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8745 		 tree type, tree a, tree b)
8746 {
8747   tree ret;
8748 
8749   ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8750   STRIP_NOPS (ret);
8751 
8752   return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8753                                    GSI_SAME_STMT);
8754 }
8755 
8756 /* Build a unary operation and gimplify it.  Emit code before GSI.
8757    Return the gimple_val holding the result.  */
8758 
8759 tree
8760 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8761 		 tree a)
8762 {
8763   tree ret;
8764 
8765   ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8766   STRIP_NOPS (ret);
8767 
8768   return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8769                                    GSI_SAME_STMT);
8770 }
8771 
8772 
8773 
8774 /* Given a basic block B which ends with a conditional and has
8775    precisely two successors, determine which of the edges is taken if
8776    the conditional is true and which is taken if the conditional is
8777    false.  Set TRUE_EDGE and FALSE_EDGE appropriately.  */
8778 
8779 void
8780 extract_true_false_edges_from_block (basic_block b,
8781 				     edge *true_edge,
8782 				     edge *false_edge)
8783 {
8784   edge e = EDGE_SUCC (b, 0);
8785 
8786   if (e->flags & EDGE_TRUE_VALUE)
8787     {
8788       *true_edge = e;
8789       *false_edge = EDGE_SUCC (b, 1);
8790     }
8791   else
8792     {
8793       *false_edge = e;
8794       *true_edge = EDGE_SUCC (b, 1);
8795     }
8796 }
8797 
8798 
8799 /* From a controlling predicate in the immediate dominator DOM of
8800    PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
8801    predicate evaluates to true and false and store them to
8802    *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
8803    they are non-NULL.  Return true if the edges can be determined,
8804    else return false.  */
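/* E.g., for the diamond

	    DOM
	   /   \
	 BB1   BB2
	   \   /
	 PHIBLOCK

   where DOM's true edge leads to BB1 and its false edge to BB2, the
   BB1->PHIBLOCK edge is stored in *TRUE_CONTROLLED_EDGE and the
   BB2->PHIBLOCK edge in *FALSE_CONTROLLED_EDGE.  */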
8805 
8806 bool
8807 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
8808 				     edge *true_controlled_edge,
8809 				     edge *false_controlled_edge)
8810 {
8811   basic_block bb = phiblock;
8812   edge true_edge, false_edge, tem;
8813   edge e0 = NULL, e1 = NULL;
8814 
8815   /* We have to verify that one edge into the PHI node is dominated
8816      by the true edge of the predicate block and the other edge
8817      dominated by the false edge.  This ensures that the PHI argument
8818      we are going to take is completely determined by the path we
8819      take from the predicate block.
8820      We can only use BB dominance checks below if the destination of
8821      the true/false edge is dominated by that edge, i.e. only
8822      has a single predecessor.  */
8823   extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
8824   tem = EDGE_PRED (bb, 0);
8825   if (tem == true_edge
8826       || (single_pred_p (true_edge->dest)
8827 	  && (tem->src == true_edge->dest
8828 	      || dominated_by_p (CDI_DOMINATORS,
8829 				 tem->src, true_edge->dest))))
8830     e0 = tem;
8831   else if (tem == false_edge
8832 	   || (single_pred_p (false_edge->dest)
8833 	       && (tem->src == false_edge->dest
8834 		   || dominated_by_p (CDI_DOMINATORS,
8835 				      tem->src, false_edge->dest))))
8836     e1 = tem;
8837   else
8838     return false;
8839   tem = EDGE_PRED (bb, 1);
8840   if (tem == true_edge
8841       || (single_pred_p (true_edge->dest)
8842 	  && (tem->src == true_edge->dest
8843 	      || dominated_by_p (CDI_DOMINATORS,
8844 				 tem->src, true_edge->dest))))
8845     e0 = tem;
8846   else if (tem == false_edge
8847 	   || (single_pred_p (false_edge->dest)
8848 	       && (tem->src == false_edge->dest
8849 		   || dominated_by_p (CDI_DOMINATORS,
8850 				      tem->src, false_edge->dest))))
8851     e1 = tem;
8852   else
8853     return false;
8854   if (!e0 || !e1)
8855     return false;
8856 
8857   if (true_controlled_edge)
8858     *true_controlled_edge = e0;
8859   if (false_controlled_edge)
8860     *false_controlled_edge = e1;
8861 
8862   return true;
8863 }
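
/* An illustrative sketch (hypothetical, #if 0'd): select the PHI
   argument that flows into PHI's block when the controlling predicate
   in its immediate dominator DOM evaluates to true.  The helper name
   is an assumption of the example.  */
#if 0
static tree
example_phi_arg_for_true_path (basic_block dom, gphi *phi)
{
  edge true_e, false_e;
  if (!extract_true_false_controlled_edges (dom, gimple_bb (phi),
					    &true_e, &false_e))
    return NULL_TREE;
  /* TRUE_E is the incoming edge whose PHI argument is selected when
     the predicate is true.  */
  return PHI_ARG_DEF_FROM_EDGE (phi, true_e);
}
#endif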
8864 
8865 
8866 
8867 /* Emit return warnings.  */
8868 
8869 namespace {
8870 
8871 const pass_data pass_data_warn_function_return =
8872 {
8873   GIMPLE_PASS, /* type */
8874   "*warn_function_return", /* name */
8875   OPTGROUP_NONE, /* optinfo_flags */
8876   TV_NONE, /* tv_id */
8877   PROP_cfg, /* properties_required */
8878   0, /* properties_provided */
8879   0, /* properties_destroyed */
8880   0, /* todo_flags_start */
8881   0, /* todo_flags_finish */
8882 };
8883 
8884 class pass_warn_function_return : public gimple_opt_pass
8885 {
8886 public:
8887   pass_warn_function_return (gcc::context *ctxt)
8888     : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8889   {}
8890 
8891   /* opt_pass methods: */
8892   virtual unsigned int execute (function *);
8893 
8894 }; // class pass_warn_function_return
8895 
8896 unsigned int
8897 pass_warn_function_return::execute (function *fun)
8898 {
8899   source_location location;
8900   gimple *last;
8901   edge e;
8902   edge_iterator ei;
8903 
8904   if (!targetm.warn_func_return (fun->decl))
8905     return 0;
8906 
8907   /* If we have a path to EXIT, then we do return.  */
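  /* On a FUNCTION_DECL, TREE_THIS_VOLATILE means the function was
     declared noreturn, so a reachable EXIT contradicts the
     declaration.  */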
8908   if (TREE_THIS_VOLATILE (fun->decl)
8909       && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8910     {
8911       location = UNKNOWN_LOCATION;
8912       FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8913 	{
8914 	  last = last_stmt (e->src);
8915 	  if ((gimple_code (last) == GIMPLE_RETURN
8916 	       || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8917 	      && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8918 	    break;
8919 	}
8920       if (location == UNKNOWN_LOCATION)
8921 	location = cfun->function_end_locus;
8922 
8923 #ifdef notyet
8924       if (warn_missing_noreturn)
8925         warning_at (location, 0, "%<noreturn%> function does return");
8926 #endif
8927     }
8928 
8929   /* If we see "return;" in some basic block, then we do reach the end
8930      without returning a value.  */
8931   else if (warn_return_type
8932 	   && !TREE_NO_WARNING (fun->decl)
8933 	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8934     {
8935       FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8936 	{
8937 	  gimple *last = last_stmt (e->src);
8938 	  greturn *return_stmt = dyn_cast <greturn *> (last);
8939 	  if (return_stmt
8940 	      && gimple_return_retval (return_stmt) == NULL
8941 	      && !gimple_no_warning_p (last))
8942 	    {
8943 	      location = gimple_location (last);
8944 	      if (location == UNKNOWN_LOCATION)
8945 		location = fun->function_end_locus;
8946 	      warning_at (location, OPT_Wreturn_type,
8947 			  "control reaches end of non-void function");
8948 	      TREE_NO_WARNING (fun->decl) = 1;
8949 	      break;
8950 	    }
8951 	}
8952       /* -fsanitize=return turns fallthrough from the end of non-void function
8953 	 into __builtin___ubsan_handle_missing_return () call.
8954 	 Recognize those too.  */
8955       basic_block bb;
8956       if (!TREE_NO_WARNING (fun->decl) && (flag_sanitize & SANITIZE_RETURN))
8957 	FOR_EACH_BB_FN (bb, fun)
8958 	  if (EDGE_COUNT (bb->succs) == 0)
8959 	    {
8960 	      gimple *last = last_stmt (bb);
8961 	      const enum built_in_function ubsan_missing_ret
8962 		= BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
8963 	      if (last && gimple_call_builtin_p (last, ubsan_missing_ret))
8964 		{
8965 		  gimple_stmt_iterator gsi = gsi_for_stmt (last);
8966 		  gsi_prev_nondebug (&gsi);
8967 		  gimple *prev = gsi_stmt (gsi);
8968 		  if (prev == NULL)
8969 		    location = UNKNOWN_LOCATION;
8970 		  else
8971 		    location = gimple_location (prev);
8972 		  if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
8973 		    location = fun->function_end_locus;
8974 		  warning_at (location, OPT_Wreturn_type,
8975 			      "control reaches end of non-void function");
8976 		  TREE_NO_WARNING (fun->decl) = 1;
8977 		  break;
8978 		}
8979 	    }
8980     }
8981   return 0;
8982 }
8983 
8984 } // anon namespace
8985 
8986 gimple_opt_pass *
8987 make_pass_warn_function_return (gcc::context *ctxt)
8988 {
8989   return new pass_warn_function_return (ctxt);
8990 }
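
/* For reference, the kind of user code the pass above diagnoses with
   -Wreturn-type (an illustrative example, not part of GCC):

     int f (int x)
     {
       if (x > 0)
	 return x;
     }   <-- warning: control reaches end of non-void function

   The location reported is that of a bare "return;" when one exists,
   otherwise the end-of-function locus computed above.  */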
8991 
8992 /* Walk a gimplified function and warn for functions whose return value is
8993    ignored and attribute((warn_unused_result)) is set.  This is done before
8994    inlining, so we don't have to worry about that.  */
8995 
8996 static void
8997 do_warn_unused_result (gimple_seq seq)
8998 {
8999   tree fdecl, ftype;
9000   gimple_stmt_iterator i;
9001 
9002   for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
9003     {
9004       gimple *g = gsi_stmt (i);
9005 
9006       switch (gimple_code (g))
9007 	{
9008 	case GIMPLE_BIND:
9009 	  do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
9010 	  break;
9011 	case GIMPLE_TRY:
9012 	  do_warn_unused_result (gimple_try_eval (g));
9013 	  do_warn_unused_result (gimple_try_cleanup (g));
9014 	  break;
9015 	case GIMPLE_CATCH:
9016 	  do_warn_unused_result (gimple_catch_handler (
9017 				   as_a <gcatch *> (g)));
9018 	  break;
9019 	case GIMPLE_EH_FILTER:
9020 	  do_warn_unused_result (gimple_eh_filter_failure (g));
9021 	  break;
9022 
9023 	case GIMPLE_CALL:
9024 	  if (gimple_call_lhs (g))
9025 	    break;
9026 	  if (gimple_call_internal_p (g))
9027 	    break;
9028 
9029 	  /* This is a naked call, as opposed to a GIMPLE_CALL with an
9030 	     LHS.  All calls whose value is ignored should be
9031 	     represented like this.  Look for the attribute.  */
9032 	  fdecl = gimple_call_fndecl (g);
9033 	  ftype = gimple_call_fntype (g);
9034 
9035 	  if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
9036 	    {
9037 	      location_t loc = gimple_location (g);
9038 
9039 	      if (fdecl)
9040 		warning_at (loc, OPT_Wunused_result,
9041 			    "ignoring return value of %qD, "
9042 			    "declared with attribute warn_unused_result",
9043 			    fdecl);
9044 	      else
9045 		warning_at (loc, OPT_Wunused_result,
9046 			    "ignoring return value of function "
9047 			    "declared with attribute warn_unused_result");
9048 	    }
9049 	  break;
9050 
9051 	default:
9052 	  /* Not a container, not a call, or a call whose value is used.  */
9053 	  break;
9054 	}
9055     }
9056 }
9057 
9058 namespace {
9059 
9060 const pass_data pass_data_warn_unused_result =
9061 {
9062   GIMPLE_PASS, /* type */
9063   "*warn_unused_result", /* name */
9064   OPTGROUP_NONE, /* optinfo_flags */
9065   TV_NONE, /* tv_id */
9066   PROP_gimple_any, /* properties_required */
9067   0, /* properties_provided */
9068   0, /* properties_destroyed */
9069   0, /* todo_flags_start */
9070   0, /* todo_flags_finish */
9071 };
9072 
9073 class pass_warn_unused_result : public gimple_opt_pass
9074 {
9075 public:
9076   pass_warn_unused_result (gcc::context *ctxt)
9077     : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9078   {}
9079 
9080   /* opt_pass methods: */
9081   virtual bool gate (function *) { return flag_warn_unused_result; }
9082   virtual unsigned int execute (function *)
9083     {
9084       do_warn_unused_result (gimple_body (current_function_decl));
9085       return 0;
9086     }
9087 
9088 }; // class pass_warn_unused_result
9089 
9090 } // anon namespace
9091 
9092 gimple_opt_pass *
9093 make_pass_warn_unused_result (gcc::context *ctxt)
9094 {
9095   return new pass_warn_unused_result (ctxt);
9096 }
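
/* For reference, user code diagnosed by the pass above (an
   illustrative example, not part of GCC):

     __attribute__ ((warn_unused_result)) int must_check (void);

     void g (void)
     {
       must_check ();         <-- ignoring return value of 'must_check',
				  declared with attribute warn_unused_result
       (void) must_check ();  <-- still diagnosed; the cast to void does
				  not give the GIMPLE_CALL a LHS
     }  */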
9097 
9098 /* IPA passes, compilation of earlier functions or inlining
9099    might have changed some properties, such as marking functions nothrow,
9100    pure, const or noreturn.
9101    Remove redundant edges and basic blocks, and create new ones if necessary.
9102 
9103    This pass can't be executed as a standalone pass from the pass manager,
9104    because between inlining and this fixup verify_flow_info would fail.  */
9105 
9106 unsigned int
9107 execute_fixup_cfg (void)
9108 {
9109   basic_block bb;
9110   gimple_stmt_iterator gsi;
9111   int todo = 0;
9112   gcov_type count_scale;
9113   edge e;
9114   edge_iterator ei;
9115   cgraph_node *node = cgraph_node::get (current_function_decl);
9116 
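  /* COUNT_SCALE is a fixed-point ratio with REG_BR_PROB_BASE as its
     unit: GCOV_COMPUTE_SCALE (num, den) is in effect
     num * REG_BR_PROB_BASE / den, and apply_scale below multiplies by
     the ratio and divides the base back out.  */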
9117   count_scale
9118     = GCOV_COMPUTE_SCALE (node->count, ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
9119 
9120   ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9121   EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9122     = apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count, count_scale);
9123 
9124   FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
9125     e->count = apply_scale (e->count, count_scale);
9126 
9127   FOR_EACH_BB_FN (bb, cfun)
9128     {
9129       bb->count = apply_scale (bb->count, count_scale);
9130       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9131 	{
9132 	  gimple *stmt = gsi_stmt (gsi);
9133 	  tree decl = is_gimple_call (stmt)
9134 		      ? gimple_call_fndecl (stmt)
9135 		      : NULL;
9136 	  if (decl)
9137 	    {
9138 	      int flags = gimple_call_flags (stmt);
9139 	      if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9140 		{
9141 		  if (gimple_purge_dead_abnormal_call_edges (bb))
9142 		    todo |= TODO_cleanup_cfg;
9143 
9144 		  if (gimple_in_ssa_p (cfun))
9145 		    {
9146 		      todo |= TODO_update_ssa | TODO_cleanup_cfg;
9147 		      update_stmt (stmt);
9148 		    }
9149 		}
9150 
9151 	      if (flags & ECF_NORETURN
9152 		  && fixup_noreturn_call (stmt))
9153 		todo |= TODO_cleanup_cfg;
9154 	    }
9155 
9156 	  /* Remove stores to variables we marked write-only.
9157 	     Keep access when store has side effect, i.e. in case when source
9158 	     is volatile.  */
9159 	  if (gimple_store_p (stmt)
9160 	      && !gimple_has_side_effects (stmt))
9161 	    {
9162 	      tree lhs = get_base_address (gimple_get_lhs (stmt));
9163 
9164 	      if (VAR_P (lhs)
9165 		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9166 		  && varpool_node::get (lhs)->writeonly)
9167 		{
9168 		  unlink_stmt_vdef (stmt);
9169 		  gsi_remove (&gsi, true);
9170 		  release_defs (stmt);
9171 		  todo |= TODO_update_ssa | TODO_cleanup_cfg;
9172 		  continue;
9173 		}
9174 	    }
9175 	  /* For calls we can simply remove the LHS when it is known
9176 	     to be write-only.  */
9177 	  if (is_gimple_call (stmt)
9178 	      && gimple_get_lhs (stmt))
9179 	    {
9180 	      tree lhs = get_base_address (gimple_get_lhs (stmt));
9181 
9182 	      if (VAR_P (lhs)
9183 		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9184 		  && varpool_node::get (lhs)->writeonly)
9185 		{
9186 		  gimple_call_set_lhs (stmt, NULL);
9187 		  update_stmt (stmt);
9188 		  todo |= TODO_update_ssa | TODO_cleanup_cfg;
9189 		}
9190 	    }
9191 
9192 	  if (maybe_clean_eh_stmt (stmt)
9193 	      && gimple_purge_dead_eh_edges (bb))
9194 	    todo |= TODO_cleanup_cfg;
9195 	  gsi_next (&gsi);
9196 	}
9197 
9198       FOR_EACH_EDGE (e, ei, bb->succs)
9199         e->count = apply_scale (e->count, count_scale);
9200 
9201       /* If we have a basic block with no successors that does not
9202 	 end with a control statement or a noreturn call, end it with
9203 	 a call to __builtin_unreachable.  This situation can occur
9204 	 when inlining a noreturn call that does in fact return.  */
9205       if (EDGE_COUNT (bb->succs) == 0)
9206 	{
9207 	  gimple *stmt = last_stmt (bb);
9208 	  if (!stmt
9209 	      || (!is_ctrl_stmt (stmt)
9210 		  && (!is_gimple_call (stmt)
9211 		      || !gimple_call_noreturn_p (stmt))))
9212 	    {
9213 	      if (stmt && is_gimple_call (stmt))
9214 		gimple_call_set_ctrl_altering (stmt, false);
9215 	      tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9216 	      stmt = gimple_build_call (fndecl, 0);
9217 	      gimple_stmt_iterator gsi = gsi_last_bb (bb);
9218 	      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9219 	      if (!cfun->after_inlining)
9220 		{
9221 		  gcall *call_stmt = dyn_cast <gcall *> (stmt);
9222 		  int freq
9223 		    = compute_call_stmt_bb_frequency (current_function_decl,
9224 						      bb);
9225 		  node->create_edge (cgraph_node::get_create (fndecl),
9226 				     call_stmt, bb->count, freq);
9227 		}
9228 	    }
9229 	}
9230     }
9231   if (count_scale != REG_BR_PROB_BASE)
9232     compute_function_frequency ();
9233 
9234   if (current_loops
9235       && (todo & TODO_cleanup_cfg))
9236     loops_state_set (LOOPS_NEED_FIXUP);
9237 
9238   return todo;
9239 }
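
/* For reference, one way the __builtin_unreachable fixup above can be
   triggered (an illustrative example, not part of GCC):

     __attribute__ ((noreturn)) void dead (void);
     void dead (void) { }      // erroneously falls off the end

   Inlining DEAD leaves the caller with a block that has no successors
   yet ends in neither a control statement nor a noreturn call, so it
   is terminated with a call to __builtin_unreachable.  */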
9240 
9241 namespace {
9242 
9243 const pass_data pass_data_fixup_cfg =
9244 {
9245   GIMPLE_PASS, /* type */
9246   "fixup_cfg", /* name */
9247   OPTGROUP_NONE, /* optinfo_flags */
9248   TV_NONE, /* tv_id */
9249   PROP_cfg, /* properties_required */
9250   0, /* properties_provided */
9251   0, /* properties_destroyed */
9252   0, /* todo_flags_start */
9253   0, /* todo_flags_finish */
9254 };
9255 
9256 class pass_fixup_cfg : public gimple_opt_pass
9257 {
9258 public:
9259   pass_fixup_cfg (gcc::context *ctxt)
9260     : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
9261   {}
9262 
9263   /* opt_pass methods: */
9264   opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
9265   virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
9266 
9267 }; // class pass_fixup_cfg
9268 
9269 } // anon namespace
9270 
9271 gimple_opt_pass *
9272 make_pass_fixup_cfg (gcc::context *ctxt)
9273 {
9274   return new pass_fixup_cfg (ctxt);
9275 }
9276 
9277 /* Garbage collection support for edge_def.  */
9278 
9279 extern void gt_ggc_mx (tree&);
9280 extern void gt_ggc_mx (gimple *&);
9281 extern void gt_ggc_mx (rtx&);
9282 extern void gt_ggc_mx (basic_block&);
9283 
9284 static void
9285 gt_ggc_mx (rtx_insn *& x)
9286 {
9287   if (x)
9288     gt_ggc_mx_rtx_def ((void *) x);
9289 }
9290 
9291 void
9292 gt_ggc_mx (edge_def *e)
9293 {
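  /* E->goto_locus may wrap a BLOCK tree; mark it together with the
     edge's endpoints and any pending statements so GC keeps them
     live.  */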
9294   tree block = LOCATION_BLOCK (e->goto_locus);
9295   gt_ggc_mx (e->src);
9296   gt_ggc_mx (e->dest);
9297   if (current_ir_type () == IR_GIMPLE)
9298     gt_ggc_mx (e->insns.g);
9299   else
9300     gt_ggc_mx (e->insns.r);
9301   gt_ggc_mx (block);
9302 }
9303 
9304 /* PCH support for edge_def.  */
9305 
9306 extern void gt_pch_nx (tree&);
9307 extern void gt_pch_nx (gimple *&);
9308 extern void gt_pch_nx (rtx&);
9309 extern void gt_pch_nx (basic_block&);
9310 
9311 static void
9312 gt_pch_nx (rtx_insn *& x)
9313 {
9314   if (x)
9315     gt_pch_nx_rtx_def ((void *) x);
9316 }
9317 
9318 void
9319 gt_pch_nx (edge_def *e)
9320 {
9321   tree block = LOCATION_BLOCK (e->goto_locus);
9322   gt_pch_nx (e->src);
9323   gt_pch_nx (e->dest);
9324   if (current_ir_type () == IR_GIMPLE)
9325     gt_pch_nx (e->insns.g);
9326   else
9327     gt_pch_nx (e->insns.r);
9328   gt_pch_nx (block);
9329 }
9330 
9331 void
9332 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9333 {
9334   tree block = LOCATION_BLOCK (e->goto_locus);
9335   op (&(e->src), cookie);
9336   op (&(e->dest), cookie);
9337   if (current_ir_type () == IR_GIMPLE)
9338     op (&(e->insns.g), cookie);
9339   else
9340     op (&(e->insns.r), cookie);
9341   op (&(block), cookie);
9342 }
9343 
9344 #if CHECKING_P
9345 
9346 namespace selftest {
9347 
9348 /* Helper function for CFG selftests: create a dummy function decl
9349    and push it as cfun.  */
9350 
9351 static tree
9352 push_fndecl (const char *name)
9353 {
9354   tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
9355   /* FIXME: this uses input_location: */
9356   tree fndecl = build_fn_decl (name, fn_type);
9357   tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
9358 			    NULL_TREE, integer_type_node);
9359   DECL_RESULT (fndecl) = retval;
9360   push_struct_function (fndecl);
9361   function *fun = DECL_STRUCT_FUNCTION (fndecl);
9362   ASSERT_TRUE (fun != NULL);
9363   init_empty_tree_cfg_for_function (fun);
9364   ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
9365   ASSERT_EQ (0, n_edges_for_fn (fun));
9366   return fndecl;
9367 }
9368 
9369 /* These tests directly create CFGs.
9370    Compare with the static fns within tree-cfg.c:
9371      - build_gimple_cfg
9372      - make_blocks: calls create_basic_block (seq, bb);
9373      - make_edges.   */
9374 
9375 /* Verify a simple cfg of the form:
9376      ENTRY -> A -> B -> C -> EXIT.  */
9377 
9378 static void
9379 test_linear_chain ()
9380 {
9381   gimple_register_cfg_hooks ();
9382 
9383   tree fndecl = push_fndecl ("cfg_test_linear_chain");
9384   function *fun = DECL_STRUCT_FUNCTION (fndecl);
9385 
9386   /* Create some empty blocks.  */
9387   basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9388   basic_block bb_b = create_empty_bb (bb_a);
9389   basic_block bb_c = create_empty_bb (bb_b);
9390 
9391   ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
9392   ASSERT_EQ (0, n_edges_for_fn (fun));
9393 
9394   /* Create some edges: a simple linear chain of BBs.  */
9395   make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9396   make_edge (bb_a, bb_b, 0);
9397   make_edge (bb_b, bb_c, 0);
9398   make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9399 
9400   /* Verify the edges.  */
9401   ASSERT_EQ (4, n_edges_for_fn (fun));
9402   ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
9403   ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
9404   ASSERT_EQ (1, bb_a->preds->length ());
9405   ASSERT_EQ (1, bb_a->succs->length ());
9406   ASSERT_EQ (1, bb_b->preds->length ());
9407   ASSERT_EQ (1, bb_b->succs->length ());
9408   ASSERT_EQ (1, bb_c->preds->length ());
9409   ASSERT_EQ (1, bb_c->succs->length ());
9410   ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
9411   ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
9412 
9413   /* Verify the dominance information.
9414      Each BB in our simple chain should be dominated by the one before
9415      it.  */
9416   calculate_dominance_info (CDI_DOMINATORS);
9417   ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9418   ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9419   vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9420   ASSERT_EQ (1, dom_by_b.length ());
9421   ASSERT_EQ (bb_c, dom_by_b[0]);
9422   free_dominance_info (CDI_DOMINATORS);
9423   dom_by_b.release ();
9424 
9425   /* Similarly for post-dominance: each BB in our chain is post-dominated
9426      by the one after it.  */
9427   calculate_dominance_info (CDI_POST_DOMINATORS);
9428   ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9429   ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9430   vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9431   ASSERT_EQ (1, postdom_by_b.length ());
9432   ASSERT_EQ (bb_a, postdom_by_b[0]);
9433   free_dominance_info (CDI_POST_DOMINATORS);
9434   postdom_by_b.release ();
9435 
9436   pop_cfun ();
9437 }
9438 
9439 /* Verify a simple CFG of the form:
9440      ENTRY
9441        |
9442        A
9443       / \
9444      /t  \f
9445     B     C
9446      \   /
9447       \ /
9448        D
9449        |
9450       EXIT.  */
9451 
9452 static void
9453 test_diamond ()
9454 {
9455   gimple_register_cfg_hooks ();
9456 
9457   tree fndecl = push_fndecl ("cfg_test_diamond");
9458   function *fun = DECL_STRUCT_FUNCTION (fndecl);
9459 
9460   /* Create some empty blocks.  */
9461   basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9462   basic_block bb_b = create_empty_bb (bb_a);
9463   basic_block bb_c = create_empty_bb (bb_a);
9464   basic_block bb_d = create_empty_bb (bb_b);
9465 
9466   ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
9467   ASSERT_EQ (0, n_edges_for_fn (fun));
9468 
9469   /* Create the edges.  */
9470   make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9471   make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
9472   make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
9473   make_edge (bb_b, bb_d, 0);
9474   make_edge (bb_c, bb_d, 0);
9475   make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9476 
9477   /* Verify the edges.  */
9478   ASSERT_EQ (6, n_edges_for_fn (fun));
9479   ASSERT_EQ (1, bb_a->preds->length ());
9480   ASSERT_EQ (2, bb_a->succs->length ());
9481   ASSERT_EQ (1, bb_b->preds->length ());
9482   ASSERT_EQ (1, bb_b->succs->length ());
9483   ASSERT_EQ (1, bb_c->preds->length ());
9484   ASSERT_EQ (1, bb_c->succs->length ());
9485   ASSERT_EQ (2, bb_d->preds->length ());
9486   ASSERT_EQ (1, bb_d->succs->length ());
9487 
9488   /* Verify the dominance information.  */
9489   calculate_dominance_info (CDI_DOMINATORS);
9490   ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9491   ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9492   ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
9493   vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
9494   ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D in some order.  */
9495   dom_by_a.release ();
9496   vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9497   ASSERT_EQ (0, dom_by_b.length ());
9498   dom_by_b.release ();
9499   free_dominance_info (CDI_DOMINATORS);
9500 
9501   /* Similarly for post-dominance.  */
9502   calculate_dominance_info (CDI_POST_DOMINATORS);
9503   ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9504   ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9505   ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
9506   vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
9507   ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order.  */
9508   postdom_by_d.release ();
9509   vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9510   ASSERT_EQ (0, postdom_by_b.length ());
9511   postdom_by_b.release ();
9512   free_dominance_info (CDI_POST_DOMINATORS);
9513 
9514   pop_cfun ();
9515 }
9516 
9517 /* Verify that we can handle a CFG containing a "complete" aka
9518    fully-connected subgraph, where each of A, B, C and D below has
9519    edges pointing to every other node and also to itself.
9520    E.g.:
9521      ENTRY  EXIT
9522        |    ^
9523        |   /
9524        |  /
9525        | /
9526        V/
9527        A<--->B
9528        ^^   ^^
9529        | \ / |
9530        |  X  |
9531        | / \ |
9532        VV   VV
9533        C<--->D
9534 */
9535 
9536 static void
9537 test_fully_connected ()
9538 {
9539   gimple_register_cfg_hooks ();
9540 
9541   tree fndecl = push_fndecl ("cfg_fully_connected");
9542   function *fun = DECL_STRUCT_FUNCTION (fndecl);
9543 
9544   const int n = 4;
9545 
9546   /* Create some empty blocks.  */
9547   auto_vec <basic_block> subgraph_nodes;
9548   for (int i = 0; i < n; i++)
9549     subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
9550 
9551   ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
9552   ASSERT_EQ (0, n_edges_for_fn (fun));
9553 
9554   /* Create the edges.  */
9555   make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
9556   make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9557   for (int i = 0; i < n; i++)
9558     for (int j = 0; j < n; j++)
9559       make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
9560 
9561   /* Verify the edges.  */
9562   ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
9563   /* The first one is linked to ENTRY/EXIT as well as itself and
9564      everything else.  */
9565   ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
9566   ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
9567   /* The other ones in the subgraph are linked to everything in
9568      the subgraph (including themselves).  */
9569   for (int i = 1; i < n; i++)
9570     {
9571       ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
9572       ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
9573     }
9574 
9575   /* Verify the dominance information.  */
9576   calculate_dominance_info (CDI_DOMINATORS);
9577   /* The initial block in the subgraph should be dominated by ENTRY.  */
9578   ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
9579 	     get_immediate_dominator (CDI_DOMINATORS,
9580 				      subgraph_nodes[0]));
9581   /* Every other block in the subgraph should be dominated by the
9582      initial block.  */
9583   for (int i = 1; i < n; i++)
9584     ASSERT_EQ (subgraph_nodes[0],
9585 	       get_immediate_dominator (CDI_DOMINATORS,
9586 					subgraph_nodes[i]));
9587   free_dominance_info (CDI_DOMINATORS);
9588 
9589   /* Similarly for post-dominance.  */
9590   calculate_dominance_info (CDI_POST_DOMINATORS);
9591   /* The initial block in the subgraph should be postdominated by EXIT.  */
9592   ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
9593 	     get_immediate_dominator (CDI_POST_DOMINATORS,
9594 				      subgraph_nodes[0]));
9595   /* Every other block in the subgraph should be postdominated by the
9596      initial block, since that leads to EXIT.  */
9597   for (int i = 1; i < n; i++)
9598     ASSERT_EQ (subgraph_nodes[0],
9599 	       get_immediate_dominator (CDI_POST_DOMINATORS,
9600 					subgraph_nodes[i]));
9601   free_dominance_info (CDI_POST_DOMINATORS);
9602 
9603   pop_cfun ();
9604 }
9605 
9606 /* Run all of the selftests within this file.  */
9607 
9608 void
9609 tree_cfg_c_tests ()
9610 {
9611   test_linear_chain ();
9612   test_diamond ();
9613   test_fully_connected ();
9614 }
9615 
9616 } // namespace selftest
9617 
9618 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
9619    - loop
9620    - nested loops
9621    - switch statement (a block with many out-edges)
9622    - something that jumps to itself
9623    - etc  */
9624 
9625 #endif /* CHECKING_P */
9626