/* Control flow functions for trees.
   Copyright (C) 2001-2016 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-low.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently look up all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;
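
/* A sketch of how the two structures above are meant to be used.  The
   functions named here are defined later in this file, but the calling
   sequence shown is illustrative only, not lifted from a real caller:

     start_recording_case_labels ();
     ... redirect edges; get_cases_for_edge () now gives each
	 redirection cheap access to the CASE_LABEL_EXPRs that use
	 the old edge ...
     end_recording_case_labels ();

   end_recording_case_labels clears the CASE_CHAINs again and re-groups
   the case labels of every GIMPLE_SWITCH recorded in
   touched_switch_bbs.  */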

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}

/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}


static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
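
/* For instance, with the GNU C labels-as-values extension (an
   illustrative fragment, not taken from real input):

     void *p = &&lab;
     goto *p;		-- computed goto: dest is not a LABEL_DECL
   lab:;

   the GIMPLE_GOTO built for "goto *p" satisfies computed_goto_p, while
   a plain "goto lab" does not.  */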

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	{
	  gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
	  gimple *stmt;

	  if (gsi_end_p (gsi))
	    return false;
	  stmt = gsi_stmt (gsi);
	  while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
	    {
	      gsi_next (&gsi);
	      if (gsi_end_p (gsi))
		return false;
	      stmt = gsi_stmt (gsi);
	    }
	  return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
	}
    }
  return false;
}


/* Initialize the GF_CALL_CTRL_ALTERING flag, which indicates that the
   call could alter control flow other than via exception handling.  We
   initialize the flag at CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* A BUILT_IN_RETURN call is the same as a return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
	 as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
	  && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}
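
/* A hedged illustration of the categories tested above (not an
   exhaustive list): a call to abort () carries ECF_NORETURN, and a
   __builtin_return call acts like a return statement, so both are
   marked control altering; an ordinary call such as printf () normally
   satisfies none of the tests and gets the flag cleared.  */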


/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      gimple *prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}

/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}


/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block containing an
   IFN_ABNORMAL_DISPATCHER internal call, return the dispatcher's basic
   block; otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g
	    && is_gimple_call (g)
	    && gimple_call_internal_p (g)
	    && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
	  return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block containing an
   ABNORMAL_DISPATCHER internal call if needed, and create abnormal edges
   from BBS to it and from it to FOR_BB if COMPUTED_GOTO is false;
   otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						 1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
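
/* To illustrate the factoring of computed gotos performed above (block
   numbers and SSA names are invented for the sketch):

     before:			     after:
       <bb 3>: goto *p_1;	       <bb 3>: gotovar = p_1;  [fallthru]
       <bb 4>: goto *q_2;	       <bb 4>: gotovar = q_2;  [fallthru]
				       <bb 5>: <factored label>:
					       goto *gotovar;

   Each potential target block then receives a single abnormal edge,
   from <bb 5>, instead of one from every computed goto.  */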

/* Create outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, 2 when it ends with a statement that might return
   to this function via a nonlocal goto, and 0 otherwise.  Updates
   *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = make_gimple_omp_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
        gtransaction *txn = as_a <gtransaction *> (last);
	tree label1 = gimple_transaction_label_norm (txn);
	tree label2 = gimple_transaction_label_uninst (txn);

	if (label1)
	  make_edge (bb, label_to_block (label1), EDGE_FALLTHRU);
	if (label2)
	  make_edge (bb, label_to_block (label2),
		     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

	tree label3 = gimple_transaction_label_over (txn);
	if (gimple_transaction_subcode (txn)
	    & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
	  make_edge (bb, label_to_block (label3), EDGE_TM_ABORT);

	fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
	bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

/* Computed gotos are hell to deal with, especially if there are
   lots of them with a large number of destinations.  So we factor
   them to a common computed goto location before we build the
   edge list.  After we convert back to normal form, we will un-factor
   the computed gotos since factoring introduces an unwanted jump.
   For non-local gotos, and for abnormal edges from calls to either
   calls that return twice or forced labels, factor the abnormal edges
   too, by having all abnormal edges from the calls go to a common
   artificial basic block with an ABNORMAL_DISPATCHER internal call and
   abnormal edges from that basic block to all forced labels and calls
   returning twice.  We do this per OpenMP structured block, because
   those regions are guaranteed to be single entry single exit by the
   standard, so it is not allowed to enter or exit such regions
   abnormally this way; thus all computed gotos, non-local gotos and
   setjmp/longjmp calls must not transfer control across SESE region
   boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  free_omp_regions ();
}

/* Add SEQ after GSI.  Start a new bb after GSI, and create further bbs
   as needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);
      bb = bb->next_bb;
    }
  return true;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && filename_cmp (from.file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, gimple_location (first)))
	      || (last && same_line_p (locus, gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator = next_discriminator_for_locus (locus);
	      else
		e->dest->discriminator = next_discriminator_for_locus (locus);
	    }
	}
    }
}
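
/* For instance, with everything on one source line (an illustrative
   fragment):

     if (x) a (); else b ();

   the blocks holding the calls to a and b share the "if" line's locus;
   giving one of them a non-zero discriminator lets sample-based profile
   data attribute execution counts to the two arms separately.  */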

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}
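
/* For instance, the GNU C "asm goto" extension (an illustrative
   fragment):

     asm goto ("" : : : : err);

   lists "err" among its label operands, so the block ending in the asm
   gets an outgoing edge to the block holding "err:", in addition to
   the fallthru edge added by make_edges_bb.  */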

/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Clean up useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it any more,
   since (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL, return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block may have inserted an undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}


/* Clean up redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Clean up all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *txn = as_a <gtransaction *> (stmt);

	    label = gimple_transaction_label_norm (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_norm (txn, new_label);
	      }

	    label = gimple_transaction_label_uninst (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_uninst (txn, new_label);
	      }

	    label = gimple_transaction_label_over (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_over (txn, new_label);
	      }
	  }
	  break;

	default:
	  break;
      }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
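
/* As an illustration (hypothetical input; L1 is compiler generated,
   L2 user written):

     L1: L2: x = 1;

   step 1 selects L2 as the block's leading label, since user-defined
   labels are preferred; step 2 rewrites any reference to L1 into a
   reference to L2; and step 3 deletes L1, which is artificial and no
   longer used.  */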

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
	 default case.  */
      if (base_bb == default_bb)
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::add (base_high, 1);

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::eq_p (CASE_LOW (merge_case), bhp1))
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
			       gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	group_case_labels_stmt (as_a <gswitch *> (stmt));
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple *stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends with a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
1727   stmt = last_stmt (a);
1728   if (stmt && stmt_ends_bb_p (stmt))
1729     return false;
1730 
1731   /* Do not allow a block with only a non-local label to be merged.  */
1732   if (stmt)
1733     if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1734       if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1735 	return false;
1736 
1737   /* Examine the labels at the beginning of B.  */
1738   for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1739        gsi_next (&gsi))
1740     {
1741       tree lab;
1742       glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1743       if (!label_stmt)
1744 	break;
1745       lab = gimple_label_label (label_stmt);
1746 
1747       /* Do not remove user-forced labels or, at -O0, any user labels.  */
1748       if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1749 	return false;
1750     }
1751 
1752   /* Protect simple loop latches.  We only want to avoid merging
1753      the latch with the loop header or with a block in another
1754      loop in this case.  */
1755   if (current_loops
1756       && b->loop_father->latch == b
1757       && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1758       && (b->loop_father->header == a
1759 	  || b->loop_father != a->loop_father))
1760     return false;
1761 
1762   /* It must be possible to eliminate all phi nodes in B.  If ssa form
1763      is not up-to-date and a name-mapping is registered, we cannot eliminate
1764      any phis.  Symbols marked for renaming are never a problem though.  */
1765   for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1766        gsi_next (&gsi))
1767     {
1768       gphi *phi = gsi.phi ();
1769       /* Technically only new names matter.  */
1770       if (name_registered_for_update_p (PHI_RESULT (phi)))
1771 	return false;
1772     }
1773 
1774   /* When not optimizing, don't merge if we'd lose goto_locus.  */
1775   if (!optimize
1776       && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1777     {
1778       location_t goto_locus = single_succ_edge (a)->goto_locus;
1779       gimple_stmt_iterator prev, next;
1780       prev = gsi_last_nondebug_bb (a);
1781       next = gsi_after_labels (b);
1782       if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1783 	gsi_next_nondebug (&next);
1784       if ((gsi_end_p (prev)
1785 	   || gimple_location (gsi_stmt (prev)) != goto_locus)
1786 	  && (gsi_end_p (next)
1787 	      || gimple_location (gsi_stmt (next)) != goto_locus))
1788 	return false;
1789     }
1790 
1791   return true;
1792 }
1793 
1794 /* Replaces all uses of NAME by VAL.  */
1795 
1796 void
1797 replace_uses_by (tree name, tree val)
1798 {
1799   imm_use_iterator imm_iter;
1800   use_operand_p use;
1801   gimple *stmt;
1802   edge e;
1803 
1804   FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1805     {
1806       /* Mark the block if we change the last stmt in it.  */
1807       if (cfgcleanup_altered_bbs
1808 	  && stmt_ends_bb_p (stmt))
1809 	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1810 
1811       FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1812         {
1813 	  replace_exp (use, val);
1814 
1815 	  if (gimple_code (stmt) == GIMPLE_PHI)
1816 	    {
1817 	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1818 				       PHI_ARG_INDEX_FROM_USE (use));
1819 	      if (e->flags & EDGE_ABNORMAL
1820 		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1821 		{
1822 		  /* This can only occur for virtual operands, since
1823 		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
1824 		     would prevent replacement.  */
1825 		  gcc_checking_assert (virtual_operand_p (name));
1826 		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1827 		}
1828 	    }
1829 	}
1830 
1831       if (gimple_code (stmt) != GIMPLE_PHI)
1832 	{
1833 	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1834 	  gimple *orig_stmt = stmt;
1835 	  size_t i;
1836 
1837 	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
1838 	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
1839 	     only change something from non-invariant to invariant, and only
1840 	     when propagating constants.  */
1841 	  if (is_gimple_min_invariant (val))
1842 	    for (i = 0; i < gimple_num_ops (stmt); i++)
1843 	      {
1844 		tree op = gimple_op (stmt, i);
1845 		/* Operands may be empty here.  For example, the labels
1846 		   of a GIMPLE_COND are nulled out following the creation
1847 		   of the corresponding CFG edges.  */
1848 		if (op && TREE_CODE (op) == ADDR_EXPR)
1849 		  recompute_tree_invariant_for_addr_expr (op);
1850 	      }
1851 
1852 	  if (fold_stmt (&gsi))
1853 	    stmt = gsi_stmt (gsi);
1854 
1855 	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1856 	    gimple_purge_dead_eh_edges (gimple_bb (stmt));
1857 
1858 	  update_stmt (stmt);
1859 	}
1860     }
1861 
1862   gcc_checking_assert (has_zero_uses (name));
1863 
1864   /* Also update the trees stored in loop structures.  */
1865   if (current_loops)
1866     {
1867       struct loop *loop;
1868 
1869       FOR_EACH_LOOP (loop, 0)
1870 	{
1871 	  substitute_in_loop_info (loop, name, val);
1872 	}
1873     }
1874 }
1875 
1876 /* Merge block B into block A.  */
1877 
1878 static void
1879 gimple_merge_blocks (basic_block a, basic_block b)
1880 {
1881   gimple_stmt_iterator last, gsi;
1882   gphi_iterator psi;
1883 
1884   if (dump_file)
1885     fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1886 
1887   /* Remove all single-valued PHI nodes from block B of the form
1888      V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
1889   gsi = gsi_last_bb (a);
1890   for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1891     {
1892       gimple *phi = gsi_stmt (psi);
1893       tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1894       gimple *copy;
1895       bool may_replace_uses = (virtual_operand_p (def)
1896 			       || may_propagate_copy (def, use));
1897 
1898       /* In case we maintain loop closed ssa form, do not propagate arguments
1899 	 of loop exit phi nodes.  */
1900       if (current_loops
1901 	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1902 	  && !virtual_operand_p (def)
1903 	  && TREE_CODE (use) == SSA_NAME
1904 	  && a->loop_father != b->loop_father)
1905 	may_replace_uses = false;
1906 
1907       if (!may_replace_uses)
1908 	{
1909 	  gcc_assert (!virtual_operand_p (def));
1910 
1911 	  /* Note that just emitting the copies is fine -- there is no problem
1912 	     with ordering of phi nodes.  This is because A is the single
1913 	     predecessor of B, therefore results of the phi nodes cannot
1914 	     appear as arguments of the phi nodes.  */
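	  /* E.g. a degenerate PHI node V_1 = PHI <V_2> in B simply becomes
	     the copy V_1 = V_2 appended after the last statement of A.  */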
1915 	  copy = gimple_build_assign (def, use);
1916 	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1917           remove_phi_node (&psi, false);
1918 	}
1919       else
1920         {
1921 	  /* If we deal with a PHI for virtual operands, we can simply
1922 	     propagate these without fussing with folding or updating
1923 	     the stmt.  */
1924 	  if (virtual_operand_p (def))
1925 	    {
1926 	      imm_use_iterator iter;
1927 	      use_operand_p use_p;
1928 	      gimple *stmt;
1929 
1930 	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1931 		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1932 		  SET_USE (use_p, use);
1933 
1934 	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1935 		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
1936 	    }
1937 	  else
1938             replace_uses_by (def, use);
1939 
1940           remove_phi_node (&psi, true);
1941         }
1942     }
1943 
1944   /* Ensure that B follows A.  */
1945   move_block_after (b, a);
1946 
1947   gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1948   gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1949 
1950   /* Remove labels from B and set gimple_bb to A for other statements.  */
1951   for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
1952     {
1953       gimple *stmt = gsi_stmt (gsi);
1954       if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1955 	{
1956 	  tree label = gimple_label_label (label_stmt);
1957 	  int lp_nr;
1958 
1959 	  gsi_remove (&gsi, false);
1960 
1961 	  /* Now that we can thread computed gotos, we might have
1962 	     a situation where we have a forced label in block B.
1963 	     However, the label at the start of block B might still be
1964 	     used in other ways (think about the runtime checking for
1965 	     Fortran assigned gotos).  So we cannot just delete the
1966 	     label.  Instead we move the label to the start of block A.  */
1967 	  if (FORCED_LABEL (label))
1968 	    {
1969 	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
1970 	      gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
1971 	    }
1972 	  /* Other user labels are kept around in the form of a debug stmt.  */
1973 	  else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
1974 	    {
1975 	      gimple *dbg = gimple_build_debug_bind (label,
1976 						     integer_zero_node,
1977 						     stmt);
1978 	      gimple_debug_bind_reset_value (dbg);
1979 	      gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
1980 	    }
1981 
1982 	  lp_nr = EH_LANDING_PAD_NR (label);
1983 	  if (lp_nr)
1984 	    {
1985 	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
1986 	      lp->post_landing_pad = NULL;
1987 	    }
1988 	}
1989       else
1990 	{
1991 	  gimple_set_bb (stmt, a);
1992 	  gsi_next (&gsi);
1993 	}
1994     }
1995 
1996   /* When merging two BBs, if their counts are different, the larger count
1997      is selected as the new bb count. This is to handle inconsistent
1998      profiles.  */
1999   if (a->loop_father == b->loop_father)
2000     {
2001       a->count = MAX (a->count, b->count);
2002       a->frequency = MAX (a->frequency, b->frequency);
2003     }
2004 
2005   /* Merge the sequences.  */
2006   last = gsi_last_bb (a);
2007   gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2008   set_bb_seq (b, NULL);
2009 
2010   if (cfgcleanup_altered_bbs)
2011     bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2012 }
2013 
2014 
2015 /* Of the two successors of BB, return the one that is not reached by a
2016    complex edge, if there is one.  Otherwise, return BB.  We use
2017    this in optimizations that use post-dominators for their heuristics,
2018    to catch the cases in C++ where function calls are involved.  */
2019 
2020 basic_block
2021 single_noncomplex_succ (basic_block bb)
2022 {
2023   edge e0, e1;
2024   if (EDGE_COUNT (bb->succs) != 2)
2025     return bb;
2026 
2027   e0 = EDGE_SUCC (bb, 0);
2028   e1 = EDGE_SUCC (bb, 1);
2029   if (e0->flags & EDGE_COMPLEX)
2030     return e1->dest;
2031   if (e1->flags & EDGE_COMPLEX)
2032     return e0->dest;
2033 
2034   return bb;
2035 }
2036 
2037 /* CALL is a GIMPLE_CALL.  Set the current_function_calls_* flags.  */
2038 
2039 void
2040 notice_special_calls (gcall *call)
2041 {
2042   int flags = gimple_call_flags (call);
2043 
2044   if (flags & ECF_MAY_BE_ALLOCA)
2045     cfun->calls_alloca = true;
2046   if (flags & ECF_RETURNS_TWICE)
2047     cfun->calls_setjmp = true;
2048 }
2049 
2050 
2051 /* Clear flags set by notice_special_calls.  Used by dead code removal
2052    to update the flags.  */
2053 
2054 void
2055 clear_special_calls (void)
2056 {
2057   cfun->calls_alloca = false;
2058   cfun->calls_setjmp = false;
2059 }
2060 
2061 /* Remove PHI nodes associated with basic block BB and all edges out of BB.  */
2062 
2063 static void
2064 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2065 {
2066   /* Since this block is no longer reachable, we can just delete all
2067      of its PHI nodes.  */
2068   remove_phi_nodes (bb);
2069 
2070   /* Remove edges to BB's successors.  */
2071   while (EDGE_COUNT (bb->succs) > 0)
2072     remove_edge (EDGE_SUCC (bb, 0));
2073 }
2074 
2075 
2076 /* Remove statements of basic block BB.  */
2077 
2078 static void
2079 remove_bb (basic_block bb)
2080 {
2081   gimple_stmt_iterator i;
2082 
2083   if (dump_file)
2084     {
2085       fprintf (dump_file, "Removing basic block %d\n", bb->index);
2086       if (dump_flags & TDF_DETAILS)
2087 	{
2088 	  dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2089 	  fprintf (dump_file, "\n");
2090 	}
2091     }
2092 
2093   if (current_loops)
2094     {
2095       struct loop *loop = bb->loop_father;
2096 
2097       /* If a loop gets removed, clean up the information associated
2098 	 with it.  */
2099       if (loop->latch == bb
2100 	  || loop->header == bb)
2101 	free_numbers_of_iterations_estimates_loop (loop);
2102     }
2103 
2104   /* Remove all the instructions in the block.  */
2105   if (bb_seq (bb) != NULL)
2106     {
2107       /* Walk backwards so as to get a chance to substitute all
2108 	 released DEFs into debug stmts.  See
2109 	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2110 	 details.  */
2111       for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2112 	{
2113 	  gimple *stmt = gsi_stmt (i);
2114 	  glabel *label_stmt = dyn_cast <glabel *> (stmt);
2115 	  if (label_stmt
2116 	      && (FORCED_LABEL (gimple_label_label (label_stmt))
2117 		  || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2118 	    {
2119 	      basic_block new_bb;
2120 	      gimple_stmt_iterator new_gsi;
2121 
2122 	      /* A non-reachable non-local label may still be referenced.
2123 		 But it no longer needs to carry the extra semantics of
2124 		 non-locality.  */
2125 	      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2126 		{
2127 		  DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2128 		  FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2129 		}
2130 
2131 	      new_bb = bb->prev_bb;
2132 	      new_gsi = gsi_start_bb (new_bb);
2133 	      gsi_remove (&i, false);
2134 	      gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2135 	    }
2136 	  else
2137 	    {
2138 	      /* Release SSA definitions if we are in SSA.  Note that we
2139 		 may be called when not in SSA.  For example,
2140 		 final_cleanup calls this function via
2141 		 cleanup_tree_cfg.  */
2142 	      if (gimple_in_ssa_p (cfun))
2143 		release_defs (stmt);
2144 
2145 	      gsi_remove (&i, true);
2146 	    }
2147 
2148 	  if (gsi_end_p (i))
2149 	    i = gsi_last_bb (bb);
2150 	  else
2151 	    gsi_prev (&i);
2152 	}
2153     }
2154 
2155   remove_phi_nodes_and_edges_for_unreachable_block (bb);
2156   bb->il.gimple.seq = NULL;
2157   bb->il.gimple.phi_nodes = NULL;
2158 }
2159 
2160 
2161 /* Given a basic block BB ending in a control statement and a predicate
2162    value VAL, return the edge that will be taken out of the block.
2163    If VAL does not match a unique edge, NULL is returned.  */
2164 
2165 edge
2166 find_taken_edge (basic_block bb, tree val)
2167 {
2168   gimple *stmt;
2169 
2170   stmt = last_stmt (bb);
2171 
2172   gcc_assert (stmt);
2173   gcc_assert (is_ctrl_stmt (stmt));
2174 
2175   if (val == NULL)
2176     return NULL;
2177 
2178   if (!is_gimple_min_invariant (val))
2179     return NULL;
2180 
2181   if (gimple_code (stmt) == GIMPLE_COND)
2182     return find_taken_edge_cond_expr (bb, val);
2183 
2184   if (gimple_code (stmt) == GIMPLE_SWITCH)
2185     return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2186 
2187   if (computed_goto_p (stmt))
2188     {
2189       /* Only optimize if the argument is a label; if the argument is
2190 	 not a label then we cannot construct a proper CFG.
2191 
2192          It may be the case that we only need to allow the LABEL_REF to
2193          appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2194          appear inside a LABEL_EXPR just to be safe.  */
2195       if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2196 	  && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2197 	return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2198       return NULL;
2199     }
2200 
2201   gcc_unreachable ();
2202 }
2203 
2204 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2205    statement, determine which of the outgoing edges will be taken out of the
2206    block.  Return NULL if any edge may be taken.  */
2207 
2208 static edge
2209 find_taken_edge_computed_goto (basic_block bb, tree val)
2210 {
2211   basic_block dest;
2212   edge e = NULL;
2213 
2214   dest = label_to_block (val);
2215   if (dest)
2216     {
2217       e = find_edge (bb, dest);
2218       gcc_assert (e != NULL);
2219     }
2220 
2221   return e;
2222 }
2223 
2224 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2225    statement, determine which of the two edges will be taken out of the
2226    block.  Return NULL if either edge may be taken.  */
2227 
2228 static edge
2229 find_taken_edge_cond_expr (basic_block bb, tree val)
2230 {
2231   edge true_edge, false_edge;
2232 
2233   extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2234 
2235   gcc_assert (TREE_CODE (val) == INTEGER_CST);
2236   return (integer_zerop (val) ? false_edge : true_edge);
2237 }
2238 
2239 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2240    statement, determine which edge will be taken out of the block.  Return
2241    NULL if any edge may be taken.  */
2242 
2243 static edge
2244 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2245 			     tree val)
2246 {
2247   basic_block dest_bb;
2248   edge e;
2249   tree taken_case;
2250 
2251   taken_case = find_case_label_for_value (switch_stmt, val);
2252   dest_bb = label_to_block (CASE_LABEL (taken_case));
2253 
2254   e = find_edge (bb, dest_bb);
2255   gcc_assert (e);
2256   return e;
2257 }
2258 
2259 
2260 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2261    We can make optimal use here of the fact that the case labels are
2262    sorted: We can do a binary search for a case matching VAL.  */
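
/* For example (a made-up vector): given the sorted labels
   { default, case 1, case 5 ... 9, case 42 } and VAL 7, the search narrows
   onto the range case 5 ... 9 and returns it, while VAL 3 falls between
   cases and yields the default label.  */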
2263 
2264 static tree
2265 find_case_label_for_value (gswitch *switch_stmt, tree val)
2266 {
2267   size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2268   tree default_case = gimple_switch_default_label (switch_stmt);
2269 
2270   for (low = 0, high = n; high - low > 1; )
2271     {
2272       size_t i = (high + low) / 2;
2273       tree t = gimple_switch_label (switch_stmt, i);
2274       int cmp;
2275 
2276       /* Cache the result of comparing CASE_LOW and val.  */
2277       cmp = tree_int_cst_compare (CASE_LOW (t), val);
2278 
2279       if (cmp > 0)
2280 	high = i;
2281       else
2282 	low = i;
2283 
2284       if (CASE_HIGH (t) == NULL)
2285 	{
2286 	  /* A single-valued case label.  */
2287 	  if (cmp == 0)
2288 	    return t;
2289 	}
2290       else
2291 	{
2292 	  /* A case range.  We can only handle integer ranges.  */
2293 	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2294 	    return t;
2295 	}
2296     }
2297 
2298   return default_case;
2299 }
2300 
2301 
2302 /* Dump a basic block on stderr.  */
2303 
2304 void
2305 gimple_debug_bb (basic_block bb)
2306 {
2307   dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2308 }
2309 
2310 
2311 /* Dump basic block with index N on stderr.  */
2312 
2313 basic_block
2314 gimple_debug_bb_n (int n)
2315 {
2316   gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2317   return BASIC_BLOCK_FOR_FN (cfun, n);
2318 }
2319 
2320 
2321 /* Dump the CFG on stderr.
2322 
2323    FLAGS are the same used by the tree dumping functions
2324    (see TDF_* in dumpfile.h).  */
2325 
2326 void
2327 gimple_debug_cfg (int flags)
2328 {
2329   gimple_dump_cfg (stderr, flags);
2330 }
2331 
2332 
2333 /* Dump the program showing basic block boundaries on the given FILE.
2334 
2335    FLAGS are the same used by the tree dumping functions (see TDF_* in
2336    tree.h).  */
2337 
2338 void
2339 gimple_dump_cfg (FILE *file, int flags)
2340 {
2341   if (flags & TDF_DETAILS)
2342     {
2343       dump_function_header (file, current_function_decl, flags);
2344       fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2345 	       n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2346 	       last_basic_block_for_fn (cfun));
2347 
2348       brief_dump_cfg (file, flags | TDF_COMMENT);
2349       fprintf (file, "\n");
2350     }
2351 
2352   if (flags & TDF_STATS)
2353     dump_cfg_stats (file);
2354 
2355   dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2356 }
2357 
2358 
2359 /* Dump CFG statistics on FILE.  */
2360 
2361 void
2362 dump_cfg_stats (FILE *file)
2363 {
2364   static long max_num_merged_labels = 0;
2365   unsigned long size, total = 0;
2366   long num_edges;
2367   basic_block bb;
2368   const char * const fmt_str   = "%-30s%-13s%12s\n";
2369   const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2370   const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2371   const char * const fmt_str_3 = "%-43s%11lu%c\n";
2372   const char *funcname = current_function_name ();
2373 
2374   fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2375 
2376   fprintf (file, "---------------------------------------------------------\n");
2377   fprintf (file, fmt_str, "", "  Number of  ", "Memory");
2378   fprintf (file, fmt_str, "", "  instances  ", "used ");
2379   fprintf (file, "---------------------------------------------------------\n");
2380 
2381   size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2382   total += size;
2383   fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2384 	   SCALE (size), LABEL (size));
2385 
2386   num_edges = 0;
2387   FOR_EACH_BB_FN (bb, cfun)
2388     num_edges += EDGE_COUNT (bb->succs);
2389   size = num_edges * sizeof (struct edge_def);
2390   total += size;
2391   fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2392 
2393   fprintf (file, "---------------------------------------------------------\n");
2394   fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2395 	   LABEL (total));
2396   fprintf (file, "---------------------------------------------------------\n");
2397   fprintf (file, "\n");
2398 
2399   if (cfg_stats.num_merged_labels > max_num_merged_labels)
2400     max_num_merged_labels = cfg_stats.num_merged_labels;
2401 
2402   fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2403 	   cfg_stats.num_merged_labels, max_num_merged_labels);
2404 
2405   fprintf (file, "\n");
2406 }
2407 
2408 
2409 /* Dump CFG statistics on stderr.  Keep extern so that it's always
2410    linked in the final executable.  */
2411 
2412 DEBUG_FUNCTION void
2413 debug_cfg_stats (void)
2414 {
2415   dump_cfg_stats (stderr);
2416 }
2417 
2418 /*---------------------------------------------------------------------------
2419 			     Miscellaneous helpers
2420 ---------------------------------------------------------------------------*/
2421 
2422 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2423    flow.  Transfers of control flow associated with EH are excluded.  */
2424 
2425 static bool
2426 call_can_make_abnormal_goto (gimple *t)
2427 {
2428   /* If the function has no non-local labels, then a call cannot make an
2429      abnormal transfer of control.  */
2430   if (!cfun->has_nonlocal_label
2431       && !cfun->calls_setjmp)
2432     return false;
2433 
2434   /* Likewise if the call has no side effects.  */
2435   if (!gimple_has_side_effects (t))
2436     return false;
2437 
2438   /* Likewise if the called function is leaf.  */
2439   if (gimple_call_flags (t) & ECF_LEAF)
2440     return false;
2441 
2442   return true;
2443 }
2444 
2445 
2446 /* Return true if T can make an abnormal transfer of control flow.
2447    Transfers of control flow associated with EH are excluded.  */
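
/* E.g. a computed goto (goto *p;) always qualifies, and in a function
   that contains a nonlocal label or calls setjmp, a non-leaf call with
   side effects may transfer control abnormally as well.  */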
2448 
2449 bool
2450 stmt_can_make_abnormal_goto (gimple *t)
2451 {
2452   if (computed_goto_p (t))
2453     return true;
2454   if (is_gimple_call (t))
2455     return call_can_make_abnormal_goto (t);
2456   return false;
2457 }
2458 
2459 
2460 /* Return true if T represents a stmt that always transfers control.  */
2461 
2462 bool
2463 is_ctrl_stmt (gimple *t)
2464 {
2465   switch (gimple_code (t))
2466     {
2467     case GIMPLE_COND:
2468     case GIMPLE_SWITCH:
2469     case GIMPLE_GOTO:
2470     case GIMPLE_RETURN:
2471     case GIMPLE_RESX:
2472       return true;
2473     default:
2474       return false;
2475     }
2476 }
2477 
2478 
2479 /* Return true if T is a statement that may alter the flow of control
2480    (e.g., a call to a non-returning function).  */
2481 
2482 bool
2483 is_ctrl_altering_stmt (gimple *t)
2484 {
2485   gcc_assert (t);
2486 
2487   switch (gimple_code (t))
2488     {
2489     case GIMPLE_CALL:
2490       /* The per-stmt call flag indicates whether the call could alter
2491 	 control flow.  */
2492       if (gimple_call_ctrl_altering_p (t))
2493 	return true;
2494       break;
2495 
2496     case GIMPLE_EH_DISPATCH:
2497       /* EH_DISPATCH branches to the individual catch handlers at
2498 	 this level of a try or allowed-exceptions region.  It can
2499 	 fallthru to the next statement as well.  */
2500       return true;
2501 
2502     case GIMPLE_ASM:
2503       if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2504 	return true;
2505       break;
2506 
2507     CASE_GIMPLE_OMP:
2508       /* OpenMP directives alter control flow.  */
2509       return true;
2510 
2511     case GIMPLE_TRANSACTION:
2512       /* A transaction start alters control flow.  */
2513       return true;
2514 
2515     default:
2516       break;
2517     }
2518 
2519   /* If a statement can throw, it alters control flow.  */
2520   return stmt_can_throw_internal (t);
2521 }
2522 
2523 
2524 /* Return true if T is a simple local goto.  */
2525 
2526 bool
2527 simple_goto_p (gimple *t)
2528 {
2529   return (gimple_code (t) == GIMPLE_GOTO
2530 	  && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2531 }
2532 
2533 
2534 /* Return true if STMT should start a new basic block.  PREV_STMT is
2535    the statement preceding STMT.  It is used when STMT is a label or a
2536    case label.  Labels should only start a new basic block if their
2537    previous statement wasn't a label.  Otherwise, sequences of labels
2538    would generate unnecessary basic blocks that only contain a single
2539    label.  */
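
/* For instance, in the sequence L1: L2: x = 1; only L1 opens a new basic
   block; L2 is counted as a merged label in cfg_stats instead.  */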
2540 
2541 static inline bool
2542 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2543 {
2544   if (stmt == NULL)
2545     return false;
2546 
2547   /* Labels start a new basic block only if the preceding statement
2548      wasn't a label of the same type.  This prevents the creation of
2549      consecutive blocks that have nothing but a single label.  */
2550   if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2551     {
2552       /* Nonlocal and computed GOTO targets always start a new block.  */
2553       if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2554 	  || FORCED_LABEL (gimple_label_label (label_stmt)))
2555 	return true;
2556 
2557       if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2558 	{
2559 	  if (DECL_NONLOCAL (gimple_label_label (
2560 			       as_a <glabel *> (prev_stmt))))
2561 	    return true;
2562 
2563 	  cfg_stats.num_merged_labels++;
2564 	  return false;
2565 	}
2566       else
2567 	return true;
2568     }
2569   else if (gimple_code (stmt) == GIMPLE_CALL
2570 	   && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2571     /* setjmp acts similarly to a nonlocal GOTO target and thus should
2572        start a new block.  */
2573     return true;
2574 
2575   return false;
2576 }
2577 
2578 
2579 /* Return true if T should end a basic block.  */
2580 
2581 bool
2582 stmt_ends_bb_p (gimple *t)
2583 {
2584   return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2585 }
2586 
2587 /* Remove block annotations and other data structures.  */
2588 
2589 void
2590 delete_tree_cfg_annotations (struct function *fn)
2591 {
2592   vec_free (label_to_block_map_for_fn (fn));
2593 }
2594 
2595 /* Return the virtual PHI node in BB, or NULL if there is none.  */
2596 
2597 gphi *
2598 get_virtual_phi (basic_block bb)
2599 {
2600   for (gphi_iterator gsi = gsi_start_phis (bb);
2601        !gsi_end_p (gsi);
2602        gsi_next (&gsi))
2603     {
2604       gphi *phi = gsi.phi ();
2605 
2606       if (virtual_operand_p (PHI_RESULT (phi)))
2607 	return phi;
2608     }
2609 
2610   return NULL;
2611 }
2612 
2613 /* Return the first statement in basic block BB.  */
2614 
2615 gimple *
2616 first_stmt (basic_block bb)
2617 {
2618   gimple_stmt_iterator i = gsi_start_bb (bb);
2619   gimple *stmt = NULL;
2620 
2621   while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2622     {
2623       gsi_next (&i);
2624       stmt = NULL;
2625     }
2626   return stmt;
2627 }
2628 
2629 /* Return the first non-label statement in basic block BB.  */
2630 
2631 static gimple *
2632 first_non_label_stmt (basic_block bb)
2633 {
2634   gimple_stmt_iterator i = gsi_start_bb (bb);
2635   while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2636     gsi_next (&i);
2637   return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2638 }
2639 
2640 /* Return the last statement in basic block BB.  */
2641 
2642 gimple *
2643 last_stmt (basic_block bb)
2644 {
2645   gimple_stmt_iterator i = gsi_last_bb (bb);
2646   gimple *stmt = NULL;
2647 
2648   while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2649     {
2650       gsi_prev (&i);
2651       stmt = NULL;
2652     }
2653   return stmt;
2654 }
2655 
2656 /* Return the last statement of an otherwise empty block.  Return NULL
2657    if the block is totally empty, or if it contains more than one
2658    statement.  */
2659 
2660 gimple *
2661 last_and_only_stmt (basic_block bb)
2662 {
2663   gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2664   gimple *last, *prev;
2665 
2666   if (gsi_end_p (i))
2667     return NULL;
2668 
2669   last = gsi_stmt (i);
2670   gsi_prev_nondebug (&i);
2671   if (gsi_end_p (i))
2672     return last;
2673 
2674   /* Empty statements should no longer appear in the instruction stream.
2675      Everything that might have appeared before should be deleted by
2676      remove_useless_stmts, and the optimizers should just gsi_remove
2677      instead of smashing with build_empty_stmt.
2678 
2679      Thus the only thing that should appear here in a block containing
2680      one executable statement is a label.  */
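  /* So for a block consisting of, say, L1: x_1 = y_2; we return the
     assignment, while a block with two executable statements yields NULL.  */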
2681   prev = gsi_stmt (i);
2682   if (gimple_code (prev) == GIMPLE_LABEL)
2683     return last;
2684   else
2685     return NULL;
2686 }
2687 
2688 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE.  */
2689 
2690 static void
2691 reinstall_phi_args (edge new_edge, edge old_edge)
2692 {
2693   edge_var_map *vm;
2694   int i;
2695   gphi_iterator phis;
2696 
2697   vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2698   if (!v)
2699     return;
2700 
2701   for (i = 0, phis = gsi_start_phis (new_edge->dest);
2702        v->iterate (i, &vm) && !gsi_end_p (phis);
2703        i++, gsi_next (&phis))
2704     {
2705       gphi *phi = phis.phi ();
2706       tree result = redirect_edge_var_map_result (vm);
2707       tree arg = redirect_edge_var_map_def (vm);
2708 
2709       gcc_assert (result == gimple_phi_result (phi));
2710 
2711       add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2712     }
2713 
2714   redirect_edge_var_map_clear (old_edge);
2715 }
2716 
2717 /* Returns the basic block after which the new basic block created
2718    by splitting edge EDGE_IN should be placed.  Tries to keep the new block
2719    near its "logical" location.  This is of most help to humans looking
2720    at debugging dumps.  */
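
/* E.g. if the block placed just before DEST already has a non-complex edge
   to DEST, the new block goes after the split edge's source instead, so the
   existing fallthru chain is not disturbed.  */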
2721 
2722 basic_block
2723 split_edge_bb_loc (edge edge_in)
2724 {
2725   basic_block dest = edge_in->dest;
2726   basic_block dest_prev = dest->prev_bb;
2727 
2728   if (dest_prev)
2729     {
2730       edge e = find_edge (dest_prev, dest);
2731       if (e && !(e->flags & EDGE_COMPLEX))
2732 	return edge_in->src;
2733     }
2734   return dest_prev;
2735 }
2736 
2737 /* Split a (typically critical) edge EDGE_IN.  Return the new block.
2738    Abort on abnormal edges.  */
2739 
2740 static basic_block
2741 gimple_split_edge (edge edge_in)
2742 {
2743   basic_block new_bb, after_bb, dest;
2744   edge new_edge, e;
2745 
2746   /* Abnormal edges cannot be split.  */
2747   gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2748 
2749   dest = edge_in->dest;
2750 
2751   after_bb = split_edge_bb_loc (edge_in);
2752 
2753   new_bb = create_empty_bb (after_bb);
2754   new_bb->frequency = EDGE_FREQUENCY (edge_in);
2755   new_bb->count = edge_in->count;
2756   new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2757   new_edge->probability = REG_BR_PROB_BASE;
2758   new_edge->count = edge_in->count;
2759 
2760   e = redirect_edge_and_branch (edge_in, new_bb);
2761   gcc_assert (e == edge_in);
2762   reinstall_phi_args (new_edge, e);
2763 
2764   return new_bb;
2765 }
2766 
2767 
2768 /* Verify properties of the address expression T with base object BASE.  */
2769 
2770 static tree
2771 verify_address (tree t, tree base)
2772 {
2773   bool old_constant;
2774   bool old_side_effects;
2775   bool new_constant;
2776   bool new_side_effects;
2777 
2778   old_constant = TREE_CONSTANT (t);
2779   old_side_effects = TREE_SIDE_EFFECTS (t);
2780 
2781   recompute_tree_invariant_for_addr_expr (t);
2782   new_side_effects = TREE_SIDE_EFFECTS (t);
2783   new_constant = TREE_CONSTANT (t);
2784 
2785   if (old_constant != new_constant)
2786     {
2787       error ("constant not recomputed when ADDR_EXPR changed");
2788       return t;
2789     }
2790   if (old_side_effects != new_side_effects)
2791     {
2792       error ("side effects not recomputed when ADDR_EXPR changed");
2793       return t;
2794     }
2795 
2796   if (!(TREE_CODE (base) == VAR_DECL
2797 	|| TREE_CODE (base) == PARM_DECL
2798 	|| TREE_CODE (base) == RESULT_DECL))
2799     return NULL_TREE;
2800 
2801   if (DECL_GIMPLE_REG_P (base))
2802     {
2803       error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2804       return base;
2805     }
2806 
2807   return NULL_TREE;
2808 }
2809 
2810 /* Callback for walk_tree, check that all elements with address taken are
2811    properly noticed as such.  The DATA argument is unused.  */
2813 
2814 static tree
2815 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2816 {
2817   tree t = *tp, x;
2818 
2819   if (TYPE_P (t))
2820     *walk_subtrees = 0;
2821 
2822   /* Check operand N for being valid GIMPLE and give error MSG if not.  */
2823 #define CHECK_OP(N, MSG) \
2824   do { if (!is_gimple_val (TREE_OPERAND (t, N)))		\
2825        { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2826 
2827   switch (TREE_CODE (t))
2828     {
2829     case SSA_NAME:
2830       if (SSA_NAME_IN_FREE_LIST (t))
2831 	{
2832 	  error ("SSA name in freelist but still referenced");
2833 	  return *tp;
2834 	}
2835       break;
2836 
2837     case INDIRECT_REF:
2838       error ("INDIRECT_REF in gimple IL");
2839       return t;
2840 
2841     case MEM_REF:
2842       x = TREE_OPERAND (t, 0);
2843       if (!POINTER_TYPE_P (TREE_TYPE (x))
2844 	  || !is_gimple_mem_ref_addr (x))
2845 	{
2846 	  error ("invalid first operand of MEM_REF");
2847 	  return x;
2848 	}
2849       if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2850 	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2851 	{
2852 	  error ("invalid offset operand of MEM_REF");
2853 	  return TREE_OPERAND (t, 1);
2854 	}
2855       if (TREE_CODE (x) == ADDR_EXPR
2856 	  && (x = verify_address (x, TREE_OPERAND (x, 0))))
2857 	return x;
2858       *walk_subtrees = 0;
2859       break;
2860 
2861     case ASSERT_EXPR:
2862       x = fold (ASSERT_EXPR_COND (t));
2863       if (x == boolean_false_node)
2864 	{
2865 	  error ("ASSERT_EXPR with an always-false condition");
2866 	  return *tp;
2867 	}
2868       break;
2869 
2870     case MODIFY_EXPR:
2871       error ("MODIFY_EXPR not expected while having tuples");
2872       return *tp;
2873 
2874     case ADDR_EXPR:
2875       {
2876 	tree tem;
2877 
2878 	gcc_assert (is_gimple_address (t));
2879 
2880 	/* Skip any references (they will be checked when we recurse down the
2881 	   tree) and ensure that any variable used as a prefix is marked
2882 	   addressable.  */
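	/* E.g. taking &x.a[i_1] requires the base variable x to have
	   TREE_ADDRESSABLE set.  */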
2883 	for (x = TREE_OPERAND (t, 0);
2884 	     handled_component_p (x);
2885 	     x = TREE_OPERAND (x, 0))
2886 	  ;
2887 
2888 	if ((tem = verify_address (t, x)))
2889 	  return tem;
2890 
2891 	if (!(TREE_CODE (x) == VAR_DECL
2892 	      || TREE_CODE (x) == PARM_DECL
2893 	      || TREE_CODE (x) == RESULT_DECL))
2894 	  return NULL;
2895 
2896 	if (!TREE_ADDRESSABLE (x))
2897 	  {
2898 	    error ("address taken, but ADDRESSABLE bit not set");
2899 	    return x;
2900 	  }
2901 
2902 	break;
2903       }
2904 
2905     case COND_EXPR:
2906       x = COND_EXPR_COND (t);
2907       if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2908 	{
2909 	  error ("non-integral used in condition");
2910 	  return x;
2911 	}
2912       if (!is_gimple_condexpr (x))
2913         {
2914 	  error ("invalid conditional operand");
2915 	  return x;
2916 	}
2917       break;
2918 
2919     case NON_LVALUE_EXPR:
2920     case TRUTH_NOT_EXPR:
2921       gcc_unreachable ();
2922 
2923     CASE_CONVERT:
2924     case FIX_TRUNC_EXPR:
2925     case FLOAT_EXPR:
2926     case NEGATE_EXPR:
2927     case ABS_EXPR:
2928     case BIT_NOT_EXPR:
2929       CHECK_OP (0, "invalid operand to unary operator");
2930       break;
2931 
2932     case REALPART_EXPR:
2933     case IMAGPART_EXPR:
2934     case BIT_FIELD_REF:
2935       if (!is_gimple_reg_type (TREE_TYPE (t)))
2936 	{
2937 	  error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2938 	  return t;
2939 	}
2940 
2941       if (TREE_CODE (t) == BIT_FIELD_REF)
2942 	{
2943 	  tree t0 = TREE_OPERAND (t, 0);
2944 	  tree t1 = TREE_OPERAND (t, 1);
2945 	  tree t2 = TREE_OPERAND (t, 2);
2946 	  if (!tree_fits_uhwi_p (t1)
2947 	      || !tree_fits_uhwi_p (t2))
2948 	    {
2949 	      error ("invalid position or size operand to BIT_FIELD_REF");
2950 	      return t;
2951 	    }
2952 	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2953 	      && (TYPE_PRECISION (TREE_TYPE (t))
2954 		  != tree_to_uhwi (t1)))
2955 	    {
2956 	      error ("integral result type precision does not match "
2957 		     "field size of BIT_FIELD_REF");
2958 	      return t;
2959 	    }
2960 	  else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2961 		   && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2962 		   && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t)))
2963 		       != tree_to_uhwi (t1)))
2964 	    {
2965 	      error ("mode size of non-integral result does not "
2966 		     "match field size of BIT_FIELD_REF");
2967 	      return t;
2968 	    }
2969 	  if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2970 	      && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2971 		  > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2972 	    {
2973 	      error ("position plus size exceeds size of referenced object in "
2974 		     "BIT_FIELD_REF");
2975 	      return t;
2976 	    }
2977 	}
2978       t = TREE_OPERAND (t, 0);
2979 
2980       /* Fall-through.  */
2981     case COMPONENT_REF:
2982     case ARRAY_REF:
2983     case ARRAY_RANGE_REF:
2984     case VIEW_CONVERT_EXPR:
2985       /* We have a nest of references.  Verify that each of the operands
2986 	 that determine where to reference is either a constant or a variable,
2987 	 verify that the base is valid, and then show we've already checked
2988 	 the subtrees.  */
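      /* For instance, for a reference such as x.a[i_1].b we check the array
	 index i_1 on the way down and finally require the base x to be a
	 valid lvalue or invariant.  */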
2989       while (handled_component_p (t))
2990 	{
2991 	  if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2992 	    CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2993 	  else if (TREE_CODE (t) == ARRAY_REF
2994 		   || TREE_CODE (t) == ARRAY_RANGE_REF)
2995 	    {
2996 	      CHECK_OP (1, "invalid array index");
2997 	      if (TREE_OPERAND (t, 2))
2998 		CHECK_OP (2, "invalid array lower bound");
2999 	      if (TREE_OPERAND (t, 3))
3000 		CHECK_OP (3, "invalid array stride");
3001 	    }
3002 	  else if (TREE_CODE (t) == BIT_FIELD_REF
3003 		   || TREE_CODE (t) == REALPART_EXPR
3004 		   || TREE_CODE (t) == IMAGPART_EXPR)
3005 	    {
3006 	      error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3007 		     "REALPART_EXPR");
3008 	      return t;
3009 	    }
3010 
3011 	  t = TREE_OPERAND (t, 0);
3012 	}
3013 
3014       if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3015 	{
3016 	  error ("invalid reference prefix");
3017 	  return t;
3018 	}
3019       *walk_subtrees = 0;
3020       break;
3021     case PLUS_EXPR:
3022     case MINUS_EXPR:
3023       /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer
3024 	 arithmetic should be done using POINTER_PLUS_EXPR.  */
3025       if (POINTER_TYPE_P (TREE_TYPE (t)))
3026 	{
3027 	  error ("invalid operand to plus/minus, type is a pointer");
3028 	  return t;
3029 	}
3030       CHECK_OP (0, "invalid operand to binary operator");
3031       CHECK_OP (1, "invalid operand to binary operator");
3032       break;
3033 
3034     case POINTER_PLUS_EXPR:
3035       /* Check to make sure the first operand is a pointer or reference type. */
3036       if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3037 	{
3038 	  error ("invalid operand to pointer plus, first operand is not a pointer");
3039 	  return t;
3040 	}
3041       /* Check to make sure the second operand is a ptrofftype.  */
3042       if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3043 	{
3044 	  error ("invalid operand to pointer plus, second operand is not an "
3045 		 "integer type of appropriate width");
3046 	  return t;
3047 	}
3048       /* FALLTHROUGH */
3049     case LT_EXPR:
3050     case LE_EXPR:
3051     case GT_EXPR:
3052     case GE_EXPR:
3053     case EQ_EXPR:
3054     case NE_EXPR:
3055     case UNORDERED_EXPR:
3056     case ORDERED_EXPR:
3057     case UNLT_EXPR:
3058     case UNLE_EXPR:
3059     case UNGT_EXPR:
3060     case UNGE_EXPR:
3061     case UNEQ_EXPR:
3062     case LTGT_EXPR:
3063     case MULT_EXPR:
3064     case TRUNC_DIV_EXPR:
3065     case CEIL_DIV_EXPR:
3066     case FLOOR_DIV_EXPR:
3067     case ROUND_DIV_EXPR:
3068     case TRUNC_MOD_EXPR:
3069     case CEIL_MOD_EXPR:
3070     case FLOOR_MOD_EXPR:
3071     case ROUND_MOD_EXPR:
3072     case RDIV_EXPR:
3073     case EXACT_DIV_EXPR:
3074     case MIN_EXPR:
3075     case MAX_EXPR:
3076     case LSHIFT_EXPR:
3077     case RSHIFT_EXPR:
3078     case LROTATE_EXPR:
3079     case RROTATE_EXPR:
3080     case BIT_IOR_EXPR:
3081     case BIT_XOR_EXPR:
3082     case BIT_AND_EXPR:
3083       CHECK_OP (0, "invalid operand to binary operator");
3084       CHECK_OP (1, "invalid operand to binary operator");
3085       break;
3086 
3087     case CONSTRUCTOR:
3088       if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3089 	*walk_subtrees = 0;
3090       break;
3091 
3092     case CASE_LABEL_EXPR:
3093       if (CASE_CHAIN (t))
3094 	{
3095 	  error ("invalid CASE_CHAIN");
3096 	  return t;
3097 	}
3098       break;
3099 
3100     default:
3101       break;
3102     }
3103   return NULL;
3104 
3105 #undef CHECK_OP
3106 }
3107 
3108 
3109 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3110    Returns true if there is an error, otherwise false.  */
3111 
3112 static bool
3113 verify_types_in_gimple_min_lval (tree expr)
3114 {
3115   tree op;
3116 
3117   if (is_gimple_id (expr))
3118     return false;
3119 
3120   if (TREE_CODE (expr) != TARGET_MEM_REF
3121       && TREE_CODE (expr) != MEM_REF)
3122     {
3123       error ("invalid expression for min lvalue");
3124       return true;
3125     }
3126 
3127   /* TARGET_MEM_REFs are strange beasts.  */
3128   if (TREE_CODE (expr) == TARGET_MEM_REF)
3129     return false;
3130 
3131   op = TREE_OPERAND (expr, 0);
3132   if (!is_gimple_val (op))
3133     {
3134       error ("invalid operand in indirect reference");
3135       debug_generic_stmt (op);
3136       return true;
3137     }
3138   /* Memory references now generally can involve a value conversion.  */
3139 
3140   return false;
3141 }
3142 
3143 /* Verify if EXPR is a valid GIMPLE reference expression.  If
3144    REQUIRE_LVALUE is true verifies it is an lvalue.  Returns true
3145    if there is an error, otherwise false.  */
3146 
3147 static bool
3148 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3149 {
3150   while (handled_component_p (expr))
3151     {
3152       tree op = TREE_OPERAND (expr, 0);
3153 
3154       if (TREE_CODE (expr) == ARRAY_REF
3155 	  || TREE_CODE (expr) == ARRAY_RANGE_REF)
3156 	{
3157 	  if (!is_gimple_val (TREE_OPERAND (expr, 1))
3158 	      || (TREE_OPERAND (expr, 2)
3159 		  && !is_gimple_val (TREE_OPERAND (expr, 2)))
3160 	      || (TREE_OPERAND (expr, 3)
3161 		  && !is_gimple_val (TREE_OPERAND (expr, 3))))
3162 	    {
3163 	      error ("invalid operands to array reference");
3164 	      debug_generic_stmt (expr);
3165 	      return true;
3166 	    }
3167 	}
3168 
3169       /* Verify if the reference array element types are compatible.  */
3170       if (TREE_CODE (expr) == ARRAY_REF
3171 	  && !useless_type_conversion_p (TREE_TYPE (expr),
3172 					 TREE_TYPE (TREE_TYPE (op))))
3173 	{
3174 	  error ("type mismatch in array reference");
3175 	  debug_generic_stmt (TREE_TYPE (expr));
3176 	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3177 	  return true;
3178 	}
3179       if (TREE_CODE (expr) == ARRAY_RANGE_REF
3180 	  && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3181 					 TREE_TYPE (TREE_TYPE (op))))
3182 	{
3183 	  error ("type mismatch in array range reference");
3184 	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3185 	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3186 	  return true;
3187 	}
3188 
3189       if ((TREE_CODE (expr) == REALPART_EXPR
3190 	   || TREE_CODE (expr) == IMAGPART_EXPR)
3191 	  && !useless_type_conversion_p (TREE_TYPE (expr),
3192 					 TREE_TYPE (TREE_TYPE (op))))
3193 	{
3194 	  error ("type mismatch in real/imagpart reference");
3195 	  debug_generic_stmt (TREE_TYPE (expr));
3196 	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3197 	  return true;
3198 	}
3199 
3200       if (TREE_CODE (expr) == COMPONENT_REF
3201 	  && !useless_type_conversion_p (TREE_TYPE (expr),
3202 					 TREE_TYPE (TREE_OPERAND (expr, 1))))
3203 	{
3204 	  error ("type mismatch in component reference");
3205 	  debug_generic_stmt (TREE_TYPE (expr));
3206 	  debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3207 	  return true;
3208 	}
3209 
3210       if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3211 	{
3212 	  /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3213 	     that their operand is not an SSA name or an invariant when
3214 	     requiring an lvalue (this usually means there is an SRA or IPA-SRA
3215 	     bug).  Otherwise there is nothing to verify, gross mismatches at
3216 	     most invoke undefined behavior.  */
3217 	  if (require_lvalue
3218 	      && (TREE_CODE (op) == SSA_NAME
3219 		  || is_gimple_min_invariant (op)))
3220 	    {
3221 	      error ("conversion of an SSA_NAME on the left hand side");
3222 	      debug_generic_stmt (expr);
3223 	      return true;
3224 	    }
3225 	  else if (TREE_CODE (op) == SSA_NAME
3226 		   && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3227 	    {
3228 	      error ("conversion of register to a different size");
3229 	      debug_generic_stmt (expr);
3230 	      return true;
3231 	    }
3232 	  else if (!handled_component_p (op))
3233 	    return false;
3234 	}
3235 
3236       expr = op;
3237     }
3238 
3239   if (TREE_CODE (expr) == MEM_REF)
3240     {
3241       if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3242 	{
3243 	  error ("invalid address operand in MEM_REF");
3244 	  debug_generic_stmt (expr);
3245 	  return true;
3246 	}
3247       if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3248 	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3249 	{
3250 	  error ("invalid offset operand in MEM_REF");
3251 	  debug_generic_stmt (expr);
3252 	  return true;
3253 	}
3254     }
3255   else if (TREE_CODE (expr) == TARGET_MEM_REF)
3256     {
3257       if (!TMR_BASE (expr)
3258 	  || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3259 	{
3260 	  error ("invalid address operand in TARGET_MEM_REF");
3261 	  return true;
3262 	}
3263       if (!TMR_OFFSET (expr)
3264 	  || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3265 	  || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3266 	{
3267 	  error ("invalid offset operand in TARGET_MEM_REF");
3268 	  debug_generic_stmt (expr);
3269 	  return true;
3270 	}
3271     }
3272 
3273   return ((require_lvalue || !is_gimple_min_invariant (expr))
3274 	  && verify_types_in_gimple_min_lval (expr));
3275 }
3276 
3277 /* Return true if some pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3278    list of pointer-to types is trivially convertible to DEST.  */
3279 
3280 static bool
3281 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3282 {
3283   tree src;
3284 
3285   if (!TYPE_POINTER_TO (src_obj))
3286     return true;
3287 
3288   for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3289     if (useless_type_conversion_p (dest, src))
3290       return true;
3291 
3292   return false;
3293 }
3294 
3295 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3296    from TYPE2 can be handled by FIXED_CONVERT_EXPR.  */
3297 
3298 static bool
3299 valid_fixed_convert_types_p (tree type1, tree type2)
3300 {
3301   return (FIXED_POINT_TYPE_P (type1)
3302 	  && (INTEGRAL_TYPE_P (type2)
3303 	      || SCALAR_FLOAT_TYPE_P (type2)
3304 	      || FIXED_POINT_TYPE_P (type2)));
3305 }
3306 
3307 /* Verify the contents of a GIMPLE_CALL STMT.  Returns true when there
3308    is a problem, otherwise false.  */
3309 
3310 static bool
3311 verify_gimple_call (gcall *stmt)
3312 {
3313   tree fn = gimple_call_fn (stmt);
3314   tree fntype, fndecl;
3315   unsigned i;
3316 
3317   if (gimple_call_internal_p (stmt))
3318     {
3319       if (fn)
3320 	{
3321 	  error ("gimple call has two targets");
3322 	  debug_generic_stmt (fn);
3323 	  return true;
3324 	}
3325     }
3326   else
3327     {
3328       if (!fn)
3329 	{
3330 	  error ("gimple call has no target");
3331 	  return true;
3332 	}
3333     }
3334 
3335   if (fn && !is_gimple_call_addr (fn))
3336     {
3337       error ("invalid function in gimple call");
3338       debug_generic_stmt (fn);
3339       return true;
3340     }
3341 
3342   if (fn
3343       && (!POINTER_TYPE_P (TREE_TYPE (fn))
3344 	  || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3345 	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3346     {
3347       error ("non-function in gimple call");
3348       return true;
3349     }
3350 
3351   fndecl = gimple_call_fndecl (stmt);
3352   if (fndecl
3353       && TREE_CODE (fndecl) == FUNCTION_DECL
3354       && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3355       && !DECL_PURE_P (fndecl)
3356       && !TREE_READONLY (fndecl))
3357     {
3358       error ("invalid pure const state for function");
3359       return true;
3360     }
3361 
3362   tree lhs = gimple_call_lhs (stmt);
3363   if (lhs
3364       && (!is_gimple_lvalue (lhs)
3365 	  || verify_types_in_gimple_reference (lhs, true)))
3366     {
3367       error ("invalid LHS in gimple call");
3368       return true;
3369     }
3370 
3371   if (lhs
3372       && gimple_call_ctrl_altering_p (stmt)
3373       && gimple_call_noreturn_p (stmt)
3374       && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (lhs))) == INTEGER_CST
3375       && !TREE_ADDRESSABLE (TREE_TYPE (lhs)))
3376     {
3377       error ("LHS in noreturn call");
3378       return true;
3379     }
3380 
3381   fntype = gimple_call_fntype (stmt);
3382   if (fntype
3383       && lhs
3384       && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3385       /* ???  At least C++ misses conversions at assignments from
3386 	 void * call results.
3387 	 ???  Java is completely off.  Especially with functions
3388 	 returning java.lang.Object.
3389 	 For now simply allow arbitrary pointer type conversions.  */
3390       && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3391 	   && POINTER_TYPE_P (TREE_TYPE (fntype))))
3392     {
3393       error ("invalid conversion in gimple call");
3394       debug_generic_stmt (TREE_TYPE (lhs));
3395       debug_generic_stmt (TREE_TYPE (fntype));
3396       return true;
3397     }
3398 
3399   if (gimple_call_chain (stmt)
3400       && !is_gimple_val (gimple_call_chain (stmt)))
3401     {
3402       error ("invalid static chain in gimple call");
3403       debug_generic_stmt (gimple_call_chain (stmt));
3404       return true;
3405     }
3406 
3407   /* If there is a static chain argument, the call should either be
3408      indirect, or the decl should have DECL_STATIC_CHAIN set.  */
3409   if (gimple_call_chain (stmt)
3410       && fndecl
3411       && !DECL_STATIC_CHAIN (fndecl))
3412     {
3413       error ("static chain with function that doesn%'t use one");
3414       return true;
3415     }
3416 
3417   /* ???  The C frontend passes unpromoted arguments in case it
3418      didn't see a function declaration before the call.  So for now
3419      leave the call arguments mostly unverified.  Once we gimplify
3420      unit-at-a-time we have a chance to fix this.  */
3421 
3422   for (i = 0; i < gimple_call_num_args (stmt); ++i)
3423     {
3424       tree arg = gimple_call_arg (stmt, i);
3425       if ((is_gimple_reg_type (TREE_TYPE (arg))
3426 	   && !is_gimple_val (arg))
3427 	  || (!is_gimple_reg_type (TREE_TYPE (arg))
3428 	      && !is_gimple_lvalue (arg)))
3429 	{
3430 	  error ("invalid argument to gimple call");
3431 	  debug_generic_expr (arg);
3432 	  return true;
3433 	}
3434     }
3435 
3436   return false;
3437 }
3438 
3439 /* Verifies the gimple comparison with the result type TYPE and
3440    the operands OP0 and OP1, comparison code is CODE.  */
3441 
3442 static bool
3443 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3444 {
3445   tree op0_type = TREE_TYPE (op0);
3446   tree op1_type = TREE_TYPE (op1);
3447 
3448   if (!is_gimple_val (op0) || !is_gimple_val (op1))
3449     {
3450       error ("invalid operands in gimple comparison");
3451       return true;
3452     }
3453 
3454   /* For comparisons there is no explicit operation type giving the
3455      effective type the comparison is carried out in.  Instead
3456      we require that either the first operand is trivially
3457      convertible into the second, or the other way around.
3458      Because we special-case pointers to void we allow
3459      comparisons of pointers with the same mode as well.  */
3460   if (!useless_type_conversion_p (op0_type, op1_type)
3461       && !useless_type_conversion_p (op1_type, op0_type)
3462       && (!POINTER_TYPE_P (op0_type)
3463 	  || !POINTER_TYPE_P (op1_type)
3464 	  || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3465     {
3466       error ("mismatching comparison operand types");
3467       debug_generic_expr (op0_type);
3468       debug_generic_expr (op1_type);
3469       return true;
3470     }
3471 
3472   /* The resulting type of a comparison may be an effective boolean type.  */
3473   if (INTEGRAL_TYPE_P (type)
3474       && (TREE_CODE (type) == BOOLEAN_TYPE
3475 	  || TYPE_PRECISION (type) == 1))
3476     {
3477       if ((TREE_CODE (op0_type) == VECTOR_TYPE
3478 	   || TREE_CODE (op1_type) == VECTOR_TYPE)
3479 	  && code != EQ_EXPR && code != NE_EXPR
3480 	  && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3481 	  && !VECTOR_INTEGER_TYPE_P (op0_type))
3482 	{
3483 	  error ("unsupported operation or type for vector comparison"
3484 		 " returning a boolean");
3485 	  debug_generic_expr (op0_type);
3486 	  debug_generic_expr (op1_type);
3487 	  return true;
3488         }
3489     }
3490   /* Or a boolean vector type with the same element count
3491      as the comparison operand types.  */
3492   else if (TREE_CODE (type) == VECTOR_TYPE
3493 	   && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3494     {
3495       if (TREE_CODE (op0_type) != VECTOR_TYPE
3496 	  || TREE_CODE (op1_type) != VECTOR_TYPE)
3497         {
3498           error ("non-vector operands in vector comparison");
3499           debug_generic_expr (op0_type);
3500           debug_generic_expr (op1_type);
3501           return true;
3502         }
3503 
3504       if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type))
3505         {
3506           error ("invalid vector comparison resulting type");
3507           debug_generic_expr (type);
3508           return true;
3509         }
3510     }
3511   else
3512     {
3513       error ("bogus comparison result type");
3514       debug_generic_expr (type);
3515       return true;
3516     }
3517 
3518   return false;
3519 }
3520 
3521 /* Verify a gimple assignment statement STMT with an unary rhs.
3522    Returns true if anything is wrong.  */
3523 
3524 static bool
3525 verify_gimple_assign_unary (gassign *stmt)
3526 {
3527   enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3528   tree lhs = gimple_assign_lhs (stmt);
3529   tree lhs_type = TREE_TYPE (lhs);
3530   tree rhs1 = gimple_assign_rhs1 (stmt);
3531   tree rhs1_type = TREE_TYPE (rhs1);
3532 
3533   if (!is_gimple_reg (lhs))
3534     {
3535       error ("non-register as LHS of unary operation");
3536       return true;
3537     }
3538 
3539   if (!is_gimple_val (rhs1))
3540     {
3541       error ("invalid operand in unary operation");
3542       return true;
3543     }
3544 
3545   /* First handle conversions.  */
3546   switch (rhs_code)
3547     {
3548     CASE_CONVERT:
3549       {
3550 	/* Allow conversions from pointer type to integral type only if
3551 	   there is no sign or zero extension involved.
3552 	   For targets where the precision of ptrofftype doesn't match that
3553 	   of pointers we need to allow arbitrary conversions to ptrofftype.  */
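	/* Illustrative (hypothetical) cases on an LP64 target:
	     l_1 = (long) p_2;     64-bit pointer to 64-bit integer, OK
	     i_3 = (int) p_2;      truncating conversion, OK
	     p_4 = (void *) l_1;   integer to pointer, always OK
	   whereas a widening pointer-to-integer conversion is only
	   covered by the ptrofftype allowance described above.  */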
3554 	if ((POINTER_TYPE_P (lhs_type)
3555 	     && INTEGRAL_TYPE_P (rhs1_type))
3556 	    || (POINTER_TYPE_P (rhs1_type)
3557 		&& INTEGRAL_TYPE_P (lhs_type)
3558 		&& (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3559 		    || ptrofftype_p (sizetype))))
3560 	  return false;
3561 
3562 	/* Allow conversion from integral to offset type and vice versa.  */
3563 	if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3564 	     && INTEGRAL_TYPE_P (rhs1_type))
3565 	    || (INTEGRAL_TYPE_P (lhs_type)
3566 		&& TREE_CODE (rhs1_type) == OFFSET_TYPE))
3567 	  return false;
3568 
3569 	/* Otherwise assert we are converting between types of the
3570 	   same kind.  */
3571 	if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3572 	  {
3573 	    error ("invalid types in nop conversion");
3574 	    debug_generic_expr (lhs_type);
3575 	    debug_generic_expr (rhs1_type);
3576 	    return true;
3577 	  }
3578 
3579 	return false;
3580       }
3581 
3582     case ADDR_SPACE_CONVERT_EXPR:
3583       {
3584 	if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3585 	    || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3586 		== TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3587 	  {
3588 	    error ("invalid types in address space conversion");
3589 	    debug_generic_expr (lhs_type);
3590 	    debug_generic_expr (rhs1_type);
3591 	    return true;
3592 	  }
3593 
3594 	return false;
3595       }
3596 
3597     case FIXED_CONVERT_EXPR:
3598       {
3599 	if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3600 	    && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3601 	  {
3602 	    error ("invalid types in fixed-point conversion");
3603 	    debug_generic_expr (lhs_type);
3604 	    debug_generic_expr (rhs1_type);
3605 	    return true;
3606 	  }
3607 
3608 	return false;
3609       }
3610 
3611     case FLOAT_EXPR:
3612       {
3613 	if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3614 	    && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3615 	        || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3616 	  {
3617 	    error ("invalid types in conversion to floating point");
3618 	    debug_generic_expr (lhs_type);
3619 	    debug_generic_expr (rhs1_type);
3620 	    return true;
3621 	  }
3622 
3623         return false;
3624       }
3625 
3626     case FIX_TRUNC_EXPR:
3627       {
3628         if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3629             && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3630                 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3631 	  {
3632 	    error ("invalid types in conversion to integer");
3633 	    debug_generic_expr (lhs_type);
3634 	    debug_generic_expr (rhs1_type);
3635 	    return true;
3636 	  }
3637 
3638         return false;
3639       }
3640     case REDUC_MAX_EXPR:
3641     case REDUC_MIN_EXPR:
3642     case REDUC_PLUS_EXPR:
3643       if (!VECTOR_TYPE_P (rhs1_type)
3644 	  || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3645         {
3646 	  error ("reduction should convert from vector to element type");
3647 	  debug_generic_expr (lhs_type);
3648 	  debug_generic_expr (rhs1_type);
3649 	  return true;
3650 	}
3651       return false;
3652 
3653     case VEC_UNPACK_HI_EXPR:
3654     case VEC_UNPACK_LO_EXPR:
3655     case VEC_UNPACK_FLOAT_HI_EXPR:
3656     case VEC_UNPACK_FLOAT_LO_EXPR:
3657       /* FIXME.  */
3658       return false;
3659 
3660     case NEGATE_EXPR:
3661     case ABS_EXPR:
3662     case BIT_NOT_EXPR:
3663     case PAREN_EXPR:
3664     case CONJ_EXPR:
3665       break;
3666 
3667     default:
3668       gcc_unreachable ();
3669     }
3670 
3671   /* For the remaining codes assert there is no conversion involved.  */
3672   if (!useless_type_conversion_p (lhs_type, rhs1_type))
3673     {
3674       error ("non-trivial conversion in unary operation");
3675       debug_generic_expr (lhs_type);
3676       debug_generic_expr (rhs1_type);
3677       return true;
3678     }
3679 
3680   return false;
3681 }
3682 
3683 /* Verify a gimple assignment statement STMT with a binary rhs.
3684    Returns true if anything is wrong.  */
3685 
3686 static bool
3687 verify_gimple_assign_binary (gassign *stmt)
3688 {
3689   enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3690   tree lhs = gimple_assign_lhs (stmt);
3691   tree lhs_type = TREE_TYPE (lhs);
3692   tree rhs1 = gimple_assign_rhs1 (stmt);
3693   tree rhs1_type = TREE_TYPE (rhs1);
3694   tree rhs2 = gimple_assign_rhs2 (stmt);
3695   tree rhs2_type = TREE_TYPE (rhs2);
3696 
3697   if (!is_gimple_reg (lhs))
3698     {
3699       error ("non-register as LHS of binary operation");
3700       return true;
3701     }
3702 
3703   if (!is_gimple_val (rhs1)
3704       || !is_gimple_val (rhs2))
3705     {
3706       error ("invalid operands in binary operation");
3707       return true;
3708     }
3709 
3710   /* First handle operations that involve different types.  */
3711   switch (rhs_code)
3712     {
3713     case COMPLEX_EXPR:
3714       {
3715 	if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3716 	    || !(INTEGRAL_TYPE_P (rhs1_type)
3717 	         || SCALAR_FLOAT_TYPE_P (rhs1_type))
3718 	    || !(INTEGRAL_TYPE_P (rhs2_type)
3719 	         || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3720 	  {
3721 	    error ("type mismatch in complex expression");
3722 	    debug_generic_expr (lhs_type);
3723 	    debug_generic_expr (rhs1_type);
3724 	    debug_generic_expr (rhs2_type);
3725 	    return true;
3726 	  }
3727 
3728 	return false;
3729       }
3730 
3731     case LSHIFT_EXPR:
3732     case RSHIFT_EXPR:
3733     case LROTATE_EXPR:
3734     case RROTATE_EXPR:
3735       {
3736 	/* Shifts and rotates are ok on integral types, fixed point
3737 	   types and integer vector types.  */
3738 	if ((!INTEGRAL_TYPE_P (rhs1_type)
3739 	     && !FIXED_POINT_TYPE_P (rhs1_type)
3740 	     && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3741 		  && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3742 	    || (!INTEGRAL_TYPE_P (rhs2_type)
3743 		/* Vector shifts of vectors are also ok.  */
3744 		&& !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3745 		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3746 		     && TREE_CODE (rhs2_type) == VECTOR_TYPE
3747 		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3748 	    || !useless_type_conversion_p (lhs_type, rhs1_type))
3749 	  {
3750 	    error ("type mismatch in shift expression");
3751 	    debug_generic_expr (lhs_type);
3752 	    debug_generic_expr (rhs1_type);
3753 	    debug_generic_expr (rhs2_type);
3754 	    return true;
3755 	  }
3756 
3757 	return false;
3758       }
3759 
3760     case WIDEN_LSHIFT_EXPR:
3761       {
3762         if (!INTEGRAL_TYPE_P (lhs_type)
3763             || !INTEGRAL_TYPE_P (rhs1_type)
3764             || TREE_CODE (rhs2) != INTEGER_CST
3765             || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3766           {
3767             error ("type mismatch in widening vector shift expression");
3768             debug_generic_expr (lhs_type);
3769             debug_generic_expr (rhs1_type);
3770             debug_generic_expr (rhs2_type);
3771             return true;
3772           }
3773 
3774         return false;
3775       }
3776 
3777     case VEC_WIDEN_LSHIFT_HI_EXPR:
3778     case VEC_WIDEN_LSHIFT_LO_EXPR:
3779       {
3780         if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3781             || TREE_CODE (lhs_type) != VECTOR_TYPE
3782             || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3783             || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3784             || TREE_CODE (rhs2) != INTEGER_CST
3785             || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3786                 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3787           {
3788             error ("type mismatch in widening vector shift expression");
3789             debug_generic_expr (lhs_type);
3790             debug_generic_expr (rhs1_type);
3791             debug_generic_expr (rhs2_type);
3792             return true;
3793           }
3794 
3795         return false;
3796       }
3797 
3798     case PLUS_EXPR:
3799     case MINUS_EXPR:
3800       {
3801 	tree lhs_etype = lhs_type;
3802 	tree rhs1_etype = rhs1_type;
3803 	tree rhs2_etype = rhs2_type;
3804 	if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3805 	  {
3806 	    if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3807 		|| TREE_CODE (rhs2_type) != VECTOR_TYPE)
3808 	      {
3809 		error ("invalid non-vector operands to vector valued plus");
3810 		return true;
3811 	      }
3812 	    lhs_etype = TREE_TYPE (lhs_type);
3813 	    rhs1_etype = TREE_TYPE (rhs1_type);
3814 	    rhs2_etype = TREE_TYPE (rhs2_type);
3815 	  }
3816 	if (POINTER_TYPE_P (lhs_etype)
3817 	    || POINTER_TYPE_P (rhs1_etype)
3818 	    || POINTER_TYPE_P (rhs2_etype))
3819 	  {
3820 	    error ("invalid (pointer) operands to plus/minus");
3821 	    return true;
3822 	  }
3823 
3824 	/* Continue with generic binary expression handling.  */
3825 	break;
3826       }
3827 
3828     case POINTER_PLUS_EXPR:
3829       {
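	/* Illustrative (hypothetical) GIMPLE:
	     q_1 = p_2 + 16;
	   where q_1 and p_2 share one pointer type and the offset has
	   sizetype; an offset of pointer type, or of an integer type
	   that is not ptrofftype, is rejected below.  */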
3830 	if (!POINTER_TYPE_P (rhs1_type)
3831 	    || !useless_type_conversion_p (lhs_type, rhs1_type)
3832 	    || !ptrofftype_p (rhs2_type))
3833 	  {
3834 	    error ("type mismatch in pointer plus expression");
3835 	    debug_generic_stmt (lhs_type);
3836 	    debug_generic_stmt (rhs1_type);
3837 	    debug_generic_stmt (rhs2_type);
3838 	    return true;
3839 	  }
3840 
3841 	return false;
3842       }
3843 
3844     case TRUTH_ANDIF_EXPR:
3845     case TRUTH_ORIF_EXPR:
3846     case TRUTH_AND_EXPR:
3847     case TRUTH_OR_EXPR:
3848     case TRUTH_XOR_EXPR:
3849 
3850       gcc_unreachable ();
3851 
3852     case LT_EXPR:
3853     case LE_EXPR:
3854     case GT_EXPR:
3855     case GE_EXPR:
3856     case EQ_EXPR:
3857     case NE_EXPR:
3858     case UNORDERED_EXPR:
3859     case ORDERED_EXPR:
3860     case UNLT_EXPR:
3861     case UNLE_EXPR:
3862     case UNGT_EXPR:
3863     case UNGE_EXPR:
3864     case UNEQ_EXPR:
3865     case LTGT_EXPR:
3866       /* Comparisons are also binary, but the result type is not
3867 	 connected to the operand types.  */
3868       return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
3869 
3870     case WIDEN_MULT_EXPR:
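      /* Illustrative (hypothetical): w_1 = a_2 w* b_3 with two 16-bit
	 operands of equal precision and a result at least 32 bits
	 wide; anything else fails the checks below.  */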
3871       if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3872 	return true;
3873       return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3874 	      || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3875 
3876     case WIDEN_SUM_EXPR:
3877     case VEC_WIDEN_MULT_HI_EXPR:
3878     case VEC_WIDEN_MULT_LO_EXPR:
3879     case VEC_WIDEN_MULT_EVEN_EXPR:
3880     case VEC_WIDEN_MULT_ODD_EXPR:
3881     case VEC_PACK_TRUNC_EXPR:
3882     case VEC_PACK_SAT_EXPR:
3883     case VEC_PACK_FIX_TRUNC_EXPR:
3884       /* FIXME.  */
3885       return false;
3886 
3887     case MULT_EXPR:
3888     case MULT_HIGHPART_EXPR:
3889     case TRUNC_DIV_EXPR:
3890     case CEIL_DIV_EXPR:
3891     case FLOOR_DIV_EXPR:
3892     case ROUND_DIV_EXPR:
3893     case TRUNC_MOD_EXPR:
3894     case CEIL_MOD_EXPR:
3895     case FLOOR_MOD_EXPR:
3896     case ROUND_MOD_EXPR:
3897     case RDIV_EXPR:
3898     case EXACT_DIV_EXPR:
3899     case MIN_EXPR:
3900     case MAX_EXPR:
3901     case BIT_IOR_EXPR:
3902     case BIT_XOR_EXPR:
3903     case BIT_AND_EXPR:
3904       /* Continue with generic binary expression handling.  */
3905       break;
3906 
3907     default:
3908       gcc_unreachable ();
3909     }
3910 
3911   if (!useless_type_conversion_p (lhs_type, rhs1_type)
3912       || !useless_type_conversion_p (lhs_type, rhs2_type))
3913     {
3914       error ("type mismatch in binary expression");
3915       debug_generic_stmt (lhs_type);
3916       debug_generic_stmt (rhs1_type);
3917       debug_generic_stmt (rhs2_type);
3918       return true;
3919     }
3920 
3921   return false;
3922 }
3923 
3924 /* Verify a gimple assignment statement STMT with a ternary rhs.
3925    Returns true if anything is wrong.  */
3926 
3927 static bool
3928 verify_gimple_assign_ternary (gassign *stmt)
3929 {
3930   enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3931   tree lhs = gimple_assign_lhs (stmt);
3932   tree lhs_type = TREE_TYPE (lhs);
3933   tree rhs1 = gimple_assign_rhs1 (stmt);
3934   tree rhs1_type = TREE_TYPE (rhs1);
3935   tree rhs2 = gimple_assign_rhs2 (stmt);
3936   tree rhs2_type = TREE_TYPE (rhs2);
3937   tree rhs3 = gimple_assign_rhs3 (stmt);
3938   tree rhs3_type = TREE_TYPE (rhs3);
3939 
3940   if (!is_gimple_reg (lhs))
3941     {
3942       error ("non-register as LHS of ternary operation");
3943       return true;
3944     }
3945 
3946   if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3947        ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3948       || !is_gimple_val (rhs2)
3949       || !is_gimple_val (rhs3))
3950     {
3951       error ("invalid operands in ternary operation");
3952       return true;
3953     }
3954 
3955   /* First handle operations that involve different types.  */
3956   switch (rhs_code)
3957     {
3958     case WIDEN_MULT_PLUS_EXPR:
3959     case WIDEN_MULT_MINUS_EXPR:
3960       if ((!INTEGRAL_TYPE_P (rhs1_type)
3961 	   && !FIXED_POINT_TYPE_P (rhs1_type))
3962 	  || !useless_type_conversion_p (rhs1_type, rhs2_type)
3963 	  || !useless_type_conversion_p (lhs_type, rhs3_type)
3964 	  || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3965 	  || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3966 	{
3967 	  error ("type mismatch in widening multiply-accumulate expression");
3968 	  debug_generic_expr (lhs_type);
3969 	  debug_generic_expr (rhs1_type);
3970 	  debug_generic_expr (rhs2_type);
3971 	  debug_generic_expr (rhs3_type);
3972 	  return true;
3973 	}
3974       break;
3975 
3976     case FMA_EXPR:
3977       if (!useless_type_conversion_p (lhs_type, rhs1_type)
3978 	  || !useless_type_conversion_p (lhs_type, rhs2_type)
3979 	  || !useless_type_conversion_p (lhs_type, rhs3_type))
3980 	{
3981 	  error ("type mismatch in fused multiply-add expression");
3982 	  debug_generic_expr (lhs_type);
3983 	  debug_generic_expr (rhs1_type);
3984 	  debug_generic_expr (rhs2_type);
3985 	  debug_generic_expr (rhs3_type);
3986 	  return true;
3987 	}
3988       break;
3989 
3990     case VEC_COND_EXPR:
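      /* Illustrative (hypothetical): _1 = VEC_COND_EXPR <m_2, v_3, v_4>
	 where m_2 is a boolean vector with exactly as many elements
	 as the vector result _1.  */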
3991       if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
3992 	  || TYPE_VECTOR_SUBPARTS (rhs1_type)
3993 	     != TYPE_VECTOR_SUBPARTS (lhs_type))
3994 	{
3995 	  error ("the first argument of a VEC_COND_EXPR must be of a "
3996 		 "boolean vector type of the same number of elements "
3997 		 "as the result");
3998 	  debug_generic_expr (lhs_type);
3999 	  debug_generic_expr (rhs1_type);
4000 	  return true;
4001 	}
4002       /* Fallthrough.  */
4003     case COND_EXPR:
4004       if (!useless_type_conversion_p (lhs_type, rhs2_type)
4005 	  || !useless_type_conversion_p (lhs_type, rhs3_type))
4006 	{
4007 	  error ("type mismatch in conditional expression");
4008 	  debug_generic_expr (lhs_type);
4009 	  debug_generic_expr (rhs2_type);
4010 	  debug_generic_expr (rhs3_type);
4011 	  return true;
4012 	}
4013       break;
4014 
4015     case VEC_PERM_EXPR:
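      /* Illustrative (hypothetical): _1 = VEC_PERM_EXPR <v_2, v_3, m_4>
	 where all four vectors have the same element count and m_4
	 has integral elements as wide as the data elements.  */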
4016       if (!useless_type_conversion_p (lhs_type, rhs1_type)
4017 	  || !useless_type_conversion_p (lhs_type, rhs2_type))
4018 	{
4019 	  error ("type mismatch in vector permute expression");
4020 	  debug_generic_expr (lhs_type);
4021 	  debug_generic_expr (rhs1_type);
4022 	  debug_generic_expr (rhs2_type);
4023 	  debug_generic_expr (rhs3_type);
4024 	  return true;
4025 	}
4026 
4027       if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4028 	  || TREE_CODE (rhs2_type) != VECTOR_TYPE
4029 	  || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4030 	{
4031 	  error ("vector types expected in vector permute expression");
4032 	  debug_generic_expr (lhs_type);
4033 	  debug_generic_expr (rhs1_type);
4034 	  debug_generic_expr (rhs2_type);
4035 	  debug_generic_expr (rhs3_type);
4036 	  return true;
4037 	}
4038 
4039       if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4040 	  || TYPE_VECTOR_SUBPARTS (rhs2_type)
4041 	     != TYPE_VECTOR_SUBPARTS (rhs3_type)
4042 	  || TYPE_VECTOR_SUBPARTS (rhs3_type)
4043 	     != TYPE_VECTOR_SUBPARTS (lhs_type))
4044 	{
4045 	  error ("vectors with different element number found "
4046 		 "in vector permute expression");
4047 	  debug_generic_expr (lhs_type);
4048 	  debug_generic_expr (rhs1_type);
4049 	  debug_generic_expr (rhs2_type);
4050 	  debug_generic_expr (rhs3_type);
4051 	  return true;
4052 	}
4053 
4054       if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4055 	  || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4056 	     != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4057 	{
4058 	  error ("invalid mask type in vector permute expression");
4059 	  debug_generic_expr (lhs_type);
4060 	  debug_generic_expr (rhs1_type);
4061 	  debug_generic_expr (rhs2_type);
4062 	  debug_generic_expr (rhs3_type);
4063 	  return true;
4064 	}
4065 
4066       return false;
4067 
4068     case SAD_EXPR:
4069       if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4070 	  || !useless_type_conversion_p (lhs_type, rhs3_type)
4071 	  || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4072 	       > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4073 	{
4074 	  error ("type mismatch in sad expression");
4075 	  debug_generic_expr (lhs_type);
4076 	  debug_generic_expr (rhs1_type);
4077 	  debug_generic_expr (rhs2_type);
4078 	  debug_generic_expr (rhs3_type);
4079 	  return true;
4080 	}
4081 
4082       if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4083 	  || TREE_CODE (rhs2_type) != VECTOR_TYPE
4084 	  || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4085 	{
4086 	  error ("vector types expected in sad expression");
4087 	  debug_generic_expr (lhs_type);
4088 	  debug_generic_expr (rhs1_type);
4089 	  debug_generic_expr (rhs2_type);
4090 	  debug_generic_expr (rhs3_type);
4091 	  return true;
4092 	}
4093 
4094       return false;
4095 
4096     case DOT_PROD_EXPR:
4097     case REALIGN_LOAD_EXPR:
4098       /* FIXME.  */
4099       return false;
4100 
4101     default:
4102       gcc_unreachable ();
4103     }
4104   return false;
4105 }
4106 
4107 /* Verify a gimple assignment statement STMT with a single rhs.
4108    Returns true if anything is wrong.  */
4109 
4110 static bool
4111 verify_gimple_assign_single (gassign *stmt)
4112 {
4113   enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4114   tree lhs = gimple_assign_lhs (stmt);
4115   tree lhs_type = TREE_TYPE (lhs);
4116   tree rhs1 = gimple_assign_rhs1 (stmt);
4117   tree rhs1_type = TREE_TYPE (rhs1);
4118   bool res = false;
4119 
4120   if (!useless_type_conversion_p (lhs_type, rhs1_type))
4121     {
4122       error ("non-trivial conversion at assignment");
4123       debug_generic_expr (lhs_type);
4124       debug_generic_expr (rhs1_type);
4125       return true;
4126     }
4127 
4128   if (gimple_clobber_p (stmt)
4129       && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4130     {
4131       error ("non-decl/MEM_REF LHS in clobber statement");
4132       debug_generic_expr (lhs);
4133       return true;
4134     }
4135 
4136   if (handled_component_p (lhs)
4137       || TREE_CODE (lhs) == MEM_REF
4138       || TREE_CODE (lhs) == TARGET_MEM_REF)
4139     res |= verify_types_in_gimple_reference (lhs, true);
4140 
4141   /* Special codes we cannot handle via their class.  */
4142   switch (rhs_code)
4143     {
4144     case ADDR_EXPR:
4145       {
4146 	tree op = TREE_OPERAND (rhs1, 0);
4147 	if (!is_gimple_addressable (op))
4148 	  {
4149 	    error ("invalid operand in unary expression");
4150 	    return true;
4151 	  }
4152 
4153 	/* Technically there is no longer a need for matching types, but
4154 	   gimple hygiene asks for this check.  In LTO we can end up
4155 	   combining incompatible units and thus end up with addresses
4156 	   of globals that change their type to a common one.  */
4157 	if (!in_lto_p
4158 	    && !types_compatible_p (TREE_TYPE (op),
4159 				    TREE_TYPE (TREE_TYPE (rhs1)))
4160 	    && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4161 							  TREE_TYPE (op)))
4162 	  {
4163 	    error ("type mismatch in address expression");
4164 	    debug_generic_stmt (TREE_TYPE (rhs1));
4165 	    debug_generic_stmt (TREE_TYPE (op));
4166 	    return true;
4167 	  }
4168 
4169 	return verify_types_in_gimple_reference (op, true);
4170       }
4171 
4172     /* tcc_reference  */
4173     case INDIRECT_REF:
4174       error ("INDIRECT_REF in gimple IL");
4175       return true;
4176 
4177     case COMPONENT_REF:
4178     case BIT_FIELD_REF:
4179     case ARRAY_REF:
4180     case ARRAY_RANGE_REF:
4181     case VIEW_CONVERT_EXPR:
4182     case REALPART_EXPR:
4183     case IMAGPART_EXPR:
4184     case TARGET_MEM_REF:
4185     case MEM_REF:
4186       if (!is_gimple_reg (lhs)
4187 	  && is_gimple_reg_type (TREE_TYPE (lhs)))
4188 	{
4189 	  error ("invalid rhs for gimple memory store");
4190 	  debug_generic_stmt (lhs);
4191 	  debug_generic_stmt (rhs1);
4192 	  return true;
4193 	}
4194       return res || verify_types_in_gimple_reference (rhs1, false);
4195 
4196     /* tcc_constant  */
4197     case SSA_NAME:
4198     case INTEGER_CST:
4199     case REAL_CST:
4200     case FIXED_CST:
4201     case COMPLEX_CST:
4202     case VECTOR_CST:
4203     case STRING_CST:
4204       return res;
4205 
4206     /* tcc_declaration  */
4207     case CONST_DECL:
4208       return res;
4209     case VAR_DECL:
4210     case PARM_DECL:
4211       if (!is_gimple_reg (lhs)
4212 	  && !is_gimple_reg (rhs1)
4213 	  && is_gimple_reg_type (TREE_TYPE (lhs)))
4214 	{
4215 	  error ("invalid rhs for gimple memory store");
4216 	  debug_generic_stmt (lhs);
4217 	  debug_generic_stmt (rhs1);
4218 	  return true;
4219 	}
4220       return res;
4221 
4222     case CONSTRUCTOR:
4223       if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4224 	{
4225 	  unsigned int i;
4226 	  tree elt_i, elt_v, elt_t = NULL_TREE;
4227 
4228 	  if (CONSTRUCTOR_NELTS (rhs1) == 0)
4229 	    return res;
4230 	  /* For vector CONSTRUCTORs we require that either it is an empty
4231 	     CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4232 	     (then the element count must be correct to cover the whole
4233 	     outer vector and the index must be NULL on all elements), or
4234 	     it is a CONSTRUCTOR of scalar elements, where we as an
4235 	     exception allow a smaller number of elements (assuming zero
4236 	     filling) and consecutive indexes as compared to NULL indexes
4237 	     (such CONSTRUCTORs can appear in the IL from FEs).  */
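	  /* Illustrative (hypothetical) CONSTRUCTORs for a V4SI result:
	     { v2si_1, v2si_2 } with NULL indexes, or { i_1, i_2 }
	     building only the low two scalar elements with the rest
	     implicitly zero.  */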
4238 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4239 	    {
4240 	      if (elt_t == NULL_TREE)
4241 		{
4242 		  elt_t = TREE_TYPE (elt_v);
4243 		  if (TREE_CODE (elt_t) == VECTOR_TYPE)
4244 		    {
4245 		      /* ELT_T is the element's own vector type here.  */
4246 		      if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4247 						      TREE_TYPE (elt_t)))
4248 			{
4249 			  error ("incorrect type of vector CONSTRUCTOR"
4250 				 " elements");
4251 			  debug_generic_stmt (rhs1);
4252 			  return true;
4253 			}
4254 		      else if (CONSTRUCTOR_NELTS (rhs1)
4255 			       * TYPE_VECTOR_SUBPARTS (elt_t)
4256 			       != TYPE_VECTOR_SUBPARTS (rhs1_type))
4257 			{
4258 			  error ("incorrect number of vector CONSTRUCTOR"
4259 				 " elements");
4260 			  debug_generic_stmt (rhs1);
4261 			  return true;
4262 			}
4263 		    }
4264 		  else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4265 						       elt_t))
4266 		    {
4267 		      error ("incorrect type of vector CONSTRUCTOR elements");
4268 		      debug_generic_stmt (rhs1);
4269 		      return true;
4270 		    }
4271 		  else if (CONSTRUCTOR_NELTS (rhs1)
4272 			   > TYPE_VECTOR_SUBPARTS (rhs1_type))
4273 		    {
4274 		      error ("incorrect number of vector CONSTRUCTOR elements");
4275 		      debug_generic_stmt (rhs1);
4276 		      return true;
4277 		    }
4278 		}
4279 	      else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4280 		{
4281 		  error ("incorrect type of vector CONSTRUCTOR elements");
4282 		  debug_generic_stmt (rhs1);
4283 		  return true;
4284 		}
4285 	      if (elt_i != NULL_TREE
4286 		  && (TREE_CODE (elt_t) == VECTOR_TYPE
4287 		      || TREE_CODE (elt_i) != INTEGER_CST
4288 		      || compare_tree_int (elt_i, i) != 0))
4289 		{
4290 		  error ("vector CONSTRUCTOR with non-NULL element index");
4291 		  debug_generic_stmt (rhs1);
4292 		  return true;
4293 		}
4294 	      if (!is_gimple_val (elt_v))
4295 		{
4296 		  error ("vector CONSTRUCTOR element is not a GIMPLE value");
4297 		  debug_generic_stmt (rhs1);
4298 		  return true;
4299 		}
4300 	    }
4301 	}
4302       else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4303 	{
4304 	  error ("non-vector CONSTRUCTOR with elements");
4305 	  debug_generic_stmt (rhs1);
4306 	  return true;
4307 	}
4308       return res;
4309     case OBJ_TYPE_REF:
4310     case ASSERT_EXPR:
4311     case WITH_SIZE_EXPR:
4312       /* FIXME.  */
4313       return res;
4314 
4315     default:;
4316     }
4317 
4318   return res;
4319 }
4320 
4321 /* Verify the contents of a GIMPLE_ASSIGN STMT.  Returns true when there
4322    is a problem, otherwise false.  */
4323 
4324 static bool
4325 verify_gimple_assign (gassign *stmt)
4326 {
4327   switch (gimple_assign_rhs_class (stmt))
4328     {
4329     case GIMPLE_SINGLE_RHS:
4330       return verify_gimple_assign_single (stmt);
4331 
4332     case GIMPLE_UNARY_RHS:
4333       return verify_gimple_assign_unary (stmt);
4334 
4335     case GIMPLE_BINARY_RHS:
4336       return verify_gimple_assign_binary (stmt);
4337 
4338     case GIMPLE_TERNARY_RHS:
4339       return verify_gimple_assign_ternary (stmt);
4340 
4341     default:
4342       gcc_unreachable ();
4343     }
4344 }
4345 
4346 /* Verify the contents of a GIMPLE_RETURN STMT.  Returns true when there
4347    is a problem, otherwise false.  */
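/* Illustrative (hypothetical) failure: in 'long f (void)' the statement
   'return i_1;' with a 32-bit i_1 is rejected, because the value does
   not trivially convert to the declared result type.  */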
4348 
4349 static bool
4350 verify_gimple_return (greturn *stmt)
4351 {
4352   tree op = gimple_return_retval (stmt);
4353   tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4354 
4355   /* We cannot test for present return values as we do not fix up missing
4356      return values from the original source.  */
4357   if (op == NULL)
4358     return false;
4359 
4360   if (!is_gimple_val (op)
4361       && TREE_CODE (op) != RESULT_DECL)
4362     {
4363       error ("invalid operand in return statement");
4364       debug_generic_stmt (op);
4365       return true;
4366     }
4367 
4368   if ((TREE_CODE (op) == RESULT_DECL
4369        && DECL_BY_REFERENCE (op))
4370       || (TREE_CODE (op) == SSA_NAME
4371 	  && SSA_NAME_VAR (op)
4372 	  && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4373 	  && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4374     op = TREE_TYPE (op);
4375 
4376   if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4377     {
4378       error ("invalid conversion in return statement");
4379       debug_generic_stmt (restype);
4380       debug_generic_stmt (TREE_TYPE (op));
4381       return true;
4382     }
4383 
4384   return false;
4385 }
4386 
4387 
4388 /* Verify the contents of a GIMPLE_GOTO STMT.  Returns true when there
4389    is a problem, otherwise false.  */
4390 
4391 static bool
4392 verify_gimple_goto (ggoto *stmt)
4393 {
4394   tree dest = gimple_goto_dest (stmt);
4395 
4396   /* ???  We have two canonical forms of direct goto destinations, a
4397      bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL.  */
4398   if (TREE_CODE (dest) != LABEL_DECL
4399       && (!is_gimple_val (dest)
4400 	  || !POINTER_TYPE_P (TREE_TYPE (dest))))
4401     {
4402       error ("goto destination is neither a label nor a pointer");
4403       return true;
4404     }
4405 
4406   return false;
4407 }
4408 
4409 /* Verify the contents of a GIMPLE_SWITCH STMT.  Returns true when there
4410    is a problem, otherwise false.  */
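/* Illustrative (hypothetical) well-formed switch:
     switch (i_1) <default: <D>, case 1: <L1>, case 3 ... 5: <L2>>
   The default label comes first and has no CASE_LOW or CASE_HIGH, the
   remaining labels are sorted and share one type, and any CASE_HIGH is
   strictly above its CASE_LOW.  */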
4411 
4412 static bool
4413 verify_gimple_switch (gswitch *stmt)
4414 {
4415   unsigned int i, n;
4416   tree elt, prev_upper_bound = NULL_TREE;
4417   tree index_type, elt_type = NULL_TREE;
4418 
4419   if (!is_gimple_val (gimple_switch_index (stmt)))
4420     {
4421       error ("invalid operand to switch statement");
4422       debug_generic_stmt (gimple_switch_index (stmt));
4423       return true;
4424     }
4425 
4426   index_type = TREE_TYPE (gimple_switch_index (stmt));
4427   if (! INTEGRAL_TYPE_P (index_type))
4428     {
4429       error ("non-integral type switch statement");
4430       debug_generic_expr (index_type);
4431       return true;
4432     }
4433 
4434   elt = gimple_switch_label (stmt, 0);
4435   if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4436     {
4437       error ("invalid default case label in switch statement");
4438       debug_generic_expr (elt);
4439       return true;
4440     }
4441 
4442   n = gimple_switch_num_labels (stmt);
4443   for (i = 1; i < n; i++)
4444     {
4445       elt = gimple_switch_label (stmt, i);
4446 
4447       if (! CASE_LOW (elt))
4448 	{
4449 	  error ("invalid case label in switch statement");
4450 	  debug_generic_expr (elt);
4451 	  return true;
4452 	}
4453       if (CASE_HIGH (elt)
4454 	  && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4455 	{
4456 	  error ("invalid case range in switch statement");
4457 	  debug_generic_expr (elt);
4458 	  return true;
4459 	}
4460 
4461       if (elt_type)
4462 	{
4463 	  if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4464 	      || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4465 	    {
4466 	      error ("type mismatch for case label in switch statement");
4467 	      debug_generic_expr (elt);
4468 	      return true;
4469 	    }
4470 	}
4471       else
4472 	{
4473 	  elt_type = TREE_TYPE (CASE_LOW (elt));
4474 	  if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4475 	    {
4476 	      error ("type precision mismatch in switch statement");
4477 	      return true;
4478 	    }
4479 	}
4480 
4481       if (prev_upper_bound)
4482 	{
4483 	  if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4484 	    {
4485 	      error ("case labels not sorted in switch statement");
4486 	      return true;
4487 	    }
4488 	}
4489 
4490       prev_upper_bound = CASE_HIGH (elt);
4491       if (! prev_upper_bound)
4492 	prev_upper_bound = CASE_LOW (elt);
4493     }
4494 
4495   return false;
4496 }
4497 
4498 /* Verify a gimple debug statement STMT.
4499    Returns true if anything is wrong.  */
4500 
4501 static bool
4502 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4503 {
4504   /* There isn't much that could be wrong in a gimple debug stmt.  A
4505      gimple debug bind stmt, for example, maps a tree (usually a
4506      VAR_DECL or a PARM_DECL, but possibly a scalarized component or
4507      member of an aggregate type) to another tree that can be an
4508      arbitrary expression.  These stmts expand into debug insns, and
4509      are converted to debug notes by var-tracking.c.  */
4510   return false;
4511 }
4512 
4513 /* Verify a gimple label statement STMT.
4514    Returns true if anything is wrong.  */
4515 
4516 static bool
4517 verify_gimple_label (glabel *stmt)
4518 {
4519   tree decl = gimple_label_label (stmt);
4520   int uid;
4521   bool err = false;
4522 
4523   if (TREE_CODE (decl) != LABEL_DECL)
4524     return true;
4525   if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4526       && DECL_CONTEXT (decl) != current_function_decl)
4527     {
4528       error ("label's context is not the current function decl");
4529       err |= true;
4530     }
4531 
4532   uid = LABEL_DECL_UID (decl);
4533   if (cfun->cfg
4534       && (uid == -1
4535 	  || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4536     {
4537       error ("incorrect entry in label_to_block_map");
4538       err |= true;
4539     }
4540 
4541   uid = EH_LANDING_PAD_NR (decl);
4542   if (uid)
4543     {
4544       eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4545       if (decl != lp->post_landing_pad)
4546 	{
4547 	  error ("incorrect setting of landing pad number");
4548 	  err |= true;
4549 	}
4550     }
4551 
4552   return err;
4553 }
4554 
4555 /* Verify a gimple cond statement STMT.
4556    Returns true if anything is wrong.  */
4557 
4558 static bool
4559 verify_gimple_cond (gcond *stmt)
4560 {
4561   if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4562     {
4563       error ("invalid comparison code in gimple cond");
4564       return true;
4565     }
4566   if (!(!gimple_cond_true_label (stmt)
4567 	|| TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4568       || !(!gimple_cond_false_label (stmt)
4569 	   || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4570     {
4571       error ("invalid labels in gimple cond");
4572       return true;
4573     }
4574 
4575   return verify_gimple_comparison (boolean_type_node,
4576 				   gimple_cond_lhs (stmt),
4577 				   gimple_cond_rhs (stmt),
4578 				   gimple_cond_code (stmt));
4579 }
4580 
4581 /* Verify the GIMPLE statement STMT.  Returns true if there is an
4582    error, otherwise false.  */
4583 
4584 static bool
4585 verify_gimple_stmt (gimple *stmt)
4586 {
4587   switch (gimple_code (stmt))
4588     {
4589     case GIMPLE_ASSIGN:
4590       return verify_gimple_assign (as_a <gassign *> (stmt));
4591 
4592     case GIMPLE_LABEL:
4593       return verify_gimple_label (as_a <glabel *> (stmt));
4594 
4595     case GIMPLE_CALL:
4596       return verify_gimple_call (as_a <gcall *> (stmt));
4597 
4598     case GIMPLE_COND:
4599       return verify_gimple_cond (as_a <gcond *> (stmt));
4600 
4601     case GIMPLE_GOTO:
4602       return verify_gimple_goto (as_a <ggoto *> (stmt));
4603 
4604     case GIMPLE_SWITCH:
4605       return verify_gimple_switch (as_a <gswitch *> (stmt));
4606 
4607     case GIMPLE_RETURN:
4608       return verify_gimple_return (as_a <greturn *> (stmt));
4609 
4610     case GIMPLE_ASM:
4611       return false;
4612 
4613     case GIMPLE_TRANSACTION:
4614       return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4615 
4616     /* Tuples that do not have tree operands.  */
4617     case GIMPLE_NOP:
4618     case GIMPLE_PREDICT:
4619     case GIMPLE_RESX:
4620     case GIMPLE_EH_DISPATCH:
4621     case GIMPLE_EH_MUST_NOT_THROW:
4622       return false;
4623 
4624     CASE_GIMPLE_OMP:
4625       /* OpenMP directives are validated by the FE and never operated
4626 	 on by the optimizers.  Furthermore, GIMPLE_OMP_FOR may contain
4627 	 non-gimple expressions when the main index variable has had
4628 	 its address taken.  This does not affect the loop itself
4629 	 because the header of a GIMPLE_OMP_FOR is merely used to determine
4630 	 how to set up the parallel iteration.  */
4631       return false;
4632 
4633     case GIMPLE_DEBUG:
4634       return verify_gimple_debug (stmt);
4635 
4636     default:
4637       gcc_unreachable ();
4638     }
4639 }
4640 
4641 /* Verify the contents of a GIMPLE_PHI.  Returns true if there is a problem,
4642    and false otherwise.  */
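/* Illustrative (hypothetical) PHI:  x_1 = PHI <x_2(3), 0(5)>
   The result must be an SSA name, a virtual result must be based on
   the function's virtual operand, and every argument must be a GIMPLE
   value of the same virtual-ness as the result.  */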
4643 
4644 static bool
4645 verify_gimple_phi (gimple *phi)
4646 {
4647   bool err = false;
4648   unsigned i;
4649   tree phi_result = gimple_phi_result (phi);
4650   bool virtual_p;
4651 
4652   if (!phi_result)
4653     {
4654       error ("invalid PHI result");
4655       return true;
4656     }
4657 
4658   virtual_p = virtual_operand_p (phi_result);
4659   if (TREE_CODE (phi_result) != SSA_NAME
4660       || (virtual_p
4661 	  && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4662     {
4663       error ("invalid PHI result");
4664       err = true;
4665     }
4666 
4667   for (i = 0; i < gimple_phi_num_args (phi); i++)
4668     {
4669       tree t = gimple_phi_arg_def (phi, i);
4670 
4671       if (!t)
4672 	{
4673 	  error ("missing PHI def");
4674 	  err |= true;
4675 	  continue;
4676 	}
4677       /* Addressable variables do have SSA_NAMEs but they
4678 	 are not considered gimple values.  */
4679       else if ((TREE_CODE (t) == SSA_NAME
4680 		&& virtual_p != virtual_operand_p (t))
4681 	       || (virtual_p
4682 		   && (TREE_CODE (t) != SSA_NAME
4683 		       || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4684 	       || (!virtual_p
4685 		   && !is_gimple_val (t)))
4686 	{
4687 	  error ("invalid PHI argument");
4688 	  debug_generic_expr (t);
4689 	  err |= true;
4690 	}
4691 #ifdef ENABLE_TYPES_CHECKING
4692       if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4693 	{
4694 	  error ("incompatible types in PHI argument %u", i);
4695 	  debug_generic_stmt (TREE_TYPE (phi_result));
4696 	  debug_generic_stmt (TREE_TYPE (t));
4697 	  err |= true;
4698 	}
4699 #endif
4700     }
4701 
4702   return err;
4703 }
4704 
4705 /* Verify the GIMPLE statements inside the sequence STMTS.  */
4706 
4707 static bool
4708 verify_gimple_in_seq_2 (gimple_seq stmts)
4709 {
4710   gimple_stmt_iterator ittr;
4711   bool err = false;
4712 
4713   for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4714     {
4715       gimple *stmt = gsi_stmt (ittr);
4716 
4717       switch (gimple_code (stmt))
4718         {
4719 	case GIMPLE_BIND:
4720 	  err |= verify_gimple_in_seq_2 (
4721                    gimple_bind_body (as_a <gbind *> (stmt)));
4722 	  break;
4723 
4724 	case GIMPLE_TRY:
4725 	  err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4726 	  err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4727 	  break;
4728 
4729 	case GIMPLE_EH_FILTER:
4730 	  err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4731 	  break;
4732 
4733 	case GIMPLE_EH_ELSE:
4734 	  {
4735 	    geh_else *eh_else = as_a <geh_else *> (stmt);
4736 	    err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4737 	    err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4738 	  }
4739 	  break;
4740 
4741 	case GIMPLE_CATCH:
4742 	  err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4743 					   as_a <gcatch *> (stmt)));
4744 	  break;
4745 
4746 	case GIMPLE_TRANSACTION:
4747 	  err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4748 	  break;
4749 
4750 	default:
4751 	  {
4752 	    bool err2 = verify_gimple_stmt (stmt);
4753 	    if (err2)
4754 	      debug_gimple_stmt (stmt);
4755 	    err |= err2;
4756 	  }
4757 	}
4758     }
4759 
4760   return err;
4761 }
4762 
4763 /* Verify the contents of a GIMPLE_TRANSACTION.  Returns true if there
4764    is a problem, otherwise false.  */
4765 
4766 static bool
4767 verify_gimple_transaction (gtransaction *stmt)
4768 {
4769   tree lab;
4770 
4771   lab = gimple_transaction_label_norm (stmt);
4772   if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4773     return true;
4774   lab = gimple_transaction_label_uninst (stmt);
4775   if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4776     return true;
4777   lab = gimple_transaction_label_over (stmt);
4778   if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4779     return true;
4780 
4781   return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4782 }
4783 
4784 
4785 /* Verify the GIMPLE statements inside the statement list STMTS.  */
4786 
4787 DEBUG_FUNCTION void
4788 verify_gimple_in_seq (gimple_seq stmts)
4789 {
4790   timevar_push (TV_TREE_STMT_VERIFY);
4791   if (verify_gimple_in_seq_2 (stmts))
4792     internal_error ("verify_gimple failed");
4793   timevar_pop (TV_TREE_STMT_VERIFY);
4794 }
4795 
4796 /* Return true when T can be shared.  */
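/* For example (illustrative): constants, types, decls and SSA names may
   appear in any number of statements, while a tree such as a
   COMPONENT_REF must be unshared before being reused.  */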
4797 
4798 static bool
4799 tree_node_can_be_shared (tree t)
4800 {
4801   if (IS_TYPE_OR_DECL_P (t)
4802       || is_gimple_min_invariant (t)
4803       || TREE_CODE (t) == SSA_NAME
4804       || t == error_mark_node
4805       || TREE_CODE (t) == IDENTIFIER_NODE)
4806     return true;
4807 
4808   if (TREE_CODE (t) == CASE_LABEL_EXPR)
4809     return true;
4810 
4811   if (DECL_P (t))
4812     return true;
4813 
4814   return false;
4815 }
4816 
4817 /* Called via walk_tree.  Verify tree sharing.  */
4818 
4819 static tree
4820 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4821 {
4822   hash_set<void *> *visited = (hash_set<void *> *) data;
4823 
4824   if (tree_node_can_be_shared (*tp))
4825     {
4826       *walk_subtrees = false;
4827       return NULL;
4828     }
4829 
4830   if (visited->add (*tp))
4831     return *tp;
4832 
4833   return NULL;
4834 }
4835 
4836 /* Called via walk_gimple_stmt.  Verify tree sharing.  */
4837 
4838 static tree
4839 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4840 {
4841   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4842   return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4843 }
4844 
4845 static bool eh_error_found;
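/* Callback for traversing the EH throw-statement table: report
   statements that are in the table but were not visited in the IL.  */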
4846 bool
4847 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
4848 			   hash_set<gimple *> *visited)
4849 {
4850   if (!visited->contains (stmt))
4851     {
4852       error ("dead STMT in EH table");
4853       debug_gimple_stmt (stmt);
4854       eh_error_found = true;
4855     }
4856   return true;
4857 }
4858 
4859 /* Verify that the block of location LOC is contained in BLOCKS.  */
4860 
4861 static bool
4862 verify_location (hash_set<tree> *blocks, location_t loc)
4863 {
4864   tree block = LOCATION_BLOCK (loc);
4865   if (block != NULL_TREE
4866       && !blocks->contains (block))
4867     {
4868       error ("location references block not in block tree");
4869       return true;
4870     }
4871   if (block != NULL_TREE)
4872     return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4873   return false;
4874 }
4875 
4876 /* Called via walk_tree.  Verify that expressions have no blocks.  */
4877 
4878 static tree
4879 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4880 {
4881   if (!EXPR_P (*tp))
4882     {
4883       *walk_subtrees = false;
4884       return NULL;
4885     }
4886 
4887   location_t loc = EXPR_LOCATION (*tp);
4888   if (LOCATION_BLOCK (loc) != NULL)
4889     return *tp;
4890 
4891   return NULL;
4892 }
4893 
4894 /* Called via walk_tree.  Verify locations of expressions.  */
4895 
4896 static tree
4897 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4898 {
4899   hash_set<tree> *blocks = (hash_set<tree> *) data;
4900 
4901   if (TREE_CODE (*tp) == VAR_DECL
4902       && DECL_HAS_DEBUG_EXPR_P (*tp))
4903     {
4904       tree t = DECL_DEBUG_EXPR (*tp);
4905       tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4906       if (addr)
4907 	return addr;
4908     }
4909   if ((TREE_CODE (*tp) == VAR_DECL
4910        || TREE_CODE (*tp) == PARM_DECL
4911        || TREE_CODE (*tp) == RESULT_DECL)
4912       && DECL_HAS_VALUE_EXPR_P (*tp))
4913     {
4914       tree t = DECL_VALUE_EXPR (*tp);
4915       tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4916       if (addr)
4917 	return addr;
4918     }
4919 
4920   if (!EXPR_P (*tp))
4921     {
4922       *walk_subtrees = false;
4923       return NULL;
4924     }
4925 
4926   location_t loc = EXPR_LOCATION (*tp);
4927   if (verify_location (blocks, loc))
4928     return *tp;
4929 
4930   return NULL;
4931 }
4932 
4933 /* Called via walk_gimple_op.  Verify locations of expressions.  */
4934 
4935 static tree
4936 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4937 {
4938   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4939   return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4940 }
4941 
4942 /* Insert all subblocks of BLOCK into BLOCKS and recurse.  */
4943 
4944 static void
4945 collect_subblocks (hash_set<tree> *blocks, tree block)
4946 {
4947   tree t;
4948   for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4949     {
4950       blocks->add (t);
4951       collect_subblocks (blocks, t);
4952     }
4953 }
4954 
4955 /* Verify the GIMPLE statements in the CFG of FN.  */
4956 
4957 DEBUG_FUNCTION void
4958 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
4959 {
4960   basic_block bb;
4961   bool err = false;
4962 
4963   timevar_push (TV_TREE_STMT_VERIFY);
4964   hash_set<void *> visited;
4965   hash_set<gimple *> visited_stmts;
4966 
4967   /* Collect all BLOCKs referenced by the BLOCK tree of FN.  */
4968   hash_set<tree> blocks;
4969   if (DECL_INITIAL (fn->decl))
4970     {
4971       blocks.add (DECL_INITIAL (fn->decl));
4972       collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
4973     }
4974 
4975   FOR_EACH_BB_FN (bb, fn)
4976     {
4977       gimple_stmt_iterator gsi;
4978 
4979       for (gphi_iterator gpi = gsi_start_phis (bb);
4980 	   !gsi_end_p (gpi);
4981 	   gsi_next (&gpi))
4982 	{
4983 	  gphi *phi = gpi.phi ();
4984 	  bool err2 = false;
4985 	  unsigned i;
4986 
4987 	  visited_stmts.add (phi);
4988 
4989 	  if (gimple_bb (phi) != bb)
4990 	    {
4991 	      error ("gimple_bb (phi) is set to a wrong basic block");
4992 	      err2 = true;
4993 	    }
4994 
4995 	  err2 |= verify_gimple_phi (phi);
4996 
4997 	  /* Only PHI arguments have locations.  */
4998 	  if (gimple_location (phi) != UNKNOWN_LOCATION)
4999 	    {
5000 	      error ("PHI node with location");
5001 	      err2 = true;
5002 	    }
5003 
5004 	  for (i = 0; i < gimple_phi_num_args (phi); i++)
5005 	    {
5006 	      tree arg = gimple_phi_arg_def (phi, i);
5007 	      tree addr = walk_tree (&arg, verify_node_sharing_1,
5008 				     &visited, NULL);
5009 	      if (addr)
5010 		{
5011 		  error ("incorrect sharing of tree nodes");
5012 		  debug_generic_expr (addr);
5013 		  err2 |= true;
5014 		}
5015 	      location_t loc = gimple_phi_arg_location (phi, i);
5016 	      if (virtual_operand_p (gimple_phi_result (phi))
5017 		  && loc != UNKNOWN_LOCATION)
5018 		{
5019 		  error ("virtual PHI with argument locations");
5020 		  err2 = true;
5021 		}
5022 	      addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5023 	      if (addr)
5024 		{
5025 		  debug_generic_expr (addr);
5026 		  err2 = true;
5027 		}
5028 	      err2 |= verify_location (&blocks, loc);
5029 	    }
5030 
5031 	  if (err2)
5032 	    debug_gimple_stmt (phi);
5033 	  err |= err2;
5034 	}
5035 
5036       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5037 	{
5038 	  gimple *stmt = gsi_stmt (gsi);
5039 	  bool err2 = false;
5040 	  struct walk_stmt_info wi;
5041 	  tree addr;
5042 	  int lp_nr;
5043 
5044 	  visited_stmts.add (stmt);
5045 
5046 	  if (gimple_bb (stmt) != bb)
5047 	    {
5048 	      error ("gimple_bb (stmt) is set to a wrong basic block");
5049 	      err2 = true;
5050 	    }
5051 
5052 	  err2 |= verify_gimple_stmt (stmt);
5053 	  err2 |= verify_location (&blocks, gimple_location (stmt));
5054 
5055 	  memset (&wi, 0, sizeof (wi));
5056 	  wi.info = (void *) &visited;
5057 	  addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5058 	  if (addr)
5059 	    {
5060 	      error ("incorrect sharing of tree nodes");
5061 	      debug_generic_expr (addr);
5062 	      err2 |= true;
5063 	    }
5064 
5065 	  memset (&wi, 0, sizeof (wi));
5066 	  wi.info = (void *) &blocks;
5067 	  addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5068 	  if (addr)
5069 	    {
5070 	      debug_generic_expr (addr);
5071 	      err2 |= true;
5072 	    }
5073 
5074 	  /* ???  Instead of not checking these stmts at all the walker
5075 	     should know its context via wi.  */
5076 	  if (!is_gimple_debug (stmt)
5077 	      && !is_gimple_omp (stmt))
5078 	    {
5079 	      memset (&wi, 0, sizeof (wi));
5080 	      addr = walk_gimple_op (stmt, verify_expr, &wi);
5081 	      if (addr)
5082 		{
5083 		  debug_generic_expr (addr);
5084 		  inform (gimple_location (stmt), "in statement");
5085 		  err2 |= true;
5086 		}
5087 	    }
5088 
5089 	  /* If the statement is marked as part of an EH region, then it is
5090 	     expected that the statement could throw.  Verify that when
5091 	     optimizations simplify a statement to the point where we can
5092 	     prove it cannot throw, the other data structures are updated
5093 	     to match.  */
5094 	  lp_nr = lookup_stmt_eh_lp (stmt);
5095 	  if (lp_nr > 0)
5096 	    {
5097 	      if (!stmt_could_throw_p (stmt))
5098 		{
5099 		  if (verify_nothrow)
5100 		    {
5101 		      error ("statement marked for throw, but doesn%'t");
5102 		      err2 |= true;
5103 		    }
5104 		}
5105 	      else if (!gsi_one_before_end_p (gsi))
5106 		{
5107 		  error ("statement marked for throw in middle of block");
5108 		  err2 |= true;
5109 		}
5110 	    }
5111 
5112 	  if (err2)
5113 	    debug_gimple_stmt (stmt);
5114 	  err |= err2;
5115 	}
5116     }
5117 
5118   eh_error_found = false;
5119   hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5120   if (eh_table)
5121     eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5122       (&visited_stmts);
5123 
5124   if (err || eh_error_found)
5125     internal_error ("verify_gimple failed");
5126 
5127   verify_histograms ();
5128   timevar_pop (TV_TREE_STMT_VERIFY);
5129 }
5130 
5131 
5132 /* Verifies that the flow information is OK.  */
5133 
5134 static int
5135 gimple_verify_flow_info (void)
5136 {
5137   int err = 0;
5138   basic_block bb;
5139   gimple_stmt_iterator gsi;
5140   gimple *stmt;
5141   edge e;
5142   edge_iterator ei;
5143 
5144   if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5145       || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5146     {
5147       error ("ENTRY_BLOCK has IL associated with it");
5148       err = 1;
5149     }
5150 
5151   if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5152       || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5153     {
5154       error ("EXIT_BLOCK has IL associated with it");
5155       err = 1;
5156     }
5157 
5158   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5159     if (e->flags & EDGE_FALLTHRU)
5160       {
5161 	error ("fallthru to exit from bb %d", e->src->index);
5162 	err = 1;
5163       }
5164 
5165   FOR_EACH_BB_FN (bb, cfun)
5166     {
5167       bool found_ctrl_stmt = false;
5168 
5169       stmt = NULL;
5170 
5171       /* Skip labels at the start of the basic block.  */
5172       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5173 	{
5174 	  tree label;
5175 	  gimple *prev_stmt = stmt;
5176 
5177 	  stmt = gsi_stmt (gsi);
5178 
5179 	  if (gimple_code (stmt) != GIMPLE_LABEL)
5180 	    break;
5181 
5182 	  label = gimple_label_label (as_a <glabel *> (stmt));
5183 	  if (prev_stmt && DECL_NONLOCAL (label))
5184 	    {
5185 	      error ("nonlocal label ");
5186 	      print_generic_expr (stderr, label, 0);
5187 	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
5188 		       bb->index);
5189 	      err = 1;
5190 	    }
5191 
5192 	  if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5193 	    {
5194 	      error ("EH landing pad label ");
5195 	      print_generic_expr (stderr, label, 0);
5196 	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
5197 		       bb->index);
5198 	      err = 1;
5199 	    }
5200 
5201 	  if (label_to_block (label) != bb)
5202 	    {
5203 	      error ("label ");
5204 	      print_generic_expr (stderr, label, 0);
5205 	      fprintf (stderr, " to block does not match in bb %d",
5206 		       bb->index);
5207 	      err = 1;
5208 	    }
5209 
5210 	  if (decl_function_context (label) != current_function_decl)
5211 	    {
5212 	      error ("label ");
5213 	      print_generic_expr (stderr, label, 0);
5214 	      fprintf (stderr, " has incorrect context in bb %d",
5215 		       bb->index);
5216 	      err = 1;
5217 	    }
5218 	}
5219 
5220       /* Verify that the body of basic block BB is free of control flow.  */
5221       for (; !gsi_end_p (gsi); gsi_next (&gsi))
5222 	{
5223 	  gimple *stmt = gsi_stmt (gsi);
5224 
5225 	  if (found_ctrl_stmt)
5226 	    {
5227 	      error ("control flow in the middle of basic block %d",
5228 		     bb->index);
5229 	      err = 1;
5230 	    }
5231 
5232 	  if (stmt_ends_bb_p (stmt))
5233 	    found_ctrl_stmt = true;
5234 
5235 	  if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5236 	    {
5237 	      error ("label ");
5238 	      print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
5239 	      fprintf (stderr, " in the middle of basic block %d", bb->index);
5240 	      err = 1;
5241 	    }
5242 	}
5243 
5244       gsi = gsi_last_bb (bb);
5245       if (gsi_end_p (gsi))
5246 	continue;
5247 
5248       stmt = gsi_stmt (gsi);
5249 
5250       if (gimple_code (stmt) == GIMPLE_LABEL)
5251 	continue;
5252 
5253       err |= verify_eh_edges (stmt);
5254 
5255       if (is_ctrl_stmt (stmt))
5256 	{
5257 	  FOR_EACH_EDGE (e, ei, bb->succs)
5258 	    if (e->flags & EDGE_FALLTHRU)
5259 	      {
5260 		error ("fallthru edge after a control statement in bb %d",
5261 		       bb->index);
5262 		err = 1;
5263 	      }
5264 	}
5265 
5266       if (gimple_code (stmt) != GIMPLE_COND)
5267 	{
5268 	  /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5269 	     after anything other than an if statement.  */
5270 	  FOR_EACH_EDGE (e, ei, bb->succs)
5271 	    if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5272 	      {
5273 		error ("true/false edge after a non-GIMPLE_COND in bb %d",
5274 		       bb->index);
5275 		err = 1;
5276 	      }
5277 	}
5278 
5279       switch (gimple_code (stmt))
5280 	{
5281 	case GIMPLE_COND:
5282 	  {
5283 	    edge true_edge;
5284 	    edge false_edge;
5285 
5286 	    extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5287 
5288 	    if (!true_edge
5289 		|| !false_edge
5290 		|| !(true_edge->flags & EDGE_TRUE_VALUE)
5291 		|| !(false_edge->flags & EDGE_FALSE_VALUE)
5292 		|| (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5293 		|| (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5294 		|| EDGE_COUNT (bb->succs) >= 3)
5295 	      {
5296 		error ("wrong outgoing edge flags at end of bb %d",
5297 		       bb->index);
5298 		err = 1;
5299 	      }
5300 	  }
5301 	  break;
5302 
5303 	case GIMPLE_GOTO:
5304 	  if (simple_goto_p (stmt))
5305 	    {
5306 	      error ("explicit goto at end of bb %d", bb->index);
5307 	      err = 1;
5308 	    }
5309 	  else
5310 	    {
5311 	      /* FIXME.  We should double check that the labels in the
5312 		 destination blocks have their address taken.  */
5313 	      FOR_EACH_EDGE (e, ei, bb->succs)
5314 		if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5315 				 | EDGE_FALSE_VALUE))
5316 		    || !(e->flags & EDGE_ABNORMAL))
5317 		  {
5318 		    error ("wrong outgoing edge flags at end of bb %d",
5319 			   bb->index);
5320 		    err = 1;
5321 		  }
5322 	    }
5323 	  break;
5324 
5325 	case GIMPLE_CALL:
5326 	  if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5327 	    break;
5328 	  /* ... fallthru ... */
5329 	case GIMPLE_RETURN:
5330 	  if (!single_succ_p (bb)
5331 	      || (single_succ_edge (bb)->flags
5332 		  & (EDGE_FALLTHRU | EDGE_ABNORMAL
5333 		     | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5334 	    {
5335 	      error ("wrong outgoing edge flags at end of bb %d", bb->index);
5336 	      err = 1;
5337 	    }
5338 	  if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5339 	    {
5340 	      error ("return edge does not point to exit in bb %d",
5341 		     bb->index);
5342 	      err = 1;
5343 	    }
5344 	  break;
5345 
5346 	case GIMPLE_SWITCH:
5347 	  {
5348 	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
5349 	    tree prev;
5350 	    edge e;
5351 	    size_t i, n;
5352 
5353 	    n = gimple_switch_num_labels (switch_stmt);
5354 
5355 	    /* Mark all the destination basic blocks.  */
5356 	    for (i = 0; i < n; ++i)
5357 	      {
5358 		tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5359 		basic_block label_bb = label_to_block (lab);
5360 		gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5361 		label_bb->aux = (void *)1;
5362 	      }
5363 
5364 	    /* Verify that the case labels are sorted.  */
5365 	    prev = gimple_switch_label (switch_stmt, 0);
5366 	    for (i = 1; i < n; ++i)
5367 	      {
5368 		tree c = gimple_switch_label (switch_stmt, i);
5369 		if (!CASE_LOW (c))
5370 		  {
5371 		    error ("found default case not at the start of "
5372 			   "case vector");
5373 		    err = 1;
5374 		    continue;
5375 		  }
5376 		if (CASE_LOW (prev)
5377 		    && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5378 		  {
5379 		    error ("case labels not sorted: ");
5380 		    print_generic_expr (stderr, prev, 0);
5381 		    fprintf (stderr, " is greater than ");
5382 		    print_generic_expr (stderr, c, 0);
5383 		    fprintf (stderr, " but comes before it.\n");
5384 		    err = 1;
5385 		  }
5386 		prev = c;
5387 	      }
5388 	    /* VRP will remove the default case if it can prove it will
5389 	       never be executed.  So do not verify there always exists
5390 	       a default case here.  */
5391 
5392 	    FOR_EACH_EDGE (e, ei, bb->succs)
5393 	      {
5394 		if (!e->dest->aux)
5395 		  {
5396 		    error ("extra outgoing edge %d->%d",
5397 			   bb->index, e->dest->index);
5398 		    err = 1;
5399 		  }
5400 
5401 		e->dest->aux = (void *)2;
5402 		if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5403 				 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5404 		  {
5405 		    error ("wrong outgoing edge flags at end of bb %d",
5406 			   bb->index);
5407 		    err = 1;
5408 		  }
5409 	      }
5410 
5411 	    /* Check that we have all of them.  */
5412 	    for (i = 0; i < n; ++i)
5413 	      {
5414 		tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5415 		basic_block label_bb = label_to_block (lab);
5416 
5417 		if (label_bb->aux != (void *)2)
5418 		  {
5419 		    error ("missing edge %i->%i", bb->index, label_bb->index);
5420 		    err = 1;
5421 		  }
5422 	      }
5423 
5424 	    FOR_EACH_EDGE (e, ei, bb->succs)
5425 	      e->dest->aux = (void *)0;
5426 	  }
5427 	  break;
5428 
5429 	case GIMPLE_EH_DISPATCH:
5430 	  err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5431 	  break;
5432 
5433 	default:
5434 	  break;
5435 	}
5436     }
5437 
5438   if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5439     verify_dominators (CDI_DOMINATORS);
5440 
5441   return err;
5442 }
5443 
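/* An illustrative walk-through of the GIMPLE_SWITCH checking above
   (a sketch, not compiled code): for

     switch (x) { case 0: goto L0; default: goto L1; }

   every case destination is first marked with AUX == 1.  The walk over
   BB->succs then flips each reachable destination to AUX == 2 and flags
   unmarked destinations as extra edges; any label block still carrying
   AUX == 1 afterwards reveals a missing edge.  The final edge walk
   clears AUX again.  */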
5444 
5445 /* Updates phi nodes after creating a forwarder block joined
5446    by edge FALLTHRU.  */
5447 
5448 static void
5449 gimple_make_forwarder_block (edge fallthru)
5450 {
5451   edge e;
5452   edge_iterator ei;
5453   basic_block dummy, bb;
5454   tree var;
5455   gphi_iterator gsi;
5456 
5457   dummy = fallthru->src;
5458   bb = fallthru->dest;
5459 
5460   if (single_pred_p (bb))
5461     return;
5462 
5463   /* If we redirected a branch we must create new PHI nodes at the
5464      start of BB.  */
5465   for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5466     {
5467       gphi *phi, *new_phi;
5468 
5469       phi = gsi.phi ();
5470       var = gimple_phi_result (phi);
5471       new_phi = create_phi_node (var, bb);
5472       gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5473       add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5474 		   UNKNOWN_LOCATION);
5475     }
5476 
5477   /* Add the arguments we have stored on edges.  */
5478   FOR_EACH_EDGE (e, ei, bb->preds)
5479     {
5480       if (e == fallthru)
5481 	continue;
5482 
5483       flush_pending_stmts (e);
5484     }
5485 }
5486 
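/* A sketch of the rewiring above: if DUMMY originally held

     x_1 = PHI <a_2 (E1), b_3 (E2)>

   then after this function DUMMY keeps that PHI with a fresh result
   x_4, and BB gains

     x_1 = PHI <x_4 (FALLTHRU), ...>

   whose remaining arguments are supplied by flush_pending_stmts on the
   other incoming edges.  */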
5487 
5488 /* Return a non-special label in the head of basic block BB.
5489    Create one if it doesn't exist.  */
5490 
5491 tree
5492 gimple_block_label (basic_block bb)
5493 {
5494   gimple_stmt_iterator i, s = gsi_start_bb (bb);
5495   bool first = true;
5496   tree label;
5497   glabel *stmt;
5498 
5499   for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5500     {
5501       stmt = dyn_cast <glabel *> (gsi_stmt (i));
5502       if (!stmt)
5503 	break;
5504       label = gimple_label_label (stmt);
5505       if (!DECL_NONLOCAL (label))
5506 	{
5507 	  if (!first)
5508 	    gsi_move_before (&i, &s);
5509 	  return label;
5510 	}
5511     }
5512 
5513   label = create_artificial_label (UNKNOWN_LOCATION);
5514   stmt = gimple_build_label (label);
5515   gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5516   return label;
5517 }
5518 
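/* Example use (a sketch, not taken from this file): a transformation
   that must materialize an explicit jump to BB can combine this with
   the GIMPLE builders:

     tree lab = gimple_block_label (bb);
     ggoto *jump = gimple_build_goto (lab);
     gsi_insert_after (&gsi, jump, GSI_NEW_STMT);

   where GSI is whatever insertion point the caller has chosen; an
   existing non-special label of BB is reused rather than duplicated.  */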
5519 
5520 /* Attempt to perform edge redirection by replacing a possibly complex
5521    jump instruction by a goto or by removing the jump completely.
5522    This can apply only if all edges now point to the same block.  The
5523    parameters and return values are equivalent to
5524    redirect_edge_and_branch.  */
5525 
5526 static edge
5527 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5528 {
5529   basic_block src = e->src;
5530   gimple_stmt_iterator i;
5531   gimple *stmt;
5532 
5533   /* We can replace or remove a complex jump only when we have exactly
5534      two edges.  */
5535   if (EDGE_COUNT (src->succs) != 2
5536       /* Verify that all targets will be TARGET.  Specifically, the
5537 	 edge that is not E must also go to TARGET.  */
5538       || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5539     return NULL;
5540 
5541   i = gsi_last_bb (src);
5542   if (gsi_end_p (i))
5543     return NULL;
5544 
5545   stmt = gsi_stmt (i);
5546 
5547   if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5548     {
5549       gsi_remove (&i, true);
5550       e = ssa_redirect_edge (e, target);
5551       e->flags = EDGE_FALLTHRU;
5552       return e;
5553     }
5554 
5555   return NULL;
5556 }
5557 
5558 
5559 /* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
5560    edge representing the redirected branch.  */
5561 
5562 static edge
5563 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5564 {
5565   basic_block bb = e->src;
5566   gimple_stmt_iterator gsi;
5567   edge ret;
5568   gimple *stmt;
5569 
5570   if (e->flags & EDGE_ABNORMAL)
5571     return NULL;
5572 
5573   if (e->dest == dest)
5574     return NULL;
5575 
5576   if (e->flags & EDGE_EH)
5577     return redirect_eh_edge (e, dest);
5578 
5579   if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5580     {
5581       ret = gimple_try_redirect_by_replacing_jump (e, dest);
5582       if (ret)
5583 	return ret;
5584     }
5585 
5586   gsi = gsi_last_bb (bb);
5587   stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5588 
5589   switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5590     {
5591     case GIMPLE_COND:
5592       /* For COND_EXPR, we only need to redirect the edge.  */
5593       break;
5594 
5595     case GIMPLE_GOTO:
5596       /* No non-abnormal edges should lead from a non-simple goto, and
5597 	 simple ones should be represented implicitly.  */
5598       gcc_unreachable ();
5599 
5600     case GIMPLE_SWITCH:
5601       {
5602 	gswitch *switch_stmt = as_a <gswitch *> (stmt);
5603 	tree label = gimple_block_label (dest);
5604 	tree cases = get_cases_for_edge (e, switch_stmt);
5605 
5606 	/* If we have a list of cases associated with E, then use it
5607 	   as it's a lot faster than walking the entire case vector.  */
5608 	if (cases)
5609 	  {
5610 	    edge e2 = find_edge (e->src, dest);
5611 	    tree last, first;
5612 
5613 	    first = cases;
5614 	    while (cases)
5615 	      {
5616 		last = cases;
5617 		CASE_LABEL (cases) = label;
5618 		cases = CASE_CHAIN (cases);
5619 	      }
5620 
5621 	    /* If there was already an edge in the CFG, then we need
5622 	       to move all the cases associated with E to E2.  */
5623 	    if (e2)
5624 	      {
5625 		tree cases2 = get_cases_for_edge (e2, switch_stmt);
5626 
5627 		CASE_CHAIN (last) = CASE_CHAIN (cases2);
5628 		CASE_CHAIN (cases2) = first;
5629 	      }
5630 	    bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5631 	  }
5632 	else
5633 	  {
5634 	    size_t i, n = gimple_switch_num_labels (switch_stmt);
5635 
5636 	    for (i = 0; i < n; i++)
5637 	      {
5638 		tree elt = gimple_switch_label (switch_stmt, i);
5639 		if (label_to_block (CASE_LABEL (elt)) == e->dest)
5640 		  CASE_LABEL (elt) = label;
5641 	      }
5642 	  }
5643       }
5644       break;
5645 
5646     case GIMPLE_ASM:
5647       {
5648 	gasm *asm_stmt = as_a <gasm *> (stmt);
5649 	int i, n = gimple_asm_nlabels (asm_stmt);
5650 	tree label = NULL;
5651 
5652 	for (i = 0; i < n; ++i)
5653 	  {
5654 	    tree cons = gimple_asm_label_op (asm_stmt, i);
5655 	    if (label_to_block (TREE_VALUE (cons)) == e->dest)
5656 	      {
5657 		if (!label)
5658 		  label = gimple_block_label (dest);
5659 		TREE_VALUE (cons) = label;
5660 	      }
5661 	  }
5662 
5663 	/* If we didn't find any label matching the former edge in the
5664 	   asm labels, we must be redirecting the fallthrough
5665 	   edge.  */
5666 	gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5667       }
5668       break;
5669 
5670     case GIMPLE_RETURN:
5671       gsi_remove (&gsi, true);
5672       e->flags |= EDGE_FALLTHRU;
5673       break;
5674 
5675     case GIMPLE_OMP_RETURN:
5676     case GIMPLE_OMP_CONTINUE:
5677     case GIMPLE_OMP_SECTIONS_SWITCH:
5678     case GIMPLE_OMP_FOR:
5679       /* The edges from OMP constructs can be simply redirected.  */
5680       break;
5681 
5682     case GIMPLE_EH_DISPATCH:
5683       if (!(e->flags & EDGE_FALLTHRU))
5684 	redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5685       break;
5686 
5687     case GIMPLE_TRANSACTION:
5688       if (e->flags & EDGE_TM_ABORT)
5689 	gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
5690 				           gimple_block_label (dest));
5691       else if (e->flags & EDGE_TM_UNINSTRUMENTED)
5692 	gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
5693 				             gimple_block_label (dest));
5694       else
5695 	gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
5696 				           gimple_block_label (dest));
5697       break;
5698 
5699     default:
5700       /* Otherwise it must be a fallthru edge, and we don't need to
5701 	 do anything besides redirecting it.  */
5702       gcc_assert (e->flags & EDGE_FALLTHRU);
5703       break;
5704     }
5705 
5706   /* Now update the edges in the CFG; ssa_redirect_edge parks the PHI
5707      arguments of the old destination on the redirected edge as pending
5708      statements for the caller to flush.  */
5709   e = ssa_redirect_edge (e, dest);
5710 
5711   return e;
5712 }
5713 
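/* Callers normally reach this hook through the generic wrapper, roughly
   (sketch):

     edge redirected = redirect_edge_and_branch (e, dest);
     if (redirected)
       flush_pending_stmts (redirected);

   since ssa_redirect_edge parks the PHI arguments of the old
   destination on the redirected edge as pending statements.  */
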
5714 /* Returns true if it is possible to remove edge E by redirecting
5715    it to the destination of the other edge from E->src.  */
5716 
5717 static bool
5718 gimple_can_remove_branch_p (const_edge e)
5719 {
5720   if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5721     return false;
5722 
5723   return true;
5724 }
5725 
5726 /* Simple wrapper, as we can always redirect fallthru edges.  */
5727 
5728 static basic_block
5729 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5730 {
5731   e = gimple_redirect_edge_and_branch (e, dest);
5732   gcc_assert (e);
5733 
5734   return NULL;
5735 }
5736 
5737 
5738 /* Splits basic block BB after statement STMT (but at least after the
5739    labels).  If STMT is NULL, BB is split just after the labels.  */
5740 
5741 static basic_block
5742 gimple_split_block (basic_block bb, void *stmt)
5743 {
5744   gimple_stmt_iterator gsi;
5745   gimple_stmt_iterator gsi_tgt;
5746   gimple_seq list;
5747   basic_block new_bb;
5748   edge e;
5749   edge_iterator ei;
5750 
5751   new_bb = create_empty_bb (bb);
5752 
5753   /* Redirect the outgoing edges.  */
5754   new_bb->succs = bb->succs;
5755   bb->succs = NULL;
5756   FOR_EACH_EDGE (e, ei, new_bb->succs)
5757     e->src = new_bb;
5758 
5759   /* Get a stmt iterator pointing to the first stmt to move.  */
5760   if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
5761     gsi = gsi_after_labels (bb);
5762   else
5763     {
5764       gsi = gsi_for_stmt ((gimple *) stmt);
5765       gsi_next (&gsi);
5766     }
5767 
5768   /* Move everything from GSI to the new basic block.  */
5769   if (gsi_end_p (gsi))
5770     return new_bb;
5771 
5772   /* Split the statement list - avoid re-creating new containers as this
5773      brings ugly quadratic memory consumption in the inliner.
5774      (We are still quadratic since we need to update stmt BB pointers,
5775      sadly.)  */
5776   gsi_split_seq_before (&gsi, &list);
5777   set_bb_seq (new_bb, list);
5778   for (gsi_tgt = gsi_start (list);
5779        !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5780     gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5781 
5782   return new_bb;
5783 }
5784 
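/* A typical invocation goes through the CFG hooks, e.g. (sketch):

     edge e = split_block (bb, stmt);
     basic_block tail = e->dest;

   which lands here for GIMPLE: STMT stays as the last statement of BB
   and everything after it moves to TAIL.  */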
5785 
5786 /* Moves basic block BB after block AFTER.  */
5787 
5788 static bool
5789 gimple_move_block_after (basic_block bb, basic_block after)
5790 {
5791   if (bb->prev_bb == after)
5792     return true;
5793 
5794   unlink_block (bb);
5795   link_block (bb, after);
5796 
5797   return true;
5798 }
5799 
5800 
5801 /* Return TRUE if block BB has no executable statements, otherwise return
5802    FALSE.  */
5803 
5804 static bool
5805 gimple_empty_block_p (basic_block bb)
5806 {
5807   /* BB must have no executable statements.  */
5808   gimple_stmt_iterator gsi = gsi_after_labels (bb);
5809   if (phi_nodes (bb))
5810     return false;
5811   if (gsi_end_p (gsi))
5812     return true;
5813   if (is_gimple_debug (gsi_stmt (gsi)))
5814     gsi_next_nondebug (&gsi);
5815   return gsi_end_p (gsi);
5816 }
5817 
5818 
5819 /* Split a basic block if it ends with a conditional branch and if the
5820    other part of the block is not empty.  */
5821 
5822 static basic_block
5823 gimple_split_block_before_cond_jump (basic_block bb)
5824 {
5825   gimple *last, *split_point;
5826   gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5827   if (gsi_end_p (gsi))
5828     return NULL;
5829   last = gsi_stmt (gsi);
5830   if (gimple_code (last) != GIMPLE_COND
5831       && gimple_code (last) != GIMPLE_SWITCH)
5832     return NULL;
5833   gsi_prev (&gsi);
5834   split_point = gsi_stmt (gsi);
5835   return split_block (bb, split_point)->dest;
5836 }
5837 
5838 
5839 /* Return true if basic_block can be duplicated.  */
5840 
5841 static bool
5842 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5843 {
5844   return true;
5845 }
5846 
5847 /* Create a duplicate of the basic block BB.  NOTE: This does not
5848    preserve SSA form.  */
5849 
5850 static basic_block
5851 gimple_duplicate_bb (basic_block bb)
5852 {
5853   basic_block new_bb;
5854   gimple_stmt_iterator gsi_tgt;
5855 
5856   new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5857 
5858   /* Copy the PHI nodes.  We ignore PHI node arguments here because
5859      the incoming edges have not been set up yet.  */
5860   for (gphi_iterator gpi = gsi_start_phis (bb);
5861        !gsi_end_p (gpi);
5862        gsi_next (&gpi))
5863     {
5864       gphi *phi, *copy;
5865       phi = gpi.phi ();
5866       copy = create_phi_node (NULL_TREE, new_bb);
5867       create_new_def_for (gimple_phi_result (phi), copy,
5868 			  gimple_phi_result_ptr (copy));
5869       gimple_set_uid (copy, gimple_uid (phi));
5870     }
5871 
5872   gsi_tgt = gsi_start_bb (new_bb);
5873   for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5874        !gsi_end_p (gsi);
5875        gsi_next (&gsi))
5876     {
5877       def_operand_p def_p;
5878       ssa_op_iter op_iter;
5879       tree lhs;
5880       gimple *stmt, *copy;
5881 
5882       stmt = gsi_stmt (gsi);
5883       if (gimple_code (stmt) == GIMPLE_LABEL)
5884 	continue;
5885 
5886       /* Don't duplicate label debug stmts.  */
5887       if (gimple_debug_bind_p (stmt)
5888 	  && TREE_CODE (gimple_debug_bind_get_var (stmt))
5889 	     == LABEL_DECL)
5890 	continue;
5891 
5892       /* Create a new copy of STMT and duplicate STMT's virtual
5893 	 operands.  */
5894       copy = gimple_copy (stmt);
5895       gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5896 
5897       maybe_duplicate_eh_stmt (copy, stmt);
5898       gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5899 
5900       /* When copying around a stmt writing into a local non-user
5901 	 aggregate, make sure it won't share stack slot with other
5902 	 vars.  */
5903       lhs = gimple_get_lhs (stmt);
5904       if (lhs && TREE_CODE (lhs) != SSA_NAME)
5905 	{
5906 	  tree base = get_base_address (lhs);
5907 	  if (base
5908 	      && (TREE_CODE (base) == VAR_DECL
5909 		  || TREE_CODE (base) == RESULT_DECL)
5910 	      && DECL_IGNORED_P (base)
5911 	      && !TREE_STATIC (base)
5912 	      && !DECL_EXTERNAL (base)
5913 	      && (TREE_CODE (base) != VAR_DECL
5914 		  || !DECL_HAS_VALUE_EXPR_P (base)))
5915 	    DECL_NONSHAREABLE (base) = 1;
5916 	}
5917 
5918       /* Create new names for all the definitions created by COPY and
5919 	 add replacement mappings for each new name.  */
5920       FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5921 	create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5922     }
5923 
5924   return new_bb;
5925 }
5926 
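/* The duplicate is wired up by the caller.  The SESE duplicator below
   shows the canonical sequence (in outline):

     initialize_original_copy_tables ();
     copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy,
	       loop, split_edge_bb_loc (entry), update_dominance);
     add_phi_args_after_copy (region_copy, n_region, NULL);
     free_original_copy_tables ();

   so the PHI arguments deliberately skipped here are filled in once the
   new edges exist.  */
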
5927 /* Adds phi node arguments for edge E_COPY after basic block duplication.  */
5928 
5929 static void
5930 add_phi_args_after_copy_edge (edge e_copy)
5931 {
5932   basic_block bb, bb_copy = e_copy->src, dest;
5933   edge e;
5934   edge_iterator ei;
5935   gphi *phi, *phi_copy;
5936   tree def;
5937   gphi_iterator psi, psi_copy;
5938 
5939   if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5940     return;
5941 
5942   bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5943 
5944   if (e_copy->dest->flags & BB_DUPLICATED)
5945     dest = get_bb_original (e_copy->dest);
5946   else
5947     dest = e_copy->dest;
5948 
5949   e = find_edge (bb, dest);
5950   if (!e)
5951     {
5952       /* During loop unrolling the target of the latch edge is copied.
5953 	 In this case we are not looking for the edge to DEST, but for
5954 	 the edge to the duplicated block whose original was DEST.  */
5955       FOR_EACH_EDGE (e, ei, bb->succs)
5956 	{
5957 	  if ((e->dest->flags & BB_DUPLICATED)
5958 	      && get_bb_original (e->dest) == dest)
5959 	    break;
5960 	}
5961 
5962       gcc_assert (e != NULL);
5963     }
5964 
5965   for (psi = gsi_start_phis (e->dest),
5966        psi_copy = gsi_start_phis (e_copy->dest);
5967        !gsi_end_p (psi);
5968        gsi_next (&psi), gsi_next (&psi_copy))
5969     {
5970       phi = psi.phi ();
5971       phi_copy = psi_copy.phi ();
5972       def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5973       add_phi_arg (phi_copy, def, e_copy,
5974 		   gimple_phi_arg_location_from_edge (phi, e));
5975     }
5976 }
5977 
5978 
5979 /* Basic block BB_COPY was created by code duplication.  Add phi node
5980    arguments for edges going out of BB_COPY.  The blocks that were
5981    duplicated have BB_DUPLICATED set.  */
5982 
5983 void
5984 add_phi_args_after_copy_bb (basic_block bb_copy)
5985 {
5986   edge e_copy;
5987   edge_iterator ei;
5988 
5989   FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5990     {
5991       add_phi_args_after_copy_edge (e_copy);
5992     }
5993 }
5994 
5995 /* Blocks in the REGION_COPY array of length N_REGION were created by
5996    duplication of basic blocks.  Add phi node arguments for the edges
5997    going out of these blocks.  If E_COPY is not NULL, also add
5998    phi node arguments for its destination.  */
5999 
6000 void
6001 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6002 			 edge e_copy)
6003 {
6004   unsigned i;
6005 
6006   for (i = 0; i < n_region; i++)
6007     region_copy[i]->flags |= BB_DUPLICATED;
6008 
6009   for (i = 0; i < n_region; i++)
6010     add_phi_args_after_copy_bb (region_copy[i]);
6011   if (e_copy)
6012     add_phi_args_after_copy_edge (e_copy);
6013 
6014   for (i = 0; i < n_region; i++)
6015     region_copy[i]->flags &= ~BB_DUPLICATED;
6016 }
6017 
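/* Note on the BB_DUPLICATED protocol above: the flag is set on every
   copied block before any PHI argument is added, so that
   add_phi_args_after_copy_edge can map a copied destination back to
   its original via get_bb_original, and it is cleared again afterwards
   so later consumers of the original/copy tables see clean flags.  */
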
6018 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6019    important exit edge EXIT.  By important we mean that no SSA name defined
6020    inside the region is live over the other exit edges of the region.  All entry
6021    edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
6022    to the duplicate of the region.  Dominance and loop information is
6023    updated if UPDATE_DOMINANCE is true, but not the SSA web.  If
6024    UPDATE_DOMINANCE is false then we assume that the caller will update the
6025    dominance information after calling this function.  The new basic
6026    blocks are stored to REGION_COPY in the same order as they had in REGION,
6027    provided that REGION_COPY is not NULL.
6028    The function returns false if it is unable to copy the region,
6029    true otherwise.  */
6030 
6031 bool
6032 gimple_duplicate_sese_region (edge entry, edge exit,
6033 			    basic_block *region, unsigned n_region,
6034 			    basic_block *region_copy,
6035 			    bool update_dominance)
6036 {
6037   unsigned i;
6038   bool free_region_copy = false, copying_header = false;
6039   struct loop *loop = entry->dest->loop_father;
6040   edge exit_copy;
6041   vec<basic_block> doms;
6042   edge redirected;
6043   int total_freq = 0, entry_freq = 0;
6044   gcov_type total_count = 0, entry_count = 0;
6045 
6046   if (!can_copy_bbs_p (region, n_region))
6047     return false;
6048 
6049   /* Some sanity checking.  Note that we do not check for all possible
6050      misuses of the functions.  I.e. if you ask to copy something weird,
6051      it will work, but the state of structures probably will not be
6052      correct.  */
6053   for (i = 0; i < n_region; i++)
6054     {
6055       /* We do not handle subloops, i.e. all the blocks must belong to the
6056 	 same loop.  */
6057       if (region[i]->loop_father != loop)
6058 	return false;
6059 
6060       if (region[i] != entry->dest
6061 	  && region[i] == loop->header)
6062 	return false;
6063     }
6064 
6065   /* In case the function is used for loop header copying (which is the primary
6066      use), ensure that EXIT and its copy will be new latch and entry edges.  */
6067   if (loop->header == entry->dest)
6068     {
6069       copying_header = true;
6070 
6071       if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6072 	return false;
6073 
6074       for (i = 0; i < n_region; i++)
6075 	if (region[i] != exit->src
6076 	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6077 	  return false;
6078     }
6079 
6080   initialize_original_copy_tables ();
6081 
6082   if (copying_header)
6083     set_loop_copy (loop, loop_outer (loop));
6084   else
6085     set_loop_copy (loop, loop);
6086 
6087   if (!region_copy)
6088     {
6089       region_copy = XNEWVEC (basic_block, n_region);
6090       free_region_copy = true;
6091     }
6092 
6093   /* Record blocks outside the region that are dominated by something
6094      inside.  */
6095   if (update_dominance)
6096     {
6097       doms.create (0);
6098       doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6099     }
6100 
6101   if (entry->dest->count)
6102     {
6103       total_count = entry->dest->count;
6104       entry_count = entry->count;
6105       /* Fix up corner cases, to avoid division by zero or creation of negative
6106 	 frequencies.  */
6107       if (entry_count > total_count)
6108 	entry_count = total_count;
6109     }
6110   else
6111     {
6112       total_freq = entry->dest->frequency;
6113       entry_freq = EDGE_FREQUENCY (entry);
6114       /* Fix up corner cases, to avoid division by zero or creation of negative
6115 	 frequencies.  */
6116       if (total_freq == 0)
6117 	total_freq = 1;
6118       else if (entry_freq > total_freq)
6119 	entry_freq = total_freq;
6120     }
6121 
6122   copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6123 	    split_edge_bb_loc (entry), update_dominance);
6124   if (total_count)
6125     {
6126       scale_bbs_frequencies_gcov_type (region, n_region,
6127 				       total_count - entry_count,
6128 				       total_count);
6129       scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6130 				       total_count);
6131     }
6132   else
6133     {
6134       scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6135 				 total_freq);
6136       scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6137     }
6138 
6139   if (copying_header)
6140     {
6141       loop->header = exit->dest;
6142       loop->latch = exit->src;
6143     }
6144 
6145   /* Redirect the entry and add the phi node arguments.  */
6146   redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6147   gcc_assert (redirected != NULL);
6148   flush_pending_stmts (entry);
6149 
6150   /* Concerning updating of dominators:  We must recount dominators
6151      for entry block and its copy.  Anything that is outside of the
6152      region, but was dominated by something inside needs recounting as
6153      well.  */
6154   if (update_dominance)
6155     {
6156       set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6157       doms.safe_push (get_bb_original (entry->dest));
6158       iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6159       doms.release ();
6160     }
6161 
6162   /* Add the other PHI node arguments.  */
6163   add_phi_args_after_copy (region_copy, n_region, NULL);
6164 
6165   if (free_region_copy)
6166     free (region_copy);
6167 
6168   free_original_copy_tables ();
6169   return true;
6170 }
6171 
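/* A sketch of the primary use, loop header copying (this mirrors what
   the copy-header pass does; BBS/N_BBS are assumed to hold the header
   blocks collected by the caller, and EXIT the edge that leaves the
   loop from the header's test):

     edge entry = loop_preheader_edge (loop);
     bool ok = gimple_duplicate_sese_region (entry, exit, bbs, n_bbs,
					     copied_bbs, true);

   on success the copied header runs once on entry, and EXIT and its
   copy have become the new latch and entry edges, as arranged above.  */
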
6172 /* Checks if BB is part of the region defined by the N_REGION blocks in BBS.  */
6173 static bool
6174 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6175 {
6176   unsigned int n;
6177 
6178   for (n = 0; n < n_region; n++)
6179     {
6180       if (bb == bbs[n])
6181 	return true;
6182     }
6183   return false;
6184 }
6185 
6186 /* Duplicates REGION consisting of N_REGION blocks.  The new blocks
6187    are stored to REGION_COPY in the same order in which they appear
6188    in REGION, if REGION_COPY is not NULL.  ENTRY is the entry to
6189    the region, EXIT an exit from it.  The condition guarding EXIT
6190    is moved to ENTRY.  Returns true if duplication succeeds, false
6191    otherwise.
6192 
6193    For example,
6194 
6195    some_code;
6196    if (cond)
6197      A;
6198    else
6199      B;
6200 
6201    is transformed to
6202 
6203    if (cond)
6204      {
6205        some_code;
6206        A;
6207      }
6208    else
6209      {
6210        some_code;
6211        B;
6212      }
6213 */
6214 
6215 bool
6216 gimple_duplicate_sese_tail (edge entry, edge exit,
6217 			    basic_block *region, unsigned n_region,
6218 			    basic_block *region_copy)
6219 {
6220   unsigned i;
6221   bool free_region_copy = false;
6222   struct loop *loop = exit->dest->loop_father;
6223   struct loop *orig_loop = entry->dest->loop_father;
6224   basic_block switch_bb, entry_bb, nentry_bb;
6225   vec<basic_block> doms;
6226   int total_freq = 0, exit_freq = 0;
6227   gcov_type total_count = 0, exit_count = 0;
6228   edge exits[2], nexits[2], e;
6229   gimple_stmt_iterator gsi;
6230   gimple *cond_stmt;
6231   edge sorig, snew;
6232   basic_block exit_bb;
6233   gphi_iterator psi;
6234   gphi *phi;
6235   tree def;
6236   struct loop *target, *aloop, *cloop;
6237 
6238   gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6239   exits[0] = exit;
6240   exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6241 
6242   if (!can_copy_bbs_p (region, n_region))
6243     return false;
6244 
6245   initialize_original_copy_tables ();
6246   set_loop_copy (orig_loop, loop);
6247 
6248   target = loop;
6249   for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6250     {
6251       if (bb_part_of_region_p (aloop->header, region, n_region))
6252 	{
6253 	  cloop = duplicate_loop (aloop, target);
6254 	  duplicate_subloops (aloop, cloop);
6255 	}
6256     }
6257 
6258   if (!region_copy)
6259     {
6260       region_copy = XNEWVEC (basic_block, n_region);
6261       free_region_copy = true;
6262     }
6263 
6264   gcc_assert (!need_ssa_update_p (cfun));
6265 
6266   /* Record blocks outside the region that are dominated by something
6267      inside.  */
6268   doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6269 
6270   if (exit->src->count)
6271     {
6272       total_count = exit->src->count;
6273       exit_count = exit->count;
6274       /* Fix up corner cases, to avoid division by zero or creation of negative
6275 	 frequencies.  */
6276       if (exit_count > total_count)
6277 	exit_count = total_count;
6278     }
6279   else
6280     {
6281       total_freq = exit->src->frequency;
6282       exit_freq = EDGE_FREQUENCY (exit);
6283       /* Fix up corner cases, to avoid division by zero or creation of negative
6284 	 frequencies.  */
6285       if (total_freq == 0)
6286 	total_freq = 1;
6287       if (exit_freq > total_freq)
6288 	exit_freq = total_freq;
6289     }
6290 
6291   copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6292 	    split_edge_bb_loc (exit), true);
6293   if (total_count)
6294     {
6295       scale_bbs_frequencies_gcov_type (region, n_region,
6296 				       total_count - exit_count,
6297 				       total_count);
6298       scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6299 				       total_count);
6300     }
6301   else
6302     {
6303       scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6304 				 total_freq);
6305       scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6306     }
6307 
6308   /* Create the switch block, and put the exit condition to it.  */
6309   entry_bb = entry->dest;
6310   nentry_bb = get_bb_copy (entry_bb);
6311   if (!last_stmt (entry->src)
6312       || !stmt_ends_bb_p (last_stmt (entry->src)))
6313     switch_bb = entry->src;
6314   else
6315     switch_bb = split_edge (entry);
6316   set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6317 
6318   gsi = gsi_last_bb (switch_bb);
6319   cond_stmt = last_stmt (exit->src);
6320   gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6321   cond_stmt = gimple_copy (cond_stmt);
6322 
6323   gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6324 
6325   sorig = single_succ_edge (switch_bb);
6326   sorig->flags = exits[1]->flags;
6327   snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6328 
6329   /* Register the new edge from SWITCH_BB in loop exit lists.  */
6330   rescan_loop_exit (snew, true, false);
6331 
6332   /* Add the PHI node arguments.  */
6333   add_phi_args_after_copy (region_copy, n_region, snew);
6334 
6335   /* Get rid of now superfluous conditions and associated edges (and phi node
6336      arguments).  */
6337   exit_bb = exit->dest;
6338 
6339   e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6340   PENDING_STMT (e) = NULL;
6341 
6342   /* The latch of ORIG_LOOP was copied, and so was the backedge
6343      to the original header.  We redirect this backedge to EXIT_BB.  */
6344   for (i = 0; i < n_region; i++)
6345     if (get_bb_original (region_copy[i]) == orig_loop->latch)
6346       {
6347 	gcc_assert (single_succ_edge (region_copy[i]));
6348 	e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6349 	PENDING_STMT (e) = NULL;
6350 	for (psi = gsi_start_phis (exit_bb);
6351 	     !gsi_end_p (psi);
6352 	     gsi_next (&psi))
6353 	  {
6354 	    phi = psi.phi ();
6355 	    def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6356 	    add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6357 	  }
6358       }
6359   e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6360   PENDING_STMT (e) = NULL;
6361 
6362   /* Anything that is outside of the region, but was dominated by something
6363      inside needs to update dominance info.  */
6364   iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6365   doms.release ();
6366   /* Update the SSA web.  */
6367   update_ssa (TODO_update_ssa);
6368 
6369   if (free_region_copy)
6370     free (region_copy);
6371 
6372   free_original_copy_tables ();
6373   return true;
6374 }
6375 
6376 /* Add all the blocks dominated by ENTRY to the array BBS_P.  Stop
6377    adding blocks when the dominator traversal reaches EXIT.  This
6378    function silently assumes that ENTRY strictly dominates EXIT.  */
6379 
6380 void
6381 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6382 			      vec<basic_block> *bbs_p)
6383 {
6384   basic_block son;
6385 
6386   for (son = first_dom_son (CDI_DOMINATORS, entry);
6387        son;
6388        son = next_dom_son (CDI_DOMINATORS, son))
6389     {
6390       bbs_p->safe_push (son);
6391       if (son != exit)
6392 	gather_blocks_in_sese_region (son, exit, bbs_p);
6393     }
6394 }
6395 
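/* Example (a sketch mirroring move_sese_region_to_fn below):

     vec<basic_block> bbs;
     bbs.create (0);
     bbs.safe_push (entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);

   ENTRY_BB must be pushed by hand because the recursion only records
   dominator children, never the root it starts from.  */
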
6396 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6397    The duplicates are recorded in VARS_MAP.  */
6398 
6399 static void
6400 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6401 			   tree to_context)
6402 {
6403   tree t = *tp, new_t;
6404   struct function *f = DECL_STRUCT_FUNCTION (to_context);
6405 
6406   if (DECL_CONTEXT (t) == to_context)
6407     return;
6408 
6409   bool existed;
6410   tree &loc = vars_map->get_or_insert (t, &existed);
6411 
6412   if (!existed)
6413     {
6414       if (SSA_VAR_P (t))
6415 	{
6416 	  new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6417 	  add_local_decl (f, new_t);
6418 	}
6419       else
6420 	{
6421 	  gcc_assert (TREE_CODE (t) == CONST_DECL);
6422 	  new_t = copy_node (t);
6423 	}
6424       DECL_CONTEXT (new_t) = to_context;
6425 
6426       loc = new_t;
6427     }
6428   else
6429     new_t = loc;
6430 
6431   *tp = new_t;
6432 }
6433 
6434 
6435 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6436    VARS_MAP maps old ssa names and var_decls to the new ones.  */
6437 
6438 static tree
6439 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6440 		  tree to_context)
6441 {
6442   tree new_name;
6443 
6444   gcc_assert (!virtual_operand_p (name));
6445 
6446   tree *loc = vars_map->get (name);
6447 
6448   if (!loc)
6449     {
6450       tree decl = SSA_NAME_VAR (name);
6451       if (decl)
6452 	{
6453 	  gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6454 	  replace_by_duplicate_decl (&decl, vars_map, to_context);
6455 	  new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6456 				       decl, SSA_NAME_DEF_STMT (name));
6457 	}
6458       else
6459 	new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6460 				     name, SSA_NAME_DEF_STMT (name));
6461 
6462       /* Now that we've used the def stmt to define new_name, make sure it
6463 	 doesn't define name anymore.  */
6464       SSA_NAME_DEF_STMT (name) = NULL;
6465 
6466       vars_map->put (name, new_name);
6467     }
6468   else
6469     new_name = *loc;
6470 
6471   return new_name;
6472 }
6473 
6474 struct move_stmt_d
6475 {
6476   tree orig_block;
6477   tree new_block;
6478   tree from_context;
6479   tree to_context;
6480   hash_map<tree, tree> *vars_map;
6481   htab_t new_label_map;
6482   hash_map<void *, void *> *eh_map;
6483   bool remap_decls_p;
6484 };
6485 
6486 /* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
6487    contained in *TP if it has been ORIG_BLOCK previously and change the
6488    contained in *TP if it was previously ORIG_BLOCK, and change the
6489 
6490 static tree
6491 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6492 {
6493   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6494   struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6495   tree t = *tp;
6496 
6497   if (EXPR_P (t))
6498     {
6499       tree block = TREE_BLOCK (t);
6500       if (block == p->orig_block
6501 	  || (p->orig_block == NULL_TREE
6502 	      && block != NULL_TREE))
6503 	TREE_SET_BLOCK (t, p->new_block);
6504       else if (flag_checking && block != NULL_TREE)
6505 	{
6506 	  while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6507 	    block = BLOCK_SUPERCONTEXT (block);
6508 	  gcc_assert (block == p->orig_block);
6509 	}
6510     }
6511   else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6512     {
6513       if (TREE_CODE (t) == SSA_NAME)
6514 	*tp = replace_ssa_name (t, p->vars_map, p->to_context);
6515       else if (TREE_CODE (t) == PARM_DECL
6516 	       && gimple_in_ssa_p (cfun))
6517 	*tp = *(p->vars_map->get (t));
6518       else if (TREE_CODE (t) == LABEL_DECL)
6519 	{
6520 	  if (p->new_label_map)
6521 	    {
6522 	      struct tree_map in, *out;
6523 	      in.base.from = t;
6524 	      out = (struct tree_map *)
6525 		htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6526 	      if (out)
6527 		*tp = t = out->to;
6528 	    }
6529 
6530 	  /* For FORCED_LABELs we can end up with references from other
6531 	     functions if some SESE regions are outlined.  It is UB to
6532 	     jump in between them, but they could be used just for printing
6533 	     addresses etc.  In that case, DECL_CONTEXT on the label should
6534 	     be the function containing the glabel stmt with that LABEL_DECL,
6535 	     rather than whatever function a reference to the label was seen
6536 	     rather than whichever function a reference to the label was
6537 	     last seen in.  */
6538 	    DECL_CONTEXT (t) = p->to_context;
6539 	}
6540       else if (p->remap_decls_p)
6541 	{
6542 	  /* Replace T with its duplicate.  T should no longer appear in the
6543 	     parent function, so this looks wasteful; however, it may appear
6544 	     in referenced_vars, and more importantly, as virtual operands of
6545 	     statements, and in alias lists of other variables.  It would be
6546 	     quite difficult to expunge it from all those places.  ??? It might
6547 	     suffice to do this for addressable variables.  */
6548 	  if ((TREE_CODE (t) == VAR_DECL
6549 	       && !is_global_var (t))
6550 	      || TREE_CODE (t) == CONST_DECL)
6551 	    replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6552 	}
6553       *walk_subtrees = 0;
6554     }
6555   else if (TYPE_P (t))
6556     *walk_subtrees = 0;
6557 
6558   return NULL_TREE;
6559 }
6560 
6561 /* Helper for move_stmt_r.  Given an EH region number for the source
6562    function, map that to the duplicate EH region number in the dest.  */
6563 
6564 static int
6565 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6566 {
6567   eh_region old_r, new_r;
6568 
6569   old_r = get_eh_region_from_number (old_nr);
6570   new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6571 
6572   return new_r->index;
6573 }
6574 
6575 /* Similar, but operate on INTEGER_CSTs.  */
6576 
6577 static tree
6578 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6579 {
6580   int old_nr, new_nr;
6581 
6582   old_nr = tree_to_shwi (old_t_nr);
6583   new_nr = move_stmt_eh_region_nr (old_nr, p);
6584 
6585   return build_int_cst (integer_type_node, new_nr);
6586 }
6587 
6588 /* Like move_stmt_op, but for gimple statements.
6589 
6590    Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
6591    contained in the current statement in *GSI_P and change the
6592    DECL_CONTEXT of every local variable referenced in the current
6593    statement.  */
6594 
6595 static tree
6596 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6597 	     struct walk_stmt_info *wi)
6598 {
6599   struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6600   gimple *stmt = gsi_stmt (*gsi_p);
6601   tree block = gimple_block (stmt);
6602 
6603   if (block == p->orig_block
6604       || (p->orig_block == NULL_TREE
6605 	  && block != NULL_TREE))
6606     gimple_set_block (stmt, p->new_block);
6607 
6608   switch (gimple_code (stmt))
6609     {
6610     case GIMPLE_CALL:
6611       /* Remap the region numbers for __builtin_eh_{pointer,filter}.  */
6612       {
6613 	tree r, fndecl = gimple_call_fndecl (stmt);
6614 	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6615 	  switch (DECL_FUNCTION_CODE (fndecl))
6616 	    {
6617 	    case BUILT_IN_EH_COPY_VALUES:
6618 	      r = gimple_call_arg (stmt, 1);
6619 	      r = move_stmt_eh_region_tree_nr (r, p);
6620 	      gimple_call_set_arg (stmt, 1, r);
6621 	      /* FALLTHRU */
6622 
6623 	    case BUILT_IN_EH_POINTER:
6624 	    case BUILT_IN_EH_FILTER:
6625 	      r = gimple_call_arg (stmt, 0);
6626 	      r = move_stmt_eh_region_tree_nr (r, p);
6627 	      gimple_call_set_arg (stmt, 0, r);
6628 	      break;
6629 
6630 	    default:
6631 	      break;
6632 	    }
6633       }
6634       break;
6635 
6636     case GIMPLE_RESX:
6637       {
6638 	gresx *resx_stmt = as_a <gresx *> (stmt);
6639 	int r = gimple_resx_region (resx_stmt);
6640 	r = move_stmt_eh_region_nr (r, p);
6641 	gimple_resx_set_region (resx_stmt, r);
6642       }
6643       break;
6644 
6645     case GIMPLE_EH_DISPATCH:
6646       {
6647 	geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6648 	int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6649 	r = move_stmt_eh_region_nr (r, p);
6650 	gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6651       }
6652       break;
6653 
6654     case GIMPLE_OMP_RETURN:
6655     case GIMPLE_OMP_CONTINUE:
6656       break;
6657 
6658     case GIMPLE_LABEL:
6659       {
6660 	/* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
6661 	   so that such labels can be referenced from other regions.
6662 	   Make sure to update it when seeing a GIMPLE_LABEL though,
6663 	   that is the owner of the label.  */
6664 	walk_gimple_op (stmt, move_stmt_op, wi);
6665 	*handled_ops_p = true;
6666 	tree label = gimple_label_label (as_a <glabel *> (stmt));
6667 	if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
6668 	  DECL_CONTEXT (label) = p->to_context;
6669       }
6670       break;
6671 
6672     default:
6673       if (is_gimple_omp (stmt))
6674 	{
6675 	  /* Do not remap variables inside OMP directives.  Variables
6676 	     referenced in clauses and directive header belong to the
6677 	     parent function and should not be moved into the child
6678 	     function.  */
6679 	  bool save_remap_decls_p = p->remap_decls_p;
6680 	  p->remap_decls_p = false;
6681 	  *handled_ops_p = true;
6682 
6683 	  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6684 			       move_stmt_op, wi);
6685 
6686 	  p->remap_decls_p = save_remap_decls_p;
6687 	}
6688       break;
6689     }
6690 
6691   return NULL_TREE;
6692 }
6693 
6694 /* Move basic block BB from function CFUN to function DEST_FN.  The
6695    block is moved out of the original linked list and placed after
6696    block AFTER in the new list.  Also, the block is removed from the
6697    original array of blocks and placed in DEST_FN's array of blocks.
6698    If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6699    updated to reflect the moved edges.
6700 
6701    The local variables are remapped to new instances, VARS_MAP is used
6702    to record the mapping.  */
6703 
6704 static void
6705 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6706 		  basic_block after, bool update_edge_count_p,
6707 		  struct move_stmt_d *d)
6708 {
6709   struct control_flow_graph *cfg;
6710   edge_iterator ei;
6711   edge e;
6712   gimple_stmt_iterator si;
6713   unsigned old_len, new_len;
6714 
6715   /* Remove BB from dominance structures.  */
6716   delete_from_dominance_info (CDI_DOMINATORS, bb);
6717 
6718   /* Move BB from its current loop to the copy in the new function.  */
6719   if (current_loops)
6720     {
6721       struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6722       if (new_loop)
6723 	bb->loop_father = new_loop;
6724     }
6725 
6726   /* Link BB to the new linked list.  */
6727   move_block_after (bb, after);
6728 
6729   /* Update the edge count in the corresponding flowgraphs.  */
6730   if (update_edge_count_p)
6731     FOR_EACH_EDGE (e, ei, bb->succs)
6732       {
6733 	cfun->cfg->x_n_edges--;
6734 	dest_cfun->cfg->x_n_edges++;
6735       }
6736 
6737   /* Remove BB from the original basic block array.  */
6738   (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6739   cfun->cfg->x_n_basic_blocks--;
6740 
6741   /* Grow DEST_CFUN's basic block array if needed.  */
6742   cfg = dest_cfun->cfg;
6743   cfg->x_n_basic_blocks++;
6744   if (bb->index >= cfg->x_last_basic_block)
6745     cfg->x_last_basic_block = bb->index + 1;
6746 
6747   old_len = vec_safe_length (cfg->x_basic_block_info);
6748   if ((unsigned) cfg->x_last_basic_block >= old_len)
6749     {
6750       new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6751       vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6752     }
6753 
6754   (*cfg->x_basic_block_info)[bb->index] = bb;
6755 
6756   /* Remap the variables in phi nodes.  */
6757   for (gphi_iterator psi = gsi_start_phis (bb);
6758        !gsi_end_p (psi); )
6759     {
6760       gphi *phi = psi.phi ();
6761       use_operand_p use;
6762       tree op = PHI_RESULT (phi);
6763       ssa_op_iter oi;
6764       unsigned i;
6765 
6766       if (virtual_operand_p (op))
6767 	{
6768 	  /* Remove the phi nodes for virtual operands (alias analysis will be
6769 	     run for the new function, anyway).  */
6770 	  remove_phi_node (&psi, true);
6771 	  continue;
6772 	}
6773 
6774       SET_PHI_RESULT (phi,
6775 		      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6776       FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6777 	{
6778 	  op = USE_FROM_PTR (use);
6779 	  if (TREE_CODE (op) == SSA_NAME)
6780 	    SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6781 	}
6782 
6783       for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6784 	{
6785 	  location_t locus = gimple_phi_arg_location (phi, i);
6786 	  tree block = LOCATION_BLOCK (locus);
6787 
6788 	  if (locus == UNKNOWN_LOCATION)
6789 	    continue;
6790 	  if (d->orig_block == NULL_TREE || block == d->orig_block)
6791 	    {
6792 	      locus = set_block (locus, d->new_block);
6793 	      gimple_phi_arg_set_location (phi, i, locus);
6794 	    }
6795 	}
6796 
6797       gsi_next (&psi);
6798     }
6799 
6800   for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6801     {
6802       gimple *stmt = gsi_stmt (si);
6803       struct walk_stmt_info wi;
6804 
6805       memset (&wi, 0, sizeof (wi));
6806       wi.info = d;
6807       walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6808 
6809       if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
6810 	{
6811 	  tree label = gimple_label_label (label_stmt);
6812 	  int uid = LABEL_DECL_UID (label);
6813 
6814 	  gcc_assert (uid > -1);
6815 
6816 	  old_len = vec_safe_length (cfg->x_label_to_block_map);
6817 	  if (old_len <= (unsigned) uid)
6818 	    {
6819 	      new_len = 3 * uid / 2 + 1;
6820 	      vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6821 	    }
6822 
6823 	  (*cfg->x_label_to_block_map)[uid] = bb;
6824 	  (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6825 
6826 	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6827 
6828 	  if (uid >= dest_cfun->cfg->last_label_uid)
6829 	    dest_cfun->cfg->last_label_uid = uid + 1;
6830 	}
6831 
6832       maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6833       remove_stmt_from_eh_lp_fn (cfun, stmt);
6834 
6835       gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6836       gimple_remove_stmt_histograms (cfun, stmt);
6837 
6838       /* We cannot leave any operands allocated from the operand caches of
6839 	 the current function.  */
6840       free_stmt_operands (cfun, stmt);
6841       push_cfun (dest_cfun);
6842       update_stmt (stmt);
6843       pop_cfun ();
6844     }
6845 
6846   FOR_EACH_EDGE (e, ei, bb->succs)
6847     if (e->goto_locus != UNKNOWN_LOCATION)
6848       {
6849 	tree block = LOCATION_BLOCK (e->goto_locus);
6850 	if (d->orig_block == NULL_TREE
6851 	    || block == d->orig_block)
6852 	  e->goto_locus = set_block (e->goto_locus, d->new_block);
6853       }
6854 }
6855 
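/* Growth arithmetic used above, for reference: the basic block array
   grows to LAST + (LAST + 3) / 4, i.e. by roughly a quarter rounded
   up, and the label map to 3 * UID / 2 + 1, i.e. by roughly half, so
   repeated block moves stay amortized-linear.  */
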
6856 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6857    the outermost EH region.  Use REGION as the incoming base EH region.  */
6858 
6859 static eh_region
6860 find_outermost_region_in_block (struct function *src_cfun,
6861 				basic_block bb, eh_region region)
6862 {
6863   gimple_stmt_iterator si;
6864 
6865   for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6866     {
6867       gimple *stmt = gsi_stmt (si);
6868       eh_region stmt_region;
6869       int lp_nr;
6870 
6871       lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6872       stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6873       if (stmt_region)
6874 	{
6875 	  if (region == NULL)
6876 	    region = stmt_region;
6877 	  else if (stmt_region != region)
6878 	    {
6879 	      region = eh_region_outermost (src_cfun, stmt_region, region);
6880 	      gcc_assert (region != NULL);
6881 	    }
6882 	}
6883     }
6884 
6885   return region;
6886 }
6887 
6888 static tree
6889 new_label_mapper (tree decl, void *data)
6890 {
6891   htab_t hash = (htab_t) data;
6892   struct tree_map *m;
6893   void **slot;
6894 
6895   gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6896 
6897   m = XNEW (struct tree_map);
6898   m->hash = DECL_UID (decl);
6899   m->base.from = decl;
6900   m->to = create_artificial_label (UNKNOWN_LOCATION);
6901   LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6902   if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6903     cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6904 
6905   slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6906   gcc_assert (*slot == NULL);
6907 
6908   *slot = m;
6909 
6910   return m->to;
6911 }
6912 
6913 /* Tree walker to replace the decls used inside value expressions by
6914    duplicates.  */
6915 
6916 static tree
6917 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
6918 {
6919   struct replace_decls_d *rd = (struct replace_decls_d *)data;
6920 
6921   switch (TREE_CODE (*tp))
6922     {
6923     case VAR_DECL:
6924     case PARM_DECL:
6925     case RESULT_DECL:
6926       replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
6927       break;
6928     default:
6929       break;
6930     }
6931 
6932   if (IS_TYPE_OR_DECL_P (*tp))
6933     *walk_subtrees = false;
6934 
6935   return NULL;
6936 }
6937 
6938 /* Change DECL_CONTEXT of all BLOCK_VARS in BLOCK, including
6939    subblocks.  */
6940 
6941 static void
6942 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
6943 				  tree to_context)
6944 {
6945   tree *tp, t;
6946 
6947   for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6948     {
6949       t = *tp;
6950       if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6951 	continue;
6952       replace_by_duplicate_decl (&t, vars_map, to_context);
6953       if (t != *tp)
6954 	{
6955 	  if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6956 	    {
6957 	      tree x = DECL_VALUE_EXPR (*tp);
6958 	      struct replace_decls_d rd = { vars_map, to_context };
6959 	      x = unshare_expr (x);
6960 	      walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
6961 	      SET_DECL_VALUE_EXPR (t, x);
6962 	      DECL_HAS_VALUE_EXPR_P (t) = 1;
6963 	    }
6964 	  DECL_CHAIN (t) = DECL_CHAIN (*tp);
6965 	  *tp = t;
6966 	}
6967     }
6968 
6969   for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6970     replace_block_vars_by_duplicates (block, vars_map, to_context);
6971 }
6972 
6973 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6974    from FN1 to FN2.  */
6975 
6976 static void
6977 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6978 			      struct loop *loop)
6979 {
6980   /* Discard it from the old loop array.  */
6981   (*get_loops (fn1))[loop->num] = NULL;
6982 
6983   /* Place it in the new loop array, assigning it a new number.  */
6984   loop->num = number_of_loops (fn2);
6985   vec_safe_push (loops_for_fn (fn2)->larray, loop);
6986 
6987   /* Recurse to children.  */
6988   for (loop = loop->inner; loop; loop = loop->next)
6989     fixup_loop_arrays_after_move (fn1, fn2, loop);
6990 }
6991 
6992 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
6993    delimited by ENTRY and EXIT, possibly containing noreturn blocks.  */
6994 
6995 DEBUG_FUNCTION void
6996 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
6997 {
6998   basic_block bb;
6999   edge_iterator ei;
7000   edge e;
7001   bitmap bbs = BITMAP_ALLOC (NULL);
7002   int i;
7003 
7004   gcc_assert (entry != NULL);
7005   gcc_assert (entry != exit);
7006   gcc_assert (bbs_p != NULL);
7007 
7008   gcc_assert (bbs_p->length () > 0);
7009 
7010   FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7011     bitmap_set_bit (bbs, bb->index);
7012 
7013   gcc_assert (bitmap_bit_p (bbs, entry->index));
7014   gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7015 
7016   FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7017     {
7018       if (bb == entry)
7019 	{
7020 	  gcc_assert (single_pred_p (entry));
7021 	  gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7022 	}
7023       else
7024 	for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7025 	  {
7026 	    e = ei_edge (ei);
7027 	    gcc_assert (bitmap_bit_p (bbs, e->src->index));
7028 	  }
7029 
7030       if (bb == exit)
7031 	{
7032 	  gcc_assert (single_succ_p (exit));
7033 	  gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7034 	}
7035       else
7036 	for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7037 	  {
7038 	    e = ei_edge (ei);
7039 	    gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7040 	  }
7041     }
7042 
7043   BITMAP_FREE (bbs);
7044 }
7045 
7046 /* If FROM is an SSA_NAME, mark the version in bitmap DATA.  */
7047 
7048 bool
7049 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7050 {
7051   bitmap release_names = (bitmap)data;
7052 
7053   if (TREE_CODE (from) != SSA_NAME)
7054     return true;
7055 
7056   bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7057   return true;
7058 }
7059 
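/* This is shaped as a hash_map traversal callback; a caller holding
   the VARS_MAP built by replace_ssa_name can collect the versions to
   release with, e.g. (sketch):

     bitmap release_names = BITMAP_ALLOC (NULL);
     vars_map->traverse<void *, gather_ssa_name_hash_map_from>
       (release_names);

   and afterwards release each marked SSA version in the source
   function.  */
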
7060 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7061    EXIT_BB to function DEST_CFUN.  The whole region is replaced by a
7062    single basic block in the original CFG and the new basic block is
7063    returned.  DEST_CFUN must not have a CFG yet.
7064 
7065    Note that the region need not be a pure SESE region.  Blocks inside
7066    the region may contain calls to abort/exit.  The only restriction
7067    is that ENTRY_BB should be the only entry point and it must
7068    dominate EXIT_BB.
7069 
7070    Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7071    function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7072    to the new function.
7073 
7074    All local variables referenced in the region are assumed to be in
7075    the corresponding BLOCK_VARS and unexpanded variable lists
7076    associated with DEST_CFUN.
7077 
7078    TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7079    reimplement move_sese_region_to_fn by duplicating the region rather than
7080    moving it.  */
7081 
7082 basic_block
7083 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7084 		        basic_block exit_bb, tree orig_block)
7085 {
7086   vec<basic_block> bbs, dom_bbs;
7087   basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7088   basic_block after, bb, *entry_pred, *exit_succ, abb;
7089   struct function *saved_cfun = cfun;
7090   int *entry_flag, *exit_flag;
7091   unsigned *entry_prob, *exit_prob;
7092   unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7093   edge e;
7094   edge_iterator ei;
7095   htab_t new_label_map;
7096   hash_map<void *, void *> *eh_map;
7097   struct loop *loop = entry_bb->loop_father;
7098   struct loop *loop0 = get_loop (saved_cfun, 0);
7099   struct move_stmt_d d;
7100 
7101   /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7102      region.  */
7103   gcc_assert (entry_bb != exit_bb
7104               && (!exit_bb
7105 		  || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7106 
7107   /* Collect all the blocks in the region.  Manually add ENTRY_BB
7108      because gather_blocks_in_sese_region won't add it.  */
7109   bbs.create (0);
7110   bbs.safe_push (entry_bb);
7111   gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7112 
7113   if (flag_checking)
7114     verify_sese (entry_bb, exit_bb, &bbs);
7115 
7116   /* The blocks that used to be dominated by something in BBS will now be
7117      dominated by the new block.  */
7118   dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7119 				     bbs.address (),
7120 				     bbs.length ());
7121 
7122   /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG.  We need to remember
7123      the predecessor edges to ENTRY_BB and the successor edges to
7124      EXIT_BB so that we can re-attach them to the new basic block that
7125      will replace the region.  */
7126   num_entry_edges = EDGE_COUNT (entry_bb->preds);
7127   entry_pred = XNEWVEC (basic_block, num_entry_edges);
7128   entry_flag = XNEWVEC (int, num_entry_edges);
7129   entry_prob = XNEWVEC (unsigned, num_entry_edges);
7130   i = 0;
7131   for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7132     {
7133       entry_prob[i] = e->probability;
7134       entry_flag[i] = e->flags;
7135       entry_pred[i++] = e->src;
7136       remove_edge (e);
7137     }
7138 
7139   if (exit_bb)
7140     {
7141       num_exit_edges = EDGE_COUNT (exit_bb->succs);
7142       exit_succ = XNEWVEC (basic_block, num_exit_edges);
7143       exit_flag = XNEWVEC (int, num_exit_edges);
7144       exit_prob = XNEWVEC (unsigned, num_exit_edges);
7145       i = 0;
7146       for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7147 	{
7148 	  exit_prob[i] = e->probability;
7149 	  exit_flag[i] = e->flags;
7150 	  exit_succ[i++] = e->dest;
7151 	  remove_edge (e);
7152 	}
7153     }
7154   else
7155     {
7156       num_exit_edges = 0;
7157       exit_succ = NULL;
7158       exit_flag = NULL;
7159       exit_prob = NULL;
7160     }
7161 
7162   /* Switch context to the child function to initialize DEST_FN's CFG.  */
7163   gcc_assert (dest_cfun->cfg == NULL);
7164   push_cfun (dest_cfun);
7165 
7166   init_empty_tree_cfg ();
7167 
7168   /* Initialize EH information for the new function.  */
7169   eh_map = NULL;
7170   new_label_map = NULL;
7171   if (saved_cfun->eh)
7172     {
7173       eh_region region = NULL;
7174 
7175       FOR_EACH_VEC_ELT (bbs, i, bb)
7176 	region = find_outermost_region_in_block (saved_cfun, bb, region);
7177 
7178       init_eh_for_function ();
7179       if (region != NULL)
7180 	{
7181 	  new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7182 	  eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7183 					 new_label_mapper, new_label_map);
7184 	}
7185     }
7186 
7187   /* Initialize an empty loop tree.  */
7188   struct loops *loops = ggc_cleared_alloc<struct loops> ();
7189   init_loops_structure (dest_cfun, loops, 1);
7190   loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7191   set_loops_for_fn (dest_cfun, loops);
7192 
7193   /* Move the outlined loop tree part.  */
7194   num_nodes = bbs.length ();
7195   FOR_EACH_VEC_ELT (bbs, i, bb)
7196     {
7197       if (bb->loop_father->header == bb)
7198 	{
7199 	  struct loop *this_loop = bb->loop_father;
7200 	  struct loop *outer = loop_outer (this_loop);
7201 	  if (outer == loop
7202 	      /* If the SESE region contains some bbs ending with
7203 		 a noreturn call, those are considered to belong
7204 		 to the outermost loop in saved_cfun, rather than
7205 		 the entry_bb's loop_father.  */
7206 	      || outer == loop0)
7207 	    {
7208 	      if (outer != loop)
7209 		num_nodes -= this_loop->num_nodes;
7210 	      flow_loop_tree_node_remove (bb->loop_father);
7211 	      flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7212 	      fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7213 	    }
7214 	}
7215       else if (bb->loop_father == loop0 && loop0 != loop)
7216 	num_nodes--;
7217 
7218       /* Remove loop exits from the outlined region.  */
7219       if (loops_for_fn (saved_cfun)->exits)
7220 	FOR_EACH_EDGE (e, ei, bb->succs)
7221 	  {
7222 	    struct loops *l = loops_for_fn (saved_cfun);
7223 	    loop_exit **slot
7224 	      = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7225 					       NO_INSERT);
7226 	    if (slot)
7227 	      l->exits->clear_slot (slot);
7228 	  }
7229     }
7230 
7231 
7232   /* Adjust the number of blocks in the tree root of the outlined part.  */
7233   get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7234 
7235   /* Set up a mapping to be used by move_block_to_fn.  */
7236   loop->aux = current_loops->tree_root;
7237   loop0->aux = current_loops->tree_root;
7238 
7239   pop_cfun ();
7240 
7241   /* Move blocks from BBS into DEST_CFUN.  */
7242   gcc_assert (bbs.length () >= 2);
7243   after = dest_cfun->cfg->x_entry_block_ptr;
7244   hash_map<tree, tree> vars_map;
7245 
7246   memset (&d, 0, sizeof (d));
7247   d.orig_block = orig_block;
7248   d.new_block = DECL_INITIAL (dest_cfun->decl);
7249   d.from_context = cfun->decl;
7250   d.to_context = dest_cfun->decl;
7251   d.vars_map = &vars_map;
7252   d.new_label_map = new_label_map;
7253   d.eh_map = eh_map;
7254   d.remap_decls_p = true;
7255 
7256   if (gimple_in_ssa_p (cfun))
7257     for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7258       {
7259 	tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7260 	set_ssa_default_def (dest_cfun, arg, narg);
7261 	vars_map.put (arg, narg);
7262       }
7263 
7264   FOR_EACH_VEC_ELT (bbs, i, bb)
7265     {
7266       /* No need to update edge counts on the last block.  It has
7267 	 already been updated earlier when we detached the region from
7268 	 the original CFG.  */
7269       move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7270       after = bb;
7271     }
7272 
7273   loop->aux = NULL;
7274   loop0->aux = NULL;
7275   /* Loop sizes are no longer correct, fix them up.  */
7276   loop->num_nodes -= num_nodes;
7277   for (struct loop *outer = loop_outer (loop);
7278        outer; outer = loop_outer (outer))
7279     outer->num_nodes -= num_nodes;
7280   loop0->num_nodes -= bbs.length () - num_nodes;
7281 
7282   if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7283     {
7284       struct loop *aloop;
7285       for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7286 	if (aloop != NULL)
7287 	  {
7288 	    if (aloop->simduid)
7289 	      {
7290 		replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7291 					   d.to_context);
7292 		dest_cfun->has_simduid_loops = true;
7293 	      }
7294 	    if (aloop->force_vectorize)
7295 	      dest_cfun->has_force_vectorize_loops = true;
7296 	  }
7297     }
7298 
7299   /* Rewire BLOCK_SUBBLOCKS of orig_block.  */
7300   if (orig_block)
7301     {
7302       tree block;
7303       gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7304 		  == NULL_TREE);
7305       BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7306 	= BLOCK_SUBBLOCKS (orig_block);
7307       for (block = BLOCK_SUBBLOCKS (orig_block);
7308 	   block; block = BLOCK_CHAIN (block))
7309 	BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7310       BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7311     }
7312 
7313   replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7314 				    &vars_map, dest_cfun->decl);
7315 
7316   if (new_label_map)
7317     htab_delete (new_label_map);
7318   if (eh_map)
7319     delete eh_map;
7320 
7321   if (gimple_in_ssa_p (cfun))
7322     {
7323       /* We need to release ssa-names in a defined order, so first find them,
7324 	 and then iterate in ascending version order.  */
7325       bitmap release_names = BITMAP_ALLOC (NULL);
7326       vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7327       bitmap_iterator bi;
7328       unsigned i;
7329       EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7330 	release_ssa_name (ssa_name (i));
7331       BITMAP_FREE (release_names);
7332     }
7333 
7334   /* Rewire the entry and exit blocks.  The successor to the entry
7335      block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7336      the child function.  Similarly, the predecessor of DEST_FN's
7337      EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR.  We
7338      need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7339      various CFG manipulation functions get to the right CFG.
7340 
7341      FIXME, this is silly.  The CFG ought to become a parameter to
7342      these helpers.  */
7343   push_cfun (dest_cfun);
7344   make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7345   if (exit_bb)
7346     make_edge (exit_bb,  EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7347   pop_cfun ();
7348 
7349   /* Back in the original function, the SESE region has disappeared;
7350      create a new basic block in its place.  */
7351   bb = create_empty_bb (entry_pred[0]);
7352   if (current_loops)
7353     add_bb_to_loop (bb, loop);
7354   for (i = 0; i < num_entry_edges; i++)
7355     {
7356       e = make_edge (entry_pred[i], bb, entry_flag[i]);
7357       e->probability = entry_prob[i];
7358     }
7359 
7360   for (i = 0; i < num_exit_edges; i++)
7361     {
7362       e = make_edge (bb, exit_succ[i], exit_flag[i]);
7363       e->probability = exit_prob[i];
7364     }
7365 
7366   set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7367   FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7368     set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7369   dom_bbs.release ();
7370 
7371   if (exit_bb)
7372     {
7373       free (exit_prob);
7374       free (exit_flag);
7375       free (exit_succ);
7376     }
7377   free (entry_prob);
7378   free (entry_flag);
7379   free (entry_pred);
7380   bbs.release ();
7381 
7382   return bb;
7383 }
7384 
7385 /* Dump default def DEF to file FILE using FLAGS and indentation
7386    SPC.  */
7387 
7388 static void
7389 dump_default_def (FILE *file, tree def, int spc, int flags)
7390 {
7391   for (int i = 0; i < spc; ++i)
7392     fprintf (file, " ");
7393   dump_ssaname_info_to_file (file, def, spc);
7394 
7395   print_generic_expr (file, TREE_TYPE (def), flags);
7396   fprintf (file, " ");
7397   print_generic_expr (file, def, flags);
7398   fprintf (file, " = ");
7399   print_generic_expr (file, SSA_NAME_VAR (def), flags);
7400   fprintf (file, ";\n");
7401 }
7402 
7403 /* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
7404    dumpfile.h).  */
7405 
7406 void
7407 dump_function_to_file (tree fndecl, FILE *file, int flags)
7408 {
7409   tree arg, var, old_current_fndecl = current_function_decl;
7410   struct function *dsf;
7411   bool ignore_topmost_bind = false, any_var = false;
7412   basic_block bb;
7413   tree chain;
7414   bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7415 		  && decl_is_tm_clone (fndecl));
7416   struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7417 
7418   if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7419     {
7420       fprintf (file, "__attribute__((");
7421 
7422       bool first = true;
7423       tree chain;
7424       for (chain = DECL_ATTRIBUTES (fndecl); chain;
7425 	   first = false, chain = TREE_CHAIN (chain))
7426 	{
7427 	  if (!first)
7428 	    fprintf (file, ", ");
7429 
7430 	  print_generic_expr (file, get_attribute_name (chain), dump_flags);
7431 	  if (TREE_VALUE (chain) != NULL_TREE)
7432 	    {
7433 	      fprintf (file, " (");
7434 	      print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7435 	      fprintf (file, ")");
7436 	    }
7437 	}
7438 
7439       fprintf (file, "))\n");
7440     }
7441 
7442   current_function_decl = fndecl;
7443   fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7444 
7445   arg = DECL_ARGUMENTS (fndecl);
7446   while (arg)
7447     {
7448       print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7449       fprintf (file, " ");
7450       print_generic_expr (file, arg, dump_flags);
7451       if (flags & TDF_VERBOSE)
7452 	print_node (file, "", arg, 4);
7453       if (DECL_CHAIN (arg))
7454 	fprintf (file, ", ");
7455       arg = DECL_CHAIN (arg);
7456     }
7457   fprintf (file, ")\n");
7458 
7459   if (flags & TDF_VERBOSE)
7460     print_node (file, "", fndecl, 2);
7461 
7462   dsf = DECL_STRUCT_FUNCTION (fndecl);
7463   if (dsf && (flags & TDF_EH))
7464     dump_eh_tree (file, dsf);
7465 
7466   if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7467     {
7468       dump_node (fndecl, TDF_SLIM | flags, file);
7469       current_function_decl = old_current_fndecl;
7470       return;
7471     }
7472 
7473   /* When GIMPLE is lowered, the variables are no longer available in
7474      BIND_EXPRs, so display them separately.  */
7475   if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7476     {
7477       unsigned ix;
7478       ignore_topmost_bind = true;
7479 
7480       fprintf (file, "{\n");
7481       if (gimple_in_ssa_p (fun)
7482 	  && (flags & TDF_ALIAS))
7483 	{
7484 	  for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
7485 	       arg = DECL_CHAIN (arg))
7486 	    {
7487 	      tree def = ssa_default_def (fun, arg);
7488 	      if (def)
7489 		dump_default_def (file, def, 2, flags);
7490 	    }
7491 
7492 	  tree res = DECL_RESULT (fun->decl);
7493 	  if (res != NULL_TREE
7494 	      && DECL_BY_REFERENCE (res))
7495 	    {
7496 	      tree def = ssa_default_def (fun, res);
7497 	      if (def)
7498 		dump_default_def (file, def, 2, flags);
7499 	    }
7500 
7501 	  tree static_chain = fun->static_chain_decl;
7502 	  if (static_chain != NULL_TREE)
7503 	    {
7504 	      tree def = ssa_default_def (fun, static_chain);
7505 	      if (def)
7506 		dump_default_def (file, def, 2, flags);
7507 	    }
7508 	}
7509 
7510       if (!vec_safe_is_empty (fun->local_decls))
7511 	FOR_EACH_LOCAL_DECL (fun, ix, var)
7512 	  {
7513 	    print_generic_decl (file, var, flags);
7514 	    if (flags & TDF_VERBOSE)
7515 	      print_node (file, "", var, 4);
7516 	    fprintf (file, "\n");
7517 
7518 	    any_var = true;
7519 	  }
7520       if (gimple_in_ssa_p (cfun))
7521 	for (ix = 1; ix < num_ssa_names; ++ix)
7522 	  {
7523 	    tree name = ssa_name (ix);
7524 	    if (name && !SSA_NAME_VAR (name))
7525 	      {
7526 		fprintf (file, "  ");
7527 		print_generic_expr (file, TREE_TYPE (name), flags);
7528 		fprintf (file, " ");
7529 		print_generic_expr (file, name, flags);
7530 		fprintf (file, ";\n");
7531 
7532 		any_var = true;
7533 	      }
7534 	  }
7535     }
7536 
7537   if (fun && fun->decl == fndecl
7538       && fun->cfg
7539       && basic_block_info_for_fn (fun))
7540     {
7541       /* If the CFG has been built, emit a CFG-based dump.  */
7542       if (!ignore_topmost_bind)
7543 	fprintf (file, "{\n");
7544 
7545       if (any_var && n_basic_blocks_for_fn (fun))
7546 	fprintf (file, "\n");
7547 
7548       FOR_EACH_BB_FN (bb, fun)
7549 	dump_bb (file, bb, 2, flags | TDF_COMMENT);
7550 
7551       fprintf (file, "}\n");
7552     }
7553   else if (DECL_SAVED_TREE (fndecl) == NULL)
7554     {
7555       /* The function is now in GIMPLE form but the CFG has not been
7556 	 built yet.  Emit the single sequence of GIMPLE statements
7557 	 that make up its body.  */
7558       gimple_seq body = gimple_body (fndecl);
7559 
7560       if (gimple_seq_first_stmt (body)
7561 	  && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7562 	  && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7563 	print_gimple_seq (file, body, 0, flags);
7564       else
7565 	{
7566 	  if (!ignore_topmost_bind)
7567 	    fprintf (file, "{\n");
7568 
7569 	  if (any_var)
7570 	    fprintf (file, "\n");
7571 
7572 	  print_gimple_seq (file, body, 2, flags);
7573 	  fprintf (file, "}\n");
7574 	}
7575     }
7576   else
7577     {
7578       int indent;
7579 
7580       /* Make a tree based dump.  */
7581       chain = DECL_SAVED_TREE (fndecl);
7582       if (chain && TREE_CODE (chain) == BIND_EXPR)
7583 	{
7584 	  if (ignore_topmost_bind)
7585 	    {
7586 	      chain = BIND_EXPR_BODY (chain);
7587 	      indent = 2;
7588 	    }
7589 	  else
7590 	    indent = 0;
7591 	}
7592       else
7593 	{
7594 	  if (!ignore_topmost_bind)
7595 	    {
7596 	      fprintf (file, "{\n");
7597 	      /* No topmost bind, pretend it's ignored for later.  */
7598 	      ignore_topmost_bind = true;
7599 	    }
7600 	  indent = 2;
7601 	}
7602 
7603       if (any_var)
7604 	fprintf (file, "\n");
7605 
7606       print_generic_stmt_indented (file, chain, flags, indent);
7607       if (ignore_topmost_bind)
7608 	fprintf (file, "}\n");
7609     }
7610 
7611   if (flags & TDF_ENUMERATE_LOCALS)
7612     dump_enumerated_decls (file, flags);
7613   fprintf (file, "\n\n");
7614 
7615   current_function_decl = old_current_fndecl;
7616 }
7617 
7618 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h).  */
7619 
7620 DEBUG_FUNCTION void
7621 debug_function (tree fn, int flags)
7622 {
7623   dump_function_to_file (fn, stderr, flags);
7624 }
7625 
7626 
7627 /* Print on FILE the indexes for the predecessors of basic_block BB.  */
7628 
7629 static void
7630 print_pred_bbs (FILE *file, basic_block bb)
7631 {
7632   edge e;
7633   edge_iterator ei;
7634 
7635   FOR_EACH_EDGE (e, ei, bb->preds)
7636     fprintf (file, "bb_%d ", e->src->index);
7637 }
7638 
7639 
7640 /* Print on FILE the indexes for the successors of basic_block BB.  */
7641 
7642 static void
7643 print_succ_bbs (FILE *file, basic_block bb)
7644 {
7645   edge e;
7646   edge_iterator ei;
7647 
7648   FOR_EACH_EDGE (e, ei, bb->succs)
7649     fprintf (file, "bb_%d ", e->dest->index);
7650 }
7651 
7652 /* Print to FILE the basic block BB according to the VERBOSITY level.  */
7653 
7654 void
7655 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7656 {
7657   char *s_indent = (char *) alloca ((size_t) indent + 1);
7658   memset ((void *) s_indent, ' ', (size_t) indent);
7659   s_indent[indent] = '\0';
7660 
7661   /* Print basic_block's header.  */
7662   if (verbosity >= 2)
7663     {
7664       fprintf (file, "%s  bb_%d (preds = {", s_indent, bb->index);
7665       print_pred_bbs (file, bb);
7666       fprintf (file, "}, succs = {");
7667       print_succ_bbs (file, bb);
7668       fprintf (file, "})\n");
7669     }
7670 
7671   /* Print basic_block's body.  */
7672   if (verbosity >= 3)
7673     {
7674       fprintf (file, "%s  {\n", s_indent);
7675       dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7676       fprintf (file, "%s  }\n", s_indent);
7677     }
7678 }
7679 
7680 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7681 
7682 /* Pretty print LOOP on FILE, indented INDENT spaces.  Depending on the
7683    VERBOSITY level, this outputs the contents of the loop, or just its
7684    structure.  */
7685 
7686 static void
7687 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7688 {
7689   char *s_indent;
7690   basic_block bb;
7691 
7692   if (loop == NULL)
7693     return;
7694 
7695   s_indent = (char *) alloca ((size_t) indent + 1);
7696   memset ((void *) s_indent, ' ', (size_t) indent);
7697   s_indent[indent] = '\0';
7698 
7699   /* Print loop's header.  */
7700   fprintf (file, "%sloop_%d (", s_indent, loop->num);
7701   if (loop->header)
7702     fprintf (file, "header = %d", loop->header->index);
7703   else
7704     {
7705       fprintf (file, "deleted)\n");
7706       return;
7707     }
7708   if (loop->latch)
7709     fprintf (file, ", latch = %d", loop->latch->index);
7710   else
7711     fprintf (file, ", multiple latches");
7712   fprintf (file, ", niter = ");
7713   print_generic_expr (file, loop->nb_iterations, 0);
7714 
7715   if (loop->any_upper_bound)
7716     {
7717       fprintf (file, ", upper_bound = ");
7718       print_decu (loop->nb_iterations_upper_bound, file);
7719     }
7720 
7721   if (loop->any_estimate)
7722     {
7723       fprintf (file, ", estimate = ");
7724       print_decu (loop->nb_iterations_estimate, file);
7725     }
7726   fprintf (file, ")\n");
7727 
7728   /* Print loop's body.  */
7729   if (verbosity >= 1)
7730     {
7731       fprintf (file, "%s{\n", s_indent);
7732       FOR_EACH_BB_FN (bb, cfun)
7733 	if (bb->loop_father == loop)
7734 	  print_loops_bb (file, bb, indent, verbosity);
7735 
7736       print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7737       fprintf (file, "%s}\n", s_indent);
7738     }
7739 }
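/* For reference (editor's sketch, derived from the format strings above;
   fields appear only when the corresponding information is known, and
   'n_7' is a hypothetical SSA name):

     loop_1 (header = 3, latch = 5, niter = n_7, upper_bound = 16,
	     estimate = 16)  */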
7740 
7741 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7742    spaces.  Depending on the VERBOSITY level, this outputs the contents
7743    of the loop, or just its structure.  */
7744 
7745 static void
7746 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7747 			 int verbosity)
7748 {
7749   if (loop == NULL)
7750     return;
7751 
7752   print_loop (file, loop, indent, verbosity);
7753   print_loop_and_siblings (file, loop->next, indent, verbosity);
7754 }
7755 
7756 /* Starting from the CFG entry block of the current function, pretty
7757    print its loop structure on FILE.  */
7758 
7759 void
7760 print_loops (FILE *file, int verbosity)
7761 {
7762   basic_block bb;
7763 
7764   bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7765   fprintf (file, "\nLoops in function: %s\n", current_function_name ());
7766   if (bb && bb->loop_father)
7767     print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7768 }
7769 
7770 /* Dump a loop.  */
7771 
7772 DEBUG_FUNCTION void
7773 debug (struct loop &ref)
7774 {
7775   print_loop (stderr, &ref, 0, /*verbosity*/0);
7776 }
7777 
7778 DEBUG_FUNCTION void
7779 debug (struct loop *ptr)
7780 {
7781   if (ptr)
7782     debug (*ptr);
7783   else
7784     fprintf (stderr, "<nil>\n");
7785 }
7786 
7787 /* Dump a loop verbosely.  */
7788 
7789 DEBUG_FUNCTION void
7790 debug_verbose (struct loop &ref)
7791 {
7792   print_loop (stderr, &ref, 0, /*verbosity*/3);
7793 }
7794 
7795 DEBUG_FUNCTION void
7796 debug_verbose (struct loop *ptr)
7797 {
7798   if (ptr)
7799     debug_verbose (*ptr);
7800   else
7801     fprintf (stderr, "<nil>\n");
7802 }
7803 
7804 
7805 /* Debug the loop structure at tree level, at some VERBOSITY level.  */
7806 
7807 DEBUG_FUNCTION void
7808 debug_loops (int verbosity)
7809 {
7810   print_loops (stderr, verbosity);
7811 }
7812 
7813 /* Print on stderr the code of LOOP, at some VERBOSITY level.  */
7814 
7815 DEBUG_FUNCTION void
7816 debug_loop (struct loop *loop, int verbosity)
7817 {
7818   print_loop (stderr, loop, 0, verbosity);
7819 }
7820 
7821 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7822    level.  */
7823 
7824 DEBUG_FUNCTION void
7825 debug_loop_num (unsigned num, int verbosity)
7826 {
7827   debug_loop (get_loop (cfun, num), verbosity);
7828 }
7829 
7830 /* Return true if BB ends with a call, possibly followed by some
7831    instructions that must stay with the call.  Return false
7832    otherwise.  */
7833 
7834 static bool
7835 gimple_block_ends_with_call_p (basic_block bb)
7836 {
7837   gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7838   return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7839 }
7840 
7841 
7842 /* Return true if BB ends with a conditional branch.  Return false
7843    otherwise.  */
7844 
7845 static bool
7846 gimple_block_ends_with_condjump_p (const_basic_block bb)
7847 {
7848   gimple *stmt = last_stmt (CONST_CAST_BB (bb));
7849   return (stmt && gimple_code (stmt) == GIMPLE_COND);
7850 }
7851 
7852 
7853 /* Return true if we need to add a fake edge to exit at statement T.
7854    Helper function for gimple_flow_call_edges_add.  */
7855 
7856 static bool
7857 need_fake_edge_p (gimple *t)
7858 {
7859   tree fndecl = NULL_TREE;
7860   int call_flags = 0;
7861 
7862   /* NORETURN and LONGJMP calls already have an edge to exit.
7863      CONST and PURE calls do not need one.
7864      We don't currently check for CONST and PURE here, although
7865      it would be a good idea, because those attributes are
7866      figured out from the RTL in mark_constant_function, and
7867      the counter incrementation code from -fprofile-arcs
7868      leads to different results from -fbranch-probabilities.  */
7869   if (is_gimple_call (t))
7870     {
7871       fndecl = gimple_call_fndecl (t);
7872       call_flags = gimple_call_flags (t);
7873     }
7874 
7875   if (is_gimple_call (t)
7876       && fndecl
7877       && DECL_BUILT_IN (fndecl)
7878       && (call_flags & ECF_NOTHROW)
7879       && !(call_flags & ECF_RETURNS_TWICE)
7880       /* fork() doesn't really return twice, but wrapping it in
7881          __gcov_fork(), which calls __gcov_flush() and clears the
7882 	 counters before forking, has the same effect as returning
7883 	 twice.  Force a fake edge.  */
7884       && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7885 	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7886     return false;
7887 
7888   if (is_gimple_call (t))
7889     {
7890       edge_iterator ei;
7891       edge e;
7892       basic_block bb;
7893 
7894       if (!(call_flags & ECF_NORETURN))
7895 	return true;
7896 
7897       bb = gimple_bb (t);
7898       FOR_EACH_EDGE (e, ei, bb->succs)
7899 	if ((e->flags & EDGE_FAKE) == 0)
7900 	  return true;
7901     }
7902 
7903   if (gasm *asm_stmt = dyn_cast <gasm *> (t))
7904     if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
7905       return true;
7906 
7907   return false;
7908 }
7909 
7910 
7911 /* Add fake edges to the function exit for any non-constant and
7912    non-noreturn calls (or noreturn calls with EH/abnormal edges), and
7913    for volatile inline assembly, in the bitmap of blocks specified by
7914    BLOCKS or in the whole CFG if BLOCKS is zero.  Return the number of
7915    blocks that were split.
7916 
7917    The goal is to expose cases in which entering a basic block does
7918    not imply that all subsequent instructions must be executed.  */
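/* A minimal sketch of why the fake edges matter (editor's addition;
   'may_abort' is a hypothetical user function, not part of GCC):

	int
	f (int x)
	{
	  may_abort (x);	// may call exit () and never return
	  return x + 1;		// so this need not execute
	}

   Without a fake edge from the call's block to EXIT, the block profiler's
   minimal-spanning-tree computation would assume that entering the block
   implies that the return statement executes as well.  */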
7919 
7920 static int
7921 gimple_flow_call_edges_add (sbitmap blocks)
7922 {
7923   int i;
7924   int blocks_split = 0;
7925   int last_bb = last_basic_block_for_fn (cfun);
7926   bool check_last_block = false;
7927 
7928   if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7929     return 0;
7930 
7931   if (! blocks)
7932     check_last_block = true;
7933   else
7934     check_last_block = bitmap_bit_p (blocks,
7935 				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7936 
7937   /* In the last basic block, before epilogue generation, there will be
7938      a fallthru edge to EXIT.  Special care is required if the last insn
7939      of the last basic block is a call because make_edge folds duplicate
7940      edges, which would result in the fallthru edge also being marked
7941      fake, which would result in the fallthru edge being removed by
7942      remove_fake_edges, which would result in an invalid CFG.
7943 
7944      Moreover, we can't elide the outgoing fake edge, since the block
7945      profiler needs to take this into account in order to solve the minimal
7946      spanning tree in the case that the call doesn't return.
7947 
7948      Handle this by adding a dummy instruction in a new last basic block.  */
7949   if (check_last_block)
7950     {
7951       basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7952       gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7953       gimple *t = NULL;
7954 
7955       if (!gsi_end_p (gsi))
7956 	t = gsi_stmt (gsi);
7957 
7958       if (t && need_fake_edge_p (t))
7959 	{
7960 	  edge e;
7961 
7962 	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7963 	  if (e)
7964 	    {
7965 	      gsi_insert_on_edge (e, gimple_build_nop ());
7966 	      gsi_commit_edge_inserts ();
7967 	    }
7968 	}
7969     }
7970 
7971   /* Now add fake edges to the function exit for any non-constant
7972      calls since there is no way that we can determine if they will
7973      return or not...  */
7974   for (i = 0; i < last_bb; i++)
7975     {
7976       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7977       gimple_stmt_iterator gsi;
7978       gimple *stmt, *last_stmt;
7979 
7980       if (!bb)
7981 	continue;
7982 
7983       if (blocks && !bitmap_bit_p (blocks, i))
7984 	continue;
7985 
7986       gsi = gsi_last_nondebug_bb (bb);
7987       if (!gsi_end_p (gsi))
7988 	{
7989 	  last_stmt = gsi_stmt (gsi);
7990 	  do
7991 	    {
7992 	      stmt = gsi_stmt (gsi);
7993 	      if (need_fake_edge_p (stmt))
7994 		{
7995 		  edge e;
7996 
7997 		  /* The handling above of the final block before the
7998 		     epilogue should be enough to verify that there is
7999 		     no edge to the exit block in CFG already.
8000 		     Calling make_edge in such case would cause us to
8001 		     mark that edge as fake and remove it later.  */
8002 		  if (flag_checking && stmt == last_stmt)
8003 		    {
8004 		      e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8005 		      gcc_assert (e == NULL);
8006 		    }
8007 
8008 		  /* Note that the following may create a new basic block
8009 		     and renumber the existing basic blocks.  */
8010 		  if (stmt != last_stmt)
8011 		    {
8012 		      e = split_block (bb, stmt);
8013 		      if (e)
8014 			blocks_split++;
8015 		    }
8016 		  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8017 		}
8018 	      gsi_prev (&gsi);
8019 	    }
8020 	  while (!gsi_end_p (gsi));
8021 	}
8022     }
8023 
8024   if (blocks_split)
8025     verify_flow_info ();
8026 
8027   return blocks_split;
8028 }
8029 
8030 /* Removes edge E and all the blocks dominated by it, and updates dominance
8031    information.  The IL in E->src needs to be updated separately.
8032    If dominance info is not available, only the edge E is removed.  */
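/* For intuition (editor's sketch): removing edge E below removes B and C
   as well, because both are dominated by E->dest:

	   A				   A
	  / \				    \
	 E   \		==>		     \
	/     \				      \
       B       D			       D
	\     /
	 C---/

   If B had another predecessor not dominated by B itself, only the edge
   would be removed.  */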
8033 
8034 void
8035 remove_edge_and_dominated_blocks (edge e)
8036 {
8037   vec<basic_block> bbs_to_remove = vNULL;
8038   vec<basic_block> bbs_to_fix_dom = vNULL;
8039   bitmap df, df_idom;
8040   edge f;
8041   edge_iterator ei;
8042   bool none_removed = false;
8043   unsigned i;
8044   basic_block bb, dbb;
8045   bitmap_iterator bi;
8046 
8047   /* If we are removing a path inside a non-root loop, that may change
8048      loop ownership of blocks or remove loops.  Mark loops for fixup.  */
8049   if (current_loops
8050       && loop_outer (e->src->loop_father) != NULL
8051       && e->src->loop_father == e->dest->loop_father)
8052     loops_state_set (LOOPS_NEED_FIXUP);
8053 
8054   if (!dom_info_available_p (CDI_DOMINATORS))
8055     {
8056       remove_edge (e);
8057       return;
8058     }
8059 
8060   /* No updating is needed for edges to exit.  */
8061   if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8062     {
8063       if (cfgcleanup_altered_bbs)
8064 	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8065       remove_edge (e);
8066       return;
8067     }
8068 
8069   /* First, we find the basic blocks to remove.  If E->dest has a predecessor
8070      that is not dominated by E->dest, then this set is empty.  Otherwise,
8071      all the basic blocks dominated by E->dest are removed.
8072 
8073      Also, to DF_IDOM we store the immediate dominators of the blocks in
8074      the dominance frontier of E (i.e., of the successors of the
8075      removed blocks, if there are any, and of E->dest otherwise).  */
8076   FOR_EACH_EDGE (f, ei, e->dest->preds)
8077     {
8078       if (f == e)
8079 	continue;
8080 
8081       if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8082 	{
8083 	  none_removed = true;
8084 	  break;
8085 	}
8086     }
8087 
8088   df = BITMAP_ALLOC (NULL);
8089   df_idom = BITMAP_ALLOC (NULL);
8090 
8091   if (none_removed)
8092     bitmap_set_bit (df_idom,
8093 		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8094   else
8095     {
8096       bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8097       FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8098 	{
8099 	  FOR_EACH_EDGE (f, ei, bb->succs)
8100 	    {
8101 	      if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8102 		bitmap_set_bit (df, f->dest->index);
8103 	    }
8104 	}
8105       FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8106 	bitmap_clear_bit (df, bb->index);
8107 
8108       EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8109 	{
8110 	  bb = BASIC_BLOCK_FOR_FN (cfun, i);
8111 	  bitmap_set_bit (df_idom,
8112 			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8113 	}
8114     }
8115 
8116   if (cfgcleanup_altered_bbs)
8117     {
8118       /* Record the set of the altered basic blocks.  */
8119       bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8120       bitmap_ior_into (cfgcleanup_altered_bbs, df);
8121     }
8122 
8123   /* Remove E and the cancelled blocks.  */
8124   if (none_removed)
8125     remove_edge (e);
8126   else
8127     {
8128       /* Walk backwards so as to get a chance to substitute all
8129 	 released DEFs into debug stmts.  See
8130 	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8131 	 details.  */
8132       for (i = bbs_to_remove.length (); i-- > 0; )
8133 	delete_basic_block (bbs_to_remove[i]);
8134     }
8135 
8136   /* Update the dominance information.  The immediate dominator may change only
8137      for blocks whose immediate dominator belongs to DF_IDOM:
8138 
8139      Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8140      removal.  Let Z be an arbitrary block such that idom(Z) = Y and
8141      Z dominates X after the removal.  Before removal, there exists a path P
8142      from Y to X that avoids Z.  Let F be the last edge on P that is
8143      removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
8144      dominates W, and because of P, Z does not dominate W), and W belongs to
8145      the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
8146   EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8147     {
8148       bb = BASIC_BLOCK_FOR_FN (cfun, i);
8149       for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8150 	   dbb;
8151 	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
8152 	bbs_to_fix_dom.safe_push (dbb);
8153     }
8154 
8155   iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8156 
8157   BITMAP_FREE (df);
8158   BITMAP_FREE (df_idom);
8159   bbs_to_remove.release ();
8160   bbs_to_fix_dom.release ();
8161 }
8162 
8163 /* Purge dead EH edges from basic block BB.  */
8164 
8165 bool
8166 gimple_purge_dead_eh_edges (basic_block bb)
8167 {
8168   bool changed = false;
8169   edge e;
8170   edge_iterator ei;
8171   gimple *stmt = last_stmt (bb);
8172 
8173   if (stmt && stmt_can_throw_internal (stmt))
8174     return false;
8175 
8176   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8177     {
8178       if (e->flags & EDGE_EH)
8179 	{
8180 	  remove_edge_and_dominated_blocks (e);
8181 	  changed = true;
8182 	}
8183       else
8184 	ei_next (&ei);
8185     }
8186 
8187   return changed;
8188 }
8189 
8190 /* Purge dead EH edges from the basic blocks listed in BLOCKS.  */
8191 
8192 bool
8193 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8194 {
8195   bool changed = false;
8196   unsigned i;
8197   bitmap_iterator bi;
8198 
8199   EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8200     {
8201       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8202 
8203       /* Earlier gimple_purge_dead_eh_edges could have removed
8204 	 this basic block already.  */
8205       gcc_assert (bb || changed);
8206       if (bb != NULL)
8207 	changed |= gimple_purge_dead_eh_edges (bb);
8208     }
8209 
8210   return changed;
8211 }
8212 
8213 /* Purge dead abnormal call edges from basic block BB.  */
8214 
8215 bool
8216 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8217 {
8218   bool changed = false;
8219   edge e;
8220   edge_iterator ei;
8221   gimple *stmt = last_stmt (bb);
8222 
8223   if (!cfun->has_nonlocal_label
8224       && !cfun->calls_setjmp)
8225     return false;
8226 
8227   if (stmt && stmt_can_make_abnormal_goto (stmt))
8228     return false;
8229 
8230   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8231     {
8232       if (e->flags & EDGE_ABNORMAL)
8233 	{
8234 	  if (e->flags & EDGE_FALLTHRU)
8235 	    e->flags &= ~EDGE_ABNORMAL;
8236 	  else
8237 	    remove_edge_and_dominated_blocks (e);
8238 	  changed = true;
8239 	}
8240       else
8241 	ei_next (&ei);
8242     }
8243 
8244   return changed;
8245 }
8246 
8247 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS.  */
8248 
8249 bool
8250 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8251 {
8252   bool changed = false;
8253   unsigned i;
8254   bitmap_iterator bi;
8255 
8256   EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8257     {
8258       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8259 
8260       /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8261 	 this basic block already.  */
8262       gcc_assert (bb || changed);
8263       if (bb != NULL)
8264 	changed |= gimple_purge_dead_abnormal_call_edges (bb);
8265     }
8266 
8267   return changed;
8268 }
8269 
8270 /* This function is called whenever a new edge is created or
8271    redirected.  */
8272 
8273 static void
8274 gimple_execute_on_growing_pred (edge e)
8275 {
8276   basic_block bb = e->dest;
8277 
8278   if (!gimple_seq_empty_p (phi_nodes (bb)))
8279     reserve_phi_args_for_new_edge (bb);
8280 }
8281 
8282 /* This function is called immediately before edge E is removed from
8283    the edge vector E->dest->preds.  */
8284 
8285 static void
8286 gimple_execute_on_shrinking_pred (edge e)
8287 {
8288   if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8289     remove_phi_args (e);
8290 }
8291 
8292 /*---------------------------------------------------------------------------
8293   Helper functions for Loop versioning
8294   ---------------------------------------------------------------------------*/
8295 
8296 /* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
8297    of 'first'.  Both are dominated by the 'new_head' basic block.  When
8298    'new_head' was created by splitting 'second's incoming edge, it received
8299    phi arguments on that edge from split_edge().  Later, an additional
8300    edge 'e' was created to connect 'new_head' and 'first'.  This routine
8301    now adds, on edge 'e', the phi args that the 'new_head'-to-'second'
8302    edge received as part of the edge splitting.  */
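/* Schematically (editor's sketch):

	   new_head
	    /    \
	   e      e2  (created by split_edge, already carries phi args)
	  /        \
       first      second

   For each phi pair (PHI1 in 'first', PHI2 in 'second'), PHI2's argument
   on e2 is added to PHI1 as its argument on e.  */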
8303 
8304 static void
8305 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8306 				  basic_block new_head, edge e)
8307 {
8308   gphi *phi1, *phi2;
8309   gphi_iterator psi1, psi2;
8310   tree def;
8311   edge e2 = find_edge (new_head, second);
8312 
8313   /* Because NEW_HEAD has been created by splitting SECOND's incoming
8314      edge, we should always have an edge from NEW_HEAD to SECOND.  */
8315   gcc_assert (e2 != NULL);
8316 
8317   /* Browse all of 'second' basic block's phi nodes and add phi args to
8318      edge 'e' for the 'first' head.  PHI args are always in correct order.  */
8319 
8320   for (psi2 = gsi_start_phis (second),
8321        psi1 = gsi_start_phis (first);
8322        !gsi_end_p (psi2) && !gsi_end_p (psi1);
8323        gsi_next (&psi2),  gsi_next (&psi1))
8324     {
8325       phi1 = psi1.phi ();
8326       phi2 = psi2.phi ();
8327       def = PHI_ARG_DEF (phi2, e2->dest_idx);
8328       add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8329     }
8330 }
8331 
8332 
8333 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8334    SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8335    the destination of the ELSE part.  */
8336 
8337 static void
8338 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8339 			       basic_block second_head ATTRIBUTE_UNUSED,
8340 			       basic_block cond_bb, void *cond_e)
8341 {
8342   gimple_stmt_iterator gsi;
8343   gimple *new_cond_expr;
8344   tree cond_expr = (tree) cond_e;
8345   edge e0;
8346 
8347   /* Build new conditional expr */
8348   new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8349 					       NULL_TREE, NULL_TREE);
8350 
8351   /* Add new cond in cond_bb.  */
8352   gsi = gsi_last_bb (cond_bb);
8353   gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8354 
8355   /* Adjust edges appropriately to connect new head with first head
8356      as well as second head.  */
8357   e0 = single_succ_edge (cond_bb);
8358   e0->flags &= ~EDGE_FALLTHRU;
8359   e0->flags |= EDGE_FALSE_VALUE;
8360 }
8361 
8362 
8363 /* Do book-keeping of basic block BB for the profile consistency checker.
8364    If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1, do
8365    post-pass accounting.  Store the counts in RECORD.  */
8366 static void
8367 gimple_account_profile_record (basic_block bb, int after_pass,
8368 			       struct profile_record *record)
8369 {
8370   gimple_stmt_iterator i;
8371   for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8372     {
8373       record->size[after_pass]
8374 	+= estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8375       if (profile_status_for_fn (cfun) == PROFILE_READ)
8376 	record->time[after_pass]
8377 	  += estimate_num_insns (gsi_stmt (i),
8378 				 &eni_time_weights) * bb->count;
8379       else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8380 	record->time[after_pass]
8381 	  += estimate_num_insns (gsi_stmt (i),
8382 				 &eni_time_weights) * bb->frequency;
8383     }
8384 }
8385 
8386 struct cfg_hooks gimple_cfg_hooks = {
8387   "gimple",
8388   gimple_verify_flow_info,
8389   gimple_dump_bb,		/* dump_bb  */
8390   gimple_dump_bb_for_graph,	/* dump_bb_for_graph  */
8391   create_bb,			/* create_basic_block  */
8392   gimple_redirect_edge_and_branch, /* redirect_edge_and_branch  */
8393   gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force  */
8394   gimple_can_remove_branch_p,	/* can_remove_branch_p  */
8395   remove_bb,			/* delete_basic_block  */
8396   gimple_split_block,		/* split_block  */
8397   gimple_move_block_after,	/* move_block_after  */
8398   gimple_can_merge_blocks_p,	/* can_merge_blocks_p  */
8399   gimple_merge_blocks,		/* merge_blocks  */
8400   gimple_predict_edge,		/* predict_edge  */
8401   gimple_predicted_by_p,	/* predicted_by_p  */
8402   gimple_can_duplicate_bb_p,	/* can_duplicate_block_p  */
8403   gimple_duplicate_bb,		/* duplicate_block  */
8404   gimple_split_edge,		/* split_edge  */
8405   gimple_make_forwarder_block,	/* make_forward_block  */
8406   NULL,				/* tidy_fallthru_edge  */
8407   NULL,				/* force_nonfallthru */
8408   gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8409   gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8410   gimple_flow_call_edges_add,   /* flow_call_edges_add */
8411   gimple_execute_on_growing_pred,	/* execute_on_growing_pred */
8412   gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8413   gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8414   gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8415   gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
8416   extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8417   flush_pending_stmts, 		/* flush_pending_stmts */
8418   gimple_empty_block_p,           /* block_empty_p */
8419   gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8420   gimple_account_profile_record,
8421 };
8422 
8423 
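/* Reminder (editor's note): an edge is critical when its source has more
   than one successor and its destination has more than one predecessor
   (see EDGE_CRITICAL_P).  No statement can be inserted on such an edge
   without disturbing some unrelated path, so split_edge interposes a new
   empty block instead.  */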
8424 /* Split all critical edges.  */
8425 
8426 unsigned int
8427 split_critical_edges (void)
8428 {
8429   basic_block bb;
8430   edge e;
8431   edge_iterator ei;
8432 
8433   /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8434      expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
8435      mappings around the calls to split_edge.  */
8436   start_recording_case_labels ();
8437   FOR_ALL_BB_FN (bb, cfun)
8438     {
8439       FOR_EACH_EDGE (e, ei, bb->succs)
8440         {
8441 	  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8442 	    split_edge (e);
8443 	  /* PRE inserts statements on edges and expects that,
8444 	     since split_critical_edges was done beforehand, committing edge
8445 	     insertions will not split more edges.  In addition to critical
8446 	     edges we must split edges that have multiple successors and
8447 	     end by control flow statements, such as RESX.
8448 	     Go ahead and split them too.  This matches the logic in
8449 	     gimple_find_edge_insert_loc.  */
8450 	  else if ((!single_pred_p (e->dest)
8451 	            || !gimple_seq_empty_p (phi_nodes (e->dest))
8452 		    || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8453 		   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8454 	           && !(e->flags & EDGE_ABNORMAL))
8455 	    {
8456 	      gimple_stmt_iterator gsi;
8457 
8458 	      gsi = gsi_last_bb (e->src);
8459 	      if (!gsi_end_p (gsi)
8460 		  && stmt_ends_bb_p (gsi_stmt (gsi))
8461 		  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8462 		      && !gimple_call_builtin_p (gsi_stmt (gsi),
8463 						 BUILT_IN_RETURN)))
8464 		split_edge (e);
8465 	    }
8466 	}
8467     }
8468   end_recording_case_labels ();
8469   return 0;
8470 }
8471 
8472 namespace {
8473 
8474 const pass_data pass_data_split_crit_edges =
8475 {
8476   GIMPLE_PASS, /* type */
8477   "crited", /* name */
8478   OPTGROUP_NONE, /* optinfo_flags */
8479   TV_TREE_SPLIT_EDGES, /* tv_id */
8480   PROP_cfg, /* properties_required */
8481   PROP_no_crit_edges, /* properties_provided */
8482   0, /* properties_destroyed */
8483   0, /* todo_flags_start */
8484   0, /* todo_flags_finish */
8485 };
8486 
8487 class pass_split_crit_edges : public gimple_opt_pass
8488 {
8489 public:
8490   pass_split_crit_edges (gcc::context *ctxt)
8491     : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8492   {}
8493 
8494   /* opt_pass methods: */
8495   virtual unsigned int execute (function *) { return split_critical_edges (); }
8496 
8497   opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8498 }; // class pass_split_crit_edges
8499 
8500 } // anon namespace
8501 
8502 gimple_opt_pass *
8503 make_pass_split_crit_edges (gcc::context *ctxt)
8504 {
8505   return new pass_split_crit_edges (ctxt);
8506 }
8507 
8508 
8509 /* Insert COND expression, which is a GIMPLE_COND, after STMT
8510    in basic block BB, splitting the block as appropriate and
8511    creating a new conditionally executed basic block.
8512    Return the created basic block.  */
8513 basic_block
8514 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond)
8515 {
8516   edge fall = split_block (bb, stmt);
8517   gimple_stmt_iterator iter = gsi_last_bb (bb);
8518   basic_block new_bb;
8519 
8520   /* Insert cond statement.  */
8521   gcc_assert (gimple_code (cond) == GIMPLE_COND);
8522   if (gsi_end_p (iter))
8523     gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8524   else
8525     gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8526 
8527   /* Create conditionally executed block.  */
8528   new_bb = create_empty_bb (bb);
8529   make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8530   make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8531 
8532   /* Fix edge for split bb.  */
8533   fall->flags = EDGE_FALSE_VALUE;
8534 
8535   /* Update dominance info.  */
8536   if (dom_info_available_p (CDI_DOMINATORS))
8537     {
8538       set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8539       set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8540     }
8541 
8542   /* Update loop info.  */
8543   if (current_loops)
8544     add_bb_to_loop (new_bb, bb->loop_father);
8545 
8546   return new_bb;
8547 }
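/* Usage sketch (editor's addition; BB, STMT, IDX and BOUND are
   hypothetical values, not from this file):

     gcond *cond = gimple_build_cond (LT_EXPR, idx, bound,
				      NULL_TREE, NULL_TREE);
     basic_block then_bb = insert_cond_bb (bb, stmt, cond);

   Afterwards BB ends with COND; its TRUE edge enters THEN_BB, whose
   single fallthru successor rejoins the block split off after STMT,
   while BB's FALSE edge bypasses THEN_BB entirely.  */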
8548 
8549 /* Build a ternary operation and gimplify it.  Emit code before GSI.
8550    Return the gimple_val holding the result.  */
8551 
8552 tree
8553 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8554 		 tree type, tree a, tree b, tree c)
8555 {
8556   tree ret;
8557   location_t loc = gimple_location (gsi_stmt (*gsi));
8558 
8559   ret = fold_build3_loc (loc, code, type, a, b, c);
8560   STRIP_NOPS (ret);
8561 
8562   return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8563                                    GSI_SAME_STMT);
8564 }
8565 
8566 /* Build a binary operation and gimplify it.  Emit code before GSI.
8567    Return the gimple_val holding the result.  */
8568 
8569 tree
8570 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8571 		 tree type, tree a, tree b)
8572 {
8573   tree ret;
8574 
8575   ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8576   STRIP_NOPS (ret);
8577 
8578   return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8579                                    GSI_SAME_STMT);
8580 }
8581 
8582 /* Build a unary operation and gimplify it.  Emit code before GSI.
8583    Return the gimple_val holding the result.  */
8584 
8585 tree
8586 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8587 		 tree a)
8588 {
8589   tree ret;
8590 
8591   ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8592   STRIP_NOPS (ret);
8593 
8594   return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8595                                    GSI_SAME_STMT);
8596 }
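/* Usage sketch for the gimplify_build helpers (editor's addition; GSI,
   TYPE, A, B and C are hypothetical values of the caller):

     tree sum  = gimplify_build2 (gsi, PLUS_EXPR, type, a, b);
     tree prod = gimplify_build2 (gsi, MULT_EXPR, type, sum, c);

   Each call folds the expression, gimplifies it, emits any statements it
   needs before *GSI, and returns a gimple_val that can be used directly
   as an operand of the next statement.  */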
8597 
8598 
8599 
8600 /* Given a basic block B which ends with a conditional and has
8601    precisely two successors, determine which of the edges is taken if
8602    the conditional is true and which is taken if the conditional is
8603    false.  Set TRUE_EDGE and FALSE_EDGE appropriately.  */
8604 
8605 void
8606 extract_true_false_edges_from_block (basic_block b,
8607 				     edge *true_edge,
8608 				     edge *false_edge)
8609 {
8610   edge e = EDGE_SUCC (b, 0);
8611 
8612   if (e->flags & EDGE_TRUE_VALUE)
8613     {
8614       *true_edge = e;
8615       *false_edge = EDGE_SUCC (b, 1);
8616     }
8617   else
8618     {
8619       *false_edge = e;
8620       *true_edge = EDGE_SUCC (b, 1);
8621     }
8622 }
8623 
8624 
8625 /* From a controlling predicate in the immediate dominator DOM of
8626    PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
8627    predicate evaluates to true and false and store them to
8628    *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
8629    they are non-NULL.  Return true if the edges can be determined,
8630    false otherwise.  */
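/* The shape being checked is (editor's sketch):

	     DOM
	    /T  \F
	   /     \
	 ...      ...
	   \     /
	  PHIBLOCK

   Each of PHIBLOCK's two incoming edges must be dominated by exactly one
   of DOM's outgoing edges, so the phi argument that is selected is fully
   determined by the predicate's outcome.  */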
8631 
8632 bool
8633 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
8634 				     edge *true_controlled_edge,
8635 				     edge *false_controlled_edge)
8636 {
8637   basic_block bb = phiblock;
8638   edge true_edge, false_edge, tem;
8639   edge e0 = NULL, e1 = NULL;
8640 
8641   /* We have to verify that one edge into the PHI node is dominated
8642      by the true edge of the predicate block and the other edge
8643      dominated by the false edge.  This ensures that the PHI argument
8644      we are going to take is completely determined by the path we
8645      take from the predicate block.
8646      We can only use BB dominance checks below if the destination of
8647      the true/false edges are dominated by their edge, thus only
8648      have a single predecessor.  */
8649   extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
8650   tem = EDGE_PRED (bb, 0);
8651   if (tem == true_edge
8652       || (single_pred_p (true_edge->dest)
8653 	  && (tem->src == true_edge->dest
8654 	      || dominated_by_p (CDI_DOMINATORS,
8655 				 tem->src, true_edge->dest))))
8656     e0 = tem;
8657   else if (tem == false_edge
8658 	   || (single_pred_p (false_edge->dest)
8659 	       && (tem->src == false_edge->dest
8660 		   || dominated_by_p (CDI_DOMINATORS,
8661 				      tem->src, false_edge->dest))))
8662     e1 = tem;
8663   else
8664     return false;
8665   tem = EDGE_PRED (bb, 1);
8666   if (tem == true_edge
8667       || (single_pred_p (true_edge->dest)
8668 	  && (tem->src == true_edge->dest
8669 	      || dominated_by_p (CDI_DOMINATORS,
8670 				 tem->src, true_edge->dest))))
8671     e0 = tem;
8672   else if (tem == false_edge
8673 	   || (single_pred_p (false_edge->dest)
8674 	       && (tem->src == false_edge->dest
8675 		   || dominated_by_p (CDI_DOMINATORS,
8676 				      tem->src, false_edge->dest))))
8677     e1 = tem;
8678   else
8679     return false;
8680   if (!e0 || !e1)
8681     return false;
8682 
8683   if (true_controlled_edge)
8684     *true_controlled_edge = e0;
8685   if (false_controlled_edge)
8686     *false_controlled_edge = e1;
8687 
8688   return true;
8689 }
8690 
8691 
8692 
8693 /* Emit return warnings.  */
8694 
8695 namespace {
8696 
8697 const pass_data pass_data_warn_function_return =
8698 {
8699   GIMPLE_PASS, /* type */
8700   "*warn_function_return", /* name */
8701   OPTGROUP_NONE, /* optinfo_flags */
8702   TV_NONE, /* tv_id */
8703   PROP_cfg, /* properties_required */
8704   0, /* properties_provided */
8705   0, /* properties_destroyed */
8706   0, /* todo_flags_start */
8707   0, /* todo_flags_finish */
8708 };
8709 
8710 class pass_warn_function_return : public gimple_opt_pass
8711 {
8712 public:
8713   pass_warn_function_return (gcc::context *ctxt)
8714     : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8715   {}
8716 
8717   /* opt_pass methods: */
8718   virtual unsigned int execute (function *);
8719 
8720 }; // class pass_warn_function_return
8721 
8722 unsigned int
8723 pass_warn_function_return::execute (function *fun)
8724 {
8725   source_location location;
8726   gimple *last;
8727   edge e;
8728   edge_iterator ei;
8729 
8730   if (!targetm.warn_func_return (fun->decl))
8731     return 0;
8732 
8733   /* If we have a path to EXIT, then we do return.  */
8734   if (TREE_THIS_VOLATILE (fun->decl)
8735       && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8736     {
8737       location = UNKNOWN_LOCATION;
8738       FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8739 	{
8740 	  last = last_stmt (e->src);
8741 	  if ((gimple_code (last) == GIMPLE_RETURN
8742 	       || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8743 	      && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8744 	    break;
8745 	}
8746       if (location == UNKNOWN_LOCATION)
8747 	location = cfun->function_end_locus;
8748 
8749 #ifdef notyet
8750       if (warn_missing_noreturn)
8751         warning_at (location, 0, "%<noreturn%> function does return");
8752 #endif
8753     }
8754 
8755   /* If we see "return;" in some basic block, then we do reach the end
8756      without returning a value.  */
8757   else if (warn_return_type
8758 	   && !TREE_NO_WARNING (fun->decl)
8759 	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8760     {
8761       FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8762 	{
8763 	  gimple *last = last_stmt (e->src);
8764 	  greturn *return_stmt = dyn_cast <greturn *> (last);
8765 	  if (return_stmt
8766 	      && gimple_return_retval (return_stmt) == NULL
8767 	      && !gimple_no_warning_p (last))
8768 	    {
8769 	      location = gimple_location (last);
8770 	      if (location == UNKNOWN_LOCATION)
8771 		location = fun->function_end_locus;
8772 	      warning_at (location, OPT_Wreturn_type,
8773 			  "control reaches end of non-void function");
8774 	      TREE_NO_WARNING (fun->decl) = 1;
8775 	      break;
8776 	    }
8777 	}
8778       /* -fsanitize=return turns fallthrough from the end of a non-void
8779 	 function into a __builtin___ubsan_handle_missing_return () call.
8780 	 Recognize those too.  */
8781       basic_block bb;
8782       if (!TREE_NO_WARNING (fun->decl) && (flag_sanitize & SANITIZE_RETURN))
8783 	FOR_EACH_BB_FN (bb, fun)
8784 	  if (EDGE_COUNT (bb->succs) == 0)
8785 	    {
8786 	      gimple *last = last_stmt (bb);
8787 	      const enum built_in_function ubsan_missing_ret
8788 		= BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
8789 	      if (last && gimple_call_builtin_p (last, ubsan_missing_ret))
8790 		{
8791 		  gimple_stmt_iterator gsi = gsi_for_stmt (last);
8792 		  gsi_prev_nondebug (&gsi);
8793 		  gimple *prev = gsi_stmt (gsi);
8794 		  if (prev == NULL)
8795 		    location = UNKNOWN_LOCATION;
8796 		  else
8797 		    location = gimple_location (prev);
8798 		  if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
8799 		    location = fun->function_end_locus;
8800 		  warning_at (location, OPT_Wreturn_type,
8801 			      "control reaches end of non-void function");
8802 		  TREE_NO_WARNING (fun->decl) = 1;
8803 		  break;
8804 		}
8805 	    }
8806     }
8807   return 0;
8808 }
8809 
8810 } // anon namespace
8811 
8812 gimple_opt_pass *
8813 make_pass_warn_function_return (gcc::context *ctxt)
8814 {
8815   return new pass_warn_function_return (ctxt);
8816 }
8817 
8818 /* Walk a gimplified function and warn for functions whose return value is
8819    ignored and attribute((warn_unused_result)) is set.  This is done before
8820    inlining, so we don't have to worry about that.  */
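/* The pattern being diagnosed (editor's addition; 'try_lock' and
   'lock_t' are hypothetical user code, not part of GCC):

     __attribute__ ((warn_unused_result)) int try_lock (lock_t *);

     void
     f (lock_t *l)
     {
       try_lock (l);	// warning: ignoring return value of 'try_lock'
     }

   Only naked GIMPLE_CALLs, i.e. calls without an LHS, are considered,
   as handled in the GIMPLE_CALL case below.  */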
8821 
8822 static void
8823 do_warn_unused_result (gimple_seq seq)
8824 {
8825   tree fdecl, ftype;
8826   gimple_stmt_iterator i;
8827 
8828   for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8829     {
8830       gimple *g = gsi_stmt (i);
8831 
8832       switch (gimple_code (g))
8833 	{
8834 	case GIMPLE_BIND:
8835 	  do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
8836 	  break;
8837 	case GIMPLE_TRY:
8838 	  do_warn_unused_result (gimple_try_eval (g));
8839 	  do_warn_unused_result (gimple_try_cleanup (g));
8840 	  break;
8841 	case GIMPLE_CATCH:
8842 	  do_warn_unused_result (gimple_catch_handler (
8843 				   as_a <gcatch *> (g)));
8844 	  break;
8845 	case GIMPLE_EH_FILTER:
8846 	  do_warn_unused_result (gimple_eh_filter_failure (g));
8847 	  break;
8848 
8849 	case GIMPLE_CALL:
8850 	  if (gimple_call_lhs (g))
8851 	    break;
8852 	  if (gimple_call_internal_p (g))
8853 	    break;
8854 
8855 	  /* This is a naked call, as opposed to a GIMPLE_CALL with an
8856 	     LHS.  All calls whose value is ignored should be
8857 	     represented like this.  Look for the attribute.  */
8858 	  fdecl = gimple_call_fndecl (g);
8859 	  ftype = gimple_call_fntype (g);
8860 
8861 	  if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8862 	    {
8863 	      location_t loc = gimple_location (g);
8864 
8865 	      if (fdecl)
8866 		warning_at (loc, OPT_Wunused_result,
8867 			    "ignoring return value of %qD, "
8868 			    "declared with attribute warn_unused_result",
8869 			    fdecl);
8870 	      else
8871 		warning_at (loc, OPT_Wunused_result,
8872 			    "ignoring return value of function "
8873 			    "declared with attribute warn_unused_result");
8874 	    }
8875 	  break;
8876 
8877 	default:
8878 	  /* Not a container, not a call, or a call whose value is used.  */
8879 	  break;
8880 	}
8881     }
8882 }
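
/* Illustrative only (an assumed example): given

     __attribute__ ((warn_unused_result)) int try_lock (void);
     void f (void) { try_lock (); }

   the call gimplifies with no LHS and is diagnosed above.  A cast to void
   does not create a GIMPLE LHS, so "(void) try_lock ();" is diagnosed as
   well.  */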
8883 
8884 namespace {
8885 
8886 const pass_data pass_data_warn_unused_result =
8887 {
8888   GIMPLE_PASS, /* type */
8889   "*warn_unused_result", /* name */
8890   OPTGROUP_NONE, /* optinfo_flags */
8891   TV_NONE, /* tv_id */
8892   PROP_gimple_any, /* properties_required */
8893   0, /* properties_provided */
8894   0, /* properties_destroyed */
8895   0, /* todo_flags_start */
8896   0, /* todo_flags_finish */
8897 };
8898 
8899 class pass_warn_unused_result : public gimple_opt_pass
8900 {
8901 public:
8902   pass_warn_unused_result (gcc::context *ctxt)
8903     : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8904   {}
8905 
8906   /* opt_pass methods: */
8907   virtual bool gate (function *) { return flag_warn_unused_result; }
8908   virtual unsigned int execute (function *)
8909     {
8910       do_warn_unused_result (gimple_body (current_function_decl));
8911       return 0;
8912     }
8913 
8914 }; // class pass_warn_unused_result
8915 
8916 } // anon namespace
8917 
8918 gimple_opt_pass *
8919 make_pass_warn_unused_result (gcc::context *ctxt)
8920 {
8921   return new pass_warn_unused_result (ctxt);
8922 }
8923 
8924 /* IPA passes, compilation of earlier functions, or inlining
8925    may have changed some properties, such as marking functions nothrow,
8926    pure, const or noreturn.
8927    Remove redundant edges and basic blocks, and create new ones if necessary.
8928 
8929    This pass cannot be run as a standalone pass from the pass manager,
8930    because between inlining and this fixup verify_flow_info would fail.  */
8931 
8932 unsigned int
8933 execute_fixup_cfg (void)
8934 {
8935   basic_block bb;
8936   gimple_stmt_iterator gsi;
8937   int todo = 0;
8938   gcov_type count_scale;
8939   edge e;
8940   edge_iterator ei;
8941   cgraph_node *node = cgraph_node::get (current_function_decl);
8942 
8943   count_scale
8944     = GCOV_COMPUTE_SCALE (node->count, ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
8945 
8946   ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
8947   EXIT_BLOCK_PTR_FOR_FN (cfun)->count
8948     = apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count, count_scale);
8949 
8950   FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
8951     e->count = apply_scale (e->count, count_scale);
8952 
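  /* A worked example of the scaling above, assuming the usual definitions
     in basic-block.h (GCOV_COMPUTE_SCALE (num, den) is a rounding
     num * REG_BR_PROB_BASE / den and apply_scale (count, scale) is
     count * scale / REG_BR_PROB_BASE): if the IPA profile records
     node->count == 500 while the local entry block count is 1000,
     count_scale becomes REG_BR_PROB_BASE / 2 and every count below is
     halved.  */
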
8953   FOR_EACH_BB_FN (bb, cfun)
8954     {
8955       bb->count = apply_scale (bb->count, count_scale);
8956       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
8957 	{
8958 	  gimple *stmt = gsi_stmt (gsi);
8959 	  tree decl = is_gimple_call (stmt)
8960 		      ? gimple_call_fndecl (stmt)
8961 		      : NULL;
8962 	  if (decl)
8963 	    {
8964 	      int flags = gimple_call_flags (stmt);
8965 	      if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
8966 		{
8967 		  if (gimple_purge_dead_abnormal_call_edges (bb))
8968 		    todo |= TODO_cleanup_cfg;
8969 
8970 		  if (gimple_in_ssa_p (cfun))
8971 		    {
8972 		      todo |= TODO_update_ssa | TODO_cleanup_cfg;
8973 		      update_stmt (stmt);
8974 		    }
8975 		}
8976 
8977 	      if ((flags & ECF_NORETURN)
8978 		  && fixup_noreturn_call (stmt))
8979 		todo |= TODO_cleanup_cfg;
8980 	    }
8981 
8982 	  /* Remove stores to variables we marked write-only.
8983 	     Keep the access when the store has a side effect, i.e. when the
8984 	     source is volatile.  */
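	  /* Illustrative only (an assumed example): for a static variable
	     that IPA determined is never read,

	       static int cache;
	       void set (int v) { cache = v; }

	     the store is deleted here, while a store whose source is a
	     volatile read keeps its side effect and is left alone.  */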
8985 	  if (gimple_store_p (stmt)
8986 	      && !gimple_has_side_effects (stmt))
8987 	    {
8988 	      tree lhs = get_base_address (gimple_get_lhs (stmt));
8989 
8990 	      if (TREE_CODE (lhs) == VAR_DECL
8991 		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8992 		  && varpool_node::get (lhs)->writeonly)
8993 		{
8994 		  unlink_stmt_vdef (stmt);
8995 		  gsi_remove (&gsi, true);
8996 		  release_defs (stmt);
8997 		  todo |= TODO_update_ssa | TODO_cleanup_cfg;
8998 		  continue;
8999 		}
9000 	    }
9001 	  /* For calls we can simply remove the LHS when the variable it
9002 	     stores to is known to be write-only.  */
9003 	  if (is_gimple_call (stmt)
9004 	      && gimple_get_lhs (stmt))
9005 	    {
9006 	      tree lhs = get_base_address (gimple_get_lhs (stmt));
9007 
9008 	      if (TREE_CODE (lhs) == VAR_DECL
9009 		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9010 		  && varpool_node::get (lhs)->writeonly)
9011 		{
9012 		  gimple_call_set_lhs (stmt, NULL);
9013 		  update_stmt (stmt);
9014 		  todo |= TODO_update_ssa | TODO_cleanup_cfg;
9015 		}
9016 	    }
9017 
9018 	  if (maybe_clean_eh_stmt (stmt)
9019 	      && gimple_purge_dead_eh_edges (bb))
9020 	    todo |= TODO_cleanup_cfg;
9021 	  gsi_next (&gsi);
9022 	}
9023 
9024       FOR_EACH_EDGE (e, ei, bb->succs)
9025         e->count = apply_scale (e->count, count_scale);
9026 
9027       /* If we have a basic block with no successors that does not
9028 	 end with a control statement or a noreturn call, end it with
9029 	 a call to __builtin_unreachable ().  This situation can occur
9030 	 when inlining a noreturn call that does in fact return.  */
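      /* Illustrative only (an assumed example): after inlining

	   __attribute__ ((noreturn)) void die (void) { }

	 at a call site, the inlined body falls off its end into a block
	 with no successors and no control statement, and the code below
	 terminates that block with __builtin_unreachable ().  */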
9031       if (EDGE_COUNT (bb->succs) == 0)
9032 	{
9033 	  gimple *stmt = last_stmt (bb);
9034 	  if (!stmt
9035 	      || (!is_ctrl_stmt (stmt)
9036 		  && (!is_gimple_call (stmt)
9037 		      || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
9038 	    {
9039 	      if (stmt && is_gimple_call (stmt))
9040 		gimple_call_set_ctrl_altering (stmt, false);
9041 	      tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9042 	      stmt = gimple_build_call (fndecl, 0);
9043 	      gimple_stmt_iterator gsi = gsi_last_bb (bb);
9044 	      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9045 	      if (!cfun->after_inlining)
9046 		{
9047 		  gcall *call_stmt = dyn_cast <gcall *> (stmt);
9048 		  int freq
9049 		    = compute_call_stmt_bb_frequency (current_function_decl,
9050 						      bb);
9051 		  node->create_edge (cgraph_node::get_create (fndecl),
9052 				     call_stmt, bb->count, freq);
9053 		}
9054 	    }
9055 	}
9056     }
9057   if (count_scale != REG_BR_PROB_BASE)
9058     compute_function_frequency ();
9059 
9060   if (current_loops
9061       && (todo & TODO_cleanup_cfg))
9062     loops_state_set (LOOPS_NEED_FIXUP);
9063 
9064   return todo;
9065 }
9066 
9067 namespace {
9068 
9069 const pass_data pass_data_fixup_cfg =
9070 {
9071   GIMPLE_PASS, /* type */
9072   "fixup_cfg", /* name */
9073   OPTGROUP_NONE, /* optinfo_flags */
9074   TV_NONE, /* tv_id */
9075   PROP_cfg, /* properties_required */
9076   0, /* properties_provided */
9077   0, /* properties_destroyed */
9078   0, /* todo_flags_start */
9079   0, /* todo_flags_finish */
9080 };
9081 
9082 class pass_fixup_cfg : public gimple_opt_pass
9083 {
9084 public:
9085   pass_fixup_cfg (gcc::context *ctxt)
9086     : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
9087   {}
9088 
9089   /* opt_pass methods: */
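  /* fixup_cfg is scheduled more than once in passes.def, so the pass
     provides a clone method.  */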
9090   opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
9091   virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
9092 
9093 }; // class pass_fixup_cfg
9094 
9095 } // anon namespace
9096 
9097 gimple_opt_pass *
9098 make_pass_fixup_cfg (gcc::context *ctxt)
9099 {
9100   return new pass_fixup_cfg (ctxt);
9101 }
9102 
9103 /* Garbage collection support for edge_def.  */
9104 
9105 extern void gt_ggc_mx (tree&);
9106 extern void gt_ggc_mx (gimple *&);
9107 extern void gt_ggc_mx (rtx&);
9108 extern void gt_ggc_mx (basic_block&);
9109 
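/* E->insns is a union of a gimple sequence and an RTL insn chain, so the
   edge markers below dispatch on current_ir_type ().  This helper marks
   the RTL arm by marking the underlying rtx_def.  */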
9110 static void
9111 gt_ggc_mx (rtx_insn *& x)
9112 {
9113   if (x)
9114     gt_ggc_mx_rtx_def ((void *) x);
9115 }
9116 
9117 void
9118 gt_ggc_mx (edge_def *e)
9119 {
9120   tree block = LOCATION_BLOCK (e->goto_locus);
9121   gt_ggc_mx (e->src);
9122   gt_ggc_mx (e->dest);
9123   if (current_ir_type () == IR_GIMPLE)
9124     gt_ggc_mx (e->insns.g);
9125   else
9126     gt_ggc_mx (e->insns.r);
9127   gt_ggc_mx (block);
9128 }
9129 
9130 /* PCH support for edge_def.  */
9131 
9132 extern void gt_pch_nx (tree&);
9133 extern void gt_pch_nx (gimple *&);
9134 extern void gt_pch_nx (rtx&);
9135 extern void gt_pch_nx (basic_block&);
9136 
9137 static void
9138 gt_pch_nx (rtx_insn *& x)
9139 {
9140   if (x)
9141     gt_pch_nx_rtx_def ((void *) x);
9142 }
9143 
9144 void
9145 gt_pch_nx (edge_def *e)
9146 {
9147   tree block = LOCATION_BLOCK (e->goto_locus);
9148   gt_pch_nx (e->src);
9149   gt_pch_nx (e->dest);
9150   if (current_ir_type () == IR_GIMPLE)
9151     gt_pch_nx (e->insns.g);
9152   else
9153     gt_pch_nx (e->insns.r);
9154   gt_pch_nx (block);
9155 }
9156 
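/* Pointer-walking variant used when writing a PCH: apply OP, with COOKIE,
   to each pointer field of E so that addresses can be relocated.  BLOCK is
   a local copy of the block extracted from E->goto_locus.  */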
9157 void
9158 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9159 {
9160   tree block = LOCATION_BLOCK (e->goto_locus);
9161   op (&(e->src), cookie);
9162   op (&(e->dest), cookie);
9163   if (current_ir_type () == IR_GIMPLE)
9164     op (&(e->insns.g), cookie);
9165   else
9166     op (&(e->insns.r), cookie);
9167   op (&(block), cookie);
9168 }
9169