xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/tree-cfg.c (revision 796c32c94f6e154afc9de0f63da35c91bb739b45)
1 /* Control flow functions for trees.
2    Copyright (C) 2001-2015 Free Software Foundation, Inc.
3    Contributed by Diego Novillo <dnovillo@redhat.com>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "hash-table.h"
25 #include "hash-map.h"
26 #include "tm.h"
27 #include "hash-set.h"
28 #include "machmode.h"
29 #include "vec.h"
30 #include "double-int.h"
31 #include "input.h"
32 #include "alias.h"
33 #include "symtab.h"
34 #include "wide-int.h"
35 #include "inchash.h"
36 #include "tree.h"
37 #include "fold-const.h"
38 #include "trans-mem.h"
39 #include "stor-layout.h"
40 #include "print-tree.h"
41 #include "tm_p.h"
42 #include "predict.h"
43 #include "hard-reg-set.h"
44 #include "function.h"
45 #include "dominance.h"
46 #include "cfg.h"
47 #include "cfganal.h"
48 #include "basic-block.h"
49 #include "flags.h"
50 #include "gimple-pretty-print.h"
51 #include "tree-ssa-alias.h"
52 #include "internal-fn.h"
53 #include "gimple-fold.h"
54 #include "tree-eh.h"
55 #include "gimple-expr.h"
56 #include "is-a.h"
57 #include "gimple.h"
58 #include "gimple-iterator.h"
59 #include "gimplify-me.h"
60 #include "gimple-walk.h"
61 #include "gimple-ssa.h"
62 #include "plugin-api.h"
63 #include "ipa-ref.h"
64 #include "cgraph.h"
65 #include "tree-cfg.h"
66 #include "tree-phinodes.h"
67 #include "ssa-iterators.h"
68 #include "stringpool.h"
69 #include "tree-ssanames.h"
70 #include "tree-ssa-loop-manip.h"
71 #include "tree-ssa-loop-niter.h"
72 #include "tree-into-ssa.h"
73 #include "hashtab.h"
74 #include "rtl.h"
75 #include "statistics.h"
76 #include "real.h"
77 #include "fixed-value.h"
78 #include "insn-config.h"
79 #include "expmed.h"
80 #include "dojump.h"
81 #include "explow.h"
82 #include "calls.h"
83 #include "emit-rtl.h"
84 #include "varasm.h"
85 #include "stmt.h"
86 #include "expr.h"
87 #include "tree-dfa.h"
88 #include "tree-ssa.h"
89 #include "tree-dump.h"
90 #include "tree-pass.h"
91 #include "diagnostic-core.h"
92 #include "except.h"
93 #include "cfgloop.h"
94 #include "tree-ssa-propagate.h"
95 #include "value-prof.h"
96 #include "tree-inline.h"
97 #include "target.h"
98 #include "tree-ssa-live.h"
99 #include "omp-low.h"
100 #include "tree-cfgcleanup.h"
101 #include "wide-int-print.h"
102 
103 /* This file contains functions for building the Control Flow Graph (CFG)
104    for a function tree.  */
105 
/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  /* Number of labels merged into their block's first label; counted
     while building the CFG.  */
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  /* The source location this entry describes (keyed by line only).  */
  location_t locus;
  /* Last discriminator value handed out for that line.  */
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
{
  typedef locus_discrim_map value_type;
  typedef locus_discrim_map compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};
156 
157 /* Trivial hash function for a location_t.  ITEM is a pointer to
158    a hash table entry that maps a location_t to a discriminator.  */
159 
160 inline hashval_t
161 locus_discrim_hasher::hash (const value_type *item)
162 {
163   return LOCATION_LINE (item->locus);
164 }
165 
166 /* Equality function for the locus-to-discriminator map.  A and B
167    point to the two hash table entries to compare.  */
168 
169 inline bool
170 locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
171 {
172   return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
173 }
174 
/* Per-line discriminator table; created and destroyed inside
   build_gimple_cfg, NULL outside CFG construction.  */
static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);
206 
/* Set up the data structures for an empty CFG in function FN: reset
   the block counters, allocate the basic-block and label-to-block
   arrays, and install the fixed ENTRY/EXIT blocks linked directly to
   each other.  */

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  /* With no real blocks yet, ENTRY chains straight to EXIT.  */
  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}
232 
/* Convenience wrapper: initialize an empty CFG for the current
   function (cfun).  */

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}
238 
239 /*---------------------------------------------------------------------------
240 			      Create basic blocks
241 ---------------------------------------------------------------------------*/
242 
/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  The discriminator table only
     lives for the duration of edge creation and discriminator
     assignment.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}
284 
/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple stmt = gsi_stmt (gsi);

  /* Only process blocks ending in a condition; the annotations are
     expected right before it.  */
  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  /* Walk backwards from the condition, consuming consecutive
     IFN_ANNOTATE calls; stop at the first other statement.  */
  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      /* Arg 1 encodes the annotation kind; transfer it onto LOOP.  */
      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Replace the annotation call by a plain copy of its first
	 argument so the SSA chain stays intact.  */
      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
328 
/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.
     Any annotation still present here was not attached to a recognized
     loop condition, so warn and strip it.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  /* Replace the call by a copy of its first argument, as above.  */
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}
380 
381 
382 static unsigned int
383 execute_build_cfg (void)
384 {
385   gimple_seq body = gimple_body (current_function_decl);
386 
387   build_gimple_cfg (body);
388   gimple_set_body (current_function_decl, NULL);
389   if (dump_file && (dump_flags & TDF_DETAILS))
390     {
391       fprintf (dump_file, "Scope blocks:\n");
392       dump_scope_blocks (dump_file, dump_flags);
393     }
394   cleanup_tree_cfg ();
395   loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
396   replace_loop_annotate ();
397   return 0;
398 }
399 
namespace {

/* Pass descriptor for the CFG construction pass.  */
const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Opt-pass wrapper around execute_build_cfg.  */
class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}
434 
435 
436 /* Return true if T is a computed goto.  */
437 
438 bool
439 computed_goto_p (gimple t)
440 {
441   return (gimple_code (t) == GIMPLE_GOTO
442 	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
443 }
444 
/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      /* Find the successor that is NOT E's destination.  */
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	{
	  gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
	  gimple stmt;

	  if (gsi_end_p (gsi))
	    return false;
	  stmt = gsi_stmt (gsi);
	  /* Skip debug statements and clobbers; they don't disqualify
	     the block from being "just __builtin_unreachable ()".  */
	  while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
	    {
	      gsi_next (&gsi);
	      if (gsi_end_p (gsi))
		return false;
	      stmt = gsi_stmt (gsi);
	    }
	  return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
	}
    }
  return false;
}
488 
489 
490 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
491    could alter control flow except via eh. We initialize the flag at
492    CFG build time and only ever clear it later.  */
493 
494 static void
495 gimple_call_initialize_ctrl_altering (gimple stmt)
496 {
497   int flags = gimple_call_flags (stmt);
498 
499   /* A call alters control flow if it can make an abnormal goto.  */
500   if (call_can_make_abnormal_goto (stmt)
501       /* A call also alters control flow if it does not return.  */
502       || flags & ECF_NORETURN
503       /* TM ending statements have backedges out of the transaction.
504 	 Return true so we split the basic block containing them.
505 	 Note that the TM_BUILTIN test is merely an optimization.  */
506       || ((flags & ECF_TM_BUILTIN)
507 	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
508       /* BUILT_IN_RETURN call is same as return statement.  */
509       || gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
510     gimple_call_set_ctrl_altering (stmt, true);
511   else
512     gimple_call_set_ctrl_altering (stmt, false);
513 }
514 
515 
/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, NULL, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      /* Redirect the stmt's result into TMP and copy TMP into
		 the original LHS afterwards.  */
	      gimple s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}
583 
584 
/* Create and return a new empty basic block after bb AFTER.  H, if
   non-NULL, is the gimple_seq to attach; E must be NULL (this hook
   never splits an edge).  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  Grows by roughly 25%.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}
624 
625 
626 /*---------------------------------------------------------------------------
627 				 Edge creation
628 ---------------------------------------------------------------------------*/
629 
/* Fold COND_EXPR_COND of each COND_EXPR.  Conditions that fold to a
   constant are rewritten to an unconditional true/false condition.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
	{
	  gcond *cond_stmt = as_a <gcond *> (stmt);
	  location_t loc = gimple_location (stmt);
	  tree cond;
	  bool zerop, onep;

	  /* Defer overflow warnings so we only emit them when the
	     fold result is actually used to change the condition.  */
	  fold_defer_overflow_warnings ();
	  cond = fold_binary_loc (loc, gimple_cond_code (cond_stmt),
				  boolean_type_node,
				  gimple_cond_lhs (cond_stmt),
				  gimple_cond_rhs (cond_stmt));
	  if (cond)
	    {
	      zerop = integer_zerop (cond);
	      onep = integer_onep (cond);
	    }
	  else
	    zerop = onep = false;

	  fold_undefer_overflow_warnings (zerop || onep,
					  stmt,
					  WARN_STRICT_OVERFLOW_CONDITIONAL);
	  if (zerop)
	    gimple_cond_make_false (cond_stmt);
	  else if (onep)
	    gimple_cond_make_true (cond_stmt);
	}
    }
}
671 
/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   that dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    /* Look for edges that are abnormal but not EH.  */
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple g = gsi_stmt (gsi);
	if (g
	    && is_gimple_call (g)
	    && gimple_call_internal_p (g)
	    && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
	  return e->dest;
      }
  return NULL;
}
696 
/* Helper function for make_edges.  Create a basic block with an
   ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.
   BB_TO_OMP_IDX, if non-NULL, maps block indices to OpenMP region
   indices so each SESE region gets its own dispatcher pair.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  /* DISPATCHER_BBS holds pairs: slot 0 = call dispatcher, slot 1 =
     computed-goto dispatcher; one pair per OMP region when
     BB_TO_OMP_IDX is used.  */
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      /* The original goto is replaced by a fallthru into the
		 factored dispatcher block.  */
	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  /* ARG tells the dispatcher whether it sits inside a nested
	     OMP region.  */
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						 1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
809 
/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  /* Blocks ending in a computed goto / an abnormal-goto-capable call;
     collected here, factored through dispatchers below.  */
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      if (last)
	{
	  enum gimple_code code = gimple_code (last);
	  switch (code)
	    {
	    case GIMPLE_GOTO:
	      if (make_goto_expr_edges (bb))
		ab_edge_goto.safe_push (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_RETURN:
	      {
		edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
		e->goto_locus = gimple_location (last);
		fallthru = false;
	      }
	      break;
	    case GIMPLE_COND:
	      make_cond_expr_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_SWITCH:
	      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
	      fallthru = false;
	      break;
	    case GIMPLE_RESX:
	      make_eh_edges (last);
	      fallthru = false;
	      break;
	    case GIMPLE_EH_DISPATCH:
	      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
	      break;

	    case GIMPLE_CALL:
	      /* If this function receives a nonlocal goto, then we need to
		 make edges from this call site to all the nonlocal goto
		 handlers.  */
	      if (stmt_can_make_abnormal_goto (last))
		ab_edge_call.safe_push (bb);

	      /* If this statement has reachable exception handlers, then
		 create abnormal edges to them.  */
	      make_eh_edges (last);

	      /* BUILTIN_RETURN is really a return statement.  */
	      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
		{
		  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
		  fallthru = false;
		}
	      /* Some calls are known not to return.  */
	      else
	        fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
	      break;

	    case GIMPLE_ASSIGN:
	       /* A GIMPLE_ASSIGN may throw internally and thus be considered
		  control-altering. */
	      if (is_ctrl_altering_stmt (last))
		make_eh_edges (last);
	      fallthru = true;
	      break;

	    case GIMPLE_ASM:
	      make_gimple_asm_edges (bb);
	      fallthru = true;
	      break;

	    CASE_GIMPLE_OMP:
	      fallthru = make_gimple_omp_edges (bb, &cur_region,
						&cur_omp_region_idx);
	      /* Allocate the OMP-region index map lazily, on first sight
		 of an OMP region.  */
	      if (cur_region && bb_to_omp_idx == NULL)
		bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
	      break;

	    case GIMPLE_TRANSACTION:
	      {
		tree abort_label
		  = gimple_transaction_label (as_a <gtransaction *> (last));
		if (abort_label)
		  make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
		fallthru = true;
	      }
	      break;

	    default:
	      gcc_assert (!stmt_ends_bb_p (last));
	      fallthru = true;
	    }
	}
      else
	fallthru = true;

      if (fallthru)
	make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      /* With OMP regions we need a dispatcher pair per region.  */
      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}
1010 
1011 /* Find the next available discriminator value for LOCUS.  The
1012    discriminator distinguishes among several basic blocks that
1013    share a common locus, allowing for more accurate sample-based
1014    profiling.  */
1015 
1016 static int
1017 next_discriminator_for_locus (location_t locus)
1018 {
1019   struct locus_discrim_map item;
1020   struct locus_discrim_map **slot;
1021 
1022   item.locus = locus;
1023   item.discriminator = 0;
1024   slot = discriminator_per_locus->find_slot_with_hash (
1025       &item, LOCATION_LINE (locus), INSERT);
1026   gcc_assert (slot);
1027   if (*slot == HTAB_EMPTY_ENTRY)
1028     {
1029       *slot = XNEW (struct locus_discrim_map);
1030       gcc_assert (*slot);
1031       (*slot)->locus = locus;
1032       (*slot)->discriminator = 0;
1033     }
1034   (*slot)->discriminator++;
1035   return (*slot)->discriminator;
1036 }
1037 
1038 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */
1039 
1040 static bool
1041 same_line_p (location_t locus1, location_t locus2)
1042 {
1043   expanded_location from, to;
1044 
1045   if (locus1 == locus2)
1046     return true;
1047 
1048   from = expand_location (locus1);
1049   to = expand_location (locus2);
1050 
1051   if (from.line != to.line)
1052     return false;
1053   if (from.file == to.file)
1054     return true;
1055   return (from.file != NULL
1056           && to.file != NULL
1057           && filename_cmp (from.file, to.file) == 0);
1058 }
1059 
1060 /* Assign discriminators to each basic block.  */
1061 
1062 static void
1063 assign_discriminators (void)
1064 {
1065   basic_block bb;
1066 
1067   FOR_EACH_BB_FN (bb, cfun)
1068     {
1069       edge e;
1070       edge_iterator ei;
1071       gimple last = last_stmt (bb);
1072       location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
1073 
1074       if (locus == UNKNOWN_LOCATION)
1075 	continue;
1076 
1077       FOR_EACH_EDGE (e, ei, bb->succs)
1078 	{
1079 	  gimple first = first_non_label_stmt (e->dest);
1080 	  gimple last = last_stmt (e->dest);
1081 	  if ((first && same_line_p (locus, gimple_location (first)))
1082 	      || (last && same_line_p (locus, gimple_location (last))))
1083 	    {
1084 	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
1085 		bb->discriminator = next_discriminator_for_locus (locus);
1086 	      else
1087 		e->dest->discriminator = next_discriminator_for_locus (locus);
1088 	    }
1089 	}
1090     }
1091 }
1092 
/* Create the edges for a GIMPLE_COND starting at block BB.  BB must end
   in a GIMPLE_COND whose true/false labels are still set; once the edges
   exist the labels are cleared, as the CFG now carries that information.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  /* Unlike the first call, this one can return NULL: make_edge refuses
     to create a duplicate edge, which happens when THEN_BB == ELSE_BB.  */
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
1125 
1126 
1127 /* Called for each element in the hash table (P) as we delete the
1128    edge to cases hash table.
1129 
1130    Clear all the TREE_CHAINs to prevent problems with copying of
1131    SWITCH_EXPRs and structure sharing rules, then free the hash table
1132    element.  */
1133 
1134 bool
1135 edge_to_cases_cleanup (edge const &, tree const &value, void *)
1136 {
1137   tree t, next;
1138 
1139   for (t = value; t; t = next)
1140     {
1141       next = CASE_CHAIN (t);
1142       CASE_CHAIN (t) = NULL;
1143     }
1144 
1145   return true;
1146 }
1147 
1148 /* Start recording information mapping edges to case labels.  */
1149 
1150 void
1151 start_recording_case_labels (void)
1152 {
1153   gcc_assert (edge_to_cases == NULL);
1154   edge_to_cases = new hash_map<edge, tree>;
1155   touched_switch_bbs = BITMAP_ALLOC (NULL);
1156 }
1157 
1158 /* Return nonzero if we are recording information for case labels.  */
1159 
1160 static bool
1161 recording_case_labels_p (void)
1162 {
1163   return (edge_to_cases != NULL);
1164 }
1165 
1166 /* Stop recording information mapping edges to case labels and
1167    remove any information we have recorded.  */
1168 void
1169 end_recording_case_labels (void)
1170 {
1171   bitmap_iterator bi;
1172   unsigned i;
1173   edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
1174   delete edge_to_cases;
1175   edge_to_cases = NULL;
1176   EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
1177     {
1178       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
1179       if (bb)
1180 	{
1181 	  gimple stmt = last_stmt (bb);
1182 	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1183 	    group_case_labels_stmt (as_a <gswitch *> (stmt));
1184 	}
1185     }
1186   BITMAP_FREE (touched_switch_bbs);
1187 }
1188 
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  Note ELT is pushed on the front of the chain: S is
	 a reference into the map, so assigning to it updates the stored
	 head in place.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  /* Every outgoing edge of E->src now has an entry, so this lookup
     cannot fail.  */
  return *edge_to_cases->get (e);
}
1230 
1231 /* Create the edges for a GIMPLE_SWITCH starting at block BB.  */
1232 
1233 static void
1234 make_gimple_switch_edges (gswitch *entry, basic_block bb)
1235 {
1236   size_t i, n;
1237 
1238   n = gimple_switch_num_labels (entry);
1239 
1240   for (i = 0; i < n; ++i)
1241     {
1242       tree lab = CASE_LABEL (gimple_switch_label (entry, i));
1243       basic_block label_bb = label_to_block (lab);
1244       make_edge (bb, label_bb, 0);
1245     }
1246 }
1247 
1248 
/* Return the basic block holding label DEST, or NULL if DEST's uid is
   outside IFUN's label-to-block map.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      /* Re-read the uid: building the label statement assigns one.  */
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}
1273 
1274 /* Create edges for a goto statement at block BB.  Returns true
1275    if abnormal edges should be created.  */
1276 
1277 static bool
1278 make_goto_expr_edges (basic_block bb)
1279 {
1280   gimple_stmt_iterator last = gsi_last_bb (bb);
1281   gimple goto_t = gsi_stmt (last);
1282 
1283   /* A simple GOTO creates normal edges.  */
1284   if (simple_goto_p (goto_t))
1285     {
1286       tree dest = gimple_goto_dest (goto_t);
1287       basic_block label_bb = label_to_block (dest);
1288       edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1289       e->goto_locus = gimple_location (goto_t);
1290       gsi_remove (&last, true);
1291       return false;
1292     }
1293 
1294   /* A computed GOTO creates abnormal edges.  */
1295   return true;
1296 }
1297 
1298 /* Create edges for an asm statement with labels at block BB.  */
1299 
1300 static void
1301 make_gimple_asm_edges (basic_block bb)
1302 {
1303   gasm *stmt = as_a <gasm *> (last_stmt (bb));
1304   int i, n = gimple_asm_nlabels (stmt);
1305 
1306   for (i = 0; i < n; ++i)
1307     {
1308       tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1309       basic_block label_bb = label_to_block (label);
1310       make_edge (bb, label_bb, 0);
1311     }
1312 }
1313 
1314 /*---------------------------------------------------------------------------
1315 			       Flowgraph analysis
1316 ---------------------------------------------------------------------------*/
1317 
1318 /* Cleanup useless labels in basic blocks.  This is something we wish
1319    to do early because it allows us to group case labels before creating
1320    the edges for the CFG, and it speeds up block statement iterators in
1321    all passes later on.
1322    We rerun this pass after CFG is created, to get rid of the labels that
1323    are no longer referenced.  After then we do not run it any more, since
1324    (almost) no new labels should be created.  */
1325 
/* A map from basic block index to the leading label of that block.
   Allocated and freed by cleanup_dead_labels; valid only while that
   pass runs.  */
static struct label_record
{
  /* The label.  NULL_TREE until a label is chosen for the block.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;
1335 
1336 /* Given LABEL return the first label in the same basic block.  */
1337 
1338 static tree
1339 main_block_label (tree label)
1340 {
1341   basic_block bb = label_to_block (label);
1342   tree main_label = label_for_bb[bb->index].label;
1343 
1344   /* label_to_block possibly inserted undefined label into the chain.  */
1345   if (!main_label)
1346     {
1347       label_for_bb[bb->index].label = label;
1348       main_label = label;
1349     }
1350 
1351   label_for_bb[bb->index].used = true;
1352   return main_label;
1353 }
1354 
/* Clean up redundant labels within the exception tree: redirect each
   landing-pad, catch, and allowed-exceptions label to the canonical
   (main) label of its block.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  /* Canonicalize each landing pad's post-landing-pad label, moving the
     landing-pad number to the replacement label.  */
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  /* Canonicalize the labels held directly by EH regions.  Cleanup and
     must-not-throw regions carry no labels of their own.  */
  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}
1405 
1406 
1407 /* Cleanup redundant labels.  This is a three-step process:
1408      1) Find the leading label for each block.
1409      2) Redirect all references to labels to the leading labels.
1410      3) Cleanup all useless labels.  */
1411 
1412 void
1413 cleanup_dead_labels (void)
1414 {
1415   basic_block bb;
1416   label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));
1417 
1418   /* Find a suitable label for each block.  We use the first user-defined
1419      label if there is one, or otherwise just the first label we see.  */
1420   FOR_EACH_BB_FN (bb, cfun)
1421     {
1422       gimple_stmt_iterator i;
1423 
1424       for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1425 	{
1426 	  tree label;
1427 	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1428 
1429 	  if (!label_stmt)
1430 	    break;
1431 
1432 	  label = gimple_label_label (label_stmt);
1433 
1434 	  /* If we have not yet seen a label for the current block,
1435 	     remember this one and see if there are more labels.  */
1436 	  if (!label_for_bb[bb->index].label)
1437 	    {
1438 	      label_for_bb[bb->index].label = label;
1439 	      continue;
1440 	    }
1441 
1442 	  /* If we did see a label for the current block already, but it
1443 	     is an artificially created label, replace it if the current
1444 	     label is a user defined label.  */
1445 	  if (!DECL_ARTIFICIAL (label)
1446 	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
1447 	    {
1448 	      label_for_bb[bb->index].label = label;
1449 	      break;
1450 	    }
1451 	}
1452     }
1453 
1454   /* Now redirect all jumps/branches to the selected label.
1455      First do so for each block ending in a control statement.  */
1456   FOR_EACH_BB_FN (bb, cfun)
1457     {
1458       gimple stmt = last_stmt (bb);
1459       tree label, new_label;
1460 
1461       if (!stmt)
1462 	continue;
1463 
1464       switch (gimple_code (stmt))
1465 	{
1466 	case GIMPLE_COND:
1467 	  {
1468 	    gcond *cond_stmt = as_a <gcond *> (stmt);
1469 	    label = gimple_cond_true_label (cond_stmt);
1470 	    if (label)
1471 	      {
1472 		new_label = main_block_label (label);
1473 		if (new_label != label)
1474 		  gimple_cond_set_true_label (cond_stmt, new_label);
1475 	      }
1476 
1477 	    label = gimple_cond_false_label (cond_stmt);
1478 	    if (label)
1479 	      {
1480 		new_label = main_block_label (label);
1481 		if (new_label != label)
1482 		  gimple_cond_set_false_label (cond_stmt, new_label);
1483 	      }
1484 	  }
1485 	  break;
1486 
1487 	case GIMPLE_SWITCH:
1488 	  {
1489 	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
1490 	    size_t i, n = gimple_switch_num_labels (switch_stmt);
1491 
1492 	    /* Replace all destination labels.  */
1493 	    for (i = 0; i < n; ++i)
1494 	      {
1495 		tree case_label = gimple_switch_label (switch_stmt, i);
1496 		label = CASE_LABEL (case_label);
1497 		new_label = main_block_label (label);
1498 		if (new_label != label)
1499 		  CASE_LABEL (case_label) = new_label;
1500 	      }
1501 	    break;
1502 	  }
1503 
1504 	case GIMPLE_ASM:
1505 	  {
1506 	    gasm *asm_stmt = as_a <gasm *> (stmt);
1507 	    int i, n = gimple_asm_nlabels (asm_stmt);
1508 
1509 	    for (i = 0; i < n; ++i)
1510 	      {
1511 		tree cons = gimple_asm_label_op (asm_stmt, i);
1512 		tree label = main_block_label (TREE_VALUE (cons));
1513 		TREE_VALUE (cons) = label;
1514 	      }
1515 	    break;
1516 	  }
1517 
1518 	/* We have to handle gotos until they're removed, and we don't
1519 	   remove them until after we've created the CFG edges.  */
1520 	case GIMPLE_GOTO:
1521 	  if (!computed_goto_p (stmt))
1522 	    {
1523 	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
1524 	      label = gimple_goto_dest (goto_stmt);
1525 	      new_label = main_block_label (label);
1526 	      if (new_label != label)
1527 		gimple_goto_set_dest (goto_stmt, new_label);
1528 	    }
1529 	  break;
1530 
1531 	case GIMPLE_TRANSACTION:
1532 	  {
1533 	    gtransaction *trans_stmt = as_a <gtransaction *> (stmt);
1534 	    tree label = gimple_transaction_label (trans_stmt);
1535 	    if (label)
1536 	      {
1537 		tree new_label = main_block_label (label);
1538 		if (new_label != label)
1539 		  gimple_transaction_set_label (trans_stmt, new_label);
1540 	      }
1541 	  }
1542 	  break;
1543 
1544 	default:
1545 	  break;
1546       }
1547     }
1548 
1549   /* Do the same for the exception region tree labels.  */
1550   cleanup_dead_labels_eh ();
1551 
1552   /* Finally, purge dead labels.  All user-defined labels and labels that
1553      can be the target of non-local gotos and labels which have their
1554      address taken are preserved.  */
1555   FOR_EACH_BB_FN (bb, cfun)
1556     {
1557       gimple_stmt_iterator i;
1558       tree label_for_this_bb = label_for_bb[bb->index].label;
1559 
1560       if (!label_for_this_bb)
1561 	continue;
1562 
1563       /* If the main label of the block is unused, we may still remove it.  */
1564       if (!label_for_bb[bb->index].used)
1565 	label_for_this_bb = NULL;
1566 
1567       for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1568 	{
1569 	  tree label;
1570 	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1571 
1572 	  if (!label_stmt)
1573 	    break;
1574 
1575 	  label = gimple_label_label (label_stmt);
1576 
1577 	  if (label == label_for_this_bb
1578 	      || !DECL_ARTIFICIAL (label)
1579 	      || DECL_NONLOCAL (label)
1580 	      || FORCED_LABEL (label))
1581 	    gsi_next (&i);
1582 	  else
1583 	    gsi_remove (&i, true);
1584 	}
1585     }
1586 
1587   free (label_for_bb);
1588 }
1589 
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.
     Label index 0 is the default case, so start from 1.  Dropped
     labels are NULLed out in place and compressed afterwards.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
	 default case.  */
      if (base_bb == default_bb)
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      /* The running upper bound of the merged range; a case with no
	 CASE_HIGH covers the single value CASE_LOW.  */
      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::add (base_high, 1);

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::eq_p (CASE_LOW (merge_case), bhp1))
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
			       gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
1669 
1670 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1671    and scan the sorted vector of cases.  Combine the ones jumping to the
1672    same label.  */
1673 
1674 void
1675 group_case_labels (void)
1676 {
1677   basic_block bb;
1678 
1679   FOR_EACH_BB_FN (bb, cfun)
1680     {
1681       gimple stmt = last_stmt (bb);
1682       if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1683 	group_case_labels_stmt (as_a <gswitch *> (stmt));
1684     }
1685 }
1686 
/* Checks whether we can merge block B into block A.  A must be B's sole
   simple (non-complex) fallthru predecessor, B must be A's sole
   successor, and merging must not lose labels, PHIs, loop structure,
   or (at -O0) goto locations.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
	return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
	  || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  The locus
     survives only if either the last real statement of A or the first
     real statement of B already carries it.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
1778 
/* Replaces all uses of NAME by VAL.  Non-PHI statements that are changed
   are re-folded and updated; affected EH edges are purged.  Afterwards
   NAME must have no remaining uses.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
	  && stmt_ends_bb_p (stmt))
	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
				       PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple orig_stmt = stmt;
	  size_t i;

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  /* Folding may replace the statement; track the current one.  */
	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
1860 
/* Merge block B into block A.  Callers must have checked the merge is
   legal (see gimple_can_merge_blocks_p): A is B's single fallthru
   predecessor.  B's PHIs are eliminated, its labels are removed or
   migrated, and its statement sequence is appended to A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi;
  gphi_iterator psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = (virtual_operand_p (def)
			       || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
	 of loop exit phi nodes.  */
      if (current_loops
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
	  && !virtual_operand_p (def)
	  && TREE_CODE (use) == SSA_NAME
	  && a->loop_father != b->loop_father)
	may_replace_uses = false;

      if (!may_replace_uses)
	{
	  gcc_assert (!virtual_operand_p (def));

	  /* Note that just emitting the copies is fine -- there is no problem
	     with ordering of phi nodes.  This is because A is the single
	     predecessor of B, therefore results of the phi nodes cannot
	     appear as arguments of the phi nodes.  */
	  copy = gimple_build_assign (def, use);
	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
          remove_phi_node (&psi, false);
	}
      else
        {
	  /* If we deal with a PHI for virtual operands, we can simply
	     propagate these without fussing with folding or updating
	     the stmt.  */
	  if (virtual_operand_p (def))
	    {
	      imm_use_iterator iter;
	      use_operand_p use_p;
	      gimple stmt;

	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, use);

	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
	    }
	  else
            replace_uses_by (def, use);

          remove_phi_node (&psi, true);
        }
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  tree label = gimple_label_label (label_stmt);
	  int lp_nr;

	  gsi_remove (&gsi, false);

	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos).  So we can not just delete the
	     label.  Instead we move the label to the start of block A.  */
	  if (FORCED_LABEL (label))
	    {
	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
	      gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
	    }
	  /* Other user labels keep around in a form of a debug stmt.  */
	  else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
	    {
	      gimple dbg = gimple_build_debug_bind (label,
						    integer_zero_node,
						    stmt);
	      gimple_debug_bind_reset_value (dbg);
	      gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
	    }

	  /* A removed landing-pad label leaves its pad without a
	     destination; record that.  */
	  lp_nr = EH_LANDING_PAD_NR (label);
	  if (lp_nr)
	    {
	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
	      lp->post_landing_pad = NULL;
	    }
	}
      else
	{
	  gimple_set_bb (stmt, a);
	  gsi_next (&gsi);
	}
    }

  /* When merging two BBs, if their counts are different, the larger count
     is selected as the new bb count. This is to handle inconsistent
     profiles.  */
  if (a->loop_father == b->loop_father)
    {
      a->count = MAX (a->count, b->count);
      a->frequency = MAX (a->frequency, b->frequency);
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}
1998 
1999 
2000 /* Return the one of two successors of BB that is not reachable by a
2001    complex edge, if there is one.  Else, return BB.  We use
2002    this in optimizations that use post-dominators for their heuristics,
2003    to catch the cases in C++ where function calls are involved.  */
2004 
2005 basic_block
2006 single_noncomplex_succ (basic_block bb)
2007 {
2008   edge e0, e1;
2009   if (EDGE_COUNT (bb->succs) != 2)
2010     return bb;
2011 
2012   e0 = EDGE_SUCC (bb, 0);
2013   e1 = EDGE_SUCC (bb, 1);
2014   if (e0->flags & EDGE_COMPLEX)
2015     return e1->dest;
2016   if (e1->flags & EDGE_COMPLEX)
2017     return e0->dest;
2018 
2019   return bb;
2020 }
2021 
2022 /* T is CALL_EXPR.  Set current_function_calls_* flags.  */
2023 
2024 void
2025 notice_special_calls (gcall *call)
2026 {
2027   int flags = gimple_call_flags (call);
2028 
2029   if (flags & ECF_MAY_BE_ALLOCA)
2030     cfun->calls_alloca = true;
2031   if (flags & ECF_RETURNS_TWICE)
2032     cfun->calls_setjmp = true;
2033 }
2034 
2035 
2036 /* Clear flags set by notice_special_calls.  Used by dead code removal
2037    to update the flags.  */
2038 
2039 void
2040 clear_special_calls (void)
2041 {
2042   cfun->calls_alloca = false;
2043   cfun->calls_setjmp = false;
2044 }
2045 
2046 /* Remove PHI nodes associated with basic block BB and all edges out of BB.  */
2047 
2048 static void
2049 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2050 {
2051   /* Since this block is no longer reachable, we can just delete all
2052      of its PHI nodes.  */
2053   remove_phi_nodes (bb);
2054 
2055   /* Remove edges to BB's successors.  */
2056   while (EDGE_COUNT (bb->succs) > 0)
2057     remove_edge (EDGE_SUCC (bb, 0));
2058 }
2059 
2060 
2061 /* Remove statements of basic block BB.  */
2062 
static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
	{
	  dump_bb (dump_file, bb, 0, TDF_BLOCKS);
	  fprintf (dump_file, "\n");
	}
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
	 with it.  */
      if (loop->latch == bb
	  || loop->header == bb)
	free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
	{
	  gimple stmt = gsi_stmt (i);
	  glabel *label_stmt = dyn_cast <glabel *> (stmt);
	  if (label_stmt
	      && (FORCED_LABEL (gimple_label_label (label_stmt))
		  || DECL_NONLOCAL (gimple_label_label (label_stmt))))
	    {
	      basic_block new_bb;
	      gimple_stmt_iterator new_gsi;

	      /* A non-reachable non-local label may still be referenced.
		 But it no longer needs to carry the extra semantics of
		 non-locality.  */
	      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
		{
		  DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
		  FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
		}

	      /* Move the still-referenced label to the start of the
		 previous block rather than deleting it.  */
	      new_bb = bb->prev_bb;
	      new_gsi = gsi_start_bb (new_bb);
	      gsi_remove (&i, false);
	      gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      /* Release SSA definitions if we are in SSA.  Note that we
		 may be called when not in SSA.  For example,
		 final_cleanup calls this function via
		 cleanup_tree_cfg.  */
	      if (gimple_in_ssa_p (cfun))
		release_defs (stmt);

	      gsi_remove (&i, true);
	    }

	  /* gsi_remove may have invalidated/advanced the iterator past
	     the sequence start; restart from the (new) last statement
	     in that case, otherwise step backwards.  */
	  if (gsi_end_p (i))
	    i = gsi_last_bb (bb);
	  else
	    gsi_prev (&i);
	}
    }

  /* Finally detach BB's PHI nodes and outgoing edges and clear its
     statement/PHI sequences.  */
  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}
2144 
2145 
2146 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2147    predicate VAL, return the edge that will be taken out of the block.
2148    If VAL does not match a unique edge, NULL is returned.  */
2149 
2150 edge
2151 find_taken_edge (basic_block bb, tree val)
2152 {
2153   gimple stmt;
2154 
2155   stmt = last_stmt (bb);
2156 
2157   gcc_assert (stmt);
2158   gcc_assert (is_ctrl_stmt (stmt));
2159 
2160   if (val == NULL)
2161     return NULL;
2162 
2163   if (!is_gimple_min_invariant (val))
2164     return NULL;
2165 
2166   if (gimple_code (stmt) == GIMPLE_COND)
2167     return find_taken_edge_cond_expr (bb, val);
2168 
2169   if (gimple_code (stmt) == GIMPLE_SWITCH)
2170     return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2171 
2172   if (computed_goto_p (stmt))
2173     {
2174       /* Only optimize if the argument is a label, if the argument is
2175 	 not a label then we can not construct a proper CFG.
2176 
2177          It may be the case that we only need to allow the LABEL_REF to
2178          appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2179          appear inside a LABEL_EXPR just to be safe.  */
2180       if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2181 	  && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2182 	return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2183       return NULL;
2184     }
2185 
2186   gcc_unreachable ();
2187 }
2188 
2189 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2190    statement, determine which of the outgoing edges will be taken out of the
2191    block.  Return NULL if either edge may be taken.  */
2192 
2193 static edge
2194 find_taken_edge_computed_goto (basic_block bb, tree val)
2195 {
2196   basic_block dest;
2197   edge e = NULL;
2198 
2199   dest = label_to_block (val);
2200   if (dest)
2201     {
2202       e = find_edge (bb, dest);
2203       gcc_assert (e != NULL);
2204     }
2205 
2206   return e;
2207 }
2208 
2209 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2210    statement, determine which of the two edges will be taken out of the
2211    block.  Return NULL if either edge may be taken.  */
2212 
2213 static edge
2214 find_taken_edge_cond_expr (basic_block bb, tree val)
2215 {
2216   edge true_edge, false_edge;
2217 
2218   extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2219 
2220   gcc_assert (TREE_CODE (val) == INTEGER_CST);
2221   return (integer_zerop (val) ? false_edge : true_edge);
2222 }
2223 
2224 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2225    statement, determine which edge will be taken out of the block.  Return
2226    NULL if any edge may be taken.  */
2227 
2228 static edge
2229 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2230 			     tree val)
2231 {
2232   basic_block dest_bb;
2233   edge e;
2234   tree taken_case;
2235 
2236   taken_case = find_case_label_for_value (switch_stmt, val);
2237   dest_bb = label_to_block (CASE_LABEL (taken_case));
2238 
2239   e = find_edge (bb, dest_bb);
2240   gcc_assert (e);
2241   return e;
2242 }
2243 
2244 
2245 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2246    We can make optimal use here of the fact that the case labels are
2247    sorted: We can do a binary search for a case matching VAL.  */
2248 
static tree
find_case_label_for_value (gswitch *switch_stmt, tree val)
{
  size_t low, high, n = gimple_switch_num_labels (switch_stmt);
  tree default_case = gimple_switch_default_label (switch_stmt);

  /* Binary search over labels [1, n); index 0 is the default label and
     is never probed because the midpoint (high + low) / 2 is >= 1 while
     high - low > 1.  */
  for (low = 0, high = n; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = gimple_switch_label (switch_stmt, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
	high = i;
      else
	low = i;

      if (CASE_HIGH (t) == NULL)
	{
	  /* A single-valued case label.  */
	  if (cmp == 0)
	    return t;
	}
      else
	{
	  /* A case range.  We can only handle integer ranges.  */
	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
	    return t;
	}
    }

  /* No case matched VAL.  */
  return default_case;
}
2285 
2286 
2287 /* Dump a basic block on stderr.  */
2288 
2289 void
2290 gimple_debug_bb (basic_block bb)
2291 {
2292   dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2293 }
2294 
2295 
2296 /* Dump basic block with index N on stderr.  */
2297 
2298 basic_block
2299 gimple_debug_bb_n (int n)
2300 {
2301   gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2302   return BASIC_BLOCK_FOR_FN (cfun, n);
2303 }
2304 
2305 
2306 /* Dump the CFG on stderr.
2307 
2308    FLAGS are the same used by the tree dumping functions
2309    (see TDF_* in dumpfile.h).  */
2310 
void
gimple_debug_cfg (int flags)
{
  /* Convenience wrapper: dump the current function's CFG to stderr.  */
  gimple_dump_cfg (stderr, flags);
}
2316 
2317 
2318 /* Dump the program showing basic block boundaries on the given FILE.
2319 
2320    FLAGS are the same used by the tree dumping functions (see TDF_* in
2321    tree.h).  */
2322 
2323 void
2324 gimple_dump_cfg (FILE *file, int flags)
2325 {
2326   if (flags & TDF_DETAILS)
2327     {
2328       dump_function_header (file, current_function_decl, flags);
2329       fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2330 	       n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2331 	       last_basic_block_for_fn (cfun));
2332 
2333       brief_dump_cfg (file, flags | TDF_COMMENT);
2334       fprintf (file, "\n");
2335     }
2336 
2337   if (flags & TDF_STATS)
2338     dump_cfg_stats (file);
2339 
2340   dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2341 }
2342 
2343 
2344 /* Dump CFG statistics on FILE.  */
2345 
void
dump_cfg_stats (FILE *file)
{
  /* High-water mark of merged labels across all functions dumped so far.  */
  static long max_num_merged_labels = 0;
  unsigned long size, total = 0;
  long num_edges;
  basic_block bb;
  const char * const fmt_str   = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
  const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
  const char *funcname = current_function_name ();

  fprintf (file, "\nCFG Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", "  Number of  ", "Memory");
  fprintf (file, fmt_str, "", "  instances  ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  /* Memory consumed by basic block structures.  */
  size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
  total += size;
  fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
	   SCALE (size), LABEL (size));

  /* Memory consumed by edge structures; count outgoing edges only so
     each edge is counted once.  */
  num_edges = 0;
  FOR_EACH_BB_FN (bb, cfun)
    num_edges += EDGE_COUNT (bb->succs);
  size = num_edges * sizeof (struct edge_def);
  total += size;
  fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
	   LABEL (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  if (cfg_stats.num_merged_labels > max_num_merged_labels)
    max_num_merged_labels = cfg_stats.num_merged_labels;

  fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
	   cfg_stats.num_merged_labels, max_num_merged_labels);

  fprintf (file, "\n");
}
2392 
2393 
2394 /* Dump CFG statistics on stderr.  Keep extern so that it's always
2395    linked in the final executable.  */
2396 
DEBUG_FUNCTION void
debug_cfg_stats (void)
{
  /* Debugger entry point: dump CFG statistics to stderr.  */
  dump_cfg_stats (stderr);
}
2402 
2403 /*---------------------------------------------------------------------------
2404 			     Miscellaneous helpers
2405 ---------------------------------------------------------------------------*/
2406 
2407 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2408    flow.  Transfers of control flow associated with EH are excluded.  */
2409 
2410 static bool
2411 call_can_make_abnormal_goto (gimple t)
2412 {
2413   /* If the function has no non-local labels, then a call cannot make an
2414      abnormal transfer of control.  */
2415   if (!cfun->has_nonlocal_label
2416       && !cfun->calls_setjmp)
2417    return false;
2418 
2419   /* Likewise if the call has no side effects.  */
2420   if (!gimple_has_side_effects (t))
2421     return false;
2422 
2423   /* Likewise if the called function is leaf.  */
2424   if (gimple_call_flags (t) & ECF_LEAF)
2425     return false;
2426 
2427   return true;
2428 }
2429 
2430 
2431 /* Return true if T can make an abnormal transfer of control flow.
2432    Transfers of control flow associated with EH are excluded.  */
2433 
2434 bool
2435 stmt_can_make_abnormal_goto (gimple t)
2436 {
2437   if (computed_goto_p (t))
2438     return true;
2439   if (is_gimple_call (t))
2440     return call_can_make_abnormal_goto (t);
2441   return false;
2442 }
2443 
2444 
2445 /* Return true if T represents a stmt that always transfers control.  */
2446 
2447 bool
2448 is_ctrl_stmt (gimple t)
2449 {
2450   switch (gimple_code (t))
2451     {
2452     case GIMPLE_COND:
2453     case GIMPLE_SWITCH:
2454     case GIMPLE_GOTO:
2455     case GIMPLE_RETURN:
2456     case GIMPLE_RESX:
2457       return true;
2458     default:
2459       return false;
2460     }
2461 }
2462 
2463 
2464 /* Return true if T is a statement that may alter the flow of control
2465    (e.g., a call to a non-returning function).  */
2466 
bool
is_ctrl_altering_stmt (gimple t)
{
  gcc_assert (t);

  switch (gimple_code (t))
    {
    case GIMPLE_CALL:
      /* Per stmt call flag indicates whether the call could alter
	 controlflow.  */
      if (gimple_call_ctrl_altering_p (t))
	return true;
      break;

    case GIMPLE_EH_DISPATCH:
      /* EH_DISPATCH branches to the individual catch handlers at
	 this level of a try or allowed-exceptions region.  It can
	 fallthru to the next statement as well.  */
      return true;

    case GIMPLE_ASM:
      /* An asm with destination labels ("asm goto") alters control flow.  */
      if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
	return true;
      break;

    CASE_GIMPLE_OMP:
      /* OpenMP directives alter control flow.  */
      return true;

    case GIMPLE_TRANSACTION:
      /* A transaction start alters control flow.  */
      return true;

    default:
      break;
    }

  /* If a statement can throw, it alters control flow.  */
  return stmt_can_throw_internal (t);
}
2507 
2508 
2509 /* Return true if T is a simple local goto.  */
2510 
2511 bool
2512 simple_goto_p (gimple t)
2513 {
2514   return (gimple_code (t) == GIMPLE_GOTO
2515 	  && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2516 }
2517 
2518 
2519 /* Return true if STMT should start a new basic block.  PREV_STMT is
2520    the statement preceding STMT.  It is used when STMT is a label or a
2521    case label.  Labels should only start a new basic block if their
2522    previous statement wasn't a label.  Otherwise, sequence of labels
2523    would generate unnecessary basic blocks that only contain a single
2524    label.  */
2525 
static inline bool
stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
{
  if (stmt == NULL)
    return false;

  /* Labels start a new basic block only if the preceding statement
     wasn't a label of the same type.  This prevents the creation of
     consecutive blocks that have nothing but a single label.  */
  if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
    {
      /* Nonlocal and computed GOTO targets always start a new block.  */
      if (DECL_NONLOCAL (gimple_label_label (label_stmt))
	  || FORCED_LABEL (gimple_label_label (label_stmt)))
	return true;

      if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
	{
	  /* A label immediately after a nonlocal label still starts a
	     new block.  */
	  if (DECL_NONLOCAL (gimple_label_label (
			       as_a <glabel *> (prev_stmt))))
	    return true;

	  /* Otherwise fold this label into the previous label's block
	     and count the merge for dump_cfg_stats.  */
	  cfg_stats.num_merged_labels++;
	  return false;
	}
      else
	return true;
    }
  else if (gimple_code (stmt) == GIMPLE_CALL
	   && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
    /* setjmp acts similar to a nonlocal GOTO target and thus should
       start a new block.  */
    return true;

  return false;
}
2562 
2563 
2564 /* Return true if T should end a basic block.  */
2565 
2566 bool
2567 stmt_ends_bb_p (gimple t)
2568 {
2569   return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2570 }
2571 
2572 /* Remove block annotations and other data structures.  */
2573 
void
delete_tree_cfg_annotations (void)
{
  /* Release the label-to-block map; it is rebuilt on demand.  */
  vec_free (label_to_block_map_for_fn (cfun));
}
2579 
2580 
2581 /* Return the first statement in basic block BB.  */
2582 
2583 gimple
2584 first_stmt (basic_block bb)
2585 {
2586   gimple_stmt_iterator i = gsi_start_bb (bb);
2587   gimple stmt = NULL;
2588 
2589   while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2590     {
2591       gsi_next (&i);
2592       stmt = NULL;
2593     }
2594   return stmt;
2595 }
2596 
2597 /* Return the first non-label statement in basic block BB.  */
2598 
2599 static gimple
2600 first_non_label_stmt (basic_block bb)
2601 {
2602   gimple_stmt_iterator i = gsi_start_bb (bb);
2603   while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2604     gsi_next (&i);
2605   return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2606 }
2607 
2608 /* Return the last statement in basic block BB.  */
2609 
2610 gimple
2611 last_stmt (basic_block bb)
2612 {
2613   gimple_stmt_iterator i = gsi_last_bb (bb);
2614   gimple stmt = NULL;
2615 
2616   while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2617     {
2618       gsi_prev (&i);
2619       stmt = NULL;
2620     }
2621   return stmt;
2622 }
2623 
2624 /* Return the last statement of an otherwise empty block.  Return NULL
2625    if the block is totally empty, or if it contains more than one
2626    statement.  */
2627 
2628 gimple
2629 last_and_only_stmt (basic_block bb)
2630 {
2631   gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2632   gimple last, prev;
2633 
2634   if (gsi_end_p (i))
2635     return NULL;
2636 
2637   last = gsi_stmt (i);
2638   gsi_prev_nondebug (&i);
2639   if (gsi_end_p (i))
2640     return last;
2641 
2642   /* Empty statements should no longer appear in the instruction stream.
2643      Everything that might have appeared before should be deleted by
2644      remove_useless_stmts, and the optimizers should just gsi_remove
2645      instead of smashing with build_empty_stmt.
2646 
2647      Thus the only thing that should appear here in a block containing
2648      one executable statement is a label.  */
2649   prev = gsi_stmt (i);
2650   if (gimple_code (prev) == GIMPLE_LABEL)
2651     return last;
2652   else
2653     return NULL;
2654 }
2655 
2656 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE.  */
2657 
static void
reinstall_phi_args (edge new_edge, edge old_edge)
{
  edge_var_map *vm;
  int i;
  gphi_iterator phis;

  /* The arguments stripped from OLD_EDGE were queued in its var map;
     nothing to do if no map was recorded.  */
  vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
  if (!v)
    return;

  /* Walk the queued (result, def) pairs and the destination's PHI
     nodes in lockstep; they were recorded in the same order.  */
  for (i = 0, phis = gsi_start_phis (new_edge->dest);
       v->iterate (i, &vm) && !gsi_end_p (phis);
       i++, gsi_next (&phis))
    {
      gphi *phi = phis.phi ();
      tree result = redirect_edge_var_map_result (vm);
      tree arg = redirect_edge_var_map_def (vm);

      /* Each queued entry must match its PHI node.  */
      gcc_assert (result == gimple_phi_result (phi));

      add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
    }

  /* The queued entries have been consumed; drop the map.  */
  redirect_edge_var_map_clear (old_edge);
}
2684 
2685 /* Returns the basic block after which the new basic block created
2686    by splitting edge EDGE_IN should be placed.  Tries to keep the new block
2687    near its "logical" location.  This is of most help to humans looking
2688    at debugging dumps.  */
2689 
2690 basic_block
2691 split_edge_bb_loc (edge edge_in)
2692 {
2693   basic_block dest = edge_in->dest;
2694   basic_block dest_prev = dest->prev_bb;
2695 
2696   if (dest_prev)
2697     {
2698       edge e = find_edge (dest_prev, dest);
2699       if (e && !(e->flags & EDGE_COMPLEX))
2700 	return edge_in->src;
2701     }
2702   return dest_prev;
2703 }
2704 
2705 /* Split a (typically critical) edge EDGE_IN.  Return the new block.
2706    Abort on abnormal edges.  */
2707 
static basic_block
gimple_split_edge (edge edge_in)
{
  basic_block new_bb, after_bb, dest;
  edge new_edge, e;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  dest = edge_in->dest;

  after_bb = split_edge_bb_loc (edge_in);

  /* Create the new block and give it the split edge's frequency and
     count; it falls through unconditionally to DEST.  */
  new_bb = create_empty_bb (after_bb);
  new_bb->frequency = EDGE_FREQUENCY (edge_in);
  new_bb->count = edge_in->count;
  new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = edge_in->count;

  /* Point the original edge at the new block, then re-attach the PHI
     arguments (queued by the redirection) onto the fallthru edge.  */
  e = redirect_edge_and_branch (edge_in, new_bb);
  gcc_assert (e == edge_in);
  reinstall_phi_args (new_edge, e);

  return new_bb;
}
2734 
2735 
2736 /* Verify properties of the address expression T with base object BASE.  */
2737 
2738 static tree
2739 verify_address (tree t, tree base)
2740 {
2741   bool old_constant;
2742   bool old_side_effects;
2743   bool new_constant;
2744   bool new_side_effects;
2745 
2746   old_constant = TREE_CONSTANT (t);
2747   old_side_effects = TREE_SIDE_EFFECTS (t);
2748 
2749   recompute_tree_invariant_for_addr_expr (t);
2750   new_side_effects = TREE_SIDE_EFFECTS (t);
2751   new_constant = TREE_CONSTANT (t);
2752 
2753   if (old_constant != new_constant)
2754     {
2755       error ("constant not recomputed when ADDR_EXPR changed");
2756       return t;
2757     }
2758   if (old_side_effects != new_side_effects)
2759     {
2760       error ("side effects not recomputed when ADDR_EXPR changed");
2761       return t;
2762     }
2763 
2764   if (!(TREE_CODE (base) == VAR_DECL
2765 	|| TREE_CODE (base) == PARM_DECL
2766 	|| TREE_CODE (base) == RESULT_DECL))
2767     return NULL_TREE;
2768 
2769   if (DECL_GIMPLE_REG_P (base))
2770     {
2771       error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2772       return base;
2773     }
2774 
2775   return NULL_TREE;
2776 }
2777 
2778 /* Callback for walk_tree, check that all elements with address taken are
2779    properly noticed as such.  The DATA is an int* that is 1 if TP was seen
2780    inside a PHI node.  */
2781 
static tree
verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp, x;

  /* Types themselves need no checking; do not walk into them.  */
  if (TYPE_P (t))
    *walk_subtrees = 0;

  /* Check operand N for being valid GIMPLE and give error MSG if not.  */
#define CHECK_OP(N, MSG) \
  do { if (!is_gimple_val (TREE_OPERAND (t, N)))		\
       { error (MSG); return TREE_OPERAND (t, N); }} while (0)

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* An SSA name on the free list must not appear in the IL.  */
      if (SSA_NAME_IN_FREE_LIST (t))
	{
	  error ("SSA name in freelist but still referenced");
	  return *tp;
	}
      break;

    case INDIRECT_REF:
      /* INDIRECT_REF is lowered to MEM_REF before gimplification.  */
      error ("INDIRECT_REF in gimple IL");
      return t;

    case MEM_REF:
      x = TREE_OPERAND (t, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (x))
	  || !is_gimple_mem_ref_addr (x))
	{
	  error ("invalid first operand of MEM_REF");
	  return x;
	}
      /* The offset operand must be a pointer-typed INTEGER_CST.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid offset operand of MEM_REF");
	  return TREE_OPERAND (t, 1);
	}
      if (TREE_CODE (x) == ADDR_EXPR
	  && (x = verify_address (x, TREE_OPERAND (x, 0))))
	return x;
      *walk_subtrees = 0;
      break;

    case ASSERT_EXPR:
      x = fold (ASSERT_EXPR_COND (t));
      if (x == boolean_false_node)
	{
	  error ("ASSERT_EXPR with an always-false condition");
	  return *tp;
	}
      break;

    case MODIFY_EXPR:
      /* Assignments are GIMPLE_ASSIGN tuples, not MODIFY_EXPR trees.  */
      error ("MODIFY_EXPR not expected while having tuples");
      return *tp;

    case ADDR_EXPR:
      {
	tree tem;

	gcc_assert (is_gimple_address (t));

	/* Skip any references (they will be checked when we recurse down the
	   tree) and ensure that any variable used as a prefix is marked
	   addressable.  */
	for (x = TREE_OPERAND (t, 0);
	     handled_component_p (x);
	     x = TREE_OPERAND (x, 0))
	  ;

	if ((tem = verify_address (t, x)))
	  return tem;

	if (!(TREE_CODE (x) == VAR_DECL
	      || TREE_CODE (x) == PARM_DECL
	      || TREE_CODE (x) == RESULT_DECL))
	  return NULL;

	if (!TREE_ADDRESSABLE (x))
	  {
	    error ("address taken, but ADDRESSABLE bit not set");
	    return x;
	  }

	break;
      }

    case COND_EXPR:
      x = COND_EXPR_COND (t);
      if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
	{
	  error ("non-integral used in condition");
	  return x;
	}
      if (!is_gimple_condexpr (x))
        {
	  error ("invalid conditional operand");
	  return x;
	}
      break;

    case NON_LVALUE_EXPR:
    case TRUTH_NOT_EXPR:
      /* Both codes are removed during gimplification.  */
      gcc_unreachable ();

    CASE_CONVERT:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
      CHECK_OP (0, "invalid operand to unary operator");
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
      if (!is_gimple_reg_type (TREE_TYPE (t)))
	{
	  error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
	  return t;
	}

      if (TREE_CODE (t) == BIT_FIELD_REF)
	{
	  /* Operands: t0 = object, t1 = bit size, t2 = bit position.  */
	  tree t0 = TREE_OPERAND (t, 0);
	  tree t1 = TREE_OPERAND (t, 1);
	  tree t2 = TREE_OPERAND (t, 2);
	  if (!tree_fits_uhwi_p (t1)
	      || !tree_fits_uhwi_p (t2))
	    {
	      error ("invalid position or size operand to BIT_FIELD_REF");
	      return t;
	    }
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && (TYPE_PRECISION (TREE_TYPE (t))
		  != tree_to_uhwi (t1)))
	    {
	      error ("integral result type precision does not match "
		     "field size of BIT_FIELD_REF");
	      return t;
	    }
	  else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
		   && TYPE_MODE (TREE_TYPE (t)) != BLKmode
		   && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t)))
		       != tree_to_uhwi (t1)))
	    {
	      error ("mode size of non-integral result does not "
		     "match field size of BIT_FIELD_REF");
	      return t;
	    }
	  if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
	      && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
		  > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
	    {
	      error ("position plus size exceeds size of referenced object in "
		     "BIT_FIELD_REF");
	      return t;
	    }
	}
      t = TREE_OPERAND (t, 0);

      /* Fall-through.  */
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
      /* We have a nest of references.  Verify that each of the operands
	 that determine where to reference is either a constant or a variable,
	 verify that the base is valid, and then show we've already checked
	 the subtrees.  */
      while (handled_component_p (t))
	{
	  if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
	    CHECK_OP (2, "invalid COMPONENT_REF offset operator");
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      CHECK_OP (1, "invalid array index");
	      if (TREE_OPERAND (t, 2))
		CHECK_OP (2, "invalid array lower bound");
	      if (TREE_OPERAND (t, 3))
		CHECK_OP (3, "invalid array stride");
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF
		   || TREE_CODE (t) == REALPART_EXPR
		   || TREE_CODE (t) == IMAGPART_EXPR)
	    {
	      error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
		     "REALPART_EXPR");
	      return t;
	    }

	  t = TREE_OPERAND (t, 0);
	}

      if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
	{
	  error ("invalid reference prefix");
	  return t;
	}
      *walk_subtrees = 0;
      break;
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
	 POINTER_PLUS_EXPR. */
      if (POINTER_TYPE_P (TREE_TYPE (t)))
	{
	  error ("invalid operand to plus/minus, type is a pointer");
	  return t;
	}
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case POINTER_PLUS_EXPR:
      /* Check to make sure the first operand is a pointer or reference type. */
      if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
	{
	  error ("invalid operand to pointer plus, first operand is not a pointer");
	  return t;
	}
      /* Check to make sure the second operand is a ptrofftype.  */
      if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid operand to pointer plus, second operand is not an "
		 "integer type of appropriate width");
	  return t;
	}
      /* FALLTHROUGH */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case CONSTRUCTOR:
      /* Constant vector constructors are leaves; no need to recurse.  */
      if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	*walk_subtrees = 0;
      break;

    case CASE_LABEL_EXPR:
      /* Case labels in the IL must have been unchained.  */
      if (CASE_CHAIN (t))
	{
	  error ("invalid CASE_CHAIN");
	  return t;
	}
      break;

    default:
      break;
    }
  return NULL;

#undef CHECK_OP
}
3075 
3076 
3077 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3078    Returns true if there is an error, otherwise false.  */
3079 
3080 static bool
3081 verify_types_in_gimple_min_lval (tree expr)
3082 {
3083   tree op;
3084 
3085   if (is_gimple_id (expr))
3086     return false;
3087 
3088   if (TREE_CODE (expr) != TARGET_MEM_REF
3089       && TREE_CODE (expr) != MEM_REF)
3090     {
3091       error ("invalid expression for min lvalue");
3092       return true;
3093     }
3094 
3095   /* TARGET_MEM_REFs are strange beasts.  */
3096   if (TREE_CODE (expr) == TARGET_MEM_REF)
3097     return false;
3098 
3099   op = TREE_OPERAND (expr, 0);
3100   if (!is_gimple_val (op))
3101     {
3102       error ("invalid operand in indirect reference");
3103       debug_generic_stmt (op);
3104       return true;
3105     }
3106   /* Memory references now generally can involve a value conversion.  */
3107 
3108   return false;
3109 }
3110 
3111 /* Verify if EXPR is a valid GIMPLE reference expression.  If
3112    REQUIRE_LVALUE is true verifies it is an lvalue.  Returns true
3113    if there is an error, otherwise false.  */
3114 
static bool
verify_types_in_gimple_reference (tree expr, bool require_lvalue)
{
  /* Peel handled components off EXPR one level at a time, checking
     the operands of each level against the type of the level below.  */
  while (handled_component_p (expr))
    {
      tree op = TREE_OPERAND (expr, 0);

      if (TREE_CODE (expr) == ARRAY_REF
	  || TREE_CODE (expr) == ARRAY_RANGE_REF)
	{
	  /* The index (operand 1) and the optional lower bound
	     (operand 2) and element size (operand 3) must all be
	     gimple values.  */
	  if (!is_gimple_val (TREE_OPERAND (expr, 1))
	      || (TREE_OPERAND (expr, 2)
		  && !is_gimple_val (TREE_OPERAND (expr, 2)))
	      || (TREE_OPERAND (expr, 3)
		  && !is_gimple_val (TREE_OPERAND (expr, 3))))
	    {
	      error ("invalid operands to array reference");
	      debug_generic_stmt (expr);
	      return true;
	    }
	}

      /* Verify if the reference array element types are compatible.  */
      if (TREE_CODE (expr) == ARRAY_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}
      /* An ARRAY_RANGE_REF yields an array itself, so compare the
	 element types of the result and of the operand's type.  */
      if (TREE_CODE (expr) == ARRAY_RANGE_REF
	  && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array range reference");
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      /* REALPART_EXPR/IMAGPART_EXPR extract the component type of a
	 complex operand.  */
      if ((TREE_CODE (expr) == REALPART_EXPR
	   || TREE_CODE (expr) == IMAGPART_EXPR)
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in real/imagpart reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      /* A COMPONENT_REF has the type of the FIELD_DECL in operand 1.  */
      if (TREE_CODE (expr) == COMPONENT_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("type mismatch in component reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
	  return true;
	}

      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
	{
	  /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
	     that their operand is not an SSA name or an invariant when
	     requiring an lvalue (this usually means there is a SRA or IPA-SRA
	     bug).  Otherwise there is nothing to verify, gross mismatches at
	     most invoke undefined behavior.  */
	  if (require_lvalue
	      && (TREE_CODE (op) == SSA_NAME
		  || is_gimple_min_invariant (op)))
	    {
	      error ("conversion of an SSA_NAME on the left hand side");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (TREE_CODE (op) == SSA_NAME
		   && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
	    {
	      error ("conversion of register to a different size");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (!handled_component_p (op))
	    return false;
	}

      expr = op;
    }

  /* At this point EXPR is the base of the reference chain.  Verify
     the operands of a MEM_REF or TARGET_MEM_REF base directly.  */
  if (TREE_CODE (expr) == MEM_REF)
    {
      if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
	{
	  error ("invalid address operand in MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
      /* The offset operand is a pointer-typed INTEGER_CST.  */
      if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("invalid offset operand in MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
    }
  else if (TREE_CODE (expr) == TARGET_MEM_REF)
    {
      if (!TMR_BASE (expr)
	  || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
	{
	  error ("invalid address operand in TARGET_MEM_REF");
	  return true;
	}
      if (!TMR_OFFSET (expr)
	  || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
	{
	  error ("invalid offset operand in TARGET_MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
    }

  /* Invariant bases are fine for rvalues; otherwise fall back to the
     minimal lvalue verification of the base.  */
  return ((require_lvalue || !is_gimple_min_invariant (expr))
	  && verify_types_in_gimple_min_lval (expr));
}
3244 
3245 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3246    list of pointer-to types that is trivially convertible to DEST.  */
3247 
3248 static bool
3249 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3250 {
3251   tree src;
3252 
3253   if (!TYPE_POINTER_TO (src_obj))
3254     return true;
3255 
3256   for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3257     if (useless_type_conversion_p (dest, src))
3258       return true;
3259 
3260   return false;
3261 }
3262 
3263 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3264    from TYPE2 can be handled by FIXED_CONVERT_EXPR.  */
3265 
3266 static bool
3267 valid_fixed_convert_types_p (tree type1, tree type2)
3268 {
3269   return (FIXED_POINT_TYPE_P (type1)
3270 	  && (INTEGRAL_TYPE_P (type2)
3271 	      || SCALAR_FLOAT_TYPE_P (type2)
3272 	      || FIXED_POINT_TYPE_P (type2)));
3273 }
3274 
3275 /* Verify the contents of a GIMPLE_CALL STMT.  Returns true when there
3276    is a problem, otherwise false.  */
3277 
static bool
verify_gimple_call (gcall *stmt)
{
  tree fn = gimple_call_fn (stmt);
  tree fntype, fndecl;
  unsigned i;

  /* Internal calls carry an internal function code and no callee
     expression; the two forms are mutually exclusive.  */
  if (gimple_call_internal_p (stmt))
    {
      if (fn)
	{
	  error ("gimple call has two targets");
	  debug_generic_stmt (fn);
	  return true;
	}
    }
  else
    {
      if (!fn)
	{
	  error ("gimple call has no target");
	  return true;
	}
    }

  if (fn && !is_gimple_call_addr (fn))
    {
      error ("invalid function in gimple call");
      debug_generic_stmt (fn);
      return true;
    }

  /* The callee has to be a pointer to a function or method type.  */
  if (fn
      && (!POINTER_TYPE_P (TREE_TYPE (fn))
	  || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
    {
      error ("non-function in gimple call");
      return true;
    }

   /* DECL_LOOPING_CONST_OR_PURE_P only makes sense on a decl that is
      pure (DECL_PURE_P) or const (TREE_READONLY) in the first place.  */
   fndecl = gimple_call_fndecl (stmt);
   if (fndecl
       && TREE_CODE (fndecl) == FUNCTION_DECL
       && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
       && !DECL_PURE_P (fndecl)
       && !TREE_READONLY (fndecl))
     {
       error ("invalid pure const state for function");
       return true;
     }

  /* The LHS, when present, has to be a valid lvalue reference.  */
  if (gimple_call_lhs (stmt)
      && (!is_gimple_lvalue (gimple_call_lhs (stmt))
	  || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
    {
      error ("invalid LHS in gimple call");
      return true;
    }

  /* A control-altering noreturn call never produces a value.  */
  if (gimple_call_ctrl_altering_p (stmt)
      && gimple_call_lhs (stmt)
      && gimple_call_noreturn_p (stmt))
    {
      error ("LHS in noreturn call");
      return true;
    }

  /* The LHS type must be trivially convertible from the return type
     recorded in the call's function type.  */
  fntype = gimple_call_fntype (stmt);
  if (fntype
      && gimple_call_lhs (stmt)
      && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
				     TREE_TYPE (fntype))
      /* ???  At least C++ misses conversions at assignments from
	 void * call results.
	 ???  Java is completely off.  Especially with functions
	 returning java.lang.Object.
	 For now simply allow arbitrary pointer type conversions.  */
      && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
	   && POINTER_TYPE_P (TREE_TYPE (fntype))))
    {
      error ("invalid conversion in gimple call");
      debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
      debug_generic_stmt (TREE_TYPE (fntype));
      return true;
    }

  if (gimple_call_chain (stmt)
      && !is_gimple_val (gimple_call_chain (stmt)))
    {
      error ("invalid static chain in gimple call");
      debug_generic_stmt (gimple_call_chain (stmt));
      return true;
    }

  /* If there is a static chain argument, the call should either be
     indirect, or the decl should have DECL_STATIC_CHAIN set.  */
  if (gimple_call_chain (stmt)
      && fndecl
      && !DECL_STATIC_CHAIN (fndecl))
    {
      error ("static chain with function that doesn%'t use one");
      return true;
    }

  /* ???  The C frontend passes unpromoted arguments in case it
     didn't see a function declaration before the call.  So for now
     leave the call arguments mostly unverified.  Once we gimplify
     unit-at-a-time we have a chance to fix this.  */

  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      /* Register-type arguments must be gimple values; aggregate
	 arguments must at least be valid lvalues.  */
      if ((is_gimple_reg_type (TREE_TYPE (arg))
	   && !is_gimple_val (arg))
	  || (!is_gimple_reg_type (TREE_TYPE (arg))
	      && !is_gimple_lvalue (arg)))
	{
	  error ("invalid argument to gimple call");
	  debug_generic_expr (arg);
	  return true;
	}
    }

  return false;
}
3404 
3405 /* Verifies the gimple comparison with the result type TYPE and
3406    the operands OP0 and OP1.  */
3407 
static bool
verify_gimple_comparison (tree type, tree op0, tree op1)
{
  tree op0_type = TREE_TYPE (op0);
  tree op1_type = TREE_TYPE (op1);

  /* Both operands have to be gimple values.  */
  if (!is_gimple_val (op0) || !is_gimple_val (op1))
    {
      error ("invalid operands in gimple comparison");
      return true;
    }

  /* For comparisons we do not have the operations type as the
     effective type the comparison is carried out in.  Instead
     we require that either the first operand is trivially
     convertible into the second, or the other way around.
     Because we special-case pointers to void we allow
     comparisons of pointers with the same mode as well.  */
  if (!useless_type_conversion_p (op0_type, op1_type)
      && !useless_type_conversion_p (op1_type, op0_type)
      && (!POINTER_TYPE_P (op0_type)
	  || !POINTER_TYPE_P (op1_type)
	  || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
    {
      error ("mismatching comparison operand types");
      debug_generic_expr (op0_type);
      debug_generic_expr (op1_type);
      return true;
    }

  /* The resulting type of a comparison may be an effective boolean type.  */
  if (INTEGRAL_TYPE_P (type)
      && (TREE_CODE (type) == BOOLEAN_TYPE
	  || TYPE_PRECISION (type) == 1))
    {
      /* A vector comparison has to yield a vector, never a scalar bool.  */
      if (TREE_CODE (op0_type) == VECTOR_TYPE
	  || TREE_CODE (op1_type) == VECTOR_TYPE)
        {
          error ("vector comparison returning a boolean");
          debug_generic_expr (op0_type);
          debug_generic_expr (op1_type);
          return true;
        }
    }
  /* Or an integer vector type with the same size and element count
     as the comparison operand types.  */
  else if (TREE_CODE (type) == VECTOR_TYPE
	   && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
    {
      if (TREE_CODE (op0_type) != VECTOR_TYPE
	  || TREE_CODE (op1_type) != VECTOR_TYPE)
        {
          error ("non-vector operands in vector comparison");
          debug_generic_expr (op0_type);
          debug_generic_expr (op1_type);
          return true;
        }

      /* Element count and element size of the result must agree with
	 the operands.  */
      if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
	  || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
	      != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
	  /* The result of a vector comparison is of signed
	     integral type.  */
	  || TYPE_UNSIGNED (TREE_TYPE (type)))
        {
          error ("invalid vector comparison resulting type");
          debug_generic_expr (type);
          return true;
        }
    }
  else
    {
      error ("bogus comparison result type");
      debug_generic_expr (type);
      return true;
    }

  return false;
}
3487 
3488 /* Verify a gimple assignment statement STMT with an unary rhs.
3489    Returns true if anything is wrong.  */
3490 
static bool
verify_gimple_assign_unary (gassign *stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);

  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of unary operation");
      return true;
    }

  if (!is_gimple_val (rhs1))
    {
      error ("invalid operand in unary operation");
      return true;
    }

  /* First handle conversions.  */
  switch (rhs_code)
    {
    CASE_CONVERT:
      {
	/* Allow conversions from pointer type to integral type only if
	   there is no sign or zero extension involved.
	   For targets where the precision of ptrofftype doesn't match that
	   of pointers we need to allow arbitrary conversions to ptrofftype.  */
	if ((POINTER_TYPE_P (lhs_type)
	     && INTEGRAL_TYPE_P (rhs1_type))
	    || (POINTER_TYPE_P (rhs1_type)
		&& INTEGRAL_TYPE_P (lhs_type)
		&& (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
		    || ptrofftype_p (sizetype))))
	  return false;

	/* Allow conversion from integral to offset type and vice versa.  */
	if ((TREE_CODE (lhs_type) == OFFSET_TYPE
	     && INTEGRAL_TYPE_P (rhs1_type))
	    || (INTEGRAL_TYPE_P (lhs_type)
		&& TREE_CODE (rhs1_type) == OFFSET_TYPE))
	  return false;

	/* Otherwise assert we are converting between types of the
	   same kind.  */
	if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
	  {
	    error ("invalid types in nop conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case ADDR_SPACE_CONVERT_EXPR:
      {
	/* Both sides must be pointers, and into different address
	   spaces -- a same-space conversion would be a plain NOP.  */
	if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
	    || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
		== TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
	  {
	    error ("invalid types in address space conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FIXED_CONVERT_EXPR:
      {
	/* At least one side must be a fixed-point type the other side
	   can convert to/from.  */
	if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
	    && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
	  {
	    error ("invalid types in fixed-point conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FLOAT_EXPR:
      {
	/* Integer to float conversion, either scalar or element-wise
	   on vectors.  */
	if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
	    && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
	        || !VECTOR_FLOAT_TYPE_P (lhs_type)))
	  {
	    error ("invalid types in conversion to floating point");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

        return false;
      }

    case FIX_TRUNC_EXPR:
      {
	/* Float to integer conversion, either scalar or element-wise
	   on vectors.  */
        if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
            && (!VECTOR_INTEGER_TYPE_P (lhs_type)
                || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
	  {
	    error ("invalid types in conversion to integer");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

        return false;
      }
    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
      /* Reductions go from a vector operand to its element type.  */
      if (!VECTOR_TYPE_P (rhs1_type)
	  || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
        {
	  error ("reduction should convert from vector to element type");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  return true;
	}
      return false;

    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
      /* FIXME.  */
      return false;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case PAREN_EXPR:
    case CONJ_EXPR:
      break;

    default:
      gcc_unreachable ();
    }

  /* For the remaining codes assert there is no conversion involved.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type))
    {
      error ("non-trivial conversion in unary operation");
      debug_generic_expr (lhs_type);
      debug_generic_expr (rhs1_type);
      return true;
    }

  return false;
}
3649 
3650 /* Verify a gimple assignment statement STMT with a binary rhs.
3651    Returns true if anything is wrong.  */
3652 
static bool
verify_gimple_assign_binary (gassign *stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  tree rhs2_type = TREE_TYPE (rhs2);

  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of binary operation");
      return true;
    }

  if (!is_gimple_val (rhs1)
      || !is_gimple_val (rhs2))
    {
      error ("invalid operands in binary operation");
      return true;
    }

  /* First handle operations that involve different types.  */
  switch (rhs_code)
    {
    case COMPLEX_EXPR:
      {
	/* Builds a complex from two scalar components.  */
	if (TREE_CODE (lhs_type) != COMPLEX_TYPE
	    || !(INTEGRAL_TYPE_P (rhs1_type)
	         || SCALAR_FLOAT_TYPE_P (rhs1_type))
	    || !(INTEGRAL_TYPE_P (rhs2_type)
	         || SCALAR_FLOAT_TYPE_P (rhs2_type)))
	  {
	    error ("type mismatch in complex expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      {
	/* Shifts and rotates are ok on integral types, fixed point
	   types and integer vector types.  */
	if ((!INTEGRAL_TYPE_P (rhs1_type)
	     && !FIXED_POINT_TYPE_P (rhs1_type)
	     && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
		  && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
	    || (!INTEGRAL_TYPE_P (rhs2_type)
		/* Vector shifts of vectors are also ok.  */
		&& !(TREE_CODE (rhs1_type) == VECTOR_TYPE
		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
		     && TREE_CODE (rhs2_type) == VECTOR_TYPE
		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
	    || !useless_type_conversion_p (lhs_type, rhs1_type))
	  {
	    error ("type mismatch in shift expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case WIDEN_LSHIFT_EXPR:
      {
	/* Widening shift: integral operand shifted by a constant into
	   a result of at least twice the operand precision.  */
        if (!INTEGRAL_TYPE_P (lhs_type)
            || !INTEGRAL_TYPE_P (rhs1_type)
            || TREE_CODE (rhs2) != INTEGER_CST
            || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
          {
            error ("type mismatch in widening vector shift expression");
            debug_generic_expr (lhs_type);
            debug_generic_expr (rhs1_type);
            debug_generic_expr (rhs2_type);
            return true;
          }

        return false;
      }

    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      {
	/* Same as WIDEN_LSHIFT_EXPR, but element-wise on integer
	   vector types.  */
        if (TREE_CODE (rhs1_type) != VECTOR_TYPE
            || TREE_CODE (lhs_type) != VECTOR_TYPE
            || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
            || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
            || TREE_CODE (rhs2) != INTEGER_CST
            || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
                > TYPE_PRECISION (TREE_TYPE (lhs_type))))
          {
            error ("type mismatch in widening vector shift expression");
            debug_generic_expr (lhs_type);
            debug_generic_expr (rhs1_type);
            debug_generic_expr (rhs2_type);
            return true;
          }

        return false;
      }

    case PLUS_EXPR:
    case MINUS_EXPR:
      {
	tree lhs_etype = lhs_type;
	tree rhs1_etype = rhs1_type;
	tree rhs2_etype = rhs2_type;
	/* For vector operands check the element types instead.  */
	if (TREE_CODE (lhs_type) == VECTOR_TYPE)
	  {
	    if (TREE_CODE (rhs1_type) != VECTOR_TYPE
		|| TREE_CODE (rhs2_type) != VECTOR_TYPE)
	      {
		error ("invalid non-vector operands to vector valued plus");
		return true;
	      }
	    lhs_etype = TREE_TYPE (lhs_type);
	    rhs1_etype = TREE_TYPE (rhs1_type);
	    rhs2_etype = TREE_TYPE (rhs2_type);
	  }
	/* Pointer arithmetic must use POINTER_PLUS_EXPR instead.  */
	if (POINTER_TYPE_P (lhs_etype)
	    || POINTER_TYPE_P (rhs1_etype)
	    || POINTER_TYPE_P (rhs2_etype))
	  {
	    error ("invalid (pointer) operands to plus/minus");
	    return true;
	  }

	/* Continue with generic binary expression handling.  */
	break;
      }

    case POINTER_PLUS_EXPR:
      {
	/* Pointer first operand, offset of ptrofftype, result of the
	   same pointer type.  */
	if (!POINTER_TYPE_P (rhs1_type)
	    || !useless_type_conversion_p (lhs_type, rhs1_type)
	    || !ptrofftype_p (rhs2_type))
	  {
	    error ("type mismatch in pointer plus expression");
	    debug_generic_stmt (lhs_type);
	    debug_generic_stmt (rhs1_type);
	    debug_generic_stmt (rhs2_type);
	    return true;
	  }

	return false;
      }

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      /* Truth operations are lowered before gimplification and must
	 not appear in GIMPLE.  */
      gcc_unreachable ();

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      /* Comparisons are also binary, but the result type is not
	 connected to the operand types.  */
      return verify_gimple_comparison (lhs_type, rhs1, rhs2);

    case WIDEN_MULT_EXPR:
      /* Widening multiply: result must be an integer of at least
	 twice the (equal) operand precisions.  */
      if (TREE_CODE (lhs_type) != INTEGER_TYPE)
	return true;
      return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
	      || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));

    case WIDEN_SUM_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      /* FIXME.  */
      return false;

    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      /* Continue with generic binary expression handling.  */
      break;

    default:
      gcc_unreachable ();
    }

  /* Generic case: all three types must agree trivially.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type)
      || !useless_type_conversion_p (lhs_type, rhs2_type))
    {
      error ("type mismatch in binary expression");
      debug_generic_stmt (lhs_type);
      debug_generic_stmt (rhs1_type);
      debug_generic_stmt (rhs2_type);
      return true;
    }

  return false;
}
3890 
3891 /* Verify a gimple assignment statement STMT with a ternary rhs.
3892    Returns true if anything is wrong.  */
3893 
3894 static bool
3895 verify_gimple_assign_ternary (gassign *stmt)
3896 {
3897   enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3898   tree lhs = gimple_assign_lhs (stmt);
3899   tree lhs_type = TREE_TYPE (lhs);
3900   tree rhs1 = gimple_assign_rhs1 (stmt);
3901   tree rhs1_type = TREE_TYPE (rhs1);
3902   tree rhs2 = gimple_assign_rhs2 (stmt);
3903   tree rhs2_type = TREE_TYPE (rhs2);
3904   tree rhs3 = gimple_assign_rhs3 (stmt);
3905   tree rhs3_type = TREE_TYPE (rhs3);
3906 
3907   if (!is_gimple_reg (lhs))
3908     {
3909       error ("non-register as LHS of ternary operation");
3910       return true;
3911     }
3912 
3913   if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3914        ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3915       || !is_gimple_val (rhs2)
3916       || !is_gimple_val (rhs3))
3917     {
3918       error ("invalid operands in ternary operation");
3919       return true;
3920     }
3921 
3922   /* First handle operations that involve different types.  */
3923   switch (rhs_code)
3924     {
3925     case WIDEN_MULT_PLUS_EXPR:
3926     case WIDEN_MULT_MINUS_EXPR:
3927       if ((!INTEGRAL_TYPE_P (rhs1_type)
3928 	   && !FIXED_POINT_TYPE_P (rhs1_type))
3929 	  || !useless_type_conversion_p (rhs1_type, rhs2_type)
3930 	  || !useless_type_conversion_p (lhs_type, rhs3_type)
3931 	  || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3932 	  || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3933 	{
3934 	  error ("type mismatch in widening multiply-accumulate expression");
3935 	  debug_generic_expr (lhs_type);
3936 	  debug_generic_expr (rhs1_type);
3937 	  debug_generic_expr (rhs2_type);
3938 	  debug_generic_expr (rhs3_type);
3939 	  return true;
3940 	}
3941       break;
3942 
3943     case FMA_EXPR:
3944       if (!useless_type_conversion_p (lhs_type, rhs1_type)
3945 	  || !useless_type_conversion_p (lhs_type, rhs2_type)
3946 	  || !useless_type_conversion_p (lhs_type, rhs3_type))
3947 	{
3948 	  error ("type mismatch in fused multiply-add expression");
3949 	  debug_generic_expr (lhs_type);
3950 	  debug_generic_expr (rhs1_type);
3951 	  debug_generic_expr (rhs2_type);
3952 	  debug_generic_expr (rhs3_type);
3953 	  return true;
3954 	}
3955       break;
3956 
3957     case COND_EXPR:
3958     case VEC_COND_EXPR:
3959       if (!useless_type_conversion_p (lhs_type, rhs2_type)
3960 	  || !useless_type_conversion_p (lhs_type, rhs3_type))
3961 	{
3962 	  error ("type mismatch in conditional expression");
3963 	  debug_generic_expr (lhs_type);
3964 	  debug_generic_expr (rhs2_type);
3965 	  debug_generic_expr (rhs3_type);
3966 	  return true;
3967 	}
3968       break;
3969 
3970     case VEC_PERM_EXPR:
3971       if (!useless_type_conversion_p (lhs_type, rhs1_type)
3972 	  || !useless_type_conversion_p (lhs_type, rhs2_type))
3973 	{
3974 	  error ("type mismatch in vector permute expression");
3975 	  debug_generic_expr (lhs_type);
3976 	  debug_generic_expr (rhs1_type);
3977 	  debug_generic_expr (rhs2_type);
3978 	  debug_generic_expr (rhs3_type);
3979 	  return true;
3980 	}
3981 
3982       if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3983 	  || TREE_CODE (rhs2_type) != VECTOR_TYPE
3984 	  || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3985 	{
3986 	  error ("vector types expected in vector permute expression");
3987 	  debug_generic_expr (lhs_type);
3988 	  debug_generic_expr (rhs1_type);
3989 	  debug_generic_expr (rhs2_type);
3990 	  debug_generic_expr (rhs3_type);
3991 	  return true;
3992 	}
3993 
3994       if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3995 	  || TYPE_VECTOR_SUBPARTS (rhs2_type)
3996 	     != TYPE_VECTOR_SUBPARTS (rhs3_type)
3997 	  || TYPE_VECTOR_SUBPARTS (rhs3_type)
3998 	     != TYPE_VECTOR_SUBPARTS (lhs_type))
3999 	{
4000 	  error ("vectors with different element number found "
4001 		 "in vector permute expression");
4002 	  debug_generic_expr (lhs_type);
4003 	  debug_generic_expr (rhs1_type);
4004 	  debug_generic_expr (rhs2_type);
4005 	  debug_generic_expr (rhs3_type);
4006 	  return true;
4007 	}
4008 
4009       if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4010 	  || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4011 	     != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4012 	{
4013 	  error ("invalid mask type in vector permute expression");
4014 	  debug_generic_expr (lhs_type);
4015 	  debug_generic_expr (rhs1_type);
4016 	  debug_generic_expr (rhs2_type);
4017 	  debug_generic_expr (rhs3_type);
4018 	  return true;
4019 	}
4020 
4021       return false;
4022 
4023     case SAD_EXPR:
4024       if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4025 	  || !useless_type_conversion_p (lhs_type, rhs3_type)
4026 	  || 2 * GET_MODE_BITSIZE (GET_MODE_INNER
4027 				     (TYPE_MODE (TREE_TYPE (rhs1_type))))
4028 	       > GET_MODE_BITSIZE (GET_MODE_INNER
4029 				     (TYPE_MODE (TREE_TYPE (lhs_type)))))
4030 	{
4031 	  error ("type mismatch in sad expression");
4032 	  debug_generic_expr (lhs_type);
4033 	  debug_generic_expr (rhs1_type);
4034 	  debug_generic_expr (rhs2_type);
4035 	  debug_generic_expr (rhs3_type);
4036 	  return true;
4037 	}
4038 
4039       if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4040 	  || TREE_CODE (rhs2_type) != VECTOR_TYPE
4041 	  || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4042 	{
4043 	  error ("vector types expected in sad expression");
4044 	  debug_generic_expr (lhs_type);
4045 	  debug_generic_expr (rhs1_type);
4046 	  debug_generic_expr (rhs2_type);
4047 	  debug_generic_expr (rhs3_type);
4048 	  return true;
4049 	}
4050 
4051       return false;
4052 
4053     case DOT_PROD_EXPR:
4054     case REALIGN_LOAD_EXPR:
4055       /* FIXME.  */
4056       return false;
4057 
4058     default:
4059       gcc_unreachable ();
4060     }
4061   return false;
4062 }
4063 
4064 /* Verify a gimple assignment statement STMT with a single rhs.
4065    Returns true if anything is wrong.  */
4066 
4067 static bool
4068 verify_gimple_assign_single (gassign *stmt)
4069 {
4070   enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4071   tree lhs = gimple_assign_lhs (stmt);
4072   tree lhs_type = TREE_TYPE (lhs);
4073   tree rhs1 = gimple_assign_rhs1 (stmt);
4074   tree rhs1_type = TREE_TYPE (rhs1);
4075   bool res = false;
4076 
4077   if (!useless_type_conversion_p (lhs_type, rhs1_type))
4078     {
4079       error ("non-trivial conversion at assignment");
4080       debug_generic_expr (lhs_type);
4081       debug_generic_expr (rhs1_type);
4082       return true;
4083     }
4084 
4085   if (gimple_clobber_p (stmt)
4086       && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4087     {
4088       error ("non-decl/MEM_REF LHS in clobber statement");
4089       debug_generic_expr (lhs);
4090       return true;
4091     }
4092 
4093   if (handled_component_p (lhs)
4094       || TREE_CODE (lhs) == MEM_REF
4095       || TREE_CODE (lhs) == TARGET_MEM_REF)
4096     res |= verify_types_in_gimple_reference (lhs, true);
4097 
4098   /* Special codes we cannot handle via their class.  */
4099   switch (rhs_code)
4100     {
4101     case ADDR_EXPR:
4102       {
4103 	tree op = TREE_OPERAND (rhs1, 0);
4104 	if (!is_gimple_addressable (op))
4105 	  {
4106 	    error ("invalid operand in unary expression");
4107 	    return true;
4108 	  }
4109 
4110 	/* Technically there is no longer a need for matching types, but
4111 	   gimple hygiene asks for this check.  In LTO we can end up
4112 	   combining incompatible units and thus end up with addresses
4113 	   of globals that change their type to a common one.  */
4114 	if (!in_lto_p
4115 	    && !types_compatible_p (TREE_TYPE (op),
4116 				    TREE_TYPE (TREE_TYPE (rhs1)))
4117 	    && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4118 							  TREE_TYPE (op)))
4119 	  {
4120 	    error ("type mismatch in address expression");
4121 	    debug_generic_stmt (TREE_TYPE (rhs1));
4122 	    debug_generic_stmt (TREE_TYPE (op));
4123 	    return true;
4124 	  }
4125 
4126 	return verify_types_in_gimple_reference (op, true);
4127       }
4128 
4129     /* tcc_reference  */
4130     case INDIRECT_REF:
4131       error ("INDIRECT_REF in gimple IL");
4132       return true;
4133 
4134     case COMPONENT_REF:
4135     case BIT_FIELD_REF:
4136     case ARRAY_REF:
4137     case ARRAY_RANGE_REF:
4138     case VIEW_CONVERT_EXPR:
4139     case REALPART_EXPR:
4140     case IMAGPART_EXPR:
4141     case TARGET_MEM_REF:
4142     case MEM_REF:
4143       if (!is_gimple_reg (lhs)
4144 	  && is_gimple_reg_type (TREE_TYPE (lhs)))
4145 	{
4146 	  error ("invalid rhs for gimple memory store");
4147 	  debug_generic_stmt (lhs);
4148 	  debug_generic_stmt (rhs1);
4149 	  return true;
4150 	}
4151       return res || verify_types_in_gimple_reference (rhs1, false);
4152 
4153     /* tcc_constant  */
4154     case SSA_NAME:
4155     case INTEGER_CST:
4156     case REAL_CST:
4157     case FIXED_CST:
4158     case COMPLEX_CST:
4159     case VECTOR_CST:
4160     case STRING_CST:
4161       return res;
4162 
4163     /* tcc_declaration  */
4164     case CONST_DECL:
4165       return res;
4166     case VAR_DECL:
4167     case PARM_DECL:
4168       if (!is_gimple_reg (lhs)
4169 	  && !is_gimple_reg (rhs1)
4170 	  && is_gimple_reg_type (TREE_TYPE (lhs)))
4171 	{
4172 	  error ("invalid rhs for gimple memory store");
4173 	  debug_generic_stmt (lhs);
4174 	  debug_generic_stmt (rhs1);
4175 	  return true;
4176 	}
4177       return res;
4178 
4179     case CONSTRUCTOR:
4180       if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4181 	{
4182 	  unsigned int i;
4183 	  tree elt_i, elt_v, elt_t = NULL_TREE;
4184 
4185 	  if (CONSTRUCTOR_NELTS (rhs1) == 0)
4186 	    return res;
4187 	  /* For vector CONSTRUCTORs we require that either it is empty
4188 	     CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4189 	     (then the element count must be correct to cover the whole
4190 	     outer vector and index must be NULL on all elements, or it is
4191 	     a CONSTRUCTOR of scalar elements, where we as an exception allow
4192 	     smaller number of elements (assuming zero filling) and
4193 	     consecutive indexes as compared to NULL indexes (such
4194 	     CONSTRUCTORs can appear in the IL from FEs).  */
4195 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4196 	    {
4197 	      if (elt_t == NULL_TREE)
4198 		{
4199 		  elt_t = TREE_TYPE (elt_v);
4200 		  if (TREE_CODE (elt_t) == VECTOR_TYPE)
4201 		    {
4202 		      tree elt_t = TREE_TYPE (elt_v);
4203 		      if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4204 						      TREE_TYPE (elt_t)))
4205 			{
4206 			  error ("incorrect type of vector CONSTRUCTOR"
4207 				 " elements");
4208 			  debug_generic_stmt (rhs1);
4209 			  return true;
4210 			}
4211 		      else if (CONSTRUCTOR_NELTS (rhs1)
4212 			       * TYPE_VECTOR_SUBPARTS (elt_t)
4213 			       != TYPE_VECTOR_SUBPARTS (rhs1_type))
4214 			{
4215 			  error ("incorrect number of vector CONSTRUCTOR"
4216 				 " elements");
4217 			  debug_generic_stmt (rhs1);
4218 			  return true;
4219 			}
4220 		    }
4221 		  else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4222 						       elt_t))
4223 		    {
4224 		      error ("incorrect type of vector CONSTRUCTOR elements");
4225 		      debug_generic_stmt (rhs1);
4226 		      return true;
4227 		    }
4228 		  else if (CONSTRUCTOR_NELTS (rhs1)
4229 			   > TYPE_VECTOR_SUBPARTS (rhs1_type))
4230 		    {
4231 		      error ("incorrect number of vector CONSTRUCTOR elements");
4232 		      debug_generic_stmt (rhs1);
4233 		      return true;
4234 		    }
4235 		}
4236 	      else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4237 		{
4238 		  error ("incorrect type of vector CONSTRUCTOR elements");
4239 		  debug_generic_stmt (rhs1);
4240 		  return true;
4241 		}
4242 	      if (elt_i != NULL_TREE
4243 		  && (TREE_CODE (elt_t) == VECTOR_TYPE
4244 		      || TREE_CODE (elt_i) != INTEGER_CST
4245 		      || compare_tree_int (elt_i, i) != 0))
4246 		{
4247 		  error ("vector CONSTRUCTOR with non-NULL element index");
4248 		  debug_generic_stmt (rhs1);
4249 		  return true;
4250 		}
4251 	      if (!is_gimple_val (elt_v))
4252 		{
4253 		  error ("vector CONSTRUCTOR element is not a GIMPLE value");
4254 		  debug_generic_stmt (rhs1);
4255 		  return true;
4256 		}
4257 	    }
4258 	}
4259       else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4260 	{
4261 	  error ("non-vector CONSTRUCTOR with elements");
4262 	  debug_generic_stmt (rhs1);
4263 	  return true;
4264 	}
4265       return res;
4266     case OBJ_TYPE_REF:
4267     case ASSERT_EXPR:
4268     case WITH_SIZE_EXPR:
4269       /* FIXME.  */
4270       return res;
4271 
4272     default:;
4273     }
4274 
4275   return res;
4276 }
4277 
4278 /* Verify the contents of a GIMPLE_ASSIGN STMT.  Returns true when there
4279    is a problem, otherwise false.  */
4280 
4281 static bool
4282 verify_gimple_assign (gassign *stmt)
4283 {
4284   switch (gimple_assign_rhs_class (stmt))
4285     {
4286     case GIMPLE_SINGLE_RHS:
4287       return verify_gimple_assign_single (stmt);
4288 
4289     case GIMPLE_UNARY_RHS:
4290       return verify_gimple_assign_unary (stmt);
4291 
4292     case GIMPLE_BINARY_RHS:
4293       return verify_gimple_assign_binary (stmt);
4294 
4295     case GIMPLE_TERNARY_RHS:
4296       return verify_gimple_assign_ternary (stmt);
4297 
4298     default:
4299       gcc_unreachable ();
4300     }
4301 }
4302 
4303 /* Verify the contents of a GIMPLE_RETURN STMT.  Returns true when there
4304    is a problem, otherwise false.  */
4305 
4306 static bool
4307 verify_gimple_return (greturn *stmt)
4308 {
4309   tree op = gimple_return_retval (stmt);
4310   tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4311 
4312   /* We cannot test for present return values as we do not fix up missing
4313      return values from the original source.  */
4314   if (op == NULL)
4315     return false;
4316 
4317   if (!is_gimple_val (op)
4318       && TREE_CODE (op) != RESULT_DECL)
4319     {
4320       error ("invalid operand in return statement");
4321       debug_generic_stmt (op);
4322       return true;
4323     }
4324 
4325   if ((TREE_CODE (op) == RESULT_DECL
4326        && DECL_BY_REFERENCE (op))
4327       || (TREE_CODE (op) == SSA_NAME
4328 	  && SSA_NAME_VAR (op)
4329 	  && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4330 	  && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4331     op = TREE_TYPE (op);
4332 
4333   if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4334     {
4335       error ("invalid conversion in return statement");
4336       debug_generic_stmt (restype);
4337       debug_generic_stmt (TREE_TYPE (op));
4338       return true;
4339     }
4340 
4341   return false;
4342 }
4343 
4344 
4345 /* Verify the contents of a GIMPLE_GOTO STMT.  Returns true when there
4346    is a problem, otherwise false.  */
4347 
4348 static bool
4349 verify_gimple_goto (ggoto *stmt)
4350 {
4351   tree dest = gimple_goto_dest (stmt);
4352 
4353   /* ???  We have two canonical forms of direct goto destinations, a
4354      bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL.  */
4355   if (TREE_CODE (dest) != LABEL_DECL
4356       && (!is_gimple_val (dest)
4357 	  || !POINTER_TYPE_P (TREE_TYPE (dest))))
4358     {
4359       error ("goto destination is neither a label nor a pointer");
4360       return true;
4361     }
4362 
4363   return false;
4364 }
4365 
/* Verify the contents of a GIMPLE_SWITCH STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_switch (gswitch *stmt)
{
  unsigned int i, n;
  tree elt, prev_upper_bound = NULL_TREE;
  tree index_type, elt_type = NULL_TREE;

  /* The switch index must be a GIMPLE value of integral type.  */
  if (!is_gimple_val (gimple_switch_index (stmt)))
    {
      error ("invalid operand to switch statement");
      debug_generic_stmt (gimple_switch_index (stmt));
      return true;
    }

  index_type = TREE_TYPE (gimple_switch_index (stmt));
  if (! INTEGRAL_TYPE_P (index_type))
    {
      error ("non-integral type switch statement");
      debug_generic_expr (index_type);
      return true;
    }

  /* Label 0 is the default case; it must carry no CASE_LOW/CASE_HIGH.  */
  elt = gimple_switch_label (stmt, 0);
  if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
    {
      error ("invalid default case label in switch statement");
      debug_generic_expr (elt);
      return true;
    }

  /* Check the remaining labels: well-formed ranges, one common type,
     sorted and non-overlapping.  */
  n = gimple_switch_num_labels (stmt);
  for (i = 1; i < n; i++)
    {
      elt = gimple_switch_label (stmt, i);

      /* Every non-default label needs a lower bound.  */
      if (! CASE_LOW (elt))
	{
	  error ("invalid case label in switch statement");
	  debug_generic_expr (elt);
	  return true;
	}
      /* A case range must satisfy CASE_LOW < CASE_HIGH.  */
      if (CASE_HIGH (elt)
	  && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
	{
	  error ("invalid case range in switch statement");
	  debug_generic_expr (elt);
	  return true;
	}

      if (elt_type)
	{
	  /* All labels must share the type established by the first.  */
	  if (TREE_TYPE (CASE_LOW (elt)) != elt_type
	      || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
	    {
	      error ("type mismatch for case label in switch statement");
	      debug_generic_expr (elt);
	      return true;
	    }
	}
      else
	{
	  /* First non-default label fixes the label type; it may not be
	     wider than the switch index.  */
	  elt_type = TREE_TYPE (CASE_LOW (elt));
	  if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
	    {
	      error ("type precision mismatch in switch statement");
	      return true;
	    }
	}

      /* Labels must be strictly increasing and non-overlapping.  */
      if (prev_upper_bound)
	{
	  if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
	    {
	      error ("case labels not sorted in switch statement");
	      return true;
	    }
	}

      prev_upper_bound = CASE_HIGH (elt);
      if (! prev_upper_bound)
	prev_upper_bound = CASE_LOW (elt);
    }

  return false;
}
4454 
4455 /* Verify a gimple debug statement STMT.
4456    Returns true if anything is wrong.  */
4457 
4458 static bool
4459 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4460 {
4461   /* There isn't much that could be wrong in a gimple debug stmt.  A
4462      gimple debug bind stmt, for example, maps a tree, that's usually
4463      a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4464      component or member of an aggregate type, to another tree, that
4465      can be an arbitrary expression.  These stmts expand into debug
4466      insns, and are converted to debug notes by var-tracking.c.  */
4467   return false;
4468 }
4469 
4470 /* Verify a gimple label statement STMT.
4471    Returns true if anything is wrong.  */
4472 
4473 static bool
4474 verify_gimple_label (glabel *stmt)
4475 {
4476   tree decl = gimple_label_label (stmt);
4477   int uid;
4478   bool err = false;
4479 
4480   if (TREE_CODE (decl) != LABEL_DECL)
4481     return true;
4482   if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4483       && DECL_CONTEXT (decl) != current_function_decl)
4484     {
4485       error ("label's context is not the current function decl");
4486       err |= true;
4487     }
4488 
4489   uid = LABEL_DECL_UID (decl);
4490   if (cfun->cfg
4491       && (uid == -1
4492 	  || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4493     {
4494       error ("incorrect entry in label_to_block_map");
4495       err |= true;
4496     }
4497 
4498   uid = EH_LANDING_PAD_NR (decl);
4499   if (uid)
4500     {
4501       eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4502       if (decl != lp->post_landing_pad)
4503 	{
4504 	  error ("incorrect setting of landing pad number");
4505 	  err |= true;
4506 	}
4507     }
4508 
4509   return err;
4510 }
4511 
4512 /* Verify a gimple cond statement STMT.
4513    Returns true if anything is wrong.  */
4514 
4515 static bool
4516 verify_gimple_cond (gcond *stmt)
4517 {
4518   if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4519     {
4520       error ("invalid comparison code in gimple cond");
4521       return true;
4522     }
4523   if (!(!gimple_cond_true_label (stmt)
4524 	|| TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4525       || !(!gimple_cond_false_label (stmt)
4526 	   || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4527     {
4528       error ("invalid labels in gimple cond");
4529       return true;
4530     }
4531 
4532   return verify_gimple_comparison (boolean_type_node,
4533 				   gimple_cond_lhs (stmt),
4534 				   gimple_cond_rhs (stmt));
4535 }
4536 
/* Verify the GIMPLE statement STMT.  Returns true if there is an
   error, otherwise false.  */

static bool
verify_gimple_stmt (gimple stmt)
{
  /* Dispatch on the statement code to the matching checker.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return verify_gimple_assign (as_a <gassign *> (stmt));

    case GIMPLE_LABEL:
      return verify_gimple_label (as_a <glabel *> (stmt));

    case GIMPLE_CALL:
      return verify_gimple_call (as_a <gcall *> (stmt));

    case GIMPLE_COND:
      return verify_gimple_cond (as_a <gcond *> (stmt));

    case GIMPLE_GOTO:
      return verify_gimple_goto (as_a <ggoto *> (stmt));

    case GIMPLE_SWITCH:
      return verify_gimple_switch (as_a <gswitch *> (stmt));

    case GIMPLE_RETURN:
      return verify_gimple_return (as_a <greturn *> (stmt));

    case GIMPLE_ASM:
      /* Inline asm operands are not verified.  */
      return false;

    case GIMPLE_TRANSACTION:
      return verify_gimple_transaction (as_a <gtransaction *> (stmt));

    /* Tuples that do not have tree operands.  */
    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
    case GIMPLE_RESX:
    case GIMPLE_EH_DISPATCH:
    case GIMPLE_EH_MUST_NOT_THROW:
      return false;

    CASE_GIMPLE_OMP:
      /* OpenMP directives are validated by the FE and never operated
	 on by the optimizers.  Furthermore, GIMPLE_OMP_FOR may contain
	 non-gimple expressions when the main index variable has had
	 its address taken.  This does not affect the loop itself
	 because the header of an GIMPLE_OMP_FOR is merely used to determine
	 how to setup the parallel iteration.  */
      return false;

    case GIMPLE_DEBUG:
      return verify_gimple_debug (stmt);

    default:
      gcc_unreachable ();
    }
}
4596 
/* Verify the contents of a GIMPLE_PHI.  Returns true if there is a problem,
   and false otherwise.  */

static bool
verify_gimple_phi (gimple phi)
{
  bool err = false;
  unsigned i;
  tree phi_result = gimple_phi_result (phi);
  bool virtual_p;

  if (!phi_result)
    {
      error ("invalid PHI result");
      return true;
    }

  /* The result must be an SSA name; a virtual PHI result must be based
     on the function's single virtual operand.  */
  virtual_p = virtual_operand_p (phi_result);
  if (TREE_CODE (phi_result) != SSA_NAME
      || (virtual_p
	  && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
    {
      error ("invalid PHI result");
      err = true;
    }

  /* Each argument must be present and agree with the result's
     virtual-ness.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree t = gimple_phi_arg_def (phi, i);

      if (!t)
	{
	  error ("missing PHI def");
	  err |= true;
	  continue;
	}
      /* Addressable variables do have SSA_NAMEs but they
	 are not considered gimple values.  */
      else if ((TREE_CODE (t) == SSA_NAME
		&& virtual_p != virtual_operand_p (t))
	       || (virtual_p
		   && (TREE_CODE (t) != SSA_NAME
		       || SSA_NAME_VAR (t) != gimple_vop (cfun)))
	       || (!virtual_p
		   && !is_gimple_val (t)))
	{
	  error ("invalid PHI argument");
	  debug_generic_expr (t);
	  err |= true;
	}
#ifdef ENABLE_TYPES_CHECKING
      /* With extra checking, arguments must also be trivially
	 convertible to the result type.  */
      if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
	{
	  error ("incompatible types in PHI argument %u", i);
	  debug_generic_stmt (TREE_TYPE (phi_result));
	  debug_generic_stmt (TREE_TYPE (t));
	  err |= true;
	}
#endif
    }

  return err;
}
4660 
/* Verify the GIMPLE statements inside the sequence STMTS.  */

static bool
verify_gimple_in_seq_2 (gimple_seq stmts)
{
  gimple_stmt_iterator ittr;
  bool err = false;

  for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
    {
      gimple stmt = gsi_stmt (ittr);

      /* Recurse into statements that contain nested sequences; check
	 everything else directly.  */
      switch (gimple_code (stmt))
        {
	case GIMPLE_BIND:
	  err |= verify_gimple_in_seq_2 (
                   gimple_bind_body (as_a <gbind *> (stmt)));
	  break;

	case GIMPLE_TRY:
	  err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
	  err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
	  break;

	case GIMPLE_EH_FILTER:
	  err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
	  break;

	case GIMPLE_EH_ELSE:
	  {
	    geh_else *eh_else = as_a <geh_else *> (stmt);
	    err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
	    err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
	  }
	  break;

	case GIMPLE_CATCH:
	  err |= verify_gimple_in_seq_2 (gimple_catch_handler (
					   as_a <gcatch *> (stmt)));
	  break;

	case GIMPLE_TRANSACTION:
	  err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
	  break;

	default:
	  {
	    /* Leaf statement: verify it and dump it on failure.  */
	    bool err2 = verify_gimple_stmt (stmt);
	    if (err2)
	      debug_gimple_stmt (stmt);
	    err |= err2;
	  }
	}
    }

  return err;
}
4718 
4719 /* Verify the contents of a GIMPLE_TRANSACTION.  Returns true if there
4720    is a problem, otherwise false.  */
4721 
4722 static bool
4723 verify_gimple_transaction (gtransaction *stmt)
4724 {
4725   tree lab = gimple_transaction_label (stmt);
4726   if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4727     return true;
4728   return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4729 }
4730 
4731 
4732 /* Verify the GIMPLE statements inside the statement list STMTS.  */
4733 
4734 DEBUG_FUNCTION void
4735 verify_gimple_in_seq (gimple_seq stmts)
4736 {
4737   timevar_push (TV_TREE_STMT_VERIFY);
4738   if (verify_gimple_in_seq_2 (stmts))
4739     internal_error ("verify_gimple failed");
4740   timevar_pop (TV_TREE_STMT_VERIFY);
4741 }
4742 
4743 /* Return true when the T can be shared.  */
4744 
4745 static bool
4746 tree_node_can_be_shared (tree t)
4747 {
4748   if (IS_TYPE_OR_DECL_P (t)
4749       || is_gimple_min_invariant (t)
4750       || TREE_CODE (t) == SSA_NAME
4751       || t == error_mark_node
4752       || TREE_CODE (t) == IDENTIFIER_NODE)
4753     return true;
4754 
4755   if (TREE_CODE (t) == CASE_LABEL_EXPR)
4756     return true;
4757 
4758   if (DECL_P (t))
4759     return true;
4760 
4761   return false;
4762 }
4763 
4764 /* Called via walk_tree.  Verify tree sharing.  */
4765 
4766 static tree
4767 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4768 {
4769   hash_set<void *> *visited = (hash_set<void *> *) data;
4770 
4771   if (tree_node_can_be_shared (*tp))
4772     {
4773       *walk_subtrees = false;
4774       return NULL;
4775     }
4776 
4777   if (visited->add (*tp))
4778     return *tp;
4779 
4780   return NULL;
4781 }
4782 
4783 /* Called via walk_gimple_stmt.  Verify tree sharing.  */
4784 
4785 static tree
4786 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4787 {
4788   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4789   return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4790 }
4791 
4792 static bool eh_error_found;
4793 bool
4794 verify_eh_throw_stmt_node (const gimple &stmt, const int &,
4795 			   hash_set<gimple> *visited)
4796 {
4797   if (!visited->contains (stmt))
4798     {
4799       error ("dead STMT in EH table");
4800       debug_gimple_stmt (stmt);
4801       eh_error_found = true;
4802     }
4803   return true;
4804 }
4805 
4806 /* Verify if the location LOCs block is in BLOCKS.  */
4807 
4808 static bool
4809 verify_location (hash_set<tree> *blocks, location_t loc)
4810 {
4811   tree block = LOCATION_BLOCK (loc);
4812   if (block != NULL_TREE
4813       && !blocks->contains (block))
4814     {
4815       error ("location references block not in block tree");
4816       return true;
4817     }
4818   if (block != NULL_TREE)
4819     return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4820   return false;
4821 }
4822 
4823 /* Called via walk_tree.  Verify that expressions have no blocks.  */
4824 
4825 static tree
4826 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4827 {
4828   if (!EXPR_P (*tp))
4829     {
4830       *walk_subtrees = false;
4831       return NULL;
4832     }
4833 
4834   location_t loc = EXPR_LOCATION (*tp);
4835   if (LOCATION_BLOCK (loc) != NULL)
4836     return *tp;
4837 
4838   return NULL;
4839 }
4840 
4841 /* Called via walk_tree.  Verify locations of expressions.  */
4842 
4843 static tree
4844 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4845 {
4846   hash_set<tree> *blocks = (hash_set<tree> *) data;
4847 
4848   if (TREE_CODE (*tp) == VAR_DECL
4849       && DECL_HAS_DEBUG_EXPR_P (*tp))
4850     {
4851       tree t = DECL_DEBUG_EXPR (*tp);
4852       tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4853       if (addr)
4854 	return addr;
4855     }
4856   if ((TREE_CODE (*tp) == VAR_DECL
4857        || TREE_CODE (*tp) == PARM_DECL
4858        || TREE_CODE (*tp) == RESULT_DECL)
4859       && DECL_HAS_VALUE_EXPR_P (*tp))
4860     {
4861       tree t = DECL_VALUE_EXPR (*tp);
4862       tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4863       if (addr)
4864 	return addr;
4865     }
4866 
4867   if (!EXPR_P (*tp))
4868     {
4869       *walk_subtrees = false;
4870       return NULL;
4871     }
4872 
4873   location_t loc = EXPR_LOCATION (*tp);
4874   if (verify_location (blocks, loc))
4875     return *tp;
4876 
4877   return NULL;
4878 }
4879 
4880 /* Called via walk_gimple_op.  Verify locations of expressions.  */
4881 
4882 static tree
4883 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4884 {
4885   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4886   return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4887 }
4888 
4889 /* Insert all subblocks of BLOCK into BLOCKS and recurse.  */
4890 
4891 static void
4892 collect_subblocks (hash_set<tree> *blocks, tree block)
4893 {
4894   tree t;
4895   for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4896     {
4897       blocks->add (t);
4898       collect_subblocks (blocks, t);
4899     }
4900 }
4901 
/* Verify the GIMPLE statements in the CFG of FN.  */

DEBUG_FUNCTION void
verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
{
  basic_block bb;
  bool err = false;

  timevar_push (TV_TREE_STMT_VERIFY);
  /* Tree nodes seen so far, for the sharing check.  */
  hash_set<void *> visited;
  /* Statements seen in the IL, for the EH-table cross-check below.  */
  hash_set<gimple> visited_stmts;

  /* Collect all BLOCKs referenced by the BLOCK tree of FN.  */
  hash_set<tree> blocks;
  if (DECL_INITIAL (fn->decl))
    {
      blocks.add (DECL_INITIAL (fn->decl));
      collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
    }

  FOR_EACH_BB_FN (bb, fn)
    {
      gimple_stmt_iterator gsi;

      /* First check the PHI nodes of the block.  */
      for (gphi_iterator gpi = gsi_start_phis (bb);
	   !gsi_end_p (gpi);
	   gsi_next (&gpi))
	{
	  gphi *phi = gpi.phi ();
	  bool err2 = false;
	  unsigned i;

	  visited_stmts.add (phi);

	  if (gimple_bb (phi) != bb)
	    {
	      error ("gimple_bb (phi) is set to a wrong basic block");
	      err2 = true;
	    }

	  err2 |= verify_gimple_phi (phi);

	  /* Only PHI arguments have locations.  */
	  if (gimple_location (phi) != UNKNOWN_LOCATION)
	    {
	      error ("PHI node with location");
	      err2 = true;
	    }

	  /* Check each argument for sharing violations and valid
	     locations.  */
	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree arg = gimple_phi_arg_def (phi, i);
	      tree addr = walk_tree (&arg, verify_node_sharing_1,
				     &visited, NULL);
	      if (addr)
		{
		  error ("incorrect sharing of tree nodes");
		  debug_generic_expr (addr);
		  err2 |= true;
		}
	      location_t loc = gimple_phi_arg_location (phi, i);
	      if (virtual_operand_p (gimple_phi_result (phi))
		  && loc != UNKNOWN_LOCATION)
		{
		  error ("virtual PHI with argument locations");
		  err2 = true;
		}
	      addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
	      if (addr)
		{
		  debug_generic_expr (addr);
		  err2 = true;
		}
	      err2 |= verify_location (&blocks, loc);
	    }

	  if (err2)
	    debug_gimple_stmt (phi);
	  err |= err2;
	}

      /* Then check the regular statements of the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  bool err2 = false;
	  struct walk_stmt_info wi;
	  tree addr;
	  int lp_nr;

	  visited_stmts.add (stmt);

	  if (gimple_bb (stmt) != bb)
	    {
	      error ("gimple_bb (stmt) is set to a wrong basic block");
	      err2 = true;
	    }

	  err2 |= verify_gimple_stmt (stmt);
	  err2 |= verify_location (&blocks, gimple_location (stmt));

	  /* Sharing check over all operands of the statement.  */
	  memset (&wi, 0, sizeof (wi));
	  wi.info = (void *) &visited;
	  addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
	  if (addr)
	    {
	      error ("incorrect sharing of tree nodes");
	      debug_generic_expr (addr);
	      err2 |= true;
	    }

	  /* Location/block check over all operands.  */
	  memset (&wi, 0, sizeof (wi));
	  wi.info = (void *) &blocks;
	  addr = walk_gimple_op (stmt, verify_expr_location, &wi);
	  if (addr)
	    {
	      debug_generic_expr (addr);
	      err2 |= true;
	    }

	  /* ???  Instead of not checking these stmts at all the walker
	     should know its context via wi.  */
	  if (!is_gimple_debug (stmt)
	      && !is_gimple_omp (stmt))
	    {
	      memset (&wi, 0, sizeof (wi));
	      addr = walk_gimple_op (stmt, verify_expr, &wi);
	      if (addr)
		{
		  debug_generic_expr (addr);
		  inform (gimple_location (stmt), "in statement");
		  err2 |= true;
		}
	    }

	  /* If the statement is marked as part of an EH region, then it is
	     expected that the statement could throw.  Verify that when we
	     have optimizations that simplify statements such that we prove
	     that they cannot throw, that we update other data structures
	     to match.  */
	  lp_nr = lookup_stmt_eh_lp (stmt);
	  if (lp_nr > 0)
	    {
	      if (!stmt_could_throw_p (stmt))
		{
		  if (verify_nothrow)
		    {
		      error ("statement marked for throw, but doesn%'t");
		      err2 |= true;
		    }
		}
	      else if (!gsi_one_before_end_p (gsi))
		{
		  error ("statement marked for throw in middle of block");
		  err2 |= true;
		}
	    }

	  if (err2)
	    debug_gimple_stmt (stmt);
	  err |= err2;
	}
    }

  /* Cross-check: every statement in the EH throw table must still be
     present in the IL.  */
  eh_error_found = false;
  hash_map<gimple, int> *eh_table = get_eh_throw_stmt_table (cfun);
  if (eh_table)
    eh_table->traverse<hash_set<gimple> *, verify_eh_throw_stmt_node>
      (&visited_stmts);

  if (err || eh_error_found)
    internal_error ("verify_gimple failed");

  verify_histograms ();
  timevar_pop (TV_TREE_STMT_VERIFY);
}
5077 
5078 
/* Verifies that the flow information is OK.  Checks the IL-free status
   of the ENTRY/EXIT blocks, label placement inside blocks, the absence
   of control flow in the middle of a block, and that the flags on the
   outgoing edges of every block match its last statement.  Returns
   nonzero if any check fails (after emitting diagnostics).  */

static int
gimple_verify_flow_info (void)
{
  int err = 0;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple stmt;
  edge e;
  edge_iterator ei;

  /* The artificial entry and exit blocks must never own statements or
     PHI nodes.  */
  if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
      || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
    {
      error ("ENTRY_BLOCK has IL associated with it");
      err = 1;
    }

  if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
      || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
    {
      error ("EXIT_BLOCK has IL associated with it");
      err = 1;
    }

  /* The exit block may only be reached by return-like edges, never by a
     plain fallthru.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
	error ("fallthru to exit from bb %d", e->src->index);
	err = 1;
      }

  FOR_EACH_BB_FN (bb, cfun)
    {
      bool found_ctrl_stmt = false;

      stmt = NULL;

      /* Skip labels on the start of basic block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  tree label;
	  gimple prev_stmt = stmt;

	  stmt = gsi_stmt (gsi);

	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  /* Nonlocal and EH landing pad labels must be the very first
	     statement of their block; PREV_STMT being non-NULL means we
	     already saw an earlier label.  */
	  label = gimple_label_label (as_a <glabel *> (stmt));
	  if (prev_stmt && DECL_NONLOCAL (label))
	    {
	      error ("nonlocal label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
		       bb->index);
	      err = 1;
	    }

	  if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
	    {
	      error ("EH landing pad label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
		       bb->index);
	      err = 1;
	    }

	  /* Each label must map back to the block that contains it.  */
	  if (label_to_block (label) != bb)
	    {
	      error ("label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " to block does not match in bb %d",
		       bb->index);
	      err = 1;
	    }

	  if (decl_function_context (label) != current_function_decl)
	    {
	      error ("label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " has incorrect context in bb %d",
		       bb->index);
	      err = 1;
	    }
	}

      /* Verify that body of basic block BB is free of control flow.  */
      for (; !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  /* A block-ending statement seen on a previous iteration means
	     any further statement is control flow in mid-block.  */
	  if (found_ctrl_stmt)
	    {
	      error ("control flow in the middle of basic block %d",
		     bb->index);
	      err = 1;
	    }

	  if (stmt_ends_bb_p (stmt))
	    found_ctrl_stmt = true;

	  /* Labels were all consumed by the loop above; any label here
	     is misplaced.  */
	  if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	    {
	      error ("label ");
	      print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
	      fprintf (stderr, " in the middle of basic block %d", bb->index);
	      err = 1;
	    }
	}

      /* Now check the outgoing edge flags against the statement that
	 ends BB.  */
      gsi = gsi_last_bb (bb);
      if (gsi_end_p (gsi))
	continue;

      stmt = gsi_stmt (gsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      err |= verify_eh_edges (stmt);

      if (is_ctrl_stmt (stmt))
	{
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & EDGE_FALLTHRU)
	      {
		error ("fallthru edge after a control statement in bb %d",
		       bb->index);
		err = 1;
	      }
	}

      if (gimple_code (stmt) != GIMPLE_COND)
	{
	  /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
	     after anything else but if statement.  */
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
	      {
		error ("true/false edge after a non-GIMPLE_COND in bb %d",
		       bb->index);
		err = 1;
	      }
	}

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    edge true_edge;
	    edge false_edge;

	    /* A GIMPLE_COND must have exactly one true and one false
	       successor, neither of which is fallthru or abnormal.  */
	    extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

	    if (!true_edge
		|| !false_edge
		|| !(true_edge->flags & EDGE_TRUE_VALUE)
		|| !(false_edge->flags & EDGE_FALSE_VALUE)
		|| (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| EDGE_COUNT (bb->succs) >= 3)
	      {
		error ("wrong outgoing edge flags at end of bb %d",
		       bb->index);
		err = 1;
	      }
	  }
	  break;

	case GIMPLE_GOTO:
	  /* Simple gotos are represented implicitly by edges, so an
	     explicit one here is an error; computed gotos may only have
	     abnormal successors.  */
	  if (simple_goto_p (stmt))
	    {
	      error ("explicit goto at end of bb %d", bb->index);
	      err = 1;
	    }
	  else
	    {
	      /* FIXME.  We should double check that the labels in the
		 destination blocks have their address taken.  */
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
				 | EDGE_FALSE_VALUE))
		    || !(e->flags & EDGE_ABNORMAL))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = 1;
		  }
	    }
	  break;

	case GIMPLE_CALL:
	  /* Only __builtin_return is checked like a return.  */
	  if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
	    break;
	  /* ... fallthru ... */
	case GIMPLE_RETURN:
	  /* A return must have a single, plain edge to the exit block.  */
	  if (!single_succ_p (bb)
	      || (single_succ_edge (bb)->flags
		  & (EDGE_FALLTHRU | EDGE_ABNORMAL
		     | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
	    {
	      error ("wrong outgoing edge flags at end of bb %d", bb->index);
	      err = 1;
	    }
	  if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    {
	      error ("return edge does not point to exit in bb %d",
		     bb->index);
	      err = 1;
	    }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    tree prev;
	    edge e;
	    size_t i, n;

	    n = gimple_switch_num_labels (switch_stmt);

	    /* Mark all the destination basic blocks.  The aux field is
	       used as a scratch mark: 1 = block is the target of some
	       case label, 2 = an actual outgoing edge to it was seen.  */
	    for (i = 0; i < n; ++i)
	      {
		tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
		basic_block label_bb = label_to_block (lab);
		gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
		label_bb->aux = (void *)1;
	      }

	    /* Verify that the case labels are sorted.  */
	    prev = gimple_switch_label (switch_stmt, 0);
	    for (i = 1; i < n; ++i)
	      {
		tree c = gimple_switch_label (switch_stmt, i);
		if (!CASE_LOW (c))
		  {
		    error ("found default case not at the start of "
			   "case vector");
		    err = 1;
		    continue;
		  }
		if (CASE_LOW (prev)
		    && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
		  {
		    error ("case labels not sorted: ");
		    print_generic_expr (stderr, prev, 0);
		    fprintf (stderr," is greater than ");
		    print_generic_expr (stderr, c, 0);
		    fprintf (stderr," but comes before it.\n");
		    err = 1;
		  }
		prev = c;
	      }
	    /* VRP will remove the default case if it can prove it will
	       never be executed.  So do not verify there always exists
	       a default case here.  */

	    /* Every outgoing edge must lead to a marked case target, and
	       none may carry fallthru/abnormal/true/false flags.  */
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      {
		if (!e->dest->aux)
		  {
		    error ("extra outgoing edge %d->%d",
			   bb->index, e->dest->index);
		    err = 1;
		  }

		e->dest->aux = (void *)2;
		if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
				 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = 1;
		  }
	      }

	    /* Check that we have all of them.  */
	    for (i = 0; i < n; ++i)
	      {
		tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
		basic_block label_bb = label_to_block (lab);

		/* Still marked 1 means no edge to this case target.  */
		if (label_bb->aux != (void *)2)
		  {
		    error ("missing edge %i->%i", bb->index, label_bb->index);
		    err = 1;
		  }
	      }

	    /* Clear the aux marks again.  */
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      e->dest->aux = (void *)0;
	  }
	  break;

	case GIMPLE_EH_DISPATCH:
	  err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
	  break;

	default:
	  break;
	}
    }

  /* Re-verify dominators if fast queries are available.  */
  if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
    verify_dominators (CDI_DOMINATORS);

  return err;
}
5390 
5391 
5392 /* Updates phi nodes after creating a forwarder block joined
5393    by edge FALLTHRU.  */
5394 
5395 static void
5396 gimple_make_forwarder_block (edge fallthru)
5397 {
5398   edge e;
5399   edge_iterator ei;
5400   basic_block dummy, bb;
5401   tree var;
5402   gphi_iterator gsi;
5403 
5404   dummy = fallthru->src;
5405   bb = fallthru->dest;
5406 
5407   if (single_pred_p (bb))
5408     return;
5409 
5410   /* If we redirected a branch we must create new PHI nodes at the
5411      start of BB.  */
5412   for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5413     {
5414       gphi *phi, *new_phi;
5415 
5416       phi = gsi.phi ();
5417       var = gimple_phi_result (phi);
5418       new_phi = create_phi_node (var, bb);
5419       gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5420       add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5421 		   UNKNOWN_LOCATION);
5422     }
5423 
5424   /* Add the arguments we have stored on edges.  */
5425   FOR_EACH_EDGE (e, ei, bb->preds)
5426     {
5427       if (e == fallthru)
5428 	continue;
5429 
5430       flush_pending_stmts (e);
5431     }
5432 }
5433 
5434 
5435 /* Return a non-special label in the head of basic block BLOCK.
5436    Create one if it doesn't exist.  */
5437 
5438 tree
5439 gimple_block_label (basic_block bb)
5440 {
5441   gimple_stmt_iterator i, s = gsi_start_bb (bb);
5442   bool first = true;
5443   tree label;
5444   glabel *stmt;
5445 
5446   for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5447     {
5448       stmt = dyn_cast <glabel *> (gsi_stmt (i));
5449       if (!stmt)
5450 	break;
5451       label = gimple_label_label (stmt);
5452       if (!DECL_NONLOCAL (label))
5453 	{
5454 	  if (!first)
5455 	    gsi_move_before (&i, &s);
5456 	  return label;
5457 	}
5458     }
5459 
5460   label = create_artificial_label (UNKNOWN_LOCATION);
5461   stmt = gimple_build_label (label);
5462   gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5463   return label;
5464 }
5465 
5466 
5467 /* Attempt to perform edge redirection by replacing a possibly complex
5468    jump instruction by a goto or by removing the jump completely.
5469    This can apply only if all edges now point to the same block.  The
5470    parameters and return values are equivalent to
5471    redirect_edge_and_branch.  */
5472 
5473 static edge
5474 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5475 {
5476   basic_block src = e->src;
5477   gimple_stmt_iterator i;
5478   gimple stmt;
5479 
5480   /* We can replace or remove a complex jump only when we have exactly
5481      two edges.  */
5482   if (EDGE_COUNT (src->succs) != 2
5483       /* Verify that all targets will be TARGET.  Specifically, the
5484 	 edge that is not E must also go to TARGET.  */
5485       || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5486     return NULL;
5487 
5488   i = gsi_last_bb (src);
5489   if (gsi_end_p (i))
5490     return NULL;
5491 
5492   stmt = gsi_stmt (i);
5493 
5494   if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5495     {
5496       gsi_remove (&i, true);
5497       e = ssa_redirect_edge (e, target);
5498       e->flags = EDGE_FALLTHRU;
5499       return e;
5500     }
5501 
5502   return NULL;
5503 }
5504 
5505 
/* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
   edge representing the redirected branch.  Abnormal edges and edges
   that already reach DEST cannot be redirected; EH edges are handled
   by redirect_eh_edge.  */

static edge
gimple_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block bb = e->src;
  gimple_stmt_iterator gsi;
  edge ret;
  gimple stmt;

  if (e->flags & EDGE_ABNORMAL)
    return NULL;

  if (e->dest == dest)
    return NULL;

  if (e->flags & EDGE_EH)
    return redirect_eh_edge (e, dest);

  /* First try to get rid of the controlling jump entirely.  */
  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
    {
      ret = gimple_try_redirect_by_replacing_jump (e, dest);
      if (ret)
	return ret;
    }

  /* Otherwise dispatch on the kind of statement that ends BB to update
     any explicit label references it holds before moving the edge.  */
  gsi = gsi_last_bb (bb);
  stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);

  switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
    {
    case GIMPLE_COND:
      /* For COND_EXPR, we only need to redirect the edge.  */
      break;

    case GIMPLE_GOTO:
      /* No non-abnormal edges should lead from a non-simple goto, and
	 simple ones should be represented implicitly.  */
      gcc_unreachable ();

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	tree label = gimple_block_label (dest);
        tree cases = get_cases_for_edge (e, switch_stmt);

	/* If we have a list of cases associated with E, then use it
	   as it's a lot faster than walking the entire case vector.  */
	if (cases)
	  {
	    edge e2 = find_edge (e->src, dest);
	    tree last, first;

	    /* Point every case on E's list at DEST's label.  */
	    first = cases;
	    while (cases)
	      {
		last = cases;
		CASE_LABEL (cases) = label;
		cases = CASE_CHAIN (cases);
	      }

	    /* If there was already an edge in the CFG, then we need
	       to move all the cases associated with E to E2.  */
	    if (e2)
	      {
		tree cases2 = get_cases_for_edge (e2, switch_stmt);

		/* Splice E's list onto the front of E2's list.  */
		CASE_CHAIN (last) = CASE_CHAIN (cases2);
		CASE_CHAIN (cases2) = first;
	      }
	    bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
	  }
	else
	  {
	    /* No cached case list: scan the whole case vector and
	       retarget every case that went to E's old destination.  */
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    for (i = 0; i < n; i++)
	      {
		tree elt = gimple_switch_label (switch_stmt, i);
		if (label_to_block (CASE_LABEL (elt)) == e->dest)
		  CASE_LABEL (elt) = label;
	      }
	  }
      }
      break;

    case GIMPLE_ASM:
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	int i, n = gimple_asm_nlabels (asm_stmt);
	tree label = NULL;

	/* Retarget every asm-goto label operand that referenced E's old
	   destination.  The block label for DEST is created lazily.  */
	for (i = 0; i < n; ++i)
	  {
	    tree cons = gimple_asm_label_op (asm_stmt, i);
	    if (label_to_block (TREE_VALUE (cons)) == e->dest)
	      {
		if (!label)
		  label = gimple_block_label (dest);
		TREE_VALUE (cons) = label;
	      }
	  }

	/* If we didn't find any label matching the former edge in the
	   asm labels, we must be redirecting the fallthrough
	   edge.  */
	gcc_assert (label || (e->flags & EDGE_FALLTHRU));
      }
      break;

    case GIMPLE_RETURN:
      /* The return becomes an ordinary fallthru into DEST.  */
      gsi_remove (&gsi, true);
      e->flags |= EDGE_FALLTHRU;
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_FOR:
      /* The edges from OMP constructs can be simply redirected.  */
      break;

    case GIMPLE_EH_DISPATCH:
      if (!(e->flags & EDGE_FALLTHRU))
	redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
      break;

    case GIMPLE_TRANSACTION:
      /* The ABORT edge has a stored label associated with it, otherwise
	 the edges are simply redirectable.  */
      if (e->flags == 0)
	gimple_transaction_set_label (as_a <gtransaction *> (stmt),
				      gimple_block_label (dest));
      break;

    default:
      /* Otherwise it must be a fallthru edge, and we don't need to
	 do anything besides redirecting it.  */
      gcc_assert (e->flags & EDGE_FALLTHRU);
      break;
    }

  /* Update/insert PHI nodes as necessary.  */

  /* Now update the edges in the CFG.  */
  e = ssa_redirect_edge (e, dest);

  return e;
}
5656 
5657 /* Returns true if it is possible to remove edge E by redirecting
5658    it to the destination of the other edge from E->src.  */
5659 
5660 static bool
5661 gimple_can_remove_branch_p (const_edge e)
5662 {
5663   if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5664     return false;
5665 
5666   return true;
5667 }
5668 
5669 /* Simple wrapper, as we can always redirect fallthru edges.  */
5670 
5671 static basic_block
5672 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5673 {
5674   e = gimple_redirect_edge_and_branch (e, dest);
5675   gcc_assert (e);
5676 
5677   return NULL;
5678 }
5679 
5680 
/* Splits basic block BB after statement STMT (but at least after the
   labels).  If STMT is NULL, BB is split just after the labels.
   Returns the newly created block that receives everything after the
   split point.  */

static basic_block
gimple_split_block (basic_block bb, void *stmt)
{
  gimple_stmt_iterator gsi;
  gimple_stmt_iterator gsi_tgt;
  gimple_seq list;
  basic_block new_bb;
  edge e;
  edge_iterator ei;

  new_bb = create_empty_bb (bb);

  /* Redirect the outgoing edges.  NEW_BB takes over all of BB's
     successors wholesale.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* Get a stmt iterator pointing to the first stmt to move.  A label
     STMT is treated like NULL: labels always stay in BB.  */
  if (!stmt || gimple_code ((gimple) stmt) == GIMPLE_LABEL)
    gsi = gsi_after_labels (bb);
  else
    {
      gsi = gsi_for_stmt ((gimple) stmt);
      gsi_next (&gsi);
    }

  /* Move everything from GSI to the new basic block.  */
  if (gsi_end_p (gsi))
    return new_bb;

  /* Split the statement list - avoid re-creating new containers as this
     brings ugly quadratic memory consumption in the inliner.
     (We are still quadratic since we need to update stmt BB pointers,
     sadly.)  */
  gsi_split_seq_before (&gsi, &list);
  set_bb_seq (new_bb, list);
  /* Fix up the owning-block pointer of every moved statement.  */
  for (gsi_tgt = gsi_start (list);
       !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
    gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);

  return new_bb;
}
5727 
5728 
5729 /* Moves basic block BB after block AFTER.  */
5730 
5731 static bool
5732 gimple_move_block_after (basic_block bb, basic_block after)
5733 {
5734   if (bb->prev_bb == after)
5735     return true;
5736 
5737   unlink_block (bb);
5738   link_block (bb, after);
5739 
5740   return true;
5741 }
5742 
5743 
5744 /* Return TRUE if block BB has no executable statements, otherwise return
5745    FALSE.  */
5746 
5747 static bool
5748 gimple_empty_block_p (basic_block bb)
5749 {
5750   /* BB must have no executable statements.  */
5751   gimple_stmt_iterator gsi = gsi_after_labels (bb);
5752   if (phi_nodes (bb))
5753     return false;
5754   if (gsi_end_p (gsi))
5755     return true;
5756   if (is_gimple_debug (gsi_stmt (gsi)))
5757     gsi_next_nondebug (&gsi);
5758   return gsi_end_p (gsi);
5759 }
5760 
5761 
5762 /* Split a basic block if it ends with a conditional branch and if the
5763    other part of the block is not empty.  */
5764 
5765 static basic_block
5766 gimple_split_block_before_cond_jump (basic_block bb)
5767 {
5768   gimple last, split_point;
5769   gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5770   if (gsi_end_p (gsi))
5771     return NULL;
5772   last = gsi_stmt (gsi);
5773   if (gimple_code (last) != GIMPLE_COND
5774       && gimple_code (last) != GIMPLE_SWITCH)
5775     return NULL;
5776   gsi_prev_nondebug (&gsi);
5777   split_point = gsi_stmt (gsi);
5778   return split_block (bb, split_point)->dest;
5779 }
5780 
5781 
5782 /* Return true if basic_block can be duplicated.  */
5783 
5784 static bool
5785 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5786 {
5787   return true;
5788 }
5789 
/* Create a duplicate of the basic block BB.  NOTE: This does not
   preserve SSA form.  The copy is placed just before the exit block;
   incoming/outgoing edges and PHI arguments are NOT set up here and
   must be added by the caller.  */

static basic_block
gimple_duplicate_bb (basic_block bb)
{
  basic_block new_bb;
  gimple_stmt_iterator gsi_tgt;

  new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);

  /* Copy the PHI nodes.  We ignore PHI node arguments here because
     the incoming edges have not been setup yet.  */
  for (gphi_iterator gpi = gsi_start_phis (bb);
       !gsi_end_p (gpi);
       gsi_next (&gpi))
    {
      gphi *phi, *copy;
      phi = gpi.phi ();
      copy = create_phi_node (NULL_TREE, new_bb);
      /* Give the copy a fresh SSA result mapped from the original.  */
      create_new_def_for (gimple_phi_result (phi), copy,
			  gimple_phi_result_ptr (copy));
      gimple_set_uid (copy, gimple_uid (phi));
    }

  gsi_tgt = gsi_start_bb (new_bb);
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      def_operand_p def_p;
      ssa_op_iter op_iter;
      tree lhs;
      gimple stmt, copy;

      /* Labels identify their block, so they must not be copied.  */
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      /* Don't duplicate label debug stmts.  */
      if (gimple_debug_bind_p (stmt)
	  && TREE_CODE (gimple_debug_bind_get_var (stmt))
	     == LABEL_DECL)
	continue;

      /* Create a new copy of STMT and duplicate STMT's virtual
	 operands.  */
      copy = gimple_copy (stmt);
      gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);

      maybe_duplicate_eh_stmt (copy, stmt);
      gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);

      /* When copying around a stmt writing into a local non-user
	 aggregate, make sure it won't share stack slot with other
	 vars.  */
      lhs = gimple_get_lhs (stmt);
      if (lhs && TREE_CODE (lhs) != SSA_NAME)
	{
	  tree base = get_base_address (lhs);
	  if (base
	      && (TREE_CODE (base) == VAR_DECL
		  || TREE_CODE (base) == RESULT_DECL)
	      && DECL_IGNORED_P (base)
	      && !TREE_STATIC (base)
	      && !DECL_EXTERNAL (base)
	      && (TREE_CODE (base) != VAR_DECL
		  || !DECL_HAS_VALUE_EXPR_P (base)))
	    DECL_NONSHAREABLE (base) = 1;
	}

      /* Create new names for all the definitions created by COPY and
	 add replacement mappings for each new name.  */
      FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
	create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
    }

  return new_bb;
}
5869 
/* Adds phi node arguments for edge E_COPY after basic block duplication.
   The PHI arguments of the copied destination are taken from the
   corresponding edge between the original blocks; duplicated blocks are
   recognized by the BB_DUPLICATED flag and mapped back with
   get_bb_original.  */

static void
add_phi_args_after_copy_edge (edge e_copy)
{
  basic_block bb, bb_copy = e_copy->src, dest;
  edge e;
  edge_iterator ei;
  gphi *phi, *phi_copy;
  tree def;
  gphi_iterator psi, psi_copy;

  /* Nothing to do when the copied destination has no PHIs.  */
  if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
    return;

  /* Map the source and destination of E_COPY back to the originals.  */
  bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;

  if (e_copy->dest->flags & BB_DUPLICATED)
    dest = get_bb_original (e_copy->dest);
  else
    dest = e_copy->dest;

  e = find_edge (bb, dest);
  if (!e)
    {
      /* During loop unrolling the target of the latch edge is copied.
	 In this case we are not looking for edge to dest, but to
	 duplicated block whose original was dest.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if ((e->dest->flags & BB_DUPLICATED)
	      && get_bb_original (e->dest) == dest)
	    break;
	}

      gcc_assert (e != NULL);
    }

  /* Walk the PHIs of the original and copied destinations in lockstep
     and transfer the argument from E to E_COPY.  */
  for (psi = gsi_start_phis (e->dest),
       psi_copy = gsi_start_phis (e_copy->dest);
       !gsi_end_p (psi);
       gsi_next (&psi), gsi_next (&psi_copy))
    {
      phi = psi.phi ();
      phi_copy = psi_copy.phi ();
      def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      add_phi_arg (phi_copy, def, e_copy,
		   gimple_phi_arg_location_from_edge (phi, e));
    }
}
5920 
5921 
5922 /* Basic block BB_COPY was created by code duplication.  Add phi node
5923    arguments for edges going out of BB_COPY.  The blocks that were
5924    duplicated have BB_DUPLICATED set.  */
5925 
5926 void
5927 add_phi_args_after_copy_bb (basic_block bb_copy)
5928 {
5929   edge e_copy;
5930   edge_iterator ei;
5931 
5932   FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5933     {
5934       add_phi_args_after_copy_edge (e_copy);
5935     }
5936 }
5937 
5938 /* Blocks in REGION_COPY array of length N_REGION were created by
5939    duplication of basic blocks.  Add phi node arguments for edges
5940    going from these blocks.  If E_COPY is not NULL, also add
5941    phi node arguments for its destination.*/
5942 
5943 void
5944 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5945 			 edge e_copy)
5946 {
5947   unsigned i;
5948 
5949   for (i = 0; i < n_region; i++)
5950     region_copy[i]->flags |= BB_DUPLICATED;
5951 
5952   for (i = 0; i < n_region; i++)
5953     add_phi_args_after_copy_bb (region_copy[i]);
5954   if (e_copy)
5955     add_phi_args_after_copy_edge (e_copy);
5956 
5957   for (i = 0; i < n_region; i++)
5958     region_copy[i]->flags &= ~BB_DUPLICATED;
5959 }
5960 
/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
   important exit edge EXIT.  By important we mean that no SSA name defined
   inside region is live over the other exit edges of the region.  All entry
   edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
   to the duplicate of the region.  Dominance and loop information is
   updated if UPDATE_DOMINANCE is true, but not the SSA web.  If
   UPDATE_DOMINANCE is false then we assume that the caller will update the
   dominance information after calling this function.  The new basic
   blocks are stored to REGION_COPY in the same order as they had in REGION,
   provided that REGION_COPY is not NULL.
   The function returns false if it is unable to copy the region,
   true otherwise.  */

bool
gimple_duplicate_sese_region (edge entry, edge exit,
			    basic_block *region, unsigned n_region,
			    basic_block *region_copy,
			    bool update_dominance)
{
  unsigned i;
  bool free_region_copy = false, copying_header = false;
  struct loop *loop = entry->dest->loop_father;
  edge exit_copy;
  vec<basic_block> doms;
  edge redirected;
  int total_freq = 0, entry_freq = 0;
  gcov_type total_count = 0, entry_count = 0;

  if (!can_copy_bbs_p (region, n_region))
    return false;

  /* Some sanity checking.  Note that we do not check for all possible
     missuses of the functions.  I.e. if you ask to copy something weird,
     it will work, but the state of structures probably will not be
     correct.  */
  for (i = 0; i < n_region; i++)
    {
      /* We do not handle subloops, i.e. all the blocks must belong to the
	 same loop.  */
      if (region[i]->loop_father != loop)
	return false;

      /* The loop header may only appear as the region's entry block.  */
      if (region[i] != entry->dest
	  && region[i] == loop->header)
	return false;
    }

  /* In case the function is used for loop header copying (which is the primary
     use), ensure that EXIT and its copy will be new latch and entry edges.  */
  if (loop->header == entry->dest)
    {
      copying_header = true;

      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
	return false;

      for (i = 0; i < n_region; i++)
	if (region[i] != exit->src
	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
	  return false;
    }

  initialize_original_copy_tables ();

  if (copying_header)
    set_loop_copy (loop, loop_outer (loop));
  else
    set_loop_copy (loop, loop);

  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  /* Record blocks outside the region that are dominated by something
     inside.  */
  if (update_dominance)
    {
      doms.create (0);
      doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
    }

  /* Capture profile data before copying so both region and copy can be
     rescaled afterwards; use counts when available, frequencies
     otherwise.  */
  if (entry->dest->count)
    {
      total_count = entry->dest->count;
      entry_count = entry->count;
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (entry_count > total_count)
	entry_count = total_count;
    }
  else
    {
      total_freq = entry->dest->frequency;
      entry_freq = EDGE_FREQUENCY (entry);
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (total_freq == 0)
	total_freq = 1;
      else if (entry_freq > total_freq)
	entry_freq = total_freq;
    }

  copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
	    split_edge_bb_loc (entry), update_dominance);
  if (total_count)
    {
      /* Split the profile: the original region keeps the part that did
	 not enter through ENTRY, the copy gets ENTRY's share.  */
      scale_bbs_frequencies_gcov_type (region, n_region,
				       total_count - entry_count,
				       total_count);
      scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
				       total_count);
    }
  else
    {
      scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
				 total_freq);
      scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
    }

  if (copying_header)
    {
      loop->header = exit->dest;
      loop->latch = exit->src;
    }

  /* Redirect the entry and add the phi node arguments.  */
  redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
  gcc_assert (redirected != NULL);
  flush_pending_stmts (entry);

  /* Concerning updating of dominators:  We must recount dominators
     for entry block and its copy.  Anything that is outside of the
     region, but was dominated by something inside needs recounting as
     well.  */
  if (update_dominance)
    {
      set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
      doms.safe_push (get_bb_original (entry->dest));
      iterate_fix_dominators (CDI_DOMINATORS, doms, false);
      doms.release ();
    }

  /* Add the other PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, NULL);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
6114 
6115 /* Checks if BB is part of the region defined by N_REGION BBS.  */
6116 static bool
6117 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6118 {
6119   unsigned int n;
6120 
6121   for (n = 0; n < n_region; n++)
6122     {
6123      if (bb == bbs[n])
6124        return true;
6125     }
6126   return false;
6127 }
6128 
/* Duplicates REGION consisting of N_REGION blocks.  The new blocks
   are stored to REGION_COPY in the same order in that they appear
   in REGION, if REGION_COPY is not NULL.  ENTRY is the entry to
   the region, EXIT an exit from it.  The condition guarding EXIT
   is moved to ENTRY.  Returns true if duplication succeeds, false
   otherwise.

   For example,

   some_code;
   if (cond)
     A;
   else
     B;

   is transformed to

   if (cond)
     {
       some_code;
       A;
     }
   else
     {
       some_code;
       B;
     }
*/

/* NOTE(review): every parameter below is marked ATTRIBUTE_UNUSED yet all
   of them are used in the body; the markers look like leftovers — confirm
   before removing them.  */

bool
gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
			  basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
			  basic_block *region_copy ATTRIBUTE_UNUSED)
{
  unsigned i;
  bool free_region_copy = false;
  struct loop *loop = exit->dest->loop_father;
  struct loop *orig_loop = entry->dest->loop_father;
  basic_block switch_bb, entry_bb, nentry_bb;
  vec<basic_block> doms;
  int total_freq = 0, exit_freq = 0;
  gcov_type total_count = 0, exit_count = 0;
  edge exits[2], nexits[2], e;
  gimple_stmt_iterator gsi;
  gimple cond_stmt;
  edge sorig, snew;
  basic_block exit_bb;
  gphi_iterator psi;
  gphi *phi;
  tree def;
  struct loop *target, *aloop, *cloop;

  /* EXIT->src must end in a two-way condition; EXITS[0] is the edge
     leaving the region, EXITS[1] the other successor edge.  */
  gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
  exits[0] = exit;
  exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);

  if (!can_copy_bbs_p (region, n_region))
    return false;

  initialize_original_copy_tables ();
  set_loop_copy (orig_loop, loop);

  /* Duplicate the loop structure for any inner loop of ORIG_LOOP whose
     header lies inside the region; copies are attached to TARGET.  */
  target= loop;
  for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
    {
      if (bb_part_of_region_p (aloop->header, region, n_region))
	{
	  cloop = duplicate_loop (aloop, target);
	  duplicate_subloops (aloop, cloop);
	}
    }

  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  /* Copying with pending SSA updates is not supported.  */
  gcc_assert (!need_ssa_update_p (cfun));

  /* Record blocks outside the region that are dominated by something
     inside.  */
  doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);

  if (exit->src->count)
    {
      total_count = exit->src->count;
      exit_count = exit->count;
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (exit_count > total_count)
	exit_count = total_count;
    }
  else
    {
      total_freq = exit->src->frequency;
      exit_freq = EDGE_FREQUENCY (exit);
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (total_freq == 0)
	total_freq = 1;
      if (exit_freq > total_freq)
	exit_freq = total_freq;
    }

  /* Duplicate the region's blocks; the copies of the two exit edges
     come back in NEXITS.  */
  copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
	    split_edge_bb_loc (exit), true);
  if (total_count)
    {
      scale_bbs_frequencies_gcov_type (region, n_region,
				       total_count - exit_count,
				       total_count);
      scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
				       total_count);
    }
  else
    {
      scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
				 total_freq);
      scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
    }

  /* Create the switch block, and put the exit condition to it.  */
  entry_bb = entry->dest;
  nentry_bb = get_bb_copy (entry_bb);
  if (!last_stmt (entry->src)
      || !stmt_ends_bb_p (last_stmt (entry->src)))
    switch_bb = entry->src;
  else
    switch_bb = split_edge (entry);
  set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);

  /* Copy the exit condition of the region into SWITCH_BB.  */
  gsi = gsi_last_bb (switch_bb);
  cond_stmt = last_stmt (exit->src);
  gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
  cond_stmt = gimple_copy (cond_stmt);

  gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);

  /* The existing successor keeps the flags of the in-region edge; a
     new edge into the copied region carries the exit edge's flags.  */
  sorig = single_succ_edge (switch_bb);
  sorig->flags = exits[1]->flags;
  snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);

  /* Register the new edge from SWITCH_BB in loop exit lists.  */
  rescan_loop_exit (snew, true, false);

  /* Add the PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, snew);

  /* Get rid of now superfluous conditions and associated edges (and phi node
     arguments).  */
  exit_bb = exit->dest;

  e = redirect_edge_and_branch (exits[0], exits[1]->dest);
  PENDING_STMT (e) = NULL;

  /* The latch of ORIG_LOOP was copied, and so was the backedge
     to the original header.  We redirect this backedge to EXIT_BB.  */
  for (i = 0; i < n_region; i++)
    if (get_bb_original (region_copy[i]) == orig_loop->latch)
      {
	gcc_assert (single_succ_edge (region_copy[i]));
	e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
	PENDING_STMT (e) = NULL;
	/* Give the PHIs in EXIT_BB an argument for the redirected edge,
	   reusing the value that flowed in along the copied exit.  */
	for (psi = gsi_start_phis (exit_bb);
	     !gsi_end_p (psi);
	     gsi_next (&psi))
	  {
	    phi = psi.phi ();
	    def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
	    add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
	  }
      }
  e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
  PENDING_STMT (e) = NULL;

  /* Anything that is outside of the region, but was dominated by something
     inside needs to update dominance info.  */
  iterate_fix_dominators (CDI_DOMINATORS, doms, false);
  doms.release ();
  /* Update the SSA web.  */
  update_ssa (TODO_update_ssa);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
6318 
6319 /* Add all the blocks dominated by ENTRY to the array BBS_P.  Stop
6320    adding blocks when the dominator traversal reaches EXIT.  This
6321    function silently assumes that ENTRY strictly dominates EXIT.  */
6322 
6323 void
6324 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6325 			      vec<basic_block> *bbs_p)
6326 {
6327   basic_block son;
6328 
6329   for (son = first_dom_son (CDI_DOMINATORS, entry);
6330        son;
6331        son = next_dom_son (CDI_DOMINATORS, son))
6332     {
6333       bbs_p->safe_push (son);
6334       if (son != exit)
6335 	gather_blocks_in_sese_region (son, exit, bbs_p);
6336     }
6337 }
6338 
6339 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6340    The duplicates are recorded in VARS_MAP.  */
6341 
6342 static void
6343 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6344 			   tree to_context)
6345 {
6346   tree t = *tp, new_t;
6347   struct function *f = DECL_STRUCT_FUNCTION (to_context);
6348 
6349   if (DECL_CONTEXT (t) == to_context)
6350     return;
6351 
6352   bool existed;
6353   tree &loc = vars_map->get_or_insert (t, &existed);
6354 
6355   if (!existed)
6356     {
6357       if (SSA_VAR_P (t))
6358 	{
6359 	  new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6360 	  add_local_decl (f, new_t);
6361 	}
6362       else
6363 	{
6364 	  gcc_assert (TREE_CODE (t) == CONST_DECL);
6365 	  new_t = copy_node (t);
6366 	}
6367       DECL_CONTEXT (new_t) = to_context;
6368 
6369       loc = new_t;
6370     }
6371   else
6372     new_t = loc;
6373 
6374   *tp = new_t;
6375 }
6376 
6377 
6378 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6379    VARS_MAP maps old ssa names and var_decls to the new ones.  */
6380 
6381 static tree
6382 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6383 		  tree to_context)
6384 {
6385   tree new_name;
6386 
6387   gcc_assert (!virtual_operand_p (name));
6388 
6389   tree *loc = vars_map->get (name);
6390 
6391   if (!loc)
6392     {
6393       tree decl = SSA_NAME_VAR (name);
6394       if (decl)
6395 	{
6396 	  replace_by_duplicate_decl (&decl, vars_map, to_context);
6397 	  new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6398 				       decl, SSA_NAME_DEF_STMT (name));
6399 	  if (SSA_NAME_IS_DEFAULT_DEF (name))
6400 	    set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6401 				 decl, new_name);
6402 	}
6403       else
6404 	new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6405 				     name, SSA_NAME_DEF_STMT (name));
6406 
6407       vars_map->put (name, new_name);
6408     }
6409   else
6410     new_name = *loc;
6411 
6412   return new_name;
6413 }
6414 
/* State passed (via walk_stmt_info.info) to the move_stmt_r and
   move_stmt_op callbacks while relocating statements between
   functions.  */

struct move_stmt_d
{
  /* BLOCK that moved expressions/statements previously belonged to;
     NULL_TREE means "re-parent anything that has a block".  */
  tree orig_block;
  /* BLOCK the moved expressions/statements are re-parented to.  */
  tree new_block;
  /* Source function context; not consulted by the callbacks visible
     here — presumably the FROM function's decl (confirm at callers).  */
  tree from_context;
  /* Destination function's decl; becomes the new DECL_CONTEXT of
     moved locals and labels.  */
  tree to_context;
  /* Maps old SSA names and var_decls to their duplicates.  */
  hash_map<tree, tree> *vars_map;
  /* Maps outlined labels to their replacements (see move_stmt_op).  */
  htab_t new_label_map;
  /* Maps source EH regions to their duplicates in the destination
     (see move_stmt_eh_region_nr).  */
  hash_map<void *, void *> *eh_map;
  /* Whether local decls should currently be replaced by duplicates;
     temporarily cleared inside OMP directives (see move_stmt_r).  */
  bool remap_decls_p;
};
6426 
/* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
   contained in *TP if it has been ORIG_BLOCK previously and change the
   DECL_CONTEXT of every local variable referenced in *TP.  */

static tree
move_stmt_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  tree t = *tp;

  if (EXPR_P (t))
    {
      tree block = TREE_BLOCK (t);
      /* Re-parent the expression into NEW_BLOCK when it belonged to
	 ORIG_BLOCK, or when ORIG_BLOCK is NULL and the expression has
	 any block at all.  */
      if (block == p->orig_block
	  || (p->orig_block == NULL_TREE
	      && block != NULL_TREE))
	TREE_SET_BLOCK (t, p->new_block);
#ifdef ENABLE_CHECKING
      else if (block != NULL_TREE)
	{
	  /* Any other block must be a (transitive) subblock of
	     ORIG_BLOCK; verify by walking up the supercontext chain.  */
	  while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
	    block = BLOCK_SUPERCONTEXT (block);
	  gcc_assert (block == p->orig_block);
	}
#endif
    }
  else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
    {
      if (TREE_CODE (t) == SSA_NAME)
	*tp = replace_ssa_name (t, p->vars_map, p->to_context);
      else if (TREE_CODE (t) == LABEL_DECL)
	{
	  /* Labels renamed during outlining are replaced by their new
	     decls via NEW_LABEL_MAP.  */
	  if (p->new_label_map)
	    {
	      struct tree_map in, *out;
	      in.base.from = t;
	      out = (struct tree_map *)
		htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
	      if (out)
		*tp = t = out->to;
	    }

	  /* For FORCED_LABELs we can end up with references from other
	     functions if some SESE regions are outlined.  It is UB to
	     jump in between them, but they could be used just for printing
	     addresses etc.  In that case, DECL_CONTEXT on the label should
	     be the function containing the glabel stmt with that LABEL_DECL,
	     rather than whatever function a reference to the label was seen
	     last time.  */
	  if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
	    DECL_CONTEXT (t) = p->to_context;
	}
      else if (p->remap_decls_p)
	{
	  /* Replace T with its duplicate.  T should no longer appear in the
	     parent function, so this looks wasteful; however, it may appear
	     in referenced_vars, and more importantly, as virtual operands of
	     statements, and in alias lists of other variables.  It would be
	     quite difficult to expunge it from all those places.  ??? It might
	     suffice to do this for addressable variables.  */
	  if ((TREE_CODE (t) == VAR_DECL
	       && !is_global_var (t))
	      || TREE_CODE (t) == CONST_DECL)
	    replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
	}
      /* Decls and SSA names have no subtrees worth walking.  */
      *walk_subtrees = 0;
    }
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL_TREE;
}
6500 
6501 /* Helper for move_stmt_r.  Given an EH region number for the source
6502    function, map that to the duplicate EH regio number in the dest.  */
6503 
6504 static int
6505 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6506 {
6507   eh_region old_r, new_r;
6508 
6509   old_r = get_eh_region_from_number (old_nr);
6510   new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6511 
6512   return new_r->index;
6513 }
6514 
6515 /* Similar, but operate on INTEGER_CSTs.  */
6516 
6517 static tree
6518 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6519 {
6520   int old_nr, new_nr;
6521 
6522   old_nr = tree_to_shwi (old_t_nr);
6523   new_nr = move_stmt_eh_region_nr (old_nr, p);
6524 
6525   return build_int_cst (integer_type_node, new_nr);
6526 }
6527 
/* Like move_stmt_op, but for gimple statements.

   Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
   contained in the current statement in *GSI_P and change the
   DECL_CONTEXT of every local variable referenced in the current
   statement.  */

static tree
move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	     struct walk_stmt_info *wi)
{
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  gimple stmt = gsi_stmt (*gsi_p);
  tree block = gimple_block (stmt);

  /* Re-parent the statement into NEW_BLOCK, mirroring what
     move_stmt_op does for expressions.  */
  if (block == p->orig_block
      || (p->orig_block == NULL_TREE
	  && block != NULL_TREE))
    gimple_set_block (stmt, p->new_block);

  /* Statements that embed EH region numbers need those remapped to the
     duplicated regions of the destination function.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Remap the region numbers for __builtin_eh_{pointer,filter}.  */
      {
	tree r, fndecl = gimple_call_fndecl (stmt);
	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_EH_COPY_VALUES:
	      /* Argument 1 is a region number too; then fall through to
		 remap argument 0 like the other EH builtins.  */
	      r = gimple_call_arg (stmt, 1);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 1, r);
	      /* FALLTHRU */

	    case BUILT_IN_EH_POINTER:
	    case BUILT_IN_EH_FILTER:
	      r = gimple_call_arg (stmt, 0);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 0, r);
	      break;

	    default:
	      break;
	    }
      }
      break;

    case GIMPLE_RESX:
      {
	gresx *resx_stmt = as_a <gresx *> (stmt);
	int r = gimple_resx_region (resx_stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_resx_set_region (resx_stmt, r);
      }
      break;

    case GIMPLE_EH_DISPATCH:
      {
	geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
	int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
      }
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
      /* Nothing to remap for these.  */
      break;

    case GIMPLE_LABEL:
      {
	/* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
	   so that such labels can be referenced from other regions.
	   Make sure to update it when seeing a GIMPLE_LABEL though,
	   that is the owner of the label.  */
	walk_gimple_op (stmt, move_stmt_op, wi);
	*handled_ops_p = true;
	tree label = gimple_label_label (as_a <glabel *> (stmt));
	if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
	  DECL_CONTEXT (label) = p->to_context;
      }
      break;

    default:
      if (is_gimple_omp (stmt))
	{
	  /* Do not remap variables inside OMP directives.  Variables
	     referenced in clauses and directive header belong to the
	     parent function and should not be moved into the child
	     function.  */
	  bool save_remap_decls_p = p->remap_decls_p;
	  p->remap_decls_p = false;
	  *handled_ops_p = true;

	  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
			       move_stmt_op, wi);

	  p->remap_decls_p = save_remap_decls_p;
	}
      break;
    }

  return NULL_TREE;
}
6633 
/* Move basic block BB from function CFUN to function DEST_FN.  The
   block is moved out of the original linked list and placed after
   block AFTER in the new list.  Also, the block is removed from the
   original array of blocks and placed in DEST_FN's array of blocks.
   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
   updated to reflect the moved edges.

   The local variables are remapped to new instances, VARS_MAP is used
   to record the mapping.  */

static void
move_block_to_fn (struct function *dest_cfun, basic_block bb,
		  basic_block after, bool update_edge_count_p,
		  struct move_stmt_d *d)
{
  struct control_flow_graph *cfg;
  edge_iterator ei;
  edge e;
  gimple_stmt_iterator si;
  unsigned old_len, new_len;

  /* Remove BB from dominance structures.  */
  delete_from_dominance_info (CDI_DOMINATORS, bb);

  /* Move BB from its current loop to the copy in the new function.  */
  if (current_loops)
    {
      struct loop *new_loop = (struct loop *)bb->loop_father->aux;
      if (new_loop)
	bb->loop_father = new_loop;
    }

  /* Link BB to the new linked list.  */
  move_block_after (bb, after);

  /* Update the edge count in the corresponding flowgraphs.  */
  if (update_edge_count_p)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	cfun->cfg->x_n_edges--;
	dest_cfun->cfg->x_n_edges++;
      }

  /* Remove BB from the original basic block array.  */
  (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
  cfun->cfg->x_n_basic_blocks--;

  /* Grow DEST_CFUN's basic block array if needed.  */
  cfg = dest_cfun->cfg;
  cfg->x_n_basic_blocks++;
  if (bb->index >= cfg->x_last_basic_block)
    cfg->x_last_basic_block = bb->index + 1;

  old_len = vec_safe_length (cfg->x_basic_block_info);
  if ((unsigned) cfg->x_last_basic_block >= old_len)
    {
      /* Grow by an extra ~25% so repeated moves amortize.  */
      new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
      vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
    }

  (*cfg->x_basic_block_info)[bb->index] = bb;

  /* Remap the variables in phi nodes.  */
  for (gphi_iterator psi = gsi_start_phis (bb);
       !gsi_end_p (psi); )
    {
      gphi *phi = psi.phi ();
      use_operand_p use;
      tree op = PHI_RESULT (phi);
      ssa_op_iter oi;
      unsigned i;

      if (virtual_operand_p (op))
	{
	  /* Remove the phi nodes for virtual operands (alias analysis will be
	     run for the new function, anyway).  */
          remove_phi_node (&psi, true);
	  continue;
	}

      /* Replace the result and each SSA-name argument with its
	 duplicate in the destination function.  */
      SET_PHI_RESULT (phi,
		      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
      FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
	{
	  op = USE_FROM_PTR (use);
	  if (TREE_CODE (op) == SSA_NAME)
	    SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
	}

      /* Re-parent the argument locations into the destination block.  */
      for (i = 0; i < EDGE_COUNT (bb->preds); i++)
	{
	  location_t locus = gimple_phi_arg_location (phi, i);
	  tree block = LOCATION_BLOCK (locus);

	  if (locus == UNKNOWN_LOCATION)
	    continue;
	  if (d->orig_block == NULL_TREE || block == d->orig_block)
	    {
	      if (d->new_block == NULL_TREE)
		locus = LOCATION_LOCUS (locus);
	      else
		locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
	      gimple_phi_arg_set_location (phi, i, locus);
	    }
	}

      gsi_next (&psi);
    }

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple stmt = gsi_stmt (si);
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = d;
      walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);

      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  tree label = gimple_label_label (label_stmt);
	  int uid = LABEL_DECL_UID (label);

	  gcc_assert (uid > -1);

	  /* Transfer the label into DEST_CFUN's label-to-block map,
	     growing the map when needed.  */
	  old_len = vec_safe_length (cfg->x_label_to_block_map);
	  if (old_len <= (unsigned) uid)
	    {
	      new_len = 3 * uid / 2 + 1;
	      vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
	    }

	  (*cfg->x_label_to_block_map)[uid] = bb;
	  (*cfun->cfg->x_label_to_block_map)[uid] = NULL;

	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);

	  if (uid >= dest_cfun->cfg->last_label_uid)
	    dest_cfun->cfg->last_label_uid = uid + 1;
	}

      /* Move EH landing-pad info and profile histograms to DEST_CFUN.  */
      maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
      remove_stmt_from_eh_lp_fn (cfun, stmt);

      gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
      gimple_remove_stmt_histograms (cfun, stmt);

      /* We cannot leave any operands allocated from the operand caches of
	 the current function.  */
      free_stmt_operands (cfun, stmt);
      push_cfun (dest_cfun);
      update_stmt (stmt);
      pop_cfun ();
    }

  /* Re-parent the goto_locus of outgoing edges as well.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->goto_locus != UNKNOWN_LOCATION)
      {
	tree block = LOCATION_BLOCK (e->goto_locus);
	if (d->orig_block == NULL_TREE
	    || block == d->orig_block)
	  e->goto_locus = d->new_block ?
	      COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
	      LOCATION_LOCUS (e->goto_locus);
      }
}
6800 
6801 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6802    the outermost EH region.  Use REGION as the incoming base EH region.  */
6803 
6804 static eh_region
6805 find_outermost_region_in_block (struct function *src_cfun,
6806 				basic_block bb, eh_region region)
6807 {
6808   gimple_stmt_iterator si;
6809 
6810   for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6811     {
6812       gimple stmt = gsi_stmt (si);
6813       eh_region stmt_region;
6814       int lp_nr;
6815 
6816       lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6817       stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6818       if (stmt_region)
6819 	{
6820 	  if (region == NULL)
6821 	    region = stmt_region;
6822 	  else if (stmt_region != region)
6823 	    {
6824 	      region = eh_region_outermost (src_cfun, stmt_region, region);
6825 	      gcc_assert (region != NULL);
6826 	    }
6827 	}
6828     }
6829 
6830   return region;
6831 }
6832 
6833 static tree
6834 new_label_mapper (tree decl, void *data)
6835 {
6836   htab_t hash = (htab_t) data;
6837   struct tree_map *m;
6838   void **slot;
6839 
6840   gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6841 
6842   m = XNEW (struct tree_map);
6843   m->hash = DECL_UID (decl);
6844   m->base.from = decl;
6845   m->to = create_artificial_label (UNKNOWN_LOCATION);
6846   LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6847   if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6848     cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6849 
6850   slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6851   gcc_assert (*slot == NULL);
6852 
6853   *slot = m;
6854 
6855   return m->to;
6856 }
6857 
6858 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6859    subblocks.  */
6860 
6861 static void
6862 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
6863 				  tree to_context)
6864 {
6865   tree *tp, t;
6866 
6867   for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6868     {
6869       t = *tp;
6870       if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6871 	continue;
6872       replace_by_duplicate_decl (&t, vars_map, to_context);
6873       if (t != *tp)
6874 	{
6875 	  if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6876 	    {
6877 	      SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6878 	      DECL_HAS_VALUE_EXPR_P (t) = 1;
6879 	    }
6880 	  DECL_CHAIN (t) = DECL_CHAIN (*tp);
6881 	  *tp = t;
6882 	}
6883     }
6884 
6885   for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6886     replace_block_vars_by_duplicates (block, vars_map, to_context);
6887 }
6888 
6889 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6890    from FN1 to FN2.  */
6891 
6892 static void
6893 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6894 			      struct loop *loop)
6895 {
6896   /* Discard it from the old loop array.  */
6897   (*get_loops (fn1))[loop->num] = NULL;
6898 
6899   /* Place it in the new loop array, assigning it a new number.  */
6900   loop->num = number_of_loops (fn2);
6901   vec_safe_push (loops_for_fn (fn2)->larray, loop);
6902 
6903   /* Recurse to children.  */
6904   for (loop = loop->inner; loop; loop = loop->next)
6905     fixup_loop_arrays_after_move (fn1, fn2, loop);
6906 }
6907 
6908 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
6909    delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks.  */
6910 
6911 DEBUG_FUNCTION void
6912 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
6913 {
6914   basic_block bb;
6915   edge_iterator ei;
6916   edge e;
6917   bitmap bbs = BITMAP_ALLOC (NULL);
6918   int i;
6919 
6920   gcc_assert (entry != NULL);
6921   gcc_assert (entry != exit);
6922   gcc_assert (bbs_p != NULL);
6923 
6924   gcc_assert (bbs_p->length () > 0);
6925 
6926   FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6927     bitmap_set_bit (bbs, bb->index);
6928 
6929   gcc_assert (bitmap_bit_p (bbs, entry->index));
6930   gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
6931 
6932   FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6933     {
6934       if (bb == entry)
6935 	{
6936 	  gcc_assert (single_pred_p (entry));
6937 	  gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
6938 	}
6939       else
6940 	for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
6941 	  {
6942 	    e = ei_edge (ei);
6943 	    gcc_assert (bitmap_bit_p (bbs, e->src->index));
6944 	  }
6945 
6946       if (bb == exit)
6947 	{
6948 	  gcc_assert (single_succ_p (exit));
6949 	  gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
6950 	}
6951       else
6952 	for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
6953 	  {
6954 	    e = ei_edge (ei);
6955 	    gcc_assert (bitmap_bit_p (bbs, e->dest->index));
6956 	  }
6957     }
6958 
6959   BITMAP_FREE (bbs);
6960 }
6961 
6962 
6963 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6964    EXIT_BB to function DEST_CFUN.  The whole region is replaced by a
6965    single basic block in the original CFG and the new basic block is
6966    returned.  DEST_CFUN must not have a CFG yet.
6967 
6968    Note that the region need not be a pure SESE region.  Blocks inside
6969    the region may contain calls to abort/exit.  The only restriction
6970    is that ENTRY_BB should be the only entry point and it must
6971    dominate EXIT_BB.
6972 
6973    Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6974    functions outermost BLOCK, move all subblocks of ORIG_BLOCK
6975    to the new function.
6976 
6977    All local variables referenced in the region are assumed to be in
6978    the corresponding BLOCK_VARS and unexpanded variable lists
6979    associated with DEST_CFUN.  */
6980 
6981 basic_block
6982 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6983 		        basic_block exit_bb, tree orig_block)
6984 {
6985   vec<basic_block> bbs, dom_bbs;
6986   basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6987   basic_block after, bb, *entry_pred, *exit_succ, abb;
6988   struct function *saved_cfun = cfun;
6989   int *entry_flag, *exit_flag;
6990   unsigned *entry_prob, *exit_prob;
6991   unsigned i, num_entry_edges, num_exit_edges, num_nodes;
6992   edge e;
6993   edge_iterator ei;
6994   htab_t new_label_map;
6995   hash_map<void *, void *> *eh_map;
6996   struct loop *loop = entry_bb->loop_father;
6997   struct loop *loop0 = get_loop (saved_cfun, 0);
6998   struct move_stmt_d d;
6999 
7000   /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7001      region.  */
7002   gcc_assert (entry_bb != exit_bb
7003               && (!exit_bb
7004 		  || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7005 
7006   /* Collect all the blocks in the region.  Manually add ENTRY_BB
7007      because it won't be added by dfs_enumerate_from.  */
7008   bbs.create (0);
7009   bbs.safe_push (entry_bb);
7010   gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7011 #ifdef ENABLE_CHECKING
7012   verify_sese (entry_bb, exit_bb, &bbs);
7013 #endif
7014 
7015   /* The blocks that used to be dominated by something in BBS will now be
7016      dominated by the new block.  */
7017   dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7018 				     bbs.address (),
7019 				     bbs.length ());
7020 
7021   /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG.  We need to remember
7022      the predecessor edges to ENTRY_BB and the successor edges to
7023      EXIT_BB so that we can re-attach them to the new basic block that
7024      will replace the region.  */
7025   num_entry_edges = EDGE_COUNT (entry_bb->preds);
7026   entry_pred = XNEWVEC (basic_block, num_entry_edges);
7027   entry_flag = XNEWVEC (int, num_entry_edges);
7028   entry_prob = XNEWVEC (unsigned, num_entry_edges);
7029   i = 0;
7030   for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7031     {
7032       entry_prob[i] = e->probability;
7033       entry_flag[i] = e->flags;
7034       entry_pred[i++] = e->src;
7035       remove_edge (e);
7036     }
7037 
7038   if (exit_bb)
7039     {
7040       num_exit_edges = EDGE_COUNT (exit_bb->succs);
7041       exit_succ = XNEWVEC (basic_block, num_exit_edges);
7042       exit_flag = XNEWVEC (int, num_exit_edges);
7043       exit_prob = XNEWVEC (unsigned, num_exit_edges);
7044       i = 0;
7045       for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7046 	{
7047 	  exit_prob[i] = e->probability;
7048 	  exit_flag[i] = e->flags;
7049 	  exit_succ[i++] = e->dest;
7050 	  remove_edge (e);
7051 	}
7052     }
7053   else
7054     {
7055       num_exit_edges = 0;
7056       exit_succ = NULL;
7057       exit_flag = NULL;
7058       exit_prob = NULL;
7059     }
7060 
7061   /* Switch context to the child function to initialize DEST_FN's CFG.  */
7062   gcc_assert (dest_cfun->cfg == NULL);
7063   push_cfun (dest_cfun);
7064 
7065   init_empty_tree_cfg ();
7066 
7067   /* Initialize EH information for the new function.  */
7068   eh_map = NULL;
7069   new_label_map = NULL;
7070   if (saved_cfun->eh)
7071     {
7072       eh_region region = NULL;
7073 
7074       FOR_EACH_VEC_ELT (bbs, i, bb)
7075 	region = find_outermost_region_in_block (saved_cfun, bb, region);
7076 
7077       init_eh_for_function ();
7078       if (region != NULL)
7079 	{
7080 	  new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7081 	  eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7082 					 new_label_mapper, new_label_map);
7083 	}
7084     }
7085 
7086   /* Initialize an empty loop tree.  */
7087   struct loops *loops = ggc_cleared_alloc<struct loops> ();
7088   init_loops_structure (dest_cfun, loops, 1);
7089   loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7090   set_loops_for_fn (dest_cfun, loops);
7091 
7092   /* Move the outlined loop tree part.  */
7093   num_nodes = bbs.length ();
7094   FOR_EACH_VEC_ELT (bbs, i, bb)
7095     {
7096       if (bb->loop_father->header == bb)
7097 	{
7098 	  struct loop *this_loop = bb->loop_father;
7099 	  struct loop *outer = loop_outer (this_loop);
7100 	  if (outer == loop
7101 	      /* If the SESE region contains some bbs ending with
7102 		 a noreturn call, those are considered to belong
7103 		 to the outermost loop in saved_cfun, rather than
7104 		 the entry_bb's loop_father.  */
7105 	      || outer == loop0)
7106 	    {
7107 	      if (outer != loop)
7108 		num_nodes -= this_loop->num_nodes;
7109 	      flow_loop_tree_node_remove (bb->loop_father);
7110 	      flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7111 	      fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7112 	    }
7113 	}
7114       else if (bb->loop_father == loop0 && loop0 != loop)
7115 	num_nodes--;
7116 
7117       /* Remove loop exits from the outlined region.  */
7118       if (loops_for_fn (saved_cfun)->exits)
7119 	FOR_EACH_EDGE (e, ei, bb->succs)
7120 	  {
7121 	    struct loops *l = loops_for_fn (saved_cfun);
7122 	    loop_exit **slot
7123 	      = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7124 					       NO_INSERT);
7125 	    if (slot)
7126 	      l->exits->clear_slot (slot);
7127 	  }
7128     }
7129 
7130 
7131   /* Adjust the number of blocks in the tree root of the outlined part.  */
7132   get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7133 
7134   /* Setup a mapping to be used by move_block_to_fn.  */
7135   loop->aux = current_loops->tree_root;
7136   loop0->aux = current_loops->tree_root;
7137 
7138   pop_cfun ();
7139 
7140   /* Move blocks from BBS into DEST_CFUN.  */
7141   gcc_assert (bbs.length () >= 2);
7142   after = dest_cfun->cfg->x_entry_block_ptr;
7143   hash_map<tree, tree> vars_map;
7144 
7145   memset (&d, 0, sizeof (d));
7146   d.orig_block = orig_block;
7147   d.new_block = DECL_INITIAL (dest_cfun->decl);
7148   d.from_context = cfun->decl;
7149   d.to_context = dest_cfun->decl;
7150   d.vars_map = &vars_map;
7151   d.new_label_map = new_label_map;
7152   d.eh_map = eh_map;
7153   d.remap_decls_p = true;
7154 
7155   FOR_EACH_VEC_ELT (bbs, i, bb)
7156     {
7157       /* No need to update edge counts on the last block.  It has
7158 	 already been updated earlier when we detached the region from
7159 	 the original CFG.  */
7160       move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7161       after = bb;
7162     }
7163 
7164   loop->aux = NULL;
7165   loop0->aux = NULL;
7166   /* Loop sizes are no longer correct, fix them up.  */
7167   loop->num_nodes -= num_nodes;
7168   for (struct loop *outer = loop_outer (loop);
7169        outer; outer = loop_outer (outer))
7170     outer->num_nodes -= num_nodes;
7171   loop0->num_nodes -= bbs.length () - num_nodes;
7172 
7173   if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7174     {
7175       struct loop *aloop;
7176       for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7177 	if (aloop != NULL)
7178 	  {
7179 	    if (aloop->simduid)
7180 	      {
7181 		replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7182 					   d.to_context);
7183 		dest_cfun->has_simduid_loops = true;
7184 	      }
7185 	    if (aloop->force_vectorize)
7186 	      dest_cfun->has_force_vectorize_loops = true;
7187 	  }
7188     }
7189 
7190   /* Rewire BLOCK_SUBBLOCKS of orig_block.  */
7191   if (orig_block)
7192     {
7193       tree block;
7194       gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7195 		  == NULL_TREE);
7196       BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7197 	= BLOCK_SUBBLOCKS (orig_block);
7198       for (block = BLOCK_SUBBLOCKS (orig_block);
7199 	   block; block = BLOCK_CHAIN (block))
7200 	BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7201       BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7202     }
7203 
7204   replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7205 				    &vars_map, dest_cfun->decl);
7206 
7207   if (new_label_map)
7208     htab_delete (new_label_map);
7209   if (eh_map)
7210     delete eh_map;
7211 
7212   /* Rewire the entry and exit blocks.  The successor to the entry
7213      block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7214      the child function.  Similarly, the predecessor of DEST_FN's
7215      EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR.  We
7216      need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7217      various CFG manipulation function get to the right CFG.
7218 
7219      FIXME, this is silly.  The CFG ought to become a parameter to
7220      these helpers.  */
7221   push_cfun (dest_cfun);
7222   make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7223   if (exit_bb)
7224     make_edge (exit_bb,  EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7225   pop_cfun ();
7226 
7227   /* Back in the original function, the SESE region has disappeared,
7228      create a new basic block in its place.  */
7229   bb = create_empty_bb (entry_pred[0]);
7230   if (current_loops)
7231     add_bb_to_loop (bb, loop);
7232   for (i = 0; i < num_entry_edges; i++)
7233     {
7234       e = make_edge (entry_pred[i], bb, entry_flag[i]);
7235       e->probability = entry_prob[i];
7236     }
7237 
7238   for (i = 0; i < num_exit_edges; i++)
7239     {
7240       e = make_edge (bb, exit_succ[i], exit_flag[i]);
7241       e->probability = exit_prob[i];
7242     }
7243 
7244   set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7245   FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7246     set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7247   dom_bbs.release ();
7248 
7249   if (exit_bb)
7250     {
7251       free (exit_prob);
7252       free (exit_flag);
7253       free (exit_succ);
7254     }
7255   free (entry_prob);
7256   free (entry_flag);
7257   free (entry_pred);
7258   bbs.release ();
7259 
7260   return bb;
7261 }
7262 
7263 
7264 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
7265    */
7266 
void
dump_function_to_file (tree fndecl, FILE *file, int flags)
{
  tree arg, var, old_current_fndecl = current_function_decl;
  struct function *dsf;
  bool ignore_topmost_bind = false, any_var = false;
  basic_block bb;
  tree chain;
  /* Transactional-memory clones are annotated in the header line.  */
  bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
		  && decl_is_tm_clone (fndecl));
  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Temporarily switch to FNDECL so the printing helpers resolve names
     in its context; restored before every return below.  */
  current_function_decl = fndecl;
  fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");

  /* Print the parameter list as comma-separated "type name" pairs.  */
  arg = DECL_ARGUMENTS (fndecl);
  while (arg)
    {
      print_generic_expr (file, TREE_TYPE (arg), dump_flags);
      fprintf (file, " ");
      print_generic_expr (file, arg, dump_flags);
      if (flags & TDF_VERBOSE)
	print_node (file, "", arg, 4);
      if (DECL_CHAIN (arg))
	fprintf (file, ", ");
      arg = DECL_CHAIN (arg);
    }
  fprintf (file, ")\n");

  if (flags & TDF_VERBOSE)
    print_node (file, "", fndecl, 2);

  /* Optionally dump the exception handling region tree.  */
  dsf = DECL_STRUCT_FUNCTION (fndecl);
  if (dsf && (flags & TDF_EH))
    dump_eh_tree (file, dsf);

  /* A raw dump of a function without a GIMPLE body prints the tree
     nodes directly and is done.  */
  if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
    {
      dump_node (fndecl, TDF_SLIM | flags, file);
      current_function_decl = old_current_fndecl;
      return;
    }

  /* When GIMPLE is lowered, the variables are no longer available in
     BIND_EXPRs, so display them separately.  */
  if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
    {
      unsigned ix;
      ignore_topmost_bind = true;

      fprintf (file, "{\n");
      /* Dump the function's local declarations.  */
      if (!vec_safe_is_empty (fun->local_decls))
	FOR_EACH_LOCAL_DECL (fun, ix, var)
	  {
	    print_generic_decl (file, var, flags);
	    if (flags & TDF_VERBOSE)
	      print_node (file, "", var, 4);
	    fprintf (file, "\n");

	    any_var = true;
	  }
      /* In SSA form, also dump anonymous SSA names (those without an
	 underlying user variable), starting at index 1 since SSA name 0
	 is unused.  */
      if (gimple_in_ssa_p (cfun))
	for (ix = 1; ix < num_ssa_names; ++ix)
	  {
	    tree name = ssa_name (ix);
	    if (name && !SSA_NAME_VAR (name))
	      {
		fprintf (file, "  ");
		print_generic_expr (file, TREE_TYPE (name), flags);
		fprintf (file, " ");
		print_generic_expr (file, name, flags);
		fprintf (file, ";\n");

		any_var = true;
	      }
	  }
    }

  if (fun && fun->decl == fndecl
      && fun->cfg
      && basic_block_info_for_fn (fun))
    {
      /* If the CFG has been built, emit a CFG-based dump.  */
      if (!ignore_topmost_bind)
	fprintf (file, "{\n");

      if (any_var && n_basic_blocks_for_fn (fun))
	fprintf (file, "\n");

      FOR_EACH_BB_FN (bb, fun)
	dump_bb (file, bb, 2, flags | TDF_COMMENT);

      fprintf (file, "}\n");
    }
  else if (DECL_SAVED_TREE (fndecl) == NULL)
    {
      /* The function is now in GIMPLE form but the CFG has not been
	 built yet.  Emit the single sequence of GIMPLE statements
	 that make up its body.  */
      gimple_seq body = gimple_body (fndecl);

      /* A body that is exactly one GIMPLE_BIND provides its own braces;
	 print it as-is.  Otherwise emit our own surrounding block.  */
      if (gimple_seq_first_stmt (body)
	  && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
	  && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
	print_gimple_seq (file, body, 0, flags);
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");

	  if (any_var)
	    fprintf (file, "\n");

	  print_gimple_seq (file, body, 2, flags);
	  fprintf (file, "}\n");
	}
    }
  else
    {
      int indent;

      /* Make a tree based dump.  */
      chain = DECL_SAVED_TREE (fndecl);
      if (chain && TREE_CODE (chain) == BIND_EXPR)
	{
	  if (ignore_topmost_bind)
	    {
	      /* Skip the outermost bind; its variables were already
		 printed above.  */
	      chain = BIND_EXPR_BODY (chain);
	      indent = 2;
	    }
	  else
	    indent = 0;
	}
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");
	  indent = 2;
	}

      if (any_var)
	fprintf (file, "\n");

      print_generic_stmt_indented (file, chain, flags, indent);
      if (ignore_topmost_bind)
	fprintf (file, "}\n");
    }

  if (flags & TDF_ENUMERATE_LOCALS)
    dump_enumerated_decls (file, flags);
  fprintf (file, "\n\n");

  /* Restore the caller's function context.  */
  current_function_decl = old_current_fndecl;
}
7421 
7422 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h)  */
7423 
DEBUG_FUNCTION void
debug_function (tree fn, int flags)
{
  /* Convenience wrapper for use from a debugger: dump FN to stderr.  */
  dump_function_to_file (fn, stderr, flags);
}
7429 
7430 
7431 /* Print on FILE the indexes for the predecessors of basic_block BB.  */
7432 
7433 static void
7434 print_pred_bbs (FILE *file, basic_block bb)
7435 {
7436   edge e;
7437   edge_iterator ei;
7438 
7439   FOR_EACH_EDGE (e, ei, bb->preds)
7440     fprintf (file, "bb_%d ", e->src->index);
7441 }
7442 
7443 
7444 /* Print on FILE the indexes for the successors of basic_block BB.  */
7445 
7446 static void
7447 print_succ_bbs (FILE *file, basic_block bb)
7448 {
7449   edge e;
7450   edge_iterator ei;
7451 
7452   FOR_EACH_EDGE (e, ei, bb->succs)
7453     fprintf (file, "bb_%d ", e->dest->index);
7454 }
7455 
7456 /* Print to FILE the basic block BB following the VERBOSITY level.  */
7457 
7458 void
7459 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7460 {
7461   char *s_indent = (char *) alloca ((size_t) indent + 1);
7462   memset ((void *) s_indent, ' ', (size_t) indent);
7463   s_indent[indent] = '\0';
7464 
7465   /* Print basic_block's header.  */
7466   if (verbosity >= 2)
7467     {
7468       fprintf (file, "%s  bb_%d (preds = {", s_indent, bb->index);
7469       print_pred_bbs (file, bb);
7470       fprintf (file, "}, succs = {");
7471       print_succ_bbs (file, bb);
7472       fprintf (file, "})\n");
7473     }
7474 
7475   /* Print basic_block's body.  */
7476   if (verbosity >= 3)
7477     {
7478       fprintf (file, "%s  {\n", s_indent);
7479       dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7480       fprintf (file, "%s  }\n", s_indent);
7481     }
7482 }
7483 
7484 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7485 
7486 /* Pretty print LOOP on FILE, indented INDENT spaces.  Following
7487    VERBOSITY level this outputs the contents of the loop, or just its
7488    structure.  */
7489 
7490 static void
7491 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7492 {
7493   char *s_indent;
7494   basic_block bb;
7495 
7496   if (loop == NULL)
7497     return;
7498 
7499   s_indent = (char *) alloca ((size_t) indent + 1);
7500   memset ((void *) s_indent, ' ', (size_t) indent);
7501   s_indent[indent] = '\0';
7502 
7503   /* Print loop's header.  */
7504   fprintf (file, "%sloop_%d (", s_indent, loop->num);
7505   if (loop->header)
7506     fprintf (file, "header = %d", loop->header->index);
7507   else
7508     {
7509       fprintf (file, "deleted)\n");
7510       return;
7511     }
7512   if (loop->latch)
7513     fprintf (file, ", latch = %d", loop->latch->index);
7514   else
7515     fprintf (file, ", multiple latches");
7516   fprintf (file, ", niter = ");
7517   print_generic_expr (file, loop->nb_iterations, 0);
7518 
7519   if (loop->any_upper_bound)
7520     {
7521       fprintf (file, ", upper_bound = ");
7522       print_decu (loop->nb_iterations_upper_bound, file);
7523     }
7524 
7525   if (loop->any_estimate)
7526     {
7527       fprintf (file, ", estimate = ");
7528       print_decu (loop->nb_iterations_estimate, file);
7529     }
7530   fprintf (file, ")\n");
7531 
7532   /* Print loop's body.  */
7533   if (verbosity >= 1)
7534     {
7535       fprintf (file, "%s{\n", s_indent);
7536       FOR_EACH_BB_FN (bb, cfun)
7537 	if (bb->loop_father == loop)
7538 	  print_loops_bb (file, bb, indent, verbosity);
7539 
7540       print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7541       fprintf (file, "%s}\n", s_indent);
7542     }
7543 }
7544 
7545 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7546    spaces.  Following VERBOSITY level this outputs the contents of the
7547    loop, or just its structure.  */
7548 
7549 static void
7550 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7551 			 int verbosity)
7552 {
7553   if (loop == NULL)
7554     return;
7555 
7556   print_loop (file, loop, indent, verbosity);
7557   print_loop_and_siblings (file, loop->next, indent, verbosity);
7558 }
7559 
7560 /* Follow a CFG edge from the entry point of the program, and on entry
7561    of a loop, pretty print the loop structure on FILE.  */
7562 
7563 void
7564 print_loops (FILE *file, int verbosity)
7565 {
7566   basic_block bb;
7567 
7568   bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7569   if (bb && bb->loop_father)
7570     print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7571 }
7572 
7573 /* Dump a loop.  */
7574 
DEBUG_FUNCTION void
debug (struct loop &ref)
{
  /* Structure-only dump (verbosity 0): header line, no body.  */
  print_loop (stderr, &ref, 0, /*verbosity*/0);
}
7580 
7581 DEBUG_FUNCTION void
7582 debug (struct loop *ptr)
7583 {
7584   if (ptr)
7585     debug (*ptr);
7586   else
7587     fprintf (stderr, "<nil>\n");
7588 }
7589 
7590 /* Dump a loop verbosely.  */
7591 
DEBUG_FUNCTION void
debug_verbose (struct loop &ref)
{
  /* Full dump (verbosity 3): header, blocks and statements.  */
  print_loop (stderr, &ref, 0, /*verbosity*/3);
}
7597 
7598 DEBUG_FUNCTION void
7599 debug_verbose (struct loop *ptr)
7600 {
7601   if (ptr)
7602     debug (*ptr);
7603   else
7604     fprintf (stderr, "<nil>\n");
7605 }
7606 
7607 
7608 /* Debugging loops structure at tree level, at some VERBOSITY level.  */
7609 
DEBUG_FUNCTION void
debug_loops (int verbosity)
{
  /* Dump the whole loop tree of the current function to stderr.  */
  print_loops (stderr, verbosity);
}
7615 
7616 /* Print on stderr the code of LOOP, at some VERBOSITY level.  */
7617 
DEBUG_FUNCTION void
debug_loop (struct loop *loop, int verbosity)
{
  /* Dump a single loop (and, depending on VERBOSITY, its body) to stderr.  */
  print_loop (stderr, loop, 0, verbosity);
}
7623 
7624 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7625    level.  */
7626 
DEBUG_FUNCTION void
debug_loop_num (unsigned num, int verbosity)
{
  /* Look the loop up by its number in the current function and dump it.  */
  debug_loop (get_loop (cfun, num), verbosity);
}
7632 
7633 /* Return true if BB ends with a call, possibly followed by some
7634    instructions that must stay with the call.  Return false,
7635    otherwise.  */
7636 
7637 static bool
7638 gimple_block_ends_with_call_p (basic_block bb)
7639 {
7640   gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7641   return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7642 }
7643 
7644 
7645 /* Return true if BB ends with a conditional branch.  Return false,
7646    otherwise.  */
7647 
7648 static bool
7649 gimple_block_ends_with_condjump_p (const_basic_block bb)
7650 {
7651   gimple stmt = last_stmt (CONST_CAST_BB (bb));
7652   return (stmt && gimple_code (stmt) == GIMPLE_COND);
7653 }
7654 
7655 
7656 /* Return true if we need to add fake edge to exit at statement T.
7657    Helper function for gimple_flow_call_edges_add.  */
7658 
7659 static bool
7660 need_fake_edge_p (gimple t)
7661 {
7662   tree fndecl = NULL_TREE;
7663   int call_flags = 0;
7664 
7665   /* NORETURN and LONGJMP calls already have an edge to exit.
7666      CONST and PURE calls do not need one.
7667      We don't currently check for CONST and PURE here, although
7668      it would be a good idea, because those attributes are
7669      figured out from the RTL in mark_constant_function, and
7670      the counter incrementation code from -fprofile-arcs
7671      leads to different results from -fbranch-probabilities.  */
7672   if (is_gimple_call (t))
7673     {
7674       fndecl = gimple_call_fndecl (t);
7675       call_flags = gimple_call_flags (t);
7676     }
7677 
7678   if (is_gimple_call (t)
7679       && fndecl
7680       && DECL_BUILT_IN (fndecl)
7681       && (call_flags & ECF_NOTHROW)
7682       && !(call_flags & ECF_RETURNS_TWICE)
7683       /* fork() doesn't really return twice, but the effect of
7684          wrapping it in __gcov_fork() which calls __gcov_flush()
7685 	 and clears the counters before forking has the same
7686 	 effect as returning twice.  Force a fake edge.  */
7687       && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7688 	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7689     return false;
7690 
7691   if (is_gimple_call (t))
7692     {
7693       edge_iterator ei;
7694       edge e;
7695       basic_block bb;
7696 
7697       if (!(call_flags & ECF_NORETURN))
7698 	return true;
7699 
7700       bb = gimple_bb (t);
7701       FOR_EACH_EDGE (e, ei, bb->succs)
7702 	if ((e->flags & EDGE_FAKE) == 0)
7703 	  return true;
7704     }
7705 
7706   if (gasm *asm_stmt = dyn_cast <gasm *> (t))
7707     if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
7708       return true;
7709 
7710   return false;
7711 }
7712 
7713 
7714 /* Add fake edges to the function exit for any non constant and non
7715    noreturn calls (or noreturn calls with EH/abnormal edges),
7716    volatile inline assembly in the bitmap of blocks specified by BLOCKS
7717    or to the whole CFG if BLOCKS is zero.  Return the number of blocks
7718    that were split.
7719 
7720    The goal is to expose cases in which entering a basic block does
7721    not imply that all subsequent instructions must be executed.  */
7722 
static int
gimple_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  /* Snapshot the block count up front: splitting below may create new
     blocks, which must not be revisited.  */
  int last_bb = last_basic_block_for_fn (cfun);
  bool check_last_block = false;

  /* Nothing to do when the function has only the fixed ENTRY/EXIT blocks.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    return 0;

  /* The last real block needs special treatment only when it is in the
     requested set (or when the whole CFG was requested).  */
  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks,
				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
      gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
      gimple t = NULL;

      if (!gsi_end_p (gsi))
	t = gsi_stmt (gsi);

      if (t && need_fake_edge_p (t))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
	  if (e)
	    {
	      /* Inserting the nop on the edge splits it, giving the call
		 its own block so the fake edge cannot be folded with the
		 fallthru edge.  */
	      gsi_insert_on_edge (e, gimple_build_nop ());
	      gsi_commit_edge_inserts ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */
  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      gimple_stmt_iterator gsi;
      gimple stmt, last_stmt;

      /* Block indices can have holes after CFG manipulation.  */
      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      /* Walk the statements backwards so that splitting the block below
	 does not disturb the statements still to be scanned.  */
      gsi = gsi_last_nondebug_bb (bb);
      if (!gsi_end_p (gsi))
	{
	  last_stmt = gsi_stmt (gsi);
	  do
	    {
	      stmt = gsi_stmt (gsi);
	      if (need_fake_edge_p (stmt))
		{
		  edge e;

		  /* The handling above of the final block before the
		     epilogue should be enough to verify that there is
		     no edge to the exit block in CFG already.
		     Calling make_edge in such case would cause us to
		     mark that edge as fake and remove it later.  */
#ifdef ENABLE_CHECKING
		  if (stmt == last_stmt)
		    {
		      e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
		      gcc_assert (e == NULL);
		    }
#endif

		  /* Note that the following may create a new basic block
		     and renumber the existing basic blocks.  */
		  if (stmt != last_stmt)
		    {
		      e = split_block (bb, stmt);
		      if (e)
			blocks_split++;
		    }
		  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
		}
	      gsi_prev (&gsi);
	    }
	  while (!gsi_end_p (gsi));
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
7834 
7835 /* Removes edge E and all the blocks dominated by it, and updates dominance
7836    information.  The IL in E->src needs to be updated separately.
7837    If dominance info is not available, only the edge E is removed.*/
7838 
void
remove_edge_and_dominated_blocks (edge e)
{
  /* Blocks that become unreachable and must be deleted.  */
  vec<basic_block> bbs_to_remove = vNULL;
  /* Blocks whose immediate dominator must be recomputed.  */
  vec<basic_block> bbs_to_fix_dom = vNULL;
  bitmap df, df_idom;
  edge f;
  edge_iterator ei;
  bool none_removed = false;
  unsigned i;
  basic_block bb, dbb;
  bitmap_iterator bi;

  /* If we are removing a path inside a non-root loop that may change
     loop ownership of blocks or remove loops.  Mark loops for fixup.  */
  if (current_loops
      && loop_outer (e->src->loop_father) != NULL
      && e->src->loop_father == e->dest->loop_father)
    loops_state_set (LOOPS_NEED_FIXUP);

  /* Without dominator information we can only remove the edge itself.  */
  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      remove_edge (e);
      return;
    }

  /* No updating is needed for edges to exit.  */
  if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      if (cfgcleanup_altered_bbs)
	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      remove_edge (e);
      return;
    }

  /* First, we find the basic blocks to remove.  If E->dest has a predecessor
     that is not dominated by E->dest, then this set is empty.  Otherwise,
     all the basic blocks dominated by E->dest are removed.

     Also, to DF_IDOM we store the immediate dominators of the blocks in
     the dominance frontier of E (i.e., of the successors of the
     removed blocks, if there are any, and of E->dest otherwise).  */
  FOR_EACH_EDGE (f, ei, e->dest->preds)
    {
      if (f == e)
	continue;

      if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
	{
	  none_removed = true;
	  break;
	}
    }

  df = BITMAP_ALLOC (NULL);
  df_idom = BITMAP_ALLOC (NULL);

  if (none_removed)
    bitmap_set_bit (df_idom,
		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
  else
    {
      bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
      /* Collect the successors of the removed blocks...  */
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	{
	  FOR_EACH_EDGE (f, ei, bb->succs)
	    {
	      if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
		bitmap_set_bit (df, f->dest->index);
	    }
	}
      /* ... minus the removed blocks themselves, leaving the dominance
	 frontier of the removed region in DF.  */
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	bitmap_clear_bit (df, bb->index);

      EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
	{
	  bb = BASIC_BLOCK_FOR_FN (cfun, i);
	  bitmap_set_bit (df_idom,
			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
	}
    }

  if (cfgcleanup_altered_bbs)
    {
      /* Record the set of the altered basic blocks.  */
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      bitmap_ior_into (cfgcleanup_altered_bbs, df);
    }

  /* Remove E and the cancelled blocks.  */
  if (none_removed)
    remove_edge (e);
  else
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = bbs_to_remove.length (); i-- > 0; )
	delete_basic_block (bbs_to_remove[i]);
    }

  /* Update the dominance information.  The immediate dominator may change only
     for blocks whose immediate dominator belongs to DF_IDOM:

     Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
     removal.  Let Z the arbitrary block such that idom(Z) = Y and
     Z dominates X after the removal.  Before removal, there exists a path P
     from Y to X that avoids Z.  Let F be the last edge on P that is
     removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
     dominates W, and because of P, Z does not dominate W), and W belongs to
     the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
  EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, i);
      for (dbb = first_dom_son (CDI_DOMINATORS, bb);
	   dbb;
	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
	bbs_to_fix_dom.safe_push (dbb);
    }

  iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);

  BITMAP_FREE (df);
  BITMAP_FREE (df_idom);
  bbs_to_remove.release ();
  bbs_to_fix_dom.release ();
}
7967 
7968 /* Purge dead EH edges from basic block BB.  */
7969 
7970 bool
7971 gimple_purge_dead_eh_edges (basic_block bb)
7972 {
7973   bool changed = false;
7974   edge e;
7975   edge_iterator ei;
7976   gimple stmt = last_stmt (bb);
7977 
7978   if (stmt && stmt_can_throw_internal (stmt))
7979     return false;
7980 
7981   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7982     {
7983       if (e->flags & EDGE_EH)
7984 	{
7985 	  remove_edge_and_dominated_blocks (e);
7986 	  changed = true;
7987 	}
7988       else
7989 	ei_next (&ei);
7990     }
7991 
7992   return changed;
7993 }
7994 
7995 /* Purge dead EH edges from basic block listed in BLOCKS.  */
7996 
7997 bool
7998 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7999 {
8000   bool changed = false;
8001   unsigned i;
8002   bitmap_iterator bi;
8003 
8004   EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8005     {
8006       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8007 
8008       /* Earlier gimple_purge_dead_eh_edges could have removed
8009 	 this basic block already.  */
8010       gcc_assert (bb || changed);
8011       if (bb != NULL)
8012 	changed |= gimple_purge_dead_eh_edges (bb);
8013     }
8014 
8015   return changed;
8016 }
8017 
8018 /* Purge dead abnormal call edges from basic block BB.  */
8019 
8020 bool
8021 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8022 {
8023   bool changed = false;
8024   edge e;
8025   edge_iterator ei;
8026   gimple stmt = last_stmt (bb);
8027 
8028   if (!cfun->has_nonlocal_label
8029       && !cfun->calls_setjmp)
8030     return false;
8031 
8032   if (stmt && stmt_can_make_abnormal_goto (stmt))
8033     return false;
8034 
8035   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8036     {
8037       if (e->flags & EDGE_ABNORMAL)
8038 	{
8039 	  if (e->flags & EDGE_FALLTHRU)
8040 	    e->flags &= ~EDGE_ABNORMAL;
8041 	  else
8042 	    remove_edge_and_dominated_blocks (e);
8043 	  changed = true;
8044 	}
8045       else
8046 	ei_next (&ei);
8047     }
8048 
8049   return changed;
8050 }
8051 
8052 /* Purge dead abnormal call edges from basic block listed in BLOCKS.  */
8053 
8054 bool
8055 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8056 {
8057   bool changed = false;
8058   unsigned i;
8059   bitmap_iterator bi;
8060 
8061   EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8062     {
8063       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8064 
8065       /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8066 	 this basic block already.  */
8067       gcc_assert (bb || changed);
8068       if (bb != NULL)
8069 	changed |= gimple_purge_dead_abnormal_call_edges (bb);
8070     }
8071 
8072   return changed;
8073 }
8074 
8075 /* This function is called whenever a new edge is created or
8076    redirected.  */
8077 
8078 static void
8079 gimple_execute_on_growing_pred (edge e)
8080 {
8081   basic_block bb = e->dest;
8082 
8083   if (!gimple_seq_empty_p (phi_nodes (bb)))
8084     reserve_phi_args_for_new_edge (bb);
8085 }
8086 
8087 /* This function is called immediately before edge E is removed from
8088    the edge vector E->dest->preds.  */
8089 
8090 static void
8091 gimple_execute_on_shrinking_pred (edge e)
8092 {
8093   if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8094     remove_phi_args (e);
8095 }
8096 
8097 /*---------------------------------------------------------------------------
8098   Helper functions for Loop versioning
8099   ---------------------------------------------------------------------------*/
8100 
/* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
   of 'first'.  Both of them are dominated by 'new_head' basic block.  When
   'new_head' was created by splitting 'second's incoming edge, it received
   phi arguments on that edge from split_edge().  Later, an additional edge
   'e' was created to connect 'new_head' and 'first'.  This routine adds on
   edge 'e' the same phi arguments that the new_head->second edge received
   as part of the edge splitting.  */

static void
gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
				  basic_block new_head, edge e)
{
  gphi *phi1, *phi2;
  gphi_iterator psi1, psi2;
  tree def;
  edge e2 = find_edge (new_head, second);

  /* Because NEW_HEAD has been created by splitting SECOND's incoming
     edge, we should always have an edge from NEW_HEAD to SECOND.  */
  gcc_assert (e2 != NULL);

  /* Browse all 'second' basic block phi nodes and add phi args to
     edge 'e' for 'first' head.  PHI args are always in correct order.  */

  for (psi2 = gsi_start_phis (second),
       psi1 = gsi_start_phis (first);
       !gsi_end_p (psi2) && !gsi_end_p (psi1);
       gsi_next (&psi2),  gsi_next (&psi1))
    {
      phi1 = psi1.phi ();
      phi2 = psi2.phi ();
      /* FIRST's PHI gets on edge E the argument (and source location)
	 SECOND's corresponding PHI has on E2.  */
      def = PHI_ARG_DEF (phi2, e2->dest_idx);
      add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
    }
}
8136 
8137 
8138 /* Adds a if else statement to COND_BB with condition COND_EXPR.
8139    SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8140    the destination of the ELSE part.  */
8141 
8142 static void
8143 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8144 			       basic_block second_head ATTRIBUTE_UNUSED,
8145 			       basic_block cond_bb, void *cond_e)
8146 {
8147   gimple_stmt_iterator gsi;
8148   gimple new_cond_expr;
8149   tree cond_expr = (tree) cond_e;
8150   edge e0;
8151 
8152   /* Build new conditional expr */
8153   new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8154 					       NULL_TREE, NULL_TREE);
8155 
8156   /* Add new cond in cond_bb.  */
8157   gsi = gsi_last_bb (cond_bb);
8158   gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8159 
8160   /* Adjust edges appropriately to connect new head with first head
8161      as well as second head.  */
8162   e0 = single_succ_edge (cond_bb);
8163   e0->flags &= ~EDGE_FALLTHRU;
8164   e0->flags |= EDGE_FALSE_VALUE;
8165 }
8166 
8167 
8168 /* Do book-keeping of basic block BB for the profile consistency checker.
8169    If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
8170    then do post-pass accounting.  Store the counting in RECORD.  */
8171 static void
8172 gimple_account_profile_record (basic_block bb, int after_pass,
8173 			       struct profile_record *record)
8174 {
8175   gimple_stmt_iterator i;
8176   for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8177     {
8178       record->size[after_pass]
8179 	+= estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8180       if (profile_status_for_fn (cfun) == PROFILE_READ)
8181 	record->time[after_pass]
8182 	  += estimate_num_insns (gsi_stmt (i),
8183 				 &eni_time_weights) * bb->count;
8184       else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8185 	record->time[after_pass]
8186 	  += estimate_num_insns (gsi_stmt (i),
8187 				 &eni_time_weights) * bb->frequency;
8188     }
8189 }
8190 
/* Table of CFG hooks used while the IL is in GIMPLE form; see
   cfghooks.h for the contract of each entry.  */
struct cfg_hooks gimple_cfg_hooks = {
  "gimple",			/* name */
  gimple_verify_flow_info,	/* verify_flow_info */
  gimple_dump_bb,		/* dump_bb  */
  gimple_dump_bb_for_graph,	/* dump_bb_for_graph  */
  create_bb,			/* create_basic_block  */
  gimple_redirect_edge_and_branch, /* redirect_edge_and_branch  */
  gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force  */
  gimple_can_remove_branch_p,	/* can_remove_branch_p  */
  remove_bb,			/* delete_basic_block  */
  gimple_split_block,		/* split_block  */
  gimple_move_block_after,	/* move_block_after  */
  gimple_can_merge_blocks_p,	/* can_merge_blocks_p  */
  gimple_merge_blocks,		/* merge_blocks  */
  gimple_predict_edge,		/* predict_edge  */
  gimple_predicted_by_p,	/* predicted_by_p  */
  gimple_can_duplicate_bb_p,	/* can_duplicate_block_p  */
  gimple_duplicate_bb,		/* duplicate_block  */
  gimple_split_edge,		/* split_edge  */
  gimple_make_forwarder_block,	/* make_forward_block  */
  NULL,				/* tidy_fallthru_edge  */
  NULL,				/* force_nonfallthru */
  gimple_block_ends_with_call_p,/* block_ends_with_call_p */
  gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
  gimple_flow_call_edges_add,   /* flow_call_edges_add */
  gimple_execute_on_growing_pred,	/* execute_on_growing_pred */
  gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
  gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
  gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
  extract_true_false_edges_from_block, /* extract_cond_bb_edges */
  flush_pending_stmts, 		/* flush_pending_stmts */
  gimple_empty_block_p,           /* block_empty_p */
  gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
  gimple_account_profile_record,/* account_profile_record */
};
8227 
8228 
/* Split all critical edges.  Abnormal edges cannot be split, so they
   are skipped.  Always returns 0 (no TODO flags).  */

unsigned int
split_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  FOR_ALL_BB_FN (bb, cfun)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
	  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
	    split_edge (e);
	  /* PRE inserts statements to edges and expects that
	     since split_critical_edges was done beforehand, committing edge
	     insertions will not split more edges.  In addition to critical
	     edges we must split edges that have multiple successors and
	     end by control flow statements, such as RESX.
	     Go ahead and split them too.  This matches the logic in
	     gimple_find_edge_insert_loc.  */
	  else if ((!single_pred_p (e->dest)
	            || !gimple_seq_empty_p (phi_nodes (e->dest))
		    || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	           && !(e->flags & EDGE_ABNORMAL))
	    {
	      gimple_stmt_iterator gsi;

	      /* Only split if E->src really ends in a control statement
		 that is not a plain return.  */
	      gsi = gsi_last_bb (e->src);
	      if (!gsi_end_p (gsi)
		  && stmt_ends_bb_p (gsi_stmt (gsi))
		  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
		      && !gimple_call_builtin_p (gsi_stmt (gsi),
						 BUILT_IN_RETURN)))
		split_edge (e);
	    }
	}
    }
  end_recording_case_labels ();
  return 0;
}
8276 
namespace {

/* Pass descriptor for the critical-edge-splitting pass ("crited").
   Requires a CFG and provides PROP_no_crit_edges.  */
const pass_data pass_data_split_crit_edges =
{
  GIMPLE_PASS, /* type */
  "crited", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_SPLIT_EDGES, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_no_crit_edges, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Thin gimple_opt_pass wrapper around split_critical_edges.  */
class pass_split_crit_edges : public gimple_opt_pass
{
public:
  pass_split_crit_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return split_critical_edges (); }

  /* Clonable so the pass can appear more than once in a pipeline.  */
  opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
}; // class pass_split_crit_edges

} // anon namespace
8306 
/* Factory for the critical-edge-splitting pass.  */

gimple_opt_pass *
make_pass_split_crit_edges (gcc::context *ctxt)
{
  return new pass_split_crit_edges (ctxt);
}
8312 
8313 
/* Insert COND expression which is GIMPLE_COND after STMT
   in basic block BB with appropriate basic block split
   and creation of a new conditionally executed basic block.
   Return created basic block.  */
basic_block
insert_cond_bb (basic_block bb, gimple stmt, gimple cond)
{
  /* Split BB after STMT; BB keeps everything up to and including STMT,
     FALL leads to the rest.  */
  edge fall = split_block (bb, stmt);
  gimple_stmt_iterator iter = gsi_last_bb (bb);
  basic_block new_bb;

  /* Insert cond statement.  BB may be empty after the split when STMT
     was a label or BB had no statements.  */
  gcc_assert (gimple_code (cond) == GIMPLE_COND);
  if (gsi_end_p (iter))
    gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
  else
    gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);

  /* Create conditionally executed block.  It is entered when COND is
     true and falls through to the block after the split point.  */
  new_bb = create_empty_bb (bb);
  make_edge (bb, new_bb, EDGE_TRUE_VALUE);
  make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);

  /* Fix edge for split bb: it is taken when COND is false.  */
  fall->flags = EDGE_FALSE_VALUE;

  /* Update dominance info.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
      set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
    }

  /* Update loop info.  */
  if (current_loops)
    add_bb_to_loop (new_bb, bb->loop_father);

  return new_bb;
}
8353 
8354 /* Build a ternary operation and gimplify it.  Emit code before GSI.
8355    Return the gimple_val holding the result.  */
8356 
8357 tree
8358 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8359 		 tree type, tree a, tree b, tree c)
8360 {
8361   tree ret;
8362   location_t loc = gimple_location (gsi_stmt (*gsi));
8363 
8364   ret = fold_build3_loc (loc, code, type, a, b, c);
8365   STRIP_NOPS (ret);
8366 
8367   return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8368                                    GSI_SAME_STMT);
8369 }
8370 
8371 /* Build a binary operation and gimplify it.  Emit code before GSI.
8372    Return the gimple_val holding the result.  */
8373 
8374 tree
8375 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8376 		 tree type, tree a, tree b)
8377 {
8378   tree ret;
8379 
8380   ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8381   STRIP_NOPS (ret);
8382 
8383   return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8384                                    GSI_SAME_STMT);
8385 }
8386 
8387 /* Build a unary operation and gimplify it.  Emit code before GSI.
8388    Return the gimple_val holding the result.  */
8389 
8390 tree
8391 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8392 		 tree a)
8393 {
8394   tree ret;
8395 
8396   ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8397   STRIP_NOPS (ret);
8398 
8399   return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8400                                    GSI_SAME_STMT);
8401 }
8402 
8403 
8404 
8405 /* Given a basic block B which ends with a conditional and has
8406    precisely two successors, determine which of the edges is taken if
8407    the conditional is true and which is taken if the conditional is
8408    false.  Set TRUE_EDGE and FALSE_EDGE appropriately.  */
8409 
8410 void
8411 extract_true_false_edges_from_block (basic_block b,
8412 				     edge *true_edge,
8413 				     edge *false_edge)
8414 {
8415   edge e = EDGE_SUCC (b, 0);
8416 
8417   if (e->flags & EDGE_TRUE_VALUE)
8418     {
8419       *true_edge = e;
8420       *false_edge = EDGE_SUCC (b, 1);
8421     }
8422   else
8423     {
8424       *false_edge = e;
8425       *true_edge = EDGE_SUCC (b, 1);
8426     }
8427 }
8428 
/* Emit return warnings.  */

namespace {

/* Pass descriptor for the return-warning pass; runs on a CFG,
   provides/destroys nothing.  */
const pass_data pass_data_warn_function_return =
{
  GIMPLE_PASS, /* type */
  "*warn_function_return", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_return : public gimple_opt_pass
{
public:
  pass_warn_function_return (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_return, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_warn_function_return

/* Warn when a noreturn function can in fact reach the exit block
   (currently compiled out, see "notyet" below), and when control can
   reach the end of a non-void function without a value
   (-Wreturn-type).  Always returns 0.  */

unsigned int
pass_warn_function_return::execute (function *fun)
{
  source_location location;
  gimple last;
  edge e;
  edge_iterator ei;

  /* Let the target suppress these warnings (e.g. for naked functions).  */
  if (!targetm.warn_func_return (fun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (fun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
    {
      /* Pick the location of some return statement for the warning,
	 falling back to the end of the function.
	 NOTE(review): last_stmt is not checked for NULL here — assumes
	 every predecessor of the exit block ends with a statement.  */
      location = UNKNOWN_LOCATION;
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
	{
	  last = last_stmt (e->src);
	  if ((gimple_code (last) == GIMPLE_RETURN
	       || gimple_call_builtin_p (last, BUILT_IN_RETURN))
	      && (location = gimple_location (last)) != UNKNOWN_LOCATION)
	    break;
	}
      if (location == UNKNOWN_LOCATION)
	location = cfun->function_end_locus;

#ifdef notyet
      if (warn_missing_noreturn)
        warning_at (location, 0, "%<noreturn%> function does return");
#endif
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
	   && !TREE_NO_WARNING (fun->decl)
	   && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
	{
	  gimple last = last_stmt (e->src);
	  greturn *return_stmt = dyn_cast <greturn *> (last);
	  if (return_stmt
	      && gimple_return_retval (return_stmt) == NULL
	      && !gimple_no_warning_p (last))
	    {
	      location = gimple_location (last);
	      if (location == UNKNOWN_LOCATION)
		location = fun->function_end_locus;
	      warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
	      /* Warn at most once per function.  */
	      TREE_NO_WARNING (fun->decl) = 1;
	      break;
	    }
	}
    }
  return 0;
}

} // anon namespace
8519 
/* Factory for the return-warning pass.  */

gimple_opt_pass *
make_pass_warn_function_return (gcc::context *ctxt)
{
  return new pass_warn_function_return (ctxt);
}
8525 
/* Walk a gimplified function and warn for functions whose return value is
   ignored and attribute((warn_unused_result)) is set.  This is done before
   inlining, so we don't have to worry about that.  Recurses into nested
   statement sequences (binds, try blocks, EH handlers).  */

static void
do_warn_unused_result (gimple_seq seq)
{
  tree fdecl, ftype;
  gimple_stmt_iterator i;

  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
    {
      gimple g = gsi_stmt (i);

      switch (gimple_code (g))
	{
	case GIMPLE_BIND:
	  do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
	  break;
	case GIMPLE_TRY:
	  do_warn_unused_result (gimple_try_eval (g));
	  do_warn_unused_result (gimple_try_cleanup (g));
	  break;
	case GIMPLE_CATCH:
	  do_warn_unused_result (gimple_catch_handler (
				   as_a <gcatch *> (g)));
	  break;
	case GIMPLE_EH_FILTER:
	  do_warn_unused_result (gimple_eh_filter_failure (g));
	  break;

	case GIMPLE_CALL:
	  if (gimple_call_lhs (g))
	    break;
	  /* Internal functions have no user-visible attributes.  */
	  if (gimple_call_internal_p (g))
	    break;

	  /* This is a naked call, as opposed to a GIMPLE_CALL with an
	     LHS.  All calls whose value is ignored should be
	     represented like this.  Look for the attribute.  */
	  fdecl = gimple_call_fndecl (g);
	  ftype = gimple_call_fntype (g);

	  if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
	    {
	      location_t loc = gimple_location (g);

	      /* FDECL may be NULL for indirect calls; then we can only
		 refer to the function type in the diagnostic.  */
	      if (fdecl)
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of %qD, "
			    "declared with attribute warn_unused_result",
			    fdecl);
	      else
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of function "
			    "declared with attribute warn_unused_result");
	    }
	  break;

	default:
	  /* Not a container, not a call, or a call whose value is used.  */
	  break;
	}
    }
}
8591 
namespace {

/* Pass descriptor for the unused-result warning pass; runs on any
   gimple body, before the CFG is built.  */
const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Thin gimple_opt_pass wrapper around do_warn_unused_result, gated on
   -Wunused-result.  */
class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_warn_unused_result; }
  virtual unsigned int execute (function *)
    {
      do_warn_unused_result (gimple_body (current_function_decl));
      return 0;
    }

}; // class pass_warn_unused_result

} // anon namespace
8625 
/* Factory for the unused-result warning pass.  */

gimple_opt_pass *
make_pass_warn_unused_result (gcc::context *ctxt)
{
  return new pass_warn_unused_result (ctxt);
}
8631 
/* IPA passes, compilation of earlier functions or inlining
   might have changed some properties, such as marked functions nothrow,
   pure, const or noreturn.
   Remove redundant edges and basic blocks, and create new ones if necessary.

   This pass can't be executed as stand alone pass from pass manager, because
   in between inlining and this fixup the verify_flow_info would fail.

   Returns a set of TODO_* flags describing the cleanups that became
   necessary.  */

unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = 0;
  gcov_type count_scale;
  edge e;
  edge_iterator ei;
  cgraph_node *node = cgraph_node::get (current_function_decl);

  /* Rescale all profile counts so the entry block count matches the
     cgraph node's count.  */
  count_scale
    = GCOV_COMPUTE_SCALE (node->count, ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);

  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->count
    = apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count, count_scale);

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    e->count = apply_scale (e->count, count_scale);

  FOR_EACH_BB_FN (bb, cfun)
    {
      bb->count = apply_scale (bb->count, count_scale);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree decl = is_gimple_call (stmt)
		      ? gimple_call_fndecl (stmt)
		      : NULL;
	  if (decl)
	    {
	      /* Calls discovered to be const/pure no longer have
		 abnormal edges; noreturn calls may need their
		 fallthrough removed.  */
	      int flags = gimple_call_flags (stmt);
	      if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
		{
		  if (gimple_purge_dead_abnormal_call_edges (bb))
		    todo |= TODO_cleanup_cfg;

		  if (gimple_in_ssa_p (cfun))
		    {
		      todo |= TODO_update_ssa | TODO_cleanup_cfg;
		      update_stmt (stmt);
		    }
		}

	      if (flags & ECF_NORETURN
		  && fixup_noreturn_call (stmt))
		todo |= TODO_cleanup_cfg;
	     }

	  /* Remove stores to variables we marked write-only.
	     Keep access when store has side effect, i.e. in case when source
	     is volatile.  */
	  if (gimple_store_p (stmt)
	      && !gimple_has_side_effects (stmt))
	    {
	      tree lhs = get_base_address (gimple_get_lhs (stmt));

	      if (TREE_CODE (lhs) == VAR_DECL
		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
		  && varpool_node::get (lhs)->writeonly)
		{
		  unlink_stmt_vdef (stmt);
		  gsi_remove (&gsi, true);
		  release_defs (stmt);
	          todo |= TODO_update_ssa | TODO_cleanup_cfg;
	          continue;
		}
	    }
	  /* For calls we can simply remove LHS when it is known
	     to be write-only.  */
	  if (is_gimple_call (stmt)
	      && gimple_get_lhs (stmt))
	    {
	      tree lhs = get_base_address (gimple_get_lhs (stmt));

	      if (TREE_CODE (lhs) == VAR_DECL
		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
		  && varpool_node::get (lhs)->writeonly)
		{
		  gimple_call_set_lhs (stmt, NULL);
		  update_stmt (stmt);
	          todo |= TODO_update_ssa | TODO_cleanup_cfg;
		}
	    }

	  /* Calls marked nothrow in the meantime may have dead EH
	     edges now.  */
	  if (maybe_clean_eh_stmt (stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    todo |= TODO_cleanup_cfg;
	  gsi_next (&gsi);
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
        e->count = apply_scale (e->count, count_scale);

      /* If we have a basic block with no successors that does not
	 end with a control statement or a noreturn call end it with
	 a call to __builtin_unreachable.  This situation can occur
	 when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  gimple stmt = last_stmt (bb);
	  if (!stmt
	      || (!is_ctrl_stmt (stmt)
		  && (!is_gimple_call (stmt)
		      || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
	    {
	      if (stmt && is_gimple_call (stmt))
		gimple_call_set_ctrl_altering (stmt, false);
	      tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	      stmt = gimple_build_call (fndecl, 0);
	      gimple_stmt_iterator gsi = gsi_last_bb (bb);
	      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
	      /* Before inlining the call graph must be kept in sync
		 with the new call.  */
	      if (!cfun->after_inlining)
		{
		  gcall *call_stmt = dyn_cast <gcall *> (stmt);
		  int freq
		    = compute_call_stmt_bb_frequency (current_function_decl,
						      bb);
		  node->create_edge (cgraph_node::get_create (fndecl),
				     call_stmt, bb->count, freq);
		}
	    }
	}
    }
  if (count_scale != REG_BR_PROB_BASE)
    compute_function_frequency ();

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}
8774 
namespace {

/* Pass descriptor for the CFG fixup pass; requires a CFG, provides and
   destroys nothing.  */
const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "fixup_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Thin gimple_opt_pass wrapper around execute_fixup_cfg.  */
class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  /* Clonable so the pass can appear more than once in a pipeline.  */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  virtual unsigned int execute (function *) { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg

} // anon namespace
8804 
/* Factory for the CFG fixup pass.  */

gimple_opt_pass *
make_pass_fixup_cfg (gcc::context *ctxt)
{
  return new pass_fixup_cfg (ctxt);
}
8810 
8811 /* Garbage collection support for edge_def.  */
8812 
8813 extern void gt_ggc_mx (tree&);
8814 extern void gt_ggc_mx (gimple&);
8815 extern void gt_ggc_mx (rtx&);
8816 extern void gt_ggc_mx (basic_block&);
8817 
/* Mark the RTL insn X (if any) for garbage collection.  */

static void
gt_ggc_mx (rtx_insn *& x)
{
  if (x)
    gt_ggc_mx_rtx_def ((void *) x);
}
8824 
/* Mark edge E and the GC-managed objects it references.  */

void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  /* Pending edge insertions are a gimple seq or an rtx chain,
     depending on the current IR.  */
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}
8837 
8838 /* PCH support for edge_def.  */
8839 
8840 extern void gt_pch_nx (tree&);
8841 extern void gt_pch_nx (gimple&);
8842 extern void gt_pch_nx (rtx&);
8843 extern void gt_pch_nx (basic_block&);
8844 
/* Note the RTL insn X (if any) for PCH output.  */

static void
gt_pch_nx (rtx_insn *& x)
{
  if (x)
    gt_pch_nx_rtx_def ((void *) x);
}
8851 
/* Note edge E and the objects it references for PCH output.  */

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  /* Pending edge insertions are a gimple seq or an rtx chain,
     depending on the current IR.  */
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}
8864 
/* Apply pointer operator OP (with COOKIE) to every pointer stored in
   edge E, for PCH pointer relocation.  */

void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  /* Pending edge insertions are a gimple seq or an rtx chain,
     depending on the current IR.  */
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}
8877