/* Control flow functions for trees.
   Copyright (C) 2001-2019 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-general.h"
#include "omp-expand.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"
#include "selftest.h"
#include "opts.h"
#include "asan.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently look up all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  int location_line;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return item->location_line;
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return a->location_line == b->location_line;
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (const gcond *, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */
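
/* As an illustrative sketch (not necessarily literal dump output), for
   a "#pragma GCC ivdep" loop the condition block ends roughly in

	_2 = .ANNOTATE (_1, annot_expr_ivdep_kind, ...);
	if (_2 != 0) goto <body>; else goto <exit>;

   The function below records the annotation kind on LOOP (here
   loop->safelen = INT_MAX) and replaces the call with the plain copy
   _2 = _1, so later passes see an ordinary loop condition.  */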

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_unroll_kind:
	  loop->unroll
	    = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
	  cfun->has_unroll = true;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	case annot_expr_parallel_kind:
	  loop->can_be_parallel = true;
	  loop->safelen = INT_MAX;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}

/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_unroll_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	    case annot_expr_parallel_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}

/* Returns true if the sequence of statements STMTS only contains
   a call to __builtin_unreachable (), possibly preceded by labels,
   debug stmts and clobbers.  */
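
/* For example (a sketch, not literal dump output), a sequence such as

	L1:
	# DEBUG BEGIN_STMT
	x = {CLOBBER};
	__builtin_unreachable ();

   satisfies this predicate: only labels, debug stmts and clobbers
   appear before the trailing call.  */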

bool
gimple_seq_unreachable_p (gimple_seq stmts)
{
  if (stmts == NULL
      /* Return false if -fsanitize=unreachable, we don't want to
	 optimize away those calls, but rather turn them into
	 __ubsan_handle_builtin_unreachable () or __builtin_trap ()
	 later.  */
      || sanitize_flags_p (SANITIZE_UNREACHABLE))
    return false;

  gimple_stmt_iterator gsi = gsi_last (stmts);

  if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    return false;

  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL
	  && !is_gimple_debug (stmt)
	  && !gimple_clobber_p (stmt))
	return false;
    }
  return true;
}

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	return gimple_seq_unreachable_p (bb_seq (other_bb));
    }
  return false;
}


/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh. We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* BUILT_IN_RETURN call is same as return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
	 as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
	  && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}

/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  gimple *prev_stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      /* PREV_STMT should only be set to a debug stmt if the debug
	 stmt is before nondebug stmts.  Once stmt reaches a nondebug
	 nonlabel, prev_stmt will be set to it, so that
	 stmt_starts_bb_p will know to start a new block if a label is
	 found.  However, if stmt was a label after debug stmts only,
	 keep the label in prev_stmt even if we find further debug
	 stmts, for there may be other labels after them, and they
	 should land in the same block.  */
      if (!prev_stmt || !stmt || !is_gimple_debug (stmt))
	prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	  prev_stmt = NULL;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
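	  /* For instance (sketch): given  x = setjmp (&buf);  the
	     rewrite below produces

		tmp = setjmp (&buf);
		x = tmp;

	     so X keeps its previous value on the abnormal edge out of
	     the call.  */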
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  /* Look for debug markers right before labels, and move the debug
     stmts after the labels.  Accepting labels among debug markers
     adds no value, just complexity; if we wanted to annotate labels
     with view numbers (so sequencing among markers would matter) or
     somesuch, we're probably better off still moving the labels, but
     adding other debug annotations in their original positions or
     emitting nonbind or bind markers associated with the labels in
     the original position of the labels.

     Moving labels would probably be simpler, but we can't do that:
     moving labels assigns label ids to them, and doing so because of
     debug markers makes for -fcompare-debug and possibly even codegen
     differences.  So, we have to move the debug stmts instead.  To
     that end, we scan SEQ backwards, marking the position of the
     latest (earliest we find) label, and moving debug stmts that are
     not separated from it by nondebug nonlabel stmts after the
     label.  */
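  /* For example (sketch), the scan below turns

	 # DEBUG BEGIN_STMT
	 L1:

     into

	 L1:
	 # DEBUG BEGIN_STMT

     so the label still starts its block and the marker follows it.  */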
  if (MAY_HAVE_DEBUG_MARKER_STMTS)
    {
      gimple_stmt_iterator label = gsi_none ();

      for (gimple_stmt_iterator i = gsi_last (seq); !gsi_end_p (i); gsi_prev (&i))
	{
	  gimple *stmt = gsi_stmt (i);

	  /* If this is the first label we encounter (latest in SEQ)
	     before nondebug stmts, record its position.  */
	  if (is_a <glabel *> (stmt))
	    {
	      if (gsi_end_p (label))
		label = i;
	      continue;
	    }

	  /* Without a recorded label position to move debug stmts to,
	     there's nothing to do.  */
	  if (gsi_end_p (label))
	    continue;

	  /* Move the debug stmt at I after LABEL.  */
	  if (is_gimple_debug (stmt))
	    {
	      gcc_assert (gimple_debug_nonbind_marker_p (stmt));
	      /* As STMT is removed, I advances to the stmt after
		 STMT, so the gsi_prev in the for "increment"
		 expression gets us to the stmt we're to visit after
		 STMT.  LABEL, however, would advance to the moved
		 stmt if we passed it to gsi_move_after, so pass it a
		 copy instead, so as to keep LABEL pointing to the
		 LABEL.  */
	      gimple_stmt_iterator copy = label;
	      gsi_move_after (&i, &copy);
	      continue;
	    }

	  /* There aren't any (more?) debug stmts before label, so
	     there isn't anything else to move after it.  */
	  label = gsi_none ();
	}
    }

  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}

/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}


/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block
   containing an IFN_ABNORMAL_DISPATCHER internal call, return
   the dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
	  return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block with
   an ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */
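
/* For the computed-goto case the factoring below produces, roughly
   (a sketch; the label is artificial, "gotovar" is the actual
   temporary created below):

	<bb A>: gotovar = &L1; goto <dispatcher>;
	<bb B>: gotovar = &L2; goto <dispatcher>;
	<dispatcher>: <factored label>: goto gotovar;

   so only the dispatcher needs edges to every potential destination
   label.  */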

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so we only need the new edge made
     below to FOR_BB; otherwise create the dispatcher first.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						 1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}

/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, 2 when it ends with a statement that might return
   to this function via a nonlocal goto, and 0 otherwise.  Updates
   *PCUR_REGION with the OMP region this BB is in.  */
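
/* The return value is consumed by make_edges below: blocks returning 1
   are collected as computed-goto sources and blocks returning 2 as
   abnormal call sources, to be wired to the dispatcher blocks
   afterwards via handle_abnormal_edges.  */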

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
	gtransaction *txn = as_a <gtransaction *> (last);
	tree label1 = gimple_transaction_label_norm (txn);
	tree label2 = gimple_transaction_label_uninst (txn);

	if (label1)
	  make_edge (bb, label_to_block (cfun, label1), EDGE_FALLTHRU);
	if (label2)
	  make_edge (bb, label_to_block (cfun, label2),
		     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

	tree label3 = gimple_transaction_label_over (txn);
	if (gimple_transaction_subcode (txn)
	    & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
	  make_edge (bb, label_to_block (cfun, label3), EDGE_TM_ABORT);

	fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
	bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  omp_free_regions ();
}

/* Add SEQ after GSI.  Start new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      profile_count cnt = profile_count::zero ();
      bool all = true;

      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->count ().initialized_p ())
	    cnt += e->count ();
	  else
	    all = false;
	}
      tree_guess_outgoing_edge_probabilities (bb);
      if (all || profile_status_for_fn (cfun) == PROFILE_READ)
        bb->count = cnt;

      bb = bb->next_bb;
    }
  return true;
}

/* Find the next available discriminator value for LINE.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */
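
/* For example (sketch): with  "if (p) x = 1; else x = 2;"  on a single
   source line, the condition and both arms share one locus; giving the
   blocks distinct discriminators lets a sample-based profiler
   attribute samples to each block separately.  */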

static int
next_discriminator_for_locus (int line)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.location_line = line;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (&item, line, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->location_line = line;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, expanded_location *from, location_t locus2)
{
  expanded_location to;

  if (locus1 == locus2)
    return true;

  to = expand_location (locus2);

  if (from->line != to.line)
    return false;
  if (from->file == to.file)
    return true;
  return (from->file != NULL
          && to.file != NULL
          && filename_cmp (from->file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      expanded_location locus_e = expand_location (locus);

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, &locus_e,
				     gimple_location (first)))
	      || (last && same_line_p (locus, &locus_e,
				       gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator
		  = next_discriminator_for_locus (locus_e.line);
	      else
		e->dest->discriminator
		  = next_discriminator_for_locus (locus_e.line);
	    }
	}
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */
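
/* At this point the GIMPLE_COND still carries explicit destination
   labels, e.g. (sketch):

	if (x_1 > 0) goto <L1>; else goto <L2>;

   The function below turns the labels into EDGE_TRUE_VALUE and
   EDGE_FALSE_VALUE edges and clears them, as they are no longer
   needed.  */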

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (cfun, then_label);
  else_bb = label_to_block (cfun, else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (cfun, lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      basic_block label_bb = gimple_switch_label_bb (cfun, entry, i);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (cfun, dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (cfun, label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Clean up useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it again,
   since (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (cfun, label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted an undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}


/* Clean up redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Clean up all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *txn = as_a <gtransaction *> (stmt);

	    label = gimple_transaction_label_norm (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_norm (txn, new_label);
	      }

	    label = gimple_transaction_label_uninst (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_uninst (txn, new_label);
	      }

	    label = gimple_transaction_label_over (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_over (txn, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
1718 
1719 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1720    the ones jumping to the same label.
1721    Eg. three separate entries 1: 2: 3: become one entry 1..3:  */
1722 
1723 bool
1724 group_case_labels_stmt (gswitch *stmt)
1725 {
1726   int old_size = gimple_switch_num_labels (stmt);
1727   int i, next_index, new_size;
1728   basic_block default_bb = NULL;
1729 
1730   default_bb = gimple_switch_default_bb (cfun, stmt);
1731 
1732   /* Look for possible opportunities to merge cases.  */
1733   new_size = i = 1;
1734   while (i < old_size)
1735     {
1736       tree base_case, base_high;
1737       basic_block base_bb;
1738 
1739       base_case = gimple_switch_label (stmt, i);
1740 
1741       gcc_assert (base_case);
1742       base_bb = label_to_block (cfun, CASE_LABEL (base_case));
1743 
1744       /* Discard cases that have the same destination as the default case or
1745 	 whose destination blocks have already been removed as unreachable.  */
1746       if (base_bb == NULL || base_bb == default_bb)
1747 	{
1748 	  i++;
1749 	  continue;
1750 	}
1751 
1752       base_high = CASE_HIGH (base_case)
1753 	  ? CASE_HIGH (base_case)
1754 	  : CASE_LOW (base_case);
1755       next_index = i + 1;
1756 
1757       /* Try to merge case labels.  Break out when we reach the end
1758 	 of the label vector or when we cannot merge the next case
1759 	 label with the current one.  */
1760       while (next_index < old_size)
1761 	{
1762 	  tree merge_case = gimple_switch_label (stmt, next_index);
1763 	  basic_block merge_bb = label_to_block (cfun, CASE_LABEL (merge_case));
1764 	  wide_int bhp1 = wi::to_wide (base_high) + 1;
1765 
1766 	  /* Merge the cases if they jump to the same place,
1767 	     and their ranges are consecutive.  */
1768 	  if (merge_bb == base_bb
1769 	      && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
1770 	    {
1771 	      base_high = CASE_HIGH (merge_case) ?
1772 		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1773 	      CASE_HIGH (base_case) = base_high;
1774 	      next_index++;
1775 	    }
1776 	  else
1777 	    break;
1778 	}
1779 
1780       /* Discard cases that have an unreachable destination block.  */
1781       if (EDGE_COUNT (base_bb->succs) == 0
1782 	  && gimple_seq_unreachable_p (bb_seq (base_bb))
1783 	  /* Don't optimize this if the __builtin_unreachable () is the
1784 	     one implicitly added by the C++ FE too early, before
1785 	     -Wreturn-type can be diagnosed.  We'll optimize it later,
1786 	     during the switchconv pass or any other cfg cleanup.  */
1787 	  && (gimple_in_ssa_p (cfun)
1788 	      || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb)))
1789 		  != BUILTINS_LOCATION)))
1790 	{
1791 	  edge base_edge = find_edge (gimple_bb (stmt), base_bb);
1792 	  if (base_edge != NULL)
1793 	    remove_edge_and_dominated_blocks (base_edge);
1794 	  i = next_index;
1795 	  continue;
1796 	}
1797 
1798       if (new_size < i)
1799 	gimple_switch_set_label (stmt, new_size,
1800 				 gimple_switch_label (stmt, i));
1801       i = next_index;
1802       new_size++;
1803     }
1804 
1805   gcc_assert (new_size <= old_size);
1806 
1807   if (new_size < old_size)
1808     gimple_switch_set_num_labels (stmt, new_size);
1809 
1810   return new_size < old_size;
1811 }
1812 
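/* An illustrative sketch (hypothetical input) of the grouping done by
   group_case_labels_stmt: given the sorted label vector

     default: goto D;  case 1: goto L;  case 2: goto L;
     case 3: goto L;  case 7: goto M;

   the three consecutive labels reaching L collapse into one range label,

     default: goto D;  case 1 ... 3: goto L;  case 7: goto M;

   after which gimple_switch_set_num_labels shrinks the vector.  */
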
1813 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1814    and scan the sorted vector of cases.  Combine the ones jumping to the
1815    same label.  */
1816 
1817 bool
1818 group_case_labels (void)
1819 {
1820   basic_block bb;
1821   bool changed = false;
1822 
1823   FOR_EACH_BB_FN (bb, cfun)
1824     {
1825       gimple *stmt = last_stmt (bb);
1826       if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1827 	changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
1828     }
1829 
1830   return changed;
1831 }
1832 
1833 /* Checks whether we can merge block B into block A.  */
1834 
1835 static bool
1836 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1837 {
1838   gimple *stmt;
1839 
1840   if (!single_succ_p (a))
1841     return false;
1842 
1843   if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1844     return false;
1845 
1846   if (single_succ (a) != b)
1847     return false;
1848 
1849   if (!single_pred_p (b))
1850     return false;
1851 
1852   if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1853       || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1854     return false;
1855 
1856   /* If A ends by a statement causing exceptions or something similar, we
1857      cannot merge the blocks.  */
1858   stmt = last_stmt (a);
1859   if (stmt && stmt_ends_bb_p (stmt))
1860     return false;
1861 
1862   /* Do not allow a block with only a non-local label to be merged.  */
1863   if (stmt)
1864     if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1865       if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1866 	return false;
1867 
1868   /* Examine the labels at the beginning of B.  */
1869   for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1870        gsi_next (&gsi))
1871     {
1872       tree lab;
1873       glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1874       if (!label_stmt)
1875 	break;
1876       lab = gimple_label_label (label_stmt);
1877 
1878       /* Do not remove user-forced labels or, at -O0, any user labels.  */
1879       if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1880 	return false;
1881     }
1882 
1883   /* Protect simple loop latches.  We only want to avoid merging
1884      the latch with the loop header or with a block in another
1885      loop in this case.  */
1886   if (current_loops
1887       && b->loop_father->latch == b
1888       && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1889       && (b->loop_father->header == a
1890 	  || b->loop_father != a->loop_father))
1891     return false;
1892 
1893   /* It must be possible to eliminate all phi nodes in B.  If ssa form
1894      is not up-to-date and a name-mapping is registered, we cannot eliminate
1895      any phis.  Symbols marked for renaming are never a problem though.  */
1896   for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1897        gsi_next (&gsi))
1898     {
1899       gphi *phi = gsi.phi ();
1900       /* Technically only new names matter.  */
1901       if (name_registered_for_update_p (PHI_RESULT (phi)))
1902 	return false;
1903     }
1904 
1905   /* When not optimizing, don't merge if we'd lose goto_locus.  */
1906   if (!optimize
1907       && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1908     {
1909       location_t goto_locus = single_succ_edge (a)->goto_locus;
1910       gimple_stmt_iterator prev, next;
1911       prev = gsi_last_nondebug_bb (a);
1912       next = gsi_after_labels (b);
1913       if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1914 	gsi_next_nondebug (&next);
1915       if ((gsi_end_p (prev)
1916 	   || gimple_location (gsi_stmt (prev)) != goto_locus)
1917 	  && (gsi_end_p (next)
1918 	      || gimple_location (gsi_stmt (next)) != goto_locus))
1919 	return false;
1920     }
1921 
1922   return true;
1923 }
1924 
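/* A condensed sketch of the only CFG shape that passes the checks
   above: A has exactly one (fallthru, non-complex) successor B, B has
   exactly one predecessor A, A does not end in a control statement,
   and B carries no nonlocal/forced labels and no PHI nodes that cannot
   be eliminated.  Simple loop latches and goto_locus-carrying edges
   add the further restrictions spelled out in the code.  */
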
1925 /* Replaces all uses of NAME by VAL.  */
1926 
1927 void
1928 replace_uses_by (tree name, tree val)
1929 {
1930   imm_use_iterator imm_iter;
1931   use_operand_p use;
1932   gimple *stmt;
1933   edge e;
1934 
1935   FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1936     {
1937       /* Mark the block if we change the last stmt in it.  */
1938       if (cfgcleanup_altered_bbs
1939 	  && stmt_ends_bb_p (stmt))
1940 	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1941 
1942       FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1943         {
1944 	  replace_exp (use, val);
1945 
1946 	  if (gimple_code (stmt) == GIMPLE_PHI)
1947 	    {
1948 	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1949 				       PHI_ARG_INDEX_FROM_USE (use));
1950 	      if (e->flags & EDGE_ABNORMAL
1951 		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1952 		{
1953 		  /* This can only occur for virtual operands, since
1954 		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1955 		     would prevent replacement.  */
1956 		  gcc_checking_assert (virtual_operand_p (name));
1957 		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1958 		}
1959 	    }
1960 	}
1961 
1962       if (gimple_code (stmt) != GIMPLE_PHI)
1963 	{
1964 	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1965 	  gimple *orig_stmt = stmt;
1966 	  size_t i;
1967 
1968 	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
1969 	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
1970 	     only change sth from non-invariant to invariant, and only
1971 	     when propagating constants.  */
1972 	  if (is_gimple_min_invariant (val))
1973 	    for (i = 0; i < gimple_num_ops (stmt); i++)
1974 	      {
1975 		tree op = gimple_op (stmt, i);
1976 		/* Operands may be empty here.  For example, the labels
1977 		   of a GIMPLE_COND are nulled out following the creation
1978 		   of the corresponding CFG edges.  */
1979 		if (op && TREE_CODE (op) == ADDR_EXPR)
1980 		  recompute_tree_invariant_for_addr_expr (op);
1981 	      }
1982 
1983 	  if (fold_stmt (&gsi))
1984 	    stmt = gsi_stmt (gsi);
1985 
1986 	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1987 	    gimple_purge_dead_eh_edges (gimple_bb (stmt));
1988 
1989 	  update_stmt (stmt);
1990 	}
1991     }
1992 
1993   gcc_checking_assert (has_zero_uses (name));
1994 
1995   /* Also update the trees stored in loop structures.  */
1996   if (current_loops)
1997     {
1998       struct loop *loop;
1999 
2000       FOR_EACH_LOOP (loop, 0)
2001 	{
2002 	  substitute_in_loop_info (loop, name, val);
2003 	}
2004     }
2005 }
2006 
2007 /* Merge block B into block A.  */
2008 
2009 static void
2010 gimple_merge_blocks (basic_block a, basic_block b)
2011 {
2012   gimple_stmt_iterator last, gsi;
2013   gphi_iterator psi;
2014 
2015   if (dump_file)
2016     fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
2017 
2018   /* Remove all single-valued PHI nodes from block B of the form
2019      V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
2020   gsi = gsi_last_bb (a);
2021   for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
2022     {
2023       gimple *phi = gsi_stmt (psi);
2024       tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
2025       gimple *copy;
2026       bool may_replace_uses = (virtual_operand_p (def)
2027 			       || may_propagate_copy (def, use));
2028 
2029       /* In case we maintain loop closed ssa form, do not propagate arguments
2030 	 of loop exit phi nodes.  */
2031       if (current_loops
2032 	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
2033 	  && !virtual_operand_p (def)
2034 	  && TREE_CODE (use) == SSA_NAME
2035 	  && a->loop_father != b->loop_father)
2036 	may_replace_uses = false;
2037 
2038       if (!may_replace_uses)
2039 	{
2040 	  gcc_assert (!virtual_operand_p (def));
2041 
2042 	  /* Note that just emitting the copies is fine -- there is no problem
2043 	     with ordering of phi nodes.  This is because A is the single
2044 	     predecessor of B, therefore results of the phi nodes cannot
2045 	     appear as arguments of the phi nodes.  */
2046 	  copy = gimple_build_assign (def, use);
2047 	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
2048           remove_phi_node (&psi, false);
2049 	}
2050       else
2051         {
2052 	  /* If we deal with a PHI for virtual operands, we can simply
2053 	     propagate these without fussing with folding or updating
2054 	     the stmt.  */
2055 	  if (virtual_operand_p (def))
2056 	    {
2057 	      imm_use_iterator iter;
2058 	      use_operand_p use_p;
2059 	      gimple *stmt;
2060 
2061 	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2062 		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2063 		  SET_USE (use_p, use);
2064 
2065 	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2066 		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2067 	    }
2068 	  else
2069             replace_uses_by (def, use);
2070 
2071           remove_phi_node (&psi, true);
2072         }
2073     }
2074 
2075   /* Ensure that B follows A.  */
2076   move_block_after (b, a);
2077 
2078   gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2079   gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2080 
2081   /* Remove labels from B and set gimple_bb to A for other statements.  */
2082   for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2083     {
2084       gimple *stmt = gsi_stmt (gsi);
2085       if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2086 	{
2087 	  tree label = gimple_label_label (label_stmt);
2088 	  int lp_nr;
2089 
2090 	  gsi_remove (&gsi, false);
2091 
2092 	  /* Now that we can thread computed gotos, we might have
2093 	     a situation where we have a forced label in block B.
2094 	     However, the label at the start of block B might still be
2095 	     used in other ways (think about the runtime checking for
2096 	     Fortran assigned gotos).  So we cannot just delete the
2097 	     label.  Instead we move the label to the start of block A.  */
2098 	  if (FORCED_LABEL (label))
2099 	    {
2100 	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2101 	      gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2102 	    }
2103 	  /* Other user labels are kept around in the form of a debug stmt.  */
2104 	  else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_BIND_STMTS)
2105 	    {
2106 	      gimple *dbg = gimple_build_debug_bind (label,
2107 						     integer_zero_node,
2108 						     stmt);
2109 	      gimple_debug_bind_reset_value (dbg);
2110 	      gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2111 	    }
2112 
2113 	  lp_nr = EH_LANDING_PAD_NR (label);
2114 	  if (lp_nr)
2115 	    {
2116 	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2117 	      lp->post_landing_pad = NULL;
2118 	    }
2119 	}
2120       else
2121 	{
2122 	  gimple_set_bb (stmt, a);
2123 	  gsi_next (&gsi);
2124 	}
2125     }
2126 
2127   /* When merging two BBs, if their counts are different, the larger count
2128      is selected as the new bb count.  This is to handle inconsistent
2129      profiles.  */
2130   if (a->loop_father == b->loop_father)
2131     {
2132       a->count = a->count.merge (b->count);
2133     }
2134 
2135   /* Merge the sequences.  */
2136   last = gsi_last_bb (a);
2137   gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2138   set_bb_seq (b, NULL);
2139 
2140   if (cfgcleanup_altered_bbs)
2141     bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2142 }
2143 
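/* For illustration, the PHI handling in gimple_merge_blocks above: if B
   begins with

     x_2 = PHI <x_1(A)>

   and x_1 may be propagated, every use of x_2 is rewritten to x_1 and
   the PHI is removed; otherwise the explicit copy x_2 = x_1 is appended
   to A before the two statement sequences are joined.  */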
2144 
2145 /* Return the one of the two successors of BB that is not reachable by a
2146    complex edge, if there is one.  Else, return BB.  We use
2147    this in optimizations that use post-dominators for their heuristics,
2148    to catch the cases in C++ where function calls are involved.  */
2149 
2150 basic_block
2151 single_noncomplex_succ (basic_block bb)
2152 {
2153   edge e0, e1;
2154   if (EDGE_COUNT (bb->succs) != 2)
2155     return bb;
2156 
2157   e0 = EDGE_SUCC (bb, 0);
2158   e1 = EDGE_SUCC (bb, 1);
2159   if (e0->flags & EDGE_COMPLEX)
2160     return e1->dest;
2161   if (e1->flags & EDGE_COMPLEX)
2162     return e0->dest;
2163 
2164   return bb;
2165 }
2166 
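/* For example, in C++ a block ending in a call that may throw has two
   successors: the fallthru edge and an EH (complex) edge to the landing
   pad.  single_noncomplex_succ returns the fallthru destination there,
   which is the block post-dominator-based heuristics usually want.  */
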
2167 /* CALL is a GIMPLE_CALL.  Set current_function_calls_* flags.  */
2168 
2169 void
2170 notice_special_calls (gcall *call)
2171 {
2172   int flags = gimple_call_flags (call);
2173 
2174   if (flags & ECF_MAY_BE_ALLOCA)
2175     cfun->calls_alloca = true;
2176   if (flags & ECF_RETURNS_TWICE)
2177     cfun->calls_setjmp = true;
2178 }
2179 
2180 
2181 /* Clear flags set by notice_special_calls.  Used by dead code removal
2182    to update the flags.  */
2183 
2184 void
2185 clear_special_calls (void)
2186 {
2187   cfun->calls_alloca = false;
2188   cfun->calls_setjmp = false;
2189 }
2190 
2191 /* Remove PHI nodes associated with basic block BB and all edges out of BB.  */
2192 
2193 static void
2194 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2195 {
2196   /* Since this block is no longer reachable, we can just delete all
2197      of its PHI nodes.  */
2198   remove_phi_nodes (bb);
2199 
2200   /* Remove edges to BB's successors.  */
2201   while (EDGE_COUNT (bb->succs) > 0)
2202     remove_edge (EDGE_SUCC (bb, 0));
2203 }
2204 
2205 
2206 /* Remove statements of basic block BB.  */
2207 
2208 static void
2209 remove_bb (basic_block bb)
2210 {
2211   gimple_stmt_iterator i;
2212 
2213   if (dump_file)
2214     {
2215       fprintf (dump_file, "Removing basic block %d\n", bb->index);
2216       if (dump_flags & TDF_DETAILS)
2217 	{
2218 	  dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2219 	  fprintf (dump_file, "\n");
2220 	}
2221     }
2222 
2223   if (current_loops)
2224     {
2225       struct loop *loop = bb->loop_father;
2226 
2227       /* If a loop gets removed, clean up the information associated
2228 	 with it.  */
2229       if (loop->latch == bb
2230 	  || loop->header == bb)
2231 	free_numbers_of_iterations_estimates (loop);
2232     }
2233 
2234   /* Remove all the instructions in the block.  */
2235   if (bb_seq (bb) != NULL)
2236     {
2237       /* Walk backwards so as to get a chance to substitute all
2238 	 released DEFs into debug stmts.  See
2239 	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2240 	 details.  */
2241       for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2242 	{
2243 	  gimple *stmt = gsi_stmt (i);
2244 	  glabel *label_stmt = dyn_cast <glabel *> (stmt);
2245 	  if (label_stmt
2246 	      && (FORCED_LABEL (gimple_label_label (label_stmt))
2247 		  || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2248 	    {
2249 	      basic_block new_bb;
2250 	      gimple_stmt_iterator new_gsi;
2251 
2252 	      /* A non-reachable non-local label may still be referenced.
2253 		 But it no longer needs to carry the extra semantics of
2254 		 non-locality.  */
2255 	      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2256 		{
2257 		  DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2258 		  FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2259 		}
2260 
2261 	      new_bb = bb->prev_bb;
2262 	      /* Don't move any labels into the ENTRY block.  */
2263 	      if (new_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2264 		{
2265 		  new_bb = single_succ (new_bb);
2266 		  gcc_assert (new_bb != bb);
2267 		}
2268 	      new_gsi = gsi_after_labels (new_bb);
2269 	      gsi_remove (&i, false);
2270 	      gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2271 	    }
2272 	  else
2273 	    {
2274 	      /* Release SSA definitions.  */
2275 	      release_defs (stmt);
2276 	      gsi_remove (&i, true);
2277 	    }
2278 
2279 	  if (gsi_end_p (i))
2280 	    i = gsi_last_bb (bb);
2281 	  else
2282 	    gsi_prev (&i);
2283 	}
2284     }
2285 
2286   remove_phi_nodes_and_edges_for_unreachable_block (bb);
2287   bb->il.gimple.seq = NULL;
2288   bb->il.gimple.phi_nodes = NULL;
2289 }
2290 
2291 
2292 /* Given a basic block BB and a value VAL for use in the final statement
2293    of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
2294    the edge that will be taken out of the block.
2295    If VAL is NULL_TREE, then the current value of the final statement's
2296    predicate or index is used.
2297    If the value does not match a unique edge, NULL is returned.  */
2298 
2299 edge
2300 find_taken_edge (basic_block bb, tree val)
2301 {
2302   gimple *stmt;
2303 
2304   stmt = last_stmt (bb);
2305 
2306   /* Handle ENTRY and EXIT.  */
2307   if (!stmt)
2308     return NULL;
2309 
2310   if (gimple_code (stmt) == GIMPLE_COND)
2311     return find_taken_edge_cond_expr (as_a <gcond *> (stmt), val);
2312 
2313   if (gimple_code (stmt) == GIMPLE_SWITCH)
2314     return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), val);
2315 
2316   if (computed_goto_p (stmt))
2317     {
2318       /* Only optimize if the argument is a label; if the argument is
2319 	 not a label then we cannot construct a proper CFG.
2320 
2321          It may be the case that we only need to allow the LABEL_REF to
2322          appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2323          appear inside a LABEL_EXPR just to be safe.  */
2324       if (val
2325 	  && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2326 	  && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2327 	return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2328     }
2329 
2330   /* Otherwise we only know the taken successor edge if it's unique.  */
2331   return single_succ_p (bb) ? single_succ_edge (bb) : NULL;
2332 }
2333 
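/* A short usage sketch (hypothetical names): for a block ending in

     if (x_1 > 4) goto L1; else goto L2;

   find_taken_edge (bb, integer_zero_node) returns the edge to L2;
   passing NULL_TREE yields an edge only if the predicate has already
   been folded to a constant.  */
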
2334 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2335    statement, determine which of the outgoing edges will be taken out of the
2336    block.  Return NULL if either edge may be taken.  */
2337 
2338 static edge
2339 find_taken_edge_computed_goto (basic_block bb, tree val)
2340 {
2341   basic_block dest;
2342   edge e = NULL;
2343 
2344   dest = label_to_block (cfun, val);
2345   if (dest)
2346     e = find_edge (bb, dest);
2347 
2348   /* It's possible for find_edge to return NULL here on invalid code
2349      that abuses the labels-as-values extension (e.g. code that attempts to
2350      jump *between* functions via stored labels-as-values; PR 84136).
2351      If so, then we simply return NULL for the edge.
2352      We don't currently have a way of detecting such invalid code, so we
2353      can't assert that it was the case when a NULL edge occurs here.  */
2354 
2355   return e;
2356 }
2357 
2358 /* Given COND_STMT and a constant value VAL for use as the predicate,
2359    determine which of the two edges will be taken out of
2360    the statement's block.  Return NULL if either edge may be taken.
2361    If VAL is NULL_TREE, then the current value of COND_STMT's predicate
2362    is used.  */
2363 
2364 static edge
2365 find_taken_edge_cond_expr (const gcond *cond_stmt, tree val)
2366 {
2367   edge true_edge, false_edge;
2368 
2369   if (val == NULL_TREE)
2370     {
2371       /* Use the current value of the predicate.  */
2372       if (gimple_cond_true_p (cond_stmt))
2373 	val = integer_one_node;
2374       else if (gimple_cond_false_p (cond_stmt))
2375 	val = integer_zero_node;
2376       else
2377 	return NULL;
2378     }
2379   else if (TREE_CODE (val) != INTEGER_CST)
2380     return NULL;
2381 
2382   extract_true_false_edges_from_block (gimple_bb (cond_stmt),
2383 				       &true_edge, &false_edge);
2384 
2385   return (integer_zerop (val) ? false_edge : true_edge);
2386 }
2387 
2388 /* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
2389    which edge will be taken out of the statement's block.  Return NULL if any
2390    edge may be taken.
2391    If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
2392    is used.  */
2393 
2394 edge
2395 find_taken_edge_switch_expr (const gswitch *switch_stmt, tree val)
2396 {
2397   basic_block dest_bb;
2398   edge e;
2399   tree taken_case;
2400 
2401   if (gimple_switch_num_labels (switch_stmt) == 1)
2402     taken_case = gimple_switch_default_label (switch_stmt);
2403   else
2404     {
2405       if (val == NULL_TREE)
2406 	val = gimple_switch_index (switch_stmt);
2407       if (TREE_CODE (val) != INTEGER_CST)
2408 	return NULL;
2409       else
2410 	taken_case = find_case_label_for_value (switch_stmt, val);
2411     }
2412   dest_bb = label_to_block (cfun, CASE_LABEL (taken_case));
2413 
2414   e = find_edge (gimple_bb (switch_stmt), dest_bb);
2415   gcc_assert (e);
2416   return e;
2417 }
2418 
2419 
2420 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2421    We can make optimal use here of the fact that the case labels are
2422    sorted: We can do a binary search for a case matching VAL.  */
2423 
2424 tree
2425 find_case_label_for_value (const gswitch *switch_stmt, tree val)
2426 {
2427   size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2428   tree default_case = gimple_switch_default_label (switch_stmt);
2429 
2430   for (low = 0, high = n; high - low > 1; )
2431     {
2432       size_t i = (high + low) / 2;
2433       tree t = gimple_switch_label (switch_stmt, i);
2434       int cmp;
2435 
2436       /* Cache the result of comparing CASE_LOW and val.  */
2437       cmp = tree_int_cst_compare (CASE_LOW (t), val);
2438 
2439       if (cmp > 0)
2440 	high = i;
2441       else
2442 	low = i;
2443 
2444       if (CASE_HIGH (t) == NULL)
2445 	{
2446 	  /* A single-valued case label.  */
2447 	  if (cmp == 0)
2448 	    return t;
2449 	}
2450       else
2451 	{
2452 	  /* A case range.  We can only handle integer ranges.  */
2453 	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2454 	    return t;
2455 	}
2456     }
2457 
2458   return default_case;
2459 }
2460 
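/* A worked example of the binary search above, on the hypothetical
   sorted vector {default, 1, 5 ... 9, 12} with VAL == 7: the first
   probe lands on "5 ... 9", where CASE_LOW (5) <= 7 <= CASE_HIGH (9),
   so that range label is returned.  The search maintains that every
   label at or below LOW has CASE_LOW <= VAL (the default label at
   index 0 is never probed), so the default label is what remains when
   no case matches.  */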
2461 
2462 /* Dump a basic block on stderr.  */
2463 
2464 void
2465 gimple_debug_bb (basic_block bb)
2466 {
2467   dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2468 }
2469 
2470 
2471 /* Dump basic block with index N on stderr.  */
2472 
2473 basic_block
2474 gimple_debug_bb_n (int n)
2475 {
2476   gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2477   return BASIC_BLOCK_FOR_FN (cfun, n);
2478 }
2479 
2480 
2481 /* Dump the CFG on stderr.
2482 
2483    FLAGS are the same used by the tree dumping functions
2484    (see TDF_* in dumpfile.h).  */
2485 
2486 void
2487 gimple_debug_cfg (dump_flags_t flags)
2488 {
2489   gimple_dump_cfg (stderr, flags);
2490 }
2491 
2492 
2493 /* Dump the program showing basic block boundaries on the given FILE.
2494 
2495    FLAGS are the same used by the tree dumping functions (see TDF_* in
2496    tree.h).  */
2497 
2498 void
2499 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2500 {
2501   if (flags & TDF_DETAILS)
2502     {
2503       dump_function_header (file, current_function_decl, flags);
2504       fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2505 	       n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2506 	       last_basic_block_for_fn (cfun));
2507 
2508       brief_dump_cfg (file, flags);
2509       fprintf (file, "\n");
2510     }
2511 
2512   if (flags & TDF_STATS)
2513     dump_cfg_stats (file);
2514 
2515   dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2516 }
2517 
2518 
2519 /* Dump CFG statistics on FILE.  */
2520 
2521 void
2522 dump_cfg_stats (FILE *file)
2523 {
2524   static long max_num_merged_labels = 0;
2525   unsigned long size, total = 0;
2526   long num_edges;
2527   basic_block bb;
2528   const char * const fmt_str   = "%-30s%-13s%12s\n";
2529   const char * const fmt_str_1 = "%-30s%13d" PRsa (11) "\n";
2530   const char * const fmt_str_2 = "%-30s%13ld" PRsa (11) "\n";
2531   const char * const fmt_str_3 = "%-43s" PRsa (11) "\n";
2532   const char *funcname = current_function_name ();
2533 
2534   fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2535 
2536   fprintf (file, "---------------------------------------------------------\n");
2537   fprintf (file, fmt_str, "", "  Number of  ", "Memory");
2538   fprintf (file, fmt_str, "", "  instances  ", "used ");
2539   fprintf (file, "---------------------------------------------------------\n");
2540 
2541   size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2542   total += size;
2543   fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2544 	   SIZE_AMOUNT (size));
2545 
2546   num_edges = 0;
2547   FOR_EACH_BB_FN (bb, cfun)
2548     num_edges += EDGE_COUNT (bb->succs);
2549   size = num_edges * sizeof (struct edge_def);
2550   total += size;
2551   fprintf (file, fmt_str_2, "Edges", num_edges, SIZE_AMOUNT (size));
2552 
2553   fprintf (file, "---------------------------------------------------------\n");
2554   fprintf (file, fmt_str_3, "Total memory used by CFG data",
2555 	   SIZE_AMOUNT (total));
2556   fprintf (file, "---------------------------------------------------------\n");
2557   fprintf (file, "\n");
2558 
2559   if (cfg_stats.num_merged_labels > max_num_merged_labels)
2560     max_num_merged_labels = cfg_stats.num_merged_labels;
2561 
2562   fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2563 	   cfg_stats.num_merged_labels, max_num_merged_labels);
2564 
2565   fprintf (file, "\n");
2566 }
2567 
2568 
2569 /* Dump CFG statistics on stderr.  Keep extern so that it's always
2570    linked in the final executable.  */
2571 
2572 DEBUG_FUNCTION void
2573 debug_cfg_stats (void)
2574 {
2575   dump_cfg_stats (stderr);
2576 }
2577 
2578 /*---------------------------------------------------------------------------
2579 			     Miscellaneous helpers
2580 ---------------------------------------------------------------------------*/
2581 
2582 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2583    flow.  Transfers of control flow associated with EH are excluded.  */
2584 
2585 static bool
2586 call_can_make_abnormal_goto (gimple *t)
2587 {
2588   /* If the function has no non-local labels, then a call cannot make an
2589      abnormal transfer of control.  */
2590   if (!cfun->has_nonlocal_label
2591       && !cfun->calls_setjmp)
2592    return false;
2593 
2594   /* Likewise if the call has no side effects.  */
2595   if (!gimple_has_side_effects (t))
2596     return false;
2597 
2598   /* Likewise if the called function is leaf.  */
2599   if (gimple_call_flags (t) & ECF_LEAF)
2600     return false;
2601 
2602   return true;
2603 }
2604 
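/* For instance, in a function that calls setjmp or defines a nonlocal
   label, any side-effecting, non-leaf call may transfer control back to
   the setjmp receiver or to the nonlocal label.  The function above
   therefore answers true for such calls, and the CFG later gets the
   corresponding abnormal edges.  */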
2605 
2606 /* Return true if T can make an abnormal transfer of control flow.
2607    Transfers of control flow associated with EH are excluded.  */
2608 
2609 bool
2610 stmt_can_make_abnormal_goto (gimple *t)
2611 {
2612   if (computed_goto_p (t))
2613     return true;
2614   if (is_gimple_call (t))
2615     return call_can_make_abnormal_goto (t);
2616   return false;
2617 }
2618 
2619 
2620 /* Return true if T represents a stmt that always transfers control.  */
2621 
2622 bool
2623 is_ctrl_stmt (gimple *t)
2624 {
2625   switch (gimple_code (t))
2626     {
2627     case GIMPLE_COND:
2628     case GIMPLE_SWITCH:
2629     case GIMPLE_GOTO:
2630     case GIMPLE_RETURN:
2631     case GIMPLE_RESX:
2632       return true;
2633     default:
2634       return false;
2635     }
2636 }
2637 
2638 
2639 /* Return true if T is a statement that may alter the flow of control
2640    (e.g., a call to a non-returning function).  */
2641 
2642 bool
2643 is_ctrl_altering_stmt (gimple *t)
2644 {
2645   gcc_assert (t);
2646 
2647   switch (gimple_code (t))
2648     {
2649     case GIMPLE_CALL:
2650       /* The per-stmt call flag indicates whether the call could alter
2651 	 control flow.  */
2652       if (gimple_call_ctrl_altering_p (t))
2653 	return true;
2654       break;
2655 
2656     case GIMPLE_EH_DISPATCH:
2657       /* EH_DISPATCH branches to the individual catch handlers at
2658 	 this level of a try or allowed-exceptions region.  It can
2659 	 fallthru to the next statement as well.  */
2660       return true;
2661 
2662     case GIMPLE_ASM:
2663       if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2664 	return true;
2665       break;
2666 
2667     CASE_GIMPLE_OMP:
2668       /* OpenMP directives alter control flow.  */
2669       return true;
2670 
2671     case GIMPLE_TRANSACTION:
2672       /* A transaction start alters control flow.  */
2673       return true;
2674 
2675     default:
2676       break;
2677     }
2678 
2679   /* If a statement can throw, it alters control flow.  */
2680   return stmt_can_throw_internal (cfun, t);
2681 }
2682 
2683 
2684 /* Return true if T is a simple local goto.  */
2685 
2686 bool
2687 simple_goto_p (gimple *t)
2688 {
2689   return (gimple_code (t) == GIMPLE_GOTO
2690 	  && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2691 }
2692 
2693 
2694 /* Return true if STMT should start a new basic block.  PREV_STMT is
2695    the statement preceding STMT.  It is used when STMT is a label or a
2696    case label.  Labels should only start a new basic block if their
2697    previous statement wasn't a label.  Otherwise, a sequence of labels
2698    would generate unnecessary basic blocks that only contain a single
2699    label.  */
2700 
2701 static inline bool
2702 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2703 {
2704   if (stmt == NULL)
2705     return false;
2706 
2707   /* PREV_STMT is only set to a debug stmt if the debug stmt is before
2708      any nondebug stmts in the block.  We don't want to start another
2709      block in this case: the debug stmt will already have started the
2710      one STMT would start if we weren't outputting debug stmts.  */
2711   if (prev_stmt && is_gimple_debug (prev_stmt))
2712     return false;
2713 
2714   /* Labels start a new basic block only if the preceding statement
2715      wasn't a label of the same type.  This prevents the creation of
2716      consecutive blocks that have nothing but a single label.  */
2717   if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2718     {
2719       /* Nonlocal and computed GOTO targets always start a new block.  */
2720       if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2721 	  || FORCED_LABEL (gimple_label_label (label_stmt)))
2722 	return true;
2723 
2724       if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2725 	{
2726 	  if (DECL_NONLOCAL (gimple_label_label (
2727 			       as_a <glabel *> (prev_stmt))))
2728 	    return true;
2729 
2730 	  cfg_stats.num_merged_labels++;
2731 	  return false;
2732 	}
2733       else
2734 	return true;
2735     }
2736   else if (gimple_code (stmt) == GIMPLE_CALL)
2737     {
2738       if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2739 	/* setjmp acts similarly to a nonlocal GOTO target and thus should
2740 	   start a new block.  */
2741 	return true;
2742       if (gimple_call_internal_p (stmt, IFN_PHI)
2743 	  && prev_stmt
2744 	  && gimple_code (prev_stmt) != GIMPLE_LABEL
2745 	  && (gimple_code (prev_stmt) != GIMPLE_CALL
2746 	      || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2747 	/* PHI nodes start a new block unless preceded by a label
2748 	   or another PHI.  */
2749 	return true;
2750     }
2751 
2752   return false;
2753 }
2754 
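/* An illustrative example of the label merging above: in

     L1:
     L2:
       x_1 = ...;

   only L1 starts a new basic block; L2 is counted in
   cfg_stats.num_merged_labels and shares L1's block, unless it is a
   nonlocal or forced label, which always gets its own block.  */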
2755 
2756 /* Return true if T should end a basic block.  */
2757 
2758 bool
2759 stmt_ends_bb_p (gimple *t)
2760 {
2761   return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2762 }
2763 
2764 /* Remove block annotations and other data structures.  */
2765 
2766 void
2767 delete_tree_cfg_annotations (struct function *fn)
2768 {
2769   vec_free (label_to_block_map_for_fn (fn));
2770 }
2771 
2772 /* Return the virtual phi in BB.  */
2773 
2774 gphi *
2775 get_virtual_phi (basic_block bb)
2776 {
2777   for (gphi_iterator gsi = gsi_start_phis (bb);
2778        !gsi_end_p (gsi);
2779        gsi_next (&gsi))
2780     {
2781       gphi *phi = gsi.phi ();
2782 
2783       if (virtual_operand_p (PHI_RESULT (phi)))
2784 	return phi;
2785     }
2786 
2787   return NULL;
2788 }
2789 
2790 /* Return the first statement in basic block BB.  */
2791 
2792 gimple *
2793 first_stmt (basic_block bb)
2794 {
2795   gimple_stmt_iterator i = gsi_start_bb (bb);
2796   gimple *stmt = NULL;
2797 
2798   while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2799     {
2800       gsi_next (&i);
2801       stmt = NULL;
2802     }
2803   return stmt;
2804 }
2805 
2806 /* Return the first non-label statement in basic block BB.  */
2807 
2808 static gimple *
2809 first_non_label_stmt (basic_block bb)
2810 {
2811   gimple_stmt_iterator i = gsi_start_bb (bb);
2812   while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2813     gsi_next (&i);
2814   return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2815 }
2816 
2817 /* Return the last statement in basic block BB.  */
2818 
2819 gimple *
2820 last_stmt (basic_block bb)
2821 {
2822   gimple_stmt_iterator i = gsi_last_bb (bb);
2823   gimple *stmt = NULL;
2824 
2825   while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2826     {
2827       gsi_prev (&i);
2828       stmt = NULL;
2829     }
2830   return stmt;
2831 }
2832 
2833 /* Return the last statement of an otherwise empty block.  Return NULL
2834    if the block is totally empty, or if it contains more than one
2835    statement.  */
2836 
2837 gimple *
2838 last_and_only_stmt (basic_block bb)
2839 {
2840   gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2841   gimple *last, *prev;
2842 
2843   if (gsi_end_p (i))
2844     return NULL;
2845 
2846   last = gsi_stmt (i);
2847   gsi_prev_nondebug (&i);
2848   if (gsi_end_p (i))
2849     return last;
2850 
2851   /* Empty statements should no longer appear in the instruction stream.
2852      Everything that might have appeared before should be deleted by
2853      remove_useless_stmts, and the optimizers should just gsi_remove
2854      instead of smashing with build_empty_stmt.
2855 
2856      Thus the only thing that should appear here in a block containing
2857      one executable statement is a label.  */
2858   prev = gsi_stmt (i);
2859   if (gimple_code (prev) == GIMPLE_LABEL)
2860     return last;
2861   else
2862     return NULL;
2863 }
2864 
2865 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE.  */
2866 
2867 static void
2868 reinstall_phi_args (edge new_edge, edge old_edge)
2869 {
2870   edge_var_map *vm;
2871   int i;
2872   gphi_iterator phis;
2873 
2874   vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2875   if (!v)
2876     return;
2877 
2878   for (i = 0, phis = gsi_start_phis (new_edge->dest);
2879        v->iterate (i, &vm) && !gsi_end_p (phis);
2880        i++, gsi_next (&phis))
2881     {
2882       gphi *phi = phis.phi ();
2883       tree result = redirect_edge_var_map_result (vm);
2884       tree arg = redirect_edge_var_map_def (vm);
2885 
2886       gcc_assert (result == gimple_phi_result (phi));
2887 
2888       add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2889     }
2890 
2891   redirect_edge_var_map_clear (old_edge);
2892 }
2893 
2894 /* Returns the basic block after which the new basic block created
2895    by splitting edge EDGE_IN should be placed.  Tries to keep the new block
2896    near its "logical" location.  This is of most help to humans looking
2897    at debugging dumps.  */
2898 
2899 basic_block
2900 split_edge_bb_loc (edge edge_in)
2901 {
2902   basic_block dest = edge_in->dest;
2903   basic_block dest_prev = dest->prev_bb;
2904 
2905   if (dest_prev)
2906     {
2907       edge e = find_edge (dest_prev, dest);
2908       if (e && !(e->flags & EDGE_COMPLEX))
2909 	return edge_in->src;
2910     }
2911   return dest_prev;
2912 }
2913 
2914 /* Split a (typically critical) edge EDGE_IN.  Return the new block.
2915    Abort on abnormal edges.  */
2916 
2917 static basic_block
2918 gimple_split_edge (edge edge_in)
2919 {
2920   basic_block new_bb, after_bb, dest;
2921   edge new_edge, e;
2922 
2923   /* Abnormal edges cannot be split.  */
2924   gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2925 
2926   dest = edge_in->dest;
2927 
2928   after_bb = split_edge_bb_loc (edge_in);
2929 
2930   new_bb = create_empty_bb (after_bb);
2931   new_bb->count = edge_in->count ();
2932 
2933   e = redirect_edge_and_branch (edge_in, new_bb);
2934   gcc_assert (e == edge_in);
2935 
2936   new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
2937   reinstall_phi_args (new_edge, e);
2938 
2939   return new_bb;
2940 }
2941 
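/* A sketch of the effect of gimple_split_edge on a critical edge
   SRC -> DEST: a fresh empty block NEW_BB is created (placed via
   split_edge_bb_loc), the edge is redirected to form SRC -> NEW_BB,
   and a single fallthru edge NEW_BB -> DEST is added.  Any PHI
   arguments DEST had on the original edge are reinstalled on the new
   fallthru edge by reinstall_phi_args.  */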
2942 
2943 /* Verify properties of the address expression T whose base should be
2944    TREE_ADDRESSABLE if VERIFY_ADDRESSABLE is true.  */
2945 
2946 static bool
2947 verify_address (tree t, bool verify_addressable)
2948 {
2949   bool old_constant;
2950   bool old_side_effects;
2951   bool new_constant;
2952   bool new_side_effects;
2953 
2954   old_constant = TREE_CONSTANT (t);
2955   old_side_effects = TREE_SIDE_EFFECTS (t);
2956 
2957   recompute_tree_invariant_for_addr_expr (t);
2958   new_side_effects = TREE_SIDE_EFFECTS (t);
2959   new_constant = TREE_CONSTANT (t);
2960 
2961   if (old_constant != new_constant)
2962     {
2963       error ("constant not recomputed when ADDR_EXPR changed");
2964       return true;
2965     }
2966   if (old_side_effects != new_side_effects)
2967     {
2968       error ("side effects not recomputed when ADDR_EXPR changed");
2969       return true;
2970     }
2971 
2972   tree base = TREE_OPERAND (t, 0);
2973   while (handled_component_p (base))
2974     base = TREE_OPERAND (base, 0);
2975 
2976   if (!(VAR_P (base)
2977 	|| TREE_CODE (base) == PARM_DECL
2978 	|| TREE_CODE (base) == RESULT_DECL))
2979     return false;
2980 
2981   if (DECL_GIMPLE_REG_P (base))
2982     {
2983       error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2984       return true;
2985     }
2986 
2987   if (verify_addressable && !TREE_ADDRESSABLE (base))
2988     {
2989       error ("address taken, but ADDRESSABLE bit not set");
2990       return true;
2991     }
2992 
2993   return false;
2994 }
2995 
2996 
2997 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2998    Returns true if there is an error, otherwise false.  */
2999 
3000 static bool
3001 verify_types_in_gimple_min_lval (tree expr)
3002 {
3003   tree op;
3004 
3005   if (is_gimple_id (expr))
3006     return false;
3007 
3008   if (TREE_CODE (expr) != TARGET_MEM_REF
3009       && TREE_CODE (expr) != MEM_REF)
3010     {
3011       error ("invalid expression for min lvalue");
3012       return true;
3013     }
3014 
3015   /* TARGET_MEM_REFs are strange beasts.  */
3016   if (TREE_CODE (expr) == TARGET_MEM_REF)
3017     return false;
3018 
3019   op = TREE_OPERAND (expr, 0);
3020   if (!is_gimple_val (op))
3021     {
3022       error ("invalid operand in indirect reference");
3023       debug_generic_stmt (op);
3024       return true;
3025     }
3026   /* Memory references now generally can involve a value conversion.  */
3027 
3028   return false;
3029 }
3030 
3031 /* Verify if EXPR is a valid GIMPLE reference expression.  If
3032    REQUIRE_LVALUE is true verifies it is an lvalue.  Returns true
3033    if there is an error, otherwise false.  */
3034 
3035 static bool
3036 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3037 {
3038   if (TREE_CODE (expr) == REALPART_EXPR
3039       || TREE_CODE (expr) == IMAGPART_EXPR
3040       || TREE_CODE (expr) == BIT_FIELD_REF)
3041     {
3042       tree op = TREE_OPERAND (expr, 0);
3043       if (!is_gimple_reg_type (TREE_TYPE (expr)))
3044 	{
3045 	  error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3046 	  return true;
3047 	}
3048 
3049       if (TREE_CODE (expr) == BIT_FIELD_REF)
3050 	{
3051 	  tree t1 = TREE_OPERAND (expr, 1);
3052 	  tree t2 = TREE_OPERAND (expr, 2);
3053 	  poly_uint64 size, bitpos;
3054 	  if (!poly_int_tree_p (t1, &size)
3055 	      || !poly_int_tree_p (t2, &bitpos)
3056 	      || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
3057 	      || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
3058 	    {
3059 	      error ("invalid position or size operand to BIT_FIELD_REF");
3060 	      return true;
3061 	    }
3062 	  if (INTEGRAL_TYPE_P (TREE_TYPE (expr))
3063 	      && maybe_ne (TYPE_PRECISION (TREE_TYPE (expr)), size))
3064 	    {
3065 	      error ("integral result type precision does not match "
3066 		     "field size of BIT_FIELD_REF");
3067 	      return true;
3068 	    }
3069 	  else if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3070 		   && TYPE_MODE (TREE_TYPE (expr)) != BLKmode
3071 		   && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (expr))),
3072 				size))
3073 	    {
3074 	      error ("mode size of non-integral result does not "
3075 		     "match field size of BIT_FIELD_REF");
3076 	      return true;
3077 	    }
3078 	  if (INTEGRAL_TYPE_P (TREE_TYPE (op))
3079 	      && !type_has_mode_precision_p (TREE_TYPE (op)))
3080 	    {
3081 	      error ("BIT_FIELD_REF of non-mode-precision operand");
3082 	      return true;
3083 	    }
3084 	  if (!AGGREGATE_TYPE_P (TREE_TYPE (op))
3085 	      && maybe_gt (size + bitpos,
3086 			   tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (op)))))
3087 	    {
3088 	      error ("position plus size exceeds size of referenced object in "
3089 		     "BIT_FIELD_REF");
3090 	      return true;
3091 	    }
3092 	}
3093 
3094       if ((TREE_CODE (expr) == REALPART_EXPR
3095 	   || TREE_CODE (expr) == IMAGPART_EXPR)
3096 	  && !useless_type_conversion_p (TREE_TYPE (expr),
3097 					 TREE_TYPE (TREE_TYPE (op))))
3098 	{
3099 	  error ("type mismatch in real/imagpart reference");
3100 	  debug_generic_stmt (TREE_TYPE (expr));
3101 	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3102 	  return true;
3103 	}
3104       expr = op;
3105     }
3106 
3107   while (handled_component_p (expr))
3108     {
3109       if (TREE_CODE (expr) == REALPART_EXPR
3110 	  || TREE_CODE (expr) == IMAGPART_EXPR
3111 	  || TREE_CODE (expr) == BIT_FIELD_REF)
3112 	{
3113 	  error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3114 	  return true;
3115 	}
3116 
3117       tree op = TREE_OPERAND (expr, 0);
3118 
3119       if (TREE_CODE (expr) == ARRAY_REF
3120 	  || TREE_CODE (expr) == ARRAY_RANGE_REF)
3121 	{
3122 	  if (!is_gimple_val (TREE_OPERAND (expr, 1))
3123 	      || (TREE_OPERAND (expr, 2)
3124 		  && !is_gimple_val (TREE_OPERAND (expr, 2)))
3125 	      || (TREE_OPERAND (expr, 3)
3126 		  && !is_gimple_val (TREE_OPERAND (expr, 3))))
3127 	    {
3128 	      error ("invalid operands to array reference");
3129 	      debug_generic_stmt (expr);
3130 	      return true;
3131 	    }
3132 	}
3133 
3134       /* Verify if the reference array element types are compatible.  */
3135       if (TREE_CODE (expr) == ARRAY_REF
3136 	  && !useless_type_conversion_p (TREE_TYPE (expr),
3137 					 TREE_TYPE (TREE_TYPE (op))))
3138 	{
3139 	  error ("type mismatch in array reference");
3140 	  debug_generic_stmt (TREE_TYPE (expr));
3141 	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3142 	  return true;
3143 	}
3144       if (TREE_CODE (expr) == ARRAY_RANGE_REF
3145 	  && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3146 					 TREE_TYPE (TREE_TYPE (op))))
3147 	{
3148 	  error ("type mismatch in array range reference");
3149 	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3150 	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3151 	  return true;
3152 	}
3153 
3154       if (TREE_CODE (expr) == COMPONENT_REF)
3155 	{
3156 	  if (TREE_OPERAND (expr, 2)
3157 	      && !is_gimple_val (TREE_OPERAND (expr, 2)))
3158 	    {
3159 	      error ("invalid COMPONENT_REF offset operator");
3160 	      return true;
3161 	    }
3162 	  if (!useless_type_conversion_p (TREE_TYPE (expr),
3163 					  TREE_TYPE (TREE_OPERAND (expr, 1))))
3164 	    {
3165 	      error ("type mismatch in component reference");
3166 	      debug_generic_stmt (TREE_TYPE (expr));
3167 	      debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3168 	      return true;
3169 	    }
3170 	}
3171 
3172       if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3173 	{
3174 	  /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3175 	     that their operand is not an SSA name or an invariant when
3176 	     requiring an lvalue (this usually means there is a SRA or IPA-SRA
3177 	     bug).  Otherwise there is nothing to verify, gross mismatches at
3178 	     most invoke undefined behavior.  */
3179 	  if (require_lvalue
3180 	      && (TREE_CODE (op) == SSA_NAME
3181 		  || is_gimple_min_invariant (op)))
3182 	    {
3183 	      error ("conversion of an SSA_NAME on the left hand side");
3184 	      debug_generic_stmt (expr);
3185 	      return true;
3186 	    }
3187 	  else if (TREE_CODE (op) == SSA_NAME
3188 		   && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3189 	    {
3190 	      error ("conversion of register to a different size");
3191 	      debug_generic_stmt (expr);
3192 	      return true;
3193 	    }
3194 	  else if (!handled_component_p (op))
3195 	    return false;
3196 	}
3197 
3198       expr = op;
3199     }
3200 
3201   if (TREE_CODE (expr) == MEM_REF)
3202     {
3203       if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0))
3204 	  || (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
3205 	      && verify_address (TREE_OPERAND (expr, 0), false)))
3206 	{
3207 	  error ("invalid address operand in MEM_REF");
3208 	  debug_generic_stmt (expr);
3209 	  return true;
3210 	}
3211       if (!poly_int_tree_p (TREE_OPERAND (expr, 1))
3212 	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3213 	{
3214 	  error ("invalid offset operand in MEM_REF");
3215 	  debug_generic_stmt (expr);
3216 	  return true;
3217 	}
3218       if (MR_DEPENDENCE_CLIQUE (expr) != 0
3219 	  && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
3220 	{
3221 	  error ("invalid clique in MEM_REF");
3222 	  debug_generic_stmt (expr);
3223 	  return true;
3224 	}
3225     }
3226   else if (TREE_CODE (expr) == TARGET_MEM_REF)
3227     {
3228       if (!TMR_BASE (expr)
3229 	  || !is_gimple_mem_ref_addr (TMR_BASE (expr))
3230 	  || (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
3231 	      && verify_address (TMR_BASE (expr), false)))
3232 	{
3233 	  error ("invalid address operand in TARGET_MEM_REF");
3234 	  return true;
3235 	}
3236       if (!TMR_OFFSET (expr)
3237 	  || !poly_int_tree_p (TMR_OFFSET (expr))
3238 	  || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3239 	{
3240 	  error ("invalid offset operand in TARGET_MEM_REF");
3241 	  debug_generic_stmt (expr);
3242 	  return true;
3243 	}
3244       if (MR_DEPENDENCE_CLIQUE (expr) != 0
3245 	  && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
3246 	{
3247 	  error ("invalid clique in TARGET_MEM_REF");
3248 	  debug_generic_stmt (expr);
3249 	  return true;
3250 	}
3251     }
3252   else if (TREE_CODE (expr) == INDIRECT_REF)
3253     {
3254       error ("INDIRECT_REF in gimple IL");
3255       debug_generic_stmt (expr);
3256       return true;
3257     }
3258 
3259   return ((require_lvalue || !is_gimple_min_invariant (expr))
3260 	  && verify_types_in_gimple_min_lval (expr));
3261 }
3262 
3263 /* Returns true if there is one pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3264    list of pointer-to types that is trivially convertible to DEST.  */
3265 
3266 static bool
3267 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3268 {
3269   tree src;
3270 
3271   if (!TYPE_POINTER_TO (src_obj))
3272     return true;
3273 
3274   for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3275     if (useless_type_conversion_p (dest, src))
3276       return true;
3277 
3278   return false;
3279 }
3280 
3281 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3282    from TYPE2 can be handled by FIXED_CONVERT_EXPR.  */
3283 
3284 static bool
3285 valid_fixed_convert_types_p (tree type1, tree type2)
3286 {
3287   return (FIXED_POINT_TYPE_P (type1)
3288 	  && (INTEGRAL_TYPE_P (type2)
3289 	      || SCALAR_FLOAT_TYPE_P (type2)
3290 	      || FIXED_POINT_TYPE_P (type2)));
3291 }
3292 
3293 /* Verify the contents of a GIMPLE_CALL STMT.  Returns true when there
3294    is a problem, otherwise false.  */
3295 
3296 static bool
3297 verify_gimple_call (gcall *stmt)
3298 {
3299   tree fn = gimple_call_fn (stmt);
3300   tree fntype, fndecl;
3301   unsigned i;
3302 
3303   if (gimple_call_internal_p (stmt))
3304     {
3305       if (fn)
3306 	{
3307 	  error ("gimple call has two targets");
3308 	  debug_generic_stmt (fn);
3309 	  return true;
3310 	}
3311     }
3312   else
3313     {
3314       if (!fn)
3315 	{
3316 	  error ("gimple call has no target");
3317 	  return true;
3318 	}
3319     }
3320 
3321   if (fn && !is_gimple_call_addr (fn))
3322     {
3323       error ("invalid function in gimple call");
3324       debug_generic_stmt (fn);
3325       return true;
3326     }
3327 
3328   if (fn
3329       && (!POINTER_TYPE_P (TREE_TYPE (fn))
3330 	  || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3331 	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3332     {
3333       error ("non-function in gimple call");
3334       return true;
3335     }
3336 
3337    fndecl = gimple_call_fndecl (stmt);
3338    if (fndecl
3339        && TREE_CODE (fndecl) == FUNCTION_DECL
3340        && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3341        && !DECL_PURE_P (fndecl)
3342        && !TREE_READONLY (fndecl))
3343      {
3344        error ("invalid pure const state for function");
3345        return true;
3346      }
3347 
3348   tree lhs = gimple_call_lhs (stmt);
3349   if (lhs
3350       && (!is_gimple_lvalue (lhs)
3351 	  || verify_types_in_gimple_reference (lhs, true)))
3352     {
3353       error ("invalid LHS in gimple call");
3354       return true;
3355     }
3356 
3357   if (gimple_call_ctrl_altering_p (stmt)
3358       && gimple_call_noreturn_p (stmt)
3359       && should_remove_lhs_p (lhs))
3360     {
3361       error ("LHS in noreturn call");
3362       return true;
3363     }
3364 
3365   fntype = gimple_call_fntype (stmt);
3366   if (fntype
3367       && lhs
3368       && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3369       /* ???  At least C++ misses conversions at assignments from
3370 	 void * call results.
3371 	 For now simply allow arbitrary pointer type conversions.  */
3372       && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3373 	   && POINTER_TYPE_P (TREE_TYPE (fntype))))
3374     {
3375       error ("invalid conversion in gimple call");
3376       debug_generic_stmt (TREE_TYPE (lhs));
3377       debug_generic_stmt (TREE_TYPE (fntype));
3378       return true;
3379     }
3380 
3381   if (gimple_call_chain (stmt)
3382       && !is_gimple_val (gimple_call_chain (stmt)))
3383     {
3384       error ("invalid static chain in gimple call");
3385       debug_generic_stmt (gimple_call_chain (stmt));
3386       return true;
3387     }
3388 
3389   /* If there is a static chain argument, the call should either be
3390      indirect, or the decl should have DECL_STATIC_CHAIN set.  */
3391   if (gimple_call_chain (stmt)
3392       && fndecl
3393       && !DECL_STATIC_CHAIN (fndecl))
3394     {
3395       error ("static chain with function that doesn%'t use one");
3396       return true;
3397     }
3398 
3399   if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3400     {
3401       switch (DECL_FUNCTION_CODE (fndecl))
3402 	{
3403 	case BUILT_IN_UNREACHABLE:
3404 	case BUILT_IN_TRAP:
3405 	  if (gimple_call_num_args (stmt) > 0)
3406 	    {
3407 	      /* Built-in unreachable with parameters might not be caught by
3408 		 undefined behavior sanitizer.  Front-ends do check that users do
3409 		 not call them that way, but we also produce calls to
3410 		 __builtin_unreachable internally, for example when IPA figures
3411 		 out a call cannot happen in a legal program.  In such cases,
3412 		 we must make sure arguments are stripped off.  */
3413 	      error ("%<__builtin_unreachable%> or %<__builtin_trap%> call "
3414 		     "with arguments");
3415 	      return true;
3416 	    }
3417 	  break;
3418 	default:
3419 	  break;
3420 	}
3421     }
3422 
3423   /* ???  The C frontend passes unpromoted arguments in case it
3424      didn't see a function declaration before the call.  So for now
3425      leave the call arguments mostly unverified.  Once we gimplify
3426      unit-at-a-time we have a chance to fix this.  */
3427 
3428   for (i = 0; i < gimple_call_num_args (stmt); ++i)
3429     {
3430       tree arg = gimple_call_arg (stmt, i);
3431       if ((is_gimple_reg_type (TREE_TYPE (arg))
3432 	   && !is_gimple_val (arg))
3433 	  || (!is_gimple_reg_type (TREE_TYPE (arg))
3434 	      && !is_gimple_lvalue (arg)))
3435 	{
3436 	  error ("invalid argument to gimple call");
3437 	  debug_generic_expr (arg);
3438 	  return true;
3439 	}
3440     }
3441 
3442   return false;
3443 }
3444 
3445 /* Verifies the gimple comparison with the result type TYPE and
3446    the operands OP0 and OP1, comparison code is CODE.  */
3447 
3448 static bool
3449 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3450 {
3451   tree op0_type = TREE_TYPE (op0);
3452   tree op1_type = TREE_TYPE (op1);
3453 
3454   if (!is_gimple_val (op0) || !is_gimple_val (op1))
3455     {
3456       error ("invalid operands in gimple comparison");
3457       return true;
3458     }
3459 
3460   /* For comparisons there is no separate operation type recording the
3461      effective type the comparison is carried out in.  Instead we
3462      require that either the first operand is trivially convertible
3463      into the second, or the other way around.  Because we
3464      special-case pointers to void we also allow comparisons of
3465      pointers with the same mode.  */
3466   if (!useless_type_conversion_p (op0_type, op1_type)
3467       && !useless_type_conversion_p (op1_type, op0_type)
3468       && (!POINTER_TYPE_P (op0_type)
3469 	  || !POINTER_TYPE_P (op1_type)
3470 	  || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3471     {
3472       error ("mismatching comparison operand types");
3473       debug_generic_expr (op0_type);
3474       debug_generic_expr (op1_type);
3475       return true;
3476     }
3477 
3478   /* The resulting type of a comparison may be an effective boolean type.  */
3479   if (INTEGRAL_TYPE_P (type)
3480       && (TREE_CODE (type) == BOOLEAN_TYPE
3481 	  || TYPE_PRECISION (type) == 1))
3482     {
3483       if ((TREE_CODE (op0_type) == VECTOR_TYPE
3484 	   || TREE_CODE (op1_type) == VECTOR_TYPE)
3485 	  && code != EQ_EXPR && code != NE_EXPR
3486 	  && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3487 	  && !VECTOR_INTEGER_TYPE_P (op0_type))
3488 	{
3489 	  error ("unsupported operation or type for vector comparison"
3490 		 " returning a boolean");
3491 	  debug_generic_expr (op0_type);
3492 	  debug_generic_expr (op1_type);
3493 	  return true;
3494         }
3495     }
3496   /* Or a boolean vector type with the same element count
3497      as the comparison operand types.  */
3498   else if (TREE_CODE (type) == VECTOR_TYPE
3499 	   && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3500     {
3501       if (TREE_CODE (op0_type) != VECTOR_TYPE
3502 	  || TREE_CODE (op1_type) != VECTOR_TYPE)
3503         {
3504           error ("non-vector operands in vector comparison");
3505           debug_generic_expr (op0_type);
3506           debug_generic_expr (op1_type);
3507           return true;
3508         }
3509 
3510       if (maybe_ne (TYPE_VECTOR_SUBPARTS (type),
3511 		    TYPE_VECTOR_SUBPARTS (op0_type)))
3512         {
3513           error ("invalid vector comparison resulting type");
3514           debug_generic_expr (type);
3515           return true;
3516         }
3517     }
3518   else
3519     {
3520       error ("bogus comparison result type");
3521       debug_generic_expr (type);
3522       return true;
3523     }
3524 
3525   return false;
3526 }
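
/* As an example of the checks in verify_gimple_comparison above (SSA
   names made up), the comparison

     _1 = a_2(D) < b_3(D);

   is valid when _1 has a boolean (or single-bit integral, or matching
   boolean vector) type and a_2 and b_3 share a type or are pointers
   with the same mode; comparing, say, an int directly against a float
   trips the operand type check.  */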
3527 
3528 /* Verify a gimple assignment statement STMT with a unary rhs.
3529    Returns true if anything is wrong.  */
3530 
3531 static bool
3532 verify_gimple_assign_unary (gassign *stmt)
3533 {
3534   enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3535   tree lhs = gimple_assign_lhs (stmt);
3536   tree lhs_type = TREE_TYPE (lhs);
3537   tree rhs1 = gimple_assign_rhs1 (stmt);
3538   tree rhs1_type = TREE_TYPE (rhs1);
3539 
3540   if (!is_gimple_reg (lhs))
3541     {
3542       error ("non-register as LHS of unary operation");
3543       return true;
3544     }
3545 
3546   if (!is_gimple_val (rhs1))
3547     {
3548       error ("invalid operand in unary operation");
3549       return true;
3550     }
3551 
3552   /* First handle conversions.  */
3553   switch (rhs_code)
3554     {
3555     CASE_CONVERT:
3556       {
3557 	/* Allow conversions from pointer type to integral type only if
3558 	   there is no sign or zero extension involved.
3559 	   For targets where the precision of ptrofftype doesn't match that
3560 	   of pointers we need to allow arbitrary conversions to ptrofftype.  */
3561 	if ((POINTER_TYPE_P (lhs_type)
3562 	     && INTEGRAL_TYPE_P (rhs1_type))
3563 	    || (POINTER_TYPE_P (rhs1_type)
3564 		&& INTEGRAL_TYPE_P (lhs_type)
3565 		&& (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3566 		    || ptrofftype_p (lhs_type))))
3567 	  return false;
3568 
3569 	/* Allow conversion from integral to offset type and vice versa.  */
3570 	if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3571 	     && INTEGRAL_TYPE_P (rhs1_type))
3572 	    || (INTEGRAL_TYPE_P (lhs_type)
3573 		&& TREE_CODE (rhs1_type) == OFFSET_TYPE))
3574 	  return false;
3575 
3576 	/* Otherwise assert we are converting between types of the
3577 	   same kind.  */
3578 	if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3579 	  {
3580 	    error ("invalid types in nop conversion");
3581 	    debug_generic_expr (lhs_type);
3582 	    debug_generic_expr (rhs1_type);
3583 	    return true;
3584 	  }
3585 
3586 	return false;
3587       }
3588 
3589     case ADDR_SPACE_CONVERT_EXPR:
3590       {
3591 	if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3592 	    || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3593 		== TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3594 	  {
3595 	    error ("invalid types in address space conversion");
3596 	    debug_generic_expr (lhs_type);
3597 	    debug_generic_expr (rhs1_type);
3598 	    return true;
3599 	  }
3600 
3601 	return false;
3602       }
3603 
3604     case FIXED_CONVERT_EXPR:
3605       {
3606 	if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3607 	    && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3608 	  {
3609 	    error ("invalid types in fixed-point conversion");
3610 	    debug_generic_expr (lhs_type);
3611 	    debug_generic_expr (rhs1_type);
3612 	    return true;
3613 	  }
3614 
3615 	return false;
3616       }
3617 
3618     case FLOAT_EXPR:
3619       {
3620 	if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3621 	    && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3622 	        || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3623 	  {
3624 	    error ("invalid types in conversion to floating point");
3625 	    debug_generic_expr (lhs_type);
3626 	    debug_generic_expr (rhs1_type);
3627 	    return true;
3628 	  }
3629 
3630         return false;
3631       }
3632 
3633     case FIX_TRUNC_EXPR:
3634       {
3635         if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3636             && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3637                 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3638 	  {
3639 	    error ("invalid types in conversion to integer");
3640 	    debug_generic_expr (lhs_type);
3641 	    debug_generic_expr (rhs1_type);
3642 	    return true;
3643 	  }
3644 
3645         return false;
3646       }
3647 
3648     case VEC_UNPACK_HI_EXPR:
3649     case VEC_UNPACK_LO_EXPR:
3650     case VEC_UNPACK_FLOAT_HI_EXPR:
3651     case VEC_UNPACK_FLOAT_LO_EXPR:
3652     case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3653     case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3654       if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3655           || TREE_CODE (lhs_type) != VECTOR_TYPE
3656           || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3657 	      && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type)))
3658           || (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3659 	      && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3660 	  || ((rhs_code == VEC_UNPACK_HI_EXPR
3661 	       || rhs_code == VEC_UNPACK_LO_EXPR)
3662 	      && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3663 		  != INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3664 	  || ((rhs_code == VEC_UNPACK_FLOAT_HI_EXPR
3665 	       || rhs_code == VEC_UNPACK_FLOAT_LO_EXPR)
3666 	      && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3667 		  || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))))
3668 	  || ((rhs_code == VEC_UNPACK_FIX_TRUNC_HI_EXPR
3669 	       || rhs_code == VEC_UNPACK_FIX_TRUNC_LO_EXPR)
3670 	      && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3671 		  || SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))))
3672 	  || (maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
3673 			2 * GET_MODE_SIZE (element_mode (rhs1_type)))
3674 	      && (!VECTOR_BOOLEAN_TYPE_P (lhs_type)
3675 		  || !VECTOR_BOOLEAN_TYPE_P (rhs1_type)))
3676 	  || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (lhs_type),
3677 		       TYPE_VECTOR_SUBPARTS (rhs1_type)))
3678 	{
3679 	  error ("type mismatch in vector unpack expression");
3680 	  debug_generic_expr (lhs_type);
3681 	  debug_generic_expr (rhs1_type);
3682 	  return true;
3683         }
3684 
3685       return false;
3686 
3687     case NEGATE_EXPR:
3688     case ABS_EXPR:
3689     case BIT_NOT_EXPR:
3690     case PAREN_EXPR:
3691     case CONJ_EXPR:
3692       break;
3693 
3694     case ABSU_EXPR:
3695       if (!ANY_INTEGRAL_TYPE_P (lhs_type)
3696 	  || !TYPE_UNSIGNED (lhs_type)
3697 	  || !ANY_INTEGRAL_TYPE_P (rhs1_type)
3698 	  || TYPE_UNSIGNED (rhs1_type)
3699 	  || element_precision (lhs_type) != element_precision (rhs1_type))
3700 	{
3701 	  error ("invalid types for ABSU_EXPR");
3702 	  debug_generic_expr (lhs_type);
3703 	  debug_generic_expr (rhs1_type);
3704 	  return true;
3705 	}
3706       return false;
3707 
3708     case VEC_DUPLICATE_EXPR:
3709       if (TREE_CODE (lhs_type) != VECTOR_TYPE
3710 	  || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
3711 	{
3712 	  error ("vec_duplicate should be from a scalar to a like vector");
3713 	  debug_generic_expr (lhs_type);
3714 	  debug_generic_expr (rhs1_type);
3715 	  return true;
3716 	}
3717       return false;
3718 
3719     default:
3720       gcc_unreachable ();
3721     }
3722 
3723   /* For the remaining codes assert there is no conversion involved.  */
3724   if (!useless_type_conversion_p (lhs_type, rhs1_type))
3725     {
3726       error ("non-trivial conversion in unary operation");
3727       debug_generic_expr (lhs_type);
3728       debug_generic_expr (rhs1_type);
3729       return true;
3730     }
3731 
3732   return false;
3733 }
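
/* As an example of the CASE_CONVERT rules in verify_gimple_assign_unary
   above (names made up), on a typical 64-bit target

     _1 = (long) p_2(D);

   is a valid pointer-to-integer conversion because no extension is
   involved, whereas converting p_2 to a wider integer type is not;
   the extension has to be expressed separately from an integer of
   pointer width.  */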
3734 
3735 /* Verify a gimple assignment statement STMT with a binary rhs.
3736    Returns true if anything is wrong.  */
3737 
3738 static bool
3739 verify_gimple_assign_binary (gassign *stmt)
3740 {
3741   enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3742   tree lhs = gimple_assign_lhs (stmt);
3743   tree lhs_type = TREE_TYPE (lhs);
3744   tree rhs1 = gimple_assign_rhs1 (stmt);
3745   tree rhs1_type = TREE_TYPE (rhs1);
3746   tree rhs2 = gimple_assign_rhs2 (stmt);
3747   tree rhs2_type = TREE_TYPE (rhs2);
3748 
3749   if (!is_gimple_reg (lhs))
3750     {
3751       error ("non-register as LHS of binary operation");
3752       return true;
3753     }
3754 
3755   if (!is_gimple_val (rhs1)
3756       || !is_gimple_val (rhs2))
3757     {
3758       error ("invalid operands in binary operation");
3759       return true;
3760     }
3761 
3762   /* First handle operations that involve different types.  */
3763   switch (rhs_code)
3764     {
3765     case COMPLEX_EXPR:
3766       {
3767 	if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3768 	    || !(INTEGRAL_TYPE_P (rhs1_type)
3769 	         || SCALAR_FLOAT_TYPE_P (rhs1_type))
3770 	    || !(INTEGRAL_TYPE_P (rhs2_type)
3771 	         || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3772 	  {
3773 	    error ("type mismatch in complex expression");
3774 	    debug_generic_expr (lhs_type);
3775 	    debug_generic_expr (rhs1_type);
3776 	    debug_generic_expr (rhs2_type);
3777 	    return true;
3778 	  }
3779 
3780 	return false;
3781       }
3782 
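    /* As an example for the shift and rotate checks below (names made
       up), the statement

	 _1 = x_2(D) << n_3(D);

       is valid when _1 and x_2 share an integral or integer vector
       type; the count n_3 may have any integral type, or a matching
       integer vector type for vector-by-vector shifts.  */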
3783     case LSHIFT_EXPR:
3784     case RSHIFT_EXPR:
3785     case LROTATE_EXPR:
3786     case RROTATE_EXPR:
3787       {
3788 	/* Shifts and rotates are ok on integral types, fixed point
3789 	   types and integer vector types.  */
3790 	if ((!INTEGRAL_TYPE_P (rhs1_type)
3791 	     && !FIXED_POINT_TYPE_P (rhs1_type)
3792 	     && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3793 		  && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3794 	    || (!INTEGRAL_TYPE_P (rhs2_type)
3795 		/* Vector shifts of vectors are also ok.  */
3796 		&& !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3797 		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3798 		     && TREE_CODE (rhs2_type) == VECTOR_TYPE
3799 		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3800 	    || !useless_type_conversion_p (lhs_type, rhs1_type))
3801 	  {
3802 	    error ("type mismatch in shift expression");
3803 	    debug_generic_expr (lhs_type);
3804 	    debug_generic_expr (rhs1_type);
3805 	    debug_generic_expr (rhs2_type);
3806 	    return true;
3807 	  }
3808 
3809 	return false;
3810       }
3811 
3812     case WIDEN_LSHIFT_EXPR:
3813       {
3814         if (!INTEGRAL_TYPE_P (lhs_type)
3815             || !INTEGRAL_TYPE_P (rhs1_type)
3816             || TREE_CODE (rhs2) != INTEGER_CST
3817             || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3818           {
3819             error ("type mismatch in widening shift expression");
3820             debug_generic_expr (lhs_type);
3821             debug_generic_expr (rhs1_type);
3822             debug_generic_expr (rhs2_type);
3823             return true;
3824           }
3825 
3826         return false;
3827       }
3828 
3829     case VEC_WIDEN_LSHIFT_HI_EXPR:
3830     case VEC_WIDEN_LSHIFT_LO_EXPR:
3831       {
3832         if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3833             || TREE_CODE (lhs_type) != VECTOR_TYPE
3834             || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3835             || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3836             || TREE_CODE (rhs2) != INTEGER_CST
3837             || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3838                 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3839           {
3840             error ("type mismatch in widening vector shift expression");
3841             debug_generic_expr (lhs_type);
3842             debug_generic_expr (rhs1_type);
3843             debug_generic_expr (rhs2_type);
3844             return true;
3845           }
3846 
3847         return false;
3848       }
3849 
3850     case PLUS_EXPR:
3851     case MINUS_EXPR:
3852       {
3853 	tree lhs_etype = lhs_type;
3854 	tree rhs1_etype = rhs1_type;
3855 	tree rhs2_etype = rhs2_type;
3856 	if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3857 	  {
3858 	    if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3859 		|| TREE_CODE (rhs2_type) != VECTOR_TYPE)
3860 	      {
3861 		error ("invalid non-vector operands to vector valued plus/minus");
3862 		return true;
3863 	      }
3864 	    lhs_etype = TREE_TYPE (lhs_type);
3865 	    rhs1_etype = TREE_TYPE (rhs1_type);
3866 	    rhs2_etype = TREE_TYPE (rhs2_type);
3867 	  }
3868 	if (POINTER_TYPE_P (lhs_etype)
3869 	    || POINTER_TYPE_P (rhs1_etype)
3870 	    || POINTER_TYPE_P (rhs2_etype))
3871 	  {
3872 	    error ("invalid (pointer) operands to plus/minus");
3873 	    return true;
3874 	  }
3875 
3876 	/* Continue with generic binary expression handling.  */
3877 	break;
3878       }
3879 
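    /* As an example for the two pointer cases below (names made up),
       with p_2 and q_3 of type char *

	 _1 = p_2(D) + 16;		(POINTER_PLUS_EXPR)
	 _4 = p_2(D) - q_3(D);		(POINTER_DIFF_EXPR)

       are valid when the offset 16 has sizetype and _4 has a signed
       integer type of pointer precision; an int-typed offset or an
       unsigned difference type is rejected.  */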
3880     case POINTER_PLUS_EXPR:
3881       {
3882 	if (!POINTER_TYPE_P (rhs1_type)
3883 	    || !useless_type_conversion_p (lhs_type, rhs1_type)
3884 	    || !ptrofftype_p (rhs2_type))
3885 	  {
3886 	    error ("type mismatch in pointer plus expression");
3887 	    debug_generic_stmt (lhs_type);
3888 	    debug_generic_stmt (rhs1_type);
3889 	    debug_generic_stmt (rhs2_type);
3890 	    return true;
3891 	  }
3892 
3893 	return false;
3894       }
3895 
3896     case POINTER_DIFF_EXPR:
3897       {
3898 	if (!POINTER_TYPE_P (rhs1_type)
3899 	    || !POINTER_TYPE_P (rhs2_type)
3900 	    /* Because we special-case pointers to void we allow difference
3901 	       of arbitrary pointers with the same mode.  */
3902 	    || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)
3903 	    || TREE_CODE (lhs_type) != INTEGER_TYPE
3904 	    || TYPE_UNSIGNED (lhs_type)
3905 	    || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
3906 	  {
3907 	    error ("type mismatch in pointer diff expression");
3908 	    debug_generic_stmt (lhs_type);
3909 	    debug_generic_stmt (rhs1_type);
3910 	    debug_generic_stmt (rhs2_type);
3911 	    return true;
3912 	  }
3913 
3914 	return false;
3915       }
3916 
3917     case TRUTH_ANDIF_EXPR:
3918     case TRUTH_ORIF_EXPR:
3919     case TRUTH_AND_EXPR:
3920     case TRUTH_OR_EXPR:
3921     case TRUTH_XOR_EXPR:
3922 
3923       gcc_unreachable ();
3924 
3925     case LT_EXPR:
3926     case LE_EXPR:
3927     case GT_EXPR:
3928     case GE_EXPR:
3929     case EQ_EXPR:
3930     case NE_EXPR:
3931     case UNORDERED_EXPR:
3932     case ORDERED_EXPR:
3933     case UNLT_EXPR:
3934     case UNLE_EXPR:
3935     case UNGT_EXPR:
3936     case UNGE_EXPR:
3937     case UNEQ_EXPR:
3938     case LTGT_EXPR:
3939       /* Comparisons are also binary, but the result type is not
3940 	 connected to the operand types.  */
3941       return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
3942 
3943     case WIDEN_MULT_EXPR:
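    /* Note that unlike the other cases this one emits no error message;
       it merely returns whether the widening multiplication is
       malformed.  */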
3944       if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3945 	return true;
3946       return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3947 	      || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3948 
3949     case WIDEN_SUM_EXPR:
3950       {
3951         if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
3952 	      || TREE_CODE (lhs_type) != VECTOR_TYPE)
3953 	     && ((!INTEGRAL_TYPE_P (rhs1_type)
3954 		  && !SCALAR_FLOAT_TYPE_P (rhs1_type))
3955 		 || (!INTEGRAL_TYPE_P (lhs_type)
3956 		     && !SCALAR_FLOAT_TYPE_P (lhs_type))))
3957 	    || !useless_type_conversion_p (lhs_type, rhs2_type)
3958 	    || maybe_lt (GET_MODE_SIZE (element_mode (rhs2_type)),
3959 			 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
3960           {
3961             error ("type mismatch in widening sum reduction");
3962             debug_generic_expr (lhs_type);
3963             debug_generic_expr (rhs1_type);
3964             debug_generic_expr (rhs2_type);
3965             return true;
3966           }
3967         return false;
3968       }
3969 
3970     case VEC_WIDEN_MULT_HI_EXPR:
3971     case VEC_WIDEN_MULT_LO_EXPR:
3972     case VEC_WIDEN_MULT_EVEN_EXPR:
3973     case VEC_WIDEN_MULT_ODD_EXPR:
3974       {
3975         if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3976             || TREE_CODE (lhs_type) != VECTOR_TYPE
3977 	    || !types_compatible_p (rhs1_type, rhs2_type)
3978 	    || maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
3979 			 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
3980           {
3981             error ("type mismatch in vector widening multiplication");
3982             debug_generic_expr (lhs_type);
3983             debug_generic_expr (rhs1_type);
3984             debug_generic_expr (rhs2_type);
3985             return true;
3986           }
3987         return false;
3988       }
3989 
3990     case VEC_PACK_TRUNC_EXPR:
3991       /* ???  We currently use VEC_PACK_TRUNC_EXPR to simply concatenate
3992 	 vector boolean types.  */
3993       if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
3994 	  && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
3995 	  && types_compatible_p (rhs1_type, rhs2_type)
3996 	  && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
3997 		       2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
3998 	return false;
3999 
4000       /* Fallthru.  */
4001     case VEC_PACK_SAT_EXPR:
4002     case VEC_PACK_FIX_TRUNC_EXPR:
4003       {
4004         if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4005             || TREE_CODE (lhs_type) != VECTOR_TYPE
4006             || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
4007 		  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
4008 		  && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
4009 		 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4010 		     == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
4011 	    || !types_compatible_p (rhs1_type, rhs2_type)
4012 	    || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4013 			 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4014 	    || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4015 			 TYPE_VECTOR_SUBPARTS (lhs_type)))
4016           {
4017             error ("type mismatch in vector pack expression");
4018             debug_generic_expr (lhs_type);
4019             debug_generic_expr (rhs1_type);
4020             debug_generic_expr (rhs2_type);
4021             return true;
4022           }
4023 
4024         return false;
4025       }
4026 
4027     case VEC_PACK_FLOAT_EXPR:
4028       if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4029 	  || TREE_CODE (lhs_type) != VECTOR_TYPE
4030 	  || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4031 	  || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))
4032 	  || !types_compatible_p (rhs1_type, rhs2_type)
4033 	  || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4034 		       2 * GET_MODE_SIZE (element_mode (lhs_type)))
4035 	  || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4036 		       TYPE_VECTOR_SUBPARTS (lhs_type)))
4037 	{
4038 	  error ("type mismatch in vector pack expression");
4039 	  debug_generic_expr (lhs_type);
4040 	  debug_generic_expr (rhs1_type);
4041 	  debug_generic_expr (rhs2_type);
4042 	  return true;
4043 	}
4044 
4045       return false;
4046 
4047     case MULT_EXPR:
4048     case MULT_HIGHPART_EXPR:
4049     case TRUNC_DIV_EXPR:
4050     case CEIL_DIV_EXPR:
4051     case FLOOR_DIV_EXPR:
4052     case ROUND_DIV_EXPR:
4053     case TRUNC_MOD_EXPR:
4054     case CEIL_MOD_EXPR:
4055     case FLOOR_MOD_EXPR:
4056     case ROUND_MOD_EXPR:
4057     case RDIV_EXPR:
4058     case EXACT_DIV_EXPR:
4059     case MIN_EXPR:
4060     case MAX_EXPR:
4061     case BIT_IOR_EXPR:
4062     case BIT_XOR_EXPR:
4063     case BIT_AND_EXPR:
4064       /* Continue with generic binary expression handling.  */
4065       break;
4066 
4067     case VEC_SERIES_EXPR:
4068       if (!useless_type_conversion_p (rhs1_type, rhs2_type))
4069 	{
4070 	  error ("type mismatch in series expression");
4071 	  debug_generic_expr (rhs1_type);
4072 	  debug_generic_expr (rhs2_type);
4073 	  return true;
4074 	}
4075       if (TREE_CODE (lhs_type) != VECTOR_TYPE
4076 	  || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
4077 	{
4078 	  error ("vector type expected in series expression");
4079 	  debug_generic_expr (lhs_type);
4080 	  return true;
4081 	}
4082       return false;
4083 
4084     default:
4085       gcc_unreachable ();
4086     }
4087 
4088   if (!useless_type_conversion_p (lhs_type, rhs1_type)
4089       || !useless_type_conversion_p (lhs_type, rhs2_type))
4090     {
4091       error ("type mismatch in binary expression");
4092       debug_generic_stmt (lhs_type);
4093       debug_generic_stmt (rhs1_type);
4094       debug_generic_stmt (rhs2_type);
4095       return true;
4096     }
4097 
4098   return false;
4099 }
4100 
4101 /* Verify a gimple assignment statement STMT with a ternary rhs.
4102    Returns true if anything is wrong.  */
4103 
4104 static bool
4105 verify_gimple_assign_ternary (gassign *stmt)
4106 {
4107   enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4108   tree lhs = gimple_assign_lhs (stmt);
4109   tree lhs_type = TREE_TYPE (lhs);
4110   tree rhs1 = gimple_assign_rhs1 (stmt);
4111   tree rhs1_type = TREE_TYPE (rhs1);
4112   tree rhs2 = gimple_assign_rhs2 (stmt);
4113   tree rhs2_type = TREE_TYPE (rhs2);
4114   tree rhs3 = gimple_assign_rhs3 (stmt);
4115   tree rhs3_type = TREE_TYPE (rhs3);
4116 
4117   if (!is_gimple_reg (lhs))
4118     {
4119       error ("non-register as LHS of ternary operation");
4120       return true;
4121     }
4122 
4123   if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
4124        ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4125       || !is_gimple_val (rhs2)
4126       || !is_gimple_val (rhs3))
4127     {
4128       error ("invalid operands in ternary operation");
4129       return true;
4130     }
4131 
4132   /* First handle operations that involve different types.  */
4133   switch (rhs_code)
4134     {
4135     case WIDEN_MULT_PLUS_EXPR:
4136     case WIDEN_MULT_MINUS_EXPR:
4137       if ((!INTEGRAL_TYPE_P (rhs1_type)
4138 	   && !FIXED_POINT_TYPE_P (rhs1_type))
4139 	  || !useless_type_conversion_p (rhs1_type, rhs2_type)
4140 	  || !useless_type_conversion_p (lhs_type, rhs3_type)
4141 	  || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4142 	  || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4143 	{
4144 	  error ("type mismatch in widening multiply-accumulate expression");
4145 	  debug_generic_expr (lhs_type);
4146 	  debug_generic_expr (rhs1_type);
4147 	  debug_generic_expr (rhs2_type);
4148 	  debug_generic_expr (rhs3_type);
4149 	  return true;
4150 	}
4151       break;
4152 
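    /* As an example for the two conditional cases below (names made
       up), the statement

	 _1 = _5 ? x_2(D) : y_3(D);

       is valid when x_2, y_3 and _1 share a type and, for
       VEC_COND_EXPR, when the condition _5 is a boolean vector with as
       many elements as the result.  */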
4153     case VEC_COND_EXPR:
4154       if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4155 	  || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4156 		       TYPE_VECTOR_SUBPARTS (lhs_type)))
4157 	{
4158 	  error ("the first argument of a VEC_COND_EXPR must be of a "
4159 		 "boolean vector type with the same number of elements "
4160 		 "as the result");
4161 	  debug_generic_expr (lhs_type);
4162 	  debug_generic_expr (rhs1_type);
4163 	  return true;
4164 	}
4165       /* Fallthrough.  */
4166     case COND_EXPR:
4167       if (!is_gimple_val (rhs1)
4168 	  && verify_gimple_comparison (TREE_TYPE (rhs1),
4169 				       TREE_OPERAND (rhs1, 0),
4170 				       TREE_OPERAND (rhs1, 1),
4171 				       TREE_CODE (rhs1)))
4172 	return true;
4173       if (!useless_type_conversion_p (lhs_type, rhs2_type)
4174 	  || !useless_type_conversion_p (lhs_type, rhs3_type))
4175 	{
4176 	  error ("type mismatch in conditional expression");
4177 	  debug_generic_expr (lhs_type);
4178 	  debug_generic_expr (rhs2_type);
4179 	  debug_generic_expr (rhs3_type);
4180 	  return true;
4181 	}
4182       break;
4183 
4184     case VEC_PERM_EXPR:
4185       if (!useless_type_conversion_p (lhs_type, rhs1_type)
4186 	  || !useless_type_conversion_p (lhs_type, rhs2_type))
4187 	{
4188 	  error ("type mismatch in vector permute expression");
4189 	  debug_generic_expr (lhs_type);
4190 	  debug_generic_expr (rhs1_type);
4191 	  debug_generic_expr (rhs2_type);
4192 	  debug_generic_expr (rhs3_type);
4193 	  return true;
4194 	}
4195 
4196       if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4197 	  || TREE_CODE (rhs2_type) != VECTOR_TYPE
4198 	  || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4199 	{
4200 	  error ("vector types expected in vector permute expression");
4201 	  debug_generic_expr (lhs_type);
4202 	  debug_generic_expr (rhs1_type);
4203 	  debug_generic_expr (rhs2_type);
4204 	  debug_generic_expr (rhs3_type);
4205 	  return true;
4206 	}
4207 
4208       if (maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4209 		    TYPE_VECTOR_SUBPARTS (rhs2_type))
4210 	  || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs2_type),
4211 		       TYPE_VECTOR_SUBPARTS (rhs3_type))
4212 	  || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs3_type),
4213 		       TYPE_VECTOR_SUBPARTS (lhs_type)))
4214 	{
4215 	  error ("vectors with different element counts found "
4216 		 "in vector permute expression");
4217 	  debug_generic_expr (lhs_type);
4218 	  debug_generic_expr (rhs1_type);
4219 	  debug_generic_expr (rhs2_type);
4220 	  debug_generic_expr (rhs3_type);
4221 	  return true;
4222 	}
4223 
4224       if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4225 	  || (TREE_CODE (rhs3) != VECTOR_CST
4226 	      && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
4227 				    (TREE_TYPE (rhs3_type)))
4228 		  != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
4229 				       (TREE_TYPE (rhs1_type))))))
4230 	{
4231 	  error ("invalid mask type in vector permute expression");
4232 	  debug_generic_expr (lhs_type);
4233 	  debug_generic_expr (rhs1_type);
4234 	  debug_generic_expr (rhs2_type);
4235 	  debug_generic_expr (rhs3_type);
4236 	  return true;
4237 	}
4238 
4239       return false;
4240 
4241     case SAD_EXPR:
4242       if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4243 	  || !useless_type_conversion_p (lhs_type, rhs3_type)
4244 	  || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4245 	       > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4246 	{
4247 	  error ("type mismatch in sad expression");
4248 	  debug_generic_expr (lhs_type);
4249 	  debug_generic_expr (rhs1_type);
4250 	  debug_generic_expr (rhs2_type);
4251 	  debug_generic_expr (rhs3_type);
4252 	  return true;
4253 	}
4254 
4255       if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4256 	  || TREE_CODE (rhs2_type) != VECTOR_TYPE
4257 	  || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4258 	{
4259 	  error ("vector types expected in sad expression");
4260 	  debug_generic_expr (lhs_type);
4261 	  debug_generic_expr (rhs1_type);
4262 	  debug_generic_expr (rhs2_type);
4263 	  debug_generic_expr (rhs3_type);
4264 	  return true;
4265 	}
4266 
4267       return false;
4268 
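    /* As an example for the checks below (names made up), inserting a
       32-bit scalar s_2 into a four-element vector of 32-bit ints v_3
       at bit position 64, i.e. into its third element, as in

	 _1 = BIT_INSERT_EXPR <v_3(D), s_2(D), 64>;

       is valid, while bit position 48 would fail the element-boundary
       check.  */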
4269     case BIT_INSERT_EXPR:
4270       if (! useless_type_conversion_p (lhs_type, rhs1_type))
4271 	{
4272 	  error ("type mismatch in BIT_INSERT_EXPR");
4273 	  debug_generic_expr (lhs_type);
4274 	  debug_generic_expr (rhs1_type);
4275 	  return true;
4276 	}
4277       if (! ((INTEGRAL_TYPE_P (rhs1_type)
4278 	      && INTEGRAL_TYPE_P (rhs2_type))
4279 	     || (VECTOR_TYPE_P (rhs1_type)
4280 		 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))))
4281 	{
4282 	  error ("type combination not allowed in BIT_INSERT_EXPR");
4283 	  debug_generic_expr (rhs1_type);
4284 	  debug_generic_expr (rhs2_type);
4285 	  return true;
4286 	}
4287       if (! tree_fits_uhwi_p (rhs3)
4288 	  || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4289 	  || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4290 	{
4291 	  error ("invalid position or size in BIT_INSERT_EXPR");
4292 	  return true;
4293 	}
4294       if (INTEGRAL_TYPE_P (rhs1_type)
4295 	  && !type_has_mode_precision_p (rhs1_type))
4296 	{
4297 	  error ("BIT_INSERT_EXPR into non-mode-precision operand");
4298 	  return true;
4299 	}
4300       if (INTEGRAL_TYPE_P (rhs1_type))
4301 	{
4302 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4303 	  if (bitpos >= TYPE_PRECISION (rhs1_type)
4304 	      || (bitpos + TYPE_PRECISION (rhs2_type)
4305 		  > TYPE_PRECISION (rhs1_type)))
4306 	    {
4307 	      error ("insertion out of range in BIT_INSERT_EXPR");
4308 	      return true;
4309 	    }
4310 	}
4311       else if (VECTOR_TYPE_P (rhs1_type))
4312 	{
4313 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4314 	  unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4315 	  if (bitpos % bitsize != 0)
4316 	    {
4317 	      error ("vector insertion not at element boundary");
4318 	      return true;
4319 	    }
4320 	}
4321       return false;
4322 
4323     case DOT_PROD_EXPR:
4324       {
4325         if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4326 	      || TREE_CODE (lhs_type) != VECTOR_TYPE)
4327 	     && ((!INTEGRAL_TYPE_P (rhs1_type)
4328 		  && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4329 		 || (!INTEGRAL_TYPE_P (lhs_type)
4330 		     && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4331 	    || !types_compatible_p (rhs1_type, rhs2_type)
4332 	    || !useless_type_conversion_p (lhs_type, rhs3_type)
4333 	    || maybe_lt (GET_MODE_SIZE (element_mode (rhs3_type)),
4334 			 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4335           {
4336             error ("type mismatch in dot product reduction");
4337             debug_generic_expr (lhs_type);
4338             debug_generic_expr (rhs1_type);
4339             debug_generic_expr (rhs2_type);
4340             return true;
4341           }
4342         return false;
4343       }
4344 
4345     case REALIGN_LOAD_EXPR:
4346       /* FIXME.  */
4347       return false;
4348 
4349     default:
4350       gcc_unreachable ();
4351     }
4352   return false;
4353 }
4354 
4355 /* Verify a gimple assignment statement STMT with a single rhs.
4356    Returns true if anything is wrong.  */
4357 
4358 static bool
4359 verify_gimple_assign_single (gassign *stmt)
4360 {
4361   enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4362   tree lhs = gimple_assign_lhs (stmt);
4363   tree lhs_type = TREE_TYPE (lhs);
4364   tree rhs1 = gimple_assign_rhs1 (stmt);
4365   tree rhs1_type = TREE_TYPE (rhs1);
4366   bool res = false;
4367 
4368   if (!useless_type_conversion_p (lhs_type, rhs1_type))
4369     {
4370       error ("non-trivial conversion at assignment");
4371       debug_generic_expr (lhs_type);
4372       debug_generic_expr (rhs1_type);
4373       return true;
4374     }
4375 
4376   if (gimple_clobber_p (stmt)
4377       && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4378     {
4379       error ("non-decl/MEM_REF LHS in clobber statement");
4380       debug_generic_expr (lhs);
4381       return true;
4382     }
4383 
4384   if (handled_component_p (lhs)
4385       || TREE_CODE (lhs) == MEM_REF
4386       || TREE_CODE (lhs) == TARGET_MEM_REF)
4387     res |= verify_types_in_gimple_reference (lhs, true);
4388 
4389   /* Special codes we cannot handle via their class.  */
4390   switch (rhs_code)
4391     {
4392     case ADDR_EXPR:
4393       {
4394 	tree op = TREE_OPERAND (rhs1, 0);
4395 	if (!is_gimple_addressable (op))
4396 	  {
4397 	    error ("invalid operand in unary expression");
4398 	    return true;
4399 	  }
4400 
4401 	/* Technically there is no longer a need for matching types, but
4402 	   gimple hygiene asks for this check.  In LTO we can end up
4403 	   combining incompatible units and thus with addresses of
4404 	   globals whose type changes to a common one.  */
4405 	if (!in_lto_p
4406 	    && !types_compatible_p (TREE_TYPE (op),
4407 				    TREE_TYPE (TREE_TYPE (rhs1)))
4408 	    && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4409 							  TREE_TYPE (op)))
4410 	  {
4411 	    error ("type mismatch in address expression");
4412 	    debug_generic_stmt (TREE_TYPE (rhs1));
4413 	    debug_generic_stmt (TREE_TYPE (op));
4414 	    return true;
4415 	  }
4416 
4417 	return (verify_address (rhs1, true)
4418 		|| verify_types_in_gimple_reference (op, true));
4419       }
4420 
4421     /* tcc_reference  */
4422     case INDIRECT_REF:
4423       error ("INDIRECT_REF in gimple IL");
4424       return true;
4425 
4426     case COMPONENT_REF:
4427     case BIT_FIELD_REF:
4428     case ARRAY_REF:
4429     case ARRAY_RANGE_REF:
4430     case VIEW_CONVERT_EXPR:
4431     case REALPART_EXPR:
4432     case IMAGPART_EXPR:
4433     case TARGET_MEM_REF:
4434     case MEM_REF:
4435       if (!is_gimple_reg (lhs)
4436 	  && is_gimple_reg_type (TREE_TYPE (lhs)))
4437 	{
4438 	  error ("invalid rhs for gimple memory store");
4439 	  debug_generic_stmt (lhs);
4440 	  debug_generic_stmt (rhs1);
4441 	  return true;
4442 	}
4443       return res || verify_types_in_gimple_reference (rhs1, false);
4444 
4445     /* tcc_constant  */
4446     case SSA_NAME:
4447     case INTEGER_CST:
4448     case REAL_CST:
4449     case FIXED_CST:
4450     case COMPLEX_CST:
4451     case VECTOR_CST:
4452     case STRING_CST:
4453       return res;
4454 
4455     /* tcc_declaration  */
4456     case CONST_DECL:
4457       return res;
4458     case VAR_DECL:
4459     case PARM_DECL:
4460       if (!is_gimple_reg (lhs)
4461 	  && !is_gimple_reg (rhs1)
4462 	  && is_gimple_reg_type (TREE_TYPE (lhs)))
4463 	{
4464 	  error ("invalid rhs for gimple memory store");
4465 	  debug_generic_stmt (lhs);
4466 	  debug_generic_stmt (rhs1);
4467 	  return true;
4468 	}
4469       return res;
4470 
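    /* As an example for the checks below, a CONSTRUCTOR { _1, _2 }
       with NULL indexes is a valid initializer for a four-element
       vector (the remaining elements are implicitly zero), while
       non-consecutive indexes or elements of a mismatched type are
       rejected.  */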
4471     case CONSTRUCTOR:
4472       if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4473 	{
4474 	  unsigned int i;
4475 	  tree elt_i, elt_v, elt_t = NULL_TREE;
4476 
4477 	  if (CONSTRUCTOR_NELTS (rhs1) == 0)
4478 	    return res;
4479 	  /* For vector CONSTRUCTORs we require that either the CONSTRUCTOR
4480 	     is empty, or it is a CONSTRUCTOR of smaller vector elements
4481 	     (then the element count must be correct to cover the whole
4482 	     outer vector and the index must be NULL on all elements), or
4483 	     it is a CONSTRUCTOR of scalar elements, where as an exception
4484 	     we allow a smaller number of elements (assuming zero filling)
4485 	     and consecutive indexes instead of NULL indexes (such
4486 	     CONSTRUCTORs can appear in the IL from FEs).  */
4487 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4488 	    {
4489 	      if (elt_t == NULL_TREE)
4490 		{
4491 		  elt_t = TREE_TYPE (elt_v);
4492 		  if (TREE_CODE (elt_t) == VECTOR_TYPE)
4493 		    {
4494 		      tree elt_t = TREE_TYPE (elt_v);
4495 		      if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4496 						      TREE_TYPE (elt_t)))
4497 			{
4498 			  error ("incorrect type of vector CONSTRUCTOR"
4499 				 " elements");
4500 			  debug_generic_stmt (rhs1);
4501 			  return true;
4502 			}
4503 		      else if (maybe_ne (CONSTRUCTOR_NELTS (rhs1)
4504 					 * TYPE_VECTOR_SUBPARTS (elt_t),
4505 					 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4506 			{
4507 			  error ("incorrect number of vector CONSTRUCTOR"
4508 				 " elements");
4509 			  debug_generic_stmt (rhs1);
4510 			  return true;
4511 			}
4512 		    }
4513 		  else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4514 						       elt_t))
4515 		    {
4516 		      error ("incorrect type of vector CONSTRUCTOR elements");
4517 		      debug_generic_stmt (rhs1);
4518 		      return true;
4519 		    }
4520 		  else if (maybe_gt (CONSTRUCTOR_NELTS (rhs1),
4521 				     TYPE_VECTOR_SUBPARTS (rhs1_type)))
4522 		    {
4523 		      error ("incorrect number of vector CONSTRUCTOR elements");
4524 		      debug_generic_stmt (rhs1);
4525 		      return true;
4526 		    }
4527 		}
4528 	      else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4529 		{
4530 		  error ("incorrect type of vector CONSTRUCTOR elements");
4531 		  debug_generic_stmt (rhs1);
4532 		  return true;
4533 		}
4534 	      if (elt_i != NULL_TREE
4535 		  && (TREE_CODE (elt_t) == VECTOR_TYPE
4536 		      || TREE_CODE (elt_i) != INTEGER_CST
4537 		      || compare_tree_int (elt_i, i) != 0))
4538 		{
4539 		  error ("vector CONSTRUCTOR with non-NULL element index");
4540 		  debug_generic_stmt (rhs1);
4541 		  return true;
4542 		}
4543 	      if (!is_gimple_val (elt_v))
4544 		{
4545 		  error ("vector CONSTRUCTOR element is not a GIMPLE value");
4546 		  debug_generic_stmt (rhs1);
4547 		  return true;
4548 		}
4549 	    }
4550 	}
4551       else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4552 	{
4553 	  error ("non-vector CONSTRUCTOR with elements");
4554 	  debug_generic_stmt (rhs1);
4555 	  return true;
4556 	}
4557       return res;
4558 
4559     case ASSERT_EXPR:
4560       /* FIXME.  */
4561       rhs1 = fold (ASSERT_EXPR_COND (rhs1));
4562       if (rhs1 == boolean_false_node)
4563 	{
4564 	  error ("ASSERT_EXPR with an always-false condition");
4565 	  debug_generic_stmt (rhs1);
4566 	  return true;
4567 	}
4568       break;
4569 
4570     case OBJ_TYPE_REF:
4571     case WITH_SIZE_EXPR:
4572       /* FIXME.  */
4573       return res;
4574 
4575     default:;
4576     }
4577 
4578   return res;
4579 }
4580 
4581 /* Verify the contents of a GIMPLE_ASSIGN STMT.  Returns true when there
4582    is a problem, otherwise false.  */
4583 
4584 static bool
4585 verify_gimple_assign (gassign *stmt)
4586 {
4587   switch (gimple_assign_rhs_class (stmt))
4588     {
4589     case GIMPLE_SINGLE_RHS:
4590       return verify_gimple_assign_single (stmt);
4591 
4592     case GIMPLE_UNARY_RHS:
4593       return verify_gimple_assign_unary (stmt);
4594 
4595     case GIMPLE_BINARY_RHS:
4596       return verify_gimple_assign_binary (stmt);
4597 
4598     case GIMPLE_TERNARY_RHS:
4599       return verify_gimple_assign_ternary (stmt);
4600 
4601     default:
4602       gcc_unreachable ();
4603     }
4604 }
4605 
4606 /* Verify the contents of a GIMPLE_RETURN STMT.  Returns true when there
4607    is a problem, otherwise false.  */
4608 
4609 static bool
4610 verify_gimple_return (greturn *stmt)
4611 {
4612   tree op = gimple_return_retval (stmt);
4613   tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4614 
4615   /* We cannot test for the presence of return values, as we do not
4616      fix up missing return values from the original source.  */
4617   if (op == NULL)
4618     return false;
4619 
4620   if (!is_gimple_val (op)
4621       && TREE_CODE (op) != RESULT_DECL)
4622     {
4623       error ("invalid operand in return statement");
4624       debug_generic_stmt (op);
4625       return true;
4626     }
4627 
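  /* A DECL_BY_REFERENCE result decl is a pointer to the declared
     return type, so look through one level of the type before the
     conversion check below.  */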
4628   if ((TREE_CODE (op) == RESULT_DECL
4629        && DECL_BY_REFERENCE (op))
4630       || (TREE_CODE (op) == SSA_NAME
4631 	  && SSA_NAME_VAR (op)
4632 	  && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4633 	  && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4634     op = TREE_TYPE (op);
4635 
4636   if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4637     {
4638       error ("invalid conversion in return statement");
4639       debug_generic_stmt (restype);
4640       debug_generic_stmt (TREE_TYPE (op));
4641       return true;
4642     }
4643 
4644   return false;
4645 }
4646 
4647 
4648 /* Verify the contents of a GIMPLE_GOTO STMT.  Returns true when there
4649    is a problem, otherwise false.  */
4650 
4651 static bool
4652 verify_gimple_goto (ggoto *stmt)
4653 {
4654   tree dest = gimple_goto_dest (stmt);
4655 
4656   /* ???  We have two canonical forms of direct goto destinations, a
4657      bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL.  */
4658   if (TREE_CODE (dest) != LABEL_DECL
4659       && (!is_gimple_val (dest)
4660 	  || !POINTER_TYPE_P (TREE_TYPE (dest))))
4661     {
4662       error ("goto destination is neither a label nor a pointer");
4663       return true;
4664     }
4665 
4666   return false;
4667 }
4668 
4669 /* Verify the contents of a GIMPLE_SWITCH STMT.  Returns true when there
4670    is a problem, otherwise false.  */
4671 
4672 static bool
4673 verify_gimple_switch (gswitch *stmt)
4674 {
4675   unsigned int i, n;
4676   tree elt, prev_upper_bound = NULL_TREE;
4677   tree index_type, elt_type = NULL_TREE;
4678 
4679   if (!is_gimple_val (gimple_switch_index (stmt)))
4680     {
4681       error ("invalid operand to switch statement");
4682       debug_generic_stmt (gimple_switch_index (stmt));
4683       return true;
4684     }
4685 
4686   index_type = TREE_TYPE (gimple_switch_index (stmt));
4687   if (! INTEGRAL_TYPE_P (index_type))
4688     {
4689       error ("non-integral type switch statement");
4690       debug_generic_expr (index_type);
4691       return true;
4692     }
4693 
4694   elt = gimple_switch_label (stmt, 0);
4695   if (CASE_LOW (elt) != NULL_TREE
4696       || CASE_HIGH (elt) != NULL_TREE
4697       || CASE_CHAIN (elt) != NULL_TREE)
4698     {
4699       error ("invalid default case label in switch statement");
4700       debug_generic_expr (elt);
4701       return true;
4702     }
4703 
4704   n = gimple_switch_num_labels (stmt);
4705   for (i = 1; i < n; i++)
4706     {
4707       elt = gimple_switch_label (stmt, i);
4708 
4709       if (CASE_CHAIN (elt))
4710 	{
4711 	  error ("invalid CASE_CHAIN");
4712 	  debug_generic_expr (elt);
4713 	  return true;
4714 	}
4715       if (! CASE_LOW (elt))
4716 	{
4717 	  error ("invalid case label in switch statement");
4718 	  debug_generic_expr (elt);
4719 	  return true;
4720 	}
4721       if (CASE_HIGH (elt)
4722 	  && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4723 	{
4724 	  error ("invalid case range in switch statement");
4725 	  debug_generic_expr (elt);
4726 	  return true;
4727 	}
4728 
4729       if (elt_type)
4730 	{
4731 	  if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4732 	      || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4733 	    {
4734 	      error ("type mismatch for case label in switch statement");
4735 	      debug_generic_expr (elt);
4736 	      return true;
4737 	    }
4738 	}
4739       else
4740 	{
4741 	  elt_type = TREE_TYPE (CASE_LOW (elt));
4742 	  if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4743 	    {
4744 	      error ("type precision mismatch in switch statement");
4745 	      return true;
4746 	    }
4747 	}
4748 
4749       if (prev_upper_bound)
4750 	{
4751 	  if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4752 	    {
4753 	      error ("case labels not sorted in switch statement");
4754 	      return true;
4755 	    }
4756 	}
4757 
4758       prev_upper_bound = CASE_HIGH (elt);
4759       if (! prev_upper_bound)
4760 	prev_upper_bound = CASE_LOW (elt);
4761     }
4762 
4763   return false;
4764 }
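
/* As an example of what verify_gimple_switch above enforces, a valid
   switch looks (schematically) like

     switch (i_1) <default: L0, case 1: L1, case 5 ... 7: L2>

   with the default label first, every other label carrying a low
   bound, ranges satisfying low < high, all labels of a single type,
   and the labels sorted in increasing, non-overlapping order.  */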
4765 
4766 /* Verify a gimple debug statement STMT.
4767    Returns true if anything is wrong.  */
4768 
4769 static bool
4770 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4771 {
4772   /* There isn't much that could be wrong in a gimple debug stmt.  A
4773      gimple debug bind stmt, for example, maps a tree (usually a
4774      VAR_DECL or a PARM_DECL, but possibly some scalarized component
4775      or member of an aggregate type) to another tree that can be an
4776      arbitrary expression.  These stmts expand into debug
4777      insns, and are converted to debug notes by var-tracking.c.  */
4778   return false;
4779 }
4780 
4781 /* Verify a gimple label statement STMT.
4782    Returns true if anything is wrong.  */
4783 
4784 static bool
4785 verify_gimple_label (glabel *stmt)
4786 {
4787   tree decl = gimple_label_label (stmt);
4788   int uid;
4789   bool err = false;
4790 
4791   if (TREE_CODE (decl) != LABEL_DECL)
4792     return true;
4793   if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4794       && DECL_CONTEXT (decl) != current_function_decl)
4795     {
4796       error ("label%'s context is not the current function decl");
4797       err |= true;
4798     }
4799 
4800   uid = LABEL_DECL_UID (decl);
4801   if (cfun->cfg
4802       && (uid == -1
4803 	  || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4804     {
4805       error ("incorrect entry in label_to_block_map");
4806       err |= true;
4807     }
4808 
4809   uid = EH_LANDING_PAD_NR (decl);
4810   if (uid)
4811     {
4812       eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4813       if (decl != lp->post_landing_pad)
4814 	{
4815 	  error ("incorrect setting of landing pad number");
4816 	  err |= true;
4817 	}
4818     }
4819 
4820   return err;
4821 }
4822 
4823 /* Verify a gimple cond statement STMT.
4824    Returns true if anything is wrong.  */
4825 
4826 static bool
4827 verify_gimple_cond (gcond *stmt)
4828 {
4829   if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4830     {
4831       error ("invalid comparison code in gimple cond");
4832       return true;
4833     }
4834   if (!(!gimple_cond_true_label (stmt)
4835 	|| TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4836       || !(!gimple_cond_false_label (stmt)
4837 	   || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4838     {
4839       error ("invalid labels in gimple cond");
4840       return true;
4841     }
4842 
4843   return verify_gimple_comparison (boolean_type_node,
4844 				   gimple_cond_lhs (stmt),
4845 				   gimple_cond_rhs (stmt),
4846 				   gimple_cond_code (stmt));
4847 }
4848 
4849 /* Verify the GIMPLE statement STMT.  Returns true if there is an
4850    error, otherwise false.  */
4851 
4852 static bool
4853 verify_gimple_stmt (gimple *stmt)
4854 {
4855   switch (gimple_code (stmt))
4856     {
4857     case GIMPLE_ASSIGN:
4858       return verify_gimple_assign (as_a <gassign *> (stmt));
4859 
4860     case GIMPLE_LABEL:
4861       return verify_gimple_label (as_a <glabel *> (stmt));
4862 
4863     case GIMPLE_CALL:
4864       return verify_gimple_call (as_a <gcall *> (stmt));
4865 
4866     case GIMPLE_COND:
4867       return verify_gimple_cond (as_a <gcond *> (stmt));
4868 
4869     case GIMPLE_GOTO:
4870       return verify_gimple_goto (as_a <ggoto *> (stmt));
4871 
4872     case GIMPLE_SWITCH:
4873       return verify_gimple_switch (as_a <gswitch *> (stmt));
4874 
4875     case GIMPLE_RETURN:
4876       return verify_gimple_return (as_a <greturn *> (stmt));
4877 
4878     case GIMPLE_ASM:
4879       return false;
4880 
4881     case GIMPLE_TRANSACTION:
4882       return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4883 
4884     /* Tuples that do not have tree operands.  */
4885     case GIMPLE_NOP:
4886     case GIMPLE_PREDICT:
4887     case GIMPLE_RESX:
4888     case GIMPLE_EH_DISPATCH:
4889     case GIMPLE_EH_MUST_NOT_THROW:
4890       return false;
4891 
4892     CASE_GIMPLE_OMP:
4893       /* OpenMP directives are validated by the FE and never operated
4894 	 on by the optimizers.  Furthermore, GIMPLE_OMP_FOR may contain
4895 	 non-gimple expressions when the main index variable has had
4896 	 its address taken.  This does not affect the loop itself
4897 	 because the header of a GIMPLE_OMP_FOR is merely used to determine
4898 	 how to set up the parallel iteration.  */
4899       return false;
4900 
4901     case GIMPLE_DEBUG:
4902       return verify_gimple_debug (stmt);
4903 
4904     default:
4905       gcc_unreachable ();
4906     }
4907 }
4908 
4909 /* Verify the contents of a GIMPLE_PHI.  Returns true if there is a problem,
4910    and false otherwise.  */
4911 
4912 static bool
4913 verify_gimple_phi (gphi *phi)
4914 {
4915   bool err = false;
4916   unsigned i;
4917   tree phi_result = gimple_phi_result (phi);
4918   bool virtual_p;
4919 
4920   if (!phi_result)
4921     {
4922       error ("invalid PHI result");
4923       return true;
4924     }
4925 
4926   virtual_p = virtual_operand_p (phi_result);
4927   if (TREE_CODE (phi_result) != SSA_NAME
4928       || (virtual_p
4929 	  && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4930     {
4931       error ("invalid PHI result");
4932       err = true;
4933     }
4934 
4935   for (i = 0; i < gimple_phi_num_args (phi); i++)
4936     {
4937       tree t = gimple_phi_arg_def (phi, i);
4938 
4939       if (!t)
4940 	{
4941 	  error ("missing PHI def");
4942 	  err |= true;
4943 	  continue;
4944 	}
4945       /* Addressable variables do have SSA_NAMEs but they
4946 	 are not considered gimple values.  */
4947       else if ((TREE_CODE (t) == SSA_NAME
4948 		&& virtual_p != virtual_operand_p (t))
4949 	       || (virtual_p
4950 		   && (TREE_CODE (t) != SSA_NAME
4951 		       || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4952 	       || (!virtual_p
4953 		   && !is_gimple_val (t)))
4954 	{
4955 	  error ("invalid PHI argument");
4956 	  debug_generic_expr (t);
4957 	  err |= true;
4958 	}
4959 #ifdef ENABLE_TYPES_CHECKING
4960       if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4961 	{
4962 	  error ("incompatible types in PHI argument %u", i);
4963 	  debug_generic_stmt (TREE_TYPE (phi_result));
4964 	  debug_generic_stmt (TREE_TYPE (t));
4965 	  err |= true;
4966 	}
4967 #endif
4968     }
4969 
4970   return err;
4971 }
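
/* As an example of what verify_gimple_phi above enforces, a
   well-formed PHI node looks like

     x_1 = PHI <x_2(3), x_4(5)>

   where the result is an SSA name, every argument agrees with it in
   virtual-ness, and non-virtual arguments are GIMPLE values (virtual
   arguments must be names of the single virtual operand).  */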
4972 
4973 /* Verify the GIMPLE statements inside the sequence STMTS.  */
4974 
4975 static bool
4976 verify_gimple_in_seq_2 (gimple_seq stmts)
4977 {
4978   gimple_stmt_iterator ittr;
4979   bool err = false;
4980 
4981   for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4982     {
4983       gimple *stmt = gsi_stmt (ittr);
4984 
4985       switch (gimple_code (stmt))
4986         {
4987 	case GIMPLE_BIND:
4988 	  err |= verify_gimple_in_seq_2 (
4989                    gimple_bind_body (as_a <gbind *> (stmt)));
4990 	  break;
4991 
4992 	case GIMPLE_TRY:
4993 	  err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4994 	  err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4995 	  break;
4996 
4997 	case GIMPLE_EH_FILTER:
4998 	  err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4999 	  break;
5000 
5001 	case GIMPLE_EH_ELSE:
5002 	  {
5003 	    geh_else *eh_else = as_a <geh_else *> (stmt);
5004 	    err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
5005 	    err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
5006 	  }
5007 	  break;
5008 
5009 	case GIMPLE_CATCH:
5010 	  err |= verify_gimple_in_seq_2 (gimple_catch_handler (
5011 					   as_a <gcatch *> (stmt)));
5012 	  break;
5013 
5014 	case GIMPLE_TRANSACTION:
5015 	  err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
5016 	  break;
5017 
5018 	default:
5019 	  {
5020 	    bool err2 = verify_gimple_stmt (stmt);
5021 	    if (err2)
5022 	      debug_gimple_stmt (stmt);
5023 	    err |= err2;
5024 	  }
5025 	}
5026     }
5027 
5028   return err;
5029 }
5030 
5031 /* Verify the contents of a GIMPLE_TRANSACTION.  Returns true if there
5032    is a problem, otherwise false.  */
5033 
5034 static bool
5035 verify_gimple_transaction (gtransaction *stmt)
5036 {
5037   tree lab;
5038 
5039   lab = gimple_transaction_label_norm (stmt);
5040   if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5041     return true;
5042   lab = gimple_transaction_label_uninst (stmt);
5043   if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5044     return true;
5045   lab = gimple_transaction_label_over (stmt);
5046   if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5047     return true;
5048 
5049   return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5050 }
5051 
5052 
5053 /* Verify the GIMPLE statements inside the statement list STMTS.  */
5054 
5055 DEBUG_FUNCTION void
5056 verify_gimple_in_seq (gimple_seq stmts)
5057 {
5058   timevar_push (TV_TREE_STMT_VERIFY);
5059   if (verify_gimple_in_seq_2 (stmts))
5060     internal_error ("verify_gimple failed");
5061   timevar_pop (TV_TREE_STMT_VERIFY);
5062 }
5063 
5064 /* Return true when T can be shared.  */
5065 
5066 static bool
5067 tree_node_can_be_shared (tree t)
5068 {
5069   if (IS_TYPE_OR_DECL_P (t)
5070       || TREE_CODE (t) == SSA_NAME
5071       || TREE_CODE (t) == IDENTIFIER_NODE
5072       || TREE_CODE (t) == CASE_LABEL_EXPR
5073       || is_gimple_min_invariant (t))
5074     return true;
5075 
5076   if (t == error_mark_node)
5077     return true;
5078 
5079   return false;
5080 }
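
/* For example, an INTEGER_CST, a type or a decl may appear any number
   of times in the IL, whereas reference trees such as MEM_REF or
   COMPONENT_REF must not be shared between statements.  */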
5081 
5082 /* Called via walk_tree.  Verify tree sharing.  */
5083 
5084 static tree
5085 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5086 {
5087   hash_set<void *> *visited = (hash_set<void *> *) data;
5088 
5089   if (tree_node_can_be_shared (*tp))
5090     {
5091       *walk_subtrees = false;
5092       return NULL;
5093     }
5094 
5095   if (visited->add (*tp))
5096     return *tp;
5097 
5098   return NULL;
5099 }
5100 
5101 /* Called via walk_gimple_stmt.  Verify tree sharing.  */
5102 
5103 static tree
5104 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5105 {
5106   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5107   return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5108 }
5109 
5110 static bool eh_error_found;
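
/* Callback for traversing the EH throw table.  Complain about any
   statement recorded there that is no longer present in the IL, i.e.
   not in VISITED; failures are flagged in eh_error_found.  Always
   returns true so that the traversal continues.  */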
5111 bool
5112 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5113 			   hash_set<gimple *> *visited)
5114 {
5115   if (!visited->contains (stmt))
5116     {
5117       error ("dead STMT in EH table");
5118       debug_gimple_stmt (stmt);
5119       eh_error_found = true;
5120     }
5121   return true;
5122 }
5123 
5124 /* Verify that the block of location LOC is in BLOCKS.  */
5125 
5126 static bool
5127 verify_location (hash_set<tree> *blocks, location_t loc)
5128 {
5129   tree block = LOCATION_BLOCK (loc);
5130   if (block != NULL_TREE
5131       && !blocks->contains (block))
5132     {
5133       error ("location references block not in block tree");
5134       return true;
5135     }
5136   if (block != NULL_TREE)
5137     return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5138   return false;
5139 }
5140 
5141 /* Called via walk_tree.  Verify that expression locations carry no block.  */
5142 
5143 static tree
5144 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5145 {
5146   if (!EXPR_P (*tp))
5147     {
5148       *walk_subtrees = false;
5149       return NULL;
5150     }
5151 
5152   location_t loc = EXPR_LOCATION (*tp);
5153   if (LOCATION_BLOCK (loc) != NULL)
5154     return *tp;
5155 
5156   return NULL;
5157 }
5158 
5159 /* Called via walk_tree.  Verify locations of expressions.  */
5160 
5161 static tree
5162 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5163 {
5164   hash_set<tree> *blocks = (hash_set<tree> *) data;
5165   tree t = *tp;
5166 
5167   /* ???  This doesn't really belong here but there's no good place to
5168      stick this remainder of old verify_expr.  */
5169   /* ???  This barfs on debug stmts which contain binds to vars with
5170      different function context.  */
5171 #if 0
5172   if (VAR_P (t)
5173       || TREE_CODE (t) == PARM_DECL
5174       || TREE_CODE (t) == RESULT_DECL)
5175     {
5176       tree context = decl_function_context (t);
5177       if (context != cfun->decl
5178 	  && !SCOPE_FILE_SCOPE_P (context)
5179 	  && !TREE_STATIC (t)
5180 	  && !DECL_EXTERNAL (t))
5181 	{
5182 	  error ("local declaration from a different function");
5183 	  return t;
5184 	}
5185     }
5186 #endif
5187 
5188   if (VAR_P (t) && DECL_HAS_DEBUG_EXPR_P (t))
5189     {
5190       tree x = DECL_DEBUG_EXPR (t);
5191       tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5192       if (addr)
5193 	return addr;
5194     }
5195   if ((VAR_P (t)
5196        || TREE_CODE (t) == PARM_DECL
5197        || TREE_CODE (t) == RESULT_DECL)
5198       && DECL_HAS_VALUE_EXPR_P (t))
5199     {
5200       tree x = DECL_VALUE_EXPR (t);
5201       tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5202       if (addr)
5203 	return addr;
5204     }
5205 
5206   if (!EXPR_P (t))
5207     {
5208       *walk_subtrees = false;
5209       return NULL;
5210     }
5211 
5212   location_t loc = EXPR_LOCATION (t);
5213   if (verify_location (blocks, loc))
5214     return t;
5215 
5216   return NULL;
5217 }
5218 
5219 /* Called via walk_gimple_op.  Verify locations of expressions.  */
5220 
5221 static tree
5222 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5223 {
5224   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5225   return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5226 }
5227 
5228 /* Insert all subblocks of BLOCK into BLOCKS and recurse.  */
5229 
5230 static void
5231 collect_subblocks (hash_set<tree> *blocks, tree block)
5232 {
5233   tree t;
5234   for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5235     {
5236       blocks->add (t);
5237       collect_subblocks (blocks, t);
5238     }
5239 }
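
/* For example, the verifier below seeds the set with the outermost
   block and lets this routine flatten the rest of the scope tree:

     blocks.add (DECL_INITIAL (fn->decl));
     collect_subblocks (&blocks, DECL_INITIAL (fn->decl));  */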
5240 
5241 /* Verify the GIMPLE statements in the CFG of FN.  */
5242 
5243 DEBUG_FUNCTION void
5244 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5245 {
5246   basic_block bb;
5247   bool err = false;
5248 
5249   timevar_push (TV_TREE_STMT_VERIFY);
5250   hash_set<void *> visited;
5251   hash_set<gimple *> visited_throwing_stmts;
5252 
5253   /* Collect all BLOCKs referenced by the BLOCK tree of FN.  */
5254   hash_set<tree> blocks;
5255   if (DECL_INITIAL (fn->decl))
5256     {
5257       blocks.add (DECL_INITIAL (fn->decl));
5258       collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5259     }
5260 
5261   FOR_EACH_BB_FN (bb, fn)
5262     {
5263       gimple_stmt_iterator gsi;
5264       edge_iterator ei;
5265       edge e;
5266 
5267       for (gphi_iterator gpi = gsi_start_phis (bb);
5268 	   !gsi_end_p (gpi);
5269 	   gsi_next (&gpi))
5270 	{
5271 	  gphi *phi = gpi.phi ();
5272 	  bool err2 = false;
5273 	  unsigned i;
5274 
5275 	  if (gimple_bb (phi) != bb)
5276 	    {
5277 	      error ("gimple_bb (phi) is set to a wrong basic block");
5278 	      err2 = true;
5279 	    }
5280 
5281 	  err2 |= verify_gimple_phi (phi);
5282 
5283 	  /* Only PHI arguments have locations.  */
5284 	  if (gimple_location (phi) != UNKNOWN_LOCATION)
5285 	    {
5286 	      error ("PHI node with location");
5287 	      err2 = true;
5288 	    }
5289 
5290 	  for (i = 0; i < gimple_phi_num_args (phi); i++)
5291 	    {
5292 	      tree arg = gimple_phi_arg_def (phi, i);
5293 	      tree addr = walk_tree (&arg, verify_node_sharing_1,
5294 				     &visited, NULL);
5295 	      if (addr)
5296 		{
5297 		  error ("incorrect sharing of tree nodes");
5298 		  debug_generic_expr (addr);
5299 		  err2 |= true;
5300 		}
5301 	      location_t loc = gimple_phi_arg_location (phi, i);
5302 	      if (virtual_operand_p (gimple_phi_result (phi))
5303 		  && loc != UNKNOWN_LOCATION)
5304 		{
5305 		  error ("virtual PHI with argument locations");
5306 		  err2 = true;
5307 		}
5308 	      addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5309 	      if (addr)
5310 		{
5311 		  debug_generic_expr (addr);
5312 		  err2 = true;
5313 		}
5314 	      err2 |= verify_location (&blocks, loc);
5315 	    }
5316 
5317 	  if (err2)
5318 	    debug_gimple_stmt (phi);
5319 	  err |= err2;
5320 	}
5321 
5322       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5323 	{
5324 	  gimple *stmt = gsi_stmt (gsi);
5325 	  bool err2 = false;
5326 	  struct walk_stmt_info wi;
5327 	  tree addr;
5328 	  int lp_nr;
5329 
5330 	  if (gimple_bb (stmt) != bb)
5331 	    {
5332 	      error ("gimple_bb (stmt) is set to a wrong basic block");
5333 	      err2 = true;
5334 	    }
5335 
5336 	  err2 |= verify_gimple_stmt (stmt);
5337 	  err2 |= verify_location (&blocks, gimple_location (stmt));
5338 
5339 	  memset (&wi, 0, sizeof (wi));
5340 	  wi.info = (void *) &visited;
5341 	  addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5342 	  if (addr)
5343 	    {
5344 	      error ("incorrect sharing of tree nodes");
5345 	      debug_generic_expr (addr);
5346 	      err2 |= true;
5347 	    }
5348 
5349 	  memset (&wi, 0, sizeof (wi));
5350 	  wi.info = (void *) &blocks;
5351 	  addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5352 	  if (addr)
5353 	    {
5354 	      debug_generic_expr (addr);
5355 	      err2 |= true;
5356 	    }
5357 
5358 	  /* If the statement is marked as part of an EH region, then it is
5359 	     expected that the statement could throw.  Verify that when an
5360 	     optimization simplifies a statement such that we can prove it
5361 	     cannot throw, the other data structures are updated to
5362 	     match.  */
5363 	  lp_nr = lookup_stmt_eh_lp (stmt);
5364 	  if (lp_nr != 0)
5365 	    visited_throwing_stmts.add (stmt);
5366 	  if (lp_nr > 0)
5367 	    {
5368 	      if (!stmt_could_throw_p (cfun, stmt))
5369 		{
5370 		  if (verify_nothrow)
5371 		    {
5372 		      error ("statement marked for throw, but doesn%'t");
5373 		      err2 |= true;
5374 		    }
5375 		}
5376 	      else if (!gsi_one_before_end_p (gsi))
5377 		{
5378 		  error ("statement marked for throw in middle of block");
5379 		  err2 |= true;
5380 		}
5381 	    }
5382 
5383 	  if (err2)
5384 	    debug_gimple_stmt (stmt);
5385 	  err |= err2;
5386 	}
5387 
5388       FOR_EACH_EDGE (e, ei, bb->succs)
5389 	if (e->goto_locus != UNKNOWN_LOCATION)
5390 	  err |= verify_location (&blocks, e->goto_locus);
5391     }
5392 
5393   hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5394   eh_error_found = false;
5395   if (eh_table)
5396     eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5397       (&visited_throwing_stmts);
5398 
5399   if (err || eh_error_found)
5400     internal_error ("verify_gimple failed");
5401 
5402   verify_histograms ();
5403   timevar_pop (TV_TREE_STMT_VERIFY);
5404 }
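
/* Typical invocation (a sketch; the false argument, which skips the
   nothrow diagnostics, is just one possible choice):

     verify_gimple_in_cfg (cfun, false);

   Any failure is fatal via internal_error, so the function does not
   return a status.  */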
5405 
5406 
5407 /* Verifies that the flow information is OK.  */
5408 
5409 static int
5410 gimple_verify_flow_info (void)
5411 {
5412   int err = 0;
5413   basic_block bb;
5414   gimple_stmt_iterator gsi;
5415   gimple *stmt;
5416   edge e;
5417   edge_iterator ei;
5418 
5419   if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5420       || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5421     {
5422       error ("ENTRY_BLOCK has IL associated with it");
5423       err = 1;
5424     }
5425 
5426   if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5427       || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5428     {
5429       error ("EXIT_BLOCK has IL associated with it");
5430       err = 1;
5431     }
5432 
5433   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5434     if (e->flags & EDGE_FALLTHRU)
5435       {
5436 	error ("fallthru to exit from bb %d", e->src->index);
5437 	err = 1;
5438       }
5439 
5440   FOR_EACH_BB_FN (bb, cfun)
5441     {
5442       bool found_ctrl_stmt = false;
5443 
5444       stmt = NULL;
5445 
5446       /* Skip labels at the start of the basic block.  */
5447       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5448 	{
5449 	  tree label;
5450 	  gimple *prev_stmt = stmt;
5451 
5452 	  stmt = gsi_stmt (gsi);
5453 
5454 	  if (gimple_code (stmt) != GIMPLE_LABEL)
5455 	    break;
5456 
5457 	  label = gimple_label_label (as_a <glabel *> (stmt));
5458 	  if (prev_stmt && DECL_NONLOCAL (label))
5459 	    {
5460 	      error ("nonlocal label ");
5461 	      print_generic_expr (stderr, label);
5462 	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
5463 		       bb->index);
5464 	      err = 1;
5465 	    }
5466 
5467 	  if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5468 	    {
5469 	      error ("EH landing pad label ");
5470 	      print_generic_expr (stderr, label);
5471 	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
5472 		       bb->index);
5473 	      err = 1;
5474 	    }
5475 
5476 	  if (label_to_block (cfun, label) != bb)
5477 	    {
5478 	      error ("label ");
5479 	      print_generic_expr (stderr, label);
5480 	      fprintf (stderr, " to block does not match in bb %d",
5481 		       bb->index);
5482 	      err = 1;
5483 	    }
5484 
5485 	  if (decl_function_context (label) != current_function_decl)
5486 	    {
5487 	      error ("label ");
5488 	      print_generic_expr (stderr, label);
5489 	      fprintf (stderr, " has incorrect context in bb %d",
5490 		       bb->index);
5491 	      err = 1;
5492 	    }
5493 	}
5494 
5495       /* Verify that the body of basic block BB is free of control flow.  */
5496       for (; !gsi_end_p (gsi); gsi_next (&gsi))
5497 	{
5498 	  gimple *stmt = gsi_stmt (gsi);
5499 
5500 	  if (found_ctrl_stmt)
5501 	    {
5502 	      error ("control flow in the middle of basic block %d",
5503 		     bb->index);
5504 	      err = 1;
5505 	    }
5506 
5507 	  if (stmt_ends_bb_p (stmt))
5508 	    found_ctrl_stmt = true;
5509 
5510 	  if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5511 	    {
5512 	      error ("label ");
5513 	      print_generic_expr (stderr, gimple_label_label (label_stmt));
5514 	      fprintf (stderr, " in the middle of basic block %d", bb->index);
5515 	      err = 1;
5516 	    }
5517 	}
5518 
5519       gsi = gsi_last_nondebug_bb (bb);
5520       if (gsi_end_p (gsi))
5521 	continue;
5522 
5523       stmt = gsi_stmt (gsi);
5524 
5525       if (gimple_code (stmt) == GIMPLE_LABEL)
5526 	continue;
5527 
5528       err |= verify_eh_edges (stmt);
5529 
5530       if (is_ctrl_stmt (stmt))
5531 	{
5532 	  FOR_EACH_EDGE (e, ei, bb->succs)
5533 	    if (e->flags & EDGE_FALLTHRU)
5534 	      {
5535 		error ("fallthru edge after a control statement in bb %d",
5536 		       bb->index);
5537 		err = 1;
5538 	      }
5539 	}
5540 
5541       if (gimple_code (stmt) != GIMPLE_COND)
5542 	{
5543 	  /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5544 	     after anything other than a GIMPLE_COND statement.  */
5545 	  FOR_EACH_EDGE (e, ei, bb->succs)
5546 	    if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5547 	      {
5548 		error ("true/false edge after a non-GIMPLE_COND in bb %d",
5549 		       bb->index);
5550 		err = 1;
5551 	      }
5552 	}
5553 
5554       switch (gimple_code (stmt))
5555 	{
5556 	case GIMPLE_COND:
5557 	  {
5558 	    edge true_edge;
5559 	    edge false_edge;
5560 
5561 	    extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5562 
5563 	    if (!true_edge
5564 		|| !false_edge
5565 		|| !(true_edge->flags & EDGE_TRUE_VALUE)
5566 		|| !(false_edge->flags & EDGE_FALSE_VALUE)
5567 		|| (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5568 		|| (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5569 		|| EDGE_COUNT (bb->succs) >= 3)
5570 	      {
5571 		error ("wrong outgoing edge flags at end of bb %d",
5572 		       bb->index);
5573 		err = 1;
5574 	      }
5575 	  }
5576 	  break;
5577 
5578 	case GIMPLE_GOTO:
5579 	  if (simple_goto_p (stmt))
5580 	    {
5581 	      error ("explicit goto at end of bb %d", bb->index);
5582 	      err = 1;
5583 	    }
5584 	  else
5585 	    {
5586 	      /* FIXME.  We should double check that the labels in the
5587 		 destination blocks have their address taken.  */
5588 	      FOR_EACH_EDGE (e, ei, bb->succs)
5589 		if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5590 				 | EDGE_FALSE_VALUE))
5591 		    || !(e->flags & EDGE_ABNORMAL))
5592 		  {
5593 		    error ("wrong outgoing edge flags at end of bb %d",
5594 			   bb->index);
5595 		    err = 1;
5596 		  }
5597 	    }
5598 	  break;
5599 
5600 	case GIMPLE_CALL:
5601 	  if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5602 	    break;
5603 	  /* fallthru */
5604 	case GIMPLE_RETURN:
5605 	  if (!single_succ_p (bb)
5606 	      || (single_succ_edge (bb)->flags
5607 		  & (EDGE_FALLTHRU | EDGE_ABNORMAL
5608 		     | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5609 	    {
5610 	      error ("wrong outgoing edge flags at end of bb %d", bb->index);
5611 	      err = 1;
5612 	    }
5613 	  if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5614 	    {
5615 	      error ("return edge does not point to exit in bb %d",
5616 		     bb->index);
5617 	      err = 1;
5618 	    }
5619 	  break;
5620 
5621 	case GIMPLE_SWITCH:
5622 	  {
5623 	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
5624 	    tree prev;
5625 	    edge e;
5626 	    size_t i, n;
5627 
5628 	    n = gimple_switch_num_labels (switch_stmt);
5629 
5630 	    /* Mark all the destination basic blocks.  */
5631 	    for (i = 0; i < n; ++i)
5632 	      {
5633 		basic_block label_bb = gimple_switch_label_bb (cfun, switch_stmt, i);
5634 		gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5635 		label_bb->aux = (void *)1;
5636 	      }
5637 
5638 	    /* Verify that the case labels are sorted.  */
5639 	    prev = gimple_switch_label (switch_stmt, 0);
5640 	    for (i = 1; i < n; ++i)
5641 	      {
5642 		tree c = gimple_switch_label (switch_stmt, i);
5643 		if (!CASE_LOW (c))
5644 		  {
5645 		    error ("found default case not at the start of "
5646 			   "case vector");
5647 		    err = 1;
5648 		    continue;
5649 		  }
5650 		if (CASE_LOW (prev)
5651 		    && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5652 		  {
5653 		    error ("case labels not sorted: ");
5654 		    print_generic_expr (stderr, prev);
5655 		    fprintf (stderr," is greater than ");
5656 		    print_generic_expr (stderr, c);
5657 		    fprintf (stderr," but comes before it.\n");
5658 		    err = 1;
5659 		  }
5660 		prev = c;
5661 	      }
5662 	    /* VRP will remove the default case if it can prove it will
5663 	       never be executed.  So do not verify there always exists
5664 	       a default case here.  */
5665 
5666 	    FOR_EACH_EDGE (e, ei, bb->succs)
5667 	      {
5668 		if (!e->dest->aux)
5669 		  {
5670 		    error ("extra outgoing edge %d->%d",
5671 			   bb->index, e->dest->index);
5672 		    err = 1;
5673 		  }
5674 
5675 		e->dest->aux = (void *)2;
5676 		if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5677 				 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5678 		  {
5679 		    error ("wrong outgoing edge flags at end of bb %d",
5680 			   bb->index);
5681 		    err = 1;
5682 		  }
5683 	      }
5684 
5685 	    /* Check that an edge was found for every case label.  */
5686 	    for (i = 0; i < n; ++i)
5687 	      {
5688 		basic_block label_bb = gimple_switch_label_bb (cfun,
5689 							       switch_stmt, i);
5690 
5691 		if (label_bb->aux != (void *)2)
5692 		  {
5693 		    error ("missing edge %i->%i", bb->index, label_bb->index);
5694 		    err = 1;
5695 		  }
5696 	      }
5697 
5698 	    FOR_EACH_EDGE (e, ei, bb->succs)
5699 	      e->dest->aux = (void *)0;
5700 	  }
5701 	  break;
5702 
5703 	case GIMPLE_EH_DISPATCH:
5704 	  err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5705 	  break;
5706 
5707 	default:
5708 	  break;
5709 	}
5710     }
5711 
5712   if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5713     verify_dominators (CDI_DOMINATORS);
5714 
5715   return err;
5716 }
5717 
5718 
5719 /* Updates phi nodes after creating a forwarder block joined
5720    by edge FALLTHRU.  */
5721 
5722 static void
5723 gimple_make_forwarder_block (edge fallthru)
5724 {
5725   edge e;
5726   edge_iterator ei;
5727   basic_block dummy, bb;
5728   tree var;
5729   gphi_iterator gsi;
5730 
5731   dummy = fallthru->src;
5732   bb = fallthru->dest;
5733 
5734   if (single_pred_p (bb))
5735     return;
5736 
5737   /* If we redirected a branch we must create new PHI nodes at the
5738      start of BB.  */
5739   for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5740     {
5741       gphi *phi, *new_phi;
5742 
5743       phi = gsi.phi ();
5744       var = gimple_phi_result (phi);
5745       new_phi = create_phi_node (var, bb);
5746       gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5747       add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5748 		   UNKNOWN_LOCATION);
5749     }
5750 
5751   /* Add the arguments we have stored on edges.  */
5752   FOR_EACH_EDGE (e, ei, bb->preds)
5753     {
5754       if (e == fallthru)
5755 	continue;
5756 
5757       flush_pending_stmts (e);
5758     }
5759 }
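
/* Sketch of the PHI rewrite above (SSA names hypothetical): a node
   x_1 = PHI <a(e1), b(e2)> kept in DUMMY has its result replaced by a
   fresh copy x_2, while BB gains x_1 = PHI <x_2(fallthru), ...> whose
   remaining arguments come from the pending statements flushed on the
   redirected edges.  */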
5760 
5761 
5762 /* Return a non-special label in the head of basic block BB.
5763    Create one if it doesn't exist.  */
5764 
5765 tree
5766 gimple_block_label (basic_block bb)
5767 {
5768   gimple_stmt_iterator i, s = gsi_start_bb (bb);
5769   bool first = true;
5770   tree label;
5771   glabel *stmt;
5772 
5773   for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5774     {
5775       stmt = dyn_cast <glabel *> (gsi_stmt (i));
5776       if (!stmt)
5777 	break;
5778       label = gimple_label_label (stmt);
5779       if (!DECL_NONLOCAL (label))
5780 	{
5781 	  if (!first)
5782 	    gsi_move_before (&i, &s);
5783 	  return label;
5784 	}
5785     }
5786 
5787   label = create_artificial_label (UNKNOWN_LOCATION);
5788   stmt = gimple_build_label (label);
5789   gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5790   return label;
5791 }
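
/* Example use when retargeting a switch case (a sketch mirroring
   gimple_redirect_edge_and_branch below; DEST is hypothetical):

     tree label = gimple_block_label (dest);
     CASE_LABEL (elt) = label;

   The label returned is first in DEST and not DECL_NONLOCAL, so it is
   a valid explicit jump target.  */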
5792 
5793 
5794 /* Attempt to perform edge redirection by replacing a possibly complex
5795    jump instruction by a goto or by removing the jump completely.
5796    This can apply only if all edges now point to the same block.  The
5797    parameters and return values are equivalent to
5798    redirect_edge_and_branch.  */
5799 
5800 static edge
5801 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5802 {
5803   basic_block src = e->src;
5804   gimple_stmt_iterator i;
5805   gimple *stmt;
5806 
5807   /* We can replace or remove a complex jump only when we have exactly
5808      two edges.  */
5809   if (EDGE_COUNT (src->succs) != 2
5810       /* Verify that all targets will be TARGET.  Specifically, the
5811 	 edge that is not E must also go to TARGET.  */
5812       || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5813     return NULL;
5814 
5815   i = gsi_last_bb (src);
5816   if (gsi_end_p (i))
5817     return NULL;
5818 
5819   stmt = gsi_stmt (i);
5820 
5821   if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5822     {
5823       gsi_remove (&i, true);
5824       e = ssa_redirect_edge (e, target);
5825       e->flags = EDGE_FALLTHRU;
5826       return e;
5827     }
5828 
5829   return NULL;
5830 }
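
/* Sketch of the simplification (block numbers hypothetical): when bb2
   ends in "if (cond) goto bb3; else goto bb3;", both successors are
   already the redirect target, so the GIMPLE_COND is deleted and the
   surviving edge becomes a plain EDGE_FALLTHRU.  */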
5831 
5832 
5833 /* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
5834    edge representing the redirected branch.  */
5835 
5836 static edge
5837 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5838 {
5839   basic_block bb = e->src;
5840   gimple_stmt_iterator gsi;
5841   edge ret;
5842   gimple *stmt;
5843 
5844   if (e->flags & EDGE_ABNORMAL)
5845     return NULL;
5846 
5847   if (e->dest == dest)
5848     return NULL;
5849 
5850   if (e->flags & EDGE_EH)
5851     return redirect_eh_edge (e, dest);
5852 
5853   if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5854     {
5855       ret = gimple_try_redirect_by_replacing_jump (e, dest);
5856       if (ret)
5857 	return ret;
5858     }
5859 
5860   gsi = gsi_last_nondebug_bb (bb);
5861   stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5862 
5863   switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5864     {
5865     case GIMPLE_COND:
5866       /* For COND_EXPR, we only need to redirect the edge.  */
5867       break;
5868 
5869     case GIMPLE_GOTO:
5870       /* No non-abnormal edges should lead from a non-simple goto, and
5871 	 simple ones should be represented implicitly.  */
5872       gcc_unreachable ();
5873 
5874     case GIMPLE_SWITCH:
5875       {
5876 	gswitch *switch_stmt = as_a <gswitch *> (stmt);
5877 	tree label = gimple_block_label (dest);
5878         tree cases = get_cases_for_edge (e, switch_stmt);
5879 
5880 	/* If we have a list of cases associated with E, then use it
5881 	   as it's a lot faster than walking the entire case vector.  */
5882 	if (cases)
5883 	  {
5884 	    edge e2 = find_edge (e->src, dest);
5885 	    tree last, first;
5886 
5887 	    first = cases;
5888 	    while (cases)
5889 	      {
5890 		last = cases;
5891 		CASE_LABEL (cases) = label;
5892 		cases = CASE_CHAIN (cases);
5893 	      }
5894 
5895 	    /* If there was already an edge in the CFG, then we need
5896 	       to move all the cases associated with E to E2.  */
5897 	    if (e2)
5898 	      {
5899 		tree cases2 = get_cases_for_edge (e2, switch_stmt);
5900 
5901 		CASE_CHAIN (last) = CASE_CHAIN (cases2);
5902 		CASE_CHAIN (cases2) = first;
5903 	      }
5904 	    bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5905 	  }
5906 	else
5907 	  {
5908 	    size_t i, n = gimple_switch_num_labels (switch_stmt);
5909 
5910 	    for (i = 0; i < n; i++)
5911 	      {
5912 		tree elt = gimple_switch_label (switch_stmt, i);
5913 		if (label_to_block (cfun, CASE_LABEL (elt)) == e->dest)
5914 		  CASE_LABEL (elt) = label;
5915 	      }
5916 	  }
5917       }
5918       break;
5919 
5920     case GIMPLE_ASM:
5921       {
5922 	gasm *asm_stmt = as_a <gasm *> (stmt);
5923 	int i, n = gimple_asm_nlabels (asm_stmt);
5924 	tree label = NULL;
5925 
5926 	for (i = 0; i < n; ++i)
5927 	  {
5928 	    tree cons = gimple_asm_label_op (asm_stmt, i);
5929 	    if (label_to_block (cfun, TREE_VALUE (cons)) == e->dest)
5930 	      {
5931 		if (!label)
5932 		  label = gimple_block_label (dest);
5933 		TREE_VALUE (cons) = label;
5934 	      }
5935 	  }
5936 
5937 	/* If we didn't find any label matching the former edge in the
5938 	   asm labels, we must be redirecting the fallthrough
5939 	   edge.  */
5940 	gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5941       }
5942       break;
5943 
5944     case GIMPLE_RETURN:
5945       gsi_remove (&gsi, true);
5946       e->flags |= EDGE_FALLTHRU;
5947       break;
5948 
5949     case GIMPLE_OMP_RETURN:
5950     case GIMPLE_OMP_CONTINUE:
5951     case GIMPLE_OMP_SECTIONS_SWITCH:
5952     case GIMPLE_OMP_FOR:
5953       /* The edges from OMP constructs can be simply redirected.  */
5954       break;
5955 
5956     case GIMPLE_EH_DISPATCH:
5957       if (!(e->flags & EDGE_FALLTHRU))
5958 	redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5959       break;
5960 
5961     case GIMPLE_TRANSACTION:
5962       if (e->flags & EDGE_TM_ABORT)
5963 	gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
5964 				           gimple_block_label (dest));
5965       else if (e->flags & EDGE_TM_UNINSTRUMENTED)
5966 	gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
5967 				             gimple_block_label (dest));
5968       else
5969 	gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
5970 				           gimple_block_label (dest));
5971       break;
5972 
5973     default:
5974       /* Otherwise it must be a fallthru edge, and we don't need to
5975 	 do anything besides redirecting it.  */
5976       gcc_assert (e->flags & EDGE_FALLTHRU);
5977       break;
5978     }
5979 
5980   /* Update/insert PHI nodes as necessary.  */
5981 
5982   /* Now update the edges in the CFG.  */
5983   e = ssa_redirect_edge (e, dest);
5984 
5985   return e;
5986 }
5987 
5988 /* Returns true if it is possible to remove edge E by redirecting
5989    it to the destination of the other edge from E->src.  */
5990 
5991 static bool
5992 gimple_can_remove_branch_p (const_edge e)
5993 {
5994   if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5995     return false;
5996 
5997   return true;
5998 }
5999 
6000 /* Simple wrapper, as we can always redirect fallthru edges.  */
6001 
6002 static basic_block
6003 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6004 {
6005   e = gimple_redirect_edge_and_branch (e, dest);
6006   gcc_assert (e);
6007 
6008   return NULL;
6009 }
6010 
6011 
6012 /* Splits basic block BB after statement STMT (but at least after the
6013    labels).  If STMT is NULL, BB is split just after the labels.  */
6014 
6015 static basic_block
6016 gimple_split_block (basic_block bb, void *stmt)
6017 {
6018   gimple_stmt_iterator gsi;
6019   gimple_stmt_iterator gsi_tgt;
6020   gimple_seq list;
6021   basic_block new_bb;
6022   edge e;
6023   edge_iterator ei;
6024 
6025   new_bb = create_empty_bb (bb);
6026 
6027   /* Redirect the outgoing edges.  */
6028   new_bb->succs = bb->succs;
6029   bb->succs = NULL;
6030   FOR_EACH_EDGE (e, ei, new_bb->succs)
6031     e->src = new_bb;
6032 
6033   /* Get a stmt iterator pointing to the first stmt to move.  */
6034   if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
6035     gsi = gsi_after_labels (bb);
6036   else
6037     {
6038       gsi = gsi_for_stmt ((gimple *) stmt);
6039       gsi_next (&gsi);
6040     }
6041 
6042   /* Move everything from GSI to the new basic block.  */
6043   if (gsi_end_p (gsi))
6044     return new_bb;
6045 
6046   /* Split the statement list - avoid re-creating new containers as this
6047      brings ugly quadratic memory consumption in the inliner.
6048      (We are still quadratic since we need to update stmt BB pointers,
6049      sadly.)  */
6050   gsi_split_seq_before (&gsi, &list);
6051   set_bb_seq (new_bb, list);
6052   for (gsi_tgt = gsi_start (list);
6053        !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
6054     gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6055 
6056   return new_bb;
6057 }
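
/* Callers normally go through the CFG hook wrapper, e.g. (a sketch
   taken from gimple_split_block_before_cond_jump below):

     basic_block new_bb = split_block (bb, split_point)->dest;

   Statements strictly after SPLIT_POINT are moved to NEW_BB together
   with BB's outgoing edges.  */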
6058 
6059 
6060 /* Moves basic block BB after block AFTER.  */
6061 
6062 static bool
6063 gimple_move_block_after (basic_block bb, basic_block after)
6064 {
6065   if (bb->prev_bb == after)
6066     return true;
6067 
6068   unlink_block (bb);
6069   link_block (bb, after);
6070 
6071   return true;
6072 }
6073 
6074 
6075 /* Return TRUE if block BB has no executable statements, otherwise return
6076    FALSE.  */
6077 
6078 static bool
6079 gimple_empty_block_p (basic_block bb)
6080 {
6081   /* BB must have no executable statements.  */
6082   gimple_stmt_iterator gsi = gsi_after_labels (bb);
6083   if (phi_nodes (bb))
6084     return false;
6085   while (!gsi_end_p (gsi))
6086     {
6087       gimple *stmt = gsi_stmt (gsi);
6088       if (is_gimple_debug (stmt))
6089 	;
6090       else if (gimple_code (stmt) == GIMPLE_NOP
6091 	       || gimple_code (stmt) == GIMPLE_PREDICT)
6092 	;
6093       else
6094 	return false;
6095       gsi_next (&gsi);
6096     }
6097   return true;
6098 }
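
/* E.g. (a sketch) a block holding only labels, GIMPLE_NOPs,
   GIMPLE_PREDICTs and debug statements counts as empty here, while a
   single PHI node or real statement makes it non-empty.  */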
6099 
6100 
6101 /* Split a basic block if it ends with a conditional branch and if the
6102    other part of the block is not empty.  */
6103 
6104 static basic_block
6105 gimple_split_block_before_cond_jump (basic_block bb)
6106 {
6107   gimple *last, *split_point;
6108   gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6109   if (gsi_end_p (gsi))
6110     return NULL;
6111   last = gsi_stmt (gsi);
6112   if (gimple_code (last) != GIMPLE_COND
6113       && gimple_code (last) != GIMPLE_SWITCH)
6114     return NULL;
6115   gsi_prev (&gsi);
6116   split_point = gsi_stmt (gsi);
6117   return split_block (bb, split_point)->dest;
6118 }
6119 
6120 
6121 /* Return true if basic_block can be duplicated.  */
6122 
6123 static bool
6124 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
6125 {
6126   return true;
6127 }
6128 
6129 /* Create a duplicate of the basic block BB.  NOTE: This does not
6130    preserve SSA form.  */
6131 
6132 static basic_block
6133 gimple_duplicate_bb (basic_block bb, copy_bb_data *id)
6134 {
6135   basic_block new_bb;
6136   gimple_stmt_iterator gsi_tgt;
6137 
6138   new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6139 
6140   /* Copy the PHI nodes.  We ignore PHI node arguments here because
6141      the incoming edges have not been set up yet.  */
6142   for (gphi_iterator gpi = gsi_start_phis (bb);
6143        !gsi_end_p (gpi);
6144        gsi_next (&gpi))
6145     {
6146       gphi *phi, *copy;
6147       phi = gpi.phi ();
6148       copy = create_phi_node (NULL_TREE, new_bb);
6149       create_new_def_for (gimple_phi_result (phi), copy,
6150 			  gimple_phi_result_ptr (copy));
6151       gimple_set_uid (copy, gimple_uid (phi));
6152     }
6153 
6154   gsi_tgt = gsi_start_bb (new_bb);
6155   for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6156        !gsi_end_p (gsi);
6157        gsi_next (&gsi))
6158     {
6159       def_operand_p def_p;
6160       ssa_op_iter op_iter;
6161       tree lhs;
6162       gimple *stmt, *copy;
6163 
6164       stmt = gsi_stmt (gsi);
6165       if (gimple_code (stmt) == GIMPLE_LABEL)
6166 	continue;
6167 
6168       /* Don't duplicate label debug stmts.  */
6169       if (gimple_debug_bind_p (stmt)
6170 	  && TREE_CODE (gimple_debug_bind_get_var (stmt))
6171 	     == LABEL_DECL)
6172 	continue;
6173 
6174       /* Create a new copy of STMT and duplicate STMT's virtual
6175 	 operands.  */
6176       copy = gimple_copy (stmt);
6177       gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6178 
6179       maybe_duplicate_eh_stmt (copy, stmt);
6180       gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6181 
6182       /* When copying around a stmt writing into a local non-user
6183 	 aggregate, make sure it won't share stack slot with other
6184 	 vars.  */
6185       lhs = gimple_get_lhs (stmt);
6186       if (lhs && TREE_CODE (lhs) != SSA_NAME)
6187 	{
6188 	  tree base = get_base_address (lhs);
6189 	  if (base
6190 	      && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6191 	      && DECL_IGNORED_P (base)
6192 	      && !TREE_STATIC (base)
6193 	      && !DECL_EXTERNAL (base)
6194 	      && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6195 	    DECL_NONSHAREABLE (base) = 1;
6196 	}
6197 
6198       /* If requested, remap the dependence info of cliques brought in
6199          via inlining.  */
6200       if (id)
6201 	for (unsigned i = 0; i < gimple_num_ops (copy); ++i)
6202 	  {
6203 	    tree op = gimple_op (copy, i);
6204 	    if (!op)
6205 	      continue;
6206 	    if (TREE_CODE (op) == ADDR_EXPR
6207 		|| TREE_CODE (op) == WITH_SIZE_EXPR)
6208 	      op = TREE_OPERAND (op, 0);
6209 	    while (handled_component_p (op))
6210 	      op = TREE_OPERAND (op, 0);
6211 	    if ((TREE_CODE (op) == MEM_REF
6212 		 || TREE_CODE (op) == TARGET_MEM_REF)
6213 		&& MR_DEPENDENCE_CLIQUE (op) > 1
6214 		&& MR_DEPENDENCE_CLIQUE (op) != bb->loop_father->owned_clique)
6215 	      {
6216 		if (!id->dependence_map)
6217 		  id->dependence_map = new hash_map<dependence_hash,
6218 						    unsigned short>;
6219 		bool existed;
6220 		unsigned short &newc = id->dependence_map->get_or_insert
6221 		    (MR_DEPENDENCE_CLIQUE (op), &existed);
6222 		if (!existed)
6223 		  {
6224 		    gcc_assert (MR_DEPENDENCE_CLIQUE (op) <= cfun->last_clique);
6225 		    newc = ++cfun->last_clique;
6226 		  }
6227 		MR_DEPENDENCE_CLIQUE (op) = newc;
6228 	      }
6229 	  }
6230 
6231       /* Create new names for all the definitions created by COPY and
6232 	 add replacement mappings for each new name.  */
6233       FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6234 	create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6235     }
6236 
6237   return new_bb;
6238 }
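
/* Note on the clique remap above (a sketch; numbers hypothetical): if
   a copied reference carries MR_DEPENDENCE_CLIQUE == 3 and that clique
   is not owned by the loop being duplicated, the copy gets a fresh
   clique ++cfun->last_clique, memoized in id->dependence_map so every
   copy originating from clique 3 lands in the same new clique and the
   pairwise dependence information stays consistent.  */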
6239 
6240 /* Adds phi node arguments for edge E_COPY after basic block duplication.  */
6241 
6242 static void
6243 add_phi_args_after_copy_edge (edge e_copy)
6244 {
6245   basic_block bb, bb_copy = e_copy->src, dest;
6246   edge e;
6247   edge_iterator ei;
6248   gphi *phi, *phi_copy;
6249   tree def;
6250   gphi_iterator psi, psi_copy;
6251 
6252   if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6253     return;
6254 
6255   bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6256 
6257   if (e_copy->dest->flags & BB_DUPLICATED)
6258     dest = get_bb_original (e_copy->dest);
6259   else
6260     dest = e_copy->dest;
6261 
6262   e = find_edge (bb, dest);
6263   if (!e)
6264     {
6265       /* During loop unrolling the target of the latch edge is copied.
6266 	 In this case we are not looking for the edge to DEST, but for
6267 	 the edge to the duplicated block whose original was DEST.  */
6268       FOR_EACH_EDGE (e, ei, bb->succs)
6269 	{
6270 	  if ((e->dest->flags & BB_DUPLICATED)
6271 	      && get_bb_original (e->dest) == dest)
6272 	    break;
6273 	}
6274 
6275       gcc_assert (e != NULL);
6276     }
6277 
6278   for (psi = gsi_start_phis (e->dest),
6279        psi_copy = gsi_start_phis (e_copy->dest);
6280        !gsi_end_p (psi);
6281        gsi_next (&psi), gsi_next (&psi_copy))
6282     {
6283       phi = psi.phi ();
6284       phi_copy = psi_copy.phi ();
6285       def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6286       add_phi_arg (phi_copy, def, e_copy,
6287 		   gimple_phi_arg_location_from_edge (phi, e));
6288     }
6289 }
6290 
6291 
6292 /* Basic block BB_COPY was created by code duplication.  Add phi node
6293    arguments for edges going out of BB_COPY.  The blocks that were
6294    duplicated have BB_DUPLICATED set.  */
6295 
6296 void
6297 add_phi_args_after_copy_bb (basic_block bb_copy)
6298 {
6299   edge e_copy;
6300   edge_iterator ei;
6301 
6302   FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6303     {
6304       add_phi_args_after_copy_edge (e_copy);
6305     }
6306 }
6307 
6308 /* Blocks in REGION_COPY array of length N_REGION were created by
6309    duplication of basic blocks.  Add phi node arguments for edges
6310    going from these blocks.  If E_COPY is not NULL, also add
6311    phi node arguments for its destination.  */
6312 
6313 void
6314 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6315 			 edge e_copy)
6316 {
6317   unsigned i;
6318 
6319   for (i = 0; i < n_region; i++)
6320     region_copy[i]->flags |= BB_DUPLICATED;
6321 
6322   for (i = 0; i < n_region; i++)
6323     add_phi_args_after_copy_bb (region_copy[i]);
6324   if (e_copy)
6325     add_phi_args_after_copy_edge (e_copy);
6326 
6327   for (i = 0; i < n_region; i++)
6328     region_copy[i]->flags &= ~BB_DUPLICATED;
6329 }
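
/* Typical sequence after copying a region (a sketch mirroring
   gimple_duplicate_sese_region below):

     copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy,
	       loop, split_edge_bb_loc (entry), update_dominance);
     add_phi_args_after_copy (region_copy, n_region, NULL);

   The transient BB_DUPLICATED flags let the per-edge helper map each
   copy back to its original block when reading PHI arguments.  */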
6330 
6331 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6332    important exit edge EXIT.  By important we mean that no SSA name defined
6333    inside region is live over the other exit edges of the region.  All entry
6334    inside the region is live over the other exit edges of the region.  All entry
6335    to the duplicate of the region.  Dominance and loop information is
6336    updated if UPDATE_DOMINANCE is true, but not the SSA web.  If
6337    UPDATE_DOMINANCE is false then we assume that the caller will update the
6338    dominance information after calling this function.  The new basic
6339    blocks are stored to REGION_COPY in the same order as they had in REGION,
6340    provided that REGION_COPY is not NULL.
6341    The function returns false if it is unable to copy the region,
6342    true otherwise.  */
6343 
6344 bool
6345 gimple_duplicate_sese_region (edge entry, edge exit,
6346 			    basic_block *region, unsigned n_region,
6347 			    basic_block *region_copy,
6348 			    bool update_dominance)
6349 {
6350   unsigned i;
6351   bool free_region_copy = false, copying_header = false;
6352   struct loop *loop = entry->dest->loop_father;
6353   edge exit_copy;
6354   vec<basic_block> doms = vNULL;
6355   edge redirected;
6356   profile_count total_count = profile_count::uninitialized ();
6357   profile_count entry_count = profile_count::uninitialized ();
6358 
6359   if (!can_copy_bbs_p (region, n_region))
6360     return false;
6361 
6362   /* Some sanity checking.  Note that we do not check for all possible
6363      misuses of the function.  That is, if you ask to copy something weird,
6364      it will work, but the state of structures probably will not be
6365      correct.  */
6366   for (i = 0; i < n_region; i++)
6367     {
6368       /* We do not handle subloops, i.e. all the blocks must belong to the
6369 	 same loop.  */
6370       if (region[i]->loop_father != loop)
6371 	return false;
6372 
6373       if (region[i] != entry->dest
6374 	  && region[i] == loop->header)
6375 	return false;
6376     }
6377 
6378   /* In case the function is used for loop header copying (which is the primary
6379      use), ensure that EXIT and its copy will be the new latch and entry edges.  */
6380   if (loop->header == entry->dest)
6381     {
6382       copying_header = true;
6383 
6384       if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6385 	return false;
6386 
6387       for (i = 0; i < n_region; i++)
6388 	if (region[i] != exit->src
6389 	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6390 	  return false;
6391     }
6392 
6393   initialize_original_copy_tables ();
6394 
6395   if (copying_header)
6396     set_loop_copy (loop, loop_outer (loop));
6397   else
6398     set_loop_copy (loop, loop);
6399 
6400   if (!region_copy)
6401     {
6402       region_copy = XNEWVEC (basic_block, n_region);
6403       free_region_copy = true;
6404     }
6405 
6406   /* Record blocks outside the region that are dominated by something
6407      inside.  */
6408   if (update_dominance)
6409     {
6410       doms.create (0);
6411       doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6412     }
6413 
6414   if (entry->dest->count.initialized_p ())
6415     {
6416       total_count = entry->dest->count;
6417       entry_count = entry->count ();
6418       /* Fix up corner cases, to avoid division by zero or creation of negative
6419 	 frequencies.  */
6420       if (entry_count > total_count)
6421 	entry_count = total_count;
6422     }
6423 
6424   copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6425 	    split_edge_bb_loc (entry), update_dominance);
6426   if (total_count.initialized_p () && entry_count.initialized_p ())
6427     {
6428       scale_bbs_frequencies_profile_count (region, n_region,
6429 				           total_count - entry_count,
6430 				           total_count);
6431       scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6432 				           total_count);
6433     }
6434 
6435   if (copying_header)
6436     {
6437       loop->header = exit->dest;
6438       loop->latch = exit->src;
6439     }
6440 
6441   /* Redirect the entry and add the phi node arguments.  */
6442   redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6443   gcc_assert (redirected != NULL);
6444   flush_pending_stmts (entry);
6445 
6446   /* Concerning updating of dominators:  We must recount dominators
6447      for entry block and its copy.  Anything that is outside of the
6448      region, but was dominated by something inside needs recounting as
6449      well.  */
6450   if (update_dominance)
6451     {
6452       set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6453       doms.safe_push (get_bb_original (entry->dest));
6454       iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6455       doms.release ();
6456     }
6457 
6458   /* Add the other PHI node arguments.  */
6459   add_phi_args_after_copy (region_copy, n_region, NULL);
6460 
6461   if (free_region_copy)
6462     free (region_copy);
6463 
6464   free_original_copy_tables ();
6465   return true;
6466 }
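
/* The primary user is loop header copying (a hedged sketch; EXIT_EDGE
   is a hypothetical variable naming the edge that becomes the new
   latch edge):

     gimple_duplicate_sese_region (loop_preheader_edge (loop),
				   exit_edge, bbs, n_bbs, copied, true);

   Afterwards LOOP->header and LOOP->latch have been moved as arranged
   in the copying_header case above.  */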
6467 
6468 /* Checks if BB is part of the region defined by N_REGION BBS.  */
6469 static bool
6470 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6471 {
6472   unsigned int n;
6473 
6474   for (n = 0; n < n_region; n++)
6475     {
6476       if (bb == bbs[n])
6477 	return true;
6478     }
6479   return false;
6480 }
6481 
6482 /* Duplicates REGION consisting of N_REGION blocks.  The new blocks
6483    are stored to REGION_COPY in the same order in that they appear
6484    are stored to REGION_COPY in the same order as they appear
6485    the region, EXIT an exit from it.  The condition guarding EXIT
6486    is moved to ENTRY.  Returns true if duplication succeeds, false
6487    otherwise.
6488 
6489    For example,
6490 
6491    some_code;
6492    if (cond)
6493      A;
6494    else
6495      B;
6496 
6497    is transformed to
6498 
6499    if (cond)
6500      {
6501        some_code;
6502        A;
6503      }
6504    else
6505      {
6506        some_code;
6507        B;
6508      }
6509 */
6510 
6511 bool
6512 gimple_duplicate_sese_tail (edge entry, edge exit,
6513 			  basic_block *region, unsigned n_region,
6514 			  basic_block *region_copy)
6515 {
6516   unsigned i;
6517   bool free_region_copy = false;
6518   struct loop *loop = exit->dest->loop_father;
6519   struct loop *orig_loop = entry->dest->loop_father;
6520   basic_block switch_bb, entry_bb, nentry_bb;
6521   vec<basic_block> doms;
6522   profile_count total_count = profile_count::uninitialized (),
6523 		exit_count = profile_count::uninitialized ();
6524   edge exits[2], nexits[2], e;
6525   gimple_stmt_iterator gsi;
6526   gimple *cond_stmt;
6527   edge sorig, snew;
6528   basic_block exit_bb;
6529   gphi_iterator psi;
6530   gphi *phi;
6531   tree def;
6532   struct loop *target, *aloop, *cloop;
6533 
6534   gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6535   exits[0] = exit;
6536   exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6537 
6538   if (!can_copy_bbs_p (region, n_region))
6539     return false;
6540 
6541   initialize_original_copy_tables ();
6542   set_loop_copy (orig_loop, loop);
6543 
6544   target = loop;
6545   for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6546     {
6547       if (bb_part_of_region_p (aloop->header, region, n_region))
6548 	{
6549 	  cloop = duplicate_loop (aloop, target);
6550 	  duplicate_subloops (aloop, cloop);
6551 	}
6552     }
6553 
6554   if (!region_copy)
6555     {
6556       region_copy = XNEWVEC (basic_block, n_region);
6557       free_region_copy = true;
6558     }
6559 
6560   gcc_assert (!need_ssa_update_p (cfun));
6561 
6562   /* Record blocks outside the region that are dominated by something
6563      inside.  */
6564   doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6565 
6566   total_count = exit->src->count;
6567   exit_count = exit->count ();
6568   /* Fix up corner cases, to avoid division by zero or creation of negative
6569      frequencies.  */
6570   if (exit_count > total_count)
6571     exit_count = total_count;
6572 
6573   copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6574 	    split_edge_bb_loc (exit), true);
6575   if (total_count.initialized_p () && exit_count.initialized_p ())
6576     {
6577       scale_bbs_frequencies_profile_count (region, n_region,
6578 				           total_count - exit_count,
6579 				           total_count);
6580       scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6581 				           total_count);
6582     }
6583 
6584   /* Create the switch block, and put the exit condition to it.  */
6585   entry_bb = entry->dest;
6586   nentry_bb = get_bb_copy (entry_bb);
6587   if (!last_stmt (entry->src)
6588       || !stmt_ends_bb_p (last_stmt (entry->src)))
6589     switch_bb = entry->src;
6590   else
6591     switch_bb = split_edge (entry);
6592   set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6593 
6594   gsi = gsi_last_bb (switch_bb);
6595   cond_stmt = last_stmt (exit->src);
6596   gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6597   cond_stmt = gimple_copy (cond_stmt);
6598 
6599   gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6600 
6601   sorig = single_succ_edge (switch_bb);
6602   sorig->flags = exits[1]->flags;
6603   sorig->probability = exits[1]->probability;
6604   snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6605   snew->probability = exits[0]->probability;
6606 
6607 
6608   /* Register the new edge from SWITCH_BB in loop exit lists.  */
6609   rescan_loop_exit (snew, true, false);
6610 
6611   /* Add the PHI node arguments.  */
6612   add_phi_args_after_copy (region_copy, n_region, snew);
6613 
6614   /* Get rid of now superfluous conditions and associated edges (and phi node
6615      arguments).  */
6616   exit_bb = exit->dest;
6617 
6618   e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6619   PENDING_STMT (e) = NULL;
6620 
6621   /* The latch of ORIG_LOOP was copied, and so was the backedge
6622      to the original header.  We redirect this backedge to EXIT_BB.  */
6623   for (i = 0; i < n_region; i++)
6624     if (get_bb_original (region_copy[i]) == orig_loop->latch)
6625       {
6626 	gcc_assert (single_succ_edge (region_copy[i]));
6627 	e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6628 	PENDING_STMT (e) = NULL;
6629 	for (psi = gsi_start_phis (exit_bb);
6630 	     !gsi_end_p (psi);
6631 	     gsi_next (&psi))
6632 	  {
6633 	    phi = psi.phi ();
6634 	    def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6635 	    add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6636 	  }
6637       }
6638   e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6639   PENDING_STMT (e) = NULL;
6640 
6641   /* Anything that is outside of the region, but was dominated by something
6642      inside needs to update dominance info.  */
6643   iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6644   doms.release ();
6645   /* Update the SSA web.  */
6646   update_ssa (TODO_update_ssa);
6647 
6648   if (free_region_copy)
6649     free (region_copy);
6650 
6651   free_original_copy_tables ();
6652   return true;
6653 }
6654 
6655 /* Add all the blocks dominated by ENTRY to the array BBS_P.  Stop
6656    adding blocks when the dominator traversal reaches EXIT.  This
6657    function silently assumes that ENTRY strictly dominates EXIT.  */
6658 
6659 void
6660 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6661 			      vec<basic_block> *bbs_p)
6662 {
6663   basic_block son;
6664 
6665   for (son = first_dom_son (CDI_DOMINATORS, entry);
6666        son;
6667        son = next_dom_son (CDI_DOMINATORS, son))
6668     {
6669       bbs_p->safe_push (son);
6670       if (son != exit)
6671 	gather_blocks_in_sese_region (son, exit, bbs_p);
6672     }
6673 }
6674 
6675 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6676    The duplicates are recorded in VARS_MAP.  */
6677 
6678 static void
6679 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6680 			   tree to_context)
6681 {
6682   tree t = *tp, new_t;
6683   struct function *f = DECL_STRUCT_FUNCTION (to_context);
6684 
6685   if (DECL_CONTEXT (t) == to_context)
6686     return;
6687 
6688   bool existed;
6689   tree &loc = vars_map->get_or_insert (t, &existed);
6690 
6691   if (!existed)
6692     {
6693       if (SSA_VAR_P (t))
6694 	{
6695 	  new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6696 	  add_local_decl (f, new_t);
6697 	}
6698       else
6699 	{
6700 	  gcc_assert (TREE_CODE (t) == CONST_DECL);
6701 	  new_t = copy_node (t);
6702 	}
6703       DECL_CONTEXT (new_t) = to_context;
6704 
6705       loc = new_t;
6706     }
6707   else
6708     new_t = loc;
6709 
6710   *tp = new_t;
6711 }
6712 
6713 
6714 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6715    VARS_MAP maps old ssa names and var_decls to the new ones.  */
6716 
6717 static tree
6718 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6719 		  tree to_context)
6720 {
6721   tree new_name;
6722 
6723   gcc_assert (!virtual_operand_p (name));
6724 
6725   tree *loc = vars_map->get (name);
6726 
6727   if (!loc)
6728     {
6729       tree decl = SSA_NAME_VAR (name);
6730       if (decl)
6731 	{
6732 	  gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6733 	  replace_by_duplicate_decl (&decl, vars_map, to_context);
6734 	  new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6735 				       decl, SSA_NAME_DEF_STMT (name));
6736 	}
6737       else
6738 	new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6739 				     name, SSA_NAME_DEF_STMT (name));
6740 
6741       /* Now that we've used the def stmt to define new_name, make sure it
6742 	 doesn't define name anymore.  */
6743       SSA_NAME_DEF_STMT (name) = NULL;
6744 
6745       vars_map->put (name, new_name);
6746     }
6747   else
6748     new_name = *loc;
6749 
6750   return new_name;
6751 }
6752 
6753 struct move_stmt_d
6754 {
6755   tree orig_block;
6756   tree new_block;
6757   tree from_context;
6758   tree to_context;
6759   hash_map<tree, tree> *vars_map;
6760   htab_t new_label_map;
6761   hash_map<void *, void *> *eh_map;
6762   bool remap_decls_p;
6763 };
6764 
6765 /* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
6766    contained in *TP if it was previously ORIG_BLOCK, and change the
6767    DECL_CONTEXT of every local variable referenced in *TP.  */
6768 
6769 static tree
6770 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6771 {
6772   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6773   struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6774   tree t = *tp;
6775 
6776   if (EXPR_P (t))
6777     {
6778       tree block = TREE_BLOCK (t);
6779       if (block == NULL_TREE)
6780 	;
6781       else if (block == p->orig_block
6782 	       || p->orig_block == NULL_TREE)
6783 	{
6784 	  /* tree_node_can_be_shared says we can share invariant
6785 	     addresses but unshare_expr copies them anyways.  Make sure
6786 	     to unshare before adjusting the block in place - we do not
6787 	     always see a copy here.  */
6788 	  if (TREE_CODE (t) == ADDR_EXPR
6789 	      && is_gimple_min_invariant (t))
6790 	    *tp = t = unshare_expr (t);
6791 	  TREE_SET_BLOCK (t, p->new_block);
6792 	}
6793       else if (flag_checking)
6794 	{
6795 	  while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6796 	    block = BLOCK_SUPERCONTEXT (block);
6797 	  gcc_assert (block == p->orig_block);
6798 	}
6799     }
6800   else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6801     {
6802       if (TREE_CODE (t) == SSA_NAME)
6803 	*tp = replace_ssa_name (t, p->vars_map, p->to_context);
6804       else if (TREE_CODE (t) == PARM_DECL
6805 	       && gimple_in_ssa_p (cfun))
6806 	*tp = *(p->vars_map->get (t));
6807       else if (TREE_CODE (t) == LABEL_DECL)
6808 	{
6809 	  if (p->new_label_map)
6810 	    {
6811 	      struct tree_map in, *out;
6812 	      in.base.from = t;
6813 	      out = (struct tree_map *)
6814 		htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6815 	      if (out)
6816 		*tp = t = out->to;
6817 	    }
6818 
6819 	  /* For FORCED_LABELs we can end up with references from other
6820 	     functions if some SESE regions are outlined.  It is UB to
6821 	     jump in between them, but they could be used just for printing
6822 	     addresses etc.  In that case, DECL_CONTEXT on the label should
6823 	     be the function containing the glabel stmt with that LABEL_DECL,
6824 	     rather than whatever function a reference to the label was seen
6825 	     last time.  */
6826 	  if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
6827 	    DECL_CONTEXT (t) = p->to_context;
6828 	}
6829       else if (p->remap_decls_p)
6830 	{
6831 	  /* Replace T with its duplicate.  T should no longer appear in the
6832 	     parent function, so this looks wasteful; however, it may appear
6833 	     in referenced_vars, and more importantly, as virtual operands of
6834 	     statements, and in alias lists of other variables.  It would be
6835 	     quite difficult to expunge it from all those places.  ??? It might
6836 	     suffice to do this for addressable variables.  */
6837 	  if ((VAR_P (t) && !is_global_var (t))
6838 	      || TREE_CODE (t) == CONST_DECL)
6839 	    replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6840 	}
6841       *walk_subtrees = 0;
6842     }
6843   else if (TYPE_P (t))
6844     *walk_subtrees = 0;
6845 
6846   return NULL_TREE;
6847 }
6848 
6849 /* Helper for move_stmt_r.  Given an EH region number for the source
6850    function, map that to the duplicate EH region number in the dest.  */
6851 
6852 static int
6853 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6854 {
6855   eh_region old_r, new_r;
6856 
6857   old_r = get_eh_region_from_number (old_nr);
6858   new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6859 
6860   return new_r->index;
6861 }
6862 
6863 /* Similar, but operate on INTEGER_CSTs.  */
6864 
6865 static tree
6866 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6867 {
6868   int old_nr, new_nr;
6869 
6870   old_nr = tree_to_shwi (old_t_nr);
6871   new_nr = move_stmt_eh_region_nr (old_nr, p);
6872 
6873   return build_int_cst (integer_type_node, new_nr);
6874 }
6875 
6876 /* Like move_stmt_op, but for gimple statements.
6877 
6878    Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
6879    contained in the current statement in *GSI_P and change the
6880    DECL_CONTEXT of every local variable referenced in the current
6881    statement.  */
6882 
6883 static tree
6884 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6885 	     struct walk_stmt_info *wi)
6886 {
6887   struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6888   gimple *stmt = gsi_stmt (*gsi_p);
6889   tree block = gimple_block (stmt);
6890 
6891   if (block == p->orig_block
6892       || (p->orig_block == NULL_TREE
6893 	  && block != NULL_TREE))
6894     gimple_set_block (stmt, p->new_block);
6895 
6896   switch (gimple_code (stmt))
6897     {
6898     case GIMPLE_CALL:
6899       /* Remap the region numbers for __builtin_eh_{pointer,filter}.  */
6900       {
6901 	tree r, fndecl = gimple_call_fndecl (stmt);
6902 	if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
6903 	  switch (DECL_FUNCTION_CODE (fndecl))
6904 	    {
6905 	    case BUILT_IN_EH_COPY_VALUES:
6906 	      r = gimple_call_arg (stmt, 1);
6907 	      r = move_stmt_eh_region_tree_nr (r, p);
6908 	      gimple_call_set_arg (stmt, 1, r);
6909 	      /* FALLTHRU */
6910 
6911 	    case BUILT_IN_EH_POINTER:
6912 	    case BUILT_IN_EH_FILTER:
6913 	      r = gimple_call_arg (stmt, 0);
6914 	      r = move_stmt_eh_region_tree_nr (r, p);
6915 	      gimple_call_set_arg (stmt, 0, r);
6916 	      break;
6917 
6918 	    default:
6919 	      break;
6920 	    }
6921       }
6922       break;
6923 
6924     case GIMPLE_RESX:
6925       {
6926 	gresx *resx_stmt = as_a <gresx *> (stmt);
6927 	int r = gimple_resx_region (resx_stmt);
6928 	r = move_stmt_eh_region_nr (r, p);
6929 	gimple_resx_set_region (resx_stmt, r);
6930       }
6931       break;
6932 
6933     case GIMPLE_EH_DISPATCH:
6934       {
6935 	geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6936 	int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6937 	r = move_stmt_eh_region_nr (r, p);
6938 	gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6939       }
6940       break;
6941 
6942     case GIMPLE_OMP_RETURN:
6943     case GIMPLE_OMP_CONTINUE:
6944       break;
6945 
6946     case GIMPLE_LABEL:
6947       {
6948 	/* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
6949 	   so that such labels can be referenced from other regions.
6950 	   Make sure to update it when seeing a GIMPLE_LABEL though,
6951 	   that is the owner of the label.  */
6952 	walk_gimple_op (stmt, move_stmt_op, wi);
6953 	*handled_ops_p = true;
6954 	tree label = gimple_label_label (as_a <glabel *> (stmt));
6955 	if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
6956 	  DECL_CONTEXT (label) = p->to_context;
6957       }
6958       break;
6959 
6960     default:
6961       if (is_gimple_omp (stmt))
6962 	{
6963 	  /* Do not remap variables inside OMP directives.  Variables
6964 	     referenced in clauses and directive header belong to the
6965 	     parent function and should not be moved into the child
6966 	     function.  */
6967 	  bool save_remap_decls_p = p->remap_decls_p;
6968 	  p->remap_decls_p = false;
6969 	  *handled_ops_p = true;
6970 
6971 	  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6972 			       move_stmt_op, wi);
6973 
6974 	  p->remap_decls_p = save_remap_decls_p;
6975 	}
6976       break;
6977     }
6978 
6979   return NULL_TREE;
6980 }
6981 
6982 /* Move basic block BB from function CFUN to function DEST_FN.  The
6983    block is moved out of the original linked list and placed after
6984    block AFTER in the new list.  Also, the block is removed from the
6985    original array of blocks and placed in DEST_FN's array of blocks.
6986    If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6987    updated to reflect the moved edges.
6988 
6989    The local variables are remapped to new instances, VARS_MAP is used
6990    to record the mapping.  */
6991 
6992 static void
6993 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6994 		  basic_block after, bool update_edge_count_p,
6995 		  struct move_stmt_d *d)
6996 {
6997   struct control_flow_graph *cfg;
6998   edge_iterator ei;
6999   edge e;
7000   gimple_stmt_iterator si;
7001   unsigned old_len, new_len;
7002 
7003   /* Remove BB from dominance structures.  */
7004   delete_from_dominance_info (CDI_DOMINATORS, bb);
7005 
7006   /* Move BB from its current loop to the copy in the new function.  */
7007   if (current_loops)
7008     {
7009       struct loop *new_loop = (struct loop *)bb->loop_father->aux;
7010       if (new_loop)
7011 	bb->loop_father = new_loop;
7012     }
7013 
7014   /* Link BB to the new linked list.  */
7015   move_block_after (bb, after);
7016 
7017   /* Update the edge count in the corresponding flowgraphs.  */
7018   if (update_edge_count_p)
7019     FOR_EACH_EDGE (e, ei, bb->succs)
7020       {
7021 	cfun->cfg->x_n_edges--;
7022 	dest_cfun->cfg->x_n_edges++;
7023       }
7024 
7025   /* Remove BB from the original basic block array.  */
7026   (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
7027   cfun->cfg->x_n_basic_blocks--;
7028 
7029   /* Grow DEST_CFUN's basic block array if needed.  */
7030   cfg = dest_cfun->cfg;
7031   cfg->x_n_basic_blocks++;
7032   if (bb->index >= cfg->x_last_basic_block)
7033     cfg->x_last_basic_block = bb->index + 1;
7034 
7035   old_len = vec_safe_length (cfg->x_basic_block_info);
7036   if ((unsigned) cfg->x_last_basic_block >= old_len)
7037     {
7038       new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
7039       vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
7040     }
7041 
7042   (*cfg->x_basic_block_info)[bb->index] = bb;
7043 
7044   /* Remap the variables in phi nodes.  */
7045   for (gphi_iterator psi = gsi_start_phis (bb);
7046        !gsi_end_p (psi); )
7047     {
7048       gphi *phi = psi.phi ();
7049       use_operand_p use;
7050       tree op = PHI_RESULT (phi);
7051       ssa_op_iter oi;
7052       unsigned i;
7053 
7054       if (virtual_operand_p (op))
7055 	{
7056 	  /* Remove the phi nodes for virtual operands (alias analysis will be
7057 	     run for the new function, anyway).  But replace all uses that
7058 	     might be outside of the region we move.  */
7059 	  use_operand_p use_p;
7060 	  imm_use_iterator iter;
7061 	  gimple *use_stmt;
7062 	  FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
7063 	    FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7064 	      SET_USE (use_p, SSA_NAME_VAR (op));
7065 	  remove_phi_node (&psi, true);
7066 	  continue;
7067 	}
7068 
7069       SET_PHI_RESULT (phi,
7070 		      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7071       FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
7072 	{
7073 	  op = USE_FROM_PTR (use);
7074 	  if (TREE_CODE (op) == SSA_NAME)
7075 	    SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7076 	}
7077 
7078       for (i = 0; i < EDGE_COUNT (bb->preds); i++)
7079 	{
7080 	  location_t locus = gimple_phi_arg_location (phi, i);
7081 	  tree block = LOCATION_BLOCK (locus);
7082 
7083 	  if (locus == UNKNOWN_LOCATION)
7084 	    continue;
7085 	  if (d->orig_block == NULL_TREE || block == d->orig_block)
7086 	    {
7087 	      locus = set_block (locus, d->new_block);
7088 	      gimple_phi_arg_set_location (phi, i, locus);
7089 	    }
7090 	}
7091 
7092       gsi_next (&psi);
7093     }
7094 
7095   for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7096     {
7097       gimple *stmt = gsi_stmt (si);
7098       struct walk_stmt_info wi;
7099 
7100       memset (&wi, 0, sizeof (wi));
7101       wi.info = d;
7102       walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
7103 
7104       if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
7105 	{
7106 	  tree label = gimple_label_label (label_stmt);
7107 	  int uid = LABEL_DECL_UID (label);
7108 
7109 	  gcc_assert (uid > -1);
7110 
7111 	  old_len = vec_safe_length (cfg->x_label_to_block_map);
7112 	  if (old_len <= (unsigned) uid)
7113 	    {
7114 	      new_len = 3 * uid / 2 + 1;
7115 	      vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
7116 	    }
7117 
7118 	  (*cfg->x_label_to_block_map)[uid] = bb;
7119 	  (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
7120 
7121 	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
7122 
7123 	  if (uid >= dest_cfun->cfg->last_label_uid)
7124 	    dest_cfun->cfg->last_label_uid = uid + 1;
7125 	}
7126 
7127       maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
7128       remove_stmt_from_eh_lp_fn (cfun, stmt);
7129 
7130       gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
7131       gimple_remove_stmt_histograms (cfun, stmt);
7132 
7133       /* We cannot leave any operands allocated from the operand caches of
7134 	 the current function.  */
7135       free_stmt_operands (cfun, stmt);
7136       push_cfun (dest_cfun);
7137       update_stmt (stmt);
7138       pop_cfun ();
7139     }
7140 
7141   FOR_EACH_EDGE (e, ei, bb->succs)
7142     if (e->goto_locus != UNKNOWN_LOCATION)
7143       {
7144 	tree block = LOCATION_BLOCK (e->goto_locus);
7145 	if (d->orig_block == NULL_TREE
7146 	    || block == d->orig_block)
7147 	  e->goto_locus = set_block (e->goto_locus, d->new_block);
7148       }
7149 }
7150 
7151 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7152    the outermost EH region.  Use REGION as the incoming base EH region.
7153    If there is no single outermost region, return NULL and set *ALL to
7154    true.  */
7155 
7156 static eh_region
7157 find_outermost_region_in_block (struct function *src_cfun,
7158 				basic_block bb, eh_region region,
7159 				bool *all)
7160 {
7161   gimple_stmt_iterator si;
7162 
7163   for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7164     {
7165       gimple *stmt = gsi_stmt (si);
7166       eh_region stmt_region;
7167       int lp_nr;
7168 
7169       lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7170       stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7171       if (stmt_region)
7172 	{
7173 	  if (region == NULL)
7174 	    region = stmt_region;
7175 	  else if (stmt_region != region)
7176 	    {
7177 	      region = eh_region_outermost (src_cfun, stmt_region, region);
7178 	      if (region == NULL)
7179 		{
7180 		  *all = true;
7181 		  return NULL;
7182 		}
7183 	    }
7184 	}
7185     }
7186 
7187   return region;
7188 }
7189 
7190 static tree
7191 new_label_mapper (tree decl, void *data)
7192 {
7193   htab_t hash = (htab_t) data;
7194   struct tree_map *m;
7195   void **slot;
7196 
7197   gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7198 
7199   m = XNEW (struct tree_map);
7200   m->hash = DECL_UID (decl);
7201   m->base.from = decl;
7202   m->to = create_artificial_label (UNKNOWN_LOCATION);
7203   LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7204   if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7205     cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7206 
7207   slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7208   gcc_assert (*slot == NULL);
7209 
7210   *slot = m;
7211 
7212   return m->to;
7213 }
7214 
7215 /* Tree walker to replace the decls used inside value expressions by
7216    duplicates.  */
7217 
7218 static tree
7219 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7220 {
7221   struct replace_decls_d *rd = (struct replace_decls_d *)data;
7222 
7223   switch (TREE_CODE (*tp))
7224     {
7225     case VAR_DECL:
7226     case PARM_DECL:
7227     case RESULT_DECL:
7228       replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7229       break;
7230     default:
7231       break;
7232     }
7233 
7234   if (IS_TYPE_OR_DECL_P (*tp))
7235     *walk_subtrees = false;
7236 
7237   return NULL;
7238 }
7239 
7240 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7241    subblocks.  */
7242 
7243 static void
7244 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7245 				  tree to_context)
7246 {
7247   tree *tp, t;
7248 
7249   for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7250     {
7251       t = *tp;
7252       if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7253 	continue;
7254       replace_by_duplicate_decl (&t, vars_map, to_context);
7255       if (t != *tp)
7256 	{
7257 	  if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7258 	    {
7259 	      tree x = DECL_VALUE_EXPR (*tp);
7260 	      struct replace_decls_d rd = { vars_map, to_context };
7261 	      x = unshare_expr (x);
7262 	      walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7263 	      SET_DECL_VALUE_EXPR (t, x);
7264 	      DECL_HAS_VALUE_EXPR_P (t) = 1;
7265 	    }
7266 	  DECL_CHAIN (t) = DECL_CHAIN (*tp);
7267 	  *tp = t;
7268 	}
7269     }
7270 
7271   for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7272     replace_block_vars_by_duplicates (block, vars_map, to_context);
7273 }
7274 
7275 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7276    from FN1 to FN2.  */
7277 
7278 static void
7279 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7280 			      struct loop *loop)
7281 {
7282   /* Discard it from the old loop array.  */
7283   (*get_loops (fn1))[loop->num] = NULL;
7284 
7285   /* Place it in the new loop array, assigning it a new number.  */
7286   loop->num = number_of_loops (fn2);
7287   vec_safe_push (loops_for_fn (fn2)->larray, loop);
7288 
7289   /* Recurse to children.  */
7290   for (loop = loop->inner; loop; loop = loop->next)
7291     fixup_loop_arrays_after_move (fn1, fn2, loop);
7292 }
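
/* For example (illustrative): if FN2 currently has loops numbered
   0..3, number_of_loops (FN2) is 4, so a moved LOOP is renumbered 4
   and pushed at index 4 of FN2's loop array.  */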
7293 
7294 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7295    delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks.  */
7296 
7297 DEBUG_FUNCTION void
7298 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7299 {
7300   basic_block bb;
7301   edge_iterator ei;
7302   edge e;
7303   bitmap bbs = BITMAP_ALLOC (NULL);
7304   int i;
7305 
7306   gcc_assert (entry != NULL);
7307   gcc_assert (entry != exit);
7308   gcc_assert (bbs_p != NULL);
7309 
7310   gcc_assert (bbs_p->length () > 0);
7311 
7312   FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7313     bitmap_set_bit (bbs, bb->index);
7314 
7315   gcc_assert (bitmap_bit_p (bbs, entry->index));
7316   gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7317 
7318   FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7319     {
7320       if (bb == entry)
7321 	{
7322 	  gcc_assert (single_pred_p (entry));
7323 	  gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7324 	}
7325       else
7326 	for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7327 	  {
7328 	    e = ei_edge (ei);
7329 	    gcc_assert (bitmap_bit_p (bbs, e->src->index));
7330 	  }
7331 
7332       if (bb == exit)
7333 	{
7334 	  gcc_assert (single_succ_p (exit));
7335 	  gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7336 	}
7337       else
7338 	for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7339 	  {
7340 	    e = ei_edge (ei);
7341 	    gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7342 	  }
7343     }
7344 
7345   BITMAP_FREE (bbs);
7346 }
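
/* Illustrative use, mirroring the checking done in
   move_sese_region_to_fn below (a sketch, not part of the original
   sources):

     vec<basic_block> bbs;
     bbs.create (0);
     bbs.safe_push (entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
     if (flag_checking)
       verify_sese (entry_bb, exit_bb, &bbs);
*/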
7347 
7348 /* If FROM is an SSA_NAME, mark the version in bitmap DATA.  */
7349 
7350 bool
7351 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7352 {
7353   bitmap release_names = (bitmap)data;
7354 
7355   if (TREE_CODE (from) != SSA_NAME)
7356     return true;
7357 
7358   bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7359   return true;
7360 }
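
/* This is meant as a hash_map traversal callback; see its use in
   move_sese_region_to_fn below (sketch):

     bitmap release_names = BITMAP_ALLOC (NULL);
     vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
*/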
7361 
7362 /* Return the IFN_LOOP_DIST_ALIAS call preceding BB's final GIMPLE_COND, or NULL.  */
7363 
7364 static gimple *
7365 find_loop_dist_alias (basic_block bb)
7366 {
7367   gimple *g = last_stmt (bb);
7368   if (g == NULL || gimple_code (g) != GIMPLE_COND)
7369     return NULL;
7370 
7371   gimple_stmt_iterator gsi = gsi_for_stmt (g);
7372   gsi_prev (&gsi);
7373   if (gsi_end_p (gsi))
7374     return NULL;
7375 
7376   g = gsi_stmt (gsi);
7377   if (gimple_call_internal_p (g, IFN_LOOP_DIST_ALIAS))
7378     return g;
7379   return NULL;
7380 }
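
/* The shape being matched is, schematically (a hedged sketch with
   made-up SSA names; the real GIMPLE comes from loop distribution):

     _1 = .LOOP_DIST_ALIAS (orig_loop_num, <alias-check result>);
     if (_1 != 0)
       goto <distributed copy>;
     else
       goto <original loop>;
*/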
7381 
7382 /* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
7383    to VALUE and update any immediate uses of it's LHS.  */
7384 
7385 void
7386 fold_loop_internal_call (gimple *g, tree value)
7387 {
7388   tree lhs = gimple_call_lhs (g);
7389   use_operand_p use_p;
7390   imm_use_iterator iter;
7391   gimple *use_stmt;
7392   gimple_stmt_iterator gsi = gsi_for_stmt (g);
7393 
7394   update_call_from_tree (&gsi, value);
7395   FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
7396     {
7397       FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7398 	SET_USE (use_p, value);
7399       update_stmt (use_stmt);
7400     }
7401 }
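
/* For instance, move_sese_region_to_fn below removes a stale
   LOOP_DIST_ALIAS call by folding it to its second ("default")
   argument:

     fold_loop_internal_call (g, gimple_call_arg (g, 1));
*/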
7402 
7403 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7404    EXIT_BB to function DEST_CFUN.  The whole region is replaced by a
7405    single basic block in the original CFG and the new basic block is
7406    returned.  DEST_CFUN must not have a CFG yet.
7407 
7408    Note that the region need not be a pure SESE region.  Blocks inside
7409    the region may contain calls to abort/exit.  The only restriction
7410    is that ENTRY_BB should be the only entry point and it must
7411    dominate EXIT_BB.
7412 
7413    Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7414    function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7415    to the new function.
7416 
7417    All local variables referenced in the region are assumed to be in
7418    the corresponding BLOCK_VARS and unexpanded variable lists
7419    associated with DEST_CFUN.
7420 
7421    TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7422    reimplement move_sese_region_to_fn by duplicating the region rather than
7423    moving it.  */
7424 
7425 basic_block
7426 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7427 		        basic_block exit_bb, tree orig_block)
7428 {
7429   vec<basic_block> bbs, dom_bbs;
7430   basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7431   basic_block after, bb, *entry_pred, *exit_succ, abb;
7432   struct function *saved_cfun = cfun;
7433   int *entry_flag, *exit_flag;
7434   profile_probability *entry_prob, *exit_prob;
7435   unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7436   edge e;
7437   edge_iterator ei;
7438   htab_t new_label_map;
7439   hash_map<void *, void *> *eh_map;
7440   struct loop *loop = entry_bb->loop_father;
7441   struct loop *loop0 = get_loop (saved_cfun, 0);
7442   struct move_stmt_d d;
7443 
7444   /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7445      region.  */
7446   gcc_assert (entry_bb != exit_bb
7447               && (!exit_bb
7448 		  || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7449 
7450   /* Collect all the blocks in the region.  Manually add ENTRY_BB
7451      because it won't be added by dfs_enumerate_from.  */
7452   bbs.create (0);
7453   bbs.safe_push (entry_bb);
7454   gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7455 
7456   if (flag_checking)
7457     verify_sese (entry_bb, exit_bb, &bbs);
7458 
7459   /* The blocks that used to be dominated by something in BBS will now be
7460      dominated by the new block.  */
7461   dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7462 				     bbs.address (),
7463 				     bbs.length ());
7464 
7465   /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG.  We need to remember
7466      the predecessor edges to ENTRY_BB and the successor edges to
7467      EXIT_BB so that we can re-attach them to the new basic block that
7468      will replace the region.  */
7469   num_entry_edges = EDGE_COUNT (entry_bb->preds);
7470   entry_pred = XNEWVEC (basic_block, num_entry_edges);
7471   entry_flag = XNEWVEC (int, num_entry_edges);
7472   entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7473   i = 0;
7474   for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7475     {
7476       entry_prob[i] = e->probability;
7477       entry_flag[i] = e->flags;
7478       entry_pred[i++] = e->src;
7479       remove_edge (e);
7480     }
7481 
7482   if (exit_bb)
7483     {
7484       num_exit_edges = EDGE_COUNT (exit_bb->succs);
7485       exit_succ = XNEWVEC (basic_block, num_exit_edges);
7486       exit_flag = XNEWVEC (int, num_exit_edges);
7487       exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7488       i = 0;
7489       for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7490 	{
7491 	  exit_prob[i] = e->probability;
7492 	  exit_flag[i] = e->flags;
7493 	  exit_succ[i++] = e->dest;
7494 	  remove_edge (e);
7495 	}
7496     }
7497   else
7498     {
7499       num_exit_edges = 0;
7500       exit_succ = NULL;
7501       exit_flag = NULL;
7502       exit_prob = NULL;
7503     }
7504 
7505   /* Switch context to the child function to initialize DEST_FN's CFG.  */
7506   gcc_assert (dest_cfun->cfg == NULL);
7507   push_cfun (dest_cfun);
7508 
7509   init_empty_tree_cfg ();
7510 
7511   /* Initialize EH information for the new function.  */
7512   eh_map = NULL;
7513   new_label_map = NULL;
7514   if (saved_cfun->eh)
7515     {
7516       eh_region region = NULL;
7517       bool all = false;
7518 
7519       FOR_EACH_VEC_ELT (bbs, i, bb)
7520 	{
7521 	  region = find_outermost_region_in_block (saved_cfun, bb, region, &all);
7522 	  if (all)
7523 	    break;
7524 	}
7525 
7526       init_eh_for_function ();
7527       if (region != NULL || all)
7528 	{
7529 	  new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7530 	  eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7531 					 new_label_mapper, new_label_map);
7532 	}
7533     }
7534 
7535   /* Initialize an empty loop tree.  */
7536   struct loops *loops = ggc_cleared_alloc<struct loops> ();
7537   init_loops_structure (dest_cfun, loops, 1);
7538   loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7539   set_loops_for_fn (dest_cfun, loops);
7540 
7541   vec<loop_p, va_gc> *larray = get_loops (saved_cfun)->copy ();
7542 
7543   /* Move the outlined loop tree part.  */
7544   num_nodes = bbs.length ();
7545   FOR_EACH_VEC_ELT (bbs, i, bb)
7546     {
7547       if (bb->loop_father->header == bb)
7548 	{
7549 	  struct loop *this_loop = bb->loop_father;
7550 	  struct loop *outer = loop_outer (this_loop);
7551 	  if (outer == loop
7552 	      /* If the SESE region contains some bbs ending with
7553 		 a noreturn call, those are considered to belong
7554 		 to the outermost loop in saved_cfun, rather than
7555 		 the entry_bb's loop_father.  */
7556 	      || outer == loop0)
7557 	    {
7558 	      if (outer != loop)
7559 		num_nodes -= this_loop->num_nodes;
7560 	      flow_loop_tree_node_remove (bb->loop_father);
7561 	      flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7562 	      fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7563 	    }
7564 	}
7565       else if (bb->loop_father == loop0 && loop0 != loop)
7566 	num_nodes--;
7567 
7568       /* Remove loop exits from the outlined region.  */
7569       if (loops_for_fn (saved_cfun)->exits)
7570 	FOR_EACH_EDGE (e, ei, bb->succs)
7571 	  {
7572 	    struct loops *l = loops_for_fn (saved_cfun);
7573 	    loop_exit **slot
7574 	      = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7575 					       NO_INSERT);
7576 	    if (slot)
7577 	      l->exits->clear_slot (slot);
7578 	  }
7579     }
7580 
7581   /* Adjust the number of blocks in the tree root of the outlined part.  */
7582   get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7583 
7584   /* Setup a mapping to be used by move_block_to_fn.  */
7585   loop->aux = current_loops->tree_root;
7586   loop0->aux = current_loops->tree_root;
7587 
7588   /* Fix up orig_loop_num.  If the loop it refers to has been moved
7589      to dest_cfun, update the orig_loop_num field, otherwise clear it.  */
7590   struct loop *dloop;
7591   signed char *moved_orig_loop_num = NULL;
7592   FOR_EACH_LOOP_FN (dest_cfun, dloop, 0)
7593     if (dloop->orig_loop_num)
7594       {
7595 	if (moved_orig_loop_num == NULL)
7596 	  moved_orig_loop_num
7597 	    = XCNEWVEC (signed char, vec_safe_length (larray));
7598 	if ((*larray)[dloop->orig_loop_num] != NULL
7599 	    && get_loop (saved_cfun, dloop->orig_loop_num) == NULL)
7600 	  {
7601 	    if (moved_orig_loop_num[dloop->orig_loop_num] >= 0
7602 		&& moved_orig_loop_num[dloop->orig_loop_num] < 2)
7603 	      moved_orig_loop_num[dloop->orig_loop_num]++;
7604 	    dloop->orig_loop_num = (*larray)[dloop->orig_loop_num]->num;
7605 	  }
7606 	else
7607 	  {
7608 	    moved_orig_loop_num[dloop->orig_loop_num] = -1;
7609 	    dloop->orig_loop_num = 0;
7610 	  }
7611       }
7612   pop_cfun ();
7613 
7614   if (moved_orig_loop_num)
7615     {
7616       FOR_EACH_VEC_ELT (bbs, i, bb)
7617 	{
7618 	  gimple *g = find_loop_dist_alias (bb);
7619 	  if (g == NULL)
7620 	    continue;
7621 
7622 	  int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7623 	  gcc_assert (orig_loop_num
7624 		      && (unsigned) orig_loop_num < vec_safe_length (larray));
7625 	  if (moved_orig_loop_num[orig_loop_num] == 2)
7626 	    {
7627 	      /* If we have moved both loops with this orig_loop_num into
7628 		 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
7629 		 too, update the first argument.  */
7630 	      gcc_assert ((*larray)[orig_loop_num] != NULL
7631 			  && (get_loop (saved_cfun, orig_loop_num)
7632 			      == NULL));
7633 	      tree t = build_int_cst (integer_type_node,
7634 				      (*larray)[orig_loop_num]->num);
7635 	      gimple_call_set_arg (g, 0, t);
7636 	      update_stmt (g);
7637 	      /* Make sure the following loop will not update it.  */
7638 	      moved_orig_loop_num[orig_loop_num] = 0;
7639 	    }
7640 	  else
7641 	    /* Otherwise at least one of the loops stayed in saved_cfun.
7642 	       Remove the LOOP_DIST_ALIAS call.  */
7643 	    fold_loop_internal_call (g, gimple_call_arg (g, 1));
7644 	}
7645       FOR_EACH_BB_FN (bb, saved_cfun)
7646 	{
7647 	  gimple *g = find_loop_dist_alias (bb);
7648 	  if (g == NULL)
7649 	    continue;
7650 	  int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7651 	  gcc_assert (orig_loop_num
7652 		      && (unsigned) orig_loop_num < vec_safe_length (larray));
7653 	  if (moved_orig_loop_num[orig_loop_num])
7654 	    /* LOOP_DIST_ALIAS call remained in saved_cfun, if at least one
7655 	       of the corresponding loops was moved, remove it.  */
7656 	    fold_loop_internal_call (g, gimple_call_arg (g, 1));
7657 	}
7658       XDELETEVEC (moved_orig_loop_num);
7659     }
7660   ggc_free (larray);
7661 
7662   /* Move blocks from BBS into DEST_CFUN.  */
7663   gcc_assert (bbs.length () >= 2);
7664   after = dest_cfun->cfg->x_entry_block_ptr;
7665   hash_map<tree, tree> vars_map;
7666 
7667   memset (&d, 0, sizeof (d));
7668   d.orig_block = orig_block;
7669   d.new_block = DECL_INITIAL (dest_cfun->decl);
7670   d.from_context = cfun->decl;
7671   d.to_context = dest_cfun->decl;
7672   d.vars_map = &vars_map;
7673   d.new_label_map = new_label_map;
7674   d.eh_map = eh_map;
7675   d.remap_decls_p = true;
7676 
7677   if (gimple_in_ssa_p (cfun))
7678     for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7679       {
7680 	tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7681 	set_ssa_default_def (dest_cfun, arg, narg);
7682 	vars_map.put (arg, narg);
7683       }
7684 
7685   FOR_EACH_VEC_ELT (bbs, i, bb)
7686     {
7687       /* No need to update edge counts on the last block.  They have
7688 	 already been updated earlier when we detached the region from
7689 	 the original CFG.  */
7690       move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7691       after = bb;
7692     }
7693 
7694   /* Adjust the maximum clique used.  */
7695   dest_cfun->last_clique = saved_cfun->last_clique;
7696 
7697   loop->aux = NULL;
7698   loop0->aux = NULL;
7699   /* Loop sizes are no longer correct, fix them up.  */
7700   loop->num_nodes -= num_nodes;
7701   for (struct loop *outer = loop_outer (loop);
7702        outer; outer = loop_outer (outer))
7703     outer->num_nodes -= num_nodes;
7704   loop0->num_nodes -= bbs.length () - num_nodes;
7705 
7706   if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7707     {
7708       struct loop *aloop;
7709       for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7710 	if (aloop != NULL)
7711 	  {
7712 	    if (aloop->simduid)
7713 	      {
7714 		replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7715 					   d.to_context);
7716 		dest_cfun->has_simduid_loops = true;
7717 	      }
7718 	    if (aloop->force_vectorize)
7719 	      dest_cfun->has_force_vectorize_loops = true;
7720 	  }
7721     }
7722 
7723   /* Rewire BLOCK_SUBBLOCKS of orig_block.  */
7724   if (orig_block)
7725     {
7726       tree block;
7727       gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7728 		  == NULL_TREE);
7729       BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7730 	= BLOCK_SUBBLOCKS (orig_block);
7731       for (block = BLOCK_SUBBLOCKS (orig_block);
7732 	   block; block = BLOCK_CHAIN (block))
7733 	BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7734       BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7735     }
7736 
7737   replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7738 				    &vars_map, dest_cfun->decl);
7739 
7740   if (new_label_map)
7741     htab_delete (new_label_map);
7742   if (eh_map)
7743     delete eh_map;
7744 
7745   if (gimple_in_ssa_p (cfun))
7746     {
7747       /* We need to release ssa-names in a defined order, so first find them,
7748 	 and then iterate in ascending version order.  */
7749       bitmap release_names = BITMAP_ALLOC (NULL);
7750       vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7751       bitmap_iterator bi;
7752       unsigned i;
7753       EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7754 	release_ssa_name (ssa_name (i));
7755       BITMAP_FREE (release_names);
7756     }
7757 
7758   /* Rewire the entry and exit blocks.  The successor to the entry
7759      block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7760      the child function.  Similarly, the predecessor of DEST_FN's
7761      EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR.  We
7762      need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7763      various CFG manipulation function get to the right CFG.
7764 
7765      FIXME, this is silly.  The CFG ought to become a parameter to
7766      these helpers.  */
7767   push_cfun (dest_cfun);
7768   ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
7769   make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7770   if (exit_bb)
7771     {
7772       make_single_succ_edge (exit_bb,  EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7773       EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
7774     }
7775   else
7776     EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
7777   pop_cfun ();
7778 
7779   /* Back in the original function, the SESE region has disappeared,
7780      create a new basic block in its place.  */
7781   bb = create_empty_bb (entry_pred[0]);
7782   if (current_loops)
7783     add_bb_to_loop (bb, loop);
7784   for (i = 0; i < num_entry_edges; i++)
7785     {
7786       e = make_edge (entry_pred[i], bb, entry_flag[i]);
7787       e->probability = entry_prob[i];
7788     }
7789 
7790   for (i = 0; i < num_exit_edges; i++)
7791     {
7792       e = make_edge (bb, exit_succ[i], exit_flag[i]);
7793       e->probability = exit_prob[i];
7794     }
7795 
7796   set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7797   FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7798     set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7799   dom_bbs.release ();
7800 
7801   if (exit_bb)
7802     {
7803       free (exit_prob);
7804       free (exit_flag);
7805       free (exit_succ);
7806     }
7807   free (entry_prob);
7808   free (entry_flag);
7809   free (entry_pred);
7810   bbs.release ();
7811 
7812   return bb;
7813 }
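
/* A typical caller is the OMP expander, which outlines the body of a
   parallel region into a freshly built function (a sketch, assuming
   CHILD_FN is a FUNCTION_DECL whose struct function has no CFG yet):

     struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
     basic_block new_bb
       = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);
*/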
7814 
7815 /* Dump default def DEF to file FILE using FLAGS and indentation
7816    SPC.  */
7817 
7818 static void
7819 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
7820 {
7821   for (int i = 0; i < spc; ++i)
7822     fprintf (file, " ");
7823   dump_ssaname_info_to_file (file, def, spc);
7824 
7825   print_generic_expr (file, TREE_TYPE (def), flags);
7826   fprintf (file, " ");
7827   print_generic_expr (file, def, flags);
7828   fprintf (file, " = ");
7829   print_generic_expr (file, SSA_NAME_VAR (def), flags);
7830   fprintf (file, ";\n");
7831 }
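
/* For the default definition of an int parameter N this prints
   something like (illustrative):

     int n_1(D) = n;
*/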
7832 
7833 /* Print no_sanitize attribute to FILE for a given attribute VALUE.  */
7834 
7835 static void
7836 print_no_sanitize_attr_value (FILE *file, tree value)
7837 {
7838   unsigned int flags = tree_to_uhwi (value);
7839   bool first = true;
7840   for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
7841     {
7842       if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
7843 	{
7844 	  if (!first)
7845 	    fprintf (file, " | ");
7846 	  fprintf (file, "%s", sanitizer_opts[i].name);
7847 	  first = false;
7848 	}
7849     }
7850 }
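
/* E.g. a VALUE with the address and undefined sanitizer bits set is
   printed as "address | undefined" (illustrative; the exact names come
   from the sanitizer_opts table in opts.c).  */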
7851 
7852 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in
7853    dumpfile.h).  */
7854 
7855 void
7856 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
7857 {
7858   tree arg, var, old_current_fndecl = current_function_decl;
7859   struct function *dsf;
7860   bool ignore_topmost_bind = false, any_var = false;
7861   basic_block bb;
7862   tree chain;
7863   bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7864 		  && decl_is_tm_clone (fndecl));
7865   struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7866 
7867   if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7868     {
7869       fprintf (file, "__attribute__((");
7870 
7871       bool first = true;
7872       tree chain;
7873       for (chain = DECL_ATTRIBUTES (fndecl); chain;
7874 	   first = false, chain = TREE_CHAIN (chain))
7875 	{
7876 	  if (!first)
7877 	    fprintf (file, ", ");
7878 
7879 	  tree name = get_attribute_name (chain);
7880 	  print_generic_expr (file, name, dump_flags);
7881 	  if (TREE_VALUE (chain) != NULL_TREE)
7882 	    {
7883 	      fprintf (file, " (");
7884 
7885 	      if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
7886 		print_no_sanitize_attr_value (file, TREE_VALUE (chain));
7887 	      else
7888 		print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7889 	      fprintf (file, ")");
7890 	    }
7891 	}
7892 
7893       fprintf (file, "))\n");
7894     }
7895 
7896   current_function_decl = fndecl;
7897   if (flags & TDF_GIMPLE)
7898     {
7899       print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
7900 			  dump_flags | TDF_SLIM);
7901       fprintf (file, " __GIMPLE (%s)\n%s (",
7902 	       (fun->curr_properties & PROP_ssa) ? "ssa"
7903 	       : (fun->curr_properties & PROP_cfg) ? "cfg"
7904 	       : "",
7905 	       function_name (fun));
7906     }
7907   else
7908     fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7909 
7910   arg = DECL_ARGUMENTS (fndecl);
7911   while (arg)
7912     {
7913       print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7914       fprintf (file, " ");
7915       print_generic_expr (file, arg, dump_flags);
7916       if (DECL_CHAIN (arg))
7917 	fprintf (file, ", ");
7918       arg = DECL_CHAIN (arg);
7919     }
7920   fprintf (file, ")\n");
7921 
7922   dsf = DECL_STRUCT_FUNCTION (fndecl);
7923   if (dsf && (flags & TDF_EH))
7924     dump_eh_tree (file, dsf);
7925 
7926   if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7927     {
7928       dump_node (fndecl, TDF_SLIM | flags, file);
7929       current_function_decl = old_current_fndecl;
7930       return;
7931     }
7932 
7933   /* When GIMPLE is lowered, the variables are no longer available in
7934      BIND_EXPRs, so display them separately.  */
7935   if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7936     {
7937       unsigned ix;
7938       ignore_topmost_bind = true;
7939 
7940       fprintf (file, "{\n");
7941       if (gimple_in_ssa_p (fun)
7942 	  && (flags & TDF_ALIAS))
7943 	{
7944 	  for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
7945 	       arg = DECL_CHAIN (arg))
7946 	    {
7947 	      tree def = ssa_default_def (fun, arg);
7948 	      if (def)
7949 		dump_default_def (file, def, 2, flags);
7950 	    }
7951 
7952 	  tree res = DECL_RESULT (fun->decl);
7953 	  if (res != NULL_TREE
7954 	      && DECL_BY_REFERENCE (res))
7955 	    {
7956 	      tree def = ssa_default_def (fun, res);
7957 	      if (def)
7958 		dump_default_def (file, def, 2, flags);
7959 	    }
7960 
7961 	  tree static_chain = fun->static_chain_decl;
7962 	  if (static_chain != NULL_TREE)
7963 	    {
7964 	      tree def = ssa_default_def (fun, static_chain);
7965 	      if (def)
7966 		dump_default_def (file, def, 2, flags);
7967 	    }
7968 	}
7969 
7970       if (!vec_safe_is_empty (fun->local_decls))
7971 	FOR_EACH_LOCAL_DECL (fun, ix, var)
7972 	  {
7973 	    print_generic_decl (file, var, flags);
7974 	    fprintf (file, "\n");
7975 
7976 	    any_var = true;
7977 	  }
7978 
7979       tree name;
7980 
7981       if (gimple_in_ssa_p (cfun))
7982 	FOR_EACH_SSA_NAME (ix, name, cfun)
7983 	  {
7984 	    if (!SSA_NAME_VAR (name))
7985 	      {
7986 		fprintf (file, "  ");
7987 		print_generic_expr (file, TREE_TYPE (name), flags);
7988 		fprintf (file, " ");
7989 		print_generic_expr (file, name, flags);
7990 		fprintf (file, ";\n");
7991 
7992 		any_var = true;
7993 	      }
7994 	  }
7995     }
7996 
7997   if (fun && fun->decl == fndecl
7998       && fun->cfg
7999       && basic_block_info_for_fn (fun))
8000     {
8001       /* If the CFG has been built, emit a CFG-based dump.  */
8002       if (!ignore_topmost_bind)
8003 	fprintf (file, "{\n");
8004 
8005       if (any_var && n_basic_blocks_for_fn (fun))
8006 	fprintf (file, "\n");
8007 
8008       FOR_EACH_BB_FN (bb, fun)
8009 	dump_bb (file, bb, 2, flags);
8010 
8011       fprintf (file, "}\n");
8012     }
8013   else if (fun->curr_properties & PROP_gimple_any)
8014     {
8015       /* The function is now in GIMPLE form but the CFG has not been
8016 	 built yet.  Emit the single sequence of GIMPLE statements
8017 	 that make up its body.  */
8018       gimple_seq body = gimple_body (fndecl);
8019 
8020       if (gimple_seq_first_stmt (body)
8021 	  && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
8022 	  && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
8023 	print_gimple_seq (file, body, 0, flags);
8024       else
8025 	{
8026 	  if (!ignore_topmost_bind)
8027 	    fprintf (file, "{\n");
8028 
8029 	  if (any_var)
8030 	    fprintf (file, "\n");
8031 
8032 	  print_gimple_seq (file, body, 2, flags);
8033 	  fprintf (file, "}\n");
8034 	}
8035     }
8036   else
8037     {
8038       int indent;
8039 
8040       /* Make a tree based dump.  */
8041       chain = DECL_SAVED_TREE (fndecl);
8042       if (chain && TREE_CODE (chain) == BIND_EXPR)
8043 	{
8044 	  if (ignore_topmost_bind)
8045 	    {
8046 	      chain = BIND_EXPR_BODY (chain);
8047 	      indent = 2;
8048 	    }
8049 	  else
8050 	    indent = 0;
8051 	}
8052       else
8053 	{
8054 	  if (!ignore_topmost_bind)
8055 	    {
8056 	      fprintf (file, "{\n");
8057 	      /* No topmost bind, pretend it's ignored for later.  */
8058 	      ignore_topmost_bind = true;
8059 	    }
8060 	  indent = 2;
8061 	}
8062 
8063       if (any_var)
8064 	fprintf (file, "\n");
8065 
8066       print_generic_stmt_indented (file, chain, flags, indent);
8067       if (ignore_topmost_bind)
8068 	fprintf (file, "}\n");
8069     }
8070 
8071   if (flags & TDF_ENUMERATE_LOCALS)
8072     dump_enumerated_decls (file, flags);
8073   fprintf (file, "\n\n");
8074 
8075   current_function_decl = old_current_fndecl;
8076 }
8077 
8078 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h).  */
8079 
8080 DEBUG_FUNCTION void
8081 debug_function (tree fn, dump_flags_t flags)
8082 {
8083   dump_function_to_file (fn, stderr, flags);
8084 }
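
/* Convenient from a debugger, e.g. (assuming cfun is set up):

     (gdb) call debug_function (cfun->decl, TDF_NONE)
*/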
8085 
8086 
8087 /* Print on FILE the indexes for the predecessors of basic_block BB.  */
8088 
8089 static void
8090 print_pred_bbs (FILE *file, basic_block bb)
8091 {
8092   edge e;
8093   edge_iterator ei;
8094 
8095   FOR_EACH_EDGE (e, ei, bb->preds)
8096     fprintf (file, "bb_%d ", e->src->index);
8097 }
8098 
8099 
8100 /* Print on FILE the indexes for the successors of basic_block BB.  */
8101 
8102 static void
8103 print_succ_bbs (FILE *file, basic_block bb)
8104 {
8105   edge e;
8106   edge_iterator ei;
8107 
8108   FOR_EACH_EDGE (e, ei, bb->succs)
8109     fprintf (file, "bb_%d ", e->dest->index);
8110 }
8111 
8112 /* Print basic block BB to FILE, according to the VERBOSITY level.  */
8113 
8114 void
8115 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
8116 {
8117   char *s_indent = (char *) alloca ((size_t) indent + 1);
8118   memset ((void *) s_indent, ' ', (size_t) indent);
8119   s_indent[indent] = '\0';
8120 
8121   /* Print basic_block's header.  */
8122   if (verbosity >= 2)
8123     {
8124       fprintf (file, "%s  bb_%d (preds = {", s_indent, bb->index);
8125       print_pred_bbs (file, bb);
8126       fprintf (file, "}, succs = {");
8127       print_succ_bbs (file, bb);
8128       fprintf (file, "})\n");
8129     }
8130 
8131   /* Print basic_block's body.  */
8132   if (verbosity >= 3)
8133     {
8134       fprintf (file, "%s  {\n", s_indent);
8135       dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
8136       fprintf (file, "%s  }\n", s_indent);
8137     }
8138 }
8139 
8140 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
8141 
8142 /* Pretty print LOOP on FILE, indented INDENT spaces.  Depending on
8143    the VERBOSITY level, this outputs the contents of the loop or just
8144    its structure.  */
8145 
8146 static void
8147 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
8148 {
8149   char *s_indent;
8150   basic_block bb;
8151 
8152   if (loop == NULL)
8153     return;
8154 
8155   s_indent = (char *) alloca ((size_t) indent + 1);
8156   memset ((void *) s_indent, ' ', (size_t) indent);
8157   s_indent[indent] = '\0';
8158 
8159   /* Print loop's header.  */
8160   fprintf (file, "%sloop_%d (", s_indent, loop->num);
8161   if (loop->header)
8162     fprintf (file, "header = %d", loop->header->index);
8163   else
8164     {
8165       fprintf (file, "deleted)\n");
8166       return;
8167     }
8168   if (loop->latch)
8169     fprintf (file, ", latch = %d", loop->latch->index);
8170   else
8171     fprintf (file, ", multiple latches");
8172   fprintf (file, ", niter = ");
8173   print_generic_expr (file, loop->nb_iterations);
8174 
8175   if (loop->any_upper_bound)
8176     {
8177       fprintf (file, ", upper_bound = ");
8178       print_decu (loop->nb_iterations_upper_bound, file);
8179     }
8180   if (loop->any_likely_upper_bound)
8181     {
8182       fprintf (file, ", likely_upper_bound = ");
8183       print_decu (loop->nb_iterations_likely_upper_bound, file);
8184     }
8185 
8186   if (loop->any_estimate)
8187     {
8188       fprintf (file, ", estimate = ");
8189       print_decu (loop->nb_iterations_estimate, file);
8190     }
8191   if (loop->unroll)
8192     fprintf (file, ", unroll = %d", loop->unroll);
8193   fprintf (file, ")\n");
8194 
8195   /* Print loop's body.  */
8196   if (verbosity >= 1)
8197     {
8198       fprintf (file, "%s{\n", s_indent);
8199       FOR_EACH_BB_FN (bb, cfun)
8200 	if (bb->loop_father == loop)
8201 	  print_loops_bb (file, bb, indent, verbosity);
8202 
8203       print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
8204       fprintf (file, "%s}\n", s_indent);
8205     }
8206 }
8207 
8208 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8209    spaces.  Depending on the VERBOSITY level, this outputs the contents
8210    of each loop or just its structure.  */
8211 
8212 static void
8213 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
8214 			 int verbosity)
8215 {
8216   if (loop == NULL)
8217     return;
8218 
8219   print_loop (file, loop, indent, verbosity);
8220   print_loop_and_siblings (file, loop->next, indent, verbosity);
8221 }
8222 
8223 /* Follow the CFG from the entry point of the current function and,
8224    on entry of a loop, pretty print the loop structure on FILE.  */
8225 
8226 void
8227 print_loops (FILE *file, int verbosity)
8228 {
8229   basic_block bb;
8230 
8231   bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8232   fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8233   if (bb && bb->loop_father)
8234     print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
8235 }
8236 
8237 /* Dump a loop.  */
8238 
8239 DEBUG_FUNCTION void
8240 debug (struct loop &ref)
8241 {
8242   print_loop (stderr, &ref, 0, /*verbosity*/0);
8243 }
8244 
8245 DEBUG_FUNCTION void
8246 debug (struct loop *ptr)
8247 {
8248   if (ptr)
8249     debug (*ptr);
8250   else
8251     fprintf (stderr, "<nil>\n");
8252 }
8253 
8254 /* Dump a loop verbosely.  */
8255 
8256 DEBUG_FUNCTION void
8257 debug_verbose (struct loop &ref)
8258 {
8259   print_loop (stderr, &ref, 0, /*verbosity*/3);
8260 }
8261 
8262 DEBUG_FUNCTION void
8263 debug_verbose (struct loop *ptr)
8264 {
8265   if (ptr)
8266     debug_verbose (*ptr);
8267   else
8268     fprintf (stderr, "<nil>\n");
8269 }
8270 
8271 
8272 /* Debugging loops structure at tree level, at some VERBOSITY level.  */
8273 
8274 DEBUG_FUNCTION void
8275 debug_loops (int verbosity)
8276 {
8277   print_loops (stderr, verbosity);
8278 }
8279 
8280 /* Print on stderr the code of LOOP, at some VERBOSITY level.  */
8281 
8282 DEBUG_FUNCTION void
8283 debug_loop (struct loop *loop, int verbosity)
8284 {
8285   print_loop (stderr, loop, 0, verbosity);
8286 }
8287 
8288 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8289    level.  */
8290 
8291 DEBUG_FUNCTION void
8292 debug_loop_num (unsigned num, int verbosity)
8293 {
8294   debug_loop (get_loop (cfun, num), verbosity);
8295 }
8296 
8297 /* Return true if BB ends with a call, possibly followed by some
8298    instructions that must stay with the call.  Return false
8299    otherwise.  */
8300 
8301 static bool
8302 gimple_block_ends_with_call_p (basic_block bb)
8303 {
8304   gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8305   return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8306 }
8307 
8308 
8309 /* Return true if BB ends with a conditional branch.  Return false
8310    otherwise.  */
8311 
8312 static bool
8313 gimple_block_ends_with_condjump_p (const_basic_block bb)
8314 {
8315   gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8316   return (stmt && gimple_code (stmt) == GIMPLE_COND);
8317 }
8318 
8319 
8320 /* Return true if statement T may terminate execution of BB in ways not
8321    explicitly represented in the CFG.  */
8322 
8323 bool
8324 stmt_can_terminate_bb_p (gimple *t)
8325 {
8326   tree fndecl = NULL_TREE;
8327   int call_flags = 0;
8328 
8329   /* An EH exception not handled internally terminates execution of the
8330      whole function.  */
8331   if (stmt_can_throw_external (cfun, t))
8332     return true;
8333 
8334   /* NORETURN and LONGJMP calls already have an edge to exit.
8335      CONST and PURE calls do not need one.
8336      We don't currently check for CONST and PURE here, although
8337      it would be a good idea, because those attributes are
8338      figured out from the RTL in mark_constant_function, and
8339      the counter incrementation code from -fprofile-arcs
8340      leads to different results from -fbranch-probabilities.  */
8341   if (is_gimple_call (t))
8342     {
8343       fndecl = gimple_call_fndecl (t);
8344       call_flags = gimple_call_flags (t);
8345     }
8346 
8347   if (is_gimple_call (t)
8348       && fndecl
8349       && fndecl_built_in_p (fndecl)
8350       && (call_flags & ECF_NOTHROW)
8351       && !(call_flags & ECF_RETURNS_TWICE)
8352       /* fork() doesn't really return twice, but the effect of
8353          wrapping it in __gcov_fork() which calls __gcov_flush()
8354 	 and clears the counters before forking has the same
8355 	 effect as returning twice.  Force a fake edge.  */
8356       && !fndecl_built_in_p (fndecl, BUILT_IN_FORK))
8357     return false;
8358 
8359   if (is_gimple_call (t))
8360     {
8361       edge_iterator ei;
8362       edge e;
8363       basic_block bb;
8364 
8365       if (call_flags & (ECF_PURE | ECF_CONST)
8366 	  && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8367 	return false;
8368 
8369       /* A function call may do longjmp, terminate the program or do other
8370 	 things.  Special-case noreturn calls: when such a call has no
8371 	 non-fake edges out, that lack of edges already represents the fact.  */
8372       if (!(call_flags & ECF_NORETURN))
8373 	return true;
8374 
8375       bb = gimple_bb (t);
8376       FOR_EACH_EDGE (e, ei, bb->succs)
8377 	if ((e->flags & EDGE_FAKE) == 0)
8378 	  return true;
8379     }
8380 
8381   if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8382     if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8383       return true;
8384 
8385   return false;
8386 }
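
/* E.g. a call to the nothrow, pure builtin strlen cannot terminate its
   block, while a call to an unknown external function can: it may call
   exit, longjmp, or simply never return.  */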
8387 
8388 
8389 /* Add fake edges to the function exit for any non-constant and
8390    non-noreturn calls (or noreturn calls with EH/abnormal edges), and for
8391    volatile inline assembly, in the bitmap of blocks specified by BLOCKS,
8392    or in the whole CFG if BLOCKS is zero.  Return the number of blocks
8393    that were split.
8394 
8395    The goal is to expose cases in which entering a basic block does
8396    not imply that all subsequent instructions must be executed.  */
8397 
8398 static int
8399 gimple_flow_call_edges_add (sbitmap blocks)
8400 {
8401   int i;
8402   int blocks_split = 0;
8403   int last_bb = last_basic_block_for_fn (cfun);
8404   bool check_last_block = false;
8405 
8406   if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8407     return 0;
8408 
8409   if (! blocks)
8410     check_last_block = true;
8411   else
8412     check_last_block = bitmap_bit_p (blocks,
8413 				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8414 
8415   /* In the last basic block, before epilogue generation, there will be
8416      a fallthru edge to EXIT.  Special care is required if the last insn
8417      of the last basic block is a call because make_edge folds duplicate
8418      edges, which would result in the fallthru edge also being marked
8419      fake, which would result in the fallthru edge being removed by
8420      remove_fake_edges, which would result in an invalid CFG.
8421 
8422      Moreover, we can't elide the outgoing fake edge, since the block
8423      profiler needs to take this into account in order to solve the minimal
8424      spanning tree in the case that the call doesn't return.
8425 
8426      Handle this by adding a dummy instruction in a new last basic block.  */
8427   if (check_last_block)
8428     {
8429       basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8430       gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8431       gimple *t = NULL;
8432 
8433       if (!gsi_end_p (gsi))
8434 	t = gsi_stmt (gsi);
8435 
8436       if (t && stmt_can_terminate_bb_p (t))
8437 	{
8438 	  edge e;
8439 
8440 	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8441 	  if (e)
8442 	    {
8443 	      gsi_insert_on_edge (e, gimple_build_nop ());
8444 	      gsi_commit_edge_inserts ();
8445 	    }
8446 	}
8447     }
8448 
8449   /* Now add fake edges to the function exit for any non-constant
8450      calls since there is no way that we can determine if they will
8451      return or not...  */
8452   for (i = 0; i < last_bb; i++)
8453     {
8454       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8455       gimple_stmt_iterator gsi;
8456       gimple *stmt, *last_stmt;
8457 
8458       if (!bb)
8459 	continue;
8460 
8461       if (blocks && !bitmap_bit_p (blocks, i))
8462 	continue;
8463 
8464       gsi = gsi_last_nondebug_bb (bb);
8465       if (!gsi_end_p (gsi))
8466 	{
8467 	  last_stmt = gsi_stmt (gsi);
8468 	  do
8469 	    {
8470 	      stmt = gsi_stmt (gsi);
8471 	      if (stmt_can_terminate_bb_p (stmt))
8472 		{
8473 		  edge e;
8474 
8475 		  /* The handling above of the final block before the
8476 		     epilogue should be enough to verify that there is
8477 		     no edge to the exit block in CFG already.
8478 		     Calling make_edge in such case would cause us to
8479 		     mark that edge as fake and remove it later.  */
8480 		  if (flag_checking && stmt == last_stmt)
8481 		    {
8482 		      e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8483 		      gcc_assert (e == NULL);
8484 		    }
8485 
8486 		  /* Note that the following may create a new basic block
8487 		     and renumber the existing basic blocks.  */
8488 		  if (stmt != last_stmt)
8489 		    {
8490 		      e = split_block (bb, stmt);
8491 		      if (e)
8492 			blocks_split++;
8493 		    }
8494 		  e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8495 		  e->probability = profile_probability::guessed_never ();
8496 		}
8497 	      gsi_prev (&gsi);
8498 	    }
8499 	  while (!gsi_end_p (gsi));
8500 	}
8501     }
8502 
8503   if (blocks_split)
8504     checking_verify_flow_info ();
8505 
8506   return blocks_split;
8507 }
8508 
8509 /* Removes edge E and all the blocks dominated by it, and updates dominance
8510    information.  The IL in E->src needs to be updated separately.
8511    If dominance info is not available, only the edge E is removed.  */
8512 
8513 void
8514 remove_edge_and_dominated_blocks (edge e)
8515 {
8516   vec<basic_block> bbs_to_remove = vNULL;
8517   vec<basic_block> bbs_to_fix_dom = vNULL;
8518   edge f;
8519   edge_iterator ei;
8520   bool none_removed = false;
8521   unsigned i;
8522   basic_block bb, dbb;
8523   bitmap_iterator bi;
8524 
8525   /* If we are removing a path inside a non-root loop, that may change
8526      loop ownership of blocks or remove loops; mark loops for fixup.  */
8527   if (current_loops
8528       && loop_outer (e->src->loop_father) != NULL
8529       && e->src->loop_father == e->dest->loop_father)
8530     loops_state_set (LOOPS_NEED_FIXUP);
8531 
8532   if (!dom_info_available_p (CDI_DOMINATORS))
8533     {
8534       remove_edge (e);
8535       return;
8536     }
8537 
8538   /* No updating is needed for edges to exit.  */
8539   if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8540     {
8541       if (cfgcleanup_altered_bbs)
8542 	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8543       remove_edge (e);
8544       return;
8545     }
8546 
8547   /* First, we find the basic blocks to remove.  If E->dest has a predecessor
8548      that is not dominated by E->dest, then this set is empty.  Otherwise,
8549      all the basic blocks dominated by E->dest are removed.
8550 
8551      Also, to DF_IDOM we store the immediate dominators of the blocks in
8552      the dominance frontier of E (i.e., of the successors of the
8553      removed blocks, if there are any, and of E->dest otherwise).  */
8554   FOR_EACH_EDGE (f, ei, e->dest->preds)
8555     {
8556       if (f == e)
8557 	continue;
8558 
8559       if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8560 	{
8561 	  none_removed = true;
8562 	  break;
8563 	}
8564     }
8565 
8566   auto_bitmap df, df_idom;
8567   if (none_removed)
8568     bitmap_set_bit (df_idom,
8569 		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8570   else
8571     {
8572       bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8573       FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8574 	{
8575 	  FOR_EACH_EDGE (f, ei, bb->succs)
8576 	    {
8577 	      if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8578 		bitmap_set_bit (df, f->dest->index);
8579 	    }
8580 	}
8581       FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8582 	bitmap_clear_bit (df, bb->index);
8583 
8584       EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8585 	{
8586 	  bb = BASIC_BLOCK_FOR_FN (cfun, i);
8587 	  bitmap_set_bit (df_idom,
8588 			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8589 	}
8590     }
8591 
8592   if (cfgcleanup_altered_bbs)
8593     {
8594       /* Record the set of the altered basic blocks.  */
8595       bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8596       bitmap_ior_into (cfgcleanup_altered_bbs, df);
8597     }
8598 
8599   /* Remove E and the cancelled blocks.  */
8600   if (none_removed)
8601     remove_edge (e);
8602   else
8603     {
8604       /* Walk backwards so as to get a chance to substitute all
8605 	 released DEFs into debug stmts.  See
8606 	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8607 	 details.  */
8608       for (i = bbs_to_remove.length (); i-- > 0; )
8609 	delete_basic_block (bbs_to_remove[i]);
8610     }
8611 
8612   /* Update the dominance information.  The immediate dominator may change only
8613      for blocks whose immediate dominator belongs to DF_IDOM:
8614 
8615      Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8616      removal.  Let Z the arbitrary block such that idom(Z) = Y and
8617      Z dominates X after the removal.  Before removal, there exists a path P
8618      from Y to X that avoids Z.  Let F be the last edge on P that is
8619      removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
8620      dominates W, and because of P, Z does not dominate W), and W belongs to
8621      the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
8622   EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8623     {
8624       bb = BASIC_BLOCK_FOR_FN (cfun, i);
8625       for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8626 	   dbb;
8627 	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
8628 	bbs_to_fix_dom.safe_push (dbb);
8629     }
8630 
8631   iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8632 
8633   bbs_to_remove.release ();
8634   bbs_to_fix_dom.release ();
8635 }
8636 
8637 /* Purge dead EH edges from basic block BB.  */
8638 
8639 bool
8640 gimple_purge_dead_eh_edges (basic_block bb)
8641 {
8642   bool changed = false;
8643   edge e;
8644   edge_iterator ei;
8645   gimple *stmt = last_stmt (bb);
8646 
8647   if (stmt && stmt_can_throw_internal (cfun, stmt))
8648     return false;
8649 
8650   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8651     {
8652       if (e->flags & EDGE_EH)
8653 	{
8654 	  remove_edge_and_dominated_blocks (e);
8655 	  changed = true;
8656 	}
8657       else
8658 	ei_next (&ei);
8659     }
8660 
8661   return changed;
8662 }
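
/* A typical caller pattern after simplifying the last statement of a
   block (a sketch; maybe_clean_or_replace_eh_stmt is from tree-eh.c):

     if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt)
	 && gimple_purge_dead_eh_edges (bb))
       cfg_changed = true;
*/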
8663 
8664 /* Purge dead EH edges from the basic blocks listed in BLOCKS.  */
8665 
8666 bool
8667 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8668 {
8669   bool changed = false;
8670   unsigned i;
8671   bitmap_iterator bi;
8672 
8673   EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8674     {
8675       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8676 
8677       /* Earlier gimple_purge_dead_eh_edges could have removed
8678 	 this basic block already.  */
8679       gcc_assert (bb || changed);
8680       if (bb != NULL)
8681 	changed |= gimple_purge_dead_eh_edges (bb);
8682     }
8683 
8684   return changed;
8685 }
8686 
8687 /* Purge dead abnormal call edges from basic block BB.  */
8688 
8689 bool
8690 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8691 {
8692   bool changed = false;
8693   edge e;
8694   edge_iterator ei;
8695   gimple *stmt = last_stmt (bb);
8696 
8697   if (!cfun->has_nonlocal_label
8698       && !cfun->calls_setjmp)
8699     return false;
8700 
8701   if (stmt && stmt_can_make_abnormal_goto (stmt))
8702     return false;
8703 
8704   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8705     {
8706       if (e->flags & EDGE_ABNORMAL)
8707 	{
8708 	  if (e->flags & EDGE_FALLTHRU)
8709 	    e->flags &= ~EDGE_ABNORMAL;
8710 	  else
8711 	    remove_edge_and_dominated_blocks (e);
8712 	  changed = true;
8713 	}
8714       else
8715 	ei_next (&ei);
8716     }
8717 
8718   return changed;
8719 }
8720 
8721 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS.  */
8722 
8723 bool
8724 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8725 {
8726   bool changed = false;
8727   unsigned i;
8728   bitmap_iterator bi;
8729 
8730   EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8731     {
8732       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8733 
8734       /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8735 	 this basic block already.  */
8736       gcc_assert (bb || changed);
8737       if (bb != NULL)
8738 	changed |= gimple_purge_dead_abnormal_call_edges (bb);
8739     }
8740 
8741   return changed;
8742 }
8743 
8744 /* This function is called whenever a new edge is created or
8745    redirected.  */
8746 
8747 static void
8748 gimple_execute_on_growing_pred (edge e)
8749 {
8750   basic_block bb = e->dest;
8751 
8752   if (!gimple_seq_empty_p (phi_nodes (bb)))
8753     reserve_phi_args_for_new_edge (bb);
8754 }
8755 
8756 /* This function is called immediately before edge E is removed from
8757    the edge vector E->dest->preds.  */
8758 
8759 static void
8760 gimple_execute_on_shrinking_pred (edge e)
8761 {
8762   if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8763     remove_phi_args (e);
8764 }
8765 
8766 /*---------------------------------------------------------------------------
8767   Helper functions for Loop versioning
8768   ---------------------------------------------------------------------------*/
8769 
8770 /* Adjust the phi nodes of basic block FIRST.  Basic block SECOND is a
8771    copy of FIRST and both are dominated by basic block NEW_HEAD.  NEW_HEAD
8772    was created by splitting SECOND's incoming edge, so the edge from
8773    NEW_HEAD to SECOND received phi arguments from split_edge ().  Later an
8774    additional edge E was created to connect NEW_HEAD and FIRST.  This
8775    routine now adds on edge E the phi arguments that the NEW_HEAD to
8776    SECOND edge received as part of the edge splitting.  */
8777 
8778 static void
8779 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8780 				  basic_block new_head, edge e)
8781 {
8782   gphi *phi1, *phi2;
8783   gphi_iterator psi1, psi2;
8784   tree def;
8785   edge e2 = find_edge (new_head, second);
8786 
8787   /* Because NEW_HEAD has been created by splitting SECOND's incoming
8788      edge, we should always have an edge from NEW_HEAD to SECOND.  */
8789   gcc_assert (e2 != NULL);
8790 
8791   /* Browse all phi nodes of basic block SECOND and add the corresponding
8792      phi args to edge E for FIRST.  PHI args are always in correct order.  */
8793 
8794   for (psi2 = gsi_start_phis (second),
8795        psi1 = gsi_start_phis (first);
8796        !gsi_end_p (psi2) && !gsi_end_p (psi1);
8797        gsi_next (&psi2),  gsi_next (&psi1))
8798     {
8799       phi1 = psi1.phi ();
8800       phi2 = psi2.phi ();
8801       def = PHI_ARG_DEF (phi2, e2->dest_idx);
8802       add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8803     }
8804 }
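
/* A sketch of the shape this helper operates on (E is the edge later
   added to connect NEW_HEAD with FIRST; E2 is the edge that split_edge ()
   made from NEW_HEAD to SECOND):

	   NEW_HEAD
	   /      \
	  E        E2
	 /          \
      FIRST        SECOND (copy of FIRST)

   Each PHI node in FIRST gets, for E, the argument its counterpart PHI
   node in SECOND already has for E2.  */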
8805 
8806 
8807 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8808    SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
8809    the destination of the ELSE part.  */
8810 
8811 static void
8812 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8813 			       basic_block second_head ATTRIBUTE_UNUSED,
8814 			       basic_block cond_bb, void *cond_e)
8815 {
8816   gimple_stmt_iterator gsi;
8817   gimple *new_cond_expr;
8818   tree cond_expr = (tree) cond_e;
8819   edge e0;
8820 
8821   /* Build the new conditional expression.  */
8822   new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8823 					       NULL_TREE, NULL_TREE);
8824 
8825   /* Add new cond in cond_bb.  */
8826   gsi = gsi_last_bb (cond_bb);
8827   gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8828 
8829   /* Adjust edges appropriately to connect new head with first head
8830      as well as second head.  */
8831   e0 = single_succ_edge (cond_bb);
8832   e0->flags &= ~EDGE_FALLTHRU;
8833   e0->flags |= EDGE_FALSE_VALUE;
8834 }
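
/* Note that after gimple_lv_add_condition_to_bb runs, COND_BB ends in the
   new GIMPLE_COND and its single pre-existing successor edge E0 carries
   EDGE_FALSE_VALUE (towards FIRST_HEAD, the ELSE destination).
   Presumably the matching EDGE_TRUE_VALUE edge towards SECOND_HEAD is
   created by the caller of this hook, which would explain why FIRST_HEAD
   and SECOND_HEAD are otherwise unused here.  */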
8835 
8836 
8837 /* Do book-keeping of basic block BB for the profile consistency checker.
8838    Accumulate the size and time estimates in RECORD.  */
8839 static void
8840 gimple_account_profile_record (basic_block bb,
8841 			       struct profile_record *record)
8842 {
8843   gimple_stmt_iterator i;
8844   for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8845     {
8846       record->size
8847 	+= estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8848       if (bb->count.initialized_p ())
8849 	record->time
8850 	  += estimate_num_insns (gsi_stmt (i),
8851 				 &eni_time_weights) * bb->count.to_gcov_type ();
8852       else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8853 	record->time
8854 	  += estimate_num_insns (gsi_stmt (i),
8855 				 &eni_time_weights) * bb->count.to_frequency (cfun);
8856     }
8857 }
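
/* For example (made-up numbers): a statement estimated at 2 size units
   and 3 time units, in a block whose profile count is 100, adds 2 to
   RECORD->size and 3 * 100 = 300 to RECORD->time; with only a guessed
   profile the block frequency is used as the weight instead.  */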
8858 
8859 struct cfg_hooks gimple_cfg_hooks = {
8860   "gimple",
8861   gimple_verify_flow_info,
8862   gimple_dump_bb,		/* dump_bb  */
8863   gimple_dump_bb_for_graph,	/* dump_bb_for_graph  */
8864   create_bb,			/* create_basic_block  */
8865   gimple_redirect_edge_and_branch, /* redirect_edge_and_branch  */
8866   gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force  */
8867   gimple_can_remove_branch_p,	/* can_remove_branch_p  */
8868   remove_bb,			/* delete_basic_block  */
8869   gimple_split_block,		/* split_block  */
8870   gimple_move_block_after,	/* move_block_after  */
8871   gimple_can_merge_blocks_p,	/* can_merge_blocks_p  */
8872   gimple_merge_blocks,		/* merge_blocks  */
8873   gimple_predict_edge,		/* predict_edge  */
8874   gimple_predicted_by_p,	/* predicted_by_p  */
8875   gimple_can_duplicate_bb_p,	/* can_duplicate_block_p  */
8876   gimple_duplicate_bb,		/* duplicate_block  */
8877   gimple_split_edge,		/* split_edge  */
8878   gimple_make_forwarder_block,	/* make_forwarder_block  */
8879   NULL,				/* tidy_fallthru_edge  */
8880   NULL,				/* force_nonfallthru */
8881   gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8882   gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8883   gimple_flow_call_edges_add,   /* flow_call_edges_add */
8884   gimple_execute_on_growing_pred,	/* execute_on_growing_pred */
8885   gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8886   gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8887   gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8888   gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi  */
8889   extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8890   flush_pending_stmts, 		/* flush_pending_stmts */
8891   gimple_empty_block_p,           /* block_empty_p */
8892   gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8893   gimple_account_profile_record,
8894 };
8895 
8896 
8897 /* Split all critical edges.  */
8898 
8899 unsigned int
8900 split_critical_edges (void)
8901 {
8902   basic_block bb;
8903   edge e;
8904   edge_iterator ei;
8905 
8906   /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8907      expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
8908      mappings around the calls to split_edge.  */
8909   start_recording_case_labels ();
8910   FOR_ALL_BB_FN (bb, cfun)
8911     {
8912       FOR_EACH_EDGE (e, ei, bb->succs)
8913         {
8914 	  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8915 	    split_edge (e);
8916 	  /* PRE inserts statements on edges and expects that, since
8917 	     split_critical_edges was done beforehand, committing edge
8918 	     insertions will not split more edges.  In addition to critical
8919 	     edges we must split edges whose source ends with a control flow
8920 	     statement, such as RESX, and whose destination cannot hold the
8921 	     inserted code.  Go ahead and split them too.  This matches the
8922 	     logic in gimple_find_edge_insert_loc.  */
8923 	  else if ((!single_pred_p (e->dest)
8924 	            || !gimple_seq_empty_p (phi_nodes (e->dest))
8925 		    || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8926 		   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8927 	           && !(e->flags & EDGE_ABNORMAL))
8928 	    {
8929 	      gimple_stmt_iterator gsi;
8930 
8931 	      gsi = gsi_last_bb (e->src);
8932 	      if (!gsi_end_p (gsi)
8933 		  && stmt_ends_bb_p (gsi_stmt (gsi))
8934 		  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8935 		      && !gimple_call_builtin_p (gsi_stmt (gsi),
8936 						 BUILT_IN_RETURN)))
8937 		split_edge (e);
8938 	    }
8939 	}
8940     }
8941   end_recording_case_labels ();
8942   return 0;
8943 }
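
/* Recall that an edge is critical when its source block has several
   successors and its destination block has several predecessors.  For
   instance, with the edges (names illustrative)

       A -> C,  A -> D,  B -> C

   the edge A->C is critical: code inserted on it can live neither in A
   (it would also execute on the path to D) nor in C (it would also
   execute when coming from B).  split_edge turns A->C into A->N->C, so
   the new block N belongs to that edge alone.  */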
8944 
8945 namespace {
8946 
8947 const pass_data pass_data_split_crit_edges =
8948 {
8949   GIMPLE_PASS, /* type */
8950   "crited", /* name */
8951   OPTGROUP_NONE, /* optinfo_flags */
8952   TV_TREE_SPLIT_EDGES, /* tv_id */
8953   PROP_cfg, /* properties_required */
8954   PROP_no_crit_edges, /* properties_provided */
8955   0, /* properties_destroyed */
8956   0, /* todo_flags_start */
8957   0, /* todo_flags_finish */
8958 };
8959 
8960 class pass_split_crit_edges : public gimple_opt_pass
8961 {
8962 public:
8963   pass_split_crit_edges (gcc::context *ctxt)
8964     : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8965   {}
8966 
8967   /* opt_pass methods: */
8968   virtual unsigned int execute (function *) { return split_critical_edges (); }
8969 
8970   opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8971 }; // class pass_split_crit_edges
8972 
8973 } // anon namespace
8974 
8975 gimple_opt_pass *
8976 make_pass_split_crit_edges (gcc::context *ctxt)
8977 {
8978   return new pass_split_crit_edges (ctxt);
8979 }
8980 
8981 
8982 /* Insert COND expression which is GIMPLE_COND after STMT
8983    in basic block BB with appropriate basic block split
8984    and creation of a new conditionally executed basic block.
8985    Update profile so the new bb is visited with probability PROB.
8986    Return created basic block.  */
8987 basic_block
8988 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
8989 	        profile_probability prob)
8990 {
8991   edge fall = split_block (bb, stmt);
8992   gimple_stmt_iterator iter = gsi_last_bb (bb);
8993   basic_block new_bb;
8994 
8995   /* Insert cond statement.  */
8996   gcc_assert (gimple_code (cond) == GIMPLE_COND);
8997   if (gsi_end_p (iter))
8998     gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8999   else
9000     gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
9001 
9002   /* Create conditionally executed block.  */
9003   new_bb = create_empty_bb (bb);
9004   edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
9005   e->probability = prob;
9006   new_bb->count = e->count ();
9007   make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
9008 
9009   /* Fix edge for split bb.  */
9010   fall->flags = EDGE_FALSE_VALUE;
9011   fall->probability -= e->probability;
9012 
9013   /* Update dominance info.  */
9014   if (dom_info_available_p (CDI_DOMINATORS))
9015     {
9016       set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
9017       set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
9018     }
9019 
9020   /* Update loop info.  */
9021   if (current_loops)
9022     add_bb_to_loop (new_bb, bb->loop_father);
9023 
9024   return new_bb;
9025 }
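
/* A sketch of the resulting shape (names illustrative):

	BB: ...; STMT; COND
	   |true        \false
	   v             v
	NEW_BB ------> REST

   BB is split after STMT, COND is appended to BB, the true edge enters
   the new empty block NEW_BB with probability PROB, and NEW_BB falls
   through to REST, the block holding the statements after STMT.  */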
9026 
9027 /* Build a ternary operation and gimplify it.  Emit code before GSI.
9028    Return the gimple_val holding the result.  */
9029 
9030 tree
9031 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
9032 		 tree type, tree a, tree b, tree c)
9033 {
9034   tree ret;
9035   location_t loc = gimple_location (gsi_stmt (*gsi));
9036 
9037   ret = fold_build3_loc (loc, code, type, a, b, c);
9038   return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9039                                    GSI_SAME_STMT);
9040 }
9041 
9042 /* Build a binary operation and gimplify it.  Emit code before GSI.
9043    Return the gimple_val holding the result.  */
9044 
9045 tree
9046 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
9047 		 tree type, tree a, tree b)
9048 {
9049   tree ret;
9050 
9051   ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
9052   return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9053                                    GSI_SAME_STMT);
9054 }
9055 
9056 /* Build a unary operation and gimplify it.  Emit code before GSI.
9057    Return the gimple_val holding the result.  */
9058 
9059 tree
9060 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
9061 		 tree a)
9062 {
9063   tree ret;
9064 
9065   ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
9066   return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9067                                    GSI_SAME_STMT);
9068 }
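
/* These helpers let a pass build folded expressions directly at a
   statement iterator.  E.g. a pass holding a gimple_stmt_iterator GSI
   could emit t = (a + b) * a before the current statement with
   (a minimal sketch; a, b and type stand for existing trees):

     tree sum = gimplify_build2 (&gsi, PLUS_EXPR, type, a, b);
     tree t = gimplify_build2 (&gsi, MULT_EXPR, type, sum, a);
*/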
9069 
9070 
9071 
9072 /* Given a basic block B which ends with a conditional and has
9073    precisely two successors, determine which of the edges is taken if
9074    the conditional is true and which is taken if the conditional is
9075    false.  Set TRUE_EDGE and FALSE_EDGE appropriately.  */
9076 
9077 void
9078 extract_true_false_edges_from_block (basic_block b,
9079 				     edge *true_edge,
9080 				     edge *false_edge)
9081 {
9082   edge e = EDGE_SUCC (b, 0);
9083 
9084   if (e->flags & EDGE_TRUE_VALUE)
9085     {
9086       *true_edge = e;
9087       *false_edge = EDGE_SUCC (b, 1);
9088     }
9089   else
9090     {
9091       *false_edge = e;
9092       *true_edge = EDGE_SUCC (b, 1);
9093     }
9094 }
9095 
9096 
9097 /* From a controlling predicate in the immediate dominator DOM of
9098    PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
9099    predicate evaluates to true and false and store them to
9100    *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
9101    they are non-NULL.  Returns true if the edges can be determined,
9102    otherwise false.  */
9103 
9104 bool
9105 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
9106 				     edge *true_controlled_edge,
9107 				     edge *false_controlled_edge)
9108 {
9109   basic_block bb = phiblock;
9110   edge true_edge, false_edge, tem;
9111   edge e0 = NULL, e1 = NULL;
9112 
9113   /* We have to verify that one edge into the PHI node is dominated
9114      by the true edge of the predicate block and the other edge
9115      dominated by the false edge.  This ensures that the PHI argument
9116      we are going to take is completely determined by the path we
9117      take from the predicate block.
9118      We can only use BB dominance checks below if the destination of
9119      the true/false edges are dominated by their edge, thus only
9120      have a single predecessor.  */
9121   extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
9122   tem = EDGE_PRED (bb, 0);
9123   if (tem == true_edge
9124       || (single_pred_p (true_edge->dest)
9125 	  && (tem->src == true_edge->dest
9126 	      || dominated_by_p (CDI_DOMINATORS,
9127 				 tem->src, true_edge->dest))))
9128     e0 = tem;
9129   else if (tem == false_edge
9130 	   || (single_pred_p (false_edge->dest)
9131 	       && (tem->src == false_edge->dest
9132 		   || dominated_by_p (CDI_DOMINATORS,
9133 				      tem->src, false_edge->dest))))
9134     e1 = tem;
9135   else
9136     return false;
9137   tem = EDGE_PRED (bb, 1);
9138   if (tem == true_edge
9139       || (single_pred_p (true_edge->dest)
9140 	  && (tem->src == true_edge->dest
9141 	      || dominated_by_p (CDI_DOMINATORS,
9142 				 tem->src, true_edge->dest))))
9143     e0 = tem;
9144   else if (tem == false_edge
9145 	   || (single_pred_p (false_edge->dest)
9146 	       && (tem->src == false_edge->dest
9147 		   || dominated_by_p (CDI_DOMINATORS,
9148 				      tem->src, false_edge->dest))))
9149     e1 = tem;
9150   else
9151     return false;
9152   if (!e0 || !e1)
9153     return false;
9154 
9155   if (true_controlled_edge)
9156     *true_controlled_edge = e0;
9157   if (false_controlled_edge)
9158     *false_controlled_edge = e1;
9159 
9160   return true;
9161 }
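
/* The shape handled above is the usual diamond, where DOM ends in a
   predicate and PHIBLOCK joins the two arms (a sketch; either arm may
   be empty, in which case the true/false edge enters PHIBLOCK
   directly):

	    DOM
	   /   \
      true/     \false
	 v       v
	ARM1    ARM2
	   \   /
	    v v
	 PHIBLOCK

   E0 is then the PHIBLOCK predecessor reached via the true edge and E1
   the one reached via the false edge.  */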
9162 
9163 /* Generate the LHS and RHS of a range test that checks whether INDEX is in
9164    the range [LOW, HIGH].  Place associated stmts before BB's last stmt.  */
9165 
9166 void
9167 generate_range_test (basic_block bb, tree index, tree low, tree high,
9168 		     tree *lhs, tree *rhs)
9169 {
9170   tree type = TREE_TYPE (index);
9171   tree utype = range_check_type (type);
9172 
9173   low = fold_convert (utype, low);
9174   high = fold_convert (utype, high);
9175 
9176   gimple_seq seq = NULL;
9177   index = gimple_convert (&seq, utype, index);
9178   *lhs = gimple_build (&seq, MINUS_EXPR, utype, index, low);
9179   *rhs = const_binop (MINUS_EXPR, utype, high, low);
9180 
9181   gimple_stmt_iterator gsi = gsi_last_bb (bb);
9182   gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
9183 }
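
/* The generated test relies on the usual unsigned-wraparound trick:
   INDEX is in [LOW, HIGH] iff (utype) (INDEX - LOW) <= (utype) (HIGH - LOW).
   For example, for the range [3, 10] the caller compares
   (unsigned) (INDEX - 3) against 7: values below 3 wrap around to huge
   unsigned numbers and fail the test, as do values above 10.  */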
9184 
9185 /* Return the basic block that belongs to label numbered INDEX
9186    of a switch statement.  */
9187 
9188 basic_block
9189 gimple_switch_label_bb (function *ifun, gswitch *gs, unsigned index)
9190 {
9191   return label_to_block (ifun, CASE_LABEL (gimple_switch_label (gs, index)));
9192 }
9193 
9194 /* Return the default basic block of a switch statement.  */
9195 
9196 basic_block
9197 gimple_switch_default_bb (function *ifun, gswitch *gs)
9198 {
9199   return gimple_switch_label_bb (ifun, gs, 0);
9200 }
9201 
9202 /* Return the edge that belongs to label numbered INDEX
9203    of a switch statement.  */
9204 
9205 edge
9206 gimple_switch_edge (function *ifun, gswitch *gs, unsigned index)
9207 {
9208   return find_edge (gimple_bb (gs), gimple_switch_label_bb (ifun, gs, index));
9209 }
9210 
9211 /* Return the default edge of a switch statement.  */
9212 
9213 edge
9214 gimple_switch_default_edge (function *ifun, gswitch *gs)
9215 {
9216   return gimple_switch_edge (ifun, gs, 0);
9217 }
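
/* In GIMPLE the default label of a switch is always label 0, so for
   a statement like (a sketch, using roughly the GIMPLE dump syntax)

     switch (x) <default: L0, case 1: L1, case 5 ... 7: L2>

   gimple_switch_default_bb returns the block of L0, and, e.g.,
   gimple_switch_edge (cfun, gs, 2) the edge to the block of L2.  */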
9218 
9219 
9220 /* Emit return warnings.  */
9221 
9222 namespace {
9223 
9224 const pass_data pass_data_warn_function_return =
9225 {
9226   GIMPLE_PASS, /* type */
9227   "*warn_function_return", /* name */
9228   OPTGROUP_NONE, /* optinfo_flags */
9229   TV_NONE, /* tv_id */
9230   PROP_cfg, /* properties_required */
9231   0, /* properties_provided */
9232   0, /* properties_destroyed */
9233   0, /* todo_flags_start */
9234   0, /* todo_flags_finish */
9235 };
9236 
9237 class pass_warn_function_return : public gimple_opt_pass
9238 {
9239 public:
9240   pass_warn_function_return (gcc::context *ctxt)
9241     : gimple_opt_pass (pass_data_warn_function_return, ctxt)
9242   {}
9243 
9244   /* opt_pass methods: */
9245   virtual unsigned int execute (function *);
9246 
9247 }; // class pass_warn_function_return
9248 
9249 unsigned int
9250 pass_warn_function_return::execute (function *fun)
9251 {
9252   location_t location;
9253   gimple *last;
9254   edge e;
9255   edge_iterator ei;
9256 
9257   if (!targetm.warn_func_return (fun->decl))
9258     return 0;
9259 
9260   /* If we have a path to EXIT, then we do return.  */
9261   if (TREE_THIS_VOLATILE (fun->decl)
9262       && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
9263     {
9264       location = UNKNOWN_LOCATION;
9265       for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
9266 	   (e = ei_safe_edge (ei)); )
9267 	{
9268 	  last = last_stmt (e->src);
9269 	  if ((gimple_code (last) == GIMPLE_RETURN
9270 	       || gimple_call_builtin_p (last, BUILT_IN_RETURN))
9271 	      && location == UNKNOWN_LOCATION
9272 	      && ((location = LOCATION_LOCUS (gimple_location (last)))
9273 		  != UNKNOWN_LOCATION)
9274 	      && !optimize)
9275 	    break;
9276 	  /* When optimizing, replace return stmts in noreturn functions
9277 	     with a __builtin_unreachable () call.  */
9278 	  if (optimize && gimple_code (last) == GIMPLE_RETURN)
9279 	    {
9280 	      tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9281 	      gimple *new_stmt = gimple_build_call (fndecl, 0);
9282 	      gimple_set_location (new_stmt, gimple_location (last));
9283 	      gimple_stmt_iterator gsi = gsi_for_stmt (last);
9284 	      gsi_replace (&gsi, new_stmt, true);
9285 	      remove_edge (e);
9286 	    }
9287 	  else
9288 	    ei_next (&ei);
9289 	}
9290       if (location == UNKNOWN_LOCATION)
9291 	location = cfun->function_end_locus;
9292 
9293 #ifdef notyet
9294       if (warn_missing_noreturn)
9295         warning_at (location, 0, "%<noreturn%> function does return");
9296 #endif
9297     }
9298 
9299   /* If we see "return;" in some basic block, then we do reach the end
9300      without returning a value.  */
9301   else if (warn_return_type > 0
9302 	   && !TREE_NO_WARNING (fun->decl)
9303 	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
9304     {
9305       FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
9306 	{
9307 	  gimple *last = last_stmt (e->src);
9308 	  greturn *return_stmt = dyn_cast <greturn *> (last);
9309 	  if (return_stmt
9310 	      && gimple_return_retval (return_stmt) == NULL
9311 	      && !gimple_no_warning_p (last))
9312 	    {
9313 	      location = gimple_location (last);
9314 	      if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9315 		location = fun->function_end_locus;
9316 	      if (warning_at (location, OPT_Wreturn_type,
9317 			      "control reaches end of non-void function"))
9318 		TREE_NO_WARNING (fun->decl) = 1;
9319 	      break;
9320 	    }
9321 	}
9322       /* The C++ FE turns fallthrough from the end of a non-void function
9323 	 into a __builtin_unreachable () call with BUILTINS_LOCATION.
9324 	 Recognize those too.  */
9325       basic_block bb;
9326       if (!TREE_NO_WARNING (fun->decl))
9327 	FOR_EACH_BB_FN (bb, fun)
9328 	  if (EDGE_COUNT (bb->succs) == 0)
9329 	    {
9330 	      gimple *last = last_stmt (bb);
9331 	      const enum built_in_function ubsan_missing_ret
9332 		= BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
9333 	      if (last
9334 		  && ((LOCATION_LOCUS (gimple_location (last))
9335 		       == BUILTINS_LOCATION
9336 		       && gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE))
9337 		      || gimple_call_builtin_p (last, ubsan_missing_ret)))
9338 		{
9339 		  gimple_stmt_iterator gsi = gsi_for_stmt (last);
9340 		  gsi_prev_nondebug (&gsi);
9341 		  gimple *prev = gsi_stmt (gsi);
9342 		  if (prev == NULL)
9343 		    location = UNKNOWN_LOCATION;
9344 		  else
9345 		    location = gimple_location (prev);
9346 		  if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9347 		    location = fun->function_end_locus;
9348 		  if (warning_at (location, OPT_Wreturn_type,
9349 				  "control reaches end of non-void function"))
9350 		    TREE_NO_WARNING (fun->decl) = 1;
9351 		  break;
9352 		}
9353 	    }
9354     }
9355   return 0;
9356 }
9357 
9358 } // anon namespace
9359 
9360 gimple_opt_pass *
9361 make_pass_warn_function_return (gcc::context *ctxt)
9362 {
9363   return new pass_warn_function_return (ctxt);
9364 }
9365 
9366 /* Walk a gimplified function and warn about calls whose return value is
9367    ignored although __attribute__ ((warn_unused_result)) is set.  This is
9368    done before inlining, so we don't have to worry about that.  */
9369 
9370 static void
9371 do_warn_unused_result (gimple_seq seq)
9372 {
9373   tree fdecl, ftype;
9374   gimple_stmt_iterator i;
9375 
9376   for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
9377     {
9378       gimple *g = gsi_stmt (i);
9379 
9380       switch (gimple_code (g))
9381 	{
9382 	case GIMPLE_BIND:
9383 	  do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
9384 	  break;
9385 	case GIMPLE_TRY:
9386 	  do_warn_unused_result (gimple_try_eval (g));
9387 	  do_warn_unused_result (gimple_try_cleanup (g));
9388 	  break;
9389 	case GIMPLE_CATCH:
9390 	  do_warn_unused_result (gimple_catch_handler (
9391 				   as_a <gcatch *> (g)));
9392 	  break;
9393 	case GIMPLE_EH_FILTER:
9394 	  do_warn_unused_result (gimple_eh_filter_failure (g));
9395 	  break;
9396 
9397 	case GIMPLE_CALL:
9398 	  if (gimple_call_lhs (g))
9399 	    break;
9400 	  if (gimple_call_internal_p (g))
9401 	    break;
9402 
9403 	  /* This is a naked call, as opposed to a GIMPLE_CALL with an
9404 	     LHS.  All calls whose value is ignored should be
9405 	     represented like this.  Look for the attribute.  */
9406 	  fdecl = gimple_call_fndecl (g);
9407 	  ftype = gimple_call_fntype (g);
9408 
9409 	  if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
9410 	    {
9411 	      location_t loc = gimple_location (g);
9412 
9413 	      if (fdecl)
9414 		warning_at (loc, OPT_Wunused_result,
9415 			    "ignoring return value of %qD, "
9416 			    "declared with attribute warn_unused_result",
9417 			    fdecl);
9418 	      else
9419 		warning_at (loc, OPT_Wunused_result,
9420 			    "ignoring return value of function "
9421 			    "declared with attribute warn_unused_result");
9422 	    }
9423 	  break;
9424 
9425 	default:
9426 	  /* Not a container, not a call, or a call whose value is used.  */
9427 	  break;
9428 	}
9429     }
9430 }
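
/* For instance, compiling (a hypothetical translation unit)

     __attribute__ ((warn_unused_result)) int f (void);
     void g (void) { f (); }

   reaches the GIMPLE_CALL case above with a naked call to f; the
   attribute is found on the function type and the "ignoring return
   value" warning is emitted at the call's location.  */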
9431 
9432 namespace {
9433 
9434 const pass_data pass_data_warn_unused_result =
9435 {
9436   GIMPLE_PASS, /* type */
9437   "*warn_unused_result", /* name */
9438   OPTGROUP_NONE, /* optinfo_flags */
9439   TV_NONE, /* tv_id */
9440   PROP_gimple_any, /* properties_required */
9441   0, /* properties_provided */
9442   0, /* properties_destroyed */
9443   0, /* todo_flags_start */
9444   0, /* todo_flags_finish */
9445 };
9446 
9447 class pass_warn_unused_result : public gimple_opt_pass
9448 {
9449 public:
9450   pass_warn_unused_result (gcc::context *ctxt)
9451     : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9452   {}
9453 
9454   /* opt_pass methods: */
9455   virtual bool gate (function *) { return flag_warn_unused_result; }
9456   virtual unsigned int execute (function *)
9457     {
9458       do_warn_unused_result (gimple_body (current_function_decl));
9459       return 0;
9460     }
9461 
9462 }; // class pass_warn_unused_result
9463 
9464 } // anon namespace
9465 
9466 gimple_opt_pass *
9467 make_pass_warn_unused_result (gcc::context *ctxt)
9468 {
9469   return new pass_warn_unused_result (ctxt);
9470 }
9471 
9472 /* IPA passes, compilation of earlier functions or inlining
9473    might have changed some properties, such as marking functions nothrow,
9474    pure, const or noreturn.
9475    Remove redundant edges and basic blocks, and create new ones if necessary.
9476 
9477    This pass can't be executed as a standalone pass from the pass manager,
9478    because in between inlining and this fixup verify_flow_info would fail.  */
9479 
9480 unsigned int
9481 execute_fixup_cfg (void)
9482 {
9483   basic_block bb;
9484   gimple_stmt_iterator gsi;
9485   int todo = 0;
9486   cgraph_node *node = cgraph_node::get (current_function_decl);
9487   profile_count num = node->count;
9488   profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
9489   bool scale = num.initialized_p () && !(num == den);
9490 
9491   if (scale)
9492     {
9493       profile_count::adjust_for_ipa_scaling (&num, &den);
9494       ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9495       EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9496         = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
9497     }
9498 
9499   FOR_EACH_BB_FN (bb, cfun)
9500     {
9501       if (scale)
9502         bb->count = bb->count.apply_scale (num, den);
9503       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9504 	{
9505 	  gimple *stmt = gsi_stmt (gsi);
9506 	  tree decl = is_gimple_call (stmt)
9507 		      ? gimple_call_fndecl (stmt)
9508 		      : NULL;
9509 	  if (decl)
9510 	    {
9511 	      int flags = gimple_call_flags (stmt);
9512 	      if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9513 		{
9514 		  if (gimple_purge_dead_abnormal_call_edges (bb))
9515 		    todo |= TODO_cleanup_cfg;
9516 
9517 		  if (gimple_in_ssa_p (cfun))
9518 		    {
9519 		      todo |= TODO_update_ssa | TODO_cleanup_cfg;
9520 		      update_stmt (stmt);
9521 		    }
9522 		}
9523 
9524 	      if (flags & ECF_NORETURN
9525 		  && fixup_noreturn_call (stmt))
9526 		todo |= TODO_cleanup_cfg;
9527 	     }
9528 
9529 	  /* Remove stores to variables we marked write-only.
9530 	     Keep the access when the store has a side effect, i.e. when the
9531 	     source is volatile.  */
9532 	  if (gimple_store_p (stmt)
9533 	      && !gimple_has_side_effects (stmt))
9534 	    {
9535 	      tree lhs = get_base_address (gimple_get_lhs (stmt));
9536 
9537 	      if (VAR_P (lhs)
9538 		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9539 		  && varpool_node::get (lhs)->writeonly)
9540 		{
9541 		  unlink_stmt_vdef (stmt);
9542 		  gsi_remove (&gsi, true);
9543 		  release_defs (stmt);
9544 	          todo |= TODO_update_ssa | TODO_cleanup_cfg;
9545 	          continue;
9546 		}
9547 	    }
9548 	  /* For calls we can simply remove LHS when it is known
9549 	     to be write-only.  */
9550 	  if (is_gimple_call (stmt)
9551 	      && gimple_get_lhs (stmt))
9552 	    {
9553 	      tree lhs = get_base_address (gimple_get_lhs (stmt));
9554 
9555 	      if (VAR_P (lhs)
9556 		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9557 		  && varpool_node::get (lhs)->writeonly)
9558 		{
9559 		  gimple_call_set_lhs (stmt, NULL);
9560 		  update_stmt (stmt);
9561 	          todo |= TODO_update_ssa | TODO_cleanup_cfg;
9562 		}
9563 	    }
9564 
9565 	  if (maybe_clean_eh_stmt (stmt)
9566 	      && gimple_purge_dead_eh_edges (bb))
9567 	    todo |= TODO_cleanup_cfg;
9568 	  gsi_next (&gsi);
9569 	}
9570 
9571       /* If we have a basic block with no successors that does not
9572 	 end with a control statement or a noreturn call, end it with
9573 	 a call to __builtin_unreachable.  This situation can occur
9574 	 when inlining a noreturn call that does in fact return.  */
9575       if (EDGE_COUNT (bb->succs) == 0)
9576 	{
9577 	  gimple *stmt = last_stmt (bb);
9578 	  if (!stmt
9579 	      || (!is_ctrl_stmt (stmt)
9580 		  && (!is_gimple_call (stmt)
9581 		      || !gimple_call_noreturn_p (stmt))))
9582 	    {
9583 	      if (stmt && is_gimple_call (stmt))
9584 		gimple_call_set_ctrl_altering (stmt, false);
9585 	      tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9586 	      stmt = gimple_build_call (fndecl, 0);
9587 	      gimple_stmt_iterator gsi = gsi_last_bb (bb);
9588 	      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9589 	      if (!cfun->after_inlining)
9590 		{
9591 		  gcall *call_stmt = dyn_cast <gcall *> (stmt);
9592 		  node->create_edge (cgraph_node::get_create (fndecl),
9593 				     call_stmt, bb->count);
9594 		}
9595 	    }
9596 	}
9597     }
9598   if (scale)
9599     compute_function_frequency ();
9600 
9601   if (current_loops
9602       && (todo & TODO_cleanup_cfg))
9603     loops_state_set (LOOPS_NEED_FIXUP);
9604 
9605   return todo;
9606 }
9607 
9608 namespace {
9609 
9610 const pass_data pass_data_fixup_cfg =
9611 {
9612   GIMPLE_PASS, /* type */
9613   "fixup_cfg", /* name */
9614   OPTGROUP_NONE, /* optinfo_flags */
9615   TV_NONE, /* tv_id */
9616   PROP_cfg, /* properties_required */
9617   0, /* properties_provided */
9618   0, /* properties_destroyed */
9619   0, /* todo_flags_start */
9620   0, /* todo_flags_finish */
9621 };
9622 
9623 class pass_fixup_cfg : public gimple_opt_pass
9624 {
9625 public:
9626   pass_fixup_cfg (gcc::context *ctxt)
9627     : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
9628   {}
9629 
9630   /* opt_pass methods: */
9631   opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
9632   virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
9633 
9634 }; // class pass_fixup_cfg
9635 
9636 } // anon namespace
9637 
9638 gimple_opt_pass *
9639 make_pass_fixup_cfg (gcc::context *ctxt)
9640 {
9641   return new pass_fixup_cfg (ctxt);
9642 }
9643 
9644 /* Garbage collection support for edge_def.  */
9645 
9646 extern void gt_ggc_mx (tree&);
9647 extern void gt_ggc_mx (gimple *&);
9648 extern void gt_ggc_mx (rtx&);
9649 extern void gt_ggc_mx (basic_block&);
9650 
9651 static void
9652 gt_ggc_mx (rtx_insn *& x)
9653 {
9654   if (x)
9655     gt_ggc_mx_rtx_def ((void *) x);
9656 }
9657 
9658 void
9659 gt_ggc_mx (edge_def *e)
9660 {
9661   tree block = LOCATION_BLOCK (e->goto_locus);
9662   gt_ggc_mx (e->src);
9663   gt_ggc_mx (e->dest);
9664   if (current_ir_type () == IR_GIMPLE)
9665     gt_ggc_mx (e->insns.g);
9666   else
9667     gt_ggc_mx (e->insns.r);
9668   gt_ggc_mx (block);
9669 }
9670 
9671 /* PCH support for edge_def.  */
9672 
9673 extern void gt_pch_nx (tree&);
9674 extern void gt_pch_nx (gimple *&);
9675 extern void gt_pch_nx (rtx&);
9676 extern void gt_pch_nx (basic_block&);
9677 
9678 static void
9679 gt_pch_nx (rtx_insn *& x)
9680 {
9681   if (x)
9682     gt_pch_nx_rtx_def ((void *) x);
9683 }
9684 
9685 void
9686 gt_pch_nx (edge_def *e)
9687 {
9688   tree block = LOCATION_BLOCK (e->goto_locus);
9689   gt_pch_nx (e->src);
9690   gt_pch_nx (e->dest);
9691   if (current_ir_type () == IR_GIMPLE)
9692     gt_pch_nx (e->insns.g);
9693   else
9694     gt_pch_nx (e->insns.r);
9695   gt_pch_nx (block);
9696 }
9697 
9698 void
9699 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9700 {
9701   tree block = LOCATION_BLOCK (e->goto_locus);
9702   op (&(e->src), cookie);
9703   op (&(e->dest), cookie);
9704   if (current_ir_type () == IR_GIMPLE)
9705     op (&(e->insns.g), cookie);
9706   else
9707     op (&(e->insns.r), cookie);
9708   op (&(block), cookie);
9709 }
9710 
9711 #if CHECKING_P
9712 
9713 namespace selftest {
9714 
9715 /* Helper function for CFG selftests: create a dummy function decl
9716    and push it as cfun.  */
9717 
9718 static tree
9719 push_fndecl (const char *name)
9720 {
9721   tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
9722   /* FIXME: this uses input_location: */
9723   tree fndecl = build_fn_decl (name, fn_type);
9724   tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
9725 			    NULL_TREE, integer_type_node);
9726   DECL_RESULT (fndecl) = retval;
9727   push_struct_function (fndecl);
9728   function *fun = DECL_STRUCT_FUNCTION (fndecl);
9729   ASSERT_TRUE (fun != NULL);
9730   init_empty_tree_cfg_for_function (fun);
9731   ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
9732   ASSERT_EQ (0, n_edges_for_fn (fun));
9733   return fndecl;
9734 }
9735 
9736 /* These tests directly create CFGs.
9737    Compare with the static fns within tree-cfg.c:
9738      - build_gimple_cfg
9739      - make_blocks: calls create_basic_block (seq, bb);
9740      - make_edges.   */
9741 
9742 /* Verify a simple cfg of the form:
9743      ENTRY -> A -> B -> C -> EXIT.  */
9744 
9745 static void
9746 test_linear_chain ()
9747 {
9748   gimple_register_cfg_hooks ();
9749 
9750   tree fndecl = push_fndecl ("cfg_test_linear_chain");
9751   function *fun = DECL_STRUCT_FUNCTION (fndecl);
9752 
9753   /* Create some empty blocks.  */
9754   basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9755   basic_block bb_b = create_empty_bb (bb_a);
9756   basic_block bb_c = create_empty_bb (bb_b);
9757 
9758   ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
9759   ASSERT_EQ (0, n_edges_for_fn (fun));
9760 
9761   /* Create some edges: a simple linear chain of BBs.  */
9762   make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9763   make_edge (bb_a, bb_b, 0);
9764   make_edge (bb_b, bb_c, 0);
9765   make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9766 
9767   /* Verify the edges.  */
9768   ASSERT_EQ (4, n_edges_for_fn (fun));
9769   ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
9770   ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
9771   ASSERT_EQ (1, bb_a->preds->length ());
9772   ASSERT_EQ (1, bb_a->succs->length ());
9773   ASSERT_EQ (1, bb_b->preds->length ());
9774   ASSERT_EQ (1, bb_b->succs->length ());
9775   ASSERT_EQ (1, bb_c->preds->length ());
9776   ASSERT_EQ (1, bb_c->succs->length ());
9777   ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
9778   ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
9779 
9780   /* Verify the dominance information.
9781      Each BB in our simple chain should be dominated by the one before
9782      it.  */
9783   calculate_dominance_info (CDI_DOMINATORS);
9784   ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9785   ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9786   vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9787   ASSERT_EQ (1, dom_by_b.length ());
9788   ASSERT_EQ (bb_c, dom_by_b[0]);
9789   free_dominance_info (CDI_DOMINATORS);
9790   dom_by_b.release ();
9791 
9792   /* Similarly for post-dominance: each BB in our chain is post-dominated
9793      by the one after it.  */
9794   calculate_dominance_info (CDI_POST_DOMINATORS);
9795   ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9796   ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9797   vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9798   ASSERT_EQ (1, postdom_by_b.length ());
9799   ASSERT_EQ (bb_a, postdom_by_b[0]);
9800   free_dominance_info (CDI_POST_DOMINATORS);
9801   postdom_by_b.release ();
9802 
9803   pop_cfun ();
9804 }
9805 
9806 /* Verify a simple CFG of the form:
9807      ENTRY
9808        |
9809        A
9810       / \
9811      /t  \f
9812     B     C
9813      \   /
9814       \ /
9815        D
9816        |
9817       EXIT.  */
9818 
9819 static void
9820 test_diamond ()
9821 {
9822   gimple_register_cfg_hooks ();
9823 
9824   tree fndecl = push_fndecl ("cfg_test_diamond");
9825   function *fun = DECL_STRUCT_FUNCTION (fndecl);
9826 
9827   /* Create some empty blocks.  */
9828   basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9829   basic_block bb_b = create_empty_bb (bb_a);
9830   basic_block bb_c = create_empty_bb (bb_a);
9831   basic_block bb_d = create_empty_bb (bb_b);
9832 
9833   ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
9834   ASSERT_EQ (0, n_edges_for_fn (fun));
9835 
9836   /* Create the edges.  */
9837   make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9838   make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
9839   make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
9840   make_edge (bb_b, bb_d, 0);
9841   make_edge (bb_c, bb_d, 0);
9842   make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9843 
9844   /* Verify the edges.  */
9845   ASSERT_EQ (6, n_edges_for_fn (fun));
9846   ASSERT_EQ (1, bb_a->preds->length ());
9847   ASSERT_EQ (2, bb_a->succs->length ());
9848   ASSERT_EQ (1, bb_b->preds->length ());
9849   ASSERT_EQ (1, bb_b->succs->length ());
9850   ASSERT_EQ (1, bb_c->preds->length ());
9851   ASSERT_EQ (1, bb_c->succs->length ());
9852   ASSERT_EQ (2, bb_d->preds->length ());
9853   ASSERT_EQ (1, bb_d->succs->length ());
9854 
9855   /* Verify the dominance information.  */
9856   calculate_dominance_info (CDI_DOMINATORS);
9857   ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9858   ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9859   ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
9860   vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
9861   ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order.  */
9862   dom_by_a.release ();
9863   vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9864   ASSERT_EQ (0, dom_by_b.length ());
9865   dom_by_b.release ();
9866   free_dominance_info (CDI_DOMINATORS);
9867 
9868   /* Similarly for post-dominance.  */
9869   calculate_dominance_info (CDI_POST_DOMINATORS);
9870   ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9871   ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9872   ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
9873   vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
9874   ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order.  */
9875   postdom_by_d.release ();
9876   vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9877   ASSERT_EQ (0, postdom_by_b.length ());
9878   postdom_by_b.release ();
9879   free_dominance_info (CDI_POST_DOMINATORS);
9880 
9881   pop_cfun ();
9882 }
9883 
9884 /* Verify that we can handle a CFG containing a "complete" aka
9885    fully-connected subgraph (where A, B, C and D below all have edges
9886    pointing to every other node, and also to themselves).
9887    e.g.:
9888      ENTRY  EXIT
9889        |    ^
9890        |   /
9891        |  /
9892        | /
9893        V/
9894        A<--->B
9895        ^^   ^^
9896        | \ / |
9897        |  X  |
9898        | / \ |
9899        VV   VV
9900        C<--->D
9901 */
9902 
9903 static void
9904 test_fully_connected ()
9905 {
9906   gimple_register_cfg_hooks ();
9907 
9908   tree fndecl = push_fndecl ("cfg_fully_connected");
9909   function *fun = DECL_STRUCT_FUNCTION (fndecl);
9910 
9911   const int n = 4;
9912 
9913   /* Create some empty blocks.  */
9914   auto_vec <basic_block> subgraph_nodes;
9915   for (int i = 0; i < n; i++)
9916     subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
9917 
9918   ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
9919   ASSERT_EQ (0, n_edges_for_fn (fun));
9920 
9921   /* Create the edges.  */
9922   make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
9923   make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9924   for (int i = 0; i < n; i++)
9925     for (int j = 0; j < n; j++)
9926       make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
9927 
9928   /* Verify the edges.  */
9929   ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
9930   /* The first one is linked to ENTRY/EXIT as well as itself and
9931      everything else.  */
9932   ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
9933   ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
9934   /* The other ones in the subgraph are linked to everything in
9935      the subgraph (including themselves).  */
9936   for (int i = 1; i < n; i++)
9937     {
9938       ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
9939       ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
9940     }
9941 
9942   /* Verify the dominance information.  */
9943   calculate_dominance_info (CDI_DOMINATORS);
9944   /* The initial block in the subgraph should be dominated by ENTRY.  */
9945   ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
9946 	     get_immediate_dominator (CDI_DOMINATORS,
9947 				      subgraph_nodes[0]));
9948   /* Every other block in the subgraph should be dominated by the
9949      initial block.  */
9950   for (int i = 1; i < n; i++)
9951     ASSERT_EQ (subgraph_nodes[0],
9952 	       get_immediate_dominator (CDI_DOMINATORS,
9953 					subgraph_nodes[i]));
9954   free_dominance_info (CDI_DOMINATORS);
9955 
9956   /* Similarly for post-dominance.  */
9957   calculate_dominance_info (CDI_POST_DOMINATORS);
9958   /* The initial block in the subgraph should be postdominated by EXIT.  */
9959   ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
9960 	     get_immediate_dominator (CDI_POST_DOMINATORS,
9961 				      subgraph_nodes[0]));
9962   /* Every other block in the subgraph should be postdominated by the
9963      initial block, since that leads to EXIT.  */
9964   for (int i = 1; i < n; i++)
9965     ASSERT_EQ (subgraph_nodes[0],
9966 	       get_immediate_dominator (CDI_POST_DOMINATORS,
9967 					subgraph_nodes[i]));
9968   free_dominance_info (CDI_POST_DOMINATORS);
9969 
9970   pop_cfun ();
9971 }
9972 
9973 /* Run all of the selftests within this file.  */
9974 
9975 void
9976 tree_cfg_c_tests ()
9977 {
9978   test_linear_chain ();
9979   test_diamond ();
9980   test_fully_connected ();
9981 }
9982 
9983 } // namespace selftest
9984 
9985 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
9986    - loop
9987    - nested loops
9988    - switch statement (a block with many out-edges)
9989    - something that jumps to itself
9990    - etc  */
9991 
9992 #endif /* CHECKING_P */
9993