/* Control flow functions for trees.
   Copyright (C) 2001-2022 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "tree-ssa-dce.h"
#include "omp-general.h"
#include "omp-expand.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"
#include "selftest.h"
#include "opts.h"
#include "asan.h"
#include "profile.h"
#include "sreal.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */
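
/* For instance, given

     switch (x) { case 1: case 2: goto L; default: goto D; }

   the single edge to the block holding L maps to the chain of the two
   CASE_LABEL_EXPRs for 1 and 2, linked through their CASE_CHAIN fields,
   so redirecting that edge can update both case labels at once.  */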

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* OpenMP region idxs for blocks during cfg pass.  */
static vec<int> bb_to_omp_idx;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  int location_line;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return item->location_line;
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return a->location_line == b->location_line;
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (const gcond *, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity, true);

  /* Build a mapping of labels to their associated blocks.  */
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity, true);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */
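
/* For instance, "#pragma GCC ivdep" on a loop yields IL along the lines
   of

     _2 = .ANNOTATE (_1, annot_expr_ivdep_kind, 0);
     if (_2 != 0) goto <body>; else goto <exit>;

   here we record safelen = INT_MAX on the loop and replace the call with
   the plain copy "_2 = _1;" (the SSA names above are illustrative).  */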

static void
replace_loop_annotate_in_block (basic_block bb, class loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_unroll_kind:
	  loop->unroll
	    = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
	  cfun->has_unroll = true;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	case annot_expr_parallel_kind:
	  loop->can_be_parallel = true;
	  loop->safelen = INT_MAX;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}

/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  for (auto loop : loops_list (cfun, 0))
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);

      /* Push the global flag_finite_loops state down to individual loops.  */
      loop->finite_p = flag_finite_loops;
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_unroll_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	    case annot_expr_parallel_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();

  bb_to_omp_idx.release ();

  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}

/* Returns true if the sequence of statements STMTS only contains
   a call to __builtin_unreachable ().  */

bool
gimple_seq_unreachable_p (gimple_seq stmts)
{
  if (stmts == NULL
      /* Return false if -fsanitize=unreachable, we don't want to
	 optimize away those calls, but rather turn them into
	 __ubsan_handle_builtin_unreachable () or __builtin_trap ()
	 later.  */
      || sanitize_flags_p (SANITIZE_UNREACHABLE))
    return false;

  gimple_stmt_iterator gsi = gsi_last (stmts);

  if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    return false;

  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL
	  && !is_gimple_debug (stmt)
	  && !gimple_clobber_p (stmt))
	return false;
    }
  return true;
}

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	return gimple_seq_unreachable_p (bb_seq (other_bb));
    }
  return false;
}


/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh. We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* BUILT_IN_RETURN call is same as return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
	 as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
	  && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}


/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  gimple *prev_stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      /* PREV_STMT should only be set to a debug stmt if the debug
	 stmt is before nondebug stmts.  Once stmt reaches a nondebug
	 nonlabel, prev_stmt will be set to it, so that
	 stmt_starts_bb_p will know to start a new block if a label is
	 found.  However, if stmt was a label after debug stmts only,
	 keep the label in prev_stmt even if we find further debug
	 stmts, for there may be other labels after them, and they
	 should land in the same block.  */
      if (!prev_stmt || !stmt || !is_gimple_debug (stmt))
	prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	  prev_stmt = NULL;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  /* Look for debug markers right before labels, and move the debug
     stmts after the labels.  Accepting labels among debug markers
     adds no value, just complexity; if we wanted to annotate labels
     with view numbers (so sequencing among markers would matter) or
     somesuch, we're probably better off still moving the labels, but
     adding other debug annotations in their original positions or
     emitting nonbind or bind markers associated with the labels in
     the original position of the labels.

     Moving labels would probably be simpler, but we can't do that:
     moving labels assigns label ids to them, and doing so because of
     debug markers makes for -fcompare-debug and possibly even codegen
     differences.  So, we have to move the debug stmts instead.  To
     that end, we scan SEQ backwards, marking the position of the
     latest (earliest we find) label, and moving debug stmts that are
     not separated from it by nondebug nonlabel stmts after the
     label.  */
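
  /* For example, under these rules a sequence that starts as

       # DEBUG BEGIN_STMT
       L:
       x = 1;

     is rewritten so the label comes first:

       L:
       # DEBUG BEGIN_STMT
       x = 1;

     (an illustration of the rewrite below, not output copied from an
     actual dump).  */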
  if (MAY_HAVE_DEBUG_MARKER_STMTS)
    {
      gimple_stmt_iterator label = gsi_none ();

      for (gimple_stmt_iterator i = gsi_last (seq); !gsi_end_p (i); gsi_prev (&i))
	{
	  gimple *stmt = gsi_stmt (i);

	  /* If this is the first label we encounter (latest in SEQ)
	     before nondebug stmts, record its position.  */
	  if (is_a <glabel *> (stmt))
	    {
	      if (gsi_end_p (label))
		label = i;
	      continue;
	    }

	  /* Without a recorded label position to move debug stmts to,
	     there's nothing to do.  */
	  if (gsi_end_p (label))
	    continue;

	  /* Move the debug stmt at I after LABEL.  */
	  if (is_gimple_debug (stmt))
	    {
	      gcc_assert (gimple_debug_nonbind_marker_p (stmt));
	      /* As STMT is removed, I advances to the stmt after
		 STMT, so the gsi_prev in the for "increment"
		 expression gets us to the stmt we're to visit after
		 STMT.  LABEL, however, would advance to the moved
		 stmt if we passed it to gsi_move_after, so pass it a
		 copy instead, so as to keep LABEL pointing to the
		 LABEL.  */
	      gimple_stmt_iterator copy = label;
	      gsi_move_after (&i, &copy);
	      continue;
	    }

	  /* There aren't any (more?) debug stmts before label, so
	     there isn't anything else to move after it.  */
	  label = gsi_none ();
	}
    }

  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}

/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   last_basic_block_for_fn (cfun) + 1);

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}


/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block containing
   an IFN_ABNORMAL_DISPATCHER internal call, return the dispatcher's
   basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
	  return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block with an
   ABNORMAL_DISPATCHER internal call in it if needed, and create
   abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs, basic_block for_bb,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (!bb_to_omp_idx.is_empty ())
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx.is_empty ())
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (!bb_to_omp_idx.is_empty ()
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						 1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (!bb_to_omp_idx.is_empty ()
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}

/* Create outgoing edges for BB.  Returns 1 when the block ends with a
   computed goto, 2 when it ends with a statement that might return to
   this function via a nonlocal goto, and 0 otherwise.  Updates
   *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
        gtransaction *txn = as_a <gtransaction *> (last);
	tree label1 = gimple_transaction_label_norm (txn);
	tree label2 = gimple_transaction_label_uninst (txn);

	if (label1)
	  make_edge (bb, label_to_block (cfun, label1), EDGE_FALLTHRU);
	if (label2)
	  make_edge (bb, label_to_block (cfun, label2),
		     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

	tree label3 = gimple_transaction_label_over (txn);
	if (gimple_transaction_subcode (txn)
	    & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
	  make_edge (bb, label_to_block (cfun, label3), EDGE_TM_ABORT);

	fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (!bb_to_omp_idx.is_empty ())
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx.is_empty ())
	bb_to_omp_idx.safe_grow_cleared (n_basic_blocks_for_fn (cfun), true);
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
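
  /* As a rough illustration of the factoring done by
     handle_abnormal_edges, two computed gotos

       goto *p_1;            ...            goto *q_2;

     become assignments plus jumps to one shared goto site:

       gotovar = p_1;        ...            gotovar = q_2;
       goto <factored>;                     goto <factored>;
     <factored>:
       goto *gotovar;

     so only the factored block needs an edge to each potential
     destination label.  */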
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (!bb_to_omp_idx.is_empty ())
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_goto,
				       true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_call,
					 false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_call,
				       false);
	    }
	}

      if (!bb_to_omp_idx.is_empty ())
	XDELETE (dispatcher_bbs);
    }

  omp_free_regions ();
}

/* Add SEQ after GSI.  Start a new bb after GSI, and create further bbs
   as needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      profile_count cnt = profile_count::zero ();
      bool all = true;

      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->count ().initialized_p ())
	    cnt += e->count ();
	  else
	    all = false;
	}
      tree_guess_outgoing_edge_probabilities (bb);
      if (all || profile_status_for_fn (cfun) == PROFILE_READ)
        bb->count = cnt;

      bb = bb->next_bb;
    }
  return true;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */
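
/* For example, if both arms of "if (x) y = 1; else y = 2;" sit on a
   single source line, their blocks share that line's locus; giving
   each block a distinct discriminator lets a sample-based profiler
   attribute samples to the two arms separately.  */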

static int
next_discriminator_for_locus (int line)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.location_line = line;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (&item, line, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->location_line = line;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, expanded_location *from, location_t locus2)
{
  expanded_location to;

  if (locus1 == locus2)
    return true;

  to = expand_location (locus2);

  if (from->line != to.line)
    return false;
  if (from->file == to.file)
    return true;
  return (from->file != NULL
          && to.file != NULL
          && filename_cmp (from->file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      expanded_location locus_e = expand_location (locus);

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, &locus_e,
				     gimple_location (first)))
	      || (last && same_line_p (locus, &locus_e,
				       gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator
		  = next_discriminator_for_locus (locus_e.line);
	      else
		e->dest->discriminator
		  = next_discriminator_for_locus (locus_e.line);
	    }
	}
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (cfun, then_label);
  else_bb = label_to_block (cfun, else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (cfun, lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      basic_block label_bb = gimple_switch_label_bb (cfun, entry, i);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (cfun, dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (cfun, label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it any more,
   since (almost) no new labels should be created.  */
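
/* As a rough illustration (with hypothetical labels), a block reached as

     L1: L2: L3:
       x = 1;

   has all references redirected to the leading label L1, after which
   the now-unreferenced artificial labels L2 and L3 are deleted.  */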
1435 
1436 /* A map from basic block index to the leading label of that block.  */
1437 struct label_record
1438 {
1439   /* The label.  */
1440   tree label;
1441 
1442   /* True if the label is referenced from somewhere.  */
1443   bool used;
1444 };
1445 
1446 /* Given LABEL return the first label in the same basic block.  */
1447 
1448 static tree
main_block_label(tree label,label_record * label_for_bb)1449 main_block_label (tree label, label_record *label_for_bb)
1450 {
1451   basic_block bb = label_to_block (cfun, label);
1452   tree main_label = label_for_bb[bb->index].label;
1453 
1454   /* label_to_block possibly inserted undefined label into the chain.  */
1455   if (!main_label)
1456     {
1457       label_for_bb[bb->index].label = label;
1458       main_label = label;
1459     }
1460 
1461   label_for_bb[bb->index].used = true;
1462   return main_label;
1463 }
1464 
1465 /* Clean up redundant labels within the exception tree.  */
1466 
1467 static void
cleanup_dead_labels_eh(label_record * label_for_bb)1468 cleanup_dead_labels_eh (label_record *label_for_bb)
1469 {
1470   eh_landing_pad lp;
1471   eh_region r;
1472   tree lab;
1473   int i;
1474 
1475   if (cfun->eh == NULL)
1476     return;
1477 
1478   for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1479     if (lp && lp->post_landing_pad)
1480       {
1481 	lab = main_block_label (lp->post_landing_pad, label_for_bb);
1482 	if (lab != lp->post_landing_pad)
1483 	  {
1484 	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1485 	    lp->post_landing_pad = lab;
1486 	    EH_LANDING_PAD_NR (lab) = lp->index;
1487 	  }
1488       }
1489 
1490   FOR_ALL_EH_REGION (r)
1491     switch (r->type)
1492       {
1493       case ERT_CLEANUP:
1494       case ERT_MUST_NOT_THROW:
1495 	break;
1496 
1497       case ERT_TRY:
1498 	{
1499 	  eh_catch c;
1500 	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
1501 	    {
1502 	      lab = c->label;
1503 	      if (lab)
1504 		c->label = main_block_label (lab, label_for_bb);
1505 	    }
1506 	}
1507 	break;
1508 
1509       case ERT_ALLOWED_EXCEPTIONS:
1510 	lab = r->u.allowed.label;
1511 	if (lab)
1512 	  r->u.allowed.label = main_block_label (lab, label_for_bb);
1513 	break;
1514       }
1515 }
1516 
1517 
1518 /* Cleanup redundant labels.  This is a three-step process:
1519      1) Find the leading label for each block.
1520      2) Redirect all references to labels to the leading labels.
1521      3) Cleanup all useless labels.  */
1522 
1523 void
cleanup_dead_labels(void)1524 cleanup_dead_labels (void)
1525 {
1526   basic_block bb;
1527   label_record *label_for_bb = XCNEWVEC (struct label_record,
1528 					 last_basic_block_for_fn (cfun));
1529 
1530   /* Find a suitable label for each block.  We use the first user-defined
1531      label if there is one, or otherwise just the first label we see.  */
1532   FOR_EACH_BB_FN (bb, cfun)
1533     {
1534       gimple_stmt_iterator i;
1535 
1536       for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1537 	{
1538 	  tree label;
1539 	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1540 
1541 	  if (!label_stmt)
1542 	    break;
1543 
1544 	  label = gimple_label_label (label_stmt);
1545 
1546 	  /* If we have not yet seen a label for the current block,
1547 	     remember this one and see if there are more labels.  */
1548 	  if (!label_for_bb[bb->index].label)
1549 	    {
1550 	      label_for_bb[bb->index].label = label;
1551 	      continue;
1552 	    }
1553 
1554 	  /* If we did see a label for the current block already, but it
1555 	     is an artificially created label, replace it if the current
1556 	     label is a user defined label.  */
1557 	  if (!DECL_ARTIFICIAL (label)
1558 	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
1559 	    {
1560 	      label_for_bb[bb->index].label = label;
1561 	      break;
1562 	    }
1563 	}
1564     }
1565 
1566   /* Now redirect all jumps/branches to the selected label.
1567      First do so for each block ending in a control statement.  */
1568   FOR_EACH_BB_FN (bb, cfun)
1569     {
1570       gimple *stmt = last_stmt (bb);
1571       tree label, new_label;
1572 
1573       if (!stmt)
1574 	continue;
1575 
1576       switch (gimple_code (stmt))
1577 	{
1578 	case GIMPLE_COND:
1579 	  {
1580 	    gcond *cond_stmt = as_a <gcond *> (stmt);
1581 	    label = gimple_cond_true_label (cond_stmt);
1582 	    if (label)
1583 	      {
1584 		new_label = main_block_label (label, label_for_bb);
1585 		if (new_label != label)
1586 		  gimple_cond_set_true_label (cond_stmt, new_label);
1587 	      }
1588 
1589 	    label = gimple_cond_false_label (cond_stmt);
1590 	    if (label)
1591 	      {
1592 		new_label = main_block_label (label, label_for_bb);
1593 		if (new_label != label)
1594 		  gimple_cond_set_false_label (cond_stmt, new_label);
1595 	      }
1596 	  }
1597 	  break;
1598 
1599 	case GIMPLE_SWITCH:
1600 	  {
1601 	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
1602 	    size_t i, n = gimple_switch_num_labels (switch_stmt);
1603 
1604 	    /* Replace all destination labels.  */
1605 	    for (i = 0; i < n; ++i)
1606 	      {
1607 		tree case_label = gimple_switch_label (switch_stmt, i);
1608 		label = CASE_LABEL (case_label);
1609 		new_label = main_block_label (label, label_for_bb);
1610 		if (new_label != label)
1611 		  CASE_LABEL (case_label) = new_label;
1612 	      }
1613 	    break;
1614 	  }
1615 
1616 	case GIMPLE_ASM:
1617 	  {
1618 	    gasm *asm_stmt = as_a <gasm *> (stmt);
1619 	    int i, n = gimple_asm_nlabels (asm_stmt);
1620 
1621 	    for (i = 0; i < n; ++i)
1622 	      {
1623 		tree cons = gimple_asm_label_op (asm_stmt, i);
1624 		tree label = main_block_label (TREE_VALUE (cons), label_for_bb);
1625 		TREE_VALUE (cons) = label;
1626 	      }
1627 	    break;
1628 	  }
1629 
1630 	/* We have to handle gotos until they're removed, and we don't
1631 	   remove them until after we've created the CFG edges.  */
1632 	case GIMPLE_GOTO:
1633 	  if (!computed_goto_p (stmt))
1634 	    {
1635 	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
1636 	      label = gimple_goto_dest (goto_stmt);
1637 	      new_label = main_block_label (label, label_for_bb);
1638 	      if (new_label != label)
1639 		gimple_goto_set_dest (goto_stmt, new_label);
1640 	    }
1641 	  break;
1642 
1643 	case GIMPLE_TRANSACTION:
1644 	  {
1645 	    gtransaction *txn = as_a <gtransaction *> (stmt);
1646 
1647 	    label = gimple_transaction_label_norm (txn);
1648 	    if (label)
1649 	      {
1650 		new_label = main_block_label (label, label_for_bb);
1651 		if (new_label != label)
1652 		  gimple_transaction_set_label_norm (txn, new_label);
1653 	      }
1654 
1655 	    label = gimple_transaction_label_uninst (txn);
1656 	    if (label)
1657 	      {
1658 		new_label = main_block_label (label, label_for_bb);
1659 		if (new_label != label)
1660 		  gimple_transaction_set_label_uninst (txn, new_label);
1661 	      }
1662 
1663 	    label = gimple_transaction_label_over (txn);
1664 	    if (label)
1665 	      {
1666 		new_label = main_block_label (label, label_for_bb);
1667 		if (new_label != label)
1668 		  gimple_transaction_set_label_over (txn, new_label);
1669 	      }
1670 	  }
1671 	  break;
1672 
1673 	default:
1674 	  break;
1675       }
1676     }
1677 
1678   /* Do the same for the exception region tree labels.  */
1679   cleanup_dead_labels_eh (label_for_bb);
1680 
1681   /* Finally, purge dead labels.  All user-defined labels and labels that
1682      can be the target of non-local gotos and labels which have their
1683      address taken are preserved.  */
1684   FOR_EACH_BB_FN (bb, cfun)
1685     {
1686       gimple_stmt_iterator i;
1687       tree label_for_this_bb = label_for_bb[bb->index].label;
1688 
1689       if (!label_for_this_bb)
1690 	continue;
1691 
1692       /* If the main label of the block is unused, we may still remove it.  */
1693       if (!label_for_bb[bb->index].used)
1694 	label_for_this_bb = NULL;
1695 
1696       for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1697 	{
1698 	  tree label;
1699 	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1700 
1701 	  if (!label_stmt)
1702 	    break;
1703 
1704 	  label = gimple_label_label (label_stmt);
1705 
1706 	  if (label == label_for_this_bb
1707 	      || !DECL_ARTIFICIAL (label)
1708 	      || DECL_NONLOCAL (label)
1709 	      || FORCED_LABEL (label))
1710 	    gsi_next (&i);
1711 	  else
1712 	    {
1713 	      gcc_checking_assert (EH_LANDING_PAD_NR (label) == 0);
1714 	      gsi_remove (&i, true);
1715 	    }
1716 	}
1717     }
1718 
1719   free (label_for_bb);
1720 }
1721 
1722 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1723    the ones jumping to the same label.
1724    E.g. three separate entries 1: 2: 3: become one entry 1..3:  */
1725 
1726 bool
1727 group_case_labels_stmt (gswitch *stmt)
1728 {
1729   int old_size = gimple_switch_num_labels (stmt);
1730   int i, next_index, new_size;
1731   basic_block default_bb = NULL;
1732   hash_set<tree> *removed_labels = NULL;
1733 
1734   default_bb = gimple_switch_default_bb (cfun, stmt);
1735 
1736   /* Look for possible opportunities to merge cases.  */
1737   new_size = i = 1;
1738   while (i < old_size)
1739     {
1740       tree base_case, base_high;
1741       basic_block base_bb;
1742 
1743       base_case = gimple_switch_label (stmt, i);
1744 
1745       gcc_assert (base_case);
1746       base_bb = label_to_block (cfun, CASE_LABEL (base_case));
1747 
1748       /* Discard cases that have the same destination as the default case or
1749 	 whose destination blocks have already been removed as unreachable.  */
1750       if (base_bb == NULL
1751 	  || base_bb == default_bb
1752 	  || (removed_labels
1753 	      && removed_labels->contains (CASE_LABEL (base_case))))
1754 	{
1755 	  i++;
1756 	  continue;
1757 	}
1758 
1759       base_high = CASE_HIGH (base_case)
1760 	  ? CASE_HIGH (base_case)
1761 	  : CASE_LOW (base_case);
1762       next_index = i + 1;
1763 
1764       /* Try to merge case labels.  Break out when we reach the end
1765 	 of the label vector or when we cannot merge the next case
1766 	 label with the current one.  */
1767       while (next_index < old_size)
1768 	{
1769 	  tree merge_case = gimple_switch_label (stmt, next_index);
1770 	  basic_block merge_bb = label_to_block (cfun, CASE_LABEL (merge_case));
1771 	  wide_int bhp1 = wi::to_wide (base_high) + 1;
1772 
1773 	  /* Merge the cases if they jump to the same place,
1774 	     and their ranges are consecutive.  */
1775 	  if (merge_bb == base_bb
1776 	      && (removed_labels == NULL
1777 		  || !removed_labels->contains (CASE_LABEL (merge_case)))
1778 	      && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
1779 	    {
1780 	      base_high
1781 		= (CASE_HIGH (merge_case)
1782 		   ? CASE_HIGH (merge_case) : CASE_LOW (merge_case));
1783 	      CASE_HIGH (base_case) = base_high;
1784 	      next_index++;
1785 	    }
1786 	  else
1787 	    break;
1788 	}
1789 
1790       /* Discard cases that have an unreachable destination block.  */
1791       if (EDGE_COUNT (base_bb->succs) == 0
1792 	  && gimple_seq_unreachable_p (bb_seq (base_bb))
1793 	  /* Don't optimize this if __builtin_unreachable () is the
1794 	     implicitly added one by the C++ FE too early, before
1795 	     -Wreturn-type can be diagnosed.  We'll optimize it later
1796 	     during switchconv pass or any other cfg cleanup.  */
1797 	  && (gimple_in_ssa_p (cfun)
1798 	      || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb)))
1799 		  != BUILTINS_LOCATION)))
1800 	{
1801 	  edge base_edge = find_edge (gimple_bb (stmt), base_bb);
1802 	  if (base_edge != NULL)
1803 	    {
1804 	      for (gimple_stmt_iterator gsi = gsi_start_bb (base_bb);
1805 		   !gsi_end_p (gsi); gsi_next (&gsi))
1806 		if (glabel *stmt = dyn_cast <glabel *> (gsi_stmt (gsi)))
1807 		  {
1808 		    if (FORCED_LABEL (gimple_label_label (stmt))
1809 			|| DECL_NONLOCAL (gimple_label_label (stmt)))
1810 		      {
1811 			/* Forced/non-local labels aren't going to be removed,
1812 			   but they will be moved to some neighbouring basic
1813 			   block. If some later case label refers to one of
1814 			   those labels, we should throw that case away rather
1815 		   than keeping it around and referring to some random
1816 			   other basic block without an edge to it.  */
1817 			if (removed_labels == NULL)
1818 			  removed_labels = new hash_set<tree>;
1819 			removed_labels->add (gimple_label_label (stmt));
1820 		      }
1821 		  }
1822 		else
1823 		  break;
1824 	      remove_edge_and_dominated_blocks (base_edge);
1825 	    }
1826 	  i = next_index;
1827 	  continue;
1828 	}
1829 
1830       if (new_size < i)
1831 	gimple_switch_set_label (stmt, new_size,
1832 				 gimple_switch_label (stmt, i));
1833       i = next_index;
1834       new_size++;
1835     }
1836 
1837   gcc_assert (new_size <= old_size);
1838 
1839   if (new_size < old_size)
1840     gimple_switch_set_num_labels (stmt, new_size);
1841 
1842   delete removed_labels;
1843   return new_size < old_size;
1844 }
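
/* Illustrative sketch (an editor's example, not part of the original
   file): for source code such as

     switch (x)
       {
       case 1:
       case 2:
       case 3:
	 foo ();
	 break;
       default:
	 break;
       }

   the three case labels all branch to the same block, so
   group_case_labels_stmt collapses them into a single CASE_LABEL_EXPR
   with CASE_LOW == 1 and CASE_HIGH == 3, shrinking the label vector
   from four entries (counting the default at index 0) to two, and
   returns true to signal the change.  */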
1845 
1846 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1847    and scan the sorted vector of cases.  Combine the ones jumping to the
1848    same label.  */
1849 
1850 bool
1851 group_case_labels (void)
1852 {
1853   basic_block bb;
1854   bool changed = false;
1855 
1856   FOR_EACH_BB_FN (bb, cfun)
1857     {
1858       gimple *stmt = last_stmt (bb);
1859       if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1860 	changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
1861     }
1862 
1863   return changed;
1864 }
1865 
1866 /* Checks whether we can merge block B into block A.  */
1867 
1868 static bool
1869 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1870 {
1871   gimple *stmt;
1872 
1873   if (!single_succ_p (a))
1874     return false;
1875 
1876   if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1877     return false;
1878 
1879   if (single_succ (a) != b)
1880     return false;
1881 
1882   if (!single_pred_p (b))
1883     return false;
1884 
1885   if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1886       || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1887     return false;
1888 
1889   /* If A ends by a statement causing exceptions or something similar, we
1890      cannot merge the blocks.  */
1891   stmt = last_stmt (a);
1892   if (stmt && stmt_ends_bb_p (stmt))
1893     return false;
1894 
1895   /* Do not allow a block with only a non-local label to be merged.  */
1896   if (stmt)
1897     if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1898       if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1899 	return false;
1900 
1901   /* Examine the labels at the beginning of B.  */
1902   for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1903        gsi_next (&gsi))
1904     {
1905       tree lab;
1906       glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1907       if (!label_stmt)
1908 	break;
1909       lab = gimple_label_label (label_stmt);
1910 
1911       /* Do not remove user forced labels or for -O0 any user labels.  */
1912       if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1913 	return false;
1914     }
1915 
1916   /* Protect simple loop latches.  We only want to avoid merging
1917      the latch with the loop header or with a block in another
1918      loop in this case.  */
1919   if (current_loops
1920       && b->loop_father->latch == b
1921       && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1922       && (b->loop_father->header == a
1923 	  || b->loop_father != a->loop_father))
1924     return false;
1925 
1926   /* It must be possible to eliminate all phi nodes in B.  If ssa form
1927      is not up-to-date and a name-mapping is registered, we cannot eliminate
1928      any phis.  Symbols marked for renaming are never a problem though.  */
1929   for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1930        gsi_next (&gsi))
1931     {
1932       gphi *phi = gsi.phi ();
1933       /* Technically only new names matter.  */
1934       if (name_registered_for_update_p (PHI_RESULT (phi)))
1935 	return false;
1936     }
1937 
1938   /* When not optimizing, don't merge if we'd lose goto_locus.  */
1939   if (!optimize
1940       && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1941     {
1942       location_t goto_locus = single_succ_edge (a)->goto_locus;
1943       gimple_stmt_iterator prev, next;
1944       prev = gsi_last_nondebug_bb (a);
1945       next = gsi_after_labels (b);
1946       if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1947 	gsi_next_nondebug (&next);
1948       if ((gsi_end_p (prev)
1949 	   || gimple_location (gsi_stmt (prev)) != goto_locus)
1950 	  && (gsi_end_p (next)
1951 	      || gimple_location (gsi_stmt (next)) != goto_locus))
1952 	return false;
1953     }
1954 
1955   return true;
1956 }
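
/* Illustrative example (not from the original file): the blocks

     <bb 2>:  x_1 = a_2 + 1;      single successor: <bb 3>
     <bb 3>:  y_3 = x_1 * 2;      single predecessor: <bb 2>

   pass the checks above and may be merged, provided <bb 3> starts
   with no user label that has to be kept, carries no PHI whose result
   is pending an SSA update, and merging would not destroy a simple
   loop latch or a goto_locus needed at -O0.  */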
1957 
1958 /* Replaces all uses of NAME by VAL.  */
1959 
1960 void
1961 replace_uses_by (tree name, tree val)
1962 {
1963   imm_use_iterator imm_iter;
1964   use_operand_p use;
1965   gimple *stmt;
1966   edge e;
1967 
1968   FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1969     {
1970       /* Mark the block if we change the last stmt in it.  */
1971       if (cfgcleanup_altered_bbs
1972 	  && stmt_ends_bb_p (stmt))
1973 	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1974 
1975       FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1976         {
1977 	  replace_exp (use, val);
1978 
1979 	  if (gimple_code (stmt) == GIMPLE_PHI)
1980 	    {
1981 	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1982 				       PHI_ARG_INDEX_FROM_USE (use));
1983 	      if (e->flags & EDGE_ABNORMAL
1984 		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1985 		{
1986 		  /* This can only occur for virtual operands, since
1987 		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1988 		     would prevent replacement.  */
1989 		  gcc_checking_assert (virtual_operand_p (name));
1990 		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1991 		}
1992 	    }
1993 	}
1994 
1995       if (gimple_code (stmt) != GIMPLE_PHI)
1996 	{
1997 	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1998 	  gimple *orig_stmt = stmt;
1999 	  size_t i;
2000 
2001 	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
2002 	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
2003 	     only change sth from non-invariant to invariant, and only
2004 	     when propagating constants.  */
2005 	  if (is_gimple_min_invariant (val))
2006 	    for (i = 0; i < gimple_num_ops (stmt); i++)
2007 	      {
2008 		tree op = gimple_op (stmt, i);
2009 		/* Operands may be empty here.  For example, the labels
2010 		   of a GIMPLE_COND are nulled out following the creation
2011 		   of the corresponding CFG edges.  */
2012 		if (op && TREE_CODE (op) == ADDR_EXPR)
2013 		  recompute_tree_invariant_for_addr_expr (op);
2014 	      }
2015 
2016 	  if (fold_stmt (&gsi))
2017 	    stmt = gsi_stmt (gsi);
2018 
2019 	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
2020 	    gimple_purge_dead_eh_edges (gimple_bb (stmt));
2021 
2022 	  update_stmt (stmt);
2023 	}
2024     }
2025 
2026   gcc_checking_assert (has_zero_uses (name));
2027 
2028   /* Also update the trees stored in loop structures.  */
2029   if (current_loops)
2030     {
2031       for (auto loop : loops_list (cfun, 0))
2032 	  substitute_in_loop_info (loop, name, val);
2033     }
2034 }
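
/* Worked example (editor's sketch, with hypothetical SSA names): given

     y_3 = x_2 + 1;
     if (x_2 > 4) goto <bb 4>; else goto <bb 5>;

   a call to replace_uses_by (x_2, 5) rewrites each immediate use of
   x_2 and lets fold_stmt simplify the touched statements, roughly
   yielding

     y_3 = 6;
     if (1 != 0) goto <bb 4>; else goto <bb 5>;

   so propagated constants are folded on the spot.  */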
2035 
2036 /* Merge block B into block A.  */
2037 
2038 static void
2039 gimple_merge_blocks (basic_block a, basic_block b)
2040 {
2041   gimple_stmt_iterator last, gsi;
2042   gphi_iterator psi;
2043 
2044   if (dump_file)
2045     fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
2046 
2047   /* Remove all single-valued PHI nodes from block B of the form
2048      V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
2049   gsi = gsi_last_bb (a);
2050   for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
2051     {
2052       gimple *phi = gsi_stmt (psi);
2053       tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
2054       gimple *copy;
2055       bool may_replace_uses = (virtual_operand_p (def)
2056 			       || may_propagate_copy (def, use));
2057 
2058       /* In case we maintain loop closed ssa form, do not propagate arguments
2059 	 of loop exit phi nodes.  */
2060       if (current_loops
2061 	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
2062 	  && !virtual_operand_p (def)
2063 	  && TREE_CODE (use) == SSA_NAME
2064 	  && a->loop_father != b->loop_father)
2065 	may_replace_uses = false;
2066 
2067       if (!may_replace_uses)
2068 	{
2069 	  gcc_assert (!virtual_operand_p (def));
2070 
2071 	  /* Note that just emitting the copies is fine -- there is no problem
2072 	     with ordering of phi nodes.  This is because A is the single
2073 	     predecessor of B, therefore results of the phi nodes cannot
2074 	     appear as arguments of the phi nodes.  */
2075 	  copy = gimple_build_assign (def, use);
2076 	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
2077           remove_phi_node (&psi, false);
2078 	}
2079       else
2080         {
2081 	  /* If we deal with a PHI for virtual operands, we can simply
2082 	     propagate these without fussing with folding or updating
2083 	     the stmt.  */
2084 	  if (virtual_operand_p (def))
2085 	    {
2086 	      imm_use_iterator iter;
2087 	      use_operand_p use_p;
2088 	      gimple *stmt;
2089 
2090 	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2091 		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2092 		  SET_USE (use_p, use);
2093 
2094 	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2095 		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2096 	    }
2097 	  else
2098             replace_uses_by (def, use);
2099 
2100           remove_phi_node (&psi, true);
2101         }
2102     }
2103 
2104   /* Ensure that B follows A.  */
2105   move_block_after (b, a);
2106 
2107   gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2108   gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2109 
2110   /* Remove labels from B and set gimple_bb to A for other statements.  */
2111   for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2112     {
2113       gimple *stmt = gsi_stmt (gsi);
2114       if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2115 	{
2116 	  tree label = gimple_label_label (label_stmt);
2117 	  int lp_nr;
2118 
2119 	  gsi_remove (&gsi, false);
2120 
2121 	  /* Now that we can thread computed gotos, we might have
2122 	     a situation where we have a forced label in block B.
2123 	     However, the label at the start of block B might still be
2124 	     used in other ways (think about the runtime checking for
2125 	     Fortran assigned gotos).  So we cannot just delete the
2126 	     label.  Instead we move the label to the start of block A.  */
2127 	  if (FORCED_LABEL (label))
2128 	    {
2129 	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2130 	      tree first_label = NULL_TREE;
2131 	      if (!gsi_end_p (dest_gsi))
2132 		if (glabel *first_label_stmt
2133 		    = dyn_cast <glabel *> (gsi_stmt (dest_gsi)))
2134 		  first_label = gimple_label_label (first_label_stmt);
2135 	      if (first_label
2136 		  && (DECL_NONLOCAL (first_label)
2137 		      || EH_LANDING_PAD_NR (first_label) != 0))
2138 		gsi_insert_after (&dest_gsi, stmt, GSI_NEW_STMT);
2139 	      else
2140 		gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2141 	    }
2142 	  /* Other user labels are kept around in the form of a debug stmt.  */
2143 	  else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_BIND_STMTS)
2144 	    {
2145 	      gimple *dbg = gimple_build_debug_bind (label,
2146 						     integer_zero_node,
2147 						     stmt);
2148 	      gimple_debug_bind_reset_value (dbg);
2149 	      gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2150 	    }
2151 
2152 	  lp_nr = EH_LANDING_PAD_NR (label);
2153 	  if (lp_nr)
2154 	    {
2155 	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2156 	      lp->post_landing_pad = NULL;
2157 	    }
2158 	}
2159       else
2160 	{
2161 	  gimple_set_bb (stmt, a);
2162 	  gsi_next (&gsi);
2163 	}
2164     }
2165 
2166   /* When merging two BBs, if their counts are different, the larger count
2167      is selected as the new bb count. This is to handle inconsistent
2168      profiles.  */
2169   if (a->loop_father == b->loop_father)
2170     {
2171       a->count = a->count.merge (b->count);
2172     }
2173 
2174   /* Merge the sequences.  */
2175   last = gsi_last_bb (a);
2176   gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2177   set_bb_seq (b, NULL);
2178 
2179   if (cfgcleanup_altered_bbs)
2180     bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2181 }
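
/* Illustrative example (not part of the original file): when B is
   merged into its single predecessor A, a PHI node in B such as

     V_4 = PHI <V_2(A)>

   is single-valued.  If copy-propagating V_2 is legal, every use of
   V_4 is rewritten to V_2 and the PHI is deleted; otherwise an
   explicit copy V_4 = V_2 is appended to A before B's statements are
   spliced in.  */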
2182 
2183 
2184 /* Return the one of two successors of BB that is not reachable by a
2185    complex edge, if there is one.  Else, return BB.  We use
2186    this in optimizations that use post-dominators for their heuristics,
2187    to catch the cases in C++ where function calls are involved.  */
2188 
2189 basic_block
2190 single_noncomplex_succ (basic_block bb)
2191 {
2192   edge e0, e1;
2193   if (EDGE_COUNT (bb->succs) != 2)
2194     return bb;
2195 
2196   e0 = EDGE_SUCC (bb, 0);
2197   e1 = EDGE_SUCC (bb, 1);
2198   if (e0->flags & EDGE_COMPLEX)
2199     return e1->dest;
2200   if (e1->flags & EDGE_COMPLEX)
2201     return e0->dest;
2202 
2203   return bb;
2204 }
2205 
2206 /* CALL is a GIMPLE_CALL.  Set current_function_calls_* flags.  */
2207 
2208 void
2209 notice_special_calls (gcall *call)
2210 {
2211   int flags = gimple_call_flags (call);
2212 
2213   if (flags & ECF_MAY_BE_ALLOCA)
2214     cfun->calls_alloca = true;
2215   if (flags & ECF_RETURNS_TWICE)
2216     cfun->calls_setjmp = true;
2217 }
2218 
2219 
2220 /* Clear flags set by notice_special_calls.  Used by dead code removal
2221    to update the flags.  */
2222 
2223 void
2224 clear_special_calls (void)
2225 {
2226   cfun->calls_alloca = false;
2227   cfun->calls_setjmp = false;
2228 }
2229 
2230 /* Remove PHI nodes associated with basic block BB and all edges out of BB.  */
2231 
2232 static void
2233 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2234 {
2235   /* Since this block is no longer reachable, we can just delete all
2236      of its PHI nodes.  */
2237   remove_phi_nodes (bb);
2238 
2239   /* Remove edges to BB's successors.  */
2240   while (EDGE_COUNT (bb->succs) > 0)
2241     remove_edge (EDGE_SUCC (bb, 0));
2242 }
2243 
2244 
2245 /* Remove statements of basic block BB.  */
2246 
2247 static void
2248 remove_bb (basic_block bb)
2249 {
2250   gimple_stmt_iterator i;
2251 
2252   if (dump_file)
2253     {
2254       fprintf (dump_file, "Removing basic block %d\n", bb->index);
2255       if (dump_flags & TDF_DETAILS)
2256 	{
2257 	  dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2258 	  fprintf (dump_file, "\n");
2259 	}
2260     }
2261 
2262   if (current_loops)
2263     {
2264       class loop *loop = bb->loop_father;
2265 
2266       /* If a loop gets removed, clean up the information associated
2267 	 with it.  */
2268       if (loop->latch == bb
2269 	  || loop->header == bb)
2270 	free_numbers_of_iterations_estimates (loop);
2271     }
2272 
2273   /* Remove all the instructions in the block.  */
2274   if (bb_seq (bb) != NULL)
2275     {
2276       /* Walk backwards so as to get a chance to substitute all
2277 	 released DEFs into debug stmts.  See
2278 	 eliminate_unnecessary_stmts() in tree-ssa-dce.cc for more
2279 	 details.  */
2280       for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2281 	{
2282 	  gimple *stmt = gsi_stmt (i);
2283 	  glabel *label_stmt = dyn_cast <glabel *> (stmt);
2284 	  if (label_stmt
2285 	      && (FORCED_LABEL (gimple_label_label (label_stmt))
2286 		  || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2287 	    {
2288 	      basic_block new_bb;
2289 	      gimple_stmt_iterator new_gsi;
2290 
2291 	      /* A non-reachable non-local label may still be referenced.
2292 		 But it no longer needs to carry the extra semantics of
2293 		 non-locality.  */
2294 	      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2295 		{
2296 		  DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2297 		  FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2298 		}
2299 
2300 	      new_bb = bb->prev_bb;
2301 	      /* Don't move any labels into ENTRY block.  */
2302 	      if (new_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2303 		{
2304 		  new_bb = single_succ (new_bb);
2305 		  gcc_assert (new_bb != bb);
2306 		}
2307 	      if ((unsigned) bb->index < bb_to_omp_idx.length ()
2308 		  && ((unsigned) new_bb->index >= bb_to_omp_idx.length ()
2309 		      || (bb_to_omp_idx[bb->index]
2310 			  != bb_to_omp_idx[new_bb->index])))
2311 		{
2312 		  /* During cfg pass make sure to put orphaned labels
2313 		     into the right OMP region.  */
2314 		  unsigned int i;
2315 		  int idx;
2316 		  new_bb = NULL;
2317 		  FOR_EACH_VEC_ELT (bb_to_omp_idx, i, idx)
2318 		    if (i >= NUM_FIXED_BLOCKS
2319 			&& idx == bb_to_omp_idx[bb->index]
2320 			&& i != (unsigned) bb->index)
2321 		      {
2322 			new_bb = BASIC_BLOCK_FOR_FN (cfun, i);
2323 			break;
2324 		      }
2325 		  if (new_bb == NULL)
2326 		    {
2327 		      new_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2328 		      gcc_assert (new_bb != bb);
2329 		    }
2330 		}
2331 	      new_gsi = gsi_after_labels (new_bb);
2332 	      gsi_remove (&i, false);
2333 	      gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2334 	    }
2335 	  else
2336 	    {
2337 	      /* Release SSA definitions.  */
2338 	      release_defs (stmt);
2339 	      gsi_remove (&i, true);
2340 	    }
2341 
2342 	  if (gsi_end_p (i))
2343 	    i = gsi_last_bb (bb);
2344 	  else
2345 	    gsi_prev (&i);
2346 	}
2347     }
2348 
2349   if ((unsigned) bb->index < bb_to_omp_idx.length ())
2350     bb_to_omp_idx[bb->index] = -1;
2351   remove_phi_nodes_and_edges_for_unreachable_block (bb);
2352   bb->il.gimple.seq = NULL;
2353   bb->il.gimple.phi_nodes = NULL;
2354 }
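
/* Illustrative example (editor's sketch): if an unreachable block

     L1:                 (a FORCED_LABEL, e.g. a computed-goto target
       foo ();            whose address was taken)

   is deleted, the call to foo () is removed and its SSA defs are
   released, but L1 itself is moved to a surviving block (during the
   cfg pass, to one inside the same OMP region) because stored label
   addresses may still refer to it.  */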
2355 
2356 
2357 /* Given a basic block BB and a value VAL for use in the final statement
2358    of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
2359    the edge that will be taken out of the block.
2360    If VAL is NULL_TREE, then the current value of the final statement's
2361    predicate or index is used.
2362    If the value does not match a unique edge, NULL is returned.  */
2363 
2364 edge
2365 find_taken_edge (basic_block bb, tree val)
2366 {
2367   gimple *stmt;
2368 
2369   stmt = last_stmt (bb);
2370 
2371   /* Handle ENTRY and EXIT.  */
2372   if (!stmt)
2373     return NULL;
2374 
2375   if (gimple_code (stmt) == GIMPLE_COND)
2376     return find_taken_edge_cond_expr (as_a <gcond *> (stmt), val);
2377 
2378   if (gimple_code (stmt) == GIMPLE_SWITCH)
2379     return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), val);
2380 
2381   if (computed_goto_p (stmt))
2382     {
2383       /* Only optimize if the argument is a label; if the argument is
2384 	 not a label then we cannot construct a proper CFG.
2385 
2386          It may be the case that we only need to allow the LABEL_REF to
2387          appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2388          appear inside a LABEL_EXPR just to be safe.  */
2389       if (val
2390 	  && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2391 	  && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2392 	return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2393     }
2394 
2395   /* Otherwise we only know the taken successor edge if it's unique.  */
2396   return single_succ_p (bb) ? single_succ_edge (bb) : NULL;
2397 }
2398 
2399 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2400    statement, determine which of the outgoing edges will be taken out of the
2401    block.  Return NULL if either edge may be taken.  */
2402 
2403 static edge
2404 find_taken_edge_computed_goto (basic_block bb, tree val)
2405 {
2406   basic_block dest;
2407   edge e = NULL;
2408 
2409   dest = label_to_block (cfun, val);
2410   if (dest)
2411     e = find_edge (bb, dest);
2412 
2413   /* It's possible for find_edge to return NULL here on invalid code
2414      that abuses the labels-as-values extension (e.g. code that attempts to
2415      jump *between* functions via stored labels-as-values; PR 84136).
2416      If so, then we simply return that NULL for the edge.
2417      We don't currently have a way of detecting such invalid code, so we
2418      can't assert that it was the case when a NULL edge occurs here.  */
2419 
2420   return e;
2421 }
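
/* Illustrative example (not from the original file): for the
   labels-as-values extension

     void *tgt = &&L1;
     ...
     goto *tgt;

   once the goto's argument has been propagated to the constant &&L1
   (an ADDR_EXPR of a LABEL_DECL), find_taken_edge can map L1 to its
   basic block and return the single edge that will be taken, letting
   callers prune the remaining outgoing edges.  */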
2422 
2423 /* Given COND_STMT and a constant value VAL for use as the predicate,
2424    determine which of the two edges will be taken out of
2425    the statement's block.  Return NULL if either edge may be taken.
2426    If VAL is NULL_TREE, then the current value of COND_STMT's predicate
2427    is used.  */
2428 
2429 static edge
2430 find_taken_edge_cond_expr (const gcond *cond_stmt, tree val)
2431 {
2432   edge true_edge, false_edge;
2433 
2434   if (val == NULL_TREE)
2435     {
2436       /* Use the current value of the predicate.  */
2437       if (gimple_cond_true_p (cond_stmt))
2438 	val = integer_one_node;
2439       else if (gimple_cond_false_p (cond_stmt))
2440 	val = integer_zero_node;
2441       else
2442 	return NULL;
2443     }
2444   else if (TREE_CODE (val) != INTEGER_CST)
2445     return NULL;
2446 
2447   extract_true_false_edges_from_block (gimple_bb (cond_stmt),
2448 				       &true_edge, &false_edge);
2449 
2450   return (integer_zerop (val) ? false_edge : true_edge);
2451 }
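
/* Example (illustrative): for the conditional

     if (a_1 > 10) goto <bb 3>; else goto <bb 4>;

   passing VAL = integer_one_node returns the edge to <bb 3>,
   VAL = integer_zero_node returns the edge to <bb 4>, and any
   non-constant VAL (or an undecided predicate when VAL is NULL_TREE)
   yields NULL.  */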
2452 
2453 /* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
2454    which edge will be taken out of the statement's block.  Return NULL if any
2455    edge may be taken.
2456    If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
2457    is used.  */
2458 
2459 edge
2460 find_taken_edge_switch_expr (const gswitch *switch_stmt, tree val)
2461 {
2462   basic_block dest_bb;
2463   edge e;
2464   tree taken_case;
2465 
2466   if (gimple_switch_num_labels (switch_stmt) == 1)
2467     taken_case = gimple_switch_default_label (switch_stmt);
2468   else
2469     {
2470       if (val == NULL_TREE)
2471 	val = gimple_switch_index (switch_stmt);
2472       if (TREE_CODE (val) != INTEGER_CST)
2473 	return NULL;
2474       else
2475 	taken_case = find_case_label_for_value (switch_stmt, val);
2476     }
2477   dest_bb = label_to_block (cfun, CASE_LABEL (taken_case));
2478 
2479   e = find_edge (gimple_bb (switch_stmt), dest_bb);
2480   gcc_assert (e);
2481   return e;
2482 }
2483 
2484 
2485 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2486    We can make optimal use here of the fact that the case labels are
2487    sorted: We can do a binary search for a case matching VAL.  */
2488 
2489 tree
2490 find_case_label_for_value (const gswitch *switch_stmt, tree val)
2491 {
2492   size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2493   tree default_case = gimple_switch_default_label (switch_stmt);
2494 
2495   for (low = 0, high = n; high - low > 1; )
2496     {
2497       size_t i = (high + low) / 2;
2498       tree t = gimple_switch_label (switch_stmt, i);
2499       int cmp;
2500 
2501       /* Cache the result of comparing CASE_LOW and val.  */
2502       cmp = tree_int_cst_compare (CASE_LOW (t), val);
2503 
2504       if (cmp > 0)
2505 	high = i;
2506       else
2507 	low = i;
2508 
2509       if (CASE_HIGH (t) == NULL)
2510 	{
2511 	  /* A single-valued case label.  */
2512 	  if (cmp == 0)
2513 	    return t;
2514 	}
2515       else
2516 	{
2517 	  /* A case range.  We can only handle integer ranges.  */
2518 	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2519 	    return t;
2520 	}
2521     }
2522 
2523   return default_case;
2524 }
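
/* Walk-through (editor's example): for the sorted case vector

     idx 0: default    idx 1: case 1 ... 3    idx 2: case 7
     idx 3: case 10 ... 20

   and VAL = 12, the bisection first probes idx 2 (7 < 12, so LOW
   moves up), then idx 3, where CASE_LOW <= 12 <= CASE_HIGH holds and
   that CASE_LABEL_EXPR is returned.  For VAL = 5 no case matches and
   the default label is returned instead.  */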
2525 
2526 
2527 /* Dump a basic block on stderr.  */
2528 
2529 void
2530 gimple_debug_bb (basic_block bb)
2531 {
2532   dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2533 }
2534 
2535 
2536 /* Dump basic block with index N on stderr.  */
2537 
2538 basic_block
2539 gimple_debug_bb_n (int n)
2540 {
2541   gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2542   return BASIC_BLOCK_FOR_FN (cfun, n);
2543 }
2544 
2545 
2546 /* Dump the CFG on stderr.
2547 
2548    FLAGS are the same used by the tree dumping functions
2549    (see TDF_* in dumpfile.h).  */
2550 
2551 void
2552 gimple_debug_cfg (dump_flags_t flags)
2553 {
2554   gimple_dump_cfg (stderr, flags);
2555 }
2556 
2557 
2558 /* Dump the program showing basic block boundaries on the given FILE.
2559 
2560    FLAGS are the same used by the tree dumping functions (see TDF_* in
2561    tree.h).  */
2562 
2563 void
2564 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2565 {
2566   if (flags & TDF_DETAILS)
2567     {
2568       dump_function_header (file, current_function_decl, flags);
2569       fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2570 	       n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2571 	       last_basic_block_for_fn (cfun));
2572 
2573       brief_dump_cfg (file, flags);
2574       fprintf (file, "\n");
2575     }
2576 
2577   if (flags & TDF_STATS)
2578     dump_cfg_stats (file);
2579 
2580   dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2581 }
2582 
2583 
2584 /* Dump CFG statistics on FILE.  */
2585 
2586 void
2587 dump_cfg_stats (FILE *file)
2588 {
2589   static long max_num_merged_labels = 0;
2590   unsigned long size, total = 0;
2591   long num_edges;
2592   basic_block bb;
2593   const char * const fmt_str   = "%-30s%-13s%12s\n";
2594   const char * const fmt_str_1 = "%-30s%13d" PRsa (11) "\n";
2595   const char * const fmt_str_2 = "%-30s%13ld" PRsa (11) "\n";
2596   const char * const fmt_str_3 = "%-43s" PRsa (11) "\n";
2597   const char *funcname = current_function_name ();
2598 
2599   fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2600 
2601   fprintf (file, "---------------------------------------------------------\n");
2602   fprintf (file, fmt_str, "", "  Number of  ", "Memory");
2603   fprintf (file, fmt_str, "", "  instances  ", "used ");
2604   fprintf (file, "---------------------------------------------------------\n");
2605 
2606   size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2607   total += size;
2608   fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2609 	   SIZE_AMOUNT (size));
2610 
2611   num_edges = 0;
2612   FOR_EACH_BB_FN (bb, cfun)
2613     num_edges += EDGE_COUNT (bb->succs);
2614   size = num_edges * sizeof (class edge_def);
2615   total += size;
2616   fprintf (file, fmt_str_2, "Edges", num_edges, SIZE_AMOUNT (size));
2617 
2618   fprintf (file, "---------------------------------------------------------\n");
2619   fprintf (file, fmt_str_3, "Total memory used by CFG data",
2620 	   SIZE_AMOUNT (total));
2621   fprintf (file, "---------------------------------------------------------\n");
2622   fprintf (file, "\n");
2623 
2624   if (cfg_stats.num_merged_labels > max_num_merged_labels)
2625     max_num_merged_labels = cfg_stats.num_merged_labels;
2626 
2627   fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2628 	   cfg_stats.num_merged_labels, max_num_merged_labels);
2629 
2630   fprintf (file, "\n");
2631 }
2632 
2633 
2634 /* Dump CFG statistics on stderr.  Keep extern so that it's always
2635    linked in the final executable.  */
2636 
2637 DEBUG_FUNCTION void
2638 debug_cfg_stats (void)
2639 {
2640   dump_cfg_stats (stderr);
2641 }
2642 
2643 /*---------------------------------------------------------------------------
2644 			     Miscellaneous helpers
2645 ---------------------------------------------------------------------------*/
2646 
2647 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2648    flow.  Transfers of control flow associated with EH are excluded.  */
2649 
2650 static bool
2651 call_can_make_abnormal_goto (gimple *t)
2652 {
2653   /* If the function has no non-local labels, then a call cannot make an
2654      abnormal transfer of control.  */
2655   if (!cfun->has_nonlocal_label
2656       && !cfun->calls_setjmp)
2657    return false;
2658 
2659   /* Likewise if the call has no side effects.  */
2660   if (!gimple_has_side_effects (t))
2661     return false;
2662 
2663   /* Likewise if the called function is leaf.  */
2664   if (gimple_call_flags (t) & ECF_LEAF)
2665     return false;
2666 
2667   return true;
2668 }
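
/* Illustrative example (not from the original file): in

     if (setjmp (buf) == 0)
       foo ();

   cfun->calls_setjmp is set, so the call to foo () is treated as able
   to make an abnormal transfer of control: a longjmp executed inside
   foo () returns control to the setjmp site.  A call without side
   effects, or one declared with the "leaf" attribute, is exempt since
   it cannot reach a longjmp or nonlocal label in this unit.  */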
2669 
2670 
2671 /* Return true if T can make an abnormal transfer of control flow.
2672    Transfers of control flow associated with EH are excluded.  */
2673 
2674 bool
2675 stmt_can_make_abnormal_goto (gimple *t)
2676 {
2677   if (computed_goto_p (t))
2678     return true;
2679   if (is_gimple_call (t))
2680     return call_can_make_abnormal_goto (t);
2681   return false;
2682 }
2683 
2684 
2685 /* Return true if T represents a stmt that always transfers control.  */
2686 
2687 bool
2688 is_ctrl_stmt (gimple *t)
2689 {
2690   switch (gimple_code (t))
2691     {
2692     case GIMPLE_COND:
2693     case GIMPLE_SWITCH:
2694     case GIMPLE_GOTO:
2695     case GIMPLE_RETURN:
2696     case GIMPLE_RESX:
2697       return true;
2698     default:
2699       return false;
2700     }
2701 }
2702 
2703 
2704 /* Return true if T is a statement that may alter the flow of control
2705    (e.g., a call to a non-returning function).  */
2706 
2707 bool
2708 is_ctrl_altering_stmt (gimple *t)
2709 {
2710   gcc_assert (t);
2711 
2712   switch (gimple_code (t))
2713     {
2714     case GIMPLE_CALL:
2715       /* Per stmt call flag indicates whether the call could alter
2716 	 control flow.  */
2717       if (gimple_call_ctrl_altering_p (t))
2718 	return true;
2719       break;
2720 
2721     case GIMPLE_EH_DISPATCH:
2722       /* EH_DISPATCH branches to the individual catch handlers at
2723 	 this level of a try or allowed-exceptions region.  It can
2724 	 fallthru to the next statement as well.  */
2725       return true;
2726 
2727     case GIMPLE_ASM:
2728       if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2729 	return true;
2730       break;
2731 
2732     CASE_GIMPLE_OMP:
2733       /* OpenMP directives alter control flow.  */
2734       return true;
2735 
2736     case GIMPLE_TRANSACTION:
2737       /* A transaction start alters control flow.  */
2738       return true;
2739 
2740     default:
2741       break;
2742     }
2743 
2744   /* If a statement can throw, it alters control flow.  */
2745   return stmt_can_throw_internal (cfun, t);
2746 }
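
/* For instance (illustrative): a GIMPLE_CALL marked ctrl-altering,
   e.g. a call to a noreturn function such as abort (), ends its basic
   block, as does an asm goto carrying labels, any OpenMP directive, a
   transaction start, and any statement for which
   stmt_can_throw_internal is true.  */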
2747 
2748 
2749 /* Return true if T is a simple local goto.  */
2750 
2751 bool
2752 simple_goto_p (gimple *t)
2753 {
2754   return (gimple_code (t) == GIMPLE_GOTO
2755 	  && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2756 }
2757 
2758 
2759 /* Return true if STMT should start a new basic block.  PREV_STMT is
2760    the statement preceding STMT.  It is used when STMT is a label or a
2761    case label.  Labels should only start a new basic block if their
2762    previous statement wasn't a label.  Otherwise, sequence of labels
2763    would generate unnecessary basic blocks that only contain a single
2764    label.  */
2765 
2766 static inline bool
2767 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2768 {
2769   if (stmt == NULL)
2770     return false;
2771 
2772   /* PREV_STMT is only set to a debug stmt if the debug stmt is before
2773      any nondebug stmts in the block.  We don't want to start another
2774      block in this case: the debug stmt will already have started the
2775      one STMT would start if we weren't outputting debug stmts.  */
2776   if (prev_stmt && is_gimple_debug (prev_stmt))
2777     return false;
2778 
2779   /* Labels start a new basic block only if the preceding statement
2780      wasn't a label of the same type.  This prevents the creation of
2781      consecutive blocks that have nothing but a single label.  */
2782   if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2783     {
2784       /* Nonlocal and computed GOTO targets always start a new block.  */
2785       if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2786 	  || FORCED_LABEL (gimple_label_label (label_stmt)))
2787 	return true;
2788 
2789       if (glabel *plabel = safe_dyn_cast <glabel *> (prev_stmt))
2790 	{
2791 	  if (DECL_NONLOCAL (gimple_label_label (plabel))
2792 	      || !DECL_ARTIFICIAL (gimple_label_label (plabel)))
2793 	    return true;
2794 
2795 	  cfg_stats.num_merged_labels++;
2796 	  return false;
2797 	}
2798       else
2799 	return true;
2800     }
2801   else if (gimple_code (stmt) == GIMPLE_CALL)
2802     {
2803       if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2804 	/* setjmp acts similar to a nonlocal GOTO target and thus should
2805 	   start a new block.  */
2806 	return true;
2807       if (gimple_call_internal_p (stmt, IFN_PHI)
2808 	  && prev_stmt
2809 	  && gimple_code (prev_stmt) != GIMPLE_LABEL
2810 	  && (gimple_code (prev_stmt) != GIMPLE_CALL
2811 	      || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2812 	/* PHI nodes start a new block unless preceded by a label
2813 	   or another PHI.  */
2814 	return true;
2815     }
2816 
2817   return false;
2818 }
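
/* Illustrative example (not from the original file): in

     <D.2345>:          (an artificial, compiler-generated label)
     L2:
       x_1 = 1;

   only the first label opens a new basic block; L2 is merged into it
   and counted in cfg_stats.num_merged_labels.  Had the preceding
   label been user-defined or nonlocal, or had L2 itself been nonlocal
   or forced, L2 would start a block of its own; a call to setjmp
   (ECF_RETURNS_TWICE) likewise always starts a new block.  */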
2819 
2820 
2821 /* Return true if T should end a basic block.  */
2822 
2823 bool
2824 stmt_ends_bb_p (gimple *t)
2825 {
2826   return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2827 }
2828 
2829 /* Remove block annotations and other data structures.  */
2830 
2831 void
2832 delete_tree_cfg_annotations (struct function *fn)
2833 {
2834   vec_free (label_to_block_map_for_fn (fn));
2835 }
2836 
2837 /* Return the virtual phi in BB.  */
2838 
2839 gphi *
2840 get_virtual_phi (basic_block bb)
2841 {
2842   for (gphi_iterator gsi = gsi_start_phis (bb);
2843        !gsi_end_p (gsi);
2844        gsi_next (&gsi))
2845     {
2846       gphi *phi = gsi.phi ();
2847 
2848       if (virtual_operand_p (PHI_RESULT (phi)))
2849 	return phi;
2850     }
2851 
2852   return NULL;
2853 }
2854 
2855 /* Return the first statement in basic block BB.  */
2856 
2857 gimple *
2858 first_stmt (basic_block bb)
2859 {
2860   gimple_stmt_iterator i = gsi_start_bb (bb);
2861   gimple *stmt = NULL;
2862 
2863   while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2864     {
2865       gsi_next (&i);
2866       stmt = NULL;
2867     }
2868   return stmt;
2869 }
2870 
2871 /* Return the first non-label statement in basic block BB.  */
2872 
2873 static gimple *
2874 first_non_label_stmt (basic_block bb)
2875 {
2876   gimple_stmt_iterator i = gsi_start_bb (bb);
2877   while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2878     gsi_next (&i);
2879   return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2880 }
2881 
2882 /* Return the last statement in basic block BB.  */
2883 
2884 gimple *
2885 last_stmt (basic_block bb)
2886 {
2887   gimple_stmt_iterator i = gsi_last_bb (bb);
2888   gimple *stmt = NULL;
2889 
2890   while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2891     {
2892       gsi_prev (&i);
2893       stmt = NULL;
2894     }
2895   return stmt;
2896 }
2897 
2898 /* Return the last statement of an otherwise empty block.  Return NULL
2899    if the block is totally empty, or if it contains more than one
2900    statement.  */
2901 
2902 gimple *
2903 last_and_only_stmt (basic_block bb)
2904 {
2905   gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2906   gimple *last, *prev;
2907 
2908   if (gsi_end_p (i))
2909     return NULL;
2910 
2911   last = gsi_stmt (i);
2912   gsi_prev_nondebug (&i);
2913   if (gsi_end_p (i))
2914     return last;
2915 
2916   /* Empty statements should no longer appear in the instruction stream.
2917      Everything that might have appeared before should be deleted by
2918      remove_useless_stmts, and the optimizers should just gsi_remove
2919      instead of smashing with build_empty_stmt.
2920 
2921      Thus the only thing that should appear here in a block containing
2922      one executable statement is a label.  */
2923   prev = gsi_stmt (i);
2924   if (gimple_code (prev) == GIMPLE_LABEL)
2925     return last;
2926   else
2927     return NULL;
2928 }
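
/* Example (illustrative): for the block

     <bb 5>:
     L1:
       return x_1;

   last_and_only_stmt returns the GIMPLE_RETURN, since the only other
   statement is a label (debug stmts are skipped as well).  An empty
   block, or one with two executable statements, yields NULL.  */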
2929 
2930 /* Returns the basic block after which the new basic block created
2931    by splitting edge EDGE_IN should be placed.  Tries to keep the new block
2932    near its "logical" location.  This is of most help to humans looking
2933    at debugging dumps.  */
2934 
2935 basic_block
2936 split_edge_bb_loc (edge edge_in)
2937 {
2938   basic_block dest = edge_in->dest;
2939   basic_block dest_prev = dest->prev_bb;
2940 
2941   if (dest_prev)
2942     {
2943       edge e = find_edge (dest_prev, dest);
2944       if (e && !(e->flags & EDGE_COMPLEX))
2945 	return edge_in->src;
2946     }
2947   return dest_prev;
2948 }
2949 
2950 /* Split a (typically critical) edge EDGE_IN.  Return the new block.
2951    Abort on abnormal edges.  */
2952 
2953 static basic_block
2954 gimple_split_edge (edge edge_in)
2955 {
2956   basic_block new_bb, after_bb, dest;
2957   edge new_edge, e;
2958 
2959   /* Abnormal edges cannot be split.  */
2960   gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2961 
2962   dest = edge_in->dest;
2963 
2964   after_bb = split_edge_bb_loc (edge_in);
2965 
2966   new_bb = create_empty_bb (after_bb);
2967   new_bb->count = edge_in->count ();
2968 
2969   /* We want to avoid re-allocating PHIs when we first
2970      add the fallthru edge from new_bb to dest but we also
2971      want to avoid changing PHI argument order when
2972      first redirecting edge_in away from dest.  The former
2973      avoids changing PHI argument order by adding them
2974      last and then the redirection swapping it back into
2975      place by means of unordered remove.
2976      So hack around things by temporarily removing all PHIs
2977      from the destination during the edge redirection and then
2978      making sure the edges stay in order.  */
2979   gimple_seq saved_phis = phi_nodes (dest);
2980   unsigned old_dest_idx = edge_in->dest_idx;
2981   set_phi_nodes (dest, NULL);
2982   new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
2983   e = redirect_edge_and_branch (edge_in, new_bb);
2984   gcc_assert (e == edge_in && new_edge->dest_idx == old_dest_idx);
2985   /* set_phi_nodes sets the BB of the PHI nodes, so do it manually here.  */
2986   dest->il.gimple.phi_nodes = saved_phis;
2987 
2988   return new_bb;
2989 }
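
/* Illustrative sketch (not from the original file): splitting the
   edge <bb 2> -> <bb 4> in

     <bb 2> -> <bb 4>
     <bb 3> -> <bb 4>

   creates a fresh empty block on that edge:

     <bb 2> -> <bb 5> -> <bb 4>
     <bb 3> -> <bb 4>

   PHI arguments in <bb 4> that were associated with the edge from
   <bb 2> now flow in through <bb 5>; the save/restore of the PHI
   sequence above keeps their argument order unchanged.  */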
2990 
2991 
2992 /* Verify properties of the address expression T whose base should be
2993    TREE_ADDRESSABLE if VERIFY_ADDRESSABLE is true.  */
2994 
2995 static bool
2996 verify_address (tree t, bool verify_addressable)
2997 {
2998   bool old_constant;
2999   bool old_side_effects;
3000   bool new_constant;
3001   bool new_side_effects;
3002 
3003   old_constant = TREE_CONSTANT (t);
3004   old_side_effects = TREE_SIDE_EFFECTS (t);
3005 
3006   recompute_tree_invariant_for_addr_expr (t);
3007   new_side_effects = TREE_SIDE_EFFECTS (t);
3008   new_constant = TREE_CONSTANT (t);
3009 
3010   if (old_constant != new_constant)
3011     {
3012       error ("constant not recomputed when %<ADDR_EXPR%> changed");
3013       return true;
3014     }
3015   if (old_side_effects != new_side_effects)
3016     {
3017       error ("side effects not recomputed when %<ADDR_EXPR%> changed");
3018       return true;
3019     }
3020 
3021   tree base = TREE_OPERAND (t, 0);
3022   while (handled_component_p (base))
3023     base = TREE_OPERAND (base, 0);
3024 
3025   if (!(VAR_P (base)
3026 	|| TREE_CODE (base) == PARM_DECL
3027 	|| TREE_CODE (base) == RESULT_DECL))
3028     return false;
3029 
3030   if (verify_addressable && !TREE_ADDRESSABLE (base))
3031     {
3032       error ("address taken but %<TREE_ADDRESSABLE%> bit not set");
3033       return true;
3034     }
3035 
3036   return false;
3037 }
3038 
3039 
3040 /* Verify if EXPR is a valid GIMPLE reference expression.  If
3041    REQUIRE_LVALUE is true verifies it is an lvalue.  Returns true
3042    if there is an error, otherwise false.  */
3043 
3044 static bool
3045 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3046 {
3047   const char *code_name = get_tree_code_name (TREE_CODE (expr));
3048 
3049   if (TREE_CODE (expr) == REALPART_EXPR
3050       || TREE_CODE (expr) == IMAGPART_EXPR
3051       || TREE_CODE (expr) == BIT_FIELD_REF)
3052     {
3053       tree op = TREE_OPERAND (expr, 0);
3054       if (!is_gimple_reg_type (TREE_TYPE (expr)))
3055 	{
3056 	  error ("non-scalar %qs", code_name);
3057 	  return true;
3058 	}
3059 
3060       if (TREE_CODE (expr) == BIT_FIELD_REF)
3061 	{
3062 	  tree t1 = TREE_OPERAND (expr, 1);
3063 	  tree t2 = TREE_OPERAND (expr, 2);
3064 	  poly_uint64 size, bitpos;
3065 	  if (!poly_int_tree_p (t1, &size)
3066 	      || !poly_int_tree_p (t2, &bitpos)
3067 	      || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
3068 	      || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
3069 	    {
3070 	      error ("invalid position or size operand to %qs", code_name);
3071 	      return true;
3072 	    }
3073 	  if (INTEGRAL_TYPE_P (TREE_TYPE (expr))
3074 	      && maybe_ne (TYPE_PRECISION (TREE_TYPE (expr)), size))
3075 	    {
3076 	      error ("integral result type precision does not match "
3077 		     "field size of %qs", code_name);
3078 	      return true;
3079 	    }
3080 	  else if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3081 		   && TYPE_MODE (TREE_TYPE (expr)) != BLKmode
3082 		   && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (expr))),
3083 				size))
3084 	    {
3085 	      error ("mode size of non-integral result does not "
3086 		     "match field size of %qs",
3087 		     code_name);
3088 	      return true;
3089 	    }
3090 	  if (INTEGRAL_TYPE_P (TREE_TYPE (op))
3091 	      && !type_has_mode_precision_p (TREE_TYPE (op)))
3092 	    {
3093 	      error ("%qs of non-mode-precision operand", code_name);
3094 	      return true;
3095 	    }
3096 	  if (!AGGREGATE_TYPE_P (TREE_TYPE (op))
3097 	      && maybe_gt (size + bitpos,
3098 			   tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (op)))))
3099 	    {
3100 	      error ("position plus size exceeds size of referenced object in "
3101 		     "%qs", code_name);
3102 	      return true;
3103 	    }
3104 	}
3105 
3106       if ((TREE_CODE (expr) == REALPART_EXPR
3107 	   || TREE_CODE (expr) == IMAGPART_EXPR)
3108 	  && !useless_type_conversion_p (TREE_TYPE (expr),
3109 					 TREE_TYPE (TREE_TYPE (op))))
3110 	{
3111 	  error ("type mismatch in %qs reference", code_name);
3112 	  debug_generic_stmt (TREE_TYPE (expr));
3113 	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3114 	  return true;
3115 	}
3116       expr = op;
3117     }
3118 
3119   while (handled_component_p (expr))
3120     {
3121       code_name = get_tree_code_name (TREE_CODE (expr));
3122 
3123       if (TREE_CODE (expr) == REALPART_EXPR
3124 	  || TREE_CODE (expr) == IMAGPART_EXPR
3125 	  || TREE_CODE (expr) == BIT_FIELD_REF)
3126 	{
3127 	  error ("non-top-level %qs", code_name);
3128 	  return true;
3129 	}
3130 
3131       tree op = TREE_OPERAND (expr, 0);
3132 
3133       if (TREE_CODE (expr) == ARRAY_REF
3134 	  || TREE_CODE (expr) == ARRAY_RANGE_REF)
3135 	{
3136 	  if (!is_gimple_val (TREE_OPERAND (expr, 1))
3137 	      || (TREE_OPERAND (expr, 2)
3138 		  && !is_gimple_val (TREE_OPERAND (expr, 2)))
3139 	      || (TREE_OPERAND (expr, 3)
3140 		  && !is_gimple_val (TREE_OPERAND (expr, 3))))
3141 	    {
3142 	      error ("invalid operands to %qs", code_name);
3143 	      debug_generic_stmt (expr);
3144 	      return true;
3145 	    }
3146 	}
3147 
3148       /* Verify if the reference array element types are compatible.  */
3149       if (TREE_CODE (expr) == ARRAY_REF
3150 	  && !useless_type_conversion_p (TREE_TYPE (expr),
3151 					 TREE_TYPE (TREE_TYPE (op))))
3152 	{
3153 	  error ("type mismatch in %qs", code_name);
3154 	  debug_generic_stmt (TREE_TYPE (expr));
3155 	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3156 	  return true;
3157 	}
3158       if (TREE_CODE (expr) == ARRAY_RANGE_REF
3159 	  && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3160 					 TREE_TYPE (TREE_TYPE (op))))
3161 	{
3162 	  error ("type mismatch in %qs", code_name);
3163 	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3164 	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3165 	  return true;
3166 	}
3167 
3168       if (TREE_CODE (expr) == COMPONENT_REF)
3169 	{
3170 	  if (TREE_OPERAND (expr, 2)
3171 	      && !is_gimple_val (TREE_OPERAND (expr, 2)))
3172 	    {
3173 	      error ("invalid %qs offset operator", code_name);
3174 	      return true;
3175 	    }
3176 	  if (!useless_type_conversion_p (TREE_TYPE (expr),
3177 					  TREE_TYPE (TREE_OPERAND (expr, 1))))
3178 	    {
3179 	      error ("type mismatch in %qs", code_name);
3180 	      debug_generic_stmt (TREE_TYPE (expr));
3181 	      debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3182 	      return true;
3183 	    }
3184 	}
3185 
3186       if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3187 	{
3188 	  /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3189 	     that their operand is not an SSA name or an invariant when
3190 	     requiring an lvalue (this usually means there is a SRA or IPA-SRA
3191 	     bug).  Otherwise there is nothing to verify, gross mismatches at
3192 	     most invoke undefined behavior.  */
3193 	  if (require_lvalue
3194 	      && (TREE_CODE (op) == SSA_NAME
3195 		  || is_gimple_min_invariant (op)))
3196 	    {
3197 	      error ("conversion of %qs on the left hand side of %qs",
3198 		     get_tree_code_name (TREE_CODE (op)), code_name);
3199 	      debug_generic_stmt (expr);
3200 	      return true;
3201 	    }
3202 	  else if (TREE_CODE (op) == SSA_NAME
3203 		   && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3204 	    {
3205 	      error ("conversion of register to a different size in %qs",
3206 		     code_name);
3207 	      debug_generic_stmt (expr);
3208 	      return true;
3209 	    }
3210 	  else if (!handled_component_p (op))
3211 	    return false;
3212 	}
3213 
3214       expr = op;
3215     }
3216 
3217   code_name = get_tree_code_name (TREE_CODE (expr));
3218 
3219   if (TREE_CODE (expr) == MEM_REF)
3220     {
3221       if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0))
3222 	  || (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
3223 	      && verify_address (TREE_OPERAND (expr, 0), false)))
3224 	{
3225 	  error ("invalid address operand in %qs", code_name);
3226 	  debug_generic_stmt (expr);
3227 	  return true;
3228 	}
3229       if (!poly_int_tree_p (TREE_OPERAND (expr, 1))
3230 	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3231 	{
3232 	  error ("invalid offset operand in %qs", code_name);
3233 	  debug_generic_stmt (expr);
3234 	  return true;
3235 	}
3236       if (MR_DEPENDENCE_CLIQUE (expr) != 0
3237 	  && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
3238 	{
3239 	  error ("invalid clique in %qs", code_name);
3240 	  debug_generic_stmt (expr);
3241 	  return true;
3242 	}
3243     }
3244   else if (TREE_CODE (expr) == TARGET_MEM_REF)
3245     {
3246       if (!TMR_BASE (expr)
3247 	  || !is_gimple_mem_ref_addr (TMR_BASE (expr))
3248 	  || (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
3249 	      && verify_address (TMR_BASE (expr), false)))
3250 	{
3251 	  error ("invalid address operand in %qs", code_name);
3252 	  return true;
3253 	}
3254       if (!TMR_OFFSET (expr)
3255 	  || !poly_int_tree_p (TMR_OFFSET (expr))
3256 	  || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3257 	{
3258 	  error ("invalid offset operand in %qs", code_name);
3259 	  debug_generic_stmt (expr);
3260 	  return true;
3261 	}
3262       if (MR_DEPENDENCE_CLIQUE (expr) != 0
3263 	  && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
3264 	{
3265 	  error ("invalid clique in %qs", code_name);
3266 	  debug_generic_stmt (expr);
3267 	  return true;
3268 	}
3269     }
3270   else if (TREE_CODE (expr) == INDIRECT_REF)
3271     {
3272       error ("%qs in gimple IL", code_name);
3273       debug_generic_stmt (expr);
3274       return true;
3275     }
3276 
3277   if (!require_lvalue
3278       && (TREE_CODE (expr) == SSA_NAME || is_gimple_min_invariant (expr)))
3279     return false;
3280 
3281   if (TREE_CODE (expr) != SSA_NAME && is_gimple_id (expr))
3282     return false;
3283 
3284   if (TREE_CODE (expr) != TARGET_MEM_REF
3285       && TREE_CODE (expr) != MEM_REF)
3286     {
3287       error ("invalid expression for min lvalue");
3288       return true;
3289     }
3290 
3291   return false;
3292 }
3293 
3294 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3295    list of pointer-to types that is trivially convertible to DEST.  */
3296 
3297 static bool
3298 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3299 {
3300   tree src;
3301 
3302   if (!TYPE_POINTER_TO (src_obj))
3303     return true;
3304 
3305   for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3306     if (useless_type_conversion_p (dest, src))
3307       return true;
3308 
3309   return false;
3310 }
3311 
3312 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3313    from TYPE2 can be handled by FIXED_CONVERT_EXPR.  */
3314 
3315 static bool
3316 valid_fixed_convert_types_p (tree type1, tree type2)
3317 {
3318   return (FIXED_POINT_TYPE_P (type1)
3319 	  && (INTEGRAL_TYPE_P (type2)
3320 	      || SCALAR_FLOAT_TYPE_P (type2)
3321 	      || FIXED_POINT_TYPE_P (type2)));
3322 }
3323 
3324 /* Verify the contents of a GIMPLE_CALL STMT.  Returns true when there
3325    is a problem, otherwise false.  */
3326 
3327 static bool
3328 verify_gimple_call (gcall *stmt)
3329 {
3330   tree fn = gimple_call_fn (stmt);
3331   tree fntype, fndecl;
3332   unsigned i;
3333 
3334   if (gimple_call_internal_p (stmt))
3335     {
3336       if (fn)
3337 	{
3338 	  error ("gimple call has two targets");
3339 	  debug_generic_stmt (fn);
3340 	  return true;
3341 	}
3342     }
3343   else
3344     {
3345       if (!fn)
3346 	{
3347 	  error ("gimple call has no target");
3348 	  return true;
3349 	}
3350     }
3351 
3352   if (fn && !is_gimple_call_addr (fn))
3353     {
3354       error ("invalid function in gimple call");
3355       debug_generic_stmt (fn);
3356       return true;
3357     }
3358 
3359   if (fn
3360       && (!POINTER_TYPE_P (TREE_TYPE (fn))
3361 	  || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3362 	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3363     {
3364       error ("non-function in gimple call");
3365       return true;
3366     }
3367 
3368    fndecl = gimple_call_fndecl (stmt);
3369    if (fndecl
3370        && TREE_CODE (fndecl) == FUNCTION_DECL
3371        && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3372        && !DECL_PURE_P (fndecl)
3373        && !TREE_READONLY (fndecl))
3374      {
3375        error ("invalid pure const state for function");
3376        return true;
3377      }
3378 
3379   tree lhs = gimple_call_lhs (stmt);
3380   if (lhs
3381       && (!is_gimple_reg (lhs)
3382 	  && (!is_gimple_lvalue (lhs)
3383 	      || verify_types_in_gimple_reference
3384 		   (TREE_CODE (lhs) == WITH_SIZE_EXPR
3385 		    ? TREE_OPERAND (lhs, 0) : lhs, true))))
3386     {
3387       error ("invalid LHS in gimple call");
3388       return true;
3389     }
3390 
3391   if (gimple_call_ctrl_altering_p (stmt)
3392       && gimple_call_noreturn_p (stmt)
3393       && should_remove_lhs_p (lhs))
3394     {
3395       error ("LHS in %<noreturn%> call");
3396       return true;
3397     }
3398 
3399   fntype = gimple_call_fntype (stmt);
3400   if (fntype
3401       && lhs
3402       && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3403       /* ???  At least C++ misses conversions at assignments from
3404 	 void * call results.
3405 	 For now simply allow arbitrary pointer type conversions.  */
3406       && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3407 	   && POINTER_TYPE_P (TREE_TYPE (fntype))))
3408     {
3409       error ("invalid conversion in gimple call");
3410       debug_generic_stmt (TREE_TYPE (lhs));
3411       debug_generic_stmt (TREE_TYPE (fntype));
3412       return true;
3413     }
3414 
3415   if (gimple_call_chain (stmt)
3416       && !is_gimple_val (gimple_call_chain (stmt)))
3417     {
3418       error ("invalid static chain in gimple call");
3419       debug_generic_stmt (gimple_call_chain (stmt));
3420       return true;
3421     }
3422 
3423   /* If there is a static chain argument, the call should either be
3424      indirect, or the decl should have DECL_STATIC_CHAIN set.  */
3425   if (gimple_call_chain (stmt)
3426       && fndecl
3427       && !DECL_STATIC_CHAIN (fndecl))
3428     {
3429       error ("static chain with function that doesn%'t use one");
3430       return true;
3431     }
3432 
3433   if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3434     {
3435       switch (DECL_FUNCTION_CODE (fndecl))
3436 	{
3437 	case BUILT_IN_UNREACHABLE:
3438 	case BUILT_IN_TRAP:
3439 	  if (gimple_call_num_args (stmt) > 0)
3440 	    {
3441 	      /* Built-in unreachable with parameters might not be caught by
3442 		 the undefined behavior sanitizer.  Front ends do check that
3443 		 users do not call them that way, but we also produce calls
3444 		 to __builtin_unreachable internally, for example when IPA
3445 		 figures out that a call cannot happen in a legal program.
3446 		 In such cases, we must make sure arguments are stripped off.  */
3447 	      error ("%<__builtin_unreachable%> or %<__builtin_trap%> call "
3448 		     "with arguments");
3449 	      return true;
3450 	    }
3451 	  break;
3452 	default:
3453 	  break;
3454 	}
3455     }
3456 
3457   /* For a call to .DEFERRED_INIT,
3458      LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL)
3459      we should guarantee that when the first argument is a constant, it
3460      is the same as the size of the LHS.  */
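  /* A hypothetical instance, for illustration only: given

       int x[4];
       x = .DEFERRED_INIT (16, 2, &"x");

     the constant first argument 16 must equal TYPE_SIZE_UNIT of x's
     type (assuming a 4-byte int); passing, say, 8 would be diagnosed
     below.  */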
3461 
3462   if (gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
3463     {
3464       tree size_of_arg0 = gimple_call_arg (stmt, 0);
3465       tree size_of_lhs = TYPE_SIZE_UNIT (TREE_TYPE (lhs));
3466 
3467       if (TREE_CODE (lhs) == SSA_NAME)
3468 	lhs = SSA_NAME_VAR (lhs);
3469 
3470       poly_uint64 size_from_arg0, size_from_lhs;
3471       bool is_constant_size_arg0 = poly_int_tree_p (size_of_arg0,
3472 						    &size_from_arg0);
3473       bool is_constant_size_lhs = poly_int_tree_p (size_of_lhs,
3474 						   &size_from_lhs);
3475       if (is_constant_size_arg0 && is_constant_size_lhs)
3476 	if (maybe_ne (size_from_arg0, size_from_lhs))
3477 	  {
3478 	    error ("%<DEFERRED_INIT%> calls should have same "
3479 		   "constant size for the first argument and LHS");
3480 	    return true;
3481 	  }
3482     }
3483 
3484   /* ???  The C frontend passes unpromoted arguments in case it
3485      didn't see a function declaration before the call.  So for now
3486      leave the call arguments mostly unverified.  Once we gimplify
3487      unit-at-a-time we have a chance to fix this.  */
3488   for (i = 0; i < gimple_call_num_args (stmt); ++i)
3489     {
3490       tree arg = gimple_call_arg (stmt, i);
3491       if ((is_gimple_reg_type (TREE_TYPE (arg))
3492 	   && !is_gimple_val (arg))
3493 	  || (!is_gimple_reg_type (TREE_TYPE (arg))
3494 	      && !is_gimple_lvalue (arg)))
3495 	{
3496 	  error ("invalid argument to gimple call");
3497 	  debug_generic_expr (arg);
3498 	  return true;
3499 	}
3500       if (!is_gimple_reg (arg))
3501 	{
3502 	  if (TREE_CODE (arg) == WITH_SIZE_EXPR)
3503 	    arg = TREE_OPERAND (arg, 0);
3504 	  if (verify_types_in_gimple_reference (arg, false))
3505 	    return true;
3506 	}
3507     }
3508 
3509   return false;
3510 }
3511 
3512 /* Verifies the gimple comparison with the result type TYPE and
3513    the operands OP0 and OP1, comparison code is CODE.  */
3514 
3515 static bool
3516 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3517 {
3518   tree op0_type = TREE_TYPE (op0);
3519   tree op1_type = TREE_TYPE (op1);
3520 
3521   if (!is_gimple_val (op0) || !is_gimple_val (op1))
3522     {
3523       error ("invalid operands in gimple comparison");
3524       return true;
3525     }
3526 
3527   /* For comparisons we do not have the operation's type as the
3528      effective type the comparison is carried out in.  Instead
3529      we require that either the first operand is trivially
3530      convertible into the second, or the other way around.  */
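  /* E.g. comparing two 'int' values is fine, while 'int' vs. 'long' is
     rejected on targets where their precisions differ, since neither
     type is then trivially convertible to the other (illustrative).  */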
3531   if (!useless_type_conversion_p (op0_type, op1_type)
3532       && !useless_type_conversion_p (op1_type, op0_type))
3533     {
3534       error ("mismatching comparison operand types");
3535       debug_generic_expr (op0_type);
3536       debug_generic_expr (op1_type);
3537       return true;
3538     }
3539 
3540   /* The resulting type of a comparison may be an effective boolean type.  */
3541   if (INTEGRAL_TYPE_P (type)
3542       && (TREE_CODE (type) == BOOLEAN_TYPE
3543 	  || TYPE_PRECISION (type) == 1))
3544     {
3545       if ((TREE_CODE (op0_type) == VECTOR_TYPE
3546 	   || TREE_CODE (op1_type) == VECTOR_TYPE)
3547 	  && code != EQ_EXPR && code != NE_EXPR
3548 	  && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3549 	  && !VECTOR_INTEGER_TYPE_P (op0_type))
3550 	{
3551 	  error ("unsupported operation or type for vector comparison"
3552 		 " returning a boolean");
3553 	  debug_generic_expr (op0_type);
3554 	  debug_generic_expr (op1_type);
3555 	  return true;
3556         }
3557     }
3558   /* Or a boolean vector type with the same element count
3559      as the comparison operand types.  */
3560   else if (TREE_CODE (type) == VECTOR_TYPE
3561 	   && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3562     {
3563       if (TREE_CODE (op0_type) != VECTOR_TYPE
3564 	  || TREE_CODE (op1_type) != VECTOR_TYPE)
3565         {
3566           error ("non-vector operands in vector comparison");
3567           debug_generic_expr (op0_type);
3568           debug_generic_expr (op1_type);
3569           return true;
3570         }
3571 
3572       if (maybe_ne (TYPE_VECTOR_SUBPARTS (type),
3573 		    TYPE_VECTOR_SUBPARTS (op0_type)))
3574         {
3575           error ("invalid vector comparison resulting type");
3576           debug_generic_expr (type);
3577           return true;
3578         }
3579     }
3580   else
3581     {
3582       error ("bogus comparison result type");
3583       debug_generic_expr (type);
3584       return true;
3585     }
3586 
3587   return false;
3588 }
3589 
3590 /* Verify a gimple assignment statement STMT with an unary rhs.
3591    Returns true if anything is wrong.  */
3592 
3593 static bool
3594 verify_gimple_assign_unary (gassign *stmt)
3595 {
3596   enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3597   tree lhs = gimple_assign_lhs (stmt);
3598   tree lhs_type = TREE_TYPE (lhs);
3599   tree rhs1 = gimple_assign_rhs1 (stmt);
3600   tree rhs1_type = TREE_TYPE (rhs1);
3601 
3602   if (!is_gimple_reg (lhs))
3603     {
3604       error ("non-register as LHS of unary operation");
3605       return true;
3606     }
3607 
3608   if (!is_gimple_val (rhs1))
3609     {
3610       error ("invalid operand in unary operation");
3611       return true;
3612     }
3613 
3614   const char* const code_name = get_tree_code_name (rhs_code);
3615 
3616   /* First handle conversions.  */
3617   switch (rhs_code)
3618     {
3619     CASE_CONVERT:
3620       {
3621 	/* Allow conversions between vectors with the same number of elements,
3622 	   provided that the conversion is OK for the element types too.  */
3623 	if (VECTOR_TYPE_P (lhs_type)
3624 	    && VECTOR_TYPE_P (rhs1_type)
3625 	    && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
3626 			 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3627 	  {
3628 	    lhs_type = TREE_TYPE (lhs_type);
3629 	    rhs1_type = TREE_TYPE (rhs1_type);
3630 	  }
3631 	else if (VECTOR_TYPE_P (lhs_type) || VECTOR_TYPE_P (rhs1_type))
3632 	  {
3633 	    error ("invalid vector types in nop conversion");
3634 	    debug_generic_expr (lhs_type);
3635 	    debug_generic_expr (rhs1_type);
3636 	    return true;
3637 	  }
3638 
3639 	/* Allow conversions from pointer type to integral type only if
3640 	   there is no sign or zero extension involved.
3641 	   For targets where the precision of ptrofftype doesn't match that
3642 	   of pointers we allow conversions to types where
3643 	   POINTERS_EXTEND_UNSIGNED specifies how that works.  */
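	/* E.g. truncating a 64-bit pointer to a 32-bit integer involves
	   no extension and is accepted, while widening a 32-bit pointer
	   to a 64-bit integer is accepted only on targets that define
	   POINTERS_EXTEND_UNSIGNED (illustrative).  */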
3644 	if ((POINTER_TYPE_P (lhs_type)
3645 	     && INTEGRAL_TYPE_P (rhs1_type))
3646 	    || (POINTER_TYPE_P (rhs1_type)
3647 		&& INTEGRAL_TYPE_P (lhs_type)
3648 		&& (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3649 #if defined(POINTERS_EXTEND_UNSIGNED)
3650 		    || (TYPE_MODE (rhs1_type) == ptr_mode
3651 			&& (TYPE_PRECISION (lhs_type)
3652 			      == BITS_PER_WORD /* word_mode */
3653 			    || (TYPE_PRECISION (lhs_type)
3654 				  == GET_MODE_PRECISION (Pmode))))
3655 #endif
3656 		   )))
3657 	  return false;
3658 
3659 	/* Allow conversion from integral to offset type and vice versa.  */
3660 	if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3661 	     && INTEGRAL_TYPE_P (rhs1_type))
3662 	    || (INTEGRAL_TYPE_P (lhs_type)
3663 		&& TREE_CODE (rhs1_type) == OFFSET_TYPE))
3664 	  return false;
3665 
3666 	/* Otherwise assert we are converting between types of the
3667 	   same kind.  */
3668 	if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3669 	  {
3670 	    error ("invalid types in nop conversion");
3671 	    debug_generic_expr (lhs_type);
3672 	    debug_generic_expr (rhs1_type);
3673 	    return true;
3674 	  }
3675 
3676 	return false;
3677       }
3678 
3679     case ADDR_SPACE_CONVERT_EXPR:
3680       {
3681 	if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3682 	    || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3683 		== TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3684 	  {
3685 	    error ("invalid types in address space conversion");
3686 	    debug_generic_expr (lhs_type);
3687 	    debug_generic_expr (rhs1_type);
3688 	    return true;
3689 	  }
3690 
3691 	return false;
3692       }
3693 
3694     case FIXED_CONVERT_EXPR:
3695       {
3696 	if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3697 	    && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3698 	  {
3699 	    error ("invalid types in fixed-point conversion");
3700 	    debug_generic_expr (lhs_type);
3701 	    debug_generic_expr (rhs1_type);
3702 	    return true;
3703 	  }
3704 
3705 	return false;
3706       }
3707 
3708     case FLOAT_EXPR:
3709       {
3710 	if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3711 	    && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3712 	        || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3713 	  {
3714 	    error ("invalid types in conversion to floating-point");
3715 	    debug_generic_expr (lhs_type);
3716 	    debug_generic_expr (rhs1_type);
3717 	    return true;
3718 	  }
3719 
3720         return false;
3721       }
3722 
3723     case FIX_TRUNC_EXPR:
3724       {
3725         if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3726             && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3727                 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3728 	  {
3729 	    error ("invalid types in conversion to integer");
3730 	    debug_generic_expr (lhs_type);
3731 	    debug_generic_expr (rhs1_type);
3732 	    return true;
3733 	  }
3734 
3735         return false;
3736       }
3737 
3738     case VEC_UNPACK_HI_EXPR:
3739     case VEC_UNPACK_LO_EXPR:
3740     case VEC_UNPACK_FLOAT_HI_EXPR:
3741     case VEC_UNPACK_FLOAT_LO_EXPR:
3742     case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3743     case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3744       if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3745           || TREE_CODE (lhs_type) != VECTOR_TYPE
3746           || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3747 	      && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type)))
3748           || (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3749 	      && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3750 	  || ((rhs_code == VEC_UNPACK_HI_EXPR
3751 	       || rhs_code == VEC_UNPACK_LO_EXPR)
3752 	      && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3753 		  != INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3754 	  || ((rhs_code == VEC_UNPACK_FLOAT_HI_EXPR
3755 	       || rhs_code == VEC_UNPACK_FLOAT_LO_EXPR)
3756 	      && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3757 		  || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))))
3758 	  || ((rhs_code == VEC_UNPACK_FIX_TRUNC_HI_EXPR
3759 	       || rhs_code == VEC_UNPACK_FIX_TRUNC_LO_EXPR)
3760 	      && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3761 		  || SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))))
3762 	  || (maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
3763 			2 * GET_MODE_SIZE (element_mode (rhs1_type)))
3764 	      && (!VECTOR_BOOLEAN_TYPE_P (lhs_type)
3765 		  || !VECTOR_BOOLEAN_TYPE_P (rhs1_type)))
3766 	  || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (lhs_type),
3767 		       TYPE_VECTOR_SUBPARTS (rhs1_type)))
3768 	{
3769 	  error ("type mismatch in %qs expression", code_name);
3770 	  debug_generic_expr (lhs_type);
3771 	  debug_generic_expr (rhs1_type);
3772 	  return true;
3773         }
3774 
3775       return false;
3776 
3777     case NEGATE_EXPR:
3778     case ABS_EXPR:
3779     case BIT_NOT_EXPR:
3780     case PAREN_EXPR:
3781     case CONJ_EXPR:
3782       /* Disallow pointer and offset types for these unary operations.  */
3783       if (POINTER_TYPE_P (lhs_type)
3784 	  || TREE_CODE (lhs_type) == OFFSET_TYPE)
3785 	{
3786 	  error ("invalid types for %qs", code_name);
3787 	  debug_generic_expr (lhs_type);
3788 	  debug_generic_expr (rhs1_type);
3789 	  return true;
3790 	}
3791       break;
3792 
3793     case ABSU_EXPR:
3794       if (!ANY_INTEGRAL_TYPE_P (lhs_type)
3795 	  || !TYPE_UNSIGNED (lhs_type)
3796 	  || !ANY_INTEGRAL_TYPE_P (rhs1_type)
3797 	  || TYPE_UNSIGNED (rhs1_type)
3798 	  || element_precision (lhs_type) != element_precision (rhs1_type))
3799 	{
3800 	  error ("invalid types for %qs", code_name);
3801 	  debug_generic_expr (lhs_type);
3802 	  debug_generic_expr (rhs1_type);
3803 	  return true;
3804 	}
3805       return false;
3806 
3807     case VEC_DUPLICATE_EXPR:
3808       if (TREE_CODE (lhs_type) != VECTOR_TYPE
3809 	  || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
3810 	{
3811 	  error ("%qs should be from a scalar to a like vector", code_name);
3812 	  debug_generic_expr (lhs_type);
3813 	  debug_generic_expr (rhs1_type);
3814 	  return true;
3815 	}
3816       return false;
3817 
3818     default:
3819       gcc_unreachable ();
3820     }
3821 
3822   /* For the remaining codes assert there is no conversion involved.  */
3823   if (!useless_type_conversion_p (lhs_type, rhs1_type))
3824     {
3825       error ("non-trivial conversion in unary operation");
3826       debug_generic_expr (lhs_type);
3827       debug_generic_expr (rhs1_type);
3828       return true;
3829     }
3830 
3831   return false;
3832 }
3833 
3834 /* Verify a gimple assignment statement STMT with a binary rhs.
3835    Returns true if anything is wrong.  */
3836 
3837 static bool
3838 verify_gimple_assign_binary (gassign *stmt)
3839 {
3840   enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3841   tree lhs = gimple_assign_lhs (stmt);
3842   tree lhs_type = TREE_TYPE (lhs);
3843   tree rhs1 = gimple_assign_rhs1 (stmt);
3844   tree rhs1_type = TREE_TYPE (rhs1);
3845   tree rhs2 = gimple_assign_rhs2 (stmt);
3846   tree rhs2_type = TREE_TYPE (rhs2);
3847 
3848   if (!is_gimple_reg (lhs))
3849     {
3850       error ("non-register as LHS of binary operation");
3851       return true;
3852     }
3853 
3854   if (!is_gimple_val (rhs1)
3855       || !is_gimple_val (rhs2))
3856     {
3857       error ("invalid operands in binary operation");
3858       return true;
3859     }
3860 
3861   const char* const code_name = get_tree_code_name (rhs_code);
3862 
3863   /* First handle operations that involve different types.  */
3864   switch (rhs_code)
3865     {
3866     case COMPLEX_EXPR:
3867       {
3868 	if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3869 	    || !(INTEGRAL_TYPE_P (rhs1_type)
3870 	         || SCALAR_FLOAT_TYPE_P (rhs1_type))
3871 	    || !(INTEGRAL_TYPE_P (rhs2_type)
3872 	         || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3873 	  {
3874 	    error ("type mismatch in %qs", code_name);
3875 	    debug_generic_expr (lhs_type);
3876 	    debug_generic_expr (rhs1_type);
3877 	    debug_generic_expr (rhs2_type);
3878 	    return true;
3879 	  }
3880 
3881 	return false;
3882       }
3883 
3884     case LSHIFT_EXPR:
3885     case RSHIFT_EXPR:
3886     case LROTATE_EXPR:
3887     case RROTATE_EXPR:
3888       {
3889 	/* Shifts and rotates are ok on integral types, fixed point
3890 	   types and integer vector types.  */
3891 	if ((!INTEGRAL_TYPE_P (rhs1_type)
3892 	     && !FIXED_POINT_TYPE_P (rhs1_type)
3893 	     && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3894 		  && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3895 	    || (!INTEGRAL_TYPE_P (rhs2_type)
3896 		/* Vector shifts of vectors are also ok.  */
3897 		&& !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3898 		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3899 		     && TREE_CODE (rhs2_type) == VECTOR_TYPE
3900 		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3901 	    || !useless_type_conversion_p (lhs_type, rhs1_type))
3902 	  {
3903 	    error ("type mismatch in %qs", code_name);
3904 	    debug_generic_expr (lhs_type);
3905 	    debug_generic_expr (rhs1_type);
3906 	    debug_generic_expr (rhs2_type);
3907 	    return true;
3908 	  }
3909 
3910 	return false;
3911       }
3912 
3913     case WIDEN_LSHIFT_EXPR:
3914       {
3915         if (!INTEGRAL_TYPE_P (lhs_type)
3916             || !INTEGRAL_TYPE_P (rhs1_type)
3917             || TREE_CODE (rhs2) != INTEGER_CST
3918             || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3919           {
3920 	    error ("type mismatch in %qs", code_name);
3921             debug_generic_expr (lhs_type);
3922             debug_generic_expr (rhs1_type);
3923             debug_generic_expr (rhs2_type);
3924             return true;
3925           }
3926 
3927         return false;
3928       }
3929 
3930     case VEC_WIDEN_LSHIFT_HI_EXPR:
3931     case VEC_WIDEN_LSHIFT_LO_EXPR:
3932       {
3933         if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3934             || TREE_CODE (lhs_type) != VECTOR_TYPE
3935             || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3936             || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3937             || TREE_CODE (rhs2) != INTEGER_CST
3938             || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3939                 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3940           {
3941 	    error ("type mismatch in %qs", code_name);
3942             debug_generic_expr (lhs_type);
3943             debug_generic_expr (rhs1_type);
3944             debug_generic_expr (rhs2_type);
3945             return true;
3946           }
3947 
3948         return false;
3949       }
3950 
3951     case WIDEN_PLUS_EXPR:
3952     case WIDEN_MINUS_EXPR:
3953     case PLUS_EXPR:
3954     case MINUS_EXPR:
3955       {
3956 	tree lhs_etype = lhs_type;
3957 	tree rhs1_etype = rhs1_type;
3958 	tree rhs2_etype = rhs2_type;
3959 	if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3960 	  {
3961 	    if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3962 		|| TREE_CODE (rhs2_type) != VECTOR_TYPE)
3963 	      {
3964 		error ("invalid non-vector operands to %qs", code_name);
3965 		return true;
3966 	      }
3967 	    lhs_etype = TREE_TYPE (lhs_type);
3968 	    rhs1_etype = TREE_TYPE (rhs1_type);
3969 	    rhs2_etype = TREE_TYPE (rhs2_type);
3970 	  }
3971 	if (POINTER_TYPE_P (lhs_etype)
3972 	    || POINTER_TYPE_P (rhs1_etype)
3973 	    || POINTER_TYPE_P (rhs2_etype))
3974 	  {
3975 	    error ("invalid (pointer) operands %qs", code_name);
3976 	    return true;
3977 	  }
3978 
3979 	/* Continue with generic binary expression handling.  */
3980 	break;
3981       }
3982 
3983     case POINTER_PLUS_EXPR:
3984       {
3985 	if (!POINTER_TYPE_P (rhs1_type)
3986 	    || !useless_type_conversion_p (lhs_type, rhs1_type)
3987 	    || !ptrofftype_p (rhs2_type))
3988 	  {
3989 	    error ("type mismatch in %qs", code_name);
3990 	    debug_generic_stmt (lhs_type);
3991 	    debug_generic_stmt (rhs1_type);
3992 	    debug_generic_stmt (rhs2_type);
3993 	    return true;
3994 	  }
3995 
3996 	return false;
3997       }
3998 
3999     case POINTER_DIFF_EXPR:
4000       {
4001 	if (!POINTER_TYPE_P (rhs1_type)
4002 	    || !POINTER_TYPE_P (rhs2_type)
4003 	    /* Because we special-case pointers to void we allow difference
4004 	       of arbitrary pointers with the same mode.  */
4005 	    || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)
4006 	    || !INTEGRAL_TYPE_P (lhs_type)
4007 	    || TYPE_UNSIGNED (lhs_type)
4008 	    || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
4009 	  {
4010 	    error ("type mismatch in %qs", code_name);
4011 	    debug_generic_stmt (lhs_type);
4012 	    debug_generic_stmt (rhs1_type);
4013 	    debug_generic_stmt (rhs2_type);
4014 	    return true;
4015 	  }
4016 
4017 	return false;
4018       }
4019 
4020     case TRUTH_ANDIF_EXPR:
4021     case TRUTH_ORIF_EXPR:
4022     case TRUTH_AND_EXPR:
4023     case TRUTH_OR_EXPR:
4024     case TRUTH_XOR_EXPR:
4025 
4026       gcc_unreachable ();
4027 
4028     case LT_EXPR:
4029     case LE_EXPR:
4030     case GT_EXPR:
4031     case GE_EXPR:
4032     case EQ_EXPR:
4033     case NE_EXPR:
4034     case UNORDERED_EXPR:
4035     case ORDERED_EXPR:
4036     case UNLT_EXPR:
4037     case UNLE_EXPR:
4038     case UNGT_EXPR:
4039     case UNGE_EXPR:
4040     case UNEQ_EXPR:
4041     case LTGT_EXPR:
4042       /* Comparisons are also binary, but the result type is not
4043 	 connected to the operand types.  */
4044       return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
4045 
4046     case WIDEN_MULT_EXPR:
4047       if (TREE_CODE (lhs_type) != INTEGER_TYPE)
4048 	return true;
4049       return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
4050 	      || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
4051 
4052     case WIDEN_SUM_EXPR:
4053       {
4054         if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4055 	      || TREE_CODE (lhs_type) != VECTOR_TYPE)
4056 	     && ((!INTEGRAL_TYPE_P (rhs1_type)
4057 		  && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4058 		 || (!INTEGRAL_TYPE_P (lhs_type)
4059 		     && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4060 	    || !useless_type_conversion_p (lhs_type, rhs2_type)
4061 	    || maybe_lt (GET_MODE_SIZE (element_mode (rhs2_type)),
4062 			 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4063           {
4064 	    error ("type mismatch in %qs", code_name);
4065             debug_generic_expr (lhs_type);
4066             debug_generic_expr (rhs1_type);
4067             debug_generic_expr (rhs2_type);
4068             return true;
4069           }
4070         return false;
4071       }
4072 
4073     case VEC_WIDEN_MINUS_HI_EXPR:
4074     case VEC_WIDEN_MINUS_LO_EXPR:
4075     case VEC_WIDEN_PLUS_HI_EXPR:
4076     case VEC_WIDEN_PLUS_LO_EXPR:
4077     case VEC_WIDEN_MULT_HI_EXPR:
4078     case VEC_WIDEN_MULT_LO_EXPR:
4079     case VEC_WIDEN_MULT_EVEN_EXPR:
4080     case VEC_WIDEN_MULT_ODD_EXPR:
4081       {
4082         if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4083             || TREE_CODE (lhs_type) != VECTOR_TYPE
4084 	    || !types_compatible_p (rhs1_type, rhs2_type)
4085 	    || maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
4086 			 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4087           {
4088 	    error ("type mismatch in %qs", code_name);
4089             debug_generic_expr (lhs_type);
4090             debug_generic_expr (rhs1_type);
4091             debug_generic_expr (rhs2_type);
4092             return true;
4093           }
4094         return false;
4095       }
4096 
4097     case VEC_PACK_TRUNC_EXPR:
4098       /* ???  We currently use VEC_PACK_TRUNC_EXPR to simply concatenate
4099 	 vector boolean types.  */
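      /* E.g. packing two compatible 4-element boolean masks into a
	 single 8-element mask (illustrative).  */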
4100       if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
4101 	  && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4102 	  && types_compatible_p (rhs1_type, rhs2_type)
4103 	  && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
4104 		       2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
4105 	return false;
4106 
4107       /* Fallthru.  */
4108     case VEC_PACK_SAT_EXPR:
4109     case VEC_PACK_FIX_TRUNC_EXPR:
4110       {
4111         if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4112             || TREE_CODE (lhs_type) != VECTOR_TYPE
4113             || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
4114 		  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
4115 		  && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
4116 		 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4117 		     == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
4118 	    || !types_compatible_p (rhs1_type, rhs2_type)
4119 	    || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4120 			 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4121 	    || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4122 			 TYPE_VECTOR_SUBPARTS (lhs_type)))
4123           {
4124 	    error ("type mismatch in %qs", code_name);
4125             debug_generic_expr (lhs_type);
4126             debug_generic_expr (rhs1_type);
4127             debug_generic_expr (rhs2_type);
4128             return true;
4129           }
4130 
4131         return false;
4132       }
4133 
4134     case VEC_PACK_FLOAT_EXPR:
4135       if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4136 	  || TREE_CODE (lhs_type) != VECTOR_TYPE
4137 	  || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4138 	  || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))
4139 	  || !types_compatible_p (rhs1_type, rhs2_type)
4140 	  || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4141 		       2 * GET_MODE_SIZE (element_mode (lhs_type)))
4142 	  || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4143 		       TYPE_VECTOR_SUBPARTS (lhs_type)))
4144 	{
4145 	  error ("type mismatch in %qs", code_name);
4146 	  debug_generic_expr (lhs_type);
4147 	  debug_generic_expr (rhs1_type);
4148 	  debug_generic_expr (rhs2_type);
4149 	  return true;
4150 	}
4151 
4152       return false;
4153 
4154     case MULT_EXPR:
4155     case MULT_HIGHPART_EXPR:
4156     case TRUNC_DIV_EXPR:
4157     case CEIL_DIV_EXPR:
4158     case FLOOR_DIV_EXPR:
4159     case ROUND_DIV_EXPR:
4160     case TRUNC_MOD_EXPR:
4161     case CEIL_MOD_EXPR:
4162     case FLOOR_MOD_EXPR:
4163     case ROUND_MOD_EXPR:
4164     case RDIV_EXPR:
4165     case EXACT_DIV_EXPR:
4166       /* Disallow pointer and offset types for these binary operations.  */
4167       if (POINTER_TYPE_P (lhs_type)
4168 	  || TREE_CODE (lhs_type) == OFFSET_TYPE)
4169 	{
4170 	  error ("invalid types for %qs", code_name);
4171 	  debug_generic_expr (lhs_type);
4172 	  debug_generic_expr (rhs1_type);
4173 	  debug_generic_expr (rhs2_type);
4174 	  return true;
4175 	}
4176       /* Continue with generic binary expression handling.  */
4177       break;
4178 
4179     case MIN_EXPR:
4180     case MAX_EXPR:
4181     case BIT_IOR_EXPR:
4182     case BIT_XOR_EXPR:
4183     case BIT_AND_EXPR:
4184       /* Continue with generic binary expression handling.  */
4185       break;
4186 
4187     case VEC_SERIES_EXPR:
4188       if (!useless_type_conversion_p (rhs1_type, rhs2_type))
4189 	{
4190 	  error ("type mismatch in %qs", code_name);
4191 	  debug_generic_expr (rhs1_type);
4192 	  debug_generic_expr (rhs2_type);
4193 	  return true;
4194 	}
4195       if (TREE_CODE (lhs_type) != VECTOR_TYPE
4196 	  || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
4197 	{
4198 	  error ("vector type expected in %qs", code_name);
4199 	  debug_generic_expr (lhs_type);
4200 	  return true;
4201 	}
4202       return false;
4203 
4204     default:
4205       gcc_unreachable ();
4206     }
4207 
4208   if (!useless_type_conversion_p (lhs_type, rhs1_type)
4209       || !useless_type_conversion_p (lhs_type, rhs2_type))
4210     {
4211       error ("type mismatch in binary expression");
4212       debug_generic_stmt (lhs_type);
4213       debug_generic_stmt (rhs1_type);
4214       debug_generic_stmt (rhs2_type);
4215       return true;
4216     }
4217 
4218   return false;
4219 }
4220 
4221 /* Verify a gimple assignment statement STMT with a ternary rhs.
4222    Returns true if anything is wrong.  */
4223 
4224 static bool
4225 verify_gimple_assign_ternary (gassign *stmt)
4226 {
4227   enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4228   tree lhs = gimple_assign_lhs (stmt);
4229   tree lhs_type = TREE_TYPE (lhs);
4230   tree rhs1 = gimple_assign_rhs1 (stmt);
4231   tree rhs1_type = TREE_TYPE (rhs1);
4232   tree rhs2 = gimple_assign_rhs2 (stmt);
4233   tree rhs2_type = TREE_TYPE (rhs2);
4234   tree rhs3 = gimple_assign_rhs3 (stmt);
4235   tree rhs3_type = TREE_TYPE (rhs3);
4236 
4237   if (!is_gimple_reg (lhs))
4238     {
4239       error ("non-register as LHS of ternary operation");
4240       return true;
4241     }
4242 
4243   if ((rhs_code == COND_EXPR
4244        ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4245       || !is_gimple_val (rhs2)
4246       || !is_gimple_val (rhs3))
4247     {
4248       error ("invalid operands in ternary operation");
4249       return true;
4250     }
4251 
4252   const char* const code_name = get_tree_code_name (rhs_code);
4253 
4254   /* First handle operations that involve different types.  */
4255   switch (rhs_code)
4256     {
4257     case WIDEN_MULT_PLUS_EXPR:
4258     case WIDEN_MULT_MINUS_EXPR:
4259       if ((!INTEGRAL_TYPE_P (rhs1_type)
4260 	   && !FIXED_POINT_TYPE_P (rhs1_type))
4261 	  || !useless_type_conversion_p (rhs1_type, rhs2_type)
4262 	  || !useless_type_conversion_p (lhs_type, rhs3_type)
4263 	  || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4264 	  || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4265 	{
4266 	  error ("type mismatch in %qs", code_name);
4267 	  debug_generic_expr (lhs_type);
4268 	  debug_generic_expr (rhs1_type);
4269 	  debug_generic_expr (rhs2_type);
4270 	  debug_generic_expr (rhs3_type);
4271 	  return true;
4272 	}
4273       break;
4274 
4275     case VEC_COND_EXPR:
4276       if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4277 	  || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4278 		       TYPE_VECTOR_SUBPARTS (lhs_type)))
4279 	{
4280 	  error ("the first argument of a %qs must be of a "
4281 		 "boolean vector type of the same number of elements "
4282 		 "as the result", code_name);
4283 	  debug_generic_expr (lhs_type);
4284 	  debug_generic_expr (rhs1_type);
4285 	  return true;
4286 	}
4287       if (!is_gimple_val (rhs1))
4288 	return true;
4289       /* Fallthrough.  */
4290     case COND_EXPR:
4291       if (!is_gimple_val (rhs1)
4292 	  && (!is_gimple_condexpr (rhs1)
4293 	      || verify_gimple_comparison (TREE_TYPE (rhs1),
4294 					   TREE_OPERAND (rhs1, 0),
4295 					   TREE_OPERAND (rhs1, 1),
4296 					   TREE_CODE (rhs1))))
4297 	return true;
4298       if (!useless_type_conversion_p (lhs_type, rhs2_type)
4299 	  || !useless_type_conversion_p (lhs_type, rhs3_type))
4300 	{
4301 	  error ("type mismatch in %qs", code_name);
4302 	  debug_generic_expr (lhs_type);
4303 	  debug_generic_expr (rhs2_type);
4304 	  debug_generic_expr (rhs3_type);
4305 	  return true;
4306 	}
4307       break;
4308 
4309     case VEC_PERM_EXPR:
4310       if (!useless_type_conversion_p (lhs_type, rhs1_type)
4311 	  || !useless_type_conversion_p (lhs_type, rhs2_type))
4312 	{
4313 	  error ("type mismatch in %qs", code_name);
4314 	  debug_generic_expr (lhs_type);
4315 	  debug_generic_expr (rhs1_type);
4316 	  debug_generic_expr (rhs2_type);
4317 	  debug_generic_expr (rhs3_type);
4318 	  return true;
4319 	}
4320 
4321       if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4322 	  || TREE_CODE (rhs2_type) != VECTOR_TYPE
4323 	  || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4324 	{
4325 	  error ("vector types expected in %qs", code_name);
4326 	  debug_generic_expr (lhs_type);
4327 	  debug_generic_expr (rhs1_type);
4328 	  debug_generic_expr (rhs2_type);
4329 	  debug_generic_expr (rhs3_type);
4330 	  return true;
4331 	}
4332 
4333       if (maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4334 		    TYPE_VECTOR_SUBPARTS (rhs2_type))
4335 	  || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs2_type),
4336 		       TYPE_VECTOR_SUBPARTS (rhs3_type))
4337 	  || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs3_type),
4338 		       TYPE_VECTOR_SUBPARTS (lhs_type)))
4339 	{
4340 	  error ("vectors with different element number found in %qs",
4341 		 code_name);
4342 	  debug_generic_expr (lhs_type);
4343 	  debug_generic_expr (rhs1_type);
4344 	  debug_generic_expr (rhs2_type);
4345 	  debug_generic_expr (rhs3_type);
4346 	  return true;
4347 	}
4348 
4349       if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4350 	  || (TREE_CODE (rhs3) != VECTOR_CST
4351 	      && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
4352 				    (TREE_TYPE (rhs3_type)))
4353 		  != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
4354 				       (TREE_TYPE (rhs1_type))))))
4355 	{
4356 	  error ("invalid mask type in %qs", code_name);
4357 	  debug_generic_expr (lhs_type);
4358 	  debug_generic_expr (rhs1_type);
4359 	  debug_generic_expr (rhs2_type);
4360 	  debug_generic_expr (rhs3_type);
4361 	  return true;
4362 	}
4363 
4364       return false;
4365 
4366     case SAD_EXPR:
4367       if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4368 	  || !useless_type_conversion_p (lhs_type, rhs3_type)
4369 	  || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4370 	       > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4371 	{
4372 	  error ("type mismatch in %qs", code_name);
4373 	  debug_generic_expr (lhs_type);
4374 	  debug_generic_expr (rhs1_type);
4375 	  debug_generic_expr (rhs2_type);
4376 	  debug_generic_expr (rhs3_type);
4377 	  return true;
4378 	}
4379 
4380       if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4381 	  || TREE_CODE (rhs2_type) != VECTOR_TYPE
4382 	  || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4383 	{
4384 	  error ("vector types expected in %qs", code_name);
4385 	  debug_generic_expr (lhs_type);
4386 	  debug_generic_expr (rhs1_type);
4387 	  debug_generic_expr (rhs2_type);
4388 	  debug_generic_expr (rhs3_type);
4389 	  return true;
4390 	}
4391 
4392       return false;
4393 
4394     case BIT_INSERT_EXPR:
4395       if (! useless_type_conversion_p (lhs_type, rhs1_type))
4396 	{
4397 	  error ("type mismatch in %qs", code_name);
4398 	  debug_generic_expr (lhs_type);
4399 	  debug_generic_expr (rhs1_type);
4400 	  return true;
4401 	}
4402       if (! ((INTEGRAL_TYPE_P (rhs1_type)
4403 	      && INTEGRAL_TYPE_P (rhs2_type))
4404 	     /* Vector element insert.  */
4405 	     || (VECTOR_TYPE_P (rhs1_type)
4406 		 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))
4407 	     /* Aligned sub-vector insert.  */
4408 	     || (VECTOR_TYPE_P (rhs1_type)
4409 		 && VECTOR_TYPE_P (rhs2_type)
4410 		 && types_compatible_p (TREE_TYPE (rhs1_type),
4411 					TREE_TYPE (rhs2_type))
4412 		 && multiple_p (TYPE_VECTOR_SUBPARTS (rhs1_type),
4413 				TYPE_VECTOR_SUBPARTS (rhs2_type))
4414 		 && multiple_p (wi::to_poly_offset (rhs3),
4415 				wi::to_poly_offset (TYPE_SIZE (rhs2_type))))))
4416 	{
4417 	  error ("not allowed type combination in %qs", code_name);
4418 	  debug_generic_expr (rhs1_type);
4419 	  debug_generic_expr (rhs2_type);
4420 	  return true;
4421 	}
4422       if (! tree_fits_uhwi_p (rhs3)
4423 	  || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4424 	  || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4425 	{
4426 	  error ("invalid position or size in %qs", code_name);
4427 	  return true;
4428 	}
4429       if (INTEGRAL_TYPE_P (rhs1_type)
4430 	  && !type_has_mode_precision_p (rhs1_type))
4431 	{
4432 	  error ("%qs into non-mode-precision operand", code_name);
4433 	  return true;
4434 	}
4435       if (INTEGRAL_TYPE_P (rhs1_type))
4436 	{
4437 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4438 	  if (bitpos >= TYPE_PRECISION (rhs1_type)
4439 	      || (bitpos + TYPE_PRECISION (rhs2_type)
4440 		  > TYPE_PRECISION (rhs1_type)))
4441 	    {
4442 	      error ("insertion out of range in %qs", code_name);
4443 	      return true;
4444 	    }
4445 	}
4446       else if (VECTOR_TYPE_P (rhs1_type))
4447 	{
4448 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4449 	  unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4450 	  if (bitpos % bitsize != 0)
4451 	    {
4452 	      error ("%qs not at element boundary", code_name);
4453 	      return true;
4454 	    }
4455 	}
4456       return false;
4457 
4458     case DOT_PROD_EXPR:
4459       {
4460         if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4461 	      || TREE_CODE (lhs_type) != VECTOR_TYPE)
4462 	     && ((!INTEGRAL_TYPE_P (rhs1_type)
4463 		  && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4464 		 || (!INTEGRAL_TYPE_P (lhs_type)
4465 		     && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4466 	    /* rhs1_type and rhs2_type may differ in sign.  */
4467 	    || !tree_nop_conversion_p (rhs1_type, rhs2_type)
4468 	    || !useless_type_conversion_p (lhs_type, rhs3_type)
4469 	    || maybe_lt (GET_MODE_SIZE (element_mode (rhs3_type)),
4470 			 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4471           {
4472 	    error ("type mismatch in %qs", code_name);
4473             debug_generic_expr (lhs_type);
4474             debug_generic_expr (rhs1_type);
4475             debug_generic_expr (rhs2_type);
4476             return true;
4477           }
4478         return false;
4479       }
4480 
4481     case REALIGN_LOAD_EXPR:
4482       /* FIXME.  */
4483       return false;
4484 
4485     default:
4486       gcc_unreachable ();
4487     }
4488   return false;
4489 }
4490 
4491 /* Verify a gimple assignment statement STMT with a single rhs.
4492    Returns true if anything is wrong.  */
4493 
4494 static bool
4495 verify_gimple_assign_single (gassign *stmt)
4496 {
4497   enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4498   tree lhs = gimple_assign_lhs (stmt);
4499   tree lhs_type = TREE_TYPE (lhs);
4500   tree rhs1 = gimple_assign_rhs1 (stmt);
4501   tree rhs1_type = TREE_TYPE (rhs1);
4502   bool res = false;
4503 
4504   const char* const code_name = get_tree_code_name (rhs_code);
4505 
4506   if (!useless_type_conversion_p (lhs_type, rhs1_type))
4507     {
4508       error ("non-trivial conversion in %qs", code_name);
4509       debug_generic_expr (lhs_type);
4510       debug_generic_expr (rhs1_type);
4511       return true;
4512     }
4513 
4514   if (gimple_clobber_p (stmt)
4515       && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4516     {
4517       error ("%qs LHS in clobber statement",
4518 	     get_tree_code_name (TREE_CODE (lhs)));
4519       debug_generic_expr (lhs);
4520       return true;
4521     }
4522 
4523   if (TREE_CODE (lhs) == WITH_SIZE_EXPR)
4524     {
4525       error ("%qs LHS in assignment statement",
4526 	     get_tree_code_name (TREE_CODE (lhs)));
4527       debug_generic_expr (lhs);
4528       return true;
4529     }
4530 
4531   if (handled_component_p (lhs)
4532       || TREE_CODE (lhs) == MEM_REF
4533       || TREE_CODE (lhs) == TARGET_MEM_REF)
4534     res |= verify_types_in_gimple_reference (lhs, true);
4535 
4536   /* Special codes we cannot handle via their class.  */
4537   switch (rhs_code)
4538     {
4539     case ADDR_EXPR:
4540       {
4541 	tree op = TREE_OPERAND (rhs1, 0);
4542 	if (!is_gimple_addressable (op))
4543 	  {
4544 	    error ("invalid operand in %qs", code_name);
4545 	    return true;
4546 	  }
4547 
4548 	/* Technically there is no longer a need for matching types, but
4549 	   gimple hygiene asks for this check.  In LTO we can end up
4550 	   combining incompatible units and thus end up with addresses
4551 	   of globals that change their type to a common one.  */
4552 	if (!in_lto_p
4553 	    && !types_compatible_p (TREE_TYPE (op),
4554 				    TREE_TYPE (TREE_TYPE (rhs1)))
4555 	    && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4556 							  TREE_TYPE (op)))
4557 	  {
4558 	    error ("type mismatch in %qs", code_name);
4559 	    debug_generic_stmt (TREE_TYPE (rhs1));
4560 	    debug_generic_stmt (TREE_TYPE (op));
4561 	    return true;
4562 	  }
4563 
4564 	return (verify_address (rhs1, true)
4565 		|| verify_types_in_gimple_reference (op, true));
4566       }
4567 
4568     /* tcc_reference  */
4569     case INDIRECT_REF:
4570       error ("%qs in gimple IL", code_name);
4571       return true;
4572 
4573     case COMPONENT_REF:
4574     case BIT_FIELD_REF:
4575     case ARRAY_REF:
4576     case ARRAY_RANGE_REF:
4577     case VIEW_CONVERT_EXPR:
4578     case REALPART_EXPR:
4579     case IMAGPART_EXPR:
4580     case TARGET_MEM_REF:
4581     case MEM_REF:
4582       if (!is_gimple_reg (lhs)
4583 	  && is_gimple_reg_type (TREE_TYPE (lhs)))
4584 	{
4585 	  error ("invalid RHS for gimple memory store: %qs", code_name);
4586 	  debug_generic_stmt (lhs);
4587 	  debug_generic_stmt (rhs1);
4588 	  return true;
4589 	}
4590       return res || verify_types_in_gimple_reference (rhs1, false);
4591 
4592     /* tcc_constant  */
4593     case SSA_NAME:
4594     case INTEGER_CST:
4595     case REAL_CST:
4596     case FIXED_CST:
4597     case COMPLEX_CST:
4598     case VECTOR_CST:
4599     case STRING_CST:
4600       return res;
4601 
4602     /* tcc_declaration  */
4603     case CONST_DECL:
4604       return res;
4605     case VAR_DECL:
4606     case PARM_DECL:
4607       if (!is_gimple_reg (lhs)
4608 	  && !is_gimple_reg (rhs1)
4609 	  && is_gimple_reg_type (TREE_TYPE (lhs)))
4610 	{
4611 	  error ("invalid RHS for gimple memory store: %qs", code_name);
4612 	  debug_generic_stmt (lhs);
4613 	  debug_generic_stmt (rhs1);
4614 	  return true;
4615 	}
4616       return res;
4617 
4618     case CONSTRUCTOR:
4619       if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4620 	{
4621 	  unsigned int i;
4622 	  tree elt_i, elt_v, elt_t = NULL_TREE;
4623 
4624 	  if (CONSTRUCTOR_NELTS (rhs1) == 0)
4625 	    return res;
4626 	  /* For vector CONSTRUCTORs we require that either it is an empty
4627 	     CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4628 	     (then the element count must be correct to cover the whole
4629 	     outer vector and the index must be NULL on all elements), or
4630 	     it is a CONSTRUCTOR of scalar elements, where we as an
4631 	     exception allow a smaller number of elements (assuming zero
4632 	     filling) and consecutive indexes as compared to NULL indexes
4633 	     (such CONSTRUCTORs can appear in the IL from FEs).  */
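	  /* Illustratively, a 4-element vector may thus be written as
	     { lo, hi } with two 2-element vector halves and NULL indexes,
	     or as { a, b } with scalar elements, the remaining lanes
	     being implicitly zero.  */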
4634 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4635 	    {
4636 	      if (elt_t == NULL_TREE)
4637 		{
4638 		  elt_t = TREE_TYPE (elt_v);
4639 		  if (TREE_CODE (elt_t) == VECTOR_TYPE)
4640 		    {
4641 		      tree elt_t = TREE_TYPE (elt_v);
4642 		      if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4643 						      TREE_TYPE (elt_t)))
4644 			{
4645 			  error ("incorrect type of vector %qs elements",
4646 				 code_name);
4647 			  debug_generic_stmt (rhs1);
4648 			  return true;
4649 			}
4650 		      else if (maybe_ne (CONSTRUCTOR_NELTS (rhs1)
4651 					 * TYPE_VECTOR_SUBPARTS (elt_t),
4652 					 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4653 			{
4654 			  error ("incorrect number of vector %qs elements",
4655 				 code_name);
4656 			  debug_generic_stmt (rhs1);
4657 			  return true;
4658 			}
4659 		    }
4660 		  else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4661 						       elt_t))
4662 		    {
4663 		      error ("incorrect type of vector %qs elements",
4664 			     code_name);
4665 		      debug_generic_stmt (rhs1);
4666 		      return true;
4667 		    }
4668 		  else if (maybe_gt (CONSTRUCTOR_NELTS (rhs1),
4669 				     TYPE_VECTOR_SUBPARTS (rhs1_type)))
4670 		    {
4671 		      error ("incorrect number of vector %qs elements",
4672 			     code_name);
4673 		      debug_generic_stmt (rhs1);
4674 		      return true;
4675 		    }
4676 		}
4677 	      else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4678 		{
4679 		  error ("incorrect type of vector CONSTRUCTOR elements");
4680 		  debug_generic_stmt (rhs1);
4681 		  return true;
4682 		}
4683 	      if (elt_i != NULL_TREE
4684 		  && (TREE_CODE (elt_t) == VECTOR_TYPE
4685 		      || TREE_CODE (elt_i) != INTEGER_CST
4686 		      || compare_tree_int (elt_i, i) != 0))
4687 		{
4688 		  error ("vector %qs with non-NULL element index",
4689 			 code_name);
4690 		  debug_generic_stmt (rhs1);
4691 		  return true;
4692 		}
4693 	      if (!is_gimple_val (elt_v))
4694 		{
4695 		  error ("vector %qs element is not a GIMPLE value",
4696 			 code_name);
4697 		  debug_generic_stmt (rhs1);
4698 		  return true;
4699 		}
4700 	    }
4701 	}
4702       else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4703 	{
4704 	  error ("non-vector %qs with elements", code_name);
4705 	  debug_generic_stmt (rhs1);
4706 	  return true;
4707 	}
4708       return res;
4709 
4710     case ASSERT_EXPR:
4711       /* FIXME.  */
4712       rhs1 = fold (ASSERT_EXPR_COND (rhs1));
4713       if (rhs1 == boolean_false_node)
4714 	{
4715 	  error ("%qs with an always-false condition", code_name);
4716 	  debug_generic_stmt (rhs1);
4717 	  return true;
4718 	}
4719       break;
4720 
4721     case WITH_SIZE_EXPR:
4722       error ("%qs RHS in assignment statement",
4723 	     get_tree_code_name (rhs_code));
4724       debug_generic_expr (rhs1);
4725       return true;
4726 
4727     case OBJ_TYPE_REF:
4728       /* FIXME.  */
4729       return res;
4730 
4731     default:;
4732     }
4733 
4734   return res;
4735 }
4736 
4737 /* Verify the contents of a GIMPLE_ASSIGN STMT.  Returns true when there
4738    is a problem, otherwise false.  */
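/* E.g. 'x = y' dispatches to the single-rhs checker, 'x = -y' to the
   unary one, 'x = y + z' to the binary one, and 'x = c ? y : z' to the
   ternary one (illustrative).  */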
4739 
4740 static bool
4741 verify_gimple_assign (gassign *stmt)
4742 {
4743   switch (gimple_assign_rhs_class (stmt))
4744     {
4745     case GIMPLE_SINGLE_RHS:
4746       return verify_gimple_assign_single (stmt);
4747 
4748     case GIMPLE_UNARY_RHS:
4749       return verify_gimple_assign_unary (stmt);
4750 
4751     case GIMPLE_BINARY_RHS:
4752       return verify_gimple_assign_binary (stmt);
4753 
4754     case GIMPLE_TERNARY_RHS:
4755       return verify_gimple_assign_ternary (stmt);
4756 
4757     default:
4758       gcc_unreachable ();
4759     }
4760 }
4761 
4762 /* Verify the contents of a GIMPLE_RETURN STMT.  Returns true when there
4763    is a problem, otherwise false.  */
4764 
4765 static bool
4766 verify_gimple_return (greturn *stmt)
4767 {
4768   tree op = gimple_return_retval (stmt);
4769   tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4770 
4771   /* We cannot test for present return values as we do not fix up missing
4772      return values from the original source.  */
4773   if (op == NULL)
4774     return false;
4775 
4776   if (!is_gimple_val (op)
4777       && TREE_CODE (op) != RESULT_DECL)
4778     {
4779       error ("invalid operand in return statement");
4780       debug_generic_stmt (op);
4781       return true;
4782     }
4783 
4784   if ((TREE_CODE (op) == RESULT_DECL
4785        && DECL_BY_REFERENCE (op))
4786       || (TREE_CODE (op) == SSA_NAME
4787 	  && SSA_NAME_VAR (op)
4788 	  && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4789 	  && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4790     op = TREE_TYPE (op);
4791 
4792   if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4793     {
4794       error ("invalid conversion in return statement");
4795       debug_generic_stmt (restype);
4796       debug_generic_stmt (TREE_TYPE (op));
4797       return true;
4798     }
4799 
4800   return false;
4801 }
4802 
4803 
4804 /* Verify the contents of a GIMPLE_GOTO STMT.  Returns true when there
4805    is a problem, otherwise false.  */
4806 
4807 static bool
4808 verify_gimple_goto (ggoto *stmt)
4809 {
4810   tree dest = gimple_goto_dest (stmt);
4811 
4812   /* ???  We have two canonical forms of direct goto destinations, a
4813      bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL.  */
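  /* E.g. a plain 'goto lab;' uses a bare LABEL_DECL destination, while
     a computed 'goto *p;' uses a pointer-valued one (illustrative).  */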
4814   if (TREE_CODE (dest) != LABEL_DECL
4815       && (!is_gimple_val (dest)
4816 	  || !POINTER_TYPE_P (TREE_TYPE (dest))))
4817     {
4818       error ("goto destination is neither a label nor a pointer");
4819       return true;
4820     }
4821 
4822   return false;
4823 }
4824 
4825 /* Verify the contents of a GIMPLE_SWITCH STMT.  Returns true when there
4826    is a problem, otherwise false.  */
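/* Illustratively, for 'switch (i)' with labels 'default: case 1:
   case 3 ... 5:' the checks below require the default label first with
   no CASE_LOW/CASE_HIGH, and the case values to share one type no wider
   than i's, sorted ascending and non-overlapping.  */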
4827 
4828 static bool
4829 verify_gimple_switch (gswitch *stmt)
4830 {
4831   unsigned int i, n;
4832   tree elt, prev_upper_bound = NULL_TREE;
4833   tree index_type, elt_type = NULL_TREE;
4834 
4835   if (!is_gimple_val (gimple_switch_index (stmt)))
4836     {
4837       error ("invalid operand to switch statement");
4838       debug_generic_stmt (gimple_switch_index (stmt));
4839       return true;
4840     }
4841 
4842   index_type = TREE_TYPE (gimple_switch_index (stmt));
4843   if (! INTEGRAL_TYPE_P (index_type))
4844     {
4845       error ("non-integral type switch statement");
4846       debug_generic_expr (index_type);
4847       return true;
4848     }
4849 
4850   elt = gimple_switch_label (stmt, 0);
4851   if (CASE_LOW (elt) != NULL_TREE
4852       || CASE_HIGH (elt) != NULL_TREE
4853       || CASE_CHAIN (elt) != NULL_TREE)
4854     {
4855       error ("invalid default case label in switch statement");
4856       debug_generic_expr (elt);
4857       return true;
4858     }
4859 
4860   n = gimple_switch_num_labels (stmt);
4861   for (i = 1; i < n; i++)
4862     {
4863       elt = gimple_switch_label (stmt, i);
4864 
4865       if (CASE_CHAIN (elt))
4866 	{
4867 	  error ("invalid %<CASE_CHAIN%>");
4868 	  debug_generic_expr (elt);
4869 	  return true;
4870 	}
4871       if (! CASE_LOW (elt))
4872 	{
4873 	  error ("invalid case label in switch statement");
4874 	  debug_generic_expr (elt);
4875 	  return true;
4876 	}
4877       if (CASE_HIGH (elt)
4878 	  && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4879 	{
4880 	  error ("invalid case range in switch statement");
4881 	  debug_generic_expr (elt);
4882 	  return true;
4883 	}
4884 
4885       if (! elt_type)
4886 	{
4887 	  elt_type = TREE_TYPE (CASE_LOW (elt));
4888 	  if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4889 	    {
4890 	      error ("type precision mismatch in switch statement");
4891 	      return true;
4892 	    }
4893 	}
4894       if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4895           || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4896 	{
4897 	  error ("type mismatch for case label in switch statement");
4898 	  debug_generic_expr (elt);
4899 	  return true;
4900 	}
4901 
4902       if (prev_upper_bound)
4903 	{
4904 	  if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4905 	    {
4906 	      error ("case labels not sorted in switch statement");
4907 	      return true;
4908 	    }
4909 	}
4910 
4911       prev_upper_bound = CASE_HIGH (elt);
4912       if (! prev_upper_bound)
4913 	prev_upper_bound = CASE_LOW (elt);
4914     }
4915 
4916   return false;
4917 }
4918 
4919 /* Verify a gimple debug statement STMT.
4920    Returns true if anything is wrong.  */
4921 
4922 static bool
4923 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4924 {
4925   /* There isn't much that could be wrong in a gimple debug stmt.  A
4926      gimple debug bind stmt, for example, maps a tree, that's usually
4927      a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4928      component or member of an aggregate type, to another tree, that
4929      can be an arbitrary expression.  These stmts expand into debug
4930      insns, and are converted to debug notes by var-tracking.cc.  */
4931   return false;
4932 }
4933 
4934 /* Verify a gimple label statement STMT.
4935    Returns true if anything is wrong.  */
4936 
4937 static bool
4938 verify_gimple_label (glabel *stmt)
4939 {
4940   tree decl = gimple_label_label (stmt);
4941   int uid;
4942   bool err = false;
4943 
4944   if (TREE_CODE (decl) != LABEL_DECL)
4945     return true;
4946   if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4947       && DECL_CONTEXT (decl) != current_function_decl)
4948     {
4949       error ("label context is not the current function declaration");
4950       err |= true;
4951     }
4952 
4953   uid = LABEL_DECL_UID (decl);
4954   if (cfun->cfg
4955       && (uid == -1
4956 	  || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4957     {
4958       error ("incorrect entry in %<label_to_block_map%>");
4959       err |= true;
4960     }
4961 
4962   uid = EH_LANDING_PAD_NR (decl);
4963   if (uid)
4964     {
4965       eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4966       if (decl != lp->post_landing_pad)
4967 	{
4968 	  error ("incorrect setting of landing pad number");
4969 	  err |= true;
4970 	}
4971     }
4972 
4973   return err;
4974 }
4975 
4976 /* Verify a gimple cond statement STMT.
4977    Returns true if anything is wrong.  */
4978 
4979 static bool
4980 verify_gimple_cond (gcond *stmt)
4981 {
4982   if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4983     {
4984       error ("invalid comparison code in gimple cond");
4985       return true;
4986     }
4987   if (!(!gimple_cond_true_label (stmt)
4988 	|| TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4989       || !(!gimple_cond_false_label (stmt)
4990 	   || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4991     {
4992       error ("invalid labels in gimple cond");
4993       return true;
4994     }
4995 
4996   return verify_gimple_comparison (boolean_type_node,
4997 				   gimple_cond_lhs (stmt),
4998 				   gimple_cond_rhs (stmt),
4999 				   gimple_cond_code (stmt));
5000 }
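
/* As an illustrative sketch of what the checks above accept, a
   well-formed cond in gimplified form looks like

     if (a_1 < b_2) goto D.1234; else goto D.1235;

   i.e. a tcc_comparison code (LT_EXPR here), two operands that
   verify_gimple_comparison checks against boolean_type_node, and
   optional LABEL_DECL true/false labels.  */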
5001 
5002 /* Verify the GIMPLE statement STMT.  Returns true if there is an
5003    error, otherwise false.  */
5004 
5005 static bool
5006 verify_gimple_stmt (gimple *stmt)
5007 {
5008   switch (gimple_code (stmt))
5009     {
5010     case GIMPLE_ASSIGN:
5011       return verify_gimple_assign (as_a <gassign *> (stmt));
5012 
5013     case GIMPLE_LABEL:
5014       return verify_gimple_label (as_a <glabel *> (stmt));
5015 
5016     case GIMPLE_CALL:
5017       return verify_gimple_call (as_a <gcall *> (stmt));
5018 
5019     case GIMPLE_COND:
5020       return verify_gimple_cond (as_a <gcond *> (stmt));
5021 
5022     case GIMPLE_GOTO:
5023       return verify_gimple_goto (as_a <ggoto *> (stmt));
5024 
5025     case GIMPLE_SWITCH:
5026       return verify_gimple_switch (as_a <gswitch *> (stmt));
5027 
5028     case GIMPLE_RETURN:
5029       return verify_gimple_return (as_a <greturn *> (stmt));
5030 
5031     case GIMPLE_ASM:
5032       return false;
5033 
5034     case GIMPLE_TRANSACTION:
5035       return verify_gimple_transaction (as_a <gtransaction *> (stmt));
5036 
5037     /* Tuples that do not have tree operands.  */
5038     case GIMPLE_NOP:
5039     case GIMPLE_PREDICT:
5040     case GIMPLE_RESX:
5041     case GIMPLE_EH_DISPATCH:
5042     case GIMPLE_EH_MUST_NOT_THROW:
5043       return false;
5044 
5045     CASE_GIMPLE_OMP:
5046       /* OpenMP directives are validated by the FE and never operated
5047 	 on by the optimizers.  Furthermore, GIMPLE_OMP_FOR may contain
5048 	 non-gimple expressions when the main index variable has had
5049 	 its address taken.  This does not affect the loop itself
5050 	 because the header of a GIMPLE_OMP_FOR is merely used to determine
5051 	 how to set up the parallel iteration.  */
5052       return false;
5053 
5054     case GIMPLE_DEBUG:
5055       return verify_gimple_debug (stmt);
5056 
5057     default:
5058       gcc_unreachable ();
5059     }
5060 }
5061 
5062 /* Verify the contents of a GIMPLE_PHI.  Returns true if there is a problem,
5063    and false otherwise.  */
5064 
5065 static bool
5066 verify_gimple_phi (gphi *phi)
5067 {
5068   bool err = false;
5069   unsigned i;
5070   tree phi_result = gimple_phi_result (phi);
5071   bool virtual_p;
5072 
5073   if (!phi_result)
5074     {
5075       error ("invalid %<PHI%> result");
5076       return true;
5077     }
5078 
5079   virtual_p = virtual_operand_p (phi_result);
5080   if (TREE_CODE (phi_result) != SSA_NAME
5081       || (virtual_p
5082 	  && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
5083     {
5084       error ("invalid %<PHI%> result");
5085       err = true;
5086     }
5087 
5088   for (i = 0; i < gimple_phi_num_args (phi); i++)
5089     {
5090       tree t = gimple_phi_arg_def (phi, i);
5091 
5092       if (!t)
5093 	{
5094 	  error ("missing %<PHI%> def");
5095 	  err |= true;
5096 	  continue;
5097 	}
5098       /* Addressable variables do have SSA_NAMEs but they
5099 	 are not considered gimple values.  */
5100       else if ((TREE_CODE (t) == SSA_NAME
5101 		&& virtual_p != virtual_operand_p (t))
5102 	       || (virtual_p
5103 		   && (TREE_CODE (t) != SSA_NAME
5104 		       || SSA_NAME_VAR (t) != gimple_vop (cfun)))
5105 	       || (!virtual_p
5106 		   && !is_gimple_val (t)))
5107 	{
5108 	  error ("invalid %<PHI%> argument");
5109 	  debug_generic_expr (t);
5110 	  err |= true;
5111 	}
5112 #ifdef ENABLE_TYPES_CHECKING
5113       if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
5114 	{
5115 	  error ("incompatible types in %<PHI%> argument %u", i);
5116 	  debug_generic_stmt (TREE_TYPE (phi_result));
5117 	  debug_generic_stmt (TREE_TYPE (t));
5118 	  err |= true;
5119 	}
5120 #endif
5121     }
5122 
5123   return err;
5124 }
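
/* Illustrative examples of PHIs the above accepts, in dump syntax:

     # x_3 = PHI <x_1(2), x_2(4)>

   for a normal PHI whose arguments are gimple values, and

     # .MEM_5 = PHI <.MEM_2(2), .MEM_4(4)>

   for a virtual PHI whose result and arguments are all SSA names of
   the single virtual operand.  Mixing the two forms is rejected.  */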
5125 
5126 /* Verify the GIMPLE statements inside the sequence STMTS.  */
5127 
5128 static bool
5129 verify_gimple_in_seq_2 (gimple_seq stmts)
5130 {
5131   gimple_stmt_iterator ittr;
5132   bool err = false;
5133 
5134   for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
5135     {
5136       gimple *stmt = gsi_stmt (ittr);
5137 
5138       switch (gimple_code (stmt))
5139         {
5140 	case GIMPLE_BIND:
5141 	  err |= verify_gimple_in_seq_2 (
5142                    gimple_bind_body (as_a <gbind *> (stmt)));
5143 	  break;
5144 
5145 	case GIMPLE_TRY:
5146 	  err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
5147 	  err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
5148 	  break;
5149 
5150 	case GIMPLE_EH_FILTER:
5151 	  err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
5152 	  break;
5153 
5154 	case GIMPLE_EH_ELSE:
5155 	  {
5156 	    geh_else *eh_else = as_a <geh_else *> (stmt);
5157 	    err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
5158 	    err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
5159 	  }
5160 	  break;
5161 
5162 	case GIMPLE_CATCH:
5163 	  err |= verify_gimple_in_seq_2 (gimple_catch_handler (
5164 					   as_a <gcatch *> (stmt)));
5165 	  break;
5166 
5167 	case GIMPLE_TRANSACTION:
5168 	  err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
5169 	  break;
5170 
5171 	default:
5172 	  {
5173 	    bool err2 = verify_gimple_stmt (stmt);
5174 	    if (err2)
5175 	      debug_gimple_stmt (stmt);
5176 	    err |= err2;
5177 	  }
5178 	}
5179     }
5180 
5181   return err;
5182 }
5183 
5184 /* Verify the contents of a GIMPLE_TRANSACTION.  Returns true if there
5185    is a problem, otherwise false.  */
5186 
5187 static bool
5188 verify_gimple_transaction (gtransaction *stmt)
5189 {
5190   tree lab;
5191 
5192   lab = gimple_transaction_label_norm (stmt);
5193   if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5194     return true;
5195   lab = gimple_transaction_label_uninst (stmt);
5196   if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5197     return true;
5198   lab = gimple_transaction_label_over (stmt);
5199   if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5200     return true;
5201 
5202   return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5203 }
5204 
5205 
5206 /* Verify the GIMPLE statements inside the statement list STMTS.  */
5207 
5208 DEBUG_FUNCTION void
5209 verify_gimple_in_seq (gimple_seq stmts)
5210 {
5211   timevar_push (TV_TREE_STMT_VERIFY);
5212   if (verify_gimple_in_seq_2 (stmts))
5213     internal_error ("%<verify_gimple%> failed");
5214   timevar_pop (TV_TREE_STMT_VERIFY);
5215 }
5216 
5217 /* Return true when the tree node T can be shared.  */
5218 
5219 static bool
5220 tree_node_can_be_shared (tree t)
5221 {
5222   if (IS_TYPE_OR_DECL_P (t)
5223       || TREE_CODE (t) == SSA_NAME
5224       || TREE_CODE (t) == IDENTIFIER_NODE
5225       || TREE_CODE (t) == CASE_LABEL_EXPR
5226       || is_gimple_min_invariant (t))
5227     return true;
5228 
5229   if (t == error_mark_node)
5230     return true;
5231 
5232   return false;
5233 }
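
/* For example, a type node for 'int', a decl, an SSA name or the
   constant 42 may legitimately appear many times in the IL, whereas an
   expression such as the ARRAY_REF a[i_1] must be unshared so each use
   can be rewritten independently.  */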
5234 
5235 /* Called via walk_tree.  Verify tree sharing.  */
5236 
5237 static tree
5238 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5239 {
5240   hash_set<void *> *visited = (hash_set<void *> *) data;
5241 
5242   if (tree_node_can_be_shared (*tp))
5243     {
5244       *walk_subtrees = false;
5245       return NULL;
5246     }
5247 
5248   if (visited->add (*tp))
5249     return *tp;
5250 
5251   return NULL;
5252 }
5253 
5254 /* Called via walk_gimple_stmt.  Verify tree sharing.  */
5255 
5256 static tree
5257 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5258 {
5259   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5260   return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5261 }
5262 
5263 static bool eh_error_found;
5264 bool
5265 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5266 			   hash_set<gimple *> *visited)
5267 {
5268   if (!visited->contains (stmt))
5269     {
5270       error ("dead statement in EH table");
5271       debug_gimple_stmt (stmt);
5272       eh_error_found = true;
5273     }
5274   return true;
5275 }
5276 
5277 /* Verify that the block of location LOC is in the set BLOCKS.  */
5278 
5279 static bool
5280 verify_location (hash_set<tree> *blocks, location_t loc)
5281 {
5282   tree block = LOCATION_BLOCK (loc);
5283   if (block != NULL_TREE
5284       && !blocks->contains (block))
5285     {
5286       error ("location references block not in block tree");
5287       return true;
5288     }
5289   if (block != NULL_TREE)
5290     return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5291   return false;
5292 }
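
/* For instance, if LOC carries block B2 from a block tree
   B0 -> B1 -> B2 rooted at DECL_INITIAL, B2 must be a member of
   BLOCKS; the recursion then applies the same check to the source
   location recorded for B2 itself.  */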
5293 
5294 /* Called via walk_tree.  Verify that expressions have no blocks.  */
5295 
5296 static tree
5297 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5298 {
5299   if (!EXPR_P (*tp))
5300     {
5301       *walk_subtrees = false;
5302       return NULL;
5303     }
5304 
5305   location_t loc = EXPR_LOCATION (*tp);
5306   if (LOCATION_BLOCK (loc) != NULL)
5307     return *tp;
5308 
5309   return NULL;
5310 }
5311 
5312 /* Called via walk_tree.  Verify locations of expressions.  */
5313 
5314 static tree
5315 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5316 {
5317   hash_set<tree> *blocks = (hash_set<tree> *) data;
5318   tree t = *tp;
5319 
5320   /* ???  This doesn't really belong here but there's no good place to
5321      stick this remainder of old verify_expr.  */
5322   /* ???  This barfs on debug stmts which contain binds to vars with
5323      different function context.  */
5324 #if 0
5325   if (VAR_P (t)
5326       || TREE_CODE (t) == PARM_DECL
5327       || TREE_CODE (t) == RESULT_DECL)
5328     {
5329       tree context = decl_function_context (t);
5330       if (context != cfun->decl
5331 	  && !SCOPE_FILE_SCOPE_P (context)
5332 	  && !TREE_STATIC (t)
5333 	  && !DECL_EXTERNAL (t))
5334 	{
5335 	  error ("local declaration from a different function");
5336 	  return t;
5337 	}
5338     }
5339 #endif
5340 
5341   if (VAR_P (t) && DECL_HAS_DEBUG_EXPR_P (t))
5342     {
5343       tree x = DECL_DEBUG_EXPR (t);
5344       tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5345       if (addr)
5346 	return addr;
5347     }
5348   if ((VAR_P (t)
5349        || TREE_CODE (t) == PARM_DECL
5350        || TREE_CODE (t) == RESULT_DECL)
5351       && DECL_HAS_VALUE_EXPR_P (t))
5352     {
5353       tree x = DECL_VALUE_EXPR (t);
5354       tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5355       if (addr)
5356 	return addr;
5357     }
5358 
5359   if (!EXPR_P (t))
5360     {
5361       *walk_subtrees = false;
5362       return NULL;
5363     }
5364 
5365   location_t loc = EXPR_LOCATION (t);
5366   if (verify_location (blocks, loc))
5367     return t;
5368 
5369   return NULL;
5370 }
5371 
5372 /* Called via walk_gimple_op.  Verify locations of expressions.  */
5373 
5374 static tree
5375 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5376 {
5377   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5378   return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5379 }
5380 
5381 /* Insert all subblocks of BLOCK into BLOCKS and recurse.  */
5382 
5383 static void
5384 collect_subblocks (hash_set<tree> *blocks, tree block)
5385 {
5386   tree t;
5387   for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5388     {
5389       blocks->add (t);
5390       collect_subblocks (blocks, t);
5391     }
5392 }
5393 
5394 /* Disable warnings about missing quoting in GCC diagnostics for
5395    the verification errors.  Their format strings don't follow
5396    GCC diagnostic conventions and trigger an ICE in the end.  */
5397 #if __GNUC__ >= 10
5398 #  pragma GCC diagnostic push
5399 #  pragma GCC diagnostic ignored "-Wformat-diag"
5400 #endif
5401 
5402 /* Verify the GIMPLE statements in the CFG of FN.  */
5403 
5404 DEBUG_FUNCTION void
5405 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5406 {
5407   basic_block bb;
5408   bool err = false;
5409 
5410   timevar_push (TV_TREE_STMT_VERIFY);
5411   hash_set<void *> visited;
5412   hash_set<gimple *> visited_throwing_stmts;
5413 
5414   /* Collect all BLOCKs referenced by the BLOCK tree of FN.  */
5415   hash_set<tree> blocks;
5416   if (DECL_INITIAL (fn->decl))
5417     {
5418       blocks.add (DECL_INITIAL (fn->decl));
5419       collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5420     }
5421 
5422   FOR_EACH_BB_FN (bb, fn)
5423     {
5424       gimple_stmt_iterator gsi;
5425       edge_iterator ei;
5426       edge e;
5427 
5428       for (gphi_iterator gpi = gsi_start_phis (bb);
5429 	   !gsi_end_p (gpi);
5430 	   gsi_next (&gpi))
5431 	{
5432 	  gphi *phi = gpi.phi ();
5433 	  bool err2 = false;
5434 	  unsigned i;
5435 
5436 	  if (gimple_bb (phi) != bb)
5437 	    {
5438 	      error ("gimple_bb (phi) is set to a wrong basic block");
5439 	      err2 = true;
5440 	    }
5441 
5442 	  err2 |= verify_gimple_phi (phi);
5443 
5444 	  /* Only PHI arguments have locations.  */
5445 	  if (gimple_location (phi) != UNKNOWN_LOCATION)
5446 	    {
5447 	      error ("PHI node with location");
5448 	      err2 = true;
5449 	    }
5450 
5451 	  for (i = 0; i < gimple_phi_num_args (phi); i++)
5452 	    {
5453 	      tree arg = gimple_phi_arg_def (phi, i);
5454 	      tree addr = walk_tree (&arg, verify_node_sharing_1,
5455 				     &visited, NULL);
5456 	      if (addr)
5457 		{
5458 		  error ("incorrect sharing of tree nodes");
5459 		  debug_generic_expr (addr);
5460 		  err2 |= true;
5461 		}
5462 	      location_t loc = gimple_phi_arg_location (phi, i);
5463 	      if (virtual_operand_p (gimple_phi_result (phi))
5464 		  && loc != UNKNOWN_LOCATION)
5465 		{
5466 		  error ("virtual PHI with argument locations");
5467 		  err2 = true;
5468 		}
5469 	      addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5470 	      if (addr)
5471 		{
5472 		  debug_generic_expr (addr);
5473 		  err2 = true;
5474 		}
5475 	      err2 |= verify_location (&blocks, loc);
5476 	    }
5477 
5478 	  if (err2)
5479 	    debug_gimple_stmt (phi);
5480 	  err |= err2;
5481 	}
5482 
5483       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5484 	{
5485 	  gimple *stmt = gsi_stmt (gsi);
5486 	  bool err2 = false;
5487 	  struct walk_stmt_info wi;
5488 	  tree addr;
5489 	  int lp_nr;
5490 
5491 	  if (gimple_bb (stmt) != bb)
5492 	    {
5493 	      error ("gimple_bb (stmt) is set to a wrong basic block");
5494 	      err2 = true;
5495 	    }
5496 
5497 	  err2 |= verify_gimple_stmt (stmt);
5498 	  err2 |= verify_location (&blocks, gimple_location (stmt));
5499 
5500 	  memset (&wi, 0, sizeof (wi));
5501 	  wi.info = (void *) &visited;
5502 	  addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5503 	  if (addr)
5504 	    {
5505 	      error ("incorrect sharing of tree nodes");
5506 	      debug_generic_expr (addr);
5507 	      err2 |= true;
5508 	    }
5509 
5510 	  memset (&wi, 0, sizeof (wi));
5511 	  wi.info = (void *) &blocks;
5512 	  addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5513 	  if (addr)
5514 	    {
5515 	      debug_generic_expr (addr);
5516 	      err2 |= true;
5517 	    }
5518 
5519 	  /* If the statement is marked as part of an EH region, then it is
5520 	     expected that the statement could throw.  Verify that when an
5521 	     optimization simplifies a statement so that we can prove it
5522 	     cannot throw, the other data structures are updated to
5523 	     match.  */
5524 	  lp_nr = lookup_stmt_eh_lp (stmt);
5525 	  if (lp_nr != 0)
5526 	    visited_throwing_stmts.add (stmt);
5527 	  if (lp_nr > 0)
5528 	    {
5529 	      if (!stmt_could_throw_p (cfun, stmt))
5530 		{
5531 		  if (verify_nothrow)
5532 		    {
5533 		      error ("statement marked for throw, but doesn%'t");
5534 		      err2 |= true;
5535 		    }
5536 		}
5537 	      else if (!gsi_one_before_end_p (gsi))
5538 		{
5539 		  error ("statement marked for throw in middle of block");
5540 		  err2 |= true;
5541 		}
5542 	    }
5543 
5544 	  if (err2)
5545 	    debug_gimple_stmt (stmt);
5546 	  err |= err2;
5547 	}
5548 
5549       FOR_EACH_EDGE (e, ei, bb->succs)
5550 	if (e->goto_locus != UNKNOWN_LOCATION)
5551 	  err |= verify_location (&blocks, e->goto_locus);
5552     }
5553 
5554   hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5555   eh_error_found = false;
5556   if (eh_table)
5557     eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5558       (&visited_throwing_stmts);
5559 
5560   if (err || eh_error_found)
5561     internal_error ("verify_gimple failed");
5562 
5563   verify_histograms ();
5564   timevar_pop (TV_TREE_STMT_VERIFY);
5565 }
5566 
5567 
5568 /* Verifies that the flow information is OK.  */
5569 
5570 static int
5571 gimple_verify_flow_info (void)
5572 {
5573   int err = 0;
5574   basic_block bb;
5575   gimple_stmt_iterator gsi;
5576   gimple *stmt;
5577   edge e;
5578   edge_iterator ei;
5579 
5580   if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5581       || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5582     {
5583       error ("ENTRY_BLOCK has IL associated with it");
5584       err = 1;
5585     }
5586 
5587   if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5588       || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5589     {
5590       error ("EXIT_BLOCK has IL associated with it");
5591       err = 1;
5592     }
5593 
5594   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5595     if (e->flags & EDGE_FALLTHRU)
5596       {
5597 	error ("fallthru to exit from bb %d", e->src->index);
5598 	err = 1;
5599       }
5600 
5601   FOR_EACH_BB_FN (bb, cfun)
5602     {
5603       bool found_ctrl_stmt = false;
5604 
5605       stmt = NULL;
5606 
5607       /* Skip labels on the start of basic block.  */
5608       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5609 	{
5610 	  tree label;
5611 	  gimple *prev_stmt = stmt;
5612 
5613 	  stmt = gsi_stmt (gsi);
5614 
5615 	  if (gimple_code (stmt) != GIMPLE_LABEL)
5616 	    break;
5617 
5618 	  label = gimple_label_label (as_a <glabel *> (stmt));
5619 	  if (prev_stmt && DECL_NONLOCAL (label))
5620 	    {
5621 	      error ("nonlocal label %qD is not first in a sequence "
5622 		     "of labels in bb %d", label, bb->index);
5623 	      err = 1;
5624 	    }
5625 
5626 	  if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5627 	    {
5628 	      error ("EH landing pad label %qD is not first in a sequence "
5629 		     "of labels in bb %d", label, bb->index);
5630 	      err = 1;
5631 	    }
5632 
5633 	  if (label_to_block (cfun, label) != bb)
5634 	    {
5635 	      error ("label %qD to block does not match in bb %d",
5636 		     label, bb->index);
5637 	      err = 1;
5638 	    }
5639 
5640 	  if (decl_function_context (label) != current_function_decl)
5641 	    {
5642 	      error ("label %qD has incorrect context in bb %d",
5643 		     label, bb->index);
5644 	      err = 1;
5645 	    }
5646 	}
5647 
5648       /* Verify that body of basic block BB is free of control flow.  */
5649       for (; !gsi_end_p (gsi); gsi_next (&gsi))
5650 	{
5651 	  gimple *stmt = gsi_stmt (gsi);
5652 
5653 	  if (found_ctrl_stmt)
5654 	    {
5655 	      error ("control flow in the middle of basic block %d",
5656 		     bb->index);
5657 	      err = 1;
5658 	    }
5659 
5660 	  if (stmt_ends_bb_p (stmt))
5661 	    found_ctrl_stmt = true;
5662 
5663 	  if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5664 	    {
5665 	      error ("label %qD in the middle of basic block %d",
5666 		     gimple_label_label (label_stmt), bb->index);
5667 	      err = 1;
5668 	    }
5669 	}
5670 
5671       gsi = gsi_last_nondebug_bb (bb);
5672       if (gsi_end_p (gsi))
5673 	continue;
5674 
5675       stmt = gsi_stmt (gsi);
5676 
5677       if (gimple_code (stmt) == GIMPLE_LABEL)
5678 	continue;
5679 
5680       err |= verify_eh_edges (stmt);
5681 
5682       if (is_ctrl_stmt (stmt))
5683 	{
5684 	  FOR_EACH_EDGE (e, ei, bb->succs)
5685 	    if (e->flags & EDGE_FALLTHRU)
5686 	      {
5687 		error ("fallthru edge after a control statement in bb %d",
5688 		       bb->index);
5689 		err = 1;
5690 	      }
5691 	}
5692 
5693       if (gimple_code (stmt) != GIMPLE_COND)
5694 	{
5695 	  /* Verify that no edge has EDGE_TRUE_VALUE or EDGE_FALSE_VALUE
5696 	     set after anything but a GIMPLE_COND.  */
5697 	  FOR_EACH_EDGE (e, ei, bb->succs)
5698 	    if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5699 	      {
5700 		error ("true/false edge after a non-GIMPLE_COND in bb %d",
5701 		       bb->index);
5702 		err = 1;
5703 	      }
5704 	}
5705 
5706       switch (gimple_code (stmt))
5707 	{
5708 	case GIMPLE_COND:
5709 	  {
5710 	    edge true_edge;
5711 	    edge false_edge;
5712 
5713 	    extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5714 
5715 	    if (!true_edge
5716 		|| !false_edge
5717 		|| !(true_edge->flags & EDGE_TRUE_VALUE)
5718 		|| !(false_edge->flags & EDGE_FALSE_VALUE)
5719 		|| (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5720 		|| (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5721 		|| EDGE_COUNT (bb->succs) >= 3)
5722 	      {
5723 		error ("wrong outgoing edge flags at end of bb %d",
5724 		       bb->index);
5725 		err = 1;
5726 	      }
5727 	  }
5728 	  break;
5729 
5730 	case GIMPLE_GOTO:
5731 	  if (simple_goto_p (stmt))
5732 	    {
5733 	      error ("explicit goto at end of bb %d", bb->index);
5734 	      err = 1;
5735 	    }
5736 	  else
5737 	    {
5738 	      /* FIXME.  We should double check that the labels in the
5739 		 destination blocks have their address taken.  */
5740 	      FOR_EACH_EDGE (e, ei, bb->succs)
5741 		if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5742 				 | EDGE_FALSE_VALUE))
5743 		    || !(e->flags & EDGE_ABNORMAL))
5744 		  {
5745 		    error ("wrong outgoing edge flags at end of bb %d",
5746 			   bb->index);
5747 		    err = 1;
5748 		  }
5749 	    }
5750 	  break;
5751 
5752 	case GIMPLE_CALL:
5753 	  if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5754 	    break;
5755 	  /* fallthru */
5756 	case GIMPLE_RETURN:
5757 	  if (!single_succ_p (bb)
5758 	      || (single_succ_edge (bb)->flags
5759 		  & (EDGE_FALLTHRU | EDGE_ABNORMAL
5760 		     | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5761 	    {
5762 	      error ("wrong outgoing edge flags at end of bb %d", bb->index);
5763 	      err = 1;
5764 	    }
5765 	  if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5766 	    {
5767 	      error ("return edge does not point to exit in bb %d",
5768 		     bb->index);
5769 	      err = 1;
5770 	    }
5771 	  break;
5772 
5773 	case GIMPLE_SWITCH:
5774 	  {
5775 	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
5776 	    tree prev;
5777 	    edge e;
5778 	    size_t i, n;
5779 
5780 	    n = gimple_switch_num_labels (switch_stmt);
5781 
5782 	    /* Mark all the destination basic blocks.  */
5783 	    for (i = 0; i < n; ++i)
5784 	      {
5785 		basic_block label_bb = gimple_switch_label_bb (cfun, switch_stmt, i);
5786 		gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5787 		label_bb->aux = (void *)1;
5788 	      }
5789 
5790 	    /* Verify that the case labels are sorted.  */
5791 	    prev = gimple_switch_label (switch_stmt, 0);
5792 	    for (i = 1; i < n; ++i)
5793 	      {
5794 		tree c = gimple_switch_label (switch_stmt, i);
5795 		if (!CASE_LOW (c))
5796 		  {
5797 		    error ("found default case not at the start of "
5798 			   "case vector");
5799 		    err = 1;
5800 		    continue;
5801 		  }
5802 		if (CASE_LOW (prev)
5803 		    && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5804 		  {
5805 		    error ("case labels not sorted: ");
5806 		    print_generic_expr (stderr, prev);
5807 		    fprintf (stderr," is greater than ");
5808 		    print_generic_expr (stderr, c);
5809 		    fprintf (stderr," but comes before it.\n");
5810 		    err = 1;
5811 		  }
5812 		prev = c;
5813 	      }
5814 	    /* VRP will remove the default case if it can prove it will
5815 	       never be executed.  So do not verify there always exists
5816 	       a default case here.  */
5817 
5818 	    FOR_EACH_EDGE (e, ei, bb->succs)
5819 	      {
5820 		if (!e->dest->aux)
5821 		  {
5822 		    error ("extra outgoing edge %d->%d",
5823 			   bb->index, e->dest->index);
5824 		    err = 1;
5825 		  }
5826 
5827 		e->dest->aux = (void *)2;
5828 		if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5829 				 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5830 		  {
5831 		    error ("wrong outgoing edge flags at end of bb %d",
5832 			   bb->index);
5833 		    err = 1;
5834 		  }
5835 	      }
5836 
5837 	    /* Check that we have all of them.  */
5838 	    for (i = 0; i < n; ++i)
5839 	      {
5840 		basic_block label_bb = gimple_switch_label_bb (cfun,
5841 							       switch_stmt, i);
5842 
5843 		if (label_bb->aux != (void *)2)
5844 		  {
5845 		    error ("missing edge %i->%i", bb->index, label_bb->index);
5846 		    err = 1;
5847 		  }
5848 	      }
5849 
5850 	    FOR_EACH_EDGE (e, ei, bb->succs)
5851 	      e->dest->aux = (void *)0;
5852 	  }
5853 	  break;
5854 
5855 	case GIMPLE_EH_DISPATCH:
5856 	  err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5857 	  break;
5858 
5859 	default:
5860 	  break;
5861 	}
5862     }
5863 
5864   if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5865     verify_dominators (CDI_DOMINATORS);
5866 
5867   return err;
5868 }
5869 
5870 #if __GNUC__ >= 10
5871 #  pragma GCC diagnostic pop
5872 #endif
5873 
5874 /* Updates phi nodes after creating a forwarder block joined
5875    by edge FALLTHRU.  */
5876 
5877 static void
5878 gimple_make_forwarder_block (edge fallthru)
5879 {
5880   edge e;
5881   edge_iterator ei;
5882   basic_block dummy, bb;
5883   tree var;
5884   gphi_iterator gsi;
5885   bool forward_location_p;
5886 
5887   dummy = fallthru->src;
5888   bb = fallthru->dest;
5889 
5890   if (single_pred_p (bb))
5891     return;
5892 
5893   /* We can forward location info if we have only one predecessor.  */
5894   forward_location_p = single_pred_p (dummy);
5895 
5896   /* If we redirected a branch we must create new PHI nodes at the
5897      start of BB.  */
5898   for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5899     {
5900       gphi *phi, *new_phi;
5901 
5902       phi = gsi.phi ();
5903       var = gimple_phi_result (phi);
5904       new_phi = create_phi_node (var, bb);
5905       gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5906       add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5907 		   forward_location_p
5908 		   ? gimple_phi_arg_location (phi, 0) : UNKNOWN_LOCATION);
5909     }
5910 
5911   /* Add the arguments we have stored on edges.  */
5912   FOR_EACH_EDGE (e, ei, bb->preds)
5913     {
5914       if (e == fallthru)
5915 	continue;
5916 
5917       flush_pending_stmts (e);
5918     }
5919 }
5920 
5921 
5922 /* Return a non-special label at the head of basic block BB.
5923    Create one if it doesn't exist.  */
5924 
5925 tree
5926 gimple_block_label (basic_block bb)
5927 {
5928   gimple_stmt_iterator i, s = gsi_start_bb (bb);
5929   bool first = true;
5930   tree label;
5931   glabel *stmt;
5932 
5933   for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5934     {
5935       stmt = dyn_cast <glabel *> (gsi_stmt (i));
5936       if (!stmt)
5937 	break;
5938       label = gimple_label_label (stmt);
5939       if (!DECL_NONLOCAL (label))
5940 	{
5941 	  if (!first)
5942 	    gsi_move_before (&i, &s);
5943 	  return label;
5944 	}
5945     }
5946 
5947   label = create_artificial_label (UNKNOWN_LOCATION);
5948   stmt = gimple_build_label (label);
5949   gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5950   return label;
5951 }
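
/* The redirection code below relies on this helper: retargeting e.g. a
   GIMPLE_SWITCH case or an asm goto operand to block DEST is done by
   rewriting the label operand to gimple_block_label (dest).  */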
5952 
5953 
5954 /* Attempt to perform edge redirection by replacing a possibly complex
5955    jump instruction by a goto or by removing the jump completely.
5956    This can apply only if all edges now point to the same block.  The
5957    parameters and return values are equivalent to
5958    redirect_edge_and_branch.  */
5959 
5960 static edge
5961 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5962 {
5963   basic_block src = e->src;
5964   gimple_stmt_iterator i;
5965   gimple *stmt;
5966 
5967   /* We can replace or remove a complex jump only when we have exactly
5968      two edges.  */
5969   if (EDGE_COUNT (src->succs) != 2
5970       /* Verify that all targets will be TARGET.  Specifically, the
5971 	 edge that is not E must also go to TARGET.  */
5972       || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5973     return NULL;
5974 
5975   i = gsi_last_bb (src);
5976   if (gsi_end_p (i))
5977     return NULL;
5978 
5979   stmt = gsi_stmt (i);
5980 
5981   if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5982     {
5983       gsi_remove (&i, true);
5984       e = ssa_redirect_edge (e, target);
5985       e->flags = EDGE_FALLTHRU;
5986       return e;
5987     }
5988 
5989   return NULL;
5990 }
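
/* For example, if SRC ends in

     if (a_1 < b_2) goto <bb 3>; else goto <bb 4>;

   and the edge to <bb 4> is being redirected to <bb 3>, both
   successors would coincide; the GIMPLE_COND is deleted and the
   remaining edge becomes a plain fallthru.  */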
5991 
5992 
5993 /* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
5994    edge representing the redirected branch.  */
5995 
5996 static edge
5997 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5998 {
5999   basic_block bb = e->src;
6000   gimple_stmt_iterator gsi;
6001   edge ret;
6002   gimple *stmt;
6003 
6004   if (e->flags & EDGE_ABNORMAL)
6005     return NULL;
6006 
6007   if (e->dest == dest)
6008     return NULL;
6009 
6010   if (e->flags & EDGE_EH)
6011     return redirect_eh_edge (e, dest);
6012 
6013   if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
6014     {
6015       ret = gimple_try_redirect_by_replacing_jump (e, dest);
6016       if (ret)
6017 	return ret;
6018     }
6019 
6020   gsi = gsi_last_nondebug_bb (bb);
6021   stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
6022 
6023   switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
6024     {
6025     case GIMPLE_COND:
6026       /* For GIMPLE_COND, we only need to redirect the edge.  */
6027       break;
6028 
6029     case GIMPLE_GOTO:
6030       /* No non-abnormal edges should lead from a non-simple goto, and
6031 	 simple ones should be represented implicitly.  */
6032       gcc_unreachable ();
6033 
6034     case GIMPLE_SWITCH:
6035       {
6036 	gswitch *switch_stmt = as_a <gswitch *> (stmt);
6037 	tree label = gimple_block_label (dest);
6038         tree cases = get_cases_for_edge (e, switch_stmt);
6039 
6040 	/* If we have a list of cases associated with E, then use it
6041 	   as it's a lot faster than walking the entire case vector.  */
6042 	if (cases)
6043 	  {
6044 	    edge e2 = find_edge (e->src, dest);
6045 	    tree last, first;
6046 
6047 	    first = cases;
6048 	    while (cases)
6049 	      {
6050 		last = cases;
6051 		CASE_LABEL (cases) = label;
6052 		cases = CASE_CHAIN (cases);
6053 	      }
6054 
6055 	    /* If there was already an edge in the CFG, then we need
6056 	       to move all the cases associated with E to E2.  */
6057 	    if (e2)
6058 	      {
6059 		tree cases2 = get_cases_for_edge (e2, switch_stmt);
6060 
6061 		CASE_CHAIN (last) = CASE_CHAIN (cases2);
6062 		CASE_CHAIN (cases2) = first;
6063 	      }
6064 	    bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
6065 	  }
6066 	else
6067 	  {
6068 	    size_t i, n = gimple_switch_num_labels (switch_stmt);
6069 
6070 	    for (i = 0; i < n; i++)
6071 	      {
6072 		tree elt = gimple_switch_label (switch_stmt, i);
6073 		if (label_to_block (cfun, CASE_LABEL (elt)) == e->dest)
6074 		  CASE_LABEL (elt) = label;
6075 	      }
6076 	  }
6077       }
6078       break;
6079 
6080     case GIMPLE_ASM:
6081       {
6082 	gasm *asm_stmt = as_a <gasm *> (stmt);
6083 	int i, n = gimple_asm_nlabels (asm_stmt);
6084 	tree label = NULL;
6085 
6086 	for (i = 0; i < n; ++i)
6087 	  {
6088 	    tree cons = gimple_asm_label_op (asm_stmt, i);
6089 	    if (label_to_block (cfun, TREE_VALUE (cons)) == e->dest)
6090 	      {
6091 		if (!label)
6092 		  label = gimple_block_label (dest);
6093 		TREE_VALUE (cons) = label;
6094 	      }
6095 	  }
6096 
6097 	/* If we didn't find any label matching the former edge in the
6098 	   asm labels, we must be redirecting the fallthrough
6099 	   edge.  */
6100 	gcc_assert (label || (e->flags & EDGE_FALLTHRU));
6101       }
6102       break;
6103 
6104     case GIMPLE_RETURN:
6105       gsi_remove (&gsi, true);
6106       e->flags |= EDGE_FALLTHRU;
6107       break;
6108 
6109     case GIMPLE_OMP_RETURN:
6110     case GIMPLE_OMP_CONTINUE:
6111     case GIMPLE_OMP_SECTIONS_SWITCH:
6112     case GIMPLE_OMP_FOR:
6113       /* The edges from OMP constructs can be simply redirected.  */
6114       break;
6115 
6116     case GIMPLE_EH_DISPATCH:
6117       if (!(e->flags & EDGE_FALLTHRU))
6118 	redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
6119       break;
6120 
6121     case GIMPLE_TRANSACTION:
6122       if (e->flags & EDGE_TM_ABORT)
6123 	gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
6124 				           gimple_block_label (dest));
6125       else if (e->flags & EDGE_TM_UNINSTRUMENTED)
6126 	gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
6127 				             gimple_block_label (dest));
6128       else
6129 	gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
6130 				           gimple_block_label (dest));
6131       break;
6132 
6133     default:
6134       /* Otherwise it must be a fallthru edge, and we don't need to
6135 	 do anything besides redirecting it.  */
6136       gcc_assert (e->flags & EDGE_FALLTHRU);
6137       break;
6138     }
6139 
6140   /* Update/insert PHI nodes as necessary.  */
6141 
6142   /* Now update the edges in the CFG.  */
6143   e = ssa_redirect_edge (e, dest);
6144 
6145   return e;
6146 }
6147 
6148 /* Returns true if it is possible to remove edge E by redirecting
6149    it to the destination of the other edge from E->src.  */
6150 
6151 static bool
6152 gimple_can_remove_branch_p (const_edge e)
6153 {
6154   if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
6155     return false;
6156 
6157   return true;
6158 }
6159 
6160 /* Simple wrapper, as we can always redirect fallthru edges.  */
6161 
6162 static basic_block
6163 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6164 {
6165   e = gimple_redirect_edge_and_branch (e, dest);
6166   gcc_assert (e);
6167 
6168   return NULL;
6169 }
6170 
6171 
6172 /* Splits basic block BB after statement STMT (but at least after the
6173    labels).  If STMT is NULL, BB is split just after the labels.  */
6174 
6175 static basic_block
6176 gimple_split_block (basic_block bb, void *stmt)
6177 {
6178   gimple_stmt_iterator gsi;
6179   gimple_stmt_iterator gsi_tgt;
6180   gimple_seq list;
6181   basic_block new_bb;
6182   edge e;
6183   edge_iterator ei;
6184 
6185   new_bb = create_empty_bb (bb);
6186 
6187   /* Redirect the outgoing edges.  */
6188   new_bb->succs = bb->succs;
6189   bb->succs = NULL;
6190   FOR_EACH_EDGE (e, ei, new_bb->succs)
6191     e->src = new_bb;
6192 
6193   /* Get a stmt iterator pointing to the first stmt to move.  */
6194   if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
6195     gsi = gsi_after_labels (bb);
6196   else
6197     {
6198       gsi = gsi_for_stmt ((gimple *) stmt);
6199       gsi_next (&gsi);
6200     }
6201 
6202   /* Move everything from GSI to the new basic block.  */
6203   if (gsi_end_p (gsi))
6204     return new_bb;
6205 
6206   /* Split the statement list - avoid re-creating new containers as this
6207      brings ugly quadratic memory consumption in the inliner.
6208      (We are still quadratic since we need to update stmt BB pointers,
6209      sadly.)  */
6210   gsi_split_seq_before (&gsi, &list);
6211   set_bb_seq (new_bb, list);
6212   for (gsi_tgt = gsi_start (list);
6213        !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
6214     gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6215 
6216   return new_bb;
6217 }
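
/* A hypothetical example: for a block containing

     a_1 = b_2 + 1;
     c_3 = a_1 * 2;

   splitting after the first statement keeps a_1's assignment in BB and
   moves c_3's assignment, together with all of BB's former outgoing
   edges, into the returned block.  */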
6218 
6219 
6220 /* Moves basic block BB after block AFTER.  */
6221 
6222 static bool
6223 gimple_move_block_after (basic_block bb, basic_block after)
6224 {
6225   if (bb->prev_bb == after)
6226     return true;
6227 
6228   unlink_block (bb);
6229   link_block (bb, after);
6230 
6231   return true;
6232 }
6233 
6234 
6235 /* Return TRUE if block BB has no executable statements, otherwise return
6236    FALSE.  */
6237 
6238 static bool
6239 gimple_empty_block_p (basic_block bb)
6240 {
6241   /* BB must have no executable statements.  */
6242   gimple_stmt_iterator gsi = gsi_after_labels (bb);
6243   if (phi_nodes (bb))
6244     return false;
6245   while (!gsi_end_p (gsi))
6246     {
6247       gimple *stmt = gsi_stmt (gsi);
6248       if (is_gimple_debug (stmt))
6249 	;
6250       else if (gimple_code (stmt) == GIMPLE_NOP
6251 	       || gimple_code (stmt) == GIMPLE_PREDICT)
6252 	;
6253       else
6254 	return false;
6255       gsi_next (&gsi);
6256     }
6257   return true;
6258 }
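
/* Consequently a block holding only labels, debug stmts, GIMPLE_NOPs
   and GIMPLE_PREDICTs counts as empty, e.g. a forwarder block whose
   executable statements have all been removed.  */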
6259 
6260 
6261 /* Split a basic block if it ends with a conditional branch and if the
6262    other part of the block is not empty.  */
6263 
6264 static basic_block
6265 gimple_split_block_before_cond_jump (basic_block bb)
6266 {
6267   gimple *last, *split_point;
6268   gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6269   if (gsi_end_p (gsi))
6270     return NULL;
6271   last = gsi_stmt (gsi);
6272   if (gimple_code (last) != GIMPLE_COND
6273       && gimple_code (last) != GIMPLE_SWITCH)
6274     return NULL;
6275   gsi_prev (&gsi);
6276   split_point = gsi_stmt (gsi);
6277   return split_block (bb, split_point)->dest;
6278 }
6279 
6280 
6281 /* Return true if basic_block can be duplicated.  */
6282 
6283 static bool
6284 gimple_can_duplicate_bb_p (const_basic_block bb)
6285 {
6286   gimple *last = last_stmt (CONST_CAST_BB (bb));
6287 
6288   /* Do checks that can only fail for the last stmt, to minimize the work in the
6289      stmt loop.  */
6290   if (last)
6291     {
6292       /* A transaction is a single entry multiple exit region.  It must
6293 	 be duplicated in its entirety or not at all.  */
6294       if (gimple_code (last) == GIMPLE_TRANSACTION)
6295 	return false;
6296 
6297       /* An IFN_UNIQUE call must be duplicated as part of its group,
6298 	 or not at all.  */
6299       if (is_gimple_call (last) && gimple_call_internal_p (last)
6300 	  && gimple_call_internal_unique_p (last))
6301 	return false;
6302     }
6303 
6304   for (gimple_stmt_iterator gsi = gsi_start_bb (CONST_CAST_BB (bb));
6305        !gsi_end_p (gsi); gsi_next (&gsi))
6306     {
6307       gimple *g = gsi_stmt (gsi);
6308 
6309       /* An IFN_GOMP_SIMT_ENTER_ALLOC/IFN_GOMP_SIMT_EXIT call must be
6310 	 duplicated as part of its group, or not at all.
6311 	 The IFN_GOMP_SIMT_VOTE_ANY and IFN_GOMP_SIMT_XCHG_* are part of such a
6312 	 group, so the same holds there.  */
6313       if (is_gimple_call (g)
6314 	  && (gimple_call_internal_p (g, IFN_GOMP_SIMT_ENTER_ALLOC)
6315 	      || gimple_call_internal_p (g, IFN_GOMP_SIMT_EXIT)
6316 	      || gimple_call_internal_p (g, IFN_GOMP_SIMT_VOTE_ANY)
6317 	      || gimple_call_internal_p (g, IFN_GOMP_SIMT_XCHG_BFLY)
6318 	      || gimple_call_internal_p (g, IFN_GOMP_SIMT_XCHG_IDX)))
6319 	return false;
6320     }
6321 
6322   return true;
6323 }
6324 
6325 /* Create a duplicate of the basic block BB.  NOTE: This does not
6326    preserve SSA form.  */
6327 
6328 static basic_block
6329 gimple_duplicate_bb (basic_block bb, copy_bb_data *id)
6330 {
6331   basic_block new_bb;
6332   gimple_stmt_iterator gsi_tgt;
6333 
6334   new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6335 
6336   /* Copy the PHI nodes.  We ignore PHI node arguments here because
6337      the incoming edges have not been setup yet.  */
6338   for (gphi_iterator gpi = gsi_start_phis (bb);
6339        !gsi_end_p (gpi);
6340        gsi_next (&gpi))
6341     {
6342       gphi *phi, *copy;
6343       phi = gpi.phi ();
6344       copy = create_phi_node (NULL_TREE, new_bb);
6345       create_new_def_for (gimple_phi_result (phi), copy,
6346 			  gimple_phi_result_ptr (copy));
6347       gimple_set_uid (copy, gimple_uid (phi));
6348     }
6349 
6350   gsi_tgt = gsi_start_bb (new_bb);
6351   for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6352        !gsi_end_p (gsi);
6353        gsi_next (&gsi))
6354     {
6355       def_operand_p def_p;
6356       ssa_op_iter op_iter;
6357       tree lhs;
6358       gimple *stmt, *copy;
6359 
6360       stmt = gsi_stmt (gsi);
6361       if (gimple_code (stmt) == GIMPLE_LABEL)
6362 	continue;
6363 
6364       /* Don't duplicate label debug stmts.  */
6365       if (gimple_debug_bind_p (stmt)
6366 	  && TREE_CODE (gimple_debug_bind_get_var (stmt))
6367 	     == LABEL_DECL)
6368 	continue;
6369 
6370       /* Create a new copy of STMT and duplicate STMT's virtual
6371 	 operands.  */
6372       copy = gimple_copy (stmt);
6373       gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6374 
6375       maybe_duplicate_eh_stmt (copy, stmt);
6376       gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6377 
6378       /* When copying around a stmt writing into a local non-user
6379 	 aggregate, make sure it won't share stack slot with other
6380 	 vars.  */
6381       lhs = gimple_get_lhs (stmt);
6382       if (lhs && TREE_CODE (lhs) != SSA_NAME)
6383 	{
6384 	  tree base = get_base_address (lhs);
6385 	  if (base
6386 	      && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6387 	      && DECL_IGNORED_P (base)
6388 	      && !TREE_STATIC (base)
6389 	      && !DECL_EXTERNAL (base)
6390 	      && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6391 	    DECL_NONSHAREABLE (base) = 1;
6392 	}
6393 
6394       /* If requested remap dependence info of cliques brought in
6395          via inlining.  */
6396       if (id)
6397 	for (unsigned i = 0; i < gimple_num_ops (copy); ++i)
6398 	  {
6399 	    tree op = gimple_op (copy, i);
6400 	    if (!op)
6401 	      continue;
6402 	    if (TREE_CODE (op) == ADDR_EXPR
6403 		|| TREE_CODE (op) == WITH_SIZE_EXPR)
6404 	      op = TREE_OPERAND (op, 0);
6405 	    while (handled_component_p (op))
6406 	      op = TREE_OPERAND (op, 0);
6407 	    if ((TREE_CODE (op) == MEM_REF
6408 		 || TREE_CODE (op) == TARGET_MEM_REF)
6409 		&& MR_DEPENDENCE_CLIQUE (op) > 1
6410 		&& MR_DEPENDENCE_CLIQUE (op) != bb->loop_father->owned_clique)
6411 	      {
6412 		if (!id->dependence_map)
6413 		  id->dependence_map = new hash_map<dependence_hash,
6414 						    unsigned short>;
6415 		bool existed;
6416 		unsigned short &newc = id->dependence_map->get_or_insert
6417 		    (MR_DEPENDENCE_CLIQUE (op), &existed);
6418 		if (!existed)
6419 		  {
6420 		    gcc_assert (MR_DEPENDENCE_CLIQUE (op) <= cfun->last_clique);
6421 		    newc = ++cfun->last_clique;
6422 		  }
6423 		MR_DEPENDENCE_CLIQUE (op) = newc;
6424 	      }
6425 	  }
6426 
6427       /* Create new names for all the definitions created by COPY and
6428 	 add replacement mappings for each new name.  */
6429       FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6430 	create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6431     }
6432 
6433   return new_bb;
6434 }
6435 
6436 /* Adds phi node arguments for edge E_COPY after basic block duplication.  */
6437 
6438 static void
6439 add_phi_args_after_copy_edge (edge e_copy)
6440 {
6441   basic_block bb, bb_copy = e_copy->src, dest;
6442   edge e;
6443   edge_iterator ei;
6444   gphi *phi, *phi_copy;
6445   tree def;
6446   gphi_iterator psi, psi_copy;
6447 
6448   if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6449     return;
6450 
6451   bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6452 
6453   if (e_copy->dest->flags & BB_DUPLICATED)
6454     dest = get_bb_original (e_copy->dest);
6455   else
6456     dest = e_copy->dest;
6457 
6458   e = find_edge (bb, dest);
6459   if (!e)
6460     {
6461       /* During loop unrolling the target of the latch edge is copied.
6462 	 In this case we are not looking for the edge to DEST, but for
6463 	 the edge to the duplicated block whose original was DEST.  */
6464       FOR_EACH_EDGE (e, ei, bb->succs)
6465 	{
6466 	  if ((e->dest->flags & BB_DUPLICATED)
6467 	      && get_bb_original (e->dest) == dest)
6468 	    break;
6469 	}
6470 
6471       gcc_assert (e != NULL);
6472     }
6473 
6474   for (psi = gsi_start_phis (e->dest),
6475        psi_copy = gsi_start_phis (e_copy->dest);
6476        !gsi_end_p (psi);
6477        gsi_next (&psi), gsi_next (&psi_copy))
6478     {
6479       phi = psi.phi ();
6480       phi_copy = psi_copy.phi ();
6481       def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6482       add_phi_arg (phi_copy, def, e_copy,
6483 		   gimple_phi_arg_location_from_edge (phi, e));
6484     }
6485 }
6486 
6487 
6488 /* Basic block BB_COPY was created by code duplication.  Add phi node
6489    arguments for edges going out of BB_COPY.  The blocks that were
6490    duplicated have BB_DUPLICATED set.  */
6491 
6492 void
6493 add_phi_args_after_copy_bb (basic_block bb_copy)
6494 {
6495   edge e_copy;
6496   edge_iterator ei;
6497 
6498   FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6499     {
6500       add_phi_args_after_copy_edge (e_copy);
6501     }
6502 }
6503 
6504 /* Blocks in REGION_COPY array of length N_REGION were created by
6505    duplication of basic blocks.  Add phi node arguments for edges
6506    going from these blocks.  If E_COPY is not NULL, also add
6507    phi node arguments for its destination.  */
6508 
6509 void
6510 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6511 			 edge e_copy)
6512 {
6513   unsigned i;
6514 
6515   for (i = 0; i < n_region; i++)
6516     region_copy[i]->flags |= BB_DUPLICATED;
6517 
6518   for (i = 0; i < n_region; i++)
6519     add_phi_args_after_copy_bb (region_copy[i]);
6520   if (e_copy)
6521     add_phi_args_after_copy_edge (e_copy);
6522 
6523   for (i = 0; i < n_region; i++)
6524     region_copy[i]->flags &= ~BB_DUPLICATED;
6525 }
6526 
6527 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6528    important exit edge EXIT.  By important we mean that no SSA name defined
6529    inside region is live over the other exit edges of the region.  All entry
6530    edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
6531    to the duplicate of the region.  Dominance and loop information is
6532    updated if UPDATE_DOMINANCE is true, but not the SSA web.  If
6533    UPDATE_DOMINANCE is false then we assume that the caller will update the
6534    dominance information after calling this function.  The new basic
6535    blocks are stored to REGION_COPY in the same order as their originals
6536    appear in REGION, provided that REGION_COPY is not NULL.
6537    The function returns false if it is unable to copy the region,
6538    true otherwise.  */
6539 
6540 bool
6541 gimple_duplicate_sese_region (edge entry, edge exit,
6542 			    basic_block *region, unsigned n_region,
6543 			    basic_block *region_copy,
6544 			    bool update_dominance)
6545 {
6546   unsigned i;
6547   bool free_region_copy = false, copying_header = false;
6548   class loop *loop = entry->dest->loop_father;
6549   edge exit_copy;
6550   edge redirected;
6551   profile_count total_count = profile_count::uninitialized ();
6552   profile_count entry_count = profile_count::uninitialized ();
6553 
6554   if (!can_copy_bbs_p (region, n_region))
6555     return false;
6556 
6557   /* Some sanity checking.  Note that we do not check for all possible
6558      misuses of the function.  That is, if you ask to copy something weird,
6559      it will work, but the state of structures probably will not be
6560      correct.  */
6561   for (i = 0; i < n_region; i++)
6562     {
6563       /* We do not handle subloops, i.e. all the blocks must belong to the
6564 	 same loop.  */
6565       if (region[i]->loop_father != loop)
6566 	return false;
6567 
6568       if (region[i] != entry->dest
6569 	  && region[i] == loop->header)
6570 	return false;
6571     }
6572 
6573   /* In case the function is used for loop header copying (which is the primary
6574      use), ensure that EXIT and its copy will be the new latch and entry edges.  */
6575   if (loop->header == entry->dest)
6576     {
6577       copying_header = true;
6578 
6579       if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6580 	return false;
6581 
6582       for (i = 0; i < n_region; i++)
6583 	if (region[i] != exit->src
6584 	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6585 	  return false;
6586     }
6587 
6588   initialize_original_copy_tables ();
6589 
6590   if (copying_header)
6591     set_loop_copy (loop, loop_outer (loop));
6592   else
6593     set_loop_copy (loop, loop);
6594 
6595   if (!region_copy)
6596     {
6597       region_copy = XNEWVEC (basic_block, n_region);
6598       free_region_copy = true;
6599     }
6600 
6601   /* Record blocks outside the region that are dominated by something
6602      inside.  */
6603   auto_vec<basic_block> doms;
6604   if (update_dominance)
6605     {
6606       doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6607     }
6608 
6609   if (entry->dest->count.initialized_p ())
6610     {
6611       total_count = entry->dest->count;
6612       entry_count = entry->count ();
6613       /* Fix up corner cases, to avoid division by zero or creation of negative
6614 	 frequencies.  */
6615       if (entry_count > total_count)
6616 	entry_count = total_count;
6617     }
6618 
6619   copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6620 	    split_edge_bb_loc (entry), update_dominance);
6621   if (total_count.initialized_p () && entry_count.initialized_p ())
6622     {
6623       scale_bbs_frequencies_profile_count (region, n_region,
6624 				           total_count - entry_count,
6625 				           total_count);
6626       scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6627 				           total_count);
6628     }
6629 
6630   if (copying_header)
6631     {
6632       loop->header = exit->dest;
6633       loop->latch = exit->src;
6634     }
6635 
6636   /* Redirect the entry and add the phi node arguments.  */
6637   redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6638   gcc_assert (redirected != NULL);
6639   flush_pending_stmts (entry);
6640 
6641   /* Concerning updating of dominators:  We must recount dominators
6642      for entry block and its copy.  Anything that is outside of the
6643      region, but was dominated by something inside needs recounting as
6644      well.  */
6645   if (update_dominance)
6646     {
6647       set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6648       doms.safe_push (get_bb_original (entry->dest));
6649       iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6650     }
6651 
6652   /* Add the other PHI node arguments.  */
6653   add_phi_args_after_copy (region_copy, n_region, NULL);
6654 
6655   if (free_region_copy)
6656     free (region_copy);
6657 
6658   free_original_copy_tables ();
6659   return true;
6660 }
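
/* For intuition (an illustrative use, not a requirement): when used
   for loop header copying this realizes the classical transformation
   of

     while (cond) body;

   into

     if (cond) { do body; while (cond); }

   where the copied header supplies the initial test, and EXIT's source
   becomes the new latch as set up above.  */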
6661 
6662 /* Checks if BB is part of the region defined by N_REGION BBS.  */
6663 static bool
6664 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6665 {
6666   unsigned int n;
6667 
6668   for (n = 0; n < n_region; n++)
6669     {
6670      if (bb == bbs[n])
6671        return true;
6672     }
6673   return false;
6674 }
6675 
6676 /* Duplicates REGION consisting of N_REGION blocks.  The new blocks
6677    are stored to REGION_COPY in the same order in which they appear
6678    in REGION, if REGION_COPY is not NULL.  ENTRY is the entry to
6679    the region, EXIT an exit from it.  The condition guarding EXIT
6680    is moved to ENTRY.  Returns true if duplication succeeds, false
6681    otherwise.
6682 
6683    For example,
6684 
6685    some_code;
6686    if (cond)
6687      A;
6688    else
6689      B;
6690 
6691    is transformed to
6692 
6693    if (cond)
6694      {
6695        some_code;
6696        A;
6697      }
6698    else
6699      {
6700        some_code;
6701        B;
6702      }
6703 */
6704 
6705 bool
6706 gimple_duplicate_sese_tail (edge entry, edge exit,
6707 			  basic_block *region, unsigned n_region,
6708 			  basic_block *region_copy)
6709 {
6710   unsigned i;
6711   bool free_region_copy = false;
6712   class loop *loop = exit->dest->loop_father;
6713   class loop *orig_loop = entry->dest->loop_father;
6714   basic_block switch_bb, entry_bb, nentry_bb;
6715   profile_count total_count = profile_count::uninitialized (),
6716 		exit_count = profile_count::uninitialized ();
6717   edge exits[2], nexits[2], e;
6718   gimple_stmt_iterator gsi;
6719   gimple *cond_stmt;
6720   edge sorig, snew;
6721   basic_block exit_bb;
6722   gphi_iterator psi;
6723   gphi *phi;
6724   tree def;
6725   class loop *target, *aloop, *cloop;
6726 
6727   gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6728   exits[0] = exit;
6729   exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6730 
6731   if (!can_copy_bbs_p (region, n_region))
6732     return false;
6733 
6734   initialize_original_copy_tables ();
6735   set_loop_copy (orig_loop, loop);
6736 
6737   target = loop;
6738   for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6739     {
6740       if (bb_part_of_region_p (aloop->header, region, n_region))
6741 	{
6742 	  cloop = duplicate_loop (aloop, target);
6743 	  duplicate_subloops (aloop, cloop);
6744 	}
6745     }
6746 
6747   if (!region_copy)
6748     {
6749       region_copy = XNEWVEC (basic_block, n_region);
6750       free_region_copy = true;
6751     }
6752 
6753   gcc_assert (!need_ssa_update_p (cfun));
6754 
6755   /* Record blocks outside the region that are dominated by something
6756      inside.  */
6757   auto_vec<basic_block> doms = get_dominated_by_region (CDI_DOMINATORS, region,
6758 							n_region);
6759 
6760   total_count = exit->src->count;
6761   exit_count = exit->count ();
6762   /* Fix up corner cases, to avoid division by zero or creation of negative
6763      frequencies.  */
6764   if (exit_count > total_count)
6765     exit_count = total_count;
6766 
6767   copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6768 	    split_edge_bb_loc (exit), true);
6769   if (total_count.initialized_p () && exit_count.initialized_p ())
6770     {
6771       scale_bbs_frequencies_profile_count (region, n_region,
6772 				           total_count - exit_count,
6773 				           total_count);
6774       scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6775 				           total_count);
6776     }
6777 
6778   /* Create the switch block, and put the exit condition to it.  */
6779   entry_bb = entry->dest;
6780   nentry_bb = get_bb_copy (entry_bb);
6781   if (!last_stmt (entry->src)
6782       || !stmt_ends_bb_p (last_stmt (entry->src)))
6783     switch_bb = entry->src;
6784   else
6785     switch_bb = split_edge (entry);
6786   set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6787 
6788   gsi = gsi_last_bb (switch_bb);
6789   cond_stmt = last_stmt (exit->src);
6790   gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6791   cond_stmt = gimple_copy (cond_stmt);
6792 
6793   gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6794 
6795   sorig = single_succ_edge (switch_bb);
6796   sorig->flags = exits[1]->flags;
6797   sorig->probability = exits[1]->probability;
6798   snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6799   snew->probability = exits[0]->probability;
6800 
6801 
6802   /* Register the new edge from SWITCH_BB in loop exit lists.  */
6803   rescan_loop_exit (snew, true, false);
6804 
6805   /* Add the PHI node arguments.  */
6806   add_phi_args_after_copy (region_copy, n_region, snew);
6807 
6808   /* Get rid of now superfluous conditions and associated edges (and phi node
6809      arguments).  */
6810   exit_bb = exit->dest;
6811 
6812   e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6813   PENDING_STMT (e) = NULL;
6814 
6815   /* The latch of ORIG_LOOP was copied, and so was the backedge
6816      to the original header.  We redirect this backedge to EXIT_BB.  */
6817   for (i = 0; i < n_region; i++)
6818     if (get_bb_original (region_copy[i]) == orig_loop->latch)
6819       {
6820 	gcc_assert (single_succ_edge (region_copy[i]));
6821 	e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6822 	PENDING_STMT (e) = NULL;
6823 	for (psi = gsi_start_phis (exit_bb);
6824 	     !gsi_end_p (psi);
6825 	     gsi_next (&psi))
6826 	  {
6827 	    phi = psi.phi ();
6828 	    def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6829 	    add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6830 	  }
6831       }
6832   e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6833   PENDING_STMT (e) = NULL;
6834 
6835   /* Anything that is outside of the region, but was dominated by something
6836      inside needs to update dominance info.  */
6837   iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6838   /* Update the SSA web.  */
6839   update_ssa (TODO_update_ssa);
6840 
6841   if (free_region_copy)
6842     free (region_copy);
6843 
6844   free_original_copy_tables ();
6845   return true;
6846 }
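
/* Hedged usage sketch (a hypothetical caller, not code from this file):
   peeling the exit test of a single-exit LOOP whose body array comes from
   get_loop_body.  The names LOOP and BODY are assumptions made purely for
   illustration.

     edge entry = loop_preheader_edge (loop);
     edge exit = single_exit (loop);
     basic_block *body = get_loop_body (loop);
     bool ok = gimple_duplicate_sese_tail (entry, exit, body,
					   loop->num_nodes, NULL);
     free (body);

   On success the exit condition has been hoisted to ENTRY as shown in the
   transformation sketch above; the function updates the SSA web itself, so
   the caller need not schedule TODO_update_ssa.  */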
6847 
6848 /* Add all the blocks dominated by ENTRY to the array BBS_P.  Stop
6849    adding blocks when the dominator traversal reaches EXIT.  This
6850    function silently assumes that ENTRY strictly dominates EXIT.  */
6851 
6852 void
6853 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6854 			      vec<basic_block> *bbs_p)
6855 {
6856   basic_block son;
6857 
6858   for (son = first_dom_son (CDI_DOMINATORS, entry);
6859        son;
6860        son = next_dom_son (CDI_DOMINATORS, son))
6861     {
6862       bbs_p->safe_push (son);
6863       if (son != exit)
6864 	gather_blocks_in_sese_region (son, exit, bbs_p);
6865     }
6866 }
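
/* Hedged example mirroring the use in move_sese_region_to_fn below: ENTRY
   itself is not visited by the dominator walk, so the caller pushes it
   manually before collecting the rest of the region.

     vec<basic_block> bbs;
     bbs.create (0);
     bbs.safe_push (entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);  */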
6867 
6868 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6869    The duplicates are recorded in VARS_MAP.  */
6870 
6871 static void
6872 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6873 			   tree to_context)
6874 {
6875   tree t = *tp, new_t;
6876   struct function *f = DECL_STRUCT_FUNCTION (to_context);
6877 
6878   if (DECL_CONTEXT (t) == to_context)
6879     return;
6880 
6881   bool existed;
6882   tree &loc = vars_map->get_or_insert (t, &existed);
6883 
6884   if (!existed)
6885     {
6886       if (SSA_VAR_P (t))
6887 	{
6888 	  new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6889 	  add_local_decl (f, new_t);
6890 	}
6891       else
6892 	{
6893 	  gcc_assert (TREE_CODE (t) == CONST_DECL);
6894 	  new_t = copy_node (t);
6895 	}
6896       DECL_CONTEXT (new_t) = to_context;
6897 
6898       loc = new_t;
6899     }
6900   else
6901     new_t = loc;
6902 
6903   *tp = new_t;
6904 }
6905 
6906 
6907 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6908    VARS_MAP maps old ssa names and var_decls to the new ones.  */
6909 
6910 static tree
6911 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6912 		  tree to_context)
6913 {
6914   tree new_name;
6915 
6916   gcc_assert (!virtual_operand_p (name));
6917 
6918   tree *loc = vars_map->get (name);
6919 
6920   if (!loc)
6921     {
6922       tree decl = SSA_NAME_VAR (name);
6923       if (decl)
6924 	{
6925 	  gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6926 	  replace_by_duplicate_decl (&decl, vars_map, to_context);
6927 	  new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6928 				       decl, SSA_NAME_DEF_STMT (name));
6929 	}
6930       else
6931 	new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6932 				     name, SSA_NAME_DEF_STMT (name));
6933 
6934       /* Now that we've used the def stmt to define new_name, make sure it
6935 	 doesn't define name anymore.  */
6936       SSA_NAME_DEF_STMT (name) = NULL;
6937 
6938       vars_map->put (name, new_name);
6939     }
6940   else
6941     new_name = *loc;
6942 
6943   return new_name;
6944 }
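
/* Illustrative property (a hedged sketch; DEST_FN_DECL and NAME are made-up
   names): VARS_MAP memoizes the duplicates, so mapping the same SSA name
   twice yields the same new name in the destination function.

     hash_map<tree, tree> map;
     tree n1 = replace_ssa_name (name, &map, dest_fn_decl);
     tree n2 = replace_ssa_name (name, &map, dest_fn_decl);
     gcc_assert (n1 == n2);  */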
6945 
6946 struct move_stmt_d
6947 {
6948   tree orig_block;
6949   tree new_block;
6950   tree from_context;
6951   tree to_context;
6952   hash_map<tree, tree> *vars_map;
6953   htab_t new_label_map;
6954   hash_map<void *, void *> *eh_map;
6955   bool remap_decls_p;
6956 };
6957 
6958 /* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
6959    contained in *TP if it was previously ORIG_BLOCK, and change the
6960    DECL_CONTEXT of every local variable referenced in *TP.  */
6961 
6962 static tree
6963 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6964 {
6965   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6966   struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6967   tree t = *tp;
6968 
6969   if (EXPR_P (t))
6970     {
6971       tree block = TREE_BLOCK (t);
6972       if (block == NULL_TREE)
6973 	;
6974       else if (block == p->orig_block
6975 	       || p->orig_block == NULL_TREE)
6976 	{
6977 	  /* tree_node_can_be_shared says we can share invariant
6978 	     addresses but unshare_expr copies them anyways.  Make sure
6979 	     to unshare before adjusting the block in place - we do not
6980 	     always see a copy here.  */
6981 	  if (TREE_CODE (t) == ADDR_EXPR
6982 	      && is_gimple_min_invariant (t))
6983 	    *tp = t = unshare_expr (t);
6984 	  TREE_SET_BLOCK (t, p->new_block);
6985 	}
6986       else if (flag_checking)
6987 	{
6988 	  while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6989 	    block = BLOCK_SUPERCONTEXT (block);
6990 	  gcc_assert (block == p->orig_block);
6991 	}
6992     }
6993   else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6994     {
6995       if (TREE_CODE (t) == SSA_NAME)
6996 	*tp = replace_ssa_name (t, p->vars_map, p->to_context);
6997       else if (TREE_CODE (t) == PARM_DECL
6998 	       && gimple_in_ssa_p (cfun))
6999 	*tp = *(p->vars_map->get (t));
7000       else if (TREE_CODE (t) == LABEL_DECL)
7001 	{
7002 	  if (p->new_label_map)
7003 	    {
7004 	      struct tree_map in, *out;
7005 	      in.base.from = t;
7006 	      out = (struct tree_map *)
7007 		htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
7008 	      if (out)
7009 		*tp = t = out->to;
7010 	    }
7011 
7012 	  /* For FORCED_LABELs we can end up with references from other
7013 	     functions if some SESE regions are outlined.  It is UB to
7014 	     jump in between them, but they could be used just for printing
7015 	     addresses etc.  In that case, DECL_CONTEXT on the label should
7016 	     be the function containing the glabel stmt with that LABEL_DECL,
7017 	     rather than whichever function a reference to the label was
7018 	     last seen in.  */
7019 	  if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
7020 	    DECL_CONTEXT (t) = p->to_context;
7021 	}
7022       else if (p->remap_decls_p)
7023 	{
7024 	  /* Replace T with its duplicate.  T should no longer appear in the
7025 	     parent function, so this looks wasteful; however, it may appear
7026 	     in referenced_vars, and more importantly, as virtual operands of
7027 	     statements, and in alias lists of other variables.  It would be
7028 	     quite difficult to expunge it from all those places.  ??? It might
7029 	     suffice to do this for addressable variables.  */
7030 	  if ((VAR_P (t) && !is_global_var (t))
7031 	      || TREE_CODE (t) == CONST_DECL)
7032 	    replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
7033 	}
7034       *walk_subtrees = 0;
7035     }
7036   else if (TYPE_P (t))
7037     *walk_subtrees = 0;
7038 
7039   return NULL_TREE;
7040 }
7041 
7042 /* Helper for move_stmt_r.  Given an EH region number for the source
7043    function, map that to the duplicate EH region number in the dest.  */
7044 
7045 static int
7046 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
7047 {
7048   eh_region old_r, new_r;
7049 
7050   old_r = get_eh_region_from_number (old_nr);
7051   new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
7052 
7053   return new_r->index;
7054 }
7055 
7056 /* Similar, but operate on INTEGER_CSTs.  */
7057 
7058 static tree
7059 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
7060 {
7061   int old_nr, new_nr;
7062 
7063   old_nr = tree_to_shwi (old_t_nr);
7064   new_nr = move_stmt_eh_region_nr (old_nr, p);
7065 
7066   return build_int_cst (integer_type_node, new_nr);
7067 }
7068 
7069 /* Like move_stmt_op, but for gimple statements.
7070 
7071    Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
7072    contained in the current statement in *GSI_P and change the
7073    DECL_CONTEXT of every local variable referenced in the current
7074    statement.  */
7075 
7076 static tree
7077 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
7078 	     struct walk_stmt_info *wi)
7079 {
7080   struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
7081   gimple *stmt = gsi_stmt (*gsi_p);
7082   tree block = gimple_block (stmt);
7083 
7084   if (block == p->orig_block
7085       || (p->orig_block == NULL_TREE
7086 	  && block != NULL_TREE))
7087     gimple_set_block (stmt, p->new_block);
7088 
7089   switch (gimple_code (stmt))
7090     {
7091     case GIMPLE_CALL:
7092       /* Remap the region numbers for __builtin_eh_{pointer,filter}.  */
7093       {
7094 	tree r, fndecl = gimple_call_fndecl (stmt);
7095 	if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
7096 	  switch (DECL_FUNCTION_CODE (fndecl))
7097 	    {
7098 	    case BUILT_IN_EH_COPY_VALUES:
7099 	      r = gimple_call_arg (stmt, 1);
7100 	      r = move_stmt_eh_region_tree_nr (r, p);
7101 	      gimple_call_set_arg (stmt, 1, r);
7102 	      /* FALLTHRU */
7103 
7104 	    case BUILT_IN_EH_POINTER:
7105 	    case BUILT_IN_EH_FILTER:
7106 	      r = gimple_call_arg (stmt, 0);
7107 	      r = move_stmt_eh_region_tree_nr (r, p);
7108 	      gimple_call_set_arg (stmt, 0, r);
7109 	      break;
7110 
7111 	    default:
7112 	      break;
7113 	    }
7114       }
7115       break;
7116 
7117     case GIMPLE_RESX:
7118       {
7119 	gresx *resx_stmt = as_a <gresx *> (stmt);
7120 	int r = gimple_resx_region (resx_stmt);
7121 	r = move_stmt_eh_region_nr (r, p);
7122 	gimple_resx_set_region (resx_stmt, r);
7123       }
7124       break;
7125 
7126     case GIMPLE_EH_DISPATCH:
7127       {
7128 	geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
7129 	int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
7130 	r = move_stmt_eh_region_nr (r, p);
7131 	gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
7132       }
7133       break;
7134 
7135     case GIMPLE_OMP_RETURN:
7136     case GIMPLE_OMP_CONTINUE:
7137       break;
7138 
7139     case GIMPLE_LABEL:
7140       {
7141 	/* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
7142 	   so that such labels can be referenced from other regions.
7143 	   Make sure to update it when seeing a GIMPLE_LABEL though,
7144 	   since that statement is the owner of the label.  */
7145 	walk_gimple_op (stmt, move_stmt_op, wi);
7146 	*handled_ops_p = true;
7147 	tree label = gimple_label_label (as_a <glabel *> (stmt));
7148 	if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
7149 	  DECL_CONTEXT (label) = p->to_context;
7150       }
7151       break;
7152 
7153     default:
7154       if (is_gimple_omp (stmt))
7155 	{
7156 	  /* Do not remap variables inside OMP directives.  Variables
7157 	     referenced in clauses and directive header belong to the
7158 	     parent function and should not be moved into the child
7159 	     function.  */
7160 	  bool save_remap_decls_p = p->remap_decls_p;
7161 	  p->remap_decls_p = false;
7162 	  *handled_ops_p = true;
7163 
7164 	  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
7165 			       move_stmt_op, wi);
7166 
7167 	  p->remap_decls_p = save_remap_decls_p;
7168 	}
7169       break;
7170     }
7171 
7172   return NULL_TREE;
7173 }
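
/* Hedged sketch of how the two callbacks cooperate, mirroring the walk in
   move_block_to_fn below (D is a filled-in struct move_stmt_d and GSI an
   iterator over the statement being moved):

     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     wi.info = &d;
     walk_gimple_stmt (&gsi, move_stmt_r, move_stmt_op, &wi);  */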
7174 
7175 /* Move basic block BB from function CFUN to function DEST_FN.  The
7176    block is moved out of the original linked list and placed after
7177    block AFTER in the new list.  Also, the block is removed from the
7178    original array of blocks and placed in DEST_FN's array of blocks.
7179    If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
7180    updated to reflect the moved edges.
7181 
7182    The local variables are remapped to new instances; VARS_MAP is used
7183    to record the mapping.  */
7184 
7185 static void
7186 move_block_to_fn (struct function *dest_cfun, basic_block bb,
7187 		  basic_block after, bool update_edge_count_p,
7188 		  struct move_stmt_d *d)
7189 {
7190   struct control_flow_graph *cfg;
7191   edge_iterator ei;
7192   edge e;
7193   gimple_stmt_iterator si;
7194   unsigned old_len;
7195 
7196   /* Remove BB from dominance structures.  */
7197   delete_from_dominance_info (CDI_DOMINATORS, bb);
7198 
7199   /* Move BB from its current loop to the copy in the new function.  */
7200   if (current_loops)
7201     {
7202       class loop *new_loop = (class loop *)bb->loop_father->aux;
7203       if (new_loop)
7204 	bb->loop_father = new_loop;
7205     }
7206 
7207   /* Link BB to the new linked list.  */
7208   move_block_after (bb, after);
7209 
7210   /* Update the edge count in the corresponding flowgraphs.  */
7211   if (update_edge_count_p)
7212     FOR_EACH_EDGE (e, ei, bb->succs)
7213       {
7214 	cfun->cfg->x_n_edges--;
7215 	dest_cfun->cfg->x_n_edges++;
7216       }
7217 
7218   /* Remove BB from the original basic block array.  */
7219   (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
7220   cfun->cfg->x_n_basic_blocks--;
7221 
7222   /* Grow DEST_CFUN's basic block array if needed.  */
7223   cfg = dest_cfun->cfg;
7224   cfg->x_n_basic_blocks++;
7225   if (bb->index >= cfg->x_last_basic_block)
7226     cfg->x_last_basic_block = bb->index + 1;
7227 
7228   old_len = vec_safe_length (cfg->x_basic_block_info);
7229   if ((unsigned) cfg->x_last_basic_block >= old_len)
7230     vec_safe_grow_cleared (cfg->x_basic_block_info,
7231 			   cfg->x_last_basic_block + 1);
7232 
7233   (*cfg->x_basic_block_info)[bb->index] = bb;
7234 
7235   /* Remap the variables in phi nodes.  */
7236   for (gphi_iterator psi = gsi_start_phis (bb);
7237        !gsi_end_p (psi); )
7238     {
7239       gphi *phi = psi.phi ();
7240       use_operand_p use;
7241       tree op = PHI_RESULT (phi);
7242       ssa_op_iter oi;
7243       unsigned i;
7244 
7245       if (virtual_operand_p (op))
7246 	{
7247 	  /* Remove the phi nodes for virtual operands (alias analysis will be
7248 	     run for the new function, anyway).  But replace all uses that
7249 	     might be outside of the region we move.  */
7250 	  use_operand_p use_p;
7251 	  imm_use_iterator iter;
7252 	  gimple *use_stmt;
7253 	  FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
7254 	    FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7255 	      SET_USE (use_p, SSA_NAME_VAR (op));
7256 	  remove_phi_node (&psi, true);
7257 	  continue;
7258 	}
7259 
7260       SET_PHI_RESULT (phi,
7261 		      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7262       FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
7263 	{
7264 	  op = USE_FROM_PTR (use);
7265 	  if (TREE_CODE (op) == SSA_NAME)
7266 	    SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7267 	}
7268 
7269       for (i = 0; i < EDGE_COUNT (bb->preds); i++)
7270 	{
7271 	  location_t locus = gimple_phi_arg_location (phi, i);
7272 	  tree block = LOCATION_BLOCK (locus);
7273 
7274 	  if (locus == UNKNOWN_LOCATION)
7275 	    continue;
7276 	  if (d->orig_block == NULL_TREE || block == d->orig_block)
7277 	    {
7278 	      locus = set_block (locus, d->new_block);
7279 	      gimple_phi_arg_set_location (phi, i, locus);
7280 	    }
7281 	}
7282 
7283       gsi_next (&psi);
7284     }
7285 
7286   for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7287     {
7288       gimple *stmt = gsi_stmt (si);
7289       struct walk_stmt_info wi;
7290 
7291       memset (&wi, 0, sizeof (wi));
7292       wi.info = d;
7293       walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
7294 
7295       if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
7296 	{
7297 	  tree label = gimple_label_label (label_stmt);
7298 	  int uid = LABEL_DECL_UID (label);
7299 
7300 	  gcc_assert (uid > -1);
7301 
7302 	  old_len = vec_safe_length (cfg->x_label_to_block_map);
7303 	  if (old_len <= (unsigned) uid)
7304 	    vec_safe_grow_cleared (cfg->x_label_to_block_map, uid + 1);
7305 
7306 	  (*cfg->x_label_to_block_map)[uid] = bb;
7307 	  (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
7308 
7309 	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
7310 
7311 	  if (uid >= dest_cfun->cfg->last_label_uid)
7312 	    dest_cfun->cfg->last_label_uid = uid + 1;
7313 	}
7314 
7315       maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
7316       remove_stmt_from_eh_lp_fn (cfun, stmt);
7317 
7318       gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
7319       gimple_remove_stmt_histograms (cfun, stmt);
7320 
7321       /* We cannot leave any operands allocated from the operand caches of
7322 	 the current function.  */
7323       free_stmt_operands (cfun, stmt);
7324       push_cfun (dest_cfun);
7325       update_stmt (stmt);
7326       if (is_gimple_call (stmt))
7327 	notice_special_calls (as_a <gcall *> (stmt));
7328       pop_cfun ();
7329     }
7330 
7331   FOR_EACH_EDGE (e, ei, bb->succs)
7332     if (e->goto_locus != UNKNOWN_LOCATION)
7333       {
7334 	tree block = LOCATION_BLOCK (e->goto_locus);
7335 	if (d->orig_block == NULL_TREE
7336 	    || block == d->orig_block)
7337 	  e->goto_locus = set_block (e->goto_locus, d->new_block);
7338       }
7339 }
7340 
7341 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7342    the outermost EH region.  Use REGION as the incoming base EH region.
7343    If there is no single outermost region, return NULL and set *ALL to
7344    true.  */
7345 
7346 static eh_region
7347 find_outermost_region_in_block (struct function *src_cfun,
7348 				basic_block bb, eh_region region,
7349 				bool *all)
7350 {
7351   gimple_stmt_iterator si;
7352 
7353   for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7354     {
7355       gimple *stmt = gsi_stmt (si);
7356       eh_region stmt_region;
7357       int lp_nr;
7358 
7359       lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7360       stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7361       if (stmt_region)
7362 	{
7363 	  if (region == NULL)
7364 	    region = stmt_region;
7365 	  else if (stmt_region != region)
7366 	    {
7367 	      region = eh_region_outermost (src_cfun, stmt_region, region);
7368 	      if (region == NULL)
7369 		{
7370 		  *all = true;
7371 		  return NULL;
7372 		}
7373 	    }
7374 	}
7375     }
7376 
7377   return region;
7378 }
7379 
7380 static tree
7381 new_label_mapper (tree decl, void *data)
7382 {
7383   htab_t hash = (htab_t) data;
7384   struct tree_map *m;
7385   void **slot;
7386 
7387   gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7388 
7389   m = XNEW (struct tree_map);
7390   m->hash = DECL_UID (decl);
7391   m->base.from = decl;
7392   m->to = create_artificial_label (UNKNOWN_LOCATION);
7393   LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7394   if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7395     cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7396 
7397   slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7398   gcc_assert (*slot == NULL);
7399 
7400   *slot = m;
7401 
7402   return m->to;
7403 }
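
/* Hedged sketch of the intended use, mirroring move_sese_region_to_fn
   below: NEW_LABEL_MAPPER is handed to duplicate_eh_regions as the
   label-duplication callback, with the hash table as its DATA argument.

     htab_t map = htab_create (17, tree_map_hash, tree_map_eq, free);
     eh_map = duplicate_eh_regions (saved_cfun, region, 0,
				    new_label_mapper, map);  */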
7404 
7405 /* Tree walker to replace the decls used inside value expressions by
7406    duplicates.  */
7407 
7408 static tree
7409 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7410 {
7411   struct replace_decls_d *rd = (struct replace_decls_d *)data;
7412 
7413   switch (TREE_CODE (*tp))
7414     {
7415     case VAR_DECL:
7416     case PARM_DECL:
7417     case RESULT_DECL:
7418       replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7419       break;
7420     default:
7421       break;
7422     }
7423 
7424   if (IS_TYPE_OR_DECL_P (*tp))
7425     *walk_subtrees = false;
7426 
7427   return NULL;
7428 }
7429 
7430 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7431    subblocks.  */
7432 
7433 static void
7434 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7435 				  tree to_context)
7436 {
7437   tree *tp, t;
7438 
7439   for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7440     {
7441       t = *tp;
7442       if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7443 	continue;
7444       replace_by_duplicate_decl (&t, vars_map, to_context);
7445       if (t != *tp)
7446 	{
7447 	  if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7448 	    {
7449 	      tree x = DECL_VALUE_EXPR (*tp);
7450 	      struct replace_decls_d rd = { vars_map, to_context };
7451 	      x = unshare_expr (x);
7452 	      walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7453 	      SET_DECL_VALUE_EXPR (t, x);
7454 	      DECL_HAS_VALUE_EXPR_P (t) = 1;
7455 	    }
7456 	  DECL_CHAIN (t) = DECL_CHAIN (*tp);
7457 	  *tp = t;
7458 	}
7459     }
7460 
7461   for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7462     replace_block_vars_by_duplicates (block, vars_map, to_context);
7463 }
7464 
7465 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7466    from FN1 to FN2.  */
7467 
7468 static void
7469 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7470 			      class loop *loop)
7471 {
7472   /* Discard it from the old loop array.  */
7473   (*get_loops (fn1))[loop->num] = NULL;
7474 
7475   /* Place it in the new loop array, assigning it a new number.  */
7476   loop->num = number_of_loops (fn2);
7477   vec_safe_push (loops_for_fn (fn2)->larray, loop);
7478 
7479   /* Recurse to children.  */
7480   for (loop = loop->inner; loop; loop = loop->next)
7481     fixup_loop_arrays_after_move (fn1, fn2, loop);
7482 }
7483 
7484 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7485    delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks.  */
7486 
7487 DEBUG_FUNCTION void
7488 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7489 {
7490   basic_block bb;
7491   edge_iterator ei;
7492   edge e;
7493   bitmap bbs = BITMAP_ALLOC (NULL);
7494   int i;
7495 
7496   gcc_assert (entry != NULL);
7497   gcc_assert (entry != exit);
7498   gcc_assert (bbs_p != NULL);
7499 
7500   gcc_assert (bbs_p->length () > 0);
7501 
7502   FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7503     bitmap_set_bit (bbs, bb->index);
7504 
7505   gcc_assert (bitmap_bit_p (bbs, entry->index));
7506   gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7507 
7508   FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7509     {
7510       if (bb == entry)
7511 	{
7512 	  gcc_assert (single_pred_p (entry));
7513 	  gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7514 	}
7515       else
7516 	for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7517 	  {
7518 	    e = ei_edge (ei);
7519 	    gcc_assert (bitmap_bit_p (bbs, e->src->index));
7520 	  }
7521 
7522       if (bb == exit)
7523 	{
7524 	  gcc_assert (single_succ_p (exit));
7525 	  gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7526 	}
7527       else
7528 	for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7529 	  {
7530 	    e = ei_edge (ei);
7531 	    gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7532 	  }
7533     }
7534 
7535   BITMAP_FREE (bbs);
7536 }
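
/* Hedged example: since this is a pure consistency check, callers
   typically guard it behind flag_checking, as move_sese_region_to_fn
   does below.

     if (flag_checking)
       verify_sese (entry_bb, exit_bb, &bbs);  */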
7537 
7538 /* If FROM is an SSA_NAME, mark the version in bitmap DATA.  */
7539 
7540 bool
7541 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7542 {
7543   bitmap release_names = (bitmap)data;
7544 
7545   if (TREE_CODE (from) != SSA_NAME)
7546     return true;
7547 
7548   bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7549   return true;
7550 }
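
/* Hedged sketch of the intended traversal, mirroring the cleanup at the
   end of move_sese_region_to_fn below: collect the SSA versions recorded
   in VARS_MAP, then release them in ascending version order.

     bitmap release_names = BITMAP_ALLOC (NULL);
     vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
     bitmap_iterator bi;
     unsigned i;
     EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
       release_ssa_name (ssa_name (i));
     BITMAP_FREE (release_names);  */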
7551 
7552 /* Return the LOOP_DIST_ALIAS internal call if present in BB, else NULL.  */
7553 
7554 static gimple *
7555 find_loop_dist_alias (basic_block bb)
7556 {
7557   gimple *g = last_stmt (bb);
7558   if (g == NULL || gimple_code (g) != GIMPLE_COND)
7559     return NULL;
7560 
7561   gimple_stmt_iterator gsi = gsi_for_stmt (g);
7562   gsi_prev (&gsi);
7563   if (gsi_end_p (gsi))
7564     return NULL;
7565 
7566   g = gsi_stmt (gsi);
7567   if (gimple_call_internal_p (g, IFN_LOOP_DIST_ALIAS))
7568     return g;
7569   return NULL;
7570 }
7571 
7572 /* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
7573    to VALUE and update any immediate uses of its LHS.  */
7574 
7575 void
7576 fold_loop_internal_call (gimple *g, tree value)
7577 {
7578   tree lhs = gimple_call_lhs (g);
7579   use_operand_p use_p;
7580   imm_use_iterator iter;
7581   gimple *use_stmt;
7582   gimple_stmt_iterator gsi = gsi_for_stmt (g);
7583 
7584   replace_call_with_value (&gsi, value);
7585   FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
7586     {
7587       FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7588 	SET_USE (use_p, value);
7589       update_stmt (use_stmt);
7590     }
7591 }
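
/* Hedged example combining the two helpers above, as done later in
   move_sese_region_to_fn: if only one of the versioned loops survives in
   this function, resolve the LOOP_DIST_ALIAS guard to its second argument.

     gimple *g = find_loop_dist_alias (bb);
     if (g != NULL)
       fold_loop_internal_call (g, gimple_call_arg (g, 1));  */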
7592 
7593 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7594    EXIT_BB to function DEST_CFUN.  The whole region is replaced by a
7595    single basic block in the original CFG and the new basic block is
7596    returned.  DEST_CFUN must not have a CFG yet.
7597 
7598    Note that the region need not be a pure SESE region.  Blocks inside
7599    the region may contain calls to abort/exit.  The only restriction
7600    is that ENTRY_BB should be the only entry point and it must
7601    dominate EXIT_BB.
7602 
7603    Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7604    function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7605    to the new function.
7606 
7607    All local variables referenced in the region are assumed to be in
7608    the corresponding BLOCK_VARS and unexpanded variable lists
7609    associated with DEST_CFUN.
7610 
7611    TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7612    reimplement move_sese_region_to_fn by duplicating the region rather than
7613    moving it.  */
7614 
7615 basic_block
7616 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7617 		        basic_block exit_bb, tree orig_block)
7618 {
7619   vec<basic_block> bbs;
7620   basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7621   basic_block after, bb, *entry_pred, *exit_succ, abb;
7622   struct function *saved_cfun = cfun;
7623   int *entry_flag, *exit_flag;
7624   profile_probability *entry_prob, *exit_prob;
7625   unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7626   edge e;
7627   edge_iterator ei;
7628   htab_t new_label_map;
7629   hash_map<void *, void *> *eh_map;
7630   class loop *loop = entry_bb->loop_father;
7631   class loop *loop0 = get_loop (saved_cfun, 0);
7632   struct move_stmt_d d;
7633 
7634   /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7635      region.  */
7636   gcc_assert (entry_bb != exit_bb
7637               && (!exit_bb
7638 		  || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7639 
7640   /* Collect all the blocks in the region.  Manually add ENTRY_BB
7641      because it won't be added by dfs_enumerate_from.  */
7642   bbs.create (0);
7643   bbs.safe_push (entry_bb);
7644   gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7645 
7646   if (flag_checking)
7647     verify_sese (entry_bb, exit_bb, &bbs);
7648 
7649   /* The blocks that used to be dominated by something in BBS will now be
7650      dominated by the new block.  */
7651   auto_vec<basic_block> dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7652 							   bbs.address (),
7653 							   bbs.length ());
7654 
7655   /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG.  We need to remember
7656      the predecessor edges to ENTRY_BB and the successor edges to
7657      EXIT_BB so that we can re-attach them to the new basic block that
7658      will replace the region.  */
7659   num_entry_edges = EDGE_COUNT (entry_bb->preds);
7660   entry_pred = XNEWVEC (basic_block, num_entry_edges);
7661   entry_flag = XNEWVEC (int, num_entry_edges);
7662   entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7663   i = 0;
7664   for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7665     {
7666       entry_prob[i] = e->probability;
7667       entry_flag[i] = e->flags;
7668       entry_pred[i++] = e->src;
7669       remove_edge (e);
7670     }
7671 
7672   if (exit_bb)
7673     {
7674       num_exit_edges = EDGE_COUNT (exit_bb->succs);
7675       exit_succ = XNEWVEC (basic_block, num_exit_edges);
7676       exit_flag = XNEWVEC (int, num_exit_edges);
7677       exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7678       i = 0;
7679       for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7680 	{
7681 	  exit_prob[i] = e->probability;
7682 	  exit_flag[i] = e->flags;
7683 	  exit_succ[i++] = e->dest;
7684 	  remove_edge (e);
7685 	}
7686     }
7687   else
7688     {
7689       num_exit_edges = 0;
7690       exit_succ = NULL;
7691       exit_flag = NULL;
7692       exit_prob = NULL;
7693     }
7694 
7695   /* Switch context to the child function to initialize DEST_FN's CFG.  */
7696   gcc_assert (dest_cfun->cfg == NULL);
7697   push_cfun (dest_cfun);
7698 
7699   init_empty_tree_cfg ();
7700 
7701   /* Initialize EH information for the new function.  */
7702   eh_map = NULL;
7703   new_label_map = NULL;
7704   if (saved_cfun->eh)
7705     {
7706       eh_region region = NULL;
7707       bool all = false;
7708 
7709       FOR_EACH_VEC_ELT (bbs, i, bb)
7710 	{
7711 	  region = find_outermost_region_in_block (saved_cfun, bb, region, &all);
7712 	  if (all)
7713 	    break;
7714 	}
7715 
7716       init_eh_for_function ();
7717       if (region != NULL || all)
7718 	{
7719 	  new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7720 	  eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7721 					 new_label_mapper, new_label_map);
7722 	}
7723     }
7724 
7725   /* Initialize an empty loop tree.  */
7726   struct loops *loops = ggc_cleared_alloc<struct loops> ();
7727   init_loops_structure (dest_cfun, loops, 1);
7728   loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7729   set_loops_for_fn (dest_cfun, loops);
7730 
7731   vec<loop_p, va_gc> *larray = get_loops (saved_cfun)->copy ();
7732 
7733   /* Move the outlined loop tree part.  */
7734   num_nodes = bbs.length ();
7735   FOR_EACH_VEC_ELT (bbs, i, bb)
7736     {
7737       if (bb->loop_father->header == bb)
7738 	{
7739 	  class loop *this_loop = bb->loop_father;
7740 	  /* Avoid the need to remap SSA names used in nb_iterations.  */
7741 	  free_numbers_of_iterations_estimates (this_loop);
7742 	  class loop *outer = loop_outer (this_loop);
7743 	  if (outer == loop
7744 	      /* If the SESE region contains some bbs ending with
7745 		 a noreturn call, those are considered to belong
7746 		 to the outermost loop in saved_cfun, rather than
7747 		 the entry_bb's loop_father.  */
7748 	      || outer == loop0)
7749 	    {
7750 	      if (outer != loop)
7751 		num_nodes -= this_loop->num_nodes;
7752 	      flow_loop_tree_node_remove (bb->loop_father);
7753 	      flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7754 	      fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7755 	    }
7756 	}
7757       else if (bb->loop_father == loop0 && loop0 != loop)
7758 	num_nodes--;
7759 
7760       /* Remove loop exits from the outlined region.  */
7761       if (loops_for_fn (saved_cfun)->exits)
7762 	FOR_EACH_EDGE (e, ei, bb->succs)
7763 	  {
7764 	    struct loops *l = loops_for_fn (saved_cfun);
7765 	    loop_exit **slot
7766 	      = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7767 					       NO_INSERT);
7768 	    if (slot)
7769 	      l->exits->clear_slot (slot);
7770 	  }
7771     }
7772 
7773   /* Adjust the number of blocks in the tree root of the outlined part.  */
7774   get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7775 
7776   /* Setup a mapping to be used by move_block_to_fn.  */
7777   loop->aux = current_loops->tree_root;
7778   loop0->aux = current_loops->tree_root;
7779 
7780   /* Fix up orig_loop_num.  If the block referenced in it has been moved
7781      to dest_cfun, update orig_loop_num field, otherwise clear it.  */
7782   signed char *moved_orig_loop_num = NULL;
7783   for (auto dloop : loops_list (dest_cfun, 0))
7784     if (dloop->orig_loop_num)
7785       {
7786 	if (moved_orig_loop_num == NULL)
7787 	  moved_orig_loop_num
7788 	    = XCNEWVEC (signed char, vec_safe_length (larray));
7789 	if ((*larray)[dloop->orig_loop_num] != NULL
7790 	    && get_loop (saved_cfun, dloop->orig_loop_num) == NULL)
7791 	  {
7792 	    if (moved_orig_loop_num[dloop->orig_loop_num] >= 0
7793 		&& moved_orig_loop_num[dloop->orig_loop_num] < 2)
7794 	      moved_orig_loop_num[dloop->orig_loop_num]++;
7795 	    dloop->orig_loop_num = (*larray)[dloop->orig_loop_num]->num;
7796 	  }
7797 	else
7798 	  {
7799 	    moved_orig_loop_num[dloop->orig_loop_num] = -1;
7800 	    dloop->orig_loop_num = 0;
7801 	  }
7802       }
7803   pop_cfun ();
7804 
7805   if (moved_orig_loop_num)
7806     {
7807       FOR_EACH_VEC_ELT (bbs, i, bb)
7808 	{
7809 	  gimple *g = find_loop_dist_alias (bb);
7810 	  if (g == NULL)
7811 	    continue;
7812 
7813 	  int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7814 	  gcc_assert (orig_loop_num
7815 		      && (unsigned) orig_loop_num < vec_safe_length (larray));
7816 	  if (moved_orig_loop_num[orig_loop_num] == 2)
7817 	    {
7818 	      /* If we have moved both loops with this orig_loop_num into
7819 		 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
7820 		 too, update the first argument.  */
7821 	      gcc_assert ((*larray)[orig_loop_num] != NULL
7822 			  && (get_loop (saved_cfun, orig_loop_num) == NULL));
7823 	      tree t = build_int_cst (integer_type_node,
7824 				      (*larray)[orig_loop_num]->num);
7825 	      gimple_call_set_arg (g, 0, t);
7826 	      update_stmt (g);
7827 	      /* Make sure the following loop will not update it.  */
7828 	      moved_orig_loop_num[orig_loop_num] = 0;
7829 	    }
7830 	  else
7831 	    /* Otherwise at least one of the loops stayed in saved_cfun.
7832 	       Remove the LOOP_DIST_ALIAS call.  */
7833 	    fold_loop_internal_call (g, gimple_call_arg (g, 1));
7834 	}
7835       FOR_EACH_BB_FN (bb, saved_cfun)
7836 	{
7837 	  gimple *g = find_loop_dist_alias (bb);
7838 	  if (g == NULL)
7839 	    continue;
7840 	  int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7841 	  gcc_assert (orig_loop_num
7842 		      && (unsigned) orig_loop_num < vec_safe_length (larray));
7843 	  if (moved_orig_loop_num[orig_loop_num])
7844 	    /* LOOP_DIST_ALIAS call remained in saved_cfun, if at least one
7845 	       of the corresponding loops was moved, remove it.  */
7846 	    fold_loop_internal_call (g, gimple_call_arg (g, 1));
7847 	}
7848       XDELETEVEC (moved_orig_loop_num);
7849     }
7850   ggc_free (larray);
7851 
7852   /* Move blocks from BBS into DEST_CFUN.  */
7853   gcc_assert (bbs.length () >= 2);
7854   after = dest_cfun->cfg->x_entry_block_ptr;
7855   hash_map<tree, tree> vars_map;
7856 
7857   memset (&d, 0, sizeof (d));
7858   d.orig_block = orig_block;
7859   d.new_block = DECL_INITIAL (dest_cfun->decl);
7860   d.from_context = cfun->decl;
7861   d.to_context = dest_cfun->decl;
7862   d.vars_map = &vars_map;
7863   d.new_label_map = new_label_map;
7864   d.eh_map = eh_map;
7865   d.remap_decls_p = true;
7866 
7867   if (gimple_in_ssa_p (cfun))
7868     for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7869       {
7870 	tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7871 	set_ssa_default_def (dest_cfun, arg, narg);
7872 	vars_map.put (arg, narg);
7873       }
7874 
7875   FOR_EACH_VEC_ELT (bbs, i, bb)
7876     {
7877       /* No need to update edge counts on the last block.  It has
7878 	 already been updated earlier when we detached the region from
7879 	 the original CFG.  */
7880       move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7881       after = bb;
7882     }
7883 
7884   /* Adjust the maximum clique used.  */
7885   dest_cfun->last_clique = saved_cfun->last_clique;
7886 
7887   loop->aux = NULL;
7888   loop0->aux = NULL;
7889   /* Loop sizes are no longer correct, fix them up.  */
7890   loop->num_nodes -= num_nodes;
7891   for (class loop *outer = loop_outer (loop);
7892        outer; outer = loop_outer (outer))
7893     outer->num_nodes -= num_nodes;
7894   loop0->num_nodes -= bbs.length () - num_nodes;
7895 
7896   if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7897     {
7898       class loop *aloop;
7899       for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7900 	if (aloop != NULL)
7901 	  {
7902 	    if (aloop->simduid)
7903 	      {
7904 		replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7905 					   d.to_context);
7906 		dest_cfun->has_simduid_loops = true;
7907 	      }
7908 	    if (aloop->force_vectorize)
7909 	      dest_cfun->has_force_vectorize_loops = true;
7910 	  }
7911     }
7912 
7913   /* Rewire BLOCK_SUBBLOCKS of orig_block.  */
7914   if (orig_block)
7915     {
7916       tree block;
7917       gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7918 		  == NULL_TREE);
7919       BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7920 	= BLOCK_SUBBLOCKS (orig_block);
7921       for (block = BLOCK_SUBBLOCKS (orig_block);
7922 	   block; block = BLOCK_CHAIN (block))
7923 	BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7924       BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7925     }
7926 
7927   replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7928 				    &vars_map, dest_cfun->decl);
7929 
7930   if (new_label_map)
7931     htab_delete (new_label_map);
7932   if (eh_map)
7933     delete eh_map;
7934 
7935   /* We need to release ssa-names in a defined order, so first find them,
7936      and then iterate in ascending version order.  */
7937   bitmap release_names = BITMAP_ALLOC (NULL);
7938   vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7939   bitmap_iterator bi;
7940   EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7941     release_ssa_name (ssa_name (i));
7942   BITMAP_FREE (release_names);
7943 
7944   /* Rewire the entry and exit blocks.  The successor to the entry
7945      block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7946      the child function.  Similarly, the predecessor of DEST_FN's
7947      EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR.  We
7948      need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7949      various CFG manipulation function get to the right CFG.
7950 
7951      FIXME, this is silly.  The CFG ought to become a parameter to
7952      these helpers.  */
7953   push_cfun (dest_cfun);
7954   ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
7955   make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7956   if (exit_bb)
7957     {
7958       make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7959       EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
7960     }
7961   else
7962     EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
7963   pop_cfun ();
7964 
7965   /* Back in the original function, the SESE region has disappeared,
7966      create a new basic block in its place.  */
7967   bb = create_empty_bb (entry_pred[0]);
7968   if (current_loops)
7969     add_bb_to_loop (bb, loop);
7970   for (i = 0; i < num_entry_edges; i++)
7971     {
7972       e = make_edge (entry_pred[i], bb, entry_flag[i]);
7973       e->probability = entry_prob[i];
7974     }
7975 
7976   for (i = 0; i < num_exit_edges; i++)
7977     {
7978       e = make_edge (bb, exit_succ[i], exit_flag[i]);
7979       e->probability = exit_prob[i];
7980     }
7981 
7982   set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7983   FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7984     set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7985 
7986   if (exit_bb)
7987     {
7988       free (exit_prob);
7989       free (exit_flag);
7990       free (exit_succ);
7991     }
7992   free (entry_prob);
7993   free (entry_flag);
7994   free (entry_pred);
7995   bbs.release ();
7996 
7997   return bb;
7998 }
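
/* Hedged caller sketch (names are illustrative; the OMP expander outlines
   parallel regions through this interface): move the region into
   CHILD_CFUN and let the returned block take its place in the parent.

     basic_block new_bb
       = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);

   Afterwards CHILD_CFUN owns the moved blocks, loop-tree parts and EH
   regions, while the parent CFG contains only NEW_BB.  */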
7999 
8000 /* Dump default def DEF to file FILE using FLAGS and indentation
8001    SPC.  */
8002 
8003 static void
8004 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
8005 {
8006   for (int i = 0; i < spc; ++i)
8007     fprintf (file, " ");
8008   dump_ssaname_info_to_file (file, def, spc);
8009 
8010   print_generic_expr (file, TREE_TYPE (def), flags);
8011   fprintf (file, " ");
8012   print_generic_expr (file, def, flags);
8013   fprintf (file, " = ");
8014   print_generic_expr (file, SSA_NAME_VAR (def), flags);
8015   fprintf (file, ";\n");
8016 }
8017 
8018 /* Print no_sanitize attribute to FILE for a given attribute VALUE.  */
8019 
8020 static void
8021 print_no_sanitize_attr_value (FILE *file, tree value)
8022 {
8023   unsigned int flags = tree_to_uhwi (value);
8024   bool first = true;
8025   for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
8026     {
8027       if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
8028 	{
8029 	  if (!first)
8030 	    fprintf (file, " | ");
8031 	  fprintf (file, "%s", sanitizer_opts[i].name);
8032 	  first = false;
8033 	}
8034     }
8035 }
8036 
8037 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
8038    */
8039 
8040 void
8041 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
8042 {
8043   tree arg, var, old_current_fndecl = current_function_decl;
8044   struct function *dsf;
8045   bool ignore_topmost_bind = false, any_var = false;
8046   basic_block bb;
8047   tree chain;
8048   bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
8049 		  && decl_is_tm_clone (fndecl));
8050   struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
8051 
8052   tree fntype = TREE_TYPE (fndecl);
8053   tree attrs[] = { DECL_ATTRIBUTES (fndecl), TYPE_ATTRIBUTES (fntype) };
8054 
8055   for (int i = 0; i != 2; ++i)
8056     {
8057       if (!attrs[i])
8058 	continue;
8059 
8060       fprintf (file, "__attribute__((");
8061 
8062       bool first = true;
8063       tree chain;
8064       for (chain = attrs[i]; chain; first = false, chain = TREE_CHAIN (chain))
8065 	{
8066 	  if (!first)
8067 	    fprintf (file, ", ");
8068 
8069 	  tree name = get_attribute_name (chain);
8070 	  print_generic_expr (file, name, dump_flags);
8071 	  if (TREE_VALUE (chain) != NULL_TREE)
8072 	    {
8073 	      fprintf (file, " (");
8074 
8075 	      if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
8076 		print_no_sanitize_attr_value (file, TREE_VALUE (chain));
8077 	      else
8078 		print_generic_expr (file, TREE_VALUE (chain), dump_flags);
8079 	      fprintf (file, ")");
8080 	    }
8081 	}
8082 
8083       fprintf (file, "))\n");
8084     }
8085 
8086   current_function_decl = fndecl;
8087   if (flags & TDF_GIMPLE)
8088     {
8089       static bool hotness_bb_param_printed = false;
8090       if (profile_info != NULL
8091 	  && !hotness_bb_param_printed)
8092 	{
8093 	  hotness_bb_param_printed = true;
8094 	  fprintf (file,
8095 		   "/* --param=gimple-fe-computed-hot-bb-threshold=%" PRId64
8096 		   " */\n", get_hot_bb_threshold ());
8097 	}
8098 
8099       print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
8100 			  dump_flags | TDF_SLIM);
8101       fprintf (file, " __GIMPLE (%s",
8102 	       (fun->curr_properties & PROP_ssa) ? "ssa"
8103 	       : (fun->curr_properties & PROP_cfg) ? "cfg"
8104 	       : "");
8105 
8106       if (fun && fun->cfg)
8107 	{
8108 	  basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (fun);
8109 	  if (bb->count.initialized_p ())
8110 	    fprintf (file, ",%s(%" PRIu64 ")",
8111 		     profile_quality_as_string (bb->count.quality ()),
8112 		     bb->count.value ());
8113 	  if (dump_flags & TDF_UID)
8114 	    fprintf (file, ")\n%sD_%u (", function_name (fun),
8115 		     DECL_UID (fndecl));
8116 	  else
8117 	    fprintf (file, ")\n%s (", function_name (fun));
8118 	}
8119     }
8120   else
8121     {
8122       print_generic_expr (file, TREE_TYPE (fntype), dump_flags);
8123       if (dump_flags & TDF_UID)
8124 	fprintf (file, " %sD.%u %s(", function_name (fun), DECL_UID (fndecl),
8125 		 tmclone ? "[tm-clone] " : "");
8126       else
8127 	fprintf (file, " %s %s(", function_name (fun),
8128 		 tmclone ? "[tm-clone] " : "");
8129     }
8130 
8131   arg = DECL_ARGUMENTS (fndecl);
8132   while (arg)
8133     {
8134       print_generic_expr (file, TREE_TYPE (arg), dump_flags);
8135       fprintf (file, " ");
8136       print_generic_expr (file, arg, dump_flags);
8137       if (DECL_CHAIN (arg))
8138 	fprintf (file, ", ");
8139       arg = DECL_CHAIN (arg);
8140     }
8141   fprintf (file, ")\n");
8142 
8143   dsf = DECL_STRUCT_FUNCTION (fndecl);
8144   if (dsf && (flags & TDF_EH))
8145     dump_eh_tree (file, dsf);
8146 
8147   if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
8148     {
8149       dump_node (fndecl, TDF_SLIM | flags, file);
8150       current_function_decl = old_current_fndecl;
8151       return;
8152     }
8153 
8154   /* When GIMPLE is lowered, the variables are no longer available in
8155      BIND_EXPRs, so display them separately.  */
8156   if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
8157     {
8158       unsigned ix;
8159       ignore_topmost_bind = true;
8160 
8161       fprintf (file, "{\n");
8162       if (gimple_in_ssa_p (fun)
8163 	  && (flags & TDF_ALIAS))
8164 	{
8165 	  for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
8166 	       arg = DECL_CHAIN (arg))
8167 	    {
8168 	      tree def = ssa_default_def (fun, arg);
8169 	      if (def)
8170 		dump_default_def (file, def, 2, flags);
8171 	    }
8172 
8173 	  tree res = DECL_RESULT (fun->decl);
8174 	  if (res != NULL_TREE
8175 	      && DECL_BY_REFERENCE (res))
8176 	    {
8177 	      tree def = ssa_default_def (fun, res);
8178 	      if (def)
8179 		dump_default_def (file, def, 2, flags);
8180 	    }
8181 
8182 	  tree static_chain = fun->static_chain_decl;
8183 	  if (static_chain != NULL_TREE)
8184 	    {
8185 	      tree def = ssa_default_def (fun, static_chain);
8186 	      if (def)
8187 		dump_default_def (file, def, 2, flags);
8188 	    }
8189 	}
8190 
8191       if (!vec_safe_is_empty (fun->local_decls))
8192 	FOR_EACH_LOCAL_DECL (fun, ix, var)
8193 	  {
8194 	    print_generic_decl (file, var, flags);
8195 	    fprintf (file, "\n");
8196 
8197 	    any_var = true;
8198 	  }
8199 
8200       tree name;
8201 
8202       if (gimple_in_ssa_p (fun))
8203 	FOR_EACH_SSA_NAME (ix, name, fun)
8204 	  {
8205 	    if (!SSA_NAME_VAR (name)
8206 		/* SSA names whose decls lack a name still get
8207 		   dumped as _N; list those explicitly as well even
8208 		   though we've dumped the decl declaration as D.xxx
8209 		   above.  */
8210 		|| !SSA_NAME_IDENTIFIER (name))
8211 	      {
8212 		fprintf (file, "  ");
8213 		print_generic_expr (file, TREE_TYPE (name), flags);
8214 		fprintf (file, " ");
8215 		print_generic_expr (file, name, flags);
8216 		fprintf (file, ";\n");
8217 
8218 		any_var = true;
8219 	      }
8220 	  }
8221     }
8222 
8223   if (fun && fun->decl == fndecl
8224       && fun->cfg
8225       && basic_block_info_for_fn (fun))
8226     {
8227       /* If the CFG has been built, emit a CFG-based dump.  */
8228       if (!ignore_topmost_bind)
8229 	fprintf (file, "{\n");
8230 
8231       if (any_var && n_basic_blocks_for_fn (fun))
8232 	fprintf (file, "\n");
8233 
8234       FOR_EACH_BB_FN (bb, fun)
8235 	dump_bb (file, bb, 2, flags);
8236 
8237       fprintf (file, "}\n");
8238     }
8239   else if (fun && (fun->curr_properties & PROP_gimple_any))
8240     {
8241       /* The function is now in GIMPLE form but the CFG has not been
8242 	 built yet.  Emit the single sequence of GIMPLE statements
8243 	 that make up its body.  */
8244       gimple_seq body = gimple_body (fndecl);
8245 
8246       if (gimple_seq_first_stmt (body)
8247 	  && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
8248 	  && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
8249 	print_gimple_seq (file, body, 0, flags);
8250       else
8251 	{
8252 	  if (!ignore_topmost_bind)
8253 	    fprintf (file, "{\n");
8254 
8255 	  if (any_var)
8256 	    fprintf (file, "\n");
8257 
8258 	  print_gimple_seq (file, body, 2, flags);
8259 	  fprintf (file, "}\n");
8260 	}
8261     }
8262   else
8263     {
8264       int indent;
8265 
8266       /* Make a tree based dump.  */
8267       chain = DECL_SAVED_TREE (fndecl);
8268       if (chain && TREE_CODE (chain) == BIND_EXPR)
8269 	{
8270 	  if (ignore_topmost_bind)
8271 	    {
8272 	      chain = BIND_EXPR_BODY (chain);
8273 	      indent = 2;
8274 	    }
8275 	  else
8276 	    indent = 0;
8277 	}
8278       else
8279 	{
8280 	  if (!ignore_topmost_bind)
8281 	    {
8282 	      fprintf (file, "{\n");
8283 	      /* No topmost bind, pretend it's ignored for later.  */
8284 	      ignore_topmost_bind = true;
8285 	    }
8286 	  indent = 2;
8287 	}
8288 
8289       if (any_var)
8290 	fprintf (file, "\n");
8291 
8292       print_generic_stmt_indented (file, chain, flags, indent);
8293       if (ignore_topmost_bind)
8294 	fprintf (file, "}\n");
8295     }
8296 
8297   if (flags & TDF_ENUMERATE_LOCALS)
8298     dump_enumerated_decls (file, flags);
8299   fprintf (file, "\n\n");
8300 
8301   current_function_decl = old_current_fndecl;
8302 }
8303 
8304 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h).  */
8305 
8306 DEBUG_FUNCTION void
8307 debug_function (tree fn, dump_flags_t flags)
8308 {
8309   dump_function_to_file (fn, stderr, flags);
8310 }
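
/* Hedged example: from a debugger session this is the convenient entry
   point for inspecting a function body, e.g.

     (gdb) call debug_function (cfun->decl, TDF_DETAILS)

   where TDF_DETAILS is one of the TDF_* flags from dumpfile.h.  */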
8311 
8312 
8313 /* Print on FILE the indexes for the predecessors of basic_block BB.  */
8314 
8315 static void
8316 print_pred_bbs (FILE *file, basic_block bb)
8317 {
8318   edge e;
8319   edge_iterator ei;
8320 
8321   FOR_EACH_EDGE (e, ei, bb->preds)
8322     fprintf (file, "bb_%d ", e->src->index);
8323 }
8324 
8325 
8326 /* Print on FILE the indexes for the successors of basic_block BB.  */
8327 
8328 static void
8329 print_succ_bbs (FILE *file, basic_block bb)
8330 {
8331   edge e;
8332   edge_iterator ei;
8333 
8334   FOR_EACH_EDGE (e, ei, bb->succs)
8335     fprintf (file, "bb_%d ", e->dest->index);
8336 }
8337 
8338 /* Print to FILE the basic block BB following the VERBOSITY level.  */
8339 
8340 void
8341 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
8342 {
8343   char *s_indent = (char *) alloca ((size_t) indent + 1);
8344   memset ((void *) s_indent, ' ', (size_t) indent);
8345   s_indent[indent] = '\0';
8346 
8347   /* Print basic_block's header.  */
8348   if (verbosity >= 2)
8349     {
8350       fprintf (file, "%s  bb_%d (preds = {", s_indent, bb->index);
8351       print_pred_bbs (file, bb);
8352       fprintf (file, "}, succs = {");
8353       print_succ_bbs (file, bb);
8354       fprintf (file, "})\n");
8355     }
8356 
8357   /* Print basic_block's body.  */
8358   if (verbosity >= 3)
8359     {
8360       fprintf (file, "%s  {\n", s_indent);
8361       dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
8362       fprintf (file, "%s  }\n", s_indent);
8363     }
8364 }
8365 
8366 static void print_loop_and_siblings (FILE *, class loop *, int, int);
8367 
8368 /* Pretty print LOOP on FILE, indented INDENT spaces.  Following
8369    VERBOSITY level this outputs the contents of the loop, or just its
8370    structure.  */
8371 
8372 static void
8373 print_loop (FILE *file, class loop *loop, int indent, int verbosity)
8374 {
8375   char *s_indent;
8376   basic_block bb;
8377 
8378   if (loop == NULL)
8379     return;
8380 
8381   s_indent = (char *) alloca ((size_t) indent + 1);
8382   memset ((void *) s_indent, ' ', (size_t) indent);
8383   s_indent[indent] = '\0';
8384 
8385   /* Print loop's header.  */
8386   fprintf (file, "%sloop_%d (", s_indent, loop->num);
8387   if (loop->header)
8388     fprintf (file, "header = %d", loop->header->index);
8389   else
8390     {
8391       fprintf (file, "deleted)\n");
8392       return;
8393     }
8394   if (loop->latch)
8395     fprintf (file, ", latch = %d", loop->latch->index);
8396   else
8397     fprintf (file, ", multiple latches");
8398   fprintf (file, ", niter = ");
8399   print_generic_expr (file, loop->nb_iterations);
8400 
8401   if (loop->any_upper_bound)
8402     {
8403       fprintf (file, ", upper_bound = ");
8404       print_decu (loop->nb_iterations_upper_bound, file);
8405     }
8406   if (loop->any_likely_upper_bound)
8407     {
8408       fprintf (file, ", likely_upper_bound = ");
8409       print_decu (loop->nb_iterations_likely_upper_bound, file);
8410     }
8411 
8412   if (loop->any_estimate)
8413     {
8414       fprintf (file, ", estimate = ");
8415       print_decu (loop->nb_iterations_estimate, file);
8416     }
8417   if (loop->unroll)
8418     fprintf (file, ", unroll = %d", loop->unroll);
8419   fprintf (file, ")\n");
8420 
8421   /* Print loop's body.  */
8422   if (verbosity >= 1)
8423     {
8424       fprintf (file, "%s{\n", s_indent);
8425       FOR_EACH_BB_FN (bb, cfun)
8426 	if (bb->loop_father == loop)
8427 	  print_loops_bb (file, bb, indent, verbosity);
8428 
8429       print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
8430       fprintf (file, "%s}\n", s_indent);
8431     }
8432 }
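
/* As an illustration (not verbatim compiler output), a simple counted
   loop might be printed by print_loop at verbosity 0 roughly as:

     loop_1 (header = 3, latch = 4, niter = , upper_bound = 99, estimate = 99)

   Verbosity >= 1 also prints the loop body braces and inner loops,
   >= 2 the member basic blocks with their predecessors and successors,
   and >= 3 the full block bodies.  */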
8433 
8434 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8435    spaces.  Depending on the VERBOSITY level, this outputs the contents
8436    of the loop, or just its structure.  */
8437 
8438 static void
8439 print_loop_and_siblings (FILE *file, class loop *loop, int indent,
8440 			 int verbosity)
8441 {
8442   if (loop == NULL)
8443     return;
8444 
8445   print_loop (file, loop, indent, verbosity);
8446   print_loop_and_siblings (file, loop->next, indent, verbosity);
8447 }
8448 
8449 /* Follow a CFG edge from the entry point of the program, and on entry
8450    of a loop, pretty print the loop structure on FILE.  */
8451 
8452 void
8453 print_loops (FILE *file, int verbosity)
8454 {
8455   basic_block bb;
8456 
8457   bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8458   fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8459   if (bb && bb->loop_father)
8460     print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
8461 }
8462 
8463 /* Dump a loop.  */
8464 
8465 DEBUG_FUNCTION void
8466 debug (class loop &ref)
8467 {
8468   print_loop (stderr, &ref, 0, /*verbosity*/0);
8469 }
8470 
8471 DEBUG_FUNCTION void
8472 debug (class loop *ptr)
8473 {
8474   if (ptr)
8475     debug (*ptr);
8476   else
8477     fprintf (stderr, "<nil>\n");
8478 }
8479 
8480 /* Dump a loop verbosely.  */
8481 
8482 DEBUG_FUNCTION void
8483 debug_verbose (class loop &ref)
8484 {
8485   print_loop (stderr, &ref, 0, /*verbosity*/3);
8486 }
8487 
8488 DEBUG_FUNCTION void
8489 debug_verbose (class loop *ptr)
8490 {
8491   if (ptr)
8492     debug (*ptr);
8493   else
8494     fprintf (stderr, "<nil>\n");
8495 }
8496 
8497 
8498 /* Debugging loops structure at tree level, at some VERBOSITY level.  */
8499 
8500 DEBUG_FUNCTION void
8501 debug_loops (int verbosity)
8502 {
8503   print_loops (stderr, verbosity);
8504 }
8505 
8506 /* Print on stderr the code of LOOP, at some VERBOSITY level.  */
8507 
8508 DEBUG_FUNCTION void
8509 debug_loop (class loop *loop, int verbosity)
8510 {
8511   print_loop (stderr, loop, 0, verbosity);
8512 }
8513 
8514 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8515    level.  */
8516 
8517 DEBUG_FUNCTION void
8518 debug_loop_num (unsigned num, int verbosity)
8519 {
8520   debug_loop (get_loop (cfun, num), verbosity);
8521 }
8522 
8523 /* Return true if BB ends with a call, possibly followed by some
8524    instructions that must stay with the call.  Return false,
8525    otherwise.  */
8526 
8527 static bool
8528 gimple_block_ends_with_call_p (basic_block bb)
8529 {
8530   gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8531   return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8532 }
8533 
8534 
8535 /* Return true if BB ends with a conditional branch.  Return false,
8536    otherwise.  */
8537 
8538 static bool
8539 gimple_block_ends_with_condjump_p (const_basic_block bb)
8540 {
8541   gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8542   return (stmt && gimple_code (stmt) == GIMPLE_COND);
8543 }
8544 
8545 
8546 /* Return true if statement T may terminate execution of BB in ways not
8547    explicitly represented in the CFG.  */
8548 
8549 bool
8550 stmt_can_terminate_bb_p (gimple *t)
8551 {
8552   tree fndecl = NULL_TREE;
8553   int call_flags = 0;
8554 
8555   /* An EH exception not handled internally terminates execution of the
8556      whole function.  */
8557   if (stmt_can_throw_external (cfun, t))
8558     return true;
8559 
8560   /* NORETURN and LONGJMP calls already have an edge to exit.
8561      CONST and PURE calls do not need one.
8562      We don't currently check for CONST and PURE here, although
8563      it would be a good idea, because those attributes are
8564      figured out from the RTL in mark_constant_function, and
8565      the counter incrementation code from -fprofile-arcs
8566      leads to different results from -fbranch-probabilities.  */
8567   if (is_gimple_call (t))
8568     {
8569       fndecl = gimple_call_fndecl (t);
8570       call_flags = gimple_call_flags (t);
8571     }
8572 
8573   if (is_gimple_call (t)
8574       && fndecl
8575       && fndecl_built_in_p (fndecl)
8576       && (call_flags & ECF_NOTHROW)
8577       && !(call_flags & ECF_RETURNS_TWICE)
8578       /* fork() doesn't really return twice, but the effect of
8579 	 wrapping it in __gcov_fork() which calls __gcov_dump() and
8580 	 __gcov_reset() and clears the counters before forking has the same
8581 	 effect as returning twice.  Force a fake edge.  */
8582       && !fndecl_built_in_p (fndecl, BUILT_IN_FORK))
8583     return false;
8584 
8585   if (is_gimple_call (t))
8586     {
8587       edge_iterator ei;
8588       edge e;
8589       basic_block bb;
8590 
8591       if (call_flags & (ECF_PURE | ECF_CONST)
8592 	  && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8593 	return false;
8594 
8595       /* A function call may do longjmp, terminate the program or do other things.
8596 	 Special-case noreturn calls that have non-abnormal edges out, as in this
8597 	 case the fact is sufficiently represented by the lack of edges out of T.  */
8598       if (!(call_flags & ECF_NORETURN))
8599 	return true;
8600 
8601       bb = gimple_bb (t);
8602       FOR_EACH_EDGE (e, ei, bb->succs)
8603 	if ((e->flags & EDGE_FAKE) == 0)
8604 	  return true;
8605     }
8606 
8607   if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8608     if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8609       return true;
8610 
8611   return false;
8612 }
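
/* For example, a call to an unknown external function may terminate
   the block in ways not visible in the CFG (it could call exit () or
   longjmp ()), whereas a nothrow const builtin such as
   __builtin_clz () cannot.  */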
8613 
8614 
8615 /* Add fake edges to the function exit for any non-constant and
8616    non-noreturn calls (or noreturn calls with EH/abnormal edges),
8617    volatile inline assembly in the bitmap of blocks specified by BLOCKS
8618    or to the whole CFG if BLOCKS is zero.  Return the number of blocks
8619    that were split.
8620 
8621    The goal is to expose cases in which entering a basic block does
8622    not imply that all subsequent instructions must be executed.  */
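
/* For instance, after a call that may internally invoke exit (), the
   remainder of the block need not execute; the fake edges added below
   make this explicit for consumers such as the profiler's spanning
   tree computation.  */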
8623 
8624 static int
8625 gimple_flow_call_edges_add (sbitmap blocks)
8626 {
8627   int i;
8628   int blocks_split = 0;
8629   int last_bb = last_basic_block_for_fn (cfun);
8630   bool check_last_block = false;
8631 
8632   if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8633     return 0;
8634 
8635   if (! blocks)
8636     check_last_block = true;
8637   else
8638     check_last_block = bitmap_bit_p (blocks,
8639 				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8640 
8641   /* In the last basic block, before epilogue generation, there will be
8642      a fallthru edge to EXIT.  Special care is required if the last insn
8643      of the last basic block is a call because make_edge folds duplicate
8644      edges, which would result in the fallthru edge also being marked
8645      fake, which would result in the fallthru edge being removed by
8646      remove_fake_edges, which would result in an invalid CFG.
8647 
8648      Moreover, we can't elide the outgoing fake edge, since the block
8649      profiler needs to take this into account in order to solve the minimal
8650      spanning tree in the case that the call doesn't return.
8651 
8652      Handle this by adding a dummy instruction in a new last basic block.  */
8653   if (check_last_block)
8654     {
8655       basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8656       gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8657       gimple *t = NULL;
8658 
8659       if (!gsi_end_p (gsi))
8660 	t = gsi_stmt (gsi);
8661 
8662       if (t && stmt_can_terminate_bb_p (t))
8663 	{
8664 	  edge e;
8665 
8666 	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8667 	  if (e)
8668 	    {
8669 	      gsi_insert_on_edge (e, gimple_build_nop ());
8670 	      gsi_commit_edge_inserts ();
8671 	    }
8672 	}
8673     }
8674 
8675   /* Now add fake edges to the function exit for any non-constant
8676      calls since there is no way that we can determine if they will
8677      return or not...  */
8678   for (i = 0; i < last_bb; i++)
8679     {
8680       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8681       gimple_stmt_iterator gsi;
8682       gimple *stmt, *last_stmt;
8683 
8684       if (!bb)
8685 	continue;
8686 
8687       if (blocks && !bitmap_bit_p (blocks, i))
8688 	continue;
8689 
8690       gsi = gsi_last_nondebug_bb (bb);
8691       if (!gsi_end_p (gsi))
8692 	{
8693 	  last_stmt = gsi_stmt (gsi);
8694 	  do
8695 	    {
8696 	      stmt = gsi_stmt (gsi);
8697 	      if (stmt_can_terminate_bb_p (stmt))
8698 		{
8699 		  edge e;
8700 
8701 		  /* The handling above of the final block before the
8702 		     epilogue should be enough to verify that there is
8703 		     no edge to the exit block in CFG already.
8704 		     Calling make_edge in such case would cause us to
8705 		     mark that edge as fake and remove it later.  */
8706 		  if (flag_checking && stmt == last_stmt)
8707 		    {
8708 		      e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8709 		      gcc_assert (e == NULL);
8710 		    }
8711 
8712 		  /* Note that the following may create a new basic block
8713 		     and renumber the existing basic blocks.  */
8714 		  if (stmt != last_stmt)
8715 		    {
8716 		      e = split_block (bb, stmt);
8717 		      if (e)
8718 			blocks_split++;
8719 		    }
8720 		  e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8721 		  e->probability = profile_probability::guessed_never ();
8722 		}
8723 	      gsi_prev (&gsi);
8724 	    }
8725 	  while (!gsi_end_p (gsi));
8726 	}
8727     }
8728 
8729   if (blocks_split)
8730     checking_verify_flow_info ();
8731 
8732   return blocks_split;
8733 }
8734 
8735 /* Removes edge E and all the blocks dominated by it, and updates dominance
8736    information.  The IL in E->src needs to be updated separately.
8737    If dominance info is not available, only the edge E is removed.  */
8738 
8739 void
8740 remove_edge_and_dominated_blocks (edge e)
8741 {
8742   vec<basic_block> bbs_to_fix_dom = vNULL;
8743   edge f;
8744   edge_iterator ei;
8745   bool none_removed = false;
8746   unsigned i;
8747   basic_block bb, dbb;
8748   bitmap_iterator bi;
8749 
8750   /* If we are removing a path inside a non-root loop, that may change
8751      loop ownership of blocks or remove loops.  Mark loops for fixup.  */
8752   if (current_loops
8753       && loop_outer (e->src->loop_father) != NULL
8754       && e->src->loop_father == e->dest->loop_father)
8755     loops_state_set (LOOPS_NEED_FIXUP);
8756 
8757   if (!dom_info_available_p (CDI_DOMINATORS))
8758     {
8759       remove_edge (e);
8760       return;
8761     }
8762 
8763   /* No updating is needed for edges to exit.  */
8764   if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8765     {
8766       if (cfgcleanup_altered_bbs)
8767 	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8768       remove_edge (e);
8769       return;
8770     }
8771 
8772   /* First, we find the basic blocks to remove.  If E->dest has a predecessor
8773      that is not dominated by E->dest, then this set is empty.  Otherwise,
8774      all the basic blocks dominated by E->dest are removed.
8775 
8776      Also, to DF_IDOM we store the immediate dominators of the blocks in
8777      the dominance frontier of E (i.e., of the successors of the
8778      removed blocks, if there are any, and of E->dest otherwise).  */
8779   FOR_EACH_EDGE (f, ei, e->dest->preds)
8780     {
8781       if (f == e)
8782 	continue;
8783 
8784       if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8785 	{
8786 	  none_removed = true;
8787 	  break;
8788 	}
8789     }
8790 
8791   auto_bitmap df, df_idom;
8792   auto_vec<basic_block> bbs_to_remove;
8793   if (none_removed)
8794     bitmap_set_bit (df_idom,
8795 		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8796   else
8797     {
8798       bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8799       FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8800 	{
8801 	  FOR_EACH_EDGE (f, ei, bb->succs)
8802 	    {
8803 	      if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8804 		bitmap_set_bit (df, f->dest->index);
8805 	    }
8806 	}
8807       FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8808 	bitmap_clear_bit (df, bb->index);
8809 
8810       EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8811 	{
8812 	  bb = BASIC_BLOCK_FOR_FN (cfun, i);
8813 	  bitmap_set_bit (df_idom,
8814 			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8815 	}
8816     }
8817 
8818   if (cfgcleanup_altered_bbs)
8819     {
8820       /* Record the set of the altered basic blocks.  */
8821       bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8822       bitmap_ior_into (cfgcleanup_altered_bbs, df);
8823     }
8824 
8825   /* Remove E and the cancelled blocks.  */
8826   if (none_removed)
8827     remove_edge (e);
8828   else
8829     {
8830       /* Walk backwards so as to get a chance to substitute all
8831 	 released DEFs into debug stmts.  See
8832 	 eliminate_unnecessary_stmts() in tree-ssa-dce.cc for more
8833 	 details.  */
8834       for (i = bbs_to_remove.length (); i-- > 0; )
8835 	delete_basic_block (bbs_to_remove[i]);
8836     }
8837 
8838   /* Update the dominance information.  The immediate dominator may change only
8839      for blocks whose immediate dominator belongs to DF_IDOM:
8840 
8841      Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8842      removal.  Let Z be an arbitrary block such that idom(Z) = Y and
8843      Z dominates X after the removal.  Before removal, there exists a path P
8844      from Y to X that avoids Z.  Let F be the last edge on P that is
8845      removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
8846      dominates W, and because of P, Z does not dominate W), and W belongs to
8847      the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
8848   EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8849     {
8850       bb = BASIC_BLOCK_FOR_FN (cfun, i);
8851       for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8852 	   dbb;
8853 	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
8854 	bbs_to_fix_dom.safe_push (dbb);
8855     }
8856 
8857   iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8858 
8859   bbs_to_fix_dom.release ();
8860 }
8861 
8862 /* Purge dead EH edges from basic block BB.  */
8863 
8864 bool
8865 gimple_purge_dead_eh_edges (basic_block bb)
8866 {
8867   bool changed = false;
8868   edge e;
8869   edge_iterator ei;
8870   gimple *stmt = last_stmt (bb);
8871 
8872   if (stmt && stmt_can_throw_internal (cfun, stmt))
8873     return false;
8874 
8875   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8876     {
8877       if (e->flags & EDGE_EH)
8878 	{
8879 	  remove_edge_and_dominated_blocks (e);
8880 	  changed = true;
8881 	}
8882       else
8883 	ei_next (&ei);
8884     }
8885 
8886   return changed;
8887 }
8888 
8889 /* Purge dead EH edges from the basic blocks listed in BLOCKS.  */
8890 
8891 bool
8892 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8893 {
8894   bool changed = false;
8895   unsigned i;
8896   bitmap_iterator bi;
8897 
8898   EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8899     {
8900       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8901 
8902       /* Earlier gimple_purge_dead_eh_edges could have removed
8903 	 this basic block already.  */
8904       gcc_assert (bb || changed);
8905       if (bb != NULL)
8906 	changed |= gimple_purge_dead_eh_edges (bb);
8907     }
8908 
8909   return changed;
8910 }
8911 
8912 /* Purge dead abnormal call edges from basic block BB.  */
8913 
8914 bool
8915 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8916 {
8917   bool changed = false;
8918   edge e;
8919   edge_iterator ei;
8920   gimple *stmt = last_stmt (bb);
8921 
8922   if (stmt && stmt_can_make_abnormal_goto (stmt))
8923     return false;
8924 
8925   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8926     {
8927       if (e->flags & EDGE_ABNORMAL)
8928 	{
8929 	  if (e->flags & EDGE_FALLTHRU)
8930 	    e->flags &= ~EDGE_ABNORMAL;
8931 	  else
8932 	    remove_edge_and_dominated_blocks (e);
8933 	  changed = true;
8934 	}
8935       else
8936 	ei_next (&ei);
8937     }
8938 
8939   return changed;
8940 }
8941 
8942 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS.  */
8943 
8944 bool
8945 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8946 {
8947   bool changed = false;
8948   unsigned i;
8949   bitmap_iterator bi;
8950 
8951   EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8952     {
8953       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8954 
8955       /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8956 	 this basic block already.  */
8957       gcc_assert (bb || changed);
8958       if (bb != NULL)
8959 	changed |= gimple_purge_dead_abnormal_call_edges (bb);
8960     }
8961 
8962   return changed;
8963 }
8964 
8965 /* This function is called whenever a new edge is created or
8966    redirected.  */
8967 
8968 static void
8969 gimple_execute_on_growing_pred (edge e)
8970 {
8971   basic_block bb = e->dest;
8972 
8973   if (!gimple_seq_empty_p (phi_nodes (bb)))
8974     reserve_phi_args_for_new_edge (bb);
8975 }
8976 
8977 /* This function is called immediately before edge E is removed from
8978    the edge vector E->dest->preds.  */
8979 
8980 static void
8981 gimple_execute_on_shrinking_pred (edge e)
8982 {
8983   if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8984     remove_phi_args (e);
8985 }
8986 
8987 /*---------------------------------------------------------------------------
8988   Helper functions for Loop versioning
8989   ---------------------------------------------------------------------------*/
8990 
8991 /* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
8992    of 'first'.  Both of them are dominated by the 'new_head' basic block.
8993    When 'new_head' was created by splitting 'second's incoming edge, it
8994    received phi arguments on that edge from split_edge ().  Later, an
8995    additional edge 'e' was created to connect 'new_head' and 'first'.  This
8996    routine now adds on edge 'e' the phi args that the edge from 'new_head'
8997    to 'second' received as part of the edge splitting.  */
8998 
8999 static void
9000 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
9001 				  basic_block new_head, edge e)
9002 {
9003   gphi *phi1, *phi2;
9004   gphi_iterator psi1, psi2;
9005   tree def;
9006   edge e2 = find_edge (new_head, second);
9007 
9008   /* Because NEW_HEAD has been created by splitting SECOND's incoming
9009      edge, we should always have an edge from NEW_HEAD to SECOND.  */
9010   gcc_assert (e2 != NULL);
9011 
9012   /* Browse all 'second' basic block phi nodes and add phi args to
9013      edge 'e' for 'first' head. PHI args are always in correct order.  */
9014 
9015   for (psi2 = gsi_start_phis (second),
9016        psi1 = gsi_start_phis (first);
9017        !gsi_end_p (psi2) && !gsi_end_p (psi1);
9018        gsi_next (&psi2),  gsi_next (&psi1))
9019     {
9020       phi1 = psi1.phi ();
9021       phi2 = psi2.phi ();
9022       def = PHI_ARG_DEF (phi2, e2->dest_idx);
9023       add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
9024     }
9025 }
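
/* Illustrative shape of the CFG handled above:

          new_head
         e/      \e2
         v        v
       first    second

   'e2' carries the phi arguments placed by the original edge
   splitting; the routine above copies them onto 'e'.  */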
9026 
9027 
9028 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
9029    SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
9030    the destination of the ELSE part.  */
9031 
9032 static void
9033 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
9034 			       basic_block second_head ATTRIBUTE_UNUSED,
9035 			       basic_block cond_bb, void *cond_e)
9036 {
9037   gimple_stmt_iterator gsi;
9038   gimple *new_cond_expr;
9039   tree cond_expr = (tree) cond_e;
9040   edge e0;
9041 
9042   /* Build the new conditional expression.  */
9043   new_cond_expr = gimple_build_cond_from_tree (cond_expr,
9044 					       NULL_TREE, NULL_TREE);
9045 
9046   /* Add new cond in cond_bb.  */
9047   gsi = gsi_last_bb (cond_bb);
9048   gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
9049 
9050   /* Adjust edges appropriately to connect new head with first head
9051      as well as second head.  */
9052   e0 = single_succ_edge (cond_bb);
9053   e0->flags &= ~EDGE_FALLTHRU;
9054   e0->flags |= EDGE_FALSE_VALUE;
9055 }
9056 
9057 
9058 /* Do book-keeping of basic block BB for the profile consistency checker.
9059    Store the counts in RECORD.  */
9060 static void
9061 gimple_account_profile_record (basic_block bb,
9062 			       struct profile_record *record)
9063 {
9064   gimple_stmt_iterator i;
9065   for (i = gsi_start_nondebug_after_labels_bb (bb); !gsi_end_p (i);
9066        gsi_next_nondebug (&i))
9067     {
9068       record->size
9069 	+= estimate_num_insns (gsi_stmt (i), &eni_size_weights);
9070       if (profile_info)
9071 	{
9072 	  if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa ().initialized_p ()
9073 	      && ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa ().nonzero_p ()
9074 	      && bb->count.ipa ().initialized_p ())
9075 	    record->time
9076 	      += estimate_num_insns (gsi_stmt (i),
9077 				     &eni_time_weights)
9078 				     * bb->count.ipa ().to_gcov_type ();
9079 	}
9080       else if (bb->count.initialized_p ()
9081 	       && ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.initialized_p ())
9082 	record->time
9083 	  += estimate_num_insns
9084 		(gsi_stmt (i),
9085 		 &eni_time_weights)
9086 		 * bb->count.to_sreal_scale
9087 			(ENTRY_BLOCK_PTR_FOR_FN (cfun)->count).to_double ();
9088      else
9089       record->time
9090 	+= estimate_num_insns (gsi_stmt (i), &eni_time_weights);
9091     }
9092 }
9093 
9094 struct cfg_hooks gimple_cfg_hooks = {
9095   "gimple",
9096   gimple_verify_flow_info,
9097   gimple_dump_bb,		/* dump_bb  */
9098   gimple_dump_bb_for_graph,	/* dump_bb_for_graph  */
9099   create_bb,			/* create_basic_block  */
9100   gimple_redirect_edge_and_branch, /* redirect_edge_and_branch  */
9101   gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force  */
9102   gimple_can_remove_branch_p,	/* can_remove_branch_p  */
9103   remove_bb,			/* delete_basic_block  */
9104   gimple_split_block,		/* split_block  */
9105   gimple_move_block_after,	/* move_block_after  */
9106   gimple_can_merge_blocks_p,	/* can_merge_blocks_p  */
9107   gimple_merge_blocks,		/* merge_blocks  */
9108   gimple_predict_edge,		/* predict_edge  */
9109   gimple_predicted_by_p,	/* predicted_by_p  */
9110   gimple_can_duplicate_bb_p,	/* can_duplicate_block_p  */
9111   gimple_duplicate_bb,		/* duplicate_block  */
9112   gimple_split_edge,		/* split_edge  */
9113   gimple_make_forwarder_block,	/* make_forwarder_block  */
9114   NULL,				/* tidy_fallthru_edge  */
9115   NULL,				/* force_nonfallthru */
9116   gimple_block_ends_with_call_p,/* block_ends_with_call_p */
9117   gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
9118   gimple_flow_call_edges_add,   /* flow_call_edges_add */
9119   gimple_execute_on_growing_pred,	/* execute_on_growing_pred */
9120   gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
9121   gimple_duplicate_loop_body_to_header_edge, /* duplicate loop for trees */
9122   gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
9123   gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi  */
9124   extract_true_false_edges_from_block, /* extract_cond_bb_edges */
9125   flush_pending_stmts, 		/* flush_pending_stmts */
9126   gimple_empty_block_p,           /* block_empty_p */
9127   gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
9128   gimple_account_profile_record,
9129 };
9130 
9131 
9132 /* Split all critical edges.  Split some extra (not necessarily critical) edges
9133    if FOR_EDGE_INSERTION_P is true.  */
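
/* An edge is critical when its source block has multiple successors
   and its destination block has multiple predecessors.  No statement
   can be inserted on such an edge without first splitting it, since
   the inserted code would otherwise also execute along the other
   paths through the source or destination block.  */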
9134 
9135 unsigned int
9136 split_critical_edges (bool for_edge_insertion_p /* = false */)
9137 {
9138   basic_block bb;
9139   edge e;
9140   edge_iterator ei;
9141 
9142   /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
9143      expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
9144      mappings around the calls to split_edge.  */
9145   start_recording_case_labels ();
9146   FOR_ALL_BB_FN (bb, cfun)
9147     {
9148       FOR_EACH_EDGE (e, ei, bb->succs)
9149         {
9150 	  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
9151 	    split_edge (e);
9152 	  /* PRE inserts statements on edges and expects that
9153 	     since split_critical_edges was done beforehand, committing edge
9154 	     insertions will not split more edges.  In addition to critical
9155 	     edges we must split edges that have multiple successors and
9156 	     end by control flow statements, such as RESX.
9157 	     Go ahead and split them too.  This matches the logic in
9158 	     gimple_find_edge_insert_loc.  */
9159 	  else if (for_edge_insertion_p
9160 		   && (!single_pred_p (e->dest)
9161 		       || !gimple_seq_empty_p (phi_nodes (e->dest))
9162 		       || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
9163 		   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
9164 		   && !(e->flags & EDGE_ABNORMAL))
9165 	    {
9166 	      gimple_stmt_iterator gsi;
9167 
9168 	      gsi = gsi_last_bb (e->src);
9169 	      if (!gsi_end_p (gsi)
9170 		  && stmt_ends_bb_p (gsi_stmt (gsi))
9171 		  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
9172 		      && !gimple_call_builtin_p (gsi_stmt (gsi),
9173 						 BUILT_IN_RETURN)))
9174 		split_edge (e);
9175 	    }
9176 	}
9177     }
9178   end_recording_case_labels ();
9179   return 0;
9180 }
9181 
9182 namespace {
9183 
9184 const pass_data pass_data_split_crit_edges =
9185 {
9186   GIMPLE_PASS, /* type */
9187   "crited", /* name */
9188   OPTGROUP_NONE, /* optinfo_flags */
9189   TV_TREE_SPLIT_EDGES, /* tv_id */
9190   PROP_cfg, /* properties_required */
9191   PROP_no_crit_edges, /* properties_provided */
9192   0, /* properties_destroyed */
9193   0, /* todo_flags_start */
9194   0, /* todo_flags_finish */
9195 };
9196 
9197 class pass_split_crit_edges : public gimple_opt_pass
9198 {
9199 public:
9200   pass_split_crit_edges (gcc::context *ctxt)
9201     : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
9202   {}
9203 
9204   /* opt_pass methods: */
9205   virtual unsigned int execute (function *) { return split_critical_edges (); }
9206 
9207   opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
9208 }; // class pass_split_crit_edges
9209 
9210 } // anon namespace
9211 
9212 gimple_opt_pass *
9213 make_pass_split_crit_edges (gcc::context *ctxt)
9214 {
9215   return new pass_split_crit_edges (ctxt);
9216 }
9217 
9218 
9219 /* Insert COND expression, which must be a GIMPLE_COND, after STMT
9220    in basic block BB, splitting the block appropriately and creating
9221    a new conditionally executed basic block.
9222    Update the profile so the new bb is visited with probability PROB.
9223    Return the created basic block.  */
9224 basic_block
9225 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
9226 	        profile_probability prob)
9227 {
9228   edge fall = split_block (bb, stmt);
9229   gimple_stmt_iterator iter = gsi_last_bb (bb);
9230   basic_block new_bb;
9231 
9232   /* Insert cond statement.  */
9233   gcc_assert (gimple_code (cond) == GIMPLE_COND);
9234   if (gsi_end_p (iter))
9235     gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
9236   else
9237     gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
9238 
9239   /* Create conditionally executed block.  */
9240   new_bb = create_empty_bb (bb);
9241   edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
9242   e->probability = prob;
9243   new_bb->count = e->count ();
9244   make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
9245 
9246   /* Fix edge for split bb.  */
9247   fall->flags = EDGE_FALSE_VALUE;
9248   fall->probability -= e->probability;
9249 
9250   /* Update dominance info.  */
9251   if (dom_info_available_p (CDI_DOMINATORS))
9252     {
9253       set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
9254       set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
9255     }
9256 
9257   /* Update loop info.  */
9258   if (current_loops)
9259     add_bb_to_loop (new_bb, bb->loop_father);
9260 
9261   return new_bb;
9262 }
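
/* A hypothetical use of the above, guarding NEW_STMT so that it only
   executes when VAL is nonzero (all names here are illustrative):

     gimple *cond = gimple_build_cond (NE_EXPR, val, zero,
				       NULL_TREE, NULL_TREE);
     basic_block then_bb
       = insert_cond_bb (bb, stmt, cond, profile_probability::unlikely ());
     gimple_stmt_iterator gsi = gsi_start_bb (then_bb);
     gsi_insert_after (&gsi, new_stmt, GSI_NEW_STMT);  */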
9263 
9264 
9265 
9266 /* Given a basic block B which ends with a conditional and has
9267    precisely two successors, determine which of the edges is taken if
9268    the conditional is true and which is taken if the conditional is
9269    false.  Set TRUE_EDGE and FALSE_EDGE appropriately.  */
9270 
9271 void
9272 extract_true_false_edges_from_block (basic_block b,
9273 				     edge *true_edge,
9274 				     edge *false_edge)
9275 {
9276   edge e = EDGE_SUCC (b, 0);
9277 
9278   if (e->flags & EDGE_TRUE_VALUE)
9279     {
9280       *true_edge = e;
9281       *false_edge = EDGE_SUCC (b, 1);
9282     }
9283   else
9284     {
9285       *false_edge = e;
9286       *true_edge = EDGE_SUCC (b, 1);
9287     }
9288 }
9289 
9290 
9291 /* From a controlling predicate in the immediate dominator DOM of
9292    PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
9293    predicate evaluates to true and false and store them to
9294    *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
9295    they are non-NULL.  Returns true if the edges can be determined,
9296    else return false.  */
9297 
9298 bool
9299 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
9300 				     edge *true_controlled_edge,
9301 				     edge *false_controlled_edge)
9302 {
9303   basic_block bb = phiblock;
9304   edge true_edge, false_edge, tem;
9305   edge e0 = NULL, e1 = NULL;
9306 
9307   /* We have to verify that one edge into the PHI node is dominated
9308      by the true edge of the predicate block and the other edge
9309      dominated by the false edge.  This ensures that the PHI argument
9310      we are going to take is completely determined by the path we
9311      take from the predicate block.
9312      We can only use BB dominance checks below if the destination of
9313      the true/false edges are dominated by their edge, thus only
9314      have a single predecessor.  */
9315   extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
9316   tem = EDGE_PRED (bb, 0);
9317   if (tem == true_edge
9318       || (single_pred_p (true_edge->dest)
9319 	  && (tem->src == true_edge->dest
9320 	      || dominated_by_p (CDI_DOMINATORS,
9321 				 tem->src, true_edge->dest))))
9322     e0 = tem;
9323   else if (tem == false_edge
9324 	   || (single_pred_p (false_edge->dest)
9325 	       && (tem->src == false_edge->dest
9326 		   || dominated_by_p (CDI_DOMINATORS,
9327 				      tem->src, false_edge->dest))))
9328     e1 = tem;
9329   else
9330     return false;
9331   tem = EDGE_PRED (bb, 1);
9332   if (tem == true_edge
9333       || (single_pred_p (true_edge->dest)
9334 	  && (tem->src == true_edge->dest
9335 	      || dominated_by_p (CDI_DOMINATORS,
9336 				 tem->src, true_edge->dest))))
9337     e0 = tem;
9338   else if (tem == false_edge
9339 	   || (single_pred_p (false_edge->dest)
9340 	       && (tem->src == false_edge->dest
9341 		   || dominated_by_p (CDI_DOMINATORS,
9342 				      tem->src, false_edge->dest))))
9343     e1 = tem;
9344   else
9345     return false;
9346   if (!e0 || !e1)
9347     return false;
9348 
9349   if (true_controlled_edge)
9350     *true_controlled_edge = e0;
9351   if (false_controlled_edge)
9352     *false_controlled_edge = e1;
9353 
9354   return true;
9355 }
9356 
9357 /* Generate a range test LHS CODE RHS that determines whether INDEX is in the
9358    range [low, high].  Place associated stmts before the last stmt of BB.  */
9359 
9360 void
9361 generate_range_test (basic_block bb, tree index, tree low, tree high,
9362 		     tree *lhs, tree *rhs)
9363 {
9364   tree type = TREE_TYPE (index);
9365   tree utype = range_check_type (type);
9366 
9367   low = fold_convert (utype, low);
9368   high = fold_convert (utype, high);
9369 
9370   gimple_seq seq = NULL;
9371   index = gimple_convert (&seq, utype, index);
9372   *lhs = gimple_build (&seq, MINUS_EXPR, utype, index, low);
9373   *rhs = const_binop (MINUS_EXPR, utype, high, low);
9374 
9375   gimple_stmt_iterator gsi = gsi_last_bb (bb);
9376   gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
9377 }
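
/* The test built above uses the standard unsigned-wraparound trick:
   assuming LOW <= HIGH, the pair of comparisons LOW <= INDEX and
   INDEX <= HIGH is reduced to the single unsigned comparison

     (utype) (INDEX - LOW) <= (utype) (HIGH - LOW)

   with the caller comparing *LHS against *RHS.  */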
9378 
9379 /* Return the basic block that belongs to label numbered INDEX
9380    of a switch statement.  */
9381 
9382 basic_block
9383 gimple_switch_label_bb (function *ifun, gswitch *gs, unsigned index)
9384 {
9385   return label_to_block (ifun, CASE_LABEL (gimple_switch_label (gs, index)));
9386 }
9387 
9388 /* Return the default basic block of a switch statement.  */
9389 
9390 basic_block
9391 gimple_switch_default_bb (function *ifun, gswitch *gs)
9392 {
9393   return gimple_switch_label_bb (ifun, gs, 0);
9394 }
9395 
9396 /* Return the edge that belongs to label numbered INDEX
9397    of a switch statement.  */
9398 
9399 edge
9400 gimple_switch_edge (function *ifun, gswitch *gs, unsigned index)
9401 {
9402   return find_edge (gimple_bb (gs), gimple_switch_label_bb (ifun, gs, index));
9403 }
9404 
9405 /* Return the default edge of a switch statement.  */
9406 
9407 edge
9408 gimple_switch_default_edge (function *ifun, gswitch *gs)
9409 {
9410   return gimple_switch_edge (ifun, gs, 0);
9411 }
9412 
9413 /* Return true if the only executable statement in BB is a GIMPLE_COND.  */
9414 
9415 bool
9416 cond_only_block_p (basic_block bb)
9417 {
9418   /* BB must have no executable statements.  */
9419   gimple_stmt_iterator gsi = gsi_after_labels (bb);
9420   if (phi_nodes (bb))
9421     return false;
9422   while (!gsi_end_p (gsi))
9423     {
9424       gimple *stmt = gsi_stmt (gsi);
9425       if (is_gimple_debug (stmt))
9426 	;
9427       else if (gimple_code (stmt) == GIMPLE_NOP
9428 	       || gimple_code (stmt) == GIMPLE_PREDICT
9429 	       || gimple_code (stmt) == GIMPLE_COND)
9430 	;
9431       else
9432 	return false;
9433       gsi_next (&gsi);
9434     }
9435   return true;
9436 }
9437 
9438 
9439 /* Emit return warnings.  */
9440 
9441 namespace {
9442 
9443 const pass_data pass_data_warn_function_return =
9444 {
9445   GIMPLE_PASS, /* type */
9446   "*warn_function_return", /* name */
9447   OPTGROUP_NONE, /* optinfo_flags */
9448   TV_NONE, /* tv_id */
9449   PROP_cfg, /* properties_required */
9450   0, /* properties_provided */
9451   0, /* properties_destroyed */
9452   0, /* todo_flags_start */
9453   0, /* todo_flags_finish */
9454 };
9455 
9456 class pass_warn_function_return : public gimple_opt_pass
9457 {
9458 public:
9459   pass_warn_function_return (gcc::context *ctxt)
9460     : gimple_opt_pass (pass_data_warn_function_return, ctxt)
9461   {}
9462 
9463   /* opt_pass methods: */
9464   virtual unsigned int execute (function *);
9465 
9466 }; // class pass_warn_function_return
9467 
9468 unsigned int
9469 pass_warn_function_return::execute (function *fun)
9470 {
9471   location_t location;
9472   gimple *last;
9473   edge e;
9474   edge_iterator ei;
9475 
9476   if (!targetm.warn_func_return (fun->decl))
9477     return 0;
9478 
9479   /* If we have a path to EXIT, then we do return.  */
9480   if (TREE_THIS_VOLATILE (fun->decl)
9481       && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
9482     {
9483       location = UNKNOWN_LOCATION;
9484       for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
9485 	   (e = ei_safe_edge (ei)); )
9486 	{
9487 	  last = last_stmt (e->src);
9488 	  if ((gimple_code (last) == GIMPLE_RETURN
9489 	       || gimple_call_builtin_p (last, BUILT_IN_RETURN))
9490 	      && location == UNKNOWN_LOCATION
9491 	      && ((location = LOCATION_LOCUS (gimple_location (last)))
9492 		  != UNKNOWN_LOCATION)
9493 	      && !optimize)
9494 	    break;
9495 	  /* When optimizing, replace return stmts in noreturn functions
9496 	     with a __builtin_unreachable () call.  */
9497 	  if (optimize && gimple_code (last) == GIMPLE_RETURN)
9498 	    {
9499 	      tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9500 	      gimple *new_stmt = gimple_build_call (fndecl, 0);
9501 	      gimple_set_location (new_stmt, gimple_location (last));
9502 	      gimple_stmt_iterator gsi = gsi_for_stmt (last);
9503 	      gsi_replace (&gsi, new_stmt, true);
9504 	      remove_edge (e);
9505 	    }
9506 	  else
9507 	    ei_next (&ei);
9508 	}
9509       if (location == UNKNOWN_LOCATION)
9510 	location = cfun->function_end_locus;
9511 
9512 #ifdef notyet
9513       if (warn_missing_noreturn)
9514         warning_at (location, 0, "%<noreturn%> function does return");
9515 #endif
9516     }
9517 
9518   /* If we see "return;" in some basic block, then we do reach the end
9519      without returning a value.  */
9520   else if (warn_return_type > 0
9521 	   && !warning_suppressed_p (fun->decl, OPT_Wreturn_type)
9522 	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
9523     {
9524       FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
9525 	{
9526 	  gimple *last = last_stmt (e->src);
9527 	  greturn *return_stmt = dyn_cast <greturn *> (last);
9528 	  if (return_stmt
9529 	      && gimple_return_retval (return_stmt) == NULL
9530 	      && !warning_suppressed_p (last, OPT_Wreturn_type))
9531 	    {
9532 	      location = gimple_location (last);
9533 	      if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9534 		location = fun->function_end_locus;
9535 	      if (warning_at (location, OPT_Wreturn_type,
9536 			      "control reaches end of non-void function"))
9537 		suppress_warning (fun->decl, OPT_Wreturn_type);
9538 	      break;
9539 	    }
9540 	}
9541       /* The C++ FE turns fallthrough from the end of non-void function
9542 	 into __builtin_unreachable () call with BUILTINS_LOCATION.
9543 	 Recognize those too.  */
9544       basic_block bb;
9545       if (!warning_suppressed_p (fun->decl, OPT_Wreturn_type))
9546 	FOR_EACH_BB_FN (bb, fun)
9547 	  if (EDGE_COUNT (bb->succs) == 0)
9548 	    {
9549 	      gimple *last = last_stmt (bb);
9550 	      const enum built_in_function ubsan_missing_ret
9551 		= BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
9552 	      if (last
9553 		  && ((LOCATION_LOCUS (gimple_location (last))
9554 		       == BUILTINS_LOCATION
9555 		       && gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE))
9556 		      || gimple_call_builtin_p (last, ubsan_missing_ret)))
9557 		{
9558 		  gimple_stmt_iterator gsi = gsi_for_stmt (last);
9559 		  gsi_prev_nondebug (&gsi);
9560 		  gimple *prev = gsi_stmt (gsi);
9561 		  if (prev == NULL)
9562 		    location = UNKNOWN_LOCATION;
9563 		  else
9564 		    location = gimple_location (prev);
9565 		  if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9566 		    location = fun->function_end_locus;
9567 		  if (warning_at (location, OPT_Wreturn_type,
9568 				  "control reaches end of non-void function"))
9569 		    suppress_warning (fun->decl, OPT_Wreturn_type);
9570 		  break;
9571 		}
9572 	    }
9573     }
9574   return 0;
9575 }
9576 
9577 } // anon namespace
9578 
9579 gimple_opt_pass *
9580 make_pass_warn_function_return (gcc::context *ctxt)
9581 {
9582   return new pass_warn_function_return (ctxt);
9583 }
9584 
9585 /* Walk a gimplified function and warn for functions whose return value is
9586    ignored and attribute((warn_unused_result)) is set.  This is done before
9587    inlining, so we don't have to worry about that.  */
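
/* For example, given user code like (illustrative):

     __attribute__ ((warn_unused_result)) int f (void);
     void g (void) { f (); }

   the naked call to f () in g () is diagnosed by the walk below.  */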
9588 
9589 static void
9590 do_warn_unused_result (gimple_seq seq)
9591 {
9592   tree fdecl, ftype;
9593   gimple_stmt_iterator i;
9594 
9595   for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
9596     {
9597       gimple *g = gsi_stmt (i);
9598 
9599       switch (gimple_code (g))
9600 	{
9601 	case GIMPLE_BIND:
9602 	  do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
9603 	  break;
9604 	case GIMPLE_TRY:
9605 	  do_warn_unused_result (gimple_try_eval (g));
9606 	  do_warn_unused_result (gimple_try_cleanup (g));
9607 	  break;
9608 	case GIMPLE_CATCH:
9609 	  do_warn_unused_result (gimple_catch_handler (
9610 				   as_a <gcatch *> (g)));
9611 	  break;
9612 	case GIMPLE_EH_FILTER:
9613 	  do_warn_unused_result (gimple_eh_filter_failure (g));
9614 	  break;
9615 
9616 	case GIMPLE_CALL:
9617 	  if (gimple_call_lhs (g))
9618 	    break;
9619 	  if (gimple_call_internal_p (g))
9620 	    break;
9621 
9622 	  /* This is a naked call, as opposed to a GIMPLE_CALL with an
9623 	     LHS.  All calls whose value is ignored should be
9624 	     represented like this.  Look for the attribute.  */
9625 	  fdecl = gimple_call_fndecl (g);
9626 	  ftype = gimple_call_fntype (g);
9627 
9628 	  if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
9629 	    {
9630 	      location_t loc = gimple_location (g);
9631 
9632 	      if (fdecl)
9633 		warning_at (loc, OPT_Wunused_result,
9634 			    "ignoring return value of %qD "
9635 			    "declared with attribute %<warn_unused_result%>",
9636 			    fdecl);
9637 	      else
9638 		warning_at (loc, OPT_Wunused_result,
9639 			    "ignoring return value of function "
9640 			    "declared with attribute %<warn_unused_result%>");
9641 	    }
9642 	  break;
9643 
9644 	default:
9645 	  /* Not a container, not a call, or a call whose value is used.  */
9646 	  break;
9647 	}
9648     }
9649 }
9650 
9651 namespace {
9652 
9653 const pass_data pass_data_warn_unused_result =
9654 {
9655   GIMPLE_PASS, /* type */
9656   "*warn_unused_result", /* name */
9657   OPTGROUP_NONE, /* optinfo_flags */
9658   TV_NONE, /* tv_id */
9659   PROP_gimple_any, /* properties_required */
9660   0, /* properties_provided */
9661   0, /* properties_destroyed */
9662   0, /* todo_flags_start */
9663   0, /* todo_flags_finish */
9664 };
9665 
9666 class pass_warn_unused_result : public gimple_opt_pass
9667 {
9668 public:
9669   pass_warn_unused_result (gcc::context *ctxt)
9670     : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9671   {}
9672 
9673   /* opt_pass methods: */
9674   virtual bool gate (function *) { return flag_warn_unused_result; }
9675   virtual unsigned int execute (function *)
9676     {
9677       do_warn_unused_result (gimple_body (current_function_decl));
9678       return 0;
9679     }
9680 
9681 }; // class pass_warn_unused_result
9682 
9683 } // anon namespace
9684 
9685 gimple_opt_pass *
9686 make_pass_warn_unused_result (gcc::context *ctxt)
9687 {
9688   return new pass_warn_unused_result (ctxt);
9689 }
9690 
9691 /* Maybe remove stores to variables we marked write-only.
9692    Return true if a store was removed.  */
9693 static bool
9694 maybe_remove_writeonly_store (gimple_stmt_iterator &gsi, gimple *stmt,
9695 			      bitmap dce_ssa_names)
9696 {
9697   /* Keep the access when the store has a side effect, i.e. when the
9698      source is volatile.  */
9699   if (!gimple_store_p (stmt)
9700       || gimple_has_side_effects (stmt)
9701       || optimize_debug)
9702     return false;
9703 
9704   tree lhs = get_base_address (gimple_get_lhs (stmt));
9705 
9706   if (!VAR_P (lhs)
9707       || (!TREE_STATIC (lhs) && !DECL_EXTERNAL (lhs))
9708       || !varpool_node::get (lhs)->writeonly)
9709     return false;
9710 
9711   if (dump_file && (dump_flags & TDF_DETAILS))
9712     {
9713       fprintf (dump_file, "Removing statement, writes"
9714 	       " to write only var:\n");
9715       print_gimple_stmt (dump_file, stmt, 0,
9716 			 TDF_VOPS|TDF_MEMSYMS);
9717     }
9718 
9719   /* Mark ssa name defining to be checked for simple dce. */
9720   if (gimple_assign_single_p (stmt))
9721     {
9722       tree rhs = gimple_assign_rhs1 (stmt);
9723       if (TREE_CODE (rhs) == SSA_NAME
9724 	  && !SSA_NAME_IS_DEFAULT_DEF (rhs))
9725 	bitmap_set_bit (dce_ssa_names, SSA_NAME_VERSION (rhs));
9726     }
9727   unlink_stmt_vdef (stmt);
9728   gsi_remove (&gsi, true);
9729   release_defs (stmt);
9730   return true;
9731 }
9732 
9733 /* IPA passes, compilation of earlier functions or inlining
9734    might have changed some properties, such as marking functions
9735    nothrow, pure, const or noreturn.  Remove redundant edges and
9736    basic blocks, and create new ones if necessary.  */
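
/* For example, if inlining has proved a callee const, lingering
   abnormal call edges at its call sites can be purged; if a call is
   now known to be noreturn, the code following it is unreachable and
   the CFG must be cleaned up accordingly.  */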
9737 
9738 unsigned int
9739 execute_fixup_cfg (void)
9740 {
9741   basic_block bb;
9742   gimple_stmt_iterator gsi;
9743   int todo = 0;
9744   cgraph_node *node = cgraph_node::get (current_function_decl);
9745   /* Same scaling is also done by ipa_merge_profiles.  */
9746   profile_count num = node->count;
9747   profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
9748   bool scale = num.initialized_p () && !(num == den);
9749   auto_bitmap dce_ssa_names;
9750 
9751   if (scale)
9752     {
9753       profile_count::adjust_for_ipa_scaling (&num, &den);
9754       ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9755       EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9756         = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
9757     }
9758 
9759   FOR_EACH_BB_FN (bb, cfun)
9760     {
9761       if (scale)
9762         bb->count = bb->count.apply_scale (num, den);
9763       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9764 	{
9765 	  gimple *stmt = gsi_stmt (gsi);
9766 	  tree decl = is_gimple_call (stmt)
9767 		      ? gimple_call_fndecl (stmt)
9768 		      : NULL;
9769 	  if (decl)
9770 	    {
9771 	      int flags = gimple_call_flags (stmt);
9772 	      if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9773 		{
9774 		  if (gimple_purge_dead_abnormal_call_edges (bb))
9775 		    todo |= TODO_cleanup_cfg;
9776 
9777 		  if (gimple_in_ssa_p (cfun))
9778 		    {
9779 		      todo |= TODO_update_ssa | TODO_cleanup_cfg;
9780 		      update_stmt (stmt);
9781 		    }
9782 		}
9783 
9784 	      if (flags & ECF_NORETURN
9785 		  && fixup_noreturn_call (stmt))
9786 		todo |= TODO_cleanup_cfg;
9787 	     }
9788 
9789 	  /* Remove stores to variables we marked write-only.  */
9790 	  if (maybe_remove_writeonly_store (gsi, stmt, dce_ssa_names))
9791 	    {
9792 	      todo |= TODO_update_ssa | TODO_cleanup_cfg;
9793 	      continue;
9794 	    }
9795 
9796 	  /* For calls we can simply remove the LHS when its destination
9797 	     is known to be write-only.  */
9798 	  if (is_gimple_call (stmt)
9799 	      && gimple_get_lhs (stmt))
9800 	    {
9801 	      tree lhs = get_base_address (gimple_get_lhs (stmt));
9802 
9803 	      if (VAR_P (lhs)
9804 		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9805 		  && varpool_node::get (lhs)->writeonly)
9806 		{
9807 		  gimple_call_set_lhs (stmt, NULL);
9808 		  update_stmt (stmt);
9809 	          todo |= TODO_update_ssa | TODO_cleanup_cfg;
9810 		}
9811 	    }
9812 
9813 	  if (maybe_clean_eh_stmt (stmt)
9814 	      && gimple_purge_dead_eh_edges (bb))
9815 	    todo |= TODO_cleanup_cfg;
9816 	  gsi_next (&gsi);
9817 	}
9818 
9819       /* If we have a basic block with no successors that does not
9820 	 end with a control statement or a noreturn call end it with
9821 	 a call to __builtin_unreachable.  This situation can occur
9822 	 when inlining a noreturn call that does in fact return.  */
9823       if (EDGE_COUNT (bb->succs) == 0)
9824 	{
9825 	  gimple *stmt = last_stmt (bb);
9826 	  if (!stmt
9827 	      || (!is_ctrl_stmt (stmt)
9828 		  && (!is_gimple_call (stmt)
9829 		      || !gimple_call_noreturn_p (stmt))))
9830 	    {
9831 	      if (stmt && is_gimple_call (stmt))
9832 		gimple_call_set_ctrl_altering (stmt, false);
9833 	      tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9834 	      stmt = gimple_build_call (fndecl, 0);
9835 	      gimple_stmt_iterator gsi = gsi_last_bb (bb);
9836 	      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9837 	      if (!cfun->after_inlining)
9838 		{
9839 		  gcall *call_stmt = dyn_cast <gcall *> (stmt);
9840 		  node->create_edge (cgraph_node::get_create (fndecl),
9841 				     call_stmt, bb->count);
9842 		}
9843 	    }
9844 	}
9845     }
9846   if (scale)
9847     {
9848       update_max_bb_count ();
9849       compute_function_frequency ();
9850     }
9851 
9852   if (current_loops
9853       && (todo & TODO_cleanup_cfg))
9854     loops_state_set (LOOPS_NEED_FIXUP);
9855 
9856   simple_dce_from_worklist (dce_ssa_names);
9857 
9858   return todo;
9859 }
9860 
9861 namespace {
9862 
9863 const pass_data pass_data_fixup_cfg =
9864 {
9865   GIMPLE_PASS, /* type */
9866   "fixup_cfg", /* name */
9867   OPTGROUP_NONE, /* optinfo_flags */
9868   TV_NONE, /* tv_id */
9869   PROP_cfg, /* properties_required */
9870   0, /* properties_provided */
9871   0, /* properties_destroyed */
9872   0, /* todo_flags_start */
9873   0, /* todo_flags_finish */
9874 };
9875 
9876 class pass_fixup_cfg : public gimple_opt_pass
9877 {
9878 public:
9879   pass_fixup_cfg (gcc::context *ctxt)
9880     : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
9881   {}
9882 
9883   /* opt_pass methods: */
9884   opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
9885   virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
9886 
9887 }; // class pass_fixup_cfg
9888 
9889 } // anon namespace
9890 
9891 gimple_opt_pass *
9892 make_pass_fixup_cfg (gcc::context *ctxt)
9893 {
9894   return new pass_fixup_cfg (ctxt);
9895 }
9896 
9897 /* Garbage collection support for edge_def.  */
9898 
9899 extern void gt_ggc_mx (tree&);
9900 extern void gt_ggc_mx (gimple *&);
9901 extern void gt_ggc_mx (rtx&);
9902 extern void gt_ggc_mx (basic_block&);
9903 
9904 static void
9905 gt_ggc_mx (rtx_insn *& x)
9906 {
9907   if (x)
9908     gt_ggc_mx_rtx_def ((void *) x);
9909 }
9910 
9911 void
9912 gt_ggc_mx (edge_def *e)
9913 {
9914   tree block = LOCATION_BLOCK (e->goto_locus);
9915   gt_ggc_mx (e->src);
9916   gt_ggc_mx (e->dest);
9917   if (current_ir_type () == IR_GIMPLE)
9918     gt_ggc_mx (e->insns.g);
9919   else
9920     gt_ggc_mx (e->insns.r);
9921   gt_ggc_mx (block);
9922 }
9923 
9924 /* PCH support for edge_def.  */
9925 
9926 extern void gt_pch_nx (tree&);
9927 extern void gt_pch_nx (gimple *&);
9928 extern void gt_pch_nx (rtx&);
9929 extern void gt_pch_nx (basic_block&);
9930 
9931 static void
9932 gt_pch_nx (rtx_insn *& x)
9933 {
9934   if (x)
9935     gt_pch_nx_rtx_def ((void *) x);
9936 }
9937 
9938 void
9939 gt_pch_nx (edge_def *e)
9940 {
9941   tree block = LOCATION_BLOCK (e->goto_locus);
9942   gt_pch_nx (e->src);
9943   gt_pch_nx (e->dest);
9944   if (current_ir_type () == IR_GIMPLE)
9945     gt_pch_nx (e->insns.g);
9946   else
9947     gt_pch_nx (e->insns.r);
9948   gt_pch_nx (block);
9949 }
9950 
9951 void
9952 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9953 {
9954   tree block = LOCATION_BLOCK (e->goto_locus);
9955   op (&(e->src), NULL, cookie);
9956   op (&(e->dest), NULL, cookie);
9957   if (current_ir_type () == IR_GIMPLE)
9958     op (&(e->insns.g), NULL, cookie);
9959   else
9960     op (&(e->insns.r), NULL, cookie);
9961   op (&(block), &(block), cookie);
9962 }
9963 
9964 #if CHECKING_P
9965 
9966 namespace selftest {
9967 
9968 /* Helper function for CFG selftests: create a dummy function decl
9969    and push it as cfun.  */
9970 
9971 static tree
9972 push_fndecl (const char *name)
9973 {
9974   tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
9975   /* FIXME: this uses input_location: */
9976   tree fndecl = build_fn_decl (name, fn_type);
9977   tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
9978 			    NULL_TREE, integer_type_node);
9979   DECL_RESULT (fndecl) = retval;
9980   push_struct_function (fndecl);
9981   function *fun = DECL_STRUCT_FUNCTION (fndecl);
9982   ASSERT_TRUE (fun != NULL);
9983   init_empty_tree_cfg_for_function (fun);
9984   ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
9985   ASSERT_EQ (0, n_edges_for_fn (fun));
9986   return fndecl;
9987 }
9988 
9989 /* These tests directly create CFGs.
9990    Compare with the static fns within tree-cfg.cc:
9991      - build_gimple_cfg
9992      - make_blocks: calls create_basic_block (seq, bb);
9993      - make_edges.   */
9994 
9995 /* Verify a simple cfg of the form:
9996      ENTRY -> A -> B -> C -> EXIT.  */
9997 
9998 static void
9999 test_linear_chain ()
10000 {
10001   gimple_register_cfg_hooks ();
10002 
10003   tree fndecl = push_fndecl ("cfg_test_linear_chain");
10004   function *fun = DECL_STRUCT_FUNCTION (fndecl);
10005 
10006   /* Create some empty blocks.  */
10007   basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
10008   basic_block bb_b = create_empty_bb (bb_a);
10009   basic_block bb_c = create_empty_bb (bb_b);
10010 
10011   ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
10012   ASSERT_EQ (0, n_edges_for_fn (fun));
10013 
10014   /* Create some edges: a simple linear chain of BBs.  */
10015   make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
10016   make_edge (bb_a, bb_b, 0);
10017   make_edge (bb_b, bb_c, 0);
10018   make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10019 
10020   /* Verify the edges.  */
10021   ASSERT_EQ (4, n_edges_for_fn (fun));
10022   ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
10023   ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
10024   ASSERT_EQ (1, bb_a->preds->length ());
10025   ASSERT_EQ (1, bb_a->succs->length ());
10026   ASSERT_EQ (1, bb_b->preds->length ());
10027   ASSERT_EQ (1, bb_b->succs->length ());
10028   ASSERT_EQ (1, bb_c->preds->length ());
10029   ASSERT_EQ (1, bb_c->succs->length ());
10030   ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
10031   ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
10032 
10033   /* Verify the dominance information
10034      Each BB in our simple chain should be dominated by the one before
10035      it.  */
10036   calculate_dominance_info (CDI_DOMINATORS);
10037   ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
10038   ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
10039   auto_vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
10040   ASSERT_EQ (1, dom_by_b.length ());
10041   ASSERT_EQ (bb_c, dom_by_b[0]);
10042   free_dominance_info (CDI_DOMINATORS);
10043 
10044   /* Similarly for post-dominance: each BB in our chain is post-dominated
10045      by the one after it.  */
10046   calculate_dominance_info (CDI_POST_DOMINATORS);
10047   ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
10048   ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
10049   auto_vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
10050   ASSERT_EQ (1, postdom_by_b.length ());
10051   ASSERT_EQ (bb_a, postdom_by_b[0]);
10052   free_dominance_info (CDI_POST_DOMINATORS);
10053 
10054   pop_cfun ();
10055 }
10056 
10057 /* Verify a simple CFG of the form:
10058      ENTRY
10059        |
10060        A
10061       / \
10062      /t  \f
10063     B     C
10064      \   /
10065       \ /
10066        D
10067        |
10068       EXIT.  */
10069 
10070 static void
10071 test_diamond ()
10072 {
10073   gimple_register_cfg_hooks ();
10074 
10075   tree fndecl = push_fndecl ("cfg_test_diamond");
10076   function *fun = DECL_STRUCT_FUNCTION (fndecl);
10077 
10078   /* Create some empty blocks.  */
10079   basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
10080   basic_block bb_b = create_empty_bb (bb_a);
10081   basic_block bb_c = create_empty_bb (bb_a);
10082   basic_block bb_d = create_empty_bb (bb_b);
10083 
10084   ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
10085   ASSERT_EQ (0, n_edges_for_fn (fun));
10086 
10087   /* Create the edges.  */
10088   make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
10089   make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
10090   make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
10091   make_edge (bb_b, bb_d, 0);
10092   make_edge (bb_c, bb_d, 0);
10093   make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10094 
10095   /* Verify the edges.  */
10096   ASSERT_EQ (6, n_edges_for_fn (fun));
10097   ASSERT_EQ (1, bb_a->preds->length ());
10098   ASSERT_EQ (2, bb_a->succs->length ());
10099   ASSERT_EQ (1, bb_b->preds->length ());
10100   ASSERT_EQ (1, bb_b->succs->length ());
10101   ASSERT_EQ (1, bb_c->preds->length ());
10102   ASSERT_EQ (1, bb_c->succs->length ());
10103   ASSERT_EQ (2, bb_d->preds->length ());
10104   ASSERT_EQ (1, bb_d->succs->length ());
10105 
10106   /* Verify the dominance information.  */
10107   calculate_dominance_info (CDI_DOMINATORS);
10108   ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
10109   ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
10110   ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
10111   auto_vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
10112   ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D in some order.  */
10113   dom_by_a.release ();
10114   auto_vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
10115   ASSERT_EQ (0, dom_by_b.length ());
10116   dom_by_b.release ();
10117   free_dominance_info (CDI_DOMINATORS);
10118 
10119   /* Similarly for post-dominance.  */
10120   calculate_dominance_info (CDI_POST_DOMINATORS);
10121   ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
10122   ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
10123   ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
10124   auto_vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
10125   ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order.  */
10126   postdom_by_d.release ();
10127   auto_vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
10128   ASSERT_EQ (0, postdom_by_b.length ());
10129   postdom_by_b.release ();
10130   free_dominance_info (CDI_POST_DOMINATORS);
10131 
10132   pop_cfun ();
10133 }
10134 
10135 /* Verify that we can handle a CFG containing a "complete" aka
10136    fully-connected subgraph, where each of the blocks A, B, C and D
10137    below has an edge to every node in the subgraph, including itself.
10138    e.g.:
10139      ENTRY  EXIT
10140        |    ^
10141        |   /
10142        |  /
10143        | /
10144        V/
10145        A<--->B
10146        ^^   ^^
10147        | \ / |
10148        |  X  |
10149        | / \ |
10150        VV   VV
10151        C<--->D
10152 */
10153 
10154 static void
10155 test_fully_connected ()
10156 {
10157   gimple_register_cfg_hooks ();
10158 
10159   tree fndecl = push_fndecl ("cfg_fully_connected");
10160   function *fun = DECL_STRUCT_FUNCTION (fndecl);
10161 
10162   const int n = 4;
10163 
10164   /* Create some empty blocks.  */
10165   auto_vec<basic_block> subgraph_nodes;
10166   for (int i = 0; i < n; i++)
10167     subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
10168 
10169   ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
10170   ASSERT_EQ (0, n_edges_for_fn (fun));
10171 
10172   /* Create the edges.  */
10173   make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
10174   make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10175   for (int i = 0; i < n; i++)
10176     for (int j = 0; j < n; j++)
10177       make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
10178 
10179   /* Verify the edges.  */
10180   ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
10181   /* The first one is linked to ENTRY/EXIT as well as itself and
10182      everything else.  */
10183   ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
10184   ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
10185   /* The other ones in the subgraph are linked to everything in
10186      the subgraph (including themselves).  */
10187   for (int i = 1; i < n; i++)
10188     {
10189       ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
10190       ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
10191     }
10192 
10193   /* Verify the dominance information.  */
10194   calculate_dominance_info (CDI_DOMINATORS);
10195   /* The initial block in the subgraph should be dominated by ENTRY.  */
10196   ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
10197 	     get_immediate_dominator (CDI_DOMINATORS,
10198 				      subgraph_nodes[0]));
10199   /* Every other block in the subgraph should be dominated by the
10200      initial block.  */
10201   for (int i = 1; i < n; i++)
10202     ASSERT_EQ (subgraph_nodes[0],
10203 	       get_immediate_dominator (CDI_DOMINATORS,
10204 					subgraph_nodes[i]));
10205   free_dominance_info (CDI_DOMINATORS);
10206 
10207   /* Similarly for post-dominance.  */
10208   calculate_dominance_info (CDI_POST_DOMINATORS);
10209   /* The initial block in the subgraph should be postdominated by EXIT.  */
10210   ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
10211 	     get_immediate_dominator (CDI_POST_DOMINATORS,
10212 				      subgraph_nodes[0]));
10213   /* Every other block in the subgraph should be postdominated by the
10214      initial block, since that leads to EXIT.  */
10215   for (int i = 1; i < n; i++)
10216     ASSERT_EQ (subgraph_nodes[0],
10217 	       get_immediate_dominator (CDI_POST_DOMINATORS,
10218 					subgraph_nodes[i]));
10219   free_dominance_info (CDI_POST_DOMINATORS);
10220 
10221   pop_cfun ();
10222 }
10223 
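/* Editorial sketch, addressing the "something that jumps to itself"
   item in the TODO at the end of this file.  The shape is
   ENTRY -> A -> EXIT plus an A -> A self-edge; the counts and
   dominator expectations asserted below follow the pattern of the
   tests above and are this sketch's assumptions.  Note that it is not
   called from tree_cfg_cc_tests below, so wiring it in there (or
   marking it ATTRIBUTE_UNUSED) would be needed in a real build.  */

static void
test_self_loop ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_self_loop");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* A single block besides ENTRY and EXIT.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  ASSERT_EQ (3, n_basic_blocks_for_fn (fun));

  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_a, 0);
  make_edge (bb_a, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* A has two preds (ENTRY and itself) and two succs (itself and
     EXIT).  */
  ASSERT_EQ (3, n_edges_for_fn (fun));
  ASSERT_EQ (2, bb_a->preds->length ());
  ASSERT_EQ (2, bb_a->succs->length ());

  /* The self-edge should not perturb dominance: A is immediately
     dominated by ENTRY and immediately post-dominated by EXIT.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
	     get_immediate_dominator (CDI_DOMINATORS, bb_a));
  free_dominance_info (CDI_DOMINATORS);

  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
	     get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}
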
10224 /* Run all of the selftests within this file.  */
10225 
10226 void
10227 tree_cfg_cc_tests ()
10228 {
10229   test_linear_chain ();
10230   test_diamond ();
10231   test_fully_connected ();
10232 }
10233 
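/* Editorial note: in the GCC tree, this runner is invoked from
   selftest::run_tests (selftest-run-tests.cc) when the compiler is
   built with checking enabled; the "make selftest" target, or running
   the compiler proper with -fself-test=, exercises these tests.  */
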
10234 } // namespace selftest
10235 
10236 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
10237    - loop
10238    - nested loops
10239    - switch statement (a block with many out-edges)
10240    - something that jumps to itself (see the test_self_loop sketch above)
10241    - etc.  */
10242 
10243 #endif /* CHECKING_P */
10244