/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003-2022 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "fold-const.h"
#include "tree-nested.h"
#include "calls.h"
#include "gimple-iterator.h"
#include "gimple-low.h"
#include "predict.h"
#include "gimple-predict.h"
#include "gimple-fold.h"

/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.cc:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return and gotos to the unique return site.  */
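
/* As a small illustrative sketch (invented, not an actual dump) of points
   1 and 3: a High GIMPLE body of the form

     {
       int i;
       ...
       return a;
       ...
       return a;
     }

   ends up, after lowering, with the GIMPLE_BIND braces gone (the local
   `i' is recorded in the function's local declarations instead) and with
   the duplicate returns redirected to a single return site:

     ...
     goto <D.ret>;
     ...
     <D.ret>:
     return a;

   The label name <D.ret> is made up for this example.  */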

/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  greturn *stmt;
};
typedef struct return_statements_t return_statements_t;


struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  vec<return_statements_t> return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;
};

static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
static void lower_builtin_posix_memalign (gimple_stmt_iterator *);


/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple *bind;
  gimple *x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If we had begin stmt markers from e.g. PCH, but this compilation
     doesn't want them, lower_stmt will have cleaned them up; we can
     now clear the flag that indicates we had them.  */
  if (!MAY_HAVE_DEBUG_MARKER_STMTS && cfun->debug_nonbind_markers)
    {
      /* This counter need not be exact, but before lowering it most
	 certainly is.  */
      gcc_assert (cfun->debug_marker_count == 0);
      cfun->debug_nonbind_markers = false;
    }

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  bool may_fallthru = gimple_seq_may_fallthru (lowered_body);
  if (may_fallthru
      && (data.return_statements.is_empty ()
	  || (gimple_return_retval (data.return_statements.last().stmt)
	      != NULL)))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      may_fallthru = false;
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
      if (may_fallthru)
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for the fallthru too.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (t.stmt, UNKNOWN_LOCATION);
	  may_fallthru = false;
	}
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();
  return 0;
}

namespace {

const pass_data pass_data_lower_cf =
{
  GIMPLE_PASS, /* type */
  "lower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lcf, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_cf : public gimple_opt_pass
{
public:
  pass_lower_cf (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_cf, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return lower_function_body (); }

}; // class pass_lower_cf

} // anon namespace

gimple_opt_pass *
make_pass_lower_cf (gcc::context *ctxt)
{
  return new pass_lower_cf (ctxt);
}

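/* Usage note (an editorial aside, not upstream text): since the pass is
   registered under the name "lower", its result can be inspected through
   GCC's per-pass dump machinery, e.g.

     gcc -O2 -fdump-tree-lower foo.c

   which writes a foo.c.*.lower dump file containing the Low GIMPLE
   produced by lower_function_body.  */
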
/* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
   when they are changed -- if this has to be done, the lowering routine must
   do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq *seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}


/* Lower the OpenMP directive statement pointed to by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body_ptr (stmt), data);
  gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
  gimple_omp_set_body (stmt, NULL);
  gsi_next (gsi);
}


/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	lower_try_catch (gsi, data);
      else
	{
	  /* It must be a GIMPLE_TRY_FINALLY.  */
	  bool cannot_fallthru;
	  lower_sequence (gimple_try_eval_ptr (stmt), data);
	  cannot_fallthru = data->cannot_fallthru;

	  /* The finally clause is always executed after the try clause,
	     so if it does not fall through, then the try-finally will not
	     fall through.  Otherwise, if the try clause does not fall
	     through, then when the finally clause falls through it will
	     resume execution wherever the try clause was going.  So the
	     whole try-finally will only fall through if both the try
	     clause and the finally clause fall through.  */
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_try_cleanup_ptr (stmt), data);
	  data->cannot_fallthru |= cannot_fallthru;
	  gsi_next (gsi);
	}
      return;

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	lower_sequence (gimple_eh_else_n_body_ptr (eh_else_stmt), data);
	lower_sequence (gimple_eh_else_e_body_ptr (eh_else_stmt), data);
      }
      break;

    case GIMPLE_DEBUG:
      gcc_checking_assert (cfun->debug_nonbind_markers);
      /* We can't possibly have debug bind stmts before lowering, we
	 first emit them when entering SSA.  */
      gcc_checking_assert (gimple_debug_nonbind_marker_p (stmt));
      /* Propagate fallthruness.  */
      /* If the function (e.g. from PCH) had debug stmts, but they're
	 disabled for this compilation, remove them.  */
      if (!MAY_HAVE_DEBUG_MARKER_STMTS)
	gsi_remove (gsi, true);
      else
	gsi_next (gsi);
      return;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	unsigned i;

	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (EXPR_P (arg))
	      TREE_SET_BLOCK (arg, data->block);
	  }

	if (decl
	    && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
	  {
	    if (DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	      {
		lower_builtin_setjmp (gsi);
		data->cannot_fallthru = false;
		return;
	      }
	    else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_POSIX_MEMALIGN
		     && flag_tree_bit_ccp
		     && gimple_builtin_call_types_compatible_p (stmt, decl))
	      {
		lower_builtin_posix_memalign (gsi);
		return;
	      }
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }

	/* We delay folding of builtin calls from gimplification to
	   here so the IL is in a consistent state for the diagnostic
	   machinery's job.  */
	if (gimple_call_builtin_p (stmt))
	  fold_stmt (gsi);
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (
			as_a <gtransaction *> (stmt)),
		      data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}

/* Lower a GIMPLE_BIND statement GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gbind *stmt = as_a <gbind *> (gsi_stmt (*gsi));
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;

	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));

  /* Scrap DECL_CHAIN up to BLOCK_VARS to ease GC after we no longer
     need gimple_bind_vars.  */
  tree next;
  /* BLOCK_VARS and gimple_bind_vars share a common sub-chain.  Find
     it by marking all BLOCK_VARS.  */
  if (gimple_bind_block (stmt))
    for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
      TREE_VISITED (t) = 1;
  for (tree var = gimple_bind_vars (stmt);
       var && ! TREE_VISITED (var); var = next)
    {
      next = DECL_CHAIN (var);
      DECL_CHAIN (var) = NULL_TREE;
    }
  /* Unmark BLOCK_VARS.  */
  if (gimple_bind_block (stmt))
    for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
      TREE_VISITED (t) = 0;

  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
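
/* A small illustrative sketch (invented, not from an actual dump): a
   High GIMPLE bind such as

     {
       int i;
       i = n + 1;
       use (i);
     }

   is lowered by splicing its body into the enclosing sequence, recording
   the local `i' via record_vars, and chaining its BLOCK into the block
   tree of the enclosing scope; the GIMPLE_BIND wrapper itself is then
   deleted.  */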

/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple *stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_catch_handler_ptr (
                            as_a <gcatch *> (gsi_stmt (i))),
			  data);
	  if (!data->cannot_fallthru)
	    cannot_fallthru = false;
	}
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
	cannot_fallthru = false;
      break;

    case GIMPLE_DEBUG:
      gcc_checking_assert (gimple_debug_begin_stmt_p (stmt));
      break;

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}
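
/* To make the fall-through rule above concrete (an invented sketch): for

     try { maybe_throw (); x = 1; } catch (...) { x = 0; }

   both the try body and the handler can reach their end, so the whole
   construct may fall through; if instead every handler ended in a rethrow
   or a return, the construct could only fall through via the try body
   itself.  */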


/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of gimple_stmt_may_fallthru.  */

static bool
gimple_try_catch_may_fallthru (gtry *stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (
					 as_a <gcatch *> (gsi_stmt (i)))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}


/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple *stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
	 control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
	 to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
	 can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (
	       gimple_bind_body (as_a <gbind *> (stmt)));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        return gimple_try_catch_may_fallthru (as_a <gtry *> (stmt));

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	return (gimple_seq_may_fallthru (gimple_eh_else_n_body (eh_else_stmt))
		|| gimple_seq_may_fallthru (gimple_eh_else_e_body (
					      eh_else_stmt)));
      }

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return !gimple_call_noreturn_p (stmt);

    default:
      return true;
    }
}


/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_nondebug_stmt (seq));
}


/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
  gimple *t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = data->return_statements.length () - 1;
       i >= 0; i--)
    {
      tmp_rs = data->return_statements[i];

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for many such returns.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

	  goto found;
	}
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  data->return_statements.safe_push (tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  /* location includes block.  */
  gimple_set_location (t, gimple_location (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into two other builtins, namely __builtin_setjmp_setup
   and __builtin_setjmp_receiver.

   After full lowering, the body of the function should look like:

    {
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
    }

   During cfg creation an extra per-function (or per-OpenMP region)
   block with an ABNORMAL_DISPATCHER internal call will be added; it is
   the unique destination of all the abnormal call edges and the unique
   source of all the abnormal edges to the receivers, thus keeping the
   complexity explosion localized.  */
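
/* For reference (a hand-written sketch, not a verbatim dump): a source
   level use such as

     if (__builtin_setjmp (buf) == 0)
       normal_path ();
     else
       resume_path ();

   produces one __builtin_setjmp_setup/__builtin_setjmp_receiver pair of
   the shape shown above per call site, with the receiver label being the
   target that __builtin_longjmp (buf, 1) transfers control to.  */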

static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple *g;

  /* __builtin_setjmp_{setup,receiver} aren't ECF_RETURNS_TWICE and for RTL
     these builtins are modelled as non-local label jumps to the label
     that is passed to these two builtins, so pretend we have a non-local
     label during GIMPLE passes too.  See PR60003.  */
  cfun->has_nonlocal_label = 1;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  tree orig_dest = dest = gimple_call_lhs (stmt);
  if (orig_dest && TREE_CODE (orig_dest) == SSA_NAME)
    dest = create_tmp_reg (TREE_TYPE (orig_dest));

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  /* location includes block.  */
  gimple_set_location (g, loc);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build orig_dest = dest if necessary.  */
  if (dest != orig_dest)
    {
      g = gimple_build_assign (orig_dest, dest);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}

/* Lower calls to posix_memalign to
     res = posix_memalign (ptr, align, size);
     if (res == 0)
       *ptr = __builtin_assume_aligned (*ptr, align);
   or to
     void *tem;
     res = posix_memalign (&tem, align, size);
     if (res == 0)
       ptr = __builtin_assume_aligned (tem, align);
   in case the first argument was &ptr.  That way we can get at the
   alignment of the heap pointer in CCP.  */
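
/* As a concrete (illustrative) example: with -ftree-bit-ccp enabled, a
   user-level call

     void *p;
     int rc = posix_memalign (&p, 64, n);

   is rewritten so that on the rc == 0 path the stored pointer is passed
   through __builtin_assume_aligned (..., 64), letting the bit-CCP pass
   see the 64-byte alignment of the returned buffer.  */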

static void
lower_builtin_posix_memalign (gimple_stmt_iterator *gsi)
{
  gimple *stmt, *call = gsi_stmt (*gsi);
  tree pptr = gimple_call_arg (call, 0);
  tree align = gimple_call_arg (call, 1);
  tree res = gimple_call_lhs (call);
  tree ptr = create_tmp_reg (ptr_type_node);
  if (TREE_CODE (pptr) == ADDR_EXPR)
    {
      tree tem = create_tmp_var (ptr_type_node);
      TREE_ADDRESSABLE (tem) = 1;
      gimple_call_set_arg (call, 0, build_fold_addr_expr (tem));
      stmt = gimple_build_assign (ptr, tem);
    }
  else
    stmt = gimple_build_assign (ptr,
				fold_build2 (MEM_REF, ptr_type_node, pptr,
					     build_int_cst (ptr_type_node, 0)));
  if (res == NULL_TREE)
    {
      res = create_tmp_reg (integer_type_node);
      gimple_call_set_lhs (call, res);
    }
  tree align_label = create_artificial_label (UNKNOWN_LOCATION);
  tree noalign_label = create_artificial_label (UNKNOWN_LOCATION);
  gimple *cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
				   align_label, noalign_label);
  gsi_insert_after (gsi, cond, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (align_label), GSI_NEW_STMT);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_call (builtin_decl_implicit (BUILT_IN_ASSUME_ALIGNED),
			    2, ptr, align);
  gimple_call_set_lhs (stmt, ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_assign (fold_build2 (MEM_REF, ptr_type_node, pptr,
					   build_int_cst (ptr_type_node, 0)),
			      ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (noalign_label), GSI_NEW_STMT);
}


/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs also contain function/type/constant declarations
	 that we don't need to care about.  */
      if (!VAR_P (var))
	continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
	continue;

      /* Record the variable.  */
      add_local_decl (DECL_STRUCT_FUNCTION (fn), var);
    }
}


/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}