/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "flags.h"
#include "function.h"
#include "diagnostic-core.h"
#include "tree-pass.h"
#include "langhooks.h"

/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (see tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return and gotos to the unique return site.  */

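/* For illustration, a sketch of points 1 and 3 (label names here are
   arbitrary placeholders, not what the compiler actually generates).
   A function with two identical returns, such as

	void f (int x)
	{
	  if (x > 0)
	    return;
	  g ();
	  return;
	}

   leaves this pass with the lexical scopes flattened and both returns
   funneled through a single return site, roughly:

	f (x)
	{
	  if (x > 0) goto <D.1>; else goto <D.2>;
	  <D.1>:
	  goto <L.0>;
	  <D.2>:
	  g ();
	  goto <L.0>;
	  <L.0>:
	  return;
	}  */
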
/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  gimple stmt;
};
typedef struct return_statements_t return_statements_t;


struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  vec<return_statements_t> return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;

  /* True if the function calls __builtin_setjmp.  */
  bool calls_builtin_setjmp;
};

static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);


/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple bind;
  tree t;
  gimple x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  if (gimple_seq_may_fallthru (lowered_body)
      && (data.return_statements.is_empty ()
	  || gimple_return_retval (data.return_statements.last().stmt) != NULL))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
    }

  /* If the function calls __builtin_setjmp, we need to emit the computed
     goto that will serve as the unique dispatcher for all the receivers.  */
  if (data.calls_builtin_setjmp)
    {
      tree disp_label, disp_var, arg;

      /* Build 'DISP_LABEL:' and insert.  */
      disp_label = create_artificial_label (cfun->function_end_locus);
      /* This mark will create forward edges from every call site.  */
      DECL_NONLOCAL (disp_label) = 1;
      cfun->has_nonlocal_label = 1;
      x = gimple_build_label (disp_label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Build 'DISP_VAR = __builtin_setjmp_dispatcher (DISP_LABEL);'
	 and insert.  */
      disp_var = create_tmp_var (ptr_type_node, "setjmpvar");
      arg = build_addr (disp_label, current_function_decl);
      t = builtin_decl_implicit (BUILT_IN_SETJMP_DISPATCHER);
      x = gimple_build_call (t, 1, arg);
      gimple_call_set_lhs (x, disp_var);

      /* Build 'goto DISP_VAR;' and insert.  */
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      x = gimple_build_goto (disp_var);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();
  return 0;
}

struct gimple_opt_pass pass_lower_cf =
{
 {
  GIMPLE_PASS,
  "lower",				/* name */
  OPTGROUP_NONE,			/* optinfo_flags */
  NULL,					/* gate */
  lower_function_body,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_gimple_any,			/* properties_required */
  PROP_gimple_lcf,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};



/* Verify that the types of the call arguments match those of the
   function declaration.  If we cannot verify this, or there is a
   mismatch, return false.  */

static bool
gimple_check_call_args (gimple stmt, tree fndecl)
{
  tree parms, p;
  unsigned int i, nargs;

  /* Calls to internal functions always match their signature.  */
  if (gimple_call_internal_p (stmt))
    return true;

  nargs = gimple_call_num_args (stmt);

  /* Get argument types for verification.  */
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));

  /* Verify that the types of the arguments match those of the function
     declaration.  If we cannot verify this, or there is a mismatch,
     return false.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    {
      for (i = 0, p = DECL_ARGUMENTS (fndecl);
	   i < nargs;
	   i++, p = DECL_CHAIN (p))
	{
	  tree arg;
	  /* We cannot distinguish a varargs function from the case of
	     excess parameters; still, deferring the inlining decision
	     to the callee is possible.  */
	  if (!p)
	    break;
	  arg = gimple_call_arg (stmt, i);
	  if (p == error_mark_node
	      || DECL_ARG_TYPE (p) == error_mark_node
	      || arg == error_mark_node
	      || (!types_compatible_p (DECL_ARG_TYPE (p), TREE_TYPE (arg))
		  && !fold_convertible_p (DECL_ARG_TYPE (p), arg)))
	    return false;
	}
    }
  else if (parms)
    {
      for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
	{
	  tree arg;
	  /* If this is a varargs function, defer the inlining decision
	     to the callee.  */
	  if (!p)
	    break;
	  arg = gimple_call_arg (stmt, i);
	  if (TREE_VALUE (p) == error_mark_node
	      || arg == error_mark_node
	      || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
	      || (!types_compatible_p (TREE_VALUE (p), TREE_TYPE (arg))
		  && !fold_convertible_p (TREE_VALUE (p), arg)))
	    return false;
	}
    }
  else
    {
      if (nargs != 0)
	return false;
    }
  return true;
}

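/* For illustration (a hypothetical mismatch; the precise rules are
   those of types_compatible_p and fold_convertible_p above): given

	extern int f (int);

   a call F (I) with an integer-typed argument I checks out, whereas a
   call that passes an aggregate where INT is expected can neither be
   matched nor folded to the declared type, so gimple_check_call_args
   returns false for it.  */
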
/* Verify that the types of the arguments and the lhs of CALL_STMT match
   those of the function declaration CALLEE.  If we cannot verify this,
   or there is a mismatch, return false.  */

bool
gimple_check_call_matching_types (gimple call_stmt, tree callee)
{
  tree lhs;

  if ((DECL_RESULT (callee)
       && !DECL_BY_REFERENCE (DECL_RESULT (callee))
       && (lhs = gimple_call_lhs (call_stmt)) != NULL_TREE
       && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
				      TREE_TYPE (lhs))
       && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
      || !gimple_check_call_args (call_stmt, callee))
    return false;
  return true;
}

/* Lower sequence SEQ.  Unlike gimplification, the statements are not
   relowered when they are changed -- if this has to be done, the lowering
   routine must do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq *seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}


/* Lower the OpenMP directive statement pointed to by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body_ptr (stmt), data);
  gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
  gimple_omp_set_body (stmt, NULL);
  gsi_next (gsi);
}


/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later.  This guess need
   not be 100% accurate; simply be conservative and reset cannot_fallthru
   to false if we don't know.  */
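
/* For example (a sketch): in a sequence such as

	return;
	return;

   the first return sets cannot_fallthru, so the second, unreachable
   return is simply removed instead of being lowered to another goto.  */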

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	lower_try_catch (gsi, data);
      else
	{
	  /* It must be a GIMPLE_TRY_FINALLY.  */
	  bool cannot_fallthru;
	  lower_sequence (gimple_try_eval_ptr (stmt), data);
	  cannot_fallthru = data->cannot_fallthru;

	  /* The finally clause is always executed after the try clause,
	     so if it does not fall through, then the try-finally will not
	     fall through.  Otherwise, if the try clause does not fall
	     through, then when the finally clause falls through it will
	     resume execution wherever the try clause was going.  So the
	     whole try-finally will only fall through if both the try
	     clause and the finally clause fall through.  */
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_try_cleanup_ptr (stmt), data);
	  data->cannot_fallthru |= cannot_fallthru;
	  gsi_next (gsi);
	}
      return;

    case GIMPLE_EH_ELSE:
      lower_sequence (gimple_eh_else_n_body_ptr (stmt), data);
      lower_sequence (gimple_eh_else_e_body_ptr (stmt), data);
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	unsigned i;

	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (EXPR_P (arg))
	      TREE_SET_BLOCK (arg, data->block);
	  }

	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	    && DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	  {
	    lower_builtin_setjmp (gsi);
	    data->cannot_fallthru = false;
	    data->calls_builtin_setjmp = true;
	    return;
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (stmt), data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}

/* Lower the GIMPLE_BIND at GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gimple stmt = gsi_stmt (*gsi);
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;

	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Lower a GIMPLE_TRY_CATCH at GSI.  DATA is passed through the recursion.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_catch_handler_ptr (gsi_stmt (i)), data);
	  if (!data->cannot_fallthru)
	    cannot_fallthru = false;
	}
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
	cannot_fallthru = false;
      break;

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}


/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of block_may_fallthru.  */
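
/* For instance (a C++-flavoured sketch; f and log_error are
   placeholders): in

	try { f (); return; }
	catch (...) { log_error (); }

   the try body cannot fall through, but the catch body can, so the
   whole TRY_CATCH may fall through.  */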

static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
	 catch expression and a body.  The whole TRY_CATCH may fall
	 through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
	{
	  if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
	    return true;
	}
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a RESX statement to resume execution after the exception.
	 So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}


/* Same as try_catch_may_fallthru, but for GIMPLE_TRY statements.  */

static bool
gimple_try_catch_may_fallthru (gimple stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}


/* Try to determine if we can fall out of the bottom of BLOCK.  This guess
   need not be 100% accurate; simply be conservative and return true if we
   don't know.  This is used only to avoid stupidly generating extra code.
   If we're wrong, we'll just delete the extra code later.  */

bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
	 control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If SWITCH_LABELS is set, this is lowered, and represents a
	 branch to a selected label and hence cannot fall through.
	 Otherwise SWITCH_BODY is set, and the switch can fall
	 through.  */
      return SWITCH_LABELS (stmt) == NULL_TREE;

    case COND_EXPR:
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
	return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
	      && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case MODIFY_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
	stmt = TREE_OPERAND (stmt, 1);
      else
	return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case TARGET_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 1));

    case ERROR_MARK:
      return true;

    default:
      return lang_hooks.block_may_fallthru (stmt);
    }
}

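/* For example (sketches): a block whose last statement is

	return x;

   cannot fall through, while

	if (p)
	  return;

   may, because the implicit else arm falls through.  */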

/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
	 control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
	 to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_CONDs are already lowered into a two-way branch.  They
	 can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (gimple_bind_body (stmt));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	return gimple_try_catch_may_fallthru (stmt);

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      return (gimple_seq_may_fallthru (gimple_eh_else_n_body (stmt))
	      || gimple_seq_may_fallthru (gimple_eh_else_e_body (stmt)));

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;

    default:
      return true;
    }
}


/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}


/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */
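/* Note that two returns are matched only if they return the very same
   RETVAL tree (pointer equality), so e.g. two 'return a;' statements
   referring to the same variable share one label, while 'return a;'
   and 'return b;' each get their own.  The representative's location
   is cleared because it now stands in for many user returns; keeping
   one arbitrary source line would skew coverage results.  */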

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = data->return_statements.length () - 1;
       i >= 0; i--)
    {
      tmp_rs = data->return_statements[i];

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for many such returns.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

	  goto found;
	}
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  data->return_statements.safe_push (tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}


/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 3 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_dispatcher and __builtin_setjmp_receiver, but with
   __builtin_setjmp_dispatcher shared among all the instances; that's
   why it is only emitted at the end by lower_function_body.

   After full lowering, the body of the function should look like:

    {
      void * setjmpvar.0;
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
      <D3853>: [non-local];
      setjmpvar.0 = __builtin_setjmp_dispatcher (&<D3853>);
      goto setjmpvar.0;
    }

   The dispatcher block will be both the unique destination of all the
   abnormal call edges and the unique source of all the abnormal edges
   to the receivers, thus keeping the complexity explosion localized.  */

static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple g;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}


/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  bool change_cfun = fn != current_function_decl;

  if (change_cfun)
    push_cfun (DECL_STRUCT_FUNCTION (fn));

  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs also contain function/type/constant declarations
         that we don't need to care about.  */
      if (TREE_CODE (var) != VAR_DECL)
	continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
	continue;

      /* Record the variable.  */
      add_local_decl (cfun, var);
    }

  if (change_cfun)
    pop_cfun ();
}


/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}