/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "flags.h"
#include "function.h"
#include "diagnostic-core.h"
#include "tree-pass.h"
#include "langhooks.h"

/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (see tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are replaced with a single
      return statement and gotos to the unique return site.  */
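
/* For instance (a sketch of the overall transformation, not a dump
   from any particular GCC version), a function such as

     void f (int a) { { int b = a; if (b) return; } return; }

   enters this pass as one GIMPLE_BIND containing a nested GIMPLE_BIND
   and leaves it as a flat statement sequence: the binds are gone, their
   BLOCKs are re-chained under DECL_INITIAL (f), and both `return;'
   statements have been replaced by gotos to a single labelled return
   emitted at the end of the function.  */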

/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  gimple stmt;
};
typedef struct return_statements_t return_statements_t;

DEF_VEC_O(return_statements_t);
DEF_VEC_ALLOC_O(return_statements_t,heap);

struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  VEC(return_statements_t,heap) *return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;

  /* True if the function calls __builtin_setjmp.  */
  bool calls_builtin_setjmp;
};

static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);


/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple bind;
  tree t;
  gimple x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
              && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements = VEC_alloc (return_statements_t, heap, 8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  if (gimple_seq_may_fallthru (lowered_body)
      && (VEC_empty (return_statements_t, data.return_statements)
          || gimple_return_retval (VEC_last (return_statements_t,
                                   data.return_statements)->stmt) != NULL))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!VEC_empty (return_statements_t, data.return_statements))
    {
      return_statements_t t;

      /* Unfortunately, we can't use VEC_pop because it returns void for
         objects.  */
      t = *VEC_last (return_statements_t, data.return_statements);
      VEC_truncate (return_statements_t,
                    data.return_statements,
                    VEC_length (return_statements_t,
                                data.return_statements) - 1);

      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
    }

  /* If the function calls __builtin_setjmp, we need to emit the computed
     goto that will serve as the unique dispatcher for all the receivers.  */
  if (data.calls_builtin_setjmp)
    {
      tree disp_label, disp_var, arg;

      /* Build 'DISP_LABEL:' and insert.  */
      disp_label = create_artificial_label (cfun->function_end_locus);
      /* This mark will create forward edges from every call site.  */
      DECL_NONLOCAL (disp_label) = 1;
      cfun->has_nonlocal_label = 1;
      x = gimple_build_label (disp_label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Build 'DISP_VAR = __builtin_setjmp_dispatcher (DISP_LABEL);'
         and insert.  */
      disp_var = create_tmp_var (ptr_type_node, "setjmpvar");
      arg = build_addr (disp_label, current_function_decl);
      t = builtin_decl_implicit (BUILT_IN_SETJMP_DISPATCHER);
      x = gimple_build_call (t, 1, arg);
      gimple_call_set_lhs (x, disp_var);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Build 'goto DISP_VAR;' and insert.  */
      x = gimple_build_goto (disp_var);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  VEC_free (return_statements_t, heap, data.return_statements);
  return 0;
}

struct gimple_opt_pass pass_lower_cf =
{
 {
  GIMPLE_PASS,
  "lower",				/* name */
  NULL,					/* gate */
  lower_function_body,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_gimple_any,			/* properties_required */
  PROP_gimple_lcf,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};


/* Verify if the type of the argument matches that of the function
   declaration.  If we cannot verify this or there is a mismatch,
   return false.  */

static bool
gimple_check_call_args (gimple stmt, tree fndecl)
{
  tree parms, p;
  unsigned int i, nargs;

  /* Calls to internal functions always match their signature.  */
  if (gimple_call_internal_p (stmt))
    return true;

  nargs = gimple_call_num_args (stmt);

  /* Get argument types for verification.  */
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));

  /* Verify if the type of the argument matches that of the function
     declaration.  If we cannot verify this or there is a mismatch,
     return false.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    {
      for (i = 0, p = DECL_ARGUMENTS (fndecl);
           i < nargs;
           i++, p = DECL_CHAIN (p))
        {
          tree arg;
          /* We cannot distinguish a varargs function from the case
             of excess parameters, but deferring the inlining decision
             to the callee is still possible.  */
          if (!p)
            break;
          arg = gimple_call_arg (stmt, i);
          if (p == error_mark_node
              || arg == error_mark_node
              || (!types_compatible_p (DECL_ARG_TYPE (p), TREE_TYPE (arg))
                  && !fold_convertible_p (DECL_ARG_TYPE (p), arg)))
            return false;
        }
    }
  else if (parms)
    {
      for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
        {
          tree arg;
          /* If this is a varargs function, defer the inlining decision
             to the callee.  */
          if (!p)
            break;
          arg = gimple_call_arg (stmt, i);
          if (TREE_VALUE (p) == error_mark_node
              || arg == error_mark_node
              || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
              || (!types_compatible_p (TREE_VALUE (p), TREE_TYPE (arg))
                  && !fold_convertible_p (TREE_VALUE (p), arg)))
            return false;
        }
    }
  else
    {
      if (nargs != 0)
        return false;
    }
  return true;
}
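
/* An illustrative (hypothetical) example: given `void f (int);', a call
   `f (some_struct)' fails the check above, because a RECORD_TYPE is
   neither compatible with nor fold-convertible to `int', whereas
   `f ((char) c)' passes, since a `char' argument is fold-convertible to
   `int'.  A call through an unprototyped declaration (neither
   DECL_ARGUMENTS nor TYPE_ARG_TYPES available) is accepted only when it
   passes no arguments.  */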

/* Verify that the types of the arguments and the LHS of CALL_STMT
   match those of the function declaration CALLEE.  If we cannot
   verify this or there is a mismatch, return false.  */

bool
gimple_check_call_matching_types (gimple call_stmt, tree callee)
{
  tree lhs;

  if ((DECL_RESULT (callee)
       && !DECL_BY_REFERENCE (DECL_RESULT (callee))
       && (lhs = gimple_call_lhs (call_stmt)) != NULL_TREE
       && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
                                      TREE_TYPE (lhs))
       && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
      || !gimple_check_call_args (call_stmt, callee))
    return false;
  return true;
}

/* Lower sequence SEQ.  Unlike gimplification, the statements are not
   relowered when they are changed -- if this has to be done, the lowering
   routine must do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}


/* Lower the OpenMP directive statement pointed to by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body (stmt), data);
  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
  gsi_insert_seq_before (gsi, gimple_omp_body (stmt), GSI_SAME_STMT);
  gimple_omp_set_body (stmt, NULL);
  gsi_remove (gsi, false);
}
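
/* In effect the directive's body is spliced in line after the directive
   itself; schematically (a sketch, not an actual dump):

     GIMPLE_OMP_PARALLEL <BODY>

   becomes

     GIMPLE_OMP_PARALLEL        <-- body now NULL
     BODY                       <-- lowered, as sibling statements

   The OMP expansion pass later reconstitutes the region from these
   in-line statements.  */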


/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later.  This guess need
   not be 100% accurate; simply be conservative and reset cannot_fallthru
   to false if we don't know.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
        {
          gsi_remove (gsi, false);
          /* Propagate fallthruness.  */
        }
      else
        {
          lower_gimple_return (gsi, data);
          data->cannot_fallthru = true;
        }
      return;

    case GIMPLE_TRY:
      {
        bool try_cannot_fallthru;
        lower_sequence (gimple_try_eval (stmt), data);
        try_cannot_fallthru = data->cannot_fallthru;
        data->cannot_fallthru = false;
        lower_sequence (gimple_try_cleanup (stmt), data);
        /* See gimple_stmt_may_fallthru for the rationale.  */
        if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
          {
            data->cannot_fallthru |= try_cannot_fallthru;
            gsi_next (gsi);
            return;
          }
      }
      break;

    case GIMPLE_CATCH:
      data->cannot_fallthru = false;
      lower_sequence (gimple_catch_handler (stmt), data);
      break;

    case GIMPLE_EH_FILTER:
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure (stmt), data);
      break;

    case GIMPLE_EH_ELSE:
      lower_sequence (gimple_eh_else_n_body (stmt), data);
      lower_sequence (gimple_eh_else_e_body (stmt), data);
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
        tree decl = gimple_call_fndecl (stmt);

        if (decl
            && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
            && DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
          {
            lower_builtin_setjmp (gsi);
            data->cannot_fallthru = false;
            data->calls_builtin_setjmp = true;
            return;
          }

        if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
          {
            data->cannot_fallthru = true;
            gsi_next (gsi);
            return;
          }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body (stmt), data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}
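
/* A sketch of the effect of cannot_fallthru: in a sequence such as

     if (c) goto <L1>; else goto <L2>;
     return;

   the GIMPLE_COND sets cannot_fallthru, so the GIMPLE_RETURN that
   follows it (with no intervening label) is unreachable and is simply
   deleted instead of being lowered to a goto; this keeps the EH
   lowering pass from adding edges out of dead code.  */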

/* Lower a GIMPLE_BIND statement pointed to by GSI.  DATA is passed
   through the recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gimple stmt = gsi_stmt (*gsi);
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
        {
          /* The outermost block of the original function may not be the
             outermost statement chain of the gimplified function.  So we
             may see the outermost block just inside the function.  */
          gcc_assert (new_block == DECL_INITIAL (current_function_decl));
          new_block = NULL;
        }
      else
        {
          /* We do not expect to handle duplicate blocks.  */
          gcc_assert (!TREE_ASM_WRITTEN (new_block));
          TREE_ASM_WRITTEN (new_block) = 1;

          /* Block tree may get clobbered by inlining.  Normally this would
             be fixed in rest_of_decl_compilation using block notes, but
             since we are not going to emit them, it is up to us.  */
          BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
          BLOCK_SUBBLOCKS (old_block) = new_block;
          BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
          BLOCK_SUPERCONTEXT (new_block) = old_block;

          data->block = new_block;
        }
    }

  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
        = blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
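
/* For example (a sketch), lowering

     GIMPLE_BIND <vars: a,
       GIMPLE_BIND <vars: b, BODY>>

   records `a' and `b' via record_vars, chains the inner bind's BLOCK as
   a subblock of the outer one, and leaves just the lowered BODY in the
   statement stream; no GIMPLE_BIND survives into Low GIMPLE.  */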

/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of block_may_fallthru.  */

static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
         catch expression and a body.  The whole TRY_CATCH may fall
         through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
        {
          if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
            return true;
        }
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
         exception.  If the exception does not match EH_FILTER_TYPES,
         we will execute EH_FILTER_FAILURE, and we will fall through
         if that falls through.  If the exception does match
         EH_FILTER_TYPES, the stack unwinder will continue up the
         stack, so we will not fall through.  We don't know whether we
         will throw an exception which matches EH_FILTER_TYPES or not,
         so we just ignore EH_FILTER_TYPES and assume that we might
         throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
         exception occurs.  Those statements are implicitly followed
         by a RESX statement to resume execution after the exception.
         So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}


/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static bool
gimple_try_catch_may_fallthru (gimple stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (gimple_try_cleanup (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
         catch expression and a body.  The whole try/catch may fall
         through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
        {
          if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i))))
            return true;
        }
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
         exception.  If the exception does not match EH_FILTER_TYPES,
         we will execute EH_FILTER_FAILURE, and we will fall through
         if that falls through.  If the exception does match
         EH_FILTER_TYPES, the stack unwinder will continue up the
         stack, so we will not fall through.  We don't know whether we
         will throw an exception which matches EH_FILTER_TYPES or not,
         so we just ignore EH_FILTER_TYPES and assume that we might
         throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
         exception occurs.  Those statements are implicitly followed
         by a GIMPLE_RESX to resume execution after the exception.  So
         in this case the try/catch never falls through.  */
      return false;
    }
}


/* Try to determine if we can fall out of the bottom of BLOCK.  This guess
   need not be 100% accurate; simply be conservative and return true if we
   don't know.  This is used only to avoid stupidly generating extra code.
   If we're wrong, we'll just delete the extra code later.  */

bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
         control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If SWITCH_LABELS is set, this is lowered, and represents a
         branch to a selected label and hence cannot fall through.
         Otherwise SWITCH_BODY is set, and the switch can fall
         through.  */
      return SWITCH_LABELS (stmt) == NULL_TREE;

    case COND_EXPR:
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
        return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
         so if it does not fall through, then the try-finally will not
         fall through.  Otherwise, if the try clause does not fall
         through, then when the finally clause falls through it will
         resume execution wherever the try clause was going.  So the
         whole try-finally will only fall through if both the try
         clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
              && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case MODIFY_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
        stmt = TREE_OPERAND (stmt, 1);
      else
        return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case TARGET_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 1));

    case ERROR_MARK:
      return true;

    default:
      return lang_hooks.block_may_fallthru (stmt);
    }
}
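
/* A few illustrative cases (sketches; the last statement of the block
   is what matters):

     x = 1;                           -> true   (plain assignment)
     if (c) return 1; else return 0;  -> false  (both arms return)
     exit (1);                        -> false  (ECF_NORETURN call)
     if (c) return 1;                 -> true   (the empty else arm
                                                 falls through)  */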


/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
         control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
         to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
         can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (gimple_bind_body (stmt));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        return gimple_try_catch_may_fallthru (stmt);

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
         so if it does not fall through, then the try-finally will not
         fall through.  Otherwise, if the try clause does not fall
         through, then when the finally clause falls through it will
         resume execution wherever the try clause was going.  So the
         whole try-finally will only fall through if both the try
         clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
              && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      return (gimple_seq_may_fallthru (gimple_eh_else_n_body (stmt))
              || gimple_seq_may_fallthru (gimple_eh_else_e_body (stmt)));

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;

    default:
      return true;
    }
}


/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}


/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = VEC_length (return_statements_t, data->return_statements) - 1;
       i >= 0; i--)
    {
      tmp_rs = *VEC_index (return_statements_t, data->return_statements, i);

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
        {
          /* Remove the line number from the representative return statement.
             It now fills in for many such returns.  Failure to remove this
             will result in incorrect results for coverage analysis.  */
          gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

          goto found;
        }
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  VEC_safe_push (return_statements_t, heap, data->return_statements, &tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
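
/* For example (a sketch), two `return;' statements whose retvals are
   the same tree are both lowered to

     goto <retlab>;

   and lower_function_body later emits the representative

     <retlab>:
     return;

   once at the end of the function.  The comparison above is pointer
   equality of the retval trees, which commonly holds because the
   gimplifier funnels return values through the function's single
   RESULT_DECL.  */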

/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 3 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_dispatcher and __builtin_setjmp_receiver, but with
   __builtin_setjmp_dispatcher shared among all the instances; that's
   why it is only emitted at the end by lower_function_body.

   After full lowering, the body of the function should look like:

    {
      void * setjmpvar.0;
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
      <D3853>: [non-local];
      setjmpvar.0 = __builtin_setjmp_dispatcher (&<D3853>);
      goto setjmpvar.0;
    }

   The dispatcher block will be both the unique destination of all the
   abnormal call edges and the unique source of all the abnormal edges
   to the receivers, thus keeping the complexity explosion localized.  */

static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple g;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
                                                       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}


/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  if (fn != current_function_decl)
    push_cfun (DECL_STRUCT_FUNCTION (fn));

  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs also contain function/type/constant declarations
         that we don't need to care about.  */
      if (TREE_CODE (var) != VAR_DECL)
        continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
        continue;

      /* Record the variable.  */
      add_local_decl (cfun, var);
      if (gimple_referenced_vars (cfun))
        add_referenced_var (var);
    }

  if (fn != current_function_decl)
    pop_cfun ();
}


/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}