/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "fold-const.h"
#include "tree-nested.h"
#include "calls.h"
#include "gimple-iterator.h"
#include "gimple-low.h"

/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are merged into a single
      return statement, and duplicates are replaced with gotos to the
      unique return site.  */
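
/* As an illustration only (a hand-written sketch, not literal dump
   output of this pass), a High GIMPLE body along the lines of

     {
       {
         if (a) goto <D.1>; else goto <D.2>;
         <D.1>:
         return;
         <D.2>:
       }
       return;
     }

   is lowered so that both GIMPLE_BINDs disappear (their scope
   information survives only in the BLOCK tree) and each return is
   replaced by a goto to the single return site emitted at the end of
   the function:

     if (a) goto <D.1>; else goto <D.2>;
     <D.1>:
     goto <retlab>;
     <D.2>:
     goto <retlab>;
     <retlab>:
     return;  */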

/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  greturn *stmt;
};
typedef struct return_statements_t return_statements_t;


struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  vec<return_statements_t> return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;
};

static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
static void lower_builtin_posix_memalign (gimple_stmt_iterator *);


/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple *bind;
  gimple *x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  bool may_fallthru = gimple_seq_may_fallthru (lowered_body);
  if (may_fallthru
      && (data.return_statements.is_empty ()
	  || (gimple_return_retval (data.return_statements.last().stmt)
	      != NULL)))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      may_fallthru = false;
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
      if (may_fallthru)
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for the fallthru too.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (t.stmt, UNKNOWN_LOCATION);
	  may_fallthru = false;
	}
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();
  return 0;
}

namespace {

const pass_data pass_data_lower_cf =
{
  GIMPLE_PASS, /* type */
  "lower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lcf, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_cf : public gimple_opt_pass
{
public:
  pass_lower_cf (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_cf, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return lower_function_body (); }

}; // class pass_lower_cf

} // anon namespace

gimple_opt_pass *
make_pass_lower_cf (gcc::context *ctxt)
{
  return new pass_lower_cf (ctxt);
}

/* Lower sequence SEQ.  Unlike gimplification, the statements are not
   relowered when they are changed -- if this has to be done, the lowering
   routine must do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq *seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}


/* Lower the OpenMP directive statement pointed to by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body_ptr (stmt), data);
  gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
  gimple_omp_set_body (stmt, NULL);
  gsi_next (gsi);
}


/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.  */
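
/* As an illustrative example (not a literal dump): in a body such as

     abort ();
     return;

   the call to abort is ECF_NORETURN, so lower_stmt sets cannot_fallthru,
   and the unreachable GIMPLE_RETURN that follows is simply removed
   rather than being lowered into a goto to the common return site.  */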

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	lower_try_catch (gsi, data);
      else
	{
	  /* It must be a GIMPLE_TRY_FINALLY.  */
	  bool cannot_fallthru;
	  lower_sequence (gimple_try_eval_ptr (stmt), data);
	  cannot_fallthru = data->cannot_fallthru;

	  /* The finally clause is always executed after the try clause,
	     so if it does not fall through, then the try-finally will not
	     fall through.  Otherwise, if the try clause does not fall
	     through, then when the finally clause falls through it will
	     resume execution wherever the try clause was going.  So the
	     whole try-finally will only fall through if both the try
	     clause and the finally clause fall through.  */
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_try_cleanup_ptr (stmt), data);
	  data->cannot_fallthru |= cannot_fallthru;
	  gsi_next (gsi);
	}
      return;

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	lower_sequence (gimple_eh_else_n_body_ptr (eh_else_stmt), data);
	lower_sequence (gimple_eh_else_e_body_ptr (eh_else_stmt), data);
      }
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	unsigned i;

	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (EXPR_P (arg))
	      TREE_SET_BLOCK (arg, data->block);
	  }

	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
	  {
	    if (DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	      {
		lower_builtin_setjmp (gsi);
		data->cannot_fallthru = false;
		return;
	      }
	    else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_POSIX_MEMALIGN
		     && flag_tree_bit_ccp
		     && gimple_builtin_call_types_compatible_p (stmt, decl))
	      {
		lower_builtin_posix_memalign (gsi);
		return;
	      }
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_GRID_BODY:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (
			as_a <gtransaction *> (stmt)),
		      data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}

/* Lower a GIMPLE_BIND statement pointed to by GSI.  DATA is passed
   through the recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gbind *stmt = as_a <gbind *> (gsi_stmt (*gsi));
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;

	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));

  /* Scrap DECL_CHAIN up to BLOCK_VARS to ease GC after we no longer
     need gimple_bind_vars.  */
  tree next;
  /* BLOCK_VARS and gimple_bind_vars share a common sub-chain.  Find
     it by marking all BLOCK_VARS.  */
  if (gimple_bind_block (stmt))
    for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
      TREE_VISITED (t) = 1;
  for (tree var = gimple_bind_vars (stmt);
       var && ! TREE_VISITED (var); var = next)
    {
      next = DECL_CHAIN (var);
      DECL_CHAIN (var) = NULL_TREE;
    }
  /* Unmark BLOCK_VARS.  */
  if (gimple_bind_block (stmt))
    for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
      TREE_VISITED (t) = 0;

  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple *stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_catch_handler_ptr (
			    as_a <gcatch *> (gsi_stmt (i))),
			  data);
	  if (!data->cannot_fallthru)
	    cannot_fallthru = false;
	}
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
	cannot_fallthru = false;
      break;

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}


/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of gimple_stmt_may_fallthru.  */

static bool
gimple_try_catch_may_fallthru (gtry *stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (
					 as_a <gcatch *> (gsi_stmt (i)))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}


/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */
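
/* For example (illustrative only): a call such as

     spin_forever ();

   where spin_forever happens never to return but is not declared
   noreturn is still reported as possibly falling through; any extra
   code emitted because of that is cleaned up by later passes.  */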

bool
gimple_stmt_may_fallthru (gimple *stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
	 control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
	 to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
	 can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (
	       gimple_bind_body (as_a <gbind *> (stmt)));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        return gimple_try_catch_may_fallthru (as_a <gtry *> (stmt));

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	return (gimple_seq_may_fallthru (gimple_eh_else_n_body (eh_else_stmt))
		|| gimple_seq_may_fallthru (gimple_eh_else_e_body (
					      eh_else_stmt)));
      }

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return !gimple_call_noreturn_p (stmt);

    default:
      return true;
    }
}


/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}


/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
  gimple *t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = data->return_statements.length () - 1;
       i >= 0; i--)
    {
      tmp_rs = data->return_statements[i];

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for many such returns.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

	  goto found;
	}
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  data->return_statements.safe_push (tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into two other builtins, namely __builtin_setjmp_setup
   and __builtin_setjmp_receiver.

   After full lowering, the body of the function should look like:

    {
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
    }

   During cfg creation an extra per-function (or per-OpenMP region)
   block with an ABNORMAL_DISPATCHER internal call will be added; it is
   the unique destination of all the abnormal call edges and the unique
   source of all the abnormal edges to the receivers, thus keeping the
   complexity explosion localized.  */

static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple *g;

  /* __builtin_setjmp_{setup,receiver} aren't ECF_RETURNS_TWICE and for RTL
     these builtins are modelled as non-local label jumps to the label
     that is passed to these two builtins, so pretend we have a non-local
     label during GIMPLE passes too.  See PR60003.  */
  cfun->has_nonlocal_label = 1;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  tree orig_dest = dest = gimple_call_lhs (stmt);
  if (orig_dest && TREE_CODE (orig_dest) == SSA_NAME)
    dest = create_tmp_reg (TREE_TYPE (orig_dest));

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build orig_dest = dest if necessary.  */
  if (dest != orig_dest)
    {
      g = gimple_build_assign (orig_dest, dest);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}

/* Lower calls to posix_memalign to
     res = posix_memalign (ptr, align, size);
     if (res == 0)
       *ptr = __builtin_assume_aligned (*ptr, align);
   or to
     void *tem;
     res = posix_memalign (&tem, align, size);
     if (res == 0)
       ptr = __builtin_assume_aligned (tem, align);
   in case the first argument was &ptr.  That way we can get at the
   alignment of the heap pointer in CCP.  */
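
/* For instance (an illustrative sketch), for a source-level call

     void *p;
     if (posix_memalign (&p, 32, n) == 0)
       ... use p ...

   the rewrite above lets CCP (see the flag_tree_bit_ccp check in
   lower_stmt) derive that p is 32-byte aligned inside the conditional,
   which the plain library call by itself would not expose.  */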

static void
lower_builtin_posix_memalign (gimple_stmt_iterator *gsi)
{
  gimple *stmt, *call = gsi_stmt (*gsi);
  tree pptr = gimple_call_arg (call, 0);
  tree align = gimple_call_arg (call, 1);
  tree res = gimple_call_lhs (call);
  tree ptr = create_tmp_reg (ptr_type_node);
  if (TREE_CODE (pptr) == ADDR_EXPR)
    {
      tree tem = create_tmp_var (ptr_type_node);
      TREE_ADDRESSABLE (tem) = 1;
      gimple_call_set_arg (call, 0, build_fold_addr_expr (tem));
      stmt = gimple_build_assign (ptr, tem);
    }
  else
    stmt = gimple_build_assign (ptr,
				fold_build2 (MEM_REF, ptr_type_node, pptr,
					     build_int_cst (ptr_type_node, 0)));
  if (res == NULL_TREE)
    {
      res = create_tmp_reg (integer_type_node);
      gimple_call_set_lhs (call, res);
    }
  tree align_label = create_artificial_label (UNKNOWN_LOCATION);
  tree noalign_label = create_artificial_label (UNKNOWN_LOCATION);
  gimple *cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
				    align_label, noalign_label);
  gsi_insert_after (gsi, cond, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (align_label), GSI_NEW_STMT);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_call (builtin_decl_implicit (BUILT_IN_ASSUME_ALIGNED),
			    2, ptr, align);
  gimple_call_set_lhs (stmt, ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_assign (fold_build2 (MEM_REF, ptr_type_node, pptr,
					   build_int_cst (ptr_type_node, 0)),
			      ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (noalign_label), GSI_NEW_STMT);
}


/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs also contain function/type/constant declarations
	 that we don't need to care about.  */
      if (!VAR_P (var))
	continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
	continue;

      /* Record the variable.  */
      add_local_decl (DECL_STRUCT_FUNCTION (fn), var);
    }
}


/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}