/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

21*38fd1498Szrj #include "config.h"
22*38fd1498Szrj #include "system.h"
23*38fd1498Szrj #include "coretypes.h"
24*38fd1498Szrj #include "backend.h"
25*38fd1498Szrj #include "tree.h"
26*38fd1498Szrj #include "gimple.h"
27*38fd1498Szrj #include "tree-pass.h"
28*38fd1498Szrj #include "fold-const.h"
29*38fd1498Szrj #include "tree-nested.h"
30*38fd1498Szrj #include "calls.h"
31*38fd1498Szrj #include "gimple-iterator.h"
32*38fd1498Szrj #include "gimple-low.h"
33*38fd1498Szrj #include "predict.h"
34*38fd1498Szrj #include "gimple-predict.h"
35*38fd1498Szrj 
36*38fd1498Szrj /* The differences between High GIMPLE and Low GIMPLE are the
37*38fd1498Szrj    following:
38*38fd1498Szrj 
39*38fd1498Szrj    1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).
40*38fd1498Szrj 
41*38fd1498Szrj    2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
42*38fd1498Szrj       flow and exception regions are built as an on-the-side region
43*38fd1498Szrj       hierarchy (See tree-eh.c:lower_eh_constructs).
44*38fd1498Szrj 
45*38fd1498Szrj    3- Multiple identical return statements are grouped into a single
46*38fd1498Szrj       return and gotos to the unique return site.  */
47*38fd1498Szrj 
/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  /* Label marking the unique return site; duplicate returns become
     gotos targeting this label.  */
  tree label;
  /* The representative GIMPLE_RETURN statement emitted at the label.  */
  greturn *stmt;
};
typedef struct return_statements_t return_statements_t;
57*38fd1498Szrj 
58*38fd1498Szrj 
/* State threaded through the lowering recursion.  */
struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  vec<return_statements_t> return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;
};
71*38fd1498Szrj 
/* Forward declarations for the mutually recursive lowering routines.  */
static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
static void lower_builtin_posix_memalign (gimple_stmt_iterator *);
78*38fd1498Szrj 
79*38fd1498Szrj 
/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  Returns 0 (no TODO flags for the pass manager).  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple *bind;
  gimple *x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  /* Mark the outermost block as seen, so lower_gimple_bind treats a
     re-occurrence of it as the function scope rather than a new one.  */
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  /* Seed the lowered sequence with the single outermost GIMPLE_BIND
     and lower it in place; its contents end up flattened into
     LOWERED_BODY.  */
  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  /* Position the iterator at the end so the fixups below append.  */
  i = gsi_last (lowered_body);

  /* If we had begin stmt markers from e.g. PCH, but this compilation
     doesn't want them, lower_stmt will have cleaned them up; we can
     now clear the flag that indicates we had them.  */
  if (!MAY_HAVE_DEBUG_MARKER_STMTS && cfun->debug_nonbind_markers)
    {
      /* This counter needs not be exact, but before lowering it will
	 most certainly be.  */
      gcc_assert (cfun->debug_marker_count == 0);
      cfun->debug_nonbind_markers = false;
    }

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  bool may_fallthru = gimple_seq_may_fallthru (lowered_body);
  if (may_fallthru
      && (data.return_statements.is_empty ()
	  || (gimple_return_retval (data.return_statements.last().stmt)
	      != NULL)))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      may_fallthru = false;
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
      if (may_fallthru)
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for the fallthru too.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (t.stmt, UNKNOWN_LOCATION);
	  may_fallthru = false;
	}
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  /* lower_gimple_bind chains subblocks in reverse; restore source order.  */
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();
  return 0;
}
170*38fd1498Szrj 
namespace {

/* Pass metadata: runs unconditionally (TV_NONE, OPTGROUP_NONE),
   requires any GIMPLE form and provides lowered control flow
   (PROP_gimple_lcf).  */

const pass_data pass_data_lower_cf =
{
  GIMPLE_PASS, /* type */
  "lower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lcf, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper that invokes lower_function_body on each function.  */

class pass_lower_cf : public gimple_opt_pass
{
public:
  pass_lower_cf (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_cf, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return lower_function_body (); }

}; // class pass_lower_cf

} // anon namespace
199*38fd1498Szrj 
/* Factory for the lowering pass; the pass manager owns the result.  */
gimple_opt_pass *
make_pass_lower_cf (gcc::context *ctxt)
{
  return new pass_lower_cf (ctxt);
}
205*38fd1498Szrj 
206*38fd1498Szrj /* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
207*38fd1498Szrj    when they are changed -- if this has to be done, the lowering routine must
208*38fd1498Szrj    do it explicitly.  DATA is passed through the recursion.  */
209*38fd1498Szrj 
210*38fd1498Szrj static void
lower_sequence(gimple_seq * seq,struct lower_data * data)211*38fd1498Szrj lower_sequence (gimple_seq *seq, struct lower_data *data)
212*38fd1498Szrj {
213*38fd1498Szrj   gimple_stmt_iterator gsi;
214*38fd1498Szrj 
215*38fd1498Szrj   for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
216*38fd1498Szrj     lower_stmt (&gsi, data);
217*38fd1498Szrj }
218*38fd1498Szrj 
219*38fd1498Szrj 
220*38fd1498Szrj /* Lower the OpenMP directive statement pointed by GSI.  DATA is
221*38fd1498Szrj    passed through the recursion.  */
222*38fd1498Szrj 
223*38fd1498Szrj static void
lower_omp_directive(gimple_stmt_iterator * gsi,struct lower_data * data)224*38fd1498Szrj lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
225*38fd1498Szrj {
226*38fd1498Szrj   gimple *stmt;
227*38fd1498Szrj 
228*38fd1498Szrj   stmt = gsi_stmt (*gsi);
229*38fd1498Szrj 
230*38fd1498Szrj   lower_sequence (gimple_omp_body_ptr (stmt), data);
231*38fd1498Szrj   gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
232*38fd1498Szrj   gimple_omp_set_body (stmt, NULL);
233*38fd1498Szrj   gsi_next (gsi);
234*38fd1498Szrj }
235*38fd1498Szrj 
236*38fd1498Szrj 
/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.

   Every path through this function either advances GSI past the current
   statement or removes it, so callers (lower_sequence) loop without
   advancing.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Attach the current lexical block to the statement.  */
  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      /* Lowered branches transfer control unconditionally; the next
	 statement is not reachable by falling through.  */
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  /* Unreachable return: delete it so EH lowering does not add
	     useless edges for it.  */
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	lower_try_catch (gsi, data);
      else
	{
	  /* It must be a GIMPLE_TRY_FINALLY.  */
	  bool cannot_fallthru;
	  lower_sequence (gimple_try_eval_ptr (stmt), data);
	  cannot_fallthru = data->cannot_fallthru;

	  /* The finally clause is always executed after the try clause,
	     so if it does not fall through, then the try-finally will not
	     fall through.  Otherwise, if the try clause does not fall
	     through, then when the finally clause falls through it will
	     resume execution wherever the try clause was going.  So the
	     whole try-finally will only fall through if both the try
	     clause and the finally clause fall through.  */
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_try_cleanup_ptr (stmt), data);
	  data->cannot_fallthru |= cannot_fallthru;
	  gsi_next (gsi);
	}
      return;

    case GIMPLE_EH_ELSE:
      /* Lower both alternative bodies (normal path and EH path).  */
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	lower_sequence (gimple_eh_else_n_body_ptr (eh_else_stmt), data);
	lower_sequence (gimple_eh_else_e_body_ptr (eh_else_stmt), data);
      }
      break;

    case GIMPLE_DEBUG:
      gcc_checking_assert (cfun->debug_nonbind_markers);
      /* We can't possibly have debug bind stmts before lowering, we
	 first emit them when entering SSA.  */
      gcc_checking_assert (gimple_debug_nonbind_marker_p (stmt));
      /* Propagate fallthruness.  */
      /* If the function (e.g. from PCH) had debug stmts, but they're
	 disabled for this compilation, remove them.  */
      if (!MAY_HAVE_DEBUG_MARKER_STMTS)
	gsi_remove (gsi, true);
      else
	gsi_next (gsi);
      return;

    /* These statements need no special lowering; fall out of the
       switch to record fallthruness and advance.  */
    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	unsigned i;

	/* Propagate the current block into any tree expressions still
	   present among the call arguments.  */
	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (EXPR_P (arg))
	      TREE_SET_BLOCK (arg, data->block);
	  }

	/* A few builtins get dedicated lowering.  */
	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
	  {
	    if (DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	      {
		lower_builtin_setjmp (gsi);
		data->cannot_fallthru = false;
		return;
	      }
	    else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_POSIX_MEMALIGN
		     && flag_tree_bit_ccp
		     && gimple_builtin_call_types_compatible_p (stmt, decl))
	      {
		lower_builtin_posix_memalign (gsi);
		return;
	      }
	  }

	/* Calls to noreturn functions never fall through.  */
	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_GRID_BODY:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (
			as_a <gtransaction *> (stmt)),
		      data);
      break;

    default:
      gcc_unreachable ();
    }

  /* Statements that broke out of the switch may fall through.  */
  data->cannot_fallthru = false;
  gsi_next (gsi);
}
407*38fd1498Szrj 
/* Lower a bind_expr TSI.  DATA is passed through the recursion.
   Splices the bind's body into the enclosing sequence, links the
   bind's BLOCK into the function's block tree, records its variables,
   and deletes the GIMPLE_BIND itself.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gbind *stmt = as_a <gbind *> (gsi_stmt (*gsi));
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  /* Prepend NEW_BLOCK to OLD_BLOCK's subblocks; the reversed
	     order is fixed up by blocks_nreverse afterwards.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;

	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));

  /* Scrap DECL_CHAIN up to BLOCK_VARS to ease GC after we no longer
     need gimple_bind_vars.  */
  tree next;
  /* BLOCK_VARS and gimple_bind_vars share a common sub-chain.  Find
     it by marking all BLOCK_VARS.  */
  if (gimple_bind_block (stmt))
    for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
      TREE_VISITED (t) = 1;
  /* Clear DECL_CHAIN on the bind-only prefix, i.e. vars not also
     reachable from BLOCK_VARS.  */
  for (tree var = gimple_bind_vars (stmt);
       var && ! TREE_VISITED (var); var = next)
    {
      next = DECL_CHAIN (var);
      DECL_CHAIN (var) = NULL_TREE;
    }
  /* Unmark BLOCK_VARS.  */
  if (gimple_bind_block (stmt))
    for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
      TREE_VISITED (t) = 0;

  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
481*38fd1498Szrj 
/* Same as above, but for a GIMPLE_TRY_CATCH.  Lowers the try body and
   the handlers, and computes whether the whole construct can fall
   through into DATA->cannot_fallthru.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple *stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  /* Dispatch on the first statement of the cleanup sequence, which
     determines what kind of handler this try/catch carries.  */
  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_catch_handler_ptr (
                            as_a <gcatch *> (gsi_stmt (i))),
			  data);
	  if (!data->cannot_fallthru)
	    cannot_fallthru = false;
	}
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
	cannot_fallthru = false;
      break;

    case GIMPLE_DEBUG:
      /* NOTE(review): STMT here is the enclosing GIMPLE_TRY, not the
	 debug statement the switch dispatched on (gsi_stmt (i)), so
	 this assert looks like it was meant to test gsi_stmt (i) --
	 confirm against upstream GCC before changing.  */
      gcc_checking_assert (gimple_debug_begin_stmt_p (stmt));
      break;

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}
548*38fd1498Szrj 
549*38fd1498Szrj 
/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of gimple_stmt_may_fallthru.  */

static bool
gimple_try_catch_may_fallthru (gtry *stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  /* Otherwise, dispatch on the first statement of the cleanup
     sequence, which identifies the handler kind.  */
  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (
					 as_a <gcatch *> (gsi_stmt (i)))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}
601*38fd1498Szrj 
602*38fd1498Szrj 
603*38fd1498Szrj /* Try to determine if we can continue executing the statement
604*38fd1498Szrj    immediately following STMT.  This guess need not be 100% accurate;
605*38fd1498Szrj    simply be conservative and return true if we don't know.  This is
606*38fd1498Szrj    used only to avoid stupidly generating extra code. If we're wrong,
607*38fd1498Szrj    we'll just delete the extra code later.  */
608*38fd1498Szrj 
609*38fd1498Szrj bool
gimple_stmt_may_fallthru(gimple * stmt)610*38fd1498Szrj gimple_stmt_may_fallthru (gimple *stmt)
611*38fd1498Szrj {
612*38fd1498Szrj   if (!stmt)
613*38fd1498Szrj     return true;
614*38fd1498Szrj 
615*38fd1498Szrj   switch (gimple_code (stmt))
616*38fd1498Szrj     {
617*38fd1498Szrj     case GIMPLE_GOTO:
618*38fd1498Szrj     case GIMPLE_RETURN:
619*38fd1498Szrj     case GIMPLE_RESX:
620*38fd1498Szrj       /* Easy cases.  If the last statement of the seq implies
621*38fd1498Szrj 	 control transfer, then we can't fall through.  */
622*38fd1498Szrj       return false;
623*38fd1498Szrj 
624*38fd1498Szrj     case GIMPLE_SWITCH:
625*38fd1498Szrj       /* Switch has already been lowered and represents a branch
626*38fd1498Szrj 	 to a selected label and hence can't fall through.  */
627*38fd1498Szrj       return false;
628*38fd1498Szrj 
629*38fd1498Szrj     case GIMPLE_COND:
630*38fd1498Szrj       /* GIMPLE_COND's are already lowered into a two-way branch.  They
631*38fd1498Szrj 	 can't fall through.  */
632*38fd1498Szrj       return false;
633*38fd1498Szrj 
634*38fd1498Szrj     case GIMPLE_BIND:
635*38fd1498Szrj       return gimple_seq_may_fallthru (
636*38fd1498Szrj 	       gimple_bind_body (as_a <gbind *> (stmt)));
637*38fd1498Szrj 
638*38fd1498Szrj     case GIMPLE_TRY:
639*38fd1498Szrj       if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
640*38fd1498Szrj         return gimple_try_catch_may_fallthru (as_a <gtry *> (stmt));
641*38fd1498Szrj 
642*38fd1498Szrj       /* It must be a GIMPLE_TRY_FINALLY.  */
643*38fd1498Szrj 
644*38fd1498Szrj       /* The finally clause is always executed after the try clause,
645*38fd1498Szrj 	 so if it does not fall through, then the try-finally will not
646*38fd1498Szrj 	 fall through.  Otherwise, if the try clause does not fall
647*38fd1498Szrj 	 through, then when the finally clause falls through it will
648*38fd1498Szrj 	 resume execution wherever the try clause was going.  So the
649*38fd1498Szrj 	 whole try-finally will only fall through if both the try
650*38fd1498Szrj 	 clause and the finally clause fall through.  */
651*38fd1498Szrj       return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
652*38fd1498Szrj 	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));
653*38fd1498Szrj 
654*38fd1498Szrj     case GIMPLE_EH_ELSE:
655*38fd1498Szrj       {
656*38fd1498Szrj 	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
657*38fd1498Szrj 	return (gimple_seq_may_fallthru (gimple_eh_else_n_body (eh_else_stmt))
658*38fd1498Szrj 		|| gimple_seq_may_fallthru (gimple_eh_else_e_body (
659*38fd1498Szrj 					      eh_else_stmt)));
660*38fd1498Szrj       }
661*38fd1498Szrj 
662*38fd1498Szrj     case GIMPLE_CALL:
663*38fd1498Szrj       /* Functions that do not return do not fall through.  */
664*38fd1498Szrj       return !gimple_call_noreturn_p (stmt);
665*38fd1498Szrj 
666*38fd1498Szrj     default:
667*38fd1498Szrj       return true;
668*38fd1498Szrj     }
669*38fd1498Szrj }
670*38fd1498Szrj 
671*38fd1498Szrj 
672*38fd1498Szrj /* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */
673*38fd1498Szrj 
674*38fd1498Szrj bool
gimple_seq_may_fallthru(gimple_seq seq)675*38fd1498Szrj gimple_seq_may_fallthru (gimple_seq seq)
676*38fd1498Szrj {
677*38fd1498Szrj   return gimple_stmt_may_fallthru (gimple_seq_last_nondebug_stmt (seq));
678*38fd1498Szrj }
679*38fd1498Szrj 
680*38fd1498Szrj 
681*38fd1498Szrj /* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */
682*38fd1498Szrj 
683*38fd1498Szrj static void
lower_gimple_return(gimple_stmt_iterator * gsi,struct lower_data * data)684*38fd1498Szrj lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
685*38fd1498Szrj {
686*38fd1498Szrj   greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
687*38fd1498Szrj   gimple *t;
688*38fd1498Szrj   int i;
689*38fd1498Szrj   return_statements_t tmp_rs;
690*38fd1498Szrj 
691*38fd1498Szrj   /* Match this up with an existing return statement that's been created.  */
692*38fd1498Szrj   for (i = data->return_statements.length () - 1;
693*38fd1498Szrj        i >= 0; i--)
694*38fd1498Szrj     {
695*38fd1498Szrj       tmp_rs = data->return_statements[i];
696*38fd1498Szrj 
697*38fd1498Szrj       if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
698*38fd1498Szrj 	{
699*38fd1498Szrj 	  /* Remove the line number from the representative return statement.
700*38fd1498Szrj 	     It now fills in for many such returns.  Failure to remove this
701*38fd1498Szrj 	     will result in incorrect results for coverage analysis.  */
702*38fd1498Szrj 	  gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);
703*38fd1498Szrj 
704*38fd1498Szrj 	  goto found;
705*38fd1498Szrj 	}
706*38fd1498Szrj     }
707*38fd1498Szrj 
708*38fd1498Szrj   /* Not found.  Create a new label and record the return statement.  */
709*38fd1498Szrj   tmp_rs.label = create_artificial_label (cfun->function_end_locus);
710*38fd1498Szrj   tmp_rs.stmt = stmt;
711*38fd1498Szrj   data->return_statements.safe_push (tmp_rs);
712*38fd1498Szrj 
713*38fd1498Szrj   /* Generate a goto statement and remove the return statement.  */
714*38fd1498Szrj  found:
715*38fd1498Szrj   /* When not optimizing, make sure user returns are preserved.  */
716*38fd1498Szrj   if (!optimize && gimple_has_location (stmt))
717*38fd1498Szrj     DECL_ARTIFICIAL (tmp_rs.label) = 0;
718*38fd1498Szrj   t = gimple_build_goto (tmp_rs.label);
719*38fd1498Szrj   gimple_set_location (t, gimple_location (stmt));
720*38fd1498Szrj   gimple_set_block (t, gimple_block (stmt));
721*38fd1498Szrj   gsi_insert_before (gsi, t, GSI_SAME_STMT);
722*38fd1498Szrj   gsi_remove (gsi, false);
723*38fd1498Szrj }
724*38fd1498Szrj 
725*38fd1498Szrj /* Lower a __builtin_setjmp GSI.
726*38fd1498Szrj 
727*38fd1498Szrj    __builtin_setjmp is passed a pointer to an array of five words (not
728*38fd1498Szrj    all will be used on all machines).  It operates similarly to the C
729*38fd1498Szrj    library function of the same name, but is more efficient.
730*38fd1498Szrj 
731*38fd1498Szrj    It is lowered into 2 other builtins, namely __builtin_setjmp_setup,
732*38fd1498Szrj    __builtin_setjmp_receiver.
733*38fd1498Szrj 
734*38fd1498Szrj    After full lowering, the body of the function should look like:
735*38fd1498Szrj 
736*38fd1498Szrj     {
737*38fd1498Szrj       int D.1844;
738*38fd1498Szrj       int D.2844;
739*38fd1498Szrj 
740*38fd1498Szrj       [...]
741*38fd1498Szrj 
742*38fd1498Szrj       __builtin_setjmp_setup (&buf, &<D1847>);
743*38fd1498Szrj       D.1844 = 0;
744*38fd1498Szrj       goto <D1846>;
745*38fd1498Szrj       <D1847>:;
746*38fd1498Szrj       __builtin_setjmp_receiver (&<D1847>);
747*38fd1498Szrj       D.1844 = 1;
748*38fd1498Szrj       <D1846>:;
749*38fd1498Szrj       if (D.1844 == 0) goto <D1848>; else goto <D1849>;
750*38fd1498Szrj 
751*38fd1498Szrj       [...]
752*38fd1498Szrj 
753*38fd1498Szrj       __builtin_setjmp_setup (&buf, &<D2847>);
754*38fd1498Szrj       D.2844 = 0;
755*38fd1498Szrj       goto <D2846>;
756*38fd1498Szrj       <D2847>:;
757*38fd1498Szrj       __builtin_setjmp_receiver (&<D2847>);
758*38fd1498Szrj       D.2844 = 1;
759*38fd1498Szrj       <D2846>:;
760*38fd1498Szrj       if (D.2844 == 0) goto <D2848>; else goto <D2849>;
761*38fd1498Szrj 
762*38fd1498Szrj       [...]
763*38fd1498Szrj 
764*38fd1498Szrj       <D3850>:;
765*38fd1498Szrj       return;
766*38fd1498Szrj     }
767*38fd1498Szrj 
768*38fd1498Szrj    During cfg creation an extra per-function (or per-OpenMP region)
769*38fd1498Szrj    block with ABNORMAL_DISPATCHER internal call will be added, unique
770*38fd1498Szrj    destination of all the abnormal call edges and the unique source of
771*38fd1498Szrj    all the abnormal edges to the receivers, thus keeping the complexity
772*38fd1498Szrj    explosion localized.  */
773*38fd1498Szrj 
774*38fd1498Szrj static void
lower_builtin_setjmp(gimple_stmt_iterator * gsi)775*38fd1498Szrj lower_builtin_setjmp (gimple_stmt_iterator *gsi)
776*38fd1498Szrj {
777*38fd1498Szrj   gimple *stmt = gsi_stmt (*gsi);
778*38fd1498Szrj   location_t loc = gimple_location (stmt);
779*38fd1498Szrj   tree cont_label = create_artificial_label (loc);
780*38fd1498Szrj   tree next_label = create_artificial_label (loc);
781*38fd1498Szrj   tree dest, t, arg;
782*38fd1498Szrj   gimple *g;
783*38fd1498Szrj 
784*38fd1498Szrj   /* __builtin_setjmp_{setup,receiver} aren't ECF_RETURNS_TWICE and for RTL
785*38fd1498Szrj      these builtins are modelled as non-local label jumps to the label
786*38fd1498Szrj      that is passed to these two builtins, so pretend we have a non-local
787*38fd1498Szrj      label during GIMPLE passes too.  See PR60003.  */
788*38fd1498Szrj   cfun->has_nonlocal_label = 1;
789*38fd1498Szrj 
790*38fd1498Szrj   /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
791*38fd1498Szrj      passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
792*38fd1498Szrj   FORCED_LABEL (next_label) = 1;
793*38fd1498Szrj 
794*38fd1498Szrj   tree orig_dest = dest = gimple_call_lhs (stmt);
795*38fd1498Szrj   if (orig_dest && TREE_CODE (orig_dest) == SSA_NAME)
796*38fd1498Szrj     dest = create_tmp_reg (TREE_TYPE (orig_dest));
797*38fd1498Szrj 
798*38fd1498Szrj   /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
799*38fd1498Szrj   arg = build_addr (next_label);
800*38fd1498Szrj   t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
801*38fd1498Szrj   g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
802*38fd1498Szrj   gimple_set_location (g, loc);
803*38fd1498Szrj   gimple_set_block (g, gimple_block (stmt));
804*38fd1498Szrj   gsi_insert_before (gsi, g, GSI_SAME_STMT);
805*38fd1498Szrj 
806*38fd1498Szrj   /* Build 'DEST = 0' and insert.  */
807*38fd1498Szrj   if (dest)
808*38fd1498Szrj     {
809*38fd1498Szrj       g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
810*38fd1498Szrj       gimple_set_location (g, loc);
811*38fd1498Szrj       gimple_set_block (g, gimple_block (stmt));
812*38fd1498Szrj       gsi_insert_before (gsi, g, GSI_SAME_STMT);
813*38fd1498Szrj     }
814*38fd1498Szrj 
815*38fd1498Szrj   /* Build 'goto CONT_LABEL' and insert.  */
816*38fd1498Szrj   g = gimple_build_goto (cont_label);
817*38fd1498Szrj   gsi_insert_before (gsi, g, GSI_SAME_STMT);
818*38fd1498Szrj 
819*38fd1498Szrj   /* Build 'NEXT_LABEL:' and insert.  */
820*38fd1498Szrj   g = gimple_build_label (next_label);
821*38fd1498Szrj   gsi_insert_before (gsi, g, GSI_SAME_STMT);
822*38fd1498Szrj 
823*38fd1498Szrj   /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
824*38fd1498Szrj   arg = build_addr (next_label);
825*38fd1498Szrj   t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
826*38fd1498Szrj   g = gimple_build_call (t, 1, arg);
827*38fd1498Szrj   gimple_set_location (g, loc);
828*38fd1498Szrj   gimple_set_block (g, gimple_block (stmt));
829*38fd1498Szrj   gsi_insert_before (gsi, g, GSI_SAME_STMT);
830*38fd1498Szrj 
831*38fd1498Szrj   /* Build 'DEST = 1' and insert.  */
832*38fd1498Szrj   if (dest)
833*38fd1498Szrj     {
834*38fd1498Szrj       g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
835*38fd1498Szrj 						       integer_one_node));
836*38fd1498Szrj       gimple_set_location (g, loc);
837*38fd1498Szrj       gimple_set_block (g, gimple_block (stmt));
838*38fd1498Szrj       gsi_insert_before (gsi, g, GSI_SAME_STMT);
839*38fd1498Szrj     }
840*38fd1498Szrj 
841*38fd1498Szrj   /* Build 'CONT_LABEL:' and insert.  */
842*38fd1498Szrj   g = gimple_build_label (cont_label);
843*38fd1498Szrj   gsi_insert_before (gsi, g, GSI_SAME_STMT);
844*38fd1498Szrj 
845*38fd1498Szrj   /* Build orig_dest = dest if necessary.  */
846*38fd1498Szrj   if (dest != orig_dest)
847*38fd1498Szrj     {
848*38fd1498Szrj       g = gimple_build_assign (orig_dest, dest);
849*38fd1498Szrj       gsi_insert_before (gsi, g, GSI_SAME_STMT);
850*38fd1498Szrj     }
851*38fd1498Szrj 
852*38fd1498Szrj   /* Remove the call to __builtin_setjmp.  */
853*38fd1498Szrj   gsi_remove (gsi, false);
854*38fd1498Szrj }
855*38fd1498Szrj 
856*38fd1498Szrj /* Lower calls to posix_memalign to
857*38fd1498Szrj      res = posix_memalign (ptr, align, size);
858*38fd1498Szrj      if (res == 0)
859*38fd1498Szrj        *ptr = __builtin_assume_aligned (*ptr, align);
860*38fd1498Szrj    or to
861*38fd1498Szrj      void *tem;
862*38fd1498Szrj      res = posix_memalign (&tem, align, size);
863*38fd1498Szrj      if (res == 0)
864*38fd1498Szrj        ptr = __builtin_assume_aligned (tem, align);
865*38fd1498Szrj    in case the first argument was &ptr.  That way we can get at the
866*38fd1498Szrj    alignment of the heap pointer in CCP.  */
867*38fd1498Szrj 
868*38fd1498Szrj static void
lower_builtin_posix_memalign(gimple_stmt_iterator * gsi)869*38fd1498Szrj lower_builtin_posix_memalign (gimple_stmt_iterator *gsi)
870*38fd1498Szrj {
871*38fd1498Szrj   gimple *stmt, *call = gsi_stmt (*gsi);
872*38fd1498Szrj   tree pptr = gimple_call_arg (call, 0);
873*38fd1498Szrj   tree align = gimple_call_arg (call, 1);
874*38fd1498Szrj   tree res = gimple_call_lhs (call);
875*38fd1498Szrj   tree ptr = create_tmp_reg (ptr_type_node);
876*38fd1498Szrj   if (TREE_CODE (pptr) == ADDR_EXPR)
877*38fd1498Szrj     {
878*38fd1498Szrj       tree tem = create_tmp_var (ptr_type_node);
879*38fd1498Szrj       TREE_ADDRESSABLE (tem) = 1;
880*38fd1498Szrj       gimple_call_set_arg (call, 0, build_fold_addr_expr (tem));
881*38fd1498Szrj       stmt = gimple_build_assign (ptr, tem);
882*38fd1498Szrj     }
883*38fd1498Szrj   else
884*38fd1498Szrj     stmt = gimple_build_assign (ptr,
885*38fd1498Szrj 				fold_build2 (MEM_REF, ptr_type_node, pptr,
886*38fd1498Szrj 					     build_int_cst (ptr_type_node, 0)));
887*38fd1498Szrj   if (res == NULL_TREE)
888*38fd1498Szrj     {
889*38fd1498Szrj       res = create_tmp_reg (integer_type_node);
890*38fd1498Szrj       gimple_call_set_lhs (call, res);
891*38fd1498Szrj     }
892*38fd1498Szrj   tree align_label = create_artificial_label (UNKNOWN_LOCATION);
893*38fd1498Szrj   tree noalign_label = create_artificial_label (UNKNOWN_LOCATION);
894*38fd1498Szrj   gimple *cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
895*38fd1498Szrj 				   align_label, noalign_label);
896*38fd1498Szrj   gsi_insert_after (gsi, cond, GSI_NEW_STMT);
897*38fd1498Szrj   gsi_insert_after (gsi, gimple_build_label (align_label), GSI_NEW_STMT);
898*38fd1498Szrj   gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
899*38fd1498Szrj   stmt = gimple_build_call (builtin_decl_implicit (BUILT_IN_ASSUME_ALIGNED),
900*38fd1498Szrj 			    2, ptr, align);
901*38fd1498Szrj   gimple_call_set_lhs (stmt, ptr);
902*38fd1498Szrj   gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
903*38fd1498Szrj   stmt = gimple_build_assign (fold_build2 (MEM_REF, ptr_type_node, pptr,
904*38fd1498Szrj 					   build_int_cst (ptr_type_node, 0)),
905*38fd1498Szrj 			      ptr);
906*38fd1498Szrj   gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
907*38fd1498Szrj   gsi_insert_after (gsi, gimple_build_label (noalign_label), GSI_NEW_STMT);
908*38fd1498Szrj }
909*38fd1498Szrj 
910*38fd1498Szrj 
911*38fd1498Szrj /* Record the variables in VARS into function FN.  */
912*38fd1498Szrj 
913*38fd1498Szrj void
record_vars_into(tree vars,tree fn)914*38fd1498Szrj record_vars_into (tree vars, tree fn)
915*38fd1498Szrj {
916*38fd1498Szrj   for (; vars; vars = DECL_CHAIN (vars))
917*38fd1498Szrj     {
918*38fd1498Szrj       tree var = vars;
919*38fd1498Szrj 
920*38fd1498Szrj       /* BIND_EXPRs contains also function/type/constant declarations
921*38fd1498Szrj          we don't need to care about.  */
922*38fd1498Szrj       if (!VAR_P (var))
923*38fd1498Szrj 	continue;
924*38fd1498Szrj 
925*38fd1498Szrj       /* Nothing to do in this case.  */
926*38fd1498Szrj       if (DECL_EXTERNAL (var))
927*38fd1498Szrj 	continue;
928*38fd1498Szrj 
929*38fd1498Szrj       /* Record the variable.  */
930*38fd1498Szrj       add_local_decl (DECL_STRUCT_FUNCTION (fn), var);
931*38fd1498Szrj     }
932*38fd1498Szrj }
933*38fd1498Szrj 
934*38fd1498Szrj 
935*38fd1498Szrj /* Record the variables in VARS into current_function_decl.  */
936*38fd1498Szrj 
937*38fd1498Szrj void
record_vars(tree vars)938*38fd1498Szrj record_vars (tree vars)
939*38fd1498Szrj {
940*38fd1498Szrj   record_vars_into (vars, current_function_decl);
941*38fd1498Szrj }
942