1 /* Nested function decomposition for GIMPLE.
2    Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
3    Free Software Foundation, Inc.
4 
5    This file is part of GCC.
6 
7    GCC is free software; you can redistribute it and/or modify
8    it under the terms of the GNU General Public License as published by
9    the Free Software Foundation; either version 3, or (at your option)
10    any later version.
11 
12    GCC is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15    GNU General Public License for more details.
16 
17    You should have received a copy of the GNU General Public License
18    along with GCC; see the file COPYING3.  If not see
19    <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "function.h"
29 #include "tree-dump.h"
30 #include "tree-inline.h"
31 #include "gimple.h"
32 #include "tree-iterator.h"
33 #include "tree-flow.h"
34 #include "cgraph.h"
35 #include "expr.h"
36 #include "langhooks.h"
37 #include "pointer-set.h"
38 #include "ggc.h"
39 
40 
41 /* The object of this pass is to lower the representation of a set of nested
42    functions in order to expose all of the gory details of the various
43    nonlocal references.  We want to do this sooner rather than later, in
44    order to give us more freedom in emitting all of the functions in question.
45 
46    Back in olden times, when gcc was young, we developed an insanely
47    complicated scheme whereby variables which were referenced nonlocally
48    were forced to live in the stack of the declaring function, and then
49    the nested functions magically discovered where these variables were
50    placed.  In order for this scheme to function properly, it required
51    that the outer function be partially expanded, then we switch to
52    compiling the inner function, and once done with those we switch back
53    to compiling the outer function.  Such delicate ordering requirements
54    make it difficult to do whole translation unit optimizations
55    involving such functions.
56 
57    The implementation here is much more direct.  Everything that can be
58    referenced by an inner function is a member of an explicitly created
59    structure herein called the "nonlocal frame struct".  The incoming
60    static chain for a nested function is a pointer to this struct in
61    the parent.  In this way, we settle on known offsets from a known
62    base, and so are decoupled from the logic that places objects in the
63    function's stack frame.  More importantly, we don't have to wait for
64    that to happen -- since the compilation of the inner function is no
65    longer tied to a real stack frame, the nonlocal frame struct can be
66    allocated anywhere.  Which means that the outer function is now
67    inlinable.
68 
69    Theory of operation here is very simple.  Iterate over all the
70    statements in all the functions (depth first) several times,
71    allocating structures and fields on demand.  In general we want to
72    examine inner functions first, so that we can avoid making
73    unnecessary changes to outer functions.
74 
75    The order of the passes matters a bit, in that later passes will be
76    skipped if it is discovered that the functions don't actually interact
77    at all.  That is, they're nested in the lexical sense but could have
78    been written as independent functions without change.  */
79 
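/* As a rough illustration (not something this file produces literally,
   since it operates on GIMPLE rather than C source), a nested function
   such as

	int
	outer (int x)
	{
	  int nested (void) { return x + 1; }
	  return nested ();
	}

   is conceptually rewritten so that X lives in an explicit frame object
   whose address is handed to NESTED as its static chain:

	struct FRAME_outer { int x; };

	static int
	nested (struct FRAME_outer *chain)
	{
	  return chain->x + 1;
	}

	int
	outer (int x)
	{
	  struct FRAME_outer frame;
	  frame.x = x;
	  return nested (&frame);
	}

   The struct tag, field and parameter names above are illustrative only;
   the real frame type and its fields are built on demand by the routines
   below (get_frame_type, lookup_field_for_decl, get_chain_decl).  */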
80 
81 struct nesting_info
82 {
83   struct nesting_info *outer;
84   struct nesting_info *inner;
85   struct nesting_info *next;
86 
87   struct pointer_map_t *field_map;
88   struct pointer_map_t *var_map;
89   bitmap suppress_expansion;
90 
91   tree context;
92   tree new_local_var_chain;
93   tree debug_var_chain;
94   tree frame_type;
95   tree frame_decl;
96   tree chain_field;
97   tree chain_decl;
98   tree nl_goto_field;
99 
100   bool any_parm_remapped;
101   bool any_tramp_created;
102   char static_chain_added;
103 };
104 
105 
106 /* Iterate over the nesting tree, starting with ROOT, depth first.  */
107 
108 static inline struct nesting_info *
109 iter_nestinfo_start (struct nesting_info *root)
110 {
111   while (root->inner)
112     root = root->inner;
113   return root;
114 }
115 
116 static inline struct nesting_info *
117 iter_nestinfo_next (struct nesting_info *node)
118 {
119   if (node->next)
120     return iter_nestinfo_start (node->next);
121   return node->outer;
122 }
123 
124 #define FOR_EACH_NEST_INFO(I, ROOT) \
125   for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
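/* For example (a sketch; PROCESS stands for whatever per-function work a
   pass below performs), the iterator is used as

	struct nesting_info *n;
	FOR_EACH_NEST_INFO (n, root)
	  process (n);

   Because iteration starts at the innermost descendant and only moves to
   a node after all of its children have been visited, an inner function
   is always processed before the function that contains it.  */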
126 
127 /* Obstack used for the bitmaps in the struct above.  */
128 static struct bitmap_obstack nesting_info_bitmap_obstack;
129 
130 
131 /* We're working in so many different function contexts simultaneously
132    that create_tmp_var is dangerous.  Prevent mishap.  */
133 #define create_tmp_var cant_use_create_tmp_var_here_dummy
134 
135 /* Like create_tmp_var, except record the variable for registration at
136    the given nesting level.  */
137 
138 static tree
139 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
140 {
141   tree tmp_var;
142 
143   /* If the type is of variable size or a type which must be created by the
144      frontend, something is wrong.  Note that we explicitly allow
145      incomplete types here, since we create them ourselves.  */
146   gcc_assert (!TREE_ADDRESSABLE (type));
147   gcc_assert (!TYPE_SIZE_UNIT (type)
148 	      || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
149 
150   tmp_var = create_tmp_var_raw (type, prefix);
151   DECL_CONTEXT (tmp_var) = info->context;
152   TREE_CHAIN (tmp_var) = info->new_local_var_chain;
153   DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
154   if (TREE_CODE (type) == COMPLEX_TYPE
155       || TREE_CODE (type) == VECTOR_TYPE)
156     DECL_GIMPLE_REG_P (tmp_var) = 1;
157 
158   info->new_local_var_chain = tmp_var;
159 
160   return tmp_var;
161 }
162 
163 /* Take the address of EXP to be used within function CONTEXT.
164    Mark it for addressability as necessary.  */
165 
166 tree
167 build_addr (tree exp, tree context)
168 {
169   tree base = exp;
170   tree save_context;
171   tree retval;
172 
173   while (handled_component_p (base))
174     base = TREE_OPERAND (base, 0);
175 
176   if (DECL_P (base))
177     TREE_ADDRESSABLE (base) = 1;
178 
179   /* Building the ADDR_EXPR will compute a set of properties for
180      that ADDR_EXPR.  Those properties are unfortunately context
181      specific, i.e., they are dependent on CURRENT_FUNCTION_DECL.
182 
183      Temporarily set CURRENT_FUNCTION_DECL to the desired context,
184      build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL.  That
185      way the properties for the ADDR_EXPR are computed properly.  */
186   save_context = current_function_decl;
187   current_function_decl = context;
188   retval = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
189   current_function_decl = save_context;
190   return retval;
191 }
192 
193 /* Insert FIELD into TYPE, sorted by alignment requirements.  */
194 
195 void
196 insert_field_into_struct (tree type, tree field)
197 {
198   tree *p;
199 
200   DECL_CONTEXT (field) = type;
201 
202   for (p = &TYPE_FIELDS (type); *p ; p = &TREE_CHAIN (*p))
203     if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
204       break;
205 
206   TREE_CHAIN (field) = *p;
207   *p = field;
208 
209   /* Set correct alignment for frame struct type.  */
210   if (TYPE_ALIGN (type) < DECL_ALIGN (field))
211     TYPE_ALIGN (type) = DECL_ALIGN (field);
212 }
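/* A worked example of the ordering above (purely illustrative): inserting
   fields with alignments of 32, 64 and 8 bits, in that order, yields the
   field list 64, 32, 8.  Each new field is placed before the first
   existing field whose alignment it meets or exceeds, so the list stays
   sorted by non-increasing alignment, which helps keep padding in the
   frame struct to a minimum.  */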
213 
214 /* Build or return the RECORD_TYPE that describes the frame state that is
215    shared between INFO->CONTEXT and its nested functions.  This record will
216    not be complete until finalize_nesting_tree; up until that point we'll
217    be adding fields as necessary.
218 
219    We also build the DECL that represents this frame in the function.  */
220 
221 static tree
222 get_frame_type (struct nesting_info *info)
223 {
224   tree type = info->frame_type;
225   if (!type)
226     {
227       char *name;
228 
229       type = make_node (RECORD_TYPE);
230 
231       name = concat ("FRAME.",
232 		     IDENTIFIER_POINTER (DECL_NAME (info->context)),
233 		     NULL);
234       TYPE_NAME (type) = get_identifier (name);
235       free (name);
236 
237       info->frame_type = type;
238       info->frame_decl = create_tmp_var_for (info, type, "FRAME");
239 
240       /* ??? Always make it addressable for now, since it is meant to
241 	 be pointed to by the static chain pointer.  This pessimizes
242 	 when it turns out that no static chains are needed because
243 	 the nested functions referencing non-local variables are not
244 	 reachable, but the true pessimization is to create the non-
245 	 local frame structure in the first place.  */
246       TREE_ADDRESSABLE (info->frame_decl) = 1;
247     }
248   return type;
249 }
250 
251 /* Return true if DECL should be referenced by pointer in the non-local
252    frame structure.  */
253 
254 static bool
255 use_pointer_in_frame (tree decl)
256 {
257   if (TREE_CODE (decl) == PARM_DECL)
258     {
259       /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
260          sized decls, and inefficient to copy large aggregates.  Don't bother
261          moving anything but scalar variables.  */
262       return AGGREGATE_TYPE_P (TREE_TYPE (decl));
263     }
264   else
265     {
266       /* Variable sized types make things "interesting" in the frame.  */
267       return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
268     }
269 }
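/* For instance (an illustrative sketch; the names are made up), given

	void
	f (struct big s, int n)
	{
	  int i;
	  char buf[n];
	}

   a nonlocal reference to I gets a field of type int directly in the
   frame, whereas the aggregate parameter S and the variable-length array
   BUF are each referenced through a pointer field, the object itself
   staying wherever it was originally placed.  */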
270 
271 /* Given DECL, a non-locally accessed variable, find or create a field
272    in the non-local frame structure for the given nesting context.  */
273 
274 static tree
275 lookup_field_for_decl (struct nesting_info *info, tree decl,
276 		       enum insert_option insert)
277 {
278   void **slot;
279 
280   if (insert == NO_INSERT)
281     {
282       slot = pointer_map_contains (info->field_map, decl);
283       return slot ? (tree) *slot : NULL_TREE;
284     }
285 
286   slot = pointer_map_insert (info->field_map, decl);
287   if (!*slot)
288     {
289       tree field = make_node (FIELD_DECL);
290       DECL_NAME (field) = DECL_NAME (decl);
291 
292       if (use_pointer_in_frame (decl))
293 	{
294 	  TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
295 	  DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
296 	  DECL_NONADDRESSABLE_P (field) = 1;
297 	}
298       else
299 	{
300           TREE_TYPE (field) = TREE_TYPE (decl);
301           DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
302           DECL_ALIGN (field) = DECL_ALIGN (decl);
303           DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
304           TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
305           DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
306           TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
307 	}
308 
309       insert_field_into_struct (get_frame_type (info), field);
310       *slot = field;
311 
312       if (TREE_CODE (decl) == PARM_DECL)
313 	info->any_parm_remapped = true;
314     }
315 
316   return (tree) *slot;
317 }
318 
319 /* Build or return the variable that holds the static chain within
320    INFO->CONTEXT.  This variable may only be used within INFO->CONTEXT.  */
321 
322 static tree
323 get_chain_decl (struct nesting_info *info)
324 {
325   tree decl = info->chain_decl;
326 
327   if (!decl)
328     {
329       tree type;
330 
331       type = get_frame_type (info->outer);
332       type = build_pointer_type (type);
333 
334       /* Note that this variable is *not* entered into any BIND_EXPR;
335 	 the construction of this variable is handled specially in
336 	 expand_function_start and initialize_inlined_parameters.
337 	 Note also that it's represented as a parameter.  This is
338 	 closer to the truth, since the initial value does come from
339 	 the caller.  */
340       decl = build_decl (DECL_SOURCE_LOCATION (info->context),
341 			 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
342       DECL_ARTIFICIAL (decl) = 1;
343       DECL_IGNORED_P (decl) = 1;
344       TREE_USED (decl) = 1;
345       DECL_CONTEXT (decl) = info->context;
346       DECL_ARG_TYPE (decl) = type;
347 
348       /* Tell tree-inline.c that we never write to this variable, so
349 	 it can copy-prop the replacement value immediately.  */
350       TREE_READONLY (decl) = 1;
351 
352       info->chain_decl = decl;
353 
354       if (dump_file
355           && (dump_flags & TDF_DETAILS)
356 	  && !DECL_STATIC_CHAIN (info->context))
357 	fprintf (dump_file, "Setting static-chain for %s\n",
358 		 lang_hooks.decl_printable_name (info->context, 2));
359 
360       DECL_STATIC_CHAIN (info->context) = 1;
361     }
362   return decl;
363 }
364 
365 /* Build or return the field within the non-local frame state that holds
366    the static chain for INFO->CONTEXT.  This is the way to walk back up
367    multiple nesting levels.  */
368 
369 static tree
370 get_chain_field (struct nesting_info *info)
371 {
372   tree field = info->chain_field;
373 
374   if (!field)
375     {
376       tree type = build_pointer_type (get_frame_type (info->outer));
377 
378       field = make_node (FIELD_DECL);
379       DECL_NAME (field) = get_identifier ("__chain");
380       TREE_TYPE (field) = type;
381       DECL_ALIGN (field) = TYPE_ALIGN (type);
382       DECL_NONADDRESSABLE_P (field) = 1;
383 
384       insert_field_into_struct (get_frame_type (info), field);
385 
386       info->chain_field = field;
387 
388       if (dump_file
389           && (dump_flags & TDF_DETAILS)
390 	  && !DECL_STATIC_CHAIN (info->context))
391 	fprintf (dump_file, "Setting static-chain for %s\n",
392 		 lang_hooks.decl_printable_name (info->context, 2));
393 
394       DECL_STATIC_CHAIN (info->context) = 1;
395     }
396   return field;
397 }
398 
399 /* Initialize a new temporary with the GIMPLE_CALL STMT.  */
400 
401 static tree
402 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
403 		        gimple call)
404 {
405   tree t;
406 
407   t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
408   gimple_call_set_lhs (call, t);
409   if (! gsi_end_p (*gsi))
410     gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
411   gsi_insert_before (gsi, call, GSI_SAME_STMT);
412 
413   return t;
414 }
415 
416 
417 /* Copy EXP into a temporary.  Allocate the temporary in the context of
418    INFO and insert the initialization statement before GSI.  */
419 
420 static tree
421 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
422 {
423   tree t;
424   gimple stmt;
425 
426   t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
427   stmt = gimple_build_assign (t, exp);
428   if (! gsi_end_p (*gsi))
429     gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
430   gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
431 
432   return t;
433 }
434 
435 
436 /* Similarly, but only do so to force EXP to satisfy is_gimple_val.  */
437 
438 static tree
439 gsi_gimplify_val (struct nesting_info *info, tree exp,
440 		  gimple_stmt_iterator *gsi)
441 {
442   if (is_gimple_val (exp))
443     return exp;
444   else
445     return init_tmp_var (info, exp, gsi);
446 }
447 
448 /* Similarly, but copy from the temporary and insert the statement
449    after the iterator.  */
450 
451 static tree
452 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
453 {
454   tree t;
455   gimple stmt;
456 
457   t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
458   stmt = gimple_build_assign (exp, t);
459   if (! gsi_end_p (*gsi))
460     gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
461   gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
462 
463   return t;
464 }
465 
466 /* Build or return the type used to represent a nested function trampoline.  */
467 
468 static GTY(()) tree trampoline_type;
469 
470 static tree
471 get_trampoline_type (struct nesting_info *info)
472 {
473   unsigned align, size;
474   tree t;
475 
476   if (trampoline_type)
477     return trampoline_type;
478 
479   align = TRAMPOLINE_ALIGNMENT;
480   size = TRAMPOLINE_SIZE;
481 
482   /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
483      then allocate extra space so that we can do dynamic alignment.  */
484   if (align > STACK_BOUNDARY)
485     {
486       size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
487       align = STACK_BOUNDARY;
488     }
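  /* A worked example of the adjustment above (the numbers are purely
     illustrative): with TRAMPOLINE_ALIGNMENT of 512 bits (64 bytes) and
     STACK_BOUNDARY of 128 bits (16 bytes), the expression adds
     63 & -16 == 48 bytes of slack.  Within any 16-byte-aligned block that
     is 48 bytes larger than the trampoline itself, a 64-byte-aligned
     starting address can always be found when the trampoline is set up.  */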
489 
490   t = build_index_type (build_int_cst (NULL_TREE, size - 1));
491   t = build_array_type (char_type_node, t);
492   t = build_decl (DECL_SOURCE_LOCATION (info->context),
493 		  FIELD_DECL, get_identifier ("__data"), t);
494   DECL_ALIGN (t) = align;
495   DECL_USER_ALIGN (t) = 1;
496 
497   trampoline_type = make_node (RECORD_TYPE);
498   TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
499   TYPE_FIELDS (trampoline_type) = t;
500   layout_type (trampoline_type);
501   DECL_CONTEXT (t) = trampoline_type;
502 
503   return trampoline_type;
504 }
505 
506 /* Given DECL, a nested function, find or create a field in the non-local
507    frame structure for a trampoline for this function.  */
508 
509 static tree
510 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
511 		       enum insert_option insert)
512 {
513   void **slot;
514 
515   if (insert == NO_INSERT)
516     {
517       slot = pointer_map_contains (info->var_map, decl);
518       return slot ? (tree) *slot : NULL_TREE;
519     }
520 
521   slot = pointer_map_insert (info->var_map, decl);
522   if (!*slot)
523     {
524       tree field = make_node (FIELD_DECL);
525       DECL_NAME (field) = DECL_NAME (decl);
526       TREE_TYPE (field) = get_trampoline_type (info);
527       TREE_ADDRESSABLE (field) = 1;
528 
529       insert_field_into_struct (get_frame_type (info), field);
530       *slot = field;
531 
532       info->any_tramp_created = true;
533     }
534 
535   return (tree) *slot;
536 }
537 
538 /* Build or return the field within the non-local frame state that holds
539    the non-local goto "jmp_buf".  The buffer itself is maintained by the
540    rtl middle-end as dynamic stack space is allocated.  */
541 
542 static tree
543 get_nl_goto_field (struct nesting_info *info)
544 {
545   tree field = info->nl_goto_field;
546   if (!field)
547     {
548       unsigned size;
549       tree type;
550 
551       /* For __builtin_nonlocal_goto, we need N words.  The first is the
552 	 frame pointer, the rest is for the target's stack pointer save
553 	 area.  The number of words is controlled by STACK_SAVEAREA_MODE;
554 	 not the best interface, but it'll do for now.  */
555       if (Pmode == ptr_mode)
556 	type = ptr_type_node;
557       else
558 	type = lang_hooks.types.type_for_mode (Pmode, 1);
559 
560       size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
561       size = size / GET_MODE_SIZE (Pmode);
562       size = size + 1;
563 
564       type = build_array_type
565 	(type, build_index_type (build_int_cst (NULL_TREE, size)));
566 
567       field = make_node (FIELD_DECL);
568       DECL_NAME (field) = get_identifier ("__nl_goto_buf");
569       TREE_TYPE (field) = type;
570       DECL_ALIGN (field) = TYPE_ALIGN (type);
571       TREE_ADDRESSABLE (field) = 1;
572 
573       insert_field_into_struct (get_frame_type (info), field);
574 
575       info->nl_goto_field = field;
576     }
577 
578   return field;
579 }
580 
581 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of GIMPLE sequence SEQ.  */
582 
583 static void
584 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
585 	   struct nesting_info *info, gimple_seq seq)
586 {
587   struct walk_stmt_info wi;
588 
589   memset (&wi, 0, sizeof (wi));
590   wi.info = info;
591   wi.val_only = true;
592   walk_gimple_seq (seq, callback_stmt, callback_op, &wi);
593 }
594 
595 
596 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT.  */
597 
598 static inline void
599 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
600 	       struct nesting_info *info)
601 {
602   walk_body (callback_stmt, callback_op, info, gimple_body (info->context));
603 }
604 
605 /* Invoke CALLBACK_STMT/CALLBACK_OP on FOR_STMT's init, cond, incr and pre-body.  */
606 
607 static void
608 walk_gimple_omp_for (gimple for_stmt,
609     		     walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
610     		     struct nesting_info *info)
611 {
612   struct walk_stmt_info wi;
613   gimple_seq seq;
614   tree t;
615   size_t i;
616 
617   walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body (for_stmt));
618 
619   seq = gimple_seq_alloc ();
620   memset (&wi, 0, sizeof (wi));
621   wi.info = info;
622   wi.gsi = gsi_last (seq);
623 
624   for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
625     {
626       wi.val_only = false;
627       walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
628 		 &wi, NULL);
629       wi.val_only = true;
630       wi.is_lhs = false;
631       walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
632 		 &wi, NULL);
633 
634       wi.val_only = true;
635       wi.is_lhs = false;
636       walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
637 		 &wi, NULL);
638 
639       t = gimple_omp_for_incr (for_stmt, i);
640       gcc_assert (BINARY_CLASS_P (t));
641       wi.val_only = false;
642       walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
643       wi.val_only = true;
644       wi.is_lhs = false;
645       walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
646     }
647 
648   if (gimple_seq_empty_p (seq))
649     gimple_seq_free (seq);
650   else
651     {
652       gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
653       annotate_all_with_location (seq, gimple_location (for_stmt));
654       gimple_seq_add_seq (&pre_body, seq);
655       gimple_omp_for_set_pre_body (for_stmt, pre_body);
656     }
657 }
658 
659 /* Similarly for ROOT and all functions nested underneath, depth first.  */
660 
661 static void
662 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
663 		    struct nesting_info *root)
664 {
665   struct nesting_info *n;
666   FOR_EACH_NEST_INFO (n, root)
667     walk_function (callback_stmt, callback_op, n);
668 }
669 
670 
671 /* We have to check for a fairly pathological case.  The operands of a
672    nested function are to be interpreted in the context of the enclosing
673    function.  So if any are variably-sized, they will get remapped when the
674    enclosing function is inlined.  But that remapping would also have to be
675    done in the types of the PARM_DECLs of the nested function, meaning the
676    argument types of that function will disagree with the arguments in the
677    calls to that function.  So we'd either have to make a copy of the nested
678    function corresponding to each time the enclosing function was inlined or
679    add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
680    function.  The former is not practical.  The latter would still require
681    detecting this case to know when to add the conversions.  So, for now at
682    least, we don't inline such an enclosing function.
683 
684    We have to do that check recursively, so here we return an indication
685    of whether FNDECL has such a nested function.  ORIG_FNDECL is the
686    function we were trying to inline; it is used to check whether any
687    argument is variably modified by anything in it.
688 
689    It would be better to do this in tree-inline.c so that we could give
690    the appropriate warning for why a function can't be inlined, but that's
691    too late since the nesting structure has already been flattened and
692    adding a flag just to record this fact seems a waste of a flag.  */
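/* An illustrative example of the problem (GNU C; the names are made up):

	void
	outer (int n)
	{
	  int a[n];
	  int nested (int (*p)[n]) { return (*p)[0]; }
	  a[0] = nested (&a);
	}

   The type of NESTED's parameter P is variably modified by N, which
   belongs to OUTER.  If OUTER were inlined, N would be remapped in the
   caller, but the PARM_DECL type of NESTED would still refer to the
   original N, so the call to NESTED and NESTED's own parameter list
   would disagree; hence OUTER is marked uninlinable below.  */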
693 
694 static bool
695 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
696 {
697   struct cgraph_node *cgn = cgraph_node (fndecl);
698   tree arg;
699 
700   for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
701     {
702       for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = TREE_CHAIN (arg))
703 	if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
704 	  return true;
705 
706       if (check_for_nested_with_variably_modified (cgn->decl, orig_fndecl))
707 	return true;
708     }
709 
710   return false;
711 }
712 
713 /* Construct our local data structure describing the function nesting
714    tree rooted by CGN.  */
715 
716 static struct nesting_info *
717 create_nesting_tree (struct cgraph_node *cgn)
718 {
719   struct nesting_info *info = XCNEW (struct nesting_info);
720   info->field_map = pointer_map_create ();
721   info->var_map = pointer_map_create ();
722   info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
723   info->context = cgn->decl;
724 
725   for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
726     {
727       struct nesting_info *sub = create_nesting_tree (cgn);
728       sub->outer = info;
729       sub->next = info->inner;
730       info->inner = sub;
731     }
732 
733   /* See check_for_nested_with_variably_modified for a discussion of
734      why this has to be here.  */
735   if (check_for_nested_with_variably_modified (info->context, info->context))
736     DECL_UNINLINABLE (info->context) = true;
737 
738   return info;
739 }
740 
741 /* Return an expression computing the static chain for TARGET_CONTEXT
742    from INFO->CONTEXT.  Insert any necessary computations before GSI.  */
743 
744 static tree
745 get_static_chain (struct nesting_info *info, tree target_context,
746 		  gimple_stmt_iterator *gsi)
747 {
748   struct nesting_info *i;
749   tree x;
750 
751   if (info->context == target_context)
752     {
753       x = build_addr (info->frame_decl, target_context);
754     }
755   else
756     {
757       x = get_chain_decl (info);
758 
759       for (i = info->outer; i->context != target_context; i = i->outer)
760 	{
761 	  tree field = get_chain_field (i);
762 
763 	  x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
764 	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
765 	  x = init_tmp_var (info, x, gsi);
766 	}
767     }
768 
769   return x;
770 }
771 
772 
773 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
774    frame as seen from INFO->CONTEXT.  Insert any necessary computations
775    before GSI.  */
776 
777 static tree
778 get_frame_field (struct nesting_info *info, tree target_context,
779 		 tree field, gimple_stmt_iterator *gsi)
780 {
781   struct nesting_info *i;
782   tree x;
783 
784   if (info->context == target_context)
785     {
786       /* Make sure frame_decl gets created.  */
787       (void) get_frame_type (info);
788       x = info->frame_decl;
789     }
790   else
791     {
792       x = get_chain_decl (info);
793 
794       for (i = info->outer; i->context != target_context; i = i->outer)
795 	{
796 	  tree field = get_chain_field (i);
797 
798 	  x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
799 	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
800 	  x = init_tmp_var (info, x, gsi);
801 	}
802 
803       x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
804     }
805 
806   x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
807   return x;
808 }
809 
810 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
811 
812 /* A subroutine of convert_nonlocal_reference_op.  Create a local variable
813    in the nested function with DECL_VALUE_EXPR set to reference the true
814    variable in the parent function.  This is used both for debug info
815    and in OpenMP lowering.  */
816 
817 static tree
818 get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
819 {
820   tree target_context;
821   struct nesting_info *i;
822   tree x, field, new_decl;
823   void **slot;
824 
825   slot = pointer_map_insert (info->var_map, decl);
826 
827   if (*slot)
828     return (tree) *slot;
829 
830   target_context = decl_function_context (decl);
831 
832   /* A copy of the code in get_frame_field, but without the temporaries.  */
833   if (info->context == target_context)
834     {
835       /* Make sure frame_decl gets created.  */
836       (void) get_frame_type (info);
837       x = info->frame_decl;
838       i = info;
839     }
840   else
841     {
842       x = get_chain_decl (info);
843       for (i = info->outer; i->context != target_context; i = i->outer)
844 	{
845 	  field = get_chain_field (i);
846 	  x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
847 	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
848 	}
849       x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
850     }
851 
852   field = lookup_field_for_decl (i, decl, INSERT);
853   x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
854   if (use_pointer_in_frame (decl))
855     x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
856 
857   /* ??? We should be remapping types as well, surely.  */
858   new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
859 			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
860   DECL_CONTEXT (new_decl) = info->context;
861   DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
862   DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
863   TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
864   TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
865   TREE_READONLY (new_decl) = TREE_READONLY (decl);
866   TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
867   DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
868   if ((TREE_CODE (decl) == PARM_DECL
869        || TREE_CODE (decl) == RESULT_DECL
870        || TREE_CODE (decl) == VAR_DECL)
871       && DECL_BY_REFERENCE (decl))
872     DECL_BY_REFERENCE (new_decl) = 1;
873 
874   SET_DECL_VALUE_EXPR (new_decl, x);
875   DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
876 
877   *slot = new_decl;
878   TREE_CHAIN (new_decl) = info->debug_var_chain;
879   info->debug_var_chain = new_decl;
880 
881   if (!optimize
882       && info->context != target_context
883       && variably_modified_type_p (TREE_TYPE (decl), NULL))
884     note_nonlocal_vla_type (info, TREE_TYPE (decl));
885 
886   return new_decl;
887 }
888 
889 
890 /* Callback for walk_gimple_stmt.  Rewrite all references to VAR
891    and PARM_DECLs that belong to outer functions.
892 
893    The rewrite will involve some number of structure accesses back up
894    the static chain.  E.g. for a variable FOO up one nesting level it'll
895    be CHAIN->FOO.  For two levels it'll be CHAIN->__chain->FOO.  Further
896    indirections apply to decls for which use_pointer_in_frame is true.  */
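/* For instance (a sketch of the resulting GIMPLE; the temporary names are
   made up), a read of FOO from two nesting levels up is rewritten roughly
   as

	D.1 = CHAIN->__chain;
	D.2 = D.1->FOO;

   and D.2 then replaces the original use of FOO.  When
   use_pointer_in_frame is true for FOO, one further load through the
   stored pointer is added on top of this.  */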
897 
898 static tree
899 convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
900 {
901   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
902   struct nesting_info *const info = (struct nesting_info *) wi->info;
903   tree t = *tp;
904 
905   *walk_subtrees = 0;
906   switch (TREE_CODE (t))
907     {
908     case VAR_DECL:
909       /* Non-automatic variables are never processed.  */
910       if (TREE_STATIC (t) || DECL_EXTERNAL (t))
911 	break;
912       /* FALLTHRU */
913 
914     case PARM_DECL:
915       if (decl_function_context (t) != info->context)
916 	{
917 	  tree x;
918 	  wi->changed = true;
919 
920 	  x = get_nonlocal_debug_decl (info, t);
921 	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
922 	    {
923 	      tree target_context = decl_function_context (t);
924 	      struct nesting_info *i;
925 	      for (i = info->outer; i->context != target_context; i = i->outer)
926 		continue;
927 	      x = lookup_field_for_decl (i, t, INSERT);
928 	      x = get_frame_field (info, target_context, x, &wi->gsi);
929 	      if (use_pointer_in_frame (t))
930 		{
931 		  x = init_tmp_var (info, x, &wi->gsi);
932 		  x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
933 		}
934 	    }
935 
936 	  if (wi->val_only)
937 	    {
938 	      if (wi->is_lhs)
939 		x = save_tmp_var (info, x, &wi->gsi);
940 	      else
941 		x = init_tmp_var (info, x, &wi->gsi);
942 	    }
943 
944 	  *tp = x;
945 	}
946       break;
947 
948     case LABEL_DECL:
949       /* We're taking the address of a label from a parent function, but
950 	 this is not itself a non-local goto.  Mark the label such that it
951 	 will not be deleted, much as we would with a label address in
952 	 static storage.  */
953       if (decl_function_context (t) != info->context)
954         FORCED_LABEL (t) = 1;
955       break;
956 
957     case ADDR_EXPR:
958       {
959 	bool save_val_only = wi->val_only;
960 
961 	wi->val_only = false;
962 	wi->is_lhs = false;
963 	wi->changed = false;
964 	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
965 	wi->val_only = true;
966 
967 	if (wi->changed)
968 	  {
969 	    tree save_context;
970 
971 	    /* If we changed anything, we might no longer be directly
972 	       referencing a decl.  */
973 	    save_context = current_function_decl;
974 	    current_function_decl = info->context;
975 	    recompute_tree_invariant_for_addr_expr (t);
976 	    current_function_decl = save_context;
977 
978 	    /* If the callback converted the address argument in a context
979 	       where we only accept variables (and min_invariant, presumably),
980 	       then compute the address into a temporary.  */
981 	    if (save_val_only)
982 	      *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
983 				      t, &wi->gsi);
984 	  }
985       }
986       break;
987 
988     case REALPART_EXPR:
989     case IMAGPART_EXPR:
990     case COMPONENT_REF:
991     case ARRAY_REF:
992     case ARRAY_RANGE_REF:
993     case BIT_FIELD_REF:
994       /* Go down this entire nest and just look at the final prefix and
995 	 anything that describes the references.  Otherwise, we lose track
996 	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
997       wi->val_only = true;
998       wi->is_lhs = false;
999       for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1000 	{
1001 	  if (TREE_CODE (t) == COMPONENT_REF)
1002 	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
1003 		       NULL);
1004 	  else if (TREE_CODE (t) == ARRAY_REF
1005 		   || TREE_CODE (t) == ARRAY_RANGE_REF)
1006 	    {
1007 	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
1008 			 wi, NULL);
1009 	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
1010 			 wi, NULL);
1011 	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
1012 			 wi, NULL);
1013 	    }
1014 	  else if (TREE_CODE (t) == BIT_FIELD_REF)
1015 	    {
1016 	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
1017 			 wi, NULL);
1018 	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
1019 			 wi, NULL);
1020 	    }
1021 	}
1022       wi->val_only = false;
1023       walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
1024       break;
1025 
1026     case VIEW_CONVERT_EXPR:
1027       /* Just request to look at the subtrees, leaving val_only and lhs
1028 	 untouched.  This might actually be for !val_only + lhs, in which
1029 	 case we don't want to force a replacement by a temporary.  */
1030       *walk_subtrees = 1;
1031       break;
1032 
1033     default:
1034       if (!IS_TYPE_OR_DECL_P (t))
1035 	{
1036 	  *walk_subtrees = 1;
1037           wi->val_only = true;
1038 	  wi->is_lhs = false;
1039 	}
1040       break;
1041     }
1042 
1043   return NULL_TREE;
1044 }
1045 
1046 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1047 					     struct walk_stmt_info *);
1048 
1049 /* Helper for convert_nonlocal_reference_stmt.  Rewrite all references to VAR
1050    and PARM_DECLs that belong to outer functions.  */
1051 
1052 static bool
1053 convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1054 {
1055   struct nesting_info *const info = (struct nesting_info *) wi->info;
1056   bool need_chain = false, need_stmts = false;
1057   tree clause, decl;
1058   int dummy;
1059   bitmap new_suppress;
1060 
1061   new_suppress = BITMAP_GGC_ALLOC ();
1062   bitmap_copy (new_suppress, info->suppress_expansion);
1063 
1064   for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1065     {
1066       switch (OMP_CLAUSE_CODE (clause))
1067 	{
1068 	case OMP_CLAUSE_REDUCTION:
1069 	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1070 	    need_stmts = true;
1071 	  goto do_decl_clause;
1072 
1073 	case OMP_CLAUSE_LASTPRIVATE:
1074 	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1075 	    need_stmts = true;
1076 	  goto do_decl_clause;
1077 
1078 	case OMP_CLAUSE_PRIVATE:
1079 	case OMP_CLAUSE_FIRSTPRIVATE:
1080 	case OMP_CLAUSE_COPYPRIVATE:
1081 	case OMP_CLAUSE_SHARED:
1082 	do_decl_clause:
1083 	  decl = OMP_CLAUSE_DECL (clause);
1084 	  if (TREE_CODE (decl) == VAR_DECL
1085 	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1086 	    break;
1087 	  if (decl_function_context (decl) != info->context)
1088 	    {
1089 	      bitmap_set_bit (new_suppress, DECL_UID (decl));
1090 	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1091 	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1092 		need_chain = true;
1093 	    }
1094 	  break;
1095 
1096 	case OMP_CLAUSE_SCHEDULE:
1097 	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1098 	    break;
1099 	  /* FALLTHRU */
1100 	case OMP_CLAUSE_IF:
1101 	case OMP_CLAUSE_NUM_THREADS:
1102 	  wi->val_only = true;
1103 	  wi->is_lhs = false;
1104 	  convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1105 	                                 &dummy, wi);
1106 	  break;
1107 
1108 	case OMP_CLAUSE_NOWAIT:
1109 	case OMP_CLAUSE_ORDERED:
1110 	case OMP_CLAUSE_DEFAULT:
1111 	case OMP_CLAUSE_COPYIN:
1112 	case OMP_CLAUSE_COLLAPSE:
1113 	case OMP_CLAUSE_UNTIED:
1114 	  break;
1115 
1116 	default:
1117 	  gcc_unreachable ();
1118 	}
1119     }
1120 
1121   info->suppress_expansion = new_suppress;
1122 
1123   if (need_stmts)
1124     for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1125       switch (OMP_CLAUSE_CODE (clause))
1126 	{
1127 	case OMP_CLAUSE_REDUCTION:
1128 	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1129 	    {
1130 	      tree old_context
1131 		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1132 	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1133 		= info->context;
1134 	      walk_body (convert_nonlocal_reference_stmt,
1135 			 convert_nonlocal_reference_op, info,
1136 			 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
1137 	      walk_body (convert_nonlocal_reference_stmt,
1138 			 convert_nonlocal_reference_op, info,
1139 			 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
1140 	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1141 		= old_context;
1142 	    }
1143 	  break;
1144 
1145 	case OMP_CLAUSE_LASTPRIVATE:
1146 	  walk_body (convert_nonlocal_reference_stmt,
1147 		     convert_nonlocal_reference_op, info,
1148 		     OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
1149 	  break;
1150 
1151 	default:
1152 	  break;
1153 	}
1154 
1155   return need_chain;
1156 }
1157 
1158 /* Create nonlocal debug decls for nonlocal VLA array bounds.  */
1159 
1160 static void
1161 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1162 {
1163   while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1164     type = TREE_TYPE (type);
1165 
1166   if (TYPE_NAME (type)
1167       && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1168       && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1169     type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1170 
1171   while (POINTER_TYPE_P (type)
1172 	 || TREE_CODE (type) == VECTOR_TYPE
1173 	 || TREE_CODE (type) == FUNCTION_TYPE
1174 	 || TREE_CODE (type) == METHOD_TYPE)
1175     type = TREE_TYPE (type);
1176 
1177   if (TREE_CODE (type) == ARRAY_TYPE)
1178     {
1179       tree domain, t;
1180 
1181       note_nonlocal_vla_type (info, TREE_TYPE (type));
1182       domain = TYPE_DOMAIN (type);
1183       if (domain)
1184 	{
1185 	  t = TYPE_MIN_VALUE (domain);
1186 	  if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1187 	      && decl_function_context (t) != info->context)
1188 	    get_nonlocal_debug_decl (info, t);
1189 	  t = TYPE_MAX_VALUE (domain);
1190 	  if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1191 	      && decl_function_context (t) != info->context)
1192 	    get_nonlocal_debug_decl (info, t);
1193 	}
1194     }
1195 }
1196 
1197 /* Create nonlocal debug decls for the array bounds of nonlocal VLAs
1198    in BLOCK.  */
1199 
1200 static void
1201 note_nonlocal_block_vlas (struct nesting_info *info, tree block)
1202 {
1203   tree var;
1204 
1205   for (var = BLOCK_VARS (block); var; var = TREE_CHAIN (var))
1206     if (TREE_CODE (var) == VAR_DECL
1207 	&& variably_modified_type_p (TREE_TYPE (var), NULL)
1208 	&& DECL_HAS_VALUE_EXPR_P (var)
1209 	&& decl_function_context (var) != info->context)
1210       note_nonlocal_vla_type (info, TREE_TYPE (var));
1211 }
1212 
1213 /* Callback for walk_gimple_stmt.  Rewrite all references to VAR and
1214    PARM_DECLs that belong to outer functions.  This handles statements
1215    that are not handled via the standard recursion done in
1216    walk_gimple_stmt.  GSI points to the statement to examine; WI is as in
1217    convert_nonlocal_reference_op.  Set *HANDLED_OPS_P to true if all the
1218    operands of STMT have been handled by this function.  */
1219 
1220 static tree
1221 convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1222 				 struct walk_stmt_info *wi)
1223 {
1224   struct nesting_info *info = (struct nesting_info *) wi->info;
1225   tree save_local_var_chain;
1226   bitmap save_suppress;
1227   gimple stmt = gsi_stmt (*gsi);
1228 
1229   switch (gimple_code (stmt))
1230     {
1231     case GIMPLE_GOTO:
1232       /* Don't walk non-local gotos for now.  */
1233       if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
1234 	{
1235 	  wi->val_only = true;
1236 	  wi->is_lhs = false;
1237 	  *handled_ops_p = true;
1238 	  return NULL_TREE;
1239 	}
1240       break;
1241 
1242     case GIMPLE_OMP_PARALLEL:
1243     case GIMPLE_OMP_TASK:
1244       save_suppress = info->suppress_expansion;
1245       if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1246 	                                wi))
1247 	{
1248 	  tree c, decl;
1249 	  decl = get_chain_decl (info);
1250 	  c = build_omp_clause (gimple_location (stmt),
1251 				OMP_CLAUSE_FIRSTPRIVATE);
1252 	  OMP_CLAUSE_DECL (c) = decl;
1253 	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1254 	  gimple_omp_taskreg_set_clauses (stmt, c);
1255 	}
1256 
1257       save_local_var_chain = info->new_local_var_chain;
1258       info->new_local_var_chain = NULL;
1259 
1260       walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1261 	         info, gimple_omp_body (stmt));
1262 
1263       if (info->new_local_var_chain)
1264 	declare_vars (info->new_local_var_chain,
1265 	              gimple_seq_first_stmt (gimple_omp_body (stmt)),
1266 		      false);
1267       info->new_local_var_chain = save_local_var_chain;
1268       info->suppress_expansion = save_suppress;
1269       break;
1270 
1271     case GIMPLE_OMP_FOR:
1272       save_suppress = info->suppress_expansion;
1273       convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1274       walk_gimple_omp_for (stmt, convert_nonlocal_reference_stmt,
1275 	  		   convert_nonlocal_reference_op, info);
1276       walk_body (convert_nonlocal_reference_stmt,
1277 	  	 convert_nonlocal_reference_op, info, gimple_omp_body (stmt));
1278       info->suppress_expansion = save_suppress;
1279       break;
1280 
1281     case GIMPLE_OMP_SECTIONS:
1282       save_suppress = info->suppress_expansion;
1283       convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1284       walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1285 	         info, gimple_omp_body (stmt));
1286       info->suppress_expansion = save_suppress;
1287       break;
1288 
1289     case GIMPLE_OMP_SINGLE:
1290       save_suppress = info->suppress_expansion;
1291       convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1292       walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1293 	         info, gimple_omp_body (stmt));
1294       info->suppress_expansion = save_suppress;
1295       break;
1296 
1297     case GIMPLE_OMP_SECTION:
1298     case GIMPLE_OMP_MASTER:
1299     case GIMPLE_OMP_ORDERED:
1300       walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1301 	         info, gimple_omp_body (stmt));
1302       break;
1303 
1304     case GIMPLE_BIND:
1305       if (!optimize && gimple_bind_block (stmt))
1306 	note_nonlocal_block_vlas (info, gimple_bind_block (stmt));
1307 
1308       *handled_ops_p = false;
1309       return NULL_TREE;
1310 
1311     case GIMPLE_COND:
1312       wi->val_only = true;
1313       wi->is_lhs = false;
1314       *handled_ops_p = false;
1315       return NULL_TREE;
1316 
1317     default:
1318       /* For every other statement that we are not interested in
1319 	 handling here, let the walker traverse the operands.  */
1320       *handled_ops_p = false;
1321       return NULL_TREE;
1322     }
1323 
1324   /* We have handled all of STMT's operands; no need to traverse them.  */
1325   *handled_ops_p = true;
1326   return NULL_TREE;
1327 }
1328 
1329 
1330 /* A subroutine of convert_local_reference_op.  Create a local variable
1331    in the parent function with DECL_VALUE_EXPR set to reference the
1332    field in FRAME.  This is used both for debug info and in OpenMP
1333    lowering.  */
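/* For example (illustrative; FRAME and X stand for whatever the frame
   variable and the original local are actually called), if a local X of
   INFO->CONTEXT was given a field in the frame, the parent keeps a
   debug-only VAR_DECL named X whose DECL_VALUE_EXPR is the COMPONENT_REF
   FRAME.X, so debug info and the OpenMP lowering can still refer to X.  */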
1334 
1335 static tree
1336 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1337 {
1338   tree x, new_decl;
1339   void **slot;
1340 
1341   slot = pointer_map_insert (info->var_map, decl);
1342   if (*slot)
1343     return (tree) *slot;
1344 
1345   /* Make sure frame_decl gets created.  */
1346   (void) get_frame_type (info);
1347   x = info->frame_decl;
1348   x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1349 
1350   new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1351 			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1352   DECL_CONTEXT (new_decl) = info->context;
1353   DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1354   DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1355   TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1356   TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1357   TREE_READONLY (new_decl) = TREE_READONLY (decl);
1358   TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1359   DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1360   if ((TREE_CODE (decl) == PARM_DECL
1361        || TREE_CODE (decl) == RESULT_DECL
1362        || TREE_CODE (decl) == VAR_DECL)
1363       && DECL_BY_REFERENCE (decl))
1364     DECL_BY_REFERENCE (new_decl) = 1;
1365 
1366   SET_DECL_VALUE_EXPR (new_decl, x);
1367   DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1368   *slot = new_decl;
1369 
1370   TREE_CHAIN (new_decl) = info->debug_var_chain;
1371   info->debug_var_chain = new_decl;
1372 
1373   /* Do not emit debug info twice.  */
1374   DECL_IGNORED_P (decl) = 1;
1375 
1376   return new_decl;
1377 }
1378 
1379 
1380 /* Called via walk_function+walk_gimple_stmt; rewrite all references to VAR
1381    and PARM_DECLs that were referenced by inner nested functions.
1382    The rewrite will be a structure reference to the local frame variable.  */
1383 
1384 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1385 
1386 static tree
1387 convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
1388 {
1389   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1390   struct nesting_info *const info = (struct nesting_info *) wi->info;
1391   tree t = *tp, field, x;
1392   bool save_val_only;
1393 
1394   *walk_subtrees = 0;
1395   switch (TREE_CODE (t))
1396     {
1397     case VAR_DECL:
1398       /* Non-automatic variables are never processed.  */
1399       if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1400 	break;
1401       /* FALLTHRU */
1402 
1403     case PARM_DECL:
1404       if (decl_function_context (t) == info->context)
1405 	{
1406 	  /* If we copied a pointer to the frame, then the original decl
1407 	     is used unchanged in the parent function.  */
1408 	  if (use_pointer_in_frame (t))
1409 	    break;
1410 
1411 	  /* No need to transform anything if no child references the
1412 	     variable.  */
1413 	  field = lookup_field_for_decl (info, t, NO_INSERT);
1414 	  if (!field)
1415 	    break;
1416 	  wi->changed = true;
1417 
1418 	  x = get_local_debug_decl (info, t, field);
1419 	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1420 	    x = get_frame_field (info, info->context, field, &wi->gsi);
1421 
1422 	  if (wi->val_only)
1423 	    {
1424 	      if (wi->is_lhs)
1425 		x = save_tmp_var (info, x, &wi->gsi);
1426 	      else
1427 		x = init_tmp_var (info, x, &wi->gsi);
1428 	    }
1429 
1430 	  *tp = x;
1431 	}
1432       break;
1433 
1434     case ADDR_EXPR:
1435       save_val_only = wi->val_only;
1436       wi->val_only = false;
1437       wi->is_lhs = false;
1438       wi->changed = false;
1439       walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
1440       wi->val_only = save_val_only;
1441 
1442       /* If we converted anything ... */
1443       if (wi->changed)
1444 	{
1445 	  tree save_context;
1446 
1447 	  /* Then the frame decl is now addressable.  */
1448 	  TREE_ADDRESSABLE (info->frame_decl) = 1;
1449 
1450 	  save_context = current_function_decl;
1451 	  current_function_decl = info->context;
1452 	  recompute_tree_invariant_for_addr_expr (t);
1453 	  current_function_decl = save_context;
1454 
1455 	  /* If we are in a context where we only accept values, then
1456 	     compute the address into a temporary.  */
1457 	  if (save_val_only)
1458 	    *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1459 				    t, &wi->gsi);
1460 	}
1461       break;
1462 
1463     case REALPART_EXPR:
1464     case IMAGPART_EXPR:
1465     case COMPONENT_REF:
1466     case ARRAY_REF:
1467     case ARRAY_RANGE_REF:
1468     case BIT_FIELD_REF:
1469       /* Go down this entire nest and just look at the final prefix and
1470 	 anything that describes the references.  Otherwise, we lose track
1471 	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
1472       save_val_only = wi->val_only;
1473       wi->val_only = true;
1474       wi->is_lhs = false;
1475       for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1476 	{
1477 	  if (TREE_CODE (t) == COMPONENT_REF)
1478 	    walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1479 		       NULL);
1480 	  else if (TREE_CODE (t) == ARRAY_REF
1481 		   || TREE_CODE (t) == ARRAY_RANGE_REF)
1482 	    {
1483 	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
1484 			 NULL);
1485 	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1486 			 NULL);
1487 	      walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
1488 			 NULL);
1489 	    }
1490 	  else if (TREE_CODE (t) == BIT_FIELD_REF)
1491 	    {
1492 	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
1493 			 NULL);
1494 	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1495 			 NULL);
1496 	    }
1497 	}
1498       wi->val_only = false;
1499       walk_tree (tp, convert_local_reference_op, wi, NULL);
1500       wi->val_only = save_val_only;
1501       break;
1502 
1503     case VIEW_CONVERT_EXPR:
1504       /* Just request to look at the subtrees, leaving val_only and lhs
1505 	 untouched.  This might actually be for !val_only + lhs, in which
1506 	 case we don't want to force a replacement by a temporary.  */
1507       *walk_subtrees = 1;
1508       break;
1509 
1510     default:
1511       if (!IS_TYPE_OR_DECL_P (t))
1512 	{
1513 	  *walk_subtrees = 1;
1514 	  wi->val_only = true;
1515 	  wi->is_lhs = false;
1516 	}
1517       break;
1518     }
1519 
1520   return NULL_TREE;
1521 }
1522 
1523 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
1524 					  struct walk_stmt_info *);
1525 
1526 /* Helper for convert_local_reference_stmt.  Convert all the references
1527    in the chain of clauses at *PCLAUSES.  WI is as in convert_local_reference_stmt.  */
1528 
1529 static bool
1530 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1531 {
1532   struct nesting_info *const info = (struct nesting_info *) wi->info;
1533   bool need_frame = false, need_stmts = false;
1534   tree clause, decl;
1535   int dummy;
1536   bitmap new_suppress;
1537 
1538   new_suppress = BITMAP_GGC_ALLOC ();
1539   bitmap_copy (new_suppress, info->suppress_expansion);
1540 
1541   for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1542     {
1543       switch (OMP_CLAUSE_CODE (clause))
1544 	{
1545 	case OMP_CLAUSE_REDUCTION:
1546 	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1547 	    need_stmts = true;
1548 	  goto do_decl_clause;
1549 
1550 	case OMP_CLAUSE_LASTPRIVATE:
1551 	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1552 	    need_stmts = true;
1553 	  goto do_decl_clause;
1554 
1555 	case OMP_CLAUSE_PRIVATE:
1556 	case OMP_CLAUSE_FIRSTPRIVATE:
1557 	case OMP_CLAUSE_COPYPRIVATE:
1558 	case OMP_CLAUSE_SHARED:
1559 	do_decl_clause:
1560 	  decl = OMP_CLAUSE_DECL (clause);
1561 	  if (TREE_CODE (decl) == VAR_DECL
1562 	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1563 	    break;
1564 	  if (decl_function_context (decl) == info->context
1565 	      && !use_pointer_in_frame (decl))
1566 	    {
1567 	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1568 	      if (field)
1569 		{
1570 		  bitmap_set_bit (new_suppress, DECL_UID (decl));
1571 		  OMP_CLAUSE_DECL (clause)
1572 		    = get_local_debug_decl (info, decl, field);
1573 		  need_frame = true;
1574 		}
1575 	    }
1576 	  break;
1577 
1578 	case OMP_CLAUSE_SCHEDULE:
1579 	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1580 	    break;
1581 	  /* FALLTHRU */
1582 	case OMP_CLAUSE_IF:
1583 	case OMP_CLAUSE_NUM_THREADS:
1584 	  wi->val_only = true;
1585 	  wi->is_lhs = false;
1586 	  convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
1587 				      wi);
1588 	  break;
1589 
1590 	case OMP_CLAUSE_NOWAIT:
1591 	case OMP_CLAUSE_ORDERED:
1592 	case OMP_CLAUSE_DEFAULT:
1593 	case OMP_CLAUSE_COPYIN:
1594 	case OMP_CLAUSE_COLLAPSE:
1595 	case OMP_CLAUSE_UNTIED:
1596 	  break;
1597 
1598 	default:
1599 	  gcc_unreachable ();
1600 	}
1601     }
1602 
1603   info->suppress_expansion = new_suppress;
1604 
1605   if (need_stmts)
1606     for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1607       switch (OMP_CLAUSE_CODE (clause))
1608 	{
1609 	case OMP_CLAUSE_REDUCTION:
1610 	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1611 	    {
1612 	      tree old_context
1613 		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1614 	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1615 		= info->context;
1616 	      walk_body (convert_local_reference_stmt,
1617 			 convert_local_reference_op, info,
1618 			 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
1619 	      walk_body (convert_local_reference_stmt,
1620 			 convert_local_reference_op, info,
1621 			 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
1622 	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1623 		= old_context;
1624 	    }
1625 	  break;
1626 
1627 	case OMP_CLAUSE_LASTPRIVATE:
1628 	  walk_body (convert_local_reference_stmt,
1629 		     convert_local_reference_op, info,
1630 		     OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
1631 	  break;
1632 
1633 	default:
1634 	  break;
1635 	}
1636 
1637   return need_frame;
1638 }
1639 
1640 
1641 /* Called via walk_function+walk_gimple_stmt; rewrite all references to VAR
1642    and PARM_DECLs that were referenced by inner nested functions.
1643    The rewrite will be a structure reference to the local frame variable.  */
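
/* As a rough illustration only (identifiers are hypothetical): given

     void outer (void)
     {
       int x;
       void inner (void) { x++; }
       x = 1;
       inner ();
     }

   earlier passes gave X a field in the frame struct because INNER refers
   to it nonlocally; here the outer function's own "x = 1" is rewritten to
   go through that field (conceptually "FRAME.x = 1"), so that both
   functions operate on the same object.  */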
1644 
1645 static tree
1646 convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1647 			      struct walk_stmt_info *wi)
1648 {
1649   struct nesting_info *info = (struct nesting_info *) wi->info;
1650   tree save_local_var_chain;
1651   bitmap save_suppress;
1652   gimple stmt = gsi_stmt (*gsi);
1653 
1654   switch (gimple_code (stmt))
1655     {
1656     case GIMPLE_OMP_PARALLEL:
1657     case GIMPLE_OMP_TASK:
1658       save_suppress = info->suppress_expansion;
1659       if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1660 	                             wi))
1661 	{
1662 	  tree c;
1663 	  (void) get_frame_type (info);
1664 	  c = build_omp_clause (gimple_location (stmt),
1665 				OMP_CLAUSE_SHARED);
1666 	  OMP_CLAUSE_DECL (c) = info->frame_decl;
1667 	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1668 	  gimple_omp_taskreg_set_clauses (stmt, c);
1669 	}
1670 
1671       save_local_var_chain = info->new_local_var_chain;
1672       info->new_local_var_chain = NULL;
1673 
1674       walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
1675 	         gimple_omp_body (stmt));
1676 
1677       if (info->new_local_var_chain)
1678 	declare_vars (info->new_local_var_chain,
1679 		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
1680       info->new_local_var_chain = save_local_var_chain;
1681       info->suppress_expansion = save_suppress;
1682       break;
1683 
1684     case GIMPLE_OMP_FOR:
1685       save_suppress = info->suppress_expansion;
1686       convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1687       walk_gimple_omp_for (stmt, convert_local_reference_stmt,
1688 			   convert_local_reference_op, info);
1689       walk_body (convert_local_reference_stmt, convert_local_reference_op,
1690 		 info, gimple_omp_body (stmt));
1691       info->suppress_expansion = save_suppress;
1692       break;
1693 
1694     case GIMPLE_OMP_SECTIONS:
1695       save_suppress = info->suppress_expansion;
1696       convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1697       walk_body (convert_local_reference_stmt, convert_local_reference_op,
1698 		 info, gimple_omp_body (stmt));
1699       info->suppress_expansion = save_suppress;
1700       break;
1701 
1702     case GIMPLE_OMP_SINGLE:
1703       save_suppress = info->suppress_expansion;
1704       convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1705       walk_body (convert_local_reference_stmt, convert_local_reference_op,
1706 		 info, gimple_omp_body (stmt));
1707       info->suppress_expansion = save_suppress;
1708       break;
1709 
1710     case GIMPLE_OMP_SECTION:
1711     case GIMPLE_OMP_MASTER:
1712     case GIMPLE_OMP_ORDERED:
1713       walk_body (convert_local_reference_stmt, convert_local_reference_op,
1714 		 info, gimple_omp_body (stmt));
1715       break;
1716 
1717     case GIMPLE_COND:
1718       wi->val_only = true;
1719       wi->is_lhs = false;
1720       *handled_ops_p = false;
1721       return NULL_TREE;
1722 
1723     default:
1724       /* For every other statement that we are not interested in
1725 	 handling here, let the walker traverse the operands.  */
1726       *handled_ops_p = false;
1727       return NULL_TREE;
1728     }
1729 
1730   /* Indicate that we have handled all the operands ourselves.  */
1731   *handled_ops_p = true;
1732   return NULL_TREE;
1733 }
1734 
1735 
1736 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
1737    that reference labels from outer functions.  The rewrite will be a
1738    call to __builtin_nonlocal_goto.  */
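
/* Sketch only (identifiers are illustrative): a jump in a nested function
   to a label ERR declared in its parent,

     goto err;

   is replaced with something along the lines of

     __builtin_nonlocal_goto (&NEW_ERR, &CHAIN->nl_goto_field);

   where NEW_ERR is a fresh DECL_NONLOCAL label that convert_nl_goto_receiver
   below installs next to ERR in the parent, and the second argument is the
   address of the nonlocal-goto save area in the parent's frame, reached
   through the static chain.  */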
1739 
1740 static tree
1741 convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1742 			   struct walk_stmt_info *wi)
1743 {
1744   struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
1745   tree label, new_label, target_context, x, field;
1746   void **slot;
1747   gimple call;
1748   gimple stmt = gsi_stmt (*gsi);
1749 
1750   if (gimple_code (stmt) != GIMPLE_GOTO)
1751     {
1752       *handled_ops_p = false;
1753       return NULL_TREE;
1754     }
1755 
1756   label = gimple_goto_dest (stmt);
1757   if (TREE_CODE (label) != LABEL_DECL)
1758     {
1759       *handled_ops_p = false;
1760       return NULL_TREE;
1761     }
1762 
1763   target_context = decl_function_context (label);
1764   if (target_context == info->context)
1765     {
1766       *handled_ops_p = false;
1767       return NULL_TREE;
1768     }
1769 
1770   for (i = info->outer; target_context != i->context; i = i->outer)
1771     continue;
1772 
1773   /* The original user label may also be used for a normal goto, therefore
1774      we must create a new label that will actually receive the abnormal
1775      control transfer.  This new label will be marked LABEL_NONLOCAL; this
1776      mark will trigger proper behavior in the cfg, as well as cause the
1777      (hairy target-specific) non-local goto receiver code to be generated
1778      when we expand rtl.  Enter this association into var_map so that we
1779      can insert the new label into the IL during a second pass.  */
1780   slot = pointer_map_insert (i->var_map, label);
1781   if (*slot == NULL)
1782     {
1783       new_label = create_artificial_label (UNKNOWN_LOCATION);
1784       DECL_NONLOCAL (new_label) = 1;
1785       *slot = new_label;
1786     }
1787   else
1788     new_label = (tree) *slot;
1789 
1790   /* Build: __builtin_nonlocal_goto (new_label, &chain->nl_goto_field).  */
1791   field = get_nl_goto_field (i);
1792   x = get_frame_field (info, target_context, field, &wi->gsi);
1793   x = build_addr (x, target_context);
1794   x = gsi_gimplify_val (info, x, &wi->gsi);
1795   call = gimple_build_call (implicit_built_in_decls[BUILT_IN_NONLOCAL_GOTO], 2,
1796 			    build_addr (new_label, target_context), x);
1797   gsi_replace (&wi->gsi, call, false);
1798 
1799   /* We have handled all of STMT's operands, no need to keep going.  */
1800   *handled_ops_p = true;
1801   return NULL_TREE;
1802 }
1803 
1804 
1805 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_LABELs whose
1806    labels are referenced via nonlocal goto from a nested function.  The rewrite
1807    will involve installing a newly generated DECL_NONLOCAL label, and
1808    (potentially) a branch around the rtl gunk that is assumed to be
1809    attached to such a label.  */
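
/* Very roughly (a sketch, not literal output), a label LAB that is the
   target of a nonlocal goto ends up preceded by

     goto LAB;        <- emitted only if the previous stmt may fall through
     NEW_LAB:         <- DECL_NONLOCAL; receives the abnormal transfer
     LAB:

   so that the target-specific receiver code later generated for NEW_LAB is
   skipped on the ordinary fall-through path.  */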
1810 
1811 static tree
1812 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1813 			  struct walk_stmt_info *wi)
1814 {
1815   struct nesting_info *const info = (struct nesting_info *) wi->info;
1816   tree label, new_label;
1817   gimple_stmt_iterator tmp_gsi;
1818   void **slot;
1819   gimple stmt = gsi_stmt (*gsi);
1820 
1821   if (gimple_code (stmt) != GIMPLE_LABEL)
1822     {
1823       *handled_ops_p = false;
1824       return NULL_TREE;
1825     }
1826 
1827   label = gimple_label_label (stmt);
1828 
1829   slot = pointer_map_contains (info->var_map, label);
1830   if (!slot)
1831     {
1832       *handled_ops_p = false;
1833       return NULL_TREE;
1834     }
1835 
1836   /* If there's any possibility that the previous statement falls through,
1837      then we must branch around the new non-local label.  */
1838   tmp_gsi = wi->gsi;
1839   gsi_prev (&tmp_gsi);
1840   if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
1841     {
1842       gimple stmt = gimple_build_goto (label);
1843       gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1844     }
1845 
1846   new_label = (tree) *slot;
1847   stmt = gimple_build_label (new_label);
1848   gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1849 
1850   *handled_ops_p = true;
1851   return NULL_TREE;
1852 }
1853 
1854 
1855 /* Called via walk_function+walk_gimple_stmt, rewrite all references to addresses
1856    of nested functions that require the use of trampolines.  The rewrite
1857    will involve a reference to a trampoline generated for the occasion.  */
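
/* A hypothetical example of when this fires (names are made up): if the
   address of a chain-using nested function CMP escapes, as in

     qsort (buf, n, size, cmp);

   the plain function pointer handed to qsort cannot carry the static
   chain, so the ADDR_EXPR is rewritten to point at a trampoline allocated
   in the parent's frame; the sequence built for that is shown in the
   ADDR_EXPR case below.  */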
1858 
1859 static tree
1860 convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
1861 {
1862   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1863   struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
1864   tree t = *tp, decl, target_context, x, builtin;
1865   gimple call;
1866 
1867   *walk_subtrees = 0;
1868   switch (TREE_CODE (t))
1869     {
1870     case ADDR_EXPR:
1871       /* Build
1872 	   T.1 = &CHAIN->tramp;
1873 	   T.2 = __builtin_adjust_trampoline (T.1);
1874 	   T.3 = (func_type)T.2;
1875       */
1876 
1877       decl = TREE_OPERAND (t, 0);
1878       if (TREE_CODE (decl) != FUNCTION_DECL)
1879 	break;
1880 
1881       /* Only need to process nested functions.  */
1882       target_context = decl_function_context (decl);
1883       if (!target_context)
1884 	break;
1885 
1886       /* If the nested function doesn't use a static chain, then
1887 	 it doesn't need a trampoline.  */
1888       if (!DECL_STATIC_CHAIN (decl))
1889 	break;
1890 
1891       /* If we don't want a trampoline, then don't build one.  */
1892       if (TREE_NO_TRAMPOLINE (t))
1893 	break;
1894 
1895       /* Look up the immediate parent of the callee, as that's where
1896 	 we need to insert the trampoline.  */
1897       for (i = info; i->context != target_context; i = i->outer)
1898 	continue;
1899       x = lookup_tramp_for_decl (i, decl, INSERT);
1900 
1901       /* Compute the address of the field holding the trampoline.  */
1902       x = get_frame_field (info, target_context, x, &wi->gsi);
1903       x = build_addr (x, target_context);
1904       x = gsi_gimplify_val (info, x, &wi->gsi);
1905 
1906       /* Do machine-specific ugliness.  Normally this will involve
1907 	 computing extra alignment, but it can really be anything.  */
1908       builtin = implicit_built_in_decls[BUILT_IN_ADJUST_TRAMPOLINE];
1909       call = gimple_build_call (builtin, 1, x);
1910       x = init_tmp_var_with_call (info, &wi->gsi, call);
1911 
1912       /* Cast back to the proper function type.  */
1913       x = build1 (NOP_EXPR, TREE_TYPE (t), x);
1914       x = init_tmp_var (info, x, &wi->gsi);
1915 
1916       *tp = x;
1917       break;
1918 
1919     default:
1920       if (!IS_TYPE_OR_DECL_P (t))
1921 	*walk_subtrees = 1;
1922       break;
1923     }
1924 
1925   return NULL_TREE;
1926 }
1927 
1928 
1929 /* Called via walk_function+walk_gimple_stmt, rewrite all references
1930    to addresses of nested functions that require the use of
1931    trampolines.  The rewrite will involve a reference to a trampoline
1932    generated for the occasion.  */
1933 
1934 static tree
1935 convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1936 			      struct walk_stmt_info *wi)
1937 {
1938   gimple stmt = gsi_stmt (*gsi);
1939 
1940   switch (gimple_code (stmt))
1941     {
1942     case GIMPLE_CALL:
1943       {
1944 	/* Only walk call arguments, lest we generate trampolines for
1945 	   direct calls.  */
1946 	unsigned long i, nargs = gimple_call_num_args (stmt);
1947 	for (i = 0; i < nargs; i++)
1948 	  walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
1949 		     wi, NULL);
1950 
1951 	*handled_ops_p = true;
1952 	return NULL_TREE;
1953       }
1954 
1955     default:
1956       break;
1957     }
1958 
1959   *handled_ops_p = false;
1960   return NULL_TREE;
1961 }
1962 
1963 
1964 
1965 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
1966    that reference nested functions to make sure that the static chain
1967    is set up properly for the call.  */
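
/* For example (a sketch in GIMPLE-dump notation, names invented), a direct
   call to a chain-using nested function

     inner ();

   becomes roughly

     inner () [static-chain: &FRAME.1];

   with the chain operand computed by get_static_chain from the caller's own
   frame or from its incoming static chain, as appropriate.  */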
1968 
1969 static tree
1970 convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1971                      struct walk_stmt_info *wi)
1972 {
1973   struct nesting_info *const info = (struct nesting_info *) wi->info;
1974   tree decl, target_context;
1975   char save_static_chain_added;
1976   int i;
1977   gimple stmt = gsi_stmt (*gsi);
1978 
1979   switch (gimple_code (stmt))
1980     {
1981     case GIMPLE_CALL:
1982       if (gimple_call_chain (stmt))
1983 	break;
1984       decl = gimple_call_fndecl (stmt);
1985       if (!decl)
1986 	break;
1987       target_context = decl_function_context (decl);
1988       if (target_context && DECL_STATIC_CHAIN (decl))
1989 	{
1990 	  gimple_call_set_chain (stmt, get_static_chain (info, target_context,
1991 							 &wi->gsi));
1992 	  info->static_chain_added |= (1 << (info->context != target_context));
1993 	}
1994       break;
1995 
1996     case GIMPLE_OMP_PARALLEL:
1997     case GIMPLE_OMP_TASK:
1998       save_static_chain_added = info->static_chain_added;
1999       info->static_chain_added = 0;
2000       walk_body (convert_gimple_call, NULL, info, gimple_omp_body (stmt));
2001       for (i = 0; i < 2; i++)
2002 	{
2003 	  tree c, decl;
2004 	  if ((info->static_chain_added & (1 << i)) == 0)
2005 	    continue;
2006 	  decl = i ? get_chain_decl (info) : info->frame_decl;
2007 	  /* Don't add CHAIN.* or FRAME.* twice.  */
2008 	  for (c = gimple_omp_taskreg_clauses (stmt);
2009 	       c;
2010 	       c = OMP_CLAUSE_CHAIN (c))
2011 	    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2012 		 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2013 		&& OMP_CLAUSE_DECL (c) == decl)
2014 	      break;
2015 	  if (c == NULL)
2016 	    {
2017 	      c = build_omp_clause (gimple_location (stmt),
2018 				    i ? OMP_CLAUSE_FIRSTPRIVATE
2019 				    : OMP_CLAUSE_SHARED);
2020 	      OMP_CLAUSE_DECL (c) = decl;
2021 	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2022 	      gimple_omp_taskreg_set_clauses (stmt, c);
2023 	    }
2024 	}
2025       info->static_chain_added |= save_static_chain_added;
2026       break;
2027 
2028     case GIMPLE_OMP_FOR:
2029       walk_body (convert_gimple_call, NULL, info,
2030 	  	 gimple_omp_for_pre_body (stmt));
2031       /* FALLTHRU */
2032     case GIMPLE_OMP_SECTIONS:
2033     case GIMPLE_OMP_SECTION:
2034     case GIMPLE_OMP_SINGLE:
2035     case GIMPLE_OMP_MASTER:
2036     case GIMPLE_OMP_ORDERED:
2037     case GIMPLE_OMP_CRITICAL:
2038       walk_body (convert_gimple_call, NULL, info, gimple_omp_body (stmt));
2039       break;
2040 
2041     default:
2042       /* Keep looking for other operands.  */
2043       *handled_ops_p = false;
2044       return NULL_TREE;
2045     }
2046 
2047   *handled_ops_p = true;
2048   return NULL_TREE;
2049 }
2050 
2051 /* Walk the nesting tree starting with ROOT.  Convert all trampolines and
2052    call expressions.  At the same time, determine if a nested function
2053    actually uses its static chain; if not, remember that.  */
2054 
2055 static void
2056 convert_all_function_calls (struct nesting_info *root)
2057 {
2058   unsigned int chain_count = 0, old_chain_count, iter_count;
2059   struct nesting_info *n;
2060 
2061   /* First, optimistically clear static_chain for all decls that haven't
2062      used the static chain already for variable access.  */
2063   FOR_EACH_NEST_INFO (n, root)
2064     {
2065       tree decl = n->context;
2066       if (!n->outer || (!n->chain_decl && !n->chain_field))
2067 	{
2068 	  DECL_STATIC_CHAIN (decl) = 0;
2069 	  if (dump_file && (dump_flags & TDF_DETAILS))
2070 	    fprintf (dump_file, "Guessing no static-chain for %s\n",
2071 		     lang_hooks.decl_printable_name (decl, 2));
2072 	}
2073       else
2074 	DECL_STATIC_CHAIN (decl) = 1;
2075       chain_count += DECL_STATIC_CHAIN (decl);
2076     }
2077 
2078   /* Walk the functions and perform transformations.  Note that these
2079      transformations can induce new uses of the static chain, which in turn
2080      require re-examining all users of the decl.  */
2081   /* ??? It would make sense to try to use the call graph to speed this up,
2082      but the call graph hasn't really been built yet.  Even if it had been, we
2083      would still need to iterate in this loop since address-of references
2084      wouldn't show up in the callgraph anyway.  */
2085   iter_count = 0;
2086   do
2087     {
2088       old_chain_count = chain_count;
2089       chain_count = 0;
2090       iter_count++;
2091 
2092       if (dump_file && (dump_flags & TDF_DETAILS))
2093 	fputc ('\n', dump_file);
2094 
2095       FOR_EACH_NEST_INFO (n, root)
2096 	{
2097 	  tree decl = n->context;
2098 	  walk_function (convert_tramp_reference_stmt,
2099 			 convert_tramp_reference_op, n);
2100 	  walk_function (convert_gimple_call, NULL, n);
2101 	  chain_count += DECL_STATIC_CHAIN (decl);
2102 	}
2103     }
2104   while (chain_count != old_chain_count);
2105 
2106   if (dump_file && (dump_flags & TDF_DETAILS))
2107     fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
2108 	     iter_count);
2109 }
2110 
2111 struct nesting_copy_body_data
2112 {
2113   copy_body_data cb;
2114   struct nesting_info *root;
2115 };
2116 
2117 /* A helper subroutine for debug_var_chain type remapping.  */
2118 
2119 static tree
2120 nesting_copy_decl (tree decl, copy_body_data *id)
2121 {
2122   struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2123   void **slot = pointer_map_contains (nid->root->var_map, decl);
2124 
2125   if (slot)
2126     return (tree) *slot;
2127 
2128   if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2129     {
2130       tree new_decl = copy_decl_no_change (decl, id);
2131       DECL_ORIGINAL_TYPE (new_decl)
2132 	= remap_type (DECL_ORIGINAL_TYPE (decl), id);
2133       return new_decl;
2134     }
2135 
2136   if (TREE_CODE (decl) == VAR_DECL
2137       || TREE_CODE (decl) == PARM_DECL
2138       || TREE_CODE (decl) == RESULT_DECL)
2139     return decl;
2140 
2141   return copy_decl_no_change (decl, id);
2142 }
2143 
2144 /* A helper function for remap_vla_decls.  See if *TP contains
2145    some remapped variables.  */
2146 
2147 static tree
2148 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2149 {
2150   struct nesting_info *root = (struct nesting_info *) data;
2151   tree t = *tp;
2152   void **slot;
2153 
2154   if (DECL_P (t))
2155     {
2156       *walk_subtrees = 0;
2157       slot = pointer_map_contains (root->var_map, t);
2158 
2159       if (slot)
2160 	return (tree) *slot;
2161     }
2162   return NULL;
2163 }
2164 
2165 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
2166    involved.  */
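
/* For instance (illustrative only): if the parent declares "int n;
   char buf[n];" and N was given a replacement decl recorded in
   ROOT->var_map, the variably sized type of BUF and its DECL_VALUE_EXPR
   may still mention the old N; remap them here so the debug information
   refers to the replacement instead.  */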
2167 
2168 static void
2169 remap_vla_decls (tree block, struct nesting_info *root)
2170 {
2171   tree var, subblock, val, type;
2172   struct nesting_copy_body_data id;
2173 
2174   for (subblock = BLOCK_SUBBLOCKS (block);
2175        subblock;
2176        subblock = BLOCK_CHAIN (subblock))
2177     remap_vla_decls (subblock, root);
2178 
2179   for (var = BLOCK_VARS (block); var; var = TREE_CHAIN (var))
2180     {
2181       if (TREE_CODE (var) == VAR_DECL
2182 	  && variably_modified_type_p (TREE_TYPE (var), NULL)
2183 	  && DECL_HAS_VALUE_EXPR_P (var))
2184 	{
2185 	  type = TREE_TYPE (var);
2186 	  val = DECL_VALUE_EXPR (var);
2187 	  if (walk_tree (&type, contains_remapped_vars, root, NULL) != NULL
2188 	      ||  walk_tree (&val, contains_remapped_vars, root, NULL) != NULL)
2189 	    break;
2190 	}
2191     }
2192   if (var == NULL_TREE)
2193     return;
2194 
2195   memset (&id, 0, sizeof (id));
2196   id.cb.copy_decl = nesting_copy_decl;
2197   id.cb.decl_map = pointer_map_create ();
2198   id.root = root;
2199 
2200   for (; var; var = TREE_CHAIN (var))
2201     if (TREE_CODE (var) == VAR_DECL
2202 	&& variably_modified_type_p (TREE_TYPE (var), NULL)
2203 	&& DECL_HAS_VALUE_EXPR_P (var))
2204       {
2205 	struct nesting_info *i;
2206 	tree newt, t, context;
2207 
2208 	t = type = TREE_TYPE (var);
2209 	val = DECL_VALUE_EXPR (var);
2210 	if (walk_tree (&type, contains_remapped_vars, root, NULL) == NULL
2211 	    && walk_tree (&val, contains_remapped_vars, root, NULL) == NULL)
2212 	  continue;
2213 
2214 	context = decl_function_context (var);
2215 	for (i = root; i; i = i->outer)
2216 	  if (i->context == context)
2217 	    break;
2218 
2219 	if (i == NULL)
2220 	  continue;
2221 
2222 	id.cb.src_fn = i->context;
2223 	id.cb.dst_fn = i->context;
2224 	id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2225 
2226 	TREE_TYPE (var) = newt = remap_type (type, &id.cb);
2227 	while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2228 	  {
2229 	    newt = TREE_TYPE (newt);
2230 	    t = TREE_TYPE (t);
2231 	  }
2232 	if (TYPE_NAME (newt)
2233 	    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2234 	    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2235 	    && newt != t
2236 	    && TYPE_NAME (newt) == TYPE_NAME (t))
2237 	  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2238 
2239 	walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
2240 	if (val != DECL_VALUE_EXPR (var))
2241 	  SET_DECL_VALUE_EXPR (var, val);
2242       }
2243 
2244   pointer_map_destroy (id.cb.decl_map);
2245 }
2246 
2247 /* Do "everything else" to clean up or complete state collected by the
2248    various walking passes -- lay out the types and decls, generate code
2249    to initialize the frame decl, store critical expressions in the
2250    struct function for rtl to find.  */
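
/* In rough outline (a sketch, not the literal output), for a frame that
   captures a parameter P and holds a trampoline for a nested function
   INNER, the initialization code built below amounts to

     FRAME.p = p;
     __builtin_init_trampoline (&FRAME.tramp, &inner, &FRAME);

   prepended to the body inside the function's outermost GIMPLE_BIND.  */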
2251 
2252 static void
2253 finalize_nesting_tree_1 (struct nesting_info *root)
2254 {
2255   gimple_seq stmt_list;
2256   gimple stmt;
2257   tree context = root->context;
2258   struct function *sf;
2259 
2260   stmt_list = NULL;
2261 
2262   /* If we created a non-local frame type or decl, we need to lay them
2263      out at this time.  */
2264   if (root->frame_type)
2265     {
2266       /* In some cases the frame type will trigger the -Wpadded warning.
2267 	 This is not helpful; suppress it. */
2268       int save_warn_padded = warn_padded;
2269       tree *adjust;
2270 
2271       warn_padded = 0;
2272       layout_type (root->frame_type);
2273       warn_padded = save_warn_padded;
2274       layout_decl (root->frame_decl, 0);
2275 
2276       /* Remove root->frame_decl from root->new_local_var_chain, so
2277 	 that we can declare it also in the lexical blocks, which
2278 	 helps ensure virtual regs that end up appearing in its RTL
2279 	 expression get substituted in instantiate_virtual_regs().  */
2280       for (adjust = &root->new_local_var_chain;
2281 	   *adjust != root->frame_decl;
2282 	   adjust = &TREE_CHAIN (*adjust))
2283 	gcc_assert (TREE_CHAIN (*adjust));
2284       *adjust = TREE_CHAIN (*adjust);
2285 
2286       TREE_CHAIN (root->frame_decl) = NULL_TREE;
2287       declare_vars (root->frame_decl,
2288 		    gimple_seq_first_stmt (gimple_body (context)), true);
2289     }
2290 
2291   /* If any parameters were referenced non-locally, then we need to
2292      insert a copy.  Likewise, if any variables were referenced by
2293      pointer, we need to initialize the address.  */
2294   if (root->any_parm_remapped)
2295     {
2296       tree p;
2297       for (p = DECL_ARGUMENTS (context); p ; p = TREE_CHAIN (p))
2298 	{
2299 	  tree field, x, y;
2300 
2301 	  field = lookup_field_for_decl (root, p, NO_INSERT);
2302 	  if (!field)
2303 	    continue;
2304 
2305 	  if (use_pointer_in_frame (p))
2306 	    x = build_addr (p, context);
2307 	  else
2308 	    x = p;
2309 
2310 	  y = build3 (COMPONENT_REF, TREE_TYPE (field),
2311 		      root->frame_decl, field, NULL_TREE);
2312 	  stmt = gimple_build_assign (y, x);
2313 	  gimple_seq_add_stmt (&stmt_list, stmt);
2314 	  /* If the assignment is from a non-register, the stmt is
2315 	     not valid gimple.  Make it so by using a temporary instead.  */
2316 	  if (!is_gimple_reg (x)
2317 	      && is_gimple_reg_type (TREE_TYPE (x)))
2318 	    {
2319 	      gimple_stmt_iterator gsi = gsi_last (stmt_list);
2320 	      x = init_tmp_var (root, x, &gsi);
2321 	      gimple_assign_set_rhs1 (stmt, x);
2322 	    }
2323 	}
2324     }
2325 
2326   /* If a chain_field was created, then it needs to be initialized
2327      from chain_decl.  */
2328   if (root->chain_field)
2329     {
2330       tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
2331 		       root->frame_decl, root->chain_field, NULL_TREE);
2332       stmt = gimple_build_assign (x, get_chain_decl (root));
2333       gimple_seq_add_stmt (&stmt_list, stmt);
2334     }
2335 
2336   /* If trampolines were created, then we need to initialize them.  */
2337   if (root->any_tramp_created)
2338     {
2339       struct nesting_info *i;
2340       for (i = root->inner; i ; i = i->next)
2341 	{
2342 	  tree arg1, arg2, arg3, x, field;
2343 
2344 	  field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
2345 	  if (!field)
2346 	    continue;
2347 
2348 	  gcc_assert (DECL_STATIC_CHAIN (i->context));
2349 	  arg3 = build_addr (root->frame_decl, context);
2350 
2351 	  arg2 = build_addr (i->context, context);
2352 
2353 	  x = build3 (COMPONENT_REF, TREE_TYPE (field),
2354 		      root->frame_decl, field, NULL_TREE);
2355 	  arg1 = build_addr (x, context);
2356 
2357 	  x = implicit_built_in_decls[BUILT_IN_INIT_TRAMPOLINE];
2358 	  stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
2359 	  gimple_seq_add_stmt (&stmt_list, stmt);
2360 	}
2361     }
2362 
2363   /* If we created initialization statements, insert them.  */
2364   if (stmt_list)
2365     {
2366       gimple bind;
2367       annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
2368       bind = gimple_seq_first_stmt (gimple_body (context));
2369       gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
2370       gimple_bind_set_body (bind, stmt_list);
2371     }
2372 
2373   /* If a chain_decl was created, then it needs to be registered with
2374      struct function so that it gets initialized from the static chain
2375      register at the beginning of the function.  */
2376   sf = DECL_STRUCT_FUNCTION (root->context);
2377   sf->static_chain_decl = root->chain_decl;
2378 
2379   /* Similarly for the non-local goto save area.  */
2380   if (root->nl_goto_field)
2381     {
2382       sf->nonlocal_goto_save_area
2383 	= get_frame_field (root, context, root->nl_goto_field, NULL);
2384       sf->has_nonlocal_label = 1;
2385     }
2386 
2387   /* Make sure all new local variables get inserted into the
2388      proper BIND_EXPR.  */
2389   if (root->new_local_var_chain)
2390     declare_vars (root->new_local_var_chain,
2391 		  gimple_seq_first_stmt (gimple_body (root->context)),
2392 		  false);
2393 
2394   if (root->debug_var_chain)
2395     {
2396       tree debug_var;
2397       gimple scope;
2398 
2399       remap_vla_decls (DECL_INITIAL (root->context), root);
2400 
2401       for (debug_var = root->debug_var_chain; debug_var;
2402 	   debug_var = TREE_CHAIN (debug_var))
2403 	if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
2404 	  break;
2405 
2406       /* If there are any debug decls with variable length types,
2407 	 remap those types using other debug_var_chain variables.  */
2408       if (debug_var)
2409 	{
2410 	  struct nesting_copy_body_data id;
2411 
2412 	  memset (&id, 0, sizeof (id));
2413 	  id.cb.copy_decl = nesting_copy_decl;
2414 	  id.cb.decl_map = pointer_map_create ();
2415 	  id.root = root;
2416 
2417 	  for (; debug_var; debug_var = TREE_CHAIN (debug_var))
2418 	    if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
2419 	      {
2420 		tree type = TREE_TYPE (debug_var);
2421 		tree newt, t = type;
2422 		struct nesting_info *i;
2423 
2424 		for (i = root; i; i = i->outer)
2425 		  if (variably_modified_type_p (type, i->context))
2426 		    break;
2427 
2428 		if (i == NULL)
2429 		  continue;
2430 
2431 		id.cb.src_fn = i->context;
2432 		id.cb.dst_fn = i->context;
2433 		id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2434 
2435 		TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
2436 		while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2437 		  {
2438 		    newt = TREE_TYPE (newt);
2439 		    t = TREE_TYPE (t);
2440 		  }
2441 		if (TYPE_NAME (newt)
2442 		    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2443 		    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2444 		    && newt != t
2445 		    && TYPE_NAME (newt) == TYPE_NAME (t))
2446 		  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2447 	      }
2448 
2449 	  pointer_map_destroy (id.cb.decl_map);
2450 	}
2451 
2452       scope = gimple_seq_first_stmt (gimple_body (root->context));
2453       if (gimple_bind_block (scope))
2454 	declare_vars (root->debug_var_chain, scope, true);
2455       else
2456 	BLOCK_VARS (DECL_INITIAL (root->context))
2457 	  = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
2458 		     root->debug_var_chain);
2459     }
2460 
2461   /* Dump the translated tree function.  */
2462   if (dump_file)
2463     {
2464       fputs ("\n\n", dump_file);
2465       dump_function_to_file (root->context, dump_file, dump_flags);
2466     }
2467 }
2468 
2469 static void
2470 finalize_nesting_tree (struct nesting_info *root)
2471 {
2472   struct nesting_info *n;
2473   FOR_EACH_NEST_INFO (n, root)
2474     finalize_nesting_tree_1 (n);
2475 }
2476 
2477 /* Unnest the nodes and pass them to cgraph.  */
2478 
2479 static void
2480 unnest_nesting_tree_1 (struct nesting_info *root)
2481 {
2482   struct cgraph_node *node = cgraph_node (root->context);
2483 
2484   /* For nested functions, update the cgraph to reflect unnesting.
2485      We also delay finalizing these functions until this point.  */
2486   if (node->origin)
2487     {
2488        cgraph_unnest_node (cgraph_node (root->context));
2489        cgraph_finalize_function (root->context, true);
2490     }
2491 }
2492 
2493 static void
2494 unnest_nesting_tree (struct nesting_info *root)
2495 {
2496   struct nesting_info *n;
2497   FOR_EACH_NEST_INFO (n, root)
2498     unnest_nesting_tree_1 (n);
2499 }
2500 
2501 /* Free the data structures allocated during this pass.  */
2502 
2503 static void
2504 free_nesting_tree (struct nesting_info *root)
2505 {
2506   struct nesting_info *node, *next;
2507 
2508   node = iter_nestinfo_start (root);
2509   do
2510     {
2511       next = iter_nestinfo_next (node);
2512       pointer_map_destroy (node->var_map);
2513       pointer_map_destroy (node->field_map);
2514       free (node);
2515       node = next;
2516     }
2517   while (node);
2518 }
2519 
2520 /* Gimplify a function and all its nested functions.  */
2521 static void
2522 gimplify_all_functions (struct cgraph_node *root)
2523 {
2524   struct cgraph_node *iter;
2525   if (!gimple_body (root->decl))
2526     gimplify_function_tree (root->decl);
2527   for (iter = root->nested; iter; iter = iter->next_nested)
2528     gimplify_all_functions (iter);
2529 }
2530 
2531 /* Main entry point for this pass.  Process FNDECL and all of its nested
2532    subroutines and turn them into something less tightly bound.  */
2533 
2534 void
2535 lower_nested_functions (tree fndecl)
2536 {
2537   struct cgraph_node *cgn;
2538   struct nesting_info *root;
2539 
2540   /* If there are no nested functions, there's nothing to do.  */
2541   cgn = cgraph_node (fndecl);
2542   if (!cgn->nested)
2543     return;
2544 
2545   gimplify_all_functions (cgn);
2546 
2547   dump_file = dump_begin (TDI_nested, &dump_flags);
2548   if (dump_file)
2549     fprintf (dump_file, "\n;; Function %s\n\n",
2550 	     lang_hooks.decl_printable_name (fndecl, 2));
2551 
2552   bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
2553   root = create_nesting_tree (cgn);
2554 
2555   walk_all_functions (convert_nonlocal_reference_stmt,
2556                       convert_nonlocal_reference_op,
2557 		      root);
2558   walk_all_functions (convert_local_reference_stmt,
2559                       convert_local_reference_op,
2560 		      root);
2561   walk_all_functions (convert_nl_goto_reference, NULL, root);
2562   walk_all_functions (convert_nl_goto_receiver, NULL, root);
2563 
2564   convert_all_function_calls (root);
2565   finalize_nesting_tree (root);
2566   unnest_nesting_tree (root);
2567 
2568   free_nesting_tree (root);
2569   bitmap_obstack_release (&nesting_info_bitmap_obstack);
2570 
2571   if (dump_file)
2572     {
2573       dump_end (TDI_nested, dump_file);
2574       dump_file = NULL;
2575     }
2576 }
2577 
2578 #include "gt-tree-nested.h"
2579