xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/tree-nested.c (revision ea67e31f69307fc0e2e16d6620631aeb4f6de316)
1 /* Nested function decomposition for GIMPLE.
2    Copyright (C) 2004-2015 Free Software Foundation, Inc.
3 
4    This file is part of GCC.
5 
6    GCC is free software; you can redistribute it and/or modify
7    it under the terms of the GNU General Public License as published by
8    the Free Software Foundation; either version 3, or (at your option)
9    any later version.
10 
11    GCC is distributed in the hope that it will be useful,
12    but WITHOUT ANY WARRANTY; without even the implied warranty of
13    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14    GNU General Public License for more details.
15 
16    You should have received a copy of the GNU General Public License
17    along with GCC; see the file COPYING3.  If not see
18    <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "hash-set.h"
25 #include "machmode.h"
26 #include "vec.h"
27 #include "double-int.h"
28 #include "input.h"
29 #include "alias.h"
30 #include "symtab.h"
31 #include "wide-int.h"
32 #include "inchash.h"
33 #include "tree.h"
34 #include "fold-const.h"
35 #include "stringpool.h"
36 #include "stor-layout.h"
37 #include "tm_p.h"
38 #include "hard-reg-set.h"
39 #include "function.h"
40 #include "tree-dump.h"
41 #include "tree-inline.h"
42 #include "predict.h"
43 #include "basic-block.h"
44 #include "tree-ssa-alias.h"
45 #include "internal-fn.h"
46 #include "gimple-expr.h"
47 #include "is-a.h"
48 #include "gimple.h"
49 #include "gimplify.h"
50 #include "gimple-iterator.h"
51 #include "gimple-walk.h"
52 #include "tree-iterator.h"
53 #include "bitmap.h"
54 #include "hash-map.h"
55 #include "plugin-api.h"
56 #include "ipa-ref.h"
57 #include "cgraph.h"
58 #include "tree-cfg.h"
59 #include "hashtab.h"
60 #include "rtl.h"
61 #include "flags.h"
62 #include "statistics.h"
63 #include "real.h"
64 #include "fixed-value.h"
65 #include "insn-config.h"
66 #include "expmed.h"
67 #include "dojump.h"
68 #include "explow.h"
69 #include "calls.h"
70 #include "emit-rtl.h"
71 #include "varasm.h"
72 #include "stmt.h"
73 #include "expr.h"	/* FIXME: For STACK_SAVEAREA_MODE and SAVE_NONLOCAL.  */
74 #include "langhooks.h"
75 #include "gimple-low.h"
76 #include "gomp-constants.h"
77 
78 
79 /* The object of this pass is to lower the representation of a set of nested
80    functions in order to expose all of the gory details of the various
81    nonlocal references.  We want to do this sooner rather than later, in
82    order to give us more freedom in emitting all of the functions in question.
83 
84    Back in olden times, when gcc was young, we developed an insanely
85    complicated scheme whereby variables which were referenced nonlocally
86    were forced to live in the stack of the declaring function, and then
87    the nested functions magically discovered where these variables were
88    placed.  In order for this scheme to function properly, it required
89    that the outer function be partially expanded, then we switch to
90    compiling the inner function, and once done with those we switch back
91    to compiling the outer function.  Such delicate ordering requirements
92    make it difficult to do whole translation unit optimizations
93    involving such functions.
94 
95    The implementation here is much more direct.  Everything that can be
96    referenced by an inner function is a member of an explicitly created
97    structure herein called the "nonlocal frame struct".  The incoming
98    static chain for a nested function is a pointer to this struct in
99    the parent.  In this way, we settle on known offsets from a known
100    base, and so are decoupled from the logic that places objects in the
101    function's stack frame.  More importantly, we don't have to wait for
102    that to happen -- since the compilation of the inner function is no
103    longer tied to a real stack frame, the nonlocal frame struct can be
104    allocated anywhere.  Which means that the outer function is now
105    inlinable.
106 
107    Theory of operation here is very simple.  Iterate over all the
108    statements in all the functions (depth first) several times,
109    allocating structures and fields on demand.  In general we want to
110    examine inner functions first, so that we can avoid making changes
111    to outer functions which are unnecessary.
112 
113    The order of the passes matters a bit, in that later passes will be
114    skipped if it is discovered that the functions don't actually interact
115    at all.  That is, they're nested in the lexical sense but could have
116    been written as independent functions without change.  */
117 
118 
119 struct nesting_info
120 {
121   struct nesting_info *outer;	/* Enclosing function's info, NULL at the root.  */
122   struct nesting_info *inner;	/* Head of the list of directly nested functions.  */
123   struct nesting_info *next;	/* Next sibling nested in the same OUTER.  */
124 
125   hash_map<tree, tree> *field_map;	/* Nonlocal decl -> FIELD_DECL in the frame struct.  */
126   hash_map<tree, tree> *var_map;	/* Decl -> local replacement / trampoline field.  */
127   hash_set<tree *> *mem_refs;	/* Operand slots already rewritten (NOTE: usage not visible in this chunk).  */
128   bitmap suppress_expansion;	/* DECL_UIDs whose frame expansion is suppressed.  */
129 
130   tree context;	/* The FUNCTION_DECL this node describes.  */
131   tree new_local_var_chain;	/* Temporaries made by create_tmp_var_for, pending registration.  */
132   tree debug_var_chain;	/* Debug decls built by get_nonlocal_debug_decl.  */
133   tree frame_type;	/* RECORD_TYPE of the nonlocal frame struct.  */
134   tree frame_decl;	/* Local VAR_DECL holding the frame struct.  */
135   tree chain_field;	/* "__chain" field linking to the parent's frame.  */
136   tree chain_decl;	/* Incoming static-chain PARM_DECL ("CHAIN").  */
137   tree nl_goto_field;	/* "__nl_goto_buf" field for nonlocal gotos.  */
138 
139   bool any_parm_remapped;	/* Some PARM_DECL was given a frame field.  */
140   bool any_tramp_created;	/* Some trampoline field was created.  */
141   char static_chain_added;	/* Bitmask: 1 = frame address used, 2 = chain decl used.  */
142 };
143 
144 
145 /* Iterate over the nesting tree, starting with ROOT, depth first.  */
146 
147 static inline struct nesting_info *
148 iter_nestinfo_start (struct nesting_info *root)
149 {
150   while (root->inner)
151     root = root->inner;
152   return root;
153 }
154 
155 static inline struct nesting_info *
156 iter_nestinfo_next (struct nesting_info *node)
157 {
158   if (node->next)
159     return iter_nestinfo_start (node->next);
160   return node->outer;
161 }
162 
163 #define FOR_EACH_NEST_INFO(I, ROOT) \
164   for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
165 
166 /* Obstack used for the bitmaps in the struct above.  */
167 static struct bitmap_obstack nesting_info_bitmap_obstack;
168 
169 
170 /* We're working in so many different function contexts simultaneously,
171    that create_tmp_var is dangerous.  Prevent mishap.  */
172 #define create_tmp_var cant_use_create_tmp_var_here_dummy
173 
174 /* Like create_tmp_var, except record the variable for registration at
175    the given nesting level.  */
176 
177 static tree
178 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
179 {
180   tree tmp_var;
181 
182   /* If the type is of variable size or a type which must be created by the
183      frontend, something is wrong.  Note that we explicitly allow
184      incomplete types here, since we create them ourselves here.  */
185   gcc_assert (!TREE_ADDRESSABLE (type));
186   gcc_assert (!TYPE_SIZE_UNIT (type)
187 	      || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
188 
189   tmp_var = create_tmp_var_raw (type, prefix);
190   DECL_CONTEXT (tmp_var) = info->context;
191   DECL_CHAIN (tmp_var) = info->new_local_var_chain;
192   DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
193   if (TREE_CODE (type) == COMPLEX_TYPE
194       || TREE_CODE (type) == VECTOR_TYPE)
195     DECL_GIMPLE_REG_P (tmp_var) = 1;
196 
197   info->new_local_var_chain = tmp_var;
198 
199   return tmp_var;
200 }
201 
202 /* Take the address of EXP to be used within function CONTEXT.
203    Mark it for addressability as necessary.  */
204 
205 tree
206 build_addr (tree exp, tree context)
207 {
208   tree base = exp;
209   tree save_context;
210   tree retval;
211 
212   while (handled_component_p (base))
213     base = TREE_OPERAND (base, 0);
214 
215   if (DECL_P (base))
216     TREE_ADDRESSABLE (base) = 1;
217 
218   /* Building the ADDR_EXPR will compute a set of properties for
219      that ADDR_EXPR.  Those properties are unfortunately context
220      specific, i.e., they are dependent on CURRENT_FUNCTION_DECL.
221 
222      Temporarily set CURRENT_FUNCTION_DECL to the desired context,
223      build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL.  That
224      way the properties are for the ADDR_EXPR are computed properly.  */
225   save_context = current_function_decl;
226   current_function_decl = context;
227   retval = build_fold_addr_expr (exp);
228   current_function_decl = save_context;
229   return retval;
230 }
231 
232 /* Insert FIELD into TYPE, sorted by alignment requirements.  */
233 
234 void
235 insert_field_into_struct (tree type, tree field)
236 {
237   tree *p;
238 
239   DECL_CONTEXT (field) = type;
240 
241   for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
242     if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
243       break;
244 
245   DECL_CHAIN (field) = *p;
246   *p = field;
247 
248   /* Set correct alignment for frame struct type.  */
249   if (TYPE_ALIGN (type) < DECL_ALIGN (field))
250     TYPE_ALIGN (type) = DECL_ALIGN (field);
251 }
252 
253 /* Build or return the RECORD_TYPE that describes the frame state that is
254    shared between INFO->CONTEXT and its nested functions.  This record will
255    not be complete until finalize_nesting_tree; up until that point we'll
256    be adding fields as necessary.
257 
258    We also build the DECL that represents this frame in the function.  */
259 
260 static tree
261 get_frame_type (struct nesting_info *info)
262 {
263   tree type = info->frame_type;
264   if (!type)
265     {
266       char *name;
267 
268       type = make_node (RECORD_TYPE);
269 
270       name = concat ("FRAME.",
271 		     IDENTIFIER_POINTER (DECL_NAME (info->context)),
272 		     NULL);
273       TYPE_NAME (type) = get_identifier (name);
274       free (name);
275 
276       info->frame_type = type;
277       info->frame_decl = create_tmp_var_for (info, type, "FRAME");
278       DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
279 
280       /* ??? Always make it addressable for now, since it is meant to
281 	 be pointed to by the static chain pointer.  This pessimizes
282 	 when it turns out that no static chains are needed because
283 	 the nested functions referencing non-local variables are not
284 	 reachable, but the true pessimization is to create the non-
285 	 local frame structure in the first place.  */
286       TREE_ADDRESSABLE (info->frame_decl) = 1;
287     }
288   return type;
289 }
290 
291 /* Return true if DECL should be referenced by pointer in the non-local
292    frame structure.  */
293 
294 static bool
295 use_pointer_in_frame (tree decl)
296 {
297   if (TREE_CODE (decl) == PARM_DECL)
298     {
299       /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
300          sized decls, and inefficient to copy large aggregates.  Don't bother
301          moving anything but scalar variables.  */
302       return AGGREGATE_TYPE_P (TREE_TYPE (decl));
303     }
304   else
305     {
306       /* Variable sized types make things "interesting" in the frame.  */
307       return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
308     }
309 }
310 
311 /* Given DECL, a non-locally accessed variable, find or create a field
312    in the non-local frame structure for the given nesting context.  */
313 
314 static tree
315 lookup_field_for_decl (struct nesting_info *info, tree decl,
316 		       enum insert_option insert)
317 {
318   if (insert == NO_INSERT)
319     {
320       tree *slot = info->field_map->get (decl);
321       return slot ? *slot : NULL_TREE;
322     }
323 
324   tree *slot = &info->field_map->get_or_insert (decl);
325   if (!*slot)
326     {
327       tree field = make_node (FIELD_DECL);
328       DECL_NAME (field) = DECL_NAME (decl);
329 
330       if (use_pointer_in_frame (decl))
331 	{
332 	  TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
333 	  DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
334 	  DECL_NONADDRESSABLE_P (field) = 1;
335 	}
336       else
337 	{
338           TREE_TYPE (field) = TREE_TYPE (decl);
339           DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
340           DECL_ALIGN (field) = DECL_ALIGN (decl);
341           DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
342           TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
343           DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
344           TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
345 	}
346 
347       insert_field_into_struct (get_frame_type (info), field);
348       *slot = field;
349 
350       if (TREE_CODE (decl) == PARM_DECL)
351 	info->any_parm_remapped = true;
352     }
353 
354   return *slot;
355 }
356 
357 /* Build or return the variable that holds the static chain within
358    INFO->CONTEXT.  This variable may only be used within INFO->CONTEXT.  */
359 
360 static tree
361 get_chain_decl (struct nesting_info *info)
362 {
363   tree decl = info->chain_decl;
364 
365   if (!decl)
366     {
367       tree type;
368 
369       type = get_frame_type (info->outer);
370       type = build_pointer_type (type);
371 
372       /* Note that this variable is *not* entered into any BIND_EXPR;
373 	 the construction of this variable is handled specially in
374 	 expand_function_start and initialize_inlined_parameters.
375 	 Note also that it's represented as a parameter.  This is more
376 	 close to the truth, since the initial value does come from
377 	 the caller.  */
378       decl = build_decl (DECL_SOURCE_LOCATION (info->context),
379 			 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
380       DECL_ARTIFICIAL (decl) = 1;
381       DECL_IGNORED_P (decl) = 1;
382       TREE_USED (decl) = 1;
383       DECL_CONTEXT (decl) = info->context;
384       DECL_ARG_TYPE (decl) = type;
385 
386       /* Tell tree-inline.c that we never write to this variable, so
387 	 it can copy-prop the replacement value immediately.  */
388       TREE_READONLY (decl) = 1;
389 
390       info->chain_decl = decl;
391 
392       if (dump_file
393           && (dump_flags & TDF_DETAILS)
394 	  && !DECL_STATIC_CHAIN (info->context))
395 	fprintf (dump_file, "Setting static-chain for %s\n",
396 		 lang_hooks.decl_printable_name (info->context, 2));
397 
398       DECL_STATIC_CHAIN (info->context) = 1;
399     }
400   return decl;
401 }
402 
403 /* Build or return the field within the non-local frame state that holds
404    the static chain for INFO->CONTEXT.  This is the way to walk back up
405    multiple nesting levels.  */
406 
407 static tree
408 get_chain_field (struct nesting_info *info)
409 {
410   tree field = info->chain_field;
411 
412   if (!field)
413     {
414       tree type = build_pointer_type (get_frame_type (info->outer));
415 
416       field = make_node (FIELD_DECL);
417       DECL_NAME (field) = get_identifier ("__chain");
418       TREE_TYPE (field) = type;
419       DECL_ALIGN (field) = TYPE_ALIGN (type);
420       DECL_NONADDRESSABLE_P (field) = 1;
421 
422       insert_field_into_struct (get_frame_type (info), field);
423 
424       info->chain_field = field;
425 
426       if (dump_file
427           && (dump_flags & TDF_DETAILS)
428 	  && !DECL_STATIC_CHAIN (info->context))
429 	fprintf (dump_file, "Setting static-chain for %s\n",
430 		 lang_hooks.decl_printable_name (info->context, 2));
431 
432       DECL_STATIC_CHAIN (info->context) = 1;
433     }
434   return field;
435 }
436 
437 /* Initialize a new temporary with the GIMPLE_CALL STMT.  */
438 
439 static tree
440 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
441 		        gcall *call)
442 {
443   tree t;
444 
445   t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
446   gimple_call_set_lhs (call, t);
447   if (! gsi_end_p (*gsi))
448     gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
449   gsi_insert_before (gsi, call, GSI_SAME_STMT);
450 
451   return t;
452 }
453 
454 
455 /* Copy EXP into a temporary.  Allocate the temporary in the context of
456    INFO and insert the initialization statement before GSI.  */
457 
458 static tree
459 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
460 {
461   tree t;
462   gimple stmt;
463 
464   t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
465   stmt = gimple_build_assign (t, exp);
466   if (! gsi_end_p (*gsi))
467     gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
468   gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
469 
470   return t;
471 }
472 
473 
474 /* Similarly, but only do so to force EXP to satisfy is_gimple_val.  */
475 
476 static tree
477 gsi_gimplify_val (struct nesting_info *info, tree exp,
478 		  gimple_stmt_iterator *gsi)
479 {
480   if (is_gimple_val (exp))
481     return exp;
482   else
483     return init_tmp_var (info, exp, gsi);
484 }
485 
486 /* Similarly, but copy from the temporary and insert the statement
487    after the iterator.  */
488 
489 static tree
490 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
491 {
492   tree t;
493   gimple stmt;
494 
495   t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
496   stmt = gimple_build_assign (exp, t);
497   if (! gsi_end_p (*gsi))
498     gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
499   gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
500 
501   return t;
502 }
503 
504 /* Build or return the type used to represent a nested function trampoline.  */
505 
506 static GTY(()) tree trampoline_type;
507 
508 static tree
509 get_trampoline_type (struct nesting_info *info)
510 {
511   unsigned align, size;
512   tree t;
513 
514   if (trampoline_type)
515     return trampoline_type;
516 
517   align = TRAMPOLINE_ALIGNMENT;
518   size = TRAMPOLINE_SIZE;
519 
520   /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
521      then allocate extra space so that we can do dynamic alignment.  */
522   if (align > STACK_BOUNDARY)
523     {
524       size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
525       align = STACK_BOUNDARY;
526     }
527 
528   t = build_index_type (size_int (size - 1));
529   t = build_array_type (char_type_node, t);
530   t = build_decl (DECL_SOURCE_LOCATION (info->context),
531 		  FIELD_DECL, get_identifier ("__data"), t);
532   DECL_ALIGN (t) = align;
533   DECL_USER_ALIGN (t) = 1;
534 
535   trampoline_type = make_node (RECORD_TYPE);
536   TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
537   TYPE_FIELDS (trampoline_type) = t;
538   layout_type (trampoline_type);
539   DECL_CONTEXT (t) = trampoline_type;
540 
541   return trampoline_type;
542 }
543 
544 /* Given DECL, a nested function, find or create a field in the non-local
545    frame structure for a trampoline for this function.  */
546 
547 static tree
548 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
549 		       enum insert_option insert)
550 {
551   if (insert == NO_INSERT)
552     {
553       tree *slot = info->var_map->get (decl);
554       return slot ? *slot : NULL_TREE;
555     }
556 
557   tree *slot = &info->var_map->get_or_insert (decl);
558   if (!*slot)
559     {
560       tree field = make_node (FIELD_DECL);
561       DECL_NAME (field) = DECL_NAME (decl);
562       TREE_TYPE (field) = get_trampoline_type (info);
563       TREE_ADDRESSABLE (field) = 1;
564 
565       insert_field_into_struct (get_frame_type (info), field);
566       *slot = field;
567 
568       info->any_tramp_created = true;
569     }
570 
571   return *slot;
572 }
573 
574 /* Build or return the field within the non-local frame state that holds
575    the non-local goto "jmp_buf".  The buffer itself is maintained by the
576    rtl middle-end as dynamic stack space is allocated.  */
577 
578 static tree
579 get_nl_goto_field (struct nesting_info *info)
580 {
581   tree field = info->nl_goto_field;
582   if (!field)
583     {
584       unsigned size;
585       tree type;
586 
587       /* For __builtin_nonlocal_goto, we need N words.  The first is the
588 	 frame pointer, the rest is for the target's stack pointer save
589 	 area.  The number of words is controlled by STACK_SAVEAREA_MODE;
590 	 not the best interface, but it'll do for now.  */
591       if (Pmode == ptr_mode)
592 	type = ptr_type_node;
593       else
594 	type = lang_hooks.types.type_for_mode (Pmode, 1);
595 
596       size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
597       size = size / GET_MODE_SIZE (Pmode);
598       size = size + 1;
599 
600       type = build_array_type
601 	(type, build_index_type (size_int (size)));
602 
603       field = make_node (FIELD_DECL);
604       DECL_NAME (field) = get_identifier ("__nl_goto_buf");
605       TREE_TYPE (field) = type;
606       DECL_ALIGN (field) = TYPE_ALIGN (type);
607       TREE_ADDRESSABLE (field) = 1;
608 
609       insert_field_into_struct (get_frame_type (info), field);
610 
611       info->nl_goto_field = field;
612     }
613 
614   return field;
615 }
616 
617 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ.  */
618 
619 static void
620 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
621 	   struct nesting_info *info, gimple_seq *pseq)
622 {
623   struct walk_stmt_info wi;
624 
625   memset (&wi, 0, sizeof (wi));
626   wi.info = info;
627   wi.val_only = true;
628   walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
629 }
630 
631 
632 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT.  */
633 
634 static inline void
635 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
636 	       struct nesting_info *info)
637 {
638   gimple_seq body = gimple_body (info->context);
639   walk_body (callback_stmt, callback_op, info, &body);
640   gimple_set_body (info->context, body);
641 }
642 
643 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body.  */
644 
645 static void
646 walk_gimple_omp_for (gomp_for *for_stmt,
647     		     walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
648     		     struct nesting_info *info)
649 {
650   struct walk_stmt_info wi;
651   gimple_seq seq;
652   tree t;
653   size_t i;
654 
655   walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));
656 
    /* Set up WI with an iterator into an (initially empty) sequence:
       statements inserted by CALLBACK_OP while walking the clause
       operands accumulate there and are collected below.  */
657   seq = NULL;
658   memset (&wi, 0, sizeof (wi));
659   wi.info = info;
660   wi.gsi = gsi_last (seq);
661 
662   for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
663     {
      /* The index is written, so walk it in non-value (lvalue) mode;
	 the initial and final bounds are plain values.  */
664       wi.val_only = false;
665       walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
666 		 &wi, NULL);
667       wi.val_only = true;
668       wi.is_lhs = false;
669       walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
670 		 &wi, NULL);
671 
672       wi.val_only = true;
673       wi.is_lhs = false;
674       walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
675 		 &wi, NULL);
676 
      /* The increment is a binary expression; its first operand is the
	 index (non-value), the second a plain value.  */
677       t = gimple_omp_for_incr (for_stmt, i);
678       gcc_assert (BINARY_CLASS_P (t));
679       wi.val_only = false;
680       walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
681       wi.val_only = true;
682       wi.is_lhs = false;
683       walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
684     }
685 
    /* Any statements the callbacks generated are appended to the
       pre-body, annotated with the loop's location.  */
686   seq = gsi_seq (wi.gsi);
687   if (!gimple_seq_empty_p (seq))
688     {
689       gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
690       annotate_all_with_location (seq, gimple_location (for_stmt));
691       gimple_seq_add_seq (&pre_body, seq);
692       gimple_omp_for_set_pre_body (for_stmt, pre_body);
693     }
694 }
695 
696 /* Similarly for ROOT and all functions nested underneath, depth first.  */
697 
698 static void
699 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
700 		    struct nesting_info *root)
701 {
702   struct nesting_info *n;
703   FOR_EACH_NEST_INFO (n, root)
704     walk_function (callback_stmt, callback_op, n);
705 }
706 
707 
708 /* We have to check for a fairly pathological case.  The operands of a
709    nested function are to be interpreted in the context of the enclosing
710    function.  So if any are variably-sized, they will get remapped when the
711    enclosing function is inlined.  But that remapping would also have to be
712    done in the types of the PARM_DECLs of the nested function, meaning the
713    argument types of that function will disagree with the arguments in the
714    calls to that function.  So we'd either have to make a copy of the nested
715    function corresponding to each time the enclosing function was inlined or
716    add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
717    function.  The former is not practical.  The latter would still require
718    detecting this case to know when to add the conversions.  So, for now at
719    least, we don't inline such an enclosing function.
720 
721    We have to do that check recursively, so here return indicating whether
722    FNDECL has such a nested function.  ORIG_FN is the function we were
723    trying to inline to use for checking whether any argument is variably
724    modified by anything in it.
725 
726    It would be better to do this in tree-inline.c so that we could give
727    the appropriate warning for why a function can't be inlined, but that's
728    too late since the nesting structure has already been flattened and
729    adding a flag just to record this fact seems a waste of a flag.  */
730 
731 static bool
732 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
733 {
734   struct cgraph_node *cgn = cgraph_node::get (fndecl);
735   tree arg;
736 
737   for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
738     {
739       for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
740 	if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
741 	  return true;
742 
743       if (check_for_nested_with_variably_modified (cgn->decl,
744 						   orig_fndecl))
745 	return true;
746     }
747 
748   return false;
749 }
750 
751 /* Construct our local datastructure describing the function nesting
752    tree rooted by CGN.  */
753 
754 static struct nesting_info *
755 create_nesting_tree (struct cgraph_node *cgn)
756 {
757   struct nesting_info *info = XCNEW (struct nesting_info);
758   info->field_map = new hash_map<tree, tree>;
759   info->var_map = new hash_map<tree, tree>;
760   info->mem_refs = new hash_set<tree *>;
761   info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
762   info->context = cgn->decl;
763 
764   for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
765     {
766       struct nesting_info *sub = create_nesting_tree (cgn);
767       sub->outer = info;
768       sub->next = info->inner;
769       info->inner = sub;
770     }
771 
772   /* See discussion at check_for_nested_with_variably_modified for a
773      discussion of why this has to be here.  */
774   if (check_for_nested_with_variably_modified (info->context, info->context))
775     DECL_UNINLINABLE (info->context) = true;
776 
777   return info;
778 }
779 
780 /* Return an expression computing the static chain for TARGET_CONTEXT
781    from INFO->CONTEXT.  Insert any necessary computations before TSI.  */
782 
783 static tree
784 get_static_chain (struct nesting_info *info, tree target_context,
785 		  gimple_stmt_iterator *gsi)
786 {
787   struct nesting_info *i;
788   tree x;
789 
790   if (info->context == target_context)
791     {
792       x = build_addr (info->frame_decl, target_context);
793       info->static_chain_added |= 1;
794     }
795   else
796     {
797       x = get_chain_decl (info);
798       info->static_chain_added |= 2;
799 
800       for (i = info->outer; i->context != target_context; i = i->outer)
801 	{
802 	  tree field = get_chain_field (i);
803 
804 	  x = build_simple_mem_ref (x);
805 	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
806 	  x = init_tmp_var (info, x, gsi);
807 	}
808     }
809 
810   return x;
811 }
812 
813 
814 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
815    frame as seen from INFO->CONTEXT.  Insert any necessary computations
816    before GSI.  */
817 
818 static tree
819 get_frame_field (struct nesting_info *info, tree target_context,
820 		 tree field, gimple_stmt_iterator *gsi)
821 {
822   struct nesting_info *i;
823   tree x;
824 
825   if (info->context == target_context)
826     {
827       /* Make sure frame_decl gets created.  */
828       (void) get_frame_type (info);
829       x = info->frame_decl;
830       info->static_chain_added |= 1;
831     }
832   else
833     {
834       x = get_chain_decl (info);
835       info->static_chain_added |= 2;
836 
837       for (i = info->outer; i->context != target_context; i = i->outer)
838 	{
839 	  tree field = get_chain_field (i);
840 
841 	  x = build_simple_mem_ref (x);
842 	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
843 	  x = init_tmp_var (info, x, gsi);
844 	}
845 
846       x = build_simple_mem_ref (x);
847     }
848 
849   x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
850   return x;
851 }
852 
853 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
854 
855 /* A subroutine of convert_nonlocal_reference_op.  Create a local variable
856    in the nested function with DECL_VALUE_EXPR set to reference the true
857    variable in the parent function.  This is used both for debug info
858    and in OMP lowering.  */
859 
860 static tree
861 get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
862 {
863   tree target_context;
864   struct nesting_info *i;
865   tree x, field, new_decl;
866 
    /* Reuse a previously built replacement if there is one.  */
867   tree *slot = &info->var_map->get_or_insert (decl);
868 
869   if (*slot)
870     return *slot;
871 
872   target_context = decl_function_context (decl);
873 
874   /* A copy of the code in get_frame_field, but without the temporaries.  */
875   if (info->context == target_context)
876     {
877       /* Make sure frame_decl gets created.  */
878       (void) get_frame_type (info);
879       x = info->frame_decl;
880       i = info;
881       info->static_chain_added |= 1;
882     }
883   else
884     {
      /* Follow the __chain fields up to the frame that declares DECL;
	 I ends up as the nesting_info owning that frame.  */
885       x = get_chain_decl (info);
886       info->static_chain_added |= 2;
887       for (i = info->outer; i->context != target_context; i = i->outer)
888 	{
889 	  field = get_chain_field (i);
890 	  x = build_simple_mem_ref (x);
891 	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
892 	}
893       x = build_simple_mem_ref (x);
894     }
895 
896   field = lookup_field_for_decl (i, decl, INSERT);
897   x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
898   if (use_pointer_in_frame (decl))
899     x = build_simple_mem_ref (x);
900 
901   /* ??? We should be remapping types as well, surely.  */
    /* Build the local stand-in and copy the attributes that matter for
       debug info and gimplification from the original decl.  */
902   new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
903 			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
904   DECL_CONTEXT (new_decl) = info->context;
905   DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
906   DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
907   TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
908   TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
909   TREE_READONLY (new_decl) = TREE_READONLY (decl);
910   TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
911   DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
912   if ((TREE_CODE (decl) == PARM_DECL
913        || TREE_CODE (decl) == RESULT_DECL
914        || TREE_CODE (decl) == VAR_DECL)
915       && DECL_BY_REFERENCE (decl))
916     DECL_BY_REFERENCE (new_decl) = 1;
917 
    /* The frame access expression becomes the decl's value expr, so
       uses of NEW_DECL expand to the nonlocal reference.  */
918   SET_DECL_VALUE_EXPR (new_decl, x);
919   DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
920 
921   *slot = new_decl;
922   DECL_CHAIN (new_decl) = info->debug_var_chain;
923   info->debug_var_chain = new_decl;
924 
    /* For VLAs at -O0, also note the nonlocal type so its size decls
       get remapped.  */
925   if (!optimize
926       && info->context != target_context
927       && variably_modified_type_p (TREE_TYPE (decl), NULL))
928     note_nonlocal_vla_type (info, TREE_TYPE (decl));
929 
930   return new_decl;
931 }
932 
933 
934 /* Callback for walk_gimple_stmt, rewrite all references to VAR
935    and PARM_DECLs that belong to outer functions.
936 
937    The rewrite will involve some number of structure accesses back up
938    the static chain.  E.g. for a variable FOO up one nesting level it'll
939    be CHAIN->FOO.  For two levels it'll be CHAIN->__chain->FOO.  Further
940    indirections apply to decls for which use_pointer_in_frame is true.  */
941 
static tree
convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp;

  /* By default this callback fully handles the node; the cases that want
     the generic walker to recurse into operands reset this below.  */
  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) != info->context)
	{
	  tree x;
	  wi->changed = true;

	  /* Default to the debug decl; unless expansion of T is
	     suppressed, replace that with an actual reference through
	     the static chain into the owning function's frame.  */
	  x = get_nonlocal_debug_decl (info, t);
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    {
	      tree target_context = decl_function_context (t);
	      struct nesting_info *i;
	      /* Find the nesting level that owns T.  */
	      for (i = info->outer; i->context != target_context; i = i->outer)
		continue;
	      x = lookup_field_for_decl (i, t, INSERT);
	      x = get_frame_field (info, target_context, x, &wi->gsi);
	      /* Decls stored by pointer in the frame need one more
		 dereference to reach the object itself.  */
	      if (use_pointer_in_frame (t))
		{
		  x = init_tmp_var (info, x, &wi->gsi);
		  x = build_simple_mem_ref (x);
		}
	    }

	  /* If a simple value is required, funnel the reference through
	     a temporary: a fresh load for a use, a deferred store for
	     a definition.  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case LABEL_DECL:
      /* We're taking the address of a label from a parent function, but
	 this is not itself a non-local goto.  Mark the label such that it
	 will not be deleted, much as we would with a label address in
	 static storage.  */
      if (decl_function_context (t) != info->context)
        FORCED_LABEL (t) = 1;
      break;

    case ADDR_EXPR:
      {
	bool save_val_only = wi->val_only;

	/* Walk the operand in address context: we want the lvalue,
	   not its value.  */
	wi->val_only = false;
	wi->is_lhs = false;
	wi->changed = false;
	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
	wi->val_only = true;

	if (wi->changed)
	  {
	    tree save_context;

	    /* If we changed anything, we might no longer be directly
	       referencing a decl.  */
	    save_context = current_function_decl;
	    current_function_decl = info->context;
	    recompute_tree_invariant_for_addr_expr (t);
	    current_function_decl = save_context;

	    /* If the callback converted the address argument in a context
	       where we only accept variables (and min_invariant, presumably),
	       then compute the address into a temporary.  */
	    if (save_val_only)
	      *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				      t, &wi->gsi);
	  }
      }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      /* Index, lower bound and element size may all mention
		 nonlocal decls.  */
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
			 wi, NULL);
	    }
	}
      /* Finally process the base object of the reference chain.  */
      wi->val_only = false;
      walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
          wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1082 
1083 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1084 					     struct walk_stmt_info *);
1085 
1086 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1087    and PARM_DECLs that belong to outer functions.  */
1088 
static bool
convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_chain = false, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  /* Decls rewritten here must not be expanded again when the operand
     walker later visits the construct's body; collect them in a copy
     of the suppression bitmap, installed after the first pass.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  /* Reduction placeholders carry GIMPLE sequences of their own;
	     those are walked in the second pass below.  */
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LINEAR:
	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  /* The step expression may itself reference nonlocal decls.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
					 &dummy, wi);
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	do_decl_clause:
	  /* Common handling for clauses naming a single decl: replace a
	     nonlocal decl with its debug decl and suppress its later
	     expansion.  */
	  decl = OMP_CLAUSE_DECL (clause);
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      bitmap_set_bit (new_suppress, DECL_UID (decl));
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
		need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	  /* These clauses hold a single expression operand; rewrite any
	     nonlocal references inside it.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					 &dummy, wi);
	  break;

	case OMP_CLAUSE_DIST_SCHEDULE:
	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					     &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	  /* Both the mapped size and the mapped object may reference
	     nonlocal decls; the object may also be a non-decl address
	     expression, hence the full walk_tree below.  */
	  if (OMP_CLAUSE_SIZE (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
					     &dummy, wi);
	    }
	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
	    goto do_decl_clause;
	  wi->val_only = true;
	  wi->is_lhs = false;
	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
		     wi, NULL);
	  break;

	case OMP_CLAUSE_ALIGNED:
	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op
		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
	    }
	  /* Like do_decl_clause, but don't add any suppression.  */
	  decl = OMP_CLAUSE_DECL (clause);
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
		need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	  /* No operands that could reference nonlocal decls.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  /* Second pass: walk the GIMPLE sequences attached to clauses noted
     above.  For reductions the placeholder's context is temporarily
     switched so that decl_function_context tests treat it as local.  */
  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
	  break;

	case OMP_CLAUSE_LINEAR:
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
	  break;

	default:
	  break;
	}

  return need_chain;
}
1267 
1268 /* Create nonlocal debug decls for nonlocal VLA array bounds.  */
1269 
1270 static void
1271 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1272 {
1273   while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1274     type = TREE_TYPE (type);
1275 
1276   if (TYPE_NAME (type)
1277       && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1278       && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1279     type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1280 
1281   while (POINTER_TYPE_P (type)
1282 	 || TREE_CODE (type) == VECTOR_TYPE
1283 	 || TREE_CODE (type) == FUNCTION_TYPE
1284 	 || TREE_CODE (type) == METHOD_TYPE)
1285     type = TREE_TYPE (type);
1286 
1287   if (TREE_CODE (type) == ARRAY_TYPE)
1288     {
1289       tree domain, t;
1290 
1291       note_nonlocal_vla_type (info, TREE_TYPE (type));
1292       domain = TYPE_DOMAIN (type);
1293       if (domain)
1294 	{
1295 	  t = TYPE_MIN_VALUE (domain);
1296 	  if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1297 	      && decl_function_context (t) != info->context)
1298 	    get_nonlocal_debug_decl (info, t);
1299 	  t = TYPE_MAX_VALUE (domain);
1300 	  if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1301 	      && decl_function_context (t) != info->context)
1302 	    get_nonlocal_debug_decl (info, t);
1303 	}
1304     }
1305 }
1306 
1307 /* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
1308    in BLOCK.  */
1309 
1310 static void
1311 note_nonlocal_block_vlas (struct nesting_info *info, tree block)
1312 {
1313   tree var;
1314 
1315   for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
1316     if (TREE_CODE (var) == VAR_DECL
1317 	&& variably_modified_type_p (TREE_TYPE (var), NULL)
1318 	&& DECL_HAS_VALUE_EXPR_P (var)
1319 	&& decl_function_context (var) != info->context)
1320       note_nonlocal_vla_type (info, TREE_TYPE (var));
1321 }
1322 
1323 /* Callback for walk_gimple_stmt.  Rewrite all references to VAR and
1324    PARM_DECLs that belong to outer functions.  This handles statements
1325    that are not handled via the standard recursion done in
1326    walk_gimple_stmt.  STMT is the statement to examine, DATA is as in
1327    convert_nonlocal_reference_op.  Set *HANDLED_OPS_P to true if all the
1328    operands of STMT have been handled by this function.  */
1329 
static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
				 struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      /* Don't walk non-local gotos for now.  */
      if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      /* If any clause referenced an outer decl, the spawned function
	 needs the static chain; hand it in via a firstprivate clause.  */
      if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
	                                wi))
	{
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt),
				OMP_CLAUSE_FIRSTPRIVATE);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}

      /* Temporaries created while rewriting the OMP body must be
	 declared inside that body, not in the enclosing function.  */
      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
	              gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      /* The loop bounds/steps also need rewriting, via the OMP_FOR
	 specific walker.  */
      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
			   convert_nonlocal_reference_stmt,
	  		   convert_nonlocal_reference_op, info);
      walk_body (convert_nonlocal_reference_stmt,
	  	 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      if (!is_gimple_omp_offloaded (stmt))
	{
	  save_suppress = info->suppress_expansion;
	  convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
					wi);
	  info->suppress_expansion = save_suppress;
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     gimple_omp_body_ptr (stmt));
	  break;
	}
      save_suppress = info->suppress_expansion;
      /* An offloaded region cannot receive the static chain implicitly;
	 map the chain object to the device explicitly.  */
      if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
					wi))
	{
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	}

      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TEAMS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
      /* These constructs carry no clauses; just walk the body.  */
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_BIND:
      {
      gbind *bind_stmt = as_a <gbind *> (stmt);
      /* At -O0 make sure debug decls exist for nonlocal VLA bounds
	 referenced by this bind's block.  */
      if (!optimize && gimple_bind_block (bind_stmt))
	note_nonlocal_block_vlas (info, gimple_bind_block (bind_stmt));

      for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
	if (TREE_CODE (var) == NAMELIST_DECL)
	  {
	    /* Adjust decls mentioned in NAMELIST_DECL.  */
	    tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
	    tree decl;
	    unsigned int i;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
	      {
		if (TREE_CODE (decl) == VAR_DECL
		    && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
		  continue;
		if (decl_function_context (decl) != info->context)
		  CONSTRUCTOR_ELT (decls, i)->value
		    = get_nonlocal_debug_decl (info, decl);
	      }
	  }

      *handled_ops_p = false;
      return NULL_TREE;
      }
    case GIMPLE_COND:
      /* Condition operands must gimplify to simple values.  */
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* We have handled all of STMT operands, no need to traverse the operands.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
1509 
1510 
1511 /* A subroutine of convert_local_reference.  Create a local variable
1512    in the parent function with DECL_VALUE_EXPR set to reference the
1513    field in FRAME.  This is used both for debug info and in OMP
1514    lowering.  */
1515 
static tree
get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
{
  tree x, new_decl;

  /* Reuse an existing mapping if DECL was already processed.  */
  tree *slot = &info->var_map->get_or_insert (decl);
  if (*slot)
    return *slot;

  /* Make sure frame_decl gets created.  */
  (void) get_frame_type (info);
  x = info->frame_decl;
  /* The replacement's value is FIELD within the local frame object.  */
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);

  /* Build the replacement decl and mirror the original's attributes so
     debug info and subsequent analyses see consistent properties.  */
  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
  if ((TREE_CODE (decl) == PARM_DECL
       || TREE_CODE (decl) == RESULT_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && DECL_BY_REFERENCE (decl))
    DECL_BY_REFERENCE (new_decl) = 1;

  /* Expansion of NEW_DECL resolves to the frame field access X.  */
  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
  *slot = new_decl;

  DECL_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  /* Do not emit debug info twice.  */
  DECL_IGNORED_P (decl) = 1;

  return new_decl;
}
1558 
1559 
1560 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1561    and PARM_DECLs that were referenced by inner nested functions.
1562    The rewrite will be a structure reference to the local frame variable.  */
1563 
1564 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1565 
static tree
convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp, field, x;
  bool save_val_only;

  /* By default this callback fully handles the node; the cases that want
     the generic walker to recurse into operands reset this below.  */
  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (decl_function_context (t) == info->context)
	{
	  /* If we copied a pointer to the frame, then the original decl
	     is used unchanged in the parent function.  */
	  if (use_pointer_in_frame (t))
	    break;

	  /* No need to transform anything if no child references the
	     variable.  */
	  field = lookup_field_for_decl (info, t, NO_INSERT);
	  if (!field)
	    break;
	  wi->changed = true;

	  /* Default to the debug decl; unless expansion of T is
	     suppressed, replace that with the actual frame field.  */
	  x = get_local_debug_decl (info, t, field);
	  if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    x = get_frame_field (info, info->context, field, &wi->gsi);

	  /* If a simple value is required, funnel the reference through
	     a temporary: a fresh load for a use, a deferred store for
	     a definition.  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case ADDR_EXPR:
      save_val_only = wi->val_only;
      /* Walk the operand in address context: we want the lvalue, not
	 its value.  */
      wi->val_only = false;
      wi->is_lhs = false;
      wi->changed = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;

      /* If we converted anything ... */
      if (wi->changed)
	{
	  tree save_context;

	  /* Then the frame decl is now addressable.  */
	  TREE_ADDRESSABLE (info->frame_decl) = 1;

	  save_context = current_function_decl;
	  current_function_decl = info->context;
	  recompute_tree_invariant_for_addr_expr (t);
	  current_function_decl = save_context;

	  /* If we are in a context where we only accept values, then
	     compute the address into a temporary.  */
	  if (save_val_only)
	    *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				    t, &wi->gsi);
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      /* Index, lower bound and element size may all mention
		 frame-resident decls.  */
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
			 NULL);
	    }
	}
      /* Finally process the base object of the reference chain.  */
      wi->val_only = false;
      walk_tree (tp, convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;
      break;

    case MEM_REF:
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
		 wi, NULL);
      /* We need to re-fold the MEM_REF as component references as
	 part of a ADDR_EXPR address are not allowed.  But we cannot
	 fold here, as the chain record type is not yet finalized.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
	  && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
	info->mem_refs->add (tp);
      wi->val_only = save_val_only;
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1710 
1711 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
1712 					  struct walk_stmt_info *);
1713 
1714 /* Helper for convert_local_reference.  Convert all the references in
1715    the chain of clauses at *PCLAUSES.  WI is as in convert_local_reference.  */
1716 
static bool
convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_frame = false, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  /* Decls rewritten here must not be expanded again when the operand
     walker later visits the construct's body; collect them in a copy
     of the suppression bitmap, installed after the first pass.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  /* Reduction placeholders carry GIMPLE sequences of their own;
	     those are walked in the second pass below.  */
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LINEAR:
	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  /* The step expression may itself reference frame decls.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
				      wi);
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	do_decl_clause:
	  /* Common handling for clauses naming a single decl: if the
	     decl lives in this function's frame, replace it with its
	     debug decl and suppress its later expansion.  */
	  decl = OMP_CLAUSE_DECL (clause);
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) == info->context
	      && !use_pointer_in_frame (decl))
	    {
	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
	      if (field)
		{
		  bitmap_set_bit (new_suppress, DECL_UID (decl));
		  OMP_CLAUSE_DECL (clause)
		    = get_local_debug_decl (info, decl, field);
		  need_frame = true;
		}
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	  /* These clauses hold a single expression operand; rewrite any
	     frame-decl references inside it.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
				      wi);
	  break;

	case OMP_CLAUSE_DIST_SCHEDULE:
	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					  &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	  /* Both the mapped size and the mapped object may reference
	     frame decls; the object may also be a non-decl address
	     expression, hence the full walk_tree below.  */
	  if (OMP_CLAUSE_SIZE (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
					  &dummy, wi);
	    }
	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
	    goto do_decl_clause;
	  wi->val_only = true;
	  wi->is_lhs = false;
	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
		     wi, NULL);
	  break;

	case OMP_CLAUSE_ALIGNED:
	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_local_reference_op
		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
	    }
	  /* Like do_decl_clause, but don't add any suppression.  */
	  decl = OMP_CLAUSE_DECL (clause);
	  if (TREE_CODE (decl) == VAR_DECL
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) == info->context
	      && !use_pointer_in_frame (decl))
	    {
	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
	      if (field)
		{
		  OMP_CLAUSE_DECL (clause)
		    = get_local_debug_decl (info, decl, field);
		  need_frame = true;
		}
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	  /* No operands that could reference frame decls.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  /* Second pass: walk the GIMPLE sequences attached to clauses noted
     above.  For reductions the placeholder's context is temporarily
     switched so that decl_function_context tests treat it as local.  */
  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      walk_body (convert_local_reference_stmt,
			 convert_local_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
	      walk_body (convert_local_reference_stmt,
			 convert_local_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  walk_body (convert_local_reference_stmt,
		     convert_local_reference_op, info,
		     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
	  break;

	case OMP_CLAUSE_LINEAR:
	  walk_body (convert_local_reference_stmt,
		     convert_local_reference_op, info,
		     &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
	  break;

	default:
	  break;
	}

  return need_frame;
}
1905 
1906 
1907 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1908    and PARM_DECLs that were referenced by inner nested functions.
1909    The rewrite will be a structure reference to the local frame variable.  */
1910 
static tree
convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  char save_static_chain_added;
  bool frame_decl_added;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      /* Save state that the clause and body walks below may clobber, so
	 it can be restored once this region has been processed.  */
      save_suppress = info->suppress_expansion;
      frame_decl_added = false;
      if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
	                             wi))
	{
	  /* A clause referenced the frame object: share FRAME.* with the
	     parallel/task region by prepending an OMP_CLAUSE_SHARED.  */
	  tree c = build_omp_clause (gimple_location (stmt),
				     OMP_CLAUSE_SHARED);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	  info->static_chain_added |= 4;
	  frame_decl_added = true;
	}

      save_local_var_chain = info->new_local_var_chain;
      save_static_chain_added = info->static_chain_added;
      info->new_local_var_chain = NULL;
      info->static_chain_added = 0;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
	         gimple_omp_body_ptr (stmt));

      /* If walking the body introduced a use of the frame (bit 4) and no
	 clause was added above, add the OMP_CLAUSE_SHARED now.  */
      if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
	{
	  tree c = build_omp_clause (gimple_location (stmt),
				     OMP_CLAUSE_SHARED);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  info->static_chain_added |= 4;
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
      /* Temporaries created while walking the body belong to the region.  */
      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      /* The loop bounds and increments need rewriting as well.  */
      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
			   convert_local_reference_stmt,
			   convert_local_reference_op, info);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      if (!is_gimple_omp_offloaded (stmt))
	{
	  /* A non-offloaded target region needs no mapping clause for the
	     frame; treat it like the simple region cases above.  */
	  save_suppress = info->suppress_expansion;
	  convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
	  info->suppress_expansion = save_suppress;
	  walk_body (convert_local_reference_stmt, convert_local_reference_op,
		     info, gimple_omp_body_ptr (stmt));
	  break;
	}
      save_suppress = info->suppress_expansion;
      frame_decl_added = false;
      if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
	{
	  /* For an offloaded region the frame object must be explicitly
	     mapped to and from the device.  */
	  tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	  info->static_chain_added |= 4;
	  frame_decl_added = true;
	}

      save_local_var_chain = info->new_local_var_chain;
      save_static_chain_added = info->static_chain_added;
      info->new_local_var_chain = NULL;
      info->static_chain_added = 0;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
		 gimple_omp_body_ptr (stmt));

      /* Same late fixup as for parallel/task: the body walk may have
	 introduced the first use of the frame.  */
      if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
	{
	  tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	  info->static_chain_added |= 4;
	}

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_TEAMS:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
      /* These constructs carry no clauses of interest; just walk the body.  */
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_COND:
      /* The condition is a scalar rvalue; let the operand walker see it
	 with the right val_only/is_lhs context.  */
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_ASSIGN:
      if (gimple_clobber_p (stmt))
	{
	  /* A clobber of a variable that was moved into the frame would
	     clobber the live frame field; drop the clobber instead.  */
	  tree lhs = gimple_assign_lhs (stmt);
	  if (!use_pointer_in_frame (lhs)
	      && lookup_field_for_decl (info, lhs, NO_INSERT))
	    {
	      gsi_replace (gsi, gimple_build_nop (), true);
	      break;
	    }
	}
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_BIND:
      for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
	   var;
	   var = DECL_CHAIN (var))
	if (TREE_CODE (var) == NAMELIST_DECL)
	  {
	    /* Adjust decls mentioned in NAMELIST_DECL.  */
	    tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
	    tree decl;
	    unsigned int i;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
	      {
		/* Statics and externs are never moved into the frame.  */
		if (TREE_CODE (decl) == VAR_DECL
		    && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
		  continue;
		if (decl_function_context (decl) == info->context
		    && !use_pointer_in_frame (decl))
		  {
		    tree field = lookup_field_for_decl (info, decl, NO_INSERT);
		    if (field)
		      {
			CONSTRUCTOR_ELT (decls, i)->value
			  = get_local_debug_decl (info, decl, field);
		      }
		  }
	      }
	  }

      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Indicate that we have handled all the operands ourselves.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
2126 
2127 
2128 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2129    that reference labels from outer functions.  The rewrite will be a
2130    call to __builtin_nonlocal_goto.  */
2131 
static tree
convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			   struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree label, new_label, target_context, x, field;
  gcall *call;
  gimple stmt = gsi_stmt (*gsi);

  /* Only GIMPLE_GOTOs whose destination is a LABEL_DECL defined in an
     outer function need rewriting; defer everything else.  */
  if (gimple_code (stmt) != GIMPLE_GOTO)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  label = gimple_goto_dest (stmt);
  if (TREE_CODE (label) != LABEL_DECL)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  target_context = decl_function_context (label);
  if (target_context == info->context)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Find the nesting_info of the enclosing function that defines LABEL.  */
  for (i = info->outer; target_context != i->context; i = i->outer)
    continue;

  /* The original user label may also be used for a normal goto, therefore
     we must create a new label that will actually receive the abnormal
     control transfer.  This new label will be marked LABEL_NONLOCAL; this
     mark will trigger proper behavior in the cfg, as well as cause the
     (hairy target-specific) non-local goto receiver code to be generated
     when we expand rtl.  Enter this association into var_map so that we
     can insert the new label into the IL during a second pass.  */
  tree *slot = &i->var_map->get_or_insert (label);
  if (*slot == NULL)
    {
      new_label = create_artificial_label (UNKNOWN_LOCATION);
      DECL_NONLOCAL (new_label) = 1;
      *slot = new_label;
    }
  else
    new_label = *slot;

  /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field).  */
  field = get_nl_goto_field (i);
  x = get_frame_field (info, target_context, field, gsi);
  x = build_addr (x, target_context);
  x = gsi_gimplify_val (info, x, gsi);
  call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
			    2, build_addr (new_label, target_context), x);
  gsi_replace (gsi, call, false);

  /* We have handled all of STMT's operands, no need to keep going.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
2194 
2195 
2196 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2197    are referenced via nonlocal goto from a nested function.  The rewrite
2198    will involve installing a newly generated DECL_NONLOCAL label, and
2199    (potentially) a branch around the rtl gunk that is assumed to be
2200    attached to such a label.  */
2201 
2202 static tree
2203 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2204 			  struct walk_stmt_info *wi)
2205 {
2206   struct nesting_info *const info = (struct nesting_info *) wi->info;
2207   tree label, new_label;
2208   gimple_stmt_iterator tmp_gsi;
2209   glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));
2210 
2211   if (!stmt)
2212     {
2213       *handled_ops_p = false;
2214       return NULL_TREE;
2215     }
2216 
2217   label = gimple_label_label (stmt);
2218 
2219   tree *slot = info->var_map->get (label);
2220   if (!slot)
2221     {
2222       *handled_ops_p = false;
2223       return NULL_TREE;
2224     }
2225 
2226   /* If there's any possibility that the previous statement falls through,
2227      then we must branch around the new non-local label.  */
2228   tmp_gsi = wi->gsi;
2229   gsi_prev (&tmp_gsi);
2230   if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
2231     {
2232       gimple stmt = gimple_build_goto (label);
2233       gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2234     }
2235 
2236   new_label = (tree) *slot;
2237   stmt = gimple_build_label (new_label);
2238   gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2239 
2240   *handled_ops_p = true;
2241   return NULL_TREE;
2242 }
2243 
2244 
2245 /* Called via walk_function+walk_stmt, rewrite all references to addresses
2246    of nested functions that require the use of trampolines.  The rewrite
   will involve a reference to a trampoline generated for the occasion.  */
2248 
static tree
convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree t = *tp, decl, target_context, x, builtin;
  gcall *call;

  /* Assume a leaf by default; the default case below re-enables walking
     for non-type, non-decl subtrees.  */
  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case ADDR_EXPR:
      /* Build
	   T.1 = &CHAIN->tramp;
	   T.2 = __builtin_adjust_trampoline (T.1);
	   T.3 = (func_type)T.2;
      */

      decl = TREE_OPERAND (t, 0);
      if (TREE_CODE (decl) != FUNCTION_DECL)
	break;

      /* Only need to process nested functions.  */
      target_context = decl_function_context (decl);
      if (!target_context)
	break;

      /* If the nested function doesn't use a static chain, then
	 it doesn't need a trampoline.  */
      if (!DECL_STATIC_CHAIN (decl))
	break;

      /* If we don't want a trampoline, then don't build one.  */
      if (TREE_NO_TRAMPOLINE (t))
	break;

      /* Lookup the immediate parent of the callee, as that's where
	 we need to insert the trampoline.  */
      for (i = info; i->context != target_context; i = i->outer)
	continue;
      x = lookup_tramp_for_decl (i, decl, INSERT);

      /* Compute the address of the field holding the trampoline.  */
      x = get_frame_field (info, target_context, x, &wi->gsi);
      x = build_addr (x, target_context);
      x = gsi_gimplify_val (info, x, &wi->gsi);

      /* Do machine-specific ugliness.  Normally this will involve
	 computing extra alignment, but it can really be anything.  */
      builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
      call = gimple_build_call (builtin, 1, x);
      x = init_tmp_var_with_call (info, &wi->gsi, call);

      /* Cast back to the proper function type.  */
      x = build1 (NOP_EXPR, TREE_TYPE (t), x);
      x = init_tmp_var (info, x, &wi->gsi);

      /* Replace the original address-of expression with the adjusted
	 trampoline address.  */
      *tp = x;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	*walk_subtrees = 1;
      break;
    }

  return NULL_TREE;
}
2317 
2318 
2319 /* Called via walk_function+walk_gimple_stmt, rewrite all references
2320    to addresses of nested functions that require the use of
   trampolines.  The rewrite will involve a reference to a trampoline
2322    generated for the occasion.  */
2323 
static tree
convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      {
	/* Only walk call arguments, lest we generate trampolines for
	   direct calls.  */
	unsigned long i, nargs = gimple_call_num_args (stmt);
	for (i = 0; i < nargs; i++)
	  walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
		     wi, NULL);
	break;
      }

    case GIMPLE_OMP_TARGET:
      if (!is_gimple_omp_offloaded (stmt))
	{
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      {
	/* Walk the region's operands and body.  Trampoline temporaries
	   created inside must be declared within the region, and any
	   chain/frame object used must be made visible to it.  */
	tree save_local_var_chain = info->new_local_var_chain;
        walk_gimple_op (stmt, convert_tramp_reference_op, wi);
	info->new_local_var_chain = NULL;
	char save_static_chain_added = info->static_chain_added;
	info->static_chain_added = 0;
        walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
		   info, gimple_omp_body_ptr (stmt));
	if (info->new_local_var_chain)
	  declare_vars (info->new_local_var_chain,
			gimple_seq_first_stmt (gimple_omp_body (stmt)),
			false);
	/* Bit 0 of static_chain_added flags the frame object, bit 1 the
	   chain decl (see the DECL selection below).  */
	for (int i = 0; i < 2; i++)
	  {
	    tree c, decl;
	    if ((info->static_chain_added & (1 << i)) == 0)
	      continue;
	    decl = i ? get_chain_decl (info) : info->frame_decl;
	    /* Don't add CHAIN.* or FRAME.* twice.  */
	    for (c = gimple_omp_taskreg_clauses (stmt);
		 c;
		 c = OMP_CLAUSE_CHAIN (c))
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		  && OMP_CLAUSE_DECL (c) == decl)
		break;
	      /* NOTE(review): the two IFs below are indented as if they were
		 inside the FOR loop above, but they are not -- the loop body
		 is only the IF/break.  They run exactly once after the
		 search, with C == NULL iff no matching clause was found.
		 The indentation is misleading but the behavior is correct.  */
	      if (c == NULL && gimple_code (stmt) != GIMPLE_OMP_TARGET)
		{
		  c = build_omp_clause (gimple_location (stmt),
					i ? OMP_CLAUSE_FIRSTPRIVATE
					  : OMP_CLAUSE_SHARED);
		  OMP_CLAUSE_DECL (c) = decl;
		  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
		  gimple_omp_taskreg_set_clauses (stmt, c);
		}
	      else if (c == NULL)
		{
		  /* Offloaded target regions take MAP clauses instead of
		     FIRSTPRIVATE/SHARED.  */
		  c = build_omp_clause (gimple_location (stmt),
					OMP_CLAUSE_MAP);
		  OMP_CLAUSE_DECL (c) = decl;
		  OMP_CLAUSE_SET_MAP_KIND (c,
					   i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
		  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
		  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
		  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
						 c);
		}
	  }
	info->new_local_var_chain = save_local_var_chain;
	info->static_chain_added |= save_static_chain_added;
      }
      break;

    default:
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
2414 
2415 
2416 
2417 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2418    that reference nested functions to make sure that the static chain
2419    is set up properly for the call.  */
2420 
static tree
convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
                     struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree decl, target_context;
  char save_static_chain_added;
  int i;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Nothing to do if a static chain is already attached.  */
      if (gimple_call_chain (stmt))
	break;
      /* Indirect calls (no fndecl) are skipped here.  */
      decl = gimple_call_fndecl (stmt);
      if (!decl)
	break;
      target_context = decl_function_context (decl);
      if (target_context && DECL_STATIC_CHAIN (decl))
	{
	  /* Attach the static chain and record which object was used to
	     compute it: bit 0 for the local frame (callee is our own
	     child), bit 1 for the chain pointer (callee is elsewhere).  */
	  gimple_call_set_chain (as_a <gcall *> (stmt),
				 get_static_chain (info, target_context,
						   &wi->gsi));
	  info->static_chain_added |= (1 << (info->context != target_context));
	}
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      /* Collect chain/frame uses from the region body, then make the
	 used objects visible to the region via data-sharing clauses.  */
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_taskreg_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      /* The chain pointer is copied in (firstprivate); the frame
		 object itself is shared.  */
	      c = build_omp_clause (gimple_location (stmt),
				    i ? OMP_CLAUSE_FIRSTPRIVATE
				    : OMP_CLAUSE_SHARED);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	      gimple_omp_taskreg_set_clauses (stmt, c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_TARGET:
      if (!is_gimple_omp_offloaded (stmt))
	{
	  /* Non-offloaded target regions need no mapping clauses.  */
	  walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
	  break;
	}
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_target_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      /* Offloaded regions need explicit device mappings: the chain
		 is copied to the device only, the frame goes both ways.  */
	      c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_SET_MAP_KIND (c, i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
	      OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	      gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
					     c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      /* The pre-body may contain calls as well.  */
      walk_body (convert_gimple_call, NULL, info,
	  	 gimple_omp_for_pre_body_ptr (stmt));
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      break;

    default:
      /* Keep looking for other operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
2541 
2542 /* Walk the nesting tree starting with ROOT.  Convert all trampolines and
2543    call expressions.  At the same time, determine if a nested function
2544    actually uses its static chain; if not, remember that.  */
2545 
2546 static void
2547 convert_all_function_calls (struct nesting_info *root)
2548 {
2549   unsigned int chain_count = 0, old_chain_count, iter_count;
2550   struct nesting_info *n;
2551 
2552   /* First, optimistically clear static_chain for all decls that haven't
2553      used the static chain already for variable access.  But always create
2554      it if not optimizing.  This makes it possible to reconstruct the static
2555      nesting tree at run time and thus to resolve up-level references from
2556      within the debugger.  */
2557   FOR_EACH_NEST_INFO (n, root)
2558     {
2559       tree decl = n->context;
2560       if (!optimize)
2561 	{
2562 	  if (n->inner)
2563 	    (void) get_frame_type (n);
2564 	  if (n->outer)
2565 	    (void) get_chain_decl (n);
2566 	}
2567       else if (!n->outer || (!n->chain_decl && !n->chain_field))
2568 	{
2569 	  DECL_STATIC_CHAIN (decl) = 0;
2570 	  if (dump_file && (dump_flags & TDF_DETAILS))
2571 	    fprintf (dump_file, "Guessing no static-chain for %s\n",
2572 		     lang_hooks.decl_printable_name (decl, 2));
2573 	}
2574       else
2575 	DECL_STATIC_CHAIN (decl) = 1;
2576       chain_count += DECL_STATIC_CHAIN (decl);
2577     }
2578 
2579   /* Walk the functions and perform transformations.  Note that these
2580      transformations can induce new uses of the static chain, which in turn
2581      require re-examining all users of the decl.  */
2582   /* ??? It would make sense to try to use the call graph to speed this up,
2583      but the call graph hasn't really been built yet.  Even if it did, we
2584      would still need to iterate in this loop since address-of references
2585      wouldn't show up in the callgraph anyway.  */
2586   iter_count = 0;
2587   do
2588     {
2589       old_chain_count = chain_count;
2590       chain_count = 0;
2591       iter_count++;
2592 
2593       if (dump_file && (dump_flags & TDF_DETAILS))
2594 	fputc ('\n', dump_file);
2595 
2596       FOR_EACH_NEST_INFO (n, root)
2597 	{
2598 	  tree decl = n->context;
2599 	  walk_function (convert_tramp_reference_stmt,
2600 			 convert_tramp_reference_op, n);
2601 	  walk_function (convert_gimple_call, NULL, n);
2602 	  chain_count += DECL_STATIC_CHAIN (decl);
2603 	}
2604     }
2605   while (chain_count != old_chain_count);
2606 
2607   if (dump_file && (dump_flags & TDF_DETAILS))
2608     fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
2609 	     iter_count);
2610 }
2611 
/* Context threaded through the copy_body machinery during type/value-expr
   remapping (see nesting_copy_decl and remap_vla_decls).  */
struct nesting_copy_body_data
{
  copy_body_data cb;		/* Must be first: callbacks receive &cb and
				   cast back to nesting_copy_body_data.  */
  struct nesting_info *root;	/* Nesting level whose var_map is consulted.  */
};
2617 
2618 /* A helper subroutine for debug_var_chain type remapping.  */
2619 
2620 static tree
2621 nesting_copy_decl (tree decl, copy_body_data *id)
2622 {
2623   struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2624   tree *slot = nid->root->var_map->get (decl);
2625 
2626   if (slot)
2627     return (tree) *slot;
2628 
2629   if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2630     {
2631       tree new_decl = copy_decl_no_change (decl, id);
2632       DECL_ORIGINAL_TYPE (new_decl)
2633 	= remap_type (DECL_ORIGINAL_TYPE (decl), id);
2634       return new_decl;
2635     }
2636 
2637   if (TREE_CODE (decl) == VAR_DECL
2638       || TREE_CODE (decl) == PARM_DECL
2639       || TREE_CODE (decl) == RESULT_DECL)
2640     return decl;
2641 
2642   return copy_decl_no_change (decl, id);
2643 }
2644 
2645 /* A helper function for remap_vla_decls.  See if *TP contains
2646    some remapped variables.  */
2647 
2648 static tree
2649 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2650 {
2651   struct nesting_info *root = (struct nesting_info *) data;
2652   tree t = *tp;
2653 
2654   if (DECL_P (t))
2655     {
2656       *walk_subtrees = 0;
2657       tree *slot = root->var_map->get (t);
2658 
2659       if (slot)
2660 	return *slot;
2661     }
2662   return NULL;
2663 }
2664 
2665 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
2666    involved.  */
2667 
static void
remap_vla_decls (tree block, struct nesting_info *root)
{
  tree var, subblock, val, type;
  struct nesting_copy_body_data id;

  /* Process subblocks first.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = BLOCK_CHAIN (subblock))
    remap_vla_decls (subblock, root);

  /* First pass: decide whether this block has any value expressions that
     mention remapped variables; bail out cheaply if not.  On exit VAR
     points at the first affected variable, or is NULL_TREE.  */
  for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
    if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
      {
	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	/* Only *ptr_var value expressions with a variably modified type
	   are of interest here.  */
	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	if (root->var_map->get (TREE_OPERAND (val, 0))
	    || walk_tree (&type, contains_remapped_vars, root, NULL))
	  break;
      }

  if (var == NULL_TREE)
    return;

  /* Set up a copy_body context that remaps decls via ROOT->var_map.  */
  memset (&id, 0, sizeof (id));
  id.cb.copy_decl = nesting_copy_decl;
  id.cb.decl_map = new hash_map<tree, tree>;
  id.root = root;

  /* Second pass: rewrite the affected types and value expressions,
     resuming from the first affected variable found above.  */
  for (; var; var = DECL_CHAIN (var))
    if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
      {
	struct nesting_info *i;
	tree newt, context;

	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
	if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
	  continue;

	/* Find the nesting level VAR belongs to; skip the variable when
	   its function is not on ROOT's outer chain.  */
	context = decl_function_context (var);
	for (i = root; i; i = i->outer)
	  if (i->context == context)
	    break;

	if (i == NULL)
	  continue;

	/* Fully expand value expressions.  This avoids having debug variables
	   only referenced from them and that can be swept during GC.  */
        if (slot)
	  {
	    tree t = (tree) *slot;
	    gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
	    val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
	  }

	id.cb.src_fn = i->context;
	id.cb.dst_fn = i->context;
	id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

	/* Remap the variable's type.  Walk past unnamed pointer types so a
	   shared named type underneath can have its TYPE_DECL remapped.  */
	TREE_TYPE (var) = newt = remap_type (type, &id.cb);
	while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
	  {
	    newt = TREE_TYPE (newt);
	    type = TREE_TYPE (type);
	  }
	if (TYPE_NAME (newt)
	    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
	    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
	    && newt != type
	    && TYPE_NAME (newt) == TYPE_NAME (type))
	  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);

	walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
	if (val != DECL_VALUE_EXPR (var))
	  SET_DECL_VALUE_EXPR (var, val);
      }

  delete id.cb.decl_map;
}
2762 
2763 /* Fold the MEM_REF *E.  */
2764 bool
2765 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
2766 {
2767   tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
2768   *ref_p = fold (*ref_p);
2769   return true;
2770 }
2771 
2772 /* Do "everything else" to clean up or complete state collected by the
2773    various walking passes -- lay out the types and decls, generate code
2774    to initialize the frame decl, store critical expressions in the
2775    struct function for rtl to find.  */
2776 
static void
finalize_nesting_tree_1 (struct nesting_info *root)
{
  gimple_seq stmt_list;		/* Frame-initialization code built below.  */
  gimple stmt;
  tree context = root->context;
  struct function *sf;

  stmt_list = NULL;

  /* If we created a non-local frame type or decl, we need to lay them
     out at this time.  */
  if (root->frame_type)
    {
      /* In some cases the frame type will trigger the -Wpadded warning.
	 This is not helpful; suppress it. */
      int save_warn_padded = warn_padded;
      tree *adjust;

      warn_padded = 0;
      layout_type (root->frame_type);
      warn_padded = save_warn_padded;
      layout_decl (root->frame_decl, 0);

      /* Remove root->frame_decl from root->new_local_var_chain, so
	 that we can declare it also in the lexical blocks, which
	 helps ensure virtual regs that end up appearing in its RTL
	 expression get substituted in instantiate_virtual_regs().  */
      /* The assert guarantees frame_decl really is on the chain; walking
	 off the end would dereference a NULL DECL_CHAIN.  */
      for (adjust = &root->new_local_var_chain;
	   *adjust != root->frame_decl;
	   adjust = &DECL_CHAIN (*adjust))
	gcc_assert (DECL_CHAIN (*adjust));
      *adjust = DECL_CHAIN (*adjust);

      DECL_CHAIN (root->frame_decl) = NULL_TREE;
      declare_vars (root->frame_decl,
		    gimple_seq_first_stmt (gimple_body (context)), true);
    }

  /* If any parameters were referenced non-locally, then we need to
     insert a copy.  Likewise, if any variables were referenced by
     pointer, we need to initialize the address.  */
  if (root->any_parm_remapped)
    {
      tree p;
      for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
	{
	  tree field, x, y;

	  /* NO_INSERT: only parameters that already got a frame field
	     (i.e. were actually remapped) need a copy-in.  */
	  field = lookup_field_for_decl (root, p, NO_INSERT);
	  if (!field)
	    continue;

	  if (use_pointer_in_frame (p))
	    x = build_addr (p, context);
	  else
	    x = p;

	  /* If the assignment is from a non-register the stmt is
	     not valid gimple.  Make it so by using a temporary instead.  */
	  if (!is_gimple_reg (x)
	      && is_gimple_reg_type (TREE_TYPE (x)))
	    {
	      gimple_stmt_iterator gsi = gsi_last (stmt_list);
	      x = init_tmp_var (root, x, &gsi);
	    }

	  /* frame.field = x;  */
	  y = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  stmt = gimple_build_assign (y, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If a chain_field was created, then it needs to be initialized
     from chain_decl.  */
  if (root->chain_field)
    {
      tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
		       root->frame_decl, root->chain_field, NULL_TREE);
      stmt = gimple_build_assign (x, get_chain_decl (root));
      gimple_seq_add_stmt (&stmt_list, stmt);
    }

  /* If trampolines were created, then we need to initialize them.  */
  if (root->any_tramp_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree arg1, arg2, arg3, x, field;

	  /* Only inner functions whose address was taken have a
	     trampoline field in the frame.  */
	  field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  gcc_assert (DECL_STATIC_CHAIN (i->context));
	  /* The frame's address serves as the static chain value.  */
	  arg3 = build_addr (root->frame_decl, context);

	  arg2 = build_addr (i->context, context);

	  /* Address of the trampoline field within the frame.  */
	  x = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  arg1 = build_addr (x, context);

	  /* __builtin_init_trampoline (&frame.tramp, fn, &frame).  */
	  x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
	  stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If we created initialization statements, insert them.  */
  if (stmt_list)
    {
      gbind *bind;
      annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
      /* Prepend the initialization code to the outermost bind's body.  */
      bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
      gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
      gimple_bind_set_body (bind, stmt_list);
    }

  /* If a chain_decl was created, then it needs to be registered with
     struct function so that it gets initialized from the static chain
     register at the beginning of the function.  */
  sf = DECL_STRUCT_FUNCTION (root->context);
  sf->static_chain_decl = root->chain_decl;

  /* Similarly for the non-local goto save area.  */
  if (root->nl_goto_field)
    {
      sf->nonlocal_goto_save_area
	= get_frame_field (root, context, root->nl_goto_field, NULL);
      sf->has_nonlocal_label = 1;
    }

  /* Make sure all new local variables get inserted into the
     proper BIND_EXPR.  */
  if (root->new_local_var_chain)
    declare_vars (root->new_local_var_chain,
		  gimple_seq_first_stmt (gimple_body (root->context)),
		  false);

  if (root->debug_var_chain)
    {
      tree debug_var;
      gbind *scope;

      remap_vla_decls (DECL_INITIAL (root->context), root);

      /* Scan for the first debug decl with a variably modified type;
	 if none exists the remapping pass below can be skipped.  */
      for (debug_var = root->debug_var_chain; debug_var;
	   debug_var = DECL_CHAIN (debug_var))
	if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	  break;

      /* If there are any debug decls with variable length types,
	 remap those types using other debug_var_chain variables.  */
      if (debug_var)
	{
	  struct nesting_copy_body_data id;

	  memset (&id, 0, sizeof (id));
	  id.cb.copy_decl = nesting_copy_decl;
	  id.cb.decl_map = new hash_map<tree, tree>;
	  id.root = root;

	  for (; debug_var; debug_var = DECL_CHAIN (debug_var))
	    if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	      {
		tree type = TREE_TYPE (debug_var);
		tree newt, t = type;
		struct nesting_info *i;

		/* Find the innermost enclosing function relative to
		   which the type is variably modified.  */
		for (i = root; i; i = i->outer)
		  if (variably_modified_type_p (type, i->context))
		    break;

		if (i == NULL)
		  continue;

		id.cb.src_fn = i->context;
		id.cb.dst_fn = i->context;
		id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

		TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
		/* Walk both old and new type in lockstep through unnamed
		   pointer layers so the names below line up.  */
		while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
		  {
		    newt = TREE_TYPE (newt);
		    t = TREE_TYPE (t);
		  }
		/* If remapping produced a new type that still shares the
		   old TYPE_DECL name, remap the name decl as well.  */
		if (TYPE_NAME (newt)
		    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
		    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
		    && newt != t
		    && TYPE_NAME (newt) == TYPE_NAME (t))
		  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
	      }

	  delete id.cb.decl_map;
	}

      /* Attach the debug decls to the outermost scope, or to the
	 function's BLOCK if that scope has no block of its own.  */
      scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
      if (gimple_bind_block (scope))
	declare_vars (root->debug_var_chain, scope, true);
      else
	BLOCK_VARS (DECL_INITIAL (root->context))
	  = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
		     root->debug_var_chain);
    }

  /* Fold the rewritten MEM_REF trees.  */
  root->mem_refs->traverse<void *, fold_mem_refs> (NULL);

  /* Dump the translated tree function.  */
  if (dump_file)
    {
      fputs ("\n\n", dump_file);
      dump_function_to_file (root->context, dump_file, dump_flags);
    }
}
2996 
2997 static void
2998 finalize_nesting_tree (struct nesting_info *root)
2999 {
3000   struct nesting_info *n;
3001   FOR_EACH_NEST_INFO (n, root)
3002     finalize_nesting_tree_1 (n);
3003 }
3004 
3005 /* Unnest the nodes and pass them to cgraph.  */
3006 
3007 static void
3008 unnest_nesting_tree_1 (struct nesting_info *root)
3009 {
3010   struct cgraph_node *node = cgraph_node::get (root->context);
3011 
3012   /* For nested functions update the cgraph to reflect unnesting.
3013      We also delay finalizing of these functions up to this point.  */
3014   if (node->origin)
3015     {
3016        node->unnest ();
3017        cgraph_node::finalize_function (root->context, true);
3018     }
3019 }
3020 
3021 static void
3022 unnest_nesting_tree (struct nesting_info *root)
3023 {
3024   struct nesting_info *n;
3025   FOR_EACH_NEST_INFO (n, root)
3026     unnest_nesting_tree_1 (n);
3027 }
3028 
3029 /* Free the data structures allocated during this pass.  */
3030 
3031 static void
3032 free_nesting_tree (struct nesting_info *root)
3033 {
3034   struct nesting_info *node, *next;
3035 
3036   node = iter_nestinfo_start (root);
3037   do
3038     {
3039       next = iter_nestinfo_next (node);
3040       delete node->var_map;
3041       delete node->field_map;
3042       delete node->mem_refs;
3043       free (node);
3044       node = next;
3045     }
3046   while (node);
3047 }
3048 
3049 /* Gimplify a function and all its nested functions.  */
3050 static void
3051 gimplify_all_functions (struct cgraph_node *root)
3052 {
3053   struct cgraph_node *iter;
3054   if (!gimple_body (root->decl))
3055     gimplify_function_tree (root->decl);
3056   for (iter = root->nested; iter; iter = iter->next_nested)
3057     gimplify_all_functions (iter);
3058 }
3059 
3060 /* Main entry point for this pass.  Process FNDECL and all of its nested
3061    subroutines and turn them into something less tightly bound.  */
3062 
3063 void
3064 lower_nested_functions (tree fndecl)
3065 {
3066   struct cgraph_node *cgn;
3067   struct nesting_info *root;
3068 
3069   /* If there are no nested functions, there's nothing to do.  */
3070   cgn = cgraph_node::get (fndecl);
3071   if (!cgn->nested)
3072     return;
3073 
3074   gimplify_all_functions (cgn);
3075 
3076   dump_file = dump_begin (TDI_nested, &dump_flags);
3077   if (dump_file)
3078     fprintf (dump_file, "\n;; Function %s\n\n",
3079 	     lang_hooks.decl_printable_name (fndecl, 2));
3080 
3081   bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
3082   root = create_nesting_tree (cgn);
3083 
3084   walk_all_functions (convert_nonlocal_reference_stmt,
3085                       convert_nonlocal_reference_op,
3086 		      root);
3087   walk_all_functions (convert_local_reference_stmt,
3088                       convert_local_reference_op,
3089 		      root);
3090   walk_all_functions (convert_nl_goto_reference, NULL, root);
3091   walk_all_functions (convert_nl_goto_receiver, NULL, root);
3092 
3093   convert_all_function_calls (root);
3094   finalize_nesting_tree (root);
3095   unnest_nesting_tree (root);
3096 
3097   free_nesting_tree (root);
3098   bitmap_obstack_release (&nesting_info_bitmap_obstack);
3099 
3100   if (dump_file)
3101     {
3102       dump_end (TDI_nested, dump_file);
3103       dump_file = NULL;
3104     }
3105 }
3106 
3107 #include "gt-tree-nested.h"
3108