xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/tree-nested.c (revision 53d1339bf7f9c7367b35a9e1ebe693f9b047a47b)
1 /* Nested function decomposition for GIMPLE.
2    Copyright (C) 2004-2019 Free Software Foundation, Inc.
3 
4    This file is part of GCC.
5 
6    GCC is free software; you can redistribute it and/or modify
7    it under the terms of the GNU General Public License as published by
8    the Free Software Foundation; either version 3, or (at your option)
9    any later version.
10 
11    GCC is distributed in the hope that it will be useful,
12    but WITHOUT ANY WARRANTY; without even the implied warranty of
13    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14    GNU General Public License for more details.
15 
16    You should have received a copy of the GNU General Public License
17    along with GCC; see the file COPYING3.  If not see
18    <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "memmodel.h"
29 #include "tm_p.h"
30 #include "stringpool.h"
31 #include "cgraph.h"
32 #include "fold-const.h"
33 #include "stor-layout.h"
34 #include "dumpfile.h"
35 #include "tree-inline.h"
36 #include "gimplify.h"
37 #include "gimple-iterator.h"
38 #include "gimple-walk.h"
39 #include "tree-cfg.h"
40 #include "explow.h"
41 #include "langhooks.h"
42 #include "gimple-low.h"
43 #include "gomp-constants.h"
44 #include "diagnostic.h"
45 
46 
47 /* The object of this pass is to lower the representation of a set of nested
48    functions in order to expose all of the gory details of the various
49    nonlocal references.  We want to do this sooner rather than later, in
50    order to give us more freedom in emitting all of the functions in question.
51 
52    Back in olden times, when gcc was young, we developed an insanely
53    complicated scheme whereby variables which were referenced nonlocally
54    were forced to live in the stack of the declaring function, and then
55    the nested functions magically discovered where these variables were
56    placed.  In order for this scheme to function properly, it required
57    that the outer function be partially expanded, then we switch to
58    compiling the inner function, and once done with those we switch back
59    to compiling the outer function.  Such delicate ordering requirements
60    make it difficult to do whole translation unit optimizations
61    involving such functions.
62 
63    The implementation here is much more direct.  Everything that can be
64    referenced by an inner function is a member of an explicitly created
65    structure herein called the "nonlocal frame struct".  The incoming
66    static chain for a nested function is a pointer to this struct in
67    the parent.  In this way, we settle on known offsets from a known
68    base, and so are decoupled from the logic that places objects in the
69    function's stack frame.  More importantly, we don't have to wait for
70    that to happen -- since the compilation of the inner function is no
71    longer tied to a real stack frame, the nonlocal frame struct can be
72    allocated anywhere.  Which means that the outer function is now
73    inlinable.
74 
75    Theory of operation here is very simple.  Iterate over all the
76    statements in all the functions (depth first) several times,
77    allocating structures and fields on demand.  In general we want to
78    examine inner functions first, so that we can avoid making changes
79    to outer functions which are unnecessary.
80 
81    The order of the passes matters a bit, in that later passes will be
82    skipped if it is discovered that the functions don't actually interact
83    at all.  That is, they're nested in the lexical sense but could have
84    been written as independent functions without change.  */
85 
86 
struct nesting_info
{
  /* Links forming the nesting tree: the lexically enclosing function,
     the first lexically nested function, and the next sibling at the
     same nesting level.  */
  struct nesting_info *outer;
  struct nesting_info *inner;
  struct nesting_info *next;

  /* Maps a non-locally referenced DECL of this context to its
     FIELD_DECL in the non-local frame struct (lookup_field_for_decl).  */
  hash_map<tree, tree> *field_map;
  /* Maps a DECL to its replacement element: a TREE_LIST holding the
     trampoline/descriptor fields (lookup_element_for_decl) or a debug
     decl (get_nonlocal_debug_decl).  */
  hash_map<tree, tree> *var_map;
  /* Memory-reference operand slots already seen -- presumably used to
     avoid reprocessing; the consuming code is outside this chunk.  */
  hash_set<tree *> *mem_refs;
  /* Bitmap (on nesting_info_bitmap_obstack) of variables whose
     expansion should be suppressed.  */
  bitmap suppress_expansion;

  /* The FUNCTION_DECL this node describes.  */
  tree context;
  /* Chain of temporaries created by create_tmp_var_for, awaiting
     declaration in the function.  */
  tree new_local_var_chain;
  tree debug_var_chain;
  /* RECORD_TYPE of the non-local frame and the local VAR_DECL holding
     an instance of it (see get_frame_type).  */
  tree frame_type;
  tree frame_decl;
  /* FIELD_DECL in our frame holding the parent's static chain
     (get_chain_field), and the PARM_DECL-like variable holding our own
     incoming static chain (get_chain_decl).  */
  tree chain_field;
  tree chain_decl;
  /* FIELD_DECL holding the buffer used by non-local gotos
     (see get_nl_goto_field).  */
  tree nl_goto_field;

  /* True if CONTEXT is a cgraph thunk.  */
  bool thunk_p;
  /* Flags recording what this pass created for CONTEXT.  */
  bool any_parm_remapped;
  bool any_tramp_created;
  bool any_descr_created;
  /* Bitmask: 1 if the address of our own frame was taken, 2 if the
     incoming chain decl was used (see get_static_chain and
     get_frame_field).  */
  char static_chain_added;
};
113 
114 
115 /* Iterate over the nesting tree, starting with ROOT, depth first.  */
116 
117 static inline struct nesting_info *
118 iter_nestinfo_start (struct nesting_info *root)
119 {
120   while (root->inner)
121     root = root->inner;
122   return root;
123 }
124 
125 static inline struct nesting_info *
126 iter_nestinfo_next (struct nesting_info *node)
127 {
128   if (node->next)
129     return iter_nestinfo_start (node->next);
130   return node->outer;
131 }
132 
133 #define FOR_EACH_NEST_INFO(I, ROOT) \
134   for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
135 
136 /* Obstack used for the bitmaps in the struct above.  */
137 static struct bitmap_obstack nesting_info_bitmap_obstack;
138 
139 
140 /* We're working in so many different function contexts simultaneously,
141    that create_tmp_var is dangerous.  Prevent mishap.  */
142 #define create_tmp_var cant_use_create_tmp_var_here_dummy
143 
144 /* Like create_tmp_var, except record the variable for registration at
145    the given nesting level.  */
146 
147 static tree
148 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
149 {
150   tree tmp_var;
151 
152   /* If the type is of variable size or a type which must be created by the
153      frontend, something is wrong.  Note that we explicitly allow
154      incomplete types here, since we create them ourselves here.  */
155   gcc_assert (!TREE_ADDRESSABLE (type));
156   gcc_assert (!TYPE_SIZE_UNIT (type)
157 	      || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
158 
159   tmp_var = create_tmp_var_raw (type, prefix);
160   DECL_CONTEXT (tmp_var) = info->context;
161   DECL_CHAIN (tmp_var) = info->new_local_var_chain;
162   DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
163   if (TREE_CODE (type) == COMPLEX_TYPE
164       || TREE_CODE (type) == VECTOR_TYPE)
165     DECL_GIMPLE_REG_P (tmp_var) = 1;
166 
167   info->new_local_var_chain = tmp_var;
168 
169   return tmp_var;
170 }
171 
/* Take the address of EXP, marking it addressable as necessary, and
   return the resulting ADDR_EXPR.  */

tree
build_addr (tree exp)
{
  mark_addressable (exp);
  return build_fold_addr_expr (exp);
}
181 
182 /* Insert FIELD into TYPE, sorted by alignment requirements.  */
183 
184 void
185 insert_field_into_struct (tree type, tree field)
186 {
187   tree *p;
188 
189   DECL_CONTEXT (field) = type;
190 
191   for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
192     if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
193       break;
194 
195   DECL_CHAIN (field) = *p;
196   *p = field;
197 
198   /* Set correct alignment for frame struct type.  */
199   if (TYPE_ALIGN (type) < DECL_ALIGN (field))
200     SET_TYPE_ALIGN (type, DECL_ALIGN (field));
201 }
202 
/* Build or return the RECORD_TYPE that describes the frame state that is
   shared between INFO->CONTEXT and its nested functions.  This record will
   not be complete until finalize_nesting_tree; up until that point we'll
   be adding fields as necessary.

   We also build the DECL that represents this frame in the function.  */

static tree
get_frame_type (struct nesting_info *info)
{
  tree type = info->frame_type;
  if (!type)
    {
      char *name;

      type = make_node (RECORD_TYPE);

      /* Name the record "FRAME.<function-name>" for dumps and debug.  */
      name = concat ("FRAME.",
		     IDENTIFIER_POINTER (DECL_NAME (info->context)),
		     NULL);
      TYPE_NAME (type) = get_identifier (name);
      free (name);

      info->frame_type = type;

      /* Do not put info->frame_decl on info->new_local_var_chain,
	 so that we can declare it in the lexical blocks, which
	 makes sure virtual regs that end up appearing in its RTL
	 expression get substituted in instantiate_virtual_regs.  */
      info->frame_decl = create_tmp_var_raw (type, "FRAME");
      DECL_CONTEXT (info->frame_decl) = info->context;
      DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
      DECL_SEEN_IN_BIND_EXPR_P (info->frame_decl) = 1;

      /* ??? Always make it addressable for now, since it is meant to
	 be pointed to by the static chain pointer.  This pessimizes
	 when it turns out that no static chains are needed because
	 the nested functions referencing non-local variables are not
	 reachable, but the true pessimization is to create the non-
	 local frame structure in the first place.  */
      TREE_ADDRESSABLE (info->frame_decl) = 1;
    }

  return type;
}
248 
249 /* Return true if DECL should be referenced by pointer in the non-local frame
250    structure.  */
251 
252 static bool
253 use_pointer_in_frame (tree decl)
254 {
255   if (TREE_CODE (decl) == PARM_DECL)
256     {
257       /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable-
258 	 sized DECLs, and inefficient to copy large aggregates.  Don't bother
259 	 moving anything but scalar parameters.  */
260       return AGGREGATE_TYPE_P (TREE_TYPE (decl));
261     }
262   else
263     {
264       /* Variable-sized DECLs can only come from OMP clauses at this point
265 	 since the gimplifier has already turned the regular variables into
266 	 pointers.  Do the same as the gimplifier.  */
267       return !DECL_SIZE (decl) || TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST;
268     }
269 }
270 
/* Given DECL, a non-locally accessed variable, find or create a field
   in the non-local frame structure for the given nesting context.
   With NO_INSERT, return NULL_TREE if no field exists yet.  */

static tree
lookup_field_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  gcc_checking_assert (decl_function_context (decl) == info->context);

  if (insert == NO_INSERT)
    {
      tree *slot = info->field_map->get (decl);
      return slot ? *slot : NULL_TREE;
    }

  tree *slot = &info->field_map->get_or_insert (decl);
  if (!*slot)
    {
      tree type = get_frame_type (info);
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);

      if (use_pointer_in_frame (decl))
	{
	  /* The frame holds only a pointer to DECL; align it as a
	     pointer and mark the field non-addressable.  */
	  TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	  SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	  DECL_NONADDRESSABLE_P (field) = 1;
	}
      else
	{
	  /* The frame holds DECL itself; mirror DECL's attributes.  */
	  TREE_TYPE (field) = TREE_TYPE (decl);
	  DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
	  SET_DECL_ALIGN (field, DECL_ALIGN (decl));
	  DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
	  TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
	  DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
	  TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);

	  /* Declare the transformation and adjust the original DECL.  For a
	     variable or for a parameter when not optimizing, we make it point
	     to the field in the frame directly.  For a parameter, we don't do
	     it when optimizing because the variable tracking pass will already
	     do the job.  */
	  if (VAR_P (decl) || !optimize)
	    {
	      tree x
		= build3 (COMPONENT_REF, TREE_TYPE (field), info->frame_decl,
			  field, NULL_TREE);

	      /* If the next declaration is a PARM_DECL pointing to the DECL,
		 we need to adjust its VALUE_EXPR directly, since chains of
		 VALUE_EXPRs run afoul of garbage collection.  This occurs
		 in Ada for Out parameters that aren't copied in.  */
	      tree next = DECL_CHAIN (decl);
	      if (next
		  && TREE_CODE (next) == PARM_DECL
		  && DECL_HAS_VALUE_EXPR_P (next)
		  && DECL_VALUE_EXPR (next) == decl)
		SET_DECL_VALUE_EXPR (next, x);

	      SET_DECL_VALUE_EXPR (decl, x);
	      DECL_HAS_VALUE_EXPR_P (decl) = 1;
	    }
	}

      insert_field_into_struct (type, field);
      *slot = field;

      /* Record that at least one parameter was remapped into the frame.  */
      if (TREE_CODE (decl) == PARM_DECL)
	info->any_parm_remapped = true;
    }

  return *slot;
}
345 
/* Build or return the variable that holds the static chain within
   INFO->CONTEXT.  This variable may only be used within INFO->CONTEXT.  */

static tree
get_chain_decl (struct nesting_info *info)
{
  tree decl = info->chain_decl;

  if (!decl)
    {
      tree type;

      /* The static chain is a pointer to the lexically enclosing
	 function's frame.  */
      type = get_frame_type (info->outer);
      type = build_pointer_type (type);

      /* Note that this variable is *not* entered into any BIND_EXPR;
	 the construction of this variable is handled specially in
	 expand_function_start and initialize_inlined_parameters.
	 Note also that it's represented as a parameter.  This is more
	 close to the truth, since the initial value does come from
	 the caller.  */
      decl = build_decl (DECL_SOURCE_LOCATION (info->context),
			 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      TREE_USED (decl) = 1;
      DECL_CONTEXT (decl) = info->context;
      DECL_ARG_TYPE (decl) = type;

      /* Tell tree-inline.c that we never write to this variable, so
	 it can copy-prop the replacement value immediately.  */
      TREE_READONLY (decl) = 1;

      info->chain_decl = decl;

      /* Report the first time we decide the function needs a static
	 chain.  */
      if (dump_file
	  && (dump_flags & TDF_DETAILS)
	  && !DECL_STATIC_CHAIN (info->context))
	fprintf (dump_file, "Setting static-chain for %s\n",
		 lang_hooks.decl_printable_name (info->context, 2));

      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return decl;
}
391 
/* Build or return the field within the non-local frame state that holds
   the static chain for INFO->CONTEXT.  This is the way to walk back up
   multiple nesting levels.  */

static tree
get_chain_field (struct nesting_info *info)
{
  tree field = info->chain_field;

  if (!field)
    {
      /* The field is a pointer to the enclosing function's frame.  */
      tree type = build_pointer_type (get_frame_type (info->outer));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__chain");
      TREE_TYPE (field) = type;
      SET_DECL_ALIGN (field, TYPE_ALIGN (type));
      DECL_NONADDRESSABLE_P (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->chain_field = field;

      /* Report the first time we decide the function needs a static
	 chain.  */
      if (dump_file
	  && (dump_flags & TDF_DETAILS)
	  && !DECL_STATIC_CHAIN (info->context))
	fprintf (dump_file, "Setting static-chain for %s\n",
		 lang_hooks.decl_printable_name (info->context, 2));

      DECL_STATIC_CHAIN (info->context) = 1;
    }
  return field;
}
425 
426 /* Initialize a new temporary with the GIMPLE_CALL STMT.  */
427 
428 static tree
429 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
430 		        gcall *call)
431 {
432   tree t;
433 
434   t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
435   gimple_call_set_lhs (call, t);
436   if (! gsi_end_p (*gsi))
437     gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
438   gsi_insert_before (gsi, call, GSI_SAME_STMT);
439 
440   return t;
441 }
442 
443 
444 /* Copy EXP into a temporary.  Allocate the temporary in the context of
445    INFO and insert the initialization statement before GSI.  */
446 
447 static tree
448 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
449 {
450   tree t;
451   gimple *stmt;
452 
453   t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
454   stmt = gimple_build_assign (t, exp);
455   if (! gsi_end_p (*gsi))
456     gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
457   gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
458 
459   return t;
460 }
461 
462 
463 /* Similarly, but only do so to force EXP to satisfy is_gimple_val.  */
464 
465 static tree
466 gsi_gimplify_val (struct nesting_info *info, tree exp,
467 		  gimple_stmt_iterator *gsi)
468 {
469   if (is_gimple_val (exp))
470     return exp;
471   else
472     return init_tmp_var (info, exp, gsi);
473 }
474 
475 /* Similarly, but copy from the temporary and insert the statement
476    after the iterator.  */
477 
478 static tree
479 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
480 {
481   tree t;
482   gimple *stmt;
483 
484   t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
485   stmt = gimple_build_assign (exp, t);
486   if (! gsi_end_p (*gsi))
487     gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
488   gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
489 
490   return t;
491 }
492 
493 /* Build or return the type used to represent a nested function trampoline.  */
494 
495 static GTY(()) tree trampoline_type;
496 
static tree
get_trampoline_type (struct nesting_info *info)
{
  unsigned align, size;
  tree t;

  /* The type is target-dependent but identical for all functions;
     build it once and cache it.  */
  if (trampoline_type)
    return trampoline_type;

  align = TRAMPOLINE_ALIGNMENT;
  size = TRAMPOLINE_SIZE;

  /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
     then allocate extra space so that we can do dynamic alignment.  */
  if (align > STACK_BOUNDARY)
    {
      size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
      align = STACK_BOUNDARY;
    }

  /* Lay the record out as a single char-array field "__data" of SIZE
     bytes with the required alignment.  */
  t = build_index_type (size_int (size - 1));
  t = build_array_type (char_type_node, t);
  t = build_decl (DECL_SOURCE_LOCATION (info->context),
		  FIELD_DECL, get_identifier ("__data"), t);
  SET_DECL_ALIGN (t, align);
  DECL_USER_ALIGN (t) = 1;

  trampoline_type = make_node (RECORD_TYPE);
  TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
  TYPE_FIELDS (trampoline_type) = t;
  layout_type (trampoline_type);
  DECL_CONTEXT (t) = trampoline_type;

  return trampoline_type;
}
532 
533 /* Build or return the type used to represent a nested function descriptor.  */
534 
535 static GTY(()) tree descriptor_type;
536 
static tree
get_descriptor_type (struct nesting_info *info)
{
  /* The base alignment is that of a function.  */
  const unsigned align = FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY);
  tree t;

  /* The type is identical for all functions; build it once and cache
     it.  */
  if (descriptor_type)
    return descriptor_type;

  /* Lay the record out as a single field "__data": an array of two
     pointers.  */
  t = build_index_type (integer_one_node);
  t = build_array_type (ptr_type_node, t);
  t = build_decl (DECL_SOURCE_LOCATION (info->context),
		  FIELD_DECL, get_identifier ("__data"), t);
  SET_DECL_ALIGN (t, MAX (TYPE_ALIGN (ptr_type_node), align));
  DECL_USER_ALIGN (t) = 1;

  descriptor_type = make_node (RECORD_TYPE);
  TYPE_NAME (descriptor_type) = get_identifier ("__builtin_descriptor");
  TYPE_FIELDS (descriptor_type) = t;
  layout_type (descriptor_type);
  DECL_CONTEXT (t) = descriptor_type;

  return descriptor_type;
}
562 
563 /* Given DECL, a nested function, find or create an element in the
564    var map for this function.  */
565 
566 static tree
567 lookup_element_for_decl (struct nesting_info *info, tree decl,
568 			 enum insert_option insert)
569 {
570   if (insert == NO_INSERT)
571     {
572       tree *slot = info->var_map->get (decl);
573       return slot ? *slot : NULL_TREE;
574     }
575 
576   tree *slot = &info->var_map->get_or_insert (decl);
577   if (!*slot)
578     *slot = build_tree_list (NULL_TREE, NULL_TREE);
579 
580   return (tree) *slot;
581 }
582 
583 /* Given DECL, a nested function, create a field in the non-local
584    frame structure for this function.  */
585 
586 static tree
587 create_field_for_decl (struct nesting_info *info, tree decl, tree type)
588 {
589   tree field = make_node (FIELD_DECL);
590   DECL_NAME (field) = DECL_NAME (decl);
591   TREE_TYPE (field) = type;
592   TREE_ADDRESSABLE (field) = 1;
593   insert_field_into_struct (get_frame_type (info), field);
594   return field;
595 }
596 
597 /* Given DECL, a nested function, find or create a field in the non-local
598    frame structure for a trampoline for this function.  */
599 
600 static tree
601 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
602 		       enum insert_option insert)
603 {
604   tree elt, field;
605 
606   elt = lookup_element_for_decl (info, decl, insert);
607   if (!elt)
608     return NULL_TREE;
609 
610   field = TREE_PURPOSE (elt);
611 
612   if (!field && insert == INSERT)
613     {
614       field = create_field_for_decl (info, decl, get_trampoline_type (info));
615       TREE_PURPOSE (elt) = field;
616       info->any_tramp_created = true;
617     }
618 
619   return field;
620 }
621 
622 /* Given DECL, a nested function, find or create a field in the non-local
623    frame structure for a descriptor for this function.  */
624 
625 static tree
626 lookup_descr_for_decl (struct nesting_info *info, tree decl,
627 		       enum insert_option insert)
628 {
629   tree elt, field;
630 
631   elt = lookup_element_for_decl (info, decl, insert);
632   if (!elt)
633     return NULL_TREE;
634 
635   field = TREE_VALUE (elt);
636 
637   if (!field && insert == INSERT)
638     {
639       field = create_field_for_decl (info, decl, get_descriptor_type (info));
640       TREE_VALUE (elt) = field;
641       info->any_descr_created = true;
642     }
643 
644   return field;
645 }
646 
/* Build or return the field within the non-local frame state that holds
   the non-local goto "jmp_buf".  The buffer itself is maintained by the
   rtl middle-end as dynamic stack space is allocated.  */

static tree
get_nl_goto_field (struct nesting_info *info)
{
  tree field = info->nl_goto_field;
  if (!field)
    {
      unsigned size;
      tree type;

      /* For __builtin_nonlocal_goto, we need N words.  The first is the
	 frame pointer, the rest is for the target's stack pointer save
	 area.  The number of words is controlled by STACK_SAVEAREA_MODE;
	 not the best interface, but it'll do for now.  */
      if (Pmode == ptr_mode)
	type = ptr_type_node;
      else
	type = lang_hooks.types.type_for_mode (Pmode, 1);

      /* Size of the save area in Pmode-sized words, plus one word for
	 the frame pointer.  */
      scalar_int_mode mode
	= as_a <scalar_int_mode> (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
      size = GET_MODE_SIZE (mode);
      size = size / GET_MODE_SIZE (Pmode);
      size = size + 1;

      type = build_array_type
	(type, build_index_type (size_int (size)));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__nl_goto_buf");
      TREE_TYPE (field) = type;
      SET_DECL_ALIGN (field, TYPE_ALIGN (type));
      /* The buffer's address is taken by the rtl middle-end.  */
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (get_frame_type (info), field);

      info->nl_goto_field = field;
    }

  return field;
}
691 
692 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ.  */
693 
694 static void
695 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
696 	   struct nesting_info *info, gimple_seq *pseq)
697 {
698   struct walk_stmt_info wi;
699 
700   memset (&wi, 0, sizeof (wi));
701   wi.info = info;
702   wi.val_only = true;
703   walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
704 }
705 
706 
707 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT.  */
708 
709 static inline void
710 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
711 	       struct nesting_info *info)
712 {
713   gimple_seq body = gimple_body (info->context);
714   walk_body (callback_stmt, callback_op, info, &body);
715   gimple_set_body (info->context, body);
716 }
717 
/* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body.  */

static void
walk_gimple_omp_for (gomp_for *for_stmt,
    		     walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
    		     struct nesting_info *info)
{
  struct walk_stmt_info wi;
  gimple_seq seq;
  tree t;
  size_t i;

  /* Walk the existing pre-body first.  */
  walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));

  /* Walk the clause operands with an iterator positioned at the end of
     an initially empty scratch sequence, so that any statements the
     callbacks emit are collected there.  */
  seq = NULL;
  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  wi.gsi = gsi_last (seq);

  for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
    {
      /* The index is an lvalue, so walk it with val_only clear.  */
      wi.val_only = false;
      walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
		 &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      /* The increment must be a binary expression; walk its two
	 operands separately.  */
      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (BINARY_CLASS_P (t));
      wi.val_only = false;
      walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
    }

  /* Append any statements emitted during the walk to the pre-body.  */
  seq = gsi_seq (wi.gsi);
  if (!gimple_seq_empty_p (seq))
    {
      gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
      annotate_all_with_location (seq, gimple_location (for_stmt));
      gimple_seq_add_seq (&pre_body, seq);
      gimple_omp_for_set_pre_body (for_stmt, pre_body);
    }
}
770 
771 /* Similarly for ROOT and all functions nested underneath, depth first.  */
772 
773 static void
774 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
775 		    struct nesting_info *root)
776 {
777   struct nesting_info *n;
778   FOR_EACH_NEST_INFO (n, root)
779     walk_function (callback_stmt, callback_op, n);
780 }
781 
782 
/* We have to check for a fairly pathological case.  The operands of a
   nested function are to be interpreted in the context of the enclosing
   function.  So if any are variably-sized, they will get remapped when the
   enclosing function is inlined.  But that remapping would also have to be
   done in the types of the PARM_DECLs of the nested function, meaning the
   argument types of that function will disagree with the arguments in the
   calls to that function.  So we'd either have to make a copy of the nested
   function corresponding to each time the enclosing function was inlined or
   add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
   function.  The former is not practical.  The latter would still require
   detecting this case to know when to add the conversions.  So, for now at
   least, we don't inline such an enclosing function.

   We have to do that check recursively, so here return indicating whether
   FNDECL has such a nested function.  ORIG_FN is the function we were
   trying to inline to use for checking whether any argument is variably
   modified by anything in it.

   It would be better to do this in tree-inline.c so that we could give
   the appropriate warning for why a function can't be inlined, but that's
   too late since the nesting structure has already been flattened and
   adding a flag just to record this fact seems a waste of a flag.  */

static bool
check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  tree arg;

  /* Check each directly nested function's parameters, then recurse
     into its own nested functions.  */
  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
	if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
	  return true;

      if (check_for_nested_with_variably_modified (cgn->decl,
						   orig_fndecl))
	return true;
    }

  return false;
}
825 
/* Construct our local datastructure describing the function nesting
   tree rooted by CGN.  */

static struct nesting_info *
create_nesting_tree (struct cgraph_node *cgn)
{
  struct nesting_info *info = XCNEW (struct nesting_info);
  info->field_map = new hash_map<tree, tree>;
  info->var_map = new hash_map<tree, tree>;
  info->mem_refs = new hash_set<tree *>;
  info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
  info->context = cgn->decl;
  info->thunk_p = cgn->thunk.thunk_p;

  /* Recurse into the lexically nested functions, pushing each child's
     info onto our inner list.  */
  for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
    {
      struct nesting_info *sub = create_nesting_tree (cgn);
      sub->outer = info;
      sub->next = info->inner;
      info->inner = sub;
    }

  /* See check_for_nested_with_variably_modified for a discussion of
     why this has to be here.  */
  if (check_for_nested_with_variably_modified (info->context, info->context))
    DECL_UNINLINABLE (info->context) = true;

  return info;
}
855 
/* Return an expression computing the static chain for TARGET_CONTEXT
   from INFO->CONTEXT.  Insert any necessary computations before GSI.  */

static tree
get_static_chain (struct nesting_info *info, tree target_context,
		  gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* The target frame is our own; its address is the chain.  */
      x = build_addr (info->frame_decl);
      info->static_chain_added |= 1;
    }
  else
    {
      /* Start from our incoming static chain and follow the __chain
	 links up through each intervening frame.  */
      x = get_chain_decl (info);
      info->static_chain_added |= 2;

      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, gsi);
	}
    }

  return x;
}
888 
889 
/* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
   frame as seen from INFO->CONTEXT.  Insert any necessary computations
   before GSI.  */

static tree
get_frame_field (struct nesting_info *info, tree target_context,
		 tree field, gimple_stmt_iterator *gsi)
{
  struct nesting_info *i;
  tree x;

  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      info->static_chain_added |= 1;
    }
  else
    {
      /* Start from our incoming static chain and follow the __chain
	 links up to TARGET_CONTEXT's frame.  */
      x = get_chain_decl (info);
      info->static_chain_added |= 2;

      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  tree field = get_chain_field (i);

	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	  x = init_tmp_var (info, x, gsi);
	}

      x = build_simple_mem_ref (x);
    }

  /* Finally select FIELD within the frame.  */
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  return x;
}
928 
929 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
930 
/* A subroutine of convert_nonlocal_reference_op.  Create a local variable
   in the nested function with DECL_VALUE_EXPR set to reference the true
   variable in the parent function.  This is used both for debug info
   and in OMP lowering.  */

static tree
get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
{
  tree target_context;
  struct nesting_info *i;
  tree x, field, new_decl;

  /* Reuse a previously created stand-in for DECL, if one exists.  */
  tree *slot = &info->var_map->get_or_insert (decl);

  if (*slot)
    return *slot;

  target_context = decl_function_context (decl);

  /* A copy of the code in get_frame_field, but without the temporaries.  */
  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      i = info;
      info->static_chain_added |= 1;
    }
  else
    {
      /* Walk the static chain, one dereference per intervening nesting
	 level, until we reach the frame that owns DECL.  */
      x = get_chain_decl (info);
      info->static_chain_added |= 2;
      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  field = get_chain_field (i);
	  x = build_simple_mem_ref (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	}
      x = build_simple_mem_ref (x);
    }

  field = lookup_field_for_decl (i, decl, INSERT);
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  if (use_pointer_in_frame (decl))
    x = build_simple_mem_ref (x);

  /* ??? We should be remapping types as well, surely.  */
  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
  if ((TREE_CODE (decl) == PARM_DECL
       || TREE_CODE (decl) == RESULT_DECL
       || VAR_P (decl))
      && DECL_BY_REFERENCE (decl))
    DECL_BY_REFERENCE (new_decl) = 1;

  /* The new decl is never used directly; every use resolves to the frame
     access expression built above.  */
  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;

  /* Cache the result before the VLA recursion below can re-enter this
     function for the same decl.  */
  *slot = new_decl;
  DECL_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  /* When not optimizing, also create debug decls for any nonlocal VLA
     array bounds referenced by DECL's type.  */
  if (!optimize
      && info->context != target_context
      && variably_modified_type_p (TREE_TYPE (decl), NULL))
    note_nonlocal_vla_type (info, TREE_TYPE (decl));

  return new_decl;
}
1008 
1009 
/* Callback for walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.

   The rewrite will involve some number of structure accesses back up
   the static chain.  E.g. for a variable FOO up one nesting level it'll
   be CHAIN->FOO.  For two levels it'll be CHAIN->__chain->FOO.  Further
   indirections apply to decls for which use_pointer_in_frame is true.  */

static tree
convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      {
	tree x, target_context = decl_function_context (t);

	/* Decls belonging to the current function need no rewriting.  */
	if (info->context == target_context)
	  break;

	wi->changed = true;

	/* Suppressed decls (set up by the OMP clause handling) are
	   replaced by a debug decl rather than a frame access.  */
	if (bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	  x = get_nonlocal_debug_decl (info, t);
	else
	  {
	    struct nesting_info *i = info;
	    while (i && i->context != target_context)
	      i = i->outer;
	    /* If none of the outer contexts is the target context, this means
	       that the VAR or PARM_DECL is referenced in a wrong context.  */
	    if (!i)
	      internal_error ("%s from %s referenced in %s",
			      IDENTIFIER_POINTER (DECL_NAME (t)),
			      IDENTIFIER_POINTER (DECL_NAME (target_context)),
			      IDENTIFIER_POINTER (DECL_NAME (info->context)));

	    x = lookup_field_for_decl (i, t, INSERT);
	    x = get_frame_field (info, target_context, x, &wi->gsi);
	    if (use_pointer_in_frame (t))
	      {
		x = init_tmp_var (info, x, &wi->gsi);
		x = build_simple_mem_ref (x);
	      }
	  }

	/* If only a bare value is acceptable in this context, load or
	   store it through a temporary.  */
	if (wi->val_only)
	  {
	    if (wi->is_lhs)
	      x = save_tmp_var (info, x, &wi->gsi);
	    else
	      x = init_tmp_var (info, x, &wi->gsi);
	  }

	*tp = x;
      }
      break;

    case LABEL_DECL:
      /* We're taking the address of a label from a parent function, but
	 this is not itself a non-local goto.  Mark the label such that it
	 will not be deleted, much as we would with a label address in
	 static storage.  */
      if (decl_function_context (t) != info->context)
        FORCED_LABEL (t) = 1;
      break;

    case ADDR_EXPR:
      {
	bool save_val_only = wi->val_only;

	/* The operand of an ADDR_EXPR must stay an lvalue.  */
	wi->val_only = false;
	wi->is_lhs = false;
	wi->changed = false;
	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
	wi->val_only = true;

	if (wi->changed)
	  {
	    tree save_context;

	    /* If we changed anything, we might no longer be directly
	       referencing a decl.  */
	    save_context = current_function_decl;
	    current_function_decl = info->context;
	    recompute_tree_invariant_for_addr_expr (t);
	    current_function_decl = save_context;

	    /* If the callback converted the address argument in a context
	       where we only accept variables (and min_invariant, presumably),
	       then compute the address into a temporary.  */
	    if (save_val_only)
	      *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				      t, &wi->gsi);
	  }
      }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      /* Rewrite the index and the (optional) element size and
		 alignment operands as simple values.  */
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
			 wi, NULL);
	    }
	}
      /* The base object itself must remain an lvalue.  */
      wi->val_only = false;
      walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
          wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1169 
1170 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1171 					     struct walk_stmt_info *);
1172 
/* Helper for convert_nonlocal_references, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.

   Returns true if a static chain is needed, i.e. some clause names a decl
   from an outer function that is not merely privatized.  */

static bool
convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_chain = false, need_stmts = false;
  tree clause, decl;
  int dummy;
  bitmap new_suppress;

  /* Work on a copy of the suppression bitmap: decls handled below are
     added to it so convert_nonlocal_reference_op replaces them with
     debug decls instead of frame accesses inside this construct.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  /* First pass: rewrite clause decls and clause operand expressions.  */
  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LINEAR:
	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
					 &dummy, wi);
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_TO_DECLARE:
	case OMP_CLAUSE_LINK:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	do_decl_clause:
	  decl = OMP_CLAUSE_DECL (clause);
	  if (VAR_P (decl)
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      /* A nonlocal decl: suppress frame expansion for it and
		 substitute a debug decl in the clause.  A private copy
		 by itself does not require the static chain.  */
	      if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
		OMP_CLAUSE_SHARED_READONLY (clause) = 0;
	      bitmap_set_bit (new_suppress, DECL_UID (decl));
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
		need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	  /* Several OpenACC clauses have optional arguments.  Check if they
	     are present.  */
	  if (OMP_CLAUSE_OPERAND (clause, 0))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					     &dummy, wi);
	    }

	  /* The gang clause accepts two arguments.  */
	  if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
	      && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
	    {
		wi->val_only = true;
		wi->is_lhs = false;
		convert_nonlocal_reference_op
		  (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_DIST_SCHEDULE:
	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					     &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	  if (OMP_CLAUSE_SIZE (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
					     &dummy, wi);
	    }
	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
	    goto do_decl_clause;
	  /* A non-decl map target (e.g. an array section) is walked as
	     an ordinary expression.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
		     wi, NULL);
	  break;

	case OMP_CLAUSE_ALIGNED:
	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op
		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
	    }
	  /* FALLTHRU */
	case OMP_CLAUSE_NONTEMPORAL:
	  /* Like do_decl_clause, but don't add any suppression.  */
	  decl = OMP_CLAUSE_DECL (clause);
	  if (VAR_P (decl)
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	  /* These clauses reference no decls or expressions.  */
	  break;

	  /* The following clause belongs to the OpenACC cache directive, which
	     is discarded during gimplification.  */
	case OMP_CLAUSE__CACHE_:
	  /* The following clauses are only allowed in the OpenMP declare simd
	     directive, so not seen here.  */
	case OMP_CLAUSE_UNIFORM:
	case OMP_CLAUSE_INBRANCH:
	case OMP_CLAUSE_NOTINBRANCH:
	  /* The following clauses are only allowed on OpenMP cancel and
	     cancellation point directives, which at this point have already
	     been lowered into a function call.  */
	case OMP_CLAUSE_FOR:
	case OMP_CLAUSE_PARALLEL:
	case OMP_CLAUSE_SECTIONS:
	case OMP_CLAUSE_TASKGROUP:
	  /* The following clauses are only added during OMP lowering; nested
	     function decomposition happens before that.  */
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE__SIMDUID_:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	  /* Anything else.  */
	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  /* Second pass: rewrite references inside the GIMPLE sequences some
     clauses carry (reduction init/merge, lastprivate, linear).  */
  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      /* Temporarily move the placeholders into our context so
		 the walk treats references to them as local.  */
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		  = info->context;
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		  = old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
	  break;

	case OMP_CLAUSE_LINEAR:
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
	  break;

	default:
	  break;
	}

  return need_chain;
}
1431 
1432 /* Create nonlocal debug decls for nonlocal VLA array bounds.  */
1433 
1434 static void
1435 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1436 {
1437   while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1438     type = TREE_TYPE (type);
1439 
1440   if (TYPE_NAME (type)
1441       && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1442       && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1443     type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1444 
1445   while (POINTER_TYPE_P (type)
1446 	 || TREE_CODE (type) == VECTOR_TYPE
1447 	 || TREE_CODE (type) == FUNCTION_TYPE
1448 	 || TREE_CODE (type) == METHOD_TYPE)
1449     type = TREE_TYPE (type);
1450 
1451   if (TREE_CODE (type) == ARRAY_TYPE)
1452     {
1453       tree domain, t;
1454 
1455       note_nonlocal_vla_type (info, TREE_TYPE (type));
1456       domain = TYPE_DOMAIN (type);
1457       if (domain)
1458 	{
1459 	  t = TYPE_MIN_VALUE (domain);
1460 	  if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1461 	      && decl_function_context (t) != info->context)
1462 	    get_nonlocal_debug_decl (info, t);
1463 	  t = TYPE_MAX_VALUE (domain);
1464 	  if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1465 	      && decl_function_context (t) != info->context)
1466 	    get_nonlocal_debug_decl (info, t);
1467 	}
1468     }
1469 }
1470 
/* Callback for walk_gimple_stmt.  Rewrite all references to VAR and
   PARM_DECLs that belong to outer functions.  This handles statements
   that are not handled via the standard recursion done in
   walk_gimple_stmt.  STMT is the statement to examine, DATA is as in
   convert_nonlocal_reference_op.  Set *HANDLED_OPS_P to true if all the
   operands of STMT have been handled by this function.  */

static tree
convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
				 struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      /* Don't walk non-local gotos for now.  */
      if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      break;

    case GIMPLE_OMP_TEAMS:
      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  save_suppress = info->suppress_expansion;
	  convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt),
					wi);
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     gimple_omp_body_ptr (stmt));
	  info->suppress_expansion = save_suppress;
	  break;
	}
      /* FALLTHRU */

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      /* If the clauses need the static chain, pass it into the outlined
	 region as a firstprivate value.  */
      if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
	                                wi))
	{
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt),
				OMP_CLAUSE_FIRSTPRIVATE);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}

      /* Collect new temporaries created while walking the body so they
	 can be declared inside the region rather than the function.  */
      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
	              gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
			   convert_nonlocal_reference_stmt,
	  		   convert_nonlocal_reference_op, info);
      walk_body (convert_nonlocal_reference_stmt,
	  	 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TASKGROUP:
      save_suppress = info->suppress_expansion;
      convert_nonlocal_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt), wi);
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      if (!is_gimple_omp_offloaded (stmt))
	{
	  save_suppress = info->suppress_expansion;
	  convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
					wi);
	  info->suppress_expansion = save_suppress;
	  walk_body (convert_nonlocal_reference_stmt,
		     convert_nonlocal_reference_op, info,
		     gimple_omp_body_ptr (stmt));
	  break;
	}
      save_suppress = info->suppress_expansion;
      /* For an offloaded region, the static chain is mapped to the
	 device rather than passed as a firstprivate value.  */
      if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
					wi))
	{
	  tree c, decl;
	  decl = get_chain_decl (info);
	  c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (c) = decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	}

      save_local_var_chain = info->new_local_var_chain;
      info->new_local_var_chain = NULL;

      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
		 info, gimple_omp_body_ptr (stmt));

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
		      false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
	         info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_BIND:
      {
      gbind *bind_stmt = as_a <gbind *> (stmt);

      for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
	if (TREE_CODE (var) == NAMELIST_DECL)
	  {
	    /* Adjust decls mentioned in NAMELIST_DECL.  */
	    tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
	    tree decl;
	    unsigned int i;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
	      {
		if (VAR_P (decl)
		    && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
		  continue;
		if (decl_function_context (decl) != info->context)
		  CONSTRUCTOR_ELT (decls, i)->value
		    = get_nonlocal_debug_decl (info, decl);
	      }
	  }

      *handled_ops_p = false;
      return NULL_TREE;
      }
    case GIMPLE_COND:
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_ASSIGN:
      if (gimple_clobber_p (stmt))
	{
	  /* A clobber of a variable belonging to an outer function is
	     meaningless here; drop it instead of rewriting it into a
	     frame access.  */
	  tree lhs = gimple_assign_lhs (stmt);
	  if (DECL_P (lhs)
	      && !(TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
	      && decl_function_context (lhs) != info->context)
	    {
	      gsi_replace (gsi, gimple_build_nop (), true);
	      break;
	    }
	}
      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* We have handled all of STMT operands, no need to traverse the operands.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
1683 
1684 
1685 /* A subroutine of convert_local_reference.  Create a local variable
1686    in the parent function with DECL_VALUE_EXPR set to reference the
1687    field in FRAME.  This is used both for debug info and in OMP
1688    lowering.  */
1689 
1690 static tree
1691 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1692 {
1693   tree x, new_decl;
1694 
1695   tree *slot = &info->var_map->get_or_insert (decl);
1696   if (*slot)
1697     return *slot;
1698 
1699   /* Make sure frame_decl gets created.  */
1700   (void) get_frame_type (info);
1701   x = info->frame_decl;
1702   x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1703 
1704   new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1705 			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1706   DECL_CONTEXT (new_decl) = info->context;
1707   DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1708   DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1709   TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1710   TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1711   TREE_READONLY (new_decl) = TREE_READONLY (decl);
1712   TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1713   DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1714   if ((TREE_CODE (decl) == PARM_DECL
1715        || TREE_CODE (decl) == RESULT_DECL
1716        || VAR_P (decl))
1717       && DECL_BY_REFERENCE (decl))
1718     DECL_BY_REFERENCE (new_decl) = 1;
1719 
1720   SET_DECL_VALUE_EXPR (new_decl, x);
1721   DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1722   *slot = new_decl;
1723 
1724   DECL_CHAIN (new_decl) = info->debug_var_chain;
1725   info->debug_var_chain = new_decl;
1726 
1727   /* Do not emit debug info twice.  */
1728   DECL_IGNORED_P (decl) = 1;
1729 
1730   return new_decl;
1731 }
1732 
1733 
1734 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1735    and PARM_DECLs that were referenced by inner nested functions.
1736    The rewrite will be a structure reference to the local frame variable.  */
1737 
1738 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1739 
1740 static tree
1741 convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
1742 {
1743   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1744   struct nesting_info *const info = (struct nesting_info *) wi->info;
1745   tree t = *tp, field, x;
1746   bool save_val_only;
1747 
1748   *walk_subtrees = 0;
1749   switch (TREE_CODE (t))
1750     {
1751     case VAR_DECL:
1752       /* Non-automatic variables are never processed.  */
1753       if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1754 	break;
1755       /* FALLTHRU */
1756 
1757     case PARM_DECL:
1758       if (t != info->frame_decl && decl_function_context (t) == info->context)
1759 	{
1760 	  /* If we copied a pointer to the frame, then the original decl
1761 	     is used unchanged in the parent function.  */
1762 	  if (use_pointer_in_frame (t))
1763 	    break;
1764 
1765 	  /* No need to transform anything if no child references the
1766 	     variable.  */
1767 	  field = lookup_field_for_decl (info, t, NO_INSERT);
1768 	  if (!field)
1769 	    break;
1770 	  wi->changed = true;
1771 
1772 	  if (bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1773 	    x = get_local_debug_decl (info, t, field);
1774 	  else
1775 	    x = get_frame_field (info, info->context, field, &wi->gsi);
1776 
1777 	  if (wi->val_only)
1778 	    {
1779 	      if (wi->is_lhs)
1780 		x = save_tmp_var (info, x, &wi->gsi);
1781 	      else
1782 		x = init_tmp_var (info, x, &wi->gsi);
1783 	    }
1784 
1785 	  *tp = x;
1786 	}
1787       break;
1788 
1789     case ADDR_EXPR:
1790       save_val_only = wi->val_only;
1791       wi->val_only = false;
1792       wi->is_lhs = false;
1793       wi->changed = false;
1794       walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
1795       wi->val_only = save_val_only;
1796 
1797       /* If we converted anything ... */
1798       if (wi->changed)
1799 	{
1800 	  tree save_context;
1801 
1802 	  /* Then the frame decl is now addressable.  */
1803 	  TREE_ADDRESSABLE (info->frame_decl) = 1;
1804 
1805 	  save_context = current_function_decl;
1806 	  current_function_decl = info->context;
1807 	  recompute_tree_invariant_for_addr_expr (t);
1808 	  current_function_decl = save_context;
1809 
1810 	  /* If we are in a context where we only accept values, then
1811 	     compute the address into a temporary.  */
1812 	  if (save_val_only)
1813 	    *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1814 				    t, &wi->gsi);
1815 	}
1816       break;
1817 
1818     case REALPART_EXPR:
1819     case IMAGPART_EXPR:
1820     case COMPONENT_REF:
1821     case ARRAY_REF:
1822     case ARRAY_RANGE_REF:
1823     case BIT_FIELD_REF:
1824       /* Go down this entire nest and just look at the final prefix and
1825 	 anything that describes the references.  Otherwise, we lose track
1826 	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
1827       save_val_only = wi->val_only;
1828       wi->val_only = true;
1829       wi->is_lhs = false;
1830       for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1831 	{
1832 	  if (TREE_CODE (t) == COMPONENT_REF)
1833 	    walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1834 		       NULL);
1835 	  else if (TREE_CODE (t) == ARRAY_REF
1836 		   || TREE_CODE (t) == ARRAY_RANGE_REF)
1837 	    {
1838 	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
1839 			 NULL);
1840 	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1841 			 NULL);
1842 	      walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
1843 			 NULL);
1844 	    }
1845 	}
1846       wi->val_only = false;
1847       walk_tree (tp, convert_local_reference_op, wi, NULL);
1848       wi->val_only = save_val_only;
1849       break;
1850 
1851     case MEM_REF:
1852       save_val_only = wi->val_only;
1853       wi->val_only = true;
1854       wi->is_lhs = false;
1855       walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
1856 		 wi, NULL);
1857       /* We need to re-fold the MEM_REF as component references as
1858 	 part of a ADDR_EXPR address are not allowed.  But we cannot
1859 	 fold here, as the chain record type is not yet finalized.  */
1860       if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
1861 	  && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
1862 	info->mem_refs->add (tp);
1863       wi->val_only = save_val_only;
1864       break;
1865 
1866     case VIEW_CONVERT_EXPR:
1867       /* Just request to look at the subtrees, leaving val_only and lhs
1868 	 untouched.  This might actually be for !val_only + lhs, in which
1869 	 case we don't want to force a replacement by a temporary.  */
1870       *walk_subtrees = 1;
1871       break;
1872 
1873     default:
1874       if (!IS_TYPE_OR_DECL_P (t))
1875 	{
1876 	  *walk_subtrees = 1;
1877 	  wi->val_only = true;
1878 	  wi->is_lhs = false;
1879 	}
1880       break;
1881     }
1882 
1883   return NULL_TREE;
1884 }
1885 
1886 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
1887 					  struct walk_stmt_info *);
1888 
1889 /* Helper for convert_local_reference.  Convert all the references in
1890    the chain of clauses at *PCLAUSES.  WI is as in convert_local_reference.  */
1891 
1892 static bool
1893 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1894 {
1895   struct nesting_info *const info = (struct nesting_info *) wi->info;
1896   bool need_frame = false, need_stmts = false;
1897   tree clause, decl;
1898   int dummy;
1899   bitmap new_suppress;
1900 
1901   new_suppress = BITMAP_GGC_ALLOC ();
1902   bitmap_copy (new_suppress, info->suppress_expansion);
1903 
1904   for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1905     {
1906       switch (OMP_CLAUSE_CODE (clause))
1907 	{
1908 	case OMP_CLAUSE_REDUCTION:
1909 	case OMP_CLAUSE_IN_REDUCTION:
1910 	case OMP_CLAUSE_TASK_REDUCTION:
1911 	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1912 	    need_stmts = true;
1913 	  goto do_decl_clause;
1914 
1915 	case OMP_CLAUSE_LASTPRIVATE:
1916 	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1917 	    need_stmts = true;
1918 	  goto do_decl_clause;
1919 
1920 	case OMP_CLAUSE_LINEAR:
1921 	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1922 	    need_stmts = true;
1923 	  wi->val_only = true;
1924 	  wi->is_lhs = false;
1925 	  convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
1926 				      wi);
1927 	  goto do_decl_clause;
1928 
1929 	case OMP_CLAUSE_PRIVATE:
1930 	case OMP_CLAUSE_FIRSTPRIVATE:
1931 	case OMP_CLAUSE_COPYPRIVATE:
1932 	case OMP_CLAUSE_SHARED:
1933 	case OMP_CLAUSE_TO_DECLARE:
1934 	case OMP_CLAUSE_LINK:
1935 	case OMP_CLAUSE_USE_DEVICE_PTR:
1936 	case OMP_CLAUSE_IS_DEVICE_PTR:
1937 	do_decl_clause:
1938 	  decl = OMP_CLAUSE_DECL (clause);
1939 	  if (VAR_P (decl)
1940 	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1941 	    break;
1942 	  if (decl_function_context (decl) == info->context
1943 	      && !use_pointer_in_frame (decl))
1944 	    {
1945 	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1946 	      if (field)
1947 		{
1948 		  if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
1949 		    OMP_CLAUSE_SHARED_READONLY (clause) = 0;
1950 		  bitmap_set_bit (new_suppress, DECL_UID (decl));
1951 		  OMP_CLAUSE_DECL (clause)
1952 		    = get_local_debug_decl (info, decl, field);
1953 		  need_frame = true;
1954 		}
1955 	    }
1956 	  break;
1957 
1958 	case OMP_CLAUSE_SCHEDULE:
1959 	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1960 	    break;
1961 	  /* FALLTHRU */
1962 	case OMP_CLAUSE_FINAL:
1963 	case OMP_CLAUSE_IF:
1964 	case OMP_CLAUSE_NUM_THREADS:
1965 	case OMP_CLAUSE_DEPEND:
1966 	case OMP_CLAUSE_DEVICE:
1967 	case OMP_CLAUSE_NUM_TEAMS:
1968 	case OMP_CLAUSE_THREAD_LIMIT:
1969 	case OMP_CLAUSE_SAFELEN:
1970 	case OMP_CLAUSE_SIMDLEN:
1971 	case OMP_CLAUSE_PRIORITY:
1972 	case OMP_CLAUSE_GRAINSIZE:
1973 	case OMP_CLAUSE_NUM_TASKS:
1974 	case OMP_CLAUSE_HINT:
1975 	case OMP_CLAUSE_NUM_GANGS:
1976 	case OMP_CLAUSE_NUM_WORKERS:
1977 	case OMP_CLAUSE_VECTOR_LENGTH:
1978 	case OMP_CLAUSE_GANG:
1979 	case OMP_CLAUSE_WORKER:
1980 	case OMP_CLAUSE_VECTOR:
1981 	case OMP_CLAUSE_ASYNC:
1982 	case OMP_CLAUSE_WAIT:
1983 	  /* Several OpenACC clauses have optional arguments.  Check if they
1984 	     are present.  */
1985 	  if (OMP_CLAUSE_OPERAND (clause, 0))
1986 	    {
1987 	      wi->val_only = true;
1988 	      wi->is_lhs = false;
1989 	      convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1990 					  &dummy, wi);
1991 	    }
1992 
1993 	  /* The gang clause accepts two arguments.  */
1994 	  if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
1995 	      && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
1996 	    {
1997 		wi->val_only = true;
1998 		wi->is_lhs = false;
1999 		convert_nonlocal_reference_op
2000 		  (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
2001 	    }
2002 	  break;
2003 
2004 	case OMP_CLAUSE_DIST_SCHEDULE:
2005 	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
2006 	    {
2007 	      wi->val_only = true;
2008 	      wi->is_lhs = false;
2009 	      convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
2010 					  &dummy, wi);
2011 	    }
2012 	  break;
2013 
2014 	case OMP_CLAUSE_MAP:
2015 	case OMP_CLAUSE_TO:
2016 	case OMP_CLAUSE_FROM:
2017 	  if (OMP_CLAUSE_SIZE (clause))
2018 	    {
2019 	      wi->val_only = true;
2020 	      wi->is_lhs = false;
2021 	      convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
2022 					  &dummy, wi);
2023 	    }
2024 	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
2025 	    goto do_decl_clause;
2026 	  wi->val_only = true;
2027 	  wi->is_lhs = false;
2028 	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
2029 		     wi, NULL);
2030 	  break;
2031 
2032 	case OMP_CLAUSE_ALIGNED:
2033 	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
2034 	    {
2035 	      wi->val_only = true;
2036 	      wi->is_lhs = false;
2037 	      convert_local_reference_op
2038 		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
2039 	    }
2040 	  /* FALLTHRU */
2041 	case OMP_CLAUSE_NONTEMPORAL:
2042 	  /* Like do_decl_clause, but don't add any suppression.  */
2043 	  decl = OMP_CLAUSE_DECL (clause);
2044 	  if (VAR_P (decl)
2045 	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2046 	    break;
2047 	  if (decl_function_context (decl) == info->context
2048 	      && !use_pointer_in_frame (decl))
2049 	    {
2050 	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
2051 	      if (field)
2052 		{
2053 		  OMP_CLAUSE_DECL (clause)
2054 		    = get_local_debug_decl (info, decl, field);
2055 		  need_frame = true;
2056 		}
2057 	    }
2058 	  break;
2059 
2060 	case OMP_CLAUSE_NOWAIT:
2061 	case OMP_CLAUSE_ORDERED:
2062 	case OMP_CLAUSE_DEFAULT:
2063 	case OMP_CLAUSE_COPYIN:
2064 	case OMP_CLAUSE_COLLAPSE:
2065 	case OMP_CLAUSE_TILE:
2066 	case OMP_CLAUSE_UNTIED:
2067 	case OMP_CLAUSE_MERGEABLE:
2068 	case OMP_CLAUSE_PROC_BIND:
2069 	case OMP_CLAUSE_NOGROUP:
2070 	case OMP_CLAUSE_THREADS:
2071 	case OMP_CLAUSE_SIMD:
2072 	case OMP_CLAUSE_DEFAULTMAP:
2073 	case OMP_CLAUSE_SEQ:
2074 	case OMP_CLAUSE_INDEPENDENT:
2075 	case OMP_CLAUSE_AUTO:
2076 	case OMP_CLAUSE_IF_PRESENT:
2077 	case OMP_CLAUSE_FINALIZE:
2078 	  break;
2079 
2080 	  /* The following clause belongs to the OpenACC cache directive, which
2081 	     is discarded during gimplification.  */
2082 	case OMP_CLAUSE__CACHE_:
2083 	  /* The following clauses are only allowed in the OpenMP declare simd
2084 	     directive, so not seen here.  */
2085 	case OMP_CLAUSE_UNIFORM:
2086 	case OMP_CLAUSE_INBRANCH:
2087 	case OMP_CLAUSE_NOTINBRANCH:
2088 	  /* The following clauses are only allowed on OpenMP cancel and
2089 	     cancellation point directives, which at this point have already
2090 	     been lowered into a function call.  */
2091 	case OMP_CLAUSE_FOR:
2092 	case OMP_CLAUSE_PARALLEL:
2093 	case OMP_CLAUSE_SECTIONS:
2094 	case OMP_CLAUSE_TASKGROUP:
2095 	  /* The following clauses are only added during OMP lowering; nested
2096 	     function decomposition happens before that.  */
2097 	case OMP_CLAUSE__LOOPTEMP_:
2098 	case OMP_CLAUSE__REDUCTEMP_:
2099 	case OMP_CLAUSE__SIMDUID_:
2100 	case OMP_CLAUSE__GRIDDIM_:
2101 	case OMP_CLAUSE__SIMT_:
2102 	  /* Anything else.  */
2103 	default:
2104 	  gcc_unreachable ();
2105 	}
2106     }
2107 
2108   info->suppress_expansion = new_suppress;
2109 
2110   if (need_stmts)
2111     for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
2112       switch (OMP_CLAUSE_CODE (clause))
2113 	{
2114 	case OMP_CLAUSE_REDUCTION:
2115 	case OMP_CLAUSE_IN_REDUCTION:
2116 	case OMP_CLAUSE_TASK_REDUCTION:
2117 	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2118 	    {
2119 	      tree old_context
2120 		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
2121 	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2122 		= info->context;
2123 	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2124 		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2125 		  = info->context;
2126 	      walk_body (convert_local_reference_stmt,
2127 			 convert_local_reference_op, info,
2128 			 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
2129 	      walk_body (convert_local_reference_stmt,
2130 			 convert_local_reference_op, info,
2131 			 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
2132 	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
2133 		= old_context;
2134 	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2135 		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
2136 		  = old_context;
2137 	    }
2138 	  break;
2139 
2140 	case OMP_CLAUSE_LASTPRIVATE:
2141 	  walk_body (convert_local_reference_stmt,
2142 		     convert_local_reference_op, info,
2143 		     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
2144 	  break;
2145 
2146 	case OMP_CLAUSE_LINEAR:
2147 	  walk_body (convert_local_reference_stmt,
2148 		     convert_local_reference_op, info,
2149 		     &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
2150 	  break;
2151 
2152 	default:
2153 	  break;
2154 	}
2155 
2156   return need_frame;
2157 }
2158 
2159 
2160 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
2161    and PARM_DECLs that were referenced by inner nested functions.
2162    The rewrite will be a structure reference to the local frame variable.  */
2163 
static tree
convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  char save_static_chain_added;
  bool frame_decl_added;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_TEAMS:
      /* A non-host teams construct only needs its clauses and body
	 converted; a host teams construct is handled like parallel/task
	 below (FALLTHRU).  */
      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  save_suppress = info->suppress_expansion;
	  convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
	  walk_body (convert_local_reference_stmt, convert_local_reference_op,
		     info, gimple_omp_body_ptr (stmt));
	  info->suppress_expansion = save_suppress;
	  break;
	}
      /* FALLTHRU */

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      /* If clause conversion rewrote any decl into a frame field, the
	 frame variable itself must be made visible inside the construct
	 via an OMP_CLAUSE_SHARED clause.  Bit 4 of static_chain_added
	 records that such a clause exists.  */
      save_suppress = info->suppress_expansion;
      frame_decl_added = false;
      if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
	                             wi))
	{
	  tree c = build_omp_clause (gimple_location (stmt),
				     OMP_CLAUSE_SHARED);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	  info->static_chain_added |= 4;
	  frame_decl_added = true;
	}

      /* Walk the body with fresh new_local_var_chain/static_chain_added
	 state so we can tell what the body itself introduced.  */
      save_local_var_chain = info->new_local_var_chain;
      save_static_chain_added = info->static_chain_added;
      info->new_local_var_chain = NULL;
      info->static_chain_added = 0;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
	         gimple_omp_body_ptr (stmt));

      /* The body may also have forced use of the frame; add the SHARED
	 clause now if it was not added above.  */
      if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
	{
	  tree c = build_omp_clause (gimple_location (stmt),
				     OMP_CLAUSE_SHARED);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  info->static_chain_added |= 4;
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
      /* Temporaries created while converting the body are declared in the
	 construct's own bind.  */
      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
			   convert_local_reference_stmt,
			   convert_local_reference_op, info);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TASKGROUP:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      /* Non-offloaded target constructs only need clause conversion;
	 offloaded ones are handled like parallel/task but with a MAP
	 clause for the frame instead of SHARED.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  save_suppress = info->suppress_expansion;
	  convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
	  info->suppress_expansion = save_suppress;
	  walk_body (convert_local_reference_stmt, convert_local_reference_op,
		     info, gimple_omp_body_ptr (stmt));
	  break;
	}
      save_suppress = info->suppress_expansion;
      frame_decl_added = false;
      if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
	{
	  tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	  info->static_chain_added |= 4;
	  frame_decl_added = true;
	}

      save_local_var_chain = info->new_local_var_chain;
      save_static_chain_added = info->static_chain_added;
      info->new_local_var_chain = NULL;
      info->static_chain_added = 0;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
		 gimple_omp_body_ptr (stmt));

      if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
	{
	  tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	  info->static_chain_added |= 4;
	}

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_COND:
      /* The condition operands are needed as values; let the operand
	 walker do the rewriting.  */
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_ASSIGN:
      /* A clobber of a variable that has been moved into the frame record
	 is dropped (replaced by a nop) rather than rewritten into a
	 clobber of the frame field.  */
      if (gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  if (DECL_P (lhs)
	      && !use_pointer_in_frame (lhs)
	      && lookup_field_for_decl (info, lhs, NO_INSERT))
	    {
	      gsi_replace (gsi, gimple_build_nop (), true);
	      break;
	    }
	}
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_BIND:
      for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
	   var;
	   var = DECL_CHAIN (var))
	if (TREE_CODE (var) == NAMELIST_DECL)
	  {
	    /* Adjust decls mentioned in NAMELIST_DECL.  */
	    tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
	    tree decl;
	    unsigned int i;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
	      {
		if (VAR_P (decl)
		    && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
		  continue;
		if (decl_function_context (decl) == info->context
		    && !use_pointer_in_frame (decl))
		  {
		    tree field = lookup_field_for_decl (info, decl, NO_INSERT);
		    if (field)
		      {
			CONSTRUCTOR_ELT (decls, i)->value
			  = get_local_debug_decl (info, decl, field);
		      }
		  }
	      }
	  }

      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Indicate that we have handled all the operands ourselves.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
2391 
2392 
2393 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
2394    that reference labels from outer functions.  The rewrite will be a
2395    call to __builtin_nonlocal_goto.  */
2396 
2397 static tree
2398 convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2399 			   struct walk_stmt_info *wi)
2400 {
2401   struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2402   tree label, new_label, target_context, x, field;
2403   gcall *call;
2404   gimple *stmt = gsi_stmt (*gsi);
2405 
2406   if (gimple_code (stmt) != GIMPLE_GOTO)
2407     {
2408       *handled_ops_p = false;
2409       return NULL_TREE;
2410     }
2411 
2412   label = gimple_goto_dest (stmt);
2413   if (TREE_CODE (label) != LABEL_DECL)
2414     {
2415       *handled_ops_p = false;
2416       return NULL_TREE;
2417     }
2418 
2419   target_context = decl_function_context (label);
2420   if (target_context == info->context)
2421     {
2422       *handled_ops_p = false;
2423       return NULL_TREE;
2424     }
2425 
2426   for (i = info->outer; target_context != i->context; i = i->outer)
2427     continue;
2428 
2429   /* The original user label may also be use for a normal goto, therefore
2430      we must create a new label that will actually receive the abnormal
2431      control transfer.  This new label will be marked LABEL_NONLOCAL; this
2432      mark will trigger proper behavior in the cfg, as well as cause the
2433      (hairy target-specific) non-local goto receiver code to be generated
2434      when we expand rtl.  Enter this association into var_map so that we
2435      can insert the new label into the IL during a second pass.  */
2436   tree *slot = &i->var_map->get_or_insert (label);
2437   if (*slot == NULL)
2438     {
2439       new_label = create_artificial_label (UNKNOWN_LOCATION);
2440       DECL_NONLOCAL (new_label) = 1;
2441       *slot = new_label;
2442     }
2443   else
2444     new_label = *slot;
2445 
2446   /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field).  */
2447   field = get_nl_goto_field (i);
2448   x = get_frame_field (info, target_context, field, gsi);
2449   x = build_addr (x);
2450   x = gsi_gimplify_val (info, x, gsi);
2451   call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
2452 			    2, build_addr (new_label), x);
2453   gsi_replace (gsi, call, false);
2454 
2455   /* We have handled all of STMT's operands, no need to keep going.  */
2456   *handled_ops_p = true;
2457   return NULL_TREE;
2458 }
2459 
2460 
2461 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
2462    are referenced via nonlocal goto from a nested function.  The rewrite
2463    will involve installing a newly generated DECL_NONLOCAL label, and
2464    (potentially) a branch around the rtl gunk that is assumed to be
2465    attached to such a label.  */
2466 
2467 static tree
2468 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2469 			  struct walk_stmt_info *wi)
2470 {
2471   struct nesting_info *const info = (struct nesting_info *) wi->info;
2472   tree label, new_label;
2473   gimple_stmt_iterator tmp_gsi;
2474   glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));
2475 
2476   if (!stmt)
2477     {
2478       *handled_ops_p = false;
2479       return NULL_TREE;
2480     }
2481 
2482   label = gimple_label_label (stmt);
2483 
2484   tree *slot = info->var_map->get (label);
2485   if (!slot)
2486     {
2487       *handled_ops_p = false;
2488       return NULL_TREE;
2489     }
2490 
2491   /* If there's any possibility that the previous statement falls through,
2492      then we must branch around the new non-local label.  */
2493   tmp_gsi = wi->gsi;
2494   gsi_prev (&tmp_gsi);
2495   if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
2496     {
2497       gimple *stmt = gimple_build_goto (label);
2498       gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2499     }
2500 
2501   new_label = (tree) *slot;
2502   stmt = gimple_build_label (new_label);
2503   gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2504 
2505   *handled_ops_p = true;
2506   return NULL_TREE;
2507 }
2508 
2509 
2510 /* Called via walk_function+walk_stmt, rewrite all references to addresses
2511    of nested functions that require the use of trampolines.  The rewrite
2512    will involve a reference a trampoline generated for the occasion.  */
2513 
static tree
convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree t = *tp, decl, target_context, x, builtin;
  bool descr;
  gcall *call;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case ADDR_EXPR:
      /* Build
	   T.1 = &CHAIN->tramp;
	   T.2 = __builtin_adjust_trampoline (T.1);
	   T.3 = (func_type)T.2;
      */

      decl = TREE_OPERAND (t, 0);
      if (TREE_CODE (decl) != FUNCTION_DECL)
	break;

      /* Only need to process nested functions.  */
      target_context = decl_function_context (decl);
      if (!target_context)
	break;

      /* If the nested function doesn't use a static chain, then
	 it doesn't need a trampoline.  */
      if (!DECL_STATIC_CHAIN (decl))
	break;

      /* If we don't want a trampoline, then don't build one.  */
      if (TREE_NO_TRAMPOLINE (t))
	break;

      /* Lookup the immediate parent of the callee, as that's where
	 we need to insert the trampoline.  */
      for (i = info; i->context != target_context; i = i->outer)
	continue;

      /* Decide whether to generate a descriptor or a trampoline. */
      descr = FUNC_ADDR_BY_DESCRIPTOR (t) && !flag_trampolines;

      /* X is the frame field holding the trampoline/descriptor, created
	 on demand in the parent's frame record.  */
      if (descr)
	x = lookup_descr_for_decl (i, decl, INSERT);
      else
	x = lookup_tramp_for_decl (i, decl, INSERT);

      /* Compute the address of the field holding the trampoline.  */
      x = get_frame_field (info, target_context, x, &wi->gsi);
      x = build_addr (x);
      x = gsi_gimplify_val (info, x, &wi->gsi);

      /* Do machine-specific ugliness.  Normally this will involve
	 computing extra alignment, but it can really be anything.  */
      if (descr)
	builtin = builtin_decl_implicit (BUILT_IN_ADJUST_DESCRIPTOR);
      else
	builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
      call = gimple_build_call (builtin, 1, x);
      x = init_tmp_var_with_call (info, &wi->gsi, call);

      /* Cast back to the proper function type.  */
      x = build1 (NOP_EXPR, TREE_TYPE (t), x);
      x = init_tmp_var (info, x, &wi->gsi);

      /* Replace the original &nested_func with the adjusted pointer.  */
      *tp = x;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	*walk_subtrees = 1;
      break;
    }

  return NULL_TREE;
}
2593 
2594 
2595 /* Called via walk_function+walk_gimple_stmt, rewrite all references
2596    to addresses of nested functions that require the use of
2597    trampolines.  The rewrite will involve a reference a trampoline
2598    generated for the occasion.  */
2599 
static tree
convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      {
	/* Only walk call arguments, lest we generate trampolines for
	   direct calls.  */
	unsigned long i, nargs = gimple_call_num_args (stmt);
	for (i = 0; i < nargs; i++)
	  walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
		     wi, NULL);
	break;
      }

    case GIMPLE_OMP_TEAMS:
      /* Non-host teams constructs need no special treatment here; host
	 teams constructs are handled like parallel/task below.  */
      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      goto do_parallel;

    case GIMPLE_OMP_TARGET:
      if (!is_gimple_omp_offloaded (stmt))
	{
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    do_parallel:
      {
	/* Convert the construct's operands and body with fresh
	   new_local_var_chain/static_chain_added state, then make any
	   frame or chain decl the conversion required visible inside the
	   construct via SHARED/FIRSTPRIVATE (or MAP for target).  */
	tree save_local_var_chain = info->new_local_var_chain;
        walk_gimple_op (stmt, convert_tramp_reference_op, wi);
	info->new_local_var_chain = NULL;
	char save_static_chain_added = info->static_chain_added;
	info->static_chain_added = 0;
        walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
		   info, gimple_omp_body_ptr (stmt));
	if (info->new_local_var_chain)
	  declare_vars (info->new_local_var_chain,
			gimple_seq_first_stmt (gimple_omp_body (stmt)),
			false);
	/* Bit 0 covers the frame decl, bit 1 the chain decl.  */
	for (int i = 0; i < 2; i++)
	  {
	    tree c, decl;
	    if ((info->static_chain_added & (1 << i)) == 0)
	      continue;
	    decl = i ? get_chain_decl (info) : info->frame_decl;
	    /* Don't add CHAIN.* or FRAME.* twice.  */
	    for (c = gimple_omp_taskreg_clauses (stmt);
		 c;
		 c = OMP_CLAUSE_CHAIN (c))
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		  && OMP_CLAUSE_DECL (c) == decl)
		break;
	    if (c == NULL && gimple_code (stmt) != GIMPLE_OMP_TARGET)
	      {
		/* parallel/task/teams: chain is firstprivate, frame is
		   shared.  */
		c = build_omp_clause (gimple_location (stmt),
				      i ? OMP_CLAUSE_FIRSTPRIVATE
				      : OMP_CLAUSE_SHARED);
		OMP_CLAUSE_DECL (c) = decl;
		OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
		gimple_omp_taskreg_set_clauses (stmt, c);
	      }
	    else if (c == NULL)
	      {
		/* Offloaded target: map the chain to, the frame tofrom.  */
		c = build_omp_clause (gimple_location (stmt),
				      OMP_CLAUSE_MAP);
		OMP_CLAUSE_DECL (c) = decl;
		OMP_CLAUSE_SET_MAP_KIND (c,
					 i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
		OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
		OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
		gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
					       c);
	      }
	  }
	info->new_local_var_chain = save_local_var_chain;
	info->static_chain_added |= save_static_chain_added;
      }
      break;

    default:
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
2699 
2700 
2701 
2702 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2703    that reference nested functions to make sure that the static chain
2704    is set up properly for the call.  */
2705 
static tree
convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
                     struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree decl, target_context;
  /* Saved copy of info->static_chain_added across nested OMP regions.
     Bit 0 tracks a use of FRAME.* (our own frame), bit 1 a use of
     CHAIN.* (the chain received from our parent).  */
  char save_static_chain_added;
  int i;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* A chain operand already present means the call was handled.  */
      if (gimple_call_chain (stmt))
	break;
      /* Indirect calls cannot be rewritten here; only direct calls to
	 nested functions need a static chain installed.  */
      decl = gimple_call_fndecl (stmt);
      if (!decl)
	break;
      target_context = decl_function_context (decl);
      if (target_context && DECL_STATIC_CHAIN (decl))
	{
	  struct nesting_info *i = info;
	  while (i && i->context != target_context)
	    i = i->outer;
	  /* If none of the outer contexts is the target context, this means
	     that the function is called in a wrong context.  */
	  if (!i)
	    internal_error ("%s from %s called in %s",
			    IDENTIFIER_POINTER (DECL_NAME (decl)),
			    IDENTIFIER_POINTER (DECL_NAME (target_context)),
			    IDENTIFIER_POINTER (DECL_NAME (info->context)));

	  gimple_call_set_chain (as_a <gcall *> (stmt),
				 get_static_chain (info, target_context,
						   &wi->gsi));
	  /* Bit 0 if the callee's parent is this very function (it uses
	     our frame), bit 1 if the chain came from further out.  */
	  info->static_chain_added |= (1 << (info->context != target_context));
	}
      break;

    case GIMPLE_OMP_TEAMS:
      /* Non-host teams constructs need no clause fixups; just walk the
	 body.  Host teams fall through to the taskreg handling.  */
      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  walk_body (convert_gimple_call, NULL, info,
		     gimple_omp_body_ptr (stmt));
	  break;
	}
      /* FALLTHRU */

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      /* Walk the region body with a clean static_chain_added, then add
	 sharing clauses for any CHAIN.*/FRAME.* uses the walk induced.  */
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  /* i == 0: our frame object; i == 1: the incoming chain decl.  */
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_taskreg_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      /* The chain pointer is copied in (firstprivate); the frame
		 itself must be shared so inner writes are visible.  */
	      c = build_omp_clause (gimple_location (stmt),
				    i ? OMP_CLAUSE_FIRSTPRIVATE
				    : OMP_CLAUSE_SHARED);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	      gimple_omp_taskreg_set_clauses (stmt, c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_TARGET:
      /* Non-offloaded target regions need no map clauses.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
	  break;
	}
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_target_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      /* Map the chain pointer to-only; the frame to-and-from so
		 device-side stores are copied back.  */
	      c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_SET_MAP_KIND (c, i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
	      OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	      gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
					     c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      walk_body (convert_gimple_call, NULL, info,
	  	 gimple_omp_for_pre_body_ptr (stmt));
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      break;

    default:
      /* Keep looking for other operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
2845 
2846 /* Walk the nesting tree starting with ROOT.  Convert all trampolines and
2847    call expressions.  At the same time, determine if a nested function
2848    actually uses its static chain; if not, remember that.  */
2849 
static void
convert_all_function_calls (struct nesting_info *root)
{
  unsigned int chain_count = 0, old_chain_count, iter_count;
  struct nesting_info *n;

  /* First, optimistically clear static_chain for all decls that haven't
     used the static chain already for variable access.  But always create
     it if not optimizing.  This makes it possible to reconstruct the static
     nesting tree at run time and thus to resolve up-level references from
     within the debugger.  */
  FOR_EACH_NEST_INFO (n, root)
    {
      /* Thunks are handled in a second pass below: they simply mirror
	 the setting of the function they alias.  */
      if (n->thunk_p)
	continue;
      tree decl = n->context;
      if (!optimize)
	{
	  if (n->inner)
	    (void) get_frame_type (n);
	  if (n->outer)
	    (void) get_chain_decl (n);
	}
      else if (!n->outer || (!n->chain_decl && !n->chain_field))
	{
	  DECL_STATIC_CHAIN (decl) = 0;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Guessing no static-chain for %s\n",
		     lang_hooks.decl_printable_name (decl, 2));
	}
      else
	DECL_STATIC_CHAIN (decl) = 1;
      /* Track how many functions currently need a static chain so we
	 can detect when the iteration below reaches a fixed point.  */
      chain_count += DECL_STATIC_CHAIN (decl);
    }

  FOR_EACH_NEST_INFO (n, root)
    if (n->thunk_p)
      {
	tree decl = n->context;
	tree alias = cgraph_node::get (decl)->thunk.alias;
	DECL_STATIC_CHAIN (decl) = DECL_STATIC_CHAIN (alias);
      }

  /* Walk the functions and perform transformations.  Note that these
     transformations can induce new uses of the static chain, which in turn
     require re-examining all users of the decl.  */
  /* ??? It would make sense to try to use the call graph to speed this up,
     but the call graph hasn't really been built yet.  Even if it did, we
     would still need to iterate in this loop since address-of references
     wouldn't show up in the callgraph anyway.  */
  iter_count = 0;
  do
    {
      old_chain_count = chain_count;
      chain_count = 0;
      iter_count++;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fputc ('\n', dump_file);

      FOR_EACH_NEST_INFO (n, root)
	{
	  if (n->thunk_p)
	    continue;
	  tree decl = n->context;
	  walk_function (convert_tramp_reference_stmt,
			 convert_tramp_reference_op, n);
	  walk_function (convert_gimple_call, NULL, n);
	  chain_count += DECL_STATIC_CHAIN (decl);
	}

      /* Re-propagate the possibly changed settings to the thunks.  */
      FOR_EACH_NEST_INFO (n, root)
	if (n->thunk_p)
	  {
	    tree decl = n->context;
	    tree alias = cgraph_node::get (decl)->thunk.alias;
	    DECL_STATIC_CHAIN (decl) = DECL_STATIC_CHAIN (alias);
	  }
    }
  while (chain_count != old_chain_count);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
	     iter_count);
}
2935 
/* Callback data for the tree-inline copy machinery when remapping types
   and decls in the debug variable chain.  Extends copy_body_data so the
   copy_decl hook can reach the nesting tree's var_map.  */

struct nesting_copy_body_data
{
  copy_body_data cb;	/* Must be first: callbacks downcast from &cb.  */
  struct nesting_info *root;
};
2941 
2942 /* A helper subroutine for debug_var_chain type remapping.  */
2943 
2944 static tree
2945 nesting_copy_decl (tree decl, copy_body_data *id)
2946 {
2947   struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2948   tree *slot = nid->root->var_map->get (decl);
2949 
2950   if (slot)
2951     return (tree) *slot;
2952 
2953   if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2954     {
2955       tree new_decl = copy_decl_no_change (decl, id);
2956       DECL_ORIGINAL_TYPE (new_decl)
2957 	= remap_type (DECL_ORIGINAL_TYPE (decl), id);
2958       return new_decl;
2959     }
2960 
2961   if (VAR_P (decl)
2962       || TREE_CODE (decl) == PARM_DECL
2963       || TREE_CODE (decl) == RESULT_DECL)
2964     return decl;
2965 
2966   return copy_decl_no_change (decl, id);
2967 }
2968 
2969 /* A helper function for remap_vla_decls.  See if *TP contains
2970    some remapped variables.  */
2971 
2972 static tree
2973 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2974 {
2975   struct nesting_info *root = (struct nesting_info *) data;
2976   tree t = *tp;
2977 
2978   if (DECL_P (t))
2979     {
2980       *walk_subtrees = 0;
2981       tree *slot = root->var_map->get (t);
2982 
2983       if (slot)
2984 	return *slot;
2985     }
2986   return NULL;
2987 }
2988 
2989 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
2990    involved.  */
2991 
static void
remap_vla_decls (tree block, struct nesting_info *root)
{
  tree var, subblock, val, type;
  struct nesting_copy_body_data id;

  /* Handle all subblocks first, depth-first.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = BLOCK_CHAIN (subblock))
    remap_vla_decls (subblock, root);

  /* First pass: cheap scan for a VLA decl whose value expression or type
     mentions a remapped variable.  If there is none, we can return
     without setting up the copy_body machinery at all.  */
  for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
    if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
      {
	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	if (root->var_map->get (TREE_OPERAND (val, 0))
	    || walk_tree (&type, contains_remapped_vars, root, NULL))
	  break;
      }

  if (var == NULL_TREE)
    return;

  memset (&id, 0, sizeof (id));
  id.cb.copy_decl = nesting_copy_decl;
  id.cb.decl_map = new hash_map<tree, tree>;
  id.root = root;

  /* Second pass: resume from the first affected VAR and rewrite each
     affected decl's type and value expression.  */
  for (; var; var = DECL_CHAIN (var))
    if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
      {
	struct nesting_info *i;
	tree newt, context;

	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
	if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
	  continue;

	/* Find the nesting level the variable belongs to; skip decls
	   from contexts outside the tree rooted at ROOT.  */
	context = decl_function_context (var);
	for (i = root; i; i = i->outer)
	  if (i->context == context)
	    break;

	if (i == NULL)
	  continue;

	/* Fully expand value expressions.  This avoids having debug variables
	   only referenced from them and that can be swept during GC.  */
        if (slot)
	  {
	    tree t = (tree) *slot;
	    gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
	    val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
	  }

	id.cb.src_fn = i->context;
	id.cb.dst_fn = i->context;
	id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

	TREE_TYPE (var) = newt = remap_type (type, &id.cb);
	/* Strip unnamed pointer layers in parallel so the TYPE_NAME
	   comparison below looks at corresponding named types.  */
	while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
	  {
	    newt = TREE_TYPE (newt);
	    type = TREE_TYPE (type);
	  }
	if (TYPE_NAME (newt)
	    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
	    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
	    && newt != type
	    && TYPE_NAME (newt) == TYPE_NAME (type))
	  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);

	walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
	if (val != DECL_VALUE_EXPR (var))
	  SET_DECL_VALUE_EXPR (var, val);
      }

  delete id.cb.decl_map;
}
3086 
3087 /* Fixup VLA decls in BLOCK and subblocks if remapped variables are
3088    involved.  */
3089 
3090 static void
3091 fixup_vla_decls (tree block)
3092 {
3093   for (tree var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
3094     if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
3095       {
3096 	tree val = DECL_VALUE_EXPR (var);
3097 
3098 	if (!(TREE_CODE (val) == INDIRECT_REF
3099 	      && VAR_P (TREE_OPERAND (val, 0))
3100 	      && DECL_HAS_VALUE_EXPR_P (TREE_OPERAND (val, 0))))
3101 	  continue;
3102 
3103 	/* Fully expand value expressions.  This avoids having debug variables
3104 	   only referenced from them and that can be swept during GC.  */
3105 	val = build1 (INDIRECT_REF, TREE_TYPE (val),
3106 		      DECL_VALUE_EXPR (TREE_OPERAND (val, 0)));
3107 	SET_DECL_VALUE_EXPR (var, val);
3108       }
3109 
3110   for (tree sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3111     fixup_vla_decls (sub);
3112 }
3113 
3114 /* Fold the MEM_REF *E.  */
3115 bool
3116 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
3117 {
3118   tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
3119   *ref_p = fold (*ref_p);
3120   return true;
3121 }
3122 
3123 /* Given DECL, a nested function, build an initialization call for FIELD,
3124    the trampoline or descriptor for DECL, using FUNC as the function.  */
3125 
3126 static gcall *
3127 build_init_call_stmt (struct nesting_info *info, tree decl, tree field,
3128 		      tree func)
3129 {
3130   tree arg1, arg2, arg3, x;
3131 
3132   gcc_assert (DECL_STATIC_CHAIN (decl));
3133   arg3 = build_addr (info->frame_decl);
3134 
3135   arg2 = build_addr (decl);
3136 
3137   x = build3 (COMPONENT_REF, TREE_TYPE (field),
3138 	      info->frame_decl, field, NULL_TREE);
3139   arg1 = build_addr (x);
3140 
3141   return gimple_build_call (func, 3, arg1, arg2, arg3);
3142 }
3143 
3144 /* Do "everything else" to clean up or complete state collected by the various
3145    walking passes -- create a field to hold the frame base address, lay out the
3146    types and decls, generate code to initialize the frame decl, store critical
3147    expressions in the struct function for rtl to find.  */
3148 
static void
finalize_nesting_tree_1 (struct nesting_info *root)
{
  gimple_seq stmt_list = NULL;
  gimple *stmt;
  tree context = root->context;
  struct function *sf;

  /* Thunks have no body of their own to finalize.  */
  if (root->thunk_p)
    return;

  /* If we created a non-local frame type or decl, we need to lay them
     out at this time.  */
  if (root->frame_type)
    {
      /* Debugging information needs to compute the frame base address of the
	 parent frame out of the static chain from the nested frame.

	 The static chain is the address of the FRAME record, so one could
	 imagine it would be possible to compute the frame base address just
	 adding a constant offset to this address.  Unfortunately, this is not
	 possible: if the FRAME object has alignment constraints that are
	 stronger than the stack, then the offset between the frame base and
	 the FRAME object will be dynamic.

	 What we do instead is to append a field to the FRAME object that holds
	 the frame base address: then debug info just has to fetch this
	 field.  */

      /* Debugging information will refer to the CFA as the frame base
	 address: we will do the same here.  */
      const tree frame_addr_fndecl
        = builtin_decl_explicit (BUILT_IN_DWARF_CFA);

      /* Create a field in the FRAME record to hold the frame base address for
	 this stack frame.  Since it will be used only by the debugger, put it
	 at the end of the record in order not to shift all other offsets.  */
      tree fb_decl = make_node (FIELD_DECL);

      DECL_NAME (fb_decl) = get_identifier ("FRAME_BASE.PARENT");
      TREE_TYPE (fb_decl) = ptr_type_node;
      TREE_ADDRESSABLE (fb_decl) = 1;
      DECL_CONTEXT (fb_decl) = root->frame_type;
      TYPE_FIELDS (root->frame_type) = chainon (TYPE_FIELDS (root->frame_type),
						fb_decl);

      /* In some cases the frame type will trigger the -Wpadded warning.
	 This is not helpful; suppress it. */
      int save_warn_padded = warn_padded;
      warn_padded = 0;
      layout_type (root->frame_type);
      warn_padded = save_warn_padded;
      layout_decl (root->frame_decl, 0);

      /* Initialize the frame base address field.  If the builtin we need is
	 not available, set it to NULL so that debugging information does not
	 reference junk.  */
      tree fb_ref = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
			    root->frame_decl, fb_decl, NULL_TREE);
      tree fb_tmp;

      if (frame_addr_fndecl != NULL_TREE)
	{
	  gcall *fb_gimple = gimple_build_call (frame_addr_fndecl, 1,
						integer_zero_node);
	  gimple_stmt_iterator gsi = gsi_last (stmt_list);

	  fb_tmp = init_tmp_var_with_call (root, &gsi, fb_gimple);
	}
      else
	fb_tmp = build_int_cst (TREE_TYPE (fb_ref), 0);
      gimple_seq_add_stmt (&stmt_list,
			   gimple_build_assign (fb_ref, fb_tmp));

      declare_vars (root->frame_decl,
		    gimple_seq_first_stmt (gimple_body (context)), true);
    }

  /* If any parameters were referenced non-locally, then we need to insert
     a copy or a pointer.  */
  if (root->any_parm_remapped)
    {
      tree p;
      for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
	{
	  tree field, x, y;

	  field = lookup_field_for_decl (root, p, NO_INSERT);
	  if (!field)
	    continue;

	  if (use_pointer_in_frame (p))
	    x = build_addr (p);
	  else
	    x = p;

	  /* If the assignment is from a non-register the stmt is
	     not valid gimple.  Make it so by using a temporary instead.  */
	  if (!is_gimple_reg (x)
	      && is_gimple_reg_type (TREE_TYPE (x)))
	    {
	      gimple_stmt_iterator gsi = gsi_last (stmt_list);
	      x = init_tmp_var (root, x, &gsi);
	    }

	  y = build3 (COMPONENT_REF, TREE_TYPE (field),
		      root->frame_decl, field, NULL_TREE);
	  stmt = gimple_build_assign (y, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If a chain_field was created, then it needs to be initialized
     from chain_decl.  */
  if (root->chain_field)
    {
      tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
		       root->frame_decl, root->chain_field, NULL_TREE);
      stmt = gimple_build_assign (x, get_chain_decl (root));
      gimple_seq_add_stmt (&stmt_list, stmt);
    }

  /* If trampolines were created, then we need to initialize them.  */
  if (root->any_tramp_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree field, x;

	  field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
	  stmt = build_init_call_stmt (root, i->context, field, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If descriptors were created, then we need to initialize them.  */
  if (root->any_descr_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
	{
	  tree field, x;

	  field = lookup_descr_for_decl (root, i->context, NO_INSERT);
	  if (!field)
	    continue;

	  x = builtin_decl_implicit (BUILT_IN_INIT_DESCRIPTOR);
	  stmt = build_init_call_stmt (root, i->context, field, x);
	  gimple_seq_add_stmt (&stmt_list, stmt);
	}
    }

  /* If we created initialization statements, insert them.  */
  if (stmt_list)
    {
      gbind *bind;
      annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
      /* Prepend the initialization sequence to the function body's
	 outermost bind so it runs before any user code.  */
      bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
      gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
      gimple_bind_set_body (bind, stmt_list);
    }

  /* If a chain_decl was created, then it needs to be registered with
     struct function so that it gets initialized from the static chain
     register at the beginning of the function.  */
  sf = DECL_STRUCT_FUNCTION (root->context);
  sf->static_chain_decl = root->chain_decl;

  /* Similarly for the non-local goto save area.  */
  if (root->nl_goto_field)
    {
      sf->nonlocal_goto_save_area
	= get_frame_field (root, context, root->nl_goto_field, NULL);
      sf->has_nonlocal_label = 1;
    }

  /* Make sure all new local variables get inserted into the
     proper BIND_EXPR.  */
  if (root->new_local_var_chain)
    declare_vars (root->new_local_var_chain,
		  gimple_seq_first_stmt (gimple_body (root->context)),
		  false);

  if (root->debug_var_chain)
    {
      tree debug_var;
      gbind *scope;

      remap_vla_decls (DECL_INITIAL (root->context), root);

      for (debug_var = root->debug_var_chain; debug_var;
	   debug_var = DECL_CHAIN (debug_var))
	if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	  break;

      /* If there are any debug decls with variable length types,
	 remap those types using other debug_var_chain variables.  */
      if (debug_var)
	{
	  struct nesting_copy_body_data id;

	  memset (&id, 0, sizeof (id));
	  id.cb.copy_decl = nesting_copy_decl;
	  id.cb.decl_map = new hash_map<tree, tree>;
	  id.root = root;

	  for (; debug_var; debug_var = DECL_CHAIN (debug_var))
	    if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
	      {
		tree type = TREE_TYPE (debug_var);
		tree newt, t = type;
		struct nesting_info *i;

		/* Find the innermost context the type's size depends on.  */
		for (i = root; i; i = i->outer)
		  if (variably_modified_type_p (type, i->context))
		    break;

		if (i == NULL)
		  continue;

		id.cb.src_fn = i->context;
		id.cb.dst_fn = i->context;
		id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

		TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
		/* Strip unnamed pointer layers in parallel so TYPE_NAME
		   below compares corresponding named types.  */
		while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
		  {
		    newt = TREE_TYPE (newt);
		    t = TREE_TYPE (t);
		  }
		if (TYPE_NAME (newt)
		    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
		    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
		    && newt != t
		    && TYPE_NAME (newt) == TYPE_NAME (t))
		  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
	      }

	  delete id.cb.decl_map;
	}

      scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
      if (gimple_bind_block (scope))
	declare_vars (root->debug_var_chain, scope, true);
      else
	BLOCK_VARS (DECL_INITIAL (root->context))
	  = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
		     root->debug_var_chain);
    }
  else
    fixup_vla_decls (DECL_INITIAL (root->context));

  /* Fold the rewritten MEM_REF trees.  */
  root->mem_refs->traverse<void *, fold_mem_refs> (NULL);

  /* Dump the translated tree function.  */
  if (dump_file)
    {
      fputs ("\n\n", dump_file);
      dump_function_to_file (root->context, dump_file, dump_flags);
    }
}
3417 
3418 static void
3419 finalize_nesting_tree (struct nesting_info *root)
3420 {
3421   struct nesting_info *n;
3422   FOR_EACH_NEST_INFO (n, root)
3423     finalize_nesting_tree_1 (n);
3424 }
3425 
3426 /* Unnest the nodes and pass them to cgraph.  */
3427 
3428 static void
3429 unnest_nesting_tree_1 (struct nesting_info *root)
3430 {
3431   struct cgraph_node *node = cgraph_node::get (root->context);
3432 
3433   /* For nested functions update the cgraph to reflect unnesting.
3434      We also delay finalizing of these functions up to this point.  */
3435   if (node->origin)
3436     {
3437        node->unnest ();
3438        if (!root->thunk_p)
3439 	 cgraph_node::finalize_function (root->context, true);
3440     }
3441 }
3442 
3443 static void
3444 unnest_nesting_tree (struct nesting_info *root)
3445 {
3446   struct nesting_info *n;
3447   FOR_EACH_NEST_INFO (n, root)
3448     unnest_nesting_tree_1 (n);
3449 }
3450 
3451 /* Free the data structures allocated during this pass.  */
3452 
3453 static void
3454 free_nesting_tree (struct nesting_info *root)
3455 {
3456   struct nesting_info *node, *next;
3457 
3458   node = iter_nestinfo_start (root);
3459   do
3460     {
3461       next = iter_nestinfo_next (node);
3462       delete node->var_map;
3463       delete node->field_map;
3464       delete node->mem_refs;
3465       free (node);
3466       node = next;
3467     }
3468   while (node);
3469 }
3470 
3471 /* Gimplify a function and all its nested functions.  */
3472 static void
3473 gimplify_all_functions (struct cgraph_node *root)
3474 {
3475   struct cgraph_node *iter;
3476   if (!gimple_body (root->decl))
3477     gimplify_function_tree (root->decl);
3478   for (iter = root->nested; iter; iter = iter->next_nested)
3479     if (!iter->thunk.thunk_p)
3480       gimplify_all_functions (iter);
3481 }
3482 
3483 /* Main entry point for this pass.  Process FNDECL and all of its nested
3484    subroutines and turn them into something less tightly bound.  */
3485 
void
lower_nested_functions (tree fndecl)
{
  struct cgraph_node *cgn;
  struct nesting_info *root;

  /* If there are no nested functions, there's nothing to do.  */
  cgn = cgraph_node::get (fndecl);
  if (!cgn->nested)
    return;

  /* The walkers below operate on GIMPLE; make sure every function in
     the nest has been gimplified first.  */
  gimplify_all_functions (cgn);

  set_dump_file (dump_begin (TDI_nested, &dump_flags));
  if (dump_file)
    fprintf (dump_file, "\n;; Function %s\n\n",
	     lang_hooks.decl_printable_name (fndecl, 2));

  bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
  root = create_nesting_tree (cgn);

  /* Phase order matters: nonlocal references are rewritten before local
     ones, then nonlocal gotos, then trampolines and static chains for
     calls (inside convert_all_function_calls).  */
  walk_all_functions (convert_nonlocal_reference_stmt,
                      convert_nonlocal_reference_op,
		      root);
  walk_all_functions (convert_local_reference_stmt,
                      convert_local_reference_op,
		      root);
  walk_all_functions (convert_nl_goto_reference, NULL, root);
  walk_all_functions (convert_nl_goto_receiver, NULL, root);

  convert_all_function_calls (root);
  finalize_nesting_tree (root);
  unnest_nesting_tree (root);

  /* Release all pass-local data structures.  */
  free_nesting_tree (root);
  bitmap_obstack_release (&nesting_info_bitmap_obstack);

  if (dump_file)
    {
      dump_end (TDI_nested, dump_file);
      set_dump_file (NULL);
    }
}
3529 
3530 #include "gt-tree-nested.h"
3531