xref: /netbsd-src/external/gpl3/gcc/dist/gcc/tree-nested.cc (revision 0a3071956a3a9fdebdbf7f338cf2d439b45fc728)
1 /* Nested function decomposition for GIMPLE.
2    Copyright (C) 2004-2022 Free Software Foundation, Inc.
3 
4    This file is part of GCC.
5 
6    GCC is free software; you can redistribute it and/or modify
7    it under the terms of the GNU General Public License as published by
8    the Free Software Foundation; either version 3, or (at your option)
9    any later version.
10 
11    GCC is distributed in the hope that it will be useful,
12    but WITHOUT ANY WARRANTY; without even the implied warranty of
13    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14    GNU General Public License for more details.
15 
16    You should have received a copy of the GNU General Public License
17    along with GCC; see the file COPYING3.  If not see
18    <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "memmodel.h"
29 #include "tm_p.h"
30 #include "stringpool.h"
31 #include "cgraph.h"
32 #include "fold-const.h"
33 #include "stor-layout.h"
34 #include "dumpfile.h"
35 #include "tree-inline.h"
36 #include "gimplify.h"
37 #include "gimple-iterator.h"
38 #include "gimple-walk.h"
39 #include "tree-cfg.h"
40 #include "explow.h"
41 #include "langhooks.h"
42 #include "gimple-low.h"
43 #include "gomp-constants.h"
44 #include "diagnostic.h"
45 #include "alloc-pool.h"
46 #include "tree-nested.h"
47 #include "symbol-summary.h"
48 #include "symtab-thunks.h"
49 
/* Summary of nested functions.  Lazily allocated by
   nested_function_info::get_create and destroyed by
   nested_function_info::release; NULL when no nested functions have
   been recorded.  */
static function_summary <nested_function_info *>
   *nested_function_sum = NULL;
53 
54 /* Return nested_function_info, if available.  */
55 nested_function_info *
get(cgraph_node * node)56 nested_function_info::get (cgraph_node *node)
57 {
58   if (!nested_function_sum)
59     return NULL;
60   return nested_function_sum->get (node);
61 }
62 
63 /* Return nested_function_info possibly creating new one.  */
64 nested_function_info *
get_create(cgraph_node * node)65 nested_function_info::get_create (cgraph_node *node)
66 {
67   if (!nested_function_sum)
68     {
69       nested_function_sum = new function_summary <nested_function_info *>
70 				   (symtab);
71       nested_function_sum->disable_insertion_hook ();
72     }
73   return nested_function_sum->get_create (node);
74 }
75 
76 /* cgraph_node is no longer nested function; update cgraph accordingly.  */
77 void
unnest_function(cgraph_node * node)78 unnest_function (cgraph_node *node)
79 {
80   nested_function_info *info = nested_function_info::get (node);
81   cgraph_node **node2 = &nested_function_info::get
82 		(nested_function_origin (node))->nested;
83 
84   gcc_checking_assert (info->origin);
85   while (*node2 != node)
86     node2 = &nested_function_info::get (*node2)->next_nested;
87   *node2 = info->next_nested;
88   info->next_nested = NULL;
89   info->origin = NULL;
90   nested_function_sum->remove (node);
91 }
92 
/* Destructor: unlink function from nested function lists.  */
nested_function_info::~nested_function_info ()
{
  cgraph_node *next;
  /* Detach every function nested inside this one: clear their
     back-pointer and break their sibling chain.  */
  for (cgraph_node *n = nested; n; n = next)
    {
      nested_function_info *info = nested_function_info::get (n);
      next = info->next_nested;
      info->origin = NULL;
      info->next_nested = NULL;
    }
  nested = NULL;
  /* Remove ourselves from our origin's list of nested functions.  */
  if (origin)
    {
      cgraph_node **node2
	     = &nested_function_info::get (origin)->nested;

      nested_function_info *info;
      /* Walk the sibling chain until we find this entry; the INFO test
	 also stops if the chain ends first (e.g. while the summary is
	 being torn down) — NOTE(review): that second condition is a
	 safety net, confirm against summary-destruction order.  */
      while ((info = nested_function_info::get (*node2)) != this && info)
	node2 = &info->next_nested;
      *node2 = next_nested;
    }
}
116 
117 /* Free nested function info summaries.  */
118 void
release()119 nested_function_info::release ()
120 {
121   if (nested_function_sum)
122     delete (nested_function_sum);
123   nested_function_sum = NULL;
124 }
125 
126 /* If NODE is nested function, record it.  */
127 void
maybe_record_nested_function(cgraph_node * node)128 maybe_record_nested_function (cgraph_node *node)
129 {
130   /* All nested functions gets lowered during the construction of symtab.  */
131   if (symtab->state > CONSTRUCTION)
132     return;
133   if (DECL_CONTEXT (node->decl)
134       && TREE_CODE (DECL_CONTEXT (node->decl)) == FUNCTION_DECL)
135     {
136       cgraph_node *origin = cgraph_node::get_create (DECL_CONTEXT (node->decl));
137       nested_function_info *info = nested_function_info::get_create (node);
138       nested_function_info *origin_info
139 		 = nested_function_info::get_create (origin);
140 
141       info->origin = origin;
142       info->next_nested = origin_info->nested;
143       origin_info->nested = node;
144     }
145 }
146 
147 /* The object of this pass is to lower the representation of a set of nested
148    functions in order to expose all of the gory details of the various
149    nonlocal references.  We want to do this sooner rather than later, in
150    order to give us more freedom in emitting all of the functions in question.
151 
152    Back in olden times, when gcc was young, we developed an insanely
153    complicated scheme whereby variables which were referenced nonlocally
154    were forced to live in the stack of the declaring function, and then
155    the nested functions magically discovered where these variables were
156    placed.  In order for this scheme to function properly, it required
157    that the outer function be partially expanded, then we switch to
158    compiling the inner function, and once done with those we switch back
159    to compiling the outer function.  Such delicate ordering requirements
160    makes it difficult to do whole translation unit optimizations
161    involving such functions.
162 
163    The implementation here is much more direct.  Everything that can be
164    referenced by an inner function is a member of an explicitly created
165    structure herein called the "nonlocal frame struct".  The incoming
166    static chain for a nested function is a pointer to this struct in
167    the parent.  In this way, we settle on known offsets from a known
168    base, and so are decoupled from the logic that places objects in the
169    function's stack frame.  More importantly, we don't have to wait for
170    that to happen -- since the compilation of the inner function is no
171    longer tied to a real stack frame, the nonlocal frame struct can be
172    allocated anywhere.  Which means that the outer function is now
173    inlinable.
174 
175    Theory of operation here is very simple.  Iterate over all the
176    statements in all the functions (depth first) several times,
177    allocating structures and fields on demand.  In general we want to
178    examine inner functions first, so that we can avoid making changes
179    to outer functions which are unnecessary.
180 
181    The order of the passes matters a bit, in that later passes will be
182    skipped if it is discovered that the functions don't actually interact
183    at all.  That is, they're nested in the lexical sense but could have
184    been written as independent functions without change.  */
185 
186 
/* Per-function state gathered while lowering nested functions.  */
struct nesting_info
{
  /* Links forming the nesting tree: the enclosing function, the first
     function nested inside this one, and the next sibling at the same
     nesting depth.  */
  struct nesting_info *outer;
  struct nesting_info *inner;
  struct nesting_info *next;

  /* Maps a non-locally referenced DECL of this context to its FIELD_DECL
     in the frame record (see lookup_field_for_decl).  */
  hash_map<tree, tree> *field_map;
  /* Maps a nested FUNCTION_DECL to a TREE_LIST whose TREE_PURPOSE and
     TREE_VALUE hold its trampoline and descriptor fields respectively
     (see lookup_element_for_decl and friends).  */
  hash_map<tree, tree> *var_map;
  /* Set of memory-reference slots; NOTE(review): populated by walkers
     outside this chunk — confirm exact semantics there.  */
  hash_set<tree *> *mem_refs;
  /* Bitmap of decls whose expansion should be suppressed; NOTE(review):
     semantics established by code outside this chunk.  */
  bitmap suppress_expansion;

  /* The FUNCTION_DECL this record describes.  */
  tree context;
  /* Chain of temporaries created by create_tmp_var_for, to be declared
     in this function later.  */
  tree new_local_var_chain;
  /* Chain of debug variables; NOTE(review): populated outside this
     chunk.  */
  tree debug_var_chain;
  /* The non-local frame RECORD_TYPE and the local VAR_DECL holding an
     instance of it (see get_frame_type).  */
  tree frame_type;
  tree frame_decl;
  /* Field in our frame holding a pointer to the parent frame, and the
     artificial PARM_DECL carrying the incoming static chain (see
     get_chain_field and get_chain_decl).  */
  tree chain_field;
  tree chain_decl;
  /* Field holding the buffer used by __builtin_nonlocal_goto (see
     get_nl_goto_field).  */
  tree nl_goto_field;

  /* Whether CONTEXT is a thunk; NOTE(review): set outside this chunk.  */
  bool thunk_p;
  /* True once any PARM_DECL has been given a field in the frame.  */
  bool any_parm_remapped;
  /* True once a trampoline (resp. descriptor) field was created for
     some nested function.  */
  bool any_tramp_created;
  bool any_descr_created;
  /* How the static chain was added; NOTE(review): encoding established
     outside this chunk.  */
  char static_chain_added;
};
213 
214 
215 /* Iterate over the nesting tree, starting with ROOT, depth first.  */
216 
217 static inline struct nesting_info *
iter_nestinfo_start(struct nesting_info * root)218 iter_nestinfo_start (struct nesting_info *root)
219 {
220   while (root->inner)
221     root = root->inner;
222   return root;
223 }
224 
225 static inline struct nesting_info *
iter_nestinfo_next(struct nesting_info * node)226 iter_nestinfo_next (struct nesting_info *node)
227 {
228   if (node->next)
229     return iter_nestinfo_start (node->next);
230   return node->outer;
231 }
232 
233 #define FOR_EACH_NEST_INFO(I, ROOT) \
234   for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
235 
236 /* Obstack used for the bitmaps in the struct above.  */
237 static struct bitmap_obstack nesting_info_bitmap_obstack;
238 
239 
240 /* We're working in so many different function contexts simultaneously,
241    that create_tmp_var is dangerous.  Prevent mishap.  */
242 #define create_tmp_var cant_use_create_tmp_var_here_dummy
243 
244 /* Like create_tmp_var, except record the variable for registration at
245    the given nesting level.  */
246 
247 static tree
create_tmp_var_for(struct nesting_info * info,tree type,const char * prefix)248 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
249 {
250   tree tmp_var;
251 
252   /* If the type is of variable size or a type which must be created by the
253      frontend, something is wrong.  Note that we explicitly allow
254      incomplete types here, since we create them ourselves here.  */
255   gcc_assert (!TREE_ADDRESSABLE (type));
256   gcc_assert (!TYPE_SIZE_UNIT (type)
257 	      || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
258 
259   tmp_var = create_tmp_var_raw (type, prefix);
260   DECL_CONTEXT (tmp_var) = info->context;
261   DECL_CHAIN (tmp_var) = info->new_local_var_chain;
262   DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
263 
264   info->new_local_var_chain = tmp_var;
265 
266   return tmp_var;
267 }
268 
269 /* Like build_simple_mem_ref, but set TREE_THIS_NOTRAP on the result.  */
270 
271 static tree
build_simple_mem_ref_notrap(tree ptr)272 build_simple_mem_ref_notrap (tree ptr)
273 {
274   tree t = build_simple_mem_ref (ptr);
275   TREE_THIS_NOTRAP (t) = 1;
276   return t;
277 }
278 
279 /* Take the address of EXP to be used within function CONTEXT.
280    Mark it for addressability as necessary.  */
281 
282 tree
build_addr(tree exp)283 build_addr (tree exp)
284 {
285   mark_addressable (exp);
286   return build_fold_addr_expr (exp);
287 }
288 
289 /* Insert FIELD into TYPE, sorted by alignment requirements.  */
290 
291 void
insert_field_into_struct(tree type,tree field)292 insert_field_into_struct (tree type, tree field)
293 {
294   tree *p;
295 
296   DECL_CONTEXT (field) = type;
297 
298   for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
299     if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
300       break;
301 
302   DECL_CHAIN (field) = *p;
303   *p = field;
304 
305   /* Set correct alignment for frame struct type.  */
306   if (TYPE_ALIGN (type) < DECL_ALIGN (field))
307     SET_TYPE_ALIGN (type, DECL_ALIGN (field));
308 }
309 
310 /* Build or return the RECORD_TYPE that describes the frame state that is
311    shared between INFO->CONTEXT and its nested functions.  This record will
312    not be complete until finalize_nesting_tree; up until that point we'll
313    be adding fields as necessary.
314 
315    We also build the DECL that represents this frame in the function.  */
316 
317 static tree
get_frame_type(struct nesting_info * info)318 get_frame_type (struct nesting_info *info)
319 {
320   tree type = info->frame_type;
321   if (!type)
322     {
323       char *name;
324 
325       type = make_node (RECORD_TYPE);
326 
327       name = concat ("FRAME.",
328 		     IDENTIFIER_POINTER (DECL_NAME (info->context)),
329 		     NULL);
330       TYPE_NAME (type) = get_identifier (name);
331       free (name);
332 
333       info->frame_type = type;
334 
335       /* Do not put info->frame_decl on info->new_local_var_chain,
336 	 so that we can declare it in the lexical blocks, which
337 	 makes sure virtual regs that end up appearing in its RTL
338 	 expression get substituted in instantiate_virtual_regs.  */
339       info->frame_decl = create_tmp_var_raw (type, "FRAME");
340       DECL_CONTEXT (info->frame_decl) = info->context;
341       DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
342       DECL_SEEN_IN_BIND_EXPR_P (info->frame_decl) = 1;
343 
344       /* ??? Always make it addressable for now, since it is meant to
345 	 be pointed to by the static chain pointer.  This pessimizes
346 	 when it turns out that no static chains are needed because
347 	 the nested functions referencing non-local variables are not
348 	 reachable, but the true pessimization is to create the non-
349 	 local frame structure in the first place.  */
350       TREE_ADDRESSABLE (info->frame_decl) = 1;
351     }
352 
353   return type;
354 }
355 
356 /* Return true if DECL should be referenced by pointer in the non-local frame
357    structure.  */
358 
359 static bool
use_pointer_in_frame(tree decl)360 use_pointer_in_frame (tree decl)
361 {
362   if (TREE_CODE (decl) == PARM_DECL)
363     {
364       /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable-
365 	 sized DECLs, and inefficient to copy large aggregates.  Don't bother
366 	 moving anything but scalar parameters.  */
367       return AGGREGATE_TYPE_P (TREE_TYPE (decl));
368     }
369   else
370     {
371       /* Variable-sized DECLs can only come from OMP clauses at this point
372 	 since the gimplifier has already turned the regular variables into
373 	 pointers.  Do the same as the gimplifier.  */
374       return !DECL_SIZE (decl) || TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST;
375     }
376 }
377 
/* Given DECL, a non-locally accessed variable, find or create a field
   in the non-local frame structure for the given nesting context.
   With NO_INSERT, return the existing field or NULL_TREE.  */

static tree
lookup_field_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  gcc_checking_assert (decl_function_context (decl) == info->context);

  if (insert == NO_INSERT)
    {
      tree *slot = info->field_map->get (decl);
      return slot ? *slot : NULL_TREE;
    }

  tree *slot = &info->field_map->get_or_insert (decl);
  if (!*slot)
    {
      /* First request for DECL: build its FIELD_DECL and cache it.  */
      tree type = get_frame_type (info);
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);

      if (use_pointer_in_frame (decl))
	{
	  /* The frame stores only a pointer to the object.  */
	  TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	  SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	  DECL_NONADDRESSABLE_P (field) = 1;
	}
      else
	{
	  /* The frame stores the object itself; mirror DECL's
	     attributes onto the field.  */
	  TREE_TYPE (field) = TREE_TYPE (decl);
	  DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
	  SET_DECL_ALIGN (field, DECL_ALIGN (decl));
	  DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
	  DECL_IGNORED_P (field) = DECL_IGNORED_P (decl);
	  DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
	  TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
	  copy_warning (field, decl);

	  /* Declare the transformation and adjust the original DECL.  For a
	     variable or for a parameter when not optimizing, we make it point
	     to the field in the frame directly.  For a parameter, we don't do
	     it when optimizing because the variable tracking pass will already
	     do the job,  */
	  if (VAR_P (decl) || !optimize)
	    {
	      tree x
		= build3 (COMPONENT_REF, TREE_TYPE (field), info->frame_decl,
			  field, NULL_TREE);

	      /* If the next declaration is a PARM_DECL pointing to the DECL,
		 we need to adjust its VALUE_EXPR directly, since chains of
		 VALUE_EXPRs run afoul of garbage collection.  This occurs
		 in Ada for Out parameters that aren't copied in.  */
	      tree next = DECL_CHAIN (decl);
	      if (next
		  && TREE_CODE (next) == PARM_DECL
		  && DECL_HAS_VALUE_EXPR_P (next)
		  && DECL_VALUE_EXPR (next) == decl)
		SET_DECL_VALUE_EXPR (next, x);

	      SET_DECL_VALUE_EXPR (decl, x);
	      DECL_HAS_VALUE_EXPR_P (decl) = 1;
	    }
	}

      insert_field_into_struct (type, field);
      *slot = field;

      /* Remember that a parameter now lives (partly) in the frame.  */
      if (TREE_CODE (decl) == PARM_DECL)
	info->any_parm_remapped = true;
    }

  return *slot;
}
453 
454 /* Build or return the variable that holds the static chain within
455    INFO->CONTEXT.  This variable may only be used within INFO->CONTEXT.  */
456 
457 static tree
get_chain_decl(struct nesting_info * info)458 get_chain_decl (struct nesting_info *info)
459 {
460   tree decl = info->chain_decl;
461 
462   if (!decl)
463     {
464       tree type;
465 
466       type = get_frame_type (info->outer);
467       type = build_pointer_type (type);
468 
469       /* Note that this variable is *not* entered into any BIND_EXPR;
470 	 the construction of this variable is handled specially in
471 	 expand_function_start and initialize_inlined_parameters.
472 	 Note also that it's represented as a parameter.  This is more
473 	 close to the truth, since the initial value does come from
474 	 the caller.  */
475       decl = build_decl (DECL_SOURCE_LOCATION (info->context),
476 			 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
477       DECL_ARTIFICIAL (decl) = 1;
478       DECL_IGNORED_P (decl) = 1;
479       TREE_USED (decl) = 1;
480       DECL_CONTEXT (decl) = info->context;
481       DECL_ARG_TYPE (decl) = type;
482 
483       /* Tell tree-inline.cc that we never write to this variable, so
484 	 it can copy-prop the replacement value immediately.  */
485       TREE_READONLY (decl) = 1;
486 
487       info->chain_decl = decl;
488 
489       if (dump_file
490           && (dump_flags & TDF_DETAILS)
491 	  && !DECL_STATIC_CHAIN (info->context))
492 	fprintf (dump_file, "Setting static-chain for %s\n",
493 		 lang_hooks.decl_printable_name (info->context, 2));
494 
495       DECL_STATIC_CHAIN (info->context) = 1;
496     }
497   return decl;
498 }
499 
500 /* Build or return the field within the non-local frame state that holds
501    the static chain for INFO->CONTEXT.  This is the way to walk back up
502    multiple nesting levels.  */
503 
504 static tree
get_chain_field(struct nesting_info * info)505 get_chain_field (struct nesting_info *info)
506 {
507   tree field = info->chain_field;
508 
509   if (!field)
510     {
511       tree type = build_pointer_type (get_frame_type (info->outer));
512 
513       field = make_node (FIELD_DECL);
514       DECL_NAME (field) = get_identifier ("__chain");
515       TREE_TYPE (field) = type;
516       SET_DECL_ALIGN (field, TYPE_ALIGN (type));
517       DECL_NONADDRESSABLE_P (field) = 1;
518 
519       insert_field_into_struct (get_frame_type (info), field);
520 
521       info->chain_field = field;
522 
523       if (dump_file
524           && (dump_flags & TDF_DETAILS)
525 	  && !DECL_STATIC_CHAIN (info->context))
526 	fprintf (dump_file, "Setting static-chain for %s\n",
527 		 lang_hooks.decl_printable_name (info->context, 2));
528 
529       DECL_STATIC_CHAIN (info->context) = 1;
530     }
531   return field;
532 }
533 
534 /* Initialize a new temporary with the GIMPLE_CALL STMT.  */
535 
536 static tree
init_tmp_var_with_call(struct nesting_info * info,gimple_stmt_iterator * gsi,gcall * call)537 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
538 		        gcall *call)
539 {
540   tree t;
541 
542   t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
543   gimple_call_set_lhs (call, t);
544   if (! gsi_end_p (*gsi))
545     gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
546   gsi_insert_before (gsi, call, GSI_SAME_STMT);
547 
548   return t;
549 }
550 
551 
552 /* Copy EXP into a temporary.  Allocate the temporary in the context of
553    INFO and insert the initialization statement before GSI.  */
554 
555 static tree
init_tmp_var(struct nesting_info * info,tree exp,gimple_stmt_iterator * gsi)556 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
557 {
558   tree t;
559   gimple *stmt;
560 
561   t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
562   stmt = gimple_build_assign (t, exp);
563   if (! gsi_end_p (*gsi))
564     gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
565   gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
566 
567   return t;
568 }
569 
570 
571 /* Similarly, but only do so to force EXP to satisfy is_gimple_val.  */
572 
573 static tree
gsi_gimplify_val(struct nesting_info * info,tree exp,gimple_stmt_iterator * gsi)574 gsi_gimplify_val (struct nesting_info *info, tree exp,
575 		  gimple_stmt_iterator *gsi)
576 {
577   if (is_gimple_val (exp))
578     return exp;
579   else
580     return init_tmp_var (info, exp, gsi);
581 }
582 
583 /* Similarly, but copy from the temporary and insert the statement
584    after the iterator.  */
585 
586 static tree
save_tmp_var(struct nesting_info * info,tree exp,gimple_stmt_iterator * gsi)587 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
588 {
589   tree t;
590   gimple *stmt;
591 
592   t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
593   stmt = gimple_build_assign (exp, t);
594   if (! gsi_end_p (*gsi))
595     gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
596   gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
597 
598   return t;
599 }
600 
601 /* Build or return the type used to represent a nested function trampoline.  */
602 
603 static GTY(()) tree trampoline_type;
604 
605 static tree
get_trampoline_type(struct nesting_info * info)606 get_trampoline_type (struct nesting_info *info)
607 {
608   unsigned align, size;
609   tree t;
610 
611   if (trampoline_type)
612     return trampoline_type;
613 
614   align = TRAMPOLINE_ALIGNMENT;
615   size = TRAMPOLINE_SIZE;
616 
617   /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
618      then allocate extra space so that we can do dynamic alignment.  */
619   if (align > STACK_BOUNDARY)
620     {
621       size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
622       align = STACK_BOUNDARY;
623     }
624 
625   t = build_index_type (size_int (size - 1));
626   t = build_array_type (char_type_node, t);
627   t = build_decl (DECL_SOURCE_LOCATION (info->context),
628 		  FIELD_DECL, get_identifier ("__data"), t);
629   SET_DECL_ALIGN (t, align);
630   DECL_USER_ALIGN (t) = 1;
631 
632   trampoline_type = make_node (RECORD_TYPE);
633   TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
634   TYPE_FIELDS (trampoline_type) = t;
635   layout_type (trampoline_type);
636   DECL_CONTEXT (t) = trampoline_type;
637 
638   return trampoline_type;
639 }
640 
641 /* Build or return the type used to represent a nested function descriptor.  */
642 
643 static GTY(()) tree descriptor_type;
644 
645 static tree
get_descriptor_type(struct nesting_info * info)646 get_descriptor_type (struct nesting_info *info)
647 {
648   /* The base alignment is that of a function.  */
649   const unsigned align = FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY);
650   tree t;
651 
652   if (descriptor_type)
653     return descriptor_type;
654 
655   t = build_index_type (integer_one_node);
656   t = build_array_type (ptr_type_node, t);
657   t = build_decl (DECL_SOURCE_LOCATION (info->context),
658 		  FIELD_DECL, get_identifier ("__data"), t);
659   SET_DECL_ALIGN (t, MAX (TYPE_ALIGN (ptr_type_node), align));
660   DECL_USER_ALIGN (t) = 1;
661 
662   descriptor_type = make_node (RECORD_TYPE);
663   TYPE_NAME (descriptor_type) = get_identifier ("__builtin_descriptor");
664   TYPE_FIELDS (descriptor_type) = t;
665   layout_type (descriptor_type);
666   DECL_CONTEXT (t) = descriptor_type;
667 
668   return descriptor_type;
669 }
670 
671 /* Given DECL, a nested function, find or create an element in the
672    var map for this function.  */
673 
674 static tree
lookup_element_for_decl(struct nesting_info * info,tree decl,enum insert_option insert)675 lookup_element_for_decl (struct nesting_info *info, tree decl,
676 			 enum insert_option insert)
677 {
678   if (insert == NO_INSERT)
679     {
680       tree *slot = info->var_map->get (decl);
681       return slot ? *slot : NULL_TREE;
682     }
683 
684   tree *slot = &info->var_map->get_or_insert (decl);
685   if (!*slot)
686     *slot = build_tree_list (NULL_TREE, NULL_TREE);
687 
688   return (tree) *slot;
689 }
690 
691 /* Given DECL, a nested function, create a field in the non-local
692    frame structure for this function.  */
693 
694 static tree
create_field_for_decl(struct nesting_info * info,tree decl,tree type)695 create_field_for_decl (struct nesting_info *info, tree decl, tree type)
696 {
697   tree field = make_node (FIELD_DECL);
698   DECL_NAME (field) = DECL_NAME (decl);
699   TREE_TYPE (field) = type;
700   TREE_ADDRESSABLE (field) = 1;
701   insert_field_into_struct (get_frame_type (info), field);
702   return field;
703 }
704 
705 /* Given DECL, a nested function, find or create a field in the non-local
706    frame structure for a trampoline for this function.  */
707 
708 static tree
lookup_tramp_for_decl(struct nesting_info * info,tree decl,enum insert_option insert)709 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
710 		       enum insert_option insert)
711 {
712   tree elt, field;
713 
714   elt = lookup_element_for_decl (info, decl, insert);
715   if (!elt)
716     return NULL_TREE;
717 
718   field = TREE_PURPOSE (elt);
719 
720   if (!field && insert == INSERT)
721     {
722       field = create_field_for_decl (info, decl, get_trampoline_type (info));
723       TREE_PURPOSE (elt) = field;
724       info->any_tramp_created = true;
725     }
726 
727   return field;
728 }
729 
730 /* Given DECL, a nested function, find or create a field in the non-local
731    frame structure for a descriptor for this function.  */
732 
733 static tree
lookup_descr_for_decl(struct nesting_info * info,tree decl,enum insert_option insert)734 lookup_descr_for_decl (struct nesting_info *info, tree decl,
735 		       enum insert_option insert)
736 {
737   tree elt, field;
738 
739   elt = lookup_element_for_decl (info, decl, insert);
740   if (!elt)
741     return NULL_TREE;
742 
743   field = TREE_VALUE (elt);
744 
745   if (!field && insert == INSERT)
746     {
747       field = create_field_for_decl (info, decl, get_descriptor_type (info));
748       TREE_VALUE (elt) = field;
749       info->any_descr_created = true;
750     }
751 
752   return field;
753 }
754 
755 /* Build or return the field within the non-local frame state that holds
756    the non-local goto "jmp_buf".  The buffer itself is maintained by the
757    rtl middle-end as dynamic stack space is allocated.  */
758 
759 static tree
get_nl_goto_field(struct nesting_info * info)760 get_nl_goto_field (struct nesting_info *info)
761 {
762   tree field = info->nl_goto_field;
763   if (!field)
764     {
765       unsigned size;
766       tree type;
767 
768       /* For __builtin_nonlocal_goto, we need N words.  The first is the
769 	 frame pointer, the rest is for the target's stack pointer save
770 	 area.  The number of words is controlled by STACK_SAVEAREA_MODE;
771 	 not the best interface, but it'll do for now.  */
772       if (Pmode == ptr_mode)
773 	type = ptr_type_node;
774       else
775 	type = lang_hooks.types.type_for_mode (Pmode, 1);
776 
777       scalar_int_mode mode
778 	= as_a <scalar_int_mode> (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
779       size = GET_MODE_SIZE (mode);
780       size = size / GET_MODE_SIZE (Pmode);
781       size = size + 1;
782 
783       type = build_array_type
784 	(type, build_index_type (size_int (size)));
785 
786       field = make_node (FIELD_DECL);
787       DECL_NAME (field) = get_identifier ("__nl_goto_buf");
788       TREE_TYPE (field) = type;
789       SET_DECL_ALIGN (field, TYPE_ALIGN (type));
790       TREE_ADDRESSABLE (field) = 1;
791 
792       insert_field_into_struct (get_frame_type (info), field);
793 
794       info->nl_goto_field = field;
795     }
796 
797   return field;
798 }
799 
800 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ.  */
801 
802 static void
walk_body(walk_stmt_fn callback_stmt,walk_tree_fn callback_op,struct nesting_info * info,gimple_seq * pseq)803 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
804 	   struct nesting_info *info, gimple_seq *pseq)
805 {
806   struct walk_stmt_info wi;
807 
808   memset (&wi, 0, sizeof (wi));
809   wi.info = info;
810   wi.val_only = true;
811   walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
812 }
813 
814 
815 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT.  */
816 
817 static inline void
walk_function(walk_stmt_fn callback_stmt,walk_tree_fn callback_op,struct nesting_info * info)818 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
819 	       struct nesting_info *info)
820 {
821   gimple_seq body = gimple_body (info->context);
822   walk_body (callback_stmt, callback_op, info, &body);
823   gimple_set_body (info->context, body);
824 }
825 
/* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body.  */

static void
walk_gimple_omp_for (gomp_for *for_stmt,
		     walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
		     struct nesting_info *info)
{
  struct walk_stmt_info wi;
  gimple_seq seq;
  tree t;
  size_t i;

  /* The pre-body is an ordinary statement sequence; walk it directly.  */
  walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));

  /* Point WI's iterator at an empty sequence so that statements the
     callbacks insert while rewriting the loop operands are collected
     and can be appended to the pre-body afterwards.  */
  seq = NULL;
  memset (&wi, 0, sizeof (wi));
  wi.info = info;
  wi.gsi = gsi_last (seq);

  for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
    {
      /* The index is walked with val_only clear — presumably because it
	 is assigned to and must stay an lvalue; confirm against
	 walk_stmt_info's documentation.  */
      wi.val_only = false;
      walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
		 &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      /* The increment is a binary expression whose operands are walked
	 separately; operand 0 gets the same lvalue treatment as the
	 index above.  */
      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (BINARY_CLASS_P (t));
      wi.val_only = false;
      walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
    }

  /* Append any statements emitted during the operand walks to the
     pre-body, annotated with the loop's location.  */
  seq = gsi_seq (wi.gsi);
  if (!gimple_seq_empty_p (seq))
    {
      gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
      annotate_all_with_location (seq, gimple_location (for_stmt));
      gimple_seq_add_seq (&pre_body, seq);
      gimple_omp_for_set_pre_body (for_stmt, pre_body);
    }
}
878 
879 /* Similarly for ROOT and all functions nested underneath, depth first.  */
880 
881 static void
walk_all_functions(walk_stmt_fn callback_stmt,walk_tree_fn callback_op,struct nesting_info * root)882 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
883 		    struct nesting_info *root)
884 {
885   struct nesting_info *n;
886   FOR_EACH_NEST_INFO (n, root)
887     walk_function (callback_stmt, callback_op, n);
888 }
889 
890 
/* We have to check for a fairly pathological case.  The operands of a
   nested function are to be interpreted in the context of the enclosing
   function.  So if any are variably-sized, they will get remapped when the
   enclosing function is inlined.  But that remapping would also have to be
895    done in the types of the PARM_DECLs of the nested function, meaning the
896    argument types of that function will disagree with the arguments in the
897    calls to that function.  So we'd either have to make a copy of the nested
898    function corresponding to each time the enclosing function was inlined or
899    add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
900    function.  The former is not practical.  The latter would still require
901    detecting this case to know when to add the conversions.  So, for now at
902    least, we don't inline such an enclosing function.
903 
904    We have to do that check recursively, so here return indicating whether
905    FNDECL has such a nested function.  ORIG_FN is the function we were
906    trying to inline to use for checking whether any argument is variably
907    modified by anything in it.
908 
909    It would be better to do this in tree-inline.cc so that we could give
910    the appropriate warning for why a function can't be inlined, but that's
911    too late since the nesting structure has already been flattened and
912    adding a flag just to record this fact seems a waste of a flag.  */
913 
914 static bool
check_for_nested_with_variably_modified(tree fndecl,tree orig_fndecl)915 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
916 {
917   struct cgraph_node *cgn = cgraph_node::get (fndecl);
918   tree arg;
919 
920   for (cgn = first_nested_function (cgn); cgn;
921        cgn = next_nested_function (cgn))
922     {
923       for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
924 	if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
925 	  return true;
926 
927       if (check_for_nested_with_variably_modified (cgn->decl,
928 						   orig_fndecl))
929 	return true;
930     }
931 
932   return false;
933 }
934 
935 /* Construct our local datastructure describing the function nesting
936    tree rooted by CGN.  */
937 
938 static struct nesting_info *
create_nesting_tree(struct cgraph_node * cgn)939 create_nesting_tree (struct cgraph_node *cgn)
940 {
941   struct nesting_info *info = XCNEW (struct nesting_info);
942   info->field_map = new hash_map<tree, tree>;
943   info->var_map = new hash_map<tree, tree>;
944   info->mem_refs = new hash_set<tree *>;
945   info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
946   info->context = cgn->decl;
947   info->thunk_p = cgn->thunk;
948 
949   for (cgn = first_nested_function (cgn); cgn;
950        cgn = next_nested_function (cgn))
951     {
952       struct nesting_info *sub = create_nesting_tree (cgn);
953       sub->outer = info;
954       sub->next = info->inner;
955       info->inner = sub;
956     }
957 
958   /* See discussion at check_for_nested_with_variably_modified for a
959      discussion of why this has to be here.  */
960   if (check_for_nested_with_variably_modified (info->context, info->context))
961     DECL_UNINLINABLE (info->context) = true;
962 
963   return info;
964 }
965 
/* Return an expression computing the static chain for TARGET_CONTEXT
   from INFO->CONTEXT.  Insert any necessary computations before GSI.  */
968 
969 static tree
get_static_chain(struct nesting_info * info,tree target_context,gimple_stmt_iterator * gsi)970 get_static_chain (struct nesting_info *info, tree target_context,
971 		  gimple_stmt_iterator *gsi)
972 {
973   struct nesting_info *i;
974   tree x;
975 
976   if (info->context == target_context)
977     {
978       x = build_addr (info->frame_decl);
979       info->static_chain_added |= 1;
980     }
981   else
982     {
983       x = get_chain_decl (info);
984       info->static_chain_added |= 2;
985 
986       for (i = info->outer; i->context != target_context; i = i->outer)
987 	{
988 	  tree field = get_chain_field (i);
989 
990 	  x = build_simple_mem_ref_notrap (x);
991 	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
992 	  x = init_tmp_var (info, x, gsi);
993 	}
994     }
995 
996   return x;
997 }
998 
999 
1000 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
1001    frame as seen from INFO->CONTEXT.  Insert any necessary computations
1002    before GSI.  */
1003 
1004 static tree
get_frame_field(struct nesting_info * info,tree target_context,tree field,gimple_stmt_iterator * gsi)1005 get_frame_field (struct nesting_info *info, tree target_context,
1006 		 tree field, gimple_stmt_iterator *gsi)
1007 {
1008   struct nesting_info *i;
1009   tree x;
1010 
1011   if (info->context == target_context)
1012     {
1013       /* Make sure frame_decl gets created.  */
1014       (void) get_frame_type (info);
1015       x = info->frame_decl;
1016       info->static_chain_added |= 1;
1017     }
1018   else
1019     {
1020       x = get_chain_decl (info);
1021       info->static_chain_added |= 2;
1022 
1023       for (i = info->outer; i->context != target_context; i = i->outer)
1024 	{
1025 	  tree field = get_chain_field (i);
1026 
1027 	  x = build_simple_mem_ref_notrap (x);
1028 	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1029 	  x = init_tmp_var (info, x, gsi);
1030 	}
1031 
1032       x = build_simple_mem_ref_notrap (x);
1033     }
1034 
1035   x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1036   TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (field);
1037   return x;
1038 }
1039 
1040 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
1041 
1042 /* Helper for get_nonlocal_debug_decl and get_local_debug_decl.  */
1043 
1044 static tree
get_debug_decl(tree decl)1045 get_debug_decl (tree decl)
1046 {
1047   tree new_decl
1048     = build_decl (DECL_SOURCE_LOCATION (decl),
1049 		  VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1050   DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1051   DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1052   TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1053   TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1054   TREE_READONLY (new_decl) = TREE_READONLY (decl);
1055   TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1056   DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1057   if ((TREE_CODE (decl) == PARM_DECL
1058        || TREE_CODE (decl) == RESULT_DECL
1059        || VAR_P (decl))
1060       && DECL_BY_REFERENCE (decl))
1061     DECL_BY_REFERENCE (new_decl) = 1;
1062   /* Copy DECL_LANG_SPECIFIC and DECL_LANG_FLAG_* for OpenMP langhook
1063      purposes.  */
1064   DECL_LANG_SPECIFIC (new_decl) = DECL_LANG_SPECIFIC (decl);
1065 #define COPY_DLF(n) DECL_LANG_FLAG_##n (new_decl) = DECL_LANG_FLAG_##n (decl)
1066   COPY_DLF (0); COPY_DLF (1); COPY_DLF (2); COPY_DLF (3);
1067   COPY_DLF (4); COPY_DLF (5); COPY_DLF (6); COPY_DLF (7);
1068   COPY_DLF (8);
1069 #undef COPY_DLF
1070   return new_decl;
1071 }
1072 
1073 /* A subroutine of convert_nonlocal_reference_op.  Create a local variable
1074    in the nested function with DECL_VALUE_EXPR set to reference the true
1075    variable in the parent function.  This is used both for debug info
1076    and in OMP lowering.  */
1077 
static tree
get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
{
  tree target_context;
  struct nesting_info *i;
  tree x, field, new_decl;

  /* Reuse a previously created debug decl for DECL, if we made one.  */
  tree *slot = &info->var_map->get_or_insert (decl);

  if (*slot)
    return *slot;

  target_context = decl_function_context (decl);

  /* A copy of the code in get_frame_field, but without the temporaries.  */
  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      i = info;
      info->static_chain_added |= 1;
    }
  else
    {
      /* Follow the static chain from INFO up to TARGET_CONTEXT's frame;
	 I is left pointing at the target's nesting_info.  */
      x = get_chain_decl (info);
      info->static_chain_added |= 2;
      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  field = get_chain_field (i);
	  x = build_simple_mem_ref_notrap (x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	}
      x = build_simple_mem_ref_notrap (x);
    }

  field = lookup_field_for_decl (i, decl, INSERT);
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  if (use_pointer_in_frame (decl))
    x = build_simple_mem_ref_notrap (x);

  /* ??? We should be remapping types as well, surely.  */
  new_decl = get_debug_decl (decl);
  DECL_CONTEXT (new_decl) = info->context;

  /* The new decl evaluates to the frame access built above.  */
  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;

  /* Cache the result before recursing below, so nested lookups of the
     same DECL terminate.  */
  *slot = new_decl;
  DECL_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  /* For VLAs referenced across functions, also make debug decls for the
     nonlocal array bounds so debug info stays usable at -O0.  */
  if (!optimize
      && info->context != target_context
      && variably_modified_type_p (TREE_TYPE (decl), NULL))
    note_nonlocal_vla_type (info, TREE_TYPE (decl));

  return new_decl;
}
1137 
1138 
1139 /* Callback for walk_gimple_stmt, rewrite all references to VAR
1140    and PARM_DECLs that belong to outer functions.
1141 
1142    The rewrite will involve some number of structure accesses back up
1143    the static chain.  E.g. for a variable FOO up one nesting level it'll
1144    be CHAIN->FOO.  For two levels it'll be CHAIN->__chain->FOO.  Further
1145    indirections apply to decls for which use_pointer_in_frame is true.  */
1146 
static tree
convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp;

  /* By default do not recurse; each case re-enables recursion or walks
     subtrees explicitly as appropriate.  */
  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      {
	tree x, target_context = decl_function_context (t);

	/* Local references need no rewriting.  */
	if (info->context == target_context)
	  break;

	wi->changed = true;

	/* Decls handled by OMP clause processing get a debug decl
	   instead of an explicit frame access.  */
	if (bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	  x = get_nonlocal_debug_decl (info, t);
	else
	  {
	    struct nesting_info *i = info;
	    while (i && i->context != target_context)
	      i = i->outer;
	    /* If none of the outer contexts is the target context, this means
	       that the VAR or PARM_DECL is referenced in a wrong context.  */
	    if (!i)
	      internal_error ("%s from %s referenced in %s",
			      IDENTIFIER_POINTER (DECL_NAME (t)),
			      IDENTIFIER_POINTER (DECL_NAME (target_context)),
			      IDENTIFIER_POINTER (DECL_NAME (info->context)));

	    x = lookup_field_for_decl (i, t, INSERT);
	    x = get_frame_field (info, target_context, x, &wi->gsi);
	    if (use_pointer_in_frame (t))
	      {
		x = init_tmp_var (info, x, &wi->gsi);
		x = build_simple_mem_ref_notrap (x);
	      }
	  }

	/* When a simple value is required, load into (or store from) a
	   temporary as dictated by is_lhs.  */
	if (wi->val_only)
	  {
	    if (wi->is_lhs)
	      x = save_tmp_var (info, x, &wi->gsi);
	    else
	      x = init_tmp_var (info, x, &wi->gsi);
	  }

	*tp = x;
      }
      break;

    case LABEL_DECL:
      /* We're taking the address of a label from a parent function, but
	 this is not itself a non-local goto.  Mark the label such that it
	 will not be deleted, much as we would with a label address in
	 static storage.  */
      if (decl_function_context (t) != info->context)
        FORCED_LABEL (t) = 1;
      break;

    case ADDR_EXPR:
      {
	bool save_val_only = wi->val_only;

	wi->val_only = false;
	wi->is_lhs = false;
	wi->changed = false;
	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
	wi->val_only = true;

	if (wi->changed)
	  {
	    tree save_context;

	    /* If we changed anything, we might no longer be directly
	       referencing a decl.  */
	    save_context = current_function_decl;
	    current_function_decl = info->context;
	    recompute_tree_invariant_for_addr_expr (t);

	    /* If the callback converted the address argument in a context
	       where we only accept variables (and min_invariant, presumably),
	       then compute the address into a temporary.  */
	    if (save_val_only)
	      *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				      t, &wi->gsi);
	    current_function_decl = save_context;
	  }
      }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      /* Walk the index and both bound/stride operands.  */
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
			 wi, NULL);
	    }
	}
      wi->val_only = false;
      walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  *walk_subtrees = 1;
          wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1298 
1299 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1300 					     struct walk_stmt_info *);
1301 
1302 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1303    and PARM_DECLs that belong to outer functions.  */
1304 
static bool
convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_chain = false, need_stmts = false;
  tree clause, decl, *pdecl;
  int dummy;
  bitmap new_suppress;

  /* Decls rewritten here are recorded so the reference walkers use the
     debug decls instead of expanding frame accesses again.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      pdecl = NULL;
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  /* For array reductions the decl may be wrapped in a MEM_REF;
	     dig down to the base decl.  */
	  if (TREE_CODE (OMP_CLAUSE_DECL (clause)) == MEM_REF)
	    {
	      pdecl = &TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0);
	      if (TREE_CODE (*pdecl) == POINTER_PLUS_EXPR)
		pdecl = &TREE_OPERAND (*pdecl, 0);
	      if (TREE_CODE (*pdecl) == INDIRECT_REF
		  || TREE_CODE (*pdecl) == ADDR_EXPR)
		pdecl = &TREE_OPERAND (*pdecl, 0);
	    }
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LINEAR:
	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  /* The step expression may itself reference nonlocal decls.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_nonlocal_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause),
					 &dummy, wi);
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_TO_DECLARE:
	case OMP_CLAUSE_LINK:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_HAS_DEVICE_ADDR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	case OMP_CLAUSE_DETACH:
	do_decl_clause:
	  if (pdecl == NULL)
	    pdecl = &OMP_CLAUSE_DECL (clause);
	  decl = *pdecl;
	  /* Non-automatic variables are never rewritten.  */
	  if (VAR_P (decl)
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
		OMP_CLAUSE_SHARED_READONLY (clause) = 0;
	      /* Suppress frame expansion for this decl and use a debug
		 decl in its place.  */
	      bitmap_set_bit (new_suppress, DECL_UID (decl));
	      *pdecl = get_nonlocal_debug_decl (info, decl);
	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
		need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE_FILTER:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	  /* Several OpenACC clauses have optional arguments.  Check if they
	     are present.  */
	  if (OMP_CLAUSE_OPERAND (clause, 0))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					     &dummy, wi);
	    }

	  /* The gang clause accepts two arguments.  */
	  if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
	      && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
	    {
		wi->val_only = true;
		wi->is_lhs = false;
		convert_nonlocal_reference_op
		  (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_DIST_SCHEDULE:
	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					     &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	  /* Rewrite the size expression first, then the mapped entity.  */
	  if (OMP_CLAUSE_SIZE (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (&OMP_CLAUSE_SIZE (clause),
					     &dummy, wi);
	    }
	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
	    goto do_decl_clause;
	  wi->val_only = true;
	  wi->is_lhs = false;
	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
		     wi, NULL);
	  break;

	case OMP_CLAUSE_ALIGNED:
	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op
		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
	    }
	  /* FALLTHRU */
	case OMP_CLAUSE_NONTEMPORAL:
	do_decl_clause_no_supp:
	  /* Like do_decl_clause, but don't add any suppression.  */
	  decl = OMP_CLAUSE_DECL (clause);
	  if (VAR_P (decl)
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_ALLOCATE:
	  if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op
		(&OMP_CLAUSE_ALLOCATE_ALLOCATOR (clause), &dummy, wi);
	    }
	  goto do_decl_clause_no_supp;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_BIND:
	case OMP_CLAUSE__CONDTEMP_:
	case OMP_CLAUSE__SCANTEMP_:
	  /* These clauses carry no decls or expressions to rewrite.  */
	  break;

	  /* The following clause belongs to the OpenACC cache directive, which
	     is discarded during gimplification.  */
	case OMP_CLAUSE__CACHE_:
	  /* The following clauses are only allowed in the OpenMP declare simd
	     directive, so not seen here.  */
	case OMP_CLAUSE_UNIFORM:
	case OMP_CLAUSE_INBRANCH:
	case OMP_CLAUSE_NOTINBRANCH:
	  /* The following clauses are only allowed on OpenMP cancel and
	     cancellation point directives, which at this point have already
	     been lowered into a function call.  */
	case OMP_CLAUSE_FOR:
	case OMP_CLAUSE_PARALLEL:
	case OMP_CLAUSE_SECTIONS:
	case OMP_CLAUSE_TASKGROUP:
	  /* The following clauses are only added during OMP lowering; nested
	     function decomposition happens before that.  */
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE__SIMDUID_:
	case OMP_CLAUSE__SIMT_:
	  /* The following clauses are only allowed on OpenACC 'routine'
	     directives, not seen here.  */
	case OMP_CLAUSE_NOHOST:
	  /* Anything else.  */
	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  /* Second pass: walk the GIMPLE sequences attached to reduction,
     lastprivate and linear clauses, now that suppression is in place.  */
  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      /* Temporarily move the placeholders into this context so
		 the walk treats references to them as local.  */
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		  = info->context;
	      tree save_local_var_chain = info->new_local_var_chain;
	      info->new_local_var_chain = NULL;
	      gimple_seq *seq = &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause);
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info, seq);
	      if (info->new_local_var_chain)
		declare_vars (info->new_local_var_chain,
			      gimple_seq_first_stmt (*seq), false);
	      info->new_local_var_chain = NULL;
	      seq = &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause);
	      walk_body (convert_nonlocal_reference_stmt,
			 convert_nonlocal_reference_op, info, seq);
	      if (info->new_local_var_chain)
		declare_vars (info->new_local_var_chain,
			      gimple_seq_first_stmt (*seq), false);
	      info->new_local_var_chain = save_local_var_chain;
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		  = old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  {
	    tree save_local_var_chain = info->new_local_var_chain;
	    info->new_local_var_chain = NULL;
	    gimple_seq *seq;
	    if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LASTPRIVATE)
	      seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause);
	    else
	      seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause);
	    walk_body (convert_nonlocal_reference_stmt,
		       convert_nonlocal_reference_op, info, seq);
	    if (info->new_local_var_chain)
	      {
		/* New locals need a GIMPLE_BIND to be declared in; wrap
		   the sequence if it does not already start with one.  */
		gimple *g = gimple_seq_first_stmt (*seq);
		if (gimple_code (g) != GIMPLE_BIND)
		  {
		    g = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
		    *seq = NULL;
		    gimple_seq_add_stmt_without_update (seq, g);
		  }
		declare_vars (info->new_local_var_chain,
			      gimple_seq_first_stmt (*seq), false);
	      }
	    info->new_local_var_chain = save_local_var_chain;
	  }
	  break;

	default:
	  break;
	}

  return need_chain;
}
1619 
1620 /* Create nonlocal debug decls for nonlocal VLA array bounds.  */
1621 
1622 static void
note_nonlocal_vla_type(struct nesting_info * info,tree type)1623 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1624 {
1625   while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1626     type = TREE_TYPE (type);
1627 
1628   if (TYPE_NAME (type)
1629       && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1630       && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1631     type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1632 
1633   while (POINTER_TYPE_P (type)
1634 	 || TREE_CODE (type) == VECTOR_TYPE
1635 	 || TREE_CODE (type) == FUNCTION_TYPE
1636 	 || TREE_CODE (type) == METHOD_TYPE)
1637     type = TREE_TYPE (type);
1638 
1639   if (TREE_CODE (type) == ARRAY_TYPE)
1640     {
1641       tree domain, t;
1642 
1643       note_nonlocal_vla_type (info, TREE_TYPE (type));
1644       domain = TYPE_DOMAIN (type);
1645       if (domain)
1646 	{
1647 	  t = TYPE_MIN_VALUE (domain);
1648 	  if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1649 	      && decl_function_context (t) != info->context)
1650 	    get_nonlocal_debug_decl (info, t);
1651 	  t = TYPE_MAX_VALUE (domain);
1652 	  if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1653 	      && decl_function_context (t) != info->context)
1654 	    get_nonlocal_debug_decl (info, t);
1655 	}
1656     }
1657 }
1658 
1659 /* Callback for walk_gimple_stmt.  Rewrite all references to VAR and
1660    PARM_DECLs that belong to outer functions.  This handles statements
1661    that are not handled via the standard recursion done in
1662    walk_gimple_stmt.  STMT is the statement to examine, DATA is as in
1663    convert_nonlocal_reference_op.  Set *HANDLED_OPS_P to true if all the
1664    operands of STMT have been handled by this function.  */
1665 
1666 static tree
convert_nonlocal_reference_stmt(gimple_stmt_iterator * gsi,bool * handled_ops_p,struct walk_stmt_info * wi)1667 convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1668 				 struct walk_stmt_info *wi)
1669 {
1670   struct nesting_info *info = (struct nesting_info *) wi->info;
1671   tree save_local_var_chain;
1672   bitmap save_suppress;
1673   gimple *stmt = gsi_stmt (*gsi);
1674 
1675   switch (gimple_code (stmt))
1676     {
1677     case GIMPLE_GOTO:
1678       /* Don't walk non-local gotos for now.  */
1679       if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
1680 	{
1681 	  wi->val_only = true;
1682 	  wi->is_lhs = false;
1683 	  *handled_ops_p = false;
1684 	  return NULL_TREE;
1685 	}
1686       break;
1687 
1688     case GIMPLE_OMP_TEAMS:
1689       if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
1690 	{
1691 	  save_suppress = info->suppress_expansion;
1692 	  convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt),
1693 					wi);
1694 	  walk_body (convert_nonlocal_reference_stmt,
1695 		     convert_nonlocal_reference_op, info,
1696 		     gimple_omp_body_ptr (stmt));
1697 	  info->suppress_expansion = save_suppress;
1698 	  break;
1699 	}
1700       /* FALLTHRU */
1701 
1702     case GIMPLE_OMP_PARALLEL:
1703     case GIMPLE_OMP_TASK:
1704       save_suppress = info->suppress_expansion;
1705       if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1706 	                                wi))
1707 	{
1708 	  tree c, decl;
1709 	  decl = get_chain_decl (info);
1710 	  c = build_omp_clause (gimple_location (stmt),
1711 				OMP_CLAUSE_FIRSTPRIVATE);
1712 	  OMP_CLAUSE_DECL (c) = decl;
1713 	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1714 	  gimple_omp_taskreg_set_clauses (stmt, c);
1715 	}
1716 
1717       save_local_var_chain = info->new_local_var_chain;
1718       info->new_local_var_chain = NULL;
1719 
1720       walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1721 	         info, gimple_omp_body_ptr (stmt));
1722 
1723       if (info->new_local_var_chain)
1724 	declare_vars (info->new_local_var_chain,
1725 	              gimple_seq_first_stmt (gimple_omp_body (stmt)),
1726 		      false);
1727       info->new_local_var_chain = save_local_var_chain;
1728       info->suppress_expansion = save_suppress;
1729       break;
1730 
1731     case GIMPLE_OMP_FOR:
1732       save_suppress = info->suppress_expansion;
1733       convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1734       walk_gimple_omp_for (as_a <gomp_for *> (stmt),
1735 			   convert_nonlocal_reference_stmt,
1736 	  		   convert_nonlocal_reference_op, info);
1737       walk_body (convert_nonlocal_reference_stmt,
1738 	  	 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
1739       info->suppress_expansion = save_suppress;
1740       break;
1741 
1742     case GIMPLE_OMP_SECTIONS:
1743       save_suppress = info->suppress_expansion;
1744       convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1745       walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1746 	         info, gimple_omp_body_ptr (stmt));
1747       info->suppress_expansion = save_suppress;
1748       break;
1749 
1750     case GIMPLE_OMP_SINGLE:
1751       save_suppress = info->suppress_expansion;
1752       convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1753       walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1754 	         info, gimple_omp_body_ptr (stmt));
1755       info->suppress_expansion = save_suppress;
1756       break;
1757 
1758     case GIMPLE_OMP_SCOPE:
1759       save_suppress = info->suppress_expansion;
1760       convert_nonlocal_omp_clauses (gimple_omp_scope_clauses_ptr (stmt), wi);
1761       walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1762 		 info, gimple_omp_body_ptr (stmt));
1763       info->suppress_expansion = save_suppress;
1764       break;
1765 
1766     case GIMPLE_OMP_TASKGROUP:
1767       save_suppress = info->suppress_expansion;
1768       convert_nonlocal_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt), wi);
1769       walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1770 		 info, gimple_omp_body_ptr (stmt));
1771       info->suppress_expansion = save_suppress;
1772       break;
1773 
1774     case GIMPLE_OMP_TARGET:
1775       if (!is_gimple_omp_offloaded (stmt))
1776 	{
1777 	  save_suppress = info->suppress_expansion;
1778 	  convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1779 					wi);
1780 	  info->suppress_expansion = save_suppress;
1781 	  walk_body (convert_nonlocal_reference_stmt,
1782 		     convert_nonlocal_reference_op, info,
1783 		     gimple_omp_body_ptr (stmt));
1784 	  break;
1785 	}
1786       save_suppress = info->suppress_expansion;
1787       if (convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt),
1788 					wi))
1789 	{
1790 	  tree c, decl;
1791 	  decl = get_chain_decl (info);
1792 	  c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
1793 	  OMP_CLAUSE_DECL (c) = decl;
1794 	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
1795 	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
1796 	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
1797 	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
1798 	}
1799 
1800       save_local_var_chain = info->new_local_var_chain;
1801       info->new_local_var_chain = NULL;
1802 
1803       walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1804 		 info, gimple_omp_body_ptr (stmt));
1805 
1806       if (info->new_local_var_chain)
1807 	declare_vars (info->new_local_var_chain,
1808 		      gimple_seq_first_stmt (gimple_omp_body (stmt)),
1809 		      false);
1810       info->new_local_var_chain = save_local_var_chain;
1811       info->suppress_expansion = save_suppress;
1812       break;
1813 
1814     case GIMPLE_OMP_SECTION:
1815     case GIMPLE_OMP_MASTER:
1816     case GIMPLE_OMP_MASKED:
1817     case GIMPLE_OMP_ORDERED:
1818     case GIMPLE_OMP_SCAN:
1819       walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1820 	         info, gimple_omp_body_ptr (stmt));
1821       break;
1822 
1823     case GIMPLE_BIND:
1824       {
1825       gbind *bind_stmt = as_a <gbind *> (stmt);
1826 
1827       for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
1828 	if (TREE_CODE (var) == NAMELIST_DECL)
1829 	  {
1830 	    /* Adjust decls mentioned in NAMELIST_DECL.  */
1831 	    tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
1832 	    tree decl;
1833 	    unsigned int i;
1834 
1835 	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
1836 	      {
1837 		if (VAR_P (decl)
1838 		    && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1839 		  continue;
1840 		if (decl_function_context (decl) != info->context)
1841 		  CONSTRUCTOR_ELT (decls, i)->value
1842 		    = get_nonlocal_debug_decl (info, decl);
1843 	      }
1844 	  }
1845 
1846       *handled_ops_p = false;
1847       return NULL_TREE;
1848       }
1849     case GIMPLE_COND:
1850       wi->val_only = true;
1851       wi->is_lhs = false;
1852       *handled_ops_p = false;
1853       return NULL_TREE;
1854 
1855     case GIMPLE_ASSIGN:
1856       if (gimple_clobber_p (stmt))
1857 	{
1858 	  tree lhs = gimple_assign_lhs (stmt);
1859 	  if (DECL_P (lhs)
1860 	      && !(TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
1861 	      && decl_function_context (lhs) != info->context)
1862 	    {
1863 	      gsi_replace (gsi, gimple_build_nop (), true);
1864 	      break;
1865 	    }
1866 	}
1867       *handled_ops_p = false;
1868       return NULL_TREE;
1869 
1870     default:
1871       /* For every other statement that we are not interested in
1872 	 handling here, let the walker traverse the operands.  */
1873       *handled_ops_p = false;
1874       return NULL_TREE;
1875     }
1876 
1877   /* We have handled all of STMT operands, no need to traverse the operands.  */
1878   *handled_ops_p = true;
1879   return NULL_TREE;
1880 }
1881 
1882 
1883 /* A subroutine of convert_local_reference.  Create a local variable
1884    in the parent function with DECL_VALUE_EXPR set to reference the
1885    field in FRAME.  This is used both for debug info and in OMP
1886    lowering.  */
1887 
1888 static tree
get_local_debug_decl(struct nesting_info * info,tree decl,tree field)1889 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1890 {
1891   tree x, new_decl;
1892 
1893   tree *slot = &info->var_map->get_or_insert (decl);
1894   if (*slot)
1895     return *slot;
1896 
1897   /* Make sure frame_decl gets created.  */
1898   (void) get_frame_type (info);
1899   x = info->frame_decl;
1900   x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1901 
1902   new_decl = get_debug_decl (decl);
1903   DECL_CONTEXT (new_decl) = info->context;
1904 
1905   SET_DECL_VALUE_EXPR (new_decl, x);
1906   DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1907   *slot = new_decl;
1908 
1909   DECL_CHAIN (new_decl) = info->debug_var_chain;
1910   info->debug_var_chain = new_decl;
1911 
1912   /* Do not emit debug info twice.  */
1913   DECL_IGNORED_P (decl) = 1;
1914 
1915   return new_decl;
1916 }
1917 
1918 
/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.

   WI->val_only tracks whether the current context requires a simple
   value (forcing loads/stores through temporaries); WI->is_lhs tracks
   whether the reference is being stored to.  */

static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);

static tree
convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp, field, x;
  bool save_val_only;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (t != info->frame_decl && decl_function_context (t) == info->context)
	{
	  /* If we copied a pointer to the frame, then the original decl
	     is used unchanged in the parent function.  */
	  if (use_pointer_in_frame (t))
	    break;

	  /* No need to transform anything if no child references the
	     variable.  */
	  field = lookup_field_for_decl (info, t, NO_INSERT);
	  if (!field)
	    break;
	  wi->changed = true;

	  /* Decls recorded in suppress_expansion (by the OMP clause
	     handling) are replaced by a debug decl carrying a
	     DECL_VALUE_EXPR rather than an explicit frame access.  */
	  if (bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    x = get_local_debug_decl (info, t, field);
	  else
	    x = get_frame_field (info, info->context, field, &wi->gsi);

	  /* In a value-only context go through a temporary:
	     init_tmp_var for reads, save_tmp_var for stores.  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, x, &wi->gsi);
	      else
		x = init_tmp_var (info, x, &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case ADDR_EXPR:
      /* Walk the operand without the value-only requirement so that a
	 converted decl stays addressable (no temporary).  */
      save_val_only = wi->val_only;
      wi->val_only = false;
      wi->is_lhs = false;
      wi->changed = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;

      /* If we converted anything ... */
      if (wi->changed)
	{
	  tree save_context;

	  /* Then the frame decl is now addressable.  */
	  TREE_ADDRESSABLE (info->frame_decl) = 1;

	  save_context = current_function_decl;
	  current_function_decl = info->context;
	  recompute_tree_invariant_for_addr_expr (t);

	  /* If we are in a context where we only accept values, then
	     compute the address into a temporary.  */
	  if (save_val_only)
	    *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
				    t, &wi->gsi);
	  current_function_decl = save_context;
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  /* Only the variable-position operands (field offset, array
	     index/bounds/element size) need conversion here.  */
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
			 NULL);
	    }
	}
      /* Now convert the innermost base object in place.  */
      wi->val_only = false;
      walk_tree (tp, convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;
      break;

    case MEM_REF:
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
		 wi, NULL);
      /* We need to re-fold the MEM_REF as component references as
	 part of a ADDR_EXPR address are not allowed.  But we cannot
	 fold here, as the chain record type is not yet finalized.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
	  && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
	info->mem_refs->add (tp);
      wi->val_only = save_val_only;
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  /* For any other expression, reset to a value context and let
	     the walker recurse into the operands.  */
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
2070 
2071 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
2072 					  struct walk_stmt_info *);
2073 
/* Helper for convert_local_reference.  Convert all the references in
   the chain of clauses at *PCLAUSES.  WI is as in convert_local_reference.
   Returns true if any clause decl was rewritten to refer to the local
   frame; the caller is then responsible for making the frame object
   available to the construct.  */

static bool
convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_frame = false, need_stmts = false;
  tree clause, decl, *pdecl;
  int dummy;
  bitmap new_suppress;

  /* Decls rewritten below are recorded in NEW_SUPPRESS, which becomes
     info->suppress_expansion, so the walk of the construct body keeps
     using the debug decl instead of an explicit frame access.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      pdecl = NULL;
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  /* The clause decl may be a MEM_REF; look through the address
	     arithmetic to find the underlying decl.  */
	  if (TREE_CODE (OMP_CLAUSE_DECL (clause)) == MEM_REF)
	    {
	      pdecl = &TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0);
	      if (TREE_CODE (*pdecl) == POINTER_PLUS_EXPR)
		pdecl = &TREE_OPERAND (*pdecl, 0);
	      if (TREE_CODE (*pdecl) == INDIRECT_REF
		  || TREE_CODE (*pdecl) == ADDR_EXPR)
		pdecl = &TREE_OPERAND (*pdecl, 0);
	    }
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LINEAR:
	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_local_reference_op (&OMP_CLAUSE_LINEAR_STEP (clause), &dummy,
				      wi);
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_TO_DECLARE:
	case OMP_CLAUSE_LINK:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_HAS_DEVICE_ADDR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	case OMP_CLAUSE_DETACH:
	do_decl_clause:
	  /* Common path: replace a frame-resident clause decl with its
	     debug decl and suppress its expansion in the body.  */
	  if (pdecl == NULL)
	    pdecl = &OMP_CLAUSE_DECL (clause);
	  decl = *pdecl;
	  if (VAR_P (decl)
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) == info->context
	      && !use_pointer_in_frame (decl))
	    {
	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
	      if (field)
		{
		  if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
		    OMP_CLAUSE_SHARED_READONLY (clause) = 0;
		  bitmap_set_bit (new_suppress, DECL_UID (decl));
		  *pdecl = get_local_debug_decl (info, decl, field);
		  need_frame = true;
		}
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE_FILTER:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	  /* Several OpenACC clauses have optional arguments.  Check if they
	     are present.  */
	  if (OMP_CLAUSE_OPERAND (clause, 0))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					  &dummy, wi);
	    }

	  /* The gang clause accepts two arguments.  */
	  if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
	      && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
	    {
		wi->val_only = true;
		wi->is_lhs = false;
		/* NOTE(review): this calls convert_nonlocal_reference_op
		   inside the *local* clause walk — every other operand in
		   this function uses convert_local_reference_op.  Looks
		   like a copy-paste from convert_nonlocal_omp_clauses;
		   verify against upstream before changing.  */
		convert_nonlocal_reference_op
		  (&OMP_CLAUSE_GANG_STATIC_EXPR (clause), &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_DIST_SCHEDULE:
	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
					  &dummy, wi);
	    }
	  break;

	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	  if (OMP_CLAUSE_SIZE (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_local_reference_op (&OMP_CLAUSE_SIZE (clause),
					  &dummy, wi);
	    }
	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
	    goto do_decl_clause;
	  /* Non-decl map operand (e.g. an expression): convert references
	     inside it in place.  */
	  wi->val_only = true;
	  wi->is_lhs = false;
	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op,
		     wi, NULL);
	  break;

	case OMP_CLAUSE_ALIGNED:
	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_local_reference_op
		(&OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), &dummy, wi);
	    }
	  /* FALLTHRU */
	case OMP_CLAUSE_NONTEMPORAL:
	do_decl_clause_no_supp:
	  /* Like do_decl_clause, but don't add any suppression.  */
	  decl = OMP_CLAUSE_DECL (clause);
	  if (VAR_P (decl)
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) == info->context
	      && !use_pointer_in_frame (decl))
	    {
	      tree field = lookup_field_for_decl (info, decl, NO_INSERT);
	      if (field)
		{
		  OMP_CLAUSE_DECL (clause)
		    = get_local_debug_decl (info, decl, field);
		  need_frame = true;
		}
	    }
	  break;

	case OMP_CLAUSE_ALLOCATE:
	  if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_local_reference_op
		(&OMP_CLAUSE_ALLOCATE_ALLOCATOR (clause), &dummy, wi);
	    }
	  goto do_decl_clause_no_supp;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_BIND:
	case OMP_CLAUSE__CONDTEMP_:
	case OMP_CLAUSE__SCANTEMP_:
	  /* These clauses carry no decls or expressions to convert.  */
	  break;

	  /* The following clause belongs to the OpenACC cache directive, which
	     is discarded during gimplification.  */
	case OMP_CLAUSE__CACHE_:
	  /* The following clauses are only allowed in the OpenMP declare simd
	     directive, so not seen here.  */
	case OMP_CLAUSE_UNIFORM:
	case OMP_CLAUSE_INBRANCH:
	case OMP_CLAUSE_NOTINBRANCH:
	  /* The following clauses are only allowed on OpenMP cancel and
	     cancellation point directives, which at this point have already
	     been lowered into a function call.  */
	case OMP_CLAUSE_FOR:
	case OMP_CLAUSE_PARALLEL:
	case OMP_CLAUSE_SECTIONS:
	case OMP_CLAUSE_TASKGROUP:
	  /* The following clauses are only added during OMP lowering; nested
	     function decomposition happens before that.  */
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE__SIMDUID_:
	case OMP_CLAUSE__SIMT_:
	  /* The following clauses are only allowed on OpenACC 'routine'
	     directives, not seen here.  */
	case OMP_CLAUSE_NOHOST:
	  /* Anything else.  */
	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  /* Second pass: convert references inside the GIMPLE sequences attached
     to reduction, lastprivate and linear clauses, with the placeholder
     decls temporarily re-parented into this context.  */
  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		  = info->context;
	      walk_body (convert_local_reference_stmt,
			 convert_local_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
	      walk_body (convert_local_reference_stmt,
			 convert_local_reference_op, info,
			 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		  = old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  walk_body (convert_local_reference_stmt,
		     convert_local_reference_op, info,
		     &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
	  break;

	case OMP_CLAUSE_LINEAR:
	  walk_body (convert_local_reference_stmt,
		     convert_local_reference_op, info,
		     &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
	  break;

	default:
	  break;
	}

  return need_frame;
}
2375 
2376 
/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.  */

static tree
convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  char save_static_chain_added;
  bool frame_decl_added;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_TEAMS:
      /* Host teams (inside target) only need their clauses and body
	 walked; offloaded teams fall through to the taskreg path.  */
      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  save_suppress = info->suppress_expansion;
	  convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
	  walk_body (convert_local_reference_stmt, convert_local_reference_op,
		     info, gimple_omp_body_ptr (stmt));
	  info->suppress_expansion = save_suppress;
	  break;
	}
      /* FALLTHRU */

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      frame_decl_added = false;
      /* If a clause was rewritten to reference the frame, share the
	 frame object with the construct.  Bit 4 of static_chain_added
	 records that the frame is available inside.  */
      if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
	                             wi))
	{
	  tree c = build_omp_clause (gimple_location (stmt),
				     OMP_CLAUSE_SHARED);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	  info->static_chain_added |= 4;
	  frame_decl_added = true;
	}

      /* Walk the body with fresh bookkeeping so we can tell what the
	 body itself required.  */
      save_local_var_chain = info->new_local_var_chain;
      save_static_chain_added = info->static_chain_added;
      info->new_local_var_chain = NULL;
      info->static_chain_added = 0;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
	         gimple_omp_body_ptr (stmt));

      /* If the body referenced the frame but no clause was added above,
	 add the sharing clause now.  */
      if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
	{
	  tree c = build_omp_clause (gimple_location (stmt),
				     OMP_CLAUSE_SHARED);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  info->static_chain_added |= 4;
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
      /* Temporaries created while walking the body belong at its head.  */
      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
      walk_gimple_omp_for (as_a <gomp_for *> (stmt),
			   convert_local_reference_stmt,
			   convert_local_reference_op, info);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SCOPE:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_scope_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TASKGROUP:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (gimple_omp_taskgroup_clauses_ptr (stmt), wi);
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      /* Non-offloaded target regions only need their clauses and body
	 converted.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  save_suppress = info->suppress_expansion;
	  convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
	  info->suppress_expansion = save_suppress;
	  walk_body (convert_local_reference_stmt, convert_local_reference_op,
		     info, gimple_omp_body_ptr (stmt));
	  break;
	}
      save_suppress = info->suppress_expansion;
      frame_decl_added = false;
      /* Offloaded region: if the frame is referenced, it must be mapped
	 to the device rather than shared.  */
      if (convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi))
	{
	  tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	  info->static_chain_added |= 4;
	  frame_decl_added = true;
	}

      save_local_var_chain = info->new_local_var_chain;
      save_static_chain_added = info->static_chain_added;
      info->new_local_var_chain = NULL;
      info->static_chain_added = 0;

      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
		 gimple_omp_body_ptr (stmt));

      /* As in the taskreg case: add the map clause if only the body
	 turned out to need the frame.  */
      if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
	{
	  tree c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	  gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
	  info->static_chain_added |= 4;
	}

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
      walk_body (convert_local_reference_stmt, convert_local_reference_op,
		 info, gimple_omp_body_ptr (stmt));
      break;

    case GIMPLE_COND:
      /* Condition operands must be simple values.  */
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_ASSIGN:
      /* Drop clobbers of variables that were moved into the frame.  */
      if (gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  if (DECL_P (lhs)
	      && decl_function_context (lhs) == info->context
	      && !use_pointer_in_frame (lhs)
	      && lookup_field_for_decl (info, lhs, NO_INSERT))
	    {
	      gsi_replace (gsi, gimple_build_nop (), true);
	      break;
	    }
	}
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_BIND:
      for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
	   var;
	   var = DECL_CHAIN (var))
	if (TREE_CODE (var) == NAMELIST_DECL)
	  {
	    /* Adjust decls mentioned in NAMELIST_DECL.  */
	    tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
	    tree decl;
	    unsigned int i;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
	      {
		if (VAR_P (decl)
		    && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
		  continue;
		if (decl_function_context (decl) == info->context
		    && !use_pointer_in_frame (decl))
		  {
		    tree field = lookup_field_for_decl (info, decl, NO_INSERT);
		    if (field)
		      {
			CONSTRUCTOR_ELT (decls, i)->value
			  = get_local_debug_decl (info, decl, field);
		      }
		  }
	      }
	  }

      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Indicate that we have handled all the operands ourselves.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
2619 
2620 
/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
   that reference labels from outer functions.  The rewrite will be a
   call to __builtin_nonlocal_goto.  */

static tree
convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			   struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree label, new_label, target_context, x, field;
  gcall *call;
  gimple *stmt = gsi_stmt (*gsi);

  /* Only GIMPLE_GOTO statements are of interest here.  */
  if (gimple_code (stmt) != GIMPLE_GOTO)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Computed gotos (destination is not a LABEL_DECL) are left alone.  */
  label = gimple_goto_dest (stmt);
  if (TREE_CODE (label) != LABEL_DECL)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* A goto to a label defined in the current function is an ordinary
     goto and needs no rewriting.  */
  target_context = decl_function_context (label);
  if (target_context == info->context)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Walk up the nesting chain to the enclosing function that actually
     defines LABEL; that is where the receiver must be installed.  */
  for (i = info->outer; target_context != i->context; i = i->outer)
    continue;

  /* The original user label may also be use for a normal goto, therefore
     we must create a new label that will actually receive the abnormal
     control transfer.  This new label will be marked LABEL_NONLOCAL; this
     mark will trigger proper behavior in the cfg, as well as cause the
     (hairy target-specific) non-local goto receiver code to be generated
     when we expand rtl.  Enter this association into var_map so that we
     can insert the new label into the IL during a second pass.  */
  tree *slot = &i->var_map->get_or_insert (label);
  if (*slot == NULL)
    {
      new_label = create_artificial_label (UNKNOWN_LOCATION);
      DECL_NONLOCAL (new_label) = 1;
      *slot = new_label;
    }
  else
    new_label = *slot;

  /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field).  */
  field = get_nl_goto_field (i);
  x = get_frame_field (info, target_context, field, gsi);
  x = build_addr (x);
  x = gsi_gimplify_val (info, x, gsi);
  call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
			    2, build_addr (new_label), x);
  gsi_replace (gsi, call, false);

  /* We have handled all of STMT's operands, no need to keep going.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
2687 
2688 
/* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
   are referenced via nonlocal goto from a nested function.  The rewrite
   will involve installing a newly generated DECL_NONLOCAL label, and
   (potentially) a branch around the rtl gunk that is assumed to be
   attached to such a label.  */

static tree
convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			  struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree label, new_label;
  gimple_stmt_iterator tmp_gsi;
  glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));

  /* Only label statements are of interest here.  */
  if (!stmt)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  label = gimple_label_label (stmt);

  /* Only labels recorded in var_map by convert_nl_goto_reference are
     targets of a non-local goto and need a receiver label.  */
  tree *slot = info->var_map->get (label);
  if (!slot)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* If there's any possibility that the previous statement falls through,
     then we must branch around the new non-local label.  */
  tmp_gsi = wi->gsi;
  gsi_prev (&tmp_gsi);
  if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
    {
      /* Normal control flow skips over the receiver to the user label.  */
      gimple *stmt = gimple_build_goto (label);
      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
    }

  /* Install the DECL_NONLOCAL receiver label created during the goto
     rewriting pass, just before the original user label.  */
  new_label = (tree) *slot;
  stmt = gimple_build_label (new_label);
  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

  *handled_ops_p = true;
  return NULL_TREE;
}
2736 
2737 
/* Called via walk_function+walk_stmt, rewrite all references to addresses
   of nested functions that require the use of trampolines.  The rewrite
   will involve a reference a trampoline generated for the occasion.  */

static tree
convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree t = *tp, decl, target_context, x, builtin;
  bool descr;
  gcall *call;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case ADDR_EXPR:
      /* Build
	   T.1 = &CHAIN->tramp;
	   T.2 = __builtin_adjust_trampoline (T.1);
	   T.3 = (func_type)T.2;
      */

      decl = TREE_OPERAND (t, 0);
      if (TREE_CODE (decl) != FUNCTION_DECL)
	break;

      /* Only need to process nested functions.  */
      target_context = decl_function_context (decl);
      if (!target_context)
	break;

      /* If the nested function doesn't use a static chain, then
	 it doesn't need a trampoline.  */
      if (!DECL_STATIC_CHAIN (decl))
	break;

      /* If we don't want a trampoline, then don't build one.  */
      if (TREE_NO_TRAMPOLINE (t))
	break;

      /* Lookup the immediate parent of the callee, as that's where
	 we need to insert the trampoline.  */
      for (i = info; i->context != target_context; i = i->outer)
	continue;

      /* Decide whether to generate a descriptor or a trampoline. */
      descr = FUNC_ADDR_BY_DESCRIPTOR (t) && !flag_trampolines;

      /* Ensure a field for the trampoline/descriptor exists in the
	 parent's FRAME record.  */
      if (descr)
	x = lookup_descr_for_decl (i, decl, INSERT);
      else
	x = lookup_tramp_for_decl (i, decl, INSERT);

      /* Compute the address of the field holding the trampoline.  */
      x = get_frame_field (info, target_context, x, &wi->gsi);
      x = build_addr (x);
      x = gsi_gimplify_val (info, x, &wi->gsi);

      /* Do machine-specific ugliness.  Normally this will involve
	 computing extra alignment, but it can really be anything.  */
      if (descr)
	builtin = builtin_decl_implicit (BUILT_IN_ADJUST_DESCRIPTOR);
      else
	builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
      call = gimple_build_call (builtin, 1, x);
      x = init_tmp_var_with_call (info, &wi->gsi, call);

      /* Cast back to the proper function type.  */
      x = build1 (NOP_EXPR, TREE_TYPE (t), x);
      x = init_tmp_var (info, x, &wi->gsi);

      /* Replace the original &nested_fn with the adjusted pointer.  */
      *tp = x;
      break;

    default:
      /* Types and decls have no subtrees worth walking; everything else
	 may still contain an ADDR_EXPR deeper down.  */
      if (!IS_TYPE_OR_DECL_P (t))
	*walk_subtrees = 1;
      break;
    }

  return NULL_TREE;
}
2821 
2822 
/* Called via walk_function+walk_gimple_stmt, rewrite all references
   to addresses of nested functions that require the use of
   trampolines.  The rewrite will involve a reference a trampoline
   generated for the occasion.  */

static tree
convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      {
	/* Only walk call arguments, lest we generate trampolines for
	   direct calls.  */
	unsigned long i, nargs = gimple_call_num_args (stmt);
	for (i = 0; i < nargs; i++)
	  walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
		     wi, NULL);
	break;
      }

    case GIMPLE_OMP_TEAMS:
      /* Only a host teams construct is treated like parallel/task below;
	 others are just walked normally.  */
      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      goto do_parallel;

    case GIMPLE_OMP_TARGET:
      /* Non-offloaded target regions need no clause bookkeeping.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    do_parallel:
      {
	/* The body of these constructs is outlined into its own function,
	   so locals created and static-chain/frame uses discovered while
	   walking the body must be tracked separately, then surfaced as
	   data-sharing (or map) clauses on the directive.  */
	tree save_local_var_chain = info->new_local_var_chain;
	walk_gimple_op (stmt, convert_tramp_reference_op, wi);
	info->new_local_var_chain = NULL;
	char save_static_chain_added = info->static_chain_added;
	info->static_chain_added = 0;
	walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
		   info, gimple_omp_body_ptr (stmt));
	if (info->new_local_var_chain)
	  declare_vars (info->new_local_var_chain,
			gimple_seq_first_stmt (gimple_omp_body (stmt)),
			false);
	/* Bit 0 covers FRAME.*, bit 1 covers CHAIN.* (see the DECL
	   selection below).  */
	for (int i = 0; i < 2; i++)
	  {
	    tree c, decl;
	    if ((info->static_chain_added & (1 << i)) == 0)
	      continue;
	    decl = i ? get_chain_decl (info) : info->frame_decl;
	    /* Don't add CHAIN.* or FRAME.* twice.  */
	    for (c = gimple_omp_taskreg_clauses (stmt);
		 c;
		 c = OMP_CLAUSE_CHAIN (c))
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		  && OMP_CLAUSE_DECL (c) == decl)
		break;
	    if (c == NULL && gimple_code (stmt) != GIMPLE_OMP_TARGET)
	      {
		/* Parallel/task/teams: share FRAME, firstprivatize the
		   CHAIN pointer.  */
		c = build_omp_clause (gimple_location (stmt),
				      i ? OMP_CLAUSE_FIRSTPRIVATE
				      : OMP_CLAUSE_SHARED);
		OMP_CLAUSE_DECL (c) = decl;
		OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
		gimple_omp_taskreg_set_clauses (stmt, c);
	      }
	    else if (c == NULL)
	      {
		/* Offloaded target: map the object to the device instead.  */
		c = build_omp_clause (gimple_location (stmt),
				      OMP_CLAUSE_MAP);
		OMP_CLAUSE_DECL (c) = decl;
		OMP_CLAUSE_SET_MAP_KIND (c,
					 i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
		OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
		OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
		gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
					       c);
	      }
	  }
	/* Restore the outer context's bookkeeping, accumulating any
	   static-chain uses seen inside the region.  */
	info->new_local_var_chain = save_local_var_chain;
	info->static_chain_added |= save_static_chain_added;
      }
      break;

    default:
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
2927 
2928 
2929 
/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
   that reference nested functions to make sure that the static chain
   is set up properly for the call.  */

static tree
convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
                     struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree decl, target_context;
  char save_static_chain_added;
  int i;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* A chain already set up, or an indirect call, needs no work.  */
      if (gimple_call_chain (stmt))
	break;
      decl = gimple_call_fndecl (stmt);
      if (!decl)
	break;
      target_context = decl_function_context (decl);
      if (target_context && DECL_STATIC_CHAIN (decl))
	{
	  struct nesting_info *i = info;
	  while (i && i->context != target_context)
	    i = i->outer;
	  /* If none of the outer contexts is the target context, this means
	     that the function is called in a wrong context.  */
	  if (!i)
	    internal_error ("%s from %s called in %s",
			    IDENTIFIER_POINTER (DECL_NAME (decl)),
			    IDENTIFIER_POINTER (DECL_NAME (target_context)),
			    IDENTIFIER_POINTER (DECL_NAME (info->context)));

	  gimple_call_set_chain (as_a <gcall *> (stmt),
				 get_static_chain (info, target_context,
						   &wi->gsi));
	  /* Record whether we used our own frame (bit 0) or an outer
	     chain (bit 1), for OMP clause generation below.  */
	  info->static_chain_added |= (1 << (info->context != target_context));
	}
      break;

    case GIMPLE_OMP_TEAMS:
      /* Non-host teams need no clause surgery; just walk the body.  */
      if (!gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  walk_body (convert_gimple_call, NULL, info,
		     gimple_omp_body_ptr (stmt));
	  break;
	}
      /* FALLTHRU */

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      /* The region body is outlined, so any static-chain use inside it
	 must be reflected as a data-sharing clause on the directive.  */
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  /* Bit 0 covers FRAME.*, bit 1 covers CHAIN.*.  */
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_taskreg_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      /* Share FRAME, firstprivatize the CHAIN pointer.  */
	      c = build_omp_clause (gimple_location (stmt),
				    i ? OMP_CLAUSE_FIRSTPRIVATE
				    : OMP_CLAUSE_SHARED);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	      gimple_omp_taskreg_set_clauses (stmt, c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_TARGET:
      /* Non-offloaded target regions need no map clauses.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
	  break;
	}
      /* For offloaded regions, data must be mapped to the device rather
	 than shared, so build OMP_CLAUSE_MAP clauses instead.  */
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_target_clauses (stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      c = build_omp_clause (gimple_location (stmt), OMP_CLAUSE_MAP);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_SET_MAP_KIND (c, i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
	      OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
	      gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
					     c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      walk_body (convert_gimple_call, NULL, info,
	  	 gimple_omp_for_pre_body_ptr (stmt));
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
      /* These constructs are not outlined; just recurse into the body.  */
      walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
      break;

    default:
      /* Keep looking for other operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
3076 
/* Walk the nesting tree starting with ROOT.  Convert all trampolines and
   call expressions.  At the same time, determine if a nested function
   actually uses its static chain; if not, remember that.  */

static void
convert_all_function_calls (struct nesting_info *root)
{
  unsigned int chain_count = 0, old_chain_count, iter_count;
  struct nesting_info *n;

  /* First, optimistically clear static_chain for all decls that haven't
     used the static chain already for variable access.  But always create
     it if not optimizing.  This makes it possible to reconstruct the static
     nesting tree at run time and thus to resolve up-level references from
     within the debugger.  */
  FOR_EACH_NEST_INFO (n, root)
    {
      /* Thunks inherit their alias's setting; handled in the loop below.  */
      if (n->thunk_p)
	continue;
      tree decl = n->context;
      if (!optimize)
	{
	  if (n->inner)
	    (void) get_frame_type (n);
	  if (n->outer)
	    (void) get_chain_decl (n);
	}
      else if (!n->outer || (!n->chain_decl && !n->chain_field))
	{
	  DECL_STATIC_CHAIN (decl) = 0;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Guessing no static-chain for %s\n",
		     lang_hooks.decl_printable_name (decl, 2));
	}
      else
	DECL_STATIC_CHAIN (decl) = 1;
      chain_count += DECL_STATIC_CHAIN (decl);
    }

  /* Thunks take the same static-chain decision as the function they
     are an alias of.  */
  FOR_EACH_NEST_INFO (n, root)
    if (n->thunk_p)
      {
	tree decl = n->context;
	tree alias = thunk_info::get (cgraph_node::get (decl))->alias;
	DECL_STATIC_CHAIN (decl) = DECL_STATIC_CHAIN (alias);
      }

  /* Walk the functions and perform transformations.  Note that these
     transformations can induce new uses of the static chain, which in turn
     require re-examining all users of the decl.  */
  /* ??? It would make sense to try to use the call graph to speed this up,
     but the call graph hasn't really been built yet.  Even if it did, we
     would still need to iterate in this loop since address-of references
     wouldn't show up in the callgraph anyway.  */
  iter_count = 0;
  do
    {
      /* Iterate to a fixed point: stop when the total number of functions
	 using a static chain no longer changes.  */
      old_chain_count = chain_count;
      chain_count = 0;
      iter_count++;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fputc ('\n', dump_file);

      FOR_EACH_NEST_INFO (n, root)
	{
	  if (n->thunk_p)
	    continue;
	  tree decl = n->context;
	  walk_function (convert_tramp_reference_stmt,
			 convert_tramp_reference_op, n);
	  walk_function (convert_gimple_call, NULL, n);
	  chain_count += DECL_STATIC_CHAIN (decl);
	}

      /* Re-propagate the alias's decision to its thunks each round.  */
      FOR_EACH_NEST_INFO (n, root)
	if (n->thunk_p)
	  {
	    tree decl = n->context;
	    tree alias = thunk_info::get (cgraph_node::get (decl))->alias;
	    DECL_STATIC_CHAIN (decl) = DECL_STATIC_CHAIN (alias);
	  }
    }
  while (chain_count != old_chain_count);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
	     iter_count);
}
3166 
/* Callback data for the tree-inline copy_body machinery used when
   remapping variably-modified types in debug_var_chain.  CB must be the
   first member so that callbacks receiving a copy_body_data* can cast it
   back to this structure (see nesting_copy_decl).  */
struct nesting_copy_body_data
{
  copy_body_data cb;		/* Standard tree-inline callback state.  */
  struct nesting_info *root;	/* Nesting level whose var_map drives remapping.  */
};
3172 
/* A helper subroutine for debug_var_chain type remapping.  The copy_decl
   callback installed in nesting_copy_body_data: return the replacement
   for DECL when copying trees for the debug var chain.  */

static tree
nesting_copy_decl (tree decl, copy_body_data *id)
{
  /* ID is the cb member of a nesting_copy_body_data (see struct above).  */
  struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
  tree *slot = nid->root->var_map->get (decl);

  /* If the frame-building passes already chose a substitute, use it.  */
  if (slot)
    return (tree) *slot;

  /* A typedef is copied, and the type it was derived from is remapped
     too, so the debug info refers to remapped components throughout.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
    {
      tree new_decl = copy_decl_no_change (decl, id);
      DECL_ORIGINAL_TYPE (new_decl)
	= remap_type (DECL_ORIGINAL_TYPE (decl), id);
      return new_decl;
    }

  /* Variables, parameters and results not present in var_map are kept
     as-is; only decls of other kinds get fresh copies.  */
  if (VAR_P (decl)
      || TREE_CODE (decl) == PARM_DECL
      || TREE_CODE (decl) == RESULT_DECL)
    return decl;

  return copy_decl_no_change (decl, id);
}
3199 
3200 /* A helper function for remap_vla_decls.  See if *TP contains
3201    some remapped variables.  */
3202 
3203 static tree
contains_remapped_vars(tree * tp,int * walk_subtrees,void * data)3204 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
3205 {
3206   struct nesting_info *root = (struct nesting_info *) data;
3207   tree t = *tp;
3208 
3209   if (DECL_P (t))
3210     {
3211       *walk_subtrees = 0;
3212       tree *slot = root->var_map->get (t);
3213 
3214       if (slot)
3215 	return *slot;
3216     }
3217   return NULL;
3218 }
3219 
/* Remap VLA decls in BLOCK and subblocks if remapped variables are
   involved.  */

static void
remap_vla_decls (tree block, struct nesting_info *root)
{
  tree var, subblock, val, type;
  struct nesting_copy_body_data id;

  /* Process nested lexical blocks first.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = BLOCK_CHAIN (subblock))
    remap_vla_decls (subblock, root);

  /* First pass: cheap scan to see whether any VLA-related decl in this
     block actually involves a remapped variable, so we can avoid setting
     up the copy-body machinery when nothing needs remapping.  */
  for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
    if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
      {
	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	/* Only *ptr-style value expressions of variably modified type
	   are VLA artifacts of interest.  */
	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	if (root->var_map->get (TREE_OPERAND (val, 0))
	    || walk_tree (&type, contains_remapped_vars, root, NULL))
	  break;
      }

  /* VAR is non-null iff the scan above found something to remap.  */
  if (var == NULL_TREE)
    return;

  /* Set up a copy_body context whose copy_decl hook consults
     root->var_map (see nesting_copy_decl).  */
  memset (&id, 0, sizeof (id));
  id.cb.copy_decl = nesting_copy_decl;
  id.cb.decl_map = new hash_map<tree, tree>;
  id.root = root;

  /* Second pass: starting from the first affected decl, remap the types
     and value expressions in place.  */
  for (; var; var = DECL_CHAIN (var))
    if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
      {
	struct nesting_info *i;
	tree newt, context;

	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (!(TREE_CODE (val) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && variably_modified_type_p (type, NULL)))
	  continue;

	tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
	if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
	  continue;

	/* Find the nesting level that owns VAR; decls from other,
	   unrelated contexts are left alone.  */
	context = decl_function_context (var);
	for (i = root; i; i = i->outer)
	  if (i->context == context)
	    break;

	if (i == NULL)
	  continue;

	/* Fully expand value expressions.  This avoids having debug variables
	   only referenced from them and that can be swept during GC.  */
        if (slot)
	  {
	    tree t = (tree) *slot;
	    gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
	    val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
	  }

	id.cb.src_fn = i->context;
	id.cb.dst_fn = i->context;
	id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

	/* Remap VAR's variably modified type, then strip matching levels
	   of unnamed pointer types so that a shared named inner type's
	   TYPE_DECL can also be remapped below.  */
	TREE_TYPE (var) = newt = remap_type (type, &id.cb);
	while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
	  {
	    newt = TREE_TYPE (newt);
	    type = TREE_TYPE (type);
	  }
	if (TYPE_NAME (newt)
	    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
	    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
	    && newt != type
	    && TYPE_NAME (newt) == TYPE_NAME (type))
	  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);

	/* Finally rewrite the value expression itself and install it if
	   anything changed.  */
	walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
	if (val != DECL_VALUE_EXPR (var))
	  SET_DECL_VALUE_EXPR (var, val);
      }

  delete id.cb.decl_map;
}
3317 
3318 /* Fixup VLA decls in BLOCK and subblocks if remapped variables are
3319    involved.  */
3320 
3321 static void
fixup_vla_decls(tree block)3322 fixup_vla_decls (tree block)
3323 {
3324   for (tree var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
3325     if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
3326       {
3327 	tree val = DECL_VALUE_EXPR (var);
3328 
3329 	if (!(TREE_CODE (val) == INDIRECT_REF
3330 	      && VAR_P (TREE_OPERAND (val, 0))
3331 	      && DECL_HAS_VALUE_EXPR_P (TREE_OPERAND (val, 0))))
3332 	  continue;
3333 
3334 	/* Fully expand value expressions.  This avoids having debug variables
3335 	   only referenced from them and that can be swept during GC.  */
3336 	val = build1 (INDIRECT_REF, TREE_TYPE (val),
3337 		      DECL_VALUE_EXPR (TREE_OPERAND (val, 0)));
3338 	SET_DECL_VALUE_EXPR (var, val);
3339       }
3340 
3341   for (tree sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3342     fixup_vla_decls (sub);
3343 }
3344 
3345 /* Fold the MEM_REF *E.  */
3346 bool
fold_mem_refs(tree * const & e,void * data ATTRIBUTE_UNUSED)3347 fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED)
3348 {
3349   tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
3350   *ref_p = fold (*ref_p);
3351   return true;
3352 }
3353 
3354 /* Given DECL, a nested function, build an initialization call for FIELD,
3355    the trampoline or descriptor for DECL, using FUNC as the function.  */
3356 
3357 static gcall *
build_init_call_stmt(struct nesting_info * info,tree decl,tree field,tree func)3358 build_init_call_stmt (struct nesting_info *info, tree decl, tree field,
3359 		      tree func)
3360 {
3361   tree arg1, arg2, arg3, x;
3362 
3363   gcc_assert (DECL_STATIC_CHAIN (decl));
3364   arg3 = build_addr (info->frame_decl);
3365 
3366   arg2 = build_addr (decl);
3367 
3368   x = build3 (COMPONENT_REF, TREE_TYPE (field),
3369 	      info->frame_decl, field, NULL_TREE);
3370   arg1 = build_addr (x);
3371 
3372   return gimple_build_call (func, 3, arg1, arg2, arg3);
3373 }
3374 
3375 /* Do "everything else" to clean up or complete state collected by the various
3376    walking passes -- create a field to hold the frame base address, lay out the
3377    types and decls, generate code to initialize the frame decl, store critical
3378    expressions in the struct function for rtl to find.  */
3379 
3380 static void
finalize_nesting_tree_1(struct nesting_info * root)3381 finalize_nesting_tree_1 (struct nesting_info *root)
3382 {
3383   gimple_seq stmt_list = NULL;
3384   gimple *stmt;
3385   tree context = root->context;
3386   struct function *sf;
3387 
3388   if (root->thunk_p)
3389     return;
3390 
3391   /* If we created a non-local frame type or decl, we need to lay them
3392      out at this time.  */
3393   if (root->frame_type)
3394     {
3395       /* Debugging information needs to compute the frame base address of the
3396 	 parent frame out of the static chain from the nested frame.
3397 
3398 	 The static chain is the address of the FRAME record, so one could
3399 	 imagine it would be possible to compute the frame base address just
3400 	 adding a constant offset to this address.  Unfortunately, this is not
3401 	 possible: if the FRAME object has alignment constraints that are
3402 	 stronger than the stack, then the offset between the frame base and
3403 	 the FRAME object will be dynamic.
3404 
3405 	 What we do instead is to append a field to the FRAME object that holds
3406 	 the frame base address: then debug info just has to fetch this
3407 	 field.  */
3408 
3409       /* Debugging information will refer to the CFA as the frame base
3410 	 address: we will do the same here.  */
3411       const tree frame_addr_fndecl
3412         = builtin_decl_explicit (BUILT_IN_DWARF_CFA);
3413 
3414       /* Create a field in the FRAME record to hold the frame base address for
3415 	 this stack frame.  Since it will be used only by the debugger, put it
3416 	 at the end of the record in order not to shift all other offsets.  */
3417       tree fb_decl = make_node (FIELD_DECL);
3418 
3419       DECL_NAME (fb_decl) = get_identifier ("FRAME_BASE.PARENT");
3420       TREE_TYPE (fb_decl) = ptr_type_node;
3421       TREE_ADDRESSABLE (fb_decl) = 1;
3422       DECL_CONTEXT (fb_decl) = root->frame_type;
3423       TYPE_FIELDS (root->frame_type) = chainon (TYPE_FIELDS (root->frame_type),
3424 						fb_decl);
3425 
3426       /* In some cases the frame type will trigger the -Wpadded warning.
3427 	 This is not helpful; suppress it. */
3428       int save_warn_padded = warn_padded;
3429       warn_padded = 0;
3430       layout_type (root->frame_type);
3431       warn_padded = save_warn_padded;
3432       layout_decl (root->frame_decl, 0);
3433 
3434       /* Initialize the frame base address field.  If the builtin we need is
3435 	 not available, set it to NULL so that debugging information does not
3436 	 reference junk.  */
3437       tree fb_ref = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
3438 			    root->frame_decl, fb_decl, NULL_TREE);
3439       tree fb_tmp;
3440 
3441       if (frame_addr_fndecl != NULL_TREE)
3442 	{
3443 	  gcall *fb_gimple = gimple_build_call (frame_addr_fndecl, 1,
3444 						integer_zero_node);
3445 	  gimple_stmt_iterator gsi = gsi_last (stmt_list);
3446 
3447 	  fb_tmp = init_tmp_var_with_call (root, &gsi, fb_gimple);
3448 	}
3449       else
3450 	fb_tmp = build_int_cst (TREE_TYPE (fb_ref), 0);
3451       gimple_seq_add_stmt (&stmt_list,
3452 			   gimple_build_assign (fb_ref, fb_tmp));
3453 
3454       declare_vars (root->frame_decl,
3455 		    gimple_seq_first_stmt (gimple_body (context)), true);
3456     }
3457 
3458   /* If any parameters were referenced non-locally, then we need to insert
3459      a copy or a pointer.  */
3460   if (root->any_parm_remapped)
3461     {
3462       tree p;
3463       for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
3464 	{
3465 	  tree field, x, y;
3466 
3467 	  field = lookup_field_for_decl (root, p, NO_INSERT);
3468 	  if (!field)
3469 	    continue;
3470 
3471 	  if (use_pointer_in_frame (p))
3472 	    x = build_addr (p);
3473 	  else
3474 	    x = p;
3475 
3476 	  /* If the assignment is from a non-register the stmt is
3477 	     not valid gimple.  Make it so by using a temporary instead.  */
3478 	  if (!is_gimple_reg (x)
3479 	      && is_gimple_reg_type (TREE_TYPE (x)))
3480 	    {
3481 	      gimple_stmt_iterator gsi = gsi_last (stmt_list);
3482 	      x = init_tmp_var (root, x, &gsi);
3483 	    }
3484 
3485 	  y = build3 (COMPONENT_REF, TREE_TYPE (field),
3486 		      root->frame_decl, field, NULL_TREE);
3487 	  stmt = gimple_build_assign (y, x);
3488 	  gimple_seq_add_stmt (&stmt_list, stmt);
3489 	}
3490     }
3491 
3492   /* If a chain_field was created, then it needs to be initialized
3493      from chain_decl.  */
3494   if (root->chain_field)
3495     {
3496       tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
3497 		       root->frame_decl, root->chain_field, NULL_TREE);
3498       stmt = gimple_build_assign (x, get_chain_decl (root));
3499       gimple_seq_add_stmt (&stmt_list, stmt);
3500     }
3501 
3502   /* If trampolines were created, then we need to initialize them.  */
3503   if (root->any_tramp_created)
3504     {
3505       struct nesting_info *i;
3506       for (i = root->inner; i ; i = i->next)
3507 	{
3508 	  tree field, x;
3509 
3510 	  field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
3511 	  if (!field)
3512 	    continue;
3513 
3514 	  x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
3515 	  stmt = build_init_call_stmt (root, i->context, field, x);
3516 	  gimple_seq_add_stmt (&stmt_list, stmt);
3517 	}
3518     }
3519 
3520   /* If descriptors were created, then we need to initialize them.  */
3521   if (root->any_descr_created)
3522     {
3523       struct nesting_info *i;
3524       for (i = root->inner; i ; i = i->next)
3525 	{
3526 	  tree field, x;
3527 
3528 	  field = lookup_descr_for_decl (root, i->context, NO_INSERT);
3529 	  if (!field)
3530 	    continue;
3531 
3532 	  x = builtin_decl_implicit (BUILT_IN_INIT_DESCRIPTOR);
3533 	  stmt = build_init_call_stmt (root, i->context, field, x);
3534 	  gimple_seq_add_stmt (&stmt_list, stmt);
3535 	}
3536     }
3537 
3538   /* If we created initialization statements, insert them.  */
3539   if (stmt_list)
3540     {
3541       gbind *bind;
3542       annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
3543       bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
3544       gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
3545       gimple_bind_set_body (bind, stmt_list);
3546     }
3547 
3548   /* If a chain_decl was created, then it needs to be registered with
3549      struct function so that it gets initialized from the static chain
3550      register at the beginning of the function.  */
3551   sf = DECL_STRUCT_FUNCTION (root->context);
3552   sf->static_chain_decl = root->chain_decl;
3553 
3554   /* Similarly for the non-local goto save area.  */
3555   if (root->nl_goto_field)
3556     {
3557       sf->nonlocal_goto_save_area
3558 	= get_frame_field (root, context, root->nl_goto_field, NULL);
3559       sf->has_nonlocal_label = 1;
3560     }
3561 
3562   /* Make sure all new local variables get inserted into the
3563      proper BIND_EXPR.  */
3564   if (root->new_local_var_chain)
3565     declare_vars (root->new_local_var_chain,
3566 		  gimple_seq_first_stmt (gimple_body (root->context)),
3567 		  false);
3568 
3569   if (root->debug_var_chain)
3570     {
3571       tree debug_var;
3572       gbind *scope;
3573 
3574       remap_vla_decls (DECL_INITIAL (root->context), root);
3575 
3576       for (debug_var = root->debug_var_chain; debug_var;
3577 	   debug_var = DECL_CHAIN (debug_var))
3578 	if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
3579 	  break;
3580 
3581       /* If there are any debug decls with variable length types,
3582 	 remap those types using other debug_var_chain variables.  */
3583       if (debug_var)
3584 	{
3585 	  struct nesting_copy_body_data id;
3586 
3587 	  memset (&id, 0, sizeof (id));
3588 	  id.cb.copy_decl = nesting_copy_decl;
3589 	  id.cb.decl_map = new hash_map<tree, tree>;
3590 	  id.root = root;
3591 
3592 	  for (; debug_var; debug_var = DECL_CHAIN (debug_var))
3593 	    if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
3594 	      {
3595 		tree type = TREE_TYPE (debug_var);
3596 		tree newt, t = type;
3597 		struct nesting_info *i;
3598 
3599 		for (i = root; i; i = i->outer)
3600 		  if (variably_modified_type_p (type, i->context))
3601 		    break;
3602 
3603 		if (i == NULL)
3604 		  continue;
3605 
3606 		id.cb.src_fn = i->context;
3607 		id.cb.dst_fn = i->context;
3608 		id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
3609 
3610 		TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
3611 		while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
3612 		  {
3613 		    newt = TREE_TYPE (newt);
3614 		    t = TREE_TYPE (t);
3615 		  }
3616 		if (TYPE_NAME (newt)
3617 		    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
3618 		    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
3619 		    && newt != t
3620 		    && TYPE_NAME (newt) == TYPE_NAME (t))
3621 		  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
3622 	      }
3623 
3624 	  delete id.cb.decl_map;
3625 	}
3626 
3627       scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
3628       if (gimple_bind_block (scope))
3629 	declare_vars (root->debug_var_chain, scope, true);
3630       else
3631 	BLOCK_VARS (DECL_INITIAL (root->context))
3632 	  = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
3633 		     root->debug_var_chain);
3634     }
3635   else
3636     fixup_vla_decls (DECL_INITIAL (root->context));
3637 
3638   /* Fold the rewritten MEM_REF trees.  */
3639   root->mem_refs->traverse<void *, fold_mem_refs> (NULL);
3640 
3641   /* Dump the translated tree function.  */
3642   if (dump_file)
3643     {
3644       fputs ("\n\n", dump_file);
3645       dump_function_to_file (root->context, dump_file, dump_flags);
3646     }
3647 }
3648 
3649 static void
finalize_nesting_tree(struct nesting_info * root)3650 finalize_nesting_tree (struct nesting_info *root)
3651 {
3652   struct nesting_info *n;
3653   FOR_EACH_NEST_INFO (n, root)
3654     finalize_nesting_tree_1 (n);
3655 }
3656 
3657 /* Unnest the nodes and pass them to cgraph.  */
3658 
3659 static void
unnest_nesting_tree_1(struct nesting_info * root)3660 unnest_nesting_tree_1 (struct nesting_info *root)
3661 {
3662   struct cgraph_node *node = cgraph_node::get (root->context);
3663 
3664   /* For nested functions update the cgraph to reflect unnesting.
3665      We also delay finalizing of these functions up to this point.  */
3666   if (nested_function_info::get (node)->origin)
3667     {
3668        unnest_function (node);
3669        if (!root->thunk_p)
3670 	 cgraph_node::finalize_function (root->context, true);
3671     }
3672 }
3673 
3674 static void
unnest_nesting_tree(struct nesting_info * root)3675 unnest_nesting_tree (struct nesting_info *root)
3676 {
3677   struct nesting_info *n;
3678   FOR_EACH_NEST_INFO (n, root)
3679     unnest_nesting_tree_1 (n);
3680 }
3681 
3682 /* Free the data structures allocated during this pass.  */
3683 
3684 static void
free_nesting_tree(struct nesting_info * root)3685 free_nesting_tree (struct nesting_info *root)
3686 {
3687   struct nesting_info *node, *next;
3688 
3689   node = iter_nestinfo_start (root);
3690   do
3691     {
3692       next = iter_nestinfo_next (node);
3693       delete node->var_map;
3694       delete node->field_map;
3695       delete node->mem_refs;
3696       free (node);
3697       node = next;
3698     }
3699   while (node);
3700 }
3701 
3702 /* Gimplify a function and all its nested functions.  */
3703 static void
gimplify_all_functions(struct cgraph_node * root)3704 gimplify_all_functions (struct cgraph_node *root)
3705 {
3706   struct cgraph_node *iter;
3707   if (!gimple_body (root->decl))
3708     gimplify_function_tree (root->decl);
3709   for (iter = first_nested_function (root); iter;
3710        iter = next_nested_function (iter))
3711     if (!iter->thunk)
3712       gimplify_all_functions (iter);
3713 }
3714 
3715 /* Main entry point for this pass.  Process FNDECL and all of its nested
3716    subroutines and turn them into something less tightly bound.  */
3717 
3718 void
lower_nested_functions(tree fndecl)3719 lower_nested_functions (tree fndecl)
3720 {
3721   struct cgraph_node *cgn;
3722   struct nesting_info *root;
3723 
3724   /* If there are no nested functions, there's nothing to do.  */
3725   cgn = cgraph_node::get (fndecl);
3726   if (!first_nested_function (cgn))
3727     return;
3728 
3729   gimplify_all_functions (cgn);
3730 
3731   set_dump_file (dump_begin (TDI_nested, &dump_flags));
3732   if (dump_file)
3733     fprintf (dump_file, "\n;; Function %s\n\n",
3734 	     lang_hooks.decl_printable_name (fndecl, 2));
3735 
3736   bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
3737   root = create_nesting_tree (cgn);
3738 
3739   walk_all_functions (convert_nonlocal_reference_stmt,
3740                       convert_nonlocal_reference_op,
3741 		      root);
3742   walk_all_functions (convert_local_reference_stmt,
3743                       convert_local_reference_op,
3744 		      root);
3745   walk_all_functions (convert_nl_goto_reference, NULL, root);
3746   walk_all_functions (convert_nl_goto_receiver, NULL, root);
3747 
3748   convert_all_function_calls (root);
3749   finalize_nesting_tree (root);
3750   unnest_nesting_tree (root);
3751 
3752   free_nesting_tree (root);
3753   bitmap_obstack_release (&nesting_info_bitmap_obstack);
3754 
3755   if (dump_file)
3756     {
3757       dump_end (TDI_nested, dump_file);
3758       set_dump_file (NULL);
3759     }
3760 }
3761 
3762 #include "gt-tree-nested.h"
3763