1 /* Nested function decomposition for trees.
2 Copyright (C) 2004, 2005, 2006 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "function.h"
29 #include "tree-dump.h"
30 #include "tree-inline.h"
31 #include "tree-gimple.h"
32 #include "tree-iterator.h"
33 #include "tree-flow.h"
34 #include "cgraph.h"
35 #include "expr.h"
36 #include "langhooks.h"
37 #include "ggc.h"
38
39
40 /* The object of this pass is to lower the representation of a set of nested
41 functions in order to expose all of the gory details of the various
42 nonlocal references. We want to do this sooner rather than later, in
43 order to give us more freedom in emitting all of the functions in question.
44
45 Back in olden times, when gcc was young, we developed an insanely
46 complicated scheme whereby variables which were referenced nonlocally
47 were forced to live in the stack of the declaring function, and then
48 the nested functions magically discovered where these variables were
49 placed. In order for this scheme to function properly, it required
50 that the outer function be partially expanded, then we switch to
51 compiling the inner function, and once done with those we switch back
52 to compiling the outer function. Such delicate ordering requirements
53 make it difficult to do whole translation unit optimizations
54 involving such functions.
55
56 The implementation here is much more direct. Everything that can be
57 referenced by an inner function is a member of an explicitly created
58 structure herein called the "nonlocal frame struct". The incoming
59 static chain for a nested function is a pointer to this struct in
60 the parent. In this way, we settle on known offsets from a known
61 base, and so are decoupled from the logic that places objects in the
62 function's stack frame. More importantly, we don't have to wait for
63 that to happen -- since the compilation of the inner function is no
64 longer tied to a real stack frame, the nonlocal frame struct can be
65 allocated anywhere. Which means that the outer function is now
66 inlinable.
67
68 Theory of operation here is very simple. Iterate over all the
69 statements in all the functions (depth first) several times,
70 allocating structures and fields on demand. In general we want to
71 examine inner functions first, so that we can avoid making
72 unnecessary changes to outer functions.
73
74 The order of the passes matters a bit, in that later passes will be
75 skipped if it is discovered that the functions don't actually interact
76 at all. That is, they're nested in the lexical sense but could have
77 been written as independent functions without change. */
78
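/* As an illustration only (the field and variable names here are invented;
   the real names are generated by the code below, and the rewriting is
   performed on GIMPLE rather than on source), a nested function such as

       int foo (int a)
       {
         int bar (void) { return a; }
         return bar ();
       }

   is conceptually lowered so that `a' lives in an explicit frame object
   and `bar' reaches it through its static chain:

       struct FRAME_foo { int a; };

       int bar (struct FRAME_foo *chain) { return chain->a; }

       int foo (int a)
       {
         struct FRAME_foo FRAME;
         FRAME.a = a;
         return bar (&FRAME);
       }  */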
79
80 struct var_map_elt GTY(())
81 {
82 tree old;
83 tree new;
84 };
85
86 struct nesting_info GTY ((chain_next ("%h.next")))
87 {
88 struct nesting_info *outer;
89 struct nesting_info *inner;
90 struct nesting_info *next;
91
92 htab_t GTY ((param_is (struct var_map_elt))) field_map;
93 htab_t GTY ((param_is (struct var_map_elt))) var_map;
94 bitmap suppress_expansion;
95
96 tree context;
97 tree new_local_var_chain;
98 tree debug_var_chain;
99 tree frame_type;
100 tree frame_decl;
101 tree chain_field;
102 tree chain_decl;
103 tree nl_goto_field;
104
105 bool any_parm_remapped;
106 bool any_tramp_created;
107 char static_chain_added;
108 };
109
110
111 /* Hashing and equality functions for nesting_info->var_map. */
112
113 static hashval_t
114 var_map_hash (const void *x)
115 {
116 const struct var_map_elt *a = (const struct var_map_elt *) x;
117 return htab_hash_pointer (a->old);
118 }
119
120 static int
121 var_map_eq (const void *x, const void *y)
122 {
123 const struct var_map_elt *a = (const struct var_map_elt *) x;
124 const struct var_map_elt *b = (const struct var_map_elt *) y;
125 return a->old == b->old;
126 }
127
128 /* We're working in so many different function contexts simultaneously
129 that create_tmp_var is dangerous. Prevent mishap. */
130 #define create_tmp_var cant_use_create_tmp_var_here_dummy
131
132 /* Like create_tmp_var, except record the variable for registration at
133 the given nesting level. */
134
135 static tree
136 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
137 {
138 tree tmp_var;
139
140 /* If the type is of variable size or a type which must be created by the
141 frontend, something is wrong. Note that we explicitly allow
142 incomplete types here, since we create them ourselves. */
143 gcc_assert (!TREE_ADDRESSABLE (type));
144 gcc_assert (!TYPE_SIZE_UNIT (type)
145 || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
146
147 tmp_var = create_tmp_var_raw (type, prefix);
148 DECL_CONTEXT (tmp_var) = info->context;
149 TREE_CHAIN (tmp_var) = info->new_local_var_chain;
150 DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
151 if (TREE_CODE (type) == COMPLEX_TYPE)
152 DECL_COMPLEX_GIMPLE_REG_P (tmp_var) = 1;
153
154 info->new_local_var_chain = tmp_var;
155
156 return tmp_var;
157 }
158
159 /* Take the address of EXP to be used within function CONTEXT.
160 Mark it for addressability as necessary. */
161
162 tree
163 build_addr (tree exp, tree context)
164 {
165 tree base = exp;
166 tree save_context;
167 tree retval;
168
169 while (handled_component_p (base))
170 base = TREE_OPERAND (base, 0);
171
172 if (DECL_P (base))
173 TREE_ADDRESSABLE (base) = 1;
174
175 /* Building the ADDR_EXPR will compute a set of properties for
176 that ADDR_EXPR. Those properties are unfortunately context-specific,
177 i.e. they are dependent on CURRENT_FUNCTION_DECL.
178
179 Temporarily set CURRENT_FUNCTION_DECL to the desired context,
180 build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL. That
181 way the properties for the ADDR_EXPR are computed properly. */
182 save_context = current_function_decl;
183 current_function_decl = context;
184 retval = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
185 current_function_decl = save_context;
186 return retval;
187 }
188
189 /* Insert FIELD into TYPE, sorted by alignment requirements. */
190
191 void
192 insert_field_into_struct (tree type, tree field)
193 {
194 tree *p;
195
196 DECL_CONTEXT (field) = type;
197
198 for (p = &TYPE_FIELDS (type); *p ; p = &TREE_CHAIN (*p))
199 if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
200 break;
201
202 TREE_CHAIN (field) = *p;
203 *p = field;
204 }
205
206 /* Build or return the RECORD_TYPE that describes the frame state that is
207 shared between INFO->CONTEXT and its nested functions. This record will
208 not be complete until finalize_nesting_tree; up until that point we'll
209 be adding fields as necessary.
210
211 We also build the DECL that represents this frame in the function. */
212
213 static tree
214 get_frame_type (struct nesting_info *info)
215 {
216 tree type = info->frame_type;
217 if (!type)
218 {
219 char *name;
220
221 type = make_node (RECORD_TYPE);
222
223 name = concat ("FRAME.",
224 IDENTIFIER_POINTER (DECL_NAME (info->context)),
225 NULL);
226 TYPE_NAME (type) = get_identifier (name);
227 free (name);
228
229 info->frame_type = type;
230 info->frame_decl = create_tmp_var_for (info, type, "FRAME");
231
232 /* ??? Always make it addressable for now, since it is meant to
233 be pointed to by the static chain pointer. This pessimizes
234 when it turns out that no static chains are needed because
235 the nested functions referencing non-local variables are not
236 reachable, but the true pessimization is to create the non-
237 local frame structure in the first place. */
238 TREE_ADDRESSABLE (info->frame_decl) = 1;
239 }
240 return type;
241 }
242
243 /* Return true if DECL should be referenced by pointer in the non-local
244 frame structure. */
245
246 static bool
247 use_pointer_in_frame (tree decl)
248 {
249 if (TREE_CODE (decl) == PARM_DECL)
250 {
251 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
252 sized decls, and inefficient to copy large aggregates. Don't bother
253 moving anything but scalar variables. */
254 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
255 }
256 else
257 {
258 /* Variable sized types make things "interesting" in the frame. */
259 return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
260 }
261 }
262
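/* For illustration (a hypothetical example, not produced verbatim by this
   file): given

       void f (struct big s, int i)
       {
         void g (void) { use (s, i); }
         ...
       }

   the scalar parameter `i' is copied into the frame struct by value,
   while the aggregate `s' is represented by a pointer field, so that no
   large (or variable-sized) copy ever needs to be made.  */
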
263 /* Given DECL, a non-locally accessed variable, find or create a field
264 in the non-local frame structure for the given nesting context. */
265
266 static tree
267 lookup_field_for_decl (struct nesting_info *info, tree decl,
268 enum insert_option insert)
269 {
270 struct var_map_elt *elt, dummy;
271 void **slot;
272 tree field;
273
274 dummy.old = decl;
275 slot = htab_find_slot (info->field_map, &dummy, insert);
276 if (!slot)
277 {
278 gcc_assert (insert != INSERT);
279 return NULL;
280 }
281 elt = (struct var_map_elt *) *slot;
282
283 if (!elt && insert == INSERT)
284 {
285 field = make_node (FIELD_DECL);
286 DECL_NAME (field) = DECL_NAME (decl);
287
288 if (use_pointer_in_frame (decl))
289 {
290 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
291 DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
292 DECL_NONADDRESSABLE_P (field) = 1;
293 }
294 else
295 {
296 TREE_TYPE (field) = TREE_TYPE (decl);
297 DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
298 DECL_ALIGN (field) = DECL_ALIGN (decl);
299 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
300 TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
301 DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
302 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
303 }
304
305 insert_field_into_struct (get_frame_type (info), field);
306
307 elt = GGC_NEW (struct var_map_elt);
308 elt->old = decl;
309 elt->new = field;
310 *slot = elt;
311
312 if (TREE_CODE (decl) == PARM_DECL)
313 info->any_parm_remapped = true;
314 }
315 else
316 field = elt ? elt->new : NULL;
317
318 return field;
319 }
320
321 /* Build or return the variable that holds the static chain within
322 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
323
324 static tree
325 get_chain_decl (struct nesting_info *info)
326 {
327 tree decl = info->chain_decl;
328 if (!decl)
329 {
330 tree type;
331
332 type = get_frame_type (info->outer);
333 type = build_pointer_type (type);
334
335 /* Note that this variable is *not* entered into any BIND_EXPR;
336 the construction of this variable is handled specially in
337 expand_function_start and initialize_inlined_parameters.
338 Note also that it's represented as a parameter. This is closer
339 to the truth, since the initial value does come from
340 the caller. */
341 decl = build_decl (PARM_DECL, create_tmp_var_name ("CHAIN"), type);
342 DECL_ARTIFICIAL (decl) = 1;
343 DECL_IGNORED_P (decl) = 1;
344 TREE_USED (decl) = 1;
345 DECL_CONTEXT (decl) = info->context;
346 DECL_ARG_TYPE (decl) = type;
347
348 /* Tell tree-inline.c that we never write to this variable, so
349 it can copy-prop the replacement value immediately. */
350 TREE_READONLY (decl) = 1;
351
352 info->chain_decl = decl;
353 }
354 return decl;
355 }
356
357 /* Build or return the field within the non-local frame state that holds
358 the static chain for INFO->CONTEXT. This is the way to walk back up
359 multiple nesting levels. */
360
361 static tree
362 get_chain_field (struct nesting_info *info)
363 {
364 tree field = info->chain_field;
365 if (!field)
366 {
367 tree type = build_pointer_type (get_frame_type (info->outer));
368
369 field = make_node (FIELD_DECL);
370 DECL_NAME (field) = get_identifier ("__chain");
371 TREE_TYPE (field) = type;
372 DECL_ALIGN (field) = TYPE_ALIGN (type);
373 DECL_NONADDRESSABLE_P (field) = 1;
374
375 insert_field_into_struct (get_frame_type (info), field);
376
377 info->chain_field = field;
378 }
379 return field;
380 }
381
382 /* Copy EXP into a temporary. Allocate the temporary in the context of
383 INFO and insert the initialization statement before TSI. */
384
385 static tree
386 init_tmp_var (struct nesting_info *info, tree exp, tree_stmt_iterator *tsi)
387 {
388 tree t, stmt;
389
390 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
391 stmt = build2 (MODIFY_EXPR, TREE_TYPE (t), t, exp);
392 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (tsi_stmt (*tsi)));
393 tsi_link_before (tsi, stmt, TSI_SAME_STMT);
394
395 return t;
396 }
397
398 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
399
400 static tree
401 tsi_gimplify_val (struct nesting_info *info, tree exp, tree_stmt_iterator *tsi)
402 {
403 if (is_gimple_val (exp))
404 return exp;
405 else
406 return init_tmp_var (info, exp, tsi);
407 }
408
409 /* Similarly, but copy from the temporary and insert the statement
410 after the iterator. */
411
412 static tree
413 save_tmp_var (struct nesting_info *info, tree exp,
414 tree_stmt_iterator *tsi)
415 {
416 tree t, stmt;
417
418 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
419 stmt = build2 (MODIFY_EXPR, TREE_TYPE (t), exp, t);
420 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (tsi_stmt (*tsi)));
421 tsi_link_after (tsi, stmt, TSI_SAME_STMT);
422
423 return t;
424 }
425
426 /* Build or return the type used to represent a nested function trampoline. */
427
428 static GTY(()) tree trampoline_type;
429
430 static tree
431 get_trampoline_type (void)
432 {
433 tree record, t;
434 unsigned align, size;
435
436 if (trampoline_type)
437 return trampoline_type;
438
439 align = TRAMPOLINE_ALIGNMENT;
440 size = TRAMPOLINE_SIZE;
441
442 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
443 then allocate extra space so that we can do dynamic alignment. */
444 if (align > STACK_BOUNDARY)
445 {
446 size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
447 align = STACK_BOUNDARY;
448 }
449
450 t = build_index_type (build_int_cst (NULL_TREE, size - 1));
451 t = build_array_type (char_type_node, t);
452 t = build_decl (FIELD_DECL, get_identifier ("__data"), t);
453 DECL_ALIGN (t) = align;
454 DECL_USER_ALIGN (t) = 1;
455
456 record = make_node (RECORD_TYPE);
457 TYPE_NAME (record) = get_identifier ("__builtin_trampoline");
458 TYPE_FIELDS (record) = t;
459 layout_type (record);
460
461 return record;
462 }
463
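/* The type built above is equivalent to

       struct __builtin_trampoline { char __data[SIZE]; };

   where SIZE and the alignment come from the target macros
   TRAMPOLINE_SIZE and TRAMPOLINE_ALIGNMENT, with extra padding added
   when the alignment has to be established dynamically.  */
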
464 /* Given DECL, a nested function, find or create a field in the non-local
465 frame structure for a trampoline for this function. */
466
467 static tree
468 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
469 enum insert_option insert)
470 {
471 struct var_map_elt *elt, dummy;
472 void **slot;
473 tree field;
474
475 dummy.old = decl;
476 slot = htab_find_slot (info->var_map, &dummy, insert);
477 if (!slot)
478 {
479 gcc_assert (insert != INSERT);
480 return NULL;
481 }
482 elt = (struct var_map_elt *) *slot;
483
484 if (!elt && insert == INSERT)
485 {
486 field = make_node (FIELD_DECL);
487 DECL_NAME (field) = DECL_NAME (decl);
488 TREE_TYPE (field) = get_trampoline_type ();
489 TREE_ADDRESSABLE (field) = 1;
490
491 insert_field_into_struct (get_frame_type (info), field);
492
493 elt = GGC_NEW (struct var_map_elt);
494 elt->old = decl;
495 elt->new = field;
496 *slot = elt;
497
498 info->any_tramp_created = true;
499 }
500 else
501 field = elt ? elt->new : NULL;
502
503 return field;
504 }
505
506 /* Build or return the field within the non-local frame state that holds
507 the non-local goto "jmp_buf". The buffer itself is maintained by the
508 rtl middle-end as dynamic stack space is allocated. */
509
510 static tree
511 get_nl_goto_field (struct nesting_info *info)
512 {
513 tree field = info->nl_goto_field;
514 if (!field)
515 {
516 unsigned size;
517 tree type;
518
519 /* For __builtin_nonlocal_goto, we need N words. The first is the
520 frame pointer, the rest is for the target's stack pointer save
521 area. The number of words is controlled by STACK_SAVEAREA_MODE;
522 not the best interface, but it'll do for now. */
523 if (Pmode == ptr_mode)
524 type = ptr_type_node;
525 else
526 type = lang_hooks.types.type_for_mode (Pmode, 1);
527
528 size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
529 size = size / GET_MODE_SIZE (Pmode);
530 size = size + 1;
531
532 type = build_array_type
533 (type, build_index_type (build_int_cst (NULL_TREE, size)));
534
535 field = make_node (FIELD_DECL);
536 DECL_NAME (field) = get_identifier ("__nl_goto_buf");
537 TREE_TYPE (field) = type;
538 DECL_ALIGN (field) = TYPE_ALIGN (type);
539 TREE_ADDRESSABLE (field) = 1;
540
541 insert_field_into_struct (get_frame_type (info), field);
542
543 info->nl_goto_field = field;
544 }
545
546 return field;
547 }
548
549 /* Helper function for walk_stmts. Walk the operands of an ASM_EXPR. */
550
551 static void
552 walk_asm_expr (struct walk_stmt_info *wi, tree stmt)
553 {
554 int noutputs = list_length (ASM_OUTPUTS (stmt));
555 const char **oconstraints
556 = (const char **) alloca ((noutputs) * sizeof (const char *));
557 int i;
558 tree link;
559 const char *constraint;
560 bool allows_mem, allows_reg, is_inout;
561
562 wi->is_lhs = true;
563 for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
564 {
565 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
566 oconstraints[i] = constraint;
567 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
568 &allows_reg, &is_inout);
569
570 wi->val_only = (allows_reg || !allows_mem);
571 walk_tree (&TREE_VALUE (link), wi->callback, wi, NULL);
572 }
573
574 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
575 {
576 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
577 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
578 oconstraints, &allows_mem, &allows_reg);
579
580 wi->val_only = (allows_reg || !allows_mem);
581 /* Although input "m" is not really an LHS, we need an lvalue. */
582 wi->is_lhs = !wi->val_only;
583 walk_tree (&TREE_VALUE (link), wi->callback, wi, NULL);
584 }
585
586 wi->is_lhs = false;
587 wi->val_only = true;
588 }
589
590 /* Iterate over all sub-statements of *TP calling walk_tree with
591 WI->CALLBACK for every sub-expression in each statement found. */
592
593 void
594 walk_stmts (struct walk_stmt_info *wi, tree *tp)
595 {
596 tree t = *tp;
597 int walk_subtrees;
598
599 if (!t)
600 return;
601
602 if (wi->want_locations && EXPR_HAS_LOCATION (t))
603 input_location = EXPR_LOCATION (t);
604
605 switch (TREE_CODE (t))
606 {
607 case STATEMENT_LIST:
608 {
609 tree_stmt_iterator i;
610 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
611 {
612 wi->tsi = i;
613 walk_stmts (wi, tsi_stmt_ptr (i));
614 }
615 }
616 break;
617
618 case COND_EXPR:
619 walk_tree (&COND_EXPR_COND (t), wi->callback, wi, NULL);
620 walk_stmts (wi, &COND_EXPR_THEN (t));
621 walk_stmts (wi, &COND_EXPR_ELSE (t));
622 break;
623 case CATCH_EXPR:
624 walk_stmts (wi, &CATCH_BODY (t));
625 break;
626 case EH_FILTER_EXPR:
627 walk_stmts (wi, &EH_FILTER_FAILURE (t));
628 break;
629 case TRY_CATCH_EXPR:
630 case TRY_FINALLY_EXPR:
631 walk_stmts (wi, &TREE_OPERAND (t, 0));
632 walk_stmts (wi, &TREE_OPERAND (t, 1));
633 break;
634
635 case BIND_EXPR:
636 if (wi->want_bind_expr)
637 {
638 walk_subtrees = 1;
639 wi->callback (tp, &walk_subtrees, wi);
640 if (!walk_subtrees)
641 break;
642 }
643 walk_stmts (wi, &BIND_EXPR_BODY (t));
644 break;
645
646 case RETURN_EXPR:
647 if (wi->want_return_expr)
648 {
649 walk_subtrees = 1;
650 wi->callback (tp, &walk_subtrees, wi);
651 if (!walk_subtrees)
652 break;
653 }
654 walk_stmts (wi, &TREE_OPERAND (t, 0));
655 break;
656
657 case MODIFY_EXPR:
658 /* A formal temporary lhs may use a COMPONENT_REF rhs. */
659 wi->val_only = !is_gimple_formal_tmp_var (TREE_OPERAND (t, 0));
660 walk_tree (&TREE_OPERAND (t, 1), wi->callback, wi, NULL);
661
662 /* If the rhs is appropriate for memory, we may use a
663 COMPONENT_REF on the lhs. */
664 wi->val_only = !is_gimple_mem_rhs (TREE_OPERAND (t, 1));
665 wi->is_lhs = true;
666 walk_tree (&TREE_OPERAND (t, 0), wi->callback, wi, NULL);
667
668 wi->val_only = true;
669 wi->is_lhs = false;
670 break;
671
672 case ASM_EXPR:
673 walk_asm_expr (wi, *tp);
674 break;
675
676 default:
677 wi->val_only = true;
678 walk_tree (tp, wi->callback, wi, NULL);
679 break;
680 }
681 }
682
683 /* Invoke CALLBACK on all statements of *STMT_P. */
684
685 static void
686 walk_body (walk_tree_fn callback, struct nesting_info *info, tree *stmt_p)
687 {
688 struct walk_stmt_info wi;
689
690 memset (&wi, 0, sizeof (wi));
691 wi.callback = callback;
692 wi.info = info;
693 wi.val_only = true;
694
695 walk_stmts (&wi, stmt_p);
696 }
697
698 /* Invoke CALLBACK on all statements of INFO->CONTEXT. */
699
700 static inline void
701 walk_function (walk_tree_fn callback, struct nesting_info *info)
702 {
703 walk_body (callback, info, &DECL_SAVED_TREE (info->context));
704 }
705
706 /* Similarly for ROOT and all functions nested underneath, depth first. */
707
708 static void
709 walk_all_functions (walk_tree_fn callback, struct nesting_info *root)
710 {
711 do
712 {
713 if (root->inner)
714 walk_all_functions (callback, root->inner);
715 walk_function (callback, root);
716 root = root->next;
717 }
718 while (root);
719 }
720
721 /* We have to check for a fairly pathological case. The operands of a
722 nested function are to be interpreted in the context of the enclosing
723 function. So if any are variably-sized, they will get remapped when the
724 enclosing function is inlined. But that remapping would also have to be
725 done in the types of the PARM_DECLs of the nested function, meaning the
726 argument types of that function will disagree with the arguments in the
727 calls to that function. So we'd either have to make a copy of the nested
728 function corresponding to each time the enclosing function was inlined or
729 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
730 function. The former is not practical. The latter would still require
731 detecting this case to know when to add the conversions. So, for now at
732 least, we don't inline such an enclosing function.
733
734 We have to do that check recursively, so here return indicating whether
735 FNDECL has such a nested function. ORIG_FN is the function we were
736 trying to inline to use for checking whether any argument is variably
737 modified by anything in it.
738
739 It would be better to do this in tree-inline.c so that we could give
740 the appropriate warning for why a function can't be inlined, but that's
741 too late since the nesting structure has already been flattened and
742 adding a flag just to record this fact seems a waste of a flag. */
743
744 static bool
745 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
746 {
747 struct cgraph_node *cgn = cgraph_node (fndecl);
748 tree arg;
749
750 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
751 {
752 for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = TREE_CHAIN (arg))
753 if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
754 return true;
755
756 if (check_for_nested_with_variably_modified (cgn->decl, orig_fndecl))
757 return true;
758 }
759
760 return false;
761 }
762
763 /* Construct our local datastructure describing the function nesting
764 tree rooted by CGN. */
765
766 static struct nesting_info *
767 create_nesting_tree (struct cgraph_node *cgn)
768 {
769 struct nesting_info *info = GGC_CNEW (struct nesting_info);
770 info->field_map = htab_create_ggc (7, var_map_hash, var_map_eq, ggc_free);
771 info->var_map = htab_create_ggc (7, var_map_hash, var_map_eq, ggc_free);
772 info->suppress_expansion = BITMAP_GGC_ALLOC ();
773 info->context = cgn->decl;
774
775 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
776 {
777 struct nesting_info *sub = create_nesting_tree (cgn);
778 sub->outer = info;
779 sub->next = info->inner;
780 info->inner = sub;
781 }
782
783 /* See check_for_nested_with_variably_modified for a discussion of why
784 this has to be here. */
785 if (check_for_nested_with_variably_modified (info->context, info->context))
786 DECL_UNINLINABLE (info->context) = true;
787
788 return info;
789 }
790
791 /* Return an expression computing the static chain for TARGET_CONTEXT
792 from INFO->CONTEXT. Insert any necessary computations before TSI. */
793
794 static tree
795 get_static_chain (struct nesting_info *info, tree target_context,
796 tree_stmt_iterator *tsi)
797 {
798 struct nesting_info *i;
799 tree x;
800
801 if (info->context == target_context)
802 {
803 x = build_addr (info->frame_decl, target_context);
804 }
805 else
806 {
807 x = get_chain_decl (info);
808
809 for (i = info->outer; i->context != target_context; i = i->outer)
810 {
811 tree field = get_chain_field (i);
812
813 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
814 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
815 x = init_tmp_var (info, x, tsi);
816 }
817 }
818
819 return x;
820 }
821
822 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
823 frame as seen from INFO->CONTEXT. Insert any necessary computations
824 before TSI. */
825
826 static tree
827 get_frame_field (struct nesting_info *info, tree target_context,
828 tree field, tree_stmt_iterator *tsi)
829 {
830 struct nesting_info *i;
831 tree x;
832
833 if (info->context == target_context)
834 {
835 /* Make sure frame_decl gets created. */
836 (void) get_frame_type (info);
837 x = info->frame_decl;
838 }
839 else
840 {
841 x = get_chain_decl (info);
842
843 for (i = info->outer; i->context != target_context; i = i->outer)
844 {
845 tree field = get_chain_field (i);
846
847 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
848 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
849 x = init_tmp_var (info, x, tsi);
850 }
851
852 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
853 }
854
855 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
856 return x;
857 }
858
859 /* A subroutine of convert_nonlocal_reference. Create a local variable
860 in the nested function with DECL_VALUE_EXPR set to reference the true
861 variable in the parent function. This is used both for debug info
862 and in OpenMP lowering. */
863
864 static tree
865 get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
866 {
867 struct var_map_elt *elt, dummy;
868 tree target_context;
869 struct nesting_info *i;
870 tree x, field, new_decl;
871 void **slot;
872
873 dummy.old = decl;
874 slot = htab_find_slot (info->var_map, &dummy, INSERT);
875 elt = *slot;
876
877 if (elt)
878 return elt->new;
879
880 target_context = decl_function_context (decl);
881
882 /* A copy of the code in get_frame_field, but without the temporaries. */
883 if (info->context == target_context)
884 {
885 /* Make sure frame_decl gets created. */
886 (void) get_frame_type (info);
887 x = info->frame_decl;
888 i = info;
889 }
890 else
891 {
892 x = get_chain_decl (info);
893 for (i = info->outer; i->context != target_context; i = i->outer)
894 {
895 field = get_chain_field (i);
896 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
897 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
898 }
899 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
900 }
901
902 field = lookup_field_for_decl (i, decl, INSERT);
903 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
904 if (use_pointer_in_frame (decl))
905 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
906
907 /* ??? We should be remapping types as well, surely. */
908 new_decl = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
909 DECL_CONTEXT (new_decl) = info->context;
910 DECL_SOURCE_LOCATION (new_decl) = DECL_SOURCE_LOCATION (decl);
911 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
912 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
913 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
914 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
915 TREE_READONLY (new_decl) = TREE_READONLY (decl);
916 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
917 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
918
919 SET_DECL_VALUE_EXPR (new_decl, x);
920 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
921
922 elt = ggc_alloc (sizeof (*elt));
923 elt->old = decl;
924 elt->new = new_decl;
925 *slot = elt;
926
927 TREE_CHAIN (new_decl) = info->debug_var_chain;
928 info->debug_var_chain = new_decl;
929
930 return new_decl;
931 }
932
933 /* Called via walk_function+walk_tree, rewrite all references to VAR
934 and PARM_DECLs that belong to outer functions.
935
936 The rewrite will involve some number of structure accesses back up
937 the static chain. E.g. for a variable FOO up one nesting level it'll
938 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
939 indirections apply to decls for which use_pointer_in_frame is true. */
940
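/* For example (a sketch only; the temporary names are invented), a read
   of FOO from two nesting levels up becomes roughly

       T.1 = CHAIN->__chain;
       T.2 = T.1->FOO;

   with T.2 replacing the original use; when FOO appears on the left-hand
   side, the copy back into the frame is emitted after the statement
   instead.  */
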
941 static bool convert_nonlocal_omp_clauses (tree *, struct walk_stmt_info *);
942
943 static tree
944 convert_nonlocal_reference (tree *tp, int *walk_subtrees, void *data)
945 {
946 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
947 struct nesting_info *info = wi->info;
948 tree t = *tp;
949 tree save_local_var_chain;
950 bitmap save_suppress;
951
952 *walk_subtrees = 0;
953 switch (TREE_CODE (t))
954 {
955 case VAR_DECL:
956 /* Non-automatic variables are never processed. */
957 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
958 break;
959 /* FALLTHRU */
960
961 case PARM_DECL:
962 if (decl_function_context (t) != info->context)
963 {
964 tree x;
965 wi->changed = true;
966
967 x = get_nonlocal_debug_decl (info, t);
968 if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
969 {
970 tree target_context = decl_function_context (t);
971 struct nesting_info *i;
972 for (i = info->outer; i->context != target_context; i = i->outer)
973 continue;
974 x = lookup_field_for_decl (i, t, INSERT);
975 x = get_frame_field (info, target_context, x, &wi->tsi);
976 if (use_pointer_in_frame (t))
977 {
978 x = init_tmp_var (info, x, &wi->tsi);
979 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
980 }
981 }
982
983 if (wi->val_only)
984 {
985 if (wi->is_lhs)
986 x = save_tmp_var (info, x, &wi->tsi);
987 else
988 x = init_tmp_var (info, x, &wi->tsi);
989 }
990
991 *tp = x;
992 }
993 break;
994
995 case GOTO_EXPR:
996 /* Don't walk non-local gotos for now. */
997 if (TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL)
998 {
999 *walk_subtrees = 1;
1000 wi->val_only = true;
1001 wi->is_lhs = false;
1002 }
1003 break;
1004
1005 case LABEL_DECL:
1006 /* We're taking the address of a label from a parent function, but
1007 this is not itself a non-local goto. Mark the label such that it
1008 will not be deleted, much as we would with a label address in
1009 static storage. */
1010 if (decl_function_context (t) != info->context)
1011 FORCED_LABEL (t) = 1;
1012 break;
1013
1014 case ADDR_EXPR:
1015 {
1016 bool save_val_only = wi->val_only;
1017
1018 wi->val_only = false;
1019 wi->is_lhs = false;
1020 wi->changed = false;
1021 walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference, wi, NULL);
1022 wi->val_only = true;
1023
1024 if (wi->changed)
1025 {
1026 tree save_context;
1027
1028 /* If we changed anything, then TREE_INVARIANT will be wrong,
1029 since we're no longer directly referencing a decl. */
1030 save_context = current_function_decl;
1031 current_function_decl = info->context;
1032 recompute_tree_invariant_for_addr_expr (t);
1033 current_function_decl = save_context;
1034
1035 /* If the callback converted the address argument in a context
1036 where we only accept variables (and min_invariant, presumably),
1037 then compute the address into a temporary. */
1038 if (save_val_only)
1039 *tp = tsi_gimplify_val (wi->info, t, &wi->tsi);
1040 }
1041 }
1042 break;
1043
1044 case REALPART_EXPR:
1045 case IMAGPART_EXPR:
1046 case COMPONENT_REF:
1047 case ARRAY_REF:
1048 case ARRAY_RANGE_REF:
1049 case BIT_FIELD_REF:
1050 /* Go down this entire nest and just look at the final prefix and
1051 anything that describes the references. Otherwise, we lose track
1052 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1053 wi->val_only = true;
1054 wi->is_lhs = false;
1055 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1056 {
1057 if (TREE_CODE (t) == COMPONENT_REF)
1058 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference, wi,
1059 NULL);
1060 else if (TREE_CODE (t) == ARRAY_REF
1061 || TREE_CODE (t) == ARRAY_RANGE_REF)
1062 {
1063 walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference, wi,
1064 NULL);
1065 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference, wi,
1066 NULL);
1067 walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference, wi,
1068 NULL);
1069 }
1070 else if (TREE_CODE (t) == BIT_FIELD_REF)
1071 {
1072 walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference, wi,
1073 NULL);
1074 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference, wi,
1075 NULL);
1076 }
1077 }
1078 wi->val_only = false;
1079 walk_tree (tp, convert_nonlocal_reference, wi, NULL);
1080 break;
1081
1082 case OMP_PARALLEL:
1083 save_suppress = info->suppress_expansion;
1084 if (convert_nonlocal_omp_clauses (&OMP_PARALLEL_CLAUSES (t), wi))
1085 {
1086 tree c, decl;
1087 decl = get_chain_decl (info);
1088 c = build_omp_clause (OMP_CLAUSE_FIRSTPRIVATE);
1089 OMP_CLAUSE_DECL (c) = decl;
1090 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (t);
1091 OMP_PARALLEL_CLAUSES (t) = c;
1092 }
1093
1094 save_local_var_chain = info->new_local_var_chain;
1095 info->new_local_var_chain = NULL;
1096
1097 walk_body (convert_nonlocal_reference, info, &OMP_PARALLEL_BODY (t));
1098
1099 if (info->new_local_var_chain)
1100 declare_vars (info->new_local_var_chain, OMP_PARALLEL_BODY (t), false);
1101 info->new_local_var_chain = save_local_var_chain;
1102 info->suppress_expansion = save_suppress;
1103 break;
1104
1105 case OMP_FOR:
1106 case OMP_SECTIONS:
1107 case OMP_SINGLE:
1108 save_suppress = info->suppress_expansion;
1109 convert_nonlocal_omp_clauses (&OMP_CLAUSES (t), wi);
1110 walk_body (convert_nonlocal_reference, info, &OMP_BODY (t));
1111 info->suppress_expansion = save_suppress;
1112 break;
1113
1114 case OMP_SECTION:
1115 case OMP_MASTER:
1116 case OMP_ORDERED:
1117 walk_body (convert_nonlocal_reference, info, &OMP_BODY (t));
1118 break;
1119
1120 default:
1121 if (!IS_TYPE_OR_DECL_P (t))
1122 {
1123 *walk_subtrees = 1;
1124 wi->val_only = true;
1125 wi->is_lhs = false;
1126 }
1127 break;
1128 }
1129
1130 return NULL_TREE;
1131 }
1132
1133 static bool
1134 convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1135 {
1136 struct nesting_info *info = wi->info;
1137 bool need_chain = false;
1138 tree clause, decl;
1139 int dummy;
1140 bitmap new_suppress;
1141
1142 new_suppress = BITMAP_GGC_ALLOC ();
1143 bitmap_copy (new_suppress, info->suppress_expansion);
1144
1145 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1146 {
1147 switch (OMP_CLAUSE_CODE (clause))
1148 {
1149 case OMP_CLAUSE_PRIVATE:
1150 case OMP_CLAUSE_FIRSTPRIVATE:
1151 case OMP_CLAUSE_LASTPRIVATE:
1152 case OMP_CLAUSE_REDUCTION:
1153 case OMP_CLAUSE_COPYPRIVATE:
1154 case OMP_CLAUSE_SHARED:
1155 decl = OMP_CLAUSE_DECL (clause);
1156 if (decl_function_context (decl) != info->context)
1157 {
1158 bitmap_set_bit (new_suppress, DECL_UID (decl));
1159 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1160 need_chain = true;
1161 }
1162 break;
1163
1164 case OMP_CLAUSE_SCHEDULE:
1165 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1166 break;
1167 /* FALLTHRU */
1168 case OMP_CLAUSE_IF:
1169 case OMP_CLAUSE_NUM_THREADS:
1170 wi->val_only = true;
1171 wi->is_lhs = false;
1172 convert_nonlocal_reference (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
1173 wi);
1174 break;
1175
1176 case OMP_CLAUSE_NOWAIT:
1177 case OMP_CLAUSE_ORDERED:
1178 case OMP_CLAUSE_DEFAULT:
1179 case OMP_CLAUSE_COPYIN:
1180 break;
1181
1182 default:
1183 gcc_unreachable ();
1184 }
1185 }
1186
1187 info->suppress_expansion = new_suppress;
1188
1189 return need_chain;
1190 }
1191
1192 /* A subroutine of convert_local_reference. Create a local variable
1193 in the parent function with DECL_VALUE_EXPR set to reference the
1194 field in FRAME. This is used both for debug info and in OpenMP
1195 lowering. */
1196
1197 static tree
1198 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1199 {
1200 struct var_map_elt *elt, dummy;
1201 tree x, new_decl;
1202 void **slot;
1203
1204 dummy.old = decl;
1205 slot = htab_find_slot (info->var_map, &dummy, INSERT);
1206 elt = *slot;
1207
1208 if (elt)
1209 return elt->new;
1210
1211 /* Make sure frame_decl gets created. */
1212 (void) get_frame_type (info);
1213 x = info->frame_decl;
1214 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1215
1216 new_decl = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1217 DECL_CONTEXT (new_decl) = info->context;
1218 DECL_SOURCE_LOCATION (new_decl) = DECL_SOURCE_LOCATION (decl);
1219 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1220 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1221 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1222 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1223 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1224 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1225 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1226
1227 SET_DECL_VALUE_EXPR (new_decl, x);
1228 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1229
1230 elt = ggc_alloc (sizeof (*elt));
1231 elt->old = decl;
1232 elt->new = new_decl;
1233 *slot = elt;
1234
1235 TREE_CHAIN (new_decl) = info->debug_var_chain;
1236 info->debug_var_chain = new_decl;
1237
1238 /* Do not emit debug info twice. */
1239 DECL_IGNORED_P (decl) = 1;
1240
1241 return new_decl;
1242 }
1243
1244 /* Called via walk_function+walk_tree, rewrite all references to VAR
1245 and PARM_DECLs that were referenced by inner nested functions.
1246 The rewrite will be a structure reference to the local frame variable. */
1247
1248 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1249
1250 static tree
1251 convert_local_reference (tree *tp, int *walk_subtrees, void *data)
1252 {
1253 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1254 struct nesting_info *info = wi->info;
1255 tree t = *tp, field, x;
1256 bool save_val_only;
1257 tree save_local_var_chain;
1258 bitmap save_suppress;
1259
1260 *walk_subtrees = 0;
1261 switch (TREE_CODE (t))
1262 {
1263 case VAR_DECL:
1264 /* Non-automatic variables are never processed. */
1265 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1266 break;
1267 /* FALLTHRU */
1268
1269 case PARM_DECL:
1270 if (decl_function_context (t) == info->context)
1271 {
1272 /* If we copied a pointer to the frame, then the original decl
1273 is used unchanged in the parent function. */
1274 if (use_pointer_in_frame (t))
1275 break;
1276
1277 /* No need to transform anything if no child references the
1278 variable. */
1279 field = lookup_field_for_decl (info, t, NO_INSERT);
1280 if (!field)
1281 break;
1282 wi->changed = true;
1283
1284 x = get_local_debug_decl (info, t, field);
1285 if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1286 x = get_frame_field (info, info->context, field, &wi->tsi);
1287
1288 if (wi->val_only)
1289 {
1290 if (wi->is_lhs)
1291 x = save_tmp_var (info, x, &wi->tsi);
1292 else
1293 x = init_tmp_var (info, x, &wi->tsi);
1294 }
1295
1296 *tp = x;
1297 }
1298 break;
1299
1300 case ADDR_EXPR:
1301 save_val_only = wi->val_only;
1302 wi->val_only = false;
1303 wi->is_lhs = false;
1304 wi->changed = false;
1305 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference, wi, NULL);
1306 wi->val_only = save_val_only;
1307
1308 /* If we converted anything ... */
1309 if (wi->changed)
1310 {
1311 tree save_context;
1312
1313 /* Then the frame decl is now addressable. */
1314 TREE_ADDRESSABLE (info->frame_decl) = 1;
1315
1316 save_context = current_function_decl;
1317 current_function_decl = info->context;
1318 recompute_tree_invariant_for_addr_expr (t);
1319 current_function_decl = save_context;
1320
1321 /* If we are in a context where we only accept values, then
1322 compute the address into a temporary. */
1323 if (save_val_only)
1324 *tp = tsi_gimplify_val (wi->info, t, &wi->tsi);
1325 }
1326 break;
1327
1328 case REALPART_EXPR:
1329 case IMAGPART_EXPR:
1330 case COMPONENT_REF:
1331 case ARRAY_REF:
1332 case ARRAY_RANGE_REF:
1333 case BIT_FIELD_REF:
1334 /* Go down this entire nest and just look at the final prefix and
1335 anything that describes the references. Otherwise, we lose track
1336 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1337 save_val_only = wi->val_only;
1338 wi->val_only = true;
1339 wi->is_lhs = false;
1340 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1341 {
1342 if (TREE_CODE (t) == COMPONENT_REF)
1343 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference, wi,
1344 NULL);
1345 else if (TREE_CODE (t) == ARRAY_REF
1346 || TREE_CODE (t) == ARRAY_RANGE_REF)
1347 {
1348 walk_tree (&TREE_OPERAND (t, 1), convert_local_reference, wi,
1349 NULL);
1350 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference, wi,
1351 NULL);
1352 walk_tree (&TREE_OPERAND (t, 3), convert_local_reference, wi,
1353 NULL);
1354 }
1355 else if (TREE_CODE (t) == BIT_FIELD_REF)
1356 {
1357 walk_tree (&TREE_OPERAND (t, 1), convert_local_reference, wi,
1358 NULL);
1359 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference, wi,
1360 NULL);
1361 }
1362 }
1363 wi->val_only = false;
1364 walk_tree (tp, convert_local_reference, wi, NULL);
1365 wi->val_only = save_val_only;
1366 break;
1367
1368 case OMP_PARALLEL:
1369 save_suppress = info->suppress_expansion;
1370 if (convert_local_omp_clauses (&OMP_PARALLEL_CLAUSES (t), wi))
1371 {
1372 tree c;
1373 (void) get_frame_type (info);
1374 c = build_omp_clause (OMP_CLAUSE_SHARED);
1375 OMP_CLAUSE_DECL (c) = info->frame_decl;
1376 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (t);
1377 OMP_PARALLEL_CLAUSES (t) = c;
1378 }
1379
1380 save_local_var_chain = info->new_local_var_chain;
1381 info->new_local_var_chain = NULL;
1382
1383 walk_body (convert_local_reference, info, &OMP_PARALLEL_BODY (t));
1384
1385 if (info->new_local_var_chain)
1386 declare_vars (info->new_local_var_chain, OMP_PARALLEL_BODY (t), false);
1387 info->new_local_var_chain = save_local_var_chain;
1388 info->suppress_expansion = save_suppress;
1389 break;
1390
1391 case OMP_FOR:
1392 case OMP_SECTIONS:
1393 case OMP_SINGLE:
1394 save_suppress = info->suppress_expansion;
1395 convert_local_omp_clauses (&OMP_CLAUSES (t), wi);
1396 walk_body (convert_local_reference, info, &OMP_BODY (t));
1397 info->suppress_expansion = save_suppress;
1398 break;
1399
1400 case OMP_SECTION:
1401 case OMP_MASTER:
1402 case OMP_ORDERED:
1403 walk_body (convert_local_reference, info, &OMP_BODY (t));
1404 break;
1405
1406 default:
1407 if (!IS_TYPE_OR_DECL_P (t))
1408 {
1409 *walk_subtrees = 1;
1410 wi->val_only = true;
1411 wi->is_lhs = false;
1412 }
1413 break;
1414 }
1415
1416 return NULL_TREE;
1417 }
1418
1419 static bool
1420 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1421 {
1422 struct nesting_info *info = wi->info;
1423 bool need_frame = false;
1424 tree clause, decl;
1425 int dummy;
1426 bitmap new_suppress;
1427
1428 new_suppress = BITMAP_GGC_ALLOC ();
1429 bitmap_copy (new_suppress, info->suppress_expansion);
1430
1431 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1432 {
1433 switch (OMP_CLAUSE_CODE (clause))
1434 {
1435 case OMP_CLAUSE_PRIVATE:
1436 case OMP_CLAUSE_FIRSTPRIVATE:
1437 case OMP_CLAUSE_LASTPRIVATE:
1438 case OMP_CLAUSE_REDUCTION:
1439 case OMP_CLAUSE_COPYPRIVATE:
1440 case OMP_CLAUSE_SHARED:
1441 decl = OMP_CLAUSE_DECL (clause);
1442 if (decl_function_context (decl) == info->context
1443 && !use_pointer_in_frame (decl))
1444 {
1445 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1446 if (field)
1447 {
1448 bitmap_set_bit (new_suppress, DECL_UID (decl));
1449 OMP_CLAUSE_DECL (clause)
1450 = get_local_debug_decl (info, decl, field);
1451 need_frame = true;
1452 }
1453 }
1454 break;
1455
1456 case OMP_CLAUSE_SCHEDULE:
1457 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1458 break;
1459 /* FALLTHRU */
1460 case OMP_CLAUSE_IF:
1461 case OMP_CLAUSE_NUM_THREADS:
1462 wi->val_only = true;
1463 wi->is_lhs = false;
1464 convert_local_reference (&OMP_CLAUSE_OPERAND (clause, 0), &dummy, wi);
1465 break;
1466
1467 case OMP_CLAUSE_NOWAIT:
1468 case OMP_CLAUSE_ORDERED:
1469 case OMP_CLAUSE_DEFAULT:
1470 case OMP_CLAUSE_COPYIN:
1471 break;
1472
1473 default:
1474 gcc_unreachable ();
1475 }
1476 }
1477
1478 info->suppress_expansion = new_suppress;
1479
1480 return need_frame;
1481 }
1482
1483 /* Called via walk_function+walk_tree, rewrite all GOTO_EXPRs that
1484 reference labels from outer functions. The rewrite will be a
1485 call to __builtin_nonlocal_goto. */
1486
1487 static tree
1488 convert_nl_goto_reference (tree *tp, int *walk_subtrees, void *data)
1489 {
1490 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1491 struct nesting_info *info = wi->info, *i;
1492 tree t = *tp, label, new_label, target_context, x, arg, field;
1493 struct var_map_elt *elt, dummy;
1494 void **slot;
1495
1496 *walk_subtrees = 0;
1497 if (TREE_CODE (t) != GOTO_EXPR)
1498 return NULL_TREE;
1499 label = GOTO_DESTINATION (t);
1500 if (TREE_CODE (label) != LABEL_DECL)
1501 return NULL_TREE;
1502 target_context = decl_function_context (label);
1503 if (target_context == info->context)
1504 return NULL_TREE;
1505
1506 for (i = info->outer; target_context != i->context; i = i->outer)
1507 continue;
1508
1509 /* The original user label may also be used for a normal goto, therefore
1510 we must create a new label that will actually receive the abnormal
1511 control transfer. This new label will be marked LABEL_NONLOCAL; this
1512 mark will trigger proper behavior in the cfg, as well as cause the
1513 (hairy target-specific) non-local goto receiver code to be generated
1514 when we expand rtl. Enter this association into var_map so that we
1515 can insert the new label into the IL during a second pass. */
1516 dummy.old = label;
1517 slot = htab_find_slot (i->var_map, &dummy, INSERT);
1518 elt = (struct var_map_elt *) *slot;
1519 if (elt == NULL)
1520 {
1521 new_label = create_artificial_label ();
1522 DECL_NONLOCAL (new_label) = 1;
1523
1524 elt = GGC_NEW (struct var_map_elt);
1525 elt->old = label;
1526 elt->new = new_label;
1527 *slot = elt;
1528 }
1529 else
1530 new_label = elt->new;
1531
1532 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
1533 field = get_nl_goto_field (i);
1534 x = get_frame_field (info, target_context, field, &wi->tsi);
1535 x = build_addr (x, target_context);
1536 x = tsi_gimplify_val (info, x, &wi->tsi);
1537 arg = tree_cons (NULL, x, NULL);
1538 x = build_addr (new_label, target_context);
1539 arg = tree_cons (NULL, x, arg);
1540 x = implicit_built_in_decls[BUILT_IN_NONLOCAL_GOTO];
1541 x = build_function_call_expr (x, arg);
1542
1543 SET_EXPR_LOCUS (x, EXPR_LOCUS (tsi_stmt (wi->tsi)));
1544 *tsi_stmt_ptr (wi->tsi) = x;
1545
1546 return NULL_TREE;
1547 }
1548
1549 /* Called via walk_function+walk_tree, rewrite all LABEL_EXPRs that
1550 are referenced via nonlocal goto from a nested function. The rewrite
1551 will involve installing a newly generated DECL_NONLOCAL label, and
1552 (potentially) a branch around the rtl gunk that is assumed to be
1553 attached to such a label. */
1554
1555 static tree
1556 convert_nl_goto_receiver (tree *tp, int *walk_subtrees, void *data)
1557 {
1558 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1559 struct nesting_info *info = wi->info;
1560 tree t = *tp, label, new_label, x;
1561 struct var_map_elt *elt, dummy;
1562 tree_stmt_iterator tmp_tsi;
1563
1564 *walk_subtrees = 0;
1565 if (TREE_CODE (t) != LABEL_EXPR)
1566 return NULL_TREE;
1567 label = LABEL_EXPR_LABEL (t);
1568
1569 dummy.old = label;
1570 elt = (struct var_map_elt *) htab_find (info->var_map, &dummy);
1571 if (!elt)
1572 return NULL_TREE;
1573 new_label = elt->new;
1574
1575 /* If there's any possibility that the previous statement falls through,
1576 then we must branch around the new non-local label. */
1577 tmp_tsi = wi->tsi;
1578 tsi_prev (&tmp_tsi);
1579 if (tsi_end_p (tmp_tsi) || block_may_fallthru (tsi_stmt (tmp_tsi)))
1580 {
1581 x = build1 (GOTO_EXPR, void_type_node, label);
1582 tsi_link_before (&wi->tsi, x, TSI_SAME_STMT);
1583 }
1584 x = build1 (LABEL_EXPR, void_type_node, new_label);
1585 tsi_link_before (&wi->tsi, x, TSI_SAME_STMT);
1586
1587 return NULL_TREE;
1588 }
1589
1590 /* Called via walk_function+walk_tree, rewrite all references to addresses
1591 of nested functions that require the use of trampolines. The rewrite
1592 will involve a reference to a trampoline generated for the occasion. */
1593
1594 static tree
1595 convert_tramp_reference (tree *tp, int *walk_subtrees, void *data)
1596 {
1597 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1598 struct nesting_info *info = wi->info, *i;
1599 tree t = *tp, decl, target_context, x, arg;
1600
1601 *walk_subtrees = 0;
1602 switch (TREE_CODE (t))
1603 {
1604 case ADDR_EXPR:
1605 /* Build
1606 T.1 = &CHAIN->tramp;
1607 T.2 = __builtin_adjust_trampoline (T.1);
1608 T.3 = (func_type)T.2;
1609 */
1610
1611 decl = TREE_OPERAND (t, 0);
1612 if (TREE_CODE (decl) != FUNCTION_DECL)
1613 break;
1614
1615 /* Only need to process nested functions. */
1616 target_context = decl_function_context (decl);
1617 if (!target_context)
1618 break;
1619
1620 /* If the nested function doesn't use a static chain, then
1621 it doesn't need a trampoline. */
1622 if (DECL_NO_STATIC_CHAIN (decl))
1623 break;
1624
1625 if (!flag_trampolines)
1626 {
1627 error ("trampoline code generation is not allowed without -ftrampolines");
1628 return NULL_TREE;
1629 }
1630 if (warn_trampolines)
1631 {
1632 warning (0, "local function address taken needing trampoline generation");
1633 }
1634 /* Lookup the immediate parent of the callee, as that's where
1635 we need to insert the trampoline. */
1636 for (i = info; i->context != target_context; i = i->outer)
1637 continue;
1638 x = lookup_tramp_for_decl (i, decl, INSERT);
1639
1640 /* Compute the address of the field holding the trampoline. */
1641 x = get_frame_field (info, target_context, x, &wi->tsi);
1642 x = build_addr (x, target_context);
1643 x = tsi_gimplify_val (info, x, &wi->tsi);
1644 arg = tree_cons (NULL, x, NULL);
1645
1646 /* Do machine-specific ugliness. Normally this will involve
1647 computing extra alignment, but it can really be anything. */
1648 x = implicit_built_in_decls[BUILT_IN_ADJUST_TRAMPOLINE];
1649 x = build_function_call_expr (x, arg);
1650 x = init_tmp_var (info, x, &wi->tsi);
1651
1652 /* Cast back to the proper function type. */
1653 x = build1 (NOP_EXPR, TREE_TYPE (t), x);
1654 x = init_tmp_var (info, x, &wi->tsi);
1655
1656 *tp = x;
1657 break;
1658
1659 case CALL_EXPR:
1660 /* Only walk call arguments, lest we generate trampolines for
1661 direct calls. */
1662 walk_tree (&TREE_OPERAND (t, 1), convert_tramp_reference, wi, NULL);
1663 break;
1664
1665 default:
1666 if (!IS_TYPE_OR_DECL_P (t))
1667 *walk_subtrees = 1;
1668 break;
1669 }
1670
1671 return NULL_TREE;
1672 }
1673
1674 /* Called via walk_function+walk_tree, rewrite all CALL_EXPRs that
1675 reference nested functions to make sure that the static chain is
1676 set up properly for the call. */
1677
1678 static tree
1679 convert_call_expr (tree *tp, int *walk_subtrees, void *data)
1680 {
1681 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1682 struct nesting_info *info = wi->info;
1683 tree t = *tp, decl, target_context;
1684 char save_static_chain_added;
1685 int i;
1686
1687 *walk_subtrees = 0;
1688 switch (TREE_CODE (t))
1689 {
1690 case CALL_EXPR:
1691 decl = get_callee_fndecl (t);
1692 if (!decl)
1693 break;
1694 target_context = decl_function_context (decl);
1695 if (target_context && !DECL_NO_STATIC_CHAIN (decl))
1696 {
1697 TREE_OPERAND (t, 2)
1698 = get_static_chain (info, target_context, &wi->tsi);
1699 info->static_chain_added
1700 |= (1 << (info->context != target_context));
1701 }
1702 break;
1703
1704 case RETURN_EXPR:
1705 case MODIFY_EXPR:
1706 case WITH_SIZE_EXPR:
1707 /* Only RETURN_EXPR, MODIFY_EXPR and WITH_SIZE_EXPR may contain calls. */
1708 *walk_subtrees = 1;
1709 break;
1710
1711 case OMP_PARALLEL:
1712 save_static_chain_added = info->static_chain_added;
1713 info->static_chain_added = 0;
1714 walk_body (convert_call_expr, info, &OMP_PARALLEL_BODY (t));
1715 for (i = 0; i < 2; i++)
1716 {
1717 tree c, decl;
1718 if ((info->static_chain_added & (1 << i)) == 0)
1719 continue;
1720 decl = i ? get_chain_decl (info) : info->frame_decl;
1721 /* Don't add CHAIN.* or FRAME.* twice. */
1722 for (c = OMP_PARALLEL_CLAUSES (t); c; c = OMP_CLAUSE_CHAIN (c))
1723 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1724 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
1725 && OMP_CLAUSE_DECL (c) == decl)
1726 break;
1727 if (c == NULL)
1728 {
1729 c = build_omp_clause (OMP_CLAUSE_FIRSTPRIVATE);
1730 OMP_CLAUSE_DECL (c) = decl;
1731 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (t);
1732 OMP_PARALLEL_CLAUSES (t) = c;
1733 }
1734 }
1735 info->static_chain_added |= save_static_chain_added;
1736 break;
1737
1738 case OMP_FOR:
1739 case OMP_SECTIONS:
1740 case OMP_SECTION:
1741 case OMP_SINGLE:
1742 case OMP_MASTER:
1743 case OMP_ORDERED:
1744 case OMP_CRITICAL:
1745 walk_body (convert_call_expr, info, &OMP_BODY (t));
1746 break;
1747
1748 default:
1749 break;
1750 }
1751
1752 return NULL_TREE;
1753 }
1754
1755 /* Walk the nesting tree starting with ROOT, depth first. Convert all
1756 trampolines and call expressions. On the way back up, determine if
1757 a nested function actually uses its static chain; if not, remember that. */
1758
1759 static void
1760 convert_all_function_calls (struct nesting_info *root)
{
  do
    {
      if (root->inner)
        convert_all_function_calls (root->inner);

      walk_function (convert_tramp_reference, root);
      walk_function (convert_call_expr, root);

      /* If the function does not use a static chain, then remember that.  */
      if (root->outer && !root->chain_decl && !root->chain_field)
        DECL_NO_STATIC_CHAIN (root->context) = 1;
      else
        gcc_assert (!DECL_NO_STATIC_CHAIN (root->context));

      root = root->next;
    }
  while (root);
}

/* Do "everything else" to clean up or complete state collected by the
   various walking passes -- lay out the types and decls, generate code
   to initialize the frame decl, store critical expressions in the
   struct function for rtl to find.  */

static void
finalize_nesting_tree_1 (struct nesting_info *root)
{
  tree stmt_list = NULL;
  tree context = root->context;
  struct function *sf;

  /* If we created a non-local frame type or decl, we need to lay them
     out at this time.  */
  if (root->frame_type)
    {
      /* In some cases the frame type will trigger the -Wpadded warning.
         This is not helpful; suppress it.  */
      int save_warn_padded = warn_padded;
      warn_padded = 0;
      layout_type (root->frame_type);
      warn_padded = save_warn_padded;
      layout_decl (root->frame_decl, 0);
    }

  /* If any parameters were referenced non-locally, then we need to
     insert a copy.  Likewise, if any variables were referenced by
     pointer, we need to initialize the address.  */
  if (root->any_parm_remapped)
    {
      tree p;
      for (p = DECL_ARGUMENTS (context); p ; p = TREE_CHAIN (p))
        {
          tree field, x, y;

          field = lookup_field_for_decl (root, p, NO_INSERT);
          if (!field)
            continue;

          if (use_pointer_in_frame (p))
            x = build_addr (p, context);
          else
            x = p;

          y = build3 (COMPONENT_REF, TREE_TYPE (field),
                      root->frame_decl, field, NULL_TREE);
          x = build2 (MODIFY_EXPR, TREE_TYPE (field), y, x);
          append_to_statement_list (x, &stmt_list);
        }
    }

  /* If a chain_field was created, then it needs to be initialized
     from chain_decl.  */
  if (root->chain_field)
    {
      tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
                       root->frame_decl, root->chain_field, NULL_TREE);
      x = build2 (MODIFY_EXPR, TREE_TYPE (x), x, get_chain_decl (root));
      append_to_statement_list (x, &stmt_list);
    }

  /* If trampolines were created, then we need to initialize them.  */
  if (root->any_tramp_created)
    {
      struct nesting_info *i;
      for (i = root->inner; i ; i = i->next)
        {
          tree arg, x, field;

          field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
          if (!field)
            continue;

          if (DECL_NO_STATIC_CHAIN (i->context))
            x = null_pointer_node;
          else
            x = build_addr (root->frame_decl, context);
          arg = tree_cons (NULL, x, NULL);

          x = build_addr (i->context, context);
          arg = tree_cons (NULL, x, arg);

          x = build3 (COMPONENT_REF, TREE_TYPE (field),
                      root->frame_decl, field, NULL_TREE);
          x = build_addr (x, context);
          arg = tree_cons (NULL, x, arg);

          x = implicit_built_in_decls[BUILT_IN_INIT_TRAMPOLINE];
          x = build_function_call_expr (x, arg);

          append_to_statement_list (x, &stmt_list);
        }
    }

  /* If we created initialization statements, insert them at the start
     of the function body.  */
  if (stmt_list)
    {
      annotate_all_with_locus (&stmt_list,
                               DECL_SOURCE_LOCATION (context));
      append_to_statement_list (BIND_EXPR_BODY (DECL_SAVED_TREE (context)),
                                &stmt_list);
      BIND_EXPR_BODY (DECL_SAVED_TREE (context)) = stmt_list;
    }

  /* If a chain_decl was created, then it needs to be registered with
     struct function so that it gets initialized from the static chain
     register at the beginning of the function.  */
  sf = DECL_STRUCT_FUNCTION (root->context);
  sf->static_chain_decl = root->chain_decl;

  /* Similarly for the non-local goto save area.  */
  if (root->nl_goto_field)
    {
      sf->nonlocal_goto_save_area
        = get_frame_field (root, context, root->nl_goto_field, NULL);
      sf->has_nonlocal_label = 1;
    }

  /* Make sure all new local variables get inserted into the
     proper BIND_EXPR.  */
  if (root->new_local_var_chain)
    declare_vars (root->new_local_var_chain, DECL_SAVED_TREE (root->context),
                  false);
  if (root->debug_var_chain)
    declare_vars (root->debug_var_chain, DECL_SAVED_TREE (root->context),
                  true);

  /* Dump the translated tree function.  */
  dump_function (TDI_nested, root->context);
}
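
/* For illustration only (a sketch; the names are invented): for an
   outer function F with a remapped parameter, a chain field, and a
   trampoline for a nested function G, the statements prepended to F's
   body by the routine above look roughly like

       FRAME.F.parm = parm;
       FRAME.F.CHAIN = CHAIN.<n>;
       __builtin_init_trampoline (&FRAME.F.TRAMP_G, &G, &FRAME.F);

   with F's original statements following them.  */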

static void
finalize_nesting_tree (struct nesting_info *root)
{
  do
    {
      if (root->inner)
        finalize_nesting_tree (root->inner);
      finalize_nesting_tree_1 (root);
      root = root->next;
    }
  while (root);
}

/* Unnest the nodes and pass them to cgraph.  */

static void
unnest_nesting_tree_1 (struct nesting_info *root)
{
  struct cgraph_node *node = cgraph_node (root->context);

  /* For nested functions, update the cgraph to reflect unnesting.
     We also delay finalizing these functions until this point.  */
  if (node->origin)
    {
      cgraph_unnest_node (cgraph_node (root->context));
      cgraph_finalize_function (root->context, true);
    }
}

static void
unnest_nesting_tree (struct nesting_info *root)
{
  do
    {
      if (root->inner)
        unnest_nesting_tree (root->inner);
      unnest_nesting_tree_1 (root);
      root = root->next;
    }
  while (root);
}

/* Free the data structures allocated during this pass.  */

static void
free_nesting_tree (struct nesting_info *root)
{
  struct nesting_info *next;
  do
    {
      if (root->inner)
        free_nesting_tree (root->inner);
      htab_delete (root->var_map);
      next = root->next;
      ggc_free (root);
      root = next;
    }
  while (root);
}

static GTY(()) struct nesting_info *root;

/* Main entry point for this pass.  Process FNDECL and all of its nested
   subroutines and turn them into something less tightly bound.  */

void
lower_nested_functions (tree fndecl)
{
  struct cgraph_node *cgn;

  /* If there are no nested functions, there's nothing to do.  */
  cgn = cgraph_node (fndecl);
  if (!cgn->nested)
    return;

  root = create_nesting_tree (cgn);
  walk_all_functions (convert_nonlocal_reference, root);
  walk_all_functions (convert_local_reference, root);
  walk_all_functions (convert_nl_goto_reference, root);
  walk_all_functions (convert_nl_goto_receiver, root);
  convert_all_function_calls (root);
  finalize_nesting_tree (root);
  unnest_nesting_tree (root);
  free_nesting_tree (root);
  root = NULL;
}
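
/* For illustration only (a hedged sketch): given

       int f (int x)
       {
         int g (void) { return x; }
         return g ();
       }

   the walks above rewrite g's use of X through the frame object, give
   the call to G an explicit static chain operand, and finally unnest G
   so that cgraph treats it as an ordinary function of its own.  */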

#include "gt-tree-nested.h"