svn merge -r210100:210672 svn+ssh://gcc.gnu.org/svn/gcc/trunk
[official-gcc.git] / gcc / tree-nested.c
blob327389e692d0c9a73e0ea03317309ddef1e0bb2d
1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "stringpool.h"
26 #include "stor-layout.h"
27 #include "tm_p.h"
28 #include "function.h"
29 #include "tree-dump.h"
30 #include "tree-inline.h"
31 #include "pointer-set.h"
32 #include "basic-block.h"
33 #include "tree-ssa-alias.h"
34 #include "internal-fn.h"
35 #include "gimple-expr.h"
36 #include "is-a.h"
37 #include "gimple.h"
38 #include "gimplify.h"
39 #include "gimple-iterator.h"
40 #include "gimple-walk.h"
41 #include "tree-iterator.h"
42 #include "bitmap.h"
43 #include "cgraph.h"
44 #include "tree-cfg.h"
45 #include "expr.h" /* FIXME: For STACK_SAVEAREA_MODE and SAVE_NONLOCAL. */
46 #include "langhooks.h"
47 #include "gimple-low.h"
50 /* The object of this pass is to lower the representation of a set of nested
51 functions in order to expose all of the gory details of the various
52 nonlocal references. We want to do this sooner rather than later, in
53 order to give us more freedom in emitting all of the functions in question.
55 Back in olden times, when gcc was young, we developed an insanely
56 complicated scheme whereby variables which were referenced nonlocally
57 were forced to live in the stack of the declaring function, and then
58 the nested functions magically discovered where these variables were
59 placed. In order for this scheme to function properly, it required
60 that the outer function be partially expanded, then we switch to
61 compiling the inner function, and once done with those we switch back
62 to compiling the outer function. Such delicate ordering requirements
63 makes it difficult to do whole translation unit optimizations
64 involving such functions.
66 The implementation here is much more direct. Everything that can be
67 referenced by an inner function is a member of an explicitly created
68 structure herein called the "nonlocal frame struct". The incoming
69 static chain for a nested function is a pointer to this struct in
70 the parent. In this way, we settle on known offsets from a known
71 base, and so are decoupled from the logic that places objects in the
72 function's stack frame. More importantly, we don't have to wait for
73 that to happen -- since the compilation of the inner function is no
74 longer tied to a real stack frame, the nonlocal frame struct can be
75 allocated anywhere. Which means that the outer function is now
76 inlinable.
78 Theory of operation here is very simple. Iterate over all the
79 statements in all the functions (depth first) several times,
80 allocating structures and fields on demand. In general we want to
81 examine inner functions first, so that we can avoid making changes
82 to outer functions which are unnecessary.
84 The order of the passes matters a bit, in that later passes will be
85 skipped if it is discovered that the functions don't actually interact
86 at all. That is, they're nested in the lexical sense but could have
87 been written as independent functions without change. */
90 struct nesting_info
92 struct nesting_info *outer;
93 struct nesting_info *inner;
94 struct nesting_info *next;
96 struct pointer_map_t *field_map;
97 struct pointer_map_t *var_map;
98 struct pointer_set_t *mem_refs;
99 bitmap suppress_expansion;
101 tree context;
102 tree new_local_var_chain;
103 tree debug_var_chain;
104 tree frame_type;
105 tree frame_decl;
106 tree chain_field;
107 tree chain_decl;
108 tree nl_goto_field;
110 bool any_parm_remapped;
111 bool any_tramp_created;
112 char static_chain_added;
116 /* Iterate over the nesting tree, starting with ROOT, depth first. */
118 static inline struct nesting_info *
119 iter_nestinfo_start (struct nesting_info *root)
121 while (root->inner)
122 root = root->inner;
123 return root;
126 static inline struct nesting_info *
127 iter_nestinfo_next (struct nesting_info *node)
129 if (node->next)
130 return iter_nestinfo_start (node->next);
131 return node->outer;
134 #define FOR_EACH_NEST_INFO(I, ROOT) \
135 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
137 /* Obstack used for the bitmaps in the struct above. */
138 static struct bitmap_obstack nesting_info_bitmap_obstack;
141 /* We're working in so many different function contexts simultaneously,
142 that create_tmp_var is dangerous. Prevent mishap. */
143 #define create_tmp_var cant_use_create_tmp_var_here_dummy
145 /* Like create_tmp_var, except record the variable for registration at
146 the given nesting level. */
148 static tree
149 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
151 tree tmp_var;
153 /* If the type is of variable size or a type which must be created by the
154 frontend, something is wrong. Note that we explicitly allow
155 incomplete types here, since we create them ourselves here. */
156 gcc_assert (!TREE_ADDRESSABLE (type));
157 gcc_assert (!TYPE_SIZE_UNIT (type)
158 || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
160 tmp_var = create_tmp_var_raw (type, prefix);
161 DECL_CONTEXT (tmp_var) = info->context;
162 DECL_CHAIN (tmp_var) = info->new_local_var_chain;
163 DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
164 if (TREE_CODE (type) == COMPLEX_TYPE
165 || TREE_CODE (type) == VECTOR_TYPE)
166 DECL_GIMPLE_REG_P (tmp_var) = 1;
168 info->new_local_var_chain = tmp_var;
170 return tmp_var;
173 /* Take the address of EXP to be used within function CONTEXT.
174 Mark it for addressability as necessary. */
176 tree
177 build_addr (tree exp, tree context)
179 tree base = exp;
180 tree save_context;
181 tree retval;
183 while (handled_component_p (base))
184 base = TREE_OPERAND (base, 0);
186 if (DECL_P (base))
187 TREE_ADDRESSABLE (base) = 1;
189 /* Building the ADDR_EXPR will compute a set of properties for
190 that ADDR_EXPR. Those properties are unfortunately context
191 specific, i.e., they are dependent on CURRENT_FUNCTION_DECL.
193 Temporarily set CURRENT_FUNCTION_DECL to the desired context,
194 build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL. That
195 way the properties are for the ADDR_EXPR are computed properly. */
196 save_context = current_function_decl;
197 current_function_decl = context;
198 retval = build_fold_addr_expr (exp);
199 current_function_decl = save_context;
200 return retval;
203 /* Insert FIELD into TYPE, sorted by alignment requirements. */
205 void
206 insert_field_into_struct (tree type, tree field)
208 tree *p;
210 DECL_CONTEXT (field) = type;
212 for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
213 if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
214 break;
216 DECL_CHAIN (field) = *p;
217 *p = field;
219 /* Set correct alignment for frame struct type. */
220 if (TYPE_ALIGN (type) < DECL_ALIGN (field))
221 TYPE_ALIGN (type) = DECL_ALIGN (field);
224 /* Build or return the RECORD_TYPE that describes the frame state that is
225 shared between INFO->CONTEXT and its nested functions. This record will
226 not be complete until finalize_nesting_tree; up until that point we'll
227 be adding fields as necessary.
229 We also build the DECL that represents this frame in the function. */
231 static tree
232 get_frame_type (struct nesting_info *info)
234 tree type = info->frame_type;
235 if (!type)
237 char *name;
239 type = make_node (RECORD_TYPE);
241 name = concat ("FRAME.",
242 IDENTIFIER_POINTER (DECL_NAME (info->context)),
243 NULL);
244 TYPE_NAME (type) = get_identifier (name);
245 free (name);
247 info->frame_type = type;
248 info->frame_decl = create_tmp_var_for (info, type, "FRAME");
249 DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
251 /* ??? Always make it addressable for now, since it is meant to
252 be pointed to by the static chain pointer. This pessimizes
253 when it turns out that no static chains are needed because
254 the nested functions referencing non-local variables are not
255 reachable, but the true pessimization is to create the non-
256 local frame structure in the first place. */
257 TREE_ADDRESSABLE (info->frame_decl) = 1;
259 return type;
262 /* Return true if DECL should be referenced by pointer in the non-local
263 frame structure. */
265 static bool
266 use_pointer_in_frame (tree decl)
268 if (TREE_CODE (decl) == PARM_DECL)
270 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
271 sized decls, and inefficient to copy large aggregates. Don't bother
272 moving anything but scalar variables. */
273 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
275 else
277 /* Variable sized types make things "interesting" in the frame. */
278 return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
282 /* Given DECL, a non-locally accessed variable, find or create a field
283 in the non-local frame structure for the given nesting context. */
285 static tree
286 lookup_field_for_decl (struct nesting_info *info, tree decl,
287 enum insert_option insert)
289 void **slot;
291 if (insert == NO_INSERT)
293 slot = pointer_map_contains (info->field_map, decl);
294 return slot ? (tree) *slot : NULL_TREE;
297 slot = pointer_map_insert (info->field_map, decl);
298 if (!*slot)
300 tree field = make_node (FIELD_DECL);
301 DECL_NAME (field) = DECL_NAME (decl);
303 if (use_pointer_in_frame (decl))
305 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
306 DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
307 DECL_NONADDRESSABLE_P (field) = 1;
309 else
311 TREE_TYPE (field) = TREE_TYPE (decl);
312 DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
313 DECL_ALIGN (field) = DECL_ALIGN (decl);
314 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
315 TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
316 DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
317 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
320 insert_field_into_struct (get_frame_type (info), field);
321 *slot = field;
323 if (TREE_CODE (decl) == PARM_DECL)
324 info->any_parm_remapped = true;
327 return (tree) *slot;
330 /* Build or return the variable that holds the static chain within
331 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
333 static tree
334 get_chain_decl (struct nesting_info *info)
336 tree decl = info->chain_decl;
338 if (!decl)
340 tree type;
342 type = get_frame_type (info->outer);
343 type = build_pointer_type (type);
345 /* Note that this variable is *not* entered into any BIND_EXPR;
346 the construction of this variable is handled specially in
347 expand_function_start and initialize_inlined_parameters.
348 Note also that it's represented as a parameter. This is more
349 close to the truth, since the initial value does come from
350 the caller. */
351 decl = build_decl (DECL_SOURCE_LOCATION (info->context),
352 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
353 DECL_ARTIFICIAL (decl) = 1;
354 DECL_IGNORED_P (decl) = 1;
355 TREE_USED (decl) = 1;
356 DECL_CONTEXT (decl) = info->context;
357 DECL_ARG_TYPE (decl) = type;
359 /* Tell tree-inline.c that we never write to this variable, so
360 it can copy-prop the replacement value immediately. */
361 TREE_READONLY (decl) = 1;
363 info->chain_decl = decl;
365 if (dump_file
366 && (dump_flags & TDF_DETAILS)
367 && !DECL_STATIC_CHAIN (info->context))
368 fprintf (dump_file, "Setting static-chain for %s\n",
369 lang_hooks.decl_printable_name (info->context, 2));
371 DECL_STATIC_CHAIN (info->context) = 1;
373 return decl;
376 /* Build or return the field within the non-local frame state that holds
377 the static chain for INFO->CONTEXT. This is the way to walk back up
378 multiple nesting levels. */
380 static tree
381 get_chain_field (struct nesting_info *info)
383 tree field = info->chain_field;
385 if (!field)
387 tree type = build_pointer_type (get_frame_type (info->outer));
389 field = make_node (FIELD_DECL);
390 DECL_NAME (field) = get_identifier ("__chain");
391 TREE_TYPE (field) = type;
392 DECL_ALIGN (field) = TYPE_ALIGN (type);
393 DECL_NONADDRESSABLE_P (field) = 1;
395 insert_field_into_struct (get_frame_type (info), field);
397 info->chain_field = field;
399 if (dump_file
400 && (dump_flags & TDF_DETAILS)
401 && !DECL_STATIC_CHAIN (info->context))
402 fprintf (dump_file, "Setting static-chain for %s\n",
403 lang_hooks.decl_printable_name (info->context, 2));
405 DECL_STATIC_CHAIN (info->context) = 1;
407 return field;
410 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
412 static tree
413 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
414 gimple call)
416 tree t;
418 t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
419 gimple_call_set_lhs (call, t);
420 if (! gsi_end_p (*gsi))
421 gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
422 gsi_insert_before (gsi, call, GSI_SAME_STMT);
424 return t;
428 /* Copy EXP into a temporary. Allocate the temporary in the context of
429 INFO and insert the initialization statement before GSI. */
431 static tree
432 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
434 tree t;
435 gimple stmt;
437 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
438 stmt = gimple_build_assign (t, exp);
439 if (! gsi_end_p (*gsi))
440 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
441 gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
443 return t;
447 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
449 static tree
450 gsi_gimplify_val (struct nesting_info *info, tree exp,
451 gimple_stmt_iterator *gsi)
453 if (is_gimple_val (exp))
454 return exp;
455 else
456 return init_tmp_var (info, exp, gsi);
459 /* Similarly, but copy from the temporary and insert the statement
460 after the iterator. */
462 static tree
463 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
465 tree t;
466 gimple stmt;
468 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
469 stmt = gimple_build_assign (exp, t);
470 if (! gsi_end_p (*gsi))
471 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
472 gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
474 return t;
477 /* Build or return the type used to represent a nested function trampoline. */
479 static GTY(()) tree trampoline_type;
481 static tree
482 get_trampoline_type (struct nesting_info *info)
484 unsigned align, size;
485 tree t;
487 if (trampoline_type)
488 return trampoline_type;
490 align = TRAMPOLINE_ALIGNMENT;
491 size = TRAMPOLINE_SIZE;
493 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
494 then allocate extra space so that we can do dynamic alignment. */
495 if (align > STACK_BOUNDARY)
497 size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
498 align = STACK_BOUNDARY;
501 t = build_index_type (size_int (size - 1));
502 t = build_array_type (char_type_node, t);
503 t = build_decl (DECL_SOURCE_LOCATION (info->context),
504 FIELD_DECL, get_identifier ("__data"), t);
505 DECL_ALIGN (t) = align;
506 DECL_USER_ALIGN (t) = 1;
508 trampoline_type = make_node (RECORD_TYPE);
509 TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
510 TYPE_FIELDS (trampoline_type) = t;
511 layout_type (trampoline_type);
512 DECL_CONTEXT (t) = trampoline_type;
514 return trampoline_type;
517 /* Given DECL, a nested function, find or create a field in the non-local
518 frame structure for a trampoline for this function. */
520 static tree
521 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
522 enum insert_option insert)
524 void **slot;
526 if (insert == NO_INSERT)
528 slot = pointer_map_contains (info->var_map, decl);
529 return slot ? (tree) *slot : NULL_TREE;
532 slot = pointer_map_insert (info->var_map, decl);
533 if (!*slot)
535 tree field = make_node (FIELD_DECL);
536 DECL_NAME (field) = DECL_NAME (decl);
537 TREE_TYPE (field) = get_trampoline_type (info);
538 TREE_ADDRESSABLE (field) = 1;
540 insert_field_into_struct (get_frame_type (info), field);
541 *slot = field;
543 info->any_tramp_created = true;
546 return (tree) *slot;
549 /* Build or return the field within the non-local frame state that holds
550 the non-local goto "jmp_buf". The buffer itself is maintained by the
551 rtl middle-end as dynamic stack space is allocated. */
553 static tree
554 get_nl_goto_field (struct nesting_info *info)
556 tree field = info->nl_goto_field;
557 if (!field)
559 unsigned size;
560 tree type;
562 /* For __builtin_nonlocal_goto, we need N words. The first is the
563 frame pointer, the rest is for the target's stack pointer save
564 area. The number of words is controlled by STACK_SAVEAREA_MODE;
565 not the best interface, but it'll do for now. */
566 if (Pmode == ptr_mode)
567 type = ptr_type_node;
568 else
569 type = lang_hooks.types.type_for_mode (Pmode, 1);
571 size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
572 size = size / GET_MODE_SIZE (Pmode);
573 size = size + 1;
575 type = build_array_type
576 (type, build_index_type (size_int (size)));
578 field = make_node (FIELD_DECL);
579 DECL_NAME (field) = get_identifier ("__nl_goto_buf");
580 TREE_TYPE (field) = type;
581 DECL_ALIGN (field) = TYPE_ALIGN (type);
582 TREE_ADDRESSABLE (field) = 1;
584 insert_field_into_struct (get_frame_type (info), field);
586 info->nl_goto_field = field;
589 return field;
592 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */
594 static void
595 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
596 struct nesting_info *info, gimple_seq *pseq)
598 struct walk_stmt_info wi;
600 memset (&wi, 0, sizeof (wi));
601 wi.info = info;
602 wi.val_only = true;
603 walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
607 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
609 static inline void
610 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
611 struct nesting_info *info)
613 gimple_seq body = gimple_body (info->context);
614 walk_body (callback_stmt, callback_op, info, &body);
615 gimple_set_body (info->context, body);
618 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
620 static void
621 walk_gimple_omp_for (gimple for_stmt,
622 walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
623 struct nesting_info *info)
625 gcc_assert (!is_gimple_omp_oacc_specifically (for_stmt));
627 struct walk_stmt_info wi;
628 gimple_seq seq;
629 tree t;
630 size_t i;
632 walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));
634 seq = NULL;
635 memset (&wi, 0, sizeof (wi));
636 wi.info = info;
637 wi.gsi = gsi_last (seq);
639 for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
641 wi.val_only = false;
642 walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
643 &wi, NULL);
644 wi.val_only = true;
645 wi.is_lhs = false;
646 walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
647 &wi, NULL);
649 wi.val_only = true;
650 wi.is_lhs = false;
651 walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
652 &wi, NULL);
654 t = gimple_omp_for_incr (for_stmt, i);
655 gcc_assert (BINARY_CLASS_P (t));
656 wi.val_only = false;
657 walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
658 wi.val_only = true;
659 wi.is_lhs = false;
660 walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
663 seq = gsi_seq (wi.gsi);
664 if (!gimple_seq_empty_p (seq))
666 gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
667 annotate_all_with_location (seq, gimple_location (for_stmt));
668 gimple_seq_add_seq (&pre_body, seq);
669 gimple_omp_for_set_pre_body (for_stmt, pre_body);
673 /* Similarly for ROOT and all functions nested underneath, depth first. */
675 static void
676 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
677 struct nesting_info *root)
679 struct nesting_info *n;
680 FOR_EACH_NEST_INFO (n, root)
681 walk_function (callback_stmt, callback_op, n);
685 /* We have to check for a fairly pathological case. The operands of function
686 nested function are to be interpreted in the context of the enclosing
687 function. So if any are variably-sized, they will get remapped when the
688 enclosing function is inlined. But that remapping would also have to be
689 done in the types of the PARM_DECLs of the nested function, meaning the
690 argument types of that function will disagree with the arguments in the
691 calls to that function. So we'd either have to make a copy of the nested
692 function corresponding to each time the enclosing function was inlined or
693 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
694 function. The former is not practical. The latter would still require
695 detecting this case to know when to add the conversions. So, for now at
696 least, we don't inline such an enclosing function.
698 We have to do that check recursively, so here return indicating whether
699 FNDECL has such a nested function. ORIG_FN is the function we were
700 trying to inline to use for checking whether any argument is variably
701 modified by anything in it.
703 It would be better to do this in tree-inline.c so that we could give
704 the appropriate warning for why a function can't be inlined, but that's
705 too late since the nesting structure has already been flattened and
706 adding a flag just to record this fact seems a waste of a flag. */
708 static bool
709 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
711 struct cgraph_node *cgn = cgraph_get_node (fndecl);
712 tree arg;
714 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
716 for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg))
717 if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
718 return true;
720 if (check_for_nested_with_variably_modified (cgn->decl,
721 orig_fndecl))
722 return true;
725 return false;
728 /* Construct our local datastructure describing the function nesting
729 tree rooted by CGN. */
731 static struct nesting_info *
732 create_nesting_tree (struct cgraph_node *cgn)
734 struct nesting_info *info = XCNEW (struct nesting_info);
735 info->field_map = pointer_map_create ();
736 info->var_map = pointer_map_create ();
737 info->mem_refs = pointer_set_create ();
738 info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
739 info->context = cgn->decl;
741 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
743 struct nesting_info *sub = create_nesting_tree (cgn);
744 sub->outer = info;
745 sub->next = info->inner;
746 info->inner = sub;
749 /* See discussion at check_for_nested_with_variably_modified for a
750 discussion of why this has to be here. */
751 if (check_for_nested_with_variably_modified (info->context, info->context))
752 DECL_UNINLINABLE (info->context) = true;
754 return info;
757 /* Return an expression computing the static chain for TARGET_CONTEXT
758 from INFO->CONTEXT. Insert any necessary computations before TSI. */
760 static tree
761 get_static_chain (struct nesting_info *info, tree target_context,
762 gimple_stmt_iterator *gsi)
764 struct nesting_info *i;
765 tree x;
767 if (info->context == target_context)
769 x = build_addr (info->frame_decl, target_context);
771 else
773 x = get_chain_decl (info);
775 for (i = info->outer; i->context != target_context; i = i->outer)
777 tree field = get_chain_field (i);
779 x = build_simple_mem_ref (x);
780 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
781 x = init_tmp_var (info, x, gsi);
785 return x;
789 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
790 frame as seen from INFO->CONTEXT. Insert any necessary computations
791 before GSI. */
793 static tree
794 get_frame_field (struct nesting_info *info, tree target_context,
795 tree field, gimple_stmt_iterator *gsi)
797 struct nesting_info *i;
798 tree x;
800 if (info->context == target_context)
802 /* Make sure frame_decl gets created. */
803 (void) get_frame_type (info);
804 x = info->frame_decl;
806 else
808 x = get_chain_decl (info);
810 for (i = info->outer; i->context != target_context; i = i->outer)
812 tree field = get_chain_field (i);
814 x = build_simple_mem_ref (x);
815 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
816 x = init_tmp_var (info, x, gsi);
819 x = build_simple_mem_ref (x);
822 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
823 return x;
826 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
828 /* A subroutine of convert_nonlocal_reference_op. Create a local variable
829 in the nested function with DECL_VALUE_EXPR set to reference the true
830 variable in the parent function. This is used both for debug info
831 and in OpenMP lowering. */
833 static tree
834 get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
836 tree target_context;
837 struct nesting_info *i;
838 tree x, field, new_decl;
839 void **slot;
841 slot = pointer_map_insert (info->var_map, decl);
843 if (*slot)
844 return (tree) *slot;
846 target_context = decl_function_context (decl);
848 /* A copy of the code in get_frame_field, but without the temporaries. */
849 if (info->context == target_context)
851 /* Make sure frame_decl gets created. */
852 (void) get_frame_type (info);
853 x = info->frame_decl;
854 i = info;
856 else
858 x = get_chain_decl (info);
859 for (i = info->outer; i->context != target_context; i = i->outer)
861 field = get_chain_field (i);
862 x = build_simple_mem_ref (x);
863 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
865 x = build_simple_mem_ref (x);
868 field = lookup_field_for_decl (i, decl, INSERT);
869 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
870 if (use_pointer_in_frame (decl))
871 x = build_simple_mem_ref (x);
873 /* ??? We should be remapping types as well, surely. */
874 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
875 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
876 DECL_CONTEXT (new_decl) = info->context;
877 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
878 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
879 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
880 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
881 TREE_READONLY (new_decl) = TREE_READONLY (decl);
882 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
883 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
884 if ((TREE_CODE (decl) == PARM_DECL
885 || TREE_CODE (decl) == RESULT_DECL
886 || TREE_CODE (decl) == VAR_DECL)
887 && DECL_BY_REFERENCE (decl))
888 DECL_BY_REFERENCE (new_decl) = 1;
890 SET_DECL_VALUE_EXPR (new_decl, x);
891 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
893 *slot = new_decl;
894 DECL_CHAIN (new_decl) = info->debug_var_chain;
895 info->debug_var_chain = new_decl;
897 if (!optimize
898 && info->context != target_context
899 && variably_modified_type_p (TREE_TYPE (decl), NULL))
900 note_nonlocal_vla_type (info, TREE_TYPE (decl));
902 return new_decl;
906 /* Callback for walk_gimple_stmt, rewrite all references to VAR
907 and PARM_DECLs that belong to outer functions.
909 The rewrite will involve some number of structure accesses back up
910 the static chain. E.g. for a variable FOO up one nesting level it'll
911 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
912 indirections apply to decls for which use_pointer_in_frame is true. */
914 static tree
915 convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
917 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
918 struct nesting_info *const info = (struct nesting_info *) wi->info;
919 tree t = *tp;
921 *walk_subtrees = 0;
922 switch (TREE_CODE (t))
924 case VAR_DECL:
925 /* Non-automatic variables are never processed. */
926 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
927 break;
928 /* FALLTHRU */
930 case PARM_DECL:
931 if (decl_function_context (t) != info->context)
933 tree x;
934 wi->changed = true;
936 x = get_nonlocal_debug_decl (info, t);
937 if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
939 tree target_context = decl_function_context (t);
940 struct nesting_info *i;
941 for (i = info->outer; i->context != target_context; i = i->outer)
942 continue;
943 x = lookup_field_for_decl (i, t, INSERT);
944 x = get_frame_field (info, target_context, x, &wi->gsi);
945 if (use_pointer_in_frame (t))
947 x = init_tmp_var (info, x, &wi->gsi);
948 x = build_simple_mem_ref (x);
952 if (wi->val_only)
954 if (wi->is_lhs)
955 x = save_tmp_var (info, x, &wi->gsi);
956 else
957 x = init_tmp_var (info, x, &wi->gsi);
960 *tp = x;
962 break;
964 case LABEL_DECL:
965 /* We're taking the address of a label from a parent function, but
966 this is not itself a non-local goto. Mark the label such that it
967 will not be deleted, much as we would with a label address in
968 static storage. */
969 if (decl_function_context (t) != info->context)
970 FORCED_LABEL (t) = 1;
971 break;
973 case ADDR_EXPR:
975 bool save_val_only = wi->val_only;
977 wi->val_only = false;
978 wi->is_lhs = false;
979 wi->changed = false;
980 walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
981 wi->val_only = true;
983 if (wi->changed)
985 tree save_context;
987 /* If we changed anything, we might no longer be directly
988 referencing a decl. */
989 save_context = current_function_decl;
990 current_function_decl = info->context;
991 recompute_tree_invariant_for_addr_expr (t);
992 current_function_decl = save_context;
994 /* If the callback converted the address argument in a context
995 where we only accept variables (and min_invariant, presumably),
996 then compute the address into a temporary. */
997 if (save_val_only)
998 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
999 t, &wi->gsi);
1002 break;
1004 case REALPART_EXPR:
1005 case IMAGPART_EXPR:
1006 case COMPONENT_REF:
1007 case ARRAY_REF:
1008 case ARRAY_RANGE_REF:
1009 case BIT_FIELD_REF:
1010 /* Go down this entire nest and just look at the final prefix and
1011 anything that describes the references. Otherwise, we lose track
1012 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1013 wi->val_only = true;
1014 wi->is_lhs = false;
1015 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1017 if (TREE_CODE (t) == COMPONENT_REF)
1018 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
1019 NULL);
1020 else if (TREE_CODE (t) == ARRAY_REF
1021 || TREE_CODE (t) == ARRAY_RANGE_REF)
1023 walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
1024 wi, NULL);
1025 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
1026 wi, NULL);
1027 walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
1028 wi, NULL);
1031 wi->val_only = false;
1032 walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
1033 break;
1035 case VIEW_CONVERT_EXPR:
1036 /* Just request to look at the subtrees, leaving val_only and lhs
1037 untouched. This might actually be for !val_only + lhs, in which
1038 case we don't want to force a replacement by a temporary. */
1039 *walk_subtrees = 1;
1040 break;
1042 default:
1043 if (!IS_TYPE_OR_DECL_P (t))
1045 *walk_subtrees = 1;
1046 wi->val_only = true;
1047 wi->is_lhs = false;
1049 break;
1052 return NULL_TREE;
1055 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1056 struct walk_stmt_info *);
1058 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1059 and PARM_DECLs that belong to outer functions. */
1061 static bool
1062 convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1064 struct nesting_info *const info = (struct nesting_info *) wi->info;
1065 bool need_chain = false, need_stmts = false;
1066 tree clause, decl;
1067 int dummy;
1068 bitmap new_suppress;
1070 new_suppress = BITMAP_GGC_ALLOC ();
1071 bitmap_copy (new_suppress, info->suppress_expansion);
1073 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1075 switch (OMP_CLAUSE_CODE (clause))
1077 case OMP_CLAUSE_REDUCTION:
1078 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1079 need_stmts = true;
1080 goto do_decl_clause;
1082 case OMP_CLAUSE_LASTPRIVATE:
1083 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1084 need_stmts = true;
1085 goto do_decl_clause;
1087 case OMP_CLAUSE_LINEAR:
1088 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1089 need_stmts = true;
1090 goto do_decl_clause;
1092 case OMP_CLAUSE_PRIVATE:
1093 case OMP_CLAUSE_FIRSTPRIVATE:
1094 case OMP_CLAUSE_COPYPRIVATE:
1095 case OMP_CLAUSE_SHARED:
1096 do_decl_clause:
1097 decl = OMP_CLAUSE_DECL (clause);
1098 if (TREE_CODE (decl) == VAR_DECL
1099 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1100 break;
1101 if (decl_function_context (decl) != info->context)
1103 bitmap_set_bit (new_suppress, DECL_UID (decl));
1104 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1105 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1106 need_chain = true;
1108 break;
1110 case OMP_CLAUSE_SCHEDULE:
1111 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1112 break;
1113 /* FALLTHRU */
1114 case OMP_CLAUSE_FINAL:
1115 case OMP_CLAUSE_IF:
1116 case OMP_CLAUSE_NUM_THREADS:
1117 case OMP_CLAUSE_DEPEND:
1118 wi->val_only = true;
1119 wi->is_lhs = false;
1120 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1121 &dummy, wi);
1122 break;
1124 case OMP_CLAUSE_NOWAIT:
1125 case OMP_CLAUSE_ORDERED:
1126 case OMP_CLAUSE_DEFAULT:
1127 case OMP_CLAUSE_COPYIN:
1128 case OMP_CLAUSE_COLLAPSE:
1129 case OMP_CLAUSE_UNTIED:
1130 case OMP_CLAUSE_MERGEABLE:
1131 break;
1133 default:
1134 gcc_unreachable ();
1138 info->suppress_expansion = new_suppress;
1140 if (need_stmts)
1141 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1142 switch (OMP_CLAUSE_CODE (clause))
1144 case OMP_CLAUSE_REDUCTION:
1145 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1147 tree old_context
1148 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1149 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1150 = info->context;
1151 walk_body (convert_nonlocal_reference_stmt,
1152 convert_nonlocal_reference_op, info,
1153 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
1154 walk_body (convert_nonlocal_reference_stmt,
1155 convert_nonlocal_reference_op, info,
1156 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
1157 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1158 = old_context;
1160 break;
1162 case OMP_CLAUSE_LASTPRIVATE:
1163 walk_body (convert_nonlocal_reference_stmt,
1164 convert_nonlocal_reference_op, info,
1165 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
1166 break;
1168 case OMP_CLAUSE_LINEAR:
1169 walk_body (convert_nonlocal_reference_stmt,
1170 convert_nonlocal_reference_op, info,
1171 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
1172 break;
1174 default:
1175 break;
1178 return need_chain;
1181 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1183 static void
1184 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1186 while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1187 type = TREE_TYPE (type);
1189 if (TYPE_NAME (type)
1190 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1191 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1192 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1194 while (POINTER_TYPE_P (type)
1195 || TREE_CODE (type) == VECTOR_TYPE
1196 || TREE_CODE (type) == FUNCTION_TYPE
1197 || TREE_CODE (type) == METHOD_TYPE)
1198 type = TREE_TYPE (type);
1200 if (TREE_CODE (type) == ARRAY_TYPE)
1202 tree domain, t;
1204 note_nonlocal_vla_type (info, TREE_TYPE (type));
1205 domain = TYPE_DOMAIN (type);
1206 if (domain)
1208 t = TYPE_MIN_VALUE (domain);
1209 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1210 && decl_function_context (t) != info->context)
1211 get_nonlocal_debug_decl (info, t);
1212 t = TYPE_MAX_VALUE (domain);
1213 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1214 && decl_function_context (t) != info->context)
1215 get_nonlocal_debug_decl (info, t);
1220 /* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
1221 in BLOCK. */
1223 static void
1224 note_nonlocal_block_vlas (struct nesting_info *info, tree block)
1226 tree var;
1228 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
1229 if (TREE_CODE (var) == VAR_DECL
1230 && variably_modified_type_p (TREE_TYPE (var), NULL)
1231 && DECL_HAS_VALUE_EXPR_P (var)
1232 && decl_function_context (var) != info->context)
1233 note_nonlocal_vla_type (info, TREE_TYPE (var));
1236 /* Callback for walk_gimple_stmt. Rewrite all references to VAR and
1237 PARM_DECLs that belong to outer functions. This handles statements
1238 that are not handled via the standard recursion done in
1239 walk_gimple_stmt. STMT is the statement to examine, DATA is as in
1240 convert_nonlocal_reference_op. Set *HANDLED_OPS_P to true if all the
1241 operands of STMT have been handled by this function. */
1243 static tree
1244 convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1245 struct walk_stmt_info *wi)
1247 struct nesting_info *info = (struct nesting_info *) wi->info;
1248 tree save_local_var_chain;
1249 bitmap save_suppress;
1250 gimple stmt = gsi_stmt (*gsi);
1252 switch (gimple_code (stmt))
1254 case GIMPLE_GOTO:
1255 /* Don't walk non-local gotos for now. */
1256 if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
1258 wi->val_only = true;
1259 wi->is_lhs = false;
1260 *handled_ops_p = true;
1261 return NULL_TREE;
1263 break;
1265 case GIMPLE_OACC_KERNELS:
1266 case GIMPLE_OACC_PARALLEL:
1267 gcc_unreachable ();
1269 case GIMPLE_OMP_PARALLEL:
1270 case GIMPLE_OMP_TASK:
1271 save_suppress = info->suppress_expansion;
1272 if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1273 wi))
1275 tree c, decl;
1276 decl = get_chain_decl (info);
1277 c = build_omp_clause (gimple_location (stmt),
1278 OMP_CLAUSE_FIRSTPRIVATE);
1279 OMP_CLAUSE_DECL (c) = decl;
1280 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1281 gimple_omp_taskreg_set_clauses (stmt, c);
1284 save_local_var_chain = info->new_local_var_chain;
1285 info->new_local_var_chain = NULL;
1287 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1288 info, gimple_omp_body_ptr (stmt));
1290 if (info->new_local_var_chain)
1291 declare_vars (info->new_local_var_chain,
1292 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1293 false);
1294 info->new_local_var_chain = save_local_var_chain;
1295 info->suppress_expansion = save_suppress;
1296 break;
1298 case GIMPLE_OMP_FOR:
1299 gcc_assert (!is_gimple_omp_oacc_specifically (stmt));
1300 save_suppress = info->suppress_expansion;
1301 convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1302 walk_gimple_omp_for (stmt, convert_nonlocal_reference_stmt,
1303 convert_nonlocal_reference_op, info);
1304 walk_body (convert_nonlocal_reference_stmt,
1305 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
1306 info->suppress_expansion = save_suppress;
1307 break;
1309 case GIMPLE_OMP_SECTIONS:
1310 save_suppress = info->suppress_expansion;
1311 convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1312 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1313 info, gimple_omp_body_ptr (stmt));
1314 info->suppress_expansion = save_suppress;
1315 break;
1317 case GIMPLE_OMP_SINGLE:
1318 save_suppress = info->suppress_expansion;
1319 convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1320 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1321 info, gimple_omp_body_ptr (stmt));
1322 info->suppress_expansion = save_suppress;
1323 break;
1325 case GIMPLE_OMP_TARGET:
1326 gcc_assert (!is_gimple_omp_oacc_specifically (stmt));
1327 save_suppress = info->suppress_expansion;
1328 convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
1329 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1330 info, gimple_omp_body_ptr (stmt));
1331 info->suppress_expansion = save_suppress;
1332 break;
1334 case GIMPLE_OMP_TEAMS:
1335 save_suppress = info->suppress_expansion;
1336 convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
1337 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1338 info, gimple_omp_body_ptr (stmt));
1339 info->suppress_expansion = save_suppress;
1340 break;
1342 case GIMPLE_OMP_SECTION:
1343 case GIMPLE_OMP_MASTER:
1344 case GIMPLE_OMP_TASKGROUP:
1345 case GIMPLE_OMP_ORDERED:
1346 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1347 info, gimple_omp_body_ptr (stmt));
1348 break;
1350 case GIMPLE_BIND:
1351 if (!optimize && gimple_bind_block (stmt))
1352 note_nonlocal_block_vlas (info, gimple_bind_block (stmt));
1354 for (tree var = gimple_bind_vars (stmt); var; var = DECL_CHAIN (var))
1355 if (TREE_CODE (var) == NAMELIST_DECL)
1357 /* Adjust decls mentioned in NAMELIST_DECL. */
1358 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
1359 tree decl;
1360 unsigned int i;
1362 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
1364 if (TREE_CODE (decl) == VAR_DECL
1365 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1366 continue;
1367 if (decl_function_context (decl) != info->context)
1368 CONSTRUCTOR_ELT (decls, i)->value
1369 = get_nonlocal_debug_decl (info, decl);
1373 *handled_ops_p = false;
1374 return NULL_TREE;
1376 case GIMPLE_COND:
1377 wi->val_only = true;
1378 wi->is_lhs = false;
1379 *handled_ops_p = false;
1380 return NULL_TREE;
1382 default:
1383 /* For every other statement that we are not interested in
1384 handling here, let the walker traverse the operands. */
1385 *handled_ops_p = false;
1386 return NULL_TREE;
1389 /* We have handled all of STMT operands, no need to traverse the operands. */
1390 *handled_ops_p = true;
1391 return NULL_TREE;
1395 /* A subroutine of convert_local_reference. Create a local variable
1396 in the parent function with DECL_VALUE_EXPR set to reference the
1397 field in FRAME. This is used both for debug info and in OpenMP
1398 lowering. */
1400 static tree
1401 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1403 tree x, new_decl;
1404 void **slot;
1406 slot = pointer_map_insert (info->var_map, decl);
1407 if (*slot)
1408 return (tree) *slot;
1410 /* Make sure frame_decl gets created. */
1411 (void) get_frame_type (info);
1412 x = info->frame_decl;
1413 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1415 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1416 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1417 DECL_CONTEXT (new_decl) = info->context;
1418 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1419 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1420 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1421 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1422 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1423 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1424 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1425 if ((TREE_CODE (decl) == PARM_DECL
1426 || TREE_CODE (decl) == RESULT_DECL
1427 || TREE_CODE (decl) == VAR_DECL)
1428 && DECL_BY_REFERENCE (decl))
1429 DECL_BY_REFERENCE (new_decl) = 1;
1431 SET_DECL_VALUE_EXPR (new_decl, x);
1432 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1433 *slot = new_decl;
1435 DECL_CHAIN (new_decl) = info->debug_var_chain;
1436 info->debug_var_chain = new_decl;
1438 /* Do not emit debug info twice. */
1439 DECL_IGNORED_P (decl) = 1;
1441 return new_decl;
1445 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1446 and PARM_DECLs that were referenced by inner nested functions.
1447 The rewrite will be a structure reference to the local frame variable. */
1449 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1451 static tree
1452 convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
1454 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1455 struct nesting_info *const info = (struct nesting_info *) wi->info;
1456 tree t = *tp, field, x;
1457 bool save_val_only;
1459 *walk_subtrees = 0;
1460 switch (TREE_CODE (t))
1462 case VAR_DECL:
1463 /* Non-automatic variables are never processed. */
1464 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1465 break;
1466 /* FALLTHRU */
1468 case PARM_DECL:
1469 if (decl_function_context (t) == info->context)
1471 /* If we copied a pointer to the frame, then the original decl
1472 is used unchanged in the parent function. */
1473 if (use_pointer_in_frame (t))
1474 break;
1476 /* No need to transform anything if no child references the
1477 variable. */
1478 field = lookup_field_for_decl (info, t, NO_INSERT);
1479 if (!field)
1480 break;
1481 wi->changed = true;
1483 x = get_local_debug_decl (info, t, field);
1484 if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1485 x = get_frame_field (info, info->context, field, &wi->gsi);
1487 if (wi->val_only)
1489 if (wi->is_lhs)
1490 x = save_tmp_var (info, x, &wi->gsi);
1491 else
1492 x = init_tmp_var (info, x, &wi->gsi);
1495 *tp = x;
1497 break;
1499 case ADDR_EXPR:
1500 save_val_only = wi->val_only;
1501 wi->val_only = false;
1502 wi->is_lhs = false;
1503 wi->changed = false;
1504 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
1505 wi->val_only = save_val_only;
1507 /* If we converted anything ... */
1508 if (wi->changed)
1510 tree save_context;
1512 /* Then the frame decl is now addressable. */
1513 TREE_ADDRESSABLE (info->frame_decl) = 1;
1515 save_context = current_function_decl;
1516 current_function_decl = info->context;
1517 recompute_tree_invariant_for_addr_expr (t);
1518 current_function_decl = save_context;
1520 /* If we are in a context where we only accept values, then
1521 compute the address into a temporary. */
1522 if (save_val_only)
1523 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1524 t, &wi->gsi);
1526 break;
1528 case REALPART_EXPR:
1529 case IMAGPART_EXPR:
1530 case COMPONENT_REF:
1531 case ARRAY_REF:
1532 case ARRAY_RANGE_REF:
1533 case BIT_FIELD_REF:
1534 /* Go down this entire nest and just look at the final prefix and
1535 anything that describes the references. Otherwise, we lose track
1536 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1537 save_val_only = wi->val_only;
1538 wi->val_only = true;
1539 wi->is_lhs = false;
1540 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1542 if (TREE_CODE (t) == COMPONENT_REF)
1543 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1544 NULL);
1545 else if (TREE_CODE (t) == ARRAY_REF
1546 || TREE_CODE (t) == ARRAY_RANGE_REF)
1548 walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
1549 NULL);
1550 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1551 NULL);
1552 walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
1553 NULL);
1556 wi->val_only = false;
1557 walk_tree (tp, convert_local_reference_op, wi, NULL);
1558 wi->val_only = save_val_only;
1559 break;
1561 case MEM_REF:
1562 save_val_only = wi->val_only;
1563 wi->val_only = true;
1564 wi->is_lhs = false;
1565 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
1566 wi, NULL);
1567 /* We need to re-fold the MEM_REF as component references as
1568 part of a ADDR_EXPR address are not allowed. But we cannot
1569 fold here, as the chain record type is not yet finalized. */
1570 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
1571 && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
1572 pointer_set_insert (info->mem_refs, tp);
1573 wi->val_only = save_val_only;
1574 break;
1576 case VIEW_CONVERT_EXPR:
1577 /* Just request to look at the subtrees, leaving val_only and lhs
1578 untouched. This might actually be for !val_only + lhs, in which
1579 case we don't want to force a replacement by a temporary. */
1580 *walk_subtrees = 1;
1581 break;
1583 default:
1584 if (!IS_TYPE_OR_DECL_P (t))
1586 *walk_subtrees = 1;
1587 wi->val_only = true;
1588 wi->is_lhs = false;
1590 break;
1593 return NULL_TREE;
1596 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
1597 struct walk_stmt_info *);
1599 /* Helper for convert_local_reference. Convert all the references in
1600 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
1602 static bool
1603 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1605 struct nesting_info *const info = (struct nesting_info *) wi->info;
1606 bool need_frame = false, need_stmts = false;
1607 tree clause, decl;
1608 int dummy;
1609 bitmap new_suppress;
1611 new_suppress = BITMAP_GGC_ALLOC ();
1612 bitmap_copy (new_suppress, info->suppress_expansion);
1614 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1616 switch (OMP_CLAUSE_CODE (clause))
1618 case OMP_CLAUSE_REDUCTION:
1619 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1620 need_stmts = true;
1621 goto do_decl_clause;
1623 case OMP_CLAUSE_LASTPRIVATE:
1624 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1625 need_stmts = true;
1626 goto do_decl_clause;
1628 case OMP_CLAUSE_LINEAR:
1629 if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
1630 need_stmts = true;
1631 goto do_decl_clause;
1633 case OMP_CLAUSE_PRIVATE:
1634 case OMP_CLAUSE_FIRSTPRIVATE:
1635 case OMP_CLAUSE_COPYPRIVATE:
1636 case OMP_CLAUSE_SHARED:
1637 do_decl_clause:
1638 decl = OMP_CLAUSE_DECL (clause);
1639 if (TREE_CODE (decl) == VAR_DECL
1640 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1641 break;
1642 if (decl_function_context (decl) == info->context
1643 && !use_pointer_in_frame (decl))
1645 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1646 if (field)
1648 bitmap_set_bit (new_suppress, DECL_UID (decl));
1649 OMP_CLAUSE_DECL (clause)
1650 = get_local_debug_decl (info, decl, field);
1651 need_frame = true;
1654 break;
1656 case OMP_CLAUSE_SCHEDULE:
1657 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1658 break;
1659 /* FALLTHRU */
1660 case OMP_CLAUSE_FINAL:
1661 case OMP_CLAUSE_IF:
1662 case OMP_CLAUSE_NUM_THREADS:
1663 case OMP_CLAUSE_DEPEND:
1664 wi->val_only = true;
1665 wi->is_lhs = false;
1666 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
1667 wi);
1668 break;
1670 case OMP_CLAUSE_NOWAIT:
1671 case OMP_CLAUSE_ORDERED:
1672 case OMP_CLAUSE_DEFAULT:
1673 case OMP_CLAUSE_COPYIN:
1674 case OMP_CLAUSE_COLLAPSE:
1675 case OMP_CLAUSE_UNTIED:
1676 case OMP_CLAUSE_MERGEABLE:
1677 break;
1679 default:
1680 gcc_unreachable ();
1684 info->suppress_expansion = new_suppress;
1686 if (need_stmts)
1687 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1688 switch (OMP_CLAUSE_CODE (clause))
1690 case OMP_CLAUSE_REDUCTION:
1691 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1693 tree old_context
1694 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1695 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1696 = info->context;
1697 walk_body (convert_local_reference_stmt,
1698 convert_local_reference_op, info,
1699 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
1700 walk_body (convert_local_reference_stmt,
1701 convert_local_reference_op, info,
1702 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
1703 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1704 = old_context;
1706 break;
1708 case OMP_CLAUSE_LASTPRIVATE:
1709 walk_body (convert_local_reference_stmt,
1710 convert_local_reference_op, info,
1711 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
1712 break;
1714 case OMP_CLAUSE_LINEAR:
1715 walk_body (convert_local_reference_stmt,
1716 convert_local_reference_op, info,
1717 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause));
1718 break;
1720 default:
1721 break;
1724 return need_frame;
1728 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1729 and PARM_DECLs that were referenced by inner nested functions.
1730 The rewrite will be a structure reference to the local frame variable. */
1732 static tree
1733 convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1734 struct walk_stmt_info *wi)
1736 struct nesting_info *info = (struct nesting_info *) wi->info;
1737 tree save_local_var_chain;
1738 bitmap save_suppress;
1739 gimple stmt = gsi_stmt (*gsi);
1741 switch (gimple_code (stmt))
1743 case GIMPLE_OACC_KERNELS:
1744 case GIMPLE_OACC_PARALLEL:
1745 gcc_unreachable ();
1747 case GIMPLE_OMP_PARALLEL:
1748 case GIMPLE_OMP_TASK:
1749 save_suppress = info->suppress_expansion;
1750 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1751 wi))
1753 tree c;
1754 (void) get_frame_type (info);
1755 c = build_omp_clause (gimple_location (stmt),
1756 OMP_CLAUSE_SHARED);
1757 OMP_CLAUSE_DECL (c) = info->frame_decl;
1758 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1759 gimple_omp_taskreg_set_clauses (stmt, c);
1762 save_local_var_chain = info->new_local_var_chain;
1763 info->new_local_var_chain = NULL;
1765 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
1766 gimple_omp_body_ptr (stmt));
1768 if (info->new_local_var_chain)
1769 declare_vars (info->new_local_var_chain,
1770 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
1771 info->new_local_var_chain = save_local_var_chain;
1772 info->suppress_expansion = save_suppress;
1773 break;
1775 case GIMPLE_OMP_FOR:
1776 gcc_assert (!is_gimple_omp_oacc_specifically (stmt));
1777 save_suppress = info->suppress_expansion;
1778 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1779 walk_gimple_omp_for (stmt, convert_local_reference_stmt,
1780 convert_local_reference_op, info);
1781 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1782 info, gimple_omp_body_ptr (stmt));
1783 info->suppress_expansion = save_suppress;
1784 break;
1786 case GIMPLE_OMP_SECTIONS:
1787 save_suppress = info->suppress_expansion;
1788 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1789 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1790 info, gimple_omp_body_ptr (stmt));
1791 info->suppress_expansion = save_suppress;
1792 break;
1794 case GIMPLE_OMP_SINGLE:
1795 save_suppress = info->suppress_expansion;
1796 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1797 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1798 info, gimple_omp_body_ptr (stmt));
1799 info->suppress_expansion = save_suppress;
1800 break;
1802 case GIMPLE_OMP_TARGET:
1803 gcc_assert (!is_gimple_omp_oacc_specifically (stmt));
1804 save_suppress = info->suppress_expansion;
1805 convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
1806 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1807 info, gimple_omp_body_ptr (stmt));
1808 info->suppress_expansion = save_suppress;
1809 break;
1811 case GIMPLE_OMP_TEAMS:
1812 save_suppress = info->suppress_expansion;
1813 convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
1814 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1815 info, gimple_omp_body_ptr (stmt));
1816 info->suppress_expansion = save_suppress;
1817 break;
1819 case GIMPLE_OMP_SECTION:
1820 case GIMPLE_OMP_MASTER:
1821 case GIMPLE_OMP_TASKGROUP:
1822 case GIMPLE_OMP_ORDERED:
1823 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1824 info, gimple_omp_body_ptr (stmt));
1825 break;
1827 case GIMPLE_COND:
1828 wi->val_only = true;
1829 wi->is_lhs = false;
1830 *handled_ops_p = false;
1831 return NULL_TREE;
1833 case GIMPLE_ASSIGN:
1834 if (gimple_clobber_p (stmt))
1836 tree lhs = gimple_assign_lhs (stmt);
1837 if (!use_pointer_in_frame (lhs)
1838 && lookup_field_for_decl (info, lhs, NO_INSERT))
1840 gsi_replace (gsi, gimple_build_nop (), true);
1841 break;
1844 *handled_ops_p = false;
1845 return NULL_TREE;
1847 case GIMPLE_BIND:
1848 for (tree var = gimple_bind_vars (stmt); var; var = DECL_CHAIN (var))
1849 if (TREE_CODE (var) == NAMELIST_DECL)
1851 /* Adjust decls mentioned in NAMELIST_DECL. */
1852 tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
1853 tree decl;
1854 unsigned int i;
1856 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
1858 if (TREE_CODE (decl) == VAR_DECL
1859 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1860 continue;
1861 if (decl_function_context (decl) == info->context
1862 && !use_pointer_in_frame (decl))
1864 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1865 if (field)
1867 CONSTRUCTOR_ELT (decls, i)->value
1868 = get_local_debug_decl (info, decl, field);
1874 *handled_ops_p = false;
1875 return NULL_TREE;
1877 default:
1878 /* For every other statement that we are not interested in
1879 handling here, let the walker traverse the operands. */
1880 *handled_ops_p = false;
1881 return NULL_TREE;
1884 /* Indicate that we have handled all the operands ourselves. */
1885 *handled_ops_p = true;
1886 return NULL_TREE;
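/* Illustrative sketch (not from the original source; names hypothetical):
   for the function that owns the frame, the convert_local_reference_*
   walkers redirect every reference to a decl that also escapes into a
   nested function through the frame struct, roughly

       x = 1;               becomes      FRAME.1.x = 1;
       use (&x);                         use (&FRAME.1.x);

   and when such a decl is needed inside an omp parallel/task body, the
   clause built in the GIMPLE_OMP_PARALLEL/TASK case above additionally
   marks the whole frame as shared(FRAME.1) so the spawned children
   operate on the same frame object.  */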
1890 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
1891 that reference labels from outer functions. The rewrite will be a
1892 call to __builtin_nonlocal_goto. */
1894 static tree
1895 convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1896 struct walk_stmt_info *wi)
1898 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
1899 tree label, new_label, target_context, x, field;
1900 void **slot;
1901 gimple call;
1902 gimple stmt = gsi_stmt (*gsi);
1904 if (gimple_code (stmt) != GIMPLE_GOTO)
1906 *handled_ops_p = false;
1907 return NULL_TREE;
1910 label = gimple_goto_dest (stmt);
1911 if (TREE_CODE (label) != LABEL_DECL)
1913 *handled_ops_p = false;
1914 return NULL_TREE;
1917 target_context = decl_function_context (label);
1918 if (target_context == info->context)
1920 *handled_ops_p = false;
1921 return NULL_TREE;
1924 for (i = info->outer; target_context != i->context; i = i->outer)
1925 continue;
1927 /* The original user label may also be used for a normal goto, therefore
1928 we must create a new label that will actually receive the abnormal
1929 control transfer. This new label will be marked DECL_NONLOCAL; this
1930 mark will trigger proper behavior in the cfg, as well as cause the
1931 (hairy target-specific) non-local goto receiver code to be generated
1932 when we expand rtl. Enter this association into var_map so that we
1933 can insert the new label into the IL during a second pass. */
1934 slot = pointer_map_insert (i->var_map, label);
1935 if (*slot == NULL)
1937 new_label = create_artificial_label (UNKNOWN_LOCATION);
1938 DECL_NONLOCAL (new_label) = 1;
1939 *slot = new_label;
1941 else
1942 new_label = (tree) *slot;
1944 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
1945 field = get_nl_goto_field (i);
1946 x = get_frame_field (info, target_context, field, gsi);
1947 x = build_addr (x, target_context);
1948 x = gsi_gimplify_val (info, x, gsi);
1949 call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
1950 2, build_addr (new_label, target_context), x);
1951 gsi_replace (gsi, call, false);
1953 /* We have handled all of STMT's operands, no need to keep going. */
1954 *handled_ops_p = true;
1955 return NULL_TREE;
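/* Illustrative sketch (not from the original source; names hypothetical):
   for a nested function containing "goto lab;" where LAB belongs to an
   outer function, the GIMPLE_GOTO is replaced by roughly

       D.1 = &CHAIN->nl_goto_save;
       __builtin_nonlocal_goto (&NL_LAB, D.1);

   where NL_LAB is the fresh DECL_NONLOCAL label recorded in var_map above,
   and the address argument points at the non-local goto save area field in
   the outer function's frame.  convert_nl_goto_receiver below installs
   NL_LAB next to LAB on a second pass.  */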
1959 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
1960 are referenced via nonlocal goto from a nested function. The rewrite
1961 will involve installing a newly generated DECL_NONLOCAL label, and
1962 (potentially) a branch around the rtl gunk that is assumed to be
1963 attached to such a label. */
1965 static tree
1966 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1967 struct walk_stmt_info *wi)
1969 struct nesting_info *const info = (struct nesting_info *) wi->info;
1970 tree label, new_label;
1971 gimple_stmt_iterator tmp_gsi;
1972 void **slot;
1973 gimple stmt = gsi_stmt (*gsi);
1975 if (gimple_code (stmt) != GIMPLE_LABEL)
1977 *handled_ops_p = false;
1978 return NULL_TREE;
1981 label = gimple_label_label (stmt);
1983 slot = pointer_map_contains (info->var_map, label);
1984 if (!slot)
1986 *handled_ops_p = false;
1987 return NULL_TREE;
1990 /* If there's any possibility that the previous statement falls through,
1991 then we must branch around the new non-local label. */
1992 tmp_gsi = wi->gsi;
1993 gsi_prev (&tmp_gsi);
1994 if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
1996 gimple stmt = gimple_build_goto (label);
1997 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2000 new_label = (tree) *slot;
2001 stmt = gimple_build_label (new_label);
2002 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2004 *handled_ops_p = true;
2005 return NULL_TREE;
2009 /* Called via walk_function+walk_stmt, rewrite all references to addresses
2010 of nested functions that require the use of trampolines. The rewrite
2011 will involve a reference to a trampoline generated for the occasion. */
2013 static tree
2014 convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
2016 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2017 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
2018 tree t = *tp, decl, target_context, x, builtin;
2019 gimple call;
2021 *walk_subtrees = 0;
2022 switch (TREE_CODE (t))
2024 case ADDR_EXPR:
2025 /* Build
2026 T.1 = &CHAIN->tramp;
2027 T.2 = __builtin_adjust_trampoline (T.1);
2028 T.3 = (func_type)T.2;
2029 */
2031 decl = TREE_OPERAND (t, 0);
2032 if (TREE_CODE (decl) != FUNCTION_DECL)
2033 break;
2035 /* Only need to process nested functions. */
2036 target_context = decl_function_context (decl);
2037 if (!target_context)
2038 break;
2040 /* If the nested function doesn't use a static chain, then
2041 it doesn't need a trampoline. */
2042 if (!DECL_STATIC_CHAIN (decl))
2043 break;
2045 /* If we don't want a trampoline, then don't build one. */
2046 if (TREE_NO_TRAMPOLINE (t))
2047 break;
2049 /* Look up the immediate parent of the callee, as that's where
2050 we need to insert the trampoline. */
2051 for (i = info; i->context != target_context; i = i->outer)
2052 continue;
2053 x = lookup_tramp_for_decl (i, decl, INSERT);
2055 /* Compute the address of the field holding the trampoline. */
2056 x = get_frame_field (info, target_context, x, &wi->gsi);
2057 x = build_addr (x, target_context);
2058 x = gsi_gimplify_val (info, x, &wi->gsi);
2060 /* Do machine-specific ugliness. Normally this will involve
2061 computing extra alignment, but it can really be anything. */
2062 builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
2063 call = gimple_build_call (builtin, 1, x);
2064 x = init_tmp_var_with_call (info, &wi->gsi, call);
2066 /* Cast back to the proper function type. */
2067 x = build1 (NOP_EXPR, TREE_TYPE (t), x);
2068 x = init_tmp_var (info, x, &wi->gsi);
2070 *tp = x;
2071 break;
2073 default:
2074 if (!IS_TYPE_OR_DECL_P (t))
2075 *walk_subtrees = 1;
2076 break;
2079 return NULL_TREE;
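/* Illustrative sketch (not from the original source; names hypothetical):
   GNU C such as

       void outer (int n)
       {
         int nested (int i) { return i + n; }
         register_callback (&nested);
       }

   reaches the ADDR_EXPR case above with &nested.  NESTED reads N, so it
   keeps its static chain, and taking its address therefore needs a
   trampoline; the ADDR_EXPR is replaced by the T.1/T.2/T.3 sequence shown
   in the comment inside that case.  */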
2083 /* Called via walk_function+walk_gimple_stmt, rewrite all references
2084 to addresses of nested functions that require the use of
2085 trampolines. The rewrite will involve a reference to a trampoline
2086 generated for the occasion. */
2088 static tree
2089 convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2090 struct walk_stmt_info *wi)
2092 struct nesting_info *info = (struct nesting_info *) wi->info;
2093 gimple stmt = gsi_stmt (*gsi);
2095 switch (gimple_code (stmt))
2097 case GIMPLE_CALL:
2099 /* Only walk call arguments, lest we generate trampolines for
2100 direct calls. */
2101 unsigned long i, nargs = gimple_call_num_args (stmt);
2102 for (i = 0; i < nargs; i++)
2103 walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
2104 wi, NULL);
2105 break;
2108 case GIMPLE_OACC_KERNELS:
2109 case GIMPLE_OACC_PARALLEL:
2110 gcc_unreachable ();
2112 case GIMPLE_OMP_PARALLEL:
2113 case GIMPLE_OMP_TASK:
2115 tree save_local_var_chain;
2116 walk_gimple_op (stmt, convert_tramp_reference_op, wi);
2117 save_local_var_chain = info->new_local_var_chain;
2118 info->new_local_var_chain = NULL;
2119 walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
2120 info, gimple_omp_body_ptr (stmt));
2121 if (info->new_local_var_chain)
2122 declare_vars (info->new_local_var_chain,
2123 gimple_seq_first_stmt (gimple_omp_body (stmt)),
2124 false);
2125 info->new_local_var_chain = save_local_var_chain;
2127 break;
2129 default:
2130 *handled_ops_p = false;
2131 return NULL_TREE;
2132 break;
2135 *handled_ops_p = true;
2136 return NULL_TREE;
2141 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2142 that reference nested functions to make sure that the static chain
2143 is set up properly for the call. */
2145 static tree
2146 convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2147 struct walk_stmt_info *wi)
2149 struct nesting_info *const info = (struct nesting_info *) wi->info;
2150 tree decl, target_context;
2151 char save_static_chain_added;
2152 int i;
2153 gimple stmt = gsi_stmt (*gsi);
2155 switch (gimple_code (stmt))
2157 case GIMPLE_CALL:
2158 if (gimple_call_chain (stmt))
2159 break;
2160 decl = gimple_call_fndecl (stmt);
2161 if (!decl)
2162 break;
2163 target_context = decl_function_context (decl);
2164 if (target_context && DECL_STATIC_CHAIN (decl))
2166 gimple_call_set_chain (stmt, get_static_chain (info, target_context,
2167 &wi->gsi));
2168 info->static_chain_added |= (1 << (info->context != target_context));
2170 break;
2172 case GIMPLE_OACC_KERNELS:
2173 case GIMPLE_OACC_PARALLEL:
2174 gcc_unreachable ();
2176 case GIMPLE_OMP_PARALLEL:
2177 case GIMPLE_OMP_TASK:
2178 save_static_chain_added = info->static_chain_added;
2179 info->static_chain_added = 0;
2180 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2181 for (i = 0; i < 2; i++)
2183 tree c, decl;
2184 if ((info->static_chain_added & (1 << i)) == 0)
2185 continue;
2186 decl = i ? get_chain_decl (info) : info->frame_decl;
2187 /* Don't add CHAIN.* or FRAME.* twice. */
2188 for (c = gimple_omp_taskreg_clauses (stmt);
2189 c;
2190 c = OMP_CLAUSE_CHAIN (c))
2191 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2192 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2193 && OMP_CLAUSE_DECL (c) == decl)
2194 break;
2195 if (c == NULL)
2197 c = build_omp_clause (gimple_location (stmt),
2198 i ? OMP_CLAUSE_FIRSTPRIVATE
2199 : OMP_CLAUSE_SHARED);
2200 OMP_CLAUSE_DECL (c) = decl;
2201 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2202 gimple_omp_taskreg_set_clauses (stmt, c);
2205 info->static_chain_added |= save_static_chain_added;
2206 break;
2208 case GIMPLE_OMP_FOR:
2209 gcc_assert (!is_gimple_omp_oacc_specifically (stmt));
2210 walk_body (convert_gimple_call, NULL, info,
2211 gimple_omp_for_pre_body_ptr (stmt));
2212 /* FALLTHRU */
2213 case GIMPLE_OMP_SECTIONS:
2214 case GIMPLE_OMP_SECTION:
2215 case GIMPLE_OMP_SINGLE:
2216 case GIMPLE_OMP_TARGET:
2217 case GIMPLE_OMP_TEAMS:
2218 case GIMPLE_OMP_MASTER:
2219 case GIMPLE_OMP_TASKGROUP:
2220 case GIMPLE_OMP_ORDERED:
2221 case GIMPLE_OMP_CRITICAL:
2222 gcc_assert (!is_gimple_omp_oacc_specifically (stmt));
2223 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2224 break;
2226 default:
2227 /* Keep looking for other operands. */
2228 *handled_ops_p = false;
2229 return NULL_TREE;
2232 *handled_ops_p = true;
2233 return NULL_TREE;
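/* Illustrative sketch (not from the original source; names hypothetical):
   a direct call to a nested function that still uses its static chain,
   e.g. "nested (3);", is annotated by the GIMPLE_CALL case above and
   shows up in -fdump-tree-nested dumps roughly as

       nested (3); [static-chain: &FRAME.1]

   while for GIMPLE_OMP_PARALLEL/TASK the loop above also makes sure the
   CHAIN.* and FRAME.* decls appear in a firstprivate/shared clause
   exactly once.  */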
2236 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
2237 call expressions. At the same time, determine if a nested function
2238 actually uses its static chain; if not, remember that. */
2240 static void
2241 convert_all_function_calls (struct nesting_info *root)
2243 unsigned int chain_count = 0, old_chain_count, iter_count;
2244 struct nesting_info *n;
2246 /* First, optimistically clear static_chain for all decls that haven't
2247 used the static chain already for variable access. */
2248 FOR_EACH_NEST_INFO (n, root)
2250 tree decl = n->context;
2251 if (!n->outer || (!n->chain_decl && !n->chain_field))
2253 DECL_STATIC_CHAIN (decl) = 0;
2254 if (dump_file && (dump_flags & TDF_DETAILS))
2255 fprintf (dump_file, "Guessing no static-chain for %s\n",
2256 lang_hooks.decl_printable_name (decl, 2));
2258 else
2259 DECL_STATIC_CHAIN (decl) = 1;
2260 chain_count += DECL_STATIC_CHAIN (decl);
2263 /* Walk the functions and perform transformations. Note that these
2264 transformations can induce new uses of the static chain, which in turn
2265 require re-examining all users of the decl. */
2266 /* ??? It would make sense to try to use the call graph to speed this up,
2267 but the call graph hasn't really been built yet. Even if it did, we
2268 would still need to iterate in this loop since address-of references
2269 wouldn't show up in the callgraph anyway. */
2270 iter_count = 0;
2271 do
2273 old_chain_count = chain_count;
2274 chain_count = 0;
2275 iter_count++;
2277 if (dump_file && (dump_flags & TDF_DETAILS))
2278 fputc ('\n', dump_file);
2280 FOR_EACH_NEST_INFO (n, root)
2282 tree decl = n->context;
2283 walk_function (convert_tramp_reference_stmt,
2284 convert_tramp_reference_op, n);
2285 walk_function (convert_gimple_call, NULL, n);
2286 chain_count += DECL_STATIC_CHAIN (decl);
2289 while (chain_count != old_chain_count);
2291 if (dump_file && (dump_flags & TDF_DETAILS))
2292 fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
2293 iter_count);
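/* Illustrative sketch (not from the original source; names hypothetical)
   of the pruning performed above:

       void outer (int n)
       {
         int uses_n (void) { return n; }
         int standalone (void) { return 42; }
         use (uses_n () + standalone ());
       }

   USES_N reads N, so it keeps DECL_STATIC_CHAIN; STANDALONE references
   nothing from OUTER, so its static chain is optimistically cleared.  The
   do/while loop then re-walks all functions until the number of remaining
   chain users stops changing, since converting trampoline references and
   calls can itself introduce new chain uses.  */

/* Data block threaded through the copy_body/remap_type machinery by
   remap_vla_decls and the debug_var_chain remapping in
   finalize_nesting_tree_1: the generic copy_body_data plus the
   nesting_info whose var_map supplies already-remapped decls (consulted
   by nesting_copy_decl below).  */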
2296 struct nesting_copy_body_data
2298 copy_body_data cb;
2299 struct nesting_info *root;
2302 /* A helper subroutine for debug_var_chain type remapping. */
2304 static tree
2305 nesting_copy_decl (tree decl, copy_body_data *id)
2307 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2308 void **slot = pointer_map_contains (nid->root->var_map, decl);
2310 if (slot)
2311 return (tree) *slot;
2313 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2315 tree new_decl = copy_decl_no_change (decl, id);
2316 DECL_ORIGINAL_TYPE (new_decl)
2317 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
2318 return new_decl;
2321 if (TREE_CODE (decl) == VAR_DECL
2322 || TREE_CODE (decl) == PARM_DECL
2323 || TREE_CODE (decl) == RESULT_DECL)
2324 return decl;
2326 return copy_decl_no_change (decl, id);
2329 /* A helper function for remap_vla_decls. See if *TP contains
2330 some remapped variables. */
2332 static tree
2333 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2335 struct nesting_info *root = (struct nesting_info *) data;
2336 tree t = *tp;
2337 void **slot;
2339 if (DECL_P (t))
2341 *walk_subtrees = 0;
2342 slot = pointer_map_contains (root->var_map, t);
2344 if (slot)
2345 return (tree) *slot;
2347 return NULL;
2350 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
2351 involved. */
2353 static void
2354 remap_vla_decls (tree block, struct nesting_info *root)
2356 tree var, subblock, val, type;
2357 struct nesting_copy_body_data id;
2359 for (subblock = BLOCK_SUBBLOCKS (block);
2360 subblock;
2361 subblock = BLOCK_CHAIN (subblock))
2362 remap_vla_decls (subblock, root);
2364 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
2365 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
2367 val = DECL_VALUE_EXPR (var);
2368 type = TREE_TYPE (var);
2370 if (!(TREE_CODE (val) == INDIRECT_REF
2371 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2372 && variably_modified_type_p (type, NULL)))
2373 continue;
2375 if (pointer_map_contains (root->var_map, TREE_OPERAND (val, 0))
2376 || walk_tree (&type, contains_remapped_vars, root, NULL))
2377 break;
2380 if (var == NULL_TREE)
2381 return;
2383 memset (&id, 0, sizeof (id));
2384 id.cb.copy_decl = nesting_copy_decl;
2385 id.cb.decl_map = pointer_map_create ();
2386 id.root = root;
2388 for (; var; var = DECL_CHAIN (var))
2389 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
2391 struct nesting_info *i;
2392 tree newt, context;
2393 void **slot;
2395 val = DECL_VALUE_EXPR (var);
2396 type = TREE_TYPE (var);
2398 if (!(TREE_CODE (val) == INDIRECT_REF
2399 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2400 && variably_modified_type_p (type, NULL)))
2401 continue;
2403 slot = pointer_map_contains (root->var_map, TREE_OPERAND (val, 0));
2404 if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
2405 continue;
2407 context = decl_function_context (var);
2408 for (i = root; i; i = i->outer)
2409 if (i->context == context)
2410 break;
2412 if (i == NULL)
2413 continue;
2415 /* Fully expand value expressions. This avoids having debug variables that
2416 are only referenced from them and could therefore be swept during GC. */
2417 if (slot)
2419 tree t = (tree) *slot;
2420 gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
2421 val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
2424 id.cb.src_fn = i->context;
2425 id.cb.dst_fn = i->context;
2426 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2428 TREE_TYPE (var) = newt = remap_type (type, &id.cb);
2429 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2431 newt = TREE_TYPE (newt);
2432 type = TREE_TYPE (type);
2434 if (TYPE_NAME (newt)
2435 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2436 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2437 && newt != type
2438 && TYPE_NAME (newt) == TYPE_NAME (type))
2439 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2441 walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
2442 if (val != DECL_VALUE_EXPR (var))
2443 SET_DECL_VALUE_EXPR (var, val);
2446 pointer_map_destroy (id.cb.decl_map);
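/* Illustrative sketch (assumption, not from the original source; names
   hypothetical): in

       void outer (int n)
       {
         char buf[n];
         void inner (void) { buf[0] = (char) n; }
         inner ();
       }

   gimplification gives BUF a DECL_VALUE_EXPR of the form *buf.ptr, and
   because INNER touches BUF, the pointer (and the size decls in BUF's
   variably modified type) may have received replacements in
   ROOT->var_map; the walk above then remaps BUF's type and value
   expression so the debug info keeps referring to the replacement
   decls.  */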
2449 /* Fold the MEM_REF *E. */
2450 static bool
2451 fold_mem_refs (const void *e, void *data ATTRIBUTE_UNUSED)
2453 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
2454 *ref_p = fold (*ref_p);
2455 return true;
2458 /* Do "everything else" to clean up or complete state collected by the
2459 various walking passes -- lay out the types and decls, generate code
2460 to initialize the frame decl, store critical expressions in the
2461 struct function for rtl to find. */
2463 static void
2464 finalize_nesting_tree_1 (struct nesting_info *root)
2466 gimple_seq stmt_list;
2467 gimple stmt;
2468 tree context = root->context;
2469 struct function *sf;
2471 stmt_list = NULL;
2473 /* If we created a non-local frame type or decl, we need to lay them
2474 out at this time. */
2475 if (root->frame_type)
2477 /* In some cases the frame type will trigger the -Wpadded warning.
2478 This is not helpful; suppress it. */
2479 int save_warn_padded = warn_padded;
2480 tree *adjust;
2482 warn_padded = 0;
2483 layout_type (root->frame_type);
2484 warn_padded = save_warn_padded;
2485 layout_decl (root->frame_decl, 0);
2487 /* Remove root->frame_decl from root->new_local_var_chain, so
2488 that we can declare it also in the lexical blocks, which
2489 helps ensure virtual regs that end up appearing in its RTL
2490 expression get substituted in instantiate_virtual_regs(). */
2491 for (adjust = &root->new_local_var_chain;
2492 *adjust != root->frame_decl;
2493 adjust = &DECL_CHAIN (*adjust))
2494 gcc_assert (DECL_CHAIN (*adjust));
2495 *adjust = DECL_CHAIN (*adjust);
2497 DECL_CHAIN (root->frame_decl) = NULL_TREE;
2498 declare_vars (root->frame_decl,
2499 gimple_seq_first_stmt (gimple_body (context)), true);
2502 /* If any parameters were referenced non-locally, then we need to
2503 insert a copy. Likewise, if any variables were referenced by
2504 pointer, we need to initialize the address. */
2505 if (root->any_parm_remapped)
2507 tree p;
2508 for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
2510 tree field, x, y;
2512 field = lookup_field_for_decl (root, p, NO_INSERT);
2513 if (!field)
2514 continue;
2516 if (use_pointer_in_frame (p))
2517 x = build_addr (p, context);
2518 else
2519 x = p;
2521 y = build3 (COMPONENT_REF, TREE_TYPE (field),
2522 root->frame_decl, field, NULL_TREE);
2523 stmt = gimple_build_assign (y, x);
2524 gimple_seq_add_stmt (&stmt_list, stmt);
2525 /* If the assignment is from a non-register, the stmt is not valid
2526 gimple. Make it so by using a temporary instead. */
2527 if (!is_gimple_reg (x)
2528 && is_gimple_reg_type (TREE_TYPE (x)))
2530 gimple_stmt_iterator gsi = gsi_last (stmt_list);
2531 x = init_tmp_var (root, x, &gsi);
2532 gimple_assign_set_rhs1 (stmt, x);
2537 /* If a chain_field was created, then it needs to be initialized
2538 from chain_decl. */
2539 if (root->chain_field)
2541 tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
2542 root->frame_decl, root->chain_field, NULL_TREE);
2543 stmt = gimple_build_assign (x, get_chain_decl (root));
2544 gimple_seq_add_stmt (&stmt_list, stmt);
2547 /* If trampolines were created, then we need to initialize them. */
2548 if (root->any_tramp_created)
2550 struct nesting_info *i;
2551 for (i = root->inner; i ; i = i->next)
2553 tree arg1, arg2, arg3, x, field;
2555 field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
2556 if (!field)
2557 continue;
2559 gcc_assert (DECL_STATIC_CHAIN (i->context));
2560 arg3 = build_addr (root->frame_decl, context);
2562 arg2 = build_addr (i->context, context);
2564 x = build3 (COMPONENT_REF, TREE_TYPE (field),
2565 root->frame_decl, field, NULL_TREE);
2566 arg1 = build_addr (x, context);
2568 x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
2569 stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
2570 gimple_seq_add_stmt (&stmt_list, stmt);
2574 /* If we created initialization statements, insert them. */
2575 if (stmt_list)
2577 gimple bind;
2578 annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
2579 bind = gimple_seq_first_stmt (gimple_body (context));
2580 gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
2581 gimple_bind_set_body (bind, stmt_list);
2584 /* If a chain_decl was created, then it needs to be registered with
2585 struct function so that it gets initialized from the static chain
2586 register at the beginning of the function. */
2587 sf = DECL_STRUCT_FUNCTION (root->context);
2588 sf->static_chain_decl = root->chain_decl;
2590 /* Similarly for the non-local goto save area. */
2591 if (root->nl_goto_field)
2593 sf->nonlocal_goto_save_area
2594 = get_frame_field (root, context, root->nl_goto_field, NULL);
2595 sf->has_nonlocal_label = 1;
2598 /* Make sure all new local variables get inserted into the
2599 proper BIND_EXPR. */
2600 if (root->new_local_var_chain)
2601 declare_vars (root->new_local_var_chain,
2602 gimple_seq_first_stmt (gimple_body (root->context)),
2603 false);
2605 if (root->debug_var_chain)
2607 tree debug_var;
2608 gimple scope;
2610 remap_vla_decls (DECL_INITIAL (root->context), root);
2612 for (debug_var = root->debug_var_chain; debug_var;
2613 debug_var = DECL_CHAIN (debug_var))
2614 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
2615 break;
2617 /* If there are any debug decls with variable length types,
2618 remap those types using other debug_var_chain variables. */
2619 if (debug_var)
2621 struct nesting_copy_body_data id;
2623 memset (&id, 0, sizeof (id));
2624 id.cb.copy_decl = nesting_copy_decl;
2625 id.cb.decl_map = pointer_map_create ();
2626 id.root = root;
2628 for (; debug_var; debug_var = DECL_CHAIN (debug_var))
2629 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
2631 tree type = TREE_TYPE (debug_var);
2632 tree newt, t = type;
2633 struct nesting_info *i;
2635 for (i = root; i; i = i->outer)
2636 if (variably_modified_type_p (type, i->context))
2637 break;
2639 if (i == NULL)
2640 continue;
2642 id.cb.src_fn = i->context;
2643 id.cb.dst_fn = i->context;
2644 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2646 TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
2647 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2649 newt = TREE_TYPE (newt);
2650 t = TREE_TYPE (t);
2652 if (TYPE_NAME (newt)
2653 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2654 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2655 && newt != t
2656 && TYPE_NAME (newt) == TYPE_NAME (t))
2657 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2660 pointer_map_destroy (id.cb.decl_map);
2663 scope = gimple_seq_first_stmt (gimple_body (root->context));
2664 if (gimple_bind_block (scope))
2665 declare_vars (root->debug_var_chain, scope, true);
2666 else
2667 BLOCK_VARS (DECL_INITIAL (root->context))
2668 = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
2669 root->debug_var_chain);
2672 /* Fold the rewritten MEM_REF trees. */
2673 pointer_set_traverse (root->mem_refs, fold_mem_refs, NULL);
2675 /* Dump the translated tree function. */
2676 if (dump_file)
2678 fputs ("\n\n", dump_file);
2679 dump_function_to_file (root->context, dump_file, dump_flags);
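/* Illustrative sketch (assumption, not from the original source; names
   hypothetical): after finalize_nesting_tree_1, a function with one
   remapped parameter P, a chain field and one trampoline starts with
   initialization code along the lines of

       FRAME.1.p = p;
       FRAME.1.__chain = CHAIN.2;
       __builtin_init_trampoline (&FRAME.1.__tramp, nested_fn, &FRAME.1);

   prepended to the outermost GIMPLE_BIND by the code above.  */

/* Run finalize_nesting_tree_1 on every function in the nesting tree
   rooted at ROOT.  */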
2683 static void
2684 finalize_nesting_tree (struct nesting_info *root)
2686 struct nesting_info *n;
2687 FOR_EACH_NEST_INFO (n, root)
2688 finalize_nesting_tree_1 (n);
2691 /* Unnest the nodes and pass them to cgraph. */
2693 static void
2694 unnest_nesting_tree_1 (struct nesting_info *root)
2696 struct cgraph_node *node = cgraph_get_node (root->context);
2698 /* For nested functions update the cgraph to reflect unnesting.
2699 We also delay finalizing of these functions up to this point. */
2700 if (node->origin)
2702 cgraph_unnest_node (node);
2703 cgraph_finalize_function (root->context, true);
2707 static void
2708 unnest_nesting_tree (struct nesting_info *root)
2710 struct nesting_info *n;
2711 FOR_EACH_NEST_INFO (n, root)
2712 unnest_nesting_tree_1 (n);
2715 /* Free the data structures allocated during this pass. */
2717 static void
2718 free_nesting_tree (struct nesting_info *root)
2720 struct nesting_info *node, *next;
2722 node = iter_nestinfo_start (root);
2723 do
2725 next = iter_nestinfo_next (node);
2726 pointer_map_destroy (node->var_map);
2727 pointer_map_destroy (node->field_map);
2728 pointer_set_destroy (node->mem_refs);
2729 free (node);
2730 node = next;
2732 while (node);
2735 /* Gimplify a function and all its nested functions. */
2736 static void
2737 gimplify_all_functions (struct cgraph_node *root)
2739 struct cgraph_node *iter;
2740 if (!gimple_body (root->decl))
2741 gimplify_function_tree (root->decl);
2742 for (iter = root->nested; iter; iter = iter->next_nested)
2743 gimplify_all_functions (iter);
2746 /* Main entry point for this pass. Process FNDECL and all of its nested
2747 subroutines and turn them into something less tightly bound. */
2749 void
2750 lower_nested_functions (tree fndecl)
2752 struct cgraph_node *cgn;
2753 struct nesting_info *root;
2755 /* If there are no nested functions, there's nothing to do. */
2756 cgn = cgraph_get_node (fndecl);
2757 if (!cgn->nested)
2758 return;
2760 gimplify_all_functions (cgn);
2762 dump_file = dump_begin (TDI_nested, &dump_flags);
2763 if (dump_file)
2764 fprintf (dump_file, "\n;; Function %s\n\n",
2765 lang_hooks.decl_printable_name (fndecl, 2));
2767 bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
2768 root = create_nesting_tree (cgn);
2770 walk_all_functions (convert_nonlocal_reference_stmt,
2771 convert_nonlocal_reference_op,
2772 root);
2773 walk_all_functions (convert_local_reference_stmt,
2774 convert_local_reference_op,
2775 root);
2776 walk_all_functions (convert_nl_goto_reference, NULL, root);
2777 walk_all_functions (convert_nl_goto_receiver, NULL, root);
2779 convert_all_function_calls (root);
2780 finalize_nesting_tree (root);
2781 unnest_nesting_tree (root);
2783 free_nesting_tree (root);
2784 bitmap_obstack_release (&nesting_info_bitmap_obstack);
2786 if (dump_file)
2788 dump_end (TDI_nested, dump_file);
2789 dump_file = NULL;
2793 #include "gt-tree-nested.h"