1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "function.h"
29 #include "tree-dump.h"
30 #include "tree-inline.h"
31 #include "gimple.h"
32 #include "tree-iterator.h"
33 #include "tree-flow.h"
34 #include "cgraph.h"
35 #include "expr.h"
36 #include "langhooks.h"
37 #include "pointer-set.h"
38 #include "ggc.h"
41 /* The object of this pass is to lower the representation of a set of nested
42 functions in order to expose all of the gory details of the various
43 nonlocal references. We want to do this sooner rather than later, in
44 order to give us more freedom in emitting all of the functions in question.
46 Back in olden times, when gcc was young, we developed an insanely
47 complicated scheme whereby variables which were referenced nonlocally
48 were forced to live in the stack of the declaring function, and then
49 the nested functions magically discovered where these variables were
50 placed. In order for this scheme to function properly, it required
51 that the outer function be partially expanded, then we switch to
52 compiling the inner function, and once done with those we switch back
53 to compiling the outer function. Such delicate ordering requirements
54 make it difficult to do whole-translation-unit optimizations
55 involving such functions.
57 The implementation here is much more direct. Everything that can be
58 referenced by an inner function is a member of an explicitly created
59 structure herein called the "nonlocal frame struct". The incoming
60 static chain for a nested function is a pointer to this struct in
61 the parent. In this way, we settle on known offsets from a known
62 base, and so are decoupled from the logic that places objects in the
63 function's stack frame. More importantly, we don't have to wait for
64 that to happen -- since the compilation of the inner function is no
65 longer tied to a real stack frame, the nonlocal frame struct can be
66 allocated anywhere. Which means that the outer function is now
67 inlinable.
69 Theory of operation here is very simple. Iterate over all the
70 statements in all the functions (depth first) several times,
71 allocating structures and fields on demand. In general we want to
72 examine inner functions first, so that we can avoid making unnecessary
73 changes to outer functions.
75 The order of the passes matters a bit, in that later passes will be
76 skipped if it is discovered that the functions don't actually interact
77 at all. That is, they're nested in the lexical sense but could have
78 been written as independent functions without change. */
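/* Purely as an illustrative sketch (the names FRAME_outer, chain and
   inner below are invented for exposition; the pass itself works on
   GIMPLE, not on C source), a GNU C nested function such as

       int
       outer (int n)
       {
         int v = n;
         int inner (void) { return v + 1; }
         return inner ();
       }

   is conceptually lowered so that everything INNER touches lives in an
   explicitly created frame object whose address is handed to INNER as
   its static chain:

       struct FRAME_outer { int v; };

       static int
       inner (struct FRAME_outer *chain)
       {
         return chain->v + 1;
       }

       int
       outer (int n)
       {
         struct FRAME_outer frame;
         frame.v = n;
         return inner (&frame);
       }

   The real transformation keeps the frame object in OUTER's stack
   frame and passes its address via DECL_STATIC_CHAIN rather than an
   explicit parameter, but the shape of the data flow is as above.  */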
81 struct nesting_info
83 struct nesting_info *outer;
84 struct nesting_info *inner;
85 struct nesting_info *next;
87 struct pointer_map_t *field_map;
88 struct pointer_map_t *var_map;
89 bitmap suppress_expansion;
91 tree context;
92 tree new_local_var_chain;
93 tree debug_var_chain;
94 tree frame_type;
95 tree frame_decl;
96 tree chain_field;
97 tree chain_decl;
98 tree nl_goto_field;
100 bool any_parm_remapped;
101 bool any_tramp_created;
102 char static_chain_added;
106 /* Iterate over the nesting tree, starting with ROOT, depth first. */
108 static inline struct nesting_info *
109 iter_nestinfo_start (struct nesting_info *root)
111 while (root->inner)
112 root = root->inner;
113 return root;
116 static inline struct nesting_info *
117 iter_nestinfo_next (struct nesting_info *node)
119 if (node->next)
120 return iter_nestinfo_start (node->next);
121 return node->outer;
124 #define FOR_EACH_NEST_INFO(I, ROOT) \
125 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
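/* An illustrative sketch of how the iterator above is typically used
   (the callback name is invented for this example):

       struct nesting_info *n;

       FOR_EACH_NEST_INFO (n, root)
         lower_one_function (n);

   The walk is a post-order traversal of the first-child (INNER) /
   next-sibling (NEXT) tree: every nested function is visited before
   the function that lexically contains it, and ROOT itself is
   visited last.  */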
127 /* Obstack used for the bitmaps in the struct above. */
128 static struct bitmap_obstack nesting_info_bitmap_obstack;
131 /* We're working in so many different function contexts simultaneously
132 that create_tmp_var is dangerous. Prevent mishap. */
133 #define create_tmp_var cant_use_create_tmp_var_here_dummy
135 /* Like create_tmp_var, except record the variable for registration at
136 the given nesting level. */
138 static tree
139 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
141 tree tmp_var;
143 /* If the type is of variable size or a type which must be created by the
144 frontend, something is wrong. Note that we explicitly allow
145 incomplete types here, since we create them ourselves. */
146 gcc_assert (!TREE_ADDRESSABLE (type));
147 gcc_assert (!TYPE_SIZE_UNIT (type)
148 || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
150 tmp_var = create_tmp_var_raw (type, prefix);
151 DECL_CONTEXT (tmp_var) = info->context;
152 TREE_CHAIN (tmp_var) = info->new_local_var_chain;
153 DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
154 if (TREE_CODE (type) == COMPLEX_TYPE
155 || TREE_CODE (type) == VECTOR_TYPE)
156 DECL_GIMPLE_REG_P (tmp_var) = 1;
158 info->new_local_var_chain = tmp_var;
160 return tmp_var;
163 /* Take the address of EXP to be used within function CONTEXT.
164 Mark it for addressability as necessary. */
166 tree
167 build_addr (tree exp, tree context)
169 tree base = exp;
170 tree save_context;
171 tree retval;
173 while (handled_component_p (base))
174 base = TREE_OPERAND (base, 0);
176 if (DECL_P (base))
177 TREE_ADDRESSABLE (base) = 1;
179 /* Building the ADDR_EXPR will compute a set of properties for
180 that ADDR_EXPR. Those properties are unfortunately context
181 specific, i.e., they are dependent on CURRENT_FUNCTION_DECL.
183 Temporarily set CURRENT_FUNCTION_DECL to the desired context,
184 build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL. That
185 way the properties for the ADDR_EXPR are computed properly. */
186 save_context = current_function_decl;
187 current_function_decl = context;
188 retval = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
189 current_function_decl = save_context;
190 return retval;
193 /* Insert FIELD into TYPE, sorted by alignment requirements. */
195 void
196 insert_field_into_struct (tree type, tree field)
198 tree *p;
200 DECL_CONTEXT (field) = type;
202 for (p = &TYPE_FIELDS (type); *p ; p = &TREE_CHAIN (*p))
203 if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
204 break;
206 TREE_CHAIN (field) = *p;
207 *p = field;
209 /* Set correct alignment for frame struct type. */
210 if (TYPE_ALIGN (type) < DECL_ALIGN (field))
211 TYPE_ALIGN (type) = DECL_ALIGN (field);
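/* For example (a sketch with made-up fields): inserting fields whose
   DECL_ALIGN values are 32, then 64, then 8 bits leaves TYPE_FIELDS
   ordered 64, 32, 8, because each new field is placed in front of the
   first existing field whose alignment is not larger than its own.
   Keeping the most strictly aligned fields first helps avoid padding
   when the frame struct is laid out.  */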
214 /* Build or return the RECORD_TYPE that describes the frame state that is
215 shared between INFO->CONTEXT and its nested functions. This record will
216 not be complete until finalize_nesting_tree; up until that point we'll
217 be adding fields as necessary.
219 We also build the DECL that represents this frame in the function. */
221 static tree
222 get_frame_type (struct nesting_info *info)
224 tree type = info->frame_type;
225 if (!type)
227 char *name;
229 type = make_node (RECORD_TYPE);
231 name = concat ("FRAME.",
232 IDENTIFIER_POINTER (DECL_NAME (info->context)),
233 NULL);
234 TYPE_NAME (type) = get_identifier (name);
235 free (name);
237 info->frame_type = type;
238 info->frame_decl = create_tmp_var_for (info, type, "FRAME");
240 /* ??? Always make it addressable for now, since it is meant to
241 be pointed to by the static chain pointer. This pessimizes
242 when it turns out that no static chains are needed because
243 the nested functions referencing non-local variables are not
244 reachable, but the true pessimization is to create the non-
245 local frame structure in the first place. */
246 TREE_ADDRESSABLE (info->frame_decl) = 1;
248 return type;
251 /* Return true if DECL should be referenced by pointer in the non-local
252 frame structure. */
254 static bool
255 use_pointer_in_frame (tree decl)
257 if (TREE_CODE (decl) == PARM_DECL)
259 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
260 sized decls, and inefficient to copy large aggregates. Don't bother
261 moving anything but scalar variables. */
262 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
264 else
266 /* Variable sized types make things "interesting" in the frame. */
267 return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
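/* A few illustrative cases (hypothetical declarations): a parameter
   "struct big s" is an aggregate, so only a pointer to the caller's
   copy is kept in the frame; a parameter "int n" is scalar and is
   copied into the frame by value; and a local "char buf[n]" has a
   non-constant DECL_SIZE, so it too is referenced through a pointer
   stored in the frame.  */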
271 /* Given DECL, a non-locally accessed variable, find or create a field
272 in the non-local frame structure for the given nesting context. */
274 static tree
275 lookup_field_for_decl (struct nesting_info *info, tree decl,
276 enum insert_option insert)
278 void **slot;
280 if (insert == NO_INSERT)
282 slot = pointer_map_contains (info->field_map, decl);
283 return slot ? (tree) *slot : NULL_TREE;
286 slot = pointer_map_insert (info->field_map, decl);
287 if (!*slot)
289 tree field = make_node (FIELD_DECL);
290 DECL_NAME (field) = DECL_NAME (decl);
292 if (use_pointer_in_frame (decl))
294 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
295 DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
296 DECL_NONADDRESSABLE_P (field) = 1;
298 else
300 TREE_TYPE (field) = TREE_TYPE (decl);
301 DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
302 DECL_ALIGN (field) = DECL_ALIGN (decl);
303 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
304 TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
305 DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
306 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
309 insert_field_into_struct (get_frame_type (info), field);
310 *slot = field;
312 if (TREE_CODE (decl) == PARM_DECL)
313 info->any_parm_remapped = true;
316 return (tree) *slot;
319 /* Build or return the variable that holds the static chain within
320 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
322 static tree
323 get_chain_decl (struct nesting_info *info)
325 tree decl = info->chain_decl;
327 if (!decl)
329 tree type;
331 type = get_frame_type (info->outer);
332 type = build_pointer_type (type);
334 /* Note that this variable is *not* entered into any BIND_EXPR;
335 the construction of this variable is handled specially in
336 expand_function_start and initialize_inlined_parameters.
337 Note also that it's represented as a parameter. This is closer
338 to the truth, since the initial value does come from
339 the caller. */
340 decl = build_decl (DECL_SOURCE_LOCATION (info->context),
341 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
342 DECL_ARTIFICIAL (decl) = 1;
343 DECL_IGNORED_P (decl) = 1;
344 TREE_USED (decl) = 1;
345 DECL_CONTEXT (decl) = info->context;
346 DECL_ARG_TYPE (decl) = type;
348 /* Tell tree-inline.c that we never write to this variable, so
349 it can copy-prop the replacement value immediately. */
350 TREE_READONLY (decl) = 1;
352 info->chain_decl = decl;
354 if (dump_file
355 && (dump_flags & TDF_DETAILS)
356 && !DECL_STATIC_CHAIN (info->context))
357 fprintf (dump_file, "Setting static-chain for %s\n",
358 lang_hooks.decl_printable_name (info->context, 2));
360 DECL_STATIC_CHAIN (info->context) = 1;
362 return decl;
365 /* Build or return the field within the non-local frame state that holds
366 the static chain for INFO->CONTEXT. This is the way to walk back up
367 multiple nesting levels. */
369 static tree
370 get_chain_field (struct nesting_info *info)
372 tree field = info->chain_field;
374 if (!field)
376 tree type = build_pointer_type (get_frame_type (info->outer));
378 field = make_node (FIELD_DECL);
379 DECL_NAME (field) = get_identifier ("__chain");
380 TREE_TYPE (field) = type;
381 DECL_ALIGN (field) = TYPE_ALIGN (type);
382 DECL_NONADDRESSABLE_P (field) = 1;
384 insert_field_into_struct (get_frame_type (info), field);
386 info->chain_field = field;
388 if (dump_file
389 && (dump_flags & TDF_DETAILS)
390 && !DECL_STATIC_CHAIN (info->context))
391 fprintf (dump_file, "Setting static-chain for %s\n",
392 lang_hooks.decl_printable_name (info->context, 2));
394 DECL_STATIC_CHAIN (info->context) = 1;
396 return field;
399 /* Initialize a new temporary with the GIMPLE_CALL CALL. */
401 static tree
402 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
403 gimple call)
405 tree t;
407 t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
408 gimple_call_set_lhs (call, t);
409 if (! gsi_end_p (*gsi))
410 gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
411 gsi_insert_before (gsi, call, GSI_SAME_STMT);
413 return t;
417 /* Copy EXP into a temporary. Allocate the temporary in the context of
418 INFO and insert the initialization statement before GSI. */
420 static tree
421 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
423 tree t;
424 gimple stmt;
426 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
427 stmt = gimple_build_assign (t, exp);
428 if (! gsi_end_p (*gsi))
429 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
430 gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
432 return t;
436 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
438 static tree
439 gsi_gimplify_val (struct nesting_info *info, tree exp,
440 gimple_stmt_iterator *gsi)
442 if (is_gimple_val (exp))
443 return exp;
444 else
445 return init_tmp_var (info, exp, gsi);
448 /* Similarly, but copy from the temporary and insert the statement
449 after the iterator. */
451 static tree
452 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
454 tree t;
455 gimple stmt;
457 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
458 stmt = gimple_build_assign (exp, t);
459 if (! gsi_end_p (*gsi))
460 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
461 gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
463 return t;
466 /* Build or return the type used to represent a nested function trampoline. */
468 static GTY(()) tree trampoline_type;
470 static tree
471 get_trampoline_type (struct nesting_info *info)
473 unsigned align, size;
474 tree t;
476 if (trampoline_type)
477 return trampoline_type;
479 align = TRAMPOLINE_ALIGNMENT;
480 size = TRAMPOLINE_SIZE;
482 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
483 then allocate extra space so that we can do dynamic alignment. */
484 if (align > STACK_BOUNDARY)
486 size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
487 align = STACK_BOUNDARY;
490 t = build_index_type (build_int_cst (NULL_TREE, size - 1));
491 t = build_array_type (char_type_node, t);
492 t = build_decl (DECL_SOURCE_LOCATION (info->context),
493 FIELD_DECL, get_identifier ("__data"), t);
494 DECL_ALIGN (t) = align;
495 DECL_USER_ALIGN (t) = 1;
497 trampoline_type = make_node (RECORD_TYPE);
498 TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
499 TYPE_FIELDS (trampoline_type) = t;
500 layout_type (trampoline_type);
501 DECL_CONTEXT (t) = trampoline_type;
503 return trampoline_type;
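/* A worked example with hypothetical target numbers: if
   TRAMPOLINE_SIZE is 40 bytes, TRAMPOLINE_ALIGNMENT is 128 bits and
   STACK_BOUNDARY is 64 bits, the code above adds
   ((128/8) - 1) & -(64/8) == 8 extra bytes, producing a 48-byte
   __data array that is only given 8-byte static alignment; the slack
   lets the runtime slide the real trampoline up to the next 16-byte
   boundary.  */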
506 /* Given DECL, a nested function, find or create a field in the non-local
507 frame structure for a trampoline for this function. */
509 static tree
510 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
511 enum insert_option insert)
513 void **slot;
515 if (insert == NO_INSERT)
517 slot = pointer_map_contains (info->var_map, decl);
518 return slot ? (tree) *slot : NULL_TREE;
521 slot = pointer_map_insert (info->var_map, decl);
522 if (!*slot)
524 tree field = make_node (FIELD_DECL);
525 DECL_NAME (field) = DECL_NAME (decl);
526 TREE_TYPE (field) = get_trampoline_type (info);
527 TREE_ADDRESSABLE (field) = 1;
529 insert_field_into_struct (get_frame_type (info), field);
530 *slot = field;
532 info->any_tramp_created = true;
535 return (tree) *slot;
538 /* Build or return the field within the non-local frame state that holds
539 the non-local goto "jmp_buf". The buffer itself is maintained by the
540 rtl middle-end as dynamic stack space is allocated. */
542 static tree
543 get_nl_goto_field (struct nesting_info *info)
545 tree field = info->nl_goto_field;
546 if (!field)
548 unsigned size;
549 tree type;
551 /* For __builtin_nonlocal_goto, we need N words. The first is the
552 frame pointer, the rest is for the target's stack pointer save
553 area. The number of words is controlled by STACK_SAVEAREA_MODE;
554 not the best interface, but it'll do for now. */
555 if (Pmode == ptr_mode)
556 type = ptr_type_node;
557 else
558 type = lang_hooks.types.type_for_mode (Pmode, 1);
560 size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
561 size = size / GET_MODE_SIZE (Pmode);
562 size = size + 1;
564 type = build_array_type
565 (type, build_index_type (build_int_cst (NULL_TREE, size)));
567 field = make_node (FIELD_DECL);
568 DECL_NAME (field) = get_identifier ("__nl_goto_buf");
569 TREE_TYPE (field) = type;
570 DECL_ALIGN (field) = TYPE_ALIGN (type);
571 TREE_ADDRESSABLE (field) = 1;
573 insert_field_into_struct (get_frame_type (info), field);
575 info->nl_goto_field = field;
578 return field;
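/* A worked example with hypothetical numbers: if Pmode is 8 bytes
   wide and STACK_SAVEAREA_MODE (SAVE_NONLOCAL) is 16 bytes, then
   SIZE == 16/8 + 1 == 3 and __nl_goto_buf becomes a small array of
   pointer-sized words, with slot 0 holding the frame pointer and the
   remaining slots holding the stack-pointer save area used by
   __builtin_nonlocal_goto.  */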
581 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of GIMPLE sequence SEQ. */
583 static void
584 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
585 struct nesting_info *info, gimple_seq seq)
587 struct walk_stmt_info wi;
589 memset (&wi, 0, sizeof (wi));
590 wi.info = info;
591 wi.val_only = true;
592 walk_gimple_seq (seq, callback_stmt, callback_op, &wi);
596 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
598 static inline void
599 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
600 struct nesting_info *info)
602 walk_body (callback_stmt, callback_op, info, gimple_body (info->context));
605 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
607 static void
608 walk_gimple_omp_for (gimple for_stmt,
609 walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
610 struct nesting_info *info)
612 struct walk_stmt_info wi;
613 gimple_seq seq;
614 tree t;
615 size_t i;
617 walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body (for_stmt));
619 seq = gimple_seq_alloc ();
620 memset (&wi, 0, sizeof (wi));
621 wi.info = info;
622 wi.gsi = gsi_last (seq);
624 for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
626 wi.val_only = false;
627 walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
628 &wi, NULL);
629 wi.val_only = true;
630 wi.is_lhs = false;
631 walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
632 &wi, NULL);
634 wi.val_only = true;
635 wi.is_lhs = false;
636 walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
637 &wi, NULL);
639 t = gimple_omp_for_incr (for_stmt, i);
640 gcc_assert (BINARY_CLASS_P (t));
641 wi.val_only = false;
642 walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
643 wi.val_only = true;
644 wi.is_lhs = false;
645 walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
648 if (gimple_seq_empty_p (seq))
649 gimple_seq_free (seq);
650 else
652 gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
653 annotate_all_with_location (seq, gimple_location (for_stmt));
654 gimple_seq_add_seq (&pre_body, seq);
655 gimple_omp_for_set_pre_body (for_stmt, pre_body);
659 /* Similarly for ROOT and all functions nested underneath, depth first. */
661 static void
662 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
663 struct nesting_info *root)
665 struct nesting_info *n;
666 FOR_EACH_NEST_INFO (n, root)
667 walk_function (callback_stmt, callback_op, n);
671 /* We have to check for a fairly pathological case. The operands of a
672 nested function are to be interpreted in the context of the enclosing
673 function. So if any are variably-sized, they will get remapped when the
674 enclosing function is inlined. But that remapping would also have to be
675 done in the types of the PARM_DECLs of the nested function, meaning the
676 argument types of that function will disagree with the arguments in the
677 calls to that function. So we'd either have to make a copy of the nested
678 function corresponding to each time the enclosing function was inlined or
679 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
680 function. The former is not practical. The latter would still require
681 detecting this case to know when to add the conversions. So, for now at
682 least, we don't inline such an enclosing function.
684 We have to do that check recursively, so here we return whether
685 FNDECL has such a nested function. ORIG_FNDECL is the function we
686 were originally trying to inline; it is used to check whether any
687 argument type is variably modified by anything within it.
689 It would be better to do this in tree-inline.c so that we could give
690 the appropriate warning for why a function can't be inlined, but that's
691 too late since the nesting structure has already been flattened and
692 adding a flag just to record this fact seems a waste of a flag. */
694 static bool
695 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
697 struct cgraph_node *cgn = cgraph_node (fndecl);
698 tree arg;
700 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
702 for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = TREE_CHAIN (arg))
703 if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
704 return true;
706 if (check_for_nested_with_variably_modified (cgn->decl, orig_fndecl))
707 return true;
710 return false;
713 /* Construct our local data structure describing the function nesting
714 tree rooted by CGN. */
716 static struct nesting_info *
717 create_nesting_tree (struct cgraph_node *cgn)
719 struct nesting_info *info = XCNEW (struct nesting_info);
720 info->field_map = pointer_map_create ();
721 info->var_map = pointer_map_create ();
722 info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
723 info->context = cgn->decl;
725 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
727 struct nesting_info *sub = create_nesting_tree (cgn);
728 sub->outer = info;
729 sub->next = info->inner;
730 info->inner = sub;
733 /* See check_for_nested_with_variably_modified for a discussion of
734 why this has to be here. */
735 if (check_for_nested_with_variably_modified (info->context, info->context))
736 DECL_UNINLINABLE (info->context) = true;
738 return info;
741 /* Return an expression computing the static chain for TARGET_CONTEXT
742 from INFO->CONTEXT. Insert any necessary computations before GSI. */
744 static tree
745 get_static_chain (struct nesting_info *info, tree target_context,
746 gimple_stmt_iterator *gsi)
748 struct nesting_info *i;
749 tree x;
751 if (info->context == target_context)
753 x = build_addr (info->frame_decl, target_context);
755 else
757 x = get_chain_decl (info);
759 for (i = info->outer; i->context != target_context; i = i->outer)
761 tree field = get_chain_field (i);
763 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
764 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
765 x = init_tmp_var (info, x, gsi);
769 return x;
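/* An illustrative sketch (temporary names invented): if INFO is
   nested two levels below TARGET_CONTEXT, then CHAIN points at the
   immediate parent's frame and the loop above emits one load per
   intervening level, roughly

       D.1 = CHAIN->__chain;

   leaving the address of TARGET_CONTEXT's FRAME object in D.1.  When
   TARGET_CONTEXT is the immediate parent, no loads are needed and
   CHAIN itself is returned.  */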
773 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
774 frame as seen from INFO->CONTEXT. Insert any necessary computations
775 before GSI. */
777 static tree
778 get_frame_field (struct nesting_info *info, tree target_context,
779 tree field, gimple_stmt_iterator *gsi)
781 struct nesting_info *i;
782 tree x;
784 if (info->context == target_context)
786 /* Make sure frame_decl gets created. */
787 (void) get_frame_type (info);
788 x = info->frame_decl;
790 else
792 x = get_chain_decl (info);
794 for (i = info->outer; i->context != target_context; i = i->outer)
796 tree field = get_chain_field (i);
798 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
799 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
800 x = init_tmp_var (info, x, gsi);
803 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
806 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
807 return x;
810 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
812 /* A subroutine of convert_nonlocal_reference_op. Create a local variable
813 in the nested function with DECL_VALUE_EXPR set to reference the true
814 variable in the parent function. This is used both for debug info
815 and in OpenMP lowering. */
817 static tree
818 get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
820 tree target_context;
821 struct nesting_info *i;
822 tree x, field, new_decl;
823 void **slot;
825 slot = pointer_map_insert (info->var_map, decl);
827 if (*slot)
828 return (tree) *slot;
830 target_context = decl_function_context (decl);
832 /* A copy of the code in get_frame_field, but without the temporaries. */
833 if (info->context == target_context)
835 /* Make sure frame_decl gets created. */
836 (void) get_frame_type (info);
837 x = info->frame_decl;
838 i = info;
840 else
842 x = get_chain_decl (info);
843 for (i = info->outer; i->context != target_context; i = i->outer)
845 field = get_chain_field (i);
846 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
847 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
849 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
852 field = lookup_field_for_decl (i, decl, INSERT);
853 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
854 if (use_pointer_in_frame (decl))
855 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
857 /* ??? We should be remapping types as well, surely. */
858 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
859 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
860 DECL_CONTEXT (new_decl) = info->context;
861 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
862 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
863 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
864 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
865 TREE_READONLY (new_decl) = TREE_READONLY (decl);
866 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
867 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
868 if ((TREE_CODE (decl) == PARM_DECL
869 || TREE_CODE (decl) == RESULT_DECL
870 || TREE_CODE (decl) == VAR_DECL)
871 && DECL_BY_REFERENCE (decl))
872 DECL_BY_REFERENCE (new_decl) = 1;
874 SET_DECL_VALUE_EXPR (new_decl, x);
875 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
877 *slot = new_decl;
878 TREE_CHAIN (new_decl) = info->debug_var_chain;
879 info->debug_var_chain = new_decl;
881 if (!optimize
882 && info->context != target_context
883 && variably_modified_type_p (TREE_TYPE (decl), NULL))
884 note_nonlocal_vla_type (info, TREE_TYPE (decl));
886 return new_decl;
890 /* Callback for walk_gimple_stmt, rewrite all references to VAR
891 and PARM_DECLs that belong to outer functions.
893 The rewrite will involve some number of structure accesses back up
894 the static chain. E.g. for a variable FOO up one nesting level it'll
895 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
896 indirections apply to decls for which use_pointer_in_frame is true. */
898 static tree
899 convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
901 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
902 struct nesting_info *const info = (struct nesting_info *) wi->info;
903 tree t = *tp;
905 *walk_subtrees = 0;
906 switch (TREE_CODE (t))
908 case VAR_DECL:
909 /* Non-automatic variables are never processed. */
910 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
911 break;
912 /* FALLTHRU */
914 case PARM_DECL:
915 if (decl_function_context (t) != info->context)
917 tree x;
918 wi->changed = true;
920 x = get_nonlocal_debug_decl (info, t);
921 if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
923 tree target_context = decl_function_context (t);
924 struct nesting_info *i;
925 for (i = info->outer; i->context != target_context; i = i->outer)
926 continue;
927 x = lookup_field_for_decl (i, t, INSERT);
928 x = get_frame_field (info, target_context, x, &wi->gsi);
929 if (use_pointer_in_frame (t))
931 x = init_tmp_var (info, x, &wi->gsi);
932 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
936 if (wi->val_only)
938 if (wi->is_lhs)
939 x = save_tmp_var (info, x, &wi->gsi);
940 else
941 x = init_tmp_var (info, x, &wi->gsi);
944 *tp = x;
946 break;
948 case LABEL_DECL:
949 /* We're taking the address of a label from a parent function, but
950 this is not itself a non-local goto. Mark the label such that it
951 will not be deleted, much as we would with a label address in
952 static storage. */
953 if (decl_function_context (t) != info->context)
954 FORCED_LABEL (t) = 1;
955 break;
957 case ADDR_EXPR:
959 bool save_val_only = wi->val_only;
961 wi->val_only = false;
962 wi->is_lhs = false;
963 wi->changed = false;
964 walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
965 wi->val_only = true;
967 if (wi->changed)
969 tree save_context;
971 /* If we changed anything, we might no longer be directly
972 referencing a decl. */
973 save_context = current_function_decl;
974 current_function_decl = info->context;
975 recompute_tree_invariant_for_addr_expr (t);
976 current_function_decl = save_context;
978 /* If the callback converted the address argument in a context
979 where we only accept variables (and min_invariant, presumably),
980 then compute the address into a temporary. */
981 if (save_val_only)
982 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
983 t, &wi->gsi);
986 break;
988 case REALPART_EXPR:
989 case IMAGPART_EXPR:
990 case COMPONENT_REF:
991 case ARRAY_REF:
992 case ARRAY_RANGE_REF:
993 case BIT_FIELD_REF:
994 /* Go down this entire nest and just look at the final prefix and
995 anything that describes the references. Otherwise, we lose track
996 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
997 wi->val_only = true;
998 wi->is_lhs = false;
999 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1001 if (TREE_CODE (t) == COMPONENT_REF)
1002 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
1003 NULL);
1004 else if (TREE_CODE (t) == ARRAY_REF
1005 || TREE_CODE (t) == ARRAY_RANGE_REF)
1007 walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
1008 wi, NULL);
1009 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
1010 wi, NULL);
1011 walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
1012 wi, NULL);
1014 else if (TREE_CODE (t) == BIT_FIELD_REF)
1016 walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
1017 wi, NULL);
1018 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
1019 wi, NULL);
1022 wi->val_only = false;
1023 walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
1024 break;
1026 case VIEW_CONVERT_EXPR:
1027 /* Just request to look at the subtrees, leaving val_only and lhs
1028 untouched. This might actually be for !val_only + lhs, in which
1029 case we don't want to force a replacement by a temporary. */
1030 *walk_subtrees = 1;
1031 break;
1033 default:
1034 if (!IS_TYPE_OR_DECL_P (t))
1036 *walk_subtrees = 1;
1037 wi->val_only = true;
1038 wi->is_lhs = false;
1040 break;
1043 return NULL_TREE;
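/* An illustrative sketch of the rewrite above (temporary names are
   invented; FOO lives in the immediately enclosing function and is
   not use_pointer_in_frame):

       read:    bar = foo + 1;     becomes    D.1 = CHAIN->foo;
                                              bar = D.1 + 1;

       write:   foo = bar;         becomes    D.2 = bar;
                                              CHAIN->foo = D.2;

   For a variable further up the nesting tree the access first walks
   the __chain links (CHAIN->__chain->foo, and so on), and for a decl
   that use_pointer_in_frame says is kept by pointer one extra
   indirection is added.  */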
1046 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1047 struct walk_stmt_info *);
1049 /* Helper for convert_nonlocal_reference_stmt, rewrite all references to VAR
1050 and PARM_DECLs that belong to outer functions. */
1052 static bool
1053 convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1055 struct nesting_info *const info = (struct nesting_info *) wi->info;
1056 bool need_chain = false, need_stmts = false;
1057 tree clause, decl;
1058 int dummy;
1059 bitmap new_suppress;
1061 new_suppress = BITMAP_GGC_ALLOC ();
1062 bitmap_copy (new_suppress, info->suppress_expansion);
1064 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1066 switch (OMP_CLAUSE_CODE (clause))
1068 case OMP_CLAUSE_REDUCTION:
1069 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1070 need_stmts = true;
1071 goto do_decl_clause;
1073 case OMP_CLAUSE_LASTPRIVATE:
1074 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1075 need_stmts = true;
1076 goto do_decl_clause;
1078 case OMP_CLAUSE_PRIVATE:
1079 case OMP_CLAUSE_FIRSTPRIVATE:
1080 case OMP_CLAUSE_COPYPRIVATE:
1081 case OMP_CLAUSE_SHARED:
1082 do_decl_clause:
1083 decl = OMP_CLAUSE_DECL (clause);
1084 if (TREE_CODE (decl) == VAR_DECL
1085 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1086 break;
1087 if (decl_function_context (decl) != info->context)
1089 bitmap_set_bit (new_suppress, DECL_UID (decl));
1090 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1091 need_chain = true;
1093 break;
1095 case OMP_CLAUSE_SCHEDULE:
1096 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1097 break;
1098 /* FALLTHRU */
1099 case OMP_CLAUSE_IF:
1100 case OMP_CLAUSE_NUM_THREADS:
1101 wi->val_only = true;
1102 wi->is_lhs = false;
1103 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1104 &dummy, wi);
1105 break;
1107 case OMP_CLAUSE_NOWAIT:
1108 case OMP_CLAUSE_ORDERED:
1109 case OMP_CLAUSE_DEFAULT:
1110 case OMP_CLAUSE_COPYIN:
1111 case OMP_CLAUSE_COLLAPSE:
1112 case OMP_CLAUSE_UNTIED:
1113 break;
1115 default:
1116 gcc_unreachable ();
1120 info->suppress_expansion = new_suppress;
1122 if (need_stmts)
1123 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1124 switch (OMP_CLAUSE_CODE (clause))
1126 case OMP_CLAUSE_REDUCTION:
1127 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1129 tree old_context
1130 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1131 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1132 = info->context;
1133 walk_body (convert_nonlocal_reference_stmt,
1134 convert_nonlocal_reference_op, info,
1135 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
1136 walk_body (convert_nonlocal_reference_stmt,
1137 convert_nonlocal_reference_op, info,
1138 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
1139 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1140 = old_context;
1142 break;
1144 case OMP_CLAUSE_LASTPRIVATE:
1145 walk_body (convert_nonlocal_reference_stmt,
1146 convert_nonlocal_reference_op, info,
1147 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
1148 break;
1150 default:
1151 break;
1154 return need_chain;
1157 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1159 static void
1160 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1162 while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1163 type = TREE_TYPE (type);
1165 if (TYPE_NAME (type)
1166 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1167 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1168 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1170 while (POINTER_TYPE_P (type)
1171 || TREE_CODE (type) == VECTOR_TYPE
1172 || TREE_CODE (type) == FUNCTION_TYPE
1173 || TREE_CODE (type) == METHOD_TYPE)
1174 type = TREE_TYPE (type);
1176 if (TREE_CODE (type) == ARRAY_TYPE)
1178 tree domain, t;
1180 note_nonlocal_vla_type (info, TREE_TYPE (type));
1181 domain = TYPE_DOMAIN (type);
1182 if (domain)
1184 t = TYPE_MIN_VALUE (domain);
1185 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1186 && decl_function_context (t) != info->context)
1187 get_nonlocal_debug_decl (info, t);
1188 t = TYPE_MAX_VALUE (domain);
1189 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1190 && decl_function_context (t) != info->context)
1191 get_nonlocal_debug_decl (info, t);
1196 /* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs
1197 in BLOCK. */
1199 static void
1200 note_nonlocal_block_vlas (struct nesting_info *info, tree block)
1202 tree var;
1204 for (var = BLOCK_VARS (block); var; var = TREE_CHAIN (var))
1205 if (TREE_CODE (var) == VAR_DECL
1206 && variably_modified_type_p (TREE_TYPE (var), NULL)
1207 && DECL_HAS_VALUE_EXPR_P (var)
1208 && decl_function_context (var) != info->context)
1209 note_nonlocal_vla_type (info, TREE_TYPE (var));
1212 /* Callback for walk_gimple_stmt. Rewrite all references to VAR and
1213 PARM_DECLs that belong to outer functions. This handles statements
1214 that are not handled via the standard recursion done in
1215 walk_gimple_stmt. STMT is the statement to examine, DATA is as in
1216 convert_nonlocal_reference_op. Set *HANDLED_OPS_P to true if all the
1217 operands of STMT have been handled by this function. */
1219 static tree
1220 convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1221 struct walk_stmt_info *wi)
1223 struct nesting_info *info = (struct nesting_info *) wi->info;
1224 tree save_local_var_chain;
1225 bitmap save_suppress;
1226 gimple stmt = gsi_stmt (*gsi);
1228 switch (gimple_code (stmt))
1230 case GIMPLE_GOTO:
1231 /* Don't walk non-local gotos for now. */
1232 if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
1234 wi->val_only = true;
1235 wi->is_lhs = false;
1236 *handled_ops_p = true;
1237 return NULL_TREE;
1239 break;
1241 case GIMPLE_OMP_PARALLEL:
1242 case GIMPLE_OMP_TASK:
1243 save_suppress = info->suppress_expansion;
1244 if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1245 wi))
1247 tree c, decl;
1248 decl = get_chain_decl (info);
1249 c = build_omp_clause (gimple_location (stmt),
1250 OMP_CLAUSE_FIRSTPRIVATE);
1251 OMP_CLAUSE_DECL (c) = decl;
1252 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1253 gimple_omp_taskreg_set_clauses (stmt, c);
1256 save_local_var_chain = info->new_local_var_chain;
1257 info->new_local_var_chain = NULL;
1259 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1260 info, gimple_omp_body (stmt));
1262 if (info->new_local_var_chain)
1263 declare_vars (info->new_local_var_chain,
1264 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1265 false);
1266 info->new_local_var_chain = save_local_var_chain;
1267 info->suppress_expansion = save_suppress;
1268 break;
1270 case GIMPLE_OMP_FOR:
1271 save_suppress = info->suppress_expansion;
1272 convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1273 walk_gimple_omp_for (stmt, convert_nonlocal_reference_stmt,
1274 convert_nonlocal_reference_op, info);
1275 walk_body (convert_nonlocal_reference_stmt,
1276 convert_nonlocal_reference_op, info, gimple_omp_body (stmt));
1277 info->suppress_expansion = save_suppress;
1278 break;
1280 case GIMPLE_OMP_SECTIONS:
1281 save_suppress = info->suppress_expansion;
1282 convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1283 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1284 info, gimple_omp_body (stmt));
1285 info->suppress_expansion = save_suppress;
1286 break;
1288 case GIMPLE_OMP_SINGLE:
1289 save_suppress = info->suppress_expansion;
1290 convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1291 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1292 info, gimple_omp_body (stmt));
1293 info->suppress_expansion = save_suppress;
1294 break;
1296 case GIMPLE_OMP_SECTION:
1297 case GIMPLE_OMP_MASTER:
1298 case GIMPLE_OMP_ORDERED:
1299 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1300 info, gimple_omp_body (stmt));
1301 break;
1303 case GIMPLE_BIND:
1304 if (!optimize && gimple_bind_block (stmt))
1305 note_nonlocal_block_vlas (info, gimple_bind_block (stmt));
1307 *handled_ops_p = false;
1308 return NULL_TREE;
1310 case GIMPLE_COND:
1311 wi->val_only = true;
1312 wi->is_lhs = false;
1313 *handled_ops_p = false;
1314 return NULL_TREE;
1316 default:
1317 /* For every other statement that we are not interested in
1318 handling here, let the walker traverse the operands. */
1319 *handled_ops_p = false;
1320 return NULL_TREE;
1323 /* We have handled all of STMT operands, no need to traverse the operands. */
1324 *handled_ops_p = true;
1325 return NULL_TREE;
1329 /* A subroutine of convert_local_reference_op. Create a local variable
1330 in the parent function with DECL_VALUE_EXPR set to reference the
1331 field in FRAME. This is used both for debug info and in OpenMP
1332 lowering. */
1334 static tree
1335 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1337 tree x, new_decl;
1338 void **slot;
1340 slot = pointer_map_insert (info->var_map, decl);
1341 if (*slot)
1342 return (tree) *slot;
1344 /* Make sure frame_decl gets created. */
1345 (void) get_frame_type (info);
1346 x = info->frame_decl;
1347 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1349 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1350 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1351 DECL_CONTEXT (new_decl) = info->context;
1352 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1353 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1354 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1355 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1356 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1357 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1358 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1359 if ((TREE_CODE (decl) == PARM_DECL
1360 || TREE_CODE (decl) == RESULT_DECL
1361 || TREE_CODE (decl) == VAR_DECL)
1362 && DECL_BY_REFERENCE (decl))
1363 DECL_BY_REFERENCE (new_decl) = 1;
1365 SET_DECL_VALUE_EXPR (new_decl, x);
1366 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1367 *slot = new_decl;
1369 TREE_CHAIN (new_decl) = info->debug_var_chain;
1370 info->debug_var_chain = new_decl;
1372 /* Do not emit debug info twice. */
1373 DECL_IGNORED_P (decl) = 1;
1375 return new_decl;
1379 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1380 and PARM_DECLs that were referenced by inner nested functions.
1381 The rewrite will be a structure reference to the local frame variable. */
1383 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1385 static tree
1386 convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
1388 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1389 struct nesting_info *const info = (struct nesting_info *) wi->info;
1390 tree t = *tp, field, x;
1391 bool save_val_only;
1393 *walk_subtrees = 0;
1394 switch (TREE_CODE (t))
1396 case VAR_DECL:
1397 /* Non-automatic variables are never processed. */
1398 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1399 break;
1400 /* FALLTHRU */
1402 case PARM_DECL:
1403 if (decl_function_context (t) == info->context)
1405 /* If we copied a pointer to the frame, then the original decl
1406 is used unchanged in the parent function. */
1407 if (use_pointer_in_frame (t))
1408 break;
1410 /* No need to transform anything if no child references the
1411 variable. */
1412 field = lookup_field_for_decl (info, t, NO_INSERT);
1413 if (!field)
1414 break;
1415 wi->changed = true;
1417 x = get_local_debug_decl (info, t, field);
1418 if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1419 x = get_frame_field (info, info->context, field, &wi->gsi);
1421 if (wi->val_only)
1423 if (wi->is_lhs)
1424 x = save_tmp_var (info, x, &wi->gsi);
1425 else
1426 x = init_tmp_var (info, x, &wi->gsi);
1429 *tp = x;
1431 break;
1433 case ADDR_EXPR:
1434 save_val_only = wi->val_only;
1435 wi->val_only = false;
1436 wi->is_lhs = false;
1437 wi->changed = false;
1438 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
1439 wi->val_only = save_val_only;
1441 /* If we converted anything ... */
1442 if (wi->changed)
1444 tree save_context;
1446 /* Then the frame decl is now addressable. */
1447 TREE_ADDRESSABLE (info->frame_decl) = 1;
1449 save_context = current_function_decl;
1450 current_function_decl = info->context;
1451 recompute_tree_invariant_for_addr_expr (t);
1452 current_function_decl = save_context;
1454 /* If we are in a context where we only accept values, then
1455 compute the address into a temporary. */
1456 if (save_val_only)
1457 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1458 t, &wi->gsi);
1460 break;
1462 case REALPART_EXPR:
1463 case IMAGPART_EXPR:
1464 case COMPONENT_REF:
1465 case ARRAY_REF:
1466 case ARRAY_RANGE_REF:
1467 case BIT_FIELD_REF:
1468 /* Go down this entire nest and just look at the final prefix and
1469 anything that describes the references. Otherwise, we lose track
1470 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1471 save_val_only = wi->val_only;
1472 wi->val_only = true;
1473 wi->is_lhs = false;
1474 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1476 if (TREE_CODE (t) == COMPONENT_REF)
1477 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1478 NULL);
1479 else if (TREE_CODE (t) == ARRAY_REF
1480 || TREE_CODE (t) == ARRAY_RANGE_REF)
1482 walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
1483 NULL);
1484 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1485 NULL);
1486 walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
1487 NULL);
1489 else if (TREE_CODE (t) == BIT_FIELD_REF)
1491 walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
1492 NULL);
1493 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1494 NULL);
1497 wi->val_only = false;
1498 walk_tree (tp, convert_local_reference_op, wi, NULL);
1499 wi->val_only = save_val_only;
1500 break;
1502 case VIEW_CONVERT_EXPR:
1503 /* Just request to look at the subtrees, leaving val_only and lhs
1504 untouched. This might actually be for !val_only + lhs, in which
1505 case we don't want to force a replacement by a temporary. */
1506 *walk_subtrees = 1;
1507 break;
1509 default:
1510 if (!IS_TYPE_OR_DECL_P (t))
1512 *walk_subtrees = 1;
1513 wi->val_only = true;
1514 wi->is_lhs = false;
1516 break;
1519 return NULL_TREE;
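/* An illustrative sketch of the parent-side rewrite (names invented):
   if some child function uses the parent's variable FOO, then within
   the parent itself

       bar = foo + 1;     becomes    D.3 = FRAME.foo;
                                     bar = D.3 + 1;

   so the parent reads and writes the frame copy directly, while an
   artificial debug decl whose DECL_VALUE_EXPR is FRAME.foo keeps the
   original name visible to the debugger.  */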
1522 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
1523 struct walk_stmt_info *);
1525 /* Helper for convert_local_reference_stmt. Convert all the references in
1526 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference_op. */
1528 static bool
1529 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1531 struct nesting_info *const info = (struct nesting_info *) wi->info;
1532 bool need_frame = false, need_stmts = false;
1533 tree clause, decl;
1534 int dummy;
1535 bitmap new_suppress;
1537 new_suppress = BITMAP_GGC_ALLOC ();
1538 bitmap_copy (new_suppress, info->suppress_expansion);
1540 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1542 switch (OMP_CLAUSE_CODE (clause))
1544 case OMP_CLAUSE_REDUCTION:
1545 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1546 need_stmts = true;
1547 goto do_decl_clause;
1549 case OMP_CLAUSE_LASTPRIVATE:
1550 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1551 need_stmts = true;
1552 goto do_decl_clause;
1554 case OMP_CLAUSE_PRIVATE:
1555 case OMP_CLAUSE_FIRSTPRIVATE:
1556 case OMP_CLAUSE_COPYPRIVATE:
1557 case OMP_CLAUSE_SHARED:
1558 do_decl_clause:
1559 decl = OMP_CLAUSE_DECL (clause);
1560 if (TREE_CODE (decl) == VAR_DECL
1561 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1562 break;
1563 if (decl_function_context (decl) == info->context
1564 && !use_pointer_in_frame (decl))
1566 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1567 if (field)
1569 bitmap_set_bit (new_suppress, DECL_UID (decl));
1570 OMP_CLAUSE_DECL (clause)
1571 = get_local_debug_decl (info, decl, field);
1572 need_frame = true;
1575 break;
1577 case OMP_CLAUSE_SCHEDULE:
1578 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1579 break;
1580 /* FALLTHRU */
1581 case OMP_CLAUSE_IF:
1582 case OMP_CLAUSE_NUM_THREADS:
1583 wi->val_only = true;
1584 wi->is_lhs = false;
1585 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
1586 wi);
1587 break;
1589 case OMP_CLAUSE_NOWAIT:
1590 case OMP_CLAUSE_ORDERED:
1591 case OMP_CLAUSE_DEFAULT:
1592 case OMP_CLAUSE_COPYIN:
1593 case OMP_CLAUSE_COLLAPSE:
1594 case OMP_CLAUSE_UNTIED:
1595 break;
1597 default:
1598 gcc_unreachable ();
1602 info->suppress_expansion = new_suppress;
1604 if (need_stmts)
1605 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1606 switch (OMP_CLAUSE_CODE (clause))
1608 case OMP_CLAUSE_REDUCTION:
1609 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1611 tree old_context
1612 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1613 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1614 = info->context;
1615 walk_body (convert_local_reference_stmt,
1616 convert_local_reference_op, info,
1617 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
1618 walk_body (convert_local_reference_stmt,
1619 convert_local_reference_op, info,
1620 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
1621 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1622 = old_context;
1624 break;
1626 case OMP_CLAUSE_LASTPRIVATE:
1627 walk_body (convert_local_reference_stmt,
1628 convert_local_reference_op, info,
1629 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
1630 break;
1632 default:
1633 break;
1636 return need_frame;
1640 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1641 and PARM_DECLs that were referenced by inner nested functions.
1642 The rewrite will be a structure reference to the local frame variable. */
1644 static tree
1645 convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1646 struct walk_stmt_info *wi)
1648 struct nesting_info *info = (struct nesting_info *) wi->info;
1649 tree save_local_var_chain;
1650 bitmap save_suppress;
1651 gimple stmt = gsi_stmt (*gsi);
1653 switch (gimple_code (stmt))
1655 case GIMPLE_OMP_PARALLEL:
1656 case GIMPLE_OMP_TASK:
1657 save_suppress = info->suppress_expansion;
1658 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1659 wi))
1661 tree c;
1662 (void) get_frame_type (info);
1663 c = build_omp_clause (gimple_location (stmt),
1664 OMP_CLAUSE_SHARED);
1665 OMP_CLAUSE_DECL (c) = info->frame_decl;
1666 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1667 gimple_omp_taskreg_set_clauses (stmt, c);
1670 save_local_var_chain = info->new_local_var_chain;
1671 info->new_local_var_chain = NULL;
1673 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
1674 gimple_omp_body (stmt));
1676 if (info->new_local_var_chain)
1677 declare_vars (info->new_local_var_chain,
1678 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
1679 info->new_local_var_chain = save_local_var_chain;
1680 info->suppress_expansion = save_suppress;
1681 break;
1683 case GIMPLE_OMP_FOR:
1684 save_suppress = info->suppress_expansion;
1685 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1686 walk_gimple_omp_for (stmt, convert_local_reference_stmt,
1687 convert_local_reference_op, info);
1688 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1689 info, gimple_omp_body (stmt));
1690 info->suppress_expansion = save_suppress;
1691 break;
1693 case GIMPLE_OMP_SECTIONS:
1694 save_suppress = info->suppress_expansion;
1695 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1696 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1697 info, gimple_omp_body (stmt));
1698 info->suppress_expansion = save_suppress;
1699 break;
1701 case GIMPLE_OMP_SINGLE:
1702 save_suppress = info->suppress_expansion;
1703 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1704 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1705 info, gimple_omp_body (stmt));
1706 info->suppress_expansion = save_suppress;
1707 break;
1709 case GIMPLE_OMP_SECTION:
1710 case GIMPLE_OMP_MASTER:
1711 case GIMPLE_OMP_ORDERED:
1712 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1713 info, gimple_omp_body (stmt));
1714 break;
1716 case GIMPLE_COND:
1717 wi->val_only = true;
1718 wi->is_lhs = false;
1719 *handled_ops_p = false;
1720 return NULL_TREE;
1722 default:
1723 /* For every other statement that we are not interested in
1724 handling here, let the walker traverse the operands. */
1725 *handled_ops_p = false;
1726 return NULL_TREE;
1729 /* Indicate that we have handled all the operands ourselves. */
1730 *handled_ops_p = true;
1731 return NULL_TREE;
1735 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
1736 that reference labels from outer functions. The rewrite will be a
1737 call to __builtin_nonlocal_goto. */
1739 static tree
1740 convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1741 struct walk_stmt_info *wi)
1743 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
1744 tree label, new_label, target_context, x, field;
1745 void **slot;
1746 gimple call;
1747 gimple stmt = gsi_stmt (*gsi);
1749 if (gimple_code (stmt) != GIMPLE_GOTO)
1751 *handled_ops_p = false;
1752 return NULL_TREE;
1755 label = gimple_goto_dest (stmt);
1756 if (TREE_CODE (label) != LABEL_DECL)
1758 *handled_ops_p = false;
1759 return NULL_TREE;
1762 target_context = decl_function_context (label);
1763 if (target_context == info->context)
1765 *handled_ops_p = false;
1766 return NULL_TREE;
1769 for (i = info->outer; target_context != i->context; i = i->outer)
1770 continue;
1772 /* The original user label may also be used for a normal goto, therefore
1773 we must create a new label that will actually receive the abnormal
1774 control transfer. This new label will be marked LABEL_NONLOCAL; this
1775 mark will trigger proper behavior in the cfg, as well as cause the
1776 (hairy target-specific) non-local goto receiver code to be generated
1777 when we expand rtl. Enter this association into var_map so that we
1778 can insert the new label into the IL during a second pass. */
1779 slot = pointer_map_insert (i->var_map, label);
1780 if (*slot == NULL)
1782 new_label = create_artificial_label (UNKNOWN_LOCATION);
1783 DECL_NONLOCAL (new_label) = 1;
1784 *slot = new_label;
1786 else
1787 new_label = (tree) *slot;
1789 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
1790 field = get_nl_goto_field (i);
1791 x = get_frame_field (info, target_context, field, &wi->gsi);
1792 x = build_addr (x, target_context);
1793 x = gsi_gimplify_val (info, x, &wi->gsi);
1794 call = gimple_build_call (implicit_built_in_decls[BUILT_IN_NONLOCAL_GOTO], 2,
1795 build_addr (new_label, target_context), x);
1796 gsi_replace (&wi->gsi, call, false);
1798 /* We have handled all of STMT's operands, no need to keep going. */
1799 *handled_ops_p = true;
1800 return NULL_TREE;
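/* An illustrative sketch (label names invented): a "goto lab" in a
   nested function, where LAB belongs to an enclosing function, is
   replaced by roughly

       __builtin_nonlocal_goto (&nonlocal_lab, &CHAIN->__nl_goto_buf);

   where NONLOCAL_LAB is the artificial DECL_NONLOCAL label that
   convert_nl_goto_receiver later installs next to LAB in the
   enclosing function.  */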
1804 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_LABELs whose labels
1805 are referenced via nonlocal goto from a nested function. The rewrite
1806 will involve installing a newly generated DECL_NONLOCAL label, and
1807 (potentially) a branch around the rtl gunk that is assumed to be
1808 attached to such a label. */
1810 static tree
1811 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1812 struct walk_stmt_info *wi)
1814 struct nesting_info *const info = (struct nesting_info *) wi->info;
1815 tree label, new_label;
1816 gimple_stmt_iterator tmp_gsi;
1817 void **slot;
1818 gimple stmt = gsi_stmt (*gsi);
1820 if (gimple_code (stmt) != GIMPLE_LABEL)
1822 *handled_ops_p = false;
1823 return NULL_TREE;
1826 label = gimple_label_label (stmt);
1828 slot = pointer_map_contains (info->var_map, label);
1829 if (!slot)
1831 *handled_ops_p = false;
1832 return NULL_TREE;
1835 /* If there's any possibility that the previous statement falls through,
1836 then we must branch around the new non-local label. */
1837 tmp_gsi = wi->gsi;
1838 gsi_prev (&tmp_gsi);
1839 if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
1841 gimple stmt = gimple_build_goto (label);
1842 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1845 new_label = (tree) *slot;
1846 stmt = gimple_build_label (new_label);
1847 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1849 *handled_ops_p = true;
1850 return NULL_TREE;
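/* An illustrative sketch of the result (label names invented): for a
   user label LAB that is the target of a nonlocal goto, the enclosing
   function ends up containing

           goto lab;        <- only when the previous statement can
                               fall through; skips the receiver code
       nonlocal_lab:        <- DECL_NONLOCAL; the target used by
                               __builtin_nonlocal_goto
       lab:
           ...

   so the normal control flow jumps over the nonlocal-goto receiver
   that will later be attached to NONLOCAL_LAB.  */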
1854 /* Called via walk_function+walk_stmt, rewrite all references to addresses
1855 of nested functions that require the use of trampolines. The rewrite
1856 will involve a reference to a trampoline generated for the occasion. */
1858 static tree
1859 convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
1861 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1862 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
1863 tree t = *tp, decl, target_context, x, builtin;
1864 gimple call;
1866 *walk_subtrees = 0;
1867 switch (TREE_CODE (t))
1868 {
1869 case ADDR_EXPR:
1870 /* Build
1871 T.1 = &CHAIN->tramp;
1872 T.2 = __builtin_adjust_trampoline (T.1);
1873 T.3 = (func_type)T.2;
1874 */
1875
1876 decl = TREE_OPERAND (t, 0);
1877 if (TREE_CODE (decl) != FUNCTION_DECL)
1878 break;
1880 /* Only need to process nested functions. */
1881 target_context = decl_function_context (decl);
1882 if (!target_context)
1883 break;
1885 /* If the nested function doesn't use a static chain, then
1886 it doesn't need a trampoline. */
1887 if (!DECL_STATIC_CHAIN (decl))
1888 break;
1890 /* If we don't want a trampoline, then don't build one. */
1891 if (TREE_NO_TRAMPOLINE (t))
1892 break;
1894 /* Lookup the immediate parent of the callee, as that's where
1895 we need to insert the trampoline. */
1896 for (i = info; i->context != target_context; i = i->outer)
1897 continue;
1898 x = lookup_tramp_for_decl (i, decl, INSERT);
1900 /* Compute the address of the field holding the trampoline. */
1901 x = get_frame_field (info, target_context, x, &wi->gsi);
1902 x = build_addr (x, target_context);
1903 x = gsi_gimplify_val (info, x, &wi->gsi);
1905 /* Do machine-specific ugliness. Normally this will involve
1906 computing extra alignment, but it can really be anything. */
1907 builtin = implicit_built_in_decls[BUILT_IN_ADJUST_TRAMPOLINE];
1908 call = gimple_build_call (builtin, 1, x);
1909 x = init_tmp_var_with_call (info, &wi->gsi, call);
1911 /* Cast back to the proper function type. */
1912 x = build1 (NOP_EXPR, TREE_TYPE (t), x);
1913 x = init_tmp_var (info, x, &wi->gsi);
1915 *tp = x;
1916 break;
1918 default:
1919 if (!IS_TYPE_OR_DECL_P (t))
1920 *walk_subtrees = 1;
1921 break;
1922 }
1923
1924 return NULL_TREE;
1925 }
1926
1927
1928 /* Called via walk_function+walk_gimple_stmt, rewrite all references
1929 to addresses of nested functions that require the use of
1930 trampolines. The rewrite will involve a reference to a trampoline
1931 generated for the occasion. */
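/* Editorial note (not part of the original source): for a nested function
   "f", a direct call such as

       f (1);                  direct call: no trampoline, the static chain
                               is added later by convert_gimple_call

   is left alone here, whereas an escaping use in an argument list, e.g.
   some_callback_taker (f), contains an ADDR_EXPR and is rewritten by
   convert_tramp_reference_op; that is why only the call arguments are
   walked below.  "some_callback_taker" is a hypothetical name.  */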
1933 static tree
1934 convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1935 struct walk_stmt_info *wi)
1936 {
1937 gimple stmt = gsi_stmt (*gsi);
1938
1939 switch (gimple_code (stmt))
1940 {
1941 case GIMPLE_CALL:
1942 {
1943 /* Only walk call arguments, lest we generate trampolines for
1944 direct calls. */
1945 unsigned long i, nargs = gimple_call_num_args (stmt);
1946 for (i = 0; i < nargs; i++)
1947 walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
1948 wi, NULL);
1950 *handled_ops_p = true;
1951 return NULL_TREE;
1952 }
1953
1954 default:
1955 break;
1956 }
1957
1958 *handled_ops_p = false;
1959 return NULL_TREE;
1960 }
1961
1962
1963
1964 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
1965 that reference nested functions to make sure that the static chain
1966 is set up properly for the call. */
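/* Editorial illustration (not part of the original source).  For

       void outer (void)
       {
         int x = 0;
         void inner (void) { x++; }
         inner ();
       }

   inner reads x out of outer's frame, so DECL_STATIC_CHAIN (inner) is set
   and the call "inner ()" gets the address of outer's FRAME object as its
   static chain operand via gimple_call_set_chain below; the FRAME name is
   illustrative.  */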
1968 static tree
1969 convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1970 struct walk_stmt_info *wi)
1971 {
1972 struct nesting_info *const info = (struct nesting_info *) wi->info;
1973 tree decl, target_context;
1974 char save_static_chain_added;
1975 int i;
1976 gimple stmt = gsi_stmt (*gsi);
1978 switch (gimple_code (stmt))
1979 {
1980 case GIMPLE_CALL:
1981 if (gimple_call_chain (stmt))
1982 break;
1983 decl = gimple_call_fndecl (stmt);
1984 if (!decl)
1985 break;
1986 target_context = decl_function_context (decl);
1987 if (target_context && DECL_STATIC_CHAIN (decl))
1988 {
1989 gimple_call_set_chain (stmt, get_static_chain (info, target_context,
1990 &wi->gsi));
1991 info->static_chain_added |= (1 << (info->context != target_context));
1992 }
1993 break;
1994
1995 case GIMPLE_OMP_PARALLEL:
1996 case GIMPLE_OMP_TASK:
1997 save_static_chain_added = info->static_chain_added;
1998 info->static_chain_added = 0;
1999 walk_body (convert_gimple_call, NULL, info, gimple_omp_body (stmt));
2000 for (i = 0; i < 2; i++)
2001 {
2002 tree c, decl;
2003 if ((info->static_chain_added & (1 << i)) == 0)
2004 continue;
2005 decl = i ? get_chain_decl (info) : info->frame_decl;
2006 /* Don't add CHAIN.* or FRAME.* twice. */
2007 for (c = gimple_omp_taskreg_clauses (stmt);
2008 c;
2009 c = OMP_CLAUSE_CHAIN (c))
2010 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2011 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2012 && OMP_CLAUSE_DECL (c) == decl)
2013 break;
2014 if (c == NULL)
2015 {
2016 c = build_omp_clause (gimple_location (stmt),
2017 i ? OMP_CLAUSE_FIRSTPRIVATE
2018 : OMP_CLAUSE_SHARED);
2019 OMP_CLAUSE_DECL (c) = decl;
2020 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2021 gimple_omp_taskreg_set_clauses (stmt, c);
2022 }
2023 }
2024 info->static_chain_added |= save_static_chain_added;
2025 break;
2027 case GIMPLE_OMP_FOR:
2028 walk_body (convert_gimple_call, NULL, info,
2029 gimple_omp_for_pre_body (stmt));
2030 /* FALLTHRU */
2031 case GIMPLE_OMP_SECTIONS:
2032 case GIMPLE_OMP_SECTION:
2033 case GIMPLE_OMP_SINGLE:
2034 case GIMPLE_OMP_MASTER:
2035 case GIMPLE_OMP_ORDERED:
2036 case GIMPLE_OMP_CRITICAL:
2037 walk_body (convert_gimple_call, NULL, info, gimple_omp_body (stmt));
2038 break;
2040 default:
2041 /* Keep looking for other operands. */
2042 *handled_ops_p = false;
2043 return NULL_TREE;
2044 }
2045
2046 *handled_ops_p = true;
2047 return NULL_TREE;
2048 }
2049
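/* Editorial illustration (not part of the original source).  If a nested
   function body contains

       #pragma omp parallel
       x++;

   where x lives in an enclosing frame, the parallel body reaches it
   through CHAIN.* / FRAME.*, and the loop above conceptually adds

       #pragma omp parallel shared (FRAME.n) firstprivate (CHAIN.n)

   (only for the ones actually used, and never twice), so the OMP child
   function still sees them; the names are illustrative.  */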
2050 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
2051 call expressions. At the same time, determine if a nested function
2052 actually uses its static chain; if not, remember that. */
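/* Editorial illustration (not part of the original source) of why the
   fixed-point loop below is needed:

       void outer (void)
       {
         int x;
         void mid (void)
         {
           void inner (void) { x++; }
           inner ();
         }
         mid ();
       }

   mid never touches x itself, so its static chain is optimistically
   cleared; but lowering the call "inner ()" makes mid acquire a chain so
   inner can reach outer's frame, and the call "mid ()" must then be
   revisited to pass that chain -- hence the iteration until no
   DECL_STATIC_CHAIN bit changes.  */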
2054 static void
2055 convert_all_function_calls (struct nesting_info *root)
2056 {
2057 struct nesting_info *n;
2058 int iter_count;
2059 bool any_changed;
2061 /* First, optimistically clear static_chain for all decls that haven't
2062 used the static chain already for variable access. */
2063 FOR_EACH_NEST_INFO (n, root)
2064 {
2065 tree decl = n->context;
2066 if (!n->outer || (!n->chain_decl && !n->chain_field))
2067 {
2068 DECL_STATIC_CHAIN (decl) = 0;
2069 if (dump_file && (dump_flags & TDF_DETAILS))
2070 fprintf (dump_file, "Guessing no static-chain for %s\n",
2071 lang_hooks.decl_printable_name (decl, 2));
2072 }
2073 else
2074 DECL_STATIC_CHAIN (decl) = 1;
2075 }
2076
2077 /* Walk the functions and perform transformations. Note that these
2078 transformations can induce new uses of the static chain, which in turn
2079 require re-examining all users of the decl. */
2080 /* ??? It would make sense to try to use the call graph to speed this up,
2081 but the call graph hasn't really been built yet. Even if it did, we
2082 would still need to iterate in this loop since address-of references
2083 wouldn't show up in the callgraph anyway. */
2084 iter_count = 0;
2085 do
2086 {
2087 any_changed = false;
2088 iter_count++;
2090 if (dump_file && (dump_flags & TDF_DETAILS))
2091 fputc ('\n', dump_file);
2093 FOR_EACH_NEST_INFO (n, root)
2094 {
2095 tree decl = n->context;
2096 bool old_static_chain = DECL_STATIC_CHAIN (decl);
2098 walk_function (convert_tramp_reference_stmt,
2099 convert_tramp_reference_op, n);
2100 walk_function (convert_gimple_call, NULL, n);
2102 /* If a call to another function created the use of a chain
2103 within this function, we'll have to continue iteration. */
2104 if (!old_static_chain && DECL_STATIC_CHAIN (decl))
2105 any_changed = true;
2106 }
2107 }
2108 while (any_changed);
2110 if (dump_file && (dump_flags & TDF_DETAILS))
2111 fprintf (dump_file, "convert_all_function_calls iterations: %d\n\n",
2112 iter_count);
2113 }
2114
2115 struct nesting_copy_body_data
2116 {
2117 copy_body_data cb;
2118 struct nesting_info *root;
2119 };
2120
2121 /* A helper subroutine for debug_var_chain type remapping. */
2123 static tree
2124 nesting_copy_decl (tree decl, copy_body_data *id)
2125 {
2126 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2127 void **slot = pointer_map_contains (nid->root->var_map, decl);
2129 if (slot)
2130 return (tree) *slot;
2132 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2133 {
2134 tree new_decl = copy_decl_no_change (decl, id);
2135 DECL_ORIGINAL_TYPE (new_decl)
2136 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
2137 return new_decl;
2138 }
2139
2140 if (TREE_CODE (decl) == VAR_DECL
2141 || TREE_CODE (decl) == PARM_DECL
2142 || TREE_CODE (decl) == RESULT_DECL)
2143 return decl;
2145 return copy_decl_no_change (decl, id);
2146 }
2147
2148 /* A helper function for remap_vla_decls. See if *TP contains
2149 some remapped variables. */
2151 static tree
2152 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2153 {
2154 struct nesting_info *root = (struct nesting_info *) data;
2155 tree t = *tp;
2156 void **slot;
2158 if (DECL_P (t))
2159 {
2160 *walk_subtrees = 0;
2161 slot = pointer_map_contains (root->var_map, t);
2163 if (slot)
2164 return (tree) *slot;
2165 }
2166 return NULL;
2167 }
2168
2169 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
2170 involved. */
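/* Editorial illustration (not part of the original source).  For

       void outer (int n)
       {
         int vla[n];
         void inner (void) { vla[0] = n; }
         inner ();
       }

   n and the internal variables describing vla's variably modified type
   and DECL_VALUE_EXPR may have been given replacements that live in
   outer's frame; the walk below rewrites such value expressions and
   types in terms of the replacement decls recorded in var_map.  */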
2172 static void
2173 remap_vla_decls (tree block, struct nesting_info *root)
2174 {
2175 tree var, subblock, val, type;
2176 struct nesting_copy_body_data id;
2178 for (subblock = BLOCK_SUBBLOCKS (block);
2179 subblock;
2180 subblock = BLOCK_CHAIN (subblock))
2181 remap_vla_decls (subblock, root);
2183 for (var = BLOCK_VARS (block); var; var = TREE_CHAIN (var))
2184 {
2185 if (TREE_CODE (var) == VAR_DECL
2186 && variably_modified_type_p (TREE_TYPE (var), NULL)
2187 && DECL_HAS_VALUE_EXPR_P (var))
2188 {
2189 type = TREE_TYPE (var);
2190 val = DECL_VALUE_EXPR (var);
2191 if (walk_tree (&type, contains_remapped_vars, root, NULL) != NULL
2192 || walk_tree (&val, contains_remapped_vars, root, NULL) != NULL)
2193 break;
2194 }
2195 }
2196 if (var == NULL_TREE)
2197 return;
2199 memset (&id, 0, sizeof (id));
2200 id.cb.copy_decl = nesting_copy_decl;
2201 id.cb.decl_map = pointer_map_create ();
2202 id.root = root;
2204 for (; var; var = TREE_CHAIN (var))
2205 if (TREE_CODE (var) == VAR_DECL
2206 && variably_modified_type_p (TREE_TYPE (var), NULL)
2207 && DECL_HAS_VALUE_EXPR_P (var))
2208 {
2209 struct nesting_info *i;
2210 tree newt, t, context;
2212 t = type = TREE_TYPE (var);
2213 val = DECL_VALUE_EXPR (var);
2214 if (walk_tree (&type, contains_remapped_vars, root, NULL) == NULL
2215 && walk_tree (&val, contains_remapped_vars, root, NULL) == NULL)
2216 continue;
2218 context = decl_function_context (var);
2219 for (i = root; i; i = i->outer)
2220 if (i->context == context)
2221 break;
2223 if (i == NULL)
2224 continue;
2226 id.cb.src_fn = i->context;
2227 id.cb.dst_fn = i->context;
2228 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2230 TREE_TYPE (var) = newt = remap_type (type, &id.cb);
2231 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2232 {
2233 newt = TREE_TYPE (newt);
2234 t = TREE_TYPE (t);
2235 }
2236 if (TYPE_NAME (newt)
2237 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2238 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2239 && newt != t
2240 && TYPE_NAME (newt) == TYPE_NAME (t))
2241 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2243 walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
2244 if (val != DECL_VALUE_EXPR (var))
2245 SET_DECL_VALUE_EXPR (var, val);
2246 }
2247
2248 pointer_map_destroy (id.cb.decl_map);
2249 }
2250
2251 /* Do "everything else" to clean up or complete state collected by the
2252 various walking passes -- lay out the types and decls, generate code
2253 to initialize the frame decl, store critical expressions in the
2254 struct function for rtl to find. */
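/* Editorial illustration (not part of the original source) of the kind of
   prologue this builds for the outer function, assuming a remapped
   parameter, a chain field and one trampoline:

       FRAME.x = x;
       FRAME.__chain = CHAIN.1;
       __builtin_init_trampoline (&FRAME.tramp, &inner, &FRAME);

   The statements are prepended to the outermost GIMPLE_BIND, and the
   field and variable names are illustrative.  */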
2256 static void
2257 finalize_nesting_tree_1 (struct nesting_info *root)
2258 {
2259 gimple_seq stmt_list;
2260 gimple stmt;
2261 tree context = root->context;
2262 struct function *sf;
2264 stmt_list = NULL;
2266 /* If we created a non-local frame type or decl, we need to lay them
2267 out at this time. */
2268 if (root->frame_type)
2269 {
2270 /* In some cases the frame type will trigger the -Wpadded warning.
2271 This is not helpful; suppress it. */
2272 int save_warn_padded = warn_padded;
2273 tree *adjust;
2275 warn_padded = 0;
2276 layout_type (root->frame_type);
2277 warn_padded = save_warn_padded;
2278 layout_decl (root->frame_decl, 0);
2280 /* Remove root->frame_decl from root->new_local_var_chain, so
2281 that we can declare it also in the lexical blocks, which
2282 helps ensure virtual regs that end up appearing in its RTL
2283 expression get substituted in instantiate_virtual_regs(). */
2284 for (adjust = &root->new_local_var_chain;
2285 *adjust != root->frame_decl;
2286 adjust = &TREE_CHAIN (*adjust))
2287 gcc_assert (TREE_CHAIN (*adjust));
2288 *adjust = TREE_CHAIN (*adjust);
2290 TREE_CHAIN (root->frame_decl) = NULL_TREE;
2291 declare_vars (root->frame_decl,
2292 gimple_seq_first_stmt (gimple_body (context)), true);
2293 }
2294
2295 /* If any parameters were referenced non-locally, then we need to
2296 insert a copy. Likewise, if any variables were referenced by
2297 pointer, we need to initialize the address. */
2298 if (root->any_parm_remapped)
2299 {
2300 tree p;
2301 for (p = DECL_ARGUMENTS (context); p ; p = TREE_CHAIN (p))
2302 {
2303 tree field, x, y;
2305 field = lookup_field_for_decl (root, p, NO_INSERT);
2306 if (!field)
2307 continue;
2309 if (use_pointer_in_frame (p))
2310 x = build_addr (p, context);
2311 else
2312 x = p;
2314 y = build3 (COMPONENT_REF, TREE_TYPE (field),
2315 root->frame_decl, field, NULL_TREE);
2316 stmt = gimple_build_assign (y, x);
2317 gimple_seq_add_stmt (&stmt_list, stmt);
2318 /* If the assignment is from a non-register the stmt is
2319 not valid gimple. Make it so by using a temporary instead. */
2320 if (!is_gimple_reg (x)
2321 && is_gimple_reg_type (TREE_TYPE (x)))
2322 {
2323 gimple_stmt_iterator gsi = gsi_last (stmt_list);
2324 x = init_tmp_var (root, x, &gsi);
2325 gimple_assign_set_rhs1 (stmt, x);
2326 }
2327 }
2328 }
2329
2330 /* If a chain_field was created, then it needs to be initialized
2331 from chain_decl. */
2332 if (root->chain_field)
2333 {
2334 tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
2335 root->frame_decl, root->chain_field, NULL_TREE);
2336 stmt = gimple_build_assign (x, get_chain_decl (root));
2337 gimple_seq_add_stmt (&stmt_list, stmt);
2338 }
2339
2340 /* If trampolines were created, then we need to initialize them. */
2341 if (root->any_tramp_created)
2342 {
2343 struct nesting_info *i;
2344 for (i = root->inner; i ; i = i->next)
2345 {
2346 tree arg1, arg2, arg3, x, field;
2348 field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
2349 if (!field)
2350 continue;
2352 gcc_assert (DECL_STATIC_CHAIN (i->context));
2353 arg3 = build_addr (root->frame_decl, context);
2355 arg2 = build_addr (i->context, context);
2357 x = build3 (COMPONENT_REF, TREE_TYPE (field),
2358 root->frame_decl, field, NULL_TREE);
2359 arg1 = build_addr (x, context);
2361 x = implicit_built_in_decls[BUILT_IN_INIT_TRAMPOLINE];
2362 stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
2363 gimple_seq_add_stmt (&stmt_list, stmt);
2364 }
2365 }
2366
2367 /* If we created initialization statements, insert them. */
2368 if (stmt_list)
2369 {
2370 gimple bind;
2371 annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
2372 bind = gimple_seq_first_stmt (gimple_body (context));
2373 gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
2374 gimple_bind_set_body (bind, stmt_list);
2375 }
2376
2377 /* If a chain_decl was created, then it needs to be registered with
2378 struct function so that it gets initialized from the static chain
2379 register at the beginning of the function. */
2380 sf = DECL_STRUCT_FUNCTION (root->context);
2381 sf->static_chain_decl = root->chain_decl;
2383 /* Similarly for the non-local goto save area. */
2384 if (root->nl_goto_field)
2385 {
2386 sf->nonlocal_goto_save_area
2387 = get_frame_field (root, context, root->nl_goto_field, NULL);
2388 sf->has_nonlocal_label = 1;
2389 }
2390
2391 /* Make sure all new local variables get inserted into the
2392 proper BIND_EXPR. */
2393 if (root->new_local_var_chain)
2394 declare_vars (root->new_local_var_chain,
2395 gimple_seq_first_stmt (gimple_body (root->context)),
2396 false);
2398 if (root->debug_var_chain)
2399 {
2400 tree debug_var;
2401 gimple scope;
2403 remap_vla_decls (DECL_INITIAL (root->context), root);
2405 for (debug_var = root->debug_var_chain; debug_var;
2406 debug_var = TREE_CHAIN (debug_var))
2407 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
2408 break;
2410 /* If there are any debug decls with variable length types,
2411 remap those types using other debug_var_chain variables. */
2412 if (debug_var)
2413 {
2414 struct nesting_copy_body_data id;
2416 memset (&id, 0, sizeof (id));
2417 id.cb.copy_decl = nesting_copy_decl;
2418 id.cb.decl_map = pointer_map_create ();
2419 id.root = root;
2421 for (; debug_var; debug_var = TREE_CHAIN (debug_var))
2422 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
2423 {
2424 tree type = TREE_TYPE (debug_var);
2425 tree newt, t = type;
2426 struct nesting_info *i;
2428 for (i = root; i; i = i->outer)
2429 if (variably_modified_type_p (type, i->context))
2430 break;
2432 if (i == NULL)
2433 continue;
2435 id.cb.src_fn = i->context;
2436 id.cb.dst_fn = i->context;
2437 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2439 TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
2440 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2441 {
2442 newt = TREE_TYPE (newt);
2443 t = TREE_TYPE (t);
2444 }
2445 if (TYPE_NAME (newt)
2446 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2447 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2448 && newt != t
2449 && TYPE_NAME (newt) == TYPE_NAME (t))
2450 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2451 }
2452
2453 pointer_map_destroy (id.cb.decl_map);
2454 }
2455
2456 scope = gimple_seq_first_stmt (gimple_body (root->context));
2457 if (gimple_bind_block (scope))
2458 declare_vars (root->debug_var_chain, scope, true);
2459 else
2460 BLOCK_VARS (DECL_INITIAL (root->context))
2461 = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
2462 root->debug_var_chain);
2463 }
2464
2465 /* Dump the translated tree function. */
2466 if (dump_file)
2467 {
2468 fputs ("\n\n", dump_file);
2469 dump_function_to_file (root->context, dump_file, dump_flags);
2470 }
2471 }
2472
2473 static void
2474 finalize_nesting_tree (struct nesting_info *root)
2475 {
2476 struct nesting_info *n;
2477 FOR_EACH_NEST_INFO (n, root)
2478 finalize_nesting_tree_1 (n);
2479 }
2480
2481 /* Unnest the nodes and pass them to cgraph. */
2483 static void
2484 unnest_nesting_tree_1 (struct nesting_info *root)
2485 {
2486 struct cgraph_node *node = cgraph_node (root->context);
2488 /* For nested functions update the cgraph to reflect unnesting.
2489 We also delay finalizing of these functions up to this point. */
2490 if (node->origin)
2491 {
2492 cgraph_unnest_node (cgraph_node (root->context));
2493 cgraph_finalize_function (root->context, true);
2494 }
2495 }
2496
2497 static void
2498 unnest_nesting_tree (struct nesting_info *root)
2499 {
2500 struct nesting_info *n;
2501 FOR_EACH_NEST_INFO (n, root)
2502 unnest_nesting_tree_1 (n);
2503 }
2504
2505 /* Free the data structures allocated during this pass. */
2507 static void
2508 free_nesting_tree (struct nesting_info *root)
2509 {
2510 struct nesting_info *node, *next;
2512 node = iter_nestinfo_start (root);
2513 do
2514 {
2515 next = iter_nestinfo_next (node);
2516 pointer_map_destroy (node->var_map);
2517 pointer_map_destroy (node->field_map);
2518 free (node);
2519 node = next;
2520 }
2521 while (node);
2522 }
2523
2524 /* Gimplify a function and all its nested functions. */
2525 static void
2526 gimplify_all_functions (struct cgraph_node *root)
2527 {
2528 struct cgraph_node *iter;
2529 if (!gimple_body (root->decl))
2530 gimplify_function_tree (root->decl);
2531 for (iter = root->nested; iter; iter = iter->next_nested)
2532 gimplify_all_functions (iter);
2533 }
2534
2535 /* Main entry point for this pass. Process FNDECL and all of its nested
2536 subroutines and turn them into something less tightly bound. */
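/* Editorial illustration (not part of the original source) of the overall
   effect, written as C for clarity even though the pass works on GIMPLE:

       int outer (int n)
       {
         int inner (void) { return n + 1; }
         return inner ();
       }

   becomes, conceptually,

       struct FRAME_outer { int n; };
       static int inner (struct FRAME_outer *chain)
       { return chain->n + 1; }
       int outer (int n)
       {
         struct FRAME_outer frame;
         frame.n = n;
         return inner (&frame);
       }

   with the frame address actually passed via the static chain rather than
   an ordinary argument; all names here are illustrative.  */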
2538 void
2539 lower_nested_functions (tree fndecl)
2540 {
2541 struct cgraph_node *cgn;
2542 struct nesting_info *root;
2544 /* If there are no nested functions, there's nothing to do. */
2545 cgn = cgraph_node (fndecl);
2546 if (!cgn->nested)
2547 return;
2549 gimplify_all_functions (cgn);
2551 dump_file = dump_begin (TDI_nested, &dump_flags);
2552 if (dump_file)
2553 fprintf (dump_file, "\n;; Function %s\n\n",
2554 lang_hooks.decl_printable_name (fndecl, 2));
2556 bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
2557 root = create_nesting_tree (cgn);
2559 walk_all_functions (convert_nonlocal_reference_stmt,
2560 convert_nonlocal_reference_op,
2561 root);
2562 walk_all_functions (convert_local_reference_stmt,
2563 convert_local_reference_op,
2564 root);
2565 walk_all_functions (convert_nl_goto_reference, NULL, root);
2566 walk_all_functions (convert_nl_goto_receiver, NULL, root);
2568 convert_all_function_calls (root);
2569 finalize_nesting_tree (root);
2570 unnest_nesting_tree (root);
2572 free_nesting_tree (root);
2573 bitmap_obstack_release (&nesting_info_bitmap_obstack);
2575 if (dump_file)
2576 {
2577 dump_end (TDI_nested, dump_file);
2578 dump_file = NULL;
2579 }
2580 }
2581
2582 #include "gt-tree-nested.h"