gcc/tree-nested.c
1 /* Nested function decomposition for trees.
2 Copyright (C) 2004, 2005, 2006 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "function.h"
29 #include "tree-dump.h"
30 #include "tree-inline.h"
31 #include "tree-gimple.h"
32 #include "tree-iterator.h"
33 #include "tree-flow.h"
34 #include "cgraph.h"
35 #include "expr.h"
36 #include "langhooks.h"
37 #include "ggc.h"
40 /* The object of this pass is to lower the representation of a set of nested
41 functions in order to expose all of the gory details of the various
42 nonlocal references. We want to do this sooner rather than later, in
43 order to give us more freedom in emitting all of the functions in question.
45 Back in olden times, when gcc was young, we developed an insanely
46 complicated scheme whereby variables which were referenced nonlocally
47 were forced to live in the stack of the declaring function, and then
48 the nested functions magically discovered where these variables were
49 placed. In order for this scheme to function properly, it required
50 that the outer function be partially expanded, that we then switch to
51 compiling the inner function, and that once done with those we switch back
52 to compiling the outer function. Such delicate ordering requirements
53 make it difficult to do whole translation unit optimizations
54 involving such functions.
56 The implementation here is much more direct. Everything that can be
57 referenced by an inner function is a member of an explicitly created
58 structure herein called the "nonlocal frame struct". The incoming
59 static chain for a nested function is a pointer to this struct in
60 the parent. In this way, we settle on known offsets from a known
61 base, and so are decoupled from the logic that places objects in the
62 function's stack frame. More importantly, we don't have to wait for
63 that to happen -- since the compilation of the inner function is no
64 longer tied to a real stack frame, the nonlocal frame struct can be
65 allocated anywhere, which means that the outer function is now
66 inlinable.
68 Theory of operation here is very simple. Iterate over all the
69 statements in all the functions (depth first) several times,
70 allocating structures and fields on demand. In general we want to
71 examine inner functions first, so that we can avoid making unnecessary
72 changes to outer functions.
74 The order of the passes matters a bit, in that later passes will be
75 skipped if it is discovered that the functions don't actually interact
76 at all. That is, they're nested in the lexical sense but could have
77 been written as independent functions without change. */
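/* Illustrative sketch (not part of the original source; FRAME_outer,
   frame and chain are names invented for the example).  Given GNU C
   such as

       int outer (void)
       {
         int x = 0;
         void inner (void) { x++; }
         inner ();
         return x;
       }

   this pass conceptually produces something like

       struct FRAME_outer { int x; };

       static void inner (struct FRAME_outer *chain) { chain->x++; }

       int outer (void)
       {
         struct FRAME_outer frame;
         frame.x = 0;
         inner (&frame);
         return frame.x;
       }

   where the address of FRAME is what reaches INNER as its incoming
   static chain.  Because the frame struct is just another local, OUTER
   no longer has to be partially expanded before INNER is compiled, and
   it remains inlinable.  */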
80 struct var_map_elt GTY(())
82 tree old;
83 tree new;
86 struct nesting_info GTY ((chain_next ("%h.next")))
88 struct nesting_info *outer;
89 struct nesting_info *inner;
90 struct nesting_info *next;
92 htab_t GTY ((param_is (struct var_map_elt))) field_map;
93 htab_t GTY ((param_is (struct var_map_elt))) var_map;
94 bitmap suppress_expansion;
96 tree context;
97 tree new_local_var_chain;
98 tree debug_var_chain;
99 tree frame_type;
100 tree frame_decl;
101 tree chain_field;
102 tree chain_decl;
103 tree nl_goto_field;
105 bool any_parm_remapped;
106 bool any_tramp_created;
107 char static_chain_added;
111 /* Hashing and equality functions for nesting_info->var_map. */
113 static hashval_t
114 var_map_hash (const void *x)
116 const struct var_map_elt *a = (const struct var_map_elt *) x;
117 return htab_hash_pointer (a->old);
120 static int
121 var_map_eq (const void *x, const void *y)
123 const struct var_map_elt *a = (const struct var_map_elt *) x;
124 const struct var_map_elt *b = (const struct var_map_elt *) y;
125 return a->old == b->old;
128 /* We're working in so many different function contexts simultaneously
129 that create_tmp_var is dangerous. Prevent mishap. */
130 #define create_tmp_var cant_use_create_tmp_var_here_dummy
132 /* Like create_tmp_var, except record the variable for registration at
133 the given nesting level. */
135 static tree
136 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
138 tree tmp_var;
140 /* If the type is of variable size or a type which must be created by the
141 frontend, something is wrong. Note that we explicitly allow
142 incomplete types here, since we create them ourselves.  */
143 gcc_assert (!TREE_ADDRESSABLE (type));
144 gcc_assert (!TYPE_SIZE_UNIT (type)
145 || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
147 tmp_var = create_tmp_var_raw (type, prefix);
148 DECL_CONTEXT (tmp_var) = info->context;
149 TREE_CHAIN (tmp_var) = info->new_local_var_chain;
150 DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
151 if (TREE_CODE (type) == COMPLEX_TYPE
152 || TREE_CODE (type) == VECTOR_TYPE)
153 DECL_GIMPLE_REG_P (tmp_var) = 1;
155 info->new_local_var_chain = tmp_var;
157 return tmp_var;
160 /* Take the address of EXP to be used within function CONTEXT.
161 Mark it for addressability as necessary. */
163 tree
164 build_addr (tree exp, tree context)
166 tree base = exp;
167 tree save_context;
168 tree retval;
170 while (handled_component_p (base))
171 base = TREE_OPERAND (base, 0);
173 if (DECL_P (base))
174 TREE_ADDRESSABLE (base) = 1;
176 /* Building the ADDR_EXPR will compute a set of properties for
177 that ADDR_EXPR. Those properties are unfortunately context
178 specific; i.e., they are dependent on CURRENT_FUNCTION_DECL.
180 Temporarily set CURRENT_FUNCTION_DECL to the desired context,
181 build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL. That
182 way the properties for the ADDR_EXPR are computed properly. */
183 save_context = current_function_decl;
184 current_function_decl = context;
185 retval = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
186 current_function_decl = save_context;
187 return retval;
190 /* Insert FIELD into TYPE, sorted by alignment requirements. */
192 void
193 insert_field_into_struct (tree type, tree field)
195 tree *p;
197 DECL_CONTEXT (field) = type;
199 for (p = &TYPE_FIELDS (type); *p ; p = &TREE_CHAIN (*p))
200 if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
201 break;
203 TREE_CHAIN (field) = *p;
204 *p = field;
207 /* Build or return the RECORD_TYPE that describes the frame state that is
208 shared between INFO->CONTEXT and its nested functions. This record will
209 not be complete until finalize_nesting_tree; up until that point we'll
210 be adding fields as necessary.
212 We also build the DECL that represents this frame in the function. */
214 static tree
215 get_frame_type (struct nesting_info *info)
217 tree type = info->frame_type;
218 if (!type)
220 char *name;
222 type = make_node (RECORD_TYPE);
224 name = concat ("FRAME.",
225 IDENTIFIER_POINTER (DECL_NAME (info->context)),
226 NULL);
227 TYPE_NAME (type) = get_identifier (name);
228 free (name);
230 info->frame_type = type;
231 info->frame_decl = create_tmp_var_for (info, type, "FRAME");
233 /* ??? Always make it addressable for now, since it is meant to
234 be pointed to by the static chain pointer. This pessimizes
235 when it turns out that no static chains are needed because
236 the nested functions referencing non-local variables are not
237 reachable, but the true pessimization is to create the non-
238 local frame structure in the first place. */
239 TREE_ADDRESSABLE (info->frame_decl) = 1;
241 return type;
244 /* Return true if DECL should be referenced by pointer in the non-local
245 frame structure. */
247 static bool
248 use_pointer_in_frame (tree decl)
250 if (TREE_CODE (decl) == PARM_DECL)
252 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
253 sized decls, and inefficient to copy large aggregates. Don't bother
254 moving anything but scalar variables. */
255 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
257 else
259 /* Variable sized types make things "interesting" in the frame. */
260 return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
264 /* Given DECL, a non-locally accessed variable, find or create a field
265 in the non-local frame structure for the given nesting context. */
267 static tree
268 lookup_field_for_decl (struct nesting_info *info, tree decl,
269 enum insert_option insert)
271 struct var_map_elt *elt, dummy;
272 void **slot;
273 tree field;
275 dummy.old = decl;
276 slot = htab_find_slot (info->field_map, &dummy, insert);
277 if (!slot)
279 gcc_assert (insert != INSERT);
280 return NULL;
282 elt = (struct var_map_elt *) *slot;
284 if (!elt && insert == INSERT)
286 field = make_node (FIELD_DECL);
287 DECL_NAME (field) = DECL_NAME (decl);
289 if (use_pointer_in_frame (decl))
291 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
292 DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
293 DECL_NONADDRESSABLE_P (field) = 1;
295 else
297 TREE_TYPE (field) = TREE_TYPE (decl);
298 DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
299 DECL_ALIGN (field) = DECL_ALIGN (decl);
300 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
301 TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
302 DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
303 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
306 insert_field_into_struct (get_frame_type (info), field);
308 elt = GGC_NEW (struct var_map_elt);
309 elt->old = decl;
310 elt->new = field;
311 *slot = elt;
313 if (TREE_CODE (decl) == PARM_DECL)
314 info->any_parm_remapped = true;
316 else
317 field = elt ? elt->new : NULL;
319 return field;
322 /* Build or return the variable that holds the static chain within
323 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
325 static tree
326 get_chain_decl (struct nesting_info *info)
328 tree decl = info->chain_decl;
329 if (!decl)
331 tree type;
333 type = get_frame_type (info->outer);
334 type = build_pointer_type (type);
336 /* Note that this variable is *not* entered into any BIND_EXPR;
337 the construction of this variable is handled specially in
338 expand_function_start and initialize_inlined_parameters.
339 Note also that it's represented as a parameter. This is closer
340 to the truth, since the initial value does come from
341 the caller. */
342 decl = build_decl (PARM_DECL, create_tmp_var_name ("CHAIN"), type);
343 DECL_ARTIFICIAL (decl) = 1;
344 DECL_IGNORED_P (decl) = 1;
345 TREE_USED (decl) = 1;
346 DECL_CONTEXT (decl) = info->context;
347 DECL_ARG_TYPE (decl) = type;
349 /* Tell tree-inline.c that we never write to this variable, so
350 it can copy-prop the replacement value immediately. */
351 TREE_READONLY (decl) = 1;
353 info->chain_decl = decl;
355 return decl;
358 /* Build or return the field within the non-local frame state that holds
359 the static chain for INFO->CONTEXT. This is the way to walk back up
360 multiple nesting levels. */
362 static tree
363 get_chain_field (struct nesting_info *info)
365 tree field = info->chain_field;
366 if (!field)
368 tree type = build_pointer_type (get_frame_type (info->outer));
370 field = make_node (FIELD_DECL);
371 DECL_NAME (field) = get_identifier ("__chain");
372 TREE_TYPE (field) = type;
373 DECL_ALIGN (field) = TYPE_ALIGN (type);
374 DECL_NONADDRESSABLE_P (field) = 1;
376 insert_field_into_struct (get_frame_type (info), field);
378 info->chain_field = field;
380 return field;
383 /* Copy EXP into a temporary. Allocate the temporary in the context of
384 INFO and insert the initialization statement before TSI. */
386 static tree
387 init_tmp_var (struct nesting_info *info, tree exp, tree_stmt_iterator *tsi)
389 tree t, stmt;
391 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
392 stmt = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (t), t, exp);
393 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (tsi_stmt (*tsi)));
394 tsi_link_before (tsi, stmt, TSI_SAME_STMT);
396 return t;
399 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
401 static tree
402 tsi_gimplify_val (struct nesting_info *info, tree exp, tree_stmt_iterator *tsi)
404 if (is_gimple_val (exp))
405 return exp;
406 else
407 return init_tmp_var (info, exp, tsi);
410 /* Similarly, but copy from the temporary and insert the statement
411 after the iterator. */
413 static tree
414 save_tmp_var (struct nesting_info *info, tree exp,
415 tree_stmt_iterator *tsi)
417 tree t, stmt;
419 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
420 stmt = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (t), exp, t);
421 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (tsi_stmt (*tsi)));
422 tsi_link_after (tsi, stmt, TSI_SAME_STMT);
424 return t;
427 /* Build or return the type used to represent a nested function trampoline. */
429 static GTY(()) tree trampoline_type;
431 static tree
432 get_trampoline_type (void)
434 tree record, t;
435 unsigned align, size;
437 if (trampoline_type)
438 return trampoline_type;
440 align = TRAMPOLINE_ALIGNMENT;
441 size = TRAMPOLINE_SIZE;
443 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
444 then allocate extra space so that we can do dynamic alignment. */
445 if (align > STACK_BOUNDARY)
447 size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
448 align = STACK_BOUNDARY;
451 t = build_index_type (build_int_cst (NULL_TREE, size - 1));
452 t = build_array_type (char_type_node, t);
453 t = build_decl (FIELD_DECL, get_identifier ("__data"), t);
454 DECL_ALIGN (t) = align;
455 DECL_USER_ALIGN (t) = 1;
457 record = make_node (RECORD_TYPE);
458 TYPE_NAME (record) = get_identifier ("__builtin_trampoline");
459 TYPE_FIELDS (record) = t;
460 layout_type (record);
462 return record;
465 /* Given DECL, a nested function, find or create a field in the non-local
466 frame structure for a trampoline for this function. */
468 static tree
469 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
470 enum insert_option insert)
472 struct var_map_elt *elt, dummy;
473 void **slot;
474 tree field;
476 dummy.old = decl;
477 slot = htab_find_slot (info->var_map, &dummy, insert);
478 if (!slot)
480 gcc_assert (insert != INSERT);
481 return NULL;
483 elt = (struct var_map_elt *) *slot;
485 if (!elt && insert == INSERT)
487 field = make_node (FIELD_DECL);
488 DECL_NAME (field) = DECL_NAME (decl);
489 TREE_TYPE (field) = get_trampoline_type ();
490 TREE_ADDRESSABLE (field) = 1;
492 insert_field_into_struct (get_frame_type (info), field);
494 elt = GGC_NEW (struct var_map_elt);
495 elt->old = decl;
496 elt->new = field;
497 *slot = elt;
499 info->any_tramp_created = true;
501 else
502 field = elt ? elt->new : NULL;
504 return field;
507 /* Build or return the field within the non-local frame state that holds
508 the non-local goto "jmp_buf". The buffer itself is maintained by the
509 rtl middle-end as dynamic stack space is allocated. */
511 static tree
512 get_nl_goto_field (struct nesting_info *info)
514 tree field = info->nl_goto_field;
515 if (!field)
517 unsigned size;
518 tree type;
520 /* For __builtin_nonlocal_goto, we need N words. The first is the
521 frame pointer, the rest is for the target's stack pointer save
522 area. The number of words is controlled by STACK_SAVEAREA_MODE;
523 not the best interface, but it'll do for now. */
524 if (Pmode == ptr_mode)
525 type = ptr_type_node;
526 else
527 type = lang_hooks.types.type_for_mode (Pmode, 1);
529 size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
530 size = size / GET_MODE_SIZE (Pmode);
531 size = size + 1;
533 type = build_array_type
534 (type, build_index_type (build_int_cst (NULL_TREE, size)));
536 field = make_node (FIELD_DECL);
537 DECL_NAME (field) = get_identifier ("__nl_goto_buf");
538 TREE_TYPE (field) = type;
539 DECL_ALIGN (field) = TYPE_ALIGN (type);
540 TREE_ADDRESSABLE (field) = 1;
542 insert_field_into_struct (get_frame_type (info), field);
544 info->nl_goto_field = field;
547 return field;
550 /* Helper function for walk_stmts. Walk output operands of an ASM_EXPR. */
552 static void
553 walk_asm_expr (struct walk_stmt_info *wi, tree stmt)
555 int noutputs = list_length (ASM_OUTPUTS (stmt));
556 const char **oconstraints
557 = (const char **) alloca ((noutputs) * sizeof (const char *));
558 int i;
559 tree link;
560 const char *constraint;
561 bool allows_mem, allows_reg, is_inout;
563 wi->is_lhs = true;
564 for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
566 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
567 oconstraints[i] = constraint;
568 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
569 &allows_reg, &is_inout);
571 wi->val_only = (allows_reg || !allows_mem);
572 walk_tree (&TREE_VALUE (link), wi->callback, wi, NULL);
575 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
577 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
578 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
579 oconstraints, &allows_mem, &allows_reg);
581 wi->val_only = (allows_reg || !allows_mem);
582 /* Although input "m" is not really an LHS, we need an lvalue. */
583 wi->is_lhs = !wi->val_only;
584 walk_tree (&TREE_VALUE (link), wi->callback, wi, NULL);
587 wi->is_lhs = false;
588 wi->val_only = true;
591 /* Iterate over all sub-statements of *TP calling walk_tree with
592 WI->CALLBACK for every sub-expression in each statement found. */
594 void
595 walk_stmts (struct walk_stmt_info *wi, tree *tp)
597 tree t = *tp;
598 int walk_subtrees;
600 if (!t)
601 return;
603 if (wi->want_locations && EXPR_HAS_LOCATION (t))
604 input_location = EXPR_LOCATION (t);
606 switch (TREE_CODE (t))
608 case STATEMENT_LIST:
610 tree_stmt_iterator i;
611 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
613 wi->tsi = i;
614 walk_stmts (wi, tsi_stmt_ptr (i));
617 break;
619 case COND_EXPR:
620 walk_tree (&COND_EXPR_COND (t), wi->callback, wi, NULL);
621 walk_stmts (wi, &COND_EXPR_THEN (t));
622 walk_stmts (wi, &COND_EXPR_ELSE (t));
623 break;
624 case CATCH_EXPR:
625 walk_stmts (wi, &CATCH_BODY (t));
626 break;
627 case EH_FILTER_EXPR:
628 walk_stmts (wi, &EH_FILTER_FAILURE (t));
629 break;
630 case TRY_CATCH_EXPR:
631 case TRY_FINALLY_EXPR:
632 walk_stmts (wi, &TREE_OPERAND (t, 0));
633 walk_stmts (wi, &TREE_OPERAND (t, 1));
634 break;
636 case BIND_EXPR:
637 if (wi->want_bind_expr)
639 walk_subtrees = 1;
640 wi->callback (tp, &walk_subtrees, wi);
641 if (!walk_subtrees)
642 break;
644 walk_stmts (wi, &BIND_EXPR_BODY (t));
645 break;
647 case RETURN_EXPR:
648 if (wi->want_return_expr)
650 walk_subtrees = 1;
651 wi->callback (tp, &walk_subtrees, wi);
652 if (!walk_subtrees)
653 break;
655 walk_stmts (wi, &TREE_OPERAND (t, 0));
656 break;
658 case GIMPLE_MODIFY_STMT:
659 /* A formal temporary lhs may use a COMPONENT_REF rhs. */
660 wi->val_only = !is_gimple_formal_tmp_var (GIMPLE_STMT_OPERAND (t, 0));
661 walk_tree (&GIMPLE_STMT_OPERAND (t, 1), wi->callback, wi, NULL);
663 /* If the rhs is appropriate for a memory, we may use a
664 COMPONENT_REF on the lhs. */
665 wi->val_only = !is_gimple_mem_rhs (GIMPLE_STMT_OPERAND (t, 1));
666 wi->is_lhs = true;
667 walk_tree (&GIMPLE_STMT_OPERAND (t, 0), wi->callback, wi, NULL);
669 wi->val_only = true;
670 wi->is_lhs = false;
671 break;
673 case ASM_EXPR:
674 walk_asm_expr (wi, *tp);
675 break;
677 default:
678 wi->val_only = true;
679 walk_tree (tp, wi->callback, wi, NULL);
680 break;
684 /* Invoke CALLBACK on all statements of *STMT_P. */
686 static void
687 walk_body (walk_tree_fn callback, struct nesting_info *info, tree *stmt_p)
689 struct walk_stmt_info wi;
691 memset (&wi, 0, sizeof (wi));
692 wi.callback = callback;
693 wi.info = info;
694 wi.val_only = true;
696 walk_stmts (&wi, stmt_p);
699 /* Invoke CALLBACK on all statements of INFO->CONTEXT. */
701 static inline void
702 walk_function (walk_tree_fn callback, struct nesting_info *info)
704 walk_body (callback, info, &DECL_SAVED_TREE (info->context));
707 /* Similarly for ROOT and all functions nested underneath, depth first. */
709 static void
710 walk_all_functions (walk_tree_fn callback, struct nesting_info *root)
714 if (root->inner)
715 walk_all_functions (callback, root->inner);
716 walk_function (callback, root);
717 root = root->next;
719 while (root);
722 /* We have to check for a fairly pathological case. The operands of a
723 nested function are to be interpreted in the context of the enclosing
724 function. So if any are variably-sized, they will get remapped when the
725 enclosing function is inlined. But that remapping would also have to be
726 done in the types of the PARM_DECLs of the nested function, meaning the
727 argument types of that function will disagree with the arguments in the
728 calls to that function. So we'd either have to make a copy of the nested
729 function corresponding to each time the enclosing function was inlined or
730 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
731 function. The former is not practical. The latter would still require
732 detecting this case to know when to add the conversions. So, for now at
733 least, we don't inline such an enclosing function.
735 We have to do that check recursively, so here we return an indication of
736 whether FNDECL has such a nested function. ORIG_FNDECL is the function
737 we were originally trying to inline; it is used to check whether any
738 argument is variably modified by anything in it.
740 It would be better to do this in tree-inline.c so that we could give
741 the appropriate warning for why a function can't be inlined, but that's
742 too late since the nesting structure has already been flattened and
743 adding a flag just to record this fact seems a waste of a flag. */
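/* Hedged illustration (not from the original source; OUTER, INNER, N
   and A are invented).  The problematic shape is roughly

       void outer (int n)
       {
         void inner (int (*p)[n]) { (*p)[0] = n; }
         int a[n];
         inner (&a);
       }

   INNER's parameter type int (*)[n] is variably modified.  If OUTER
   were inlined, N (and hence the array bound) would be remapped in the
   inlined body, but INNER's PARM_DECL would still carry the original
   type, so the call and the callee would disagree.  */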
745 static bool
746 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
748 struct cgraph_node *cgn = cgraph_node (fndecl);
749 tree arg;
751 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
753 for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = TREE_CHAIN (arg))
754 if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
755 return true;
757 if (check_for_nested_with_variably_modified (cgn->decl, orig_fndecl))
758 return true;
761 return false;
764 /* Construct our local datastructure describing the function nesting
765 tree rooted by CGN. */
767 static struct nesting_info *
768 create_nesting_tree (struct cgraph_node *cgn)
770 struct nesting_info *info = GGC_CNEW (struct nesting_info);
771 info->field_map = htab_create_ggc (7, var_map_hash, var_map_eq, ggc_free);
772 info->var_map = htab_create_ggc (7, var_map_hash, var_map_eq, ggc_free);
773 info->suppress_expansion = BITMAP_GGC_ALLOC ();
774 info->context = cgn->decl;
776 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
778 struct nesting_info *sub = create_nesting_tree (cgn);
779 sub->outer = info;
780 sub->next = info->inner;
781 info->inner = sub;
784 /* See the discussion at check_for_nested_with_variably_modified for
785 why this has to be here. */
786 if (check_for_nested_with_variably_modified (info->context, info->context))
787 DECL_UNINLINABLE (info->context) = true;
789 return info;
792 /* Return an expression computing the static chain for TARGET_CONTEXT
793 from INFO->CONTEXT. Insert any necessary computations before TSI. */
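/* Sketch (hypothetical names): if INFO->CONTEXT is nested two levels
   below TARGET_CONTEXT, the walk below builds roughly

       T.1 = CHAIN->__chain;

   and returns T.1 -- one load of the __chain field per intervening
   nesting level, each gimplified into a temporary before TSI.  */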
795 static tree
796 get_static_chain (struct nesting_info *info, tree target_context,
797 tree_stmt_iterator *tsi)
799 struct nesting_info *i;
800 tree x;
802 if (info->context == target_context)
804 x = build_addr (info->frame_decl, target_context);
806 else
808 x = get_chain_decl (info);
810 for (i = info->outer; i->context != target_context; i = i->outer)
812 tree field = get_chain_field (i);
814 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
815 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
816 x = init_tmp_var (info, x, tsi);
820 return x;
823 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
824 frame as seen from INFO->CONTEXT. Insert any necessary computations
825 before TSI. */
827 static tree
828 get_frame_field (struct nesting_info *info, tree target_context,
829 tree field, tree_stmt_iterator *tsi)
831 struct nesting_info *i;
832 tree x;
834 if (info->context == target_context)
836 /* Make sure frame_decl gets created. */
837 (void) get_frame_type (info);
838 x = info->frame_decl;
840 else
842 x = get_chain_decl (info);
844 for (i = info->outer; i->context != target_context; i = i->outer)
846 tree field = get_chain_field (i);
848 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
849 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
850 x = init_tmp_var (info, x, tsi);
853 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
856 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
857 return x;
860 /* A subroutine of convert_nonlocal_reference. Create a local variable
861 in the nested function with DECL_VALUE_EXPR set to reference the true
862 variable in the parent function. This is used both for debug info
863 and in OpenMP lowering. */
865 static tree
866 get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
868 struct var_map_elt *elt, dummy;
869 tree target_context;
870 struct nesting_info *i;
871 tree x, field, new_decl;
872 void **slot;
874 dummy.old = decl;
875 slot = htab_find_slot (info->var_map, &dummy, INSERT);
876 elt = *slot;
878 if (elt)
879 return elt->new;
881 target_context = decl_function_context (decl);
883 /* A copy of the code in get_frame_field, but without the temporaries. */
884 if (info->context == target_context)
886 /* Make sure frame_decl gets created. */
887 (void) get_frame_type (info);
888 x = info->frame_decl;
889 i = info;
891 else
893 x = get_chain_decl (info);
894 for (i = info->outer; i->context != target_context; i = i->outer)
896 field = get_chain_field (i);
897 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
898 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
900 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
903 field = lookup_field_for_decl (i, decl, INSERT);
904 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
905 if (use_pointer_in_frame (decl))
906 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
908 /* ??? We should be remapping types as well, surely. */
909 new_decl = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
910 DECL_CONTEXT (new_decl) = info->context;
911 DECL_SOURCE_LOCATION (new_decl) = DECL_SOURCE_LOCATION (decl);
912 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
913 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
914 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
915 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
916 TREE_READONLY (new_decl) = TREE_READONLY (decl);
917 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
918 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
920 SET_DECL_VALUE_EXPR (new_decl, x);
921 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
923 elt = ggc_alloc (sizeof (*elt));
924 elt->old = decl;
925 elt->new = new_decl;
926 *slot = elt;
928 TREE_CHAIN (new_decl) = info->debug_var_chain;
929 info->debug_var_chain = new_decl;
931 return new_decl;
934 /* Called via walk_function+walk_tree, rewrite all references to VAR
935 and PARM_DECLs that belong to outer functions.
937 The rewrite will involve some number of structure accesses back up
938 the static chain. E.g. for a variable FOO up one nesting level it'll
939 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
940 indirections apply to decls for which use_pointer_in_frame is true. */
942 static bool convert_nonlocal_omp_clauses (tree *, struct walk_stmt_info *);
944 static tree
945 convert_nonlocal_reference (tree *tp, int *walk_subtrees, void *data)
947 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
948 struct nesting_info *info = wi->info;
949 tree t = *tp;
950 tree save_local_var_chain;
951 bitmap save_suppress;
953 *walk_subtrees = 0;
954 switch (TREE_CODE (t))
956 case VAR_DECL:
957 /* Non-automatic variables are never processed. */
958 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
959 break;
960 /* FALLTHRU */
962 case PARM_DECL:
963 if (decl_function_context (t) != info->context)
965 tree x;
966 wi->changed = true;
968 x = get_nonlocal_debug_decl (info, t);
969 if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
971 tree target_context = decl_function_context (t);
972 struct nesting_info *i;
973 for (i = info->outer; i->context != target_context; i = i->outer)
974 continue;
975 x = lookup_field_for_decl (i, t, INSERT);
976 x = get_frame_field (info, target_context, x, &wi->tsi);
977 if (use_pointer_in_frame (t))
979 x = init_tmp_var (info, x, &wi->tsi);
980 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
984 if (wi->val_only)
986 if (wi->is_lhs)
987 x = save_tmp_var (info, x, &wi->tsi);
988 else
989 x = init_tmp_var (info, x, &wi->tsi);
992 *tp = x;
994 break;
996 case GOTO_EXPR:
997 /* Don't walk non-local gotos for now. */
998 if (TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL)
1000 *walk_subtrees = 1;
1001 wi->val_only = true;
1002 wi->is_lhs = false;
1004 break;
1006 case LABEL_DECL:
1007 /* We're taking the address of a label from a parent function, but
1008 this is not itself a non-local goto. Mark the label such that it
1009 will not be deleted, much as we would with a label address in
1010 static storage. */
1011 if (decl_function_context (t) != info->context)
1012 FORCED_LABEL (t) = 1;
1013 break;
1015 case ADDR_EXPR:
1017 bool save_val_only = wi->val_only;
1019 wi->val_only = false;
1020 wi->is_lhs = false;
1021 wi->changed = false;
1022 walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference, wi, NULL);
1023 wi->val_only = true;
1025 if (wi->changed)
1027 tree save_context;
1029 /* If we changed anything, then TREE_INVARIANT will be wrong,
1030 since we're no longer directly referencing a decl. */
1031 save_context = current_function_decl;
1032 current_function_decl = info->context;
1033 recompute_tree_invariant_for_addr_expr (t);
1034 current_function_decl = save_context;
1036 /* If the callback converted the address argument in a context
1037 where we only accept variables (and min_invariant, presumably),
1038 then compute the address into a temporary. */
1039 if (save_val_only)
1040 *tp = tsi_gimplify_val (wi->info, t, &wi->tsi);
1043 break;
1045 case REALPART_EXPR:
1046 case IMAGPART_EXPR:
1047 case COMPONENT_REF:
1048 case ARRAY_REF:
1049 case ARRAY_RANGE_REF:
1050 case BIT_FIELD_REF:
1051 /* Go down this entire nest and just look at the final prefix and
1052 anything that describes the references. Otherwise, we lose track
1053 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1054 wi->val_only = true;
1055 wi->is_lhs = false;
1056 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1058 if (TREE_CODE (t) == COMPONENT_REF)
1059 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference, wi,
1060 NULL);
1061 else if (TREE_CODE (t) == ARRAY_REF
1062 || TREE_CODE (t) == ARRAY_RANGE_REF)
1064 walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference, wi,
1065 NULL);
1066 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference, wi,
1067 NULL);
1068 walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference, wi,
1069 NULL);
1071 else if (TREE_CODE (t) == BIT_FIELD_REF)
1073 walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference, wi,
1074 NULL);
1075 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference, wi,
1076 NULL);
1079 wi->val_only = false;
1080 walk_tree (tp, convert_nonlocal_reference, wi, NULL);
1081 break;
1083 case OMP_PARALLEL:
1084 save_suppress = info->suppress_expansion;
1085 if (convert_nonlocal_omp_clauses (&OMP_PARALLEL_CLAUSES (t), wi))
1087 tree c, decl;
1088 decl = get_chain_decl (info);
1089 c = build_omp_clause (OMP_CLAUSE_FIRSTPRIVATE);
1090 OMP_CLAUSE_DECL (c) = decl;
1091 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (t);
1092 OMP_PARALLEL_CLAUSES (t) = c;
1095 save_local_var_chain = info->new_local_var_chain;
1096 info->new_local_var_chain = NULL;
1098 walk_body (convert_nonlocal_reference, info, &OMP_PARALLEL_BODY (t));
1100 if (info->new_local_var_chain)
1101 declare_vars (info->new_local_var_chain, OMP_PARALLEL_BODY (t), false);
1102 info->new_local_var_chain = save_local_var_chain;
1103 info->suppress_expansion = save_suppress;
1104 break;
1106 case OMP_FOR:
1107 case OMP_SECTIONS:
1108 case OMP_SINGLE:
1109 save_suppress = info->suppress_expansion;
1110 convert_nonlocal_omp_clauses (&OMP_CLAUSES (t), wi);
1111 walk_body (convert_nonlocal_reference, info, &OMP_BODY (t));
1112 info->suppress_expansion = save_suppress;
1113 break;
1115 case OMP_SECTION:
1116 case OMP_MASTER:
1117 case OMP_ORDERED:
1118 walk_body (convert_nonlocal_reference, info, &OMP_BODY (t));
1119 break;
1121 default:
1122 if (!IS_TYPE_OR_DECL_P (t))
1124 *walk_subtrees = 1;
1125 wi->val_only = true;
1126 wi->is_lhs = false;
1128 break;
1131 return NULL_TREE;
1134 static bool
1135 convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1137 struct nesting_info *info = wi->info;
1138 bool need_chain = false;
1139 tree clause, decl;
1140 int dummy;
1141 bitmap new_suppress;
1143 new_suppress = BITMAP_GGC_ALLOC ();
1144 bitmap_copy (new_suppress, info->suppress_expansion);
1146 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1148 switch (OMP_CLAUSE_CODE (clause))
1150 case OMP_CLAUSE_PRIVATE:
1151 case OMP_CLAUSE_FIRSTPRIVATE:
1152 case OMP_CLAUSE_LASTPRIVATE:
1153 case OMP_CLAUSE_REDUCTION:
1154 case OMP_CLAUSE_COPYPRIVATE:
1155 case OMP_CLAUSE_SHARED:
1156 decl = OMP_CLAUSE_DECL (clause);
1157 if (decl_function_context (decl) != info->context)
1159 bitmap_set_bit (new_suppress, DECL_UID (decl));
1160 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1161 need_chain = true;
1163 break;
1165 case OMP_CLAUSE_SCHEDULE:
1166 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1167 break;
1168 /* FALLTHRU */
1169 case OMP_CLAUSE_IF:
1170 case OMP_CLAUSE_NUM_THREADS:
1171 wi->val_only = true;
1172 wi->is_lhs = false;
1173 convert_nonlocal_reference (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
1174 wi);
1175 break;
1177 case OMP_CLAUSE_NOWAIT:
1178 case OMP_CLAUSE_ORDERED:
1179 case OMP_CLAUSE_DEFAULT:
1180 case OMP_CLAUSE_COPYIN:
1181 break;
1183 default:
1184 gcc_unreachable ();
1188 info->suppress_expansion = new_suppress;
1190 return need_chain;
1193 /* A subroutine of convert_local_reference. Create a local variable
1194 in the parent function with DECL_VALUE_EXPR set to reference the
1195 field in FRAME. This is used both for debug info and in OpenMP
1196 lowering. */
1198 static tree
1199 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
1201 struct var_map_elt *elt, dummy;
1202 tree x, new_decl;
1203 void **slot;
1205 dummy.old = decl;
1206 slot = htab_find_slot (info->var_map, &dummy, INSERT);
1207 elt = *slot;
1209 if (elt)
1210 return elt->new;
1212 /* Make sure frame_decl gets created. */
1213 (void) get_frame_type (info);
1214 x = info->frame_decl;
1215 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
1217 new_decl = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1218 DECL_CONTEXT (new_decl) = info->context;
1219 DECL_SOURCE_LOCATION (new_decl) = DECL_SOURCE_LOCATION (decl);
1220 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1221 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1222 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1223 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1224 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1225 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1226 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1228 SET_DECL_VALUE_EXPR (new_decl, x);
1229 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1231 elt = ggc_alloc (sizeof (*elt));
1232 elt->old = decl;
1233 elt->new = new_decl;
1234 *slot = elt;
1236 TREE_CHAIN (new_decl) = info->debug_var_chain;
1237 info->debug_var_chain = new_decl;
1239 /* Do not emit debug info twice. */
1240 DECL_IGNORED_P (decl) = 1;
1242 return new_decl;
1245 /* Called via walk_function+walk_tree, rewrite all references to VAR
1246 and PARM_DECLs that were referenced by inner nested functions.
1247 The rewrite will be a structure reference to the local frame variable. */
1249 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1251 static tree
1252 convert_local_reference (tree *tp, int *walk_subtrees, void *data)
1254 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1255 struct nesting_info *info = wi->info;
1256 tree t = *tp, field, x;
1257 bool save_val_only;
1258 tree save_local_var_chain;
1259 bitmap save_suppress;
1261 *walk_subtrees = 0;
1262 switch (TREE_CODE (t))
1264 case VAR_DECL:
1265 /* Non-automatic variables are never processed. */
1266 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1267 break;
1268 /* FALLTHRU */
1270 case PARM_DECL:
1271 if (decl_function_context (t) == info->context)
1273 /* If we copied a pointer to the frame, then the original decl
1274 is used unchanged in the parent function. */
1275 if (use_pointer_in_frame (t))
1276 break;
1278 /* No need to transform anything if no child references the
1279 variable. */
1280 field = lookup_field_for_decl (info, t, NO_INSERT);
1281 if (!field)
1282 break;
1283 wi->changed = true;
1285 x = get_local_debug_decl (info, t, field);
1286 if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1287 x = get_frame_field (info, info->context, field, &wi->tsi);
1289 if (wi->val_only)
1291 if (wi->is_lhs)
1292 x = save_tmp_var (info, x, &wi->tsi);
1293 else
1294 x = init_tmp_var (info, x, &wi->tsi);
1297 *tp = x;
1299 break;
1301 case ADDR_EXPR:
1302 save_val_only = wi->val_only;
1303 wi->val_only = false;
1304 wi->is_lhs = false;
1305 wi->changed = false;
1306 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference, wi, NULL);
1307 wi->val_only = save_val_only;
1309 /* If we converted anything ... */
1310 if (wi->changed)
1312 tree save_context;
1314 /* Then the frame decl is now addressable. */
1315 TREE_ADDRESSABLE (info->frame_decl) = 1;
1317 save_context = current_function_decl;
1318 current_function_decl = info->context;
1319 recompute_tree_invariant_for_addr_expr (t);
1320 current_function_decl = save_context;
1322 /* If we are in a context where we only accept values, then
1323 compute the address into a temporary. */
1324 if (save_val_only)
1325 *tp = tsi_gimplify_val (wi->info, t, &wi->tsi);
1327 break;
1329 case REALPART_EXPR:
1330 case IMAGPART_EXPR:
1331 case COMPONENT_REF:
1332 case ARRAY_REF:
1333 case ARRAY_RANGE_REF:
1334 case BIT_FIELD_REF:
1335 /* Go down this entire nest and just look at the final prefix and
1336 anything that describes the references. Otherwise, we lose track
1337 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1338 save_val_only = wi->val_only;
1339 wi->val_only = true;
1340 wi->is_lhs = false;
1341 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1343 if (TREE_CODE (t) == COMPONENT_REF)
1344 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference, wi,
1345 NULL);
1346 else if (TREE_CODE (t) == ARRAY_REF
1347 || TREE_CODE (t) == ARRAY_RANGE_REF)
1349 walk_tree (&TREE_OPERAND (t, 1), convert_local_reference, wi,
1350 NULL);
1351 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference, wi,
1352 NULL);
1353 walk_tree (&TREE_OPERAND (t, 3), convert_local_reference, wi,
1354 NULL);
1356 else if (TREE_CODE (t) == BIT_FIELD_REF)
1358 walk_tree (&TREE_OPERAND (t, 1), convert_local_reference, wi,
1359 NULL);
1360 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference, wi,
1361 NULL);
1364 wi->val_only = false;
1365 walk_tree (tp, convert_local_reference, wi, NULL);
1366 wi->val_only = save_val_only;
1367 break;
1369 case OMP_PARALLEL:
1370 save_suppress = info->suppress_expansion;
1371 if (convert_local_omp_clauses (&OMP_PARALLEL_CLAUSES (t), wi))
1373 tree c;
1374 (void) get_frame_type (info);
1375 c = build_omp_clause (OMP_CLAUSE_SHARED);
1376 OMP_CLAUSE_DECL (c) = info->frame_decl;
1377 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (t);
1378 OMP_PARALLEL_CLAUSES (t) = c;
1381 save_local_var_chain = info->new_local_var_chain;
1382 info->new_local_var_chain = NULL;
1384 walk_body (convert_local_reference, info, &OMP_PARALLEL_BODY (t));
1386 if (info->new_local_var_chain)
1387 declare_vars (info->new_local_var_chain, OMP_PARALLEL_BODY (t), false);
1388 info->new_local_var_chain = save_local_var_chain;
1389 info->suppress_expansion = save_suppress;
1390 break;
1392 case OMP_FOR:
1393 case OMP_SECTIONS:
1394 case OMP_SINGLE:
1395 save_suppress = info->suppress_expansion;
1396 convert_local_omp_clauses (&OMP_CLAUSES (t), wi);
1397 walk_body (convert_local_reference, info, &OMP_BODY (t));
1398 info->suppress_expansion = save_suppress;
1399 break;
1401 case OMP_SECTION:
1402 case OMP_MASTER:
1403 case OMP_ORDERED:
1404 walk_body (convert_local_reference, info, &OMP_BODY (t));
1405 break;
1407 default:
1408 if (!IS_TYPE_OR_DECL_P (t))
1410 *walk_subtrees = 1;
1411 wi->val_only = true;
1412 wi->is_lhs = false;
1414 break;
1417 return NULL_TREE;
1420 static bool
1421 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1423 struct nesting_info *info = wi->info;
1424 bool need_frame = false;
1425 tree clause, decl;
1426 int dummy;
1427 bitmap new_suppress;
1429 new_suppress = BITMAP_GGC_ALLOC ();
1430 bitmap_copy (new_suppress, info->suppress_expansion);
1432 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1434 switch (OMP_CLAUSE_CODE (clause))
1436 case OMP_CLAUSE_PRIVATE:
1437 case OMP_CLAUSE_FIRSTPRIVATE:
1438 case OMP_CLAUSE_LASTPRIVATE:
1439 case OMP_CLAUSE_REDUCTION:
1440 case OMP_CLAUSE_COPYPRIVATE:
1441 case OMP_CLAUSE_SHARED:
1442 decl = OMP_CLAUSE_DECL (clause);
1443 if (decl_function_context (decl) == info->context
1444 && !use_pointer_in_frame (decl))
1446 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1447 if (field)
1449 bitmap_set_bit (new_suppress, DECL_UID (decl));
1450 OMP_CLAUSE_DECL (clause)
1451 = get_local_debug_decl (info, decl, field);
1452 need_frame = true;
1455 break;
1457 case OMP_CLAUSE_SCHEDULE:
1458 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1459 break;
1460 /* FALLTHRU */
1461 case OMP_CLAUSE_IF:
1462 case OMP_CLAUSE_NUM_THREADS:
1463 wi->val_only = true;
1464 wi->is_lhs = false;
1465 convert_local_reference (&OMP_CLAUSE_OPERAND (clause, 0), &dummy, wi);
1466 break;
1468 case OMP_CLAUSE_NOWAIT:
1469 case OMP_CLAUSE_ORDERED:
1470 case OMP_CLAUSE_DEFAULT:
1471 case OMP_CLAUSE_COPYIN:
1472 break;
1474 default:
1475 gcc_unreachable ();
1479 info->suppress_expansion = new_suppress;
1481 return need_frame;
1484 /* Called via walk_function+walk_tree, rewrite all GOTO_EXPRs that
1485 reference labels from outer functions. The rewrite will be a
1486 call to __builtin_nonlocal_goto. */
1488 static tree
1489 convert_nl_goto_reference (tree *tp, int *walk_subtrees, void *data)
1491 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1492 struct nesting_info *info = wi->info, *i;
1493 tree t = *tp, label, new_label, target_context, x, arg, field;
1494 struct var_map_elt *elt, dummy;
1495 void **slot;
1497 *walk_subtrees = 0;
1498 if (TREE_CODE (t) != GOTO_EXPR)
1499 return NULL_TREE;
1500 label = GOTO_DESTINATION (t);
1501 if (TREE_CODE (label) != LABEL_DECL)
1502 return NULL_TREE;
1503 target_context = decl_function_context (label);
1504 if (target_context == info->context)
1505 return NULL_TREE;
1507 for (i = info->outer; target_context != i->context; i = i->outer)
1508 continue;
1510 /* The original user label may also be used for a normal goto; therefore
1511 we must create a new label that will actually receive the abnormal
1512 control transfer. This new label will be marked LABEL_NONLOCAL; this
1513 mark will trigger proper behavior in the cfg, as well as cause the
1514 (hairy target-specific) non-local goto receiver code to be generated
1515 when we expand rtl. Enter this association into var_map so that we
1516 can insert the new label into the IL during a second pass. */
1517 dummy.old = label;
1518 slot = htab_find_slot (i->var_map, &dummy, INSERT);
1519 elt = (struct var_map_elt *) *slot;
1520 if (elt == NULL)
1522 new_label = create_artificial_label ();
1523 DECL_NONLOCAL (new_label) = 1;
1525 elt = GGC_NEW (struct var_map_elt);
1526 elt->old = label;
1527 elt->new = new_label;
1528 *slot = elt;
1530 else
1531 new_label = elt->new;
1533 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
1534 field = get_nl_goto_field (i);
1535 x = get_frame_field (info, target_context, field, &wi->tsi);
1536 x = build_addr (x, target_context);
1537 x = tsi_gimplify_val (info, x, &wi->tsi);
1538 arg = tree_cons (NULL, x, NULL);
1539 x = build_addr (new_label, target_context);
1540 arg = tree_cons (NULL, x, arg);
1541 x = implicit_built_in_decls[BUILT_IN_NONLOCAL_GOTO];
1542 x = build_function_call_expr (x, arg);
1544 SET_EXPR_LOCUS (x, EXPR_LOCUS (tsi_stmt (wi->tsi)));
1545 *tsi_stmt_ptr (wi->tsi) = x;
1547 return NULL_TREE;
1550 /* Called via walk_function+walk_tree, rewrite all LABEL_EXPRs that
1551 are referenced via nonlocal goto from a nested function. The rewrite
1552 will involve installing a newly generated DECL_NONLOCAL label, and
1553 (potentially) a branch around the rtl gunk that is assumed to be
1554 attached to such a label. */
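/* Rough sketch of the rewrite (label names invented): for a label L
   targeted by a non-local goto,

         prev_stmt;
       L:
         ...

   becomes

         prev_stmt;
         goto L;            (emitted only if prev_stmt may fall through)
       L.nonlocal:          (DECL_NONLOCAL; receiver code attaches here)
       L:
         ...

   so the receiver sequence is bypassed on the ordinary control path.  */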
1556 static tree
1557 convert_nl_goto_receiver (tree *tp, int *walk_subtrees, void *data)
1559 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1560 struct nesting_info *info = wi->info;
1561 tree t = *tp, label, new_label, x;
1562 struct var_map_elt *elt, dummy;
1563 tree_stmt_iterator tmp_tsi;
1565 *walk_subtrees = 0;
1566 if (TREE_CODE (t) != LABEL_EXPR)
1567 return NULL_TREE;
1568 label = LABEL_EXPR_LABEL (t);
1570 dummy.old = label;
1571 elt = (struct var_map_elt *) htab_find (info->var_map, &dummy);
1572 if (!elt)
1573 return NULL_TREE;
1574 new_label = elt->new;
1576 /* If there's any possibility that the previous statement falls through,
1577 then we must branch around the new non-local label. */
1578 tmp_tsi = wi->tsi;
1579 tsi_prev (&tmp_tsi);
1580 if (tsi_end_p (tmp_tsi) || block_may_fallthru (tsi_stmt (tmp_tsi)))
1582 x = build1 (GOTO_EXPR, void_type_node, label);
1583 tsi_link_before (&wi->tsi, x, TSI_SAME_STMT);
1585 x = build1 (LABEL_EXPR, void_type_node, new_label);
1586 tsi_link_before (&wi->tsi, x, TSI_SAME_STMT);
1588 return NULL_TREE;
1591 /* Called via walk_function+walk_tree, rewrite all references to addresses
1592 of nested functions that require the use of trampolines. The rewrite
1593 will involve a reference to a trampoline generated for the occasion. */
1595 static tree
1596 convert_tramp_reference (tree *tp, int *walk_subtrees, void *data)
1598 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1599 struct nesting_info *info = wi->info, *i;
1600 tree t = *tp, decl, target_context, x, arg;
1602 *walk_subtrees = 0;
1603 switch (TREE_CODE (t))
1605 case ADDR_EXPR:
1606 /* Build
1607 T.1 = &CHAIN->tramp;
1608 T.2 = __builtin_adjust_trampoline (T.1);
1609 T.3 = (func_type)T.2;
1612 decl = TREE_OPERAND (t, 0);
1613 if (TREE_CODE (decl) != FUNCTION_DECL)
1614 break;
1616 /* Only need to process nested functions. */
1617 target_context = decl_function_context (decl);
1618 if (!target_context)
1619 break;
1621 /* If the nested function doesn't use a static chain, then
1622 it doesn't need a trampoline. */
1623 if (DECL_NO_STATIC_CHAIN (decl))
1624 break;
1626 /* Look up the immediate parent of the callee, as that's where
1627 we need to insert the trampoline. */
1628 for (i = info; i->context != target_context; i = i->outer)
1629 continue;
1630 x = lookup_tramp_for_decl (i, decl, INSERT);
1632 /* Compute the address of the field holding the trampoline. */
1633 x = get_frame_field (info, target_context, x, &wi->tsi);
1634 x = build_addr (x, target_context);
1635 x = tsi_gimplify_val (info, x, &wi->tsi);
1636 arg = tree_cons (NULL, x, NULL);
1638 /* Do machine-specific ugliness. Normally this will involve
1639 computing extra alignment, but it can really be anything. */
1640 x = implicit_built_in_decls[BUILT_IN_ADJUST_TRAMPOLINE];
1641 x = build_function_call_expr (x, arg);
1642 x = init_tmp_var (info, x, &wi->tsi);
1644 /* Cast back to the proper function type. */
1645 x = build1 (NOP_EXPR, TREE_TYPE (t), x);
1646 x = init_tmp_var (info, x, &wi->tsi);
1648 *tp = x;
1649 break;
1651 case CALL_EXPR:
1652 /* Only walk call arguments, lest we generate trampolines for
1653 direct calls. */
1654 walk_tree (&TREE_OPERAND (t, 1), convert_tramp_reference, wi, NULL);
1655 break;
1657 default:
1658 if (!IS_TYPE_OR_DECL_P (t))
1659 *walk_subtrees = 1;
1660 break;
1663 return NULL_TREE;
1666 /* Called via walk_function+walk_tree, rewrite all CALL_EXPRs that
1667 reference nested functions to make sure that the static chain is
1668 set up properly for the call. */
1670 static tree
1671 convert_call_expr (tree *tp, int *walk_subtrees, void *data)
1673 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1674 struct nesting_info *info = wi->info;
1675 tree t = *tp, decl, target_context;
1676 char save_static_chain_added;
1677 int i;
1679 *walk_subtrees = 0;
1680 switch (TREE_CODE (t))
1682 case CALL_EXPR:
1683 decl = get_callee_fndecl (t);
1684 if (!decl)
1685 break;
1686 target_context = decl_function_context (decl);
1687 if (target_context && !DECL_NO_STATIC_CHAIN (decl))
1689 TREE_OPERAND (t, 2)
1690 = get_static_chain (info, target_context, &wi->tsi);
1691 info->static_chain_added
1692 |= (1 << (info->context != target_context));
1694 break;
1696 case RETURN_EXPR:
1697 case GIMPLE_MODIFY_STMT:
1698 case WITH_SIZE_EXPR:
1699 /* Only RETURN_EXPR, GIMPLE_MODIFY_STMT and WITH_SIZE_EXPR may contain calls. */
1700 *walk_subtrees = 1;
1701 break;
1703 case OMP_PARALLEL:
1704 save_static_chain_added = info->static_chain_added;
1705 info->static_chain_added = 0;
1706 walk_body (convert_call_expr, info, &OMP_PARALLEL_BODY (t));
1707 for (i = 0; i < 2; i++)
1709 tree c, decl;
1710 if ((info->static_chain_added & (1 << i)) == 0)
1711 continue;
1712 decl = i ? get_chain_decl (info) : info->frame_decl;
1713 /* Don't add CHAIN.* or FRAME.* twice. */
1714 for (c = OMP_PARALLEL_CLAUSES (t); c; c = OMP_CLAUSE_CHAIN (c))
1715 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1716 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
1717 && OMP_CLAUSE_DECL (c) == decl)
1718 break;
1719 if (c == NULL)
1721 c = build_omp_clause (OMP_CLAUSE_FIRSTPRIVATE);
1722 OMP_CLAUSE_DECL (c) = decl;
1723 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (t);
1724 OMP_PARALLEL_CLAUSES (t) = c;
1727 info->static_chain_added |= save_static_chain_added;
1728 break;
1730 case OMP_FOR:
1731 case OMP_SECTIONS:
1732 case OMP_SECTION:
1733 case OMP_SINGLE:
1734 case OMP_MASTER:
1735 case OMP_ORDERED:
1736 case OMP_CRITICAL:
1737 walk_body (convert_call_expr, info, &OMP_BODY (t));
1738 break;
1740 default:
1741 break;
1744 return NULL_TREE;
1747 /* Walk the nesting tree starting with ROOT, depth first. Convert all
1748 trampolines and call expressions. On the way back up, determine if
1749 a nested function actually uses its static chain; if not, remember that. */
1751 static void
1752 convert_all_function_calls (struct nesting_info *root)
1756 if (root->inner)
1757 convert_all_function_calls (root->inner);
1759 walk_function (convert_tramp_reference, root);
1760 walk_function (convert_call_expr, root);
1762 /* If the function does not use a static chain, then remember that. */
1763 if (root->outer && !root->chain_decl && !root->chain_field)
1764 DECL_NO_STATIC_CHAIN (root->context) = 1;
1765 else
1766 gcc_assert (!DECL_NO_STATIC_CHAIN (root->context));
1768 root = root->next;
1770 while (root);
1773 /* Do "everything else" to clean up or complete state collected by the
1774 various walking passes -- lay out the types and decls, generate code
1775 to initialize the frame decl, store critical expressions in the
1776 struct function for rtl to find. */
1778 static void
1779 finalize_nesting_tree_1 (struct nesting_info *root)
1781 tree stmt_list = NULL;
1782 tree context = root->context;
1783 struct function *sf;
1785 /* If we created a non-local frame type or decl, we need to lay them
1786 out at this time. */
1787 if (root->frame_type)
1789 /* In some cases the frame type will trigger the -Wpadded warning.
1790 This is not helpful; suppress it. */
1791 int save_warn_padded = warn_padded;
1792 warn_padded = 0;
1793 layout_type (root->frame_type);
1794 warn_padded = save_warn_padded;
1795 layout_decl (root->frame_decl, 0);
1798 /* If any parameters were referenced non-locally, then we need to
1799 insert a copy. Likewise, if any variables were referenced by
1800 pointer, we need to initialize the address. */
1801 if (root->any_parm_remapped)
1803 tree p;
1804 for (p = DECL_ARGUMENTS (context); p ; p = TREE_CHAIN (p))
1806 tree field, x, y;
1808 field = lookup_field_for_decl (root, p, NO_INSERT);
1809 if (!field)
1810 continue;
1812 if (use_pointer_in_frame (p))
1813 x = build_addr (p, context);
1814 else
1815 x = p;
1817 y = build3 (COMPONENT_REF, TREE_TYPE (field),
1818 root->frame_decl, field, NULL_TREE);
1819 x = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (field), y, x);
1820 append_to_statement_list (x, &stmt_list);
1824 /* If a chain_field was created, then it needs to be initialized
1825 from chain_decl. */
1826 if (root->chain_field)
1828 tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
1829 root->frame_decl, root->chain_field, NULL_TREE);
1830 x = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (x), x, get_chain_decl (root));
1831 append_to_statement_list (x, &stmt_list);
1834 /* If trampolines were created, then we need to initialize them. */
1835 if (root->any_tramp_created)
1837 struct nesting_info *i;
1838 for (i = root->inner; i ; i = i->next)
1840 tree arg, x, field;
1842 field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
1843 if (!field)
1844 continue;
1846 if (DECL_NO_STATIC_CHAIN (i->context))
1847 x = null_pointer_node;
1848 else
1849 x = build_addr (root->frame_decl, context);
1850 arg = tree_cons (NULL, x, NULL);
1852 x = build_addr (i->context, context);
1853 arg = tree_cons (NULL, x, arg);
1855 x = build3 (COMPONENT_REF, TREE_TYPE (field),
1856 root->frame_decl, field, NULL_TREE);
1857 x = build_addr (x, context);
1858 arg = tree_cons (NULL, x, arg);
1860 x = implicit_built_in_decls[BUILT_IN_INIT_TRAMPOLINE];
1861 x = build_function_call_expr (x, arg);
1863 append_to_statement_list (x, &stmt_list);
1867 /* If we created initialization statements, insert them. */
1868 if (stmt_list)
1870 annotate_all_with_locus (&stmt_list,
1871 DECL_SOURCE_LOCATION (context));
1872 append_to_statement_list (BIND_EXPR_BODY (DECL_SAVED_TREE (context)),
1873 &stmt_list);
1874 BIND_EXPR_BODY (DECL_SAVED_TREE (context)) = stmt_list;
1877 /* If a chain_decl was created, then it needs to be registered with
1878 struct function so that it gets initialized from the static chain
1879 register at the beginning of the function. */
1880 sf = DECL_STRUCT_FUNCTION (root->context);
1881 sf->static_chain_decl = root->chain_decl;
1883 /* Similarly for the non-local goto save area. */
1884 if (root->nl_goto_field)
1886 sf->nonlocal_goto_save_area
1887 = get_frame_field (root, context, root->nl_goto_field, NULL);
1888 sf->has_nonlocal_label = 1;
1891 /* Make sure all new local variables get inserted into the
1892 proper BIND_EXPR. */
1893 if (root->new_local_var_chain)
1894 declare_vars (root->new_local_var_chain, DECL_SAVED_TREE (root->context),
1895 false);
1896 if (root->debug_var_chain)
1897 declare_vars (root->debug_var_chain, DECL_SAVED_TREE (root->context),
1898 true);
1900 /* Dump the translated tree function. */
1901 dump_function (TDI_nested, root->context);
1904 static void
1905 finalize_nesting_tree (struct nesting_info *root)
1909 if (root->inner)
1910 finalize_nesting_tree (root->inner);
1911 finalize_nesting_tree_1 (root);
1912 root = root->next;
1914 while (root);
1917 /* Unnest the nodes and pass them to cgraph. */
1919 static void
1920 unnest_nesting_tree_1 (struct nesting_info *root)
1922 struct cgraph_node *node = cgraph_node (root->context);
1924 /* For nested functions, update the cgraph to reflect unnesting.
1925 We also delay finalization of these functions until this point. */
1926 if (node->origin)
1928 cgraph_unnest_node (cgraph_node (root->context));
1929 cgraph_finalize_function (root->context, true);
1933 static void
1934 unnest_nesting_tree (struct nesting_info *root)
1938 if (root->inner)
1939 unnest_nesting_tree (root->inner);
1940 unnest_nesting_tree_1 (root);
1941 root = root->next;
1943 while (root);
1946 /* Free the data structures allocated during this pass. */
1948 static void
1949 free_nesting_tree (struct nesting_info *root)
1951 struct nesting_info *next;
1954 if (root->inner)
1955 free_nesting_tree (root->inner);
1956 htab_delete (root->var_map);
1957 next = root->next;
1958 ggc_free (root);
1959 root = next;
1961 while (root);
1964 static GTY(()) struct nesting_info *root;
1966 /* Main entry point for this pass. Process FNDECL and all of its nested
1967 subroutines and turn them into something less tightly bound. */
1969 void
1970 lower_nested_functions (tree fndecl)
1972 struct cgraph_node *cgn;
1974 /* If there are no nested functions, there's nothing to do. */
1975 cgn = cgraph_node (fndecl);
1976 if (!cgn->nested)
1977 return;
1979 root = create_nesting_tree (cgn);
1980 walk_all_functions (convert_nonlocal_reference, root);
1981 walk_all_functions (convert_local_reference, root);
1982 walk_all_functions (convert_nl_goto_reference, root);
1983 walk_all_functions (convert_nl_goto_receiver, root);
1984 convert_all_function_calls (root);
1985 finalize_nesting_tree (root);
1986 unnest_nesting_tree (root);
1987 free_nesting_tree (root);
1988 root = NULL;
1991 #include "gt-tree-nested.h"